cc-claw 0.18.5 → 0.19.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +2278 -204
- package/package.json +1 -1
package/dist/cli.js
CHANGED
|
@@ -33,7 +33,7 @@ var VERSION;
|
|
|
33
33
|
var init_version = __esm({
|
|
34
34
|
"src/version.ts"() {
|
|
35
35
|
"use strict";
|
|
36
|
-
VERSION = true ? "0.
|
|
36
|
+
VERSION = true ? "0.19.1" : (() => {
|
|
37
37
|
try {
|
|
38
38
|
return JSON.parse(readFileSync(join(process.cwd(), "package.json"), "utf-8")).version ?? "unknown";
|
|
39
39
|
} catch {
|
|
@@ -1690,6 +1690,34 @@ function initSchema(db3) {
|
|
|
1690
1690
|
} catch {
|
|
1691
1691
|
}
|
|
1692
1692
|
applySalienceDecay(db3);
|
|
1693
|
+
db3.exec(`
|
|
1694
|
+
CREATE TABLE IF NOT EXISTS ollama_servers (
|
|
1695
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
1696
|
+
name TEXT NOT NULL UNIQUE,
|
|
1697
|
+
host TEXT NOT NULL,
|
|
1698
|
+
port INTEGER NOT NULL DEFAULT 11434,
|
|
1699
|
+
api_key TEXT,
|
|
1700
|
+
status TEXT NOT NULL DEFAULT 'offline',
|
|
1701
|
+
last_health_check TEXT,
|
|
1702
|
+
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
|
1703
|
+
);
|
|
1704
|
+
`);
|
|
1705
|
+
db3.exec(`
|
|
1706
|
+
CREATE TABLE IF NOT EXISTS ollama_models (
|
|
1707
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
1708
|
+
server_id INTEGER NOT NULL REFERENCES ollama_servers(id) ON DELETE CASCADE,
|
|
1709
|
+
name TEXT NOT NULL,
|
|
1710
|
+
family TEXT,
|
|
1711
|
+
parameter_size TEXT,
|
|
1712
|
+
quantization TEXT,
|
|
1713
|
+
context_window INTEGER DEFAULT 4096,
|
|
1714
|
+
size_bytes INTEGER,
|
|
1715
|
+
digest TEXT,
|
|
1716
|
+
last_seen TEXT NOT NULL DEFAULT (datetime('now')),
|
|
1717
|
+
quality_score REAL,
|
|
1718
|
+
UNIQUE(server_id, name)
|
|
1719
|
+
);
|
|
1720
|
+
`);
|
|
1693
1721
|
initAgentTables(db3);
|
|
1694
1722
|
initMcpTables(db3);
|
|
1695
1723
|
initActivityTable(db3);
|
|
@@ -2028,7 +2056,7 @@ var init_embeddings = __esm({
|
|
|
2028
2056
|
return results[0];
|
|
2029
2057
|
}
|
|
2030
2058
|
async embedBatch(texts) {
|
|
2031
|
-
const url = `https://generativelanguage.googleapis.com/v1beta/models/${this.model}:batchEmbedContents
|
|
2059
|
+
const url = `https://generativelanguage.googleapis.com/v1beta/models/${this.model}:batchEmbedContents`;
|
|
2032
2060
|
const requests = texts.map((text) => ({
|
|
2033
2061
|
model: `models/${this.model}`,
|
|
2034
2062
|
content: { parts: [{ text }] },
|
|
@@ -2036,7 +2064,10 @@ var init_embeddings = __esm({
|
|
|
2036
2064
|
}));
|
|
2037
2065
|
const res = await fetch(url, {
|
|
2038
2066
|
method: "POST",
|
|
2039
|
-
headers: {
|
|
2067
|
+
headers: {
|
|
2068
|
+
"Content-Type": "application/json",
|
|
2069
|
+
"x-goog-api-key": this.apiKey
|
|
2070
|
+
},
|
|
2040
2071
|
body: JSON.stringify({ requests })
|
|
2041
2072
|
});
|
|
2042
2073
|
if (!res.ok) {
|
|
@@ -2485,6 +2516,7 @@ var chat_settings_exports = {};
|
|
|
2485
2516
|
__export(chat_settings_exports, {
|
|
2486
2517
|
ALL_TOOLS: () => ALL_TOOLS,
|
|
2487
2518
|
ChatSettingsSnapshot: () => ChatSettingsSnapshot,
|
|
2519
|
+
GLOBAL_SUMMARIZER_SENTINEL: () => GLOBAL_SUMMARIZER_SENTINEL,
|
|
2488
2520
|
clearAgentMode: () => clearAgentMode,
|
|
2489
2521
|
clearAllPaidSlots: () => clearAllPaidSlots,
|
|
2490
2522
|
clearChatPaidSlots: () => clearChatPaidSlots,
|
|
@@ -2504,6 +2536,7 @@ __export(chat_settings_exports, {
|
|
|
2504
2536
|
getCwd: () => getCwd,
|
|
2505
2537
|
getEnabledTools: () => getEnabledTools,
|
|
2506
2538
|
getExecMode: () => getExecMode,
|
|
2539
|
+
getGlobalSummarizer: () => getGlobalSummarizer,
|
|
2507
2540
|
getMode: () => getMode,
|
|
2508
2541
|
getModel: () => getModel,
|
|
2509
2542
|
getPendingEscalation: () => getPendingEscalation,
|
|
@@ -2521,6 +2554,7 @@ __export(chat_settings_exports, {
|
|
|
2521
2554
|
setBackend: () => setBackend,
|
|
2522
2555
|
setCwd: () => setCwd,
|
|
2523
2556
|
setExecMode: () => setExecMode,
|
|
2557
|
+
setGlobalSummarizer: () => setGlobalSummarizer,
|
|
2524
2558
|
setMode: () => setMode,
|
|
2525
2559
|
setModel: () => setModel,
|
|
2526
2560
|
setSessionLogEnabled: () => setSessionLogEnabled,
|
|
@@ -2755,11 +2789,29 @@ function clearThinkingLevel(chatId) {
|
|
|
2755
2789
|
getDb().prepare("DELETE FROM chat_thinking WHERE chat_id = ?").run(chatId);
|
|
2756
2790
|
}
|
|
2757
2791
|
function getSummarizer(chatId) {
  // Resolve the summarizer config for a chat: a per-chat row with any field
  // set wins, then the global row stored under the sentinel chat id, then an
  // empty config.
  const lookup = getDb().prepare(
    "SELECT backend, model FROM chat_summarizer WHERE chat_id = ?"
  );
  for (const key of [chatId, GLOBAL_SUMMARIZER_SENTINEL]) {
    const row = lookup.get(key);
    if (row && (row.backend || row.model)) return row;
  }
  return { backend: null, model: null };
}
|
|
2802
|
+
function getGlobalSummarizer() {
  // Read the global summarizer config, which is stored in chat_summarizer
  // under the sentinel chat id.
  const row = getDb()
    .prepare("SELECT backend, model FROM chat_summarizer WHERE chat_id = ?")
    .get(GLOBAL_SUMMARIZER_SENTINEL);
  if (row != null) return row;
  return { backend: null, model: null };
}
|
|
2808
|
+
function setGlobalSummarizer(backend2, model2) {
  // Persist the global summarizer config under the sentinel chat id,
  // overwriting any previous global setting.
  const sql = `
    INSERT INTO chat_summarizer (chat_id, backend, model)
    VALUES (?, ?, ?)
    ON CONFLICT(chat_id) DO UPDATE SET backend = ?, model = ?
  `;
  getDb()
    .prepare(sql)
    .run(GLOBAL_SUMMARIZER_SENTINEL, backend2, model2, backend2, model2);
}
|
|
2763
2815
|
function setSummarizer(chatId, backend2, model2) {
|
|
2764
2816
|
getDb().prepare(`
|
|
2765
2817
|
INSERT INTO chat_summarizer (chat_id, backend, model)
|
|
@@ -2836,7 +2888,7 @@ function clearChatPaidSlots(chatId) {
|
|
|
2836
2888
|
function clearAllPaidSlots() {
|
|
2837
2889
|
getDb().prepare("DELETE FROM chat_allow_paid_slots").run();
|
|
2838
2890
|
}
|
|
2839
|
-
var pendingEscalations, ESCALATION_TTL_MS, ALL_TOOLS, ChatSettingsSnapshot;
|
|
2891
|
+
var pendingEscalations, ESCALATION_TTL_MS, ALL_TOOLS, GLOBAL_SUMMARIZER_SENTINEL, ChatSettingsSnapshot;
|
|
2840
2892
|
var init_chat_settings = __esm({
|
|
2841
2893
|
"src/memory/chat-settings.ts"() {
|
|
2842
2894
|
"use strict";
|
|
@@ -2845,6 +2897,7 @@ var init_chat_settings = __esm({
|
|
|
2845
2897
|
pendingEscalations = /* @__PURE__ */ new Map();
|
|
2846
2898
|
ESCALATION_TTL_MS = 5 * 60 * 1e3;
|
|
2847
2899
|
ALL_TOOLS = ["Read", "Glob", "Grep", "Bash", "Write", "Edit", "WebFetch", "WebSearch", "Agent", "AskUserQuestion"];
|
|
2900
|
+
GLOBAL_SUMMARIZER_SENTINEL = "__global__";
|
|
2848
2901
|
ChatSettingsSnapshot = class {
|
|
2849
2902
|
constructor(chatId) {
|
|
2850
2903
|
this.chatId = chatId;
|
|
@@ -3754,6 +3807,7 @@ var store_exports5 = {};
|
|
|
3754
3807
|
__export(store_exports5, {
|
|
3755
3808
|
ALL_TOOLS: () => ALL_TOOLS,
|
|
3756
3809
|
ChatSettingsSnapshot: () => ChatSettingsSnapshot,
|
|
3810
|
+
GLOBAL_SUMMARIZER_SENTINEL: () => GLOBAL_SUMMARIZER_SENTINEL,
|
|
3757
3811
|
addBackendSlot: () => addBackendSlot,
|
|
3758
3812
|
addGeminiSlot: () => addGeminiSlot,
|
|
3759
3813
|
addHeartbeatWatch: () => addHeartbeatWatch,
|
|
@@ -3815,6 +3869,7 @@ __export(store_exports5, {
|
|
|
3815
3869
|
getExecMode: () => getExecMode,
|
|
3816
3870
|
getGeminiRotationMode: () => getGeminiRotationMode,
|
|
3817
3871
|
getGeminiSlots: () => getGeminiSlots,
|
|
3872
|
+
getGlobalSummarizer: () => getGlobalSummarizer,
|
|
3818
3873
|
getHeartbeatConfig: () => getHeartbeatConfig,
|
|
3819
3874
|
getJobById: () => getJobById,
|
|
3820
3875
|
getJobRuns: () => getJobRuns,
|
|
@@ -3892,6 +3947,7 @@ __export(store_exports5, {
|
|
|
3892
3947
|
setExecMode: () => setExecMode,
|
|
3893
3948
|
setGeminiRotationMode: () => setGeminiRotationMode,
|
|
3894
3949
|
setGeminiSlotEnabled: () => setGeminiSlotEnabled,
|
|
3950
|
+
setGlobalSummarizer: () => setGlobalSummarizer,
|
|
3895
3951
|
setHeartbeatConfig: () => setHeartbeatConfig,
|
|
3896
3952
|
setMode: () => setMode,
|
|
3897
3953
|
setModel: () => setModel,
|
|
@@ -5231,19 +5287,1043 @@ var init_cursor = __esm({
|
|
|
5231
5287
|
} : void 0
|
|
5232
5288
|
});
|
|
5233
5289
|
}
|
|
5234
|
-
return events;
|
|
5290
|
+
return events;
|
|
5291
|
+
}
|
|
5292
|
+
getSubagentInstructions() {
  // Prompt snippet advertising the backend's native Task-tool sub-agents
  // and their built-in agent types.
  return "You have native sub-agent support via the Task tool. Built-in agent types: generalPurpose (multi-step tasks), explore (fast codebase search), shell (command execution), code-reviewer. Use them for parallel work.";
}
|
|
5295
|
+
shouldKillOnResult() {
  // Do not force-kill the session when a result event arrives.
  return false;
}
|
|
5298
|
+
resolveModelWithThinking(model2, level) {
|
|
5299
|
+
if (!model2 || !level || level === "auto") return model2;
|
|
5300
|
+
const variants = THINKING_VARIANTS[model2];
|
|
5301
|
+
if (!variants) return model2;
|
|
5302
|
+
return variants[level] ?? model2;
|
|
5303
|
+
}
|
|
5304
|
+
};
|
|
5305
|
+
}
|
|
5306
|
+
});
|
|
5307
|
+
|
|
5308
|
+
// src/services/ollama/client.ts
|
|
5309
|
+
var client_exports = {};
|
|
5310
|
+
__export(client_exports, {
|
|
5311
|
+
chat: () => chat,
|
|
5312
|
+
generate: () => generate,
|
|
5313
|
+
listModels: () => listModels,
|
|
5314
|
+
ping: () => ping,
|
|
5315
|
+
runningModels: () => runningModels,
|
|
5316
|
+
showModel: () => showModel
|
|
5317
|
+
});
|
|
5318
|
+
function buildHeaders(apiKey) {
  // Standard JSON request headers, plus a bearer token when the Ollama
  // server is fronted by an auth proxy.
  const headers = { "Content-Type": "application/json" };
  if (apiKey) headers["Authorization"] = `Bearer ${apiKey}`;
  return headers;
}
|
|
5327
|
+
async function parseNdjsonStream(reader, onChunk, signal) {
  // Incrementally parse newline-delimited JSON from a ReadableStream reader,
  // calling onChunk for every complete JSON line. Malformed lines are
  // skipped (best-effort stream handling); any trailing partial line is
  // parsed once the stream ends. The reader lock is always released.
  const decoder = new TextDecoder();
  let pending = "";
  const emit = (text) => {
    const candidate = text.trim();
    if (!candidate) return;
    try {
      onChunk(JSON.parse(candidate));
    } catch {
      // ignore lines that are not valid JSON
    }
  };
  try {
    for (;;) {
      if (signal?.aborted) break;
      const { done, value } = await reader.read();
      if (done) break;
      pending += decoder.decode(value, { stream: true });
      const parts = pending.split("\n");
      pending = parts.pop() ?? "";
      parts.forEach(emit);
    }
    emit(pending);
  } finally {
    reader.releaseLock();
  }
}
|
|
5359
|
+
async function ping(baseUrl, opts) {
  // Liveness probe: GET the server root with a short timeout. Any network
  // failure, abort, or non-2xx status counts as "down" (false).
  const controller = new AbortController();
  const deadline = opts?.timeoutMs ?? DEFAULT_TIMEOUT_MS;
  const timer = setTimeout(() => controller.abort(), deadline);
  try {
    const res = await fetch(baseUrl, {
      signal: controller.signal,
      headers: buildHeaders(opts?.apiKey)
    });
    return res.ok;
  } catch {
    return false;
  } finally {
    clearTimeout(timer);
  }
}
|
|
5379
|
+
async function listModels(baseUrl, opts) {
  // Fetch the models installed on a server via GET /api/tags.
  const res = await fetch(`${baseUrl}/api/tags`, {
    signal: opts?.signal,
    headers: buildHeaders(opts?.apiKey)
  });
  if (res.ok) return res.json();
  throw new Error(`Ollama listModels failed: ${res.status} ${res.statusText}`);
}
|
|
5389
|
+
async function showModel(baseUrl, model2, opts) {
  // POST /api/show — fetch detailed metadata for a single model by name.
  const res = await fetch(`${baseUrl}/api/show`, {
    method: "POST",
    headers: buildHeaders(opts?.apiKey),
    body: JSON.stringify({ name: model2 }),
    signal: opts?.signal
  });
  if (res.ok) return res.json();
  throw new Error(`Ollama showModel(${model2}) failed: ${res.status} ${res.statusText}`);
}
|
|
5401
|
+
async function runningModels(baseUrl, opts) {
  // GET /api/ps — list the models currently loaded in server memory.
  const res = await fetch(`${baseUrl}/api/ps`, {
    signal: opts?.signal,
    headers: buildHeaders(opts?.apiKey)
  });
  if (res.ok) return res.json();
  throw new Error(`Ollama runningModels failed: ${res.status} ${res.statusText}`);
}
|
|
5411
|
+
async function generate(baseUrl, model2, prompt, opts) {
  // Stream a completion from POST {baseUrl}/api/generate (NDJSON stream)
  // and return { text, usage: { promptTokens, completionTokens } }.
  // Aborts via a single controller fed by three sources: the caller's
  // opts.signal, an overall deadline, and a separate first-byte deadline
  // (the message says the model may still be loading into VRAM).
  const overallTimeoutMs = opts?.timeoutMs ?? DEFAULT_GENERATE_TIMEOUT_MS;
  const controller = new AbortController();
  // Mirror the caller's signal onto our controller so one signal is passed
  // to fetch; handle the already-aborted case up front.
  if (opts?.signal) {
    if (opts.signal.aborted) {
      controller.abort(opts.signal.reason);
    } else {
      opts.signal.addEventListener("abort", () => controller.abort(opts.signal.reason), { once: true });
    }
  }
  const overallTimer = setTimeout(() => controller.abort("Overall timeout"), overallTimeoutMs);
  let firstByteReceived = false;
  const firstByteTimer = setTimeout(() => {
    // Fires only if no stream chunk has arrived yet.
    if (!firstByteReceived) {
      controller.abort("First byte timeout \u2014 model may be loading into VRAM");
    }
  }, FIRST_BYTE_TIMEOUT_MS);
  try {
    const body = {
      model: model2,
      prompt,
      stream: true
    };
    // Optional knobs: system prompt, temperature, and max tokens
    // (num_predict in Ollama's options object).
    if (opts?.systemPrompt) body.system = opts.systemPrompt;
    if (opts?.temperature != null || opts?.maxTokens != null) {
      const options = {};
      if (opts.temperature != null) options.temperature = opts.temperature;
      if (opts.maxTokens != null) options.num_predict = opts.maxTokens;
      body.options = options;
    }
    const res = await fetch(`${baseUrl}/api/generate`, {
      method: "POST",
      headers: buildHeaders(opts?.apiKey),
      body: JSON.stringify(body),
      signal: controller.signal
    });
    if (!res.ok) {
      // Best-effort read of the error body for the message.
      const errorText = await res.text().catch(() => "");
      throw new Error(`Ollama generate failed: ${res.status} ${res.statusText} \u2014 ${errorText}`);
    }
    if (!res.body) {
      throw new Error("Ollama generate: response body is null (streaming not supported?)");
    }
    const reader = res.body.getReader();
    let fullText = "";
    let promptTokens = 0;
    let completionTokens = 0;
    await parseNdjsonStream(
      reader,
      (chunk) => {
        // First chunk cancels the first-byte watchdog.
        if (!firstByteReceived) {
          firstByteReceived = true;
          clearTimeout(firstByteTimer);
        }
        // Accumulate text and forward each piece to the stream callback.
        if (chunk.response) {
          fullText += chunk.response;
          opts?.onStream?.(chunk.response);
        }
        // The final chunk carries token usage counters.
        if (chunk.done) {
          promptTokens = chunk.prompt_eval_count ?? 0;
          completionTokens = chunk.eval_count ?? 0;
        }
      },
      controller.signal
    );
    return {
      text: fullText,
      usage: { promptTokens, completionTokens }
    };
  } finally {
    // Always clear both timers, on success and on any throw.
    clearTimeout(overallTimer);
    clearTimeout(firstByteTimer);
  }
}
|
|
5485
|
+
async function chat(baseUrl, model2, messages, opts) {
  // Stream a chat completion from POST {baseUrl}/api/chat (NDJSON stream)
  // and return { text, usage: { promptTokens, completionTokens } }.
  // Same abort setup as generate(): caller signal + overall deadline +
  // first-byte deadline all feed one AbortController.
  const overallTimeoutMs = opts?.timeoutMs ?? DEFAULT_GENERATE_TIMEOUT_MS;
  const controller = new AbortController();
  if (opts?.signal) {
    if (opts.signal.aborted) {
      controller.abort(opts.signal.reason);
    } else {
      opts.signal.addEventListener("abort", () => controller.abort(opts.signal.reason), { once: true });
    }
  }
  const overallTimer = setTimeout(() => controller.abort("Overall timeout"), overallTimeoutMs);
  let firstByteReceived = false;
  const firstByteTimer = setTimeout(() => {
    // Fires only if no stream chunk has arrived yet.
    if (!firstByteReceived) {
      controller.abort("First byte timeout \u2014 model may be loading into VRAM");
    }
  }, FIRST_BYTE_TIMEOUT_MS);
  try {
    const body = {
      model: model2,
      messages,
      stream: true
    };
    // Optional sampling knobs map onto Ollama's options object.
    if (opts?.temperature != null || opts?.maxTokens != null) {
      const options = {};
      if (opts.temperature != null) options.temperature = opts.temperature;
      if (opts.maxTokens != null) options.num_predict = opts.maxTokens;
      body.options = options;
    }
    const res = await fetch(`${baseUrl}/api/chat`, {
      method: "POST",
      headers: buildHeaders(opts?.apiKey),
      body: JSON.stringify(body),
      signal: controller.signal
    });
    if (!res.ok) {
      // Best-effort read of the error body for the message.
      const errorText = await res.text().catch(() => "");
      throw new Error(`Ollama chat failed: ${res.status} ${res.statusText} \u2014 ${errorText}`);
    }
    if (!res.body) {
      throw new Error("Ollama chat: response body is null (streaming not supported?)");
    }
    const reader = res.body.getReader();
    let fullText = "";
    let promptTokens = 0;
    let completionTokens = 0;
    await parseNdjsonStream(
      reader,
      (chunk) => {
        // First chunk cancels the first-byte watchdog.
        if (!firstByteReceived) {
          firstByteReceived = true;
          clearTimeout(firstByteTimer);
        }
        // Chat chunks carry text under message.content (not response).
        if (chunk.message?.content) {
          fullText += chunk.message.content;
          opts?.onStream?.(chunk.message.content);
        }
        // The final chunk carries token usage counters.
        if (chunk.done) {
          promptTokens = chunk.prompt_eval_count ?? 0;
          completionTokens = chunk.eval_count ?? 0;
        }
      },
      controller.signal
    );
    return {
      text: fullText,
      usage: { promptTokens, completionTokens }
    };
  } finally {
    // Always clear both timers, on success and on any throw.
    clearTimeout(overallTimer);
    clearTimeout(firstByteTimer);
  }
}
|
|
5558
|
+
var DEFAULT_TIMEOUT_MS, DEFAULT_GENERATE_TIMEOUT_MS, FIRST_BYTE_TIMEOUT_MS;
|
|
5559
|
+
var init_client = __esm({
|
|
5560
|
+
"src/services/ollama/client.ts"() {
|
|
5561
|
+
"use strict";
|
|
5562
|
+
DEFAULT_TIMEOUT_MS = 1e4;
|
|
5563
|
+
DEFAULT_GENERATE_TIMEOUT_MS = 12e4;
|
|
5564
|
+
FIRST_BYTE_TIMEOUT_MS = 6e4;
|
|
5565
|
+
}
|
|
5566
|
+
});
|
|
5567
|
+
|
|
5568
|
+
// src/services/ollama/store.ts
|
|
5569
|
+
var store_exports6 = {};
|
|
5570
|
+
__export(store_exports6, {
|
|
5571
|
+
addServer: () => addServer,
|
|
5572
|
+
getAvailableModels: () => getAvailableModels,
|
|
5573
|
+
getBaseUrl: () => getBaseUrl,
|
|
5574
|
+
getModelByName: () => getModelByName,
|
|
5575
|
+
getServer: () => getServer,
|
|
5576
|
+
getServerById: () => getServerById,
|
|
5577
|
+
listModels: () => listModels2,
|
|
5578
|
+
listServers: () => listServers,
|
|
5579
|
+
removeServer: () => removeServer,
|
|
5580
|
+
removeStaleModels: () => removeStaleModels,
|
|
5581
|
+
updateModelContextWindow: () => updateModelContextWindow,
|
|
5582
|
+
updateModelQualityScore: () => updateModelQualityScore,
|
|
5583
|
+
updateServerStatus: () => updateServerStatus,
|
|
5584
|
+
upsertModel: () => upsertModel
|
|
5585
|
+
});
|
|
5586
|
+
function addServer(name, host, port, apiKey) {
  // Register a new Ollama server (initially offline) and return the
  // freshly-built record including its rowid.
  const insert = getDb().prepare(`
    INSERT INTO ollama_servers (name, host, port, api_key, status)
    VALUES (?, ?, ?, ?, 'offline')
  `);
  const result = insert.run(name, host, port, apiKey ?? null);
  return {
    id: Number(result.lastInsertRowid),
    name,
    host,
    port,
    apiKey: apiKey ?? null,
    status: "offline",
    lastHealthCheck: null,
    createdAt: new Date().toISOString()
  };
}
|
|
5603
|
+
function removeServer(name) {
  // Delete a server by name; returns true when a row was actually removed.
  const { changes } = getDb()
    .prepare("DELETE FROM ollama_servers WHERE name = ?")
    .run(name);
  return changes > 0;
}
|
|
5609
|
+
function getServer(name) {
  // Look up a server record by its unique name; undefined when absent.
  const row = getDb()
    .prepare("SELECT id, name, host, port, api_key, status, last_health_check, created_at FROM ollama_servers WHERE name = ?")
    .get(name);
  if (!row) return void 0;
  return mapServerRow(row);
}
|
|
5615
|
+
function getServerById(id) {
  // Look up a server record by primary key; undefined when absent.
  const row = getDb()
    .prepare("SELECT id, name, host, port, api_key, status, last_health_check, created_at FROM ollama_servers WHERE id = ?")
    .get(id);
  if (!row) return void 0;
  return mapServerRow(row);
}
|
|
5621
|
+
function listServers() {
  // All registered servers, ordered by name.
  const stmt = getDb().prepare(
    "SELECT id, name, host, port, api_key, status, last_health_check, created_at FROM ollama_servers ORDER BY name"
  );
  return stmt.all().map((row) => mapServerRow(row));
}
|
|
5627
|
+
function updateServerStatus(serverId, status) {
  // Record the latest health-check outcome and bump its timestamp.
  const sql = "UPDATE ollama_servers SET status = ?, last_health_check = datetime('now') WHERE id = ?";
  getDb().prepare(sql).run(status, serverId);
}
|
|
5632
|
+
function getBaseUrl(server) {
  // Servers are addressed over plain HTTP on their configured host/port.
  const { host, port } = server;
  return `http://${host}:${port}`;
}
|
|
5635
|
+
function upsertModel(serverId, model2) {
  // Insert or refresh a model row for a server. On conflict the stored
  // context_window is kept when the incoming one is null (COALESCE), and
  // last_seen is always bumped to now.
  const sql = `
    INSERT INTO ollama_models (server_id, name, family, parameter_size, quantization, context_window, size_bytes, digest, last_seen)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?, datetime('now'))
    ON CONFLICT(server_id, name) DO UPDATE SET
      family = excluded.family,
      parameter_size = excluded.parameter_size,
      quantization = excluded.quantization,
      context_window = COALESCE(excluded.context_window, context_window),
      size_bytes = excluded.size_bytes,
      digest = excluded.digest,
      last_seen = datetime('now')
  `;
  const params = [
    serverId,
    model2.name,
    model2.family ?? null,
    model2.parameterSize ?? null,
    model2.quantization ?? null,
    model2.contextWindow ?? 4096,
    model2.sizeBytes ?? 0,
    model2.digest ?? ""
  ];
  getDb().prepare(sql).run(...params);
}
|
|
5658
|
+
function listModels2(serverId) {
  // Models for one server (when serverId is given) or across all servers.
  const cols = "id, server_id, name, family, parameter_size, quantization, context_window, size_bytes, digest, last_seen, quality_score";
  const db3 = getDb();
  const rows = serverId != null
    ? db3.prepare(`SELECT ${cols} FROM ollama_models WHERE server_id = ? ORDER BY name`).all(serverId)
    : db3.prepare(`SELECT ${cols} FROM ollama_models ORDER BY server_id, name`).all();
  return rows.map(mapModelRow);
}
|
|
5672
|
+
function getModelByName(name, serverId) {
  // Find a model by name. With serverId: exact (server, name) lookup.
  // Without: the best-quality copy hosted on any online server.
  const db3 = getDb();
  let row;
  if (serverId != null) {
    row = db3
      .prepare(
        "SELECT id, server_id, name, family, parameter_size, quantization, context_window, size_bytes, digest, last_seen, quality_score FROM ollama_models WHERE name = ? AND server_id = ?"
      )
      .get(name, serverId);
  } else {
    const sql = `
      SELECT m.id, m.server_id, m.name, m.family, m.parameter_size, m.quantization, m.context_window, m.size_bytes, m.digest, m.last_seen, m.quality_score
      FROM ollama_models m
      JOIN ollama_servers s ON s.id = m.server_id
      WHERE m.name = ? AND s.status = 'online'
      ORDER BY m.quality_score DESC NULLS LAST
      LIMIT 1
    `;
    row = db3.prepare(sql).get(name);
  }
  return row == null ? void 0 : mapModelRow(row);
}
|
|
5691
|
+
function updateModelQualityScore(modelId, score) {
  // Persist a new quality score for a model row.
  const sql = "UPDATE ollama_models SET quality_score = ? WHERE id = ?";
  getDb().prepare(sql).run(score, modelId);
}
|
|
5696
|
+
function updateModelContextWindow(modelId, contextWindow) {
  // Persist a (re)measured context window for a model row.
  const sql = "UPDATE ollama_models SET context_window = ? WHERE id = ?";
  getDb().prepare(sql).run(contextWindow, modelId);
}
|
|
5701
|
+
function removeStaleModels(serverId, currentNames) {
  // Drop models the server no longer reports. An empty name list means the
  // server has nothing installed, so every row for it is deleted.
  // Returns the number of deleted rows.
  const db3 = getDb();
  if (currentNames.length === 0) {
    return db3
      .prepare("DELETE FROM ollama_models WHERE server_id = ?")
      .run(serverId).changes;
  }
  const placeholders = currentNames.map(() => "?").join(", ");
  const stmt = db3.prepare(
    `DELETE FROM ollama_models WHERE server_id = ? AND name NOT IN (${placeholders})`
  );
  return stmt.run(serverId, ...currentNames).changes;
}
|
|
5714
|
+
function getAvailableModels() {
  // Every model hosted on a currently-online server, best quality first
  // (larger models break ties).
  const sql = `
    SELECT m.id, m.server_id, m.name, m.family, m.parameter_size, m.quantization, m.context_window, m.size_bytes, m.digest, m.last_seen, m.quality_score
    FROM ollama_models m
    JOIN ollama_servers s ON s.id = m.server_id
    WHERE s.status = 'online'
    ORDER BY m.quality_score DESC NULLS LAST, m.size_bytes DESC
  `;
  return getDb().prepare(sql).all().map(mapModelRow);
}
|
|
5724
|
+
function mapServerRow(row) {
  // snake_case DB columns -> camelCase server record.
  const { id, name, host, port } = row;
  return {
    id,
    name,
    host,
    port,
    apiKey: row.api_key,
    status: row.status,
    lastHealthCheck: row.last_health_check,
    createdAt: row.created_at
  };
}
|
|
5736
|
+
function mapModelRow(row) {
  // snake_case DB columns -> camelCase model record.
  const { id, name, family, quantization, digest } = row;
  return {
    id,
    serverId: row.server_id,
    name,
    family,
    parameterSize: row.parameter_size,
    quantization,
    contextWindow: row.context_window,
    sizeBytes: row.size_bytes,
    digest,
    lastSeen: row.last_seen,
    qualityScore: row.quality_score
  };
}
|
|
5751
|
+
var init_store6 = __esm({
|
|
5752
|
+
"src/services/ollama/store.ts"() {
|
|
5753
|
+
"use strict";
|
|
5754
|
+
init_store5();
|
|
5755
|
+
}
|
|
5756
|
+
});
|
|
5757
|
+
|
|
5758
|
+
// src/services/ollama/router.ts
|
|
5759
|
+
async function routeModelForTask(task, preferredModel) {
  // Pick the best (model, server) pair for a task category.
  // Priority: user-pinned model whose server is online > ranked
  // task-appropriate candidates > any available model as a last resort.
  // Returns undefined when nothing usable is online.
  if (preferredModel) {
    const model2 = getModelByName(preferredModel);
    if (model2) {
      const server2 = getServerById(model2.serverId);
      if (server2 && server2.status === "online") {
        return { model: model2, server: server2, reason: `User-pinned model "${preferredModel}"` };
      }
    }
  }
  const available = getAvailableModels();
  if (available.length === 0) return void 0;
  const loadedNames = await getLoadedModelNames();
  const candidates = filterByTask(available, task);
  if (candidates.length === 0) {
    // Fix: removed an unreachable duplicate `available.length === 0` guard
    // here — available is already known non-empty (checked above).
    const fallback = available[0];
    const server2 = getServerById(fallback.serverId);
    return server2 ? { model: fallback, server: server2, reason: "Fallback \u2014 no task-appropriate models" } : void 0;
  }
  const ranked = rankCandidates(candidates, task, loadedNames);
  const best = ranked[0];
  const server = getServerById(best.model.serverId);
  if (!server) return void 0;
  return { model: best.model, server, reason: best.reason };
}
|
|
5785
|
+
function routeModelForTaskSync(task, preferredModel) {
  // Synchronous variant of routeModelForTask: skips the /api/ps probe, so
  // no "already loaded" bonus is applied during ranking.
  if (preferredModel) {
    const pinned = getModelByName(preferredModel);
    if (pinned) {
      const pinnedServer = getServerById(pinned.serverId);
      if (pinnedServer && pinnedServer.status === "online") {
        return { model: pinned, server: pinnedServer, reason: `User-pinned model "${preferredModel}"` };
      }
    }
  }
  const available = getAvailableModels();
  if (available.length === 0) return void 0;
  const candidates = filterByTask(available, task);
  const pool = candidates.length > 0 ? candidates : available;
  const [best] = rankCandidates(pool, task, new Set());
  const server = getServerById(best.model.serverId);
  return server ? { model: best.model, server, reason: best.reason } : void 0;
}
|
|
5805
|
+
async function getLoadedModelNames() {
  // Ask every online server which models are resident in memory (/api/ps)
  // and collect their names. Unreachable servers are skipped silently.
  const loaded = new Set();
  for (const server of listServers()) {
    if (server.status !== "online") continue;
    try {
      const ps = await runningModels(getBaseUrl(server), { apiKey: server.apiKey });
      for (const m of ps.models) loaded.add(m.name);
    } catch {
      // best-effort: a failing server just contributes nothing
    }
  }
  if (loaded.size > 0) {
    log(`[ollama/router] Loaded models: ${[...loaded].join(", ")}`);
  }
  return loaded;
}
|
|
5825
|
+
function filterByTask(models, task) {
  // Narrow the candidate pool by task-specific constraints. "chat" and any
  // unknown task accept everything (a copy, so callers cannot mutate input).
  // Models whose parameter size cannot be parsed are never excluded by size.
  const paramsOf = (m) => parseParamSize(m.parameterSize);
  if (task === "summarize") {
    return models.filter((m) => {
      const p = paramsOf(m);
      const ctxOk = (m.contextWindow ?? 4096) >= MIN_SUMMARIZE_CTX;
      return ctxOk && (p === null || p <= MAX_SUMMARIZER_PARAMS_B);
    });
  }
  if (task === "classify") {
    return models.filter((m) => {
      const p = paramsOf(m);
      return p === null || p <= MAX_CLASSIFY_PARAMS_B;
    });
  }
  if (task === "cron") {
    return models.filter((m) => {
      const p = paramsOf(m);
      const smallEnough = p === null || p <= MAX_SUMMARIZER_PARAMS_B;
      const goodEnough = m.qualityScore === null || m.qualityScore >= MIN_CRON_QUALITY;
      return smallEnough && goodEnough;
    });
  }
  return [...models];
}
|
|
5852
|
+
function rankCandidates(models, task, loadedNames) {
  // Score each candidate: stored quality dominates (x100), models already
  // loaded in memory get a flat bonus, and a task-specific term breaks
  // ties. Returns entries sorted best-first with a human-readable reason.
  const taskTerm = (m) => {
    const params = parseParamSize(m.parameterSize) ?? 0;
    switch (task) {
      case "summarize":
        return (m.contextWindow ?? 4096) / 1e3;
      case "chat":
        return params * 2;
      case "cron":
        return Math.max(0, 20 - params);
      case "classify":
        return Math.max(0, 30 - params * 3);
      default:
        return 0;
    }
  };
  const scored = models.map((model2) => {
    const reasons = [];
    let score = 0;
    if (model2.qualityScore !== null) {
      score += model2.qualityScore * 100;
      reasons.push(`quality=${model2.qualityScore.toFixed(2)}`);
    }
    if (loadedNames.has(model2.name)) {
      score += LOADED_BONUS;
      reasons.push("loaded");
    }
    score += taskTerm(model2);
    return {
      model: model2,
      score,
      reason: reasons.length > 0 ? reasons.join(", ") : "default"
    };
  });
  scored.sort((a, b) => b.score - a.score);
  return scored;
}
|
|
5887
|
+
function parseParamSize(size) {
|
|
5888
|
+
if (!size) return null;
|
|
5889
|
+
const match = size.match(/([\d.]+)\s*[Bb]/);
|
|
5890
|
+
if (!match) return null;
|
|
5891
|
+
return parseFloat(match[1]);
|
|
5892
|
+
}
|
|
5893
|
+
var MAX_SUMMARIZER_PARAMS_B, MAX_CLASSIFY_PARAMS_B, MIN_SUMMARIZE_CTX, MIN_CRON_QUALITY, LOADED_BONUS;
|
|
5894
|
+
var init_router = __esm({
|
|
5895
|
+
"src/services/ollama/router.ts"() {
|
|
5896
|
+
"use strict";
|
|
5897
|
+
init_store6();
|
|
5898
|
+
init_client();
|
|
5899
|
+
init_log();
|
|
5900
|
+
MAX_SUMMARIZER_PARAMS_B = 14;
|
|
5901
|
+
MAX_CLASSIFY_PARAMS_B = 9;
|
|
5902
|
+
MIN_SUMMARIZE_CTX = 4096;
|
|
5903
|
+
MIN_CRON_QUALITY = 0.7;
|
|
5904
|
+
LOADED_BONUS = 100;
|
|
5905
|
+
}
|
|
5906
|
+
});
|
|
5907
|
+
|
|
5908
|
+
// src/services/ollama/quality-gate.ts
|
|
5909
|
+
var quality_gate_exports = {};
|
|
5910
|
+
__export(quality_gate_exports, {
|
|
5911
|
+
ensureQualityScore: () => ensureQualityScore,
|
|
5912
|
+
runQualityGate: () => runQualityGate
|
|
5913
|
+
});
|
|
5914
|
+
async function runQualityGate(model2, server) {
|
|
5915
|
+
const baseUrl = getBaseUrl(server);
|
|
5916
|
+
const startTime = Date.now();
|
|
5917
|
+
try {
|
|
5918
|
+
const result = await generate(baseUrl, model2.name, TEST_PROMPT, {
|
|
5919
|
+
timeoutMs: TEST_TIMEOUT_MS,
|
|
5920
|
+
temperature: 0.3,
|
|
5921
|
+
// Low temp for deterministic test
|
|
5922
|
+
maxTokens: 300,
|
|
5923
|
+
// Short response expected
|
|
5924
|
+
apiKey: server.apiKey
|
|
5925
|
+
});
|
|
5926
|
+
const responseTimeMs = Date.now() - startTime;
|
|
5927
|
+
const response = result.text.trim();
|
|
5928
|
+
if (response.length < MIN_RESPONSE_LENGTH) {
|
|
5929
|
+
const score = 0.1;
|
|
5930
|
+
updateModelQualityScore(model2.id, score);
|
|
5931
|
+
return {
|
|
5932
|
+
passed: false,
|
|
5933
|
+
score,
|
|
5934
|
+
responseLength: response.length,
|
|
5935
|
+
keywordsFound: [],
|
|
5936
|
+
keywordsMissed: EXPECTED_KEYWORDS,
|
|
5937
|
+
responseTimeMs,
|
|
5938
|
+
hasRepetition: false,
|
|
5939
|
+
error: `Response too short (${response.length} chars, min ${MIN_RESPONSE_LENGTH})`
|
|
5940
|
+
};
|
|
5941
|
+
}
|
|
5942
|
+
const lowerResponse = response.toLowerCase();
|
|
5943
|
+
const found = EXPECTED_KEYWORDS.filter((k) => lowerResponse.includes(k));
|
|
5944
|
+
const missed = EXPECTED_KEYWORDS.filter((k) => !lowerResponse.includes(k));
|
|
5945
|
+
const keywordScore = found.length / EXPECTED_KEYWORDS.length * 0.5;
|
|
5946
|
+
const timeScore = responseTimeMs < 1e4 ? 0.3 : Math.max(0, 0.3 * (1 - (responseTimeMs - 1e4) / 2e4));
|
|
5947
|
+
const hasRepetition = detectRepetition(response);
|
|
5948
|
+
const repetitionScore = hasRepetition ? 0 : 0.2;
|
|
5949
|
+
const totalScore = parseFloat((keywordScore + timeScore + repetitionScore).toFixed(3));
|
|
5950
|
+
const passed = found.length >= 2 && response.length >= MIN_RESPONSE_LENGTH && !hasRepetition;
|
|
5951
|
+
updateModelQualityScore(model2.id, totalScore);
|
|
5952
|
+
log(`[ollama/quality] ${model2.name}: score=${totalScore} (keywords=${found.length}/${EXPECTED_KEYWORDS.length}, time=${responseTimeMs}ms, repetition=${hasRepetition})`);
|
|
5953
|
+
return {
|
|
5954
|
+
passed,
|
|
5955
|
+
score: totalScore,
|
|
5956
|
+
responseLength: response.length,
|
|
5957
|
+
keywordsFound: found,
|
|
5958
|
+
keywordsMissed: missed,
|
|
5959
|
+
responseTimeMs,
|
|
5960
|
+
hasRepetition
|
|
5961
|
+
};
|
|
5962
|
+
} catch (err) {
|
|
5963
|
+
const errorMsg = err instanceof Error ? err.message : String(err);
|
|
5964
|
+
warn(`[ollama/quality] ${model2.name} test failed: ${errorMsg}`);
|
|
5965
|
+
updateModelQualityScore(model2.id, 0);
|
|
5966
|
+
return {
|
|
5967
|
+
passed: false,
|
|
5968
|
+
score: 0,
|
|
5969
|
+
responseLength: 0,
|
|
5970
|
+
keywordsFound: [],
|
|
5971
|
+
keywordsMissed: EXPECTED_KEYWORDS,
|
|
5972
|
+
responseTimeMs: Date.now() - startTime,
|
|
5973
|
+
hasRepetition: false,
|
|
5974
|
+
error: errorMsg
|
|
5975
|
+
};
|
|
5976
|
+
}
|
|
5977
|
+
}
|
|
5978
|
+
async function ensureQualityScore(model2, server) {
|
|
5979
|
+
if (model2.qualityScore !== null) return model2.qualityScore;
|
|
5980
|
+
const result = await runQualityGate(model2, server);
|
|
5981
|
+
return result.score;
|
|
5982
|
+
}
|
|
5983
|
+
function detectRepetition(text) {
|
|
5984
|
+
const tail = text.slice(-200);
|
|
5985
|
+
for (let len = 6; len <= 50; len++) {
|
|
5986
|
+
const pattern = tail.slice(-len);
|
|
5987
|
+
if (!pattern.trim()) continue;
|
|
5988
|
+
let count = 0;
|
|
5989
|
+
let pos = 0;
|
|
5990
|
+
while (pos < tail.length) {
|
|
5991
|
+
const idx = tail.indexOf(pattern, pos);
|
|
5992
|
+
if (idx === -1) break;
|
|
5993
|
+
count++;
|
|
5994
|
+
pos = idx + 1;
|
|
5995
|
+
}
|
|
5996
|
+
if (count >= 3) return true;
|
|
5997
|
+
}
|
|
5998
|
+
return false;
|
|
5999
|
+
}
|
|
6000
|
+
var TEST_PROMPT, EXPECTED_KEYWORDS, TEST_TIMEOUT_MS, MIN_RESPONSE_LENGTH;
|
|
6001
|
+
var init_quality_gate = __esm({
|
|
6002
|
+
"src/services/ollama/quality-gate.ts"() {
|
|
6003
|
+
"use strict";
|
|
6004
|
+
init_client();
|
|
6005
|
+
init_store6();
|
|
6006
|
+
init_log();
|
|
6007
|
+
TEST_PROMPT = `Summarize the following text in 2-3 sentences:
|
|
6008
|
+
|
|
6009
|
+
The James Webb Space Telescope (JWST) launched in December 2021 and has revolutionized astronomy by capturing infrared images of the universe with unprecedented clarity. Its primary mirror, spanning 6.5 meters, is significantly larger than Hubble's 2.4-meter mirror, allowing it to observe galaxies formed shortly after the Big Bang. JWST operates at the second Lagrange point (L2), approximately 1.5 million kilometers from Earth, where it maintains a stable thermal environment shielded from solar radiation. Early discoveries include detailed atmospheric analysis of exoplanets, revealing the presence of water vapor and carbon dioxide in their atmospheres. The telescope has also identified some of the most distant galaxies ever observed, challenging existing models of galaxy formation in the early universe.`;
|
|
6010
|
+
EXPECTED_KEYWORDS = ["webb", "telescope", "infrared", "galaxies"];
|
|
6011
|
+
TEST_TIMEOUT_MS = 3e4;
|
|
6012
|
+
MIN_RESPONSE_LENGTH = 80;
|
|
6013
|
+
}
|
|
6014
|
+
});
|
|
6015
|
+
|
|
6016
|
+
// src/services/ollama/service.ts
|
|
6017
|
+
var service_exports = {};
|
|
6018
|
+
__export(service_exports, {
|
|
6019
|
+
addServer: () => addServer2,
|
|
6020
|
+
discoverModels: () => discoverModels,
|
|
6021
|
+
getLoadedModels: () => getLoadedModels,
|
|
6022
|
+
getModelForTask: () => getModelForTask,
|
|
6023
|
+
getModelForTaskAsync: () => getModelForTaskAsync,
|
|
6024
|
+
getServer: () => getServer2,
|
|
6025
|
+
getServerForModel: () => getServerForModel,
|
|
6026
|
+
healthCheck: () => healthCheck,
|
|
6027
|
+
isAnyServerOnline: () => isAnyServerOnline,
|
|
6028
|
+
listServers: () => listServers2,
|
|
6029
|
+
removeServer: () => removeServer2,
|
|
6030
|
+
runQualityGateTest: () => runQualityGateTest
|
|
6031
|
+
});
|
|
6032
|
+
function addServer2(name, host, port = 11434, apiKey) {
|
|
6033
|
+
const existing = getServer(name);
|
|
6034
|
+
if (existing) throw new Error(`Server "${name}" already exists`);
|
|
6035
|
+
const server = addServer(name, host, port, apiKey);
|
|
6036
|
+
log(`[ollama] Added server: ${name} (${host}:${port})`);
|
|
6037
|
+
return server;
|
|
6038
|
+
}
|
|
6039
|
+
function removeServer2(name) {
|
|
6040
|
+
const removed = removeServer(name);
|
|
6041
|
+
if (removed) log(`[ollama] Removed server: ${name}`);
|
|
6042
|
+
return removed;
|
|
6043
|
+
}
|
|
6044
|
+
function listServers2() {
|
|
6045
|
+
return listServers();
|
|
6046
|
+
}
|
|
6047
|
+
function getServer2(name) {
|
|
6048
|
+
return getServer(name);
|
|
6049
|
+
}
|
|
6050
|
+
async function healthCheck(serverName) {
|
|
6051
|
+
const servers = serverName ? [getServer(serverName)].filter(Boolean) : listServers();
|
|
6052
|
+
if (servers.length === 0) {
|
|
6053
|
+
warn("[ollama] No servers registered");
|
|
6054
|
+
return [];
|
|
6055
|
+
}
|
|
6056
|
+
const results = [];
|
|
6057
|
+
for (const server of servers) {
|
|
6058
|
+
const baseUrl = getBaseUrl(server);
|
|
6059
|
+
const ok = await ping(baseUrl, {
|
|
6060
|
+
timeoutMs: HEALTH_PING_TIMEOUT_MS,
|
|
6061
|
+
apiKey: server.apiKey
|
|
6062
|
+
});
|
|
6063
|
+
const newStatus = ok ? "online" : "offline";
|
|
6064
|
+
updateServerStatus(server.id, newStatus);
|
|
6065
|
+
results.push({ ...server, status: newStatus });
|
|
6066
|
+
if (newStatus !== server.status) {
|
|
6067
|
+
log(`[ollama] Server ${server.name}: ${server.status} \u2192 ${newStatus}`);
|
|
6068
|
+
}
|
|
6069
|
+
}
|
|
6070
|
+
return results;
|
|
6071
|
+
}
|
|
6072
|
+
async function isAnyServerOnline() {
|
|
6073
|
+
const servers = listServers();
|
|
6074
|
+
if (servers.length === 0) return false;
|
|
6075
|
+
for (const server of servers) {
|
|
6076
|
+
const baseUrl = getBaseUrl(server);
|
|
6077
|
+
const ok = await ping(baseUrl, {
|
|
6078
|
+
timeoutMs: 2e3,
|
|
6079
|
+
apiKey: server.apiKey
|
|
6080
|
+
});
|
|
6081
|
+
if (ok) {
|
|
6082
|
+
if (server.status !== "online") {
|
|
6083
|
+
updateServerStatus(server.id, "online");
|
|
6084
|
+
}
|
|
6085
|
+
return true;
|
|
6086
|
+
}
|
|
6087
|
+
}
|
|
6088
|
+
return false;
|
|
6089
|
+
}
|
|
6090
|
+
async function discoverModels(serverName) {
|
|
6091
|
+
const servers = serverName ? [getServer(serverName)].filter(Boolean) : listServers();
|
|
6092
|
+
const allDiscovered = [];
|
|
6093
|
+
for (const server of servers) {
|
|
6094
|
+
const baseUrl = getBaseUrl(server);
|
|
6095
|
+
try {
|
|
6096
|
+
const tags = await listModels(baseUrl, { apiKey: server.apiKey });
|
|
6097
|
+
const modelNames = [];
|
|
6098
|
+
for (const m of tags.models) {
|
|
6099
|
+
modelNames.push(m.name);
|
|
6100
|
+
let contextWindow = 4096;
|
|
6101
|
+
try {
|
|
6102
|
+
const showData = await showModel(baseUrl, m.name, { apiKey: server.apiKey });
|
|
6103
|
+
contextWindow = extractContextWindow(showData);
|
|
6104
|
+
} catch {
|
|
6105
|
+
}
|
|
6106
|
+
upsertModel(server.id, {
|
|
6107
|
+
name: m.name,
|
|
6108
|
+
family: m.details.family ?? null,
|
|
6109
|
+
parameterSize: m.details.parameter_size ?? null,
|
|
6110
|
+
quantization: m.details.quantization_level ?? null,
|
|
6111
|
+
contextWindow,
|
|
6112
|
+
sizeBytes: m.size,
|
|
6113
|
+
digest: m.digest
|
|
6114
|
+
});
|
|
6115
|
+
}
|
|
6116
|
+
const staleCount = removeStaleModels(server.id, modelNames);
|
|
6117
|
+
if (staleCount > 0) {
|
|
6118
|
+
log(`[ollama] Removed ${staleCount} stale model(s) from ${server.name}`);
|
|
6119
|
+
}
|
|
6120
|
+
updateServerStatus(server.id, "online");
|
|
6121
|
+
log(`[ollama] Discovered ${modelNames.length} model(s) on ${server.name}: ${modelNames.join(", ")}`);
|
|
6122
|
+
} catch (err) {
|
|
6123
|
+
error(`[ollama] Failed to discover models on ${server.name}:`, err);
|
|
6124
|
+
updateServerStatus(server.id, "offline");
|
|
6125
|
+
}
|
|
6126
|
+
}
|
|
6127
|
+
return getAvailableModels();
|
|
6128
|
+
}
|
|
6129
|
+
async function getLoadedModels(serverName) {
|
|
6130
|
+
const servers = serverName ? [getServer(serverName)].filter(Boolean) : listServers().filter((s) => s.status === "online");
|
|
6131
|
+
const loaded = [];
|
|
6132
|
+
for (const server of servers) {
|
|
6133
|
+
try {
|
|
6134
|
+
const baseUrl = getBaseUrl(server);
|
|
6135
|
+
const ps = await runningModels(baseUrl, { apiKey: server.apiKey });
|
|
6136
|
+
for (const m of ps.models) {
|
|
6137
|
+
loaded.push({ server: server.name, modelName: m.name, sizeVram: m.size_vram });
|
|
6138
|
+
}
|
|
6139
|
+
} catch {
|
|
6140
|
+
}
|
|
6141
|
+
}
|
|
6142
|
+
return loaded;
|
|
6143
|
+
}
|
|
6144
|
+
function getModelForTask(task, preferredModel) {
|
|
6145
|
+
return routeModelForTaskSync(task, preferredModel);
|
|
6146
|
+
}
|
|
6147
|
+
async function getModelForTaskAsync(task, preferredModel) {
|
|
6148
|
+
return routeModelForTask(task, preferredModel);
|
|
6149
|
+
}
|
|
6150
|
+
async function runQualityGateTest(modelName, serverName) {
|
|
6151
|
+
const model2 = serverName ? getModelByName(modelName, getServer(serverName)?.id) : getModelByName(modelName);
|
|
6152
|
+
if (!model2) return void 0;
|
|
6153
|
+
const server = getServerById(model2.serverId);
|
|
6154
|
+
if (!server || server.status !== "online") return void 0;
|
|
6155
|
+
const { runQualityGate: runQualityGate2 } = await Promise.resolve().then(() => (init_quality_gate(), quality_gate_exports));
|
|
6156
|
+
return runQualityGate2(model2, server);
|
|
6157
|
+
}
|
|
6158
|
+
function getServerForModel(modelName) {
|
|
6159
|
+
const model2 = getModelByName(modelName);
|
|
6160
|
+
if (!model2) return void 0;
|
|
6161
|
+
const server = getServerById(model2.serverId);
|
|
6162
|
+
if (!server || server.status !== "online") return void 0;
|
|
6163
|
+
return {
|
|
6164
|
+
baseUrl: getBaseUrl(server),
|
|
6165
|
+
apiKey: server.apiKey
|
|
6166
|
+
};
|
|
6167
|
+
}
|
|
6168
|
+
function extractContextWindow(show) {
|
|
6169
|
+
const info = show.model_info ?? {};
|
|
6170
|
+
for (const key of Object.keys(info)) {
|
|
6171
|
+
if (key.includes("context_length") || key.includes("context_window")) {
|
|
6172
|
+
const val = info[key];
|
|
6173
|
+
if (typeof val === "number" && val > 0) return val;
|
|
6174
|
+
}
|
|
6175
|
+
}
|
|
6176
|
+
const paramLines = (show.parameters ?? "").split("\n");
|
|
6177
|
+
for (const line of paramLines) {
|
|
6178
|
+
const match = line.match(/num_ctx\s+(\d+)/);
|
|
6179
|
+
if (match) return parseInt(match[1], 10);
|
|
6180
|
+
}
|
|
6181
|
+
return 4096;
|
|
6182
|
+
}
|
|
6183
|
+
var HEALTH_PING_TIMEOUT_MS;
|
|
6184
|
+
var init_service = __esm({
|
|
6185
|
+
"src/services/ollama/service.ts"() {
|
|
6186
|
+
"use strict";
|
|
6187
|
+
init_client();
|
|
6188
|
+
init_store6();
|
|
6189
|
+
init_router();
|
|
6190
|
+
init_log();
|
|
6191
|
+
HEALTH_PING_TIMEOUT_MS = 3e3;
|
|
6192
|
+
}
|
|
6193
|
+
});
|
|
6194
|
+
|
|
6195
|
+
// src/services/ollama/prompt.ts
|
|
6196
|
+
var prompt_exports = {};
|
|
6197
|
+
__export(prompt_exports, {
|
|
6198
|
+
stripForLocalModel: () => stripForLocalModel
|
|
6199
|
+
});
|
|
6200
|
+
function stripForLocalModel(prompt) {
|
|
6201
|
+
return prompt.replace(/\[SEND_FILE:[^\]]*\]/g, "").replace(/\[GENERATE_IMAGE:[^\]]*\]/g, "").replace(/\[REACT:[^\]]*\]/g, "").replace(/\[HISTORY_SEARCH:[^\]]*\]/g, "").replace(/You have access to the following tools:[\s\S]*?(?=\n\n)/g, "").replace(/Available tools:[\s\S]*?(?=\n\n)/g, "").replace(/MCP servers?:[\s\S]*?(?=\n\n)/g, "").replace(/You are CC-Claw[\s\S]*?(?=\n\n)/g, "").replace(/\n{3,}/g, "\n\n").trim();
|
|
6202
|
+
}
|
|
6203
|
+
var init_prompt = __esm({
|
|
6204
|
+
"src/services/ollama/prompt.ts"() {
|
|
6205
|
+
"use strict";
|
|
6206
|
+
}
|
|
6207
|
+
});
|
|
6208
|
+
|
|
6209
|
+
// src/services/ollama/index.ts
|
|
6210
|
+
var ollama_exports = {};
|
|
6211
|
+
__export(ollama_exports, {
|
|
6212
|
+
OllamaClient: () => client_exports,
|
|
6213
|
+
OllamaPrompt: () => prompt_exports,
|
|
6214
|
+
OllamaService: () => service_exports,
|
|
6215
|
+
OllamaStore: () => store_exports6
|
|
6216
|
+
});
|
|
6217
|
+
var init_ollama = __esm({
|
|
6218
|
+
"src/services/ollama/index.ts"() {
|
|
6219
|
+
"use strict";
|
|
6220
|
+
init_client();
|
|
6221
|
+
init_service();
|
|
6222
|
+
init_store6();
|
|
6223
|
+
init_prompt();
|
|
6224
|
+
}
|
|
6225
|
+
});
|
|
6226
|
+
|
|
6227
|
+
// src/backends/ollama.ts
|
|
6228
|
+
var OLLAMA_HTTP_SENTINEL, OllamaAdapter;
|
|
6229
|
+
var init_ollama2 = __esm({
|
|
6230
|
+
"src/backends/ollama.ts"() {
|
|
6231
|
+
"use strict";
|
|
6232
|
+
init_ollama();
|
|
6233
|
+
init_prompt();
|
|
6234
|
+
OLLAMA_HTTP_SENTINEL = "__ollama_http__";
|
|
6235
|
+
OllamaAdapter = class {
|
|
6236
|
+
id = "ollama";
|
|
6237
|
+
displayName = "Ollama (Local)";
|
|
6238
|
+
// Dynamically populated — starts empty and is refreshed by discoverModels()
|
|
6239
|
+
availableModels = {};
|
|
6240
|
+
pricing = {};
|
|
6241
|
+
contextWindow = {};
|
|
6242
|
+
get defaultModel() {
|
|
6243
|
+
try {
|
|
6244
|
+
const route = service_exports.getModelForTask("chat");
|
|
6245
|
+
return route?.model.name ?? "";
|
|
6246
|
+
} catch {
|
|
6247
|
+
return "";
|
|
6248
|
+
}
|
|
5235
6249
|
}
|
|
5236
|
-
|
|
5237
|
-
|
|
6250
|
+
get summarizerModel() {
|
|
6251
|
+
try {
|
|
6252
|
+
const route = service_exports.getModelForTask("summarize");
|
|
6253
|
+
return route?.model.name ?? "";
|
|
6254
|
+
} catch {
|
|
6255
|
+
return "";
|
|
6256
|
+
}
|
|
6257
|
+
}
|
|
6258
|
+
// ── CLI stubs (Ollama doesn't use CLI spawning) ────────────────────
|
|
6259
|
+
getExecutablePath() {
|
|
6260
|
+
return OLLAMA_HTTP_SENTINEL;
|
|
6261
|
+
}
|
|
6262
|
+
getEnv() {
|
|
6263
|
+
return {};
|
|
6264
|
+
}
|
|
6265
|
+
buildSpawnConfig(_opts) {
|
|
6266
|
+
throw new Error(
|
|
6267
|
+
"Ollama uses streamDirect(), not CLI spawning. If you're seeing this, askAgent's streamDirect check was bypassed."
|
|
6268
|
+
);
|
|
6269
|
+
}
|
|
6270
|
+
applyThinkingConfig(_level, _model) {
|
|
6271
|
+
return {};
|
|
6272
|
+
}
|
|
6273
|
+
parseLine(_raw) {
|
|
6274
|
+
return [];
|
|
5238
6275
|
}
|
|
5239
6276
|
shouldKillOnResult() {
|
|
5240
6277
|
return false;
|
|
5241
6278
|
}
|
|
5242
|
-
|
|
5243
|
-
|
|
5244
|
-
const
|
|
5245
|
-
if (!
|
|
5246
|
-
|
|
6279
|
+
// ── Direct execution (the real path) ───────────────────────────────
|
|
6280
|
+
async streamDirect(prompt, model2, opts) {
|
|
6281
|
+
const serverInfo = service_exports.getServerForModel(model2);
|
|
6282
|
+
if (!serverInfo) {
|
|
6283
|
+
throw new Error(`No online Ollama server found hosting model "${model2}"`);
|
|
6284
|
+
}
|
|
6285
|
+
const cleanPrompt = stripForLocalModel(prompt);
|
|
6286
|
+
const result = await client_exports.generate(
|
|
6287
|
+
serverInfo.baseUrl,
|
|
6288
|
+
model2,
|
|
6289
|
+
cleanPrompt,
|
|
6290
|
+
{
|
|
6291
|
+
systemPrompt: opts?.systemPrompt,
|
|
6292
|
+
temperature: opts?.temperature,
|
|
6293
|
+
maxTokens: opts?.maxTokens,
|
|
6294
|
+
timeoutMs: opts?.timeoutMs,
|
|
6295
|
+
signal: opts?.signal,
|
|
6296
|
+
onStream: opts?.onStream,
|
|
6297
|
+
apiKey: serverInfo.apiKey
|
|
6298
|
+
}
|
|
6299
|
+
);
|
|
6300
|
+
return {
|
|
6301
|
+
text: result.text,
|
|
6302
|
+
usage: {
|
|
6303
|
+
input: result.usage.promptTokens,
|
|
6304
|
+
output: result.usage.completionTokens
|
|
6305
|
+
}
|
|
6306
|
+
};
|
|
6307
|
+
}
|
|
6308
|
+
/**
|
|
6309
|
+
* Refresh the availableModels / pricing / contextWindow maps
|
|
6310
|
+
* from the discovered models in the database.
|
|
6311
|
+
* Called after discoverModels() or on startup.
|
|
6312
|
+
*/
|
|
6313
|
+
refreshModelCatalog() {
|
|
6314
|
+
for (const key of Object.keys(this.availableModels)) delete this.availableModels[key];
|
|
6315
|
+
for (const key of Object.keys(this.pricing)) delete this.pricing[key];
|
|
6316
|
+
for (const key of Object.keys(this.contextWindow)) delete this.contextWindow[key];
|
|
6317
|
+
const { OllamaStore } = (init_ollama(), __toCommonJS(ollama_exports));
|
|
6318
|
+
const models = OllamaStore.getAvailableModels();
|
|
6319
|
+
for (const m of models) {
|
|
6320
|
+
this.availableModels[m.name] = {
|
|
6321
|
+
label: `${m.name}${m.parameterSize ? ` (${m.parameterSize})` : ""}`,
|
|
6322
|
+
thinking: "none"
|
|
6323
|
+
};
|
|
6324
|
+
this.pricing[m.name] = { in: 0, out: 0, cache: 0 };
|
|
6325
|
+
this.contextWindow[m.name] = m.contextWindow ?? 4096;
|
|
6326
|
+
}
|
|
5247
6327
|
}
|
|
5248
6328
|
};
|
|
5249
6329
|
}
|
|
@@ -5261,7 +6341,8 @@ var init_types = __esm({
|
|
|
5261
6341
|
CLAUDE: "claude",
|
|
5262
6342
|
GEMINI: "gemini",
|
|
5263
6343
|
CODEX: "codex",
|
|
5264
|
-
CURSOR: "cursor"
|
|
6344
|
+
CURSOR: "cursor",
|
|
6345
|
+
OLLAMA: "ollama"
|
|
5265
6346
|
};
|
|
5266
6347
|
}
|
|
5267
6348
|
});
|
|
@@ -5276,14 +6357,17 @@ __export(backends_exports, {
|
|
|
5276
6357
|
getAllBackendIds: () => getAllBackendIds,
|
|
5277
6358
|
getAvailableAdapters: () => getAvailableAdapters,
|
|
5278
6359
|
getAvailableBackendIds: () => getAvailableBackendIds,
|
|
6360
|
+
getAvailableChatBackendIds: () => getAvailableChatBackendIds,
|
|
6361
|
+
getChatBackendIds: () => getChatBackendIds,
|
|
5279
6362
|
isBackendId: () => isBackendId,
|
|
5280
6363
|
probeBackendAvailability: () => probeBackendAvailability
|
|
5281
6364
|
});
|
|
5282
6365
|
import { existsSync as existsSync6 } from "fs";
|
|
5283
6366
|
import { execSync as execSync2 } from "child_process";
|
|
5284
|
-
function probeBackendAvailability() {
|
|
6367
|
+
async function probeBackendAvailability() {
|
|
5285
6368
|
availableSet.clear();
|
|
5286
6369
|
for (const [id, adapter] of Object.entries(adapters)) {
|
|
6370
|
+
if (id === "ollama") continue;
|
|
5287
6371
|
try {
|
|
5288
6372
|
const exe = adapter.getExecutablePath();
|
|
5289
6373
|
if (existsSync6(exe) || resolveOnPath(exe)) {
|
|
@@ -5292,6 +6376,14 @@ function probeBackendAvailability() {
|
|
|
5292
6376
|
} catch {
|
|
5293
6377
|
}
|
|
5294
6378
|
}
|
|
6379
|
+
try {
|
|
6380
|
+
const { OllamaService } = await Promise.resolve().then(() => (init_ollama(), ollama_exports));
|
|
6381
|
+
const online = await OllamaService.isAnyServerOnline();
|
|
6382
|
+
if (online) {
|
|
6383
|
+
availableSet.add("ollama");
|
|
6384
|
+
}
|
|
6385
|
+
} catch {
|
|
6386
|
+
}
|
|
5295
6387
|
log(`[backends] Available: ${[...availableSet].join(", ") || "none"}`);
|
|
5296
6388
|
}
|
|
5297
6389
|
function resolveOnPath(name) {
|
|
@@ -5320,6 +6412,9 @@ function getAllAdapters() {
|
|
|
5320
6412
|
function getAllBackendIds() {
|
|
5321
6413
|
return Object.keys(adapters);
|
|
5322
6414
|
}
|
|
6415
|
+
function getChatBackendIds() {
|
|
6416
|
+
return CHAT_BACKEND_IDS;
|
|
6417
|
+
}
|
|
5323
6418
|
function getAvailableAdapters() {
|
|
5324
6419
|
if (availableSet.size === 0) return Object.values(adapters);
|
|
5325
6420
|
return Object.values(adapters).filter((a) => availableSet.has(a.id));
|
|
@@ -5328,7 +6423,10 @@ function getAvailableBackendIds() {
|
|
|
5328
6423
|
if (availableSet.size === 0) return Object.keys(adapters);
|
|
5329
6424
|
return [...availableSet];
|
|
5330
6425
|
}
|
|
5331
|
-
|
|
6426
|
+
function getAvailableChatBackendIds() {
|
|
6427
|
+
return CHAT_BACKEND_IDS.filter((id) => availableSet.size === 0 || availableSet.has(id));
|
|
6428
|
+
}
|
|
6429
|
+
var adapters, CHAT_BACKEND_IDS, availableSet;
|
|
5332
6430
|
var init_backends = __esm({
|
|
5333
6431
|
"src/backends/index.ts"() {
|
|
5334
6432
|
"use strict";
|
|
@@ -5336,6 +6434,7 @@ var init_backends = __esm({
|
|
|
5336
6434
|
init_gemini();
|
|
5337
6435
|
init_codex();
|
|
5338
6436
|
init_cursor();
|
|
6437
|
+
init_ollama2();
|
|
5339
6438
|
init_store5();
|
|
5340
6439
|
init_log();
|
|
5341
6440
|
init_types();
|
|
@@ -5343,8 +6442,10 @@ var init_backends = __esm({
|
|
|
5343
6442
|
claude: new ClaudeAdapter(),
|
|
5344
6443
|
gemini: new GeminiAdapter(),
|
|
5345
6444
|
codex: new CodexAdapter(),
|
|
5346
|
-
cursor: new CursorAdapter()
|
|
6445
|
+
cursor: new CursorAdapter(),
|
|
6446
|
+
ollama: new OllamaAdapter()
|
|
5347
6447
|
};
|
|
6448
|
+
CHAT_BACKEND_IDS = ["claude", "gemini", "codex", "cursor"];
|
|
5348
6449
|
availableSet = /* @__PURE__ */ new Set();
|
|
5349
6450
|
}
|
|
5350
6451
|
});
|
|
@@ -6324,7 +7425,20 @@ function getTranscriptCap(model2) {
|
|
|
6324
7425
|
if (model2 && /flash/i.test(model2)) return TRANSCRIPT_CAP_FLASH;
|
|
6325
7426
|
return TRANSCRIPT_CAP_DEFAULT;
|
|
6326
7427
|
}
|
|
6327
|
-
function
|
|
7428
|
+
function getOllamaTranscriptCap(model2) {
|
|
7429
|
+
try {
|
|
7430
|
+
const { OllamaStore } = (init_ollama(), __toCommonJS(ollama_exports));
|
|
7431
|
+
const models = OllamaStore.listModels();
|
|
7432
|
+
const match = models.find((m) => m.name === model2);
|
|
7433
|
+
if (match?.contextWindow && match.contextWindow > 0) {
|
|
7434
|
+
return Math.min(Math.floor(match.contextWindow * 3 * 0.8), TRANSCRIPT_CAP_DEFAULT);
|
|
7435
|
+
}
|
|
7436
|
+
} catch {
|
|
7437
|
+
}
|
|
7438
|
+
return TRANSCRIPT_CAP_OLLAMA_DEFAULT;
|
|
7439
|
+
}
|
|
7440
|
+
function buildTranscript(entries, cap) {
|
|
7441
|
+
const transcriptCap = cap ?? getTranscriptCap();
|
|
6328
7442
|
const lines = [];
|
|
6329
7443
|
let totalLen = 0;
|
|
6330
7444
|
for (let i = entries.length - 1; i >= 0; i--) {
|
|
@@ -6333,7 +7447,7 @@ function buildTranscript(entries) {
|
|
|
6333
7447
|
const label2 = e.role === "user" ? "User" : "Agent";
|
|
6334
7448
|
const text = e.text.length > limit ? e.text.slice(0, limit) + "\n[...truncated]" : e.text;
|
|
6335
7449
|
const line = `[${label2}] ${text}`;
|
|
6336
|
-
if (totalLen + line.length >
|
|
7450
|
+
if (totalLen + line.length > transcriptCap) {
|
|
6337
7451
|
lines.push("[...earlier messages truncated for length]");
|
|
6338
7452
|
break;
|
|
6339
7453
|
}
|
|
@@ -6343,6 +7457,17 @@ function buildTranscript(entries) {
|
|
|
6343
7457
|
lines.reverse();
|
|
6344
7458
|
return lines.join("\n\n");
|
|
6345
7459
|
}
|
|
7460
|
+
function parseSummaryResult(rawText) {
|
|
7461
|
+
const summaryMatch = rawText.match(/SUMMARY:\s*(.+?)(?=\nKEY_DETAILS:|\nTOPICS:|$)/s);
|
|
7462
|
+
const keyDetailsMatch = rawText.match(/KEY_DETAILS:\s*(.+?)(?=\nTOPICS:|$)/s);
|
|
7463
|
+
const topicsMatch = rawText.match(/TOPICS:\s*(.+)/);
|
|
7464
|
+
let summary = summaryMatch?.[1]?.trim() ?? rawText.trim();
|
|
7465
|
+
const keyDetails = keyDetailsMatch?.[1]?.trim();
|
|
7466
|
+
if (keyDetails) summary += `
|
|
7467
|
+
Key details: ${keyDetails}`;
|
|
7468
|
+
const topics = topicsMatch?.[1]?.trim() ?? "";
|
|
7469
|
+
return { summary, topics };
|
|
7470
|
+
}
|
|
6346
7471
|
async function attemptSummarize(chatId, adapter, model2, entries) {
|
|
6347
7472
|
const transcript = buildTranscript(entries);
|
|
6348
7473
|
const prompt = `${SUMMARIZE_PROMPT}
|
|
@@ -6430,14 +7555,7 @@ ${transcript}`;
|
|
|
6430
7555
|
warn(`[summarize] ${adapter.id}:${model2} returned empty result for chat ${chatId}`);
|
|
6431
7556
|
return { success: false, rawText: "" };
|
|
6432
7557
|
}
|
|
6433
|
-
const
|
|
6434
|
-
const keyDetailsMatch = resultText.match(/KEY_DETAILS:\s*(.+?)(?=\nTOPICS:|$)/s);
|
|
6435
|
-
const topicsMatch = resultText.match(/TOPICS:\s*(.+)/);
|
|
6436
|
-
let summary = summaryMatch?.[1]?.trim() ?? resultText.trim();
|
|
6437
|
-
const keyDetails = keyDetailsMatch?.[1]?.trim();
|
|
6438
|
-
if (keyDetails) summary += `
|
|
6439
|
-
Key details: ${keyDetails}`;
|
|
6440
|
-
const topics = topicsMatch?.[1]?.trim() ?? "";
|
|
7558
|
+
const { summary, topics } = parseSummaryResult(resultText);
|
|
6441
7559
|
saveSessionSummaryWithEmbedding(chatId, summary, topics, Math.floor(entries.length / 2));
|
|
6442
7560
|
log(`[summarize] Saved summary via ${adapter.id}:${model2} for chat ${chatId}`);
|
|
6443
7561
|
return { success: true, rawText: resultText };
|
|
@@ -6446,6 +7564,35 @@ Key details: ${keyDetails}`;
|
|
|
6446
7564
|
return { success: false, rawText: "" };
|
|
6447
7565
|
}
|
|
6448
7566
|
}
|
|
7567
|
+
async function attemptSummarizeDirect(chatId, directFn, backendId, model2, entries, transcriptCap) {
|
|
7568
|
+
const transcript = buildTranscript(entries, transcriptCap);
|
|
7569
|
+
const prompt = `${SUMMARIZE_PROMPT}
|
|
7570
|
+
|
|
7571
|
+
Conversation:
|
|
7572
|
+
${transcript}`;
|
|
7573
|
+
try {
|
|
7574
|
+
const result = await Promise.race([
|
|
7575
|
+
directFn(prompt),
|
|
7576
|
+
new Promise(
|
|
7577
|
+
(_, reject) => setTimeout(() => reject(new Error("Direct summarize timeout")), SUMMARIZE_TIMEOUT_MS)
|
|
7578
|
+
)
|
|
7579
|
+
]);
|
|
7580
|
+
if (result.usage) {
|
|
7581
|
+
addUsage(chatId, result.usage.input, result.usage.output, 0, model2);
|
|
7582
|
+
}
|
|
7583
|
+
if (!result.text) {
|
|
7584
|
+
warn(`[summarize] ${backendId}:${model2} (direct) returned empty for ${chatId}`);
|
|
7585
|
+
return { success: false, rawText: "" };
|
|
7586
|
+
}
|
|
7587
|
+
const { summary, topics } = parseSummaryResult(result.text);
|
|
7588
|
+
saveSessionSummaryWithEmbedding(chatId, summary, topics, Math.floor(entries.length / 2));
|
|
7589
|
+
log(`[summarize] Saved summary via ${backendId}:${model2} (direct) for chat ${chatId}`);
|
|
7590
|
+
return { success: true, rawText: result.text };
|
|
7591
|
+
} catch (err) {
|
|
7592
|
+
warn(`[summarize] ${backendId}:${model2} (direct) failed for ${chatId}: ${errorMessage(err)}`);
|
|
7593
|
+
return { success: false, rawText: "" };
|
|
7594
|
+
}
|
|
7595
|
+
}
|
|
6449
7596
|
async function extractAndLogSignals(rawText, chatId, adapterId, model2) {
|
|
6450
7597
|
try {
|
|
6451
7598
|
const { getReflectionStatus: getReflectionStatus2, logSignal: logSignal2 } = await Promise.resolve().then(() => (init_store4(), store_exports4));
|
|
@@ -6493,6 +7640,26 @@ async function summarizeWithFallbackChain(chatId, targetBackendId, excludeBacken
|
|
|
6493
7640
|
const excluded = getAdapter(excludeBackend);
|
|
6494
7641
|
tried.add(`${excluded.id}:${excluded.summarizerModel}`);
|
|
6495
7642
|
}
|
|
7643
|
+
const globalConfig = getGlobalSummarizer();
|
|
7644
|
+
if (globalConfig.backend === "ollama") {
|
|
7645
|
+
try {
|
|
7646
|
+
const ollamaAdapter = getAdapter("ollama");
|
|
7647
|
+
if (ollamaAdapter.streamDirect) {
|
|
7648
|
+
const ollamaModel = globalConfig.model ?? ollamaAdapter.summarizerModel;
|
|
7649
|
+
const cap = getOllamaTranscriptCap(ollamaModel);
|
|
7650
|
+
const key = `${ollamaAdapter.id}:${ollamaModel}`;
|
|
7651
|
+
tried.add(key);
|
|
7652
|
+
const directFn = (prompt) => ollamaAdapter.streamDirect(prompt, ollamaModel);
|
|
7653
|
+
const result = await attemptSummarizeDirect(chatId, directFn, "ollama", ollamaModel, entries, cap);
|
|
7654
|
+
if (result.success) {
|
|
7655
|
+
await extractAndLogSignals(result.rawText, chatId, "ollama", ollamaModel);
|
|
7656
|
+
if (clearLogAfter) clearLog(chatId);
|
|
7657
|
+
return true;
|
|
7658
|
+
}
|
|
7659
|
+
}
|
|
7660
|
+
} catch {
|
|
7661
|
+
}
|
|
7662
|
+
}
|
|
6496
7663
|
try {
|
|
6497
7664
|
const config2 = getSummarizer(chatId);
|
|
6498
7665
|
if (config2.backend !== "off") {
|
|
@@ -6535,7 +7702,14 @@ async function summarizeWithFallbackChain(chatId, targetBackendId, excludeBacken
|
|
|
6535
7702
|
if (!tried.has(key)) {
|
|
6536
7703
|
tried.add(key);
|
|
6537
7704
|
fallbackCount++;
|
|
6538
|
-
|
|
7705
|
+
let result;
|
|
7706
|
+
if (adapter.streamDirect) {
|
|
7707
|
+
const cap = adapter.id === "ollama" ? getOllamaTranscriptCap(model2) : getTranscriptCap(model2);
|
|
7708
|
+
const directFn = (prompt) => adapter.streamDirect(prompt, model2);
|
|
7709
|
+
result = await attemptSummarizeDirect(chatId, directFn, adapter.id, model2, entries, cap);
|
|
7710
|
+
} else {
|
|
7711
|
+
result = await attemptSummarize(chatId, adapter, model2, entries);
|
|
7712
|
+
}
|
|
6539
7713
|
if (result.success) {
|
|
6540
7714
|
await extractAndLogSignals(result.rawText, chatId, adapter.id, model2);
|
|
6541
7715
|
if (clearLogAfter) clearLog(chatId);
|
|
@@ -6573,7 +7747,7 @@ async function summarizeAllPending() {
|
|
|
6573
7747
|
await summarizeSession(chatId);
|
|
6574
7748
|
}
|
|
6575
7749
|
}
|
|
6576
|
-
var MIN_PAIRS, summarizationLocks, USER_MSG_LIMIT, AGENT_MSG_LIMIT, TRANSCRIPT_CAP_DEFAULT, TRANSCRIPT_CAP_FLASH, SUMMARIZE_TIMEOUT_MS, SUMMARIZE_PROMPT, VALID_SIGNAL_TYPES;
|
|
7750
|
+
var MIN_PAIRS, summarizationLocks, USER_MSG_LIMIT, AGENT_MSG_LIMIT, TRANSCRIPT_CAP_DEFAULT, TRANSCRIPT_CAP_FLASH, TRANSCRIPT_CAP_OLLAMA_DEFAULT, SUMMARIZE_TIMEOUT_MS, SUMMARIZE_PROMPT, VALID_SIGNAL_TYPES;
|
|
6577
7751
|
var init_summarize = __esm({
|
|
6578
7752
|
"src/memory/summarize.ts"() {
|
|
6579
7753
|
"use strict";
|
|
@@ -6590,6 +7764,7 @@ var init_summarize = __esm({
|
|
|
6590
7764
|
AGENT_MSG_LIMIT = 6e3;
|
|
6591
7765
|
TRANSCRIPT_CAP_DEFAULT = 1e5;
|
|
6592
7766
|
TRANSCRIPT_CAP_FLASH = 5e4;
|
|
7767
|
+
TRANSCRIPT_CAP_OLLAMA_DEFAULT = 15e3;
|
|
6593
7768
|
SUMMARIZE_TIMEOUT_MS = 6e4;
|
|
6594
7769
|
SUMMARIZE_PROMPT = `You are summarizing a conversation session for long-term episodic memory. This summary will be injected into future conversations to provide context, so it must contain enough specific detail to be useful weeks or months later.
|
|
6595
7770
|
|
|
@@ -7647,7 +8822,8 @@ function spawnAgentProcess(runner, opts, callbacks) {
|
|
|
7647
8822
|
} catch {
|
|
7648
8823
|
try {
|
|
7649
8824
|
child.kill("SIGTERM");
|
|
7650
|
-
} catch {
|
|
8825
|
+
} catch (killErr) {
|
|
8826
|
+
warn(`[spawn] Kill fallback failed for pid ${child.pid}: ${killErr}`);
|
|
7651
8827
|
}
|
|
7652
8828
|
}
|
|
7653
8829
|
}
|
|
@@ -7673,6 +8849,7 @@ var init_spawn = __esm({
|
|
|
7673
8849
|
"use strict";
|
|
7674
8850
|
init_env();
|
|
7675
8851
|
init_roles();
|
|
8852
|
+
init_log();
|
|
7676
8853
|
SENSITIVE_ENV_KEYS = [
|
|
7677
8854
|
"TELEGRAM_BOT_TOKEN",
|
|
7678
8855
|
"GROQ_API_KEY",
|
|
@@ -7683,7 +8860,10 @@ var init_spawn = __esm({
|
|
|
7683
8860
|
// Audit C15: API keys that should not leak to sub-agents
|
|
7684
8861
|
"ANTHROPIC_API_KEY",
|
|
7685
8862
|
"GEMINI_API_KEY",
|
|
7686
|
-
"OPENAI_API_KEY"
|
|
8863
|
+
"OPENAI_API_KEY",
|
|
8864
|
+
// Audit C26: Cursor and Codex keys were missing — sub-agents inherited them
|
|
8865
|
+
"CURSOR_API_KEY",
|
|
8866
|
+
"CODEX_API_KEY"
|
|
7687
8867
|
];
|
|
7688
8868
|
}
|
|
7689
8869
|
});
|
|
@@ -11255,6 +12435,9 @@ var init_apply = __esm({
|
|
|
11255
12435
|
});
|
|
11256
12436
|
|
|
11257
12437
|
// src/dashboard/routes/evolve.ts
|
|
12438
|
+
function resolveChatId(body) {
|
|
12439
|
+
return body.chatId || (process.env.ALLOWED_CHAT_ID ?? "").split(",")[0]?.trim() || null;
|
|
12440
|
+
}
|
|
11258
12441
|
var handleAnalyze, handleApply, handleReject, handleUndo, handleEvolveOn, handleEvolveOff, handleEvolveModel, handleEvolveSettings;
|
|
11259
12442
|
var init_evolve = __esm({
|
|
11260
12443
|
"src/dashboard/routes/evolve.ts"() {
|
|
@@ -11265,8 +12448,9 @@ var init_evolve = __esm({
|
|
|
11265
12448
|
handleAnalyze = async (req, res) => {
|
|
11266
12449
|
try {
|
|
11267
12450
|
const body = JSON.parse(await readBody(req));
|
|
12451
|
+
const chatId = resolveChatId(body);
|
|
12452
|
+
if (!chatId) return jsonResponse(res, { error: "No chatId provided and ALLOWED_CHAT_ID is not set" }, 400);
|
|
11268
12453
|
const { runAnalysis: runAnalysis2 } = await Promise.resolve().then(() => (init_analyze(), analyze_exports));
|
|
11269
|
-
const chatId = body.chatId || (process.env.ALLOWED_CHAT_ID ?? "").split(",")[0]?.trim() || "default";
|
|
11270
12454
|
const insights = await runAnalysis2(chatId);
|
|
11271
12455
|
jsonResponse(res, { success: true, insights: insights.length });
|
|
11272
12456
|
} catch (err) {
|
|
@@ -11310,7 +12494,8 @@ var init_evolve = __esm({
|
|
|
11310
12494
|
const { existsSync: fileExists, readFileSync: fileRead } = await import("fs");
|
|
11311
12495
|
const { join: join35 } = await import("path");
|
|
11312
12496
|
const { CC_CLAW_HOME: home } = await Promise.resolve().then(() => (init_paths(), paths_exports));
|
|
11313
|
-
const chatId = body
|
|
12497
|
+
const chatId = resolveChatId(body);
|
|
12498
|
+
if (!chatId) return jsonResponse(res, { error: "No chatId provided and ALLOWED_CHAT_ID is not set" }, 400);
|
|
11314
12499
|
const soulPath = join35(home, "identity/SOUL.md");
|
|
11315
12500
|
const userPath = join35(home, "identity/USER.md");
|
|
11316
12501
|
const soul = fileExists(soulPath) ? fileRead(soulPath, "utf-8") : "";
|
|
@@ -11325,7 +12510,8 @@ var init_evolve = __esm({
|
|
|
11325
12510
|
try {
|
|
11326
12511
|
const body = JSON.parse(await readBody(req));
|
|
11327
12512
|
const { setReflectionStatus: setReflectionStatus2 } = await Promise.resolve().then(() => (init_store4(), store_exports4));
|
|
11328
|
-
const chatId = body
|
|
12513
|
+
const chatId = resolveChatId(body);
|
|
12514
|
+
if (!chatId) return jsonResponse(res, { error: "No chatId provided and ALLOWED_CHAT_ID is not set" }, 400);
|
|
11329
12515
|
setReflectionStatus2(getDb(), chatId, "frozen");
|
|
11330
12516
|
jsonResponse(res, { success: true, status: "frozen" });
|
|
11331
12517
|
} catch (err) {
|
|
@@ -11336,7 +12522,8 @@ var init_evolve = __esm({
|
|
|
11336
12522
|
try {
|
|
11337
12523
|
const body = JSON.parse(await readBody(req));
|
|
11338
12524
|
const { setReflectionModelConfig: setReflectionModelConfig2 } = await Promise.resolve().then(() => (init_store4(), store_exports4));
|
|
11339
|
-
const chatId = body
|
|
12525
|
+
const chatId = resolveChatId(body);
|
|
12526
|
+
if (!chatId) return jsonResponse(res, { error: "No chatId provided and ALLOWED_CHAT_ID is not set" }, 400);
|
|
11340
12527
|
setReflectionModelConfig2(getDb(), chatId, body.mode, body.backend, body.model);
|
|
11341
12528
|
jsonResponse(res, { success: true });
|
|
11342
12529
|
} catch (err) {
|
|
@@ -11347,7 +12534,8 @@ var init_evolve = __esm({
|
|
|
11347
12534
|
try {
|
|
11348
12535
|
const body = JSON.parse(await readBody(req));
|
|
11349
12536
|
const { setReflectionSettings: setReflectionSettings2, getReflectionSettings: getReflectionSettings2 } = await Promise.resolve().then(() => (init_store4(), store_exports4));
|
|
11350
|
-
const chatId = body
|
|
12537
|
+
const chatId = resolveChatId(body);
|
|
12538
|
+
if (!chatId) return jsonResponse(res, { error: "No chatId provided and ALLOWED_CHAT_ID is not set" }, 400);
|
|
11351
12539
|
const updates = {};
|
|
11352
12540
|
if (body.perFileCap !== void 0) updates.perFileCap = body.perFileCap;
|
|
11353
12541
|
if (body.backupRetentionDays !== void 0) updates.backupRetentionDays = body.backupRetentionDays;
|
|
@@ -11808,8 +12996,10 @@ __export(agent_exports, {
|
|
|
11808
12996
|
getInFlightMessage: () => getInFlightMessage,
|
|
11809
12997
|
isChatBusy: () => isChatBusy,
|
|
11810
12998
|
isSyntheticChatId: () => isSyntheticChatId,
|
|
12999
|
+
startStaleChatSweep: () => startStaleChatSweep,
|
|
11811
13000
|
stopAgent: () => stopAgent,
|
|
11812
|
-
stopAllActiveAgents: () => stopAllActiveAgents
|
|
13001
|
+
stopAllActiveAgents: () => stopAllActiveAgents,
|
|
13002
|
+
stopStaleChatSweep: () => stopStaleChatSweep
|
|
11813
13003
|
});
|
|
11814
13004
|
import { spawn as spawn6 } from "child_process";
|
|
11815
13005
|
import { createInterface as createInterface5 } from "readline";
|
|
@@ -11830,6 +13020,26 @@ function killProcessGroup(proc, signal = "SIGTERM") {
|
|
|
11830
13020
|
}
|
|
11831
13021
|
}
|
|
11832
13022
|
}
|
|
13023
|
+
function sweepStaleChatEntries() {
|
|
13024
|
+
for (const [chatId, state] of activeChats) {
|
|
13025
|
+
if (state.process && state.process.exitCode !== null) {
|
|
13026
|
+
warn(`[agent] Sweeping stale activeChats entry for ${chatId} (process exited with code ${state.process.exitCode})`);
|
|
13027
|
+
if (state.killTimer) clearTimeout(state.killTimer);
|
|
13028
|
+
activeChats.delete(chatId);
|
|
13029
|
+
}
|
|
13030
|
+
}
|
|
13031
|
+
}
|
|
13032
|
+
function startStaleChatSweep() {
|
|
13033
|
+
if (staleSweepTimer) return;
|
|
13034
|
+
staleSweepTimer = setInterval(sweepStaleChatEntries, 15 * 60 * 1e3);
|
|
13035
|
+
staleSweepTimer.unref();
|
|
13036
|
+
}
|
|
13037
|
+
function stopStaleChatSweep() {
|
|
13038
|
+
if (staleSweepTimer) {
|
|
13039
|
+
clearInterval(staleSweepTimer);
|
|
13040
|
+
staleSweepTimer = void 0;
|
|
13041
|
+
}
|
|
13042
|
+
}
|
|
11833
13043
|
function withChatLock(chatId, fn) {
|
|
11834
13044
|
const prev = chatLocks.get(chatId) ?? Promise.resolve();
|
|
11835
13045
|
const isBlocked = activeChats.has(chatId);
|
|
@@ -11858,6 +13068,9 @@ function stopAgent(chatId) {
|
|
|
11858
13068
|
const state = activeChats.get(chatId);
|
|
11859
13069
|
if (!state) return false;
|
|
11860
13070
|
state.cancelled = true;
|
|
13071
|
+
if (state.abortController) {
|
|
13072
|
+
state.abortController.abort();
|
|
13073
|
+
}
|
|
11861
13074
|
if (state.process) {
|
|
11862
13075
|
killProcessGroup(state.process, "SIGTERM");
|
|
11863
13076
|
state.killTimer = setTimeout(() => {
|
|
@@ -12324,6 +13537,49 @@ async function askAgentImpl(chatId, userMessage, opts) {
|
|
|
12324
13537
|
const effectiveAgentMode = optsAgentMode ?? getAgentMode(settingsChat);
|
|
12325
13538
|
const sideQuestCtx = settingsSourceChatId ? { parentChatId: settingsSourceChatId, actualChatId: chatId } : void 0;
|
|
12326
13539
|
const fullPrompt = await assembleBootstrapPrompt(userMessage, tier, settingsChat, mode, responseStyle, effectiveAgentMode, sideQuestCtx, planningDirective);
|
|
13540
|
+
if (adapter.streamDirect) {
|
|
13541
|
+
const resolvedModel2 = model2 ?? adapter.defaultModel;
|
|
13542
|
+
const abortController = new AbortController();
|
|
13543
|
+
const cancelState2 = { cancelled: false, userMessage, abortController };
|
|
13544
|
+
activeChats.set(chatId, cancelState2);
|
|
13545
|
+
try {
|
|
13546
|
+
const sdResult = await adapter.streamDirect(fullPrompt, resolvedModel2, {
|
|
13547
|
+
timeoutMs: timeoutMs ?? 12e4,
|
|
13548
|
+
onStream,
|
|
13549
|
+
signal: abortController.signal
|
|
13550
|
+
});
|
|
13551
|
+
if (!isSyntheticChatId(chatId)) {
|
|
13552
|
+
appendToLog(chatId, userMessage, sdResult.text, adapter.id, resolvedModel2, null);
|
|
13553
|
+
const AUTO_SUMMARIZE_THRESHOLD = 30;
|
|
13554
|
+
const pairCount = tier !== "chat" ? getMessagePairCount(chatId) : 0;
|
|
13555
|
+
if (pairCount >= AUTO_SUMMARIZE_THRESHOLD) {
|
|
13556
|
+
log(`[agent] Auto-summarizing chat ${chatId} after ${pairCount} turns`);
|
|
13557
|
+
summarizeWithFallbackChain(chatId).then((saved) => {
|
|
13558
|
+
if (saved) {
|
|
13559
|
+
clearSession(chatId);
|
|
13560
|
+
opts?.onCompaction?.(chatId);
|
|
13561
|
+
}
|
|
13562
|
+
}).catch((err) => {
|
|
13563
|
+
warn(`[agent] Auto-summarize failed for chat ${chatId}: ${err}`);
|
|
13564
|
+
});
|
|
13565
|
+
}
|
|
13566
|
+
}
|
|
13567
|
+
const sdUsage = sdResult.usage ?? { input: 0, output: 0 };
|
|
13568
|
+
if (sdUsage.input + sdUsage.output > 0) {
|
|
13569
|
+
addUsage(chatId, sdUsage.input, sdUsage.output, 0, resolvedModel2);
|
|
13570
|
+
}
|
|
13571
|
+
if (cancelState2.cancelled) {
|
|
13572
|
+
return { text: "Stopped.", usage: { input: sdUsage.input, output: sdUsage.output, cacheRead: 0 } };
|
|
13573
|
+
}
|
|
13574
|
+
return {
|
|
13575
|
+
text: sdResult.text || `(No response from ${adapter.displayName})`,
|
|
13576
|
+
usage: { input: sdUsage.input, output: sdUsage.output, cacheRead: 0 },
|
|
13577
|
+
resolvedModel: resolvedModel2
|
|
13578
|
+
};
|
|
13579
|
+
} finally {
|
|
13580
|
+
activeChats.delete(chatId);
|
|
13581
|
+
}
|
|
13582
|
+
}
|
|
12327
13583
|
const existingSessionId = settingsSourceChatId ? null : getSessionId(settingsChat);
|
|
12328
13584
|
const allowedTools = getEnabledTools(settingsChat);
|
|
12329
13585
|
const mcpConfigPath = tier !== "slim" && effectiveAgentMode !== "native" && MCP_CONFIG_FLAG[adapter.id] ? getMcpConfigPath(chatId) : null;
|
|
@@ -12471,24 +13727,46 @@ async function askAgentImpl(chatId, userMessage, opts) {
|
|
|
12471
13727
|
result = await spawnQuery(adapter, baseConfig, resolvedModel, cancelState, thinkingLevel, timeoutMs, maxTurns, retryOpts);
|
|
12472
13728
|
}
|
|
12473
13729
|
} else if (errMsg.startsWith(CONTENT_SILENCE_TIMEOUT_ERROR) && existingSessionId) {
|
|
12474
|
-
warn(`[agent] Content silence on ${adapter.id} \u2014
|
|
12475
|
-
|
|
12476
|
-
|
|
12477
|
-
|
|
12478
|
-
|
|
12479
|
-
|
|
12480
|
-
|
|
12481
|
-
|
|
12482
|
-
|
|
12483
|
-
|
|
12484
|
-
|
|
12485
|
-
|
|
12486
|
-
|
|
12487
|
-
|
|
12488
|
-
|
|
12489
|
-
|
|
12490
|
-
|
|
12491
|
-
|
|
13730
|
+
warn(`[agent] Content silence on ${adapter.id} \u2014 retrying with session ${existingSessionId}`);
|
|
13731
|
+
try {
|
|
13732
|
+
if (useGeminiRotation) {
|
|
13733
|
+
const rotationCb = onSlotRotation ? (from, to) => onSlotRotation(chatId, from, to) : void 0;
|
|
13734
|
+
const downgradeCb = onModelDowngrade ? (from, to, reason) => onModelDowngrade(chatId, from, to, reason) : void 0;
|
|
13735
|
+
result = await spawnGeminiWithRotation(chatId, adapter, configWithSession, configWithSession, resolvedModel, cancelState, thinkingLevel, timeoutMs, maxTurns, geminiRotationMode, spawnOpts, rotationCb, settingsSourceChatId, downgradeCb);
|
|
13736
|
+
} else if (useBackendRotation) {
|
|
13737
|
+
const rotationCb = onSlotRotation ? (from, to) => onSlotRotation(chatId, from, to) : void 0;
|
|
13738
|
+
result = await spawnWithSlotRotation(chatId, adapter, configWithSession, configWithSession, resolvedModel, cancelState, thinkingLevel, timeoutMs, maxTurns, backendRotationMode, allowPaid, spawnOpts, rotationCb);
|
|
13739
|
+
} else {
|
|
13740
|
+
const retryOpts = (() => {
|
|
13741
|
+
if (adapter.id !== "gemini") return spawnOpts;
|
|
13742
|
+
const geminiAdapter = adapter;
|
|
13743
|
+
const { env, slot } = geminiAdapter.resolveSlotEnv(chatId);
|
|
13744
|
+
if (slot) return { ...spawnOpts, envOverride: env };
|
|
13745
|
+
return spawnOpts;
|
|
13746
|
+
})();
|
|
13747
|
+
result = await spawnQuery(adapter, configWithSession, resolvedModel, cancelState, thinkingLevel, timeoutMs, maxTurns, retryOpts);
|
|
13748
|
+
}
|
|
13749
|
+
} catch (retryErr) {
|
|
13750
|
+
const retryMsg = retryErr instanceof Error ? retryErr.message : String(retryErr);
|
|
13751
|
+
warn(`[agent] Content silence session retry also failed on ${adapter.id} \u2014 clearing session and retrying fresh`);
|
|
13752
|
+
clearSession(chatId);
|
|
13753
|
+
if (useGeminiRotation) {
|
|
13754
|
+
const rotationCb = onSlotRotation ? (from, to) => onSlotRotation(chatId, from, to) : void 0;
|
|
13755
|
+
const downgradeCb = onModelDowngrade ? (from, to, reason) => onModelDowngrade(chatId, from, to, reason) : void 0;
|
|
13756
|
+
result = await spawnGeminiWithRotation(chatId, adapter, baseConfig, baseConfig, resolvedModel, cancelState, thinkingLevel, timeoutMs, maxTurns, geminiRotationMode, spawnOpts, rotationCb, settingsSourceChatId, downgradeCb);
|
|
13757
|
+
} else if (useBackendRotation) {
|
|
13758
|
+
const rotationCb = onSlotRotation ? (from, to) => onSlotRotation(chatId, from, to) : void 0;
|
|
13759
|
+
result = await spawnWithSlotRotation(chatId, adapter, baseConfig, baseConfig, resolvedModel, cancelState, thinkingLevel, timeoutMs, maxTurns, backendRotationMode, allowPaid, spawnOpts, rotationCb);
|
|
13760
|
+
} else {
|
|
13761
|
+
const retryOpts = (() => {
|
|
13762
|
+
if (adapter.id !== "gemini") return spawnOpts;
|
|
13763
|
+
const geminiAdapter = adapter;
|
|
13764
|
+
const { env, slot } = geminiAdapter.resolveSlotEnv(chatId);
|
|
13765
|
+
if (slot) return { ...spawnOpts, envOverride: env };
|
|
13766
|
+
return spawnOpts;
|
|
13767
|
+
})();
|
|
13768
|
+
result = await spawnQuery(adapter, baseConfig, resolvedModel, cancelState, thinkingLevel, timeoutMs, maxTurns, retryOpts);
|
|
13769
|
+
}
|
|
12492
13770
|
}
|
|
12493
13771
|
} else {
|
|
12494
13772
|
if (!isSyntheticChatId(chatId)) {
|
|
@@ -12594,7 +13872,7 @@ function injectMcpConfig(adapterId, args, mcpConfigPath) {
|
|
|
12594
13872
|
if (!flag) return args;
|
|
12595
13873
|
return [...args, ...flag, mcpConfigPath];
|
|
12596
13874
|
}
|
|
12597
|
-
var activeChats, chatLocks, SPAWN_TIMEOUT_MS, FIRST_RESPONSE_TIMEOUT_MS, CONTENT_SILENCE_TIMEOUT_MS, CONTENT_SILENCE_TIMEOUT_ERROR, FIRST_RESPONSE_TIMEOUT_ERROR, FREE_SLOTS_EXHAUSTED, GEMINI_FALLBACK_CHAIN, GEMINI_DOWNGRADE_MODELS, MCP_CONFIG_FLAG;
|
|
13875
|
+
var activeChats, staleSweepTimer, chatLocks, SPAWN_TIMEOUT_MS, FIRST_RESPONSE_TIMEOUT_MS, CONTENT_SILENCE_TIMEOUT_MS, CONTENT_SILENCE_TIMEOUT_ERROR, FIRST_RESPONSE_TIMEOUT_ERROR, FREE_SLOTS_EXHAUSTED, GEMINI_FALLBACK_CHAIN, GEMINI_DOWNGRADE_MODELS, MCP_CONFIG_FLAG;
|
|
12598
13876
|
var init_agent = __esm({
|
|
12599
13877
|
"src/agent.ts"() {
|
|
12600
13878
|
"use strict";
|
|
@@ -14284,6 +15562,12 @@ var init_session_log2 = __esm({
|
|
|
14284
15562
|
});
|
|
14285
15563
|
|
|
14286
15564
|
// src/router/live-status.ts
|
|
15565
|
+
function canFlushGlobally() {
|
|
15566
|
+
return Date.now() - globalLastFlushAt >= GLOBAL_MIN_GAP_MS;
|
|
15567
|
+
}
|
|
15568
|
+
function markGlobalFlush() {
|
|
15569
|
+
globalLastFlushAt = Date.now();
|
|
15570
|
+
}
|
|
14287
15571
|
function dedupThinking(entries) {
|
|
14288
15572
|
const out = [];
|
|
14289
15573
|
for (const e of entries) {
|
|
@@ -14327,7 +15611,7 @@ function makeLiveStatus(chatId, channel, modelLabel, verboseLevel, showThinking)
|
|
|
14327
15611
|
};
|
|
14328
15612
|
return { liveStatus, toolCb };
|
|
14329
15613
|
}
|
|
14330
|
-
var FLUSH_INTERVAL_DM_MS, FLUSH_INTERVAL_GROUP_MS, MAX_THINKING_CHARS, TRIM_THRESHOLD, MAX_ENTRIES, LiveStatusMessage;
|
|
15614
|
+
var FLUSH_INTERVAL_DM_MS, FLUSH_INTERVAL_GROUP_MS, MAX_THINKING_CHARS, GLOBAL_MIN_GAP_MS, globalLastFlushAt, TRIM_THRESHOLD, MAX_ENTRIES, LiveStatusMessage;
|
|
14331
15615
|
var init_live_status = __esm({
|
|
14332
15616
|
"src/router/live-status.ts"() {
|
|
14333
15617
|
"use strict";
|
|
@@ -14336,6 +15620,8 @@ var init_live_status = __esm({
|
|
|
14336
15620
|
FLUSH_INTERVAL_DM_MS = 1e3;
|
|
14337
15621
|
FLUSH_INTERVAL_GROUP_MS = 3e3;
|
|
14338
15622
|
MAX_THINKING_CHARS = 800;
|
|
15623
|
+
GLOBAL_MIN_GAP_MS = 500;
|
|
15624
|
+
globalLastFlushAt = 0;
|
|
14339
15625
|
TRIM_THRESHOLD = 3500;
|
|
14340
15626
|
MAX_ENTRIES = 200;
|
|
14341
15627
|
LiveStatusMessage = class {
|
|
@@ -14435,10 +15721,12 @@ var init_live_status = __esm({
|
|
|
14435
15721
|
async flush() {
|
|
14436
15722
|
if (this.finalized || !this.messageId || !this.channel.editText) return;
|
|
14437
15723
|
if (Date.now() < this.nextFlushAllowedAt) return;
|
|
15724
|
+
if (!canFlushGlobally()) return;
|
|
14438
15725
|
const deduped = dedupThinking(this.entries);
|
|
14439
15726
|
const body = renderEntries(deduped, this.modelLabel, Date.now() - this.startTime, this.hasTrimmed);
|
|
14440
15727
|
if (body === this.lastRendered) return;
|
|
14441
15728
|
this.lastRendered = body;
|
|
15729
|
+
markGlobalFlush();
|
|
14442
15730
|
try {
|
|
14443
15731
|
await this.channel.editText(this.chatId, this.messageId, body, "plain");
|
|
14444
15732
|
} catch (err) {
|
|
@@ -16101,10 +17389,21 @@ function assembleHeartbeatPrompt(chatId) {
|
|
|
16101
17389
|
if (failedSinceLastBeat.length > 0) {
|
|
16102
17390
|
healthLines.push(`${failedSinceLastBeat.length} job run(s) failed recently.`);
|
|
16103
17391
|
}
|
|
16104
|
-
for (const backend2 of
|
|
17392
|
+
for (const backend2 of getAllBackendIds()) {
|
|
16105
17393
|
const limitMsg = checkBackendLimits(backend2);
|
|
16106
17394
|
if (limitMsg) healthLines.push(limitMsg);
|
|
16107
17395
|
}
|
|
17396
|
+
try {
|
|
17397
|
+
const { OllamaService } = (init_ollama(), __toCommonJS(ollama_exports));
|
|
17398
|
+
const servers = OllamaService.listServers();
|
|
17399
|
+
if (servers.length > 0) {
|
|
17400
|
+
const offline = servers.filter((s) => s.status === "offline");
|
|
17401
|
+
if (offline.length > 0) {
|
|
17402
|
+
healthLines.push(`Ollama: ${offline.length}/${servers.length} server(s) offline: ${offline.map((s) => s.name).join(", ")}`);
|
|
17403
|
+
}
|
|
17404
|
+
}
|
|
17405
|
+
} catch {
|
|
17406
|
+
}
|
|
16108
17407
|
if (healthLines.length > 0) {
|
|
16109
17408
|
sections.push(`[System health]
|
|
16110
17409
|
${healthLines.join("\n")}`);
|
|
@@ -17487,13 +18786,13 @@ async function handleEvolveCallback(chatId, data, channel) {
|
|
|
17487
18786
|
const { getReflectionStatus: getReflectionStatus2, setReflectionStatus: setReflectionStatus2 } = await Promise.resolve().then(() => (init_store4(), store_exports4));
|
|
17488
18787
|
const current = getReflectionStatus2(getDb(), chatId);
|
|
17489
18788
|
if (current === "frozen") {
|
|
17490
|
-
const { readFileSync: readFileSync27, existsSync:
|
|
18789
|
+
const { readFileSync: readFileSync27, existsSync: existsSync56 } = await import("fs");
|
|
17491
18790
|
const { join: join35 } = await import("path");
|
|
17492
18791
|
const { CC_CLAW_HOME: CC_CLAW_HOME3 } = await Promise.resolve().then(() => (init_paths(), paths_exports));
|
|
17493
18792
|
const soulPath = join35(CC_CLAW_HOME3, "identity/SOUL.md");
|
|
17494
18793
|
const userPath = join35(CC_CLAW_HOME3, "identity/USER.md");
|
|
17495
|
-
const soul =
|
|
17496
|
-
const user =
|
|
18794
|
+
const soul = existsSync56(soulPath) ? readFileSync27(soulPath, "utf-8") : "";
|
|
18795
|
+
const user = existsSync56(userPath) ? readFileSync27(userPath, "utf-8") : "";
|
|
17497
18796
|
setReflectionStatus2(getDb(), chatId, "active", soul, user);
|
|
17498
18797
|
const { logActivity: logActivity2 } = await Promise.resolve().then(() => (init_store3(), store_exports3));
|
|
17499
18798
|
logActivity2(getDb(), { chatId, source: "telegram", eventType: "reflection_unfrozen", summary: "Reflection enabled" });
|
|
@@ -20393,6 +21692,278 @@ var init_command_handlers = __esm({
|
|
|
20393
21692
|
}
|
|
20394
21693
|
});
|
|
20395
21694
|
|
|
21695
|
+
// src/router/ollama.ts
|
|
21696
|
+
var ollama_exports2 = {};
|
|
21697
|
+
__export(ollama_exports2, {
|
|
21698
|
+
handleOllamaCallback: () => handleOllamaCallback,
|
|
21699
|
+
handleOllamaCommand: () => handleOllamaCommand
|
|
21700
|
+
});
|
|
21701
|
+
async function handleOllamaCommand(chatId, commandArgs, channel) {
|
|
21702
|
+
const [sub, ...rest] = (commandArgs || "").trim().split(/\s+/);
|
|
21703
|
+
switch (sub) {
|
|
21704
|
+
case "models":
|
|
21705
|
+
return sendModelList(chatId, channel, rest[0]);
|
|
21706
|
+
case "health":
|
|
21707
|
+
return sendHealthCheck(chatId, channel);
|
|
21708
|
+
case "discover":
|
|
21709
|
+
return sendDiscover(chatId, channel, rest[0]);
|
|
21710
|
+
case "add":
|
|
21711
|
+
if (rest.length >= 2) return handleAdd(chatId, channel, rest[0], rest[1], rest[2] ? parseInt(rest[2], 10) : void 0);
|
|
21712
|
+
if (rest.length === 1) return handleAdd(chatId, channel, rest[0], rest[0]);
|
|
21713
|
+
await channel.sendText(chatId, "Usage: /ollama add <name> <host> [port]", { parseMode: "plain" });
|
|
21714
|
+
return;
|
|
21715
|
+
case "remove":
|
|
21716
|
+
if (rest[0]) return sendRemoveConfirm(chatId, channel, rest[0]);
|
|
21717
|
+
await channel.sendText(chatId, "Usage: /ollama remove <server-name>", { parseMode: "plain" });
|
|
21718
|
+
return;
|
|
21719
|
+
default:
|
|
21720
|
+
return sendOllamaDashboard(chatId, channel);
|
|
21721
|
+
}
|
|
21722
|
+
}
|
|
21723
|
+
async function sendOllamaDashboard(chatId, channel) {
|
|
21724
|
+
const { OllamaStore } = await Promise.resolve().then(() => (init_ollama(), ollama_exports));
|
|
21725
|
+
const servers = OllamaStore.listServers();
|
|
21726
|
+
if (servers.length === 0) {
|
|
21727
|
+
const text = [
|
|
21728
|
+
"<b>\u{1F999} Ollama</b>",
|
|
21729
|
+
"",
|
|
21730
|
+
"No servers configured.",
|
|
21731
|
+
"",
|
|
21732
|
+
"Add your first server:",
|
|
21733
|
+
" <code>/ollama add <name> <host> [port]</code>",
|
|
21734
|
+
"",
|
|
21735
|
+
"Example:",
|
|
21736
|
+
" <code>/ollama add local 192.168.1.100</code>"
|
|
21737
|
+
].join("\n");
|
|
21738
|
+
if (typeof channel.sendKeyboard === "function") {
|
|
21739
|
+
await channel.sendKeyboard(chatId, text, [
|
|
21740
|
+
[{ label: "\u{1F4D6} Help", data: "ollama:help" }]
|
|
21741
|
+
]);
|
|
21742
|
+
} else {
|
|
21743
|
+
await channel.sendText(chatId, text, { parseMode: "html" });
|
|
21744
|
+
}
|
|
21745
|
+
return;
|
|
21746
|
+
}
|
|
21747
|
+
const lines = ["<b>\u{1F999} Ollama Servers</b>", ""];
|
|
21748
|
+
for (const s of servers) {
|
|
21749
|
+
const dot = s.status === "online" ? "\u{1F7E2}" : "\u{1F534}";
|
|
21750
|
+
const modelCount = OllamaStore.listModels(s.id).length;
|
|
21751
|
+
lines.push(`${dot} <b>${s.name}</b> <code>${s.host}:${s.port}</code>`);
|
|
21752
|
+
lines.push(` ${modelCount} model${modelCount !== 1 ? "s" : ""} \xB7 ${s.status}`);
|
|
21753
|
+
}
|
|
21754
|
+
const onlineCount = servers.filter((s) => s.status === "online").length;
|
|
21755
|
+
lines.push("", `${onlineCount}/${servers.length} online`);
|
|
21756
|
+
if (typeof channel.sendKeyboard === "function") {
|
|
21757
|
+
const buttons = [
|
|
21758
|
+
[
|
|
21759
|
+
{ label: "\u{1F50D} Discover", data: "ollama:discover" },
|
|
21760
|
+
{ label: "\u{1F49A} Health", data: "ollama:health", style: "success" }
|
|
21761
|
+
],
|
|
21762
|
+
[
|
|
21763
|
+
{ label: "\u{1F4CB} Models", data: "ollama:models" },
|
|
21764
|
+
{ label: "\u2795 Add", data: "ollama:add_prompt" }
|
|
21765
|
+
]
|
|
21766
|
+
];
|
|
21767
|
+
if (servers.length <= 4) {
|
|
21768
|
+
const removeRow = servers.map((s) => ({
|
|
21769
|
+
label: `\u{1F5D1} ${s.name}`,
|
|
21770
|
+
data: `ollama:remove_confirm:${s.name}`,
|
|
21771
|
+
style: "danger"
|
|
21772
|
+
}));
|
|
21773
|
+
buttons.push(removeRow);
|
|
21774
|
+
}
|
|
21775
|
+
await channel.sendKeyboard(chatId, lines.join("\n"), buttons);
|
|
21776
|
+
} else {
|
|
21777
|
+
await channel.sendText(chatId, lines.join("\n"), { parseMode: "html" });
|
|
21778
|
+
}
|
|
21779
|
+
}
|
|
21780
|
+
async function handleAdd(chatId, channel, name, host, port) {
|
|
21781
|
+
const { OllamaStore, OllamaClient, OllamaService } = await Promise.resolve().then(() => (init_ollama(), ollama_exports));
|
|
21782
|
+
const existing = OllamaStore.getServer(name);
|
|
21783
|
+
if (existing) {
|
|
21784
|
+
await channel.sendText(chatId, `Server "${name}" already exists. Remove it first.`, { parseMode: "plain" });
|
|
21785
|
+
return;
|
|
21786
|
+
}
|
|
21787
|
+
const actualPort = port ?? 11434;
|
|
21788
|
+
await channel.sendText(chatId, `Adding ${name} (${host}:${actualPort})...`, { parseMode: "plain" });
|
|
21789
|
+
const online = await OllamaClient.ping(`http://${host}:${actualPort}`, { timeoutMs: 5e3 });
|
|
21790
|
+
const server = OllamaStore.addServer(name, host, actualPort, null);
|
|
21791
|
+
OllamaStore.updateServerStatus(server.id, online ? "online" : "offline");
|
|
21792
|
+
if (!online) {
|
|
21793
|
+
await channel.sendText(chatId, `\u26A0\uFE0F Server "${name}" added but not responding. Check connectivity.`, { parseMode: "plain" });
|
|
21794
|
+
return;
|
|
21795
|
+
}
|
|
21796
|
+
const models = await OllamaService.discoverModels(name);
|
|
21797
|
+
const lines = [
|
|
21798
|
+
`\u2705 Added "${name}" (${host}:${actualPort})`,
|
|
21799
|
+
"",
|
|
21800
|
+
`Found ${models.length} model(s):`
|
|
21801
|
+
];
|
|
21802
|
+
for (const m of models) {
|
|
21803
|
+
lines.push(` \u2022 ${m.name}${m.parameterSize ? ` (${m.parameterSize})` : ""}`);
|
|
21804
|
+
}
|
|
21805
|
+
await channel.sendText(chatId, lines.join("\n"), { parseMode: "plain" });
|
|
21806
|
+
}
|
|
21807
|
+
async function sendModelList(chatId, channel, serverName) {
|
|
21808
|
+
const { OllamaStore } = await Promise.resolve().then(() => (init_ollama(), ollama_exports));
|
|
21809
|
+
let models;
|
|
21810
|
+
if (serverName) {
|
|
21811
|
+
const server = OllamaStore.getServer(serverName);
|
|
21812
|
+
if (!server) {
|
|
21813
|
+
await channel.sendText(chatId, `Server "${serverName}" not found.`, { parseMode: "plain" });
|
|
21814
|
+
return;
|
|
21815
|
+
}
|
|
21816
|
+
models = OllamaStore.listModels(server.id);
|
|
21817
|
+
} else {
|
|
21818
|
+
models = OllamaStore.getAvailableModels();
|
|
21819
|
+
}
|
|
21820
|
+
if (models.length === 0) {
|
|
21821
|
+
await channel.sendText(chatId, "No models discovered. Run /ollama discover first.", { parseMode: "plain" });
|
|
21822
|
+
return;
|
|
21823
|
+
}
|
|
21824
|
+
const lines = [
|
|
21825
|
+
`<b>\u{1F9E0} Ollama Models</b>${serverName ? ` (${serverName})` : ""}`,
|
|
21826
|
+
""
|
|
21827
|
+
];
|
|
21828
|
+
for (const m of models) {
|
|
21829
|
+
const sizeGB = m.sizeBytes > 0 ? `${(m.sizeBytes / 1e9).toFixed(1)}GB` : "";
|
|
21830
|
+
const ctxK = m.contextWindow ? `${Math.round(m.contextWindow / 1e3)}K ctx` : "";
|
|
21831
|
+
const meta = [m.parameterSize, m.quantization, sizeGB, ctxK].filter(Boolean).join(" \xB7 ");
|
|
21832
|
+
lines.push(` \u2022 <b>${m.name}</b>`);
|
|
21833
|
+
if (meta) lines.push(` <i>${meta}</i>`);
|
|
21834
|
+
}
|
|
21835
|
+
lines.push("", `${models.length} model${models.length !== 1 ? "s" : ""} total`);
|
|
21836
|
+
await channel.sendText(chatId, lines.join("\n"), { parseMode: "html" });
|
|
21837
|
+
}
|
|
21838
|
+
async function sendDiscover(chatId, channel, serverName) {
|
|
21839
|
+
await channel.sendText(chatId, "\u{1F50D} Discovering models...", { parseMode: "plain" });
|
|
21840
|
+
try {
|
|
21841
|
+
const { OllamaService } = await Promise.resolve().then(() => (init_ollama(), ollama_exports));
|
|
21842
|
+
const models = await OllamaService.discoverModels(serverName);
|
|
21843
|
+
if (models.length === 0) {
|
|
21844
|
+
await channel.sendText(chatId, "No models found. Check server connectivity.", { parseMode: "plain" });
|
|
21845
|
+
return;
|
|
21846
|
+
}
|
|
21847
|
+
const lines = [`\u2705 Found ${models.length} model(s):`, ""];
|
|
21848
|
+
for (const m of models) {
|
|
21849
|
+
const meta = [m.parameterSize, m.contextWindow ? `${Math.round(m.contextWindow / 1e3)}K ctx` : ""].filter(Boolean).join(" \xB7 ");
|
|
21850
|
+
lines.push(` \u2022 ${m.name}${meta ? ` (${meta})` : ""}`);
|
|
21851
|
+
}
|
|
21852
|
+
await channel.sendText(chatId, lines.join("\n"), { parseMode: "plain" });
|
|
21853
|
+
} catch (err) {
|
|
21854
|
+
await channel.sendText(chatId, `Discovery failed: ${err instanceof Error ? err.message : String(err)}`, { parseMode: "plain" });
|
|
21855
|
+
}
|
|
21856
|
+
}
|
|
21857
|
+
async function sendHealthCheck(chatId, channel) {
|
|
21858
|
+
await channel.sendText(chatId, "\u{1F49A} Pinging servers...", { parseMode: "plain" });
|
|
21859
|
+
try {
|
|
21860
|
+
const { OllamaService } = await Promise.resolve().then(() => (init_ollama(), ollama_exports));
|
|
21861
|
+
const results = await OllamaService.healthCheck();
|
|
21862
|
+
if (results.length === 0) {
|
|
21863
|
+
await channel.sendText(chatId, "No servers configured.", { parseMode: "plain" });
|
|
21864
|
+
return;
|
|
21865
|
+
}
|
|
21866
|
+
const lines = ["Health Check Results:", ""];
|
|
21867
|
+
for (const s of results) {
|
|
21868
|
+
const dot = s.status === "online" ? "\u{1F7E2}" : "\u{1F534}";
|
|
21869
|
+
lines.push(`${dot} ${s.name} (${s.host}:${s.port}) \u2014 ${s.status}`);
|
|
21870
|
+
}
|
|
21871
|
+
const online = results.filter((s) => s.status === "online").length;
|
|
21872
|
+
lines.push("", `${online}/${results.length} online`);
|
|
21873
|
+
await channel.sendText(chatId, lines.join("\n"), { parseMode: "plain" });
|
|
21874
|
+
} catch (err) {
|
|
21875
|
+
await channel.sendText(chatId, `Health check failed: ${err instanceof Error ? err.message : String(err)}`, { parseMode: "plain" });
|
|
21876
|
+
}
|
|
21877
|
+
}
|
|
21878
|
+
async function sendRemoveConfirm(chatId, channel, name) {
|
|
21879
|
+
const { OllamaStore } = await Promise.resolve().then(() => (init_ollama(), ollama_exports));
|
|
21880
|
+
const server = OllamaStore.getServer(name);
|
|
21881
|
+
if (!server) {
|
|
21882
|
+
await channel.sendText(chatId, `Server "${name}" not found.`, { parseMode: "plain" });
|
|
21883
|
+
return;
|
|
21884
|
+
}
|
|
21885
|
+
const modelCount = OllamaStore.listModels(server.id).length;
|
|
21886
|
+
const text = [
|
|
21887
|
+
`Remove server <b>${name}</b>?`,
|
|
21888
|
+
"",
|
|
21889
|
+
`Host: ${server.host}:${server.port}`,
|
|
21890
|
+
`Status: ${server.status}`,
|
|
21891
|
+
`Models: ${modelCount}`,
|
|
21892
|
+
"",
|
|
21893
|
+
"This will also remove all cached model data."
|
|
21894
|
+
].join("\n");
|
|
21895
|
+
if (typeof channel.sendKeyboard === "function") {
|
|
21896
|
+
await channel.sendKeyboard(chatId, text, [
|
|
21897
|
+
[
|
|
21898
|
+
{ label: "\u2705 Confirm", data: `ollama:remove:${name}`, style: "danger" },
|
|
21899
|
+
{ label: "\u274C Cancel", data: "ollama:dashboard" }
|
|
21900
|
+
]
|
|
21901
|
+
]);
|
|
21902
|
+
} else {
|
|
21903
|
+
await channel.sendText(chatId, text + "\n\nUse /ollama remove <name> to confirm.", { parseMode: "html" });
|
|
21904
|
+
}
|
|
21905
|
+
}
|
|
21906
|
+
async function handleOllamaCallback(chatId, data, channel) {
|
|
21907
|
+
const parts = data.split(":");
|
|
21908
|
+
const action = parts[1];
|
|
21909
|
+
switch (action) {
|
|
21910
|
+
case "dashboard":
|
|
21911
|
+
return sendOllamaDashboard(chatId, channel);
|
|
21912
|
+
case "models":
|
|
21913
|
+
return sendModelList(chatId, channel);
|
|
21914
|
+
case "discover":
|
|
21915
|
+
return sendDiscover(chatId, channel);
|
|
21916
|
+
case "health":
|
|
21917
|
+
return sendHealthCheck(chatId, channel);
|
|
21918
|
+
case "remove_confirm": {
|
|
21919
|
+
const name = parts.slice(2).join(":");
|
|
21920
|
+
return sendRemoveConfirm(chatId, channel, name);
|
|
21921
|
+
}
|
|
21922
|
+
case "remove": {
|
|
21923
|
+
const name = parts.slice(2).join(":");
|
|
21924
|
+
const { OllamaStore } = await Promise.resolve().then(() => (init_ollama(), ollama_exports));
|
|
21925
|
+
const removed = OllamaStore.removeServer(name);
|
|
21926
|
+
if (removed) {
|
|
21927
|
+
await channel.sendText(chatId, `\u2705 Removed server "${name}"`, { parseMode: "plain" });
|
|
21928
|
+
} else {
|
|
21929
|
+
await channel.sendText(chatId, `Server "${name}" not found.`, { parseMode: "plain" });
|
|
21930
|
+
}
|
|
21931
|
+
return sendOllamaDashboard(chatId, channel);
|
|
21932
|
+
}
|
|
21933
|
+
case "add_prompt":
|
|
21934
|
+
await channel.sendText(chatId, [
|
|
21935
|
+
"To add a server, send:",
|
|
21936
|
+
"",
|
|
21937
|
+
" /ollama add <name> <host> [port]",
|
|
21938
|
+
"",
|
|
21939
|
+
"Examples:",
|
|
21940
|
+
" /ollama add local 192.168.1.100",
|
|
21941
|
+
" /ollama add mac-studio 10.0.0.5 11434",
|
|
21942
|
+
" /ollama add cloud api.ollama.example.com 443"
|
|
21943
|
+
].join("\n"), { parseMode: "plain" });
|
|
21944
|
+
return;
|
|
21945
|
+
case "help":
|
|
21946
|
+
await channel.sendText(chatId, [
|
|
21947
|
+
"\u{1F999} Ollama Commands:",
|
|
21948
|
+
"",
|
|
21949
|
+
"/ollama \u2014 Server dashboard",
|
|
21950
|
+
"/ollama add <name> <host> [port] \u2014 Add server",
|
|
21951
|
+
"/ollama remove <name> \u2014 Remove server",
|
|
21952
|
+
"/ollama models [server] \u2014 List models",
|
|
21953
|
+
"/ollama discover [server] \u2014 Discover models",
|
|
21954
|
+
"/ollama health \u2014 Ping all servers"
|
|
21955
|
+
].join("\n"), { parseMode: "plain" });
|
|
21956
|
+
return;
|
|
21957
|
+
default:
|
|
21958
|
+
await channel.sendText(chatId, "Unknown action.", { parseMode: "plain" });
|
|
21959
|
+
}
|
|
21960
|
+
}
|
|
21961
|
+
var init_ollama3 = __esm({
|
|
21962
|
+
"src/router/ollama.ts"() {
|
|
21963
|
+
"use strict";
|
|
21964
|
+
}
|
|
21965
|
+
});
|
|
21966
|
+
|
|
20396
21967
|
// src/router/commands.ts
|
|
20397
21968
|
async function handleCommand(msg, channel) {
|
|
20398
21969
|
const { chatId, command, commandArgs } = msg;
|
|
@@ -20567,9 +22138,33 @@ async function handleCommand(msg, channel) {
|
|
|
20567
22138
|
case "evolve":
|
|
20568
22139
|
await handleEvolveCommandWrapper(chatId, commandArgs, msg, channel);
|
|
20569
22140
|
break;
|
|
22141
|
+
case "reflect": {
|
|
22142
|
+
const { runAnalysis: runAnalysis2 } = await Promise.resolve().then(() => (init_analyze(), analyze_exports));
|
|
22143
|
+
const { formatNightlySummary: formatNightlySummary2 } = await Promise.resolve().then(() => (init_propose(), propose_exports));
|
|
22144
|
+
const { getPrimaryChatId: getPrimaryChatId2 } = await Promise.resolve().then(() => (init_resolve(), resolve_exports));
|
|
22145
|
+
const reflectionChatId = getPrimaryChatId2() || chatId;
|
|
22146
|
+
await channel.sendText(chatId, "\u{1F50D} Running reflection analysis\u2026", { parseMode: "plain" });
|
|
22147
|
+
try {
|
|
22148
|
+
const insights = await runAnalysis2(reflectionChatId, { force: true });
|
|
22149
|
+
if (insights.length > 0) {
|
|
22150
|
+
const nightlyItems = insights.map((ins, i) => ({ id: i + 1, ...ins }));
|
|
22151
|
+
await channel.sendText(chatId, formatNightlySummary2(nightlyItems), { parseMode: "plain" });
|
|
22152
|
+
} else {
|
|
22153
|
+
await channel.sendText(chatId, "No new insights from reflection analysis.", { parseMode: "plain" });
|
|
22154
|
+
}
|
|
22155
|
+
} catch (e) {
|
|
22156
|
+
await channel.sendText(chatId, `Analysis failed: ${e}`, { parseMode: "plain" });
|
|
22157
|
+
}
|
|
22158
|
+
break;
|
|
22159
|
+
}
|
|
20570
22160
|
case "optimize":
|
|
20571
22161
|
await handleOptimizeCommandWrapper(chatId, commandArgs, msg, channel);
|
|
20572
22162
|
break;
|
|
22163
|
+
case "ollama": {
|
|
22164
|
+
const { handleOllamaCommand: handleOllamaCommand2 } = await Promise.resolve().then(() => (init_ollama3(), ollama_exports2));
|
|
22165
|
+
await handleOllamaCommand2(chatId, commandArgs ?? "", channel);
|
|
22166
|
+
break;
|
|
22167
|
+
}
|
|
20573
22168
|
default:
|
|
20574
22169
|
await channel.sendText(chatId, `Unknown command: /${command}. Type /help for available commands.`, { parseMode: "plain" });
|
|
20575
22170
|
}
|
|
@@ -20741,7 +22336,7 @@ ${PERM_MODES[chosen]}`,
|
|
|
20741
22336
|
const pending = getPendingEscalation(chatId);
|
|
20742
22337
|
if (pending) {
|
|
20743
22338
|
removePendingEscalation(chatId);
|
|
20744
|
-
const { handleMessage: handleMessage2 } = await Promise.resolve().then(() => (
|
|
22339
|
+
const { handleMessage: handleMessage2 } = await Promise.resolve().then(() => (init_router2(), router_exports));
|
|
20745
22340
|
await handleMessage2({ text: pending, chatId, source: "telegram", type: "text" }, channel);
|
|
20746
22341
|
}
|
|
20747
22342
|
} else if (data === "perm:deny") {
|
|
@@ -20800,7 +22395,7 @@ ${PERM_MODES[chosen]}`,
|
|
|
20800
22395
|
removePendingPlan(chatId);
|
|
20801
22396
|
await replaceWithText("\u2705 Approved. Executing...");
|
|
20802
22397
|
bypassBusyCheck.add(chatId);
|
|
20803
|
-
const { handleMessage: handleMessage2 } = await Promise.resolve().then(() => (
|
|
22398
|
+
const { handleMessage: handleMessage2 } = await Promise.resolve().then(() => (init_router2(), router_exports));
|
|
20804
22399
|
const overrideMsg = `[SYSTEM: Planning mode disabled. Execution APPROVED. Please execute the plan.]
|
|
20805
22400
|
${plan.originalMessage}`;
|
|
20806
22401
|
await handleMessage2(
|
|
@@ -20828,7 +22423,7 @@ ${plan.originalMessage}`;
|
|
|
20828
22423
|
setExecMode(chatId, "yolo");
|
|
20829
22424
|
await channel.sendText(chatId, "\u26A1 Switched to YOLO mode. Executing without approval gate...", { parseMode: "plain" });
|
|
20830
22425
|
bypassBusyCheck.add(chatId);
|
|
20831
|
-
const { handleMessage: handleMessage2 } = await Promise.resolve().then(() => (
|
|
22426
|
+
const { handleMessage: handleMessage2 } = await Promise.resolve().then(() => (init_router2(), router_exports));
|
|
20832
22427
|
const overrideMsg = `[SYSTEM: Planning mode disabled. YOLO mode enabled. Proceed immediately without asking for approval.]
|
|
20833
22428
|
${plan.originalMessage}`;
|
|
20834
22429
|
await handleMessage2(
|
|
@@ -21142,7 +22737,7 @@ ${rotationNote}`, { parseMode: "html" });
|
|
|
21142
22737
|
const pending = pendingFallbackMessages2.get(chatId);
|
|
21143
22738
|
if (pending) {
|
|
21144
22739
|
pendingFallbackMessages2.delete(chatId);
|
|
21145
|
-
const { handleMessage: handleMessage2 } = await Promise.resolve().then(() => (
|
|
22740
|
+
const { handleMessage: handleMessage2 } = await Promise.resolve().then(() => (init_router2(), router_exports));
|
|
21146
22741
|
await handleMessage2(pending.msg, pending.channel);
|
|
21147
22742
|
}
|
|
21148
22743
|
} else if (action === "deny") {
|
|
@@ -21216,14 +22811,14 @@ ${rotationNote}`, { parseMode: "html" });
|
|
|
21216
22811
|
stopAgent(targetChatId);
|
|
21217
22812
|
await channel.sendText(chatId, "\u26A1 Stopping current task and processing your message\u2026", { parseMode: "plain" });
|
|
21218
22813
|
bypassBusyCheck.add(targetChatId);
|
|
21219
|
-
const { handleMessage: handleMessage2 } = await Promise.resolve().then(() => (
|
|
22814
|
+
const { handleMessage: handleMessage2 } = await Promise.resolve().then(() => (init_router2(), router_exports));
|
|
21220
22815
|
handleMessage2(pending.msg, pending.channel).catch(() => {
|
|
21221
22816
|
});
|
|
21222
22817
|
} else if (action === "queue" && pending) {
|
|
21223
22818
|
pendingInterrupts.delete(targetChatId);
|
|
21224
22819
|
bypassBusyCheck.add(targetChatId);
|
|
21225
22820
|
await channel.sendText(chatId, "\u{1F4E5} Message queued \u2014 will process after current task.", { parseMode: "plain" });
|
|
21226
|
-
const { handleMessage: handleMessage2 } = await Promise.resolve().then(() => (
|
|
22821
|
+
const { handleMessage: handleMessage2 } = await Promise.resolve().then(() => (init_router2(), router_exports));
|
|
21227
22822
|
handleMessage2(pending.msg, pending.channel).catch(() => {
|
|
21228
22823
|
});
|
|
21229
22824
|
} else if (action === "sidequest") {
|
|
@@ -21267,7 +22862,7 @@ ${rotationNote}`, { parseMode: "html" });
|
|
|
21267
22862
|
setBackend(targetChatId, targetBackend);
|
|
21268
22863
|
const adapter = getAdapter(targetBackend);
|
|
21269
22864
|
await channel.sendText(chatId, `Switched to ${adapter.displayName}. Resending your message\u2026`, { parseMode: "plain" });
|
|
21270
|
-
const { handleMessage: handleMessage2 } = await Promise.resolve().then(() => (
|
|
22865
|
+
const { handleMessage: handleMessage2 } = await Promise.resolve().then(() => (init_router2(), router_exports));
|
|
21271
22866
|
await handleMessage2(pendingMsg.msg, pendingMsg.channel);
|
|
21272
22867
|
} else {
|
|
21273
22868
|
await channel.sendText(chatId, "Fallback expired. Use /backend to switch manually.", { parseMode: "plain" });
|
|
@@ -21281,6 +22876,10 @@ ${rotationNote}`, { parseMode: "html" });
|
|
|
21281
22876
|
} else if (data.startsWith("opt:")) {
|
|
21282
22877
|
await handleOptimizeCallback(chatId, data, channel);
|
|
21283
22878
|
return;
|
|
22879
|
+
} else if (data.startsWith("ollama:")) {
|
|
22880
|
+
const { handleOllamaCallback: handleOllamaCallback2 } = await Promise.resolve().then(() => (init_ollama3(), ollama_exports2));
|
|
22881
|
+
await handleOllamaCallback2(chatId, data, channel);
|
|
22882
|
+
return;
|
|
21284
22883
|
} else if (data.startsWith("summ:")) {
|
|
21285
22884
|
const action = data.slice(5);
|
|
21286
22885
|
if (action === "all") {
|
|
@@ -22175,7 +23774,7 @@ Approve paid usage for this session?`,
|
|
|
22175
23774
|
}
|
|
22176
23775
|
}
|
|
22177
23776
|
}
|
|
22178
|
-
var
|
|
23777
|
+
var init_router2 = __esm({
|
|
22179
23778
|
"src/router.ts"() {
|
|
22180
23779
|
"use strict";
|
|
22181
23780
|
init_profile();
|
|
@@ -22499,7 +24098,7 @@ async function runWithRetry(job, model2, runId, t0) {
|
|
|
22499
24098
|
const vLevel = getVerboseLevel3(chatId);
|
|
22500
24099
|
let onToolAction;
|
|
22501
24100
|
if (vLevel !== "off") {
|
|
22502
|
-
const { makeToolActionCallback: makeToolActionCallback2 } = await Promise.resolve().then(() => (
|
|
24101
|
+
const { makeToolActionCallback: makeToolActionCallback2 } = await Promise.resolve().then(() => (init_router2(), router_exports));
|
|
22503
24102
|
const channelName = job.channel ?? "telegram";
|
|
22504
24103
|
const { getChannelRegistry: getChannelRegistry2 } = await Promise.resolve().then(() => (init_delivery(), delivery_exports));
|
|
22505
24104
|
const channel = getChannelRegistry2()?.get(channelName);
|
|
@@ -23178,7 +24777,7 @@ var init_telegram = __esm({
|
|
|
23178
24777
|
return `<s>${text}</s>`;
|
|
23179
24778
|
},
|
|
23180
24779
|
codespan(code) {
|
|
23181
|
-
return `<code>${
|
|
24780
|
+
return `<code>${code}</code>`;
|
|
23182
24781
|
},
|
|
23183
24782
|
code(code, lang) {
|
|
23184
24783
|
const langAttr = lang ? ` class="language-${escapeHtml2(lang)}"` : "";
|
|
@@ -23229,13 +24828,37 @@ ${body.replace(/<[^>]*>/g, "").trim()}</code>
|
|
|
23229
24828
|
|
|
23230
24829
|
// src/channels/telegram.ts
|
|
23231
24830
|
import { API_CONSTANTS, Bot, GrammyError, InlineKeyboard, InputFile } from "grammy";
|
|
24831
|
+
function tripCircuitBreaker(retrySec) {
|
|
24832
|
+
const until = Date.now() + retrySec * 1e3;
|
|
24833
|
+
if (until > circuitBreakerUntil) {
|
|
24834
|
+
circuitBreakerUntil = until;
|
|
24835
|
+
warn(`[telegram] Circuit breaker tripped \u2014 pausing ALL Telegram API calls for ${retrySec}s`);
|
|
24836
|
+
}
|
|
24837
|
+
}
|
|
24838
|
+
async function waitForCircuitBreaker() {
|
|
24839
|
+
const remaining = circuitBreakerUntil - Date.now();
|
|
24840
|
+
if (remaining <= 0) return false;
|
|
24841
|
+
if (remaining > GIVE_UP_THRESHOLD_SEC * 1e3) return true;
|
|
24842
|
+
await new Promise((r) => setTimeout(r, remaining));
|
|
24843
|
+
return false;
|
|
24844
|
+
}
|
|
23232
24845
|
async function withRetry(label2, fn) {
|
|
23233
24846
|
for (let attempt = 0; attempt <= MAX_RETRIES2; attempt++) {
|
|
24847
|
+
const tooLong = await waitForCircuitBreaker();
|
|
24848
|
+
if (tooLong) {
|
|
24849
|
+
warn(`[telegram] 429 on ${label2} \u2014 circuit breaker cooldown too long, giving up`);
|
|
24850
|
+
throw new GrammyError(`Circuit breaker active \u2014 skipping ${label2}`, { ok: false, error_code: 429, description: "Rate limited (circuit breaker)" }, label2, {});
|
|
24851
|
+
}
|
|
23234
24852
|
try {
|
|
23235
24853
|
return await fn();
|
|
23236
24854
|
} catch (err) {
|
|
23237
24855
|
if (err instanceof GrammyError && err.error_code === 429) {
|
|
23238
|
-
const retrySec =
|
|
24856
|
+
const retrySec = err.parameters?.retry_after ?? FALLBACK_RETRY_SEC;
|
|
24857
|
+
tripCircuitBreaker(retrySec);
|
|
24858
|
+
if (retrySec > GIVE_UP_THRESHOLD_SEC) {
|
|
24859
|
+
warn(`[telegram] 429 on ${label2} \u2014 retry_after ${retrySec}s exceeds threshold, giving up`);
|
|
24860
|
+
throw err;
|
|
24861
|
+
}
|
|
23239
24862
|
if (attempt < MAX_RETRIES2) {
|
|
23240
24863
|
warn(`[telegram] 429 on ${label2} (attempt ${attempt + 1}/${MAX_RETRIES2}) \u2014 retrying in ${retrySec}s`);
|
|
23241
24864
|
await new Promise((r) => setTimeout(r, retrySec * 1e3));
|
|
@@ -23267,7 +24890,7 @@ function numericChatId(chatId) {
|
|
|
23267
24890
|
const raw = chatId.includes(":") ? chatId.split(":").pop() : chatId;
|
|
23268
24891
|
return parseInt(raw);
|
|
23269
24892
|
}
|
|
23270
|
-
var MAX_RETRIES2, FALLBACK_RETRY_SEC,
|
|
24893
|
+
var MAX_RETRIES2, FALLBACK_RETRY_SEC, GIVE_UP_THRESHOLD_SEC, circuitBreakerUntil, FAST_PATH_COMMANDS, TelegramChannel;
|
|
23271
24894
|
var init_telegram2 = __esm({
|
|
23272
24895
|
"src/channels/telegram.ts"() {
|
|
23273
24896
|
"use strict";
|
|
@@ -23276,7 +24899,8 @@ var init_telegram2 = __esm({
|
|
|
23276
24899
|
init_store5();
|
|
23277
24900
|
MAX_RETRIES2 = 3;
|
|
23278
24901
|
FALLBACK_RETRY_SEC = 3;
|
|
23279
|
-
|
|
24902
|
+
GIVE_UP_THRESHOLD_SEC = 60;
|
|
24903
|
+
circuitBreakerUntil = 0;
|
|
23280
24904
|
FAST_PATH_COMMANDS = /* @__PURE__ */ new Set(["stop", "status", "new", "newchat"]);
|
|
23281
24905
|
TelegramChannel = class {
|
|
23282
24906
|
name = "telegram";
|
|
@@ -23347,7 +24971,6 @@ var init_telegram2 = __esm({
|
|
|
23347
24971
|
{ command: "summarizer", description: "Configure session summarization model" },
|
|
23348
24972
|
// Permissions & tools
|
|
23349
24973
|
{ command: "permissions", description: "Permission mode (yolo/safe/readonly/plan)" },
|
|
23350
|
-
{ command: "mode", description: "Execution gate (approved/yolo)" },
|
|
23351
24974
|
{ command: "tools", description: "Configure which tools the agent can use" },
|
|
23352
24975
|
{ command: "verbose", description: "Tool visibility (off/normal/verbose)" },
|
|
23353
24976
|
{ command: "debug", description: "Toggle session debug logging (full tool I/O)" },
|
|
@@ -23392,7 +25015,9 @@ var init_telegram2 = __esm({
|
|
|
23392
25015
|
{ command: "intent", description: "Test intent classifier on a message" },
|
|
23393
25016
|
{ command: "evolve", description: "Self-learning & evolution controls" },
|
|
23394
25017
|
{ command: "reflect", description: "Trigger reflection analysis" },
|
|
23395
|
-
{ command: "optimize", description: "Audit identity files and skills" }
|
|
25018
|
+
{ command: "optimize", description: "Audit identity files and skills" },
|
|
25019
|
+
// Ollama
|
|
25020
|
+
{ command: "ollama", description: "Manage Ollama local LLM servers" }
|
|
23396
25021
|
]);
|
|
23397
25022
|
this.bot.on("message", async (ctx) => {
|
|
23398
25023
|
const chatId = ctx.chat.id.toString();
|
|
@@ -23613,10 +25238,11 @@ var init_telegram2 = __esm({
|
|
|
23613
25238
|
}
|
|
23614
25239
|
const MAX_KEYBOARD_TEXT = 4e3;
|
|
23615
25240
|
const safeText = text.length > MAX_KEYBOARD_TEXT ? text.slice(0, MAX_KEYBOARD_TEXT) + "\n\n\u2026(truncated)" : text;
|
|
25241
|
+
const formatted = formatForTelegram(safeText);
|
|
23616
25242
|
try {
|
|
23617
25243
|
const msg = await withRetry(
|
|
23618
25244
|
"sendKeyboard",
|
|
23619
|
-
() => this.bot.api.sendMessage(numericChatId(chatId),
|
|
25245
|
+
() => this.bot.api.sendMessage(numericChatId(chatId), formatted, {
|
|
23620
25246
|
parse_mode: "HTML",
|
|
23621
25247
|
reply_markup: keyboard
|
|
23622
25248
|
})
|
|
@@ -23624,6 +25250,28 @@ var init_telegram2 = __esm({
|
|
|
23624
25250
|
return msg.message_id.toString();
|
|
23625
25251
|
} catch (err) {
|
|
23626
25252
|
error(`[telegram] sendKeyboard failed (chat=${chatId}, textLen=${text.length}):`, err);
|
|
25253
|
+
try {
|
|
25254
|
+
const escaped = safeText.replace(/&/g, "&").replace(/</g, "<").replace(/>/g, ">");
|
|
25255
|
+
const retryMsg = await withRetry(
|
|
25256
|
+
"sendKeyboard:plain",
|
|
25257
|
+
() => this.bot.api.sendMessage(numericChatId(chatId), escaped, {
|
|
25258
|
+
reply_markup: keyboard
|
|
25259
|
+
})
|
|
25260
|
+
);
|
|
25261
|
+
return retryMsg.message_id.toString();
|
|
25262
|
+
} catch (plainErr) {
|
|
25263
|
+
error(`[telegram] sendKeyboard plain retry also failed:`, plainErr);
|
|
25264
|
+
}
|
|
25265
|
+
try {
|
|
25266
|
+
const plainText = safeText.replace(/<[^>]+>/g, "");
|
|
25267
|
+
if (plainText.trim()) {
|
|
25268
|
+
await withRetry(
|
|
25269
|
+
"sendKeyboard:text-rescue",
|
|
25270
|
+
() => this.bot.api.sendMessage(numericChatId(chatId), plainText, {})
|
|
25271
|
+
);
|
|
25272
|
+
}
|
|
25273
|
+
} catch {
|
|
25274
|
+
}
|
|
23627
25275
|
try {
|
|
23628
25276
|
const fallbackMsg = await withRetry(
|
|
23629
25277
|
"sendKeyboard:fallback",
|
|
@@ -24667,13 +26315,41 @@ async function main() {
|
|
|
24667
26315
|
}
|
|
24668
26316
|
try {
|
|
24669
26317
|
const { getAdapter: getAdapter4, probeBackendAvailability: probeBackendAvailability2 } = await Promise.resolve().then(() => (init_backends(), backends_exports));
|
|
24670
|
-
probeBackendAvailability2();
|
|
26318
|
+
await probeBackendAvailability2();
|
|
24671
26319
|
const claude = getAdapter4("claude");
|
|
24672
26320
|
if ("getAuthMode" in claude) claude.getAuthMode();
|
|
24673
26321
|
const cursor = getAdapter4("cursor");
|
|
24674
26322
|
if ("probeTier" in cursor) cursor.probeTier();
|
|
24675
26323
|
} catch {
|
|
24676
26324
|
}
|
|
26325
|
+
(async () => {
|
|
26326
|
+
try {
|
|
26327
|
+
const { OllamaService, OllamaStore } = await Promise.resolve().then(() => (init_ollama(), ollama_exports));
|
|
26328
|
+
const servers = OllamaStore.listServers();
|
|
26329
|
+
if (servers.length === 0) return;
|
|
26330
|
+
const checkedServers = await OllamaService.healthCheck();
|
|
26331
|
+
const onlineCount = checkedServers.filter((s) => s.status === "online").length;
|
|
26332
|
+
let modelCount = 0;
|
|
26333
|
+
for (const server of checkedServers) {
|
|
26334
|
+
if (server.status === "online") {
|
|
26335
|
+
const models = await OllamaService.discoverModels(server.name);
|
|
26336
|
+
modelCount += models.length;
|
|
26337
|
+
}
|
|
26338
|
+
}
|
|
26339
|
+
if (modelCount > 0) {
|
|
26340
|
+
try {
|
|
26341
|
+
const { getAdapter: getAdapter4 } = await Promise.resolve().then(() => (init_backends(), backends_exports));
|
|
26342
|
+
const adapter = getAdapter4("ollama");
|
|
26343
|
+
if ("refreshModelCatalog" in adapter) {
|
|
26344
|
+
adapter.refreshModelCatalog();
|
|
26345
|
+
}
|
|
26346
|
+
} catch {
|
|
26347
|
+
}
|
|
26348
|
+
}
|
|
26349
|
+
log(`[cc-claw] Ollama: ${onlineCount}/${servers.length} server(s) online, ${modelCount} model(s) discovered`);
|
|
26350
|
+
} catch {
|
|
26351
|
+
}
|
|
26352
|
+
})();
|
|
24677
26353
|
let pendingSummarizeNotify = null;
|
|
24678
26354
|
const { getLoggedChatIds: getLoggedChatIds2 } = await Promise.resolve().then(() => (init_session_log(), session_log_exports));
|
|
24679
26355
|
const pendingCount = getLoggedChatIds2().length;
|
|
@@ -24730,6 +26406,8 @@ async function main() {
|
|
|
24730
26406
|
startPersistedJobs();
|
|
24731
26407
|
initRunnerRegistry();
|
|
24732
26408
|
initOrchestrator();
|
|
26409
|
+
const { startStaleChatSweep: startStaleChatSweep2 } = await Promise.resolve().then(() => (init_agent(), agent_exports));
|
|
26410
|
+
startStaleChatSweep2();
|
|
24733
26411
|
let notifyQueue = Promise.resolve();
|
|
24734
26412
|
setNotifyCallback((chatId, message) => {
|
|
24735
26413
|
notifyQueue = notifyQueue.then(async () => {
|
|
@@ -24808,8 +26486,9 @@ async function main() {
|
|
|
24808
26486
|
const shutdown = async (signal) => {
|
|
24809
26487
|
log(`[cc-claw] Received ${signal}, shutting down...`);
|
|
24810
26488
|
try {
|
|
24811
|
-
const { stopAllActiveAgents: stopAllActiveAgents2 } = await Promise.resolve().then(() => (init_agent(), agent_exports));
|
|
26489
|
+
const { stopAllActiveAgents: stopAllActiveAgents2, stopStaleChatSweep: stopStaleChatSweep2 } = await Promise.resolve().then(() => (init_agent(), agent_exports));
|
|
24812
26490
|
stopAllActiveAgents2();
|
|
26491
|
+
stopStaleChatSweep2();
|
|
24813
26492
|
stopAllHeartbeats();
|
|
24814
26493
|
stopHealthMonitor3();
|
|
24815
26494
|
stopMonitor();
|
|
@@ -24839,6 +26518,13 @@ async function main() {
|
|
|
24839
26518
|
};
|
|
24840
26519
|
process.on("SIGTERM", () => shutdown("SIGTERM"));
|
|
24841
26520
|
process.on("SIGINT", () => shutdown("SIGINT"));
|
|
26521
|
+
process.on("unhandledRejection", (reason, promise) => {
|
|
26522
|
+
error("[cc-claw] Unhandled promise rejection:", reason);
|
|
26523
|
+
});
|
|
26524
|
+
process.on("uncaughtException", (err, origin) => {
|
|
26525
|
+
error(`[cc-claw] Uncaught exception (${origin}):`, err);
|
|
26526
|
+
setTimeout(() => process.exit(1), 500);
|
|
26527
|
+
});
|
|
24842
26528
|
}
|
|
24843
26529
|
var LOG_MAX_BYTES;
|
|
24844
26530
|
var init_index = __esm({
|
|
@@ -24854,7 +26540,7 @@ var init_index = __esm({
|
|
|
24854
26540
|
init_server();
|
|
24855
26541
|
init_registry3();
|
|
24856
26542
|
init_telegram2();
|
|
24857
|
-
|
|
26543
|
+
init_router2();
|
|
24858
26544
|
init_summarize();
|
|
24859
26545
|
init_detect();
|
|
24860
26546
|
init_bootstrap();
|
|
@@ -24987,8 +26673,8 @@ var init_api_client = __esm({
|
|
|
24987
26673
|
});
|
|
24988
26674
|
|
|
24989
26675
|
// src/service.ts
|
|
24990
|
-
var
|
|
24991
|
-
__export(
|
|
26676
|
+
var service_exports2 = {};
|
|
26677
|
+
__export(service_exports2, {
|
|
24992
26678
|
installService: () => installService,
|
|
24993
26679
|
serviceStatus: () => serviceStatus,
|
|
24994
26680
|
uninstallService: () => uninstallService
|
|
@@ -25228,7 +26914,7 @@ function serviceStatus() {
|
|
|
25228
26914
|
else console.error(` Unsupported platform: ${os2}.`);
|
|
25229
26915
|
}
|
|
25230
26916
|
var PLIST_LABEL, PLIST_PATH, SYSTEMD_DIR, UNIT_PATH;
|
|
25231
|
-
var
|
|
26917
|
+
var init_service2 = __esm({
|
|
25232
26918
|
"src/service.ts"() {
|
|
25233
26919
|
"use strict";
|
|
25234
26920
|
init_paths();
|
|
@@ -25403,7 +27089,7 @@ var init_daemon = __esm({
|
|
|
25403
27089
|
|
|
25404
27090
|
// src/cli/resolve-chat.ts
|
|
25405
27091
|
import { readFileSync as readFileSync19 } from "fs";
|
|
25406
|
-
function
|
|
27092
|
+
function resolveChatId2(globalOpts) {
|
|
25407
27093
|
const explicit = globalOpts.chat;
|
|
25408
27094
|
if (explicit) return explicit;
|
|
25409
27095
|
if (_cachedDefault) return _cachedDefault;
|
|
@@ -25437,7 +27123,7 @@ async function statusCommand(globalOpts, localOpts) {
|
|
|
25437
27123
|
try {
|
|
25438
27124
|
const { openDatabaseReadOnly: openDatabaseReadOnly2 } = await Promise.resolve().then(() => (init_store5(), store_exports5));
|
|
25439
27125
|
const readDb = openDatabaseReadOnly2();
|
|
25440
|
-
const chatId =
|
|
27126
|
+
const chatId = resolveChatId2(globalOpts);
|
|
25441
27127
|
const { getAdapterForChat: getAdapterForChat2, getAdapter: getAdapter4, getAllAdapters: getAllAdapters5 } = await Promise.resolve().then(() => (init_backends(), backends_exports));
|
|
25442
27128
|
let backend2 = null;
|
|
25443
27129
|
let modelName = "not set";
|
|
@@ -25505,6 +27191,17 @@ async function statusCommand(globalOpts, localOpts) {
|
|
|
25505
27191
|
},
|
|
25506
27192
|
db: { path: DB_PATH, sizeBytes: dbStat?.size ?? 0, exists: !!dbStat }
|
|
25507
27193
|
};
|
|
27194
|
+
try {
|
|
27195
|
+
const { OllamaStore } = await Promise.resolve().then(() => (init_ollama(), ollama_exports));
|
|
27196
|
+
const servers = OllamaStore.listServers();
|
|
27197
|
+
if (servers.length > 0) {
|
|
27198
|
+
const online = servers.filter((s) => s.status === "online").length;
|
|
27199
|
+
const models = OllamaStore.getAvailableModels();
|
|
27200
|
+
const topModel = models.length > 0 ? models.sort((a, b) => (b.qualityScore ?? 0) - (a.qualityScore ?? 0))[0].name : void 0;
|
|
27201
|
+
data.ollama = { servers: servers.length, online, models: models.length, topModel };
|
|
27202
|
+
}
|
|
27203
|
+
} catch {
|
|
27204
|
+
}
|
|
25508
27205
|
readDb.close();
|
|
25509
27206
|
output(data, (d) => {
|
|
25510
27207
|
const s = d;
|
|
@@ -25530,9 +27227,20 @@ async function statusCommand(globalOpts, localOpts) {
|
|
|
25530
27227
|
kvLine("Requests", String(s.usage.requests)),
|
|
25531
27228
|
kvLine("In tokens", s.usage.input_tokens.toLocaleString()),
|
|
25532
27229
|
kvLine("Out tokens", s.usage.output_tokens.toLocaleString()),
|
|
25533
|
-
kvLine("Cache read", s.usage.cache_read_tokens.toLocaleString())
|
|
25534
|
-
""
|
|
27230
|
+
kvLine("Cache read", s.usage.cache_read_tokens.toLocaleString())
|
|
25535
27231
|
);
|
|
27232
|
+
if (s.ollama) {
|
|
27233
|
+
lines.push(
|
|
27234
|
+
"",
|
|
27235
|
+
divider("Ollama (Local)"),
|
|
27236
|
+
kvLine("Servers", `${s.ollama.online}/${s.ollama.servers} online`),
|
|
27237
|
+
kvLine("Models", String(s.ollama.models))
|
|
27238
|
+
);
|
|
27239
|
+
if (s.ollama.topModel) {
|
|
27240
|
+
lines.push(kvLine("Top model", s.ollama.topModel));
|
|
27241
|
+
}
|
|
27242
|
+
}
|
|
27243
|
+
lines.push("");
|
|
25536
27244
|
return lines.join("\n");
|
|
25537
27245
|
});
|
|
25538
27246
|
} catch (err) {
|
|
@@ -25679,15 +27387,42 @@ async function doctorCommand(globalOpts, localOpts) {
|
|
|
25679
27387
|
try {
|
|
25680
27388
|
const { readFileSync: readFileSync27 } = await import("fs");
|
|
25681
27389
|
const logContent = readFileSync27(ERROR_LOG_PATH, "utf-8");
|
|
25682
|
-
const recentLines = logContent.split("\n").filter(Boolean).slice(-
|
|
27390
|
+
const recentLines = logContent.split("\n").filter(Boolean).slice(-500);
|
|
25683
27391
|
const last24h = Date.now() - 864e5;
|
|
25684
27392
|
const recentErrors = recentLines.filter((line) => {
|
|
25685
|
-
const match = line.match(/^\[(\d{4}-\d{2}-\d{2}
|
|
25686
|
-
if (match) return new Date(match[1]).getTime() > last24h;
|
|
27393
|
+
const match = line.match(/^\[(\d{4}-\d{2}-\d{2})\s+([\d:]+)/);
|
|
27394
|
+
if (match) return (/* @__PURE__ */ new Date(`${match[1]}T${match[2]}`)).getTime() > last24h;
|
|
25687
27395
|
return false;
|
|
25688
27396
|
});
|
|
25689
27397
|
if (recentErrors.length > 0) {
|
|
25690
|
-
|
|
27398
|
+
let rate429 = 0;
|
|
27399
|
+
let contentSilence = 0;
|
|
27400
|
+
let spawnTimeout = 0;
|
|
27401
|
+
let other = 0;
|
|
27402
|
+
for (const line of recentErrors) {
|
|
27403
|
+
if (/429|rate.?limit/i.test(line)) rate429++;
|
|
27404
|
+
else if (/content silence/i.test(line)) contentSilence++;
|
|
27405
|
+
else if (/spawn timeout|timeout after \d+s/i.test(line)) spawnTimeout++;
|
|
27406
|
+
else other++;
|
|
27407
|
+
}
|
|
27408
|
+
const logFix = "cc-claw doctor --fix to clear stale errors";
|
|
27409
|
+
if (rate429 > 10) {
|
|
27410
|
+
checks.push({ name: "Telegram rate limits", status: "error", message: `${rate429} rate-limit (429) errors in last 24h \u2014 message delivery blocked`, fix: logFix });
|
|
27411
|
+
} else if (rate429 > 0) {
|
|
27412
|
+
checks.push({ name: "Telegram rate limits", status: "warning", message: `${rate429} rate-limit (429) errors in last 24h`, fix: logFix });
|
|
27413
|
+
}
|
|
27414
|
+
if (contentSilence > 0) {
|
|
27415
|
+
checks.push({ name: "Content silence", status: "warning", message: `${contentSilence} agent silence timeout(s) in last 24h \u2014 API went unresponsive`, fix: logFix });
|
|
27416
|
+
}
|
|
27417
|
+
if (spawnTimeout > 0) {
|
|
27418
|
+
checks.push({ name: "Spawn timeouts", status: "warning", message: `${spawnTimeout} backend timeout(s) in last 24h`, fix: logFix });
|
|
27419
|
+
}
|
|
27420
|
+
if (other > 0) {
|
|
27421
|
+
checks.push({ name: "Other errors", status: "warning", message: `${other} other error(s) in last 24h`, fix: logFix });
|
|
27422
|
+
}
|
|
27423
|
+
if (rate429 === 0 && contentSilence === 0 && spawnTimeout === 0 && other === 0) {
|
|
27424
|
+
checks.push({ name: "Recent errors", status: "ok", message: "none in last 24h" });
|
|
27425
|
+
}
|
|
25691
27426
|
} else {
|
|
25692
27427
|
checks.push({ name: "Recent errors", status: "ok", message: "none in last 24h" });
|
|
25693
27428
|
}
|
|
@@ -25697,6 +27432,35 @@ async function doctorCommand(globalOpts, localOpts) {
|
|
|
25697
27432
|
} else {
|
|
25698
27433
|
checks.push({ name: "Error log", status: "ok", message: "no error log file" });
|
|
25699
27434
|
}
|
|
27435
|
+
try {
|
|
27436
|
+
const { OllamaStore } = await Promise.resolve().then(() => (init_ollama(), ollama_exports));
|
|
27437
|
+
const servers = OllamaStore.listServers();
|
|
27438
|
+
if (servers.length > 0) {
|
|
27439
|
+
const online = servers.filter((s) => s.status === "online");
|
|
27440
|
+
const models = OllamaStore.getAvailableModels();
|
|
27441
|
+
if (online.length === servers.length) {
|
|
27442
|
+
checks.push({
|
|
27443
|
+
name: "Ollama",
|
|
27444
|
+
status: "ok",
|
|
27445
|
+
message: `${online.length} server(s) online, ${models.length} model(s)`
|
|
27446
|
+
});
|
|
27447
|
+
} else if (online.length > 0) {
|
|
27448
|
+
checks.push({
|
|
27449
|
+
name: "Ollama",
|
|
27450
|
+
status: "warning",
|
|
27451
|
+
message: `${online.length}/${servers.length} server(s) online, ${models.length} model(s)`
|
|
27452
|
+
});
|
|
27453
|
+
} else {
|
|
27454
|
+
checks.push({
|
|
27455
|
+
name: "Ollama",
|
|
27456
|
+
status: "warning",
|
|
27457
|
+
message: `${servers.length} server(s) configured, all offline`,
|
|
27458
|
+
fix: "Check Ollama is running: ollama serve"
|
|
27459
|
+
});
|
|
27460
|
+
}
|
|
27461
|
+
}
|
|
27462
|
+
} catch {
|
|
27463
|
+
}
|
|
25700
27464
|
if (localOpts.fix) {
|
|
25701
27465
|
const fixable = checks.filter((c) => c.status !== "ok" && c.fix);
|
|
25702
27466
|
for (const check of fixable) {
|
|
@@ -25745,6 +27509,21 @@ async function doctorCommand(globalOpts, localOpts) {
|
|
|
25745
27509
|
}
|
|
25746
27510
|
}
|
|
25747
27511
|
}
|
|
27512
|
+
const errorChecks = checks.filter(
|
|
27513
|
+
(c) => ["Telegram rate limits", "Content silence", "Spawn timeouts", "Other errors"].includes(c.name) && c.status !== "ok"
|
|
27514
|
+
);
|
|
27515
|
+
if (errorChecks.length > 0 && existsSync30(ERROR_LOG_PATH)) {
|
|
27516
|
+
try {
|
|
27517
|
+
const { writeFileSync: writeFileSync13 } = await import("fs");
|
|
27518
|
+
writeFileSync13(ERROR_LOG_PATH, "");
|
|
27519
|
+
for (const c of errorChecks) {
|
|
27520
|
+
c.status = "ok";
|
|
27521
|
+
c.message = "cleared (log truncated)";
|
|
27522
|
+
delete c.fix;
|
|
27523
|
+
}
|
|
27524
|
+
} catch {
|
|
27525
|
+
}
|
|
27526
|
+
}
|
|
25748
27527
|
}
|
|
25749
27528
|
const errors = checks.filter((c) => c.status === "error").length;
|
|
25750
27529
|
const warnings = checks.filter((c) => c.status === "warning").length;
|
|
@@ -25770,8 +27549,9 @@ async function doctorCommand(globalOpts, localOpts) {
|
|
|
25770
27549
|
lines.push(` ${parts.join(", ")}`);
|
|
25771
27550
|
const fixable = r.checks.filter((c) => c.fix);
|
|
25772
27551
|
if (fixable.length > 0 && !localOpts.fix) {
|
|
25773
|
-
|
|
25774
|
-
|
|
27552
|
+
const uniqueFixes = [...new Set(fixable.map((f) => f.fix))];
|
|
27553
|
+
for (const fix of uniqueFixes) {
|
|
27554
|
+
lines.push(muted(` Fix: ${fix}`));
|
|
25775
27555
|
}
|
|
25776
27556
|
}
|
|
25777
27557
|
}
|
|
@@ -26555,6 +28335,271 @@ var init_backend_cmd_factory = __esm({
|
|
|
26555
28335
|
}
|
|
26556
28336
|
});
|
|
26557
28337
|
|
|
28338
|
+
// src/cli/commands/ollama.ts
|
|
28339
|
+
// Namespace object for the lazily-loaded ollama CLI command module
// (esbuild __export wrapper): installs getter-based re-exports so each
// subcommand handler is resolved only when first accessed.
var ollama_exports3 = {};
__export(ollama_exports3, {
  ollamaAdd: () => ollamaAdd,
  ollamaDiscover: () => ollamaDiscover,
  ollamaHealth: () => ollamaHealth,
  ollamaList: () => ollamaList,
  ollamaModels: () => ollamaModels,
  ollamaRemove: () => ollamaRemove,
  ollamaTest: () => ollamaTest
});
|
|
28349
|
+
import { existsSync as existsSync35 } from "fs";
|
|
28350
|
+
// Abort the current command when the cc-claw database file does not exist.
// Emits a structured DB_NOT_FOUND error and exits the process with status 1;
// returns normally only when the database is present.
function requireDb3() {
  if (existsSync35(DB_PATH)) {
    return;
  }
  outputError("DB_NOT_FOUND", "Database not found. Run cc-claw setup first.");
  process.exit(1);
}
|
|
28356
|
+
// Ensure the database exists AND has been opened writable exactly once per
// process. The module-level dbInitialized3 flag makes repeat calls no-ops,
// so each command can call this unconditionally.
async function requireWriteDb3() {
  requireDb3();
  if (dbInitialized3) {
    return;
  }
  const { initDatabase: initDatabase2 } = await Promise.resolve().then(() => (init_store5(), store_exports5));
  initDatabase2();
  dbInitialized3 = true;
}
|
|
28364
|
+
// `cc-claw ollama list`: print every configured Ollama server with its
// online/offline status and the number of models discovered on it.
// Reads via a read-only DB handle and renders through the shared output()
// helper (JSON or human-formatted depending on global options).
async function ollamaList(globalOpts) {
  requireDb3();
  const { openDatabaseReadOnly: openDatabaseReadOnly2 } = await Promise.resolve().then(() => (init_store5(), store_exports5));
  const db = openDatabaseReadOnly2();
  const serverRows = db.prepare(`
    SELECT id, name, host, port, status, last_health_check, created_at
    FROM ollama_servers ORDER BY name
  `).all();
  const countRows = db.prepare(`
    SELECT server_id, COUNT(*) as count FROM ollama_models GROUP BY server_id
  `).all();
  db.close();
  // Per-server model counts keyed by server id; servers with no models
  // simply have no row here and fall back to 0 below.
  const countsByServer = new Map(countRows.map((r) => [r.server_id, r.count]));
  if (serverRows.length === 0) {
    output(
      { servers: [] },
      () => "\n No Ollama servers configured.\n Add one with: cc-claw ollama add <name> <host>\n"
    );
    return;
  }
  const enriched = serverRows.map((s) => ({
    ...s,
    modelCount: countsByServer.get(s.id) ?? 0
  }));
  output(enriched, (d) => {
    const lines = ["", divider("Ollama Servers"), ""];
    for (const srv of d) {
      const dot = statusDot(srv.status === "online" ? "active" : "offline");
      lines.push(` ${dot} ${srv.name} ${muted(`(${srv.host}:${srv.port})`)}`);
      lines.push(` Models: ${srv.modelCount} \xB7 Status: ${srv.status === "online" ? success("online") : error2("offline")}`);
      if (srv.last_health_check) {
        lines.push(` Last check: ${muted(srv.last_health_check)}`);
      }
    }
    lines.push("");
    return lines.join("\n");
  });
}
|
|
28403
|
+
// `cc-claw ollama add <name> <host>`: register a new Ollama server, probe
// its reachability, persist it (online or offline), and — when reachable —
// immediately discover its models.
//
// Parameters:
//   globalOpts - global CLI options (kept for the uniform command signature)
//   name       - unique server label; duplicates are rejected
//   host       - hostname or IP of the Ollama server
//   opts       - { port?: string, key?: string } parsed from CLI flags
//
// Exits the process with status 1 on an invalid port or duplicate name.
async function ollamaAdd(globalOpts, name, host, opts) {
  await requireWriteDb3();
  // Default Ollama port is 11434; explicit radix + Number.isNaN avoids the
  // coercing global isNaN.
  const port = opts.port ? Number.parseInt(opts.port, 10) : 11434;
  if (Number.isNaN(port) || port < 1 || port > 65535) {
    outputError("INVALID_PORT", "Port must be a number between 1 and 65535.");
    process.exit(1);
  }
  // Load the ollama module once and destructure all three services from it
  // (the original re-imported the same module three separate times).
  const { OllamaStore, OllamaClient, OllamaService } = await Promise.resolve().then(() => (init_ollama(), ollama_exports));
  if (OllamaStore.getServer(name)) {
    outputError("ALREADY_EXISTS", `Server "${name}" already exists. Remove it first with: cc-claw ollama remove ${name}`);
    process.exit(1);
  }
  console.log(` Checking ${host}:${port}...`);
  const online = await OllamaClient.ping(`http://${host}:${port}`, {
    timeoutMs: 5e3,
    apiKey: opts.key ?? void 0
  });
  if (!online) {
    console.log(warning(" \u26A0 Server did not respond \u2014 saving as offline."));
  } else {
    console.log(success(" \u2713 Server is reachable."));
  }
  // Persist even when offline so the user can re-check later via
  // `cc-claw ollama health`.
  const server = OllamaStore.addServer(name, host, port, opts.key ?? null);
  OllamaStore.updateServerStatus(server.id, online ? "online" : "offline");
  output(
    { id: server.id, name, host, port, status: online ? "online" : "offline" },
    () => success(`
 Added Ollama server "${name}" (${host}:${port})
`)
  );
  if (online) {
    console.log(" Discovering models...");
    const discovered = await OllamaService.discoverModels(name);
    console.log(success(` \u2713 Found ${discovered.length} model(s)`));
    if (discovered.length > 0) {
      for (const m of discovered) {
        console.log(` \xB7 ${m.name}${m.parameterSize ? ` (${m.parameterSize})` : ""}`);
      }
    }
    console.log("");
  }
}
|
|
28448
|
+
// `cc-claw ollama remove <name>`: delete a configured server by name.
// Reports NOT_FOUND when no server with that name exists; otherwise emits
// a success payload through the shared output() helper.
async function ollamaRemove(globalOpts, name) {
  await requireWriteDb3();
  const { OllamaStore } = await Promise.resolve().then(() => (init_ollama(), ollama_exports));
  if (!OllamaStore.removeServer(name)) {
    outputError("NOT_FOUND", `Server "${name}" not found.`);
    return;
  }
  output({ removed: true, name }, () => success(`Removed server "${name}"`));
}
|
|
28458
|
+
// `cc-claw ollama models [--server <name>]`: list discovered models,
// grouped by server, with parameter-size / quantization / size / context /
// quality metadata. With --server, scopes the listing to that one server.
async function ollamaModels(globalOpts, opts) {
  requireDb3();
  const { openDatabaseReadOnly: openDatabaseReadOnly2 } = await Promise.resolve().then(() => (init_store5(), store_exports5));
  const db = openDatabaseReadOnly2();
  let serverId = null;
  if (opts.server) {
    const row = db.prepare("SELECT id FROM ollama_servers WHERE name = ?").get(opts.server);
    if (!row) {
      db.close();
      outputError("NOT_FOUND", `Server "${opts.server}" not found.`);
      return;
    }
    serverId = row.id;
  }
  const scoped = serverId !== null;
  const query = scoped ? `
      SELECT m.name, m.family, m.parameter_size, m.quantization, m.context_window, m.size_bytes, m.quality_score,
             s.name as server_name
      FROM ollama_models m
      JOIN ollama_servers s ON s.id = m.server_id
      WHERE m.server_id = ?
      ORDER BY m.name
    ` : `
      SELECT m.name, m.family, m.parameter_size, m.quantization, m.context_window, m.size_bytes, m.quality_score,
             s.name as server_name
      FROM ollama_models m
      JOIN ollama_servers s ON s.id = m.server_id
      ORDER BY s.name, m.name
    `;
  const rows = scoped ? db.prepare(query).all(serverId) : db.prepare(query).all();
  db.close();
  if (rows.length === 0) {
    output(
      { models: [] },
      () => opts.server ? `
 No models discovered on "${opts.server}".
 Run: cc-claw ollama discover --server ${opts.server}
` : "\n No models discovered.\n Run: cc-claw ollama discover\n"
    );
    return;
  }
  output(rows, (d) => {
    const lines = ["", divider("Ollama Models"), ""];
    let currentServer = "";
    for (const model of d) {
      // Emit a server header each time the server changes (rows arrive
      // sorted by server when unscoped).
      if (model.server_name !== currentServer) {
        if (currentServer) lines.push("");
        lines.push(` ${muted(`\u2500\u2500 ${model.server_name} \u2500\u2500`)}`);
        currentServer = model.server_name;
      }
      const sizeGB = model.size_bytes > 0 ? `${(model.size_bytes / 1e9).toFixed(1)}GB` : "";
      const ctxK = model.context_window ? `${(model.context_window / 1e3).toFixed(0)}K ctx` : "";
      const quality = model.quality_score !== null ? `Q:${model.quality_score}` : "";
      const meta = [model.parameter_size, model.quantization, sizeGB, ctxK, quality].filter(Boolean).join(" \xB7 ");
      lines.push(` ${model.name} ${muted(meta ? `(${meta})` : "")}`);
    }
    lines.push("");
    return lines.join("\n");
  });
}
|
|
28522
|
+
// `cc-claw ollama discover [--server <name>]`: refresh the model catalog
// for one named server, or for every configured server when no --server
// flag is given. Validates the server name before probing.
async function ollamaDiscover(globalOpts, opts) {
  await requireWriteDb3();
  const { OllamaService } = await Promise.resolve().then(() => (init_ollama(), ollama_exports));
  if (opts.server) {
    const { OllamaStore } = await Promise.resolve().then(() => (init_ollama(), ollama_exports));
    if (!OllamaStore.getServer(opts.server)) {
      outputError("NOT_FOUND", `Server "${opts.server}" not found.`);
      return;
    }
  }
  console.log(opts.server ? ` Discovering models on ${opts.server}...` : " Discovering models on all servers...");
  const discovered = await OllamaService.discoverModels(opts.server);
  // Reduce each model record to the JSON-friendly subset we report.
  const payload = discovered.map((m) => ({
    name: m.name,
    parameterSize: m.parameterSize,
    contextWindow: m.contextWindow,
    sizeBytes: m.sizeBytes
  }));
  output(payload, (d) => {
    if (d.length === 0) {
      return "\n No models found. Check server connectivity.\n";
    }
    const lines = [`
${success(`\u2713 Discovered ${d.length} model(s):`)}`, ""];
    for (const m of d) {
      const sizeGB = m.sizeBytes > 0 ? `${(m.sizeBytes / 1e9).toFixed(1)}GB` : "";
      const ctxK = m.contextWindow ? `${(m.contextWindow / 1e3).toFixed(0)}K ctx` : "";
      const meta = [m.parameterSize, sizeGB, ctxK].filter(Boolean).join(" \xB7 ");
      lines.push(` \xB7 ${m.name} ${muted(meta ? `(${meta})` : "")}`);
    }
    lines.push("");
    return lines.join("\n");
  });
}
|
|
28557
|
+
// `cc-claw ollama health`: ping every configured server, persist the
// refreshed statuses (via OllamaService.healthCheck), and print a per-server
// online/offline summary with an aggregate count.
async function ollamaHealth(globalOpts) {
  await requireWriteDb3();
  const { OllamaService } = await Promise.resolve().then(() => (init_ollama(), ollama_exports));
  console.log(" Pinging all Ollama servers...");
  const results = await OllamaService.healthCheck();
  if (results.length === 0) {
    output({ servers: [] }, () => "\n No Ollama servers configured.\n");
    return;
  }
  const summary = results.map((s) => ({ name: s.name, host: s.host, port: s.port, status: s.status }));
  output(summary, (d) => {
    const lines = ["", divider("Ollama Health"), ""];
    let onlineCount = 0;
    for (const srv of d) {
      const isUp = srv.status === "online";
      if (isUp) onlineCount += 1;
      const dot = statusDot(isUp ? "active" : "offline");
      const statusLabel = isUp ? success("online") : error2("offline");
      lines.push(` ${dot} ${srv.name} ${muted(`(${srv.host}:${srv.port})`)} \u2014 ${statusLabel}`);
    }
    lines.push("");
    lines.push(` ${onlineCount}/${d.length} servers online`);
    lines.push("");
    return lines.join("\n");
  });
}
|
|
28584
|
+
// `cc-claw ollama test <model>`: placeholder command — quality-gate testing
// is not implemented yet; always reports status "not_implemented".
async function ollamaTest(globalOpts, model2) {
  const payload = { model: model2, status: "not_implemented" };
  const render = () => `
${warning("Quality gate testing is coming in Phase 4.")}
 Model: ${model2}
`;
  output(payload, render);
}
|
|
28593
|
+
// Module-scope flag read by requireWriteDb3: set true after the writable
// database has been initialized once, so later commands skip re-init.
var dbInitialized3;
// Lazy ESM initializer for src/cli/commands/ollama.ts (esbuild __esm
// wrapper): runs at most once, on first use of the module, wiring up the
// formatting helpers and path constants and resetting the init flag.
var init_ollama4 = __esm({
  "src/cli/commands/ollama.ts"() {
    "use strict";
    init_format2();
    init_paths();
    dbInitialized3 = false;
  }
});
|
|
28602
|
+
|
|
26558
28603
|
// src/cli/commands/backend.ts
|
|
26559
28604
|
var backend_exports = {};
|
|
26560
28605
|
__export(backend_exports, {
|
|
@@ -26562,12 +28607,12 @@ __export(backend_exports, {
|
|
|
26562
28607
|
backendList: () => backendList,
|
|
26563
28608
|
backendSet: () => backendSet
|
|
26564
28609
|
});
|
|
26565
|
-
import { existsSync as
|
|
28610
|
+
import { existsSync as existsSync36 } from "fs";
|
|
26566
28611
|
async function backendList(globalOpts) {
|
|
26567
28612
|
const { getAvailableAdapters: getAvailableAdapters3 } = await Promise.resolve().then(() => (init_backends(), backends_exports));
|
|
26568
|
-
const chatId =
|
|
28613
|
+
const chatId = resolveChatId2(globalOpts);
|
|
26569
28614
|
let activeBackend = null;
|
|
26570
|
-
if (
|
|
28615
|
+
if (existsSync36(DB_PATH)) {
|
|
26571
28616
|
const { openDatabaseReadOnly: openDatabaseReadOnly2 } = await Promise.resolve().then(() => (init_store5(), store_exports5));
|
|
26572
28617
|
const readDb = openDatabaseReadOnly2();
|
|
26573
28618
|
try {
|
|
@@ -26597,8 +28642,8 @@ async function backendList(globalOpts) {
|
|
|
26597
28642
|
});
|
|
26598
28643
|
}
|
|
26599
28644
|
async function backendGet(globalOpts) {
|
|
26600
|
-
const chatId =
|
|
26601
|
-
if (!
|
|
28645
|
+
const chatId = resolveChatId2(globalOpts);
|
|
28646
|
+
if (!existsSync36(DB_PATH)) {
|
|
26602
28647
|
outputError("DB_NOT_FOUND", "Database not found. Run cc-claw setup first.");
|
|
26603
28648
|
process.exit(1);
|
|
26604
28649
|
}
|
|
@@ -26615,7 +28660,7 @@ async function backendSet(globalOpts, name) {
|
|
|
26615
28660
|
outputError("DAEMON_OFFLINE", "CC-Claw daemon is not running.\n\n Start it with: cc-claw service start");
|
|
26616
28661
|
process.exit(1);
|
|
26617
28662
|
}
|
|
26618
|
-
const chatId =
|
|
28663
|
+
const chatId = resolveChatId2(globalOpts);
|
|
26619
28664
|
const res = await apiPost2("/api/backend/set", { chatId, backend: name });
|
|
26620
28665
|
if (res.ok) {
|
|
26621
28666
|
output({ backend: name, success: true }, () => `
|
|
@@ -26642,13 +28687,13 @@ __export(model_exports, {
|
|
|
26642
28687
|
modelList: () => modelList,
|
|
26643
28688
|
modelSet: () => modelSet
|
|
26644
28689
|
});
|
|
26645
|
-
import { existsSync as
|
|
28690
|
+
import { existsSync as existsSync37 } from "fs";
|
|
26646
28691
|
async function modelList(globalOpts) {
|
|
26647
|
-
const chatId =
|
|
28692
|
+
const chatId = resolveChatId2(globalOpts);
|
|
26648
28693
|
const { openDatabaseReadOnly: openDatabaseReadOnly2 } = await Promise.resolve().then(() => (init_store5(), store_exports5));
|
|
26649
28694
|
const { getAdapter: getAdapter4, getAllAdapters: getAllAdapters5 } = await Promise.resolve().then(() => (init_backends(), backends_exports));
|
|
26650
28695
|
let backendId = "claude";
|
|
26651
|
-
if (
|
|
28696
|
+
if (existsSync37(DB_PATH)) {
|
|
26652
28697
|
const readDb = openDatabaseReadOnly2();
|
|
26653
28698
|
try {
|
|
26654
28699
|
const row = readDb.prepare("SELECT backend FROM chat_backend WHERE chat_id = ?").get(chatId);
|
|
@@ -26680,8 +28725,8 @@ async function modelList(globalOpts) {
|
|
|
26680
28725
|
}
|
|
26681
28726
|
}
|
|
26682
28727
|
async function modelGet(globalOpts) {
|
|
26683
|
-
const chatId =
|
|
26684
|
-
if (!
|
|
28728
|
+
const chatId = resolveChatId2(globalOpts);
|
|
28729
|
+
if (!existsSync37(DB_PATH)) {
|
|
26685
28730
|
outputError("DB_NOT_FOUND", "Database not found.");
|
|
26686
28731
|
process.exit(1);
|
|
26687
28732
|
}
|
|
@@ -26698,7 +28743,7 @@ async function modelSet(globalOpts, name) {
|
|
|
26698
28743
|
outputError("DAEMON_OFFLINE", "CC-Claw daemon is not running.\n\n Start it with: cc-claw service start");
|
|
26699
28744
|
process.exit(1);
|
|
26700
28745
|
}
|
|
26701
|
-
const chatId =
|
|
28746
|
+
const chatId = resolveChatId2(globalOpts);
|
|
26702
28747
|
const res = await apiPost2("/api/model/set", { chatId, model: name });
|
|
26703
28748
|
if (res.ok) {
|
|
26704
28749
|
output({ model: name, success: true }, () => `
|
|
@@ -26725,9 +28770,9 @@ __export(memory_exports2, {
|
|
|
26725
28770
|
memoryList: () => memoryList,
|
|
26726
28771
|
memorySearch: () => memorySearch
|
|
26727
28772
|
});
|
|
26728
|
-
import { existsSync as
|
|
28773
|
+
import { existsSync as existsSync38 } from "fs";
|
|
26729
28774
|
async function memoryList(globalOpts) {
|
|
26730
|
-
if (!
|
|
28775
|
+
if (!existsSync38(DB_PATH)) {
|
|
26731
28776
|
outputError("DB_NOT_FOUND", "Database not found. Run cc-claw setup first.");
|
|
26732
28777
|
process.exit(1);
|
|
26733
28778
|
}
|
|
@@ -26751,7 +28796,7 @@ async function memoryList(globalOpts) {
|
|
|
26751
28796
|
});
|
|
26752
28797
|
}
|
|
26753
28798
|
async function memorySearch(globalOpts, query) {
|
|
26754
|
-
if (!
|
|
28799
|
+
if (!existsSync38(DB_PATH)) {
|
|
26755
28800
|
outputError("DB_NOT_FOUND", "Database not found.");
|
|
26756
28801
|
process.exit(1);
|
|
26757
28802
|
}
|
|
@@ -26773,14 +28818,14 @@ async function memorySearch(globalOpts, query) {
|
|
|
26773
28818
|
});
|
|
26774
28819
|
}
|
|
26775
28820
|
async function memoryHistory(globalOpts, opts) {
|
|
26776
|
-
if (!
|
|
28821
|
+
if (!existsSync38(DB_PATH)) {
|
|
26777
28822
|
outputError("DB_NOT_FOUND", "Database not found.");
|
|
26778
28823
|
process.exit(1);
|
|
26779
28824
|
}
|
|
26780
28825
|
const { openDatabaseReadOnly: openDatabaseReadOnly2 } = await Promise.resolve().then(() => (init_store5(), store_exports5));
|
|
26781
28826
|
const readDb = openDatabaseReadOnly2();
|
|
26782
28827
|
const limit = parseInt(opts.limit ?? "10", 10);
|
|
26783
|
-
const chatId =
|
|
28828
|
+
const chatId = resolveChatId2(globalOpts);
|
|
26784
28829
|
const summaries = readDb.prepare(
|
|
26785
28830
|
"SELECT * FROM session_summaries WHERE chat_id = ? ORDER BY created_at DESC LIMIT ?"
|
|
26786
28831
|
).all(chatId, limit);
|
|
@@ -26821,7 +28866,7 @@ __export(cron_exports2, {
|
|
|
26821
28866
|
cronList: () => cronList,
|
|
26822
28867
|
cronRuns: () => cronRuns
|
|
26823
28868
|
});
|
|
26824
|
-
import { existsSync as
|
|
28869
|
+
import { existsSync as existsSync39 } from "fs";
|
|
26825
28870
|
function parseFallbacks(raw) {
|
|
26826
28871
|
return raw.slice(0, 3).map((f) => {
|
|
26827
28872
|
const [backend2, ...rest] = f.split(":");
|
|
@@ -26842,7 +28887,7 @@ function parseAndValidateTimeout(raw) {
|
|
|
26842
28887
|
return val;
|
|
26843
28888
|
}
|
|
26844
28889
|
async function cronList(globalOpts) {
|
|
26845
|
-
if (!
|
|
28890
|
+
if (!existsSync39(DB_PATH)) {
|
|
26846
28891
|
outputError("DB_NOT_FOUND", "Database not found.");
|
|
26847
28892
|
process.exit(1);
|
|
26848
28893
|
}
|
|
@@ -26880,7 +28925,7 @@ async function cronList(globalOpts) {
|
|
|
26880
28925
|
});
|
|
26881
28926
|
}
|
|
26882
28927
|
async function cronHealth(globalOpts) {
|
|
26883
|
-
if (!
|
|
28928
|
+
if (!existsSync39(DB_PATH)) {
|
|
26884
28929
|
outputError("DB_NOT_FOUND", "Database not found.");
|
|
26885
28930
|
process.exit(1);
|
|
26886
28931
|
}
|
|
@@ -26918,7 +28963,7 @@ async function cronCreate(globalOpts, opts) {
|
|
|
26918
28963
|
outputError("MISSING_SCHEDULE", "Must specify one of: --cron, --at, or --every");
|
|
26919
28964
|
process.exit(1);
|
|
26920
28965
|
}
|
|
26921
|
-
const chatId =
|
|
28966
|
+
const chatId = resolveChatId2(globalOpts);
|
|
26922
28967
|
const { success: successFmt } = await Promise.resolve().then(() => (init_format2(), format_exports));
|
|
26923
28968
|
const timeout = parseAndValidateTimeout(opts.timeout);
|
|
26924
28969
|
try {
|
|
@@ -27039,7 +29084,7 @@ async function cronEdit(globalOpts, id, opts) {
|
|
|
27039
29084
|
}
|
|
27040
29085
|
}
|
|
27041
29086
|
async function cronRuns(globalOpts, jobId, opts) {
|
|
27042
|
-
if (!
|
|
29087
|
+
if (!existsSync39(DB_PATH)) {
|
|
27043
29088
|
outputError("DB_NOT_FOUND", "Database not found.");
|
|
27044
29089
|
process.exit(1);
|
|
27045
29090
|
}
|
|
@@ -27086,9 +29131,9 @@ __export(agents_exports, {
|
|
|
27086
29131
|
runnersList: () => runnersList,
|
|
27087
29132
|
tasksList: () => tasksList
|
|
27088
29133
|
});
|
|
27089
|
-
import { existsSync as
|
|
29134
|
+
import { existsSync as existsSync40 } from "fs";
|
|
27090
29135
|
async function agentsList(globalOpts) {
|
|
27091
|
-
if (!
|
|
29136
|
+
if (!existsSync40(DB_PATH)) {
|
|
27092
29137
|
outputError("DB_NOT_FOUND", "Database not found.");
|
|
27093
29138
|
process.exit(1);
|
|
27094
29139
|
}
|
|
@@ -27119,7 +29164,7 @@ async function agentsList(globalOpts) {
|
|
|
27119
29164
|
});
|
|
27120
29165
|
}
|
|
27121
29166
|
async function tasksList(globalOpts) {
|
|
27122
|
-
if (!
|
|
29167
|
+
if (!existsSync40(DB_PATH)) {
|
|
27123
29168
|
outputError("DB_NOT_FOUND", "Database not found.");
|
|
27124
29169
|
process.exit(1);
|
|
27125
29170
|
}
|
|
@@ -27149,7 +29194,7 @@ async function agentsSpawn(globalOpts, opts) {
|
|
|
27149
29194
|
outputError("DAEMON_OFFLINE", "CC-Claw daemon is not running.\n\n Start it with: cc-claw service start");
|
|
27150
29195
|
process.exit(1);
|
|
27151
29196
|
}
|
|
27152
|
-
const chatId =
|
|
29197
|
+
const chatId = resolveChatId2(globalOpts);
|
|
27153
29198
|
const res = await apiPost2("/api/orchestrator/spawn", {
|
|
27154
29199
|
chatId,
|
|
27155
29200
|
runner: opts.runner,
|
|
@@ -27192,7 +29237,7 @@ async function agentsCancelAll(globalOpts) {
|
|
|
27192
29237
|
outputError("DAEMON_OFFLINE", "CC-Claw daemon is not running.");
|
|
27193
29238
|
process.exit(1);
|
|
27194
29239
|
}
|
|
27195
|
-
const chatId =
|
|
29240
|
+
const chatId = resolveChatId2(globalOpts);
|
|
27196
29241
|
const res = await apiPost2("/api/orchestrator/cancel-all", { chatId });
|
|
27197
29242
|
if (res.ok) {
|
|
27198
29243
|
const { success: s } = await Promise.resolve().then(() => (init_format2(), format_exports));
|
|
@@ -27247,10 +29292,10 @@ __export(db_exports, {
|
|
|
27247
29292
|
dbPath: () => dbPath,
|
|
27248
29293
|
dbStats: () => dbStats
|
|
27249
29294
|
});
|
|
27250
|
-
import { existsSync as
|
|
29295
|
+
import { existsSync as existsSync41, statSync as statSync11, copyFileSync as copyFileSync3, mkdirSync as mkdirSync16 } from "fs";
|
|
27251
29296
|
import { dirname as dirname7 } from "path";
|
|
27252
29297
|
async function dbStats(globalOpts) {
|
|
27253
|
-
if (!
|
|
29298
|
+
if (!existsSync41(DB_PATH)) {
|
|
27254
29299
|
outputError("DB_NOT_FOUND", `Database not found at ${DB_PATH}`);
|
|
27255
29300
|
process.exit(1);
|
|
27256
29301
|
}
|
|
@@ -27258,7 +29303,7 @@ async function dbStats(globalOpts) {
|
|
|
27258
29303
|
const readDb = openDatabaseReadOnly2();
|
|
27259
29304
|
const mainSize = statSync11(DB_PATH).size;
|
|
27260
29305
|
const walPath = DB_PATH + "-wal";
|
|
27261
|
-
const walSize =
|
|
29306
|
+
const walSize = existsSync41(walPath) ? statSync11(walPath).size : 0;
|
|
27262
29307
|
const tableNames = readDb.prepare(
|
|
27263
29308
|
"SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' AND name NOT LIKE '%_fts%' ORDER BY name"
|
|
27264
29309
|
).all();
|
|
@@ -27292,7 +29337,7 @@ async function dbPath(globalOpts) {
|
|
|
27292
29337
|
output({ path: DB_PATH }, (d) => d.path);
|
|
27293
29338
|
}
|
|
27294
29339
|
async function dbBackup(globalOpts, destPath) {
|
|
27295
|
-
if (!
|
|
29340
|
+
if (!existsSync41(DB_PATH)) {
|
|
27296
29341
|
outputError("DB_NOT_FOUND", `Database not found at ${DB_PATH}`);
|
|
27297
29342
|
process.exit(1);
|
|
27298
29343
|
}
|
|
@@ -27301,7 +29346,7 @@ async function dbBackup(globalOpts, destPath) {
|
|
|
27301
29346
|
mkdirSync16(dirname7(dest), { recursive: true });
|
|
27302
29347
|
copyFileSync3(DB_PATH, dest);
|
|
27303
29348
|
const walPath = DB_PATH + "-wal";
|
|
27304
|
-
if (
|
|
29349
|
+
if (existsSync41(walPath)) copyFileSync3(walPath, dest + "-wal");
|
|
27305
29350
|
output({ path: dest, sizeBytes: statSync11(dest).size }, (d) => {
|
|
27306
29351
|
const b = d;
|
|
27307
29352
|
return `
|
|
@@ -27330,9 +29375,9 @@ __export(usage_exports, {
|
|
|
27330
29375
|
usageCost: () => usageCost,
|
|
27331
29376
|
usageTokens: () => usageTokens
|
|
27332
29377
|
});
|
|
27333
|
-
import { existsSync as
|
|
29378
|
+
import { existsSync as existsSync42 } from "fs";
|
|
27334
29379
|
function ensureDb() {
|
|
27335
|
-
if (!
|
|
29380
|
+
if (!existsSync42(DB_PATH)) {
|
|
27336
29381
|
outputError("DB_NOT_FOUND", "Database not found. Run cc-claw setup first.");
|
|
27337
29382
|
process.exit(1);
|
|
27338
29383
|
}
|
|
@@ -27384,7 +29429,7 @@ async function usageCost(globalOpts, opts) {
|
|
|
27384
29429
|
return lines.join("\n");
|
|
27385
29430
|
});
|
|
27386
29431
|
} else {
|
|
27387
|
-
const chatId =
|
|
29432
|
+
const chatId = resolveChatId2(globalOpts);
|
|
27388
29433
|
const usage2 = readDb.prepare("SELECT * FROM chat_usage WHERE chat_id = ?").get(chatId);
|
|
27389
29434
|
readDb.close();
|
|
27390
29435
|
if (!usage2 || usage2.request_count === 0) {
|
|
@@ -27522,15 +29567,15 @@ __export(config_exports2, {
|
|
|
27522
29567
|
configList: () => configList,
|
|
27523
29568
|
configSet: () => configSet
|
|
27524
29569
|
});
|
|
27525
|
-
import { existsSync as
|
|
29570
|
+
import { existsSync as existsSync43, readFileSync as readFileSync24 } from "fs";
|
|
27526
29571
|
async function configList(globalOpts) {
|
|
27527
|
-
if (!
|
|
29572
|
+
if (!existsSync43(DB_PATH)) {
|
|
27528
29573
|
outputError("DB_NOT_FOUND", "Database not found.");
|
|
27529
29574
|
process.exit(1);
|
|
27530
29575
|
}
|
|
27531
29576
|
const { openDatabaseReadOnly: openDatabaseReadOnly2 } = await Promise.resolve().then(() => (init_store5(), store_exports5));
|
|
27532
29577
|
const readDb = openDatabaseReadOnly2();
|
|
27533
|
-
const chatId =
|
|
29578
|
+
const chatId = resolveChatId2(globalOpts);
|
|
27534
29579
|
const config2 = {};
|
|
27535
29580
|
for (const key of RUNTIME_KEYS) {
|
|
27536
29581
|
const { table, col } = KEY_TABLE_MAP[key];
|
|
@@ -27558,13 +29603,13 @@ async function configGet(globalOpts, key) {
|
|
|
27558
29603
|
outputError("INVALID_KEY", `Unknown config key "${key}". Valid keys: ${RUNTIME_KEYS.join(", ")}`);
|
|
27559
29604
|
process.exit(1);
|
|
27560
29605
|
}
|
|
27561
|
-
if (!
|
|
29606
|
+
if (!existsSync43(DB_PATH)) {
|
|
27562
29607
|
outputError("DB_NOT_FOUND", "Database not found.");
|
|
27563
29608
|
process.exit(1);
|
|
27564
29609
|
}
|
|
27565
29610
|
const { openDatabaseReadOnly: openDatabaseReadOnly2 } = await Promise.resolve().then(() => (init_store5(), store_exports5));
|
|
27566
29611
|
const readDb = openDatabaseReadOnly2();
|
|
27567
|
-
const chatId =
|
|
29612
|
+
const chatId = resolveChatId2(globalOpts);
|
|
27568
29613
|
const { table, col } = KEY_TABLE_MAP[key];
|
|
27569
29614
|
if (!ALLOWED_TABLES.has(table) || !ALLOWED_COLS.has(col)) {
|
|
27570
29615
|
outputError("INVALID_KEY", `Invalid config mapping for "${key}".`);
|
|
@@ -27592,7 +29637,7 @@ async function configSet(globalOpts, key, value) {
|
|
|
27592
29637
|
outputError("DAEMON_OFFLINE", "CC-Claw daemon is not running.\n\n Start it with: cc-claw service start");
|
|
27593
29638
|
process.exit(1);
|
|
27594
29639
|
}
|
|
27595
|
-
const chatId =
|
|
29640
|
+
const chatId = resolveChatId2(globalOpts);
|
|
27596
29641
|
const res = await apiPost2("/api/config/set", { chatId, key, value });
|
|
27597
29642
|
if (res.ok) {
|
|
27598
29643
|
output({ key, value, success: true }, () => `
|
|
@@ -27604,7 +29649,7 @@ async function configSet(globalOpts, key, value) {
|
|
|
27604
29649
|
}
|
|
27605
29650
|
}
|
|
27606
29651
|
async function configEnv(_globalOpts) {
|
|
27607
|
-
if (!
|
|
29652
|
+
if (!existsSync43(ENV_PATH)) {
|
|
27608
29653
|
outputError("ENV_NOT_FOUND", `No .env file at ${ENV_PATH}. Run cc-claw setup.`);
|
|
27609
29654
|
process.exit(1);
|
|
27610
29655
|
}
|
|
@@ -27658,15 +29703,15 @@ __export(session_exports, {
|
|
|
27658
29703
|
sessionGet: () => sessionGet,
|
|
27659
29704
|
sessionNew: () => sessionNew
|
|
27660
29705
|
});
|
|
27661
|
-
import { existsSync as
|
|
29706
|
+
import { existsSync as existsSync44 } from "fs";
|
|
27662
29707
|
async function sessionGet(globalOpts) {
|
|
27663
|
-
if (!
|
|
29708
|
+
if (!existsSync44(DB_PATH)) {
|
|
27664
29709
|
outputError("DB_NOT_FOUND", "Database not found.");
|
|
27665
29710
|
process.exit(1);
|
|
27666
29711
|
}
|
|
27667
29712
|
const { openDatabaseReadOnly: openDatabaseReadOnly2 } = await Promise.resolve().then(() => (init_store5(), store_exports5));
|
|
27668
29713
|
const readDb = openDatabaseReadOnly2();
|
|
27669
|
-
const chatId =
|
|
29714
|
+
const chatId = resolveChatId2(globalOpts);
|
|
27670
29715
|
const session2 = readDb.prepare("SELECT * FROM sessions WHERE chat_id = ?").get(chatId);
|
|
27671
29716
|
readDb.close();
|
|
27672
29717
|
const data = session2 ? { id: session2.session_id, active: true, updatedAt: session2.updated_at } : { id: null, active: false, updatedAt: null };
|
|
@@ -27689,7 +29734,7 @@ async function sessionNew(globalOpts) {
|
|
|
27689
29734
|
outputError("DAEMON_OFFLINE", "CC-Claw daemon is not running.\n\n Start it with: cc-claw service start");
|
|
27690
29735
|
process.exit(1);
|
|
27691
29736
|
}
|
|
27692
|
-
const chatId =
|
|
29737
|
+
const chatId = resolveChatId2(globalOpts);
|
|
27693
29738
|
const res = await apiPost2("/api/session/new", { chatId });
|
|
27694
29739
|
if (res.ok) {
|
|
27695
29740
|
output({ success: true }, () => `
|
|
@@ -27721,9 +29766,9 @@ __export(permissions_exports, {
|
|
|
27721
29766
|
verboseGet: () => verboseGet,
|
|
27722
29767
|
verboseSet: () => verboseSet
|
|
27723
29768
|
});
|
|
27724
|
-
import { existsSync as
|
|
29769
|
+
import { existsSync as existsSync45 } from "fs";
|
|
27725
29770
|
function ensureDb2() {
|
|
27726
|
-
if (!
|
|
29771
|
+
if (!existsSync45(DB_PATH)) {
|
|
27727
29772
|
outputError("DB_NOT_FOUND", "Database not found.");
|
|
27728
29773
|
process.exit(1);
|
|
27729
29774
|
}
|
|
@@ -27732,7 +29777,7 @@ async function permissionsGet(globalOpts) {
|
|
|
27732
29777
|
ensureDb2();
|
|
27733
29778
|
const { openDatabaseReadOnly: openDatabaseReadOnly2 } = await Promise.resolve().then(() => (init_store5(), store_exports5));
|
|
27734
29779
|
const readDb = openDatabaseReadOnly2();
|
|
27735
|
-
const chatId =
|
|
29780
|
+
const chatId = resolveChatId2(globalOpts);
|
|
27736
29781
|
const row = readDb.prepare("SELECT mode FROM chat_mode WHERE chat_id = ?").get(chatId);
|
|
27737
29782
|
readDb.close();
|
|
27738
29783
|
const mode = row?.mode ?? "yolo";
|
|
@@ -27750,7 +29795,7 @@ async function permissionsSet(globalOpts, mode) {
|
|
|
27750
29795
|
outputError("DAEMON_OFFLINE", "CC-Claw daemon is not running.\n\n Start it with: cc-claw service start");
|
|
27751
29796
|
process.exit(1);
|
|
27752
29797
|
}
|
|
27753
|
-
const chatId =
|
|
29798
|
+
const chatId = resolveChatId2(globalOpts);
|
|
27754
29799
|
const res = await apiPost2("/api/permissions/set", { chatId, mode });
|
|
27755
29800
|
if (res.ok) {
|
|
27756
29801
|
output({ mode, success: true }, () => `
|
|
@@ -27765,7 +29810,7 @@ async function toolsList(globalOpts) {
|
|
|
27765
29810
|
ensureDb2();
|
|
27766
29811
|
const { openDatabaseReadOnly: openDatabaseReadOnly2 } = await Promise.resolve().then(() => (init_store5(), store_exports5));
|
|
27767
29812
|
const readDb = openDatabaseReadOnly2();
|
|
27768
|
-
const chatId =
|
|
29813
|
+
const chatId = resolveChatId2(globalOpts);
|
|
27769
29814
|
const rows = readDb.prepare("SELECT tool, enabled FROM chat_tools WHERE chat_id = ?").all(chatId);
|
|
27770
29815
|
readDb.close();
|
|
27771
29816
|
const ALL_TOOLS3 = ["Read", "Write", "Edit", "Bash", "Glob", "Grep", "WebFetch", "WebSearch", "Agent", "AskUserQuestion"];
|
|
@@ -27793,7 +29838,7 @@ async function toolsReset(globalOpts) {
|
|
|
27793
29838
|
outputError("DAEMON_OFFLINE", "CC-Claw daemon is not running.\n\n Start it with: cc-claw service start");
|
|
27794
29839
|
process.exit(1);
|
|
27795
29840
|
}
|
|
27796
|
-
const chatId =
|
|
29841
|
+
const chatId = resolveChatId2(globalOpts);
|
|
27797
29842
|
const res = await apiPost2("/api/tools/reset", { chatId });
|
|
27798
29843
|
if (res.ok) {
|
|
27799
29844
|
output({ success: true }, () => `
|
|
@@ -27810,7 +29855,7 @@ async function toggleTool4(globalOpts, name, enabled) {
|
|
|
27810
29855
|
outputError("DAEMON_OFFLINE", "CC-Claw daemon is not running.\n\n Start it with: cc-claw service start");
|
|
27811
29856
|
process.exit(1);
|
|
27812
29857
|
}
|
|
27813
|
-
const chatId =
|
|
29858
|
+
const chatId = resolveChatId2(globalOpts);
|
|
27814
29859
|
const res = await apiPost2("/api/tools/toggle", { chatId, tool: name, enabled });
|
|
27815
29860
|
if (res.ok) {
|
|
27816
29861
|
const label2 = enabled ? "enabled" : "disabled";
|
|
@@ -27826,7 +29871,7 @@ async function verboseGet(globalOpts) {
|
|
|
27826
29871
|
ensureDb2();
|
|
27827
29872
|
const { openDatabaseReadOnly: openDatabaseReadOnly2 } = await Promise.resolve().then(() => (init_store5(), store_exports5));
|
|
27828
29873
|
const readDb = openDatabaseReadOnly2();
|
|
27829
|
-
const chatId =
|
|
29874
|
+
const chatId = resolveChatId2(globalOpts);
|
|
27830
29875
|
const row = readDb.prepare("SELECT level FROM chat_verbose WHERE chat_id = ?").get(chatId);
|
|
27831
29876
|
readDb.close();
|
|
27832
29877
|
const level = row?.level ?? "off";
|
|
@@ -27843,7 +29888,7 @@ async function verboseSet(globalOpts, level) {
|
|
|
27843
29888
|
outputError("DAEMON_OFFLINE", "CC-Claw daemon is not running.\n\n Start it with: cc-claw service start");
|
|
27844
29889
|
process.exit(1);
|
|
27845
29890
|
}
|
|
27846
|
-
const chatId =
|
|
29891
|
+
const chatId = resolveChatId2(globalOpts);
|
|
27847
29892
|
const res = await apiPost2("/api/verbose/set", { chatId, level });
|
|
27848
29893
|
if (res.ok) {
|
|
27849
29894
|
output({ level, success: true }, () => `
|
|
@@ -27870,15 +29915,15 @@ __export(cwd_exports, {
|
|
|
27870
29915
|
cwdGet: () => cwdGet,
|
|
27871
29916
|
cwdSet: () => cwdSet
|
|
27872
29917
|
});
|
|
27873
|
-
import { existsSync as
|
|
29918
|
+
import { existsSync as existsSync46 } from "fs";
|
|
27874
29919
|
async function cwdGet(globalOpts) {
|
|
27875
|
-
if (!
|
|
29920
|
+
if (!existsSync46(DB_PATH)) {
|
|
27876
29921
|
outputError("DB_NOT_FOUND", "Database not found.");
|
|
27877
29922
|
process.exit(1);
|
|
27878
29923
|
}
|
|
27879
29924
|
const { openDatabaseReadOnly: openDatabaseReadOnly2 } = await Promise.resolve().then(() => (init_store5(), store_exports5));
|
|
27880
29925
|
const readDb = openDatabaseReadOnly2();
|
|
27881
|
-
const chatId =
|
|
29926
|
+
const chatId = resolveChatId2(globalOpts);
|
|
27882
29927
|
const row = readDb.prepare("SELECT cwd FROM chat_cwd WHERE chat_id = ?").get(chatId);
|
|
27883
29928
|
readDb.close();
|
|
27884
29929
|
const cwd = row?.cwd ?? null;
|
|
@@ -27890,7 +29935,7 @@ async function cwdSet(globalOpts, path) {
|
|
|
27890
29935
|
outputError("DAEMON_OFFLINE", "CC-Claw daemon is not running.\n\n Start it with: cc-claw service start");
|
|
27891
29936
|
process.exit(1);
|
|
27892
29937
|
}
|
|
27893
|
-
const chatId =
|
|
29938
|
+
const chatId = resolveChatId2(globalOpts);
|
|
27894
29939
|
const resolved = path.startsWith("~") ? path.replace("~", process.env.HOME ?? "") : path;
|
|
27895
29940
|
const res = await apiPost2("/api/cwd/set", { chatId, cwd: resolved });
|
|
27896
29941
|
if (res.ok) {
|
|
@@ -27908,7 +29953,7 @@ async function cwdClear(globalOpts) {
|
|
|
27908
29953
|
outputError("DAEMON_OFFLINE", "CC-Claw daemon is not running.\n\n Start it with: cc-claw service start");
|
|
27909
29954
|
process.exit(1);
|
|
27910
29955
|
}
|
|
27911
|
-
const chatId =
|
|
29956
|
+
const chatId = resolveChatId2(globalOpts);
|
|
27912
29957
|
const res = await apiPost2("/api/cwd/clear", { chatId });
|
|
27913
29958
|
if (res.ok) {
|
|
27914
29959
|
output({ success: true }, () => `
|
|
@@ -27934,15 +29979,15 @@ __export(voice_exports, {
|
|
|
27934
29979
|
voiceGet: () => voiceGet,
|
|
27935
29980
|
voiceSet: () => voiceSet
|
|
27936
29981
|
});
|
|
27937
|
-
import { existsSync as
|
|
29982
|
+
import { existsSync as existsSync47 } from "fs";
|
|
27938
29983
|
async function voiceGet(globalOpts) {
|
|
27939
|
-
if (!
|
|
29984
|
+
if (!existsSync47(DB_PATH)) {
|
|
27940
29985
|
outputError("DB_NOT_FOUND", "Database not found.");
|
|
27941
29986
|
process.exit(1);
|
|
27942
29987
|
}
|
|
27943
29988
|
const { openDatabaseReadOnly: openDatabaseReadOnly2 } = await Promise.resolve().then(() => (init_store5(), store_exports5));
|
|
27944
29989
|
const readDb = openDatabaseReadOnly2();
|
|
27945
|
-
const chatId =
|
|
29990
|
+
const chatId = resolveChatId2(globalOpts);
|
|
27946
29991
|
const row = readDb.prepare("SELECT enabled FROM chat_voice WHERE chat_id = ?").get(chatId);
|
|
27947
29992
|
readDb.close();
|
|
27948
29993
|
const enabled = !!row?.enabled;
|
|
@@ -27959,7 +30004,7 @@ async function voiceSet(globalOpts, value) {
|
|
|
27959
30004
|
outputError("DAEMON_OFFLINE", "CC-Claw daemon is not running.\n\n Start it with: cc-claw service start");
|
|
27960
30005
|
process.exit(1);
|
|
27961
30006
|
}
|
|
27962
|
-
const chatId =
|
|
30007
|
+
const chatId = resolveChatId2(globalOpts);
|
|
27963
30008
|
const res = await apiPost2("/api/voice/set", { chatId, enabled: value === "on" });
|
|
27964
30009
|
if (res.ok) {
|
|
27965
30010
|
output({ enabled: value === "on", success: true }, () => `
|
|
@@ -27985,15 +30030,15 @@ __export(heartbeat_exports, {
|
|
|
27985
30030
|
heartbeatGet: () => heartbeatGet,
|
|
27986
30031
|
heartbeatSet: () => heartbeatSet
|
|
27987
30032
|
});
|
|
27988
|
-
import { existsSync as
|
|
30033
|
+
import { existsSync as existsSync48 } from "fs";
|
|
27989
30034
|
async function heartbeatGet(globalOpts) {
|
|
27990
|
-
if (!
|
|
30035
|
+
if (!existsSync48(DB_PATH)) {
|
|
27991
30036
|
outputError("DB_NOT_FOUND", "Database not found.");
|
|
27992
30037
|
process.exit(1);
|
|
27993
30038
|
}
|
|
27994
30039
|
const { openDatabaseReadOnly: openDatabaseReadOnly2 } = await Promise.resolve().then(() => (init_store5(), store_exports5));
|
|
27995
30040
|
const readDb = openDatabaseReadOnly2();
|
|
27996
|
-
const chatId =
|
|
30041
|
+
const chatId = resolveChatId2(globalOpts);
|
|
27997
30042
|
const row = readDb.prepare("SELECT * FROM chat_heartbeat WHERE chat_id = ?").get(chatId);
|
|
27998
30043
|
readDb.close();
|
|
27999
30044
|
const data = row ? {
|
|
@@ -28025,7 +30070,7 @@ async function heartbeatSet(globalOpts, subcommand, value) {
|
|
|
28025
30070
|
outputError("DAEMON_OFFLINE", "CC-Claw daemon is not running.\n\n Start it with: cc-claw service start");
|
|
28026
30071
|
process.exit(1);
|
|
28027
30072
|
}
|
|
28028
|
-
const chatId =
|
|
30073
|
+
const chatId = resolveChatId2(globalOpts);
|
|
28029
30074
|
let body = { chatId };
|
|
28030
30075
|
let successMsg = "";
|
|
28031
30076
|
switch (subcommand) {
|
|
@@ -28096,15 +30141,15 @@ __export(summarizer_exports, {
|
|
|
28096
30141
|
summarizerGet: () => summarizerGet,
|
|
28097
30142
|
summarizerSet: () => summarizerSet
|
|
28098
30143
|
});
|
|
28099
|
-
import { existsSync as
|
|
30144
|
+
import { existsSync as existsSync49 } from "fs";
|
|
28100
30145
|
async function summarizerGet(globalOpts) {
|
|
28101
|
-
if (!
|
|
30146
|
+
if (!existsSync49(DB_PATH)) {
|
|
28102
30147
|
outputError("DB_NOT_FOUND", "Database not found.");
|
|
28103
30148
|
process.exit(1);
|
|
28104
30149
|
}
|
|
28105
30150
|
const { openDatabaseReadOnly: openDatabaseReadOnly2 } = await Promise.resolve().then(() => (init_store5(), store_exports5));
|
|
28106
30151
|
const readDb = openDatabaseReadOnly2();
|
|
28107
|
-
const chatId =
|
|
30152
|
+
const chatId = resolveChatId2(globalOpts);
|
|
28108
30153
|
const row = readDb.prepare("SELECT backend, model FROM chat_summarizer WHERE chat_id = ?").get(chatId);
|
|
28109
30154
|
readDb.close();
|
|
28110
30155
|
const value = row?.backend === "off" ? "off" : row?.backend ? `${row.backend}:${row.model ?? "default"}` : "auto";
|
|
@@ -28116,7 +30161,7 @@ async function summarizerSet(globalOpts, value) {
|
|
|
28116
30161
|
outputError("DAEMON_OFFLINE", "Daemon not running.");
|
|
28117
30162
|
process.exit(1);
|
|
28118
30163
|
}
|
|
28119
|
-
const chatId =
|
|
30164
|
+
const chatId = resolveChatId2(globalOpts);
|
|
28120
30165
|
const res = await apiPost2("/api/summarizer/set", { chatId, value });
|
|
28121
30166
|
if (res.ok) {
|
|
28122
30167
|
output({ success: true }, () => `
|
|
@@ -28142,15 +30187,15 @@ __export(thinking_exports, {
|
|
|
28142
30187
|
thinkingGet: () => thinkingGet,
|
|
28143
30188
|
thinkingSet: () => thinkingSet
|
|
28144
30189
|
});
|
|
28145
|
-
import { existsSync as
|
|
30190
|
+
import { existsSync as existsSync50 } from "fs";
|
|
28146
30191
|
async function thinkingGet(globalOpts) {
|
|
28147
|
-
if (!
|
|
30192
|
+
if (!existsSync50(DB_PATH)) {
|
|
28148
30193
|
outputError("DB_NOT_FOUND", "Database not found.");
|
|
28149
30194
|
process.exit(1);
|
|
28150
30195
|
}
|
|
28151
30196
|
const { openDatabaseReadOnly: openDatabaseReadOnly2 } = await Promise.resolve().then(() => (init_store5(), store_exports5));
|
|
28152
30197
|
const readDb = openDatabaseReadOnly2();
|
|
28153
|
-
const chatId =
|
|
30198
|
+
const chatId = resolveChatId2(globalOpts);
|
|
28154
30199
|
const row = readDb.prepare("SELECT level FROM chat_thinking WHERE chat_id = ?").get(chatId);
|
|
28155
30200
|
readDb.close();
|
|
28156
30201
|
output({ level: row?.level ?? "auto" }, (d) => d.level);
|
|
@@ -28161,7 +30206,7 @@ async function thinkingSet(globalOpts, level) {
|
|
|
28161
30206
|
outputError("DAEMON_OFFLINE", "Daemon not running.");
|
|
28162
30207
|
process.exit(1);
|
|
28163
30208
|
}
|
|
28164
|
-
const chatId =
|
|
30209
|
+
const chatId = resolveChatId2(globalOpts);
|
|
28165
30210
|
const res = await apiPost2("/api/thinking/set", { chatId, level });
|
|
28166
30211
|
if (res.ok) {
|
|
28167
30212
|
output({ success: true }, () => `
|
|
@@ -28188,9 +30233,9 @@ __export(chats_exports, {
|
|
|
28188
30233
|
chatsList: () => chatsList,
|
|
28189
30234
|
chatsRemoveAlias: () => chatsRemoveAlias
|
|
28190
30235
|
});
|
|
28191
|
-
import { existsSync as
|
|
30236
|
+
import { existsSync as existsSync51 } from "fs";
|
|
28192
30237
|
async function chatsList(_globalOpts) {
|
|
28193
|
-
if (!
|
|
30238
|
+
if (!existsSync51(DB_PATH)) {
|
|
28194
30239
|
outputError("DB_NOT_FOUND", "Database not found.");
|
|
28195
30240
|
process.exit(1);
|
|
28196
30241
|
}
|
|
@@ -28318,9 +30363,9 @@ var mcps_exports2 = {};
|
|
|
28318
30363
|
__export(mcps_exports2, {
|
|
28319
30364
|
mcpsList: () => mcpsList
|
|
28320
30365
|
});
|
|
28321
|
-
import { existsSync as
|
|
30366
|
+
import { existsSync as existsSync52 } from "fs";
|
|
28322
30367
|
async function mcpsList(_globalOpts) {
|
|
28323
|
-
if (!
|
|
30368
|
+
if (!existsSync52(DB_PATH)) {
|
|
28324
30369
|
outputError("DB_NOT_FOUND", "Database not found.");
|
|
28325
30370
|
process.exit(1);
|
|
28326
30371
|
}
|
|
@@ -28357,11 +30402,11 @@ __export(chat_exports2, {
|
|
|
28357
30402
|
chatSend: () => chatSend
|
|
28358
30403
|
});
|
|
28359
30404
|
import { request as httpRequest2 } from "http";
|
|
28360
|
-
import { readFileSync as readFileSync25, existsSync as
|
|
30405
|
+
import { readFileSync as readFileSync25, existsSync as existsSync53 } from "fs";
|
|
28361
30406
|
function getToken2() {
|
|
28362
30407
|
if (process.env.CC_CLAW_API_TOKEN) return process.env.CC_CLAW_API_TOKEN;
|
|
28363
30408
|
try {
|
|
28364
|
-
if (
|
|
30409
|
+
if (existsSync53(TOKEN_PATH2)) return readFileSync25(TOKEN_PATH2, "utf-8").trim();
|
|
28365
30410
|
} catch {
|
|
28366
30411
|
}
|
|
28367
30412
|
return null;
|
|
@@ -28372,7 +30417,7 @@ async function chatSend(globalOpts, message, cmdOpts) {
|
|
|
28372
30417
|
outputError("DAEMON_OFFLINE", "CC-Claw daemon is not running.\n\n Start it with: cc-claw service start");
|
|
28373
30418
|
process.exit(1);
|
|
28374
30419
|
}
|
|
28375
|
-
const chatId =
|
|
30420
|
+
const chatId = resolveChatId2(globalOpts);
|
|
28376
30421
|
const token = getToken2();
|
|
28377
30422
|
const payload = JSON.stringify({
|
|
28378
30423
|
chatId,
|
|
@@ -28506,7 +30551,7 @@ async function tuiCommand(globalOpts, cmdOpts) {
|
|
|
28506
30551
|
outputError("DAEMON_OFFLINE", "CC-Claw daemon is not running.\n\n Start it with: cc-claw service start");
|
|
28507
30552
|
process.exit(1);
|
|
28508
30553
|
}
|
|
28509
|
-
const chatId =
|
|
30554
|
+
const chatId = resolveChatId2(globalOpts);
|
|
28510
30555
|
const { chatSend: chatSend2 } = await Promise.resolve().then(() => (init_chat2(), chat_exports2));
|
|
28511
30556
|
const rl2 = createInterface10({
|
|
28512
30557
|
input: process.stdin,
|
|
@@ -28831,9 +30876,9 @@ __export(evolve_exports2, {
|
|
|
28831
30876
|
evolveStatus: () => evolveStatus,
|
|
28832
30877
|
evolveUndo: () => evolveUndo
|
|
28833
30878
|
});
|
|
28834
|
-
import { existsSync as
|
|
30879
|
+
import { existsSync as existsSync54 } from "fs";
|
|
28835
30880
|
function ensureDb3() {
|
|
28836
|
-
if (!
|
|
30881
|
+
if (!existsSync54(DB_PATH)) {
|
|
28837
30882
|
outputError("DB_NOT_FOUND", "Database not found. Run cc-claw setup first.");
|
|
28838
30883
|
process.exit(1);
|
|
28839
30884
|
}
|
|
@@ -28849,7 +30894,7 @@ async function evolveStatus(globalOpts) {
|
|
|
28849
30894
|
ensureDb3();
|
|
28850
30895
|
const { openDatabaseReadOnly: openDatabaseReadOnly2 } = await Promise.resolve().then(() => (init_store5(), store_exports5));
|
|
28851
30896
|
const readDb = openDatabaseReadOnly2();
|
|
28852
|
-
const chatId =
|
|
30897
|
+
const chatId = resolveChatId2(globalOpts);
|
|
28853
30898
|
const { getReflectionStatus: getReflectionStatus2, getPendingInsightCount: getPendingInsightCount2, getReflectionModelConfig: getReflectionModelConfig2 } = await Promise.resolve().then(() => (init_store4(), store_exports4));
|
|
28854
30899
|
const status = getReflectionStatus2(readDb, chatId);
|
|
28855
30900
|
const pendingCount = getPendingInsightCount2(readDb, chatId);
|
|
@@ -28889,7 +30934,7 @@ async function evolveStatus(globalOpts) {
|
|
|
28889
30934
|
}
|
|
28890
30935
|
async function evolveAnalyze(globalOpts) {
|
|
28891
30936
|
await ensureDaemon();
|
|
28892
|
-
const chatId =
|
|
30937
|
+
const chatId = resolveChatId2(globalOpts);
|
|
28893
30938
|
const { apiPost: apiPost2 } = await Promise.resolve().then(() => (init_api_client(), api_client_exports));
|
|
28894
30939
|
const res = await apiPost2("/api/evolve/analyze", { chatId });
|
|
28895
30940
|
if (res.ok) {
|
|
@@ -28906,7 +30951,7 @@ async function evolveList(globalOpts) {
|
|
|
28906
30951
|
ensureDb3();
|
|
28907
30952
|
const { openDatabaseReadOnly: openDatabaseReadOnly2 } = await Promise.resolve().then(() => (init_store5(), store_exports5));
|
|
28908
30953
|
const readDb = openDatabaseReadOnly2();
|
|
28909
|
-
const chatId =
|
|
30954
|
+
const chatId = resolveChatId2(globalOpts);
|
|
28910
30955
|
const { getPendingInsights: getPendingInsights2 } = await Promise.resolve().then(() => (init_store4(), store_exports4));
|
|
28911
30956
|
const insights = getPendingInsights2(readDb, chatId);
|
|
28912
30957
|
readDb.close();
|
|
@@ -29051,7 +31096,7 @@ async function evolveUndo(globalOpts, idStr) {
|
|
|
29051
31096
|
}
|
|
29052
31097
|
async function evolveOn(globalOpts) {
|
|
29053
31098
|
await ensureDaemon();
|
|
29054
|
-
const chatId =
|
|
31099
|
+
const chatId = resolveChatId2(globalOpts);
|
|
29055
31100
|
const { apiPost: apiPost2 } = await Promise.resolve().then(() => (init_api_client(), api_client_exports));
|
|
29056
31101
|
const res = await apiPost2("/api/evolve/on", { chatId });
|
|
29057
31102
|
if (res.ok) {
|
|
@@ -29065,7 +31110,7 @@ async function evolveOn(globalOpts) {
|
|
|
29065
31110
|
}
|
|
29066
31111
|
async function evolveOff(globalOpts) {
|
|
29067
31112
|
await ensureDaemon();
|
|
29068
|
-
const chatId =
|
|
31113
|
+
const chatId = resolveChatId2(globalOpts);
|
|
29069
31114
|
const { apiPost: apiPost2 } = await Promise.resolve().then(() => (init_api_client(), api_client_exports));
|
|
29070
31115
|
const res = await apiPost2("/api/evolve/off", { chatId });
|
|
29071
31116
|
if (res.ok) {
|
|
@@ -29082,7 +31127,7 @@ async function evolveModel(globalOpts, mode, opts) {
|
|
|
29082
31127
|
ensureDb3();
|
|
29083
31128
|
const { openDatabaseReadOnly: openDatabaseReadOnly2 } = await Promise.resolve().then(() => (init_store5(), store_exports5));
|
|
29084
31129
|
const readDb = openDatabaseReadOnly2();
|
|
29085
|
-
const chatId2 =
|
|
31130
|
+
const chatId2 = resolveChatId2(globalOpts);
|
|
29086
31131
|
const { getReflectionModelConfig: getReflectionModelConfig2 } = await Promise.resolve().then(() => (init_store4(), store_exports4));
|
|
29087
31132
|
const config2 = getReflectionModelConfig2(readDb, chatId2);
|
|
29088
31133
|
readDb.close();
|
|
@@ -29095,7 +31140,7 @@ async function evolveModel(globalOpts, mode, opts) {
|
|
|
29095
31140
|
return;
|
|
29096
31141
|
}
|
|
29097
31142
|
await ensureDaemon();
|
|
29098
|
-
const chatId =
|
|
31143
|
+
const chatId = resolveChatId2(globalOpts);
|
|
29099
31144
|
if (!["auto", "pinned", "cheap"].includes(mode)) {
|
|
29100
31145
|
outputError("INVALID_MODE", "Mode must be auto, pinned, or cheap.");
|
|
29101
31146
|
process.exit(1);
|
|
@@ -29120,7 +31165,7 @@ async function evolveStats(globalOpts, opts) {
|
|
|
29120
31165
|
ensureDb3();
|
|
29121
31166
|
const { openDatabaseReadOnly: openDatabaseReadOnly2 } = await Promise.resolve().then(() => (init_store5(), store_exports5));
|
|
29122
31167
|
const readDb = openDatabaseReadOnly2();
|
|
29123
|
-
const chatId =
|
|
31168
|
+
const chatId = resolveChatId2(globalOpts);
|
|
29124
31169
|
const days = parseInt(opts.days ?? "30", 10);
|
|
29125
31170
|
const { buildGrowthReportData: buildGrowthReportData2 } = await Promise.resolve().then(() => (init_metrics(), metrics_exports));
|
|
29126
31171
|
const report = buildGrowthReportData2(readDb, chatId, days);
|
|
@@ -29149,7 +31194,7 @@ async function evolveHistory(globalOpts, opts) {
|
|
|
29149
31194
|
ensureDb3();
|
|
29150
31195
|
const { openDatabaseReadOnly: openDatabaseReadOnly2 } = await Promise.resolve().then(() => (init_store5(), store_exports5));
|
|
29151
31196
|
const readDb = openDatabaseReadOnly2();
|
|
29152
|
-
const chatId =
|
|
31197
|
+
const chatId = resolveChatId2(globalOpts);
|
|
29153
31198
|
const limit = parseInt(opts.limit ?? "20", 10);
|
|
29154
31199
|
let insights;
|
|
29155
31200
|
if (opts.status) {
|
|
@@ -29186,7 +31231,7 @@ async function evolveHistory(globalOpts, opts) {
|
|
|
29186
31231
|
async function evolveSettings(globalOpts, opts) {
|
|
29187
31232
|
ensureDb3();
|
|
29188
31233
|
const { openDatabaseReadOnly: openDatabaseReadOnly2 } = await Promise.resolve().then(() => (init_store5(), store_exports5));
|
|
29189
|
-
const chatId =
|
|
31234
|
+
const chatId = resolveChatId2(globalOpts);
|
|
29190
31235
|
const hasUpdates = opts.perFileCap !== void 0 || opts.backupRetentionDays !== void 0;
|
|
29191
31236
|
if (hasUpdates) {
|
|
29192
31237
|
await ensureDaemon();
|
|
@@ -29307,7 +31352,7 @@ var init_optimize2 = __esm({
|
|
|
29307
31352
|
|
|
29308
31353
|
// src/setup.ts
|
|
29309
31354
|
var setup_exports = {};
|
|
29310
|
-
import { existsSync as
|
|
31355
|
+
import { existsSync as existsSync55, writeFileSync as writeFileSync12, readFileSync as readFileSync26, copyFileSync as copyFileSync4, mkdirSync as mkdirSync18, statSync as statSync12 } from "fs";
|
|
29311
31356
|
import { execFileSync as execFileSync5 } from "child_process";
|
|
29312
31357
|
import { createInterface as createInterface11 } from "readline";
|
|
29313
31358
|
import { join as join34 } from "path";
|
|
@@ -29385,10 +31430,10 @@ async function setup() {
|
|
|
29385
31430
|
}
|
|
29386
31431
|
console.log("");
|
|
29387
31432
|
for (const dir of [CC_CLAW_HOME, DATA_PATH, LOGS_PATH, SKILLS_PATH, RUNNERS_PATH, AGENTS_PATH]) {
|
|
29388
|
-
if (!
|
|
31433
|
+
if (!existsSync55(dir)) mkdirSync18(dir, { recursive: true });
|
|
29389
31434
|
}
|
|
29390
31435
|
const env = {};
|
|
29391
|
-
const envSource =
|
|
31436
|
+
const envSource = existsSync55(ENV_PATH) ? ENV_PATH : existsSync55(".env") ? ".env" : null;
|
|
29392
31437
|
if (envSource) {
|
|
29393
31438
|
console.log(yellow(` Found existing config at ${envSource} \u2014 your values will be preserved`));
|
|
29394
31439
|
console.log(yellow(" unless you enter new ones. Just press Enter to keep existing values.\n"));
|
|
@@ -29399,7 +31444,7 @@ async function setup() {
|
|
|
29399
31444
|
}
|
|
29400
31445
|
}
|
|
29401
31446
|
const cwdDb = join34(process.cwd(), "cc-claw.db");
|
|
29402
|
-
if (
|
|
31447
|
+
if (existsSync55(cwdDb) && !existsSync55(DB_PATH)) {
|
|
29403
31448
|
const { size } = statSync12(cwdDb);
|
|
29404
31449
|
console.log(yellow(` Found existing database at ${cwdDb} (${(size / 1024).toFixed(0)}KB)`));
|
|
29405
31450
|
const migrate = await confirm("Copy database to ~/.cc-claw/? (preserves memories & history)", true);
|
|
@@ -29624,7 +31669,7 @@ async function setup() {
|
|
|
29624
31669
|
const installDaemon = await confirm("Install CC-Claw as a background service?", true);
|
|
29625
31670
|
if (installDaemon) {
|
|
29626
31671
|
try {
|
|
29627
|
-
const { installService: installService2 } = await Promise.resolve().then(() => (
|
|
31672
|
+
const { installService: installService2 } = await Promise.resolve().then(() => (init_service2(), service_exports2));
|
|
29628
31673
|
installService2();
|
|
29629
31674
|
console.log("");
|
|
29630
31675
|
console.log(dim(" Useful commands:"));
|
|
@@ -29700,7 +31745,7 @@ function registerServiceCommands(cmd) {
|
|
|
29700
31745
|
if (opts.foreground) {
|
|
29701
31746
|
await Promise.resolve().then(() => (init_index(), index_exports));
|
|
29702
31747
|
} else {
|
|
29703
|
-
const { installService: installService2 } = await Promise.resolve().then(() => (
|
|
31748
|
+
const { installService: installService2 } = await Promise.resolve().then(() => (init_service2(), service_exports2));
|
|
29704
31749
|
installService2();
|
|
29705
31750
|
}
|
|
29706
31751
|
});
|
|
@@ -29713,15 +31758,15 @@ function registerServiceCommands(cmd) {
|
|
|
29713
31758
|
await restartService2();
|
|
29714
31759
|
});
|
|
29715
31760
|
cmd.command("status").description("Service health (launchd/systemd)").action(async () => {
|
|
29716
|
-
const { serviceStatus: serviceStatus2 } = await Promise.resolve().then(() => (
|
|
31761
|
+
const { serviceStatus: serviceStatus2 } = await Promise.resolve().then(() => (init_service2(), service_exports2));
|
|
29717
31762
|
serviceStatus2();
|
|
29718
31763
|
});
|
|
29719
31764
|
cmd.command("install").description("Install as background service").action(async () => {
|
|
29720
|
-
const { installService: installService2 } = await Promise.resolve().then(() => (
|
|
31765
|
+
const { installService: installService2 } = await Promise.resolve().then(() => (init_service2(), service_exports2));
|
|
29721
31766
|
installService2();
|
|
29722
31767
|
});
|
|
29723
31768
|
cmd.command("uninstall").description("Remove background service").action(async () => {
|
|
29724
|
-
const { uninstallService: uninstallService2 } = await Promise.resolve().then(() => (
|
|
31769
|
+
const { uninstallService: uninstallService2 } = await Promise.resolve().then(() => (init_service2(), service_exports2));
|
|
29725
31770
|
uninstallService2();
|
|
29726
31771
|
});
|
|
29727
31772
|
}
|
|
@@ -29824,6 +31869,35 @@ function registerUnifiedSlotCommands(parentCmd, backendId, displayName) {
|
|
|
29824
31869
|
}
|
|
29825
31870
|
registerUnifiedSlotCommands(program, "claude", "Claude");
|
|
29826
31871
|
registerUnifiedSlotCommands(program, "codex", "Codex");
|
|
31872
|
+
var ollama = program.command("ollama").description("Manage Ollama local LLM servers and models");
|
|
31873
|
+
ollama.command("list").description("List all configured Ollama servers").action(async () => {
|
|
31874
|
+
const { ollamaList: ollamaList2 } = await Promise.resolve().then(() => (init_ollama4(), ollama_exports3));
|
|
31875
|
+
await ollamaList2(program.opts());
|
|
31876
|
+
});
|
|
31877
|
+
ollama.command("add <name> <host>").description("Add an Ollama server").option("--port <n>", "Port (default: 11434)").option("--key <apiKey>", "API key (for remote/proxy servers)").action(async (name, host, opts) => {
|
|
31878
|
+
const { ollamaAdd: ollamaAdd2 } = await Promise.resolve().then(() => (init_ollama4(), ollama_exports3));
|
|
31879
|
+
await ollamaAdd2(program.opts(), name, host, opts);
|
|
31880
|
+
});
|
|
31881
|
+
ollama.command("remove <name>").description("Remove an Ollama server and its cached models").action(async (name) => {
|
|
31882
|
+
const { ollamaRemove: ollamaRemove2 } = await Promise.resolve().then(() => (init_ollama4(), ollama_exports3));
|
|
31883
|
+
await ollamaRemove2(program.opts(), name);
|
|
31884
|
+
});
|
|
31885
|
+
ollama.command("models").description("List discovered models across all servers").option("--server <name>", "Filter to a specific server").action(async (opts) => {
|
|
31886
|
+
const { ollamaModels: ollamaModels2 } = await Promise.resolve().then(() => (init_ollama4(), ollama_exports3));
|
|
31887
|
+
await ollamaModels2(program.opts(), opts);
|
|
31888
|
+
});
|
|
31889
|
+
ollama.command("discover").description("Discover models on servers (syncs to local DB)").option("--server <name>", "Discover on a specific server only").action(async (opts) => {
|
|
31890
|
+
const { ollamaDiscover: ollamaDiscover2 } = await Promise.resolve().then(() => (init_ollama4(), ollama_exports3));
|
|
31891
|
+
await ollamaDiscover2(program.opts(), opts);
|
|
31892
|
+
});
|
|
31893
|
+
ollama.command("health").description("Ping all servers and update status").action(async () => {
|
|
31894
|
+
const { ollamaHealth: ollamaHealth2 } = await Promise.resolve().then(() => (init_ollama4(), ollama_exports3));
|
|
31895
|
+
await ollamaHealth2(program.opts());
|
|
31896
|
+
});
|
|
31897
|
+
ollama.command("test <model>").description("Run quality gate test on a model (Phase 4)").action(async (model2) => {
|
|
31898
|
+
const { ollamaTest: ollamaTest2 } = await Promise.resolve().then(() => (init_ollama4(), ollama_exports3));
|
|
31899
|
+
await ollamaTest2(program.opts(), model2);
|
|
31900
|
+
});
|
|
29827
31901
|
var backend = program.command("backend").description("Manage AI backend");
|
|
29828
31902
|
backend.command("list").description("Available backends with status").action(async () => {
|
|
29829
31903
|
const { backendList: backendList2 } = await Promise.resolve().then(() => (init_backend(), backend_exports));
|
|
@@ -30118,12 +32192,12 @@ mcps.command("list").description("All registered MCP servers").action(async () =
|
|
|
30118
32192
|
const { mcpsList: mcpsList2 } = await Promise.resolve().then(() => (init_mcps2(), mcps_exports2));
|
|
30119
32193
|
await mcpsList2(program.opts());
|
|
30120
32194
|
});
|
|
30121
|
-
var
|
|
30122
|
-
|
|
32195
|
+
var chat2 = program.command("chat").description("Chat with the AI");
|
|
32196
|
+
chat2.command("send <message>").description("Send a message and get a response").option("--backend <name>", "Override backend").option("--model <name>", "Override model").option("--thinking <level>", "Override thinking level").option("--cwd <path>", "Override working directory").option("--stream", "Stream response tokens as they arrive").action(async (message, opts) => {
|
|
30123
32197
|
const { chatSend: chatSend2 } = await Promise.resolve().then(() => (init_chat2(), chat_exports2));
|
|
30124
32198
|
await chatSend2(program.opts(), message, opts);
|
|
30125
32199
|
});
|
|
30126
|
-
|
|
32200
|
+
chat2.command("stop").description("Cancel the current running task").action(async () => {
|
|
30127
32201
|
const { isDaemonRunning: isDaemonRunning2, apiPost: apiPost2 } = await Promise.resolve().then(() => (init_api_client(), api_client_exports));
|
|
30128
32202
|
if (!await isDaemonRunning2()) {
|
|
30129
32203
|
const { outputError: outputError3 } = await Promise.resolve().then(() => (init_format2(), format_exports));
|
|
@@ -30143,7 +32217,7 @@ chat.command("stop").description("Cancel the current running task").action(async
|
|
|
30143
32217
|
process.exit(1);
|
|
30144
32218
|
}
|
|
30145
32219
|
});
|
|
30146
|
-
|
|
32220
|
+
chat2.argument("[message]", "Message to send (shorthand for chat send)").action(async (message, opts) => {
|
|
30147
32221
|
if (message && !["send", "stop"].includes(message)) {
|
|
30148
32222
|
const { chatSend: chatSend2 } = await Promise.resolve().then(() => (init_chat2(), chat_exports2));
|
|
30149
32223
|
await chatSend2(program.opts(), message, opts);
|
|
@@ -30223,11 +32297,11 @@ program.command("start", { hidden: true }).description("Run the bot in the foreg
|
|
|
30223
32297
|
await Promise.resolve().then(() => (init_index(), index_exports));
|
|
30224
32298
|
});
|
|
30225
32299
|
program.command("install", { hidden: true }).description("Install as background service \u2014 alias for service install").action(async () => {
|
|
30226
|
-
const { installService: installService2 } = await Promise.resolve().then(() => (
|
|
32300
|
+
const { installService: installService2 } = await Promise.resolve().then(() => (init_service2(), service_exports2));
|
|
30227
32301
|
installService2();
|
|
30228
32302
|
});
|
|
30229
32303
|
program.command("uninstall", { hidden: true }).description("Remove background service \u2014 alias for service uninstall").action(async () => {
|
|
30230
|
-
const { uninstallService: uninstallService2 } = await Promise.resolve().then(() => (
|
|
32304
|
+
const { uninstallService: uninstallService2 } = await Promise.resolve().then(() => (init_service2(), service_exports2));
|
|
30231
32305
|
uninstallService2();
|
|
30232
32306
|
});
|
|
30233
32307
|
program.command("setup").description("Interactive configuration wizard").action(async () => {
|