opencami 1.9.0 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client/assets/{CSPContext-TfUptlEu.js → CSPContext-CrlIQW7-.js} +1 -1
- package/dist/client/assets/{DirectionContext-CQMv7g2N.js → DirectionContext-X-0CRn1O.js} +1 -1
- package/dist/client/assets/_sessionKey-yNQ57svB.js +23 -0
- package/dist/client/assets/agents-WqWjsymD.js +2 -0
- package/dist/client/assets/{agents-screen-fSZJpRi_.js → agents-screen-BhOVp_S6.js} +1 -1
- package/dist/client/assets/bots-DNqiFT7w.js +2 -0
- package/dist/client/assets/{bots-screen-4yT-e3cM.js → bots-screen-DLFd0ydi.js} +1 -1
- package/dist/client/assets/button-D0n2Qsd_.js +1 -0
- package/dist/client/assets/{composite-BLgu_EOL.js → composite-GtKwZKbV.js} +1 -1
- package/dist/client/assets/{connect-CiqRvR6s.js → connect-vQWL0_11.js} +1 -1
- package/dist/client/assets/{dashboard-CyWDWpbj.js → dashboard-Knwc61i1.js} +1 -1
- package/dist/client/assets/{event-2_Dxdv7h.js → event-CHpdjYFR.js} +1 -1
- package/dist/client/assets/file-explorer-screen-FoYNs9zK.js +1 -0
- package/dist/client/assets/files-BtR_gArr.js +2 -0
- package/dist/client/assets/follow-up-suggestions-DVXNLqga.js +5 -0
- package/dist/client/assets/{index-ygitKeM-.js → index-CNPHef4O.js} +1 -1
- package/dist/client/assets/{index-C_gsW9fo.js → index-CTT0Y1ya.js} +1 -1
- package/dist/client/assets/{keyboard-shortcuts-dialog-z-amTZVi.js → keyboard-shortcuts-dialog-DP8ptQ7N.js} +1 -1
- package/dist/client/assets/{main-ZBMVSJTF.js → main-xPlWrMhO.js} +3 -3
- package/dist/client/assets/{markdown-CHUjmWcv.js → markdown-3Js_RbUp.js} +1 -1
- package/dist/client/assets/memory-BRGPq5t6.js +2 -0
- package/dist/client/assets/{memory-screen-C_ZNDGLd.js → memory-screen-nzRra2Qi.js} +1 -1
- package/dist/client/assets/{menu-CB88T7R1.js → menu-BnSEqetd.js} +1 -1
- package/dist/client/assets/{opencami-logo-C0Kj1DiT.js → opencami-logo-B_hLbomw.js} +1 -1
- package/dist/client/assets/{proxy-D-juuhw6.js → proxy-BnlGpgC1.js} +1 -1
- package/dist/client/assets/{react-Akh4y69S.js → react-BgjQyJHw.js} +1 -1
- package/dist/client/assets/{search-dialog-BasfzCyM.js → search-dialog-Bib2QY9u.js} +1 -1
- package/dist/client/assets/{search-sources-badge-DwFHWd7S.js → search-sources-badge-COHcYFRB.js} +1 -1
- package/dist/client/assets/{session-export-dialog-CAl3iJnD.js → session-export-dialog-ooPnfHh_.js} +1 -1
- package/dist/client/assets/settings-dialog-B8mz99u-.js +1 -0
- package/dist/client/assets/skills-DA9J_tsC.js +2 -0
- package/dist/client/assets/{skills-panel-B7BRAofP.js → skills-panel-D2uMdCHp.js} +1 -1
- package/dist/client/assets/styles-CWabEzNU.css +1 -0
- package/dist/client/assets/{switch-DYEbEgy5.js → switch-BUQ0qH6r.js} +1 -1
- package/dist/client/assets/{tabs-eiBvL0H7.js → tabs-BEyU6TjN.js} +1 -1
- package/dist/client/assets/{thinking-CariuioI.js → thinking-CA48yhOE.js} +1 -1
- package/dist/client/assets/{tooltip-CekkGEYG.js → tooltip-CcIdgcV0.js} +1 -1
- package/dist/client/assets/{use-file-explorer-state-Dfyh4GwR.js → use-file-explorer-state-CN_IJGcd.js} +2 -2
- package/dist/client/assets/{useBaseUiId-DLhdkHJl.js → useBaseUiId-ClbEYEil.js} +1 -1
- package/dist/client/assets/useCompositeItem-B_OxfJee.js +1 -0
- package/dist/client/assets/{useControlled-CpliTEve.js → useControlled-CyT-lqbs.js} +1 -1
- package/dist/client/assets/{useMutation-CpD2Pn0F.js → useMutation-eQUrsn-X.js} +1 -1
- package/dist/client/assets/{useOnFirstRender-DsFYFJoB.js → useOnFirstRender-CR_o2MK_.js} +1 -1
- package/dist/client/assets/{useQuery-DMTgpIql.js → useQuery-k6EMRoMD.js} +1 -1
- package/dist/server/assets/{_sessionKey-Bhksr7VP.js → _sessionKey-CaFqmyhU.js} +388 -390
- package/dist/server/assets/{_tanstack-start-manifest_v-D-5ReiD4.js → _tanstack-start-manifest_v-P3skSR3R.js} +1 -1
- package/dist/server/assets/{follow-up-suggestions-B3hol2KT.js → follow-up-suggestions-DHv2_XzB.js} +13 -74
- package/dist/server/assets/{index-4G_4vZNY.js → index-C7lmufwX.js} +1 -1
- package/dist/server/assets/{router-C9JRmWMm.js → router-X2L0PDPI.js} +133 -294
- package/dist/server/assets/{search-dialog-CTJULPB8.js → search-dialog-CXhofdoP.js} +2 -2
- package/dist/server/assets/{settings-dialog-B5yR2pBy.js → settings-dialog-CPdftvjz.js} +1 -254
- package/dist/server/assets/{thinking-CHx4Oouj.js → thinking-YkRSlXtf.js} +2 -2
- package/dist/server/server.js +2 -2
- package/package.json +1 -1
- package/dist/client/assets/_sessionKey-DYknvaDS.js +0 -23
- package/dist/client/assets/agents-DNywJUai.js +0 -2
- package/dist/client/assets/bots-Bqjqhws8.js +0 -2
- package/dist/client/assets/button-DqP4GZwZ.js +0 -1
- package/dist/client/assets/file-explorer-screen-CZ2QKk-0.js +0 -1
- package/dist/client/assets/files-Cbhud0J8.js +0 -2
- package/dist/client/assets/follow-up-suggestions-Bi3Ci2my.js +0 -5
- package/dist/client/assets/memory-BRa-0plj.js +0 -2
- package/dist/client/assets/settings-dialog-C8OoRXwX.js +0 -1
- package/dist/client/assets/skills-Cx12984a.js +0 -2
- package/dist/client/assets/styles-CXa-SiWC.css +0 -1
- package/dist/client/assets/useCompositeItem-DTSTTR0Z.js +0 -1
@@ -12,7 +12,7 @@ import { execFile, execSync } from "node:child_process";
 import { promisify } from "node:util";
 import { readFile, mkdir, writeFile, rename, stat, readdir, rm, realpath, lstat } from "node:fs/promises";
 import { posix } from "path";
-const appCss = "/assets/styles-
+const appCss = "/assets/styles-CWabEzNU.css";
 const swRegisterScript = `
 (() => {
 // Skip PWA service worker inside Capacitor native shell — they conflict
@@ -372,11 +372,11 @@ const $$splitComponentImporter$2 = () => import("./agents-BuE0Yum3.js");
 const Route$t = createFileRoute("/agents")({
 component: lazyRouteComponent($$splitComponentImporter$2, "component")
 });
-const $$splitComponentImporter$1 = () => import("./index-
+const $$splitComponentImporter$1 = () => import("./index-C7lmufwX.js");
 const Route$s = createFileRoute("/")({
 component: lazyRouteComponent($$splitComponentImporter$1, "component")
 });
-const $$splitComponentImporter = () => import("./_sessionKey-
+const $$splitComponentImporter = () => import("./_sessionKey-CaFqmyhU.js").then((n) => n.$);
 const Route$r = createFileRoute("/chat/$sessionKey")({
 component: lazyRouteComponent($$splitComponentImporter, "component")
 });
@@ -593,8 +593,7 @@ class PersistentGatewayConnection {
 connectPromise = null;
 pendingRpcs = /* @__PURE__ */ new Map();
 reconnectTimer = null;
-
-maxReconnectDelay = 3e4;
+reconnectAttempt = 0;
 destroyed = false;
 _devicePending = false;
 _deviceId = "";
@@ -754,7 +753,7 @@ class PersistentGatewayConnection {
 this._devicePending = false;
 }
 this.connected = true;
-this.
+this.reconnectAttempt = 0;
 console.log("[gateway-ws] Persistent connection established");
 }
 _onMessage(data) {
@@ -845,15 +844,18 @@ class PersistentGatewayConnection {
 }
 _scheduleReconnect() {
 if (this.reconnectTimer) return;
-const
-
-
+const base = 1e3;
+const cap = 3e4;
+const exponential = Math.min(cap, base * 2 ** this.reconnectAttempt);
+const delay = Math.round(Math.random() * exponential);
+this.reconnectAttempt++;
+console.log(`[gateway-ws] Reconnecting in ${delay}ms (attempt ${this.reconnectAttempt})...`);
 this.reconnectTimer = setTimeout(async () => {
 this.reconnectTimer = null;
 try {
 await this.ensureConnected();
 } catch (err) {
-console.error(
+console.error(`[gateway-ws] Reconnect attempt ${this.reconnectAttempt} failed:`, err instanceof Error ? err.message : err);
 }
 }, delay);
 }
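The hunk above replaces the connection's fixed `maxReconnectDelay` with capped exponential backoff plus full jitter, tracked by the new `reconnectAttempt` counter. As a reading aid, here is a minimal standalone sketch of the delay math using the base and cap values from the diff; the function name and `attempt` parameter are illustrative and not part of the package.

```js
// Sketch of the reconnect delay computed by the new _scheduleReconnect():
// capped exponential backoff with full jitter. `attempt` stands in for
// this.reconnectAttempt; the 1e3 base and 3e4 cap are the values in the diff.
function reconnectDelayMs(attempt) {
  const base = 1e3;                                 // 1 second
  const cap = 3e4;                                  // 30 seconds
  const exponential = Math.min(cap, base * 2 ** attempt);
  return Math.round(Math.random() * exponential);   // anywhere from 0 up to the exponential bound
}
// attempt 0 -> up to 1s, attempt 3 -> up to 8s, attempt 5 and beyond -> up to 30s
```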
@@ -2015,75 +2017,64 @@ const Route$h = createFileRoute("/api/models")({
 }
 }
 });
-const
-const
-const
-
-
-
-];
+const OPENCLAW_GATEWAY_URL = "http://127.0.0.1:18789/v1/chat/completions";
+const OPENCLAW_MODEL = "openclaw";
+const DEFAULT_TIMEOUT_MS = 3e4;
+function getGatewayToken() {
+return process.env.OPENCLAW_GATEWAY_TOKEN?.trim() || process.env.CLAWDBOT_GATEWAY_TOKEN?.trim() || null;
+}
 async function chatCompletion(messages, options) {
-const
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-}
-
-
-
-
-
-
-console.log(`[llm-client] Model ${currentModel} not available, trying next...`);
-lastError = new Error(`Model ${currentModel} not found`);
-continue;
-}
-throw new Error(`OpenAI API error: ${response.status} ${errorText}`);
-}
-const data = await response.json();
-const content = data.choices[0]?.message?.content?.trim() || "";
-if (content) {
-return content;
-}
-lastError = new Error(`Model ${currentModel} returned empty response`);
-continue;
-} catch (error) {
-clearTimeout(timeoutId);
-if (error instanceof Error && error.name === "AbortError") {
-lastError = new Error("OpenAI API request timed out");
-continue;
-}
-lastError = error instanceof Error ? error : new Error(String(error));
-continue;
+const controller = new AbortController();
+const timeoutId = setTimeout(
+() => controller.abort(),
+options?.timeoutMs ?? DEFAULT_TIMEOUT_MS
+);
+try {
+const token = getGatewayToken();
+const response = await fetch(OPENCLAW_GATEWAY_URL, {
+method: "POST",
+headers: {
+"Content-Type": "application/json",
+...token ? { Authorization: `Bearer ${token}` } : {}
+},
+body: JSON.stringify({
+model: OPENCLAW_MODEL,
+messages,
+max_tokens: options?.maxTokens ?? 200,
+temperature: options?.temperature ?? 0.7
+}),
+signal: controller.signal
+});
+if (!response.ok) {
+const errorText = await response.text().catch(() => "");
+throw new Error(
+`OpenClaw Gateway error: ${response.status}${errorText ? ` ${errorText}` : ""}`
+);
+}
+const data = await response.json();
+const content = data.choices?.[0]?.message?.content?.trim() || "";
+if (!content) {
+throw new Error("OpenClaw Gateway returned empty content");
 }
+return content;
+} catch (error) {
+if (error instanceof Error && error.name === "AbortError") {
+throw new Error("OpenClaw Gateway request timed out");
+}
+throw error instanceof Error ? error : new Error(String(error));
+} finally {
+clearTimeout(timeoutId);
 }
-throw lastError || new Error("All models failed");
 }
-
-const
+function parseFollowUps(text) {
+const lines = text.split("\n").map((line) => line.trim()).filter((line) => line.length > 0).map((line) => line.replace(/^\d+[.)\s]+/, "").trim()).map((line) => line.replace(/^[-•*]\s*/, "").trim()).map((line) => line.replace(/^["']|["']$/g, "").trim()).filter((line) => line.length > 0 && line.length < 150);
+return lines.slice(0, 3);
+}
+async function generateTitleViaOpenclaw(message) {
+const systemPrompt = `Generate a concise 3-6 word title for this conversation.
 Rules:
 - No quotes or punctuation at the end
-- Capture the main topic
+- Capture the main topic or intent
 - Be specific, not generic
 - Use title case`;
 return chatCompletion(
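The rewritten `chatCompletion` above drops the multi-model fallback loop and its `lastError` bookkeeping and always posts to the local OpenClaw gateway, throwing on HTTP errors, empty content, or timeout. A hedged sketch of a call site using the new options shape; the messages are invented and the surrounding async context is assumed.

```js
// Hypothetical call site for the new chatCompletion (inside an async function).
// The options shape ({ maxTokens, temperature, timeoutMs }) comes from the diff;
// the message content here is invented.
const reply = await chatCompletion(
  [
    { role: "system", content: "You are a concise assistant." },
    { role: "user", content: "Summarize the last deploy in one sentence." }
  ],
  { maxTokens: 120, temperature: 0.3, timeoutMs: 1e4 }
);
// Unlike 1.9.0, failures are thrown (gateway error, empty content, timeout)
// rather than retried against another model.
```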
@@ -2091,104 +2082,49 @@ Rules:
 { role: "system", content: systemPrompt },
 { role: "user", content: message }
 ],
-{
+{ maxTokens: 60, temperature: 0.3 }
 );
 }
-async function
-const
+async function generateFollowUpsViaOpenclaw(responseText, contextSummary) {
+const truncatedResponse = responseText.length > 1500 ? `${responseText.slice(0, 1500)}...` : responseText;
+const trimmedSummary = contextSummary?.slice(0, 500).trim() || "";
+const userPrompt = trimmedSummary ? `Context: ${trimmedSummary}
+
+Assistant's response:
+${truncatedResponse}` : `Assistant's response:
+${truncatedResponse}`;
+const systemPrompt = `You are a helpful assistant that generates follow-up question suggestions.
+Given the assistant's last response, generate exactly 3 short, natural follow-up questions the user might want to ask.
+
 Rules:
-- Each question
-- Make them
-- Vary the types: clarification, deeper
--
+- Each suggestion should be a single, concise question (under 60 characters preferred)
+- Make them contextually relevant to the response
+- Vary the types: clarification, deeper exploration, practical application
+- Use natural, conversational language
+- Do not number them or add any prefix
+
+Output format: Return ONLY the 3 questions, one per line, nothing else.`;
 const response = await chatCompletion(
 [
 { role: "system", content: systemPrompt },
-{ role: "user", content:
+{ role: "user", content: userPrompt }
 ],
-{
+{ maxTokens: 200, temperature: 0.7 }
 );
-
-let jsonStr = response.trim();
-if (jsonStr.startsWith("```")) {
-jsonStr = jsonStr.replace(/^```(?:json)?\n?/, "").replace(/\n?```$/, "");
-}
-const parsed = JSON.parse(jsonStr);
-if (Array.isArray(parsed) && parsed.length > 0) {
-return parsed.slice(0, 3).map(String);
-}
-} catch {
-}
-return [];
+return parseFollowUps(response);
 }
-async function
-const llmOptions = typeof options === "string" ? { apiKey: options } : options;
+async function isOpenclawAvailable() {
 try {
-await chatCompletion(
-
-
-
+await chatCompletion([{ role: "user", content: "Hi" }], {
+maxTokens: 1,
+temperature: 0,
+timeoutMs: 1e4
+});
 return true;
 } catch {
 return false;
 }
 }
-const DEFAULT_OPENAI_BASE_URL = "https://api.openai.com/v1";
-const PRESET_BASE_URL_ORIGINS = /* @__PURE__ */ new Set([
-"https://api.openai.com",
-"https://openrouter.ai",
-"https://api.kilo.ai",
-"http://localhost:11434",
-"http://127.0.0.1:11434"
-]);
-function getOrigin(rawBaseUrl) {
-try {
-return new URL(rawBaseUrl).origin;
-} catch {
-return null;
-}
-}
-function isAllowedClientBaseUrl(rawBaseUrl) {
-const parsed = new URL(rawBaseUrl);
-if (!["http:", "https:"].includes(parsed.protocol)) return false;
-if (parsed.username || parsed.password) return false;
-const hostname = parsed.hostname.toLowerCase();
-const isLocalHost = hostname === "localhost" || hostname === "127.0.0.1" || hostname === "::1";
-if (!isLocalHost && parsed.protocol !== "https:") return false;
-const origin = parsed.origin;
-if (PRESET_BASE_URL_ORIGINS.has(origin)) return true;
-const envBaseUrl = process.env.LLM_BASE_URL?.trim();
-const envOrigin = envBaseUrl ? getOrigin(envBaseUrl) : null;
-return Boolean(envOrigin && envOrigin === origin);
-}
-function detectProvider(rawBaseUrl) {
-const baseUrl = rawBaseUrl?.toLowerCase() || "";
-if (baseUrl.includes("openrouter.ai")) return "openrouter";
-if (baseUrl.includes("kilo.ai")) return "kilocode";
-return "openai";
-}
-function getLlmConfig(request) {
-const headerKey = request.headers.get("X-OpenAI-API-Key");
-const headerBaseUrl = request.headers.get("X-LLM-Base-URL")?.trim() || null;
-const envBaseUrl = process.env.LLM_BASE_URL?.trim() || null;
-if (headerBaseUrl) {
-const origin = getOrigin(headerBaseUrl);
-if (!origin || !isAllowedClientBaseUrl(headerBaseUrl)) {
-return {
-apiKey: null,
-baseUrl: null,
-model: null,
-error: "Disallowed X-LLM-Base-URL value"
-};
-}
-}
-const baseUrl = headerBaseUrl || envBaseUrl || DEFAULT_OPENAI_BASE_URL;
-const provider = detectProvider(baseUrl);
-const envKey = provider === "openrouter" ? process.env.OPENROUTER_API_KEY?.trim() || process.env.OPENAI_API_KEY?.trim() : provider === "kilocode" ? process.env.KILOCODE_API_KEY?.trim() || process.env.OPENAI_API_KEY?.trim() : process.env.OPENAI_API_KEY?.trim();
-const apiKey = headerKey?.trim() || envKey || null;
-const model = request.headers.get("X-LLM-Model")?.trim() || process.env.LLM_MODEL?.trim() || null;
-return { apiKey, baseUrl, model, error: null };
-}
 function generateHeuristicTitle(message) {
 let text = message.replace(/```[\s\S]*?```/g, " ");
 text = text.replace(/`[^`]+`/g, " ");
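`parseFollowUps` replaces the old JSON parsing path (strip markdown fences, `JSON.parse`, expect an array): the model is now asked for three plain lines, and the parser strips numbering, bullet characters, and surrounding quotes before keeping at most three entries. A quick illustrative run with invented input, assuming the function is callable as defined above.

```js
// Illustrative input for parseFollowUps; the text is invented, the stripping
// behaviour (numbers, bullets, quotes, max 3 lines) mirrors the regexes above.
const raw = [
  '1. "How do I set OPENCLAW_GATEWAY_TOKEN?"',
  "2) - What happens if the gateway is offline?",
  "3. Can I change the default timeout?",
  "4. This fourth line is dropped."
].join("\n");

console.log(parseFollowUps(raw));
// -> [
//   "How do I set OPENCLAW_GATEWAY_TOKEN?",
//   "What happens if the gateway is offline?",
//   "Can I change the default timeout?"
// ]
```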
@@ -2212,36 +2148,21 @@ function generateHeuristicTitle(message) {
 const Route$g = createFileRoute("/api/llm-features")({
 server: {
 handlers: {
-/**
-* GET /api/llm-features - Check LLM features status
-*/
 GET: async () => {
 try {
-const
-const hasOpenRouterKey = Boolean(process.env.OPENROUTER_API_KEY?.trim());
-const hasKilocodeKey = Boolean(process.env.KILOCODE_API_KEY?.trim());
+const available = await isOpenclawAvailable();
 return json({
 ok: true,
-
-hasOpenRouterKey,
-hasKilocodeKey
+available
 });
 } catch (err) {
 return json({
 ok: false,
-
+available: false,
 error: err instanceof Error ? err.message : String(err)
 });
 }
 },
-/**
-* POST /api/llm-features - Handle LLM feature requests
-*
-* Request body should include an "action" field:
-* - action: "title" - Generate session title
-* - action: "followups" - Generate follow-up suggestions
-* - action: "test" - Test API key validity
-*/
 POST: async ({ request }) => {
 try {
 const body = await request.json().catch(() => ({}));
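The GET handler now reports a single `available` flag, derived from the new `isOpenclawAvailable()` probe, instead of per-provider key booleans. A hedged sketch of checking it from a client; only the response fields come from the handler above, the fetch call itself is illustrative.

```js
// Hypothetical status check (inside an async function); only the response
// fields are taken from the GET handler above.
const status = await fetch("/api/llm-features").then((r) => r.json());
// Gateway reachable: { ok: true, available: true }
// Probe failed:      { ok: true, available: false }
// Handler threw:     { ok: false, available: false, error: "..." }
```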
@@ -2250,45 +2171,28 @@ const Route$g = createFileRoute("/api/llm-features")({
 case "title": {
 const { message } = body;
 if (!message || typeof message !== "string" || message.trim().length < 3) {
-return json(
-
-
-
-
-
-
-return json({
-ok: false,
-error: llmConfig.error
-}, { status: 400 });
-}
-if (!llmConfig.apiKey && !llmConfig.baseUrl?.includes("localhost")) {
-const title = generateHeuristicTitle(message);
-return json({
-ok: true,
-title,
-source: "heuristic"
-});
+return json(
+{
+ok: false,
+error: "Message is required and must be at least 3 characters"
+},
+{ status: 400 }
+);
 }
 try {
-const title = await
-apiKey: llmConfig.apiKey || "",
-...llmConfig.baseUrl ? { baseUrl: llmConfig.baseUrl } : {},
-...llmConfig.model ? { model: llmConfig.model } : {}
-});
+const title = await generateTitleViaOpenclaw(message);
 return json({
 ok: true,
 title,
-source: "
+source: "openclaw"
 });
 } catch (err) {
 console.error("[llm-features] Title generation error:", err);
-const title = generateHeuristicTitle(message);
 return json({
 ok: true,
-title,
+title: generateHeuristicTitle(message),
 source: "heuristic",
-error: err instanceof Error ? err.message : "
+error: err instanceof Error ? err.message : "OpenClaw error, used heuristic"
 });
 }
 }
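With `getLlmConfig` and its header-based provider selection removed, the `title` action now takes only the message and falls back to `generateHeuristicTitle` when the gateway call fails. A hedged request sketch; the path and response fields come from the handler above, while the example message is invented.

```js
// Hypothetical client call (inside an async function); the message is invented.
const res = await fetch("/api/llm-features", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ action: "title", message: "Help me debug a websocket reconnect loop" })
});
const data = await res.json();
// Gateway reachable: { ok: true, title: "...", source: "openclaw" }
// Gateway failed:    { ok: true, title: "...", source: "heuristic", error: "..." }
// Message too short: { ok: false, error: "Message is required and must be at least 3 characters" }
```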
@@ -2301,30 +2205,12 @@ const Route$g = createFileRoute("/api/llm-features")({
 source: "heuristic"
 });
 }
-const llmConfig = getLlmConfig(request);
-if (llmConfig.error) {
-return json({
-ok: false,
-error: llmConfig.error
-}, { status: 400 });
-}
-if (!llmConfig.apiKey && !llmConfig.baseUrl?.includes("localhost")) {
-return json({
-ok: true,
-suggestions: [],
-source: "heuristic"
-});
-}
 try {
-const suggestions = await
-apiKey: llmConfig.apiKey || "",
-...llmConfig.baseUrl ? { baseUrl: llmConfig.baseUrl } : {},
-...llmConfig.model ? { model: llmConfig.model } : {}
-});
+const suggestions = await generateFollowUpsViaOpenclaw(conversationContext);
 return json({
 ok: true,
 suggestions,
-source: "
+source: "openclaw"
 });
 } catch (err) {
 console.error("[llm-features] Follow-ups generation error:", err);
@@ -2332,54 +2218,35 @@ const Route$g = createFileRoute("/api/llm-features")({
 ok: true,
 suggestions: [],
 source: "heuristic",
-error: err instanceof Error ? err.message : "
+error: err instanceof Error ? err.message : "OpenClaw error"
 });
 }
 }
 case "test": {
-const
-
-
-
-
-}, { status: 400 });
-}
-if (!llmConfig.apiKey && !llmConfig.baseUrl?.includes("localhost")) {
-return json({
-ok: false,
-error: "API key required (or use Ollama for keyless access)"
-});
-}
-try {
-const valid = await testApiKey({
-apiKey: llmConfig.apiKey || "",
-...llmConfig.baseUrl ? { baseUrl: llmConfig.baseUrl } : {},
-...llmConfig.model ? { model: llmConfig.model } : {}
-});
-return json({
-ok: true,
-valid
-});
-} catch (err) {
-return json({
-ok: true,
-valid: false,
-error: err instanceof Error ? err.message : "Test failed"
-});
-}
+const available = await isOpenclawAvailable();
+return json({
+ok: true,
+available
+});
 }
 default:
-return json(
-
-
-
+return json(
+{
+ok: false,
+error: `Unknown action: ${action}. Valid actions: title, followups, test`
+},
+{ status: 400 }
+);
 }
 } catch (err) {
 console.error("[llm-features] Error:", err);
-return json(
-
-
-
+return json(
+{
+ok: false,
+error: err instanceof Error ? err.message : String(err)
+},
+{ status: 500 }
+);
 }
 }
 }
@@ -2433,21 +2300,6 @@ const Route$f = createFileRoute("/api/history")({
 }
 }
 });
-const FOLLOW_UP_SYSTEM_PROMPT = `You are a helpful assistant that generates follow-up question suggestions.
-Given the assistant's last response, generate exactly 3 short, natural follow-up questions the user might want to ask.
-
-Rules:
-- Each suggestion should be a single, concise question (under 60 characters preferred)
-- Make them contextually relevant to the response
-- Vary the types: clarification, deeper exploration, practical application
-- Use natural, conversational language
-- Do not number them or add any prefix
-
-Output format: Return ONLY the 3 questions, one per line, nothing else.`;
-function parseFollowUps(text) {
-const lines = text.split("\n").map((line) => line.trim()).filter((line) => line.length > 0).map((line) => line.replace(/^\d+[.)\s]+/, "").trim()).map((line) => line.replace(/^[-•*]\s*/, "").trim()).map((line) => line.replace(/^["']|["']$/g, "").trim()).filter((line) => line.length > 0 && line.length < 150);
-return lines.slice(0, 3);
-}
 const Route$e = createFileRoute("/api/follow-ups")({
 server: {
 handlers: {
@@ -2458,24 +2310,11 @@ const Route$e = createFileRoute("/api/follow-ups")({
 if (!responseText || responseText.length < 30) {
 return json({ ok: true, suggestions: [] });
 }
-const
-const
-
-
-
-${truncatedResponse}` : `Assistant's response:
-${truncatedResponse}`;
-const res = await gatewayRpc("chat.complete", {
-messages: [
-{ role: "system", content: FOLLOW_UP_SYSTEM_PROMPT },
-{ role: "user", content: userPrompt }
-],
-maxTokens: 200,
-temperature: 0.7
-// Use session's default model (no hardcoding!)
-});
-const content = res.content || res.message?.content || res.choices?.[0]?.message?.content || "";
-const suggestions = parseFollowUps(content);
+const contextSummary = typeof body.contextSummary === "string" ? body.contextSummary : void 0;
+const suggestions = await generateFollowUpsViaOpenclaw(
+responseText,
+contextSummary
+);
 return json({ ok: true, suggestions });
 } catch (err) {
 console.error("[follow-ups] Error generating suggestions:", err);
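`/api/follow-ups` now delegates to the same `generateFollowUpsViaOpenclaw` helper rather than issuing a raw `gatewayRpc("chat.complete", ...)` call. A hedged request sketch; `responseText` and `contextSummary` are the body fields visible in the handler (responses shorter than 30 characters short-circuit to an empty suggestion list), and the example content is invented.

```js
// Hypothetical client call (inside an async function); field names and the
// 30-character minimum come from the handler above, the content is invented.
const res = await fetch("/api/follow-ups", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    responseText: "Export OPENCLAW_GATEWAY_TOKEN before starting the server, then restart the gateway.",
    contextSummary: "User is configuring the local OpenClaw gateway." // optional
  })
});
const data = await res.json();
// -> { ok: true, suggestions: ["...", "...", "..."] }
```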
@@ -5,7 +5,7 @@ import { HugeiconsIcon } from "@hugeicons/react";
 import { Search01Icon, Cancel01Icon, Loading03Icon } from "@hugeicons/core-free-icons";
 import { D as DialogRoot, a as DialogContent } from "./use-file-explorer-state-E6cUvMva.js";
 import { useQueryClient } from "@tanstack/react-query";
-import { c as chatQueryKeys } from "./_sessionKey-
+import { c as chatQueryKeys } from "./_sessionKey-CaFqmyhU.js";
 import { c as cn } from "./button-kI8fEIZQ.js";
 import "@base-ui/react/dialog";
 import "zustand";
@@ -26,7 +26,7 @@ import "remark-gfm";
 import "./index-B_F4DTUu.js";
 import "zustand/middleware";
 import "react-dom";
-import "./router-
+import "./router-X2L0PDPI.js";
 import "node:crypto";
 import "node:fs";
 import "node:os";