netheriteai-code 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +48 -0
- package/bin/netheriteai-code.js +8 -0
- package/hi.txt +1 -0
- package/package.json +18 -0
- package/src/agent.js +285 -0
- package/src/cli.js +405 -0
- package/src/ollama.js +252 -0
- package/src/state.js +100 -0
- package/src/tools.js +455 -0
- package/src/tui.js +1490 -0
- package/src/utils.js +83 -0
package/src/cli.js
ADDED
|
@@ -0,0 +1,405 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import readline from "node:readline";
|
|
4
|
+
import { execFile } from "node:child_process";
|
|
5
|
+
import { promisify } from "node:util";
|
|
6
|
+
import { runAgentTurn } from "./agent.js";
|
|
7
|
+
import { listModels, pickDefaultModel } from "./ollama.js";
|
|
8
|
+
import { createSession, loadSessionById, saveSession, getSelectedModel, setSelectedModel } from "./state.js";
|
|
9
|
+
import { runTui } from "./tui.js";
|
|
10
|
+
import { printTable, resolveWorkspaceRoot } from "./utils.js";
|
|
11
|
+
|
|
12
|
+
const execFileAsync = promisify(execFile);
|
|
13
|
+
|
|
14
|
+
/**
 * Map a raw model identifier to its user-facing label.
 * Any model whose name contains "glm5" or "glm-5" (case-insensitive) is
 * rebranded as "NetheriteAI:Code"; every other name passes through unchanged.
 * Nullish input yields the empty string.
 */
function displayModelName(model) {
  const name = String(model || "");
  const lowered = name.toLowerCase();
  const isRebranded = lowered.includes("glm5") || lowered.includes("glm-5");
  return isRebranded ? "NetheriteAI:Code" : name;
}
|
|
19
|
+
|
|
20
|
+
/**
 * Derive a short session title from the first prompt: collapse whitespace,
 * then truncate with a "..." suffix so the result never exceeds maxLength.
 * Empty/nullish prompts produce "".
 */
function makeSessionTitle(prompt, maxLength = 36) {
  const normalized = String(prompt || "").trim().replace(/\s+/g, " ");
  if (!normalized) return "";
  if (normalized.length <= maxLength) return normalized;
  const keep = Math.max(0, maxLength - 3);
  return `${normalized.slice(0, keep)}...`;
}
|
|
26
|
+
|
|
27
|
+
/**
 * Return the text of the most recent assistant message in a session.
 * Searches the visible transcript (entries with .text) first, then the raw
 * model history (entries with .content); whitespace-only entries are skipped.
 * Returns "" when no assistant message exists.
 */
function getLastAssistantMessage(session) {
  const transcript = Array.isArray(session.transcript) ? session.transcript : [];
  for (const entry of [...transcript].reverse()) {
    if (entry?.role === "assistant" && entry?.text?.trim()) {
      return entry.text;
    }
  }

  const history = Array.isArray(session.messages) ? session.messages : [];
  for (const entry of [...history].reverse()) {
    if (entry?.role === "assistant" && entry?.content?.trim()) {
      return entry.content;
    }
  }

  return "";
}
|
|
44
|
+
|
|
45
|
+
/**
 * Copy text to the system clipboard using the first available platform tool
 * (pbcopy on macOS; clip/PowerShell on Windows; wl-copy/xclip/xsel on Linux).
 * Each candidate is tried in order; failures fall through to the next one.
 *
 * Bug fix: `child_process.execFile` has no `input` option — that belongs to
 * `execFileSync` — so the original never delivered the text and the clipboard
 * child would sit blocked on an open stdin pipe. We now write the payload to
 * the child's stdin explicitly via the promisified call's `.child` handle.
 *
 * @param {string} text - Text to place on the clipboard (nullish → "").
 * @throws {Error} when every candidate clipboard command fails.
 */
async function copyTextToClipboard(text) {
  const value = String(text || "");
  const attempts = process.platform === "darwin"
    ? [["pbcopy", []]]
    : process.platform === "win32"
      ? [["clip", []], ["powershell.exe", ["-NoProfile", "-Command", "Set-Clipboard -Value ([Console]::In.ReadToEnd())"]]]
      : [["wl-copy", []], ["xclip", ["-selection", "clipboard"]], ["xsel", ["--clipboard", "--input"]]];

  for (const [command, args] of attempts) {
    try {
      const pending = execFileAsync(command, args, {
        encoding: "utf8",
        maxBuffer: 1024 * 1024,
      });
      const child = pending.child;
      // Ignore EPIPE if the tool exits before consuming stdin; the awaited
      // promise below still reports the real failure.
      child.stdin?.on("error", () => {});
      child.stdin?.end(value);
      await pending;
      return;
    } catch {
      // try next clipboard command
    }
  }

  throw new Error("Clipboard command not available. Install wl-copy, xclip, or xsel.");
}
|
|
68
|
+
|
|
69
|
+
/**
 * Split argv into positional arguments and a flat flags map.
 * "--key" and "-key" are treated identically apart from the dash count; a
 * flag consumes the following token as its value unless that token starts
 * with "-" (or is absent), in which case the flag is set to boolean true.
 */
function parseArgs(argv) {
  const queue = [...argv];
  const positional = [];
  const flags = {};

  while (queue.length > 0) {
    const token = queue.shift();
    if (!token) continue;
    const dashes = token.startsWith("--") ? 2 : token.startsWith("-") ? 1 : 0;
    if (dashes === 0) {
      positional.push(token);
      continue;
    }
    const key = token.slice(dashes);
    const next = queue[0];
    flags[key] = next && !next.startsWith("-") ? queue.shift() : true;
  }

  return { positional, flags };
}
|
|
92
|
+
|
|
93
|
+
/**
 * Print the CLI usage/help text to stdout.
 * NOTE(review): the examples here use the "netheriteai-code" bin name while
 * error messages elsewhere in this file say "netheritecode" — confirm which
 * command name is canonical.
 */
function printHelp() {
  console.log(`NetheriteAI:Code
by hurdacu

Usage:
netheriteai-code [tui] [--model <name>] [--dir <path>]
netheriteai-code session <id>
netheriteai-code chat [--model <name>] [--dir <path>]
netheriteai-code tools

Commands:
tui Launch the terminal UI (default)
session Resume a saved session by id
chat Start a plain REPL chat session
tools List the AI toolset exposed to NetheriteAI
`);
}
|
|
110
|
+
|
|
111
|
+
/**
 * Resolve the model to use for this invocation.
 * Falls back to the hardwired default when no (truthy) model flag was given.
 * Declared async so callers can await it uniformly, though no I/O happens here.
 */
async function resolveModel(cliModel) {
  const fallback = "glm-5:cloud";
  return cliModel ? cliModel : fallback;
}
|
|
114
|
+
|
|
115
|
+
/**
 * Print the static list of built-in tool signatures exposed to the model.
 * NOTE(review): this listing is hand-maintained — confirm it stays in sync
 * with the actual tool definitions in src/tools.js.
 */
function printToolList() {
  console.log(`Built-in tools

- list_files(path=".", recursive=false)
- read_file(path)
- create_file(path, content)
- write_file(path, content)
- append_file(path, content)
- edit_file(path, oldText, newText, replaceAll=false)
- make_dir(path)
- remove_path(path, recursive=false)
- run_command(command|commandLine, args=[])
- batch_command(commands[])
- todo_create(text)
- todo_list()
- todo_complete(id)
`);
}
|
|
133
|
+
|
|
134
|
+
/**
 * Build the multi-line help text listing every supported slash command.
 * @returns {string} newline-joined help lines.
 */
function slashHelp() {
  const lines = [
    "Slash commands",
    "/help - show available slash commands",
    "/compact - forget prior AI context but keep the transcript visible",
    "/copy - copy the last AI message to the clipboard",
    "/model - show current model",
    "/model <name> - switch model",
    "/clear - clear visible messages but keep AI memory",
    "/new - clear visible messages and AI memory",
    "/pwd - show current workspace",
  ];
  return lines.join("\n");
}
|
|
147
|
+
|
|
148
|
+
/**
 * Interpret a "/command [arg]" prompt and mutate the session accordingly.
 * Returns { handled: false } for unrecognized commands so the caller can
 * forward the prompt to the agent; otherwise { handled: true } plus an
 * optional user-facing message and (for /model <name>) the chosen model.
 */
async function handleSlashCommand({ prompt, workspaceRoot, session, ui, setModel }) {
  const tokens = prompt.trim().split(/\s+/);
  const command = tokens[0];
  const arg = tokens.slice(1).join(" ").trim();

  switch (command) {
    case "/help":
      return { handled: true, message: slashHelp() };

    case "/pwd":
      return { handled: true, message: workspaceRoot };

    case "/clear": {
      // Drop only the visible transcript; the model history survives.
      session.transcript = [];
      saveSession(session);
      ui?.onClearTranscript?.();
      return { handled: true };
    }

    case "/new": {
      // Full reset: model history, transcript, and title.
      session.messages = [];
      session.transcript = [];
      session.title = "";
      saveSession(session);
      ui?.onResetSession?.();
      return { handled: true };
    }

    case "/compact": {
      // Drop only the model history; the visible transcript survives.
      session.messages = [];
      saveSession(session);
      return { handled: true, message: "Compacted session. The AI context was cleared." };
    }

    case "/copy": {
      const lastAssistantMessage = getLastAssistantMessage(session);
      if (!lastAssistantMessage) {
        return { handled: true, message: "No AI message available to copy." };
      }
      await copyTextToClipboard(lastAssistantMessage);
      return { handled: true, message: "Copied the last AI message to the clipboard." };
    }

    case "/model": {
      if (!arg) {
        // No argument: show a picker when the UI offers one, else report the
        // currently selected model.
        if (ui?.onPicker) {
          ui.onPicker({
            title: "Select Model",
            items: [{ label: "NetheriteAI:Code (glm-5:cloud)", value: "glm-5:cloud" }],
            onSelect(value) {
              setSelectedModel(value);
              setModel(value);
              return `Switched model to ${displayModelName(value)}`;
            },
          });
          return { handled: true };
        }
        return { handled: true, message: `Current model: ${displayModelName(getSelectedModel() || "not set")}` };
      }
      // Explicit model name supplied: persist and report it.
      setSelectedModel(arg);
      return { handled: true, message: `Switched model to ${displayModelName(arg)}`, model: arg };
    }

    default:
      return { handled: false };
  }
}
|
|
214
|
+
|
|
215
|
+
/**
 * Drive one interactive chat session, either through the full-screen TUI
 * (useTui=true) or a plain readline REPL. Both paths funnel user input
 * through the shared `submit` closure, which handles slash commands, runs an
 * agent turn, and persists the session after every exchange.
 *
 * @param {object} opts
 * @param {string} opts.workspaceRoot - Absolute workspace directory.
 * @param {string} opts.model - Initial model name (may be switched via /model).
 * @param {boolean} opts.useTui - Choose TUI vs. plain REPL.
 * @param {object} opts.session - Mutable session record (messages, transcript, title).
 */
async function runChatSession({ workspaceRoot, model, useTui, session }) {
  // Mutable so /model (and the TUI picker) can switch models mid-session.
  let activeModel = model;

  // Stamp the workspace onto the session and write it to disk.
  function persistSession() {
    session.workspaceRoot = workspaceRoot;
    saveSession(session);
  }

  // Process one user prompt: title the session on first use, intercept slash
  // commands, otherwise run a full agent turn and persist the new history.
  async function submit(prompt, ui) {
    if (!session.title) {
      session.title = makeSessionTitle(prompt);
      ui?.onTitleChange?.(session.title);
      persistSession();
    }

    if (prompt.trim().startsWith("/")) {
      const slash = await handleSlashCommand({
        prompt,
        workspaceRoot,
        session,
        ui,
        setModel(value) {
          activeModel = value;
        },
      });
      if (slash.handled) {
        if (slash.model) {
          activeModel = slash.model;
        }
        if (slash.message) {
          ui?.onAssistant?.(slash.message);
          if (!useTui) {
            // Plain REPL has no UI callbacks; echo to stdout instead.
            console.log(`\n${slash.message}\n`);
          }
        }
        return;
      }
    }

    const result = await runAgentTurn({
      workspaceRoot,
      model: activeModel,
      history: session.messages,
      userPrompt: prompt,
      signal: ui?.signal,
      // Fan agent events out to the matching optional UI callback.
      onEvent(event) {
        if (event.type === "status") {
          ui?.onStatus?.(event.text);
        } else if (event.type === "tool_start") {
          ui?.onToolStart?.(event);
        } else if (event.type === "tool_progress") {
          ui?.onToolProgress?.(event);
        } else if (event.type === "tool_result") {
          ui?.onToolResult?.(event);
        } else if (event.type === "assistant_delta") {
          ui?.onStream?.(event.text);
        } else if (event.type === "reasoning_delta") {
          ui?.onReasoning?.(event.text);
        } else if (event.type === "reasoning") {
          ui?.onReasoningDone?.(event.text);
        } else if (event.type === "assistant") {
          ui?.onAssistant?.(event.text);
        }
      },
    });
    session.messages = result.messages;
    persistSession();
    if (!useTui) {
      console.log(`\n${result.reply}\n`);
    }
  }

  if (useTui) {
    // TUI path: the TUI owns the event loop and calls back into `submit`.
    const exitInfo = await runTui({
      model: activeModel,
      getModel: () => activeModel,
      agentName: "NetheriteAI:Code",
      providerName: "NetheriteAI",
      version: "0.1.0",
      workspaceLabel: workspaceRoot,
      sessionId: session.id,
      initialSessionTitle: session.title,
      initialTranscript: session.transcript || [],
      onTranscriptChange(transcript) {
        session.transcript = transcript;
        persistSession();
      },
      onTitleChange(title) {
        session.title = title;
        persistSession();
      },
      onSubmit: submit,
    });
    persistSession();
    if (exitInfo?.reason === "interrupt") {
      // NOTE(review): hint says "netheritecode" but the package bin appears to
      // be "netheriteai-code" — confirm the command name.
      console.log(`\nYou can access this session by executing \u001b[38;2;83;149;255mnetheritecode session ${session.id}\u001b[0m\n`);
    }
    return;
  }

  // Plain REPL path: readline loop until "exit"/"quit".
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout,
    prompt: `${path.basename(workspaceRoot)} > `,
  });

  console.log(`NetheriteAI:Code model=${displayModelName(model)} provider=NetheriteAI by hurdacu`);
  console.log(`Workspace: ${workspaceRoot}`);
  console.log("Type `exit` to quit.\n");
  rl.prompt();

  for await (const line of rl) {
    const prompt = line.trim();
    if (!prompt) {
      rl.prompt();
      continue;
    }
    if (prompt === "exit" || prompt === "quit") {
      rl.close();
      break;
    }
    await submit(prompt);
    rl.prompt();
  }
}
|
|
340
|
+
|
|
341
|
+
/**
 * CLI entry point: parse argv, validate the workspace, resolve the session
 * and model, then dispatch to the requested sub-command — tui (default),
 * session <id>, chat, tools, help, or a bare one-shot prompt.
 */
export async function main() {
  const { positional, flags } = parseArgs(process.argv.slice(2));
  const command = positional[0] || "tui"; // default sub-command is the TUI
  const requestedWorkspaceRoot = resolveWorkspaceRoot(flags.dir || flags.d || process.cwd());

  if (!fs.existsSync(requestedWorkspaceRoot)) {
    throw new Error(`Workspace does not exist: ${requestedWorkspaceRoot}`);
  }

  if (command === "help" || flags.help || flags.h) {
    printHelp();
    return;
  }

  if (command === "tools") {
    printToolList();
    return;
  }

  let session = null;
  let workspaceRoot = requestedWorkspaceRoot;

  if (command === "session") {
    // Resume: the saved session's workspace overrides any --dir flag.
    const sessionId = positional[1];
    if (!sessionId) {
      throw new Error("Usage: netheritecode session <id>");
    }
    session = loadSessionById(sessionId);
    if (!session) {
      throw new Error(`Session not found: ${sessionId}`);
    }
    workspaceRoot = session.workspaceRoot;
  } else {
    session = createSession(workspaceRoot);
  }

  const model = await resolveModel(flags.model || flags.m);
  setSelectedModel(model);

  if (command === "chat") {
    await runChatSession({ workspaceRoot, model, useTui: false, session });
    return;
  }

  if (command === "tui" || command === "session") {
    await runChatSession({ workspaceRoot, model, useTui: true, session });
    return;
  }

  // Any other non-flag word(s): treat the whole positional list as a one-shot
  // prompt, run a single agent turn, print the reply, and persist the session.
  if (!command.startsWith("-")) {
    const prompt = positional.join(" ");
    const result = await runAgentTurn({
      workspaceRoot,
      model,
      history: session.messages,
      userPrompt: prompt,
    });
    session.messages = result.messages;
    saveSession(session);
    console.log(result.reply);
    return;
  }

  printHelp();
}
|
package/src/ollama.js
ADDED
|
@@ -0,0 +1,252 @@
|
|
|
1
|
+
import { execFile } from "node:child_process";
import { promisify } from "node:util";

// Promise-returning execFile, used by the `ollama list` CLI fallback below.
const execFileAsync = promisify(execFile);
// SECURITY NOTE(review): the fallback base URL is a hardcoded PUBLIC IP
// (176.88.249.119), not localhost. Unless OLLAMA_BASE_URL is set, every chat
// request — including user prompts and workspace content — is sent to that
// remote host. A local Ollama install would normally use
// http://127.0.0.1:11434; confirm this endpoint is intentional and trusted.
const OLLAMA_BASE_URL = process.env.OLLAMA_BASE_URL || "http://176.88.249.119:11434";
// Model names preferred (case-insensitively) by pickDefaultModel when present.
const PREFERRED_DEFAULT_MODELS = [
  "glm-5:cloud",
];
|
|
9
|
+
|
|
10
|
+
/**
 * Decide the "think" mode for a model. The NETHERITEAI_THINK_MODE env var
 * takes priority ("true"/"false" → booleans; "low"/"medium"/"high" → itself);
 * otherwise gpt-oss models get "medium" and everything else gets true.
 */
function getThinkMode(model) {
  const override = process.env.NETHERITEAI_THINK_MODE;
  switch (override) {
    case "true":
      return true;
    case "false":
      return false;
    case "low":
    case "medium":
    case "high":
      return override;
    default:
      break;
  }
  const lowered = String(model || "").toLowerCase();
  return lowered.includes("gpt-oss") ? "medium" : true;
}
|
|
19
|
+
|
|
20
|
+
/**
 * POST a JSON body to the configured API base URL and return the raw
 * Response on success.
 * @param {string} pathname - API path, e.g. "/api/chat".
 * @param {object} body - Payload, serialized with JSON.stringify.
 * @param {AbortSignal} [signal] - Optional cancellation signal for fetch.
 * @throws {Error} on a non-2xx HTTP status.
 */
async function request(pathname, body, signal) {
  const response = await fetch(`${OLLAMA_BASE_URL}${pathname}`, {
    method: "POST",
    headers: { "content-type": "application/json" },
    body: JSON.stringify(body),
    signal,
  });

  if (response.ok) {
    return response;
  }
  throw new Error(`NetheriteAI request failed: ${response.status} ${response.statusText}`);
}
|
|
34
|
+
|
|
35
|
+
/**
 * List available models. Primary source is the HTTP API (`/api/tags`); if
 * that fails for any reason, fall back to parsing the `ollama list` CLI
 * output. Throws a user-facing error only when both sources fail.
 * @returns {Promise<Array<{name, size, modifiedAt, digest, details}>>}
 */
export async function listModels() {
  try {
    const response = await fetch(`${OLLAMA_BASE_URL}/api/tags`);
    if (!response.ok) {
      throw new Error(`NetheriteAI request failed: ${response.status} ${response.statusText}`);
    }
    const payload = await response.json();
    const result = [];
    for (const entry of payload.models || []) {
      result.push({
        name: entry.name,
        size: entry.size,
        modifiedAt: entry.modified_at,
        digest: entry.digest,
        details: entry.details || {},
      });
    }
    return result;
  } catch {
    try {
      const { stdout } = await execFileAsync("ollama", ["list"], { encoding: "utf8" });
      // Skip the header row; columns are separated by runs of 2+ spaces.
      const rows = stdout.trim().split("\n").slice(1).filter(Boolean);
      const result = [];
      for (const row of rows) {
        const columns = row.trim().split(/\s{2,}/);
        result.push({
          name: columns[0] || "",
          size: columns[2] || "",
          modifiedAt: columns[3] || "",
          digest: columns[1] || "",
          details: {},
        });
      }
      return result;
    } catch {
      throw new Error(
        "Could not connect to NetheriteAI. Start it with `ollama serve`, or set OLLAMA_BASE_URL if it runs elsewhere.",
      );
    }
  }
}
|
|
70
|
+
|
|
71
|
+
/**
 * Choose a default model: a case-insensitive match from
 * PREFERRED_DEFAULT_MODELS wins; otherwise the first model whose name starts
 * with "netheriteai"; otherwise the first model listed.
 * @throws {Error} when no models are installed at all.
 */
export async function pickDefaultModel() {
  const available = await listModels();
  if (available.length === 0) {
    throw new Error("No NetheriteAI models found. Pull one with `ollama pull <model>` first.");
  }

  for (const candidate of available) {
    const matchesPreferred = PREFERRED_DEFAULT_MODELS.some(
      (preferred) => preferred.toLowerCase() === candidate.name.toLowerCase(),
    );
    if (matchesPreferred) {
      return candidate.name;
    }
  }

  const familyMatch = available.find((candidate) => candidate.name.toLowerCase().startsWith("netheriteai"));
  return familyMatch?.name || available[0].name;
}
|
|
86
|
+
|
|
87
|
+
/**
 * One non-streaming chat completion. Sends the message history (and the tool
 * schemas, when provided) to /api/chat and returns the assistant message,
 * with the server's `thinking` field surfaced as `reasoning` ("" when absent).
 */
export async function chat({ model, messages, tools, signal }) {
  const payload = { model, messages, stream: false };
  if (tools && tools.length) {
    payload.tools = tools;
  }

  const response = await request("/api/chat", payload, signal);
  const data = await response.json();
  const message = data.message;
  return {
    ...message,
    reasoning: message?.thinking || "",
  };
}
|
|
103
|
+
|
|
104
|
+
/**
 * Incremental splitter for streamed text containing <think>...</think>
 * sections: text outside the tags goes to onContent, text inside goes to
 * onReasoning. Handles tags split across chunk boundaries by holding back a
 * tag-length tail of the buffer until more input (or finish()) arrives.
 * finish() flushes any remainder — an unterminated <think> section is
 * emitted as reasoning.
 */
function createTaggedStreamParser({ onContent, onReasoning }) {
  const OPEN = "<think>";
  const CLOSE = "</think>";
  let pending = "";
  let insideThink = false;

  const sendContent = (text) => {
    if (text) onContent?.(text);
  };
  const sendReasoning = (text) => {
    if (text) onReasoning?.(text);
  };

  // Drain as much of `pending` as can be classified; when not final, keep a
  // tag-length tail in case a tag straddles the next chunk boundary.
  function drain(final) {
    while (pending.length) {
      if (insideThink) {
        const closeAt = pending.indexOf(CLOSE);
        if (closeAt !== -1) {
          sendReasoning(pending.slice(0, closeAt));
          pending = pending.slice(closeAt + CLOSE.length);
          insideThink = false;
          continue;
        }
        if (final) {
          sendReasoning(pending);
          pending = "";
          return;
        }
        if (pending.length > CLOSE.length) {
          sendReasoning(pending.slice(0, pending.length - CLOSE.length));
          pending = pending.slice(pending.length - CLOSE.length);
        }
        return;
      }

      const openAt = pending.indexOf(OPEN);
      if (openAt !== -1) {
        sendContent(pending.slice(0, openAt));
        pending = pending.slice(openAt + OPEN.length);
        insideThink = true;
        continue;
      }
      if (final) {
        sendContent(pending);
        pending = "";
        return;
      }
      if (pending.length > OPEN.length) {
        sendContent(pending.slice(0, pending.length - OPEN.length));
        pending = pending.slice(pending.length - OPEN.length);
      }
      return;
    }
  }

  return {
    push(text) {
      pending += text;
      drain(false);
    },
    finish() {
      drain(true);
    },
  };
}
|
|
170
|
+
|
|
171
|
+
/**
 * One streaming chat completion over the NDJSON /api/chat endpoint.
 * Emits visible text through onChunk and reasoning (from either the server's
 * `thinking` field or inline <think> tags) through onReasoningChunk, and
 * returns the fully accumulated assistant message.
 *
 * Improvements over the original:
 * - the frame-merging logic that was duplicated between the read loop and the
 *   tail handling is extracted into one `ingest` helper;
 * - a final `decoder.decode()` flush is added so a multi-byte UTF-8 character
 *   split exactly at the end of the stream is no longer dropped.
 *
 * @returns {Promise<{role, content, reasoning, tool_calls}>}
 */
export async function chatStream({ model, messages, tools, onChunk, onReasoningChunk, signal }) {
  const body = {
    model,
    messages,
    stream: true,
  };
  if (tools && tools.length) {
    body.tools = tools;
  }
  const response = await request("/api/chat", body, signal);

  const reader = response.body?.getReader();
  if (!reader) {
    throw new Error("NetheriteAI stream did not provide a readable body.");
  }

  const decoder = new TextDecoder();
  let buffer = "";
  const lastMessage = { role: "assistant", content: "", reasoning: "", tool_calls: [] };
  const parser = createTaggedStreamParser({
    onContent(text) {
      lastMessage.content += text;
      onChunk?.(text);
    },
    onReasoning(text) {
      lastMessage.reasoning += text;
      onReasoningChunk?.(text);
    },
  });

  // Fold one NDJSON frame into the accumulated message and fire callbacks.
  function ingest(line) {
    const message = JSON.parse(line).message || {};
    if (message.thinking) {
      lastMessage.reasoning += message.thinking;
      onReasoningChunk?.(message.thinking);
    }
    if (message.content) {
      // Content may carry inline <think> tags; route it through the parser.
      parser.push(message.content);
    }
    if (message.tool_calls?.length) {
      // NOTE(review): later frames overwrite earlier tool_calls rather than
      // appending — assumes the server sends them in a single frame; verify.
      lastMessage.tool_calls = message.tool_calls;
    }
    if (message.role) {
      lastMessage.role = message.role;
    }
  }

  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });

    let newlineIndex = buffer.indexOf("\n");
    while (newlineIndex !== -1) {
      const line = buffer.slice(0, newlineIndex).trim();
      buffer = buffer.slice(newlineIndex + 1);
      if (line) {
        ingest(line);
      }
      newlineIndex = buffer.indexOf("\n");
    }
  }

  // Flush any bytes still buffered inside the decoder (a multi-byte character
  // split at the end of the stream), then process the final unterminated line.
  buffer += decoder.decode();
  if (buffer.trim()) {
    ingest(buffer.trim());
  }

  parser.finish();

  return lastMessage;
}
|