@ztimson/ai-utils 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +7 -0
- package/README.md +106 -0
- package/dist/ai.d.ts +54 -0
- package/dist/antrhopic.d.ts +14 -0
- package/dist/index.d.ts +4 -0
- package/dist/index.js +633 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +621 -0
- package/dist/index.mjs.map +1 -0
- package/dist/llm.d.ts +110 -0
- package/dist/ollama.d.ts +14 -0
- package/dist/open-ai.d.ts +14 -0
- package/dist/provider.d.ts +7 -0
- package/dist/tools.d.ts +42 -0
- package/package.json +46 -0
package/dist/index.mjs
ADDED
@@ -0,0 +1,621 @@
var __defProp = Object.defineProperty;
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
import { $, $Sync } from "@ztimson/node-utils";
import { createWorker } from "tesseract.js";
import { objectMap, JSONAttemptParse, findByProp, JSONSanitize, Http, consoleInterceptor, fn, ASet } from "@ztimson/utils";
import { Anthropic as Anthropic$1 } from "@anthropic-ai/sdk";
import { Ollama as Ollama$1 } from "ollama";
import { OpenAI } from "openai";
import fs from "node:fs/promises";
import Path from "node:path";
import * as tf from "@tensorflow/tfjs";
class LLMProvider {
}
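// (The __defProp/__defNormalProp/__publicField helpers above are standard bundler
// output for class fields; they assign instance properties such as `this.client`
// on the classes below.)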
class Anthropic extends LLMProvider {
  constructor(ai, apiToken, model) {
    super();
    __publicField(this, "client");
    this.ai = ai;
    this.apiToken = apiToken;
    this.model = model;
    this.client = new Anthropic$1({ apiKey: apiToken });
  }
  toStandard(history) {
    for (let i = 0; i < history.length; i++) {
      const orgI = i;
      if (typeof history[orgI].content != "string") {
        if (history[orgI].role == "assistant") {
          history[orgI].content.filter((c) => c.type == "tool_use").forEach((c) => {
            i++;
            history.splice(i, 0, { role: "tool", id: c.id, name: c.name, args: c.input });
          });
        } else if (history[orgI].role == "user") {
          history[orgI].content.filter((c) => c.type == "tool_result").forEach((c) => {
            const h = history.find((h2) => h2.id == c.tool_use_id);
            h[c.is_error ? "error" : "content"] = c.content;
          });
        }
        history[orgI].content = history[orgI].content.filter((c) => c.type == "text").map((c) => c.text).join("\n\n");
      }
    }
    return history.filter((h) => !!h.content);
  }
  fromStandard(history) {
    for (let i = 0; i < history.length; i++) {
      if (history[i].role == "tool") {
        const h = history[i];
        history.splice(
          i,
          1, // Replace the standard tool record with Anthropic's tool_use/tool_result pair
          { role: "assistant", content: [{ type: "tool_use", id: h.id, name: h.name, input: h.args }] },
          { role: "user", content: [{ type: "tool_result", tool_use_id: h.id, is_error: !!h.error, content: h.error || h.content }] }
        );
        i += 1;
      }
    }
    return history;
  }
  ask(message, options = {}) {
    const controller = new AbortController();
    const response = new Promise(async (res, rej) => {
      let history = this.fromStandard([...options.history || [], { role: "user", content: message }]);
      if (options.compress) history = await this.ai.llm.compress(history, options.compress.max, options.compress.min, options);
      const requestParams = {
        model: options.model || this.model,
        max_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,
        system: options.system || this.ai.options.system || "",
        temperature: options.temperature || this.ai.options.temperature || 0.7,
        tools: (options.tools || this.ai.options.tools || []).map((t) => ({
          name: t.name,
          description: t.description,
          input_schema: {
            type: "object",
            properties: objectMap(t.args, (key, value) => ({ ...value, required: undefined })),
            required: Object.entries(t.args || {}).filter((t2) => t2[1].required).map((t2) => t2[0])
          },
          fn: undefined
        })),
        messages: history,
        stream: !!options.stream
      };
      let resp;
      do {
        resp = await this.client.messages.create(requestParams);
        if (options.stream) {
          resp.content = [];
          for await (const chunk of resp) {
            if (controller.signal.aborted) break;
            if (chunk.type === "content_block_start") {
              if (chunk.content_block.type === "text") {
                resp.content.push({ type: "text", text: "" });
              } else if (chunk.content_block.type === "tool_use") {
                resp.content.push({ type: "tool_use", id: chunk.content_block.id, name: chunk.content_block.name, input: "" });
              }
            } else if (chunk.type === "content_block_delta") {
              if (chunk.delta.type === "text_delta") {
                const text = chunk.delta.text;
                resp.content.at(-1).text += text;
                options.stream({ text });
              } else if (chunk.delta.type === "input_json_delta") {
                resp.content.at(-1).input += chunk.delta.partial_json;
              }
            } else if (chunk.type === "content_block_stop") {
              const last = resp.content.at(-1);
              if (last.input) last.input = JSONAttemptParse(last.input, {});
            } else if (chunk.type === "message_stop") {
              break;
            }
          }
        }
        const toolCalls = resp.content.filter((c) => c.type === "tool_use");
        if (toolCalls.length && !controller.signal.aborted) {
          history.push({ role: "assistant", content: resp.content });
          const results = await Promise.all(toolCalls.map(async (toolCall) => {
            const tool = options.tools?.find(findByProp("name", toolCall.name));
            if (!tool) return { type: "tool_result", tool_use_id: toolCall.id, is_error: true, content: "Tool not found" };
            try {
              const result = await tool.fn(toolCall.input, this.ai);
              return { type: "tool_result", tool_use_id: toolCall.id, content: JSONSanitize(result) };
            } catch (err) {
              return { type: "tool_result", tool_use_id: toolCall.id, is_error: true, content: err?.message || err?.toString() || "Unknown" };
            }
          }));
          history.push({ role: "user", content: results });
          requestParams.messages = history;
        }
      } while (!controller.signal.aborted && resp.content.some((c) => c.type === "tool_use"));
      if (options.stream) options.stream({ done: true });
      res(this.toStandard([...history, {
        role: "assistant",
        content: resp.content.filter((c) => c.type == "text").map((c) => c.text).join("\n\n")
      }]));
    });
    return Object.assign(response, { abort: () => controller.abort() });
  }
}
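// Usage sketch (editorial, not part of the published file): providers are normally
// reached through the Ai/LLM wrappers below, but can be driven directly. The API key
// variable and model name here are placeholders.
//
//   const provider = new Anthropic(ai, process.env.ANTHROPIC_API_KEY, "claude-3-5-haiku-latest");
//   const chat = provider.ask("Hello!", { stream: (c) => c.text && process.stdout.write(c.text) });
//   const history = await chat; // or chat.abort() to cancel mid-response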
class Ollama extends LLMProvider {
  constructor(ai, host, model) {
    super();
    __publicField(this, "client");
    this.ai = ai;
    this.host = host;
    this.model = model;
    this.client = new Ollama$1({ host });
  }
  toStandard(history) {
    for (let i = 0; i < history.length; i++) {
      if (history[i].role == "assistant" && history[i].tool_calls) {
        if (history[i].content) delete history[i].tool_calls;
        else {
          history.splice(i, 1);
          i--;
        }
      } else if (history[i].role == "tool") {
        const error = history[i].content.startsWith('{"error":');
        history[i] = { role: "tool", name: history[i].tool_name, args: history[i].args, [error ? "error" : "content"]: history[i].content };
      }
    }
    return history;
  }
  fromStandard(history) {
    return history.map((h) => {
      if (h.role != "tool") return h;
      return { role: "tool", tool_name: h.name, content: h.error || h.content };
    });
  }
  ask(message, options = {}) {
    const controller = new AbortController();
    const response = new Promise(async (res, rej) => {
      let system = options.system || this.ai.options.system;
      let history = this.fromStandard([...options.history || [], { role: "user", content: message }]);
      if (history[0].role == "system") {
        if (!system) system = history.shift().content;
        else history.shift();
      }
      if (options.compress) history = await this.ai.llm.compress(history, options.compress.max, options.compress.min);
      if (system) history.unshift({ role: "system", content: system });
      const requestParams = {
        model: options.model || this.model,
        messages: history,
        stream: !!options.stream,
        signal: controller.signal,
        options: {
          temperature: options.temperature || this.ai.options.temperature || 0.7,
          num_predict: options.max_tokens || this.ai.options.max_tokens || 4096
        },
        tools: (options.tools || this.ai.options.tools || []).map((t) => ({
          type: "function",
          function: {
            name: t.name,
            description: t.description,
            parameters: {
              type: "object",
              properties: objectMap(t.args, (key, value) => ({ ...value, required: undefined })),
              required: Object.entries(t.args || {}).filter((t2) => t2[1].required).map((t2) => t2[0])
            }
          }
        }))
      };
      let resp;
      do {
        resp = await this.client.chat(requestParams);
        if (options.stream) {
          resp.message = { role: "assistant", content: "", tool_calls: [] };
          for await (const chunk of resp) {
            if (controller.signal.aborted) break;
            if (chunk.message?.content) {
              resp.message.content += chunk.message.content;
              options.stream({ text: chunk.message.content });
            }
            if (chunk.message?.tool_calls) resp.message.tool_calls = chunk.message.tool_calls;
            if (chunk.done) break;
          }
        }
        if (resp.message?.tool_calls?.length && !controller.signal.aborted) {
          history.push(resp.message);
          const results = await Promise.all(resp.message.tool_calls.map(async (toolCall) => {
            const tool = (options.tools || this.ai.options.tools)?.find(findByProp("name", toolCall.function.name));
            if (!tool) return { role: "tool", tool_name: toolCall.function.name, content: '{"error": "Tool not found"}' };
            const args = typeof toolCall.function.arguments === "string" ? JSONAttemptParse(toolCall.function.arguments, {}) : toolCall.function.arguments;
            try {
              const result = await tool.fn(args, this.ai);
              return { role: "tool", tool_name: toolCall.function.name, args, content: JSONSanitize(result) };
            } catch (err) {
              return { role: "tool", tool_name: toolCall.function.name, args, content: JSONSanitize({ error: err?.message || err?.toString() || "Unknown" }) };
            }
          }));
          history.push(...results);
          requestParams.messages = history;
        }
      } while (!controller.signal.aborted && resp.message?.tool_calls?.length);
      if (options.stream) options.stream({ done: true });
      res(this.toStandard([...history, { role: "assistant", content: resp.message?.content }]));
    });
    return Object.assign(response, { abort: () => controller.abort() });
  }
}
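// For reference, a sketch of the provider-agnostic "standard" history that each
// provider's toStandard()/fromStandard() converts to and from (shape inferred from
// the conversions above, so treat it as approximate):
//
//   [
//     { role: "user", content: "What time is it?" },
//     { role: "tool", id: "call_1", name: "get_datetime", args: {}, content: "\"2024-01-01T00:00:00.000Z\"" },
//     { role: "assistant", content: "It is midnight UTC." }
//   ]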
class OpenAi extends LLMProvider {
  constructor(ai, apiToken, model) {
    super();
    __publicField(this, "client");
    this.ai = ai;
    this.apiToken = apiToken;
    this.model = model;
    this.client = new OpenAI({ apiKey: apiToken });
  }
  toStandard(history) {
    for (let i = 0; i < history.length; i++) {
      const h = history[i];
      if (h.role === "assistant" && h.tool_calls) {
        const tools = h.tool_calls.map((tc) => ({
          role: "tool",
          id: tc.id,
          name: tc.function.name,
          args: JSONAttemptParse(tc.function.arguments, {})
        }));
        history.splice(i, 1, ...tools);
        i += tools.length - 1;
      } else if (h.role === "tool" && h.content) {
        const record = history.find((h2) => h.tool_call_id == h2.id);
        if (record) {
          if (h.content.includes('"error":')) record.error = h.content;
          else record.content = h.content;
        }
        history.splice(i, 1);
        i--;
      }
    }
    return history;
  }
  fromStandard(history) {
    return history.reduce((result, h) => {
      if (h.role === "tool") {
        result.push({
          role: "assistant",
          content: null,
          tool_calls: [{ id: h.id, type: "function", function: { name: h.name, arguments: JSON.stringify(h.args) } }],
          refusal: null,
          annotations: []
        }, {
          role: "tool",
          tool_call_id: h.id,
          content: h.error || h.content
        });
      } else {
        result.push(h);
      }
      return result;
    }, []);
  }
  ask(message, options = {}) {
    const controller = new AbortController();
    const response = new Promise(async (res, rej) => {
      let history = this.fromStandard([...options.history || [], { role: "user", content: message }]);
      if (options.compress) history = await this.ai.llm.compress(history, options.compress.max, options.compress.min, options);
      const requestParams = {
        model: options.model || this.model,
        messages: history,
        stream: !!options.stream,
        max_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,
        temperature: options.temperature || this.ai.options.temperature || 0.7,
        tools: (options.tools || this.ai.options.tools || []).map((t) => ({
          type: "function",
          function: {
            name: t.name,
            description: t.description,
            parameters: {
              type: "object",
              properties: objectMap(t.args, (key, value) => ({ ...value, required: undefined })),
              required: Object.entries(t.args || {}).filter((t2) => t2[1].required).map((t2) => t2[0])
            }
          }
        }))
      };
      let resp;
      do {
        resp = await this.client.chat.completions.create(requestParams);
        if (options.stream) {
          // Rebuild the assistant message from the streamed deltas so the
          // tool-call loop below can run against a complete message
          const message2 = { role: "assistant", content: "", tool_calls: [] };
          for await (const chunk of resp) {
            if (controller.signal.aborted) break;
            const delta = chunk.choices[0]?.delta || {};
            if (delta.content) {
              message2.content += delta.content;
              options.stream({ text: delta.content });
            }
            if (delta.tool_calls) for (const tc of delta.tool_calls) {
              if (!message2.tool_calls[tc.index]) message2.tool_calls[tc.index] = { id: tc.id, type: "function", function: { name: "", arguments: "" } };
              if (tc.function?.name) message2.tool_calls[tc.index].function.name += tc.function.name;
              if (tc.function?.arguments) message2.tool_calls[tc.index].function.arguments += tc.function.arguments;
            }
          }
          if (!message2.tool_calls.length) delete message2.tool_calls;
          resp.choices = [{ message: message2 }];
        }
        const toolCalls = resp.choices[0].message.tool_calls || [];
        if (toolCalls.length && !controller.signal.aborted) {
          history.push(resp.choices[0].message);
          const results = await Promise.all(toolCalls.map(async (toolCall) => {
            const tool = options.tools?.find(findByProp("name", toolCall.function.name));
            if (!tool) return { role: "tool", tool_call_id: toolCall.id, content: '{"error": "Tool not found"}' };
            try {
              const args = JSONAttemptParse(toolCall.function.arguments, {});
              const result = await tool.fn(args, this.ai);
              return { role: "tool", tool_call_id: toolCall.id, content: JSONSanitize(result) };
            } catch (err) {
              return { role: "tool", tool_call_id: toolCall.id, content: JSONSanitize({ error: err?.message || err?.toString() || "Unknown" }) };
            }
          }));
          history.push(...results);
          requestParams.messages = history;
        }
      } while (!controller.signal.aborted && resp.choices?.[0]?.message?.tool_calls?.length);
      if (options.stream) options.stream({ done: true });
      res(this.toStandard([...history, { role: "assistant", content: resp.choices[0].message.content || "" }]));
    });
    return Object.assign(response, { abort: () => controller.abort() });
  }
}
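// The stream callback contract is the same across all three providers above:
// `options.stream` receives `{ text }` chunks as tokens arrive and a final
// `{ done: true }` when the response completes. A sketch (model name is a placeholder):
//
//   ai.llm.ask("Tell me a story", {
//     model: ["openAi", "gpt-4o-mini"],
//     stream: (chunk) => chunk.done ? console.log("\n[done]") : process.stdout.write(chunk.text)
//   });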
class LLM {
  constructor(ai, options) {
    __publicField(this, "providers", {});
    this.ai = ai;
    this.options = options;
    if (options.anthropic?.token) this.providers.anthropic = new Anthropic(this.ai, options.anthropic.token, options.anthropic.model);
    if (options.ollama?.host) this.providers.ollama = new Ollama(this.ai, options.ollama.host, options.ollama.model);
    if (options.openAi?.token) this.providers.openAi = new OpenAi(this.ai, options.openAi.token, options.openAi.model);
  }
  /**
   * Chat with LLM
   * @param {string} message Question
   * @param {LLMRequest} options Configuration options and chat history
   * @returns {Promise<LLMMessage[]> & {abort: () => void}} Promise of the updated chat history, with an `abort` method to cancel the response
   */
  ask(message, options = {}) {
    const model = [options.model?.[0] || this.options.model[0], options.model?.[1] || this.options.model[1]];
    if (!this.providers[model[0]]) throw new Error(`Unknown LLM provider: ${model[0]}`);
    return this.providers[model[0]].ask(message, { ...options, model: model[1] });
  }
  /**
   * Compress chat history to reduce context size
   * @param {LLMMessage[]} history Chat log that will be compressed
   * @param max Trigger compression once context is larger than max
   * @param min Summarize until context size is less than min
   * @param {LLMRequest} options LLM options
   * @returns {Promise<LLMMessage[]>} New chat history with the summary at index 0
   */
  async compress(history, max, min, options) {
    if (this.estimateTokens(history) < max) return history;
    let keep = 0, tokens = 0;
    for (let m of history.toReversed()) {
      tokens += this.estimateTokens(m.content);
      if (tokens < min) keep++;
      else break;
    }
    if (history.length <= keep) return history;
    const recent = keep == 0 ? [] : history.slice(-keep), process = (keep == 0 ? history : history.slice(0, -keep)).filter((h) => h.role === "assistant" || h.role === "user");
    const summary = await this.summarize(process.map((m) => `${m.role}: ${m.content}`).join("\n\n"), 250, options);
    return [{ role: "assistant", content: `Conversation Summary: ${summary}` }, ...recent];
  }
  /**
   * Estimate variable as tokens
   * @param history Object to size
   * @returns {number} Rough token count
   */
  estimateTokens(history) {
    const text = JSON.stringify(history);
    return Math.ceil(text.length / 4 * 1.2);
  }
  async json(message, options) {
    const resp = await this.ask(message, {
      system: "",
      ...options
    });
    const content = resp.at(-1)?.content;
    if (!content) return {};
    return JSONAttemptParse(new RegExp("{[\\s\\S]*}").exec(content)?.[0], {});
  }
  /**
   * Create a summary of some text
   * @param {string} text Text to summarize
   * @param {number} tokens Max number of tokens
   * @param options LLM request options
   * @returns {Promise<string>} Summary
   */
  summarize(text, tokens, options) {
    return this.ask(text, { system: `Generate a brief summary <= ${tokens} tokens. Output nothing else`, temperature: 0.3, ...options }).then((history) => history.pop()?.content || null);
  }
}
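// Worked example of the sizing heuristic above: estimateTokens() counts roughly 4
// characters per token plus 20% overhead, so a 1,000-character history is estimated
// at ceil(1000 / 4 * 1.2) = 300 tokens. With compress(history, 4000, 1000), nothing
// happens under ~4000 estimated tokens; past that, the newest messages totalling
// under ~1000 tokens are kept verbatim and everything older is folded into a single
// "Conversation Summary" message at index 0.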
class Ai {
  constructor(options) {
    __publicField(this, "downloads", {});
    __publicField(this, "whisperModel");
    /** Large Language Models */
    __publicField(this, "llm");
    this.options = options;
    this.llm = new LLM(this, options);
    if (this.options.whisper?.binary) this.downloadAsrModel(this.options.whisper.model);
  }
  /**
   * Convert audio to text using Automatic Speech Recognition
   * @param {string} path Path to audio
   * @param model Whisper model
   * @returns {Promise<any>} Extracted text
   */
  async asr(path, model) {
    if (!this.options.whisper?.binary) throw new Error("Whisper not configured");
    if (!model) model = this.options.whisper.model;
    await this.downloadAsrModel(model);
    const name = Math.random().toString(36).substring(2, 10) + "-" + path.split("/").pop();
    const output = Path.join(this.options.whisper.path || "/tmp", name);
    await $`rm -f /tmp/${name}.txt && ${this.options.whisper.binary} -nt -np -m ${this.whisperModel} -f ${path} -otxt -of ${output}`;
    return fs.readFile(output + ".txt", "utf-8").then((text) => text?.trim() || null).finally(() => fs.rm(output + ".txt", { force: true }).catch(() => {}));
  }
  /**
   * Downloads the specified Whisper model if it is not already present locally.
   *
   * @param {string} model Whisper model that will be downloaded
   * @return {Promise<void>} A promise that resolves once the model is downloaded and saved locally.
   */
  async downloadAsrModel(model) {
    if (!this.options.whisper?.binary) throw new Error("Whisper not configured");
    if (!model) model = this.options.whisper.model;
    this.whisperModel = Path.join(this.options.whisper.path, model + ".bin");
    if (await fs.stat(this.whisperModel).then(() => true).catch(() => false)) return;
    if (this.downloads[model]) return this.downloads[model];
    this.downloads[model] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${model}.bin`).then((resp) => resp.arrayBuffer()).then((arr) => Buffer.from(arr)).then(async (buffer) => {
      await fs.writeFile(this.whisperModel, buffer);
      delete this.downloads[model];
    });
    return this.downloads[model];
  }
  /**
   * Convert image to text using Optical Character Recognition
   * @param {string} path Path to image
   * @returns {{abort: Function, response: Promise<string | null>}} Abort function & promise of extracted text
   */
  ocr(path) {
    let worker;
    return {
      abort: () => {
        worker?.terminate();
      },
      response: new Promise(async (res) => {
        worker = await createWorker("eng");
        const { data } = await worker.recognize(path);
        await worker.terminate();
        res(data.text.trim() || null);
      })
    };
  }
  /**
   * Compare the difference between two strings using tensor math
   * @param target Text that will be checked
   * @param {string} searchTerms Multiple search terms to check against target
   * @returns {{avg: number, max: number, similarities: number[]}} Similarity values 0-1: 0 = unique, 1 = identical
   */
  semanticSimilarity(target, ...searchTerms) {
    if (searchTerms.length < 1) throw new Error("Requires at least 2 strings to compare");
    const vector = (text, dimensions = 10) => {
      return text.toLowerCase().split("").map((char, index) => char.charCodeAt(0) * (index + 1) % dimensions / dimensions).slice(0, dimensions);
    };
    const cosineSimilarity = (v1, v2) => {
      if (v1.length !== v2.length) throw new Error("Vectors must be same length");
      const tensor1 = tf.tensor1d(v1), tensor2 = tf.tensor1d(v2);
      const dotProduct = tf.dot(tensor1, tensor2);
      const magnitude1 = tf.norm(tensor1);
      const magnitude2 = tf.norm(tensor2);
      if (magnitude1.dataSync()[0] === 0 || magnitude2.dataSync()[0] === 0) return 0;
      return dotProduct.dataSync()[0] / (magnitude1.dataSync()[0] * magnitude2.dataSync()[0]);
    };
    const v = vector(target);
    const similarities = searchTerms.map((t) => vector(t)).map((refVector) => cosineSimilarity(v, refVector));
    return { avg: similarities.reduce((acc, s) => acc + s, 0) / similarities.length, max: Math.max(...similarities), similarities };
  }
}
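// Usage sketch for the OCR helper (editorial; the file path is a placeholder):
//
//   const { abort, response } = ai.ocr("./scan.png");
//   const timer = setTimeout(abort, 30_000); // terminate the tesseract worker on timeout
//   const text = await response;             // extracted text, or null if nothing was found
//   clearTimeout(timer);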
const CliTool = {
  name: "cli",
  description: "Use the command line interface, returns any output",
  args: { command: { type: "string", description: "Command to run", required: true } },
  fn: (args) => $`${args.command}`
};
const DateTimeTool = {
  name: "get_datetime",
  description: "Get current date and time",
  fn: async () => (/* @__PURE__ */ new Date()).toISOString()
};
const ExecTool = {
  name: "exec",
  description: "Run code/scripts",
  args: {
    language: { type: "string", description: "Execution language", enum: ["cli", "node", "python"], required: true },
    code: { type: "string", description: "Code to execute", required: true }
  },
  fn: async (args, ai) => {
    try {
      switch (args.language) {
        case "cli":
          return await CliTool.fn({ command: args.code }, ai);
        case "node":
          return await JSTool.fn({ code: args.code }, ai);
        case "python":
          return await PythonTool.fn({ code: args.code }, ai);
      }
    } catch (err) {
      return { error: err?.message || err.toString() };
    }
  }
};
const FetchTool = {
  name: "fetch",
  description: "Make HTTP request to URL",
  args: {
    url: { type: "string", description: "URL to fetch", required: true },
    method: { type: "string", description: "HTTP method to use", enum: ["GET", "POST", "PUT", "DELETE"], default: "GET" },
    headers: { type: "object", description: "HTTP headers to send", default: {} },
    body: { type: "object", description: "HTTP body to send" }
  },
  fn: (args) => new Http({ url: args.url, headers: args.headers }).request({ method: args.method || "GET", body: args.body })
};
const JSTool = {
  name: "exec_javascript",
  description: "Execute commonjs javascript",
  args: {
    code: { type: "string", description: "CommonJS javascript", required: true }
  },
  fn: async (args) => {
    const console = consoleInterceptor(null);
    const resp = await fn({ console }, args.code, true).catch((err) => console.output.error.push(err));
    return { ...console.output, return: resp, stdout: undefined, stderr: undefined };
  }
};
const PythonTool = {
  name: "exec_python",
  description: "Execute python",
  args: {
    code: { type: "string", description: "Python code", required: true }
  },
  fn: async (args) => ({ result: $Sync`python -c "${args.code}"` })
};
const SearchTool = {
  name: "search",
  description: "Use a search engine to find relevant URLs, should be chained with fetch to scrape sources",
  args: {
    query: { type: "string", description: "Search string", required: true },
    length: { type: "number", description: "Number of results to return", default: 5 }
  },
  fn: async (args) => {
    const html = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(args.query)}`, {
      headers: { "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)", "Accept-Language": "en-US,en;q=0.9" }
    }).then((resp) => resp.text());
    let match, regex = /<a .*?href="(.+?)".+?<\/a>/g;
    const results = new ASet();
    while ((match = regex.exec(html)) !== null) {
      let url = /uddg=(.+)&?/.exec(decodeURIComponent(match[1]))?.[1];
      if (url) url = decodeURIComponent(url);
      if (url) results.add(url);
      if (results.size >= (args.length || 5)) break;
    }
    return results;
  }
};
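// A custom tool follows the same shape as the built-ins above (sketch; the weather
// lookup is a made-up example with a stubbed result):
//
//   const WeatherTool = {
//     name: "get_weather",
//     description: "Get the current weather for a city",
//     args: { city: { type: "string", description: "City name", required: true } },
//     fn: async (args, ai) => ({ city: args.city, forecast: "sunny" })
//   };
//   ai.llm.ask("What's the weather in Toronto?", { tools: [WeatherTool, DateTimeTool] });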
export {
  Ai,
  Anthropic,
  CliTool,
  DateTimeTool,
  ExecTool,
  FetchTool,
  JSTool,
  LLM,
  PythonTool,
  SearchTool
};
//# sourceMappingURL=index.mjs.map