@ztimson/ai-utils 0.1.5 → 0.1.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ai.d.ts +4 -2
- package/dist/index.js +6 -646
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +306 -441
- package/dist/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/index.mjs
CHANGED
@@ -1,387 +1,290 @@
-
-
-
-import {
-import {
-import {
-import
-import
-import
-
-import Path from "node:path";
-import * as tf from "@tensorflow/tfjs";
-class LLMProvider {
+import { $ as j, $Sync as T } from "@ztimson/node-utils";
+import { createWorker as q } from "tesseract.js";
+import { objectMap as b, JSONAttemptParse as w, findByProp as k, JSONSanitize as _, Http as v, consoleInterceptor as P, fn as A, ASet as O } from "@ztimson/utils";
+import { Anthropic as $ } from "@anthropic-ai/sdk";
+import { Ollama as E } from "ollama";
+import { OpenAI as M } from "openai";
+import y from "node:fs/promises";
+import S from "node:path";
+import * as g from "@tensorflow/tfjs";
+class x {
 }
-class
-constructor(
-super();
-__publicField(this, "client");
-this.ai = ai;
-this.apiToken = apiToken;
-this.model = model;
-this.client = new Anthropic$1({ apiKey: apiToken });
+class U extends x {
+constructor(t, e, n) {
+super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new $({ apiKey: e });
 }
-
-
-
-
-
-
-
-
-
-
-
-
-h[c.is_error ? "error" : "content"] = c.content;
-});
-}
-history[orgI].content = history[orgI].content.filter((c) => c.type == "text").map((c) => c.text).join("\n\n");
-}
+client;
+toStandard(t) {
+for (let e = 0; e < t.length; e++) {
+const n = e;
+typeof t[n].content != "string" && (t[n].role == "assistant" ? t[n].content.filter((s) => s.type == "tool_use").forEach((s) => {
+e++, t.splice(e, 0, { role: "tool", id: s.id, name: s.name, args: s.input });
+}) : t[n].role == "user" && t[n].content.filter((s) => s.type == "tool_result").forEach((s) => {
+const i = t.find((f) => f.id == s.tool_use_id);
+i[s.is_error ? "error" : "content"] = s.content;
+}), t[n].content = t[n].content.filter((s) => s.type == "text").map((s) => s.text).join(`
+
+`));
 }
-return
+return t.filter((e) => !!e.content);
 }
-fromStandard(
-for (let
-if (
-const
-
-
+fromStandard(t) {
+for (let e = 0; e < t.length; e++)
+if (t[e].role == "tool") {
+const n = t[e];
+t.splice(
+e,
 1,
-{ role: "assistant", content: [{ type: "tool_use", id:
-{ role: "user", content: [{ type: "tool_result", tool_use_id:
-)
-i++;
+{ role: "assistant", content: [{ type: "tool_use", id: n.id, name: n.name, input: n.args }] },
+{ role: "user", content: [{ type: "tool_result", tool_use_id: n.id, is_error: !!n.error, content: n.error || n.content }] }
+), e++;
 }
-
-return history;
+return t;
 }
-ask(
-const
-
-
-
-
-
-
-
-
-
-
-description: t.description,
+ask(t, e = {}) {
+const n = new AbortController(), s = new Promise(async (i, f) => {
+let c = this.fromStandard([...e.history || [], { role: "user", content: t }]);
+e.compress && (c = await this.ai.llm.compress(c, e.compress.max, e.compress.min, e));
+const m = {
+model: e.model || this.model,
+max_tokens: e.max_tokens || this.ai.options.max_tokens || 4096,
+system: e.system || this.ai.options.system || "",
+temperature: e.temperature || this.ai.options.temperature || 0.7,
+tools: (e.tools || this.ai.options.tools || []).map((o) => ({
+name: o.name,
+description: o.description,
 input_schema: {
 type: "object",
-properties:
-required:
+properties: o.args ? b(o.args, (r, a) => ({ ...a, required: void 0 })) : {},
+required: o.args ? Object.entries(o.args).filter((r) => r[1].required).map((r) => r[0]) : []
 },
 fn: void 0
 })),
-messages:
-stream: !!
+messages: c,
+stream: !!e.stream
 };
-let
+let l;
 do {
-
-
-
-
-if (
-
-
-
-
-
-}
-
-
-
-
-options.stream({ text });
-} else if (chunk.delta.type === "input_json_delta") {
-resp.content.at(-1).input += chunk.delta.partial_json;
-}
-} else if (chunk.type === "content_block_stop") {
-const last = resp.content.at(-1);
-if (last.input != null) last.input = last.input ? JSONAttemptParse(last.input, {}) : {};
-} else if (chunk.type === "message_stop") {
+if (l = await this.client.messages.create(m), e.stream) {
+l.content = [];
+for await (const r of l) {
+if (n.signal.aborted) break;
+if (r.type === "content_block_start")
+r.content_block.type === "text" ? l.content.push({ type: "text", text: "" }) : r.content_block.type === "tool_use" && l.content.push({ type: "tool_use", id: r.content_block.id, name: r.content_block.name, input: "" });
+else if (r.type === "content_block_delta")
+if (r.delta.type === "text_delta") {
+const a = r.delta.text;
+l.content.at(-1).text += a, e.stream({ text: a });
+} else r.delta.type === "input_json_delta" && (l.content.at(-1).input += r.delta.partial_json);
+else if (r.type === "content_block_stop") {
+const a = l.content.at(-1);
+a.input != null && (a.input = a.input ? w(a.input, {}) : {});
+} else if (r.type === "message_stop")
 break;
-}
 }
 }
-const
-if (
-
-const
-
-
-if (!tool) return { tool_use_id: toolCall.id, is_error: true, content: "Tool not found" };
+const o = l.content.filter((r) => r.type === "tool_use");
+if (o.length && !n.signal.aborted) {
+c.push({ role: "assistant", content: l.content });
+const r = await Promise.all(o.map(async (a) => {
+const u = e.tools?.find(k("name", a.name));
+if (!u) return { tool_use_id: a.id, is_error: !0, content: "Tool not found" };
 try {
-const
-return { type: "tool_result", tool_use_id:
-} catch (
-return { type: "tool_result", tool_use_id:
+const p = await u.fn(a.input, this.ai);
+return { type: "tool_result", tool_use_id: a.id, content: _(p) };
+} catch (p) {
+return { type: "tool_result", tool_use_id: a.id, is_error: !0, content: p?.message || p?.toString() || "Unknown" };
 }
 }));
-
-requestParams.messages = history;
+c.push({ role: "user", content: r }), m.messages = c;
 }
-} while (!
-
-res(this.toStandard([...history, {
+} while (!n.signal.aborted && l.content.some((o) => o.type === "tool_use"));
+e.stream && e.stream({ done: !0 }), i(this.toStandard([...c, {
 role: "assistant",
-content:
+content: l.content.filter((o) => o.type == "text").map((o) => o.text).join(`
+
+`)
 }]));
 });
-return Object.assign(
+return Object.assign(s, { abort: () => n.abort() });
 }
 }
-class
-constructor(
-super();
-__publicField(this, "client");
-this.ai = ai;
-this.host = host;
-this.model = model;
-this.client = new Ollama$1({ host });
+class L extends x {
+constructor(t, e, n) {
+super(), this.ai = t, this.host = e, this.model = n, this.client = new E({ host: e });
 }
-
-
-
-
-
-
-
-}
-} else if (history[i].role == "tool") {
-const error = history[i].content.startsWith('{"error":');
-history[i] = { role: "tool", name: history[i].tool_name, args: history[i].args, [error ? "error" : "content"]: history[i].content };
+client;
+toStandard(t) {
+for (let e = 0; e < t.length; e++)
+if (t[e].role == "assistant" && t[e].tool_calls)
+t[e].content ? delete t[e].tool_calls : (t.splice(e, 1), e--);
+else if (t[e].role == "tool") {
+const n = t[e].content.startsWith('{"error":');
+t[e] = { role: "tool", name: t[e].tool_name, args: t[e].args, [n ? "error" : "content"]: t[e].content };
 }
-
-return history;
+return t;
 }
-fromStandard(
-return
-if (h.role != "tool") return h;
-return { role: "tool", tool_name: h.name, content: h.error || h.content };
-});
+fromStandard(t) {
+return t.map((e) => e.role != "tool" ? e : { role: "tool", tool_name: e.name, content: e.error || e.content });
 }
-ask(
-const
-
-
-
-
-
-
-
-}
-if (options.compress) history = await this.ai.llm.compress(history, options.compress.max, options.compress.min);
-if (options.system) history.unshift({ role: "system", content: system });
-const requestParams = {
-model: options.model || this.model,
-messages: history,
-stream: !!options.stream,
-signal: controller.signal,
+ask(t, e = {}) {
+const n = new AbortController(), s = new Promise(async (i, f) => {
+let c = e.system || this.ai.options.system, m = this.fromStandard([...e.history || [], { role: "user", content: t }]);
+m[0].roll == "system" && (c ? m.shift() : c = m.shift()), e.compress && (m = await this.ai.llm.compress(m, e.compress.max, e.compress.min)), e.system && m.unshift({ role: "system", content: c });
+const l = {
+model: e.model || this.model,
+messages: m,
+stream: !!e.stream,
+signal: n.signal,
 options: {
-temperature:
-num_predict:
+temperature: e.temperature || this.ai.options.temperature || 0.7,
+num_predict: e.max_tokens || this.ai.options.max_tokens || 4096
 },
-tools: (
+tools: (e.tools || this.ai.options.tools || []).map((r) => ({
 type: "function",
 function: {
-name:
-description:
+name: r.name,
+description: r.description,
 parameters: {
 type: "object",
-properties:
-required:
+properties: r.args ? b(r.args, (a, u) => ({ ...u, required: void 0 })) : {},
+required: r.args ? Object.entries(r.args).filter((a) => a[1].required).map((a) => a[0]) : []
 }
 }
 }))
 };
-let
+let o;
 do {
-
-
-
-
-if (controller.signal.aborted) break;
-if ((_a = chunk.message) == null ? void 0 : _a.content) {
-resp.message.content += chunk.message.content;
-options.stream({ text: chunk.message.content });
-}
-if ((_b = chunk.message) == null ? void 0 : _b.tool_calls) resp.message.tool_calls = chunk.message.tool_calls;
-if (chunk.done) break;
-}
+if (o = await this.client.chat(l), e.stream) {
+o.message = { role: "assistant", content: "", tool_calls: [] };
+for await (const r of o)
+if (n.signal.aborted || (r.message?.content && (o.message.content += r.message.content, e.stream({ text: r.message.content })), r.message?.tool_calls && (o.message.tool_calls = r.message.tool_calls), r.done)) break;
 }
-if (
-
-const
-
-
-
-const args = typeof toolCall.function.arguments === "string" ? JSONAttemptParse(toolCall.function.arguments, {}) : toolCall.function.arguments;
+if (o.message?.tool_calls?.length && !n.signal.aborted) {
+m.push(o.message);
+const r = await Promise.all(o.message.tool_calls.map(async (a) => {
+const u = (e.tools || this.ai.options.tools)?.find(k("name", a.function.name));
+if (!u) return { role: "tool", tool_name: a.function.name, content: '{"error": "Tool not found"}' };
+const p = typeof a.function.arguments == "string" ? w(a.function.arguments, {}) : a.function.arguments;
 try {
-const
-return { role: "tool", tool_name:
-} catch (
-return { role: "tool", tool_name:
+const h = await u.fn(p, this.ai);
+return { role: "tool", tool_name: a.function.name, args: p, content: _(h) };
+} catch (h) {
+return { role: "tool", tool_name: a.function.name, args: p, content: _({ error: h?.message || h?.toString() || "Unknown" }) };
 }
 }));
-
-requestParams.messages = history;
+m.push(...r), l.messages = m;
 }
-} while (!
-
-res(this.toStandard([...history, { role: "assistant", content: (_g = resp.message) == null ? void 0 : _g.content }]));
+} while (!n.signal.aborted && o.message?.tool_calls?.length);
+e.stream && e.stream({ done: !0 }), i(this.toStandard([...m, { role: "assistant", content: o.message?.content }]));
 });
-return Object.assign(
+return Object.assign(s, { abort: () => n.abort() });
 }
 }
-class
-constructor(
-super();
-__publicField(this, "client");
-this.ai = ai;
-this.apiToken = apiToken;
-this.model = model;
-this.client = new OpenAI({ apiKey: apiToken });
+class R extends x {
+constructor(t, e, n) {
+super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new M({ apiKey: e });
 }
-
-
-
-
-
+client;
+toStandard(t) {
+for (let e = 0; e < t.length; e++) {
+const n = t[e];
+if (n.role === "assistant" && n.tool_calls) {
+const s = n.tool_calls.map((i) => ({
 role: "tool",
-id:
-name:
-args:
+id: i.id,
+name: i.function.name,
+args: w(i.function.arguments, {})
 }));
-
-
-
-
-if (record) {
-if (h.content.includes('"error":')) record.error = h.content;
-else record.content = h.content;
-}
-history.splice(i, 1);
-i--;
+t.splice(e, 1, ...s), e += s.length - 1;
+} else if (n.role === "tool" && n.content) {
+const s = t.find((i) => n.tool_call_id == i.id);
+s && (n.content.includes('"error":') ? s.error = n.content : s.content = n.content), t.splice(e, 1), e--;
 }
 }
-return
+return t;
 }
-fromStandard(
-return
-
-
-
-
-
-
-
-
-
-content: h.error || h.content
-});
-} else {
-result.push(h);
-}
-return result;
-}, []);
+fromStandard(t) {
+return t.reduce((e, n) => (n.role === "tool" ? e.push({
+role: "assistant",
+content: null,
+tool_calls: [{ id: n.id, type: "function", function: { name: n.name, arguments: JSON.stringify(n.args) } }],
+refusal: null,
+annotations: []
+}, {
+role: "tool",
+tool_call_id: n.id,
+content: n.error || n.content
+}) : e.push(n), e), []);
 }
-ask(
-const
-
-
-
-
-
-
-
-
-
-temperature: options.temperature || this.ai.options.temperature || 0.7,
-tools: (options.tools || this.ai.options.tools || []).map((t) => ({
+ask(t, e = {}) {
+const n = new AbortController(), s = new Promise(async (i, f) => {
+let c = this.fromStandard([...e.history || [], { role: "user", content: t }]);
+e.compress && (c = await this.ai.llm.compress(c, e.compress.max, e.compress.min, e));
+const m = {
+model: e.model || this.model,
+messages: c,
+stream: !!e.stream,
+max_tokens: e.max_tokens || this.ai.options.max_tokens || 4096,
+temperature: e.temperature || this.ai.options.temperature || 0.7,
+tools: (e.tools || this.ai.options.tools || []).map((o) => ({
 type: "function",
 function: {
-name:
-description:
+name: o.name,
+description: o.description,
 parameters: {
 type: "object",
-properties:
-required:
+properties: o.args ? b(o.args, (r, a) => ({ ...a, required: void 0 })) : {},
+required: o.args ? Object.entries(o.args).filter((r) => r[1].required).map((r) => r[0]) : []
 }
 }
 }))
 };
-let
+let l;
 do {
-
-
-
-
-
-if (chunk.choices[0].delta.content) {
-options.stream({ text: chunk.choices[0].delta.content });
-}
+if (l = await this.client.chat.completions.create(m), e.stream) {
+l.choices = [];
+for await (const r of l) {
+if (n.signal.aborted) break;
+r.choices[0].delta.content && e.stream({ text: r.choices[0].delta.content });
 }
 }
-const
-if (
-
-const
-
-
-if (!tool) return { role: "tool", tool_call_id: toolCall.id, content: '{"error": "Tool not found"}' };
+const o = l.choices[0].message.tool_calls || [];
+if (o.length && !n.signal.aborted) {
+c.push(l.choices[0].message);
+const r = await Promise.all(o.map(async (a) => {
+const u = e.tools?.find(k("name", a.function.name));
+if (!u) return { role: "tool", tool_call_id: a.id, content: '{"error": "Tool not found"}' };
 try {
-const
-
-
-
-return { role: "tool", tool_call_id: toolCall.id, content: JSONSanitize({ error: (err == null ? void 0 : err.message) || (err == null ? void 0 : err.toString()) || "Unknown" }) };
+const p = w(a.function.arguments, {}), h = await u.fn(p, this.ai);
+return { role: "tool", tool_call_id: a.id, content: _(h) };
+} catch (p) {
+return { role: "tool", tool_call_id: a.id, content: _({ error: p?.message || p?.toString() || "Unknown" }) };
 }
 }));
-
-requestParams.messages = history;
+c.push(...r), m.messages = c;
 }
-} while (!
-
-res(this.toStandard([...history, { role: "assistant", content: resp.choices[0].message.content || "" }]));
+} while (!n.signal.aborted && l.choices?.[0]?.message?.tool_calls?.length);
+e.stream && e.stream({ done: !0 }), i(this.toStandard([...c, { role: "assistant", content: l.choices[0].message.content || "" }]));
 });
-return Object.assign(
+return Object.assign(s, { abort: () => n.abort() });
 }
 }
-class
-constructor(
-
-var _a, _b, _c;
-this.ai = ai;
-this.options = options;
-if ((_a = options.anthropic) == null ? void 0 : _a.token) this.providers.anthropic = new Anthropic(this.ai, options.anthropic.token, options.anthropic.model);
-if ((_b = options.ollama) == null ? void 0 : _b.host) this.providers.ollama = new Ollama(this.ai, options.ollama.host, options.ollama.model);
-if ((_c = options.openAi) == null ? void 0 : _c.token) this.providers.openAi = new OpenAi(this.ai, options.openAi.token, options.openAi.model);
+class J {
+constructor(t, e) {
+this.ai = t, this.options = e, e.anthropic?.token && (this.providers.anthropic = new U(this.ai, e.anthropic.token, e.anthropic.model)), e.ollama?.host && (this.providers.ollama = new L(this.ai, e.ollama.host, e.ollama.model)), e.openAi?.token && (this.providers.openAi = new R(this.ai, e.openAi.token, e.openAi.model));
 }
+providers = {};
 /**
 * Chat with LLM
 * @param {string} message Question
 * @param {LLMRequest} options Configuration options and chat history
 * @returns {{abort: () => void, response: Promise<LLMMessage[]>}} Function to abort response and chat history
 */
-ask(
-
-
-
-if (typeof options.model == "object") model = options.model;
-else model = [options.model, (_a = this.options[options.model]) == null ? void 0 : _a.model];
-}
-if (!options.model || model[1] == null) {
-if (typeof this.options.model == "object") model = this.options.model;
-else model = [this.options.model, (_b = this.options[this.options.model]) == null ? void 0 : _b.model];
-}
-if (!model[0] || !model[1]) throw new Error(`Unknown LLM provider or model: ${model[0]} / ${model[1]}`);
-return this.providers[model[0]].ask(message, { ...options, model: model[1] });
+ask(t, e = {}) {
+let n = [null, null];
+if (e.model && (typeof e.model == "object" ? n = e.model : n = [e.model, this.options[e.model]?.model]), (!e.model || n[1] == null) && (typeof this.options.model == "object" ? n = this.options.model : n = [this.options.model, this.options[this.options.model]?.model]), !n[0] || !n[1]) throw new Error(`Unknown LLM provider or model: ${n[0]} / ${n[1]}`);
+return this.providers[n[0]].ask(t, { ...e, model: n[1] });
 }
 /**
 * Compress chat history to reduce context size
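In the hunk above, the three rewritten providers (U = Anthropic, L = Ollama, R = OpenAI) now share the x base class and all return their Promise with an abort() method attached via Object.assign(s, { abort: () => n.abort() }); stream callbacks receive { text } deltas followed by a final { done: true }. A minimal consumption sketch, assuming an Anthropic token and model name (both illustrative; the exact option types live in package/dist/ai.d.ts):

    import { Ai } from "@ztimson/ai-utils";

    const ai = new Ai({
      model: "anthropic", // provider key; LLM.ask() resolves it to a [provider, model] pair
      anthropic: { token: process.env.ANTHROPIC_TOKEN, model: "claude-sonnet-4-5" } // assumed values
    });
    const req = ai.llm.ask("Hello!", {
      stream: (chunk) => { if (chunk.text) process.stdout.write(chunk.text); } // { done: true } arrives last
    });
    const timer = setTimeout(() => req.abort(), 30000); // abort() also halts the tool-call loop
    const history = await req; // resolves to the normalized LLMMessage[] history
    clearTimeout(timer);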
@@ -391,27 +294,26 @@ class LLM {
 * @param {LLMRequest} options LLM options
 * @returns {Promise<LLMMessage[]>} New chat history will summary at index 0
 */
-async compress(
-if (this.estimateTokens(
-let
-for (let
-
-if (tokens < min) keep++;
+async compress(t, e, n, s) {
+if (this.estimateTokens(t) < e) return t;
+let i = 0, f = 0;
+for (let o of t.toReversed())
+if (f += this.estimateTokens(o.content), f < n) i++;
 else break;
-
-
-
-
-
+if (t.length <= i) return t;
+const c = i == 0 ? [] : t.slice(-i), m = (i == 0 ? t : t.slice(0, -i)).filter((o) => o.role === "assistant" || o.role === "user");
+return [{ role: "assistant", content: `Conversation Summary: ${await this.summarize(m.map((o) => `${o.role}: ${o.content}`).join(`
+
+`), 250, s)}` }, ...c];
 }
 /**
 * Estimate variable as tokens
 * @param history Object to size
 * @returns {number} Rough token count
 */
-estimateTokens(
-const
-return Math.ceil(
+estimateTokens(t) {
+const e = JSON.stringify(t);
+return Math.ceil(e.length / 4 * 1.2);
 }
 /**
 * Ask a question with JSON response
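The compression path above is simpler than the minified body suggests: estimateTokens() approximates any value as JSON-stringified length / 4 characters per token with 20% headroom, and compress(history, max, min) is a no-op below max, otherwise keeping the newest messages that fit within min and replacing everything older with a ~250-token summary at index 0. A worked example of the estimate (numbers illustrative):

    // estimateTokens(h) === Math.ceil(JSON.stringify(h).length / 4 * 1.2)
    // A history that serializes to 10,000 characters:
    Math.ceil(10000 / 4 * 1.2); // => 3000 estimated tokens
    // ask(msg, { compress: { max: 2000, min: 500 } }) would therefore summarize this
    // history, keeping only the newest messages that fit in roughly 500 tokens.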
@@ -419,14 +321,12 @@ class LLM {
 * @param {LLMRequest} options Configuration options and chat history
 * @returns {Promise<{} | {} | RegExpExecArray | null>}
 */
-async json(
-
-let resp = await this.ask(message, {
+async json(t, e) {
+let n = await this.ask(t, {
 system: "Respond using a JSON blob",
-...
+...e
 });
-
-return JSONAttemptParse(new RegExp("{[sS]*}").exec(resp[0].content), {});
+return n?.[0]?.content ? w(new RegExp("{[sS]*}").exec(n[0].content), {}) : {};
 }
 /**
 * Create a summary of some text
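json() now resolves to {} when the response carries no content, where 0.1.5 would throw dereferencing resp[0].content. A hedged usage sketch (the parsed shape depends entirely on the model's reply):

    const data = await ai.llm.json('Reply with a JSON blob like {"primes": [2, 3, 5]}');
    // ask() runs with system "Respond using a JSON blob"; the first {...} blob found in
    // the content is parsed leniently via JSONAttemptParse, so malformed JSON also yields {}.
    console.log(data.primes ?? "no JSON in reply");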
@@ -435,39 +335,29 @@ class LLM {
 * @param options LLM request options
 * @returns {Promise<string>} Summary
 */
-summarize(
-return this.ask(
-var _a;
-return ((_a = history.pop()) == null ? void 0 : _a.content) || null;
-});
+summarize(t, e, n) {
+return this.ask(t, { system: `Generate a brief summary <= ${e} tokens. Output nothing else`, temperature: 0.3, ...n }).then((s) => s.pop()?.content || null);
 }
 }
-class
-constructor(
-
-__publicField(this, "whisperModel");
-/** Large Language Models */
-__publicField(this, "llm");
-var _a;
-this.options = options;
-this.llm = new LLM(this, options);
-if ((_a = this.options.whisper) == null ? void 0 : _a.binary) this.downloadAsrModel(this.options.whisper.model);
+class V {
+constructor(t) {
+this.options = t, this.llm = new J(this, t), this.options.whisper?.binary && (this.whisperModel = S.join(this.options.whisper?.path, this.options.whisper?.model + this.options.whisper?.model.endsWith(".bin") ? "" : ".bin"), this.downloadAsrModel());
 }
+downloads = {};
+whisperModel;
+/** Large Language Models */
+llm;
 /**
 * Convert audio to text using Auditory Speech Recognition
 * @param {string} path Path to audio
 * @param model Whisper model
 * @returns {Promise<any>} Extracted text
 */
-async asr(
-
-
-
-await this.
-const name = Math.random().toString(36).substring(2, 10) + "-" + path.split("/").pop();
-const output = Path.join(this.options.whisper.path || "/tmp", name);
-await $`rm -f /tmp/${name}.txt && ${this.options.whisper.binary} -nt -np -m ${this.whisperModel} -f ${path} -otxt -of ${output}`;
-return fs.readFile(output, "utf-8").then((text) => (text == null ? void 0 : text.trim()) || null).finally(() => fs.rm(output, { force: true }).catch(() => {
+async asr(t, e) {
+if (!this.options.whisper?.binary) throw new Error("Whisper not configured");
+e || (e = this.options.whisper.model), await this.downloadAsrModel();
+const n = Math.random().toString(36).substring(2, 10) + "-" + t.split("/").pop() + ".txt", s = S.join(this.options.whisper.temp || "/tmp", n);
+return await j`rm -f ${s} && ${this.options.whisper.binary} -nt -np -m ${this.whisperModel} -f ${t} -otxt -of ${s}`, y.readFile(s, "utf-8").then((i) => i?.trim() || null).finally(() => y.rm(s, { force: !0 }).catch(() => {
 }));
 }
 /**
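The ASR path now resolves whisperModel once in the constructor, fails fast with "Whisper not configured", and writes its throwaway transcript under whisper.temp instead of whisper.path. A configuration sketch implied by the new code (all paths illustrative):

    const ai = new Ai({
      whisper: {
        binary: "/usr/local/bin/whisper-cli", // assumed path to a whisper.cpp build
        model: "ggml-base.en",                // fetched from huggingface.co/ggerganov/whisper.cpp when missing
        path: "/var/models",                  // where the downloaded model is stored
        temp: "/tmp"                          // where the intermediate .txt transcript goes
      }
    });
    const transcript = await ai.asr("/recordings/meeting.wav"); // string, or null if empty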
@@ -476,34 +366,29 @@ class Ai {
 * @param {string} model Whisper model that will be downloaded
 * @return {Promise<void>} A promise that resolves once the model is downloaded and saved locally.
 */
-async downloadAsrModel(
-
-if (
-
-
-
-this.downloads[
-await fs.writeFile(this.whisperModel, buffer);
-delete this.downloads[model];
-});
-return this.downloads[model];
+async downloadAsrModel() {
+if (!this.options.whisper?.binary) throw new Error("Whisper not configured");
+if (await y.stat(this.whisperModel).then(() => !0).catch(() => !1)) return;
+const t = this.whisperModel.split("/").at(-1);
+return this.downloads[t] ? this.downloads[t] : (this.downloads[t] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${t}`).then((e) => e.arrayBuffer()).then((e) => Buffer.from(e)).then(async (e) => {
+await y.writeFile(this.whisperModel, e), delete this.downloads[t];
+}), this.downloads[t]);
 }
 /**
 * Convert image to text using Optical Character Recognition
 * @param {string} path Path to image
 * @returns {{abort: Function, response: Promise<string | null>}} Abort function & Promise of extracted text
 */
-ocr(
-let
+ocr(t) {
+let e;
 return {
 abort: () => {
-
+e?.terminate();
 },
-response: new Promise(async (
-
-const { data } = await
-await
-res(data.text.trim() || null);
+response: new Promise(async (n) => {
+e = await q("eng");
+const { data: s } = await e.recognize(t);
+await e.terminate(), n(s.text.trim() || null);
 })
 };
 }
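downloadAsrModel() now skips the fetch when the model file already exists and deduplicates concurrent requests through the downloads map, while ocr() keeps its { abort, response } shape but makes abort() actually terminate the tesseract.js worker. Usage is unchanged; a brief sketch:

    const { abort, response } = ai.ocr("/images/receipt.png");
    const timer = setTimeout(abort, 15000); // abort() calls worker.terminate()
    const text = await response;            // recognized text, or null
    clearTimeout(timer);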
@@ -513,124 +398,104 @@ class Ai {
 * @param {string} searchTerms Multiple search terms to check against target
 * @returns {{avg: number, max: number, similarities: number[]}} Similarity values 0-1: 0 = unique, 1 = identical
 */
-semanticSimilarity(
-if (
-const
-
-
-
-
-
-const dotProduct = tf.dot(tensor1, tensor2);
-const magnitude1 = tf.norm(tensor1);
-const magnitude2 = tf.norm(tensor2);
-if (magnitude1.dataSync()[0] === 0 || magnitude2.dataSync()[0] === 0) return 0;
-return dotProduct.dataSync()[0] / (magnitude1.dataSync()[0] * magnitude2.dataSync()[0]);
-};
-const v = vector(target);
-const similarities = searchTerms.map((t) => vector(t)).map((refVector) => cosineSimilarity(v, refVector));
-return { avg: similarities.reduce((acc, s) => acc + s, 0) / similarities.length, max: Math.max(...similarities), similarities };
+semanticSimilarity(t, ...e) {
+if (e.length < 2) throw new Error("Requires at least 2 strings to compare");
+const n = (c, m = 10) => c.toLowerCase().split("").map((l, o) => l.charCodeAt(0) * (o + 1) % m / m).slice(0, m), s = (c, m) => {
+if (c.length !== m.length) throw new Error("Vectors must be same length");
+const l = g.tensor1d(c), o = g.tensor1d(m), r = g.dot(l, o), a = g.norm(l), u = g.norm(o);
+return a.dataSync()[0] === 0 || u.dataSync()[0] === 0 ? 0 : r.dataSync()[0] / (a.dataSync()[0] * u.dataSync()[0]);
+}, i = n(t), f = e.map((c) => n(c)).map((c) => s(i, c));
+return { avg: f.reduce((c, m) => c + m, 0) / f.length, max: Math.max(...f), similarities: f };
 }
 }
-const
+const I = {
 name: "cli",
 description: "Use the command line interface, returns any output",
-args: { command: { type: "string", description: "Command to run", required:
-fn: (
-}
-const DateTimeTool = {
+args: { command: { type: "string", description: "Command to run", required: !0 } },
+fn: (d) => j`${d.command}`
+}, Q = {
 name: "get_datetime",
 description: "Get current date and time",
 args: {},
 fn: async () => (/* @__PURE__ */ new Date()).toISOString()
-}
-const ExecTool = {
+}, X = {
 name: "exec",
 description: "Run code/scripts",
 args: {
-language: { type: "string", description: "Execution language", enum: ["cli", "node", "python"], required:
-code: { type: "string", description: "Code to execute", required:
+language: { type: "string", description: "Execution language", enum: ["cli", "node", "python"], required: !0 },
+code: { type: "string", description: "Code to execute", required: !0 }
 },
-fn: async (
+fn: async (d, t) => {
 try {
-switch (
+switch (d.type) {
 case "bash":
-return await
+return await I.fn({ command: d.code }, t);
 case "node":
-return await
-case "python":
-return await
-}
+return await N.fn({ code: d.code }, t);
+case "python":
+return await W.fn({ code: d.code }, t);
 }
-} catch (
-return { error:
+} catch (e) {
+return { error: e?.message || e.toString() };
 }
 }
-}
-const FetchTool = {
+}, Y = {
 name: "fetch",
 description: "Make HTTP request to URL",
 args: {
-url: { type: "string", description: "URL to fetch", required:
+url: { type: "string", description: "URL to fetch", required: !0 },
 method: { type: "string", description: "HTTP method to use", enum: ["GET", "POST", "PUT", "DELETE"], default: "GET" },
 headers: { type: "object", description: "HTTP headers to send", default: {} },
 body: { type: "object", description: "HTTP body to send" }
 },
-fn: (
-}
-const JSTool = {
+fn: (d) => new v({ url: d.url, headers: d.headers }).request({ method: d.method || "GET", body: d.body })
+}, N = {
 name: "exec_javascript",
 description: "Execute commonjs javascript",
 args: {
-code: { type: "string", description: "CommonJS javascript", required:
+code: { type: "string", description: "CommonJS javascript", required: !0 }
 },
-fn: async (
-const
-
-return { ...console.output, return: resp, stdout: void 0, stderr: void 0 };
+fn: async (d) => {
+const t = P(null), e = await A({ console: t }, d.code, !0).catch((n) => t.output.error.push(n));
+return { ...t.output, return: e, stdout: void 0, stderr: void 0 };
 }
-}
-const PythonTool = {
+}, W = {
 name: "exec_javascript",
 description: "Execute commonjs javascript",
 args: {
-code: { type: "string", description: "CommonJS javascript", required:
+code: { type: "string", description: "CommonJS javascript", required: !0 }
 },
-fn: async (
-}
-const SearchTool = {
+fn: async (d) => ({ result: T`python -c "${d.code}"` })
+}, Z = {
 name: "search",
 description: "Use a search engine to find relevant URLs, should be changed with fetch to scrape sources",
 args: {
-query: { type: "string", description: "Search string", required:
+query: { type: "string", description: "Search string", required: !0 },
 length: { type: "string", description: "Number of results to return", default: 5 }
 },
-fn: async (
-
-const html = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(args.query)}`, {
+fn: async (d) => {
+const t = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(d.query)}`, {
 headers: { "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)", "Accept-Language": "en-US,en;q=0.9" }
-}).then((
-let
-const
-
-let
-if (
-if (url) results.add(url);
-if (results.size >= (args.length || 5)) break;
+}).then((i) => i.text());
+let e, n = /<a .*?href="(.+?)".+?<\/a>/g;
+const s = new O();
+for (; (e = n.exec(t)) !== null; ) {
+let i = /uddg=(.+)&?/.exec(decodeURIComponent(e[1]))?.[1];
+if (i && (i = decodeURIComponent(i)), i && s.add(i), s.size >= (d.length || 5)) break;
 }
-return
+return s;
 }
 };
 export {
-Ai,
-Anthropic,
-CliTool,
-DateTimeTool,
-ExecTool,
-FetchTool,
-JSTool,
-LLM,
-PythonTool,
-SearchTool
+V as Ai,
+U as Anthropic,
+I as CliTool,
+Q as DateTimeTool,
+X as ExecTool,
+Y as FetchTool,
+N as JSTool,
+J as LLM,
+W as PythonTool,
+Z as SearchTool
 };
 //# sourceMappingURL=index.mjs.map
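Despite the wholesale identifier renaming (V as Ai, J as LLM, and so on), the public export names are identical to 0.1.5, so existing imports are unaffected. semanticSimilarity is now variadic and throws unless at least two comparison strings are supplied; the scores remain cosine similarities, a·b / (|a| |b|), over a 10-dimensional character hash. A short sketch of the call shape:

    import { Ai } from "@ztimson/ai-utils";
    const { avg, max, similarities } = new Ai({}).semanticSimilarity("kitten", "kitty", "sitting");
    // each similarity is in [0, 1]: 0 = unique, 1 = identical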