@ztimson/ai-utils 0.1.4 → 0.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +6 -640
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +312 -442
- package/dist/index.mjs.map +1 -1
- package/dist/llm.d.ts +7 -1
- package/package.json +3 -3
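Assuming both versions remain published, the same report should be reproducible locally with `npm diff --diff=@ztimson/ai-utils@0.1.4 --diff=@ztimson/ai-utils@0.1.6`.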
package/dist/index.mjs
CHANGED
@@ -1,387 +1,290 @@
-
-
-
-import {
-import {
-import {
-import
-import
-import
-
-import Path from "node:path";
-import * as tf from "@tensorflow/tfjs";
-class LLMProvider {
+import { $ as j, $Sync as T } from "@ztimson/node-utils";
+import { createWorker as q } from "tesseract.js";
+import { objectMap as b, JSONAttemptParse as w, findByProp as k, JSONSanitize as _, Http as v, consoleInterceptor as P, fn as A, ASet as O } from "@ztimson/utils";
+import { Anthropic as $ } from "@anthropic-ai/sdk";
+import { Ollama as E } from "ollama";
+import { OpenAI as M } from "openai";
+import y from "node:fs/promises";
+import S from "node:path";
+import * as g from "@tensorflow/tfjs";
+class x {
 }
-class
-constructor(
-super();
-__publicField(this, "client");
-this.ai = ai;
-this.apiToken = apiToken;
-this.model = model;
-this.client = new Anthropic$1({ apiKey: apiToken });
+class U extends x {
+constructor(t, e, n) {
+super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new $({ apiKey: e });
 }
-
-
-
-
-
-
-
-
-
-
-
-
-h[c.is_error ? "error" : "content"] = c.content;
-});
-}
-history[orgI].content = history[orgI].content.filter((c) => c.type == "text").map((c) => c.text).join("\n\n");
-}
+client;
+toStandard(t) {
+for (let e = 0; e < t.length; e++) {
+const n = e;
+typeof t[n].content != "string" && (t[n].role == "assistant" ? t[n].content.filter((s) => s.type == "tool_use").forEach((s) => {
+e++, t.splice(e, 0, { role: "tool", id: s.id, name: s.name, args: s.input });
+}) : t[n].role == "user" && t[n].content.filter((s) => s.type == "tool_result").forEach((s) => {
+const i = t.find((f) => f.id == s.tool_use_id);
+i[s.is_error ? "error" : "content"] = s.content;
+}), t[n].content = t[n].content.filter((s) => s.type == "text").map((s) => s.text).join(`
+
+`));
 }
-return
+return t.filter((e) => !!e.content);
 }
-fromStandard(
-for (let
-if (
-const
-
-
+fromStandard(t) {
+for (let e = 0; e < t.length; e++)
+if (t[e].role == "tool") {
+const n = t[e];
+t.splice(
+e,
 1,
-{ role: "assistant", content: [{ type: "tool_use", id:
-{ role: "user", content: [{ type: "tool_result", tool_use_id:
-)
-i++;
+{ role: "assistant", content: [{ type: "tool_use", id: n.id, name: n.name, input: n.args }] },
+{ role: "user", content: [{ type: "tool_result", tool_use_id: n.id, is_error: !!n.error, content: n.error || n.content }] }
+), e++;
 }
-
-return history;
+return t;
 }
-ask(
-const
-
-
-
-
-
-
-
-
-
-
-description: t.description,
+ask(t, e = {}) {
+const n = new AbortController(), s = new Promise(async (i, f) => {
+let c = this.fromStandard([...e.history || [], { role: "user", content: t }]);
+e.compress && (c = await this.ai.llm.compress(c, e.compress.max, e.compress.min, e));
+const m = {
+model: e.model || this.model,
+max_tokens: e.max_tokens || this.ai.options.max_tokens || 4096,
+system: e.system || this.ai.options.system || "",
+temperature: e.temperature || this.ai.options.temperature || 0.7,
+tools: (e.tools || this.ai.options.tools || []).map((o) => ({
+name: o.name,
+description: o.description,
 input_schema: {
 type: "object",
-properties:
-required:
+properties: o.args ? b(o.args, (r, a) => ({ ...a, required: void 0 })) : {},
+required: o.args ? Object.entries(o.args).filter((r) => r[1].required).map((r) => r[0]) : []
 },
 fn: void 0
 })),
-messages:
-stream: !!
+messages: c,
+stream: !!e.stream
 };
-let
+let l;
 do {
-
-
-
-
-if (
-
-
-
-
-
-}
-
-
-
-
-options.stream({ text });
-} else if (chunk.delta.type === "input_json_delta") {
-resp.content.at(-1).input += chunk.delta.partial_json;
-}
-} else if (chunk.type === "content_block_stop") {
-const last = resp.content.at(-1);
-if (last.input != null) last.input = last.input ? JSONAttemptParse(last.input, {}) : {};
-} else if (chunk.type === "message_stop") {
+if (l = await this.client.messages.create(m), e.stream) {
+l.content = [];
+for await (const r of l) {
+if (n.signal.aborted) break;
+if (r.type === "content_block_start")
+r.content_block.type === "text" ? l.content.push({ type: "text", text: "" }) : r.content_block.type === "tool_use" && l.content.push({ type: "tool_use", id: r.content_block.id, name: r.content_block.name, input: "" });
+else if (r.type === "content_block_delta")
+if (r.delta.type === "text_delta") {
+const a = r.delta.text;
+l.content.at(-1).text += a, e.stream({ text: a });
+} else r.delta.type === "input_json_delta" && (l.content.at(-1).input += r.delta.partial_json);
+else if (r.type === "content_block_stop") {
+const a = l.content.at(-1);
+a.input != null && (a.input = a.input ? w(a.input, {}) : {});
+} else if (r.type === "message_stop")
 break;
-}
 }
 }
-const
-if (
-
-const
-
-
-if (!tool) return { tool_use_id: toolCall.id, is_error: true, content: "Tool not found" };
+const o = l.content.filter((r) => r.type === "tool_use");
+if (o.length && !n.signal.aborted) {
+c.push({ role: "assistant", content: l.content });
+const r = await Promise.all(o.map(async (a) => {
+const u = e.tools?.find(k("name", a.name));
+if (!u) return { tool_use_id: a.id, is_error: !0, content: "Tool not found" };
 try {
-const
-return { type: "tool_result", tool_use_id:
-} catch (
-return { type: "tool_result", tool_use_id:
+const p = await u.fn(a.input, this.ai);
+return { type: "tool_result", tool_use_id: a.id, content: _(p) };
+} catch (p) {
+return { type: "tool_result", tool_use_id: a.id, is_error: !0, content: p?.message || p?.toString() || "Unknown" };
 }
 }));
-
-requestParams.messages = history;
+c.push({ role: "user", content: r }), m.messages = c;
 }
-} while (!
-
-res(this.toStandard([...history, {
+} while (!n.signal.aborted && l.content.some((o) => o.type === "tool_use"));
+e.stream && e.stream({ done: !0 }), i(this.toStandard([...c, {
 role: "assistant",
-content:
+content: l.content.filter((o) => o.type == "text").map((o) => o.text).join(`
+
+`)
 }]));
 });
-return Object.assign(
+return Object.assign(s, { abort: () => n.abort() });
 }
 }
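For orientation, each provider below converts between its SDK's wire format and one normalized history shape; the sketch below is inferred from the `toStandard`/`fromStandard` pair above, with illustrative values:

```js
// Normalized history produced by toStandard() above (field names are from the
// diff; the conversation itself is a made-up example).
const history = [
  { role: "user", content: "What time is it?" },
  // Assistant turns holding only tool calls are flattened away; each call
  // becomes its own entry, carrying `content` on success or `error` on failure.
  { role: "tool", id: "toolu_01", name: "get_datetime", args: {}, content: '"2024-01-01T00:00:00.000Z"' },
  { role: "assistant", content: "It is midnight UTC." }
];
```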
-class
-constructor(
-super();
-__publicField(this, "client");
-this.ai = ai;
-this.host = host;
-this.model = model;
-this.client = new Ollama$1({ host });
+class L extends x {
+constructor(t, e, n) {
+super(), this.ai = t, this.host = e, this.model = n, this.client = new E({ host: e });
 }
-
-
-
-
-
-
-
-}
-} else if (history[i].role == "tool") {
-const error = history[i].content.startsWith('{"error":');
-history[i] = { role: "tool", name: history[i].tool_name, args: history[i].args, [error ? "error" : "content"]: history[i].content };
+client;
+toStandard(t) {
+for (let e = 0; e < t.length; e++)
+if (t[e].role == "assistant" && t[e].tool_calls)
+t[e].content ? delete t[e].tool_calls : (t.splice(e, 1), e--);
+else if (t[e].role == "tool") {
+const n = t[e].content.startsWith('{"error":');
+t[e] = { role: "tool", name: t[e].tool_name, args: t[e].args, [n ? "error" : "content"]: t[e].content };
 }
-
-return history;
+return t;
 }
-fromStandard(
-return
-if (h.role != "tool") return h;
-return { role: "tool", tool_name: h.name, content: h.error || h.content };
-});
+fromStandard(t) {
+return t.map((e) => e.role != "tool" ? e : { role: "tool", tool_name: e.name, content: e.error || e.content });
 }
-ask(
-const
-
-
-
-
-
-
-
-}
-if (options.compress) history = await this.ai.llm.compress(history, options.compress.max, options.compress.min);
-if (options.system) history.unshift({ role: "system", content: system });
-const requestParams = {
-model: options.model || this.model,
-messages: history,
-stream: !!options.stream,
-signal: controller.signal,
+ask(t, e = {}) {
+const n = new AbortController(), s = new Promise(async (i, f) => {
+let c = e.system || this.ai.options.system, m = this.fromStandard([...e.history || [], { role: "user", content: t }]);
+m[0].roll == "system" && (c ? m.shift() : c = m.shift()), e.compress && (m = await this.ai.llm.compress(m, e.compress.max, e.compress.min)), e.system && m.unshift({ role: "system", content: c });
+const l = {
+model: e.model || this.model,
+messages: m,
+stream: !!e.stream,
+signal: n.signal,
 options: {
-temperature:
-num_predict:
+temperature: e.temperature || this.ai.options.temperature || 0.7,
+num_predict: e.max_tokens || this.ai.options.max_tokens || 4096
 },
-tools: (
+tools: (e.tools || this.ai.options.tools || []).map((r) => ({
 type: "function",
 function: {
-name:
-description:
+name: r.name,
+description: r.description,
 parameters: {
 type: "object",
-properties:
-required:
+properties: r.args ? b(r.args, (a, u) => ({ ...u, required: void 0 })) : {},
+required: r.args ? Object.entries(r.args).filter((a) => a[1].required).map((a) => a[0]) : []
 }
 }
 }))
 };
-let
+let o;
 do {
-
-
-
-
-if (controller.signal.aborted) break;
-if ((_a = chunk.message) == null ? void 0 : _a.content) {
-resp.message.content += chunk.message.content;
-options.stream({ text: chunk.message.content });
-}
-if ((_b = chunk.message) == null ? void 0 : _b.tool_calls) resp.message.tool_calls = chunk.message.tool_calls;
-if (chunk.done) break;
-}
+if (o = await this.client.chat(l), e.stream) {
+o.message = { role: "assistant", content: "", tool_calls: [] };
+for await (const r of o)
+if (n.signal.aborted || (r.message?.content && (o.message.content += r.message.content, e.stream({ text: r.message.content })), r.message?.tool_calls && (o.message.tool_calls = r.message.tool_calls), r.done)) break;
 }
-if (
-
-const
-
-
-
-const args = typeof toolCall.function.arguments === "string" ? JSONAttemptParse(toolCall.function.arguments, {}) : toolCall.function.arguments;
+if (o.message?.tool_calls?.length && !n.signal.aborted) {
+m.push(o.message);
+const r = await Promise.all(o.message.tool_calls.map(async (a) => {
+const u = (e.tools || this.ai.options.tools)?.find(k("name", a.function.name));
+if (!u) return { role: "tool", tool_name: a.function.name, content: '{"error": "Tool not found"}' };
+const p = typeof a.function.arguments == "string" ? w(a.function.arguments, {}) : a.function.arguments;
 try {
-const
-return { role: "tool", tool_name:
-} catch (
-return { role: "tool", tool_name:
+const h = await u.fn(p, this.ai);
+return { role: "tool", tool_name: a.function.name, args: p, content: _(h) };
+} catch (h) {
+return { role: "tool", tool_name: a.function.name, args: p, content: _({ error: h?.message || h?.toString() || "Unknown" }) };
 }
 }));
-
-requestParams.messages = history;
+m.push(...r), l.messages = m;
 }
-} while (!
-
-res(this.toStandard([...history, { role: "assistant", content: (_g = resp.message) == null ? void 0 : _g.content }]));
+} while (!n.signal.aborted && o.message?.tool_calls?.length);
+e.stream && e.stream({ done: !0 }), i(this.toStandard([...m, { role: "assistant", content: o.message?.content }]));
 });
-return Object.assign(
+return Object.assign(s, { abort: () => n.abort() });
 }
 }
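The Ollama provider runs the same ask/tool loop against a local host. One line worth flagging: `m[0].roll == "system"` above reads like a typo for `.role`, so the branch that folds an existing system message into the request would never match. A hypothetical invocation through the package's `Ai` export (host and model names are examples):

```js
import { Ai } from "@ztimson/ai-utils";

// Selects the Ollama provider by default; option names mirror the constructor above.
const ai = new Ai({ model: "ollama", ollama: { host: "http://localhost:11434", model: "llama3.1" } });
const req = ai.llm.ask("Summarize this repo", {
  stream: (c) => c.text && process.stdout.write(c.text) // also receives { done: true }
});
// req.abort(); // would stop generation via the AbortController above
const history = await req;
```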
-class
-constructor(
-super();
-__publicField(this, "client");
-this.ai = ai;
-this.apiToken = apiToken;
-this.model = model;
-this.client = new OpenAI({ apiKey: apiToken });
+class R extends x {
+constructor(t, e, n) {
+super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new M({ apiKey: e });
 }
-
-
-
-
-
+client;
+toStandard(t) {
+for (let e = 0; e < t.length; e++) {
+const n = t[e];
+if (n.role === "assistant" && n.tool_calls) {
+const s = n.tool_calls.map((i) => ({
 role: "tool",
-id:
-name:
-args:
+id: i.id,
+name: i.function.name,
+args: w(i.function.arguments, {})
 }));
-
-
-
-
-if (record) {
-if (h.content.includes('"error":')) record.error = h.content;
-else record.content = h.content;
-}
-history.splice(i, 1);
-i--;
+t.splice(e, 1, ...s), e += s.length - 1;
+} else if (n.role === "tool" && n.content) {
+const s = t.find((i) => n.tool_call_id == i.id);
+s && (n.content.includes('"error":') ? s.error = n.content : s.content = n.content), t.splice(e, 1), e--;
 }
 }
-return
+return t;
 }
-fromStandard(
-return
-
-
-
-
-
-
-
-
-
-content: h.error || h.content
-});
-} else {
-result.push(h);
-}
-return result;
-}, []);
+fromStandard(t) {
+return t.reduce((e, n) => (n.role === "tool" ? e.push({
+role: "assistant",
+content: null,
+tool_calls: [{ id: n.id, type: "function", function: { name: n.name, arguments: JSON.stringify(n.args) } }],
+refusal: null,
+annotations: []
+}, {
+role: "tool",
+tool_call_id: n.id,
+content: n.error || n.content
+}) : e.push(n), e), []);
 }
-ask(
-const
-
-
-
-
-
-
-
-
-
-temperature: options.temperature || this.ai.options.temperature || 0.7,
-tools: (options.tools || this.ai.options.tools || []).map((t) => ({
+ask(t, e = {}) {
+const n = new AbortController(), s = new Promise(async (i, f) => {
+let c = this.fromStandard([...e.history || [], { role: "user", content: t }]);
+e.compress && (c = await this.ai.llm.compress(c, e.compress.max, e.compress.min, e));
+const m = {
+model: e.model || this.model,
+messages: c,
+stream: !!e.stream,
+max_tokens: e.max_tokens || this.ai.options.max_tokens || 4096,
+temperature: e.temperature || this.ai.options.temperature || 0.7,
+tools: (e.tools || this.ai.options.tools || []).map((o) => ({
 type: "function",
 function: {
-name:
-description:
+name: o.name,
+description: o.description,
 parameters: {
 type: "object",
-properties:
-required:
+properties: o.args ? b(o.args, (r, a) => ({ ...a, required: void 0 })) : {},
+required: o.args ? Object.entries(o.args).filter((r) => r[1].required).map((r) => r[0]) : []
 }
 }
 }))
 };
-let
+let l;
 do {
-
-
-
-
-
-if (chunk.choices[0].delta.content) {
-options.stream({ text: chunk.choices[0].delta.content });
-}
+if (l = await this.client.chat.completions.create(m), e.stream) {
+l.choices = [];
+for await (const r of l) {
+if (n.signal.aborted) break;
+r.choices[0].delta.content && e.stream({ text: r.choices[0].delta.content });
 }
 }
-const
-if (
-
-const
-
-
-if (!tool) return { role: "tool", tool_call_id: toolCall.id, content: '{"error": "Tool not found"}' };
+const o = l.choices[0].message.tool_calls || [];
+if (o.length && !n.signal.aborted) {
+c.push(l.choices[0].message);
+const r = await Promise.all(o.map(async (a) => {
+const u = e.tools?.find(k("name", a.function.name));
+if (!u) return { role: "tool", tool_call_id: a.id, content: '{"error": "Tool not found"}' };
 try {
-const
-
-
-
-return { role: "tool", tool_call_id: toolCall.id, content: JSONSanitize({ error: (err == null ? void 0 : err.message) || (err == null ? void 0 : err.toString()) || "Unknown" }) };
+const p = w(a.function.arguments, {}), h = await u.fn(p, this.ai);
+return { role: "tool", tool_call_id: a.id, content: _(h) };
+} catch (p) {
+return { role: "tool", tool_call_id: a.id, content: _({ error: p?.message || p?.toString() || "Unknown" }) };
 }
 }));
-
-requestParams.messages = history;
+c.push(...r), m.messages = c;
 }
-} while (!
-
-res(this.toStandard([...history, { role: "assistant", content: resp.choices[0].message.content || "" }]));
+} while (!n.signal.aborted && l.choices?.[0]?.message?.tool_calls?.length);
+e.stream && e.stream({ done: !0 }), i(this.toStandard([...c, { role: "assistant", content: l.choices[0].message.content || "" }]));
 });
-return Object.assign(
+return Object.assign(s, { abort: () => n.abort() });
 }
 }
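All three providers project the same tool definition into their native schema (Anthropic's `input_schema`, Ollama's and OpenAI's `function.parameters`). The shape they consume, per the `tools` mapping above, looks like this; the weather tool itself is a made-up example:

```js
const WeatherTool = {
  name: "get_weather",
  description: "Look up current weather for a city",
  // `required` is stripped from each property and hoisted into the schema's
  // required array by the mapping above.
  args: { city: { type: "string", description: "City name", required: true } },
  // fn receives the parsed arguments plus the Ai instance; its result is
  // serialized and fed back to the model as a tool result.
  fn: async (args, ai) => ({ city: args.city, tempC: 21 })
};
```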
-class
-constructor(
-
-var _a, _b, _c;
-this.ai = ai;
-this.options = options;
-if ((_a = options.anthropic) == null ? void 0 : _a.token) this.providers.anthropic = new Anthropic(this.ai, options.anthropic.token, options.anthropic.model);
-if ((_b = options.ollama) == null ? void 0 : _b.host) this.providers.ollama = new Ollama(this.ai, options.ollama.host, options.ollama.model);
-if ((_c = options.openAi) == null ? void 0 : _c.token) this.providers.openAi = new OpenAi(this.ai, options.openAi.token, options.openAi.model);
+class J {
+constructor(t, e) {
+this.ai = t, this.options = e, e.anthropic?.token && (this.providers.anthropic = new U(this.ai, e.anthropic.token, e.anthropic.model)), e.ollama?.host && (this.providers.ollama = new L(this.ai, e.ollama.host, e.ollama.model)), e.openAi?.token && (this.providers.openAi = new R(this.ai, e.openAi.token, e.openAi.model));
 }
+providers = {};
 /**
 * Chat with LLM
 * @param {string} message Question
 * @param {LLMRequest} options Configuration options and chat history
 * @returns {{abort: () => void, response: Promise<LLMMessage[]>}} Function to abort response and chat history
 */
-ask(
-
-
-
-if (typeof options.model == "object") model = options.model;
-else model = [options.model, (_a = this.options[options.model]) == null ? void 0 : _a.model];
-}
-if (!options.model || model[1] == null) {
-if (typeof this.options.model == "object") model = this.options.model;
-else model = [this.options.model, (_b = this.options[this.options.model]) == null ? void 0 : _b.model];
-}
-if (!model[0] || !model[1]) throw new Error(`Unknown LLM provider or model: ${model[0]} / ${model[1]}`);
-return this.providers[model[0]].ask(message, { ...options, model: model[1] });
+ask(t, e = {}) {
+let n = [null, null];
+if (e.model && (typeof e.model == "object" ? n = e.model : n = [e.model, this.options[e.model]?.model]), (!e.model || n[1] == null) && (typeof this.options.model == "object" ? n = this.options.model : n = [this.options.model, this.options[this.options.model]?.model]), !n[0] || !n[1]) throw new Error(`Unknown LLM provider or model: ${n[0]} / ${n[1]}`);
+return this.providers[n[0]].ask(t, { ...e, model: n[1] });
 }
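Routing in `ask()` above accepts either a `[provider, model]` tuple or a provider key whose default model comes from the constructor options; a sketch (model names are examples, `ai` is the instance from the earlier sketch):

```js
ai.llm.ask("Hello", { model: ["anthropic", "claude-3-5-sonnet-latest"] }); // explicit pair
ai.llm.ask("Hello", { model: "openAi" }); // uses options.openAi.model
ai.llm.ask("Hello"); // falls back to options.model, then that provider's default
```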
 /**
 * Compress chat history to reduce context size
@@ -391,36 +294,39 @@ class LLM {
 * @param {LLMRequest} options LLM options
 * @returns {Promise<LLMMessage[]>} New chat history will summary at index 0
 */
-async compress(
-if (this.estimateTokens(
-let
-for (let
-
-if (tokens < min) keep++;
+async compress(t, e, n, s) {
+if (this.estimateTokens(t) < e) return t;
+let i = 0, f = 0;
+for (let o of t.toReversed())
+if (f += this.estimateTokens(o.content), f < n) i++;
 else break;
-
-
-
-
-
+if (t.length <= i) return t;
+const c = i == 0 ? [] : t.slice(-i), m = (i == 0 ? t : t.slice(0, -i)).filter((o) => o.role === "assistant" || o.role === "user");
+return [{ role: "assistant", content: `Conversation Summary: ${await this.summarize(m.map((o) => `${o.role}: ${o.content}`).join(`
+
+`), 250, s)}` }, ...c];
 }
 /**
 * Estimate variable as tokens
 * @param history Object to size
 * @returns {number} Rough token count
 */
-estimateTokens(
-const
-return Math.ceil(
+estimateTokens(t) {
+const e = JSON.stringify(t);
+return Math.ceil(e.length / 4 * 1.2);
 }
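The estimate prices roughly four characters per token with 20% headroom, and `compress(history, max, min)` only runs once the estimate passes `max`, keeping the newest messages that fit under `min` and summarizing the rest. A quick check of the same formula:

```js
const estimate = (v) => Math.ceil(JSON.stringify(v).length / 4 * 1.2); // mirrors estimateTokens above
estimate("x".repeat(9998)); // 10,000 JSON characters (quotes included) -> 3000 tokens
```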
-
-
-
-
-
+/**
+* Ask a question with JSON response
+* @param {string} message Question
+* @param {LLMRequest} options Configuration options and chat history
+* @returns {Promise<{} | {} | RegExpExecArray | null>}
+*/
+async json(t, e) {
+let n = await this.ask(t, {
+system: "Respond using a JSON blob",
+...e
 });
-
-return JSONAttemptParse(new RegExp("{[sS]*}").exec(resp[0].content), {});
+return n?.[0]?.content ? w(new RegExp("{[sS]*}").exec(n[0].content), {}) : {};
 }
 /**
 * Create a summary of some text
@@ -429,39 +335,29 @@ class LLM {
 * @param options LLM request options
 * @returns {Promise<string>} Summary
 */
-summarize(
-return this.ask(
-var _a;
-return ((_a = history.pop()) == null ? void 0 : _a.content) || null;
-});
+summarize(t, e, n) {
+return this.ask(t, { system: `Generate a brief summary <= ${e} tokens. Output nothing else`, temperature: 0.3, ...n }).then((s) => s.pop()?.content || null);
 }
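Both helpers are thin wrappers over `ask()`; usage, again assuming the `ai` instance from the earlier sketch:

```js
const data = await ai.llm.json('List three colors as {"colors": []}'); // parsed blob, or {} on failure
const tldr = await ai.llm.summarize(article, 100); // article: any long string; <= ~100-token summary or null
```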
 }
-class
-constructor(
-
-__publicField(this, "whisperModel");
-/** Large Language Models */
-__publicField(this, "llm");
-var _a;
-this.options = options;
-this.llm = new LLM(this, options);
-if ((_a = this.options.whisper) == null ? void 0 : _a.binary) this.downloadAsrModel(this.options.whisper.model);
+class V {
+constructor(t) {
+this.options = t, this.llm = new J(this, t), this.options.whisper?.binary && this.downloadAsrModel(this.options.whisper.model);
 }
+downloads = {};
+whisperModel;
+/** Large Language Models */
+llm;
 /**
 * Convert audio to text using Auditory Speech Recognition
 * @param {string} path Path to audio
 * @param model Whisper model
 * @returns {Promise<any>} Extracted text
 */
-async asr(
-
-
-
-await this.
-const name = Math.random().toString(36).substring(2, 10) + "-" + path.split("/").pop();
-const output = Path.join(this.options.whisper.path || "/tmp", name);
-await $`rm -f /tmp/${name}.txt && ${this.options.whisper.binary} -nt -np -m ${this.whisperModel} -f ${path} -otxt -of ${output}`;
-return fs.readFile(output, "utf-8").then((text) => (text == null ? void 0 : text.trim()) || null).finally(() => fs.rm(output, { force: true }).catch(() => {
+async asr(t, e) {
+if (!this.options.whisper?.binary) throw new Error("Whisper not configured");
+e || (e = this.options.whisper.model), await this.downloadAsrModel(e);
+const n = Math.random().toString(36).substring(2, 10) + "-" + t.split("/").pop(), s = S.join(this.options.whisper.path || "/tmp", n);
+return await j`rm -f /tmp/${n}.txt && ${this.options.whisper.binary} -nt -np -m ${this.whisperModel} -f ${t} -otxt -of ${s}`, y.readFile(s, "utf-8").then((i) => i?.trim() || null).finally(() => y.rm(s, { force: !0 }).catch(() => {
 }));
 }
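`asr()` shells out to a whisper.cpp binary and pulls the model file from Hugging Face on first use; a hypothetical setup (the binary path and model name are assumptions, not package defaults):

```js
const ai = new Ai({
  whisper: { binary: "/usr/local/bin/whisper-cli", path: "/tmp", model: "ggml-base.en" }
});
const transcript = await ai.asr("/recordings/memo.wav"); // trimmed text or null
```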
 /**
@@ -470,34 +366,28 @@ class Ai {
 * @param {string} model Whisper model that will be downloaded
 * @return {Promise<void>} A promise that resolves once the model is downloaded and saved locally.
 */
-async downloadAsrModel(
-
-if (
-
-
-
-this.downloads[model] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${(_d = this.options.whisper) == null ? void 0 : _d.model}.bin`).then((resp) => resp.arrayBuffer()).then((arr) => Buffer.from(arr)).then(async (buffer) => {
-await fs.writeFile(this.whisperModel, buffer);
-delete this.downloads[model];
-});
-return this.downloads[model];
+async downloadAsrModel(t) {
+if (!this.options.whisper?.binary) throw new Error("Whisper not configured");
+if (this.whisperModel = S.join(this.options.whisper?.path, this.options.whisper?.model + ".bin"), !await y.stat(this.whisperModel).then(() => !0).catch(() => !1))
+return this.downloads[t] ? this.downloads[t] : (this.downloads[t] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${this.options.whisper?.model}.bin`).then((e) => e.arrayBuffer()).then((e) => Buffer.from(e)).then(async (e) => {
+await y.writeFile(this.whisperModel, e), delete this.downloads[t];
+}), this.downloads[t]);
 }
 /**
 * Convert image to text using Optical Character Recognition
 * @param {string} path Path to image
 * @returns {{abort: Function, response: Promise<string | null>}} Abort function & Promise of extracted text
 */
-ocr(
-let
+ocr(t) {
+let e;
 return {
 abort: () => {
-
+e?.terminate();
 },
-response: new Promise(async (
-
-const { data } = await
-await
-res(data.text.trim() || null);
+response: new Promise(async (n) => {
+e = await q("eng");
+const { data: s } = await e.recognize(t);
+await e.terminate(), n(s.text.trim() || null);
 })
 };
 }
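OCR wraps a tesseract.js worker and exposes the abort/response pair documented above:

```js
const { abort, response } = ai.ocr("/images/receipt.png");
const text = await response; // extracted text or null
// abort(); // terminates the tesseract worker early if called first
```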
@@ -507,124 +397,104 @@ class Ai {
 * @param {string} searchTerms Multiple search terms to check against target
 * @returns {{avg: number, max: number, similarities: number[]}} Similarity values 0-1: 0 = unique, 1 = identical
 */
-semanticSimilarity(
-if (
-const
-
-
-
-
-
-const dotProduct = tf.dot(tensor1, tensor2);
-const magnitude1 = tf.norm(tensor1);
-const magnitude2 = tf.norm(tensor2);
-if (magnitude1.dataSync()[0] === 0 || magnitude2.dataSync()[0] === 0) return 0;
-return dotProduct.dataSync()[0] / (magnitude1.dataSync()[0] * magnitude2.dataSync()[0]);
-};
-const v = vector(target);
-const similarities = searchTerms.map((t) => vector(t)).map((refVector) => cosineSimilarity(v, refVector));
-return { avg: similarities.reduce((acc, s) => acc + s, 0) / similarities.length, max: Math.max(...similarities), similarities };
+semanticSimilarity(t, ...e) {
+if (e.length < 2) throw new Error("Requires at least 2 strings to compare");
+const n = (c, m = 10) => c.toLowerCase().split("").map((l, o) => l.charCodeAt(0) * (o + 1) % m / m).slice(0, m), s = (c, m) => {
+if (c.length !== m.length) throw new Error("Vectors must be same length");
+const l = g.tensor1d(c), o = g.tensor1d(m), r = g.dot(l, o), a = g.norm(l), u = g.norm(o);
+return a.dataSync()[0] === 0 || u.dataSync()[0] === 0 ? 0 : r.dataSync()[0] / (a.dataSync()[0] * u.dataSync()[0]);
+}, i = n(t), f = e.map((c) => n(c)).map((c) => s(i, c));
+return { avg: f.reduce((c, m) => c + m, 0) / f.length, max: Math.max(...f), similarities: f };
 }
 }
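Despite the name, the vectors compared above are 10-dimensional position-weighted character-code hashes rather than learned embeddings, so scores track surface spelling, not meaning; note also that inputs shorter than 10 characters yield shorter vectors and trip the same-length guard. Call shape:

```js
const { avg, max, similarities } = ai.semanticSimilarity(
  "invoice 2024", // target
  "invoice 2025", "receipt 9999" // at least two search terms, each 10+ chars here
);
```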
-const
+const I = {
 name: "cli",
 description: "Use the command line interface, returns any output",
-args: { command: { type: "string", description: "Command to run", required:
-fn: (
-}
-const DateTimeTool = {
+args: { command: { type: "string", description: "Command to run", required: !0 } },
+fn: (d) => j`${d.command}`
+}, Q = {
 name: "get_datetime",
 description: "Get current date and time",
 args: {},
 fn: async () => (/* @__PURE__ */ new Date()).toISOString()
-}
-const ExecTool = {
+}, X = {
 name: "exec",
 description: "Run code/scripts",
 args: {
-language: { type: "string", description: "Execution language", enum: ["cli", "node", "python"], required:
-code: { type: "string", description: "Code to execute", required:
+language: { type: "string", description: "Execution language", enum: ["cli", "node", "python"], required: !0 },
+code: { type: "string", description: "Code to execute", required: !0 }
 },
-fn: async (
+fn: async (d, t) => {
 try {
-switch (
+switch (d.type) {
 case "bash":
-return await
+return await I.fn({ command: d.code }, t);
 case "node":
-return await
-case "python":
-return await
-}
+return await N.fn({ code: d.code }, t);
+case "python":
+return await z.fn({ code: d.code }, t);
 }
-} catch (
-return { error:
+} catch (e) {
+return { error: e?.message || e.toString() };
 }
 }
-}
-const FetchTool = {
+}, Y = {
 name: "fetch",
 description: "Make HTTP request to URL",
 args: {
-url: { type: "string", description: "URL to fetch", required:
+url: { type: "string", description: "URL to fetch", required: !0 },
 method: { type: "string", description: "HTTP method to use", enum: ["GET", "POST", "PUT", "DELETE"], default: "GET" },
 headers: { type: "object", description: "HTTP headers to send", default: {} },
 body: { type: "object", description: "HTTP body to send" }
 },
-fn: (
-}
-const JSTool = {
+fn: (d) => new v({ url: d.url, headers: d.headers }).request({ method: d.method || "GET", body: d.body })
+}, N = {
 name: "exec_javascript",
 description: "Execute commonjs javascript",
 args: {
-code: { type: "string", description: "CommonJS javascript", required:
+code: { type: "string", description: "CommonJS javascript", required: !0 }
 },
-fn: async (
-const
-
-return { ...console.output, return: resp, stdout: void 0, stderr: void 0 };
+fn: async (d) => {
+const t = P(null), e = await A({ console: t }, d.code, !0).catch((n) => t.output.error.push(n));
+return { ...t.output, return: e, stdout: void 0, stderr: void 0 };
 }
-}
-const PythonTool = {
+}, z = {
 name: "exec_javascript",
 description: "Execute commonjs javascript",
 args: {
-code: { type: "string", description: "CommonJS javascript", required:
+code: { type: "string", description: "CommonJS javascript", required: !0 }
 },
-fn: async (
-}
-const SearchTool = {
+fn: async (d) => ({ result: T`python -c "${d.code}"` })
+}, Z = {
 name: "search",
 description: "Use a search engine to find relevant URLs, should be changed with fetch to scrape sources",
 args: {
-query: { type: "string", description: "Search string", required:
+query: { type: "string", description: "Search string", required: !0 },
 length: { type: "string", description: "Number of results to return", default: 5 }
 },
-fn: async (
-
-const html = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(args.query)}`, {
+fn: async (d) => {
+const t = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(d.query)}`, {
 headers: { "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)", "Accept-Language": "en-US,en;q=0.9" }
-}).then((
-let
-const
-
-let
-if (
-if (url) results.add(url);
-if (results.size >= (args.length || 5)) break;
+}).then((i) => i.text());
+let e, n = /<a .*?href="(.+?)".+?<\/a>/g;
+const s = new O();
+for (; (e = n.exec(t)) !== null; ) {
+let i = /uddg=(.+)&?/.exec(decodeURIComponent(e[1]))?.[1];
+if (i && (i = decodeURIComponent(i)), i && s.add(i), s.size >= (d.length || 5)) break;
 }
-return
+return s;
 }
 };
 export {
-Ai,
-Anthropic,
-CliTool,
-DateTimeTool,
-ExecTool,
-FetchTool,
-JSTool,
-LLM,
-PythonTool,
-SearchTool
+V as Ai,
+U as Anthropic,
+I as CliTool,
+Q as DateTimeTool,
+X as ExecTool,
+Y as FetchTool,
+N as JSTool,
+J as LLM,
+z as PythonTool,
+Z as SearchTool
 };
 //# sourceMappingURL=index.mjs.map
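Two metadata slips visible above are worth noting for downstream users: the Python tool (`z`) ships with JSTool's name and description (`exec_javascript`), which would collide on name-based lookup if both are registered, and ExecTool's `fn` switches on `d.type` with a `"bash"` case while its schema declares `language` with enum `["cli", "node", "python"]`. An end-to-end sketch of the exported API (token and model are placeholders; tools are passed per request because the Anthropic executor resolves them from the ask() options only):

```js
import { Ai, DateTimeTool, FetchTool } from "@ztimson/ai-utils";

const ai = new Ai({
  model: "anthropic",
  anthropic: { token: process.env.ANTHROPIC_KEY, model: "claude-3-5-sonnet-latest" }
});

const history = await ai.llm.ask("What day is it?", {
  tools: [DateTimeTool, FetchTool],
  compress: { max: 100000, min: 4000 } // summarize once history exceeds ~100k estimated tokens
});
console.log(history.at(-1).content); // final assistant turn
```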