@ztimson/ai-utils 0.5.6 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ai.d.ts +5 -13
- package/dist/audio.d.ts +8 -4
- package/dist/index.js +37 -18
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +371 -316
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
|
@@ -1,244 +1,243 @@
|
|
|
1
|
-
import * as
|
|
2
|
-
import { objectMap as
|
|
3
|
-
import { Anthropic as
|
|
4
|
-
import { OpenAI as
|
|
5
|
-
import { Worker as
|
|
6
|
-
import { fileURLToPath as
|
|
7
|
-
import { join as
|
|
8
|
-
import { spawn as
|
|
9
|
-
import
|
|
10
|
-
import L from "node:path";
|
|
1
|
+
import * as j from "node:os";
|
|
2
|
+
import { objectMap as _, JSONAttemptParse as g, findByProp as x, JSONSanitize as b, clean as T, Http as P, consoleInterceptor as q, fn as $, ASet as M } from "@ztimson/utils";
|
|
3
|
+
import { Anthropic as E } from "@anthropic-ai/sdk";
|
|
4
|
+
import { OpenAI as O } from "openai";
|
|
5
|
+
import { Worker as A } from "worker_threads";
|
|
6
|
+
import { fileURLToPath as R } from "url";
|
|
7
|
+
import { join as v, dirname as U } from "path";
|
|
8
|
+
import { spawn as w } from "node:child_process";
|
|
9
|
+
import { pipeline as L } from "@xenova/transformers";
|
|
11
10
|
import { createWorker as N } from "tesseract.js";
|
|
12
11
|
import "./embedder.mjs";
|
|
13
|
-
import * as
|
|
14
|
-
import { $ as
|
|
15
|
-
class
|
|
12
|
+
import * as z from "cheerio";
|
|
13
|
+
import { $ as C, $Sync as W } from "@ztimson/node-utils";
|
|
14
|
+
class S {
|
|
16
15
|
}
|
|
17
|
-
class
|
|
18
|
-
constructor(
|
|
19
|
-
super(), this.ai =
|
|
16
|
+
class I extends S {
|
|
17
|
+
constructor(r, e, t) {
|
|
18
|
+
super(), this.ai = r, this.apiToken = e, this.model = t, this.client = new E({ apiKey: e });
|
|
20
19
|
}
|
|
21
20
|
client;
|
|
22
|
-
toStandard(
|
|
23
|
-
const e = Date.now(),
|
|
24
|
-
for (let
|
|
25
|
-
if (typeof
|
|
26
|
-
|
|
21
|
+
toStandard(r) {
|
|
22
|
+
const e = Date.now(), t = [];
|
|
23
|
+
for (let i of r)
|
|
24
|
+
if (typeof i.content == "string")
|
|
25
|
+
t.push({ timestamp: e, ...i });
|
|
27
26
|
else {
|
|
28
|
-
const n =
|
|
27
|
+
const n = i.content?.filter((s) => s.type == "text").map((s) => s.text).join(`
|
|
29
28
|
|
|
30
29
|
`);
|
|
31
|
-
n &&
|
|
32
|
-
if (
|
|
33
|
-
|
|
34
|
-
else if (
|
|
35
|
-
const
|
|
36
|
-
|
|
30
|
+
n && t.push({ timestamp: e, role: i.role, content: n }), i.content.forEach((s) => {
|
|
31
|
+
if (s.type == "tool_use")
|
|
32
|
+
t.push({ timestamp: e, role: "tool", id: s.id, name: s.name, args: s.input, content: void 0 });
|
|
33
|
+
else if (s.type == "tool_result") {
|
|
34
|
+
const o = t.findLast((a) => a.id == s.tool_use_id);
|
|
35
|
+
o && (o[s.is_error ? "error" : "content"] = s.content);
|
|
37
36
|
}
|
|
38
37
|
});
|
|
39
38
|
}
|
|
40
|
-
return
|
|
39
|
+
return t;
|
|
41
40
|
}
|
|
42
|
-
fromStandard(
|
|
43
|
-
for (let e = 0; e <
|
|
44
|
-
if (
|
|
45
|
-
const
|
|
46
|
-
|
|
41
|
+
fromStandard(r) {
|
|
42
|
+
for (let e = 0; e < r.length; e++)
|
|
43
|
+
if (r[e].role == "tool") {
|
|
44
|
+
const t = r[e];
|
|
45
|
+
r.splice(
|
|
47
46
|
e,
|
|
48
47
|
1,
|
|
49
|
-
{ role: "assistant", content: [{ type: "tool_use", id:
|
|
50
|
-
{ role: "user", content: [{ type: "tool_result", tool_use_id:
|
|
48
|
+
{ role: "assistant", content: [{ type: "tool_use", id: t.id, name: t.name, input: t.args }] },
|
|
49
|
+
{ role: "user", content: [{ type: "tool_result", tool_use_id: t.id, is_error: !!t.error, content: t.error || t.content }] }
|
|
51
50
|
), e++;
|
|
52
51
|
}
|
|
53
|
-
return
|
|
52
|
+
return r.map(({ timestamp: e, ...t }) => t);
|
|
54
53
|
}
|
|
55
|
-
ask(
|
|
56
|
-
const
|
|
57
|
-
return Object.assign(new Promise(async (
|
|
58
|
-
let n = this.fromStandard([...e.history || [], { role: "user", content:
|
|
59
|
-
const
|
|
54
|
+
ask(r, e = {}) {
|
|
55
|
+
const t = new AbortController();
|
|
56
|
+
return Object.assign(new Promise(async (i) => {
|
|
57
|
+
let n = this.fromStandard([...e.history || [], { role: "user", content: r, timestamp: Date.now() }]);
|
|
58
|
+
const s = e.tools || this.ai.options.llm?.tools || [], o = {
|
|
60
59
|
model: e.model || this.model,
|
|
61
60
|
max_tokens: e.max_tokens || this.ai.options.llm?.max_tokens || 4096,
|
|
62
61
|
system: e.system || this.ai.options.llm?.system || "",
|
|
63
62
|
temperature: e.temperature || this.ai.options.llm?.temperature || 0.7,
|
|
64
|
-
tools:
|
|
65
|
-
name:
|
|
66
|
-
description:
|
|
63
|
+
tools: s.map((d) => ({
|
|
64
|
+
name: d.name,
|
|
65
|
+
description: d.description,
|
|
67
66
|
input_schema: {
|
|
68
67
|
type: "object",
|
|
69
|
-
properties:
|
|
70
|
-
required:
|
|
68
|
+
properties: d.args ? _(d.args, (c, m) => ({ ...m, required: void 0 })) : {},
|
|
69
|
+
required: d.args ? Object.entries(d.args).filter((c) => c[1].required).map((c) => c[0]) : []
|
|
71
70
|
},
|
|
72
71
|
fn: void 0
|
|
73
72
|
})),
|
|
74
73
|
messages: n,
|
|
75
74
|
stream: !!e.stream
|
|
76
75
|
};
|
|
77
|
-
let
|
|
76
|
+
let a, l = !0;
|
|
78
77
|
do {
|
|
79
|
-
if (
|
|
80
|
-
throw
|
|
78
|
+
if (a = await this.client.messages.create(o).catch((c) => {
|
|
79
|
+
throw c.message += `
|
|
81
80
|
|
|
82
81
|
Messages:
|
|
83
|
-
${JSON.stringify(n, null, 2)}`,
|
|
82
|
+
${JSON.stringify(n, null, 2)}`, c;
|
|
84
83
|
}), e.stream) {
|
|
85
|
-
|
|
84
|
+
l ? l = !1 : e.stream({ text: `
|
|
86
85
|
|
|
87
|
-
` }),
|
|
88
|
-
for await (const
|
|
89
|
-
if (
|
|
90
|
-
if (
|
|
91
|
-
|
|
92
|
-
else if (
|
|
93
|
-
if (
|
|
94
|
-
const
|
|
95
|
-
|
|
96
|
-
} else
|
|
97
|
-
else if (
|
|
98
|
-
const
|
|
99
|
-
|
|
100
|
-
} else if (
|
|
86
|
+
` }), a.content = [];
|
|
87
|
+
for await (const c of a) {
|
|
88
|
+
if (t.signal.aborted) break;
|
|
89
|
+
if (c.type === "content_block_start")
|
|
90
|
+
c.content_block.type === "text" ? a.content.push({ type: "text", text: "" }) : c.content_block.type === "tool_use" && a.content.push({ type: "tool_use", id: c.content_block.id, name: c.content_block.name, input: "" });
|
|
91
|
+
else if (c.type === "content_block_delta")
|
|
92
|
+
if (c.delta.type === "text_delta") {
|
|
93
|
+
const m = c.delta.text;
|
|
94
|
+
a.content.at(-1).text += m, e.stream({ text: m });
|
|
95
|
+
} else c.delta.type === "input_json_delta" && (a.content.at(-1).input += c.delta.partial_json);
|
|
96
|
+
else if (c.type === "content_block_stop") {
|
|
97
|
+
const m = a.content.at(-1);
|
|
98
|
+
m.input != null && (m.input = m.input ? g(m.input, {}) : {});
|
|
99
|
+
} else if (c.type === "message_stop")
|
|
101
100
|
break;
|
|
102
101
|
}
|
|
103
102
|
}
|
|
104
|
-
const
|
|
105
|
-
if (
|
|
106
|
-
n.push({ role: "assistant", content:
|
|
107
|
-
const
|
|
108
|
-
const
|
|
109
|
-
if (e.stream && e.stream({ tool:
|
|
103
|
+
const d = a.content.filter((c) => c.type === "tool_use");
|
|
104
|
+
if (d.length && !t.signal.aborted) {
|
|
105
|
+
n.push({ role: "assistant", content: a.content });
|
|
106
|
+
const c = await Promise.all(d.map(async (m) => {
|
|
107
|
+
const h = s.find(x("name", m.name));
|
|
108
|
+
if (e.stream && e.stream({ tool: m.name }), !h) return { tool_use_id: m.id, is_error: !0, content: "Tool not found" };
|
|
110
109
|
try {
|
|
111
|
-
const u = await
|
|
112
|
-
return { type: "tool_result", tool_use_id:
|
|
110
|
+
const u = await h.fn(m.input, e?.stream, this.ai);
|
|
111
|
+
return { type: "tool_result", tool_use_id: m.id, content: b(u) };
|
|
113
112
|
} catch (u) {
|
|
114
|
-
return { type: "tool_result", tool_use_id:
|
|
113
|
+
return { type: "tool_result", tool_use_id: m.id, is_error: !0, content: u?.message || u?.toString() || "Unknown" };
|
|
115
114
|
}
|
|
116
115
|
}));
|
|
117
|
-
n.push({ role: "user", content:
|
|
116
|
+
n.push({ role: "user", content: c }), o.messages = n;
|
|
118
117
|
}
|
|
119
|
-
} while (!
|
|
120
|
-
n.push({ role: "assistant", content:
|
|
118
|
+
} while (!t.signal.aborted && a.content.some((d) => d.type === "tool_use"));
|
|
119
|
+
n.push({ role: "assistant", content: a.content.filter((d) => d.type == "text").map((d) => d.text).join(`
|
|
121
120
|
|
|
122
|
-
`) }), n = this.toStandard(n), e.stream && e.stream({ done: !0 }), e.history && e.history.splice(0, e.history.length, ...n),
|
|
123
|
-
}), { abort: () =>
|
|
121
|
+
`) }), n = this.toStandard(n), e.stream && e.stream({ done: !0 }), e.history && e.history.splice(0, e.history.length, ...n), i(n.at(-1)?.content);
|
|
122
|
+
}), { abort: () => t.abort() });
|
|
124
123
|
}
|
|
125
124
|
}
|
|
126
|
-
class
|
|
127
|
-
constructor(
|
|
128
|
-
super(), this.ai =
|
|
125
|
+
class k extends S {
|
|
126
|
+
constructor(r, e, t, i) {
|
|
127
|
+
super(), this.ai = r, this.host = e, this.token = t, this.model = i, this.client = new O(T({
|
|
129
128
|
baseURL: e,
|
|
130
|
-
apiKey:
|
|
129
|
+
apiKey: t
|
|
131
130
|
}));
|
|
132
131
|
}
|
|
133
132
|
client;
|
|
134
|
-
toStandard(
|
|
135
|
-
for (let e = 0; e <
|
|
136
|
-
const
|
|
137
|
-
if (
|
|
138
|
-
const
|
|
133
|
+
toStandard(r) {
|
|
134
|
+
for (let e = 0; e < r.length; e++) {
|
|
135
|
+
const t = r[e];
|
|
136
|
+
if (t.role === "assistant" && t.tool_calls) {
|
|
137
|
+
const i = t.tool_calls.map((n) => ({
|
|
139
138
|
role: "tool",
|
|
140
139
|
id: n.id,
|
|
141
140
|
name: n.function.name,
|
|
142
|
-
args:
|
|
143
|
-
timestamp:
|
|
141
|
+
args: g(n.function.arguments, {}),
|
|
142
|
+
timestamp: t.timestamp
|
|
144
143
|
}));
|
|
145
|
-
|
|
146
|
-
} else if (
|
|
147
|
-
const
|
|
148
|
-
|
|
144
|
+
r.splice(e, 1, ...i), e += i.length - 1;
|
|
145
|
+
} else if (t.role === "tool" && t.content) {
|
|
146
|
+
const i = r.find((n) => t.tool_call_id == n.id);
|
|
147
|
+
i && (t.content.includes('"error":') ? i.error = t.content : i.content = t.content), r.splice(e, 1), e--;
|
|
149
148
|
}
|
|
150
|
-
|
|
149
|
+
r[e]?.timestamp || (r[e].timestamp = Date.now());
|
|
151
150
|
}
|
|
152
|
-
return
|
|
151
|
+
return r;
|
|
153
152
|
}
|
|
154
|
-
fromStandard(
|
|
155
|
-
return
|
|
156
|
-
if (
|
|
153
|
+
fromStandard(r) {
|
|
154
|
+
return r.reduce((e, t) => {
|
|
155
|
+
if (t.role === "tool")
|
|
157
156
|
e.push({
|
|
158
157
|
role: "assistant",
|
|
159
158
|
content: null,
|
|
160
|
-
tool_calls: [{ id:
|
|
159
|
+
tool_calls: [{ id: t.id, type: "function", function: { name: t.name, arguments: JSON.stringify(t.args) } }],
|
|
161
160
|
refusal: null,
|
|
162
161
|
annotations: []
|
|
163
162
|
}, {
|
|
164
163
|
role: "tool",
|
|
165
|
-
tool_call_id:
|
|
166
|
-
content:
|
|
164
|
+
tool_call_id: t.id,
|
|
165
|
+
content: t.error || t.content
|
|
167
166
|
});
|
|
168
167
|
else {
|
|
169
|
-
const { timestamp:
|
|
168
|
+
const { timestamp: i, ...n } = t;
|
|
170
169
|
e.push(n);
|
|
171
170
|
}
|
|
172
171
|
return e;
|
|
173
172
|
}, []);
|
|
174
173
|
}
|
|
175
|
-
ask(
|
|
176
|
-
const
|
|
177
|
-
return Object.assign(new Promise(async (
|
|
174
|
+
ask(r, e = {}) {
|
|
175
|
+
const t = new AbortController();
|
|
176
|
+
return Object.assign(new Promise(async (i, n) => {
|
|
178
177
|
e.system && e.history?.[0]?.role != "system" && e.history?.splice(0, 0, { role: "system", content: e.system, timestamp: Date.now() });
|
|
179
|
-
let
|
|
180
|
-
const
|
|
178
|
+
let s = this.fromStandard([...e.history || [], { role: "user", content: r, timestamp: Date.now() }]);
|
|
179
|
+
const o = e.tools || this.ai.options.llm?.tools || [], a = {
|
|
181
180
|
model: e.model || this.model,
|
|
182
|
-
messages:
|
|
181
|
+
messages: s,
|
|
183
182
|
stream: !!e.stream,
|
|
184
183
|
max_tokens: e.max_tokens || this.ai.options.llm?.max_tokens || 4096,
|
|
185
184
|
temperature: e.temperature || this.ai.options.llm?.temperature || 0.7,
|
|
186
|
-
tools:
|
|
185
|
+
tools: o.map((c) => ({
|
|
187
186
|
type: "function",
|
|
188
187
|
function: {
|
|
189
|
-
name:
|
|
190
|
-
description:
|
|
188
|
+
name: c.name,
|
|
189
|
+
description: c.description,
|
|
191
190
|
parameters: {
|
|
192
191
|
type: "object",
|
|
193
|
-
properties:
|
|
194
|
-
required:
|
|
192
|
+
properties: c.args ? _(c.args, (m, h) => ({ ...h, required: void 0 })) : {},
|
|
193
|
+
required: c.args ? Object.entries(c.args).filter((m) => m[1].required).map((m) => m[0]) : []
|
|
195
194
|
}
|
|
196
195
|
}
|
|
197
196
|
}))
|
|
198
197
|
};
|
|
199
|
-
let
|
|
198
|
+
let l, d = !0;
|
|
200
199
|
do {
|
|
201
|
-
if (
|
|
202
|
-
throw
|
|
200
|
+
if (l = await this.client.chat.completions.create(a).catch((m) => {
|
|
201
|
+
throw m.message += `
|
|
203
202
|
|
|
204
203
|
Messages:
|
|
205
|
-
${JSON.stringify(
|
|
204
|
+
${JSON.stringify(s, null, 2)}`, m;
|
|
206
205
|
}), e.stream) {
|
|
207
|
-
|
|
206
|
+
d ? d = !1 : e.stream({ text: `
|
|
208
207
|
|
|
209
|
-
` }),
|
|
210
|
-
for await (const
|
|
211
|
-
if (
|
|
212
|
-
|
|
208
|
+
` }), l.choices = [{ message: { content: "", tool_calls: [] } }];
|
|
209
|
+
for await (const m of l) {
|
|
210
|
+
if (t.signal.aborted) break;
|
|
211
|
+
m.choices[0].delta.content && (l.choices[0].message.content += m.choices[0].delta.content, e.stream({ text: m.choices[0].delta.content })), m.choices[0].delta.tool_calls && (l.choices[0].message.tool_calls = m.choices[0].delta.tool_calls);
|
|
213
212
|
}
|
|
214
213
|
}
|
|
215
|
-
const
|
|
216
|
-
if (
|
|
217
|
-
|
|
218
|
-
const
|
|
219
|
-
const u =
|
|
220
|
-
if (e.stream && e.stream({ tool:
|
|
214
|
+
const c = l.choices[0].message.tool_calls || [];
|
|
215
|
+
if (c.length && !t.signal.aborted) {
|
|
216
|
+
s.push(l.choices[0].message);
|
|
217
|
+
const m = await Promise.all(c.map(async (h) => {
|
|
218
|
+
const u = o?.find(x("name", h.function.name));
|
|
219
|
+
if (e.stream && e.stream({ tool: h.function.name }), !u) return { role: "tool", tool_call_id: h.id, content: '{"error": "Tool not found"}' };
|
|
221
220
|
try {
|
|
222
|
-
const f =
|
|
223
|
-
return { role: "tool", tool_call_id:
|
|
221
|
+
const f = g(h.function.arguments, {}), y = await u.fn(f, e.stream, this.ai);
|
|
222
|
+
return { role: "tool", tool_call_id: h.id, content: b(y) };
|
|
224
223
|
} catch (f) {
|
|
225
|
-
return { role: "tool", tool_call_id:
|
|
224
|
+
return { role: "tool", tool_call_id: h.id, content: b({ error: f?.message || f?.toString() || "Unknown" }) };
|
|
226
225
|
}
|
|
227
226
|
}));
|
|
228
|
-
|
|
227
|
+
s.push(...m), a.messages = s;
|
|
229
228
|
}
|
|
230
|
-
} while (!
|
|
231
|
-
|
|
232
|
-
}), { abort: () =>
|
|
229
|
+
} while (!t.signal.aborted && l.choices?.[0]?.message?.tool_calls?.length);
|
|
230
|
+
s.push({ role: "assistant", content: l.choices[0].message.content || "" }), s = this.toStandard(s), e.stream && e.stream({ done: !0 }), e.history && e.history.splice(0, e.history.length, ...s), i(s.at(-1)?.content);
|
|
231
|
+
}), { abort: () => t.abort() });
|
|
233
232
|
}
|
|
234
233
|
}
|
|
235
|
-
class
|
|
236
|
-
constructor(
|
|
237
|
-
this.ai =
|
|
238
|
-
const
|
|
239
|
-
|
|
240
|
-
}),
|
|
241
|
-
this.defaultModel || (this.defaultModel = e),
|
|
234
|
+
class D {
|
|
235
|
+
constructor(r) {
|
|
236
|
+
this.ai = r, this.embedWorker = new A(v(U(R(import.meta.url)), "embedder.js")), this.embedWorker.on("message", ({ id: e, embedding: t }) => {
|
|
237
|
+
const i = this.embedQueue.get(e);
|
|
238
|
+
i && (i.resolve(t), this.embedQueue.delete(e));
|
|
239
|
+
}), r.options.llm?.models && Object.entries(r.options.llm.models).forEach(([e, t]) => {
|
|
240
|
+
this.defaultModel || (this.defaultModel = e), t.proto == "anthropic" ? this.models[e] = new I(this.ai, t.token, e) : t.proto == "ollama" ? this.models[e] = new k(this.ai, t.host, "not-needed", e) : t.proto == "openai" && (this.models[e] = new k(this.ai, t.host || null, t.token, e));
|
|
242
241
|
});
|
|
243
242
|
}
|
|
244
243
|
embedWorker = null;
|
|
@@ -252,25 +251,25 @@ class H {
|
|
|
252
251
|
* @param {LLMRequest} options Configuration options and chat history
|
|
253
252
|
* @returns {{abort: () => void, response: Promise<string>}} Function to abort response and chat history
|
|
254
253
|
*/
|
|
255
|
-
ask(
|
|
256
|
-
const
|
|
257
|
-
if (!this.models[
|
|
258
|
-
let
|
|
254
|
+
ask(r, e = {}) {
|
|
255
|
+
const t = e.model || this.defaultModel;
|
|
256
|
+
if (!this.models[t]) throw new Error(`Model does not exist: ${t}`);
|
|
257
|
+
let i = () => {
|
|
259
258
|
};
|
|
260
259
|
return Object.assign(new Promise(async (n) => {
|
|
261
260
|
if (e.history || (e.history = []), e.memory) {
|
|
262
261
|
e.system = (e.system || "") + `
|
|
263
262
|
You have passive persistent memory never make any mention of your memory capabilities and what you can/cannot remember
|
|
264
263
|
`;
|
|
265
|
-
const
|
|
266
|
-
const [
|
|
267
|
-
|
|
268
|
-
|
|
264
|
+
const o = async (l, d, c = 50) => {
|
|
265
|
+
const [m, h] = await Promise.all([
|
|
266
|
+
d ? this.embedding(d) : Promise.resolve(null),
|
|
267
|
+
l ? this.embedding(l) : Promise.resolve(null)
|
|
269
268
|
]);
|
|
270
|
-
return (e.memory || []).map((u) => ({ ...u, score:
|
|
271
|
-
},
|
|
272
|
-
|
|
273
|
-
` +
|
|
269
|
+
return (e.memory || []).map((u) => ({ ...u, score: m ? this.cosineSimilarity(u.embeddings[0], m[0].embedding) : 1 })).filter((u) => u.score >= 0.8).map((u) => ({ ...u, score: h ? this.cosineSimilarity(u.embeddings[1], h[0].embedding) : u.score })).filter((u) => u.score >= 0.2).toSorted((u, f) => u.score - f.score).slice(0, c);
|
|
270
|
+
}, a = await o(r);
|
|
271
|
+
a.length && e.history.push({ role: "assistant", content: `Things I remembered:
|
|
272
|
+
` + a.map((l) => `${l.owner}: ${l.fact}`).join(`
|
|
274
273
|
`) }), e.tools = [...e.tools || [], {
|
|
275
274
|
name: "read_memory",
|
|
276
275
|
description: "Check your long-term memory for more information",
|
|
@@ -279,32 +278,32 @@ You have passive persistent memory never make any mention of your memory capabil
|
|
|
279
278
|
query: { type: "string", description: "Search memory based on a query, can be used with or without subject argument" },
|
|
280
279
|
limit: { type: "number", description: "Result limit, default 5" }
|
|
281
280
|
},
|
|
282
|
-
fn: (
|
|
283
|
-
if (!
|
|
284
|
-
return
|
|
281
|
+
fn: (l) => {
|
|
282
|
+
if (!l.subject && !l.query) throw new Error("Either a subject or query argument is required");
|
|
283
|
+
return o(l.query, l.subject, l.limit || 5);
|
|
285
284
|
}
|
|
286
285
|
}];
|
|
287
286
|
}
|
|
288
|
-
const
|
|
287
|
+
const s = await this.models[t].ask(r, e);
|
|
289
288
|
if (e.memory) {
|
|
290
|
-
const
|
|
291
|
-
|
|
289
|
+
const o = e.history?.findIndex((a) => a.role == "assistant" && a.content.startsWith("Things I remembered:"));
|
|
290
|
+
o != null && o >= 0 && e.history?.splice(o, 1);
|
|
292
291
|
}
|
|
293
292
|
if (e.compress || e.memory) {
|
|
294
|
-
let
|
|
293
|
+
let o = null;
|
|
295
294
|
if (e.compress)
|
|
296
|
-
|
|
295
|
+
o = await this.ai.language.compressHistory(e.history, e.compress.max, e.compress.min, e), e.history.splice(0, e.history.length, ...o.history);
|
|
297
296
|
else {
|
|
298
|
-
const
|
|
299
|
-
|
|
297
|
+
const a = e.history?.findLastIndex((l) => l.role == "user") ?? -1;
|
|
298
|
+
o = await this.ai.language.compressHistory(a != -1 ? e.history.slice(a) : e.history, 0, 0, e);
|
|
300
299
|
}
|
|
301
300
|
if (e.memory) {
|
|
302
|
-
const
|
|
303
|
-
e.memory.splice(0, e.memory.length, ...
|
|
301
|
+
const a = e.memory.filter((l) => !o.memory.some((d) => this.cosineSimilarity(l.embeddings[1], d.embeddings[1]) > 0.8)).concat(o.memory);
|
|
302
|
+
e.memory.splice(0, e.memory.length, ...a);
|
|
304
303
|
}
|
|
305
304
|
}
|
|
306
|
-
return n(
|
|
307
|
-
}), { abort:
|
|
305
|
+
return n(s);
|
|
306
|
+
}), { abort: i });
|
|
308
307
|
}
|
|
309
308
|
/**
|
|
310
309
|
* Compress chat history to reduce context size
|
|
@@ -314,22 +313,22 @@ You have passive persistent memory never make any mention of your memory capabil
|
|
|
314
313
|
* @param {LLMRequest} options LLM options
|
|
315
314
|
* @returns {Promise<LLMMessage[]>} New chat history will summary at index 0
|
|
316
315
|
*/
|
|
317
|
-
async compressHistory(
|
|
318
|
-
if (this.estimateTokens(
|
|
319
|
-
let n = 0,
|
|
320
|
-
for (let u of
|
|
321
|
-
if (
|
|
316
|
+
async compressHistory(r, e, t, i) {
|
|
317
|
+
if (this.estimateTokens(r) < e) return { history: r, memory: [] };
|
|
318
|
+
let n = 0, s = 0;
|
|
319
|
+
for (let u of r.toReversed())
|
|
320
|
+
if (s += this.estimateTokens(u.content), s < t) n++;
|
|
322
321
|
else break;
|
|
323
|
-
if (
|
|
324
|
-
const
|
|
322
|
+
if (r.length <= n) return { history: r, memory: [] };
|
|
323
|
+
const o = r[0].role == "system" ? r[0] : null, a = n == 0 ? [] : r.slice(-n), l = (n == 0 ? r : r.slice(0, -n)).filter((u) => u.role === "assistant" || u.role === "user"), d = await this.json(`Create the smallest summary possible, no more than 500 tokens. Create a list of NEW facts (split by subject [pro]noun and fact) about what you learned from this conversation that you didn't already know or get from a tool call or system prompt. Focus only on new information about people, topics, or facts. Avoid generating facts about the AI. Match this format: {summary: string, facts: [[subject, fact]]}
|
|
325
324
|
|
|
326
|
-
${
|
|
325
|
+
${l.map((u) => `${u.role}: ${u.content}`).join(`
|
|
327
326
|
|
|
328
|
-
`)}`, { model:
|
|
329
|
-
const
|
|
330
|
-
return { owner: u, fact: f, embeddings: [
|
|
331
|
-
})),
|
|
332
|
-
return
|
|
327
|
+
`)}`, { model: i?.model, temperature: i?.temperature || 0.3 }), c = /* @__PURE__ */ new Date(), m = await Promise.all((d?.facts || [])?.map(async ([u, f]) => {
|
|
328
|
+
const y = await Promise.all([this.embedding(u), this.embedding(`${u}: ${f}`)]);
|
|
329
|
+
return { owner: u, fact: f, embeddings: [y[0][0].embedding, y[1][0].embedding], timestamp: c };
|
|
330
|
+
})), h = [{ role: "assistant", content: `Conversation Summary: ${d?.summary}`, timestamp: Date.now() }, ...a];
|
|
331
|
+
return o && h.splice(0, 0, o), { history: h, memory: m };
|
|
333
332
|
}
|
|
334
333
|
/**
|
|
335
334
|
* Compare the difference between embeddings (calculates the angle between two vectors)
|
|
@@ -337,13 +336,13 @@ ${a.map((u) => `${u.role}: ${u.content}`).join(`
|
|
|
337
336
|
* @param {number[]} v2 Second embedding / vector for comparison
|
|
338
337
|
* @returns {number} Similarity values 0-1: 0 = unique, 1 = identical
|
|
339
338
|
*/
|
|
340
|
-
cosineSimilarity(
|
|
341
|
-
if (
|
|
342
|
-
let
|
|
343
|
-
for (let
|
|
344
|
-
|
|
345
|
-
const
|
|
346
|
-
return
|
|
339
|
+
cosineSimilarity(r, e) {
|
|
340
|
+
if (r.length !== e.length) throw new Error("Vectors must be same length");
|
|
341
|
+
let t = 0, i = 0, n = 0;
|
|
342
|
+
for (let o = 0; o < r.length; o++)
|
|
343
|
+
t += r[o] * e[o], i += r[o] * r[o], n += e[o] * e[o];
|
|
344
|
+
const s = Math.sqrt(i) * Math.sqrt(n);
|
|
345
|
+
return s === 0 ? 0 : t / s;
|
|
347
346
|
}
|
|
348
347
|
/**
|
|
349
348
|
* Chunk text into parts for AI digestion
|
|
@@ -352,26 +351,26 @@ ${a.map((u) => `${u.role}: ${u.content}`).join(`
|
|
|
352
351
|
* @param {number} overlapTokens Includes previous X tokens to provide continuity to AI (In addition to max tokens)
|
|
353
352
|
* @returns {string[]} Chunked strings
|
|
354
353
|
*/
|
|
355
|
-
chunk(
|
|
356
|
-
const
|
|
357
|
-
const
|
|
358
|
-
return typeof
|
|
359
|
-
}) : [],
|
|
360
|
-
`)).flatMap((
|
|
361
|
-
`]),
|
|
362
|
-
for (let
|
|
363
|
-
let
|
|
364
|
-
for (;
|
|
365
|
-
const
|
|
366
|
-
if (this.estimateTokens(
|
|
367
|
-
`)) > e &&
|
|
368
|
-
|
|
354
|
+
chunk(r, e = 500, t = 50) {
|
|
355
|
+
const i = (a, l = "") => a ? Object.entries(a).flatMap(([d, c]) => {
|
|
356
|
+
const m = l ? `${l}${isNaN(+d) ? `.${d}` : `[${d}]`}` : d;
|
|
357
|
+
return typeof c == "object" && !Array.isArray(c) ? i(c, m) : `${m}: ${Array.isArray(c) ? c.join(", ") : c}`;
|
|
358
|
+
}) : [], s = (typeof r == "object" ? i(r) : r.split(`
|
|
359
|
+
`)).flatMap((a) => [...a.split(/\s+/).filter(Boolean), `
|
|
360
|
+
`]), o = [];
|
|
361
|
+
for (let a = 0; a < s.length; ) {
|
|
362
|
+
let l = "", d = a;
|
|
363
|
+
for (; d < s.length; ) {
|
|
364
|
+
const m = l + (l ? " " : "") + s[d];
|
|
365
|
+
if (this.estimateTokens(m.replace(/\s*\n\s*/g, `
|
|
366
|
+
`)) > e && l) break;
|
|
367
|
+
l = m, d++;
|
|
369
368
|
}
|
|
370
|
-
const
|
|
369
|
+
const c = l.replace(/\s*\n\s*/g, `
|
|
371
370
|
`).trim();
|
|
372
|
-
|
|
371
|
+
c && o.push(c), a = Math.max(d - t, d === a ? a + 1 : d);
|
|
373
372
|
}
|
|
374
|
-
return
|
|
373
|
+
return o;
|
|
375
374
|
}
|
|
376
375
|
/**
|
|
377
376
|
* Create a vector representation of a string
|
|
@@ -380,21 +379,21 @@ ${a.map((u) => `${u.role}: ${u.content}`).join(`
|
|
|
380
379
|
* @param {number} overlapTokens Includes previous X tokens to provide continuity to AI (In addition to max tokens)
|
|
381
380
|
* @returns {Promise<Awaited<{index: number, embedding: number[], text: string, tokens: number}>[]>} Chunked embeddings
|
|
382
381
|
*/
|
|
383
|
-
embedding(
|
|
384
|
-
const
|
|
385
|
-
const
|
|
386
|
-
this.embedQueue.set(
|
|
387
|
-
id:
|
|
388
|
-
text:
|
|
382
|
+
embedding(r, e = 500, t = 50) {
|
|
383
|
+
const i = (s) => new Promise((o, a) => {
|
|
384
|
+
const l = this.embedId++;
|
|
385
|
+
this.embedQueue.set(l, { resolve: o, reject: a }), this.embedWorker?.postMessage({
|
|
386
|
+
id: l,
|
|
387
|
+
text: s,
|
|
389
388
|
model: this.ai.options?.embedder || "bge-small-en-v1.5",
|
|
390
389
|
path: this.ai.options.path
|
|
391
390
|
});
|
|
392
|
-
}), n = this.chunk(
|
|
393
|
-
return Promise.all(n.map(async (
|
|
394
|
-
index:
|
|
395
|
-
embedding: await
|
|
396
|
-
text:
|
|
397
|
-
tokens: this.estimateTokens(
|
|
391
|
+
}), n = this.chunk(r, e, t);
|
|
392
|
+
return Promise.all(n.map(async (s, o) => ({
|
|
393
|
+
index: o,
|
|
394
|
+
embedding: await i(s),
|
|
395
|
+
text: s,
|
|
396
|
+
tokens: this.estimateTokens(s)
|
|
398
397
|
})));
|
|
399
398
|
}
|
|
400
399
|
/**
|
|
@@ -402,8 +401,8 @@ ${a.map((u) => `${u.role}: ${u.content}`).join(`
|
|
|
402
401
|
* @param history Object to size
|
|
403
402
|
* @returns {number} Rough token count
|
|
404
403
|
*/
|
|
405
|
-
estimateTokens(
|
|
406
|
-
const e = JSON.stringify(
|
|
404
|
+
estimateTokens(r) {
|
|
405
|
+
const e = JSON.stringify(r);
|
|
407
406
|
return Math.ceil(e.length / 4 * 1.2);
|
|
408
407
|
}
|
|
409
408
|
/**
|
|
@@ -412,10 +411,10 @@ ${a.map((u) => `${u.role}: ${u.content}`).join(`
|
|
|
412
411
|
* @param {string} searchTerms Multiple search terms to check against target
|
|
413
412
|
* @returns {{avg: number, max: number, similarities: number[]}} Similarity values 0-1: 0 = unique, 1 = identical
|
|
414
413
|
*/
|
|
415
|
-
fuzzyMatch(
|
|
414
|
+
fuzzyMatch(r, ...e) {
|
|
416
415
|
if (e.length < 2) throw new Error("Requires at least 2 strings to compare");
|
|
417
|
-
const
|
|
418
|
-
return { avg: n.reduce((
|
|
416
|
+
const t = (s, o = 10) => s.toLowerCase().split("").map((a, l) => a.charCodeAt(0) * (l + 1) % o / o).slice(0, o), i = t(r), n = e.map((s) => t(s)).map((s) => this.cosineSimilarity(i, s));
|
|
417
|
+
return { avg: n.reduce((s, o) => s + o, 0) / n.length, max: Math.max(...n), similarities: n };
|
|
419
418
|
}
|
|
420
419
|
/**
|
|
421
420
|
* Ask a question with JSON response
|
|
@@ -423,11 +422,11 @@ ${a.map((u) => `${u.role}: ${u.content}`).join(`
|
|
|
423
422
|
* @param {LLMRequest} options Configuration options and chat history
|
|
424
423
|
* @returns {Promise<{} | {} | RegExpExecArray | null>}
|
|
425
424
|
*/
|
|
426
|
-
async json(
|
|
427
|
-
let
|
|
428
|
-
if (!
|
|
429
|
-
const
|
|
430
|
-
return
|
|
425
|
+
async json(r, e) {
|
|
426
|
+
let t = await this.ask(r, { system: "Respond using a JSON blob matching any provided examples", ...e });
|
|
427
|
+
if (!t) return {};
|
|
428
|
+
const i = /```(?:.+)?\s*([\s\S]*?)```/.exec(t), n = i ? i[1].trim() : t;
|
|
429
|
+
return g(n, {});
|
|
431
430
|
}
|
|
432
431
|
/**
|
|
433
432
|
* Create a summary of some text
|
|
@@ -436,59 +435,115 @@ ${a.map((u) => `${u.role}: ${u.content}`).join(`
|
|
|
436
435
|
* @param options LLM request options
|
|
437
436
|
* @returns {Promise<string>} Summary
|
|
438
437
|
*/
|
|
439
|
-
summarize(
|
|
440
|
-
return this.ask(
|
|
438
|
+
summarize(r, e, t) {
|
|
439
|
+
return this.ask(r, { system: `Generate a brief summary <= ${e} tokens. Output nothing else`, temperature: 0.3, ...t });
|
|
441
440
|
}
|
|
442
441
|
}
|
|
443
|
-
class
|
|
444
|
-
constructor(
|
|
445
|
-
this.ai =
|
|
442
|
+
class H {
|
|
443
|
+
constructor(r) {
|
|
444
|
+
this.ai = r;
|
|
446
445
|
}
|
|
447
|
-
|
|
448
|
-
|
|
449
|
-
|
|
450
|
-
|
|
451
|
-
|
|
452
|
-
|
|
453
|
-
|
|
454
|
-
|
|
455
|
-
|
|
456
|
-
|
|
457
|
-
|
|
458
|
-
|
|
459
|
-
|
|
446
|
+
whisperPipeline;
|
|
447
|
+
combineSpeakerTranscript(r, e) {
|
|
448
|
+
const t = /* @__PURE__ */ new Map();
|
|
449
|
+
let i = 0;
|
|
450
|
+
e.forEach((a) => {
|
|
451
|
+
t.has(a.speaker) || t.set(a.speaker, ++i);
|
|
452
|
+
});
|
|
453
|
+
const n = [];
|
|
454
|
+
let s = -1, o = "";
|
|
455
|
+
return r.forEach((a) => {
|
|
456
|
+
const l = a.timestamp[0], d = e.find((m) => l >= m.start && l <= m.end), c = d ? t.get(d.speaker) : 1;
|
|
457
|
+
c !== s ? (o && n.push(`[speaker ${s}]: ${o.trim()}`), s = c, o = a.text) : o += a.text;
|
|
458
|
+
}), o && n.push(`[speaker ${s}]: ${o.trim()}`), n.join(`
|
|
459
|
+
`);
|
|
460
|
+
}
|
|
461
|
+
async isPyannoteInstalled() {
|
|
462
|
+
return new Promise((r) => {
|
|
463
|
+
const e = w("python3", ["-c", "import pyannote.audio"]);
|
|
464
|
+
e.on("close", (t) => r(t === 0)), e.on("error", () => r(!1));
|
|
465
|
+
});
|
|
466
|
+
}
|
|
467
|
+
async runDiarization(r) {
|
|
468
|
+
if (!await this.isPyannoteInstalled()) throw new Error("Pyannote is not installed: pip install pyannote.audio");
|
|
469
|
+
const e = `
|
|
470
|
+
import sys
|
|
471
|
+
import json
|
|
472
|
+
from pyannote.audio import Pipeline
|
|
473
|
+
|
|
474
|
+
os.environ['TORCH_HOME'] = "${this.ai.options.path}"
|
|
475
|
+
pipeline = Pipeline.from_pretrained("pyannote/speaker-diarization-3.1")
|
|
476
|
+
diarization = pipeline(sys.argv[1])
|
|
477
|
+
|
|
478
|
+
segments = []
|
|
479
|
+
for turn, _, speaker in diarization.itertracks(yield_label=True):
|
|
480
|
+
segments.append({
|
|
481
|
+
"start": turn.start,
|
|
482
|
+
"end": turn.end,
|
|
483
|
+
"speaker": speaker
|
|
484
|
+
})
|
|
485
|
+
|
|
486
|
+
print(json.dumps(segments))
|
|
487
|
+
`;
|
|
488
|
+
return new Promise((t, i) => {
|
|
489
|
+
let n = "";
|
|
490
|
+
const s = w("python3", ["-c", e, r]);
|
|
491
|
+
s.stdout.on("data", (o) => n += o.toString()), s.stderr.on("data", (o) => console.error(o.toString())), s.on("close", (o) => {
|
|
492
|
+
if (o === 0)
|
|
493
|
+
try {
|
|
494
|
+
t(JSON.parse(n));
|
|
495
|
+
} catch {
|
|
496
|
+
i(new Error("Failed to parse diarization output"));
|
|
497
|
+
}
|
|
498
|
+
else
|
|
499
|
+
i(new Error(`Python process exited with code ${o}`));
|
|
500
|
+
}), s.on("error", i);
|
|
460
501
|
});
|
|
461
|
-
return Object.assign(o, { abort: s });
|
|
462
502
|
}
|
|
463
|
-
|
|
464
|
-
|
|
465
|
-
|
|
466
|
-
const
|
|
467
|
-
|
|
503
|
+
asr(r, e = {}) {
|
|
504
|
+
const { model: t = this.ai.options.asr || "whisper-base", speaker: i = !1 } = e;
|
|
505
|
+
let n = !1;
|
|
506
|
+
const s = () => {
|
|
507
|
+
n = !0;
|
|
508
|
+
}, o = new Promise(async (a, l) => {
|
|
509
|
+
try {
|
|
510
|
+
if (n || (this.whisperPipeline || (this.whisperPipeline = await L("automatic-speech-recognition", `Xenova/${t}`, { cache_dir: this.ai.options.path, quantized: !0 })), n)) return a(null);
|
|
511
|
+
const d = await this.whisperPipeline(r, { return_timestamps: i ? "word" : !1, chunk_length_s: 30 });
|
|
512
|
+
if (!i) return a(d.text?.trim() || null);
|
|
513
|
+
if (n) return a(null);
|
|
514
|
+
const c = await this.runDiarization(r);
|
|
515
|
+
if (n) return a(null);
|
|
516
|
+
const m = this.combineSpeakerTranscript(d.chunks || [], c);
|
|
517
|
+
a(m);
|
|
518
|
+
} catch (d) {
|
|
519
|
+
l(d);
|
|
520
|
+
}
|
|
521
|
+
});
|
|
522
|
+
return Object.assign(o, { abort: s });
|
|
468
523
|
}
|
|
469
524
|
}
|
|
470
|
-
class
|
|
471
|
-
constructor(
|
|
472
|
-
this.ai =
|
|
525
|
+
class J {
  /**
   * Vision processing AI.
   * @param r Parent AI instance (provides shared options: `ocr` language, cache `path`)
   */
  constructor(r) {
    this.ai = r;
  }
  /**
   * Convert image to text using Optical Character Recognition
   * @param {string} r Path to image
   * @returns {AbortablePromise<string | null>} Promise of extracted text with abort method
   */
  ocr(r) {
    let e;
    const t = new Promise(async (i, s) => {
      try {
        e = await N(this.ai.options.ocr || "eng", 2, { cachePath: this.ai.options.path });
        const { data: n } = await e.recognize(r);
        await e.terminate();
        i(n.text.trim() || null);
      } catch (err) {
        // BUG FIX: previously any failure (worker creation or recognition)
        // left this promise pending forever and leaked the tesseract worker;
        // settle it and release the worker instead.
        await e?.terminate().catch(() => {});
        s(err);
      }
    });
    return Object.assign(t, { abort: () => e?.terminate() });
  }
}
|
|
489
|
-
class
|
|
490
|
-
constructor(
|
|
491
|
-
this.options =
|
|
544
|
+
class oe {
|
|
545
|
+
constructor(r) {
|
|
546
|
+
this.options = r, r.path || (r.path = j.tmpdir()), process.env.TRANSFORMERS_CACHE = r.path, this.audio = new H(this), this.language = new D(this), this.vision = new J(this);
|
|
492
547
|
}
|
|
493
548
|
/** Audio processing AI */
|
|
494
549
|
audio;
|
|
@@ -497,38 +552,38 @@ class ae {
|
|
|
497
552
|
/** Vision processing AI */
|
|
498
553
|
vision;
|
|
499
554
|
}
|
|
500
|
-
const
|
|
555
|
+
const F = {
|
|
501
556
|
name: "cli",
|
|
502
557
|
description: "Use the command line interface, returns any output",
|
|
503
558
|
args: { command: { type: "string", description: "Command to run", required: !0 } },
|
|
504
|
-
fn: (
|
|
505
|
-
},
|
|
559
|
+
fn: (p) => C`${p.command}`
|
|
560
|
+
}, ie = {
|
|
506
561
|
name: "get_datetime",
|
|
507
562
|
description: "Get current UTC date / time",
|
|
508
563
|
args: {},
|
|
509
564
|
fn: async () => (/* @__PURE__ */ new Date()).toUTCString()
|
|
510
|
-
},
|
|
565
|
+
}, ae = {
|
|
511
566
|
name: "exec",
|
|
512
567
|
description: "Run code/scripts",
|
|
513
568
|
args: {
|
|
514
569
|
language: { type: "string", description: "Execution language", enum: ["cli", "node", "python"], required: !0 },
|
|
515
570
|
code: { type: "string", description: "Code to execute", required: !0 }
|
|
516
571
|
},
|
|
517
|
-
fn: async (
|
|
572
|
+
fn: async (p, r, e) => {
|
|
518
573
|
try {
|
|
519
|
-
switch (
|
|
574
|
+
switch (p.type) {
|
|
520
575
|
case "bash":
|
|
521
|
-
return await
|
|
576
|
+
return await F.fn({ command: p.code }, r, e);
|
|
522
577
|
case "node":
|
|
523
|
-
return await G.fn({ code:
|
|
578
|
+
return await G.fn({ code: p.code }, r, e);
|
|
524
579
|
case "python":
|
|
525
|
-
return await
|
|
580
|
+
return await B.fn({ code: p.code }, r, e);
|
|
526
581
|
}
|
|
527
|
-
} catch (
|
|
528
|
-
return { error:
|
|
582
|
+
} catch (t) {
|
|
583
|
+
return { error: t?.message || t.toString() };
|
|
529
584
|
}
|
|
530
585
|
}
|
|
531
|
-
},
|
|
586
|
+
}, ce = {
|
|
532
587
|
name: "fetch",
|
|
533
588
|
description: "Make HTTP request to URL",
|
|
534
589
|
args: {
|
|
@@ -537,85 +592,85 @@ const B = {
|
|
|
537
592
|
headers: { type: "object", description: "HTTP headers to send", default: {} },
|
|
538
593
|
body: { type: "object", description: "HTTP body to send" }
|
|
539
594
|
},
|
|
540
|
-
fn: (
|
|
595
|
+
fn: (p) => new P({ url: p.url, headers: p.headers }).request({ method: p.method || "GET", body: p.body })
|
|
541
596
|
}, G = {
|
|
542
597
|
name: "exec_javascript",
|
|
543
598
|
description: "Execute commonjs javascript",
|
|
544
599
|
args: {
|
|
545
600
|
code: { type: "string", description: "CommonJS javascript", required: !0 }
|
|
546
601
|
},
|
|
547
|
-
fn: async (
|
|
548
|
-
const
|
|
549
|
-
return { ...
|
|
602
|
+
fn: async (p) => {
|
|
603
|
+
const r = q(null), e = await $({ console: r }, p.code, !0).catch((t) => r.output.error.push(t));
|
|
604
|
+
return { ...r.output, return: e, stdout: void 0, stderr: void 0 };
|
|
550
605
|
}
|
|
551
|
-
},
|
|
606
|
+
}, B = {
|
|
552
607
|
name: "exec_javascript",
|
|
553
608
|
description: "Execute commonjs javascript",
|
|
554
609
|
args: {
|
|
555
610
|
code: { type: "string", description: "CommonJS javascript", required: !0 }
|
|
556
611
|
},
|
|
557
|
-
fn: async (
|
|
558
|
-
},
|
|
612
|
+
fn: async (p) => ({ result: W`python -c "${p.code}"` })
|
|
613
|
+
}, le = {
|
|
559
614
|
name: "read_webpage",
|
|
560
615
|
description: "Extract clean, structured content from a webpage. Use after web_search to read specific URLs",
|
|
561
616
|
args: {
|
|
562
617
|
url: { type: "string", description: "URL to extract content from", required: !0 },
|
|
563
618
|
focus: { type: "string", description: 'Optional: What aspect to focus on (e.g., "pricing", "features", "contact info")' }
|
|
564
619
|
},
|
|
565
|
-
fn: async (
|
|
566
|
-
const
|
|
567
|
-
throw new Error(`Failed to fetch: ${
|
|
568
|
-
}), e =
|
|
620
|
+
fn: async (p) => {
|
|
621
|
+
const r = await fetch(p.url, { headers: { "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)" } }).then((s) => s.text()).catch((s) => {
|
|
622
|
+
throw new Error(`Failed to fetch: ${s.message}`);
|
|
623
|
+
}), e = z.load(r);
|
|
569
624
|
e('script, style, nav, footer, header, aside, iframe, noscript, [role="navigation"], [role="banner"], .ad, .ads, .cookie, .popup').remove();
|
|
570
|
-
const
|
|
625
|
+
const t = {
|
|
571
626
|
title: e('meta[property="og:title"]').attr("content") || e("title").text() || "",
|
|
572
627
|
description: e('meta[name="description"]').attr("content") || e('meta[property="og:description"]').attr("content") || ""
|
|
573
628
|
};
|
|
574
|
-
let
|
|
629
|
+
let i = "";
|
|
575
630
|
const n = ["article", "main", '[role="main"]', ".content", ".post", ".entry", "body"];
|
|
576
|
-
for (const
|
|
577
|
-
const
|
|
578
|
-
if (
|
|
579
|
-
|
|
631
|
+
for (const s of n) {
|
|
632
|
+
const o = e(s).first();
|
|
633
|
+
if (o.length && o.text().trim().length > 200) {
|
|
634
|
+
i = o.text();
|
|
580
635
|
break;
|
|
581
636
|
}
|
|
582
637
|
}
|
|
583
|
-
return
|
|
638
|
+
return i || (i = e("body").text()), i = i.replace(/\s+/g, " ").trim().slice(0, 8e3), { url: p.url, title: t.title.trim(), description: t.description.trim(), content: i, focus: p.focus };
|
|
584
639
|
}
|
|
585
|
-
},
|
|
640
|
+
}, me = {
|
|
586
641
|
name: "web_search",
|
|
587
642
|
description: "Use duckduckgo (anonymous) to find find relevant online resources. Returns a list of URLs that works great with the `read_webpage` tool",
|
|
588
643
|
args: {
|
|
589
644
|
query: { type: "string", description: "Search string", required: !0 },
|
|
590
645
|
length: { type: "string", description: "Number of results to return", default: 5 }
|
|
591
646
|
},
|
|
592
|
-
fn: async (
|
|
593
|
-
const
|
|
647
|
+
fn: async (p) => {
|
|
648
|
+
const r = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(p.query)}`, {
|
|
594
649
|
headers: { "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)", "Accept-Language": "en-US,en;q=0.9" }
|
|
595
650
|
}).then((n) => n.text());
|
|
596
|
-
let e,
|
|
597
|
-
const
|
|
598
|
-
for (; (e =
|
|
651
|
+
let e, t = /<a .*?href="(.+?)".+?<\/a>/g;
|
|
652
|
+
const i = new M();
|
|
653
|
+
for (; (e = t.exec(r)) !== null; ) {
|
|
599
654
|
let n = /uddg=(.+)&?/.exec(decodeURIComponent(e[1]))?.[1];
|
|
600
|
-
if (n && (n = decodeURIComponent(n)), n &&
|
|
655
|
+
if (n && (n = decodeURIComponent(n)), n && i.add(n), i.size >= (p.length || 5)) break;
|
|
601
656
|
}
|
|
602
|
-
return
|
|
657
|
+
return i;
|
|
603
658
|
}
|
|
604
659
|
};
|
|
605
660
|
export {
|
|
606
|
-
|
|
607
|
-
|
|
608
|
-
|
|
609
|
-
|
|
610
|
-
|
|
611
|
-
|
|
612
|
-
|
|
661
|
+
oe as Ai,
|
|
662
|
+
I as Anthropic,
|
|
663
|
+
H as Audio,
|
|
664
|
+
F as CliTool,
|
|
665
|
+
ie as DateTimeTool,
|
|
666
|
+
ae as ExecTool,
|
|
667
|
+
ce as FetchTool,
|
|
613
668
|
G as JSTool,
|
|
614
|
-
|
|
615
|
-
|
|
616
|
-
|
|
617
|
-
|
|
618
|
-
|
|
619
|
-
|
|
669
|
+
S as LLMProvider,
|
|
670
|
+
k as OpenAi,
|
|
671
|
+
B as PythonTool,
|
|
672
|
+
le as ReadWebpageTool,
|
|
673
|
+
J as Vision,
|
|
674
|
+
me as WebSearchTool
|
|
620
675
|
};
|
|
621
676
|
//# sourceMappingURL=index.mjs.map
|