@ztimson/ai-utils 0.2.2 → 0.2.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ai.d.ts +1 -3
- package/dist/index.js +20 -11
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +218 -204
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -1,29 +1,30 @@
-import
-import {
-import {
+import * as T from "node:os";
+import { pipeline as j } from "@xenova/transformers";
+import { deepCopy as M, objectMap as b, JSONAttemptParse as _, findByProp as k, JSONSanitize as y, Http as A, consoleInterceptor as q, fn as P, ASet as O } from "@ztimson/utils";
+import { Anthropic as $ } from "@anthropic-ai/sdk";
 import { Ollama as E } from "ollama";
 import { OpenAI as v } from "openai";
-import { spawn as
-import
-import
-import { createWorker as
-import { $ as
-class
+import { spawn as R } from "node:child_process";
+import S from "node:fs/promises";
+import U from "node:path";
+import { createWorker as L } from "tesseract.js";
+import { $ as N, $Sync as D } from "@ztimson/node-utils";
+class x {
 }
-class
+class J extends x {
 constructor(t, e, n) {
-super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new
+super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new $({ apiKey: e });
 }
 client;
 toStandard(t) {
 for (let e = 0; e < t.length; e++) {
 const n = e;
-typeof t[n].content != "string" && (t[n].role == "assistant" ? t[n].content.filter((
-e++, t.splice(e, 0, { role: "tool", id:
-}) : t[n].role == "user" && t[n].content.filter((
-const c = t.find((f) => f.id ==
-c[
-}), t[n].content = t[n].content.filter((
+typeof t[n].content != "string" && (t[n].role == "assistant" ? t[n].content.filter((r) => r.type == "tool_use").forEach((r) => {
+e++, t.splice(e, 0, { role: "tool", id: r.id, name: r.name, args: r.input, timestamp: Date.now() });
+}) : t[n].role == "user" && t[n].content.filter((r) => r.type == "tool_result").forEach((r) => {
+const c = t.find((f) => f.id == r.tool_use_id);
+c[r.is_error ? "error" : "content"] = r.content;
+}), t[n].content = t[n].content.filter((r) => r.type == "text").map((r) => r.text).join(`
 
 `)), t[n].timestamp || (t[n].timestamp = Date.now());
 }
@@ -43,74 +44,79 @@ class D extends k {
 return t.map(({ timestamp: e, ...n }) => n);
 }
 ask(t, e = {}) {
-const n = new AbortController(),
+const n = new AbortController(), r = new Promise(async (c, f) => {
 let i = this.fromStandard([...e.history || [], { role: "user", content: t, timestamp: Date.now() }]);
-const
+const m = M(i);
 e.compress && (i = await this.ai.language.compressHistory(i, e.compress.max, e.compress.min, e));
-const
+const h = e.tools || this.ai.options.tools || [], a = {
 model: e.model || this.model,
 max_tokens: e.max_tokens || this.ai.options.max_tokens || 4096,
 system: e.system || this.ai.options.system || "",
 temperature: e.temperature || this.ai.options.temperature || 0.7,
-tools:
+tools: h.map((s) => ({
 name: s.name,
 description: s.description,
 input_schema: {
 type: "object",
-properties: s.args ?
-required: s.args ? Object.entries(s.args).filter((
+properties: s.args ? b(s.args, (o, u) => ({ ...u, required: void 0 })) : {},
+required: s.args ? Object.entries(s.args).filter((o) => o[1].required).map((o) => o[0]) : []
 },
 fn: void 0
 })),
 messages: i,
 stream: !!e.stream
 };
-let
+let l, p = !0;
 do {
-if (
-
+if (l = await this.client.messages.create(a).catch((o) => {
+throw o.message += `
 
-
-
+Messages:
+${JSON.stringify(i, null, 2)}`, o;
+}), e.stream) {
+p ? p = !1 : e.stream({ text: `
+
+` }), l.content = [];
+for await (const o of l) {
 if (n.signal.aborted) break;
-if (
-
-else if (
-if (
-const
-
-} else
-else if (
-const
-
-} else if (
+if (o.type === "content_block_start")
+o.content_block.type === "text" ? l.content.push({ type: "text", text: "" }) : o.content_block.type === "tool_use" && l.content.push({ type: "tool_use", id: o.content_block.id, name: o.content_block.name, input: "" });
+else if (o.type === "content_block_delta")
+if (o.delta.type === "text_delta") {
+const u = o.delta.text;
+l.content.at(-1).text += u, e.stream({ text: u });
+} else o.delta.type === "input_json_delta" && (l.content.at(-1).input += o.delta.partial_json);
+else if (o.type === "content_block_stop") {
+const u = l.content.at(-1);
+u.input != null && (u.input = u.input ? _(u.input, {}) : {});
+} else if (o.type === "message_stop")
 break;
 }
 }
-const s =
+const s = l.content.filter((o) => o.type === "tool_use");
 if (s.length && !n.signal.aborted) {
-i.push({ role: "assistant", content:
-const
-const
-if (!
+i.push({ role: "assistant", content: l.content }), m.push({ role: "assistant", content: l.content });
+const o = await Promise.all(s.map(async (u) => {
+const g = h.find(k("name", u.name));
+if (!g) return { tool_use_id: u.id, is_error: !0, content: "Tool not found" };
 try {
-const
-return { type: "tool_result", tool_use_id:
-} catch (
-return { type: "tool_result", tool_use_id:
+const w = await g.fn(u.input, this.ai);
+return { type: "tool_result", tool_use_id: u.id, content: y(w) };
+} catch (w) {
+return { type: "tool_result", tool_use_id: u.id, is_error: !0, content: w?.message || w?.toString() || "Unknown" };
 }
 }));
-i.push({ role: "user", content:
+i.push({ role: "user", content: o }), a.messages = i;
 }
-} while (!n.signal.aborted &&
-e.stream && e.stream({ done: !0 }), c(this.toStandard([...i, { role: "assistant", content:
+} while (!n.signal.aborted && l.content.some((s) => s.type === "tool_use"));
+e.stream && e.stream({ done: !0 }), c(this.toStandard([...i, { role: "assistant", content: l.content.filter((s) => s.type == "text").map((s) => s.text).join(`
 
 `) }]));
 });
-return Object.assign(
+return Object.assign(r, { abort: () => n.abort() });
 }
 }
-class H extends
+class H extends x {
 constructor(t, e, n) {
 super(), this.ai = t, this.host = e, this.model = n, this.client = new E({ host: e });
 }
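The rewritten Anthropic ask() above adds a streaming path: when options.stream is provided, the Messages API response is consumed as an event stream, the callback receives { text } deltas (plus a blank-line separator between tool-call rounds) followed by a final { done: true }, and the returned promise is still augmented with an abort() helper. Below is a minimal sketch of driving that surface through the exported Ai wrapper, assuming the LLM facade's ask() (not shown in this hunk) forwards these options to the configured provider; the option names are read from the bundled code, and the model id and env var name are placeholders.

import { Ai } from "@ztimson/ai-utils";

// Placeholder token/model; anthropic.token and anthropic.model are the option keys used by the bundle.
const ai = new Ai({ anthropic: { token: process.env.ANTHROPIC_API_KEY, model: "claude-3-5-sonnet-latest" } });

const req = ai.language.ask("Summarize this release in one paragraph", {
  stream: (chunk) => {
    if (chunk.text) process.stdout.write(chunk.text); // text deltas as they stream in
    if (chunk.done) process.stdout.write("\n");       // emitted once after the tool loop finishes
  }
});
setTimeout(() => req.abort(), 60_000);                // ask() returns the promise with abort() attached
const history = await req;                            // resolves to the normalized message history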
@@ -129,67 +135,72 @@ class H extends k {
 }
 fromStandard(t) {
 return t.map((e) => {
-const { timestamp: n, ...
-return e.role != "tool" ?
+const { timestamp: n, ...r } = e;
+return e.role != "tool" ? r : { role: "tool", tool_name: e.name, content: e.error || e.content };
 });
 }
 ask(t, e = {}) {
-const n = new AbortController(),
-let i = e.system || this.ai.options.system,
-
-const
+const n = new AbortController(), r = new Promise(async (c, f) => {
+let i = e.system || this.ai.options.system, m = this.fromStandard([...e.history || [], { role: "user", content: t, timestamp: Date.now() }]);
+m[0].roll == "system" && (i ? m.shift() : i = m.shift()), e.compress && (m = await this.ai.language.compressHistory(m, e.compress.max, e.compress.min)), e.system && m.unshift({ role: "system", content: i });
+const h = e.tools || this.ai.options.tools || [], a = {
 model: e.model || this.model,
-messages:
+messages: m,
 stream: !!e.stream,
 signal: n.signal,
 options: {
 temperature: e.temperature || this.ai.options.temperature || 0.7,
 num_predict: e.max_tokens || this.ai.options.max_tokens || 4096
 },
-tools:
+tools: h.map((s) => ({
 type: "function",
 function: {
 name: s.name,
 description: s.description,
 parameters: {
 type: "object",
-properties: s.args ?
-required: s.args ? Object.entries(s.args).filter((
+properties: s.args ? b(s.args, (o, u) => ({ ...u, required: void 0 })) : {},
+required: s.args ? Object.entries(s.args).filter((o) => o[1].required).map((o) => o[0]) : []
 }
 }
 }))
 };
-let
+let l, p = !0;
 do {
-if (
-
+if (l = await this.client.chat(a).catch((s) => {
+throw s.message += `
 
-
-
-
+Messages:
+${JSON.stringify(m, null, 2)}`, s;
+}), e.stream) {
+p ? p = !1 : e.stream({ text: `
+
+` }), l.message = { role: "assistant", content: "", tool_calls: [] };
+for await (const s of l)
+if (n.signal.aborted || (s.message?.content && (l.message.content += s.message.content, e.stream({ text: s.message.content })), s.message?.tool_calls && (l.message.tool_calls = s.message.tool_calls), s.done)) break;
 }
-if (
-
-const s = await Promise.all(
-const
-if (!
-const
+if (l.message?.tool_calls?.length && !n.signal.aborted) {
+m.push(l.message);
+const s = await Promise.all(l.message.tool_calls.map(async (o) => {
+const u = h.find(k("name", o.function.name));
+if (!u) return { role: "tool", tool_name: o.function.name, content: '{"error": "Tool not found"}' };
+const g = typeof o.function.arguments == "string" ? _(o.function.arguments, {}) : o.function.arguments;
 try {
-const
-return { role: "tool", tool_name:
-} catch (
-return { role: "tool", tool_name:
+const w = await u.fn(g, this.ai);
+return { role: "tool", tool_name: o.function.name, args: g, content: y(w) };
+} catch (w) {
+return { role: "tool", tool_name: o.function.name, args: g, content: y({ error: w?.message || w?.toString() || "Unknown" }) };
 }
 }));
-
+m.push(...s), a.messages = m;
 }
-} while (!n.signal.aborted &&
-e.stream && e.stream({ done: !0 }), c(this.toStandard([...
+} while (!n.signal.aborted && l.message?.tool_calls?.length);
+e.stream && e.stream({ done: !0 }), c(this.toStandard([...m, { role: "assistant", content: l.message?.content }]));
 });
-return Object.assign(
+return Object.assign(r, { abort: () => n.abort() });
 }
 }
-class
+class z extends x {
 constructor(t, e, n) {
 super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new v({ apiKey: e });
 }
@@ -198,17 +209,17 @@ class N extends k {
 for (let e = 0; e < t.length; e++) {
 const n = t[e];
 if (n.role === "assistant" && n.tool_calls) {
-const
+const r = n.tool_calls.map((c) => ({
 role: "tool",
 id: c.id,
 name: c.function.name,
-args:
+args: _(c.function.arguments, {}),
 timestamp: n.timestamp
 }));
-t.splice(e, 1, ...
+t.splice(e, 1, ...r), e += r.length - 1;
 } else if (n.role === "tool" && n.content) {
-const
-
+const r = t.find((c) => n.tool_call_id == c.id);
+r && (n.content.includes('"error":') ? r.error = n.content : r.content = n.content), t.splice(e, 1), e--;
 }
 t[e]?.timestamp || (t[e].timestamp = Date.now());
 }
@@ -229,70 +240,75 @@ class N extends k {
 content: n.error || n.content
 });
 else {
-const { timestamp:
+const { timestamp: r, ...c } = n;
 e.push(c);
 }
 return e;
 }, []);
 }
 ask(t, e = {}) {
-const n = new AbortController(),
+const n = new AbortController(), r = new Promise(async (c, f) => {
 let i = this.fromStandard([...e.history || [], { role: "user", content: t, timestamp: Date.now() }]);
 e.compress && (i = await this.ai.language.compressHistory(i, e.compress.max, e.compress.min, e));
-const
+const m = e.tools || this.ai.options.tools || [], h = {
 model: e.model || this.model,
 messages: i,
 stream: !!e.stream,
 max_tokens: e.max_tokens || this.ai.options.max_tokens || 4096,
 temperature: e.temperature || this.ai.options.temperature || 0.7,
-tools:
+tools: m.map((p) => ({
 type: "function",
 function: {
-name:
-description:
+name: p.name,
+description: p.description,
 parameters: {
 type: "object",
-properties:
-required:
+properties: p.args ? b(p.args, (s, o) => ({ ...o, required: void 0 })) : {},
+required: p.args ? Object.entries(p.args).filter((s) => s[1].required).map((s) => s[0]) : []
 }
 }
 }))
 };
-let
+let a, l = !0;
 do {
-if (
-
+if (a = await this.client.chat.completions.create(h).catch((s) => {
+throw s.message += `
 
-
-
+Messages:
+${JSON.stringify(i, null, 2)}`, s;
+}), e.stream) {
+l ? l = !1 : e.stream({ text: `
+
+` }), a.choices = [{ message: { content: "", tool_calls: [] } }];
+for await (const s of a) {
 if (n.signal.aborted) break;
-s.choices[0].delta.content && (
+s.choices[0].delta.content && (a.choices[0].message.content += s.choices[0].delta.content, e.stream({ text: s.choices[0].delta.content })), s.choices[0].delta.tool_calls && (a.choices[0].message.tool_calls = s.choices[0].delta.tool_calls);
 }
 }
-const
-if (
-i.push(
-const s = await Promise.all(
-const
-if (!
+const p = a.choices[0].message.tool_calls || [];
+if (p.length && !n.signal.aborted) {
+i.push(a.choices[0].message);
+const s = await Promise.all(p.map(async (o) => {
+const u = m?.find(k("name", o.function.name));
+if (!u) return { role: "tool", tool_call_id: o.id, content: '{"error": "Tool not found"}' };
 try {
-const
-return { role: "tool", tool_call_id:
-} catch (
-return { role: "tool", tool_call_id:
+const g = _(o.function.arguments, {}), w = await u.fn(g, this.ai);
+return { role: "tool", tool_call_id: o.id, content: y(w) };
+} catch (g) {
+return { role: "tool", tool_call_id: o.id, content: y({ error: g?.message || g?.toString() || "Unknown" }) };
 }
 }));
-i.push(...s),
+i.push(...s), h.messages = i;
 }
-} while (!n.signal.aborted &&
-e.stream && e.stream({ done: !0 }), c(this.toStandard([...i, { role: "assistant", content:
+} while (!n.signal.aborted && a.choices?.[0]?.message?.tool_calls?.length);
+e.stream && e.stream({ done: !0 }), c(this.toStandard([...i, { role: "assistant", content: a.choices[0].message.content || "" }]));
 });
-return Object.assign(
+return Object.assign(r, { abort: () => n.abort() });
 }
 }
-class
+class I {
 constructor(t) {
-this.ai = t, this.embedModel =
+this.ai = t, this.embedModel = j("feature-extraction", "Xenova/all-MiniLM-L6-v2"), t.options.anthropic?.token && (this.providers.anthropic = new J(this.ai, t.options.anthropic.token, t.options.anthropic.model)), t.options.ollama?.host && (this.providers.ollama = new H(this.ai, t.options.ollama.host, t.options.ollama.model)), t.options.openAi?.token && (this.providers.openAi = new z(this.ai, t.options.openAi.token, t.options.openAi.model));
 }
 embedModel;
 providers = {};
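The LLM constructor above now registers providers conditionally: anthropic and openAi only when a token is configured, ollama only when a host is configured. A short sketch of the matching Ai options follows; the keys are taken from the bundled constructor, and the model names are placeholders.

import { Ai } from "@ztimson/ai-utils";

const ai = new Ai({
  anthropic: { token: process.env.ANTHROPIC_API_KEY, model: "claude-3-5-sonnet-latest" }, // placeholder model id
  ollama: { host: "http://localhost:11434", model: "llama3.1" },                          // placeholder model tag
  openAi: { token: process.env.OPENAI_API_KEY, model: "gpt-4o-mini" }                     // placeholder model id
});
console.log(Object.keys(ai.language.providers)); // ["anthropic", "ollama", "openAi"]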
@@ -315,56 +331,56 @@ class z {
 * @param {LLMRequest} options LLM options
 * @returns {Promise<LLMMessage[]>} New chat history will summary at index 0
 */
-async compressHistory(t, e, n,
+async compressHistory(t, e, n, r) {
 if (this.estimateTokens(t) < e) return t;
 let c = 0, f = 0;
-for (let
-if (f += this.estimateTokens(
+for (let a of t.toReversed())
+if (f += this.estimateTokens(a.content), f < n) c++;
 else break;
 if (t.length <= c) return t;
-const i = c == 0 ? [] : t.slice(-c),
-return [{ role: "assistant", content: `Conversation Summary: ${await this.summarize(
+const i = c == 0 ? [] : t.slice(-c), m = (c == 0 ? t : t.slice(0, -c)).filter((a) => a.role === "assistant" || a.role === "user");
+return [{ role: "assistant", content: `Conversation Summary: ${await this.summarize(m.map((a) => `${a.role}: ${a.content}`).join(`
 
-`), 250,
+`), 250, r)}`, timestamp: Date.now() }, ...i];
 }
 cosineSimilarity(t, e) {
 if (t.length !== e.length) throw new Error("Vectors must be same length");
-let n = 0,
+let n = 0, r = 0, c = 0;
 for (let i = 0; i < t.length; i++)
-n += t[i] * e[i],
-const f = Math.sqrt(
+n += t[i] * e[i], r += t[i] * t[i], c += e[i] * e[i];
+const f = Math.sqrt(r) * Math.sqrt(c);
 return f === 0 ? 0 : n / f;
 }
 embedding(t, e = 500, n = 50) {
-const
-const
-if (typeof
-const
-return `${
-}), c = async (
-const
-return Array.from(
-}, i = (typeof t == "object" ?
-`)).flatMap((
-`]),
-let
-for (;
-let
-for (;
-const
-if (this.estimateTokens(
-`)) > e &&
-
+const r = (a, l = "") => a == null ? [] : Object.entries(a).flatMap(([p, s]) => {
+const o = l ? `${l}${isNaN(+p) ? `.${p}` : `[${p}]`}` : p;
+if (typeof s == "object" && s !== null && !Array.isArray(s)) return r(s, o);
+const u = Array.isArray(s) ? s.join(", ") : String(s);
+return `${o}: ${u}`;
+}), c = async (a) => {
+const p = await (await this.embedModel)(a, { pooling: "mean", normalize: !0 });
+return Array.from(p.data);
+}, i = (typeof t == "object" ? r(t) : t.split(`
+`)).flatMap((a) => [...a.split(/\s+/).filter((l) => l.trim()), `
+`]), m = [];
+let h = 0;
+for (; h < i.length; ) {
+let a = h, l = "";
+for (; a < i.length; ) {
+const s = i[a], o = l + (l ? " " : "") + s;
+if (this.estimateTokens(o.replace(/\s*\n\s*/g, `
+`)) > e && l) break;
+l = o, a++;
 }
-const
+const p = l.replace(/\s*\n\s*/g, `
 `).trim();
-
+p && m.push(p), h = a - n, h <= a - i.length + a && (h = a);
 }
-return Promise.all(
-index:
-embedding: await c(
-text:
-tokens: this.estimateTokens(
+return Promise.all(m.map(async (a, l) => ({
+index: l,
+embedding: await c(a),
+text: a,
+tokens: this.estimateTokens(a)
 })));
 }
 /**
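embedding() above splits its input into chunks of roughly 500 estimated tokens with a 50-token overlap (the two optional parameters) and embeds each chunk with the mean-pooled Xenova/all-MiniLM-L6-v2 pipeline, returning { index, embedding, text, tokens } records that pair with cosineSimilarity(). A hedged local semantic-search sketch follows; the parameter meanings are inferred from the chunking loop rather than documented.

import { Ai } from "@ztimson/ai-utils";

const ai = new Ai({}); // embeddings run locally through @xenova/transformers, no provider required
const doc = "Refunds are issued within 30 days of purchase. Standard shipping takes 5 business days.";

const chunks = await ai.language.embedding(doc);                 // [{ index, embedding, text, tokens }, ...]
const [query] = await ai.language.embedding("What is the refund window?");
const ranked = chunks
  .map((c) => ({ ...c, score: ai.language.cosineSimilarity(query.embedding, c.embedding) }))
  .sort((a, b) => b.score - a.score);
console.log(ranked[0].text, ranked[0].score.toFixed(3));         // best-matching chunk and its similarity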
@@ -384,7 +400,7 @@ class z {
 */
 fuzzyMatch(t, ...e) {
 if (e.length < 2) throw new Error("Requires at least 2 strings to compare");
-const n = (f, i = 10) => f.toLowerCase().split("").map((
+const n = (f, i = 10) => f.toLowerCase().split("").map((m, h) => m.charCodeAt(0) * (h + 1) % i / i).slice(0, i), r = n(t), c = e.map((f) => n(f)).map((f) => this.cosineSimilarity(r, f));
 return { avg: c.reduce((f, i) => f + i, 0) / c.length, max: Math.max(...c), similarities: c };
 }
 /**
@@ -398,7 +414,7 @@ class z {
 system: "Respond using a JSON blob",
 ...e
 });
-return n?.[0]?.content ?
+return n?.[0]?.content ? _(new RegExp("{[sS]*}").exec(n[0].content), {}) : {};
 }
 /**
 * Create a summary of some text
@@ -408,10 +424,10 @@ class z {
 * @returns {Promise<string>} Summary
 */
 summarize(t, e, n) {
-return this.ask(t, { system: `Generate a brief summary <= ${e} tokens. Output nothing else`, temperature: 0.3, ...n }).then((
+return this.ask(t, { system: `Generate a brief summary <= ${e} tokens. Output nothing else`, temperature: 0.3, ...n }).then((r) => r.pop()?.content || null);
 }
 }
-class
+class W {
 constructor(t) {
 this.ai = t, t.options.whisper?.binary && (this.whisperModel = t.options.whisper?.model.endsWith(".bin") ? t.options.whisper?.model : t.options.whisper?.model + ".bin", this.downloadAsrModel());
 }
@@ -429,10 +445,10 @@ class I {
 };
 return { response: new Promise((c, f) => {
 this.downloadAsrModel(e).then((i) => {
-let
-const
-n = () =>
-
+let m = "";
+const h = R(this.ai.options.whisper?.binary, ["-nt", "-np", "-m", i, "-f", t], { stdio: ["ignore", "pipe", "ignore"] });
+n = () => h.kill("SIGTERM"), h.on("error", (a) => f(a)), h.stdout.on("data", (a) => m += a.toString()), h.on("close", (a) => {
+a === 0 ? c(m.trim() || null) : f(new Error(`Exit code ${a}`));
 });
 });
 }), abort: n };
@@ -446,11 +462,11 @@ class I {
 async downloadAsrModel(t = this.whisperModel) {
 if (!this.ai.options.whisper?.binary) throw new Error("Whisper not configured");
 t.endsWith(".bin") || (t += ".bin");
-const e =
-return await
+const e = U.join(this.ai.options.path, t);
+return await S.stat(e).then(() => !0).catch(() => !1) ? e : this.downloads[t] ? this.downloads[t] : (this.downloads[t] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${t}`).then((n) => n.arrayBuffer()).then((n) => Buffer.from(n)).then(async (n) => (await S.writeFile(e, n), delete this.downloads[t], e)), this.downloads[t]);
 }
 }
-class
+class F {
 constructor(t) {
 this.ai = t;
 }
@@ -466,19 +482,17 @@ class J {
 e?.terminate();
 },
 response: new Promise(async (n) => {
-e = await
-const { data:
-await e.terminate(), n(
+e = await L("eng", 1, { cachePath: this.ai.options.path });
+const { data: r } = await e.recognize(t);
+await e.terminate(), n(r.text.trim() || null);
 })
 };
 }
 }
-class
+class oe {
 constructor(t) {
-this.options = t, process.env.TRANSFORMERS_CACHE = t.path, this.audio = new
+this.options = t, t.path || (t.path = T.tmpdir()), process.env.TRANSFORMERS_CACHE = t.path, this.audio = new W(this), this.language = new I(this), this.vision = new F(this);
 }
-downloads = {};
-whisperModel;
 /** Audio processing AI */
 audio;
 /** Language processing AI */
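The Ai constructor above now defaults options.path to os.tmpdir(); the same directory is used for TRANSFORMERS_CACHE, the whisper.cpp model downloads in downloadAsrModel(), and the tesseract.js cachePath. A small configuration sketch; the whisper binary location is a placeholder, while "ggml-base.en" matches the file naming of the huggingface whisper.cpp repository the bundle downloads from.

import { Ai } from "@ztimson/ai-utils";

const ai = new Ai({
  path: "/var/cache/ai-utils",                                               // omit to fall back to os.tmpdir()
  whisper: { binary: "/usr/local/bin/whisper-cli", model: "ggml-base.en" }   // ".bin" is appended automatically
});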
@@ -486,38 +500,38 @@ class ne {
 /** Vision processing AI */
 vision;
 }
-const
+const G = {
 name: "cli",
 description: "Use the command line interface, returns any output",
 args: { command: { type: "string", description: "Command to run", required: !0 } },
-fn: (
-},
+fn: (d) => N`${d.command}`
+}, re = {
 name: "get_datetime",
 description: "Get current date and time",
 args: {},
 fn: async () => (/* @__PURE__ */ new Date()).toISOString()
-},
+}, ae = {
 name: "exec",
 description: "Run code/scripts",
 args: {
 language: { type: "string", description: "Execution language", enum: ["cli", "node", "python"], required: !0 },
 code: { type: "string", description: "Code to execute", required: !0 }
 },
-fn: async (
+fn: async (d, t) => {
 try {
-switch (
+switch (d.type) {
 case "bash":
-return await
+return await G.fn({ command: d.code }, t);
 case "node":
-return await
+return await C.fn({ code: d.code }, t);
 case "python":
-return await
+return await B.fn({ code: d.code }, t);
 }
 } catch (e) {
 return { error: e?.message || e.toString() };
 }
 }
-},
+}, ie = {
 name: "fetch",
 description: "Make HTTP request to URL",
 args: {
@@ -526,54 +540,54 @@ const W = {
 headers: { type: "object", description: "HTTP headers to send", default: {} },
 body: { type: "object", description: "HTTP body to send" }
 },
-fn: (
-},
+fn: (d) => new A({ url: d.url, headers: d.headers }).request({ method: d.method || "GET", body: d.body })
+}, C = {
 name: "exec_javascript",
 description: "Execute commonjs javascript",
 args: {
 code: { type: "string", description: "CommonJS javascript", required: !0 }
 },
-fn: async (
-const t =
+fn: async (d) => {
+const t = q(null), e = await P({ console: t }, d.code, !0).catch((n) => t.output.error.push(n));
 return { ...t.output, return: e, stdout: void 0, stderr: void 0 };
 }
-},
+}, B = {
 name: "exec_javascript",
 description: "Execute commonjs javascript",
 args: {
 code: { type: "string", description: "CommonJS javascript", required: !0 }
 },
-fn: async (
-},
+fn: async (d) => ({ result: D`python -c "${d.code}"` })
+}, ce = {
 name: "search",
 description: "Use a search engine to find relevant URLs, should be changed with fetch to scrape sources",
 args: {
 query: { type: "string", description: "Search string", required: !0 },
 length: { type: "string", description: "Number of results to return", default: 5 }
 },
-fn: async (
-const t = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(
+fn: async (d) => {
+const t = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(d.query)}`, {
 headers: { "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)", "Accept-Language": "en-US,en;q=0.9" }
 }).then((c) => c.text());
 let e, n = /<a .*?href="(.+?)".+?<\/a>/g;
-const
+const r = new O();
 for (; (e = n.exec(t)) !== null; ) {
 let c = /uddg=(.+)&?/.exec(decodeURIComponent(e[1]))?.[1];
-if (c && (c = decodeURIComponent(c)), c &&
+if (c && (c = decodeURIComponent(c)), c && r.add(c), r.size >= (d.length || 5)) break;
 }
-return
+return r;
 }
 };
 export {
-
-
-
-
-
-
-
-
-
-
+oe as Ai,
+J as Anthropic,
+G as CliTool,
+re as DateTimeTool,
+ae as ExecTool,
+ie as FetchTool,
+C as JSTool,
+I as LLM,
+B as PythonTool,
+ce as SearchTool
 };
 //# sourceMappingURL=index.mjs.map
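The export list now includes PythonTool alongside the existing built-in tools. Because ask() falls back to ai.options.tools when no per-request tools are passed, the built-ins can be registered globally. A hedged sketch follows; the model tag is a placeholder, and the tools behave as implemented in the hunks above.

import { Ai, CliTool, DateTimeTool, FetchTool, PythonTool, SearchTool } from "@ztimson/ai-utils";

const ai = new Ai({
  ollama: { host: "http://localhost:11434", model: "llama3.1" },      // placeholder model tag
  tools: [CliTool, DateTimeTool, FetchTool, PythonTool, SearchTool]   // offered to the model on every ask()
});

const history = await ai.language.ask("What time is it, and what files are in the current directory?");
console.log(history.at(-1).content); // final assistant message after any tool calls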