@ztimson/ai-utils 0.6.10 → 0.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/asr.js +8 -13
- package/dist/asr.js.map +1 -1
- package/dist/asr.mjs +64 -61
- package/dist/asr.mjs.map +1 -1
- package/dist/audio.d.ts +1 -1
- package/dist/index.js +20 -18
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +216 -195
- package/dist/index.mjs.map +1 -1
- package/dist/llm.d.ts +8 -4
- package/package.json +1 -1
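The biggest functional changes land in the bundled LLM and audio layers (`dist/llm.d.ts` and `dist/index.mjs`): per the updated JSDoc, `json()` takes the target schema as its own argument, `ask()` resolves with the assistant's final reply while splicing the passed `history` array in place, and ASR gains an LLM-assisted speaker-identification pass (all visible in the `index.mjs` diff below). A minimal usage sketch of that surface; the model key "claude", the environment variables, and the prompt strings are placeholders, not values shipped by the package:

import { Ai } from "@ztimson/ai-utils";

// Placeholder configuration: one Anthropic-backed model registered under the key "claude".
const ai = new Ai({
  llm: { models: { claude: { proto: "anthropic", token: process.env.ANTHROPIC_KEY } } }
});

const history = [];
// ask() resolves with the last assistant message and rewrites `history` in place.
const reply = await ai.language.ask("What does this package do?", { history });

// The JSON schema is passed as a separate argument; the fenced JSON reply is parsed for you.
const data = await ai.language.json(reply, "{summary: string}", { history });
console.log(data);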
package/dist/index.mjs
CHANGED
@@ -1,6 +1,6 @@
-import * as
-import { objectMap as _, JSONAttemptParse as g, findByProp as k, JSONSanitize as b, clean as
-import { Anthropic as
+import * as $ from "node:os";
+import { objectMap as _, JSONAttemptParse as g, findByProp as k, JSONSanitize as b, clean as M, Http as E, consoleInterceptor as P, fn as A, ASet as O } from "@ztimson/utils";
+import { Anthropic as v } from "@anthropic-ai/sdk";
 import { OpenAI as U } from "openai";
 import { Worker as x } from "worker_threads";
 import { fileURLToPath as j } from "url";
@@ -13,47 +13,47 @@ import { $ as N, $Sync as C } from "@ztimson/node-utils";
 class q {
 }
 class W extends q {
-constructor(
-super(), this.ai =
+constructor(s, e, t) {
+super(), this.ai = s, this.apiToken = e, this.model = t, this.client = new v({ apiKey: e });
 }
 client;
-toStandard(
+toStandard(s) {
 const e = Date.now(), t = [];
-for (let
-if (typeof
-t.push({ timestamp: e, ...
+for (let l of s)
+if (typeof l.content == "string")
+t.push({ timestamp: e, ...l });
 else {
-const
+const r = l.content?.filter((n) => n.type == "text").map((n) => n.text).join(`
 
 `);
-
+r && t.push({ timestamp: e, role: l.role, content: r }), l.content.forEach((n) => {
 if (n.type == "tool_use")
 t.push({ timestamp: e, role: "tool", id: n.id, name: n.name, args: n.input, content: void 0 });
 else if (n.type == "tool_result") {
-const i = t.findLast((
+const i = t.findLast((o) => o.id == n.tool_use_id);
 i && (i[n.is_error ? "error" : "content"] = n.content);
 }
 });
 }
 return t;
 }
-fromStandard(
-for (let e = 0; e <
-if (
-const t =
-
+fromStandard(s) {
+for (let e = 0; e < s.length; e++)
+if (s[e].role == "tool") {
+const t = s[e];
+s.splice(
 e,
 1,
 { role: "assistant", content: [{ type: "tool_use", id: t.id, name: t.name, input: t.args }] },
 { role: "user", content: [{ type: "tool_result", tool_use_id: t.id, is_error: !!t.error, content: t.error || t.content }] }
 ), e++;
 }
-return
+return s.map(({ timestamp: e, ...t }) => t);
 }
-ask(
+ask(s, e = {}) {
 const t = new AbortController();
-return Object.assign(new Promise(async (
-let
+return Object.assign(new Promise(async (l) => {
+let r = this.fromStandard([...e.history || [], { role: "user", content: s, timestamp: Date.now() }]);
 const n = e.tools || this.ai.options.llm?.tools || [], i = {
 model: e.model || this.model,
 max_tokens: e.max_tokens || this.ai.options.llm?.max_tokens || 4096,
@@ -64,93 +64,93 @@ class W extends q {
 description: d.description,
 input_schema: {
 type: "object",
-properties: d.args ? _(d.args, (c,
+properties: d.args ? _(d.args, (c, m) => ({ ...m, required: void 0 })) : {},
 required: d.args ? Object.entries(d.args).filter((c) => c[1].required).map((c) => c[0]) : []
 },
 fn: void 0
 })),
-messages:
+messages: r,
 stream: !!e.stream
 };
-let
+let o, a = !0;
 do {
-if (
+if (o = await this.client.messages.create(i).catch((c) => {
 throw c.message += `
 
 Messages:
-${JSON.stringify(
+${JSON.stringify(r, null, 2)}`, c;
 }), e.stream) {
 a ? a = !1 : e.stream({ text: `
 
-` }),
-for await (const c of
+` }), o.content = [];
+for await (const c of o) {
 if (t.signal.aborted) break;
 if (c.type === "content_block_start")
-c.content_block.type === "text" ?
+c.content_block.type === "text" ? o.content.push({ type: "text", text: "" }) : c.content_block.type === "tool_use" && o.content.push({ type: "tool_use", id: c.content_block.id, name: c.content_block.name, input: "" });
 else if (c.type === "content_block_delta")
 if (c.delta.type === "text_delta") {
-const
-
-} else c.delta.type === "input_json_delta" && (
+const m = c.delta.text;
+o.content.at(-1).text += m, e.stream({ text: m });
+} else c.delta.type === "input_json_delta" && (o.content.at(-1).input += c.delta.partial_json);
 else if (c.type === "content_block_stop") {
-const
-
+const m = o.content.at(-1);
+m.input != null && (m.input = m.input ? g(m.input, {}) : {});
 } else if (c.type === "message_stop")
 break;
 }
 }
-const d =
+const d = o.content.filter((c) => c.type === "tool_use");
 if (d.length && !t.signal.aborted) {
-
-const c = await Promise.all(d.map(async (
-const p = n.find(k("name",
-if (e.stream && e.stream({ tool:
+r.push({ role: "assistant", content: o.content });
+const c = await Promise.all(d.map(async (m) => {
+const p = n.find(k("name", m.name));
+if (e.stream && e.stream({ tool: m.name }), !p) return { tool_use_id: m.id, is_error: !0, content: "Tool not found" };
 try {
-const u = await p.fn(
-return { type: "tool_result", tool_use_id:
+const u = await p.fn(m.input, e?.stream, this.ai);
+return { type: "tool_result", tool_use_id: m.id, content: b(u) };
 } catch (u) {
-return { type: "tool_result", tool_use_id:
+return { type: "tool_result", tool_use_id: m.id, is_error: !0, content: u?.message || u?.toString() || "Unknown" };
 }
 }));
-
+r.push({ role: "user", content: c }), i.messages = r;
 }
-} while (!t.signal.aborted &&
-
+} while (!t.signal.aborted && o.content.some((d) => d.type === "tool_use"));
+r.push({ role: "assistant", content: o.content.filter((d) => d.type == "text").map((d) => d.text).join(`
 
-`) }),
+`) }), r = this.toStandard(r), e.stream && e.stream({ done: !0 }), e.history && e.history.splice(0, e.history.length, ...r), l(r.at(-1)?.content);
 }), { abort: () => t.abort() });
 }
 }
 class w extends q {
-constructor(
-super(), this.ai =
+constructor(s, e, t, l) {
+super(), this.ai = s, this.host = e, this.token = t, this.model = l, this.client = new U(M({
 baseURL: e,
 apiKey: t
 }));
 }
 client;
-toStandard(
-for (let e = 0; e <
-const t =
+toStandard(s) {
+for (let e = 0; e < s.length; e++) {
+const t = s[e];
 if (t.role === "assistant" && t.tool_calls) {
-const
+const l = t.tool_calls.map((r) => ({
 role: "tool",
-id:
-name:
-args: g(
+id: r.id,
+name: r.function.name,
+args: g(r.function.arguments, {}),
 timestamp: t.timestamp
 }));
-
+s.splice(e, 1, ...l), e += l.length - 1;
 } else if (t.role === "tool" && t.content) {
-const
-
+const l = s.find((r) => t.tool_call_id == r.id);
+l && (t.content.includes('"error":') ? l.error = t.content : l.content = t.content), s.splice(e, 1), e--;
 }
-
+s[e]?.timestamp || (s[e].timestamp = Date.now());
 }
-return
+return s;
 }
-fromStandard(
-return
+fromStandard(s) {
+return s.reduce((e, t) => {
 if (t.role === "tool")
 e.push({
 role: "assistant",
@@ -164,18 +164,18 @@ class w extends q {
 content: t.error || t.content
 });
 else {
-const { timestamp:
-e.push(
+const { timestamp: l, ...r } = t;
+e.push(r);
 }
 return e;
 }, []);
 }
-ask(
+ask(s, e = {}) {
 const t = new AbortController();
-return Object.assign(new Promise(async (
+return Object.assign(new Promise(async (l, r) => {
 e.system && e.history?.[0]?.role != "system" && e.history?.splice(0, 0, { role: "system", content: e.system, timestamp: Date.now() });
-let n = this.fromStandard([...e.history || [], { role: "user", content:
-const i = e.tools || this.ai.options.llm?.tools || [],
+let n = this.fromStandard([...e.history || [], { role: "user", content: s, timestamp: Date.now() }]);
+const i = e.tools || this.ai.options.llm?.tools || [], o = {
 model: e.model || this.model,
 messages: n,
 stream: !!e.stream,
@@ -188,32 +188,32 @@ class w extends q {
 description: c.description,
 parameters: {
 type: "object",
-properties: c.args ? _(c.args, (
-required: c.args ? Object.entries(c.args).filter((
+properties: c.args ? _(c.args, (m, p) => ({ ...p, required: void 0 })) : {},
+required: c.args ? Object.entries(c.args).filter((m) => m[1].required).map((m) => m[0]) : []
 }
 }
 }))
 };
 let a, d = !0;
 do {
-if (a = await this.client.chat.completions.create(
-throw
+if (a = await this.client.chat.completions.create(o).catch((m) => {
+throw m.message += `
 
 Messages:
-${JSON.stringify(n, null, 2)}`,
+${JSON.stringify(n, null, 2)}`, m;
 }), e.stream) {
 d ? d = !1 : e.stream({ text: `
 
 ` }), a.choices = [{ message: { content: "", tool_calls: [] } }];
-for await (const
+for await (const m of a) {
 if (t.signal.aborted) break;
-
+m.choices[0].delta.content && (a.choices[0].message.content += m.choices[0].delta.content, e.stream({ text: m.choices[0].delta.content })), m.choices[0].delta.tool_calls && (a.choices[0].message.tool_calls = m.choices[0].delta.tool_calls);
 }
 }
 const c = a.choices[0].message.tool_calls || [];
 if (c.length && !t.signal.aborted) {
 n.push(a.choices[0].message);
-const
+const m = await Promise.all(c.map(async (p) => {
 const u = i?.find(k("name", p.function.name));
 if (e.stream && e.stream({ tool: p.function.name }), !u) return { role: "tool", tool_call_id: p.id, content: '{"error": "Tool not found"}' };
 try {
@@ -223,46 +223,46 @@ ${JSON.stringify(n, null, 2)}`, l;
 return { role: "tool", tool_call_id: p.id, content: b({ error: f?.message || f?.toString() || "Unknown" }) };
 }
 }));
-n.push(...
+n.push(...m), o.messages = n;
 }
 } while (!t.signal.aborted && a.choices?.[0]?.message?.tool_calls?.length);
-n.push({ role: "assistant", content: a.choices[0].message.content || "" }), n = this.toStandard(n), e.stream && e.stream({ done: !0 }), e.history && e.history.splice(0, e.history.length, ...n),
+n.push({ role: "assistant", content: a.choices[0].message.content || "" }), n = this.toStandard(n), e.stream && e.stream({ done: !0 }), e.history && e.history.splice(0, e.history.length, ...n), l(n.at(-1)?.content);
 }), { abort: () => t.abort() });
 }
 }
 class z {
-constructor(
-this.ai =
+constructor(s) {
+this.ai = s, s.options.llm?.models && Object.entries(s.options.llm.models).forEach(([e, t]) => {
 this.defaultModel || (this.defaultModel = e), t.proto == "anthropic" ? this.models[e] = new W(this.ai, t.token, e) : t.proto == "ollama" ? this.models[e] = new w(this.ai, t.host, "not-needed", e) : t.proto == "openai" && (this.models[e] = new w(this.ai, t.host || null, t.token, e));
 });
 }
-models = {};
 defaultModel;
+models = {};
 /**
 * Chat with LLM
 * @param {string} message Question
 * @param {LLMRequest} options Configuration options and chat history
 * @returns {{abort: () => void, response: Promise<string>}} Function to abort response and chat history
 */
-ask(
+ask(s, e = {}) {
 const t = e.model || this.defaultModel;
 if (!this.models[t]) throw new Error(`Model does not exist: ${t}`);
-let
+let l = () => {
 };
-return Object.assign(new Promise(async (
+return Object.assign(new Promise(async (r) => {
 if (e.history || (e.history = []), e.memory) {
 e.system = (e.system || "") + `
 You have passive persistent memory never make any mention of your memory capabilities and what you can/cannot remember
 `;
 const i = async (a, d, c = 50) => {
-const [
+const [m, p] = await Promise.all([
 d ? this.embedding(d) : Promise.resolve(null),
 a ? this.embedding(a) : Promise.resolve(null)
 ]);
-return (e.memory || []).map((u) => ({ ...u, score:
-},
-
-` +
+return (e.memory || []).map((u) => ({ ...u, score: m ? this.cosineSimilarity(u.embeddings[0], m[0].embedding) : 1 })).filter((u) => u.score >= 0.8).map((u) => ({ ...u, score: p ? this.cosineSimilarity(u.embeddings[1], p[0].embedding) : u.score })).filter((u) => u.score >= 0.2).toSorted((u, f) => u.score - f.score).slice(0, c);
+}, o = await i(s);
+o.length && e.history.push({ role: "assistant", content: `Things I remembered:
+` + o.map((a) => `${a.owner}: ${a.fact}`).join(`
 `) }), e.tools = [...e.tools || [], {
 name: "read_memory",
 description: "Check your long-term memory for more information",
@@ -277,9 +277,9 @@ You have passive persistent memory never make any mention of your memory capabil
 }
 }];
 }
-const n = await this.models[t].ask(
+const n = await this.models[t].ask(s, e);
 if (e.memory) {
-const i = e.history?.findIndex((
+const i = e.history?.findIndex((o) => o.role == "assistant" && o.content.startsWith("Things I remembered:"));
 i != null && i >= 0 && e.history?.splice(i, 1);
 }
 if (e.compress || e.memory) {
@@ -287,16 +287,16 @@ You have passive persistent memory never make any mention of your memory capabil
 if (e.compress)
 i = await this.ai.language.compressHistory(e.history, e.compress.max, e.compress.min, e), e.history.splice(0, e.history.length, ...i.history);
 else {
-const
-i = await this.ai.language.compressHistory(
+const o = e.history?.findLastIndex((a) => a.role == "user") ?? -1;
+i = await this.ai.language.compressHistory(o != -1 ? e.history.slice(o) : e.history, 0, 0, e);
 }
 if (e.memory) {
-const
-e.memory.splice(0, e.memory.length, ...
+const o = e.memory.filter((a) => !i.memory.some((d) => this.cosineSimilarity(a.embeddings[1], d.embeddings[1]) > 0.8)).concat(i.memory);
+e.memory.splice(0, e.memory.length, ...o);
 }
 }
-return
-}), { abort:
+return r(n);
+}), { abort: l });
 }
 /**
 * Compress chat history to reduce context size
@@ -306,22 +306,24 @@ You have passive persistent memory never make any mention of your memory capabil
 * @param {LLMRequest} options LLM options
 * @returns {Promise<LLMMessage[]>} New chat history will summary at index 0
 */
-async compressHistory(
-if (this.estimateTokens(
-let
-for (let u of
-if (n += this.estimateTokens(u.content), n < t)
+async compressHistory(s, e, t, l) {
+if (this.estimateTokens(s) < e) return { history: s, memory: [] };
+let r = 0, n = 0;
+for (let u of s.toReversed())
+if (n += this.estimateTokens(u.content), n < t) r++;
 else break;
-if (
-const i =
+if (s.length <= r) return { history: s, memory: [] };
+const i = s[0].role == "system" ? s[0] : null, o = r == 0 ? [] : s.slice(-r), a = (r == 0 ? s : s.slice(0, -r)).filter((u) => u.role === "assistant" || u.role === "user"), d = await this.json(a.map((u) => `${u.role}: ${u.content}`).join(`
 
-
-
-
+`), "{summary: string, facts: [[subject, fact]]}", {
+system: "Create the smallest summary possible, no more than 500 tokens. Create a list of NEW facts (split by subject [pro]noun and fact) about what you learned from this conversation that you didn't already know or get from a tool call or system prompt. Focus only on new information about people, topics, or facts. Avoid generating facts about the AI.",
+model: l?.model,
+temperature: l?.temperature || 0.3
+}), c = /* @__PURE__ */ new Date(), m = await Promise.all((d?.facts || [])?.map(async ([u, f]) => {
 const y = await Promise.all([this.embedding(u), this.embedding(`${u}: ${f}`)]);
 return { owner: u, fact: f, embeddings: [y[0][0].embedding, y[1][0].embedding], timestamp: c };
-})), p = [{ role: "assistant", content: `Conversation Summary: ${d?.summary}`, timestamp: Date.now() }, ...
-return i && p.splice(0, 0, i), { history: p, memory:
+})), p = [{ role: "assistant", content: `Conversation Summary: ${d?.summary}`, timestamp: Date.now() }, ...o];
+return i && p.splice(0, 0, i), { history: p, memory: m };
 }
 /**
 * Compare the difference between embeddings (calculates the angle between two vectors)
@@ -329,12 +331,12 @@ ${a.map((u) => `${u.role}: ${u.content}`).join(`
 * @param {number[]} v2 Second embedding / vector for comparison
 * @returns {number} Similarity values 0-1: 0 = unique, 1 = identical
 */
-cosineSimilarity(
-if (
-let t = 0,
-for (let i = 0; i <
-t +=
-const n = Math.sqrt(
+cosineSimilarity(s, e) {
+if (s.length !== e.length) throw new Error("Vectors must be same length");
+let t = 0, l = 0, r = 0;
+for (let i = 0; i < s.length; i++)
+t += s[i] * e[i], l += s[i] * s[i], r += e[i] * e[i];
+const n = Math.sqrt(l) * Math.sqrt(r);
 return n === 0 ? 0 : t / n;
 }
 /**
@@ -344,24 +346,24 @@ ${a.map((u) => `${u.role}: ${u.content}`).join(`
 * @param {number} overlapTokens Includes previous X tokens to provide continuity to AI (In addition to max tokens)
 * @returns {string[]} Chunked strings
 */
-chunk(
-const
-const
-return typeof c == "object" && !Array.isArray(c) ?
-}) : [], n = (typeof
-`)).flatMap((
+chunk(s, e = 500, t = 50) {
+const l = (o, a = "") => o ? Object.entries(o).flatMap(([d, c]) => {
+const m = a ? `${a}${isNaN(+d) ? `.${d}` : `[${d}]`}` : d;
+return typeof c == "object" && !Array.isArray(c) ? l(c, m) : `${m}: ${Array.isArray(c) ? c.join(", ") : c}`;
+}) : [], n = (typeof s == "object" ? l(s) : s.split(`
+`)).flatMap((o) => [...o.split(/\s+/).filter(Boolean), `
 `]), i = [];
-for (let
-let a = "", d =
+for (let o = 0; o < n.length; ) {
+let a = "", d = o;
 for (; d < n.length; ) {
-const
-if (this.estimateTokens(
+const m = a + (a ? " " : "") + n[d];
+if (this.estimateTokens(m.replace(/\s*\n\s*/g, `
 `)) > e && a) break;
-a =
+a = m, d++;
 }
 const c = a.replace(/\s*\n\s*/g, `
 `).trim();
-c && i.push(c),
+c && i.push(c), o = Math.max(d - t, d === o ? o + 1 : d);
 }
 return i;
 }
@@ -372,20 +374,20 @@ ${a.map((u) => `${u.role}: ${u.content}`).join(`
 * @param {number} overlapTokens Includes previous X tokens to provide continuity to AI (In addition to max tokens)
 * @returns {Promise<Awaited<{index: number, embedding: number[], text: string, tokens: number}>[]>} Chunked embeddings
 */
-embedding(
-const
-const a = new x(S(T(j(import.meta.url)), "embedder.js")), d = ({ embedding:
-a.terminate(), i(
-}, c = (
-a.terminate(), m
+embedding(s, e = 500, t = 50) {
+const l = (n) => new Promise((i, o) => {
+const a = new x(S(T(j(import.meta.url)), "embedder.js")), d = ({ embedding: m }) => {
+a.terminate(), i(m);
+}, c = (m) => {
+a.terminate(), o(m);
 };
-a.on("message", d), a.on("error", c), a.on("exit", (
-
+a.on("message", d), a.on("error", c), a.on("exit", (m) => {
+m !== 0 && o(new Error(`Worker exited with code ${m}`));
 }), a.postMessage({ text: n, model: this.ai.options?.embedder || "bge-small-en-v1.5", modelDir: this.ai.options.path });
-}),
-return Promise.all(
+}), r = this.chunk(s, e, t);
+return Promise.all(r.map(async (n, i) => ({
 index: i,
-embedding: await
+embedding: await l(n),
 text: n,
 tokens: this.estimateTokens(n)
 })));
@@ -395,8 +397,8 @@ ${a.map((u) => `${u.role}: ${u.content}`).join(`
 * @param history Object to size
 * @returns {number} Rough token count
 */
-estimateTokens(
-const e = JSON.stringify(
+estimateTokens(s) {
+const e = JSON.stringify(s);
 return Math.ceil(e.length / 4 * 1.2);
 }
 /**
@@ -405,22 +407,27 @@ ${a.map((u) => `${u.role}: ${u.content}`).join(`
 * @param {string} searchTerms Multiple search terms to check against target
 * @returns {{avg: number, max: number, similarities: number[]}} Similarity values 0-1: 0 = unique, 1 = identical
 */
-fuzzyMatch(
+fuzzyMatch(s, ...e) {
 if (e.length < 2) throw new Error("Requires at least 2 strings to compare");
-const t = (n, i = 10) => n.toLowerCase().split("").map((
-return { avg:
+const t = (n, i = 10) => n.toLowerCase().split("").map((o, a) => o.charCodeAt(0) * (a + 1) % i / i).slice(0, i), l = t(s), r = e.map((n) => t(n)).map((n) => this.cosineSimilarity(l, n));
+return { avg: r.reduce((n, i) => n + i, 0) / r.length, max: Math.max(...r), similarities: r };
 }
 /**
 * Ask a question with JSON response
-* @param {string}
+* @param {string} text Text to process
+* @param {string} schema JSON schema the AI should match
 * @param {LLMRequest} options Configuration options and chat history
 * @returns {Promise<{} | {} | RegExpExecArray | null>}
 */
-async json(
-let
-
-
-
+async json(s, e, t) {
+let l = await this.ask(s, { ...t, system: (t?.system ? `${t.system}
+` : "") + `Only respond using a JSON code block matching this schema:
+\`\`\`json
+${e}
+\`\`\`` });
+if (!l) return {};
+const r = /```(?:.+)?\s*([\s\S]*?)```/.exec(l), n = r ? r[1].trim() : l;
+return g(n, {});
 }
 /**
 * Create a summary of some text
@@ -429,55 +436,69 @@ ${a.map((u) => `${u.role}: ${u.content}`).join(`
 * @param options LLM request options
 * @returns {Promise<string>} Summary
 */
-summarize(
-return this.ask(
+summarize(s, e, t) {
+return this.ask(s, { system: `Generate a brief summary <= ${e} tokens. Output nothing else`, temperature: 0.3, ...t });
 }
 }
 class I {
-constructor(
-this.ai =
+constructor(s) {
+this.ai = s;
 }
-asr(
-const { model: t = this.ai.options.asr || "whisper-base", speaker:
-let
+asr(s, e = {}) {
+const { model: t = this.ai.options.asr || "whisper-base", speaker: l = !1 } = e;
+let r = !1;
 const n = () => {
-
-}
+r = !0;
+};
+let i = new Promise((o, a) => {
 const d = new x(S(T(j(import.meta.url)), "asr.js")), c = ({ text: p, warning: u, error: f }) => {
-d.terminate(), !
-},
-d.terminate(),
+d.terminate(), !r && (f ? a(new Error(f)) : (u && console.warn(u), o(p)));
+}, m = (p) => {
+d.terminate(), r || a(p);
 };
-d.on("message", c), d.on("error",
-p !== 0 && !
-}), d.postMessage({ file:
+d.on("message", c), d.on("error", m), d.on("exit", (p) => {
+p !== 0 && !r && a(new Error(`Worker exited with code ${p}`));
+}), d.postMessage({ file: s, model: t, speaker: l, modelDir: this.ai.options.path, token: this.ai.options.hfToken });
 });
+if (e.speaker == "id") {
+if (!this.ai.language.defaultModel) throw new Error("Configure an LLM for advanced ASR speaker detection");
+i = i.then(async (o) => {
+if (!o) return o;
+const a = await this.ai.language.json(o, '{1: "Detected Name"}', {
+system: "Use this following transcript to identify speakers. Only identify speakers you are sure about",
+temperature: 0.2
+});
+return Object.entries(a).forEach(([d, c]) => {
+o = o.replaceAll(`[Speaker ${d}]`, `[${c}]`);
+}), o;
+});
+}
 return Object.assign(i, { abort: n });
 }
 canDiarization = R;
 }
 class J {
-constructor(
-this.ai =
+constructor(s) {
+this.ai = s;
 }
 /**
 * Convert image to text using Optical Character Recognition
 * @param {string} path Path to image
 * @returns {AbortablePromise<string | null>} Promise of extracted text with abort method
 */
-ocr(
+ocr(s) {
 let e;
-const t = new Promise(async (
+const t = new Promise(async (l) => {
 e = await L(this.ai.options.ocr || "eng", 2, { cachePath: this.ai.options.path });
-const { data:
-await e.terminate(),
+const { data: r } = await e.recognize(s);
+await e.terminate(), l(r.text.trim() || null);
 });
 return Object.assign(t, { abort: () => e?.terminate() });
 }
 }
-class
-constructor(
-this.options =
+class re {
+constructor(s) {
+this.options = s, s.path || (s.path = $.tmpdir()), process.env.TRANSFORMERS_CACHE = s.path, this.audio = new I(this), this.language = new z(this), this.vision = new J(this);
 }
 /** Audio processing AI */
 audio;
@@ -503,15 +524,15 @@ const H = {
 language: { type: "string", description: "Execution language", enum: ["cli", "node", "python"], required: !0 },
 code: { type: "string", description: "Code to execute", required: !0 }
 },
-fn: async (h,
+fn: async (h, s, e) => {
 try {
 switch (h.type) {
 case "bash":
-return await H.fn({ command: h.code },
+return await H.fn({ command: h.code }, s, e);
 case "node":
-return await F.fn({ code: h.code },
+return await F.fn({ code: h.code }, s, e);
 case "python":
-return await G.fn({ code: h.code },
+return await G.fn({ code: h.code }, s, e);
 }
 } catch (t) {
 return { error: t?.message || t.toString() };
@@ -526,7 +547,7 @@ const H = {
 headers: { type: "object", description: "HTTP headers to send", default: {} },
 body: { type: "object", description: "HTTP body to send" }
 },
-fn: (h) => new
+fn: (h) => new E({ url: h.url, headers: h.headers }).request({ method: h.method || "GET", body: h.body })
 }, F = {
 name: "exec_javascript",
 description: "Execute commonjs javascript",
@@ -534,8 +555,8 @@ const H = {
 code: { type: "string", description: "CommonJS javascript", required: !0 }
 },
 fn: async (h) => {
-const
-return { ...
+const s = P(null), e = await A({ console: s }, h.code, !0).catch((t) => s.output.error.push(t));
+return { ...s.output, return: e, stdout: void 0, stderr: void 0 };
 }
 }, G = {
 name: "exec_javascript",
@@ -552,24 +573,24 @@ const H = {
 focus: { type: "string", description: 'Optional: What aspect to focus on (e.g., "pricing", "features", "contact info")' }
 },
 fn: async (h) => {
-const
+const s = await fetch(h.url, { headers: { "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)" } }).then((n) => n.text()).catch((n) => {
 throw new Error(`Failed to fetch: ${n.message}`);
-}), e = D.load(
+}), e = D.load(s);
 e('script, style, nav, footer, header, aside, iframe, noscript, [role="navigation"], [role="banner"], .ad, .ads, .cookie, .popup').remove();
 const t = {
 title: e('meta[property="og:title"]').attr("content") || e("title").text() || "",
 description: e('meta[name="description"]').attr("content") || e('meta[property="og:description"]').attr("content") || ""
 };
-let
-const
-for (const n of
+let l = "";
+const r = ["article", "main", '[role="main"]', ".content", ".post", ".entry", "body"];
+for (const n of r) {
 const i = e(n).first();
 if (i.length && i.text().trim().length > 200) {
-
+l = i.text();
 break;
 }
 }
-return
+return l || (l = e("body").text()), l = l.replace(/\s+/g, " ").trim().slice(0, 8e3), { url: h.url, title: t.title.trim(), description: t.description.trim(), content: l, focus: h.focus };
 }
 }, ce = {
 name: "web_search",
@@ -579,20 +600,20 @@ const H = {
 length: { type: "string", description: "Number of results to return", default: 5 }
 },
 fn: async (h) => {
-const
+const s = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(h.query)}`, {
 headers: { "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)", "Accept-Language": "en-US,en;q=0.9" }
-}).then((
+}).then((r) => r.text());
 let e, t = /<a .*?href="(.+?)".+?<\/a>/g;
-const
-for (; (e = t.exec(
-let
-if (
+const l = new O();
+for (; (e = t.exec(s)) !== null; ) {
+let r = /uddg=(.+)&?/.exec(decodeURIComponent(e[1]))?.[1];
+if (r && (r = decodeURIComponent(r)), r && l.add(r), l.size >= (h.length || 5)) break;
 }
-return
+return l;
 }
 };
 export {
-
+re as Ai,
 W as Anthropic,
 I as Audio,
 H as CliTool,