@ztimson/ai-utils 0.1.16 → 0.1.19
- package/dist/index.js +14 -6
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +233 -209
- package/dist/index.mjs.map +1 -1
- package/dist/llm.d.ts +4 -0
- package/package.json +1 -1
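Only package/dist/index.mjs has a readable source diff below. From that minified output, the most visible behavioural change appears to be message timestamping: each provider's toStandard now stamps Date.now() onto history entries missing a timestamp, fromStandard strips the field again before building the provider payload, and LLM.compress gains a fourth pass-through options argument. The sketch below is an illustration only, assuming a StandardMessage shape inferred from the dist output rather than taken from the published typings (llm.d.ts is not shown in this diff).

// TypeScript sketch (hypothetical names, inferred from the minified diff):
interface StandardMessage {
  role: 'system' | 'user' | 'assistant' | 'tool';
  content: string;
  timestamp?: number; // appears to be new in 0.1.19 (assumption based on the dist diff)
  [extra: string]: unknown;
}

// toStandard-style pass: stamp Date.now() on any entry missing a timestamp.
function stampTimestamps(history: StandardMessage[]): StandardMessage[] {
  return history.map(m => (m.timestamp ? m : { ...m, timestamp: Date.now() }));
}

// fromStandard-style pass: drop timestamps before sending the payload to a provider SDK.
function stripTimestamps(history: StandardMessage[]): StandardMessage[] {
  return history.map(({ timestamp, ...rest }) => rest);
}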
package/dist/index.mjs
CHANGED
@@ -1,39 +1,33 @@
-import { createWorker as
-import { objectMap as
-import { Anthropic as
-import { Ollama as
-import { OpenAI as
-import
+import { createWorker as T } from "tesseract.js";
+import { objectMap as k, JSONAttemptParse as b, findByProp as x, JSONSanitize as y, Http as q, consoleInterceptor as v, fn as P, ASet as A } from "@ztimson/utils";
+import { Anthropic as E } from "@anthropic-ai/sdk";
+import { Ollama as D } from "ollama";
+import { OpenAI as M } from "openai";
+import j from "node:fs/promises";
 import O from "node:path";
-import * as
-import { spawn as
-import {
-class
+import * as _ from "@tensorflow/tfjs";
+import { spawn as U } from "node:child_process";
+import { $, $Sync as L } from "@ztimson/node-utils";
+class S {
 }
-class
+class R extends S {
 constructor(t, e, n) {
-super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new
+super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new E({ apiKey: e });
 }
 client;
 toStandard(t) {
-
-
-
-
-
-
-
-
-}), i.content = i.content.filter((o) => o.type == "text").map((o) => o.text).join(`
-
-`)), i.content) {
-const o = e.at(-1);
-o && o.role == "assistant" && i.role == "assistant" ? o.content += `
+for (let e = 0; e < t.length; e++) {
+const n = e;
+typeof t[n].content != "string" && (t[n].role == "assistant" ? t[n].content.filter((o) => o.type == "tool_use").forEach((o) => {
+e++, t.splice(e, 0, { role: "tool", id: o.id, name: o.name, args: o.input, timestamp: Date.now() });
+}) : t[n].role == "user" && t[n].content.filter((o) => o.type == "tool_result").forEach((o) => {
+const l = t.find((p) => p.id == o.tool_use_id);
+l[o.is_error ? "error" : "content"] = o.content;
+}), t[n].content = t[n].content.filter((o) => o.type == "text").map((o) => o.text).join(`
 
-`
-}
+`)), t[n].timestamp || (t[n].timestamp = Date.now());
 }
-return e;
+return t.filter((e) => !!e.content);
 }
 fromStandard(t) {
 for (let e = 0; e < t.length; e++)
@@ -46,13 +40,13 @@ class L extends k {
 { role: "user", content: [{ type: "tool_result", tool_use_id: n.id, is_error: !!n.error, content: n.error || n.content }] }
 ), e++;
 }
-return t;
+return t.map(({ timestamp: e, ...n }) => n);
 }
 ask(t, e = {}) {
-const n = new AbortController(),
-let
-e.compress && (
-const
+const n = new AbortController(), o = new Promise(async (l, p) => {
+let m = this.fromStandard([...e.history || [], { role: "user", content: t, timestamp: Date.now() }]);
+e.compress && (m = await this.ai.llm.compress(m, e.compress.max, e.compress.min, e));
+const d = {
 model: e.model || this.model,
 max_tokens: e.max_tokens || this.ai.options.max_tokens || 4096,
 system: e.system || this.ai.options.system || "",
@@ -62,226 +56,256 @@ class L extends k {
 description: a.description,
 input_schema: {
 type: "object",
-properties: a.args ?
-required: a.args ? Object.entries(a.args).filter((
+properties: a.args ? k(a.args, (s, i) => ({ ...i, required: void 0 })) : {},
+required: a.args ? Object.entries(a.args).filter((s) => s[1].required).map((s) => s[0]) : []
 },
 fn: void 0
 })),
-messages:
+messages: m,
 stream: !!e.stream
 };
-let
+let c;
+const r = [];
 do {
-if (
-
+if (c = await this.client.messages.create(d), e.stream) {
+r.length && e.stream({ text: `
 
-` }),
-for await (const
+` }), c.content = [];
+for await (const s of c) {
 if (n.signal.aborted) break;
-if (
-
-else if (
-if (
-const
-
-} else
-else if (
-const
-
-} else if (
+if (s.type === "content_block_start")
+s.content_block.type === "text" ? c.content.push({ type: "text", text: "" }) : s.content_block.type === "tool_use" && c.content.push({ type: "tool_use", id: s.content_block.id, name: s.content_block.name, input: "" });
+else if (s.type === "content_block_delta")
+if (s.delta.type === "text_delta") {
+const i = s.delta.text;
+c.content.at(-1).text += i, e.stream({ text: i });
+} else s.delta.type === "input_json_delta" && (c.content.at(-1).input += s.delta.partial_json);
+else if (s.type === "content_block_stop") {
+const i = c.content.at(-1);
+i.input != null && (i.input = i.input ? b(i.input, {}) : {});
+} else if (s.type === "message_stop")
 break;
 }
 }
-
+r.push({ role: "assistant", content: c.content, timestamp: Date.now() });
+const a = c.content.filter((s) => s.type === "tool_use");
 if (a.length && !n.signal.aborted) {
-
-const
-const
-if (!
+m.push({ role: "assistant", content: c.content });
+const i = { role: "user", content: await Promise.all(a.map(async (f) => {
+const h = e.tools?.find(x("name", f.name));
+if (!h) return { tool_use_id: f.id, is_error: !0, content: "Tool not found" };
 try {
-const
-return { type: "tool_result", tool_use_id:
-} catch (
-return { type: "tool_result", tool_use_id:
+const g = await h.fn(f.input, this.ai);
+return { type: "tool_result", tool_use_id: f.id, content: y(g) };
+} catch (g) {
+return { type: "tool_result", tool_use_id: f.id, is_error: !0, content: g?.message || g?.toString() || "Unknown" };
 }
-}));
-
+})) };
+m.push(i), r.push({ ...i, timestamp: Date.now() }), d.messages = m;
 }
-} while (!n.signal.aborted &&
-
-
-
+} while (!n.signal.aborted && c.content.some((a) => a.type === "tool_use"));
+const w = r.filter((a) => a.role === "assistant").map((a) => a.content.filter((s) => s.type == "text").map((s) => s.text).join(`
+
+`)).filter((a) => a).join(`
 
-`)
-}]));
+`);
+e.stream && e.stream({ done: !0 }), l(this.toStandard([...m, { role: "assistant", content: w, timestamp: Date.now() }]));
 });
-return Object.assign(
+return Object.assign(o, { abort: () => n.abort() });
 }
 }
-class
+class I extends S {
 constructor(t, e, n) {
-super(), this.ai = t, this.host = e, this.model = n, this.client = new
+super(), this.ai = t, this.host = e, this.model = n, this.client = new D({ host: e });
 }
 client;
 toStandard(t) {
-for (let e = 0; e < t.length; e++)
+for (let e = 0; e < t.length; e++) {
 if (t[e].role == "assistant" && t[e].tool_calls)
 t[e].content ? delete t[e].tool_calls : (t.splice(e, 1), e--);
 else if (t[e].role == "tool") {
 const n = t[e].content.startsWith('{"error":');
-t[e] = { role: "tool", name: t[e].tool_name, args: t[e].args, [n ? "error" : "content"]: t[e].content };
+t[e] = { role: "tool", name: t[e].tool_name, args: t[e].args, [n ? "error" : "content"]: t[e].content, timestamp: t[e].timestamp };
 }
+t[e]?.timestamp || (t[e].timestamp = Date.now());
+}
 return t;
 }
 fromStandard(t) {
-return t.map((e) =>
+return t.map((e) => {
+const { timestamp: n, ...o } = e;
+return e.role != "tool" ? o : { role: "tool", tool_name: e.name, content: e.error || e.content };
+});
 }
 ask(t, e = {}) {
-const n = new AbortController(),
-let
-
-const
+const n = new AbortController(), o = new Promise(async (l, p) => {
+let m = e.system || this.ai.options.system, d = this.fromStandard([...e.history || [], { role: "user", content: t, timestamp: Date.now() }]);
+d[0].roll == "system" && (m ? d.shift() : m = d.shift()), e.compress && (d = await this.ai.llm.compress(d, e.compress.max, e.compress.min)), e.system && d.unshift({ role: "system", content: m });
+const c = {
 model: e.model || this.model,
-messages:
+messages: d,
 stream: !!e.stream,
 signal: n.signal,
 options: {
 temperature: e.temperature || this.ai.options.temperature || 0.7,
 num_predict: e.max_tokens || this.ai.options.max_tokens || 4096
 },
-tools: (e.tools || this.ai.options.tools || []).map((
+tools: (e.tools || this.ai.options.tools || []).map((s) => ({
 type: "function",
 function: {
-name:
-description:
+name: s.name,
+description: s.description,
 parameters: {
 type: "object",
-properties:
-required:
+properties: s.args ? k(s.args, (i, f) => ({ ...f, required: void 0 })) : {},
+required: s.args ? Object.entries(s.args).filter((i) => i[1].required).map((i) => i[0]) : []
 }
 }
 }))
 };
-let
+let r;
+const w = [];
 do {
-if (
-
-
-
+if (r = await this.client.chat(c), e.stream) {
+w.length && e.stream({ text: `
+
+` }), r.message = { role: "assistant", content: "", tool_calls: [] };
+for await (const s of r)
+if (n.signal.aborted || (s.message?.content && (r.message.content += s.message.content, e.stream({ text: s.message.content })), s.message?.tool_calls && (r.message.tool_calls = s.message.tool_calls), s.done)) break;
 }
-if (
-
-const
-const
-if (!
-const
+if (w.push({ role: "assistant", content: r.message?.content, timestamp: Date.now() }), r.message?.tool_calls?.length && !n.signal.aborted) {
+d.push(r.message);
+const s = await Promise.all(r.message.tool_calls.map(async (i) => {
+const f = (e.tools || this.ai.options.tools)?.find(x("name", i.function.name));
+if (!f) return { role: "tool", tool_name: i.function.name, content: '{"error": "Tool not found"}' };
+const h = typeof i.function.arguments == "string" ? b(i.function.arguments, {}) : i.function.arguments;
 try {
-const
-return { role: "tool", tool_name:
-} catch (
-return { role: "tool", tool_name:
+const g = await f.fn(h, this.ai);
+return { role: "tool", tool_name: i.function.name, args: h, content: y(g) };
+} catch (g) {
+return { role: "tool", tool_name: i.function.name, args: h, content: y({ error: g?.message || g?.toString() || "Unknown" }) };
 }
 }));
-
+d.push(...s), w.push(...s.map((i) => ({ ...i, timestamp: Date.now() }))), c.messages = d;
 }
-} while (!n.signal.aborted &&
-
+} while (!n.signal.aborted && r.message?.tool_calls?.length);
+const a = w.filter((s) => s.role === "assistant").map((s) => s.content).filter((s) => s).join(`
+
+`);
+e.stream && e.stream({ done: !0 }), l(this.toStandard([...d, { role: "assistant", content: a, timestamp: Date.now() }]));
 });
-return Object.assign(
+return Object.assign(o, { abort: () => n.abort() });
 }
 }
-class J extends
+class J extends S {
 constructor(t, e, n) {
-super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new
+super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new M({ apiKey: e });
 }
 client;
 toStandard(t) {
 for (let e = 0; e < t.length; e++) {
 const n = t[e];
 if (n.role === "assistant" && n.tool_calls) {
-const
+const o = n.tool_calls.map((l) => ({
 role: "tool",
-id:
-name:
-args:
+id: l.id,
+name: l.function.name,
+args: b(l.function.arguments, {}),
+timestamp: n.timestamp
 }));
-t.splice(e, 1, ...
+t.splice(e, 1, ...o), e += o.length - 1;
 } else if (n.role === "tool" && n.content) {
-const
-
+const o = t.find((l) => n.tool_call_id == l.id);
+o && (n.content.includes('"error":') ? o.error = n.content : o.content = n.content), t.splice(e, 1), e--;
 }
+t[e]?.timestamp || (t[e].timestamp = Date.now());
 }
 return t;
 }
 fromStandard(t) {
-return t.reduce((e, n) =>
-role
-
-
-
-
-
-
-
-
-
+return t.reduce((e, n) => {
+if (n.role === "tool")
+e.push({
+role: "assistant",
+content: null,
+tool_calls: [{ id: n.id, type: "function", function: { name: n.name, arguments: JSON.stringify(n.args) } }],
+refusal: null,
+annotations: []
+}, {
+role: "tool",
+tool_call_id: n.id,
+content: n.error || n.content
+});
+else {
+const { timestamp: o, ...l } = n;
+e.push(l);
+}
+return e;
+}, []);
 }
 ask(t, e = {}) {
-const n = new AbortController(),
-let
-e.compress && (
-const
+const n = new AbortController(), o = new Promise(async (l, p) => {
+let m = this.fromStandard([...e.history || [], { role: "user", content: t, timestamp: Date.now() }]);
+e.compress && (m = await this.ai.llm.compress(m, e.compress.max, e.compress.min, e));
+const d = {
 model: e.model || this.model,
-messages:
+messages: m,
 stream: !!e.stream,
 max_tokens: e.max_tokens || this.ai.options.max_tokens || 4096,
 temperature: e.temperature || this.ai.options.temperature || 0.7,
-tools: (e.tools || this.ai.options.tools || []).map((
+tools: (e.tools || this.ai.options.tools || []).map((a) => ({
 type: "function",
 function: {
-name:
-description:
+name: a.name,
+description: a.description,
 parameters: {
 type: "object",
-properties:
-required:
+properties: a.args ? k(a.args, (s, i) => ({ ...i, required: void 0 })) : {},
+required: a.args ? Object.entries(a.args).filter((s) => s[1].required).map((s) => s[0]) : []
 }
 }
 }))
 };
-let
+let c;
+const r = [];
 do {
-if (
-
-
+if (c = await this.client.chat.completions.create(d), e.stream) {
+r.length && e.stream({ text: `
+
+` }), c.choices = [{ message: { content: "", tool_calls: [] } }];
+for await (const s of c) {
 if (n.signal.aborted) break;
-
+s.choices[0].delta.content && (c.choices[0].message.content += s.choices[0].delta.content, e.stream({ text: s.choices[0].delta.content })), s.choices[0].delta.tool_calls && (c.choices[0].message.tool_calls = s.choices[0].delta.tool_calls);
 }
 }
-
-
-
-
-
-
+r.push({ role: "assistant", content: c.choices[0].message.content || "", timestamp: Date.now() });
+const a = c.choices[0].message.tool_calls || [];
+if (a.length && !n.signal.aborted) {
+m.push(c.choices[0].message);
+const s = await Promise.all(a.map(async (i) => {
+const f = e.tools?.find(x("name", i.function.name));
+if (!f) return { role: "tool", tool_call_id: i.id, content: '{"error": "Tool not found"}' };
 try {
-const
-return { role: "tool", tool_call_id:
-} catch (
-return { role: "tool", tool_call_id:
+const h = b(i.function.arguments, {}), g = await f.fn(h, this.ai);
+return { role: "tool", tool_call_id: i.id, content: y(g) };
+} catch (h) {
+return { role: "tool", tool_call_id: i.id, content: y({ error: h?.message || h?.toString() || "Unknown" }) };
 }
 }));
-
+m.push(...s), r.push(...s.map((i) => ({ ...i, timestamp: Date.now() }))), d.messages = m;
 }
-} while (!n.signal.aborted &&
-
+} while (!n.signal.aborted && c.choices?.[0]?.message?.tool_calls?.length);
+const w = r.filter((a) => a.role === "assistant").map((a) => a.content).filter((a) => a).join(`
+
+`);
+e.stream && e.stream({ done: !0 }), l(this.toStandard([...m, { role: "assistant", content: w, timestamp: Date.now() }]));
 });
-return Object.assign(
+return Object.assign(o, { abort: () => n.abort() });
 }
 }
 class W {
 constructor(t, e) {
-this.ai = t, this.options = e, e.anthropic?.token && (this.providers.anthropic = new
+this.ai = t, this.options = e, e.anthropic?.token && (this.providers.anthropic = new R(this.ai, e.anthropic.token, e.anthropic.model)), e.ollama?.host && (this.providers.ollama = new I(this.ai, e.ollama.host, e.ollama.model)), e.openAi?.token && (this.providers.openAi = new J(this.ai, e.openAi.token, e.openAi.model));
 }
 providers = {};
 /**
@@ -303,17 +327,17 @@ class W {
 * @param {LLMRequest} options LLM options
 * @returns {Promise<LLMMessage[]>} New chat history will summary at index 0
 */
-async compress(t, e, n,
+async compress(t, e, n, o) {
 if (this.estimateTokens(t) < e) return t;
-let
-for (let
-if (p += this.estimateTokens(
+let l = 0, p = 0;
+for (let r of t.toReversed())
+if (p += this.estimateTokens(r.content), p < n) l++;
 else break;
-if (t.length <=
-const
-return [{ role: "assistant", content: `Conversation Summary: ${await this.summarize(
+if (t.length <= l) return t;
+const m = l == 0 ? [] : t.slice(-l), d = (l == 0 ? t : t.slice(0, -l)).filter((r) => r.role === "assistant" || r.role === "user");
+return [{ role: "assistant", content: `Conversation Summary: ${await this.summarize(d.map((r) => `${r.role}: ${r.content}`).join(`
 
-`), 250,
+`), 250, o)}`, timestamp: Date.now() }, ...m];
 }
 /**
 * Estimate variable as tokens
@@ -335,7 +359,7 @@ class W {
 system: "Respond using a JSON blob",
 ...e
 });
-return n?.[0]?.content ?
+return n?.[0]?.content ? b(new RegExp("{[sS]*}").exec(n[0].content), {}) : {};
 }
 /**
 * Create a summary of some text
@@ -345,10 +369,10 @@ class W {
 * @returns {Promise<string>} Summary
 */
 summarize(t, e, n) {
-return this.ask(t, { system: `Generate a brief summary <= ${e} tokens. Output nothing else`, temperature: 0.3, ...n }).then((
+return this.ask(t, { system: `Generate a brief summary <= ${e} tokens. Output nothing else`, temperature: 0.3, ...n }).then((o) => o.pop()?.content || null);
 }
 }
-class
+class Z {
 constructor(t) {
 this.options = t, this.llm = new W(this, t), this.options.whisper?.binary && (this.whisperModel = this.options.whisper?.model.endsWith(".bin") ? this.options.whisper?.model : this.options.whisper?.model + ".bin", this.downloadAsrModel());
 }
@@ -366,12 +390,12 @@ class X {
 if (!this.options.whisper?.binary) throw new Error("Whisper not configured");
 let n = () => {
 };
-return { response: new Promise((
-this.downloadAsrModel(e).then((
-let
-const
-n = () =>
-
+return { response: new Promise((l, p) => {
+this.downloadAsrModel(e).then((m) => {
+let d = "";
+const c = U(this.options.whisper?.binary, ["-nt", "-np", "-m", m, "-f", t], { stdio: ["ignore", "pipe", "ignore"] });
+n = () => c.kill("SIGTERM"), c.on("error", (r) => p(r)), c.stdout.on("data", (r) => d += r.toString()), c.on("close", (r) => {
+r === 0 ? l(d.trim() || null) : p(new Error(`Exit code ${r}`));
 });
 });
 }), abort: n };
@@ -386,7 +410,7 @@ class X {
 if (!this.options.whisper?.binary) throw new Error("Whisper not configured");
 t.endsWith(".bin") || (t += ".bin");
 const e = O.join(this.options.whisper.path, t);
-return await
+return await j.stat(e).then(() => !0).catch(() => !1) ? e : this.downloads[t] ? this.downloads[t] : (this.downloads[t] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${t}`).then((n) => n.arrayBuffer()).then((n) => Buffer.from(n)).then(async (n) => (await j.writeFile(e, n), delete this.downloads[t], e)), this.downloads[t]);
 }
 /**
 * Convert image to text using Optical Character Recognition
@@ -400,9 +424,9 @@ class X {
 e?.terminate();
 },
 response: new Promise(async (n) => {
-e = await
-const { data:
-await e.terminate(), n(
+e = await T("eng");
+const { data: o } = await e.recognize(t);
+await e.terminate(), n(o.text.trim() || null);
 })
 };
 }
@@ -414,25 +438,25 @@ class X {
 */
 semanticSimilarity(t, ...e) {
 if (e.length < 2) throw new Error("Requires at least 2 strings to compare");
-const n = (
-if (
-const
-return
-},
-return { avg: p.reduce((
+const n = (m, d = 10) => m.toLowerCase().split("").map((c, r) => c.charCodeAt(0) * (r + 1) % d / d).slice(0, d), o = (m, d) => {
+if (m.length !== d.length) throw new Error("Vectors must be same length");
+const c = _.tensor1d(m), r = _.tensor1d(d), w = _.dot(c, r), a = _.norm(c), s = _.norm(r);
+return a.dataSync()[0] === 0 || s.dataSync()[0] === 0 ? 0 : w.dataSync()[0] / (a.dataSync()[0] * s.dataSync()[0]);
+}, l = n(t), p = e.map((m) => n(m)).map((m) => o(l, m));
+return { avg: p.reduce((m, d) => m + d, 0) / p.length, max: Math.max(...p), similarities: p };
 }
 }
-const
+const N = {
 name: "cli",
 description: "Use the command line interface, returns any output",
 args: { command: { type: "string", description: "Command to run", required: !0 } },
-fn: (u) =>
-},
+fn: (u) => $`${u.command}`
+}, ee = {
 name: "get_datetime",
 description: "Get current date and time",
 args: {},
 fn: async () => (/* @__PURE__ */ new Date()).toISOString()
-},
+}, te = {
 name: "exec",
 description: "Run code/scripts",
 args: {
@@ -443,17 +467,17 @@ const I = {
 try {
 switch (u.type) {
 case "bash":
-return await
+return await N.fn({ command: u.code }, t);
 case "node":
-return await N.fn({ code: u.code }, t);
-case "python":
 return await z.fn({ code: u.code }, t);
+case "python":
+return await G.fn({ code: u.code }, t);
 }
 } catch (e) {
 return { error: e?.message || e.toString() };
 }
 }
-},
+}, ne = {
 name: "fetch",
 description: "Make HTTP request to URL",
 args: {
@@ -462,25 +486,25 @@ const I = {
 headers: { type: "object", description: "HTTP headers to send", default: {} },
 body: { type: "object", description: "HTTP body to send" }
 },
-fn: (u) => new
-},
+fn: (u) => new q({ url: u.url, headers: u.headers }).request({ method: u.method || "GET", body: u.body })
+}, z = {
 name: "exec_javascript",
 description: "Execute commonjs javascript",
 args: {
 code: { type: "string", description: "CommonJS javascript", required: !0 }
 },
 fn: async (u) => {
-const t =
+const t = v(null), e = await P({ console: t }, u.code, !0).catch((n) => t.output.error.push(n));
 return { ...t.output, return: e, stdout: void 0, stderr: void 0 };
 }
-},
+}, G = {
 name: "exec_javascript",
 description: "Execute commonjs javascript",
 args: {
 code: { type: "string", description: "CommonJS javascript", required: !0 }
 },
-fn: async (u) => ({ result:
-},
+fn: async (u) => ({ result: L`python -c "${u.code}"` })
+}, se = {
 name: "search",
 description: "Use a search engine to find relevant URLs, should be changed with fetch to scrape sources",
 args: {
@@ -490,26 +514,26 @@ const I = {
 fn: async (u) => {
 const t = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(u.query)}`, {
 headers: { "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)", "Accept-Language": "en-US,en;q=0.9" }
-}).then((
+}).then((l) => l.text());
 let e, n = /<a .*?href="(.+?)".+?<\/a>/g;
-const
+const o = new A();
 for (; (e = n.exec(t)) !== null; ) {
-let
-if (
+let l = /uddg=(.+)&?/.exec(decodeURIComponent(e[1]))?.[1];
+if (l && (l = decodeURIComponent(l)), l && o.add(l), o.size >= (u.length || 5)) break;
 }
-return
+return o;
 }
 };
 export {
-
-
-
-
-
-
-
+Z as Ai,
+R as Anthropic,
+N as CliTool,
+ee as DateTimeTool,
+te as ExecTool,
+ne as FetchTool,
+z as JSTool,
 W as LLM,
-
-
+G as PythonTool,
+se as SearchTool
 };
 //# sourceMappingURL=index.mjs.map