@ztimson/ai-utils 0.1.14 → 0.1.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -1,8 +1,8 @@
  import { createWorker as S } from "tesseract.js";
- import { objectMap as y, JSONAttemptParse as w, findByProp as b, JSONSanitize as _, Http as j, consoleInterceptor as T, fn as q, ASet as v } from "@ztimson/utils";
+ import { objectMap as _, JSONAttemptParse as w, findByProp as b, JSONSanitize as y, Http as j, consoleInterceptor as T, fn as q, ASet as v } from "@ztimson/utils";
  import { Anthropic as P } from "@anthropic-ai/sdk";
- import { Ollama as E } from "ollama";
- import { OpenAI as A } from "openai";
+ import { Ollama as A } from "ollama";
+ import { OpenAI as E } from "openai";
  import x from "node:fs/promises";
  import O from "node:path";
  import * as g from "@tensorflow/tfjs";
@@ -16,18 +16,24 @@ class L extends k {
  }
  client;
  toStandard(t) {
- for (let e = 0; e < t.length; e++) {
- const n = e;
- typeof t[n].content != "string" && (t[n].role == "assistant" ? t[n].content.filter((s) => s.type == "tool_use").forEach((s) => {
- e++, t.splice(e, 0, { role: "tool", id: s.id, name: s.name, args: s.input });
- }) : t[n].role == "user" && t[n].content.filter((s) => s.type == "tool_result").forEach((s) => {
- const l = t.find((u) => u.id == s.tool_use_id);
- l[s.is_error ? "error" : "content"] = s.content;
- }), t[n].content = t[n].content.filter((s) => s.type == "text").map((s) => s.text).join(`
+ const e = [];
+ for (let n = 0; n < t.length; n++) {
+ const i = t[n];
+ if (typeof i.content != "string" && (i.role == "assistant" ? i.content.filter((o) => o.type == "tool_use").forEach((o) => {
+ e.push({ role: "tool", id: o.id, name: o.name, args: o.input });
+ }) : i.role == "user" && i.content.filter((o) => o.type == "tool_result").forEach((o) => {
+ const p = e.find((c) => c.id == o.tool_use_id);
+ p && (p[o.is_error ? "error" : "content"] = o.content);
+ }), i.content = i.content.filter((o) => o.type == "text").map((o) => o.text).join(`
+
+ `)), i.content) {
+ const o = e.at(-1);
+ o && o.role == "assistant" && i.role == "assistant" ? o.content += `

- `));
+ ` + i.content : e.push({ role: i.role, content: i.content });
+ }
  }
- return t.filter((e) => !!e.content);
+ return e;
  }
  fromStandard(t) {
  for (let e = 0; e < t.length; e++)
@@ -43,76 +49,78 @@ class L extends k {
  return t;
  }
  ask(t, e = {}) {
- const n = new AbortController(), s = new Promise(async (l, u) => {
- let i = this.fromStandard([...e.history || [], { role: "user", content: t }]);
- e.compress && (i = await this.ai.llm.compress(i, e.compress.max, e.compress.min, e));
+ const n = new AbortController(), i = new Promise(async (o, p) => {
+ let c = this.fromStandard([...e.history || [], { role: "user", content: t }]);
+ e.compress && (c = await this.ai.llm.compress(c, e.compress.max, e.compress.min, e));
  const m = {
  model: e.model || this.model,
  max_tokens: e.max_tokens || this.ai.options.max_tokens || 4096,
  system: e.system || this.ai.options.system || "",
  temperature: e.temperature || this.ai.options.temperature || 0.7,
- tools: (e.tools || this.ai.options.tools || []).map((o) => ({
- name: o.name,
- description: o.description,
+ tools: (e.tools || this.ai.options.tools || []).map((a) => ({
+ name: a.name,
+ description: a.description,
  input_schema: {
  type: "object",
- properties: o.args ? y(o.args, (r, a) => ({ ...a, required: void 0 })) : {},
- required: o.args ? Object.entries(o.args).filter((r) => r[1].required).map((r) => r[0]) : []
+ properties: a.args ? _(a.args, (r, d) => ({ ...d, required: void 0 })) : {},
+ required: a.args ? Object.entries(a.args).filter((r) => r[1].required).map((r) => r[0]) : []
  },
  fn: void 0
  })),
- messages: i,
+ messages: c,
  stream: !!e.stream
  };
- let c;
+ let l, s = !0;
  do {
- if (c = await this.client.messages.create(m), e.stream) {
- c.content = [];
- for await (const r of c) {
+ if (l = await this.client.messages.create(m), e.stream) {
+ s || e.stream({ text: `
+
+ ` }), s = !1, l.content = [];
+ for await (const r of l) {
  if (n.signal.aborted) break;
  if (r.type === "content_block_start")
- r.content_block.type === "text" ? c.content.push({ type: "text", text: "" }) : r.content_block.type === "tool_use" && c.content.push({ type: "tool_use", id: r.content_block.id, name: r.content_block.name, input: "" });
+ r.content_block.type === "text" ? l.content.push({ type: "text", text: "" }) : r.content_block.type === "tool_use" && l.content.push({ type: "tool_use", id: r.content_block.id, name: r.content_block.name, input: "" });
  else if (r.type === "content_block_delta")
  if (r.delta.type === "text_delta") {
- const a = r.delta.text;
- c.content.at(-1).text += a, e.stream({ text: a });
- } else r.delta.type === "input_json_delta" && (c.content.at(-1).input += r.delta.partial_json);
+ const d = r.delta.text;
+ l.content.at(-1).text += d, e.stream({ text: d });
+ } else r.delta.type === "input_json_delta" && (l.content.at(-1).input += r.delta.partial_json);
  else if (r.type === "content_block_stop") {
- const a = c.content.at(-1);
- a.input != null && (a.input = a.input ? w(a.input, {}) : {});
+ const d = l.content.at(-1);
+ d.input != null && (d.input = d.input ? w(d.input, {}) : {});
  } else if (r.type === "message_stop")
  break;
  }
  }
- const o = c.content.filter((r) => r.type === "tool_use");
- if (o.length && !n.signal.aborted) {
- i.push({ role: "assistant", content: c.content });
- const r = await Promise.all(o.map(async (a) => {
- const p = e.tools?.find(b("name", a.name));
- if (!p) return { tool_use_id: a.id, is_error: !0, content: "Tool not found" };
+ const a = l.content.filter((r) => r.type === "tool_use");
+ if (a.length && !n.signal.aborted) {
+ c.push({ role: "assistant", content: l.content });
+ const r = await Promise.all(a.map(async (d) => {
+ const f = e.tools?.find(b("name", d.name));
+ if (!f) return { tool_use_id: d.id, is_error: !0, content: "Tool not found" };
  try {
- const f = await p.fn(a.input, this.ai);
- return { type: "tool_result", tool_use_id: a.id, content: _(f) };
- } catch (f) {
- return { type: "tool_result", tool_use_id: a.id, is_error: !0, content: f?.message || f?.toString() || "Unknown" };
+ const h = await f.fn(d.input, this.ai);
+ return { type: "tool_result", tool_use_id: d.id, content: y(h) };
+ } catch (h) {
+ return { type: "tool_result", tool_use_id: d.id, is_error: !0, content: h?.message || h?.toString() || "Unknown" };
  }
  }));
- i.push({ role: "user", content: r }), m.messages = i;
+ c.push({ role: "user", content: r }), m.messages = c;
  }
- } while (!n.signal.aborted && c.content.some((o) => o.type === "tool_use"));
- e.stream && e.stream({ done: !0 }), l(this.toStandard([...i, {
+ } while (!n.signal.aborted && l.content.some((a) => a.type === "tool_use"));
+ e.stream && e.stream({ done: !0 }), o(this.toStandard([...c, {
  role: "assistant",
- content: c.content.filter((o) => o.type == "text").map((o) => o.text).join(`
+ content: l.content.filter((a) => a.type == "text").map((a) => a.text).join(`

  `)
  }]));
  });
- return Object.assign(s, { abort: () => n.abort() });
+ return Object.assign(i, { abort: () => n.abort() });
  }
  }
  class R extends k {
  constructor(t, e, n) {
- super(), this.ai = t, this.host = e, this.model = n, this.client = new E({ host: e });
+ super(), this.ai = t, this.host = e, this.model = n, this.client = new A({ host: e });
  }
  client;
  toStandard(t) {
@@ -129,10 +137,10 @@ class R extends k {
  return t.map((e) => e.role != "tool" ? e : { role: "tool", tool_name: e.name, content: e.error || e.content });
  }
  ask(t, e = {}) {
- const n = new AbortController(), s = new Promise(async (l, u) => {
- let i = e.system || this.ai.options.system, m = this.fromStandard([...e.history || [], { role: "user", content: t }]);
- m[0].roll == "system" && (i ? m.shift() : i = m.shift()), e.compress && (m = await this.ai.llm.compress(m, e.compress.max, e.compress.min)), e.system && m.unshift({ role: "system", content: i });
- const c = {
+ const n = new AbortController(), i = new Promise(async (o, p) => {
+ let c = e.system || this.ai.options.system, m = this.fromStandard([...e.history || [], { role: "user", content: t }]);
+ m[0].roll == "system" && (c ? m.shift() : c = m.shift()), e.compress && (m = await this.ai.llm.compress(m, e.compress.max, e.compress.min)), e.system && m.unshift({ role: "system", content: c });
+ const l = {
  model: e.model || this.model,
  messages: m,
  stream: !!e.stream,
@@ -141,66 +149,66 @@ class R extends k {
  temperature: e.temperature || this.ai.options.temperature || 0.7,
  num_predict: e.max_tokens || this.ai.options.max_tokens || 4096
  },
- tools: (e.tools || this.ai.options.tools || []).map((r) => ({
+ tools: (e.tools || this.ai.options.tools || []).map((a) => ({
  type: "function",
  function: {
- name: r.name,
- description: r.description,
+ name: a.name,
+ description: a.description,
  parameters: {
  type: "object",
- properties: r.args ? y(r.args, (a, p) => ({ ...p, required: void 0 })) : {},
- required: r.args ? Object.entries(r.args).filter((a) => a[1].required).map((a) => a[0]) : []
+ properties: a.args ? _(a.args, (r, d) => ({ ...d, required: void 0 })) : {},
+ required: a.args ? Object.entries(a.args).filter((r) => r[1].required).map((r) => r[0]) : []
  }
  }
  }))
  };
- let o;
+ let s;
  do {
- if (o = await this.client.chat(c), e.stream) {
- o.message = { role: "assistant", content: "", tool_calls: [] };
- for await (const r of o)
- if (n.signal.aborted || (r.message?.content && (o.message.content += r.message.content, e.stream({ text: r.message.content })), r.message?.tool_calls && (o.message.tool_calls = r.message.tool_calls), r.done)) break;
+ if (s = await this.client.chat(l), e.stream) {
+ s.message = { role: "assistant", content: "", tool_calls: [] };
+ for await (const a of s)
+ if (n.signal.aborted || (a.message?.content && (s.message.content += a.message.content, e.stream({ text: a.message.content })), a.message?.tool_calls && (s.message.tool_calls = a.message.tool_calls), a.done)) break;
  }
- if (o.message?.tool_calls?.length && !n.signal.aborted) {
- m.push(o.message);
- const r = await Promise.all(o.message.tool_calls.map(async (a) => {
- const p = (e.tools || this.ai.options.tools)?.find(b("name", a.function.name));
- if (!p) return { role: "tool", tool_name: a.function.name, content: '{"error": "Tool not found"}' };
- const f = typeof a.function.arguments == "string" ? w(a.function.arguments, {}) : a.function.arguments;
+ if (s.message?.tool_calls?.length && !n.signal.aborted) {
+ m.push(s.message);
+ const a = await Promise.all(s.message.tool_calls.map(async (r) => {
+ const d = (e.tools || this.ai.options.tools)?.find(b("name", r.function.name));
+ if (!d) return { role: "tool", tool_name: r.function.name, content: '{"error": "Tool not found"}' };
+ const f = typeof r.function.arguments == "string" ? w(r.function.arguments, {}) : r.function.arguments;
  try {
- const h = await p.fn(f, this.ai);
- return { role: "tool", tool_name: a.function.name, args: f, content: _(h) };
+ const h = await d.fn(f, this.ai);
+ return { role: "tool", tool_name: r.function.name, args: f, content: y(h) };
  } catch (h) {
- return { role: "tool", tool_name: a.function.name, args: f, content: _({ error: h?.message || h?.toString() || "Unknown" }) };
+ return { role: "tool", tool_name: r.function.name, args: f, content: y({ error: h?.message || h?.toString() || "Unknown" }) };
  }
  }));
- m.push(...r), c.messages = m;
+ m.push(...a), l.messages = m;
  }
- } while (!n.signal.aborted && o.message?.tool_calls?.length);
- e.stream && e.stream({ done: !0 }), l(this.toStandard([...m, { role: "assistant", content: o.message?.content }]));
+ } while (!n.signal.aborted && s.message?.tool_calls?.length);
+ e.stream && e.stream({ done: !0 }), o(this.toStandard([...m, { role: "assistant", content: s.message?.content }]));
  });
- return Object.assign(s, { abort: () => n.abort() });
+ return Object.assign(i, { abort: () => n.abort() });
  }
  }
- class I extends k {
+ class J extends k {
  constructor(t, e, n) {
- super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new A({ apiKey: e });
+ super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new E({ apiKey: e });
  }
  client;
  toStandard(t) {
  for (let e = 0; e < t.length; e++) {
  const n = t[e];
  if (n.role === "assistant" && n.tool_calls) {
- const s = n.tool_calls.map((l) => ({
+ const i = n.tool_calls.map((o) => ({
  role: "tool",
- id: l.id,
- name: l.function.name,
- args: w(l.function.arguments, {})
+ id: o.id,
+ name: o.function.name,
+ args: w(o.function.arguments, {})
  }));
- t.splice(e, 1, ...s), e += s.length - 1;
+ t.splice(e, 1, ...i), e += i.length - 1;
  } else if (n.role === "tool" && n.content) {
- const s = t.find((l) => n.tool_call_id == l.id);
- s && (n.content.includes('"error":') ? s.error = n.content : s.content = n.content), t.splice(e, 1), e--;
+ const i = t.find((o) => n.tool_call_id == o.id);
+ i && (n.content.includes('"error":') ? i.error = n.content : i.content = n.content), t.splice(e, 1), e--;
  }
  }
  return t;
@@ -219,61 +227,61 @@ class I extends k {
  }) : e.push(n), e), []);
  }
  ask(t, e = {}) {
- const n = new AbortController(), s = new Promise(async (l, u) => {
- let i = this.fromStandard([...e.history || [], { role: "user", content: t }]);
- e.compress && (i = await this.ai.llm.compress(i, e.compress.max, e.compress.min, e));
+ const n = new AbortController(), i = new Promise(async (o, p) => {
+ let c = this.fromStandard([...e.history || [], { role: "user", content: t }]);
+ e.compress && (c = await this.ai.llm.compress(c, e.compress.max, e.compress.min, e));
  const m = {
  model: e.model || this.model,
- messages: i,
+ messages: c,
  stream: !!e.stream,
  max_tokens: e.max_tokens || this.ai.options.max_tokens || 4096,
  temperature: e.temperature || this.ai.options.temperature || 0.7,
- tools: (e.tools || this.ai.options.tools || []).map((o) => ({
+ tools: (e.tools || this.ai.options.tools || []).map((s) => ({
  type: "function",
  function: {
- name: o.name,
- description: o.description,
+ name: s.name,
+ description: s.description,
  parameters: {
  type: "object",
- properties: o.args ? y(o.args, (r, a) => ({ ...a, required: void 0 })) : {},
- required: o.args ? Object.entries(o.args).filter((r) => r[1].required).map((r) => r[0]) : []
+ properties: s.args ? _(s.args, (a, r) => ({ ...r, required: void 0 })) : {},
+ required: s.args ? Object.entries(s.args).filter((a) => a[1].required).map((a) => a[0]) : []
  }
  }
  }))
  };
- let c;
+ let l;
  do {
- if (c = await this.client.chat.completions.create(m), e.stream) {
- c.choices = [];
- for await (const r of c) {
+ if (l = await this.client.chat.completions.create(m), e.stream) {
+ l.choices = [];
+ for await (const a of l) {
  if (n.signal.aborted) break;
- r.choices[0].delta.content && e.stream({ text: r.choices[0].delta.content });
+ a.choices[0].delta.content && e.stream({ text: a.choices[0].delta.content });
  }
  }
- const o = c.choices[0].message.tool_calls || [];
- if (o.length && !n.signal.aborted) {
- i.push(c.choices[0].message);
- const r = await Promise.all(o.map(async (a) => {
- const p = e.tools?.find(b("name", a.function.name));
- if (!p) return { role: "tool", tool_call_id: a.id, content: '{"error": "Tool not found"}' };
+ const s = l.choices[0].message.tool_calls || [];
+ if (s.length && !n.signal.aborted) {
+ c.push(l.choices[0].message);
+ const a = await Promise.all(s.map(async (r) => {
+ const d = e.tools?.find(b("name", r.function.name));
+ if (!d) return { role: "tool", tool_call_id: r.id, content: '{"error": "Tool not found"}' };
  try {
- const f = w(a.function.arguments, {}), h = await p.fn(f, this.ai);
- return { role: "tool", tool_call_id: a.id, content: _(h) };
+ const f = w(r.function.arguments, {}), h = await d.fn(f, this.ai);
+ return { role: "tool", tool_call_id: r.id, content: y(h) };
  } catch (f) {
- return { role: "tool", tool_call_id: a.id, content: _({ error: f?.message || f?.toString() || "Unknown" }) };
+ return { role: "tool", tool_call_id: r.id, content: y({ error: f?.message || f?.toString() || "Unknown" }) };
  }
  }));
- i.push(...r), m.messages = i;
+ c.push(...a), m.messages = c;
  }
- } while (!n.signal.aborted && c.choices?.[0]?.message?.tool_calls?.length);
- e.stream && e.stream({ done: !0 }), l(this.toStandard([...i, { role: "assistant", content: c.choices[0].message.content || "" }]));
+ } while (!n.signal.aborted && l.choices?.[0]?.message?.tool_calls?.length);
+ e.stream && e.stream({ done: !0 }), o(this.toStandard([...c, { role: "assistant", content: l.choices[0].message.content || "" }]));
  });
- return Object.assign(s, { abort: () => n.abort() });
+ return Object.assign(i, { abort: () => n.abort() });
  }
  }
- class J {
+ class W {
  constructor(t, e) {
- this.ai = t, this.options = e, e.anthropic?.token && (this.providers.anthropic = new L(this.ai, e.anthropic.token, e.anthropic.model)), e.ollama?.host && (this.providers.ollama = new R(this.ai, e.ollama.host, e.ollama.model)), e.openAi?.token && (this.providers.openAi = new I(this.ai, e.openAi.token, e.openAi.model));
+ this.ai = t, this.options = e, e.anthropic?.token && (this.providers.anthropic = new L(this.ai, e.anthropic.token, e.anthropic.model)), e.ollama?.host && (this.providers.ollama = new R(this.ai, e.ollama.host, e.ollama.model)), e.openAi?.token && (this.providers.openAi = new J(this.ai, e.openAi.token, e.openAi.model));
  }
  providers = {};
  /**
@@ -295,17 +303,17 @@ class J {
  * @param {LLMRequest} options LLM options
  * @returns {Promise<LLMMessage[]>} New chat history will summary at index 0
  */
- async compress(t, e, n, s) {
+ async compress(t, e, n, i) {
  if (this.estimateTokens(t) < e) return t;
- let l = 0, u = 0;
- for (let o of t.toReversed())
- if (u += this.estimateTokens(o.content), u < n) l++;
+ let o = 0, p = 0;
+ for (let s of t.toReversed())
+ if (p += this.estimateTokens(s.content), p < n) o++;
  else break;
- if (t.length <= l) return t;
- const i = l == 0 ? [] : t.slice(-l), m = (l == 0 ? t : t.slice(0, -l)).filter((o) => o.role === "assistant" || o.role === "user");
- return [{ role: "assistant", content: `Conversation Summary: ${await this.summarize(m.map((o) => `${o.role}: ${o.content}`).join(`
+ if (t.length <= o) return t;
+ const c = o == 0 ? [] : t.slice(-o), m = (o == 0 ? t : t.slice(0, -o)).filter((s) => s.role === "assistant" || s.role === "user");
+ return [{ role: "assistant", content: `Conversation Summary: ${await this.summarize(m.map((s) => `${s.role}: ${s.content}`).join(`

- `), 250, s)}` }, ...i];
+ `), 250, i)}` }, ...c];
  }
  /**
  * Estimate variable as tokens
@@ -337,12 +345,12 @@ class J {
  * @returns {Promise<string>} Summary
  */
  summarize(t, e, n) {
- return this.ask(t, { system: `Generate a brief summary <= ${e} tokens. Output nothing else`, temperature: 0.3, ...n }).then((s) => s.pop()?.content || null);
+ return this.ask(t, { system: `Generate a brief summary <= ${e} tokens. Output nothing else`, temperature: 0.3, ...n }).then((i) => i.pop()?.content || null);
  }
  }
  class X {
  constructor(t) {
- this.options = t, this.llm = new J(this, t), this.options.whisper?.binary && (this.whisperModel = this.options.whisper?.model.endsWith(".bin") ? this.options.whisper?.model : this.options.whisper?.model + ".bin", console.log("constructor: " + this.options.whisper.model + " -> " + this.whisperModel), this.downloadAsrModel());
+ this.options = t, this.llm = new W(this, t), this.options.whisper?.binary && (this.whisperModel = this.options.whisper?.model.endsWith(".bin") ? this.options.whisper?.model : this.options.whisper?.model + ".bin", this.downloadAsrModel());
  }
  downloads = {};
  whisperModel;
@@ -358,12 +366,12 @@ class X {
  if (!this.options.whisper?.binary) throw new Error("Whisper not configured");
  let n = () => {
  };
- return { response: new Promise((l, u) => {
- this.downloadAsrModel(e).then((i) => {
+ return { response: new Promise((o, p) => {
+ this.downloadAsrModel(e).then((c) => {
  let m = "";
- const c = M(this.options.whisper?.binary, ["-nt", "-np", "-m", i, "-f", t], { stdio: ["ignore", "pipe", "ignore"] });
- n = () => c.kill("SIGTERM"), c.on("error", (o) => u(o)), c.stdout.on("data", (o) => m += o.toString()), c.on("close", (o) => {
- o === 0 ? l(m.trim() || null) : u(new Error(`Exit code ${o}`));
+ const l = M(this.options.whisper?.binary, ["-nt", "-np", "-m", c, "-f", t], { stdio: ["ignore", "pipe", "ignore"] });
+ n = () => l.kill("SIGTERM"), l.on("error", (s) => p(s)), l.stdout.on("data", (s) => m += s.toString()), l.on("close", (s) => {
+ s === 0 ? o(m.trim() || null) : p(new Error(`Exit code ${s}`));
  });
  });
  }), abort: n };
@@ -378,7 +386,7 @@ class X {
  if (!this.options.whisper?.binary) throw new Error("Whisper not configured");
  t.endsWith(".bin") || (t += ".bin");
  const e = O.join(this.options.whisper.path, t);
- return console.log("Download: " + e), await x.stat(e).then(() => !0).catch(() => !1) ? (console.log("Exists!"), e) : this.downloads[t] ? this.downloads[t] : (this.downloads[t] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${t}`).then((n) => n.arrayBuffer()).then((n) => Buffer.from(n)).then(async (n) => (await x.writeFile(e, n), delete this.downloads[t], e)), this.downloads[t]);
+ return await x.stat(e).then(() => !0).catch(() => !1) ? e : this.downloads[t] ? this.downloads[t] : (this.downloads[t] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${t}`).then((n) => n.arrayBuffer()).then((n) => Buffer.from(n)).then(async (n) => (await x.writeFile(e, n), delete this.downloads[t], e)), this.downloads[t]);
  }
  /**
  * Convert image to text using Optical Character Recognition
@@ -393,8 +401,8 @@ class X {
  },
  response: new Promise(async (n) => {
  e = await S("eng");
- const { data: s } = await e.recognize(t);
- await e.terminate(), n(s.text.trim() || null);
+ const { data: i } = await e.recognize(t);
+ await e.terminate(), n(i.text.trim() || null);
  })
  };
  }
@@ -406,19 +414,19 @@ class X {
  */
  semanticSimilarity(t, ...e) {
  if (e.length < 2) throw new Error("Requires at least 2 strings to compare");
- const n = (i, m = 10) => i.toLowerCase().split("").map((c, o) => c.charCodeAt(0) * (o + 1) % m / m).slice(0, m), s = (i, m) => {
- if (i.length !== m.length) throw new Error("Vectors must be same length");
- const c = g.tensor1d(i), o = g.tensor1d(m), r = g.dot(c, o), a = g.norm(c), p = g.norm(o);
- return a.dataSync()[0] === 0 || p.dataSync()[0] === 0 ? 0 : r.dataSync()[0] / (a.dataSync()[0] * p.dataSync()[0]);
- }, l = n(t), u = e.map((i) => n(i)).map((i) => s(l, i));
- return { avg: u.reduce((i, m) => i + m, 0) / u.length, max: Math.max(...u), similarities: u };
+ const n = (c, m = 10) => c.toLowerCase().split("").map((l, s) => l.charCodeAt(0) * (s + 1) % m / m).slice(0, m), i = (c, m) => {
+ if (c.length !== m.length) throw new Error("Vectors must be same length");
+ const l = g.tensor1d(c), s = g.tensor1d(m), a = g.dot(l, s), r = g.norm(l), d = g.norm(s);
+ return r.dataSync()[0] === 0 || d.dataSync()[0] === 0 ? 0 : a.dataSync()[0] / (r.dataSync()[0] * d.dataSync()[0]);
+ }, o = n(t), p = e.map((c) => n(c)).map((c) => i(o, c));
+ return { avg: p.reduce((c, m) => c + m, 0) / p.length, max: Math.max(...p), similarities: p };
  }
  }
- const W = {
+ const I = {
  name: "cli",
  description: "Use the command line interface, returns any output",
  args: { command: { type: "string", description: "Command to run", required: !0 } },
- fn: (d) => U`${d.command}`
+ fn: (u) => U`${u.command}`
  }, Y = {
  name: "get_datetime",
  description: "Get current date and time",
@@ -431,15 +439,15 @@ const W = {
  language: { type: "string", description: "Execution language", enum: ["cli", "node", "python"], required: !0 },
  code: { type: "string", description: "Code to execute", required: !0 }
  },
- fn: async (d, t) => {
+ fn: async (u, t) => {
  try {
- switch (d.type) {
+ switch (u.type) {
  case "bash":
- return await W.fn({ command: d.code }, t);
+ return await I.fn({ command: u.code }, t);
  case "node":
- return await N.fn({ code: d.code }, t);
+ return await N.fn({ code: u.code }, t);
  case "python":
- return await z.fn({ code: d.code }, t);
+ return await z.fn({ code: u.code }, t);
  }
  } catch (e) {
  return { error: e?.message || e.toString() };
@@ -454,15 +462,15 @@ const W = {
  headers: { type: "object", description: "HTTP headers to send", default: {} },
  body: { type: "object", description: "HTTP body to send" }
  },
- fn: (d) => new j({ url: d.url, headers: d.headers }).request({ method: d.method || "GET", body: d.body })
+ fn: (u) => new j({ url: u.url, headers: u.headers }).request({ method: u.method || "GET", body: u.body })
  }, N = {
  name: "exec_javascript",
  description: "Execute commonjs javascript",
  args: {
  code: { type: "string", description: "CommonJS javascript", required: !0 }
  },
- fn: async (d) => {
- const t = T(null), e = await q({ console: t }, d.code, !0).catch((n) => t.output.error.push(n));
+ fn: async (u) => {
+ const t = T(null), e = await q({ console: t }, u.code, !0).catch((n) => t.output.error.push(n));
  return { ...t.output, return: e, stdout: void 0, stderr: void 0 };
  }
  }, z = {
@@ -471,7 +479,7 @@ const W = {
  args: {
  code: { type: "string", description: "CommonJS javascript", required: !0 }
  },
- fn: async (d) => ({ result: $`python -c "${d.code}"` })
+ fn: async (u) => ({ result: $`python -c "${u.code}"` })
  }, te = {
  name: "search",
  description: "Use a search engine to find relevant URLs, should be changed with fetch to scrape sources",
@@ -479,28 +487,28 @@ const W = {
  query: { type: "string", description: "Search string", required: !0 },
  length: { type: "string", description: "Number of results to return", default: 5 }
  },
- fn: async (d) => {
- const t = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(d.query)}`, {
+ fn: async (u) => {
+ const t = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(u.query)}`, {
  headers: { "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)", "Accept-Language": "en-US,en;q=0.9" }
- }).then((l) => l.text());
+ }).then((o) => o.text());
  let e, n = /<a .*?href="(.+?)".+?<\/a>/g;
- const s = new v();
+ const i = new v();
  for (; (e = n.exec(t)) !== null; ) {
- let l = /uddg=(.+)&amp?/.exec(decodeURIComponent(e[1]))?.[1];
- if (l && (l = decodeURIComponent(l)), l && s.add(l), s.size >= (d.length || 5)) break;
+ let o = /uddg=(.+)&amp?/.exec(decodeURIComponent(e[1]))?.[1];
+ if (o && (o = decodeURIComponent(o)), o && i.add(o), i.size >= (u.length || 5)) break;
  }
- return s;
+ return i;
  }
  };
  export {
  X as Ai,
  L as Anthropic,
- W as CliTool,
+ I as CliTool,
  Y as DateTimeTool,
  Z as ExecTool,
  ee as FetchTool,
  N as JSTool,
- J as LLM,
+ W as LLM,
  z as PythonTool,
  te as SearchTool
  };