@ztimson/ai-utils 0.2.0 → 0.2.1

This diff compares the publicly available contents of two package versions as published to their registry. It is provided for informational purposes only.
package/dist/index.mjs CHANGED
@@ -1,17 +1,16 @@
- import { pipeline as T } from "@xenova/transformers";
- import { deepCopy as j, objectMap as b, JSONAttemptParse as _, findByProp as k, JSONSanitize as y, Http as M, consoleInterceptor as A, fn as q, ASet as v } from "@ztimson/utils";
+ import { pipeline as S } from "@xenova/transformers";
+ import { deepCopy as T, objectMap as y, JSONAttemptParse as w, findByProp as b, JSONSanitize as _, Http as j, consoleInterceptor as M, fn as q, ASet as A } from "@ztimson/utils";
  import { Anthropic as P } from "@anthropic-ai/sdk";
  import { Ollama as E } from "ollama";
  import { OpenAI as $ } from "openai";
- import * as w from "@tensorflow/tfjs";
- import { spawn as O } from "node:child_process";
- import S from "node:fs/promises";
- import U from "node:path";
- import { createWorker as L } from "tesseract.js";
- import { $ as R, $Sync as D } from "@ztimson/node-utils";
- class x {
+ import { spawn as v } from "node:child_process";
+ import x from "node:fs/promises";
+ import O from "node:path";
+ import { createWorker as U } from "tesseract.js";
+ import { $ as L, $Sync as R } from "@ztimson/node-utils";
+ class k {
  }
- class z extends x {
+ class D extends k {
  constructor(t, e, n) {
  super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new P({ apiKey: e });
  }
@@ -46,9 +45,9 @@ class z extends x {
  ask(t, e = {}) {
  const n = new AbortController(), a = new Promise(async (c, f) => {
  let i = this.fromStandard([...e.history || [], { role: "user", content: t, timestamp: Date.now() }]);
- const l = j(i);
+ const p = T(i);
  e.compress && (i = await this.ai.language.compressHistory(i, e.compress.max, e.compress.min, e));
- const m = {
+ const l = {
  model: e.model || this.model,
  max_tokens: e.max_tokens || this.ai.options.max_tokens || 4096,
  system: e.system || this.ai.options.system || "",
@@ -58,7 +57,7 @@ class z extends x {
  description: s.description,
  input_schema: {
  type: "object",
- properties: s.args ? b(s.args, (r, u) => ({ ...u, required: void 0 })) : {},
+ properties: s.args ? y(s.args, (r, d) => ({ ...d, required: void 0 })) : {},
  required: s.args ? Object.entries(s.args).filter((r) => r[1].required).map((r) => r[0]) : []
  },
  fn: void 0
@@ -66,10 +65,10 @@ class z extends x {
  messages: i,
  stream: !!e.stream
  };
- let o, d = !0;
+ let o, m = !0;
  do {
- if (o = await this.client.messages.create(m), e.stream) {
- d ? d = !1 : e.stream({ text: `
+ if (o = await this.client.messages.create(l), e.stream) {
+ m ? m = !1 : e.stream({ text: `

  ` }), o.content = [];
  for await (const r of o) {
@@ -78,30 +77,30 @@ class z extends x {
  r.content_block.type === "text" ? o.content.push({ type: "text", text: "" }) : r.content_block.type === "tool_use" && o.content.push({ type: "tool_use", id: r.content_block.id, name: r.content_block.name, input: "" });
  else if (r.type === "content_block_delta")
  if (r.delta.type === "text_delta") {
- const u = r.delta.text;
- o.content.at(-1).text += u, e.stream({ text: u });
+ const d = r.delta.text;
+ o.content.at(-1).text += d, e.stream({ text: d });
  } else r.delta.type === "input_json_delta" && (o.content.at(-1).input += r.delta.partial_json);
  else if (r.type === "content_block_stop") {
- const u = o.content.at(-1);
- u.input != null && (u.input = u.input ? _(u.input, {}) : {});
+ const d = o.content.at(-1);
+ d.input != null && (d.input = d.input ? w(d.input, {}) : {});
  } else if (r.type === "message_stop")
  break;
  }
  }
  const s = o.content.filter((r) => r.type === "tool_use");
  if (s.length && !n.signal.aborted) {
- i.push({ role: "assistant", content: o.content }), l.push({ role: "assistant", content: o.content });
- const r = await Promise.all(s.map(async (u) => {
- const h = e.tools?.find(k("name", u.name));
- if (!h) return { tool_use_id: u.id, is_error: !0, content: "Tool not found" };
+ i.push({ role: "assistant", content: o.content }), p.push({ role: "assistant", content: o.content });
+ const r = await Promise.all(s.map(async (d) => {
+ const h = e.tools?.find(b("name", d.name));
+ if (!h) return { tool_use_id: d.id, is_error: !0, content: "Tool not found" };
  try {
- const g = await h.fn(u.input, this.ai);
- return { type: "tool_result", tool_use_id: u.id, content: y(g) };
+ const g = await h.fn(d.input, this.ai);
+ return { type: "tool_result", tool_use_id: d.id, content: _(g) };
  } catch (g) {
- return { type: "tool_result", tool_use_id: u.id, is_error: !0, content: g?.message || g?.toString() || "Unknown" };
+ return { type: "tool_result", tool_use_id: d.id, is_error: !0, content: g?.message || g?.toString() || "Unknown" };
  }
  }));
- i.push({ role: "user", content: r }), m.messages = i;
+ i.push({ role: "user", content: r }), l.messages = i;
  }
  } while (!n.signal.aborted && o.content.some((s) => s.type === "tool_use"));
  e.stream && e.stream({ done: !0 }), c(this.toStandard([...i, { role: "assistant", content: o.content.filter((s) => s.type == "text").map((s) => s.text).join(`
@@ -111,7 +110,7 @@ class z extends x {
  return Object.assign(a, { abort: () => n.abort() });
  }
  }
- class H extends x {
+ class z extends k {
  constructor(t, e, n) {
  super(), this.ai = t, this.host = e, this.model = n, this.client = new E({ host: e });
  }
@@ -136,11 +135,11 @@ class H extends x {
  }
  ask(t, e = {}) {
  const n = new AbortController(), a = new Promise(async (c, f) => {
- let i = e.system || this.ai.options.system, l = this.fromStandard([...e.history || [], { role: "user", content: t, timestamp: Date.now() }]);
- l[0].roll == "system" && (i ? l.shift() : i = l.shift()), e.compress && (l = await this.ai.language.compressHistory(l, e.compress.max, e.compress.min)), e.system && l.unshift({ role: "system", content: i });
- const m = {
+ let i = e.system || this.ai.options.system, p = this.fromStandard([...e.history || [], { role: "user", content: t, timestamp: Date.now() }]);
+ p[0].roll == "system" && (i ? p.shift() : i = p.shift()), e.compress && (p = await this.ai.language.compressHistory(p, e.compress.max, e.compress.min)), e.system && p.unshift({ role: "system", content: i });
+ const l = {
  model: e.model || this.model,
- messages: l,
+ messages: p,
  stream: !!e.stream,
  signal: n.signal,
  options: {
@@ -154,43 +153,43 @@ class H extends x {
  description: s.description,
  parameters: {
  type: "object",
- properties: s.args ? b(s.args, (r, u) => ({ ...u, required: void 0 })) : {},
+ properties: s.args ? y(s.args, (r, d) => ({ ...d, required: void 0 })) : {},
  required: s.args ? Object.entries(s.args).filter((r) => r[1].required).map((r) => r[0]) : []
  }
  }
  }))
  };
- let o, d = !0;
+ let o, m = !0;
  do {
- if (o = await this.client.chat(m), e.stream) {
- d ? d = !1 : e.stream({ text: `
+ if (o = await this.client.chat(l), e.stream) {
+ m ? m = !1 : e.stream({ text: `

  ` }), o.message = { role: "assistant", content: "", tool_calls: [] };
  for await (const s of o)
  if (n.signal.aborted || (s.message?.content && (o.message.content += s.message.content, e.stream({ text: s.message.content })), s.message?.tool_calls && (o.message.tool_calls = s.message.tool_calls), s.done)) break;
  }
  if (o.message?.tool_calls?.length && !n.signal.aborted) {
- l.push(o.message);
+ p.push(o.message);
  const s = await Promise.all(o.message.tool_calls.map(async (r) => {
- const u = (e.tools || this.ai.options.tools)?.find(k("name", r.function.name));
- if (!u) return { role: "tool", tool_name: r.function.name, content: '{"error": "Tool not found"}' };
- const h = typeof r.function.arguments == "string" ? _(r.function.arguments, {}) : r.function.arguments;
+ const d = (e.tools || this.ai.options.tools)?.find(b("name", r.function.name));
+ if (!d) return { role: "tool", tool_name: r.function.name, content: '{"error": "Tool not found"}' };
+ const h = typeof r.function.arguments == "string" ? w(r.function.arguments, {}) : r.function.arguments;
  try {
- const g = await u.fn(h, this.ai);
- return { role: "tool", tool_name: r.function.name, args: h, content: y(g) };
+ const g = await d.fn(h, this.ai);
+ return { role: "tool", tool_name: r.function.name, args: h, content: _(g) };
  } catch (g) {
- return { role: "tool", tool_name: r.function.name, args: h, content: y({ error: g?.message || g?.toString() || "Unknown" }) };
+ return { role: "tool", tool_name: r.function.name, args: h, content: _({ error: g?.message || g?.toString() || "Unknown" }) };
  }
  }));
- l.push(...s), m.messages = l;
+ p.push(...s), l.messages = p;
  }
  } while (!n.signal.aborted && o.message?.tool_calls?.length);
- e.stream && e.stream({ done: !0 }), c(this.toStandard([...l, { role: "assistant", content: o.message?.content }]));
+ e.stream && e.stream({ done: !0 }), c(this.toStandard([...p, { role: "assistant", content: o.message?.content }]));
  });
  return Object.assign(a, { abort: () => n.abort() });
  }
  }
- class N extends x {
+ class H extends k {
  constructor(t, e, n) {
  super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new $({ apiKey: e });
  }
@@ -203,7 +202,7 @@ class N extends x {
  role: "tool",
  id: c.id,
  name: c.function.name,
- args: _(c.function.arguments, {}),
+ args: w(c.function.arguments, {}),
  timestamp: n.timestamp
  }));
  t.splice(e, 1, ...a), e += a.length - 1;
@@ -240,60 +239,60 @@ class N extends x {
  const n = new AbortController(), a = new Promise(async (c, f) => {
  let i = this.fromStandard([...e.history || [], { role: "user", content: t, timestamp: Date.now() }]);
  e.compress && (i = await this.ai.language.compressHistory(i, e.compress.max, e.compress.min, e));
- const l = {
+ const p = {
  model: e.model || this.model,
  messages: i,
  stream: !!e.stream,
  max_tokens: e.max_tokens || this.ai.options.max_tokens || 4096,
  temperature: e.temperature || this.ai.options.temperature || 0.7,
- tools: (e.tools || this.ai.options.tools || []).map((d) => ({
+ tools: (e.tools || this.ai.options.tools || []).map((m) => ({
  type: "function",
  function: {
- name: d.name,
- description: d.description,
+ name: m.name,
+ description: m.description,
  parameters: {
  type: "object",
- properties: d.args ? b(d.args, (s, r) => ({ ...r, required: void 0 })) : {},
- required: d.args ? Object.entries(d.args).filter((s) => s[1].required).map((s) => s[0]) : []
+ properties: m.args ? y(m.args, (s, r) => ({ ...r, required: void 0 })) : {},
+ required: m.args ? Object.entries(m.args).filter((s) => s[1].required).map((s) => s[0]) : []
  }
  }
  }))
  };
- let m, o = !0;
+ let l, o = !0;
  do {
- if (m = await this.client.chat.completions.create(l), e.stream) {
+ if (l = await this.client.chat.completions.create(p), e.stream) {
  o ? o = !1 : e.stream({ text: `

- ` }), m.choices = [{ message: { content: "", tool_calls: [] } }];
- for await (const s of m) {
+ ` }), l.choices = [{ message: { content: "", tool_calls: [] } }];
+ for await (const s of l) {
  if (n.signal.aborted) break;
- s.choices[0].delta.content && (m.choices[0].message.content += s.choices[0].delta.content, e.stream({ text: s.choices[0].delta.content })), s.choices[0].delta.tool_calls && (m.choices[0].message.tool_calls = s.choices[0].delta.tool_calls);
+ s.choices[0].delta.content && (l.choices[0].message.content += s.choices[0].delta.content, e.stream({ text: s.choices[0].delta.content })), s.choices[0].delta.tool_calls && (l.choices[0].message.tool_calls = s.choices[0].delta.tool_calls);
  }
  }
- const d = m.choices[0].message.tool_calls || [];
- if (d.length && !n.signal.aborted) {
- i.push(m.choices[0].message);
- const s = await Promise.all(d.map(async (r) => {
- const u = e.tools?.find(k("name", r.function.name));
- if (!u) return { role: "tool", tool_call_id: r.id, content: '{"error": "Tool not found"}' };
+ const m = l.choices[0].message.tool_calls || [];
+ if (m.length && !n.signal.aborted) {
+ i.push(l.choices[0].message);
+ const s = await Promise.all(m.map(async (r) => {
+ const d = e.tools?.find(b("name", r.function.name));
+ if (!d) return { role: "tool", tool_call_id: r.id, content: '{"error": "Tool not found"}' };
  try {
- const h = _(r.function.arguments, {}), g = await u.fn(h, this.ai);
- return { role: "tool", tool_call_id: r.id, content: y(g) };
+ const h = w(r.function.arguments, {}), g = await d.fn(h, this.ai);
+ return { role: "tool", tool_call_id: r.id, content: _(g) };
  } catch (h) {
- return { role: "tool", tool_call_id: r.id, content: y({ error: h?.message || h?.toString() || "Unknown" }) };
+ return { role: "tool", tool_call_id: r.id, content: _({ error: h?.message || h?.toString() || "Unknown" }) };
  }
  }));
- i.push(...s), l.messages = i;
+ i.push(...s), p.messages = i;
  }
- } while (!n.signal.aborted && m.choices?.[0]?.message?.tool_calls?.length);
- e.stream && e.stream({ done: !0 }), c(this.toStandard([...i, { role: "assistant", content: m.choices[0].message.content || "" }]));
+ } while (!n.signal.aborted && l.choices?.[0]?.message?.tool_calls?.length);
+ e.stream && e.stream({ done: !0 }), c(this.toStandard([...i, { role: "assistant", content: l.choices[0].message.content || "" }]));
  });
  return Object.assign(a, { abort: () => n.abort() });
  }
  }
- class I {
+ class N {
  constructor(t) {
- this.ai = t, this.embedModel = T("feature-extraction", "Xenova/all-MiniLM-L6-v2"), t.options.anthropic?.token && (this.providers.anthropic = new z(this.ai, t.options.anthropic.token, t.options.anthropic.model)), t.options.ollama?.host && (this.providers.ollama = new H(this.ai, t.options.ollama.host, t.options.ollama.model)), t.options.openAi?.token && (this.providers.openAi = new N(this.ai, t.options.openAi.token, t.options.openAi.model));
+ this.ai = t, this.embedModel = S("feature-extraction", "Xenova/all-MiniLM-L6-v2"), t.options.anthropic?.token && (this.providers.anthropic = new D(this.ai, t.options.anthropic.token, t.options.anthropic.model)), t.options.ollama?.host && (this.providers.ollama = new z(this.ai, t.options.ollama.host, t.options.ollama.model)), t.options.openAi?.token && (this.providers.openAi = new H(this.ai, t.options.openAi.token, t.options.openAi.model));
  }
  embedModel;
  providers = {};
@@ -323,38 +322,46 @@ class I {
  if (f += this.estimateTokens(o.content), f < n) c++;
  else break;
  if (t.length <= c) return t;
- const i = c == 0 ? [] : t.slice(-c), l = (c == 0 ? t : t.slice(0, -c)).filter((o) => o.role === "assistant" || o.role === "user");
- return [{ role: "assistant", content: `Conversation Summary: ${await this.summarize(l.map((o) => `${o.role}: ${o.content}`).join(`
+ const i = c == 0 ? [] : t.slice(-c), p = (c == 0 ? t : t.slice(0, -c)).filter((o) => o.role === "assistant" || o.role === "user");
+ return [{ role: "assistant", content: `Conversation Summary: ${await this.summarize(p.map((o) => `${o.role}: ${o.content}`).join(`

  `), 250, a)}`, timestamp: Date.now() }, ...i];
  }
+ cosineSimilarity(t, e) {
+ if (t.length !== e.length) throw new Error("Vectors must be same length");
+ let n = 0, a = 0, c = 0;
+ for (let i = 0; i < t.length; i++)
+ n += t[i] * e[i], a += t[i] * t[i], c += e[i] * e[i];
+ const f = Math.sqrt(a) * Math.sqrt(c);
+ return f === 0 ? 0 : n / f;
+ }
  embedding(t, e = 500, n = 50) {
- const a = (o, d = "") => o == null ? [] : Object.entries(o).flatMap(([s, r]) => {
- const u = d ? `${d}${isNaN(+s) ? `.${s}` : `[${s}]`}` : s;
- if (typeof r == "object" && r !== null && !Array.isArray(r)) return a(r, u);
+ const a = (o, m = "") => o == null ? [] : Object.entries(o).flatMap(([s, r]) => {
+ const d = m ? `${m}${isNaN(+s) ? `.${s}` : `[${s}]`}` : s;
+ if (typeof r == "object" && r !== null && !Array.isArray(r)) return a(r, d);
  const h = Array.isArray(r) ? r.join(", ") : String(r);
- return `${u}: ${h}`;
+ return `${d}: ${h}`;
  }), c = async (o) => {
  const s = await (await this.embedModel)(o, { pooling: "mean", normalize: !0 });
  return Array.from(s.data);
  }, i = (typeof t == "object" ? a(t) : t.split(`
- `)).flatMap((o) => [...o.split(/\s+/).filter((d) => d.trim()), `
- `]), l = [];
- let m = 0;
- for (; m < i.length; ) {
- let o = m, d = "";
+ `)).flatMap((o) => [...o.split(/\s+/).filter((m) => m.trim()), `
+ `]), p = [];
+ let l = 0;
+ for (; l < i.length; ) {
+ let o = l, m = "";
  for (; o < i.length; ) {
- const r = i[o], u = d + (d ? " " : "") + r;
- if (this.estimateTokens(u.replace(/\s*\n\s*/g, `
- `)) > e && d) break;
- d = u, o++;
+ const r = i[o], d = m + (m ? " " : "") + r;
+ if (this.estimateTokens(d.replace(/\s*\n\s*/g, `
+ `)) > e && m) break;
+ m = d, o++;
  }
- const s = d.replace(/\s*\n\s*/g, `
+ const s = m.replace(/\s*\n\s*/g, `
  `).trim();
- s && l.push(s), m = o - n, m <= o - i.length + o && (m = o);
+ s && p.push(s), l = o - n, l <= o - i.length + o && (l = o);
  }
- return Promise.all(l.map(async (o, d) => ({
- index: d,
+ return Promise.all(p.map(async (o, m) => ({
+ index: m,
  embedding: await c(o),
  text: o,
  tokens: this.estimateTokens(o)
@@ -377,12 +384,8 @@ class I {
  */
  fuzzyMatch(t, ...e) {
  if (e.length < 2) throw new Error("Requires at least 2 strings to compare");
- const n = (i, l = 10) => i.toLowerCase().split("").map((m, o) => m.charCodeAt(0) * (o + 1) % l / l).slice(0, l), a = (i, l) => {
- if (i.length !== l.length) throw new Error("Vectors must be same length");
- const m = w.tensor1d(i), o = w.tensor1d(l), d = w.dot(m, o), s = w.norm(m), r = w.norm(o);
- return s.dataSync()[0] === 0 || r.dataSync()[0] === 0 ? 0 : d.dataSync()[0] / (s.dataSync()[0] * r.dataSync()[0]);
- }, c = n(t), f = e.map((i) => n(i)).map((i) => a(c, i));
- return { avg: f.reduce((i, l) => i + l, 0) / f.length, max: Math.max(...f), similarities: f };
+ const n = (f, i = 10) => f.toLowerCase().split("").map((p, l) => p.charCodeAt(0) * (l + 1) % i / i).slice(0, i), a = n(t), c = e.map((f) => n(f)).map((f) => this.cosineSimilarity(a, f));
+ return { avg: c.reduce((f, i) => f + i, 0) / c.length, max: Math.max(...c), similarities: c };
  }
  /**
  * Ask a question with JSON response
@@ -395,7 +398,7 @@ class I {
  system: "Respond using a JSON blob",
  ...e
  });
- return n?.[0]?.content ? _(new RegExp("{[sS]*}").exec(n[0].content), {}) : {};
+ return n?.[0]?.content ? w(new RegExp("{[sS]*}").exec(n[0].content), {}) : {};
  }
  /**
  * Create a summary of some text
@@ -408,7 +411,7 @@ class I {
  return this.ask(t, { system: `Generate a brief summary <= ${e} tokens. Output nothing else`, temperature: 0.3, ...n }).then((a) => a.pop()?.content || null);
  }
  }
- class J {
+ class I {
  constructor(t) {
  this.ai = t, t.options.whisper?.binary && (this.whisperModel = t.options.whisper?.model.endsWith(".bin") ? t.options.whisper?.model : t.options.whisper?.model + ".bin", this.downloadAsrModel());
  }
@@ -426,10 +429,10 @@ class J {
  };
  return { response: new Promise((c, f) => {
  this.downloadAsrModel(e).then((i) => {
- let l = "";
- const m = O(this.ai.options.whisper?.binary, ["-nt", "-np", "-m", i, "-f", t], { stdio: ["ignore", "pipe", "ignore"] });
- n = () => m.kill("SIGTERM"), m.on("error", (o) => f(o)), m.stdout.on("data", (o) => l += o.toString()), m.on("close", (o) => {
- o === 0 ? c(l.trim() || null) : f(new Error(`Exit code ${o}`));
+ let p = "";
+ const l = v(this.ai.options.whisper?.binary, ["-nt", "-np", "-m", i, "-f", t], { stdio: ["ignore", "pipe", "ignore"] });
+ n = () => l.kill("SIGTERM"), l.on("error", (o) => f(o)), l.stdout.on("data", (o) => p += o.toString()), l.on("close", (o) => {
+ o === 0 ? c(p.trim() || null) : f(new Error(`Exit code ${o}`));
  });
  });
  }), abort: n };
@@ -443,11 +446,11 @@ class J {
  async downloadAsrModel(t = this.whisperModel) {
  if (!this.ai.options.whisper?.binary) throw new Error("Whisper not configured");
  t.endsWith(".bin") || (t += ".bin");
- const e = U.join(this.ai.options.whisper.path, t);
- return await S.stat(e).then(() => !0).catch(() => !1) ? e : this.downloads[t] ? this.downloads[t] : (this.downloads[t] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${t}`).then((n) => n.arrayBuffer()).then((n) => Buffer.from(n)).then(async (n) => (await S.writeFile(e, n), delete this.downloads[t], e)), this.downloads[t]);
+ const e = O.join(this.ai.options.whisper.path, t);
+ return await x.stat(e).then(() => !0).catch(() => !1) ? e : this.downloads[t] ? this.downloads[t] : (this.downloads[t] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${t}`).then((n) => n.arrayBuffer()).then((n) => Buffer.from(n)).then(async (n) => (await x.writeFile(e, n), delete this.downloads[t], e)), this.downloads[t]);
  }
  }
- class W {
+ class J {
  constructor(t) {
  this.ai = t;
  }
@@ -463,16 +466,16 @@ class W {
  e?.terminate();
  },
  response: new Promise(async (n) => {
- e = await L("eng");
+ e = await U("eng");
  const { data: a } = await e.recognize(t);
  await e.terminate(), n(a.text.trim() || null);
  })
  };
  }
  }
- class oe {
+ class ne {
  constructor(t) {
- this.options = t, this.audio = new J(this), this.language = new I(this), this.vision = new W(this);
+ this.options = t, this.audio = new I(this), this.language = new N(this), this.vision = new J(this);
  }
  downloads = {};
  whisperModel;
@@ -483,38 +486,38 @@ class oe {
  /** Vision processing AI */
  vision;
  }
- const G = {
+ const W = {
  name: "cli",
  description: "Use the command line interface, returns any output",
  args: { command: { type: "string", description: "Command to run", required: !0 } },
- fn: (p) => R`${p.command}`
- }, se = {
+ fn: (u) => L`${u.command}`
+ }, oe = {
  name: "get_datetime",
  description: "Get current date and time",
  args: {},
  fn: async () => (/* @__PURE__ */ new Date()).toISOString()
- }, re = {
+ }, se = {
  name: "exec",
  description: "Run code/scripts",
  args: {
  language: { type: "string", description: "Execution language", enum: ["cli", "node", "python"], required: !0 },
  code: { type: "string", description: "Code to execute", required: !0 }
  },
- fn: async (p, t) => {
+ fn: async (u, t) => {
  try {
- switch (p.type) {
+ switch (u.type) {
  case "bash":
- return await G.fn({ command: p.code }, t);
+ return await W.fn({ command: u.code }, t);
  case "node":
- return await F.fn({ code: p.code }, t);
+ return await G.fn({ code: u.code }, t);
  case "python":
- return await B.fn({ code: p.code }, t);
+ return await F.fn({ code: u.code }, t);
  }
  } catch (e) {
  return { error: e?.message || e.toString() };
  }
  }
- }, ae = {
+ }, re = {
  name: "fetch",
  description: "Make HTTP request to URL",
  args: {
@@ -523,54 +526,54 @@ const G = {
  headers: { type: "object", description: "HTTP headers to send", default: {} },
  body: { type: "object", description: "HTTP body to send" }
  },
- fn: (p) => new M({ url: p.url, headers: p.headers }).request({ method: p.method || "GET", body: p.body })
- }, F = {
+ fn: (u) => new j({ url: u.url, headers: u.headers }).request({ method: u.method || "GET", body: u.body })
+ }, G = {
  name: "exec_javascript",
  description: "Execute commonjs javascript",
  args: {
  code: { type: "string", description: "CommonJS javascript", required: !0 }
  },
- fn: async (p) => {
- const t = A(null), e = await q({ console: t }, p.code, !0).catch((n) => t.output.error.push(n));
+ fn: async (u) => {
+ const t = M(null), e = await q({ console: t }, u.code, !0).catch((n) => t.output.error.push(n));
  return { ...t.output, return: e, stdout: void 0, stderr: void 0 };
  }
- }, B = {
+ }, F = {
  name: "exec_javascript",
  description: "Execute commonjs javascript",
  args: {
  code: { type: "string", description: "CommonJS javascript", required: !0 }
  },
- fn: async (p) => ({ result: D`python -c "${p.code}"` })
- }, ie = {
+ fn: async (u) => ({ result: R`python -c "${u.code}"` })
+ }, ae = {
  name: "search",
  description: "Use a search engine to find relevant URLs, should be changed with fetch to scrape sources",
  args: {
  query: { type: "string", description: "Search string", required: !0 },
  length: { type: "string", description: "Number of results to return", default: 5 }
  },
- fn: async (p) => {
- const t = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(p.query)}`, {
+ fn: async (u) => {
+ const t = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(u.query)}`, {
  headers: { "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)", "Accept-Language": "en-US,en;q=0.9" }
  }).then((c) => c.text());
  let e, n = /<a .*?href="(.+?)".+?<\/a>/g;
- const a = new v();
+ const a = new A();
  for (; (e = n.exec(t)) !== null; ) {
  let c = /uddg=(.+)&amp?/.exec(decodeURIComponent(e[1]))?.[1];
- if (c && (c = decodeURIComponent(c)), c && a.add(c), a.size >= (p.length || 5)) break;
+ if (c && (c = decodeURIComponent(c)), c && a.add(c), a.size >= (u.length || 5)) break;
  }
  return a;
  }
  };
  export {
- oe as Ai,
- z as Anthropic,
- G as CliTool,
- se as DateTimeTool,
- re as ExecTool,
- ae as FetchTool,
- F as JSTool,
- I as LLM,
- B as PythonTool,
- ie as SearchTool
+ ne as Ai,
+ D as Anthropic,
+ W as CliTool,
+ oe as DateTimeTool,
+ se as ExecTool,
+ re as FetchTool,
+ G as JSTool,
+ N as LLM,
+ F as PythonTool,
+ ae as SearchTool
  };
  //# sourceMappingURL=index.mjs.map
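For readers of the minified diff above: the new cosineSimilarity helper (added in the @@ -323,38 +322,46 @@ hunk and now called by fuzzyMatch, which appears to replace the tensor math that previously relied on the removed @tensorflow/tfjs import) is equivalent to the readable sketch below. Identifier names are reconstructed for clarity and are not the package's actual source names.

// Readable sketch of the minified cosineSimilarity(t, e) method above.
// Names are reconstructed; behaviour mirrors the published code.
function cosineSimilarity(a, b) {
  if (a.length !== b.length) throw new Error('Vectors must be same length');
  let dot = 0, normA = 0, normB = 0;
  for (let i = 0; i < a.length; i++) {
    dot += a[i] * b[i];    // dot product
    normA += a[i] * a[i];  // squared magnitude of a
    normB += b[i] * b[i];  // squared magnitude of b
  }
  const denominator = Math.sqrt(normA) * Math.sqrt(normB);
  return denominator === 0 ? 0 : dot / denominator; // guard against zero-length vectors
}

// Example: parallel vectors score 1, orthogonal vectors score 0.
cosineSimilarity([1, 2, 3], [2, 4, 6]); // 1
cosineSimilarity([1, 0], [0, 1]);       // 0

In fuzzyMatch, each input string is first hashed to a short numeric vector (charCodeAt-based, up to 10 values) and those vectors are what get compared with this function.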