@ztimson/ai-utils 0.1.21 → 0.1.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -1,18 +1,18 @@
- import { createWorker as T } from "tesseract.js";
- import { deepCopy as q, objectMap as k, JSONAttemptParse as y, findByProp as x, JSONSanitize as b, Http as v, consoleInterceptor as P, fn as A, ASet as E } from "@ztimson/utils";
- import { Anthropic as M } from "@anthropic-ai/sdk";
- import { Ollama as O } from "ollama";
- import { OpenAI as D } from "openai";
- import j from "node:fs/promises";
+ import { createWorker as j } from "tesseract.js";
+ import { deepCopy as T, objectMap as b, JSONAttemptParse as _, findByProp as k, JSONSanitize as y, Http as q, consoleInterceptor as v, fn as P, ASet as A } from "@ztimson/utils";
+ import { Anthropic as E } from "@anthropic-ai/sdk";
+ import { Ollama as M } from "ollama";
+ import { OpenAI as O } from "openai";
+ import S from "node:fs/promises";
  import U from "node:path";
- import * as _ from "@tensorflow/tfjs";
+ import * as w from "@tensorflow/tfjs";
  import { spawn as $ } from "node:child_process";
  import { $ as L, $Sync as R } from "@ztimson/node-utils";
- class S {
+ class x {
  }
- class I extends S {
+ class D extends x {
  constructor(t, e, n) {
- super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new M({ apiKey: e });
+ super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new E({ apiKey: e });
  }
  client;
  toStandard(t) {
@@ -21,8 +21,8 @@ class I extends S {
  typeof t[n].content != "string" && (t[n].role == "assistant" ? t[n].content.filter((a) => a.type == "tool_use").forEach((a) => {
  e++, t.splice(e, 0, { role: "tool", id: a.id, name: a.name, args: a.input, timestamp: Date.now() });
  }) : t[n].role == "user" && t[n].content.filter((a) => a.type == "tool_result").forEach((a) => {
- const i = t.find((f) => f.id == a.tool_use_id);
- i[a.is_error ? "error" : "content"] = a.content;
+ const c = t.find((f) => f.id == a.tool_use_id);
+ c[a.is_error ? "error" : "content"] = a.content;
  }), t[n].content = t[n].content.filter((a) => a.type == "text").map((a) => a.text).join(`

  `)), t[n].timestamp || (t[n].timestamp = Date.now());
@@ -43,81 +43,76 @@ class I extends S {
  return t.map(({ timestamp: e, ...n }) => n);
  }
  ask(t, e = {}) {
- const n = new AbortController(), a = new Promise(async (i, f) => {
- let c = this.fromStandard([...e.history || [], { role: "user", content: t, timestamp: Date.now() }]);
- const l = q(c);
- e.compress && (c = await this.ai.llm.compress(c, e.compress.max, e.compress.min, e));
- const d = {
+ const n = new AbortController(), a = new Promise(async (c, f) => {
+ let i = this.fromStandard([...e.history || [], { role: "user", content: t, timestamp: Date.now() }]);
+ const l = T(i);
+ e.compress && (i = await this.ai.llm.compress(i, e.compress.max, e.compress.min, e));
+ const m = {
  model: e.model || this.model,
  max_tokens: e.max_tokens || this.ai.options.max_tokens || 4096,
  system: e.system || this.ai.options.system || "",
  temperature: e.temperature || this.ai.options.temperature || 0.7,
- tools: (e.tools || this.ai.options.tools || []).map((m) => ({
- name: m.name,
- description: m.description,
+ tools: (e.tools || this.ai.options.tools || []).map((o) => ({
+ name: o.name,
+ description: o.description,
  input_schema: {
  type: "object",
- properties: m.args ? k(m.args, (r, s) => ({ ...s, required: void 0 })) : {},
- required: m.args ? Object.entries(m.args).filter((r) => r[1].required).map((r) => r[0]) : []
+ properties: o.args ? b(o.args, (r, u) => ({ ...u, required: void 0 })) : {},
+ required: o.args ? Object.entries(o.args).filter((r) => r[1].required).map((r) => r[0]) : []
  },
  fn: void 0
  })),
- messages: c,
+ messages: i,
  stream: !!e.stream
  };
- let o;
- const h = [];
+ let s, p = !0;
  do {
- if (o = await this.client.messages.create(d), e.stream) {
- h.length && e.stream({ text: `
+ if (s = await this.client.messages.create(m), e.stream) {
+ p ? p = !1 : e.stream({ text: `

- ` }), o.content = [];
- for await (const s of o) {
+ ` }), s.content = [];
+ for await (const r of s) {
  if (n.signal.aborted) break;
- if (s.type === "content_block_start")
- s.content_block.type === "text" ? o.content.push({ type: "text", text: "" }) : s.content_block.type === "tool_use" && o.content.push({ type: "tool_use", id: s.content_block.id, name: s.content_block.name, input: "" });
- else if (s.type === "content_block_delta")
- if (s.delta.type === "text_delta") {
- const p = s.delta.text;
- o.content.at(-1).text += p, e.stream({ text: p });
- } else s.delta.type === "input_json_delta" && (o.content.at(-1).input += s.delta.partial_json);
- else if (s.type === "content_block_stop") {
- const p = o.content.at(-1);
- p.input != null && (p.input = p.input ? y(p.input, {}) : {});
- } else if (s.type === "message_stop")
+ if (r.type === "content_block_start")
+ r.content_block.type === "text" ? s.content.push({ type: "text", text: "" }) : r.content_block.type === "tool_use" && s.content.push({ type: "tool_use", id: r.content_block.id, name: r.content_block.name, input: "" });
+ else if (r.type === "content_block_delta")
+ if (r.delta.type === "text_delta") {
+ const u = r.delta.text;
+ s.content.at(-1).text += u, e.stream({ text: u });
+ } else r.delta.type === "input_json_delta" && (s.content.at(-1).input += r.delta.partial_json);
+ else if (r.type === "content_block_stop") {
+ const u = s.content.at(-1);
+ u.input != null && (u.input = u.input ? _(u.input, {}) : {});
+ } else if (r.type === "message_stop")
  break;
  }
  }
- const m = o.content.filter((s) => s.type == "text").map((s) => s.text).join(`
-
- `);
- m && h.push(m);
- const r = o.content.filter((s) => s.type === "tool_use");
- if (r.length && !n.signal.aborted) {
- c.push({ role: "assistant", content: o.content }), l.push({ role: "assistant", content: o.content });
- const s = await Promise.all(r.map(async (p) => {
- const g = e.tools?.find(x("name", p.name));
- if (!g) return { tool_use_id: p.id, is_error: !0, content: "Tool not found" };
+ const o = s.content.filter((r) => r.type === "tool_use");
+ if (o.length && !n.signal.aborted) {
+ i.push({ role: "assistant", content: s.content }), l.push({ role: "assistant", content: s.content });
+ const r = await Promise.all(o.map(async (u) => {
+ const h = e.tools?.find(k("name", u.name));
+ if (!h) return { tool_use_id: u.id, is_error: !0, content: "Tool not found" };
  try {
- const w = await g.fn(p.input, this.ai);
- return { type: "tool_result", tool_use_id: p.id, content: b(w) };
- } catch (w) {
- return { type: "tool_result", tool_use_id: p.id, is_error: !0, content: w?.message || w?.toString() || "Unknown" };
+ const g = await h.fn(u.input, this.ai);
+ return { type: "tool_result", tool_use_id: u.id, content: y(g) };
+ } catch (g) {
+ return { type: "tool_result", tool_use_id: u.id, is_error: !0, content: g?.message || g?.toString() || "Unknown" };
  }
  }));
- c.push({ role: "user", content: s }), l.push({ role: "user", content: s }), d.messages = c;
+ i.push({ role: "user", content: r }), m.messages = i;
  }
- } while (!n.signal.aborted && o.content.some((m) => m.type === "tool_use"));
- e.stream && e.stream({ done: !0 }), i(this.toStandard([...l, { role: "assistant", content: h.join(`
+ } while (!n.signal.aborted && s.content.some((o) => o.type === "tool_use"));
+ e.stream && e.stream({ done: !0 }), c(this.toStandard([...i, { role: "assistant", content: s.content.filter((o) => o.type == "text").map((o) => o.text).join(`

- `), timestamp: Date.now() }]));
+ `) }]));
  });
  return Object.assign(a, { abort: () => n.abort() });
  }
  }
- class J extends S {
+ class I extends x {
  constructor(t, e, n) {
- super(), this.ai = t, this.host = e, this.model = n, this.client = new O({ host: e });
+ super(), this.ai = t, this.host = e, this.model = n, this.client = new M({ host: e });
  }
  client;
  toStandard(t) {
@@ -139,10 +134,10 @@ class J extends S {
  });
  }
  ask(t, e = {}) {
- const n = new AbortController(), a = new Promise(async (i, f) => {
- let c = e.system || this.ai.options.system, l = this.fromStandard([...e.history || [], { role: "user", content: t, timestamp: Date.now() }]);
- l[0].roll == "system" && (c ? l.shift() : c = l.shift()), e.compress && (l = await this.ai.llm.compress(l, e.compress.max, e.compress.min)), e.system && l.unshift({ role: "system", content: c });
- const d = {
+ const n = new AbortController(), a = new Promise(async (c, f) => {
+ let i = e.system || this.ai.options.system, l = this.fromStandard([...e.history || [], { role: "user", content: t, timestamp: Date.now() }]);
+ l[0].roll == "system" && (i ? l.shift() : i = l.shift()), e.compress && (l = await this.ai.llm.compress(l, e.compress.max, e.compress.min)), e.system && l.unshift({ role: "system", content: i });
+ const m = {
  model: e.model || this.model,
  messages: l,
  stream: !!e.stream,
@@ -151,72 +146,68 @@ class J extends S {
  temperature: e.temperature || this.ai.options.temperature || 0.7,
  num_predict: e.max_tokens || this.ai.options.max_tokens || 4096
  },
- tools: (e.tools || this.ai.options.tools || []).map((r) => ({
+ tools: (e.tools || this.ai.options.tools || []).map((o) => ({
  type: "function",
  function: {
- name: r.name,
- description: r.description,
+ name: o.name,
+ description: o.description,
  parameters: {
  type: "object",
- properties: r.args ? k(r.args, (s, p) => ({ ...p, required: void 0 })) : {},
- required: r.args ? Object.entries(r.args).filter((s) => s[1].required).map((s) => s[0]) : []
+ properties: o.args ? b(o.args, (r, u) => ({ ...u, required: void 0 })) : {},
+ required: o.args ? Object.entries(o.args).filter((r) => r[1].required).map((r) => r[0]) : []
  }
  }
  }))
  };
- let o;
- const h = [];
+ let s, p = !0;
  do {
- if (o = await this.client.chat(d), e.stream) {
- h.length && e.stream({ text: `
+ if (s = await this.client.chat(m), e.stream) {
+ p ? p = !1 : e.stream({ text: `

- ` }), o.message = { role: "assistant", content: "", tool_calls: [] };
- for await (const r of o)
- if (n.signal.aborted || (r.message?.content && (o.message.content += r.message.content, e.stream({ text: r.message.content })), r.message?.tool_calls && (o.message.tool_calls = r.message.tool_calls), r.done)) break;
+ ` }), s.message = { role: "assistant", content: "", tool_calls: [] };
+ for await (const o of s)
+ if (n.signal.aborted || (o.message?.content && (s.message.content += o.message.content, e.stream({ text: o.message.content })), o.message?.tool_calls && (s.message.tool_calls = o.message.tool_calls), o.done)) break;
  }
- if (h.push({ role: "assistant", content: o.message?.content, timestamp: Date.now() }), o.message?.tool_calls?.length && !n.signal.aborted) {
- l.push(o.message);
- const r = await Promise.all(o.message.tool_calls.map(async (s) => {
- const p = (e.tools || this.ai.options.tools)?.find(x("name", s.function.name));
- if (!p) return { role: "tool", tool_name: s.function.name, content: '{"error": "Tool not found"}' };
- const g = typeof s.function.arguments == "string" ? y(s.function.arguments, {}) : s.function.arguments;
+ if (s.message?.tool_calls?.length && !n.signal.aborted) {
+ l.push(s.message);
+ const o = await Promise.all(s.message.tool_calls.map(async (r) => {
+ const u = (e.tools || this.ai.options.tools)?.find(k("name", r.function.name));
+ if (!u) return { role: "tool", tool_name: r.function.name, content: '{"error": "Tool not found"}' };
+ const h = typeof r.function.arguments == "string" ? _(r.function.arguments, {}) : r.function.arguments;
  try {
- const w = await p.fn(g, this.ai);
- return { role: "tool", tool_name: s.function.name, args: g, content: b(w) };
- } catch (w) {
- return { role: "tool", tool_name: s.function.name, args: g, content: b({ error: w?.message || w?.toString() || "Unknown" }) };
+ const g = await u.fn(h, this.ai);
+ return { role: "tool", tool_name: r.function.name, args: h, content: y(g) };
+ } catch (g) {
+ return { role: "tool", tool_name: r.function.name, args: h, content: y({ error: g?.message || g?.toString() || "Unknown" }) };
  }
  }));
- l.push(...r), h.push(...r.map((s) => ({ ...s, timestamp: Date.now() }))), d.messages = l;
+ l.push(...o), m.messages = l;
  }
- } while (!n.signal.aborted && o.message?.tool_calls?.length);
- const m = h.filter((r) => r.role === "assistant").map((r) => r.content).filter((r) => r).join(`
-
- `);
- e.stream && e.stream({ done: !0 }), i(this.toStandard([...l, { role: "assistant", content: m, timestamp: Date.now() }]));
+ } while (!n.signal.aborted && s.message?.tool_calls?.length);
+ e.stream && e.stream({ done: !0 }), c(this.toStandard([...l, { role: "assistant", content: s.message?.content }]));
  });
  return Object.assign(a, { abort: () => n.abort() });
  }
  }
- class W extends S {
+ class J extends x {
  constructor(t, e, n) {
- super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new D({ apiKey: e });
+ super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new O({ apiKey: e });
  }
  client;
  toStandard(t) {
  for (let e = 0; e < t.length; e++) {
  const n = t[e];
  if (n.role === "assistant" && n.tool_calls) {
- const a = n.tool_calls.map((i) => ({
+ const a = n.tool_calls.map((c) => ({
  role: "tool",
- id: i.id,
- name: i.function.name,
- args: y(i.function.arguments, {}),
+ id: c.id,
+ name: c.function.name,
+ args: _(c.function.arguments, {}),
  timestamp: n.timestamp
  }));
  t.splice(e, 1, ...a), e += a.length - 1;
  } else if (n.role === "tool" && n.content) {
- const a = t.find((i) => n.tool_call_id == i.id);
+ const a = t.find((c) => n.tool_call_id == c.id);
  a && (n.content.includes('"error":') ? a.error = n.content : a.content = n.content), t.splice(e, 1), e--;
  }
  t[e]?.timestamp || (t[e].timestamp = Date.now());
@@ -238,75 +229,70 @@ class W extends S {
  content: n.error || n.content
  });
  else {
- const { timestamp: a, ...i } = n;
- e.push(i);
+ const { timestamp: a, ...c } = n;
+ e.push(c);
  }
  return e;
  }, []);
  }
  ask(t, e = {}) {
- const n = new AbortController(), a = new Promise(async (i, f) => {
- let c = this.fromStandard([...e.history || [], { role: "user", content: t, timestamp: Date.now() }]);
- e.compress && (c = await this.ai.llm.compress(c, e.compress.max, e.compress.min, e));
+ const n = new AbortController(), a = new Promise(async (c, f) => {
+ let i = this.fromStandard([...e.history || [], { role: "user", content: t, timestamp: Date.now() }]);
+ e.compress && (i = await this.ai.llm.compress(i, e.compress.max, e.compress.min, e));
  const l = {
  model: e.model || this.model,
- messages: c,
+ messages: i,
  stream: !!e.stream,
  max_tokens: e.max_tokens || this.ai.options.max_tokens || 4096,
  temperature: e.temperature || this.ai.options.temperature || 0.7,
- tools: (e.tools || this.ai.options.tools || []).map((m) => ({
+ tools: (e.tools || this.ai.options.tools || []).map((p) => ({
  type: "function",
  function: {
- name: m.name,
- description: m.description,
+ name: p.name,
+ description: p.description,
  parameters: {
  type: "object",
- properties: m.args ? k(m.args, (r, s) => ({ ...s, required: void 0 })) : {},
- required: m.args ? Object.entries(m.args).filter((r) => r[1].required).map((r) => r[0]) : []
+ properties: p.args ? b(p.args, (o, r) => ({ ...r, required: void 0 })) : {},
+ required: p.args ? Object.entries(p.args).filter((o) => o[1].required).map((o) => o[0]) : []
  }
  }
  }))
  };
- let d;
- const o = [];
+ let m, s = !0;
  do {
- if (d = await this.client.chat.completions.create(l), e.stream) {
- o.length && e.stream({ text: `
+ if (m = await this.client.chat.completions.create(l), e.stream) {
+ s ? s = !1 : e.stream({ text: `

- ` }), d.choices = [{ message: { content: "", tool_calls: [] } }];
- for await (const r of d) {
+ ` }), m.choices = [{ message: { content: "", tool_calls: [] } }];
+ for await (const o of m) {
  if (n.signal.aborted) break;
- r.choices[0].delta.content && (d.choices[0].message.content += r.choices[0].delta.content, e.stream({ text: r.choices[0].delta.content })), r.choices[0].delta.tool_calls && (d.choices[0].message.tool_calls = r.choices[0].delta.tool_calls);
+ o.choices[0].delta.content && (m.choices[0].message.content += o.choices[0].delta.content, e.stream({ text: o.choices[0].delta.content })), o.choices[0].delta.tool_calls && (m.choices[0].message.tool_calls = o.choices[0].delta.tool_calls);
  }
  }
- o.push({ role: "assistant", content: d.choices[0].message.content || "", timestamp: Date.now() });
- const m = d.choices[0].message.tool_calls || [];
- if (m.length && !n.signal.aborted) {
- c.push(d.choices[0].message);
- const r = await Promise.all(m.map(async (s) => {
- const p = e.tools?.find(x("name", s.function.name));
- if (!p) return { role: "tool", tool_call_id: s.id, content: '{"error": "Tool not found"}' };
+ const p = m.choices[0].message.tool_calls || [];
+ if (p.length && !n.signal.aborted) {
+ i.push(m.choices[0].message);
+ const o = await Promise.all(p.map(async (r) => {
+ const u = e.tools?.find(k("name", r.function.name));
+ if (!u) return { role: "tool", tool_call_id: r.id, content: '{"error": "Tool not found"}' };
  try {
- const g = y(s.function.arguments, {}), w = await p.fn(g, this.ai);
- return { role: "tool", tool_call_id: s.id, content: b(w) };
- } catch (g) {
- return { role: "tool", tool_call_id: s.id, content: b({ error: g?.message || g?.toString() || "Unknown" }) };
+ const h = _(r.function.arguments, {}), g = await u.fn(h, this.ai);
+ return { role: "tool", tool_call_id: r.id, content: y(g) };
+ } catch (h) {
+ return { role: "tool", tool_call_id: r.id, content: y({ error: h?.message || h?.toString() || "Unknown" }) };
  }
  }));
- c.push(...r), o.push(...r.map((s) => ({ ...s, timestamp: Date.now() }))), l.messages = c;
+ i.push(...o), l.messages = i;
  }
- } while (!n.signal.aborted && d.choices?.[0]?.message?.tool_calls?.length);
- const h = o.filter((m) => m.role === "assistant").map((m) => m.content).filter((m) => m).join(`
-
- `);
- e.stream && e.stream({ done: !0 }), i(this.toStandard([...c, { role: "assistant", content: h, timestamp: Date.now() }]));
+ } while (!n.signal.aborted && m.choices?.[0]?.message?.tool_calls?.length);
+ e.stream && e.stream({ done: !0 }), c(this.toStandard([...i, { role: "assistant", content: m.choices[0].message.content || "" }]));
  });
  return Object.assign(a, { abort: () => n.abort() });
  }
  }
- class N {
+ class W {
  constructor(t, e) {
- this.ai = t, this.options = e, e.anthropic?.token && (this.providers.anthropic = new I(this.ai, e.anthropic.token, e.anthropic.model)), e.ollama?.host && (this.providers.ollama = new J(this.ai, e.ollama.host, e.ollama.model)), e.openAi?.token && (this.providers.openAi = new W(this.ai, e.openAi.token, e.openAi.model));
+ this.ai = t, this.options = e, e.anthropic?.token && (this.providers.anthropic = new D(this.ai, e.anthropic.token, e.anthropic.model)), e.ollama?.host && (this.providers.ollama = new I(this.ai, e.ollama.host, e.ollama.model)), e.openAi?.token && (this.providers.openAi = new J(this.ai, e.openAi.token, e.openAi.model));
  }
  providers = {};
  /**
@@ -330,15 +316,15 @@ class N {
  */
  async compress(t, e, n, a) {
  if (this.estimateTokens(t) < e) return t;
- let i = 0, f = 0;
- for (let o of t.toReversed())
- if (f += this.estimateTokens(o.content), f < n) i++;
+ let c = 0, f = 0;
+ for (let s of t.toReversed())
+ if (f += this.estimateTokens(s.content), f < n) c++;
  else break;
- if (t.length <= i) return t;
- const c = i == 0 ? [] : t.slice(-i), l = (i == 0 ? t : t.slice(0, -i)).filter((o) => o.role === "assistant" || o.role === "user");
- return [{ role: "assistant", content: `Conversation Summary: ${await this.summarize(l.map((o) => `${o.role}: ${o.content}`).join(`
+ if (t.length <= c) return t;
+ const i = c == 0 ? [] : t.slice(-c), l = (c == 0 ? t : t.slice(0, -c)).filter((s) => s.role === "assistant" || s.role === "user");
+ return [{ role: "assistant", content: `Conversation Summary: ${await this.summarize(l.map((s) => `${s.role}: ${s.content}`).join(`

- `), 250, a)}`, timestamp: Date.now() }, ...c];
+ `), 250, a)}`, timestamp: Date.now() }, ...i];
  }
  /**
  * Estimate variable as tokens
@@ -360,7 +346,7 @@ class N {
  system: "Respond using a JSON blob",
  ...e
  });
- return n?.[0]?.content ? y(new RegExp("{[sS]*}").exec(n[0].content), {}) : {};
+ return n?.[0]?.content ? _(new RegExp("{[sS]*}").exec(n[0].content), {}) : {};
  }
  /**
  * Create a summary of some text
@@ -373,9 +359,9 @@ class N {
  return this.ask(t, { system: `Generate a brief summary <= ${e} tokens. Output nothing else`, temperature: 0.3, ...n }).then((a) => a.pop()?.content || null);
  }
  }
- class ee {
+ class Z {
  constructor(t) {
- this.options = t, this.llm = new N(this, t), this.options.whisper?.binary && (this.whisperModel = this.options.whisper?.model.endsWith(".bin") ? this.options.whisper?.model : this.options.whisper?.model + ".bin", this.downloadAsrModel());
+ this.options = t, this.llm = new W(this, t), this.options.whisper?.binary && (this.whisperModel = this.options.whisper?.model.endsWith(".bin") ? this.options.whisper?.model : this.options.whisper?.model + ".bin", this.downloadAsrModel());
  }
  downloads = {};
  whisperModel;
@@ -391,12 +377,12 @@ class ee {
  if (!this.options.whisper?.binary) throw new Error("Whisper not configured");
  let n = () => {
  };
- return { response: new Promise((i, f) => {
- this.downloadAsrModel(e).then((c) => {
+ return { response: new Promise((c, f) => {
+ this.downloadAsrModel(e).then((i) => {
  let l = "";
- const d = $(this.options.whisper?.binary, ["-nt", "-np", "-m", c, "-f", t], { stdio: ["ignore", "pipe", "ignore"] });
- n = () => d.kill("SIGTERM"), d.on("error", (o) => f(o)), d.stdout.on("data", (o) => l += o.toString()), d.on("close", (o) => {
- o === 0 ? i(l.trim() || null) : f(new Error(`Exit code ${o}`));
+ const m = $(this.options.whisper?.binary, ["-nt", "-np", "-m", i, "-f", t], { stdio: ["ignore", "pipe", "ignore"] });
+ n = () => m.kill("SIGTERM"), m.on("error", (s) => f(s)), m.stdout.on("data", (s) => l += s.toString()), m.on("close", (s) => {
+ s === 0 ? c(l.trim() || null) : f(new Error(`Exit code ${s}`));
  });
  });
  }), abort: n };
@@ -411,7 +397,7 @@ class ee {
  if (!this.options.whisper?.binary) throw new Error("Whisper not configured");
  t.endsWith(".bin") || (t += ".bin");
  const e = U.join(this.options.whisper.path, t);
- return await j.stat(e).then(() => !0).catch(() => !1) ? e : this.downloads[t] ? this.downloads[t] : (this.downloads[t] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${t}`).then((n) => n.arrayBuffer()).then((n) => Buffer.from(n)).then(async (n) => (await j.writeFile(e, n), delete this.downloads[t], e)), this.downloads[t]);
+ return await S.stat(e).then(() => !0).catch(() => !1) ? e : this.downloads[t] ? this.downloads[t] : (this.downloads[t] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${t}`).then((n) => n.arrayBuffer()).then((n) => Buffer.from(n)).then(async (n) => (await S.writeFile(e, n), delete this.downloads[t], e)), this.downloads[t]);
  }
  /**
  * Convert image to text using Optical Character Recognition
@@ -425,7 +411,7 @@ class ee {
  e?.terminate();
  },
  response: new Promise(async (n) => {
- e = await T("eng");
+ e = await j("eng");
  const { data: a } = await e.recognize(t);
  await e.terminate(), n(a.text.trim() || null);
  })
@@ -439,46 +425,46 @@ class ee {
  */
  semanticSimilarity(t, ...e) {
  if (e.length < 2) throw new Error("Requires at least 2 strings to compare");
- const n = (c, l = 10) => c.toLowerCase().split("").map((d, o) => d.charCodeAt(0) * (o + 1) % l / l).slice(0, l), a = (c, l) => {
- if (c.length !== l.length) throw new Error("Vectors must be same length");
- const d = _.tensor1d(c), o = _.tensor1d(l), h = _.dot(d, o), m = _.norm(d), r = _.norm(o);
- return m.dataSync()[0] === 0 || r.dataSync()[0] === 0 ? 0 : h.dataSync()[0] / (m.dataSync()[0] * r.dataSync()[0]);
- }, i = n(t), f = e.map((c) => n(c)).map((c) => a(i, c));
- return { avg: f.reduce((c, l) => c + l, 0) / f.length, max: Math.max(...f), similarities: f };
+ const n = (i, l = 10) => i.toLowerCase().split("").map((m, s) => m.charCodeAt(0) * (s + 1) % l / l).slice(0, l), a = (i, l) => {
+ if (i.length !== l.length) throw new Error("Vectors must be same length");
+ const m = w.tensor1d(i), s = w.tensor1d(l), p = w.dot(m, s), o = w.norm(m), r = w.norm(s);
+ return o.dataSync()[0] === 0 || r.dataSync()[0] === 0 ? 0 : p.dataSync()[0] / (o.dataSync()[0] * r.dataSync()[0]);
+ }, c = n(t), f = e.map((i) => n(i)).map((i) => a(c, i));
+ return { avg: f.reduce((i, l) => i + l, 0) / f.length, max: Math.max(...f), similarities: f };
  }
  }
- const z = {
+ const N = {
  name: "cli",
  description: "Use the command line interface, returns any output",
  args: { command: { type: "string", description: "Command to run", required: !0 } },
- fn: (u) => L`${u.command}`
- }, te = {
+ fn: (d) => L`${d.command}`
+ }, ee = {
  name: "get_datetime",
  description: "Get current date and time",
  args: {},
  fn: async () => (/* @__PURE__ */ new Date()).toISOString()
- }, ne = {
+ }, te = {
  name: "exec",
  description: "Run code/scripts",
  args: {
  language: { type: "string", description: "Execution language", enum: ["cli", "node", "python"], required: !0 },
  code: { type: "string", description: "Code to execute", required: !0 }
  },
- fn: async (u, t) => {
+ fn: async (d, t) => {
  try {
- switch (u.type) {
+ switch (d.type) {
  case "bash":
- return await z.fn({ command: u.code }, t);
+ return await N.fn({ command: d.code }, t);
  case "node":
- return await C.fn({ code: u.code }, t);
+ return await z.fn({ code: d.code }, t);
  case "python":
- return await G.fn({ code: u.code }, t);
+ return await G.fn({ code: d.code }, t);
  }
  } catch (e) {
  return { error: e?.message || e.toString() };
  }
  }
- }, oe = {
+ }, ne = {
  name: "fetch",
  description: "Make HTTP request to URL",
  args: {
@@ -487,15 +473,15 @@ const z = {
  headers: { type: "object", description: "HTTP headers to send", default: {} },
  body: { type: "object", description: "HTTP body to send" }
  },
- fn: (u) => new v({ url: u.url, headers: u.headers }).request({ method: u.method || "GET", body: u.body })
- }, C = {
+ fn: (d) => new q({ url: d.url, headers: d.headers }).request({ method: d.method || "GET", body: d.body })
+ }, z = {
  name: "exec_javascript",
  description: "Execute commonjs javascript",
  args: {
  code: { type: "string", description: "CommonJS javascript", required: !0 }
  },
- fn: async (u) => {
- const t = P(null), e = await A({ console: t }, u.code, !0).catch((n) => t.output.error.push(n));
+ fn: async (d) => {
+ const t = v(null), e = await P({ console: t }, d.code, !0).catch((n) => t.output.error.push(n));
  return { ...t.output, return: e, stdout: void 0, stderr: void 0 };
  }
  }, G = {
@@ -504,7 +490,7 @@ const z = {
  args: {
  code: { type: "string", description: "CommonJS javascript", required: !0 }
  },
- fn: async (u) => ({ result: R`python -c "${u.code}"` })
+ fn: async (d) => ({ result: R`python -c "${d.code}"` })
  }, se = {
  name: "search",
  description: "Use a search engine to find relevant URLs, should be changed with fetch to scrape sources",
@@ -512,28 +498,28 @@ const z = {
  query: { type: "string", description: "Search string", required: !0 },
  length: { type: "string", description: "Number of results to return", default: 5 }
  },
- fn: async (u) => {
- const t = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(u.query)}`, {
+ fn: async (d) => {
+ const t = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(d.query)}`, {
  headers: { "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)", "Accept-Language": "en-US,en;q=0.9" }
- }).then((i) => i.text());
+ }).then((c) => c.text());
  let e, n = /<a .*?href="(.+?)".+?<\/a>/g;
- const a = new E();
+ const a = new A();
  for (; (e = n.exec(t)) !== null; ) {
- let i = /uddg=(.+)&amp?/.exec(decodeURIComponent(e[1]))?.[1];
- if (i && (i = decodeURIComponent(i)), i && a.add(i), a.size >= (u.length || 5)) break;
+ let c = /uddg=(.+)&amp?/.exec(decodeURIComponent(e[1]))?.[1];
+ if (c && (c = decodeURIComponent(c)), c && a.add(c), a.size >= (d.length || 5)) break;
  }
  return a;
  }
  };
  export {
- ee as Ai,
- I as Anthropic,
- z as CliTool,
- te as DateTimeTool,
- ne as ExecTool,
- oe as FetchTool,
- C as JSTool,
- N as LLM,
+ Z as Ai,
+ D as Anthropic,
+ N as CliTool,
+ ee as DateTimeTool,
+ te as ExecTool,
+ ne as FetchTool,
+ z as JSTool,
+ W as LLM,
  G as PythonTool,
  se as SearchTool
  };