@ztimson/ai-utils 0.2.5 → 0.2.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -1,124 +1,126 @@
- import * as T from "node:os";
- import { pipeline as j } from "@xenova/transformers";
- import { deepCopy as M, objectMap as b, JSONAttemptParse as _, findByProp as k, JSONSanitize as y, Http as A, consoleInterceptor as q, fn as P, ASet as O } from "@ztimson/utils";
- import { Anthropic as $ } from "@anthropic-ai/sdk";
- import { Ollama as E } from "ollama";
- import { OpenAI as v } from "openai";
- import { spawn as R } from "node:child_process";
+ import * as j from "node:os";
+ import { deepCopy as T, objectMap as y, JSONAttemptParse as _, findByProp as k, JSONSanitize as b, Http as M, consoleInterceptor as A, fn as q, ASet as P } from "@ztimson/utils";
+ import { Anthropic as O } from "@anthropic-ai/sdk";
+ import { Ollama as $ } from "ollama";
+ import { OpenAI as E } from "openai";
+ import { Worker as v } from "worker_threads";
+ import { fileURLToPath as R } from "url";
+ import { join as U, dirname as L } from "path";
+ import { spawn as N } from "node:child_process";
  import S from "node:fs/promises";
- import U from "node:path";
- import { createWorker as L } from "tesseract.js";
- import { $ as N, $Sync as D } from "@ztimson/node-utils";
+ import W from "node:path";
+ import { createWorker as D } from "tesseract.js";
+ import { $ as J, $Sync as H } from "@ztimson/node-utils";
  class x {
  }
- class J extends x {
- constructor(t, e, n) {
- super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new $({ apiKey: e });
+ class I extends x {
+ constructor(t, e, s) {
+ super(), this.ai = t, this.apiToken = e, this.model = s, this.client = new O({ apiKey: e });
  }
  client;
  toStandard(t) {
  for (let e = 0; e < t.length; e++) {
- const n = e;
- typeof t[n].content != "string" && (t[n].role == "assistant" ? t[n].content.filter((r) => r.type == "tool_use").forEach((r) => {
- e++, t.splice(e, 0, { role: "tool", id: r.id, name: r.name, args: r.input, timestamp: Date.now() });
- }) : t[n].role == "user" && t[n].content.filter((r) => r.type == "tool_result").forEach((r) => {
- const c = t.find((f) => f.id == r.tool_use_id);
- c[r.is_error ? "error" : "content"] = r.content;
- }), t[n].content = t[n].content.filter((r) => r.type == "text").map((r) => r.text).join(`
+ const s = e;
+ typeof t[s].content != "string" && (t[s].role == "assistant" ? t[s].content.filter((n) => n.type == "tool_use").forEach((n) => {
+ e++, t.splice(e, 0, { role: "tool", id: n.id, name: n.name, args: n.input, timestamp: Date.now() });
+ }) : t[s].role == "user" && t[s].content.filter((n) => n.type == "tool_result").forEach((n) => {
+ const i = t.find((d) => d.id == n.tool_use_id);
+ i[n.is_error ? "error" : "content"] = n.content;
+ }), t[s].content = t[s].content.filter((n) => n.type == "text").map((n) => n.text).join(`

- `)), t[n].timestamp || (t[n].timestamp = Date.now());
+ `)), t[s].timestamp || (t[s].timestamp = Date.now());
  }
  return t.filter((e) => !!e.content);
  }
  fromStandard(t) {
  for (let e = 0; e < t.length; e++)
  if (t[e].role == "tool") {
- const n = t[e];
+ const s = t[e];
  t.splice(
  e,
  1,
- { role: "assistant", content: [{ type: "tool_use", id: n.id, name: n.name, input: n.args }] },
- { role: "user", content: [{ type: "tool_result", tool_use_id: n.id, is_error: !!n.error, content: n.error || n.content }] }
+ { role: "assistant", content: [{ type: "tool_use", id: s.id, name: s.name, input: s.args }] },
+ { role: "user", content: [{ type: "tool_result", tool_use_id: s.id, is_error: !!s.error, content: s.error || s.content }] }
  ), e++;
  }
- return t.map(({ timestamp: e, ...n }) => n);
+ return t.map(({ timestamp: e, ...s }) => s);
  }
  ask(t, e = {}) {
- const n = new AbortController(), r = new Promise(async (c, f) => {
- let i = this.fromStandard([...e.history || [], { role: "user", content: t, timestamp: Date.now() }]);
- const m = M(i);
- e.compress && (i = await this.ai.language.compressHistory(i, e.compress.max, e.compress.min, e));
- const h = e.tools || this.ai.options.tools || [], a = {
+ const s = new AbortController(), n = new Promise(async (i, d) => {
+ let r = this.fromStandard([...e.history || [], { role: "user", content: t, timestamp: Date.now() }]);
+ const l = T(r);
+ e.compress && (r = await this.ai.language.compressHistory(r, e.compress.max, e.compress.min, e));
+ const u = e.tools || this.ai.options.tools || [], c = {
  model: e.model || this.model,
  max_tokens: e.max_tokens || this.ai.options.max_tokens || 4096,
  system: e.system || this.ai.options.system || "",
  temperature: e.temperature || this.ai.options.temperature || 0.7,
- tools: h.map((s) => ({
- name: s.name,
- description: s.description,
+ tools: u.map((o) => ({
+ name: o.name,
+ description: o.description,
  input_schema: {
  type: "object",
- properties: s.args ? b(s.args, (o, p) => ({ ...p, required: void 0 })) : {},
- required: s.args ? Object.entries(s.args).filter((o) => o[1].required).map((o) => o[0]) : []
+ properties: o.args ? y(o.args, (a, f) => ({ ...f, required: void 0 })) : {},
+ required: o.args ? Object.entries(o.args).filter((a) => a[1].required).map((a) => a[0]) : []
  },
  fn: void 0
  })),
- messages: i,
+ messages: r,
  stream: !!e.stream
  };
- let l, u = !0;
+ let m, h = !0;
  do {
- if (l = await this.client.messages.create(a).catch((o) => {
- throw o.message += `
+ if (m = await this.client.messages.create(c).catch((a) => {
+ throw a.message += `

  Messages:
- ${JSON.stringify(i, null, 2)}`, o;
+ ${JSON.stringify(r, null, 2)}`, a;
  }), e.stream) {
- u ? u = !1 : e.stream({ text: `
+ h ? h = !1 : e.stream({ text: `

- ` }), l.content = [];
- for await (const o of l) {
- if (n.signal.aborted) break;
- if (o.type === "content_block_start")
- o.content_block.type === "text" ? l.content.push({ type: "text", text: "" }) : o.content_block.type === "tool_use" && l.content.push({ type: "tool_use", id: o.content_block.id, name: o.content_block.name, input: "" });
- else if (o.type === "content_block_delta")
- if (o.delta.type === "text_delta") {
- const p = o.delta.text;
- l.content.at(-1).text += p, e.stream({ text: p });
- } else o.delta.type === "input_json_delta" && (l.content.at(-1).input += o.delta.partial_json);
- else if (o.type === "content_block_stop") {
- const p = l.content.at(-1);
- p.input != null && (p.input = p.input ? _(p.input, {}) : {});
- } else if (o.type === "message_stop")
+ ` }), m.content = [];
+ for await (const a of m) {
+ if (s.signal.aborted) break;
+ if (a.type === "content_block_start")
+ a.content_block.type === "text" ? m.content.push({ type: "text", text: "" }) : a.content_block.type === "tool_use" && m.content.push({ type: "tool_use", id: a.content_block.id, name: a.content_block.name, input: "" });
+ else if (a.type === "content_block_delta")
+ if (a.delta.type === "text_delta") {
+ const f = a.delta.text;
+ m.content.at(-1).text += f, e.stream({ text: f });
+ } else a.delta.type === "input_json_delta" && (m.content.at(-1).input += a.delta.partial_json);
+ else if (a.type === "content_block_stop") {
+ const f = m.content.at(-1);
+ f.input != null && (f.input = f.input ? _(f.input, {}) : {});
+ } else if (a.type === "message_stop")
  break;
  }
  }
- const s = l.content.filter((o) => o.type === "tool_use");
- if (s.length && !n.signal.aborted) {
- i.push({ role: "assistant", content: l.content }), m.push({ role: "assistant", content: l.content });
- const o = await Promise.all(s.map(async (p) => {
- const g = h.find(k("name", p.name));
- if (!g) return { tool_use_id: p.id, is_error: !0, content: "Tool not found" };
+ const o = m.content.filter((a) => a.type === "tool_use");
+ if (o.length && !s.signal.aborted) {
+ r.push({ role: "assistant", content: m.content }), l.push({ role: "assistant", content: m.content });
+ const a = await Promise.all(o.map(async (f) => {
+ const g = u.find(k("name", f.name));
+ if (!g) return { tool_use_id: f.id, is_error: !0, content: "Tool not found" };
  try {
- const w = await g.fn(p.input, this.ai);
- return { type: "tool_result", tool_use_id: p.id, content: y(w) };
+ const w = await g.fn(f.input, this.ai);
+ return { type: "tool_result", tool_use_id: f.id, content: b(w) };
  } catch (w) {
- return { type: "tool_result", tool_use_id: p.id, is_error: !0, content: w?.message || w?.toString() || "Unknown" };
+ return { type: "tool_result", tool_use_id: f.id, is_error: !0, content: w?.message || w?.toString() || "Unknown" };
  }
  }));
- i.push({ role: "user", content: o }), a.messages = i;
+ r.push({ role: "user", content: a }), c.messages = r;
  }
- } while (!n.signal.aborted && l.content.some((s) => s.type === "tool_use"));
- e.stream && e.stream({ done: !0 }), c(this.toStandard([...i, { role: "assistant", content: l.content.filter((s) => s.type == "text").map((s) => s.text).join(`
+ } while (!s.signal.aborted && m.content.some((o) => o.type === "tool_use"));
+ e.stream && e.stream({ done: !0 }), i(this.toStandard([...r, { role: "assistant", content: m.content.filter((o) => o.type == "text").map((o) => o.text).join(`

  `) }]));
  });
- return Object.assign(r, { abort: () => n.abort() });
+ return Object.assign(n, { abort: () => s.abort() });
  }
  }
- class H extends x {
- constructor(t, e, n) {
- super(), this.ai = t, this.host = e, this.model = n, this.client = new E({ host: e });
+ class z extends x {
+ constructor(t, e, s) {
+ super(), this.ai = t, this.host = e, this.model = s, this.client = new $({ host: e });
  }
  client;
  toStandard(t) {
@@ -126,8 +128,8 @@ class H extends x {
  if (t[e].role == "assistant" && t[e].tool_calls)
  t[e].content ? delete t[e].tool_calls : (t.splice(e, 1), e--);
  else if (t[e].role == "tool") {
- const n = t[e].content.startsWith('{"error":');
- t[e] = { role: "tool", name: t[e].tool_name, args: t[e].args, [n ? "error" : "content"]: t[e].content, timestamp: t[e].timestamp };
+ const s = t[e].content.startsWith('{"error":');
+ t[e] = { role: "tool", name: t[e].tool_name, args: t[e].args, [s ? "error" : "content"]: t[e].content, timestamp: t[e].timestamp };
  }
  t[e]?.timestamp || (t[e].timestamp = Date.now());
  }
@@ -135,182 +137,187 @@ class H extends x {
  }
  fromStandard(t) {
  return t.map((e) => {
- const { timestamp: n, ...r } = e;
- return e.role != "tool" ? r : { role: "tool", tool_name: e.name, content: e.error || e.content };
+ const { timestamp: s, ...n } = e;
+ return e.role != "tool" ? n : { role: "tool", tool_name: e.name, content: e.error || e.content };
  });
  }
  ask(t, e = {}) {
- const n = new AbortController(), r = new Promise(async (c, f) => {
- let i = e.system || this.ai.options.system, m = this.fromStandard([...e.history || [], { role: "user", content: t, timestamp: Date.now() }]);
- m[0].roll == "system" && (i ? m.shift() : i = m.shift()), e.compress && (m = await this.ai.language.compressHistory(m, e.compress.max, e.compress.min)), e.system && m.unshift({ role: "system", content: i });
- const h = e.tools || this.ai.options.tools || [], a = {
+ const s = new AbortController(), n = new Promise(async (i, d) => {
+ let r = e.system || this.ai.options.system, l = this.fromStandard([...e.history || [], { role: "user", content: t, timestamp: Date.now() }]);
+ l[0].roll == "system" && (r ? l.shift() : r = l.shift()), e.compress && (l = await this.ai.language.compressHistory(l, e.compress.max, e.compress.min)), e.system && l.unshift({ role: "system", content: r });
+ const u = e.tools || this.ai.options.tools || [], c = {
  model: e.model || this.model,
- messages: m,
+ messages: l,
  stream: !!e.stream,
- signal: n.signal,
+ signal: s.signal,
  options: {
  temperature: e.temperature || this.ai.options.temperature || 0.7,
  num_predict: e.max_tokens || this.ai.options.max_tokens || 4096
  },
- tools: h.map((s) => ({
+ tools: u.map((o) => ({
  type: "function",
  function: {
- name: s.name,
- description: s.description,
+ name: o.name,
+ description: o.description,
  parameters: {
  type: "object",
- properties: s.args ? b(s.args, (o, p) => ({ ...p, required: void 0 })) : {},
- required: s.args ? Object.entries(s.args).filter((o) => o[1].required).map((o) => o[0]) : []
+ properties: o.args ? y(o.args, (a, f) => ({ ...f, required: void 0 })) : {},
+ required: o.args ? Object.entries(o.args).filter((a) => a[1].required).map((a) => a[0]) : []
  }
  }
  }))
  };
- let l, u = !0;
+ let m, h = !0;
  do {
- if (l = await this.client.chat(a).catch((s) => {
- throw s.message += `
+ if (m = await this.client.chat(c).catch((o) => {
+ throw o.message += `

  Messages:
- ${JSON.stringify(m, null, 2)}`, s;
+ ${JSON.stringify(l, null, 2)}`, o;
  }), e.stream) {
- u ? u = !1 : e.stream({ text: `
+ h ? h = !1 : e.stream({ text: `

- ` }), l.message = { role: "assistant", content: "", tool_calls: [] };
- for await (const s of l)
- if (n.signal.aborted || (s.message?.content && (l.message.content += s.message.content, e.stream({ text: s.message.content })), s.message?.tool_calls && (l.message.tool_calls = s.message.tool_calls), s.done)) break;
+ ` }), m.message = { role: "assistant", content: "", tool_calls: [] };
+ for await (const o of m)
+ if (s.signal.aborted || (o.message?.content && (m.message.content += o.message.content, e.stream({ text: o.message.content })), o.message?.tool_calls && (m.message.tool_calls = o.message.tool_calls), o.done)) break;
  }
- if (l.message?.tool_calls?.length && !n.signal.aborted) {
- m.push(l.message);
- const s = await Promise.all(l.message.tool_calls.map(async (o) => {
- const p = h.find(k("name", o.function.name));
- if (!p) return { role: "tool", tool_name: o.function.name, content: '{"error": "Tool not found"}' };
- const g = typeof o.function.arguments == "string" ? _(o.function.arguments, {}) : o.function.arguments;
+ if (m.message?.tool_calls?.length && !s.signal.aborted) {
+ l.push(m.message);
+ const o = await Promise.all(m.message.tool_calls.map(async (a) => {
+ const f = u.find(k("name", a.function.name));
+ if (!f) return { role: "tool", tool_name: a.function.name, content: '{"error": "Tool not found"}' };
+ const g = typeof a.function.arguments == "string" ? _(a.function.arguments, {}) : a.function.arguments;
  try {
- const w = await p.fn(g, this.ai);
- return { role: "tool", tool_name: o.function.name, args: g, content: y(w) };
+ const w = await f.fn(g, this.ai);
+ return { role: "tool", tool_name: a.function.name, args: g, content: b(w) };
  } catch (w) {
- return { role: "tool", tool_name: o.function.name, args: g, content: y({ error: w?.message || w?.toString() || "Unknown" }) };
+ return { role: "tool", tool_name: a.function.name, args: g, content: b({ error: w?.message || w?.toString() || "Unknown" }) };
  }
  }));
- m.push(...s), a.messages = m;
+ l.push(...o), c.messages = l;
  }
- } while (!n.signal.aborted && l.message?.tool_calls?.length);
- e.stream && e.stream({ done: !0 }), c(this.toStandard([...m, { role: "assistant", content: l.message?.content }]));
+ } while (!s.signal.aborted && m.message?.tool_calls?.length);
+ e.stream && e.stream({ done: !0 }), i(this.toStandard([...l, { role: "assistant", content: m.message?.content }]));
  });
- return Object.assign(r, { abort: () => n.abort() });
+ return Object.assign(n, { abort: () => s.abort() });
  }
  }
- class z extends x {
- constructor(t, e, n) {
- super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new v({ apiKey: e });
+ class F extends x {
+ constructor(t, e, s) {
+ super(), this.ai = t, this.apiToken = e, this.model = s, this.client = new E({ apiKey: e });
  }
  client;
  toStandard(t) {
  for (let e = 0; e < t.length; e++) {
- const n = t[e];
- if (n.role === "assistant" && n.tool_calls) {
- const r = n.tool_calls.map((c) => ({
+ const s = t[e];
+ if (s.role === "assistant" && s.tool_calls) {
+ const n = s.tool_calls.map((i) => ({
  role: "tool",
- id: c.id,
- name: c.function.name,
- args: _(c.function.arguments, {}),
- timestamp: n.timestamp
+ id: i.id,
+ name: i.function.name,
+ args: _(i.function.arguments, {}),
+ timestamp: s.timestamp
  }));
- t.splice(e, 1, ...r), e += r.length - 1;
- } else if (n.role === "tool" && n.content) {
- const r = t.find((c) => n.tool_call_id == c.id);
- r && (n.content.includes('"error":') ? r.error = n.content : r.content = n.content), t.splice(e, 1), e--;
+ t.splice(e, 1, ...n), e += n.length - 1;
+ } else if (s.role === "tool" && s.content) {
+ const n = t.find((i) => s.tool_call_id == i.id);
+ n && (s.content.includes('"error":') ? n.error = s.content : n.content = s.content), t.splice(e, 1), e--;
  }
  t[e]?.timestamp || (t[e].timestamp = Date.now());
  }
  return t;
  }
  fromStandard(t) {
- return t.reduce((e, n) => {
- if (n.role === "tool")
+ return t.reduce((e, s) => {
+ if (s.role === "tool")
  e.push({
  role: "assistant",
  content: null,
- tool_calls: [{ id: n.id, type: "function", function: { name: n.name, arguments: JSON.stringify(n.args) } }],
+ tool_calls: [{ id: s.id, type: "function", function: { name: s.name, arguments: JSON.stringify(s.args) } }],
  refusal: null,
  annotations: []
  }, {
  role: "tool",
- tool_call_id: n.id,
- content: n.error || n.content
+ tool_call_id: s.id,
+ content: s.error || s.content
  });
  else {
- const { timestamp: r, ...c } = n;
- e.push(c);
+ const { timestamp: n, ...i } = s;
+ e.push(i);
  }
  return e;
  }, []);
  }
  ask(t, e = {}) {
- const n = new AbortController(), r = new Promise(async (c, f) => {
- let i = this.fromStandard([...e.history || [], { role: "user", content: t, timestamp: Date.now() }]);
- e.compress && (i = await this.ai.language.compressHistory(i, e.compress.max, e.compress.min, e));
- const m = e.tools || this.ai.options.tools || [], h = {
+ const s = new AbortController(), n = new Promise(async (i, d) => {
+ let r = this.fromStandard([...e.history || [], { role: "user", content: t, timestamp: Date.now() }]);
+ e.compress && (r = await this.ai.language.compressHistory(r, e.compress.max, e.compress.min, e));
+ const l = e.tools || this.ai.options.tools || [], u = {
  model: e.model || this.model,
- messages: i,
+ messages: r,
  stream: !!e.stream,
  max_tokens: e.max_tokens || this.ai.options.max_tokens || 4096,
  temperature: e.temperature || this.ai.options.temperature || 0.7,
- tools: m.map((u) => ({
+ tools: l.map((h) => ({
  type: "function",
  function: {
- name: u.name,
- description: u.description,
+ name: h.name,
+ description: h.description,
  parameters: {
  type: "object",
- properties: u.args ? b(u.args, (s, o) => ({ ...o, required: void 0 })) : {},
- required: u.args ? Object.entries(u.args).filter((s) => s[1].required).map((s) => s[0]) : []
+ properties: h.args ? y(h.args, (o, a) => ({ ...a, required: void 0 })) : {},
+ required: h.args ? Object.entries(h.args).filter((o) => o[1].required).map((o) => o[0]) : []
  }
  }
  }))
  };
- let a, l = !0;
+ let c, m = !0;
  do {
- if (a = await this.client.chat.completions.create(h).catch((s) => {
- throw s.message += `
+ if (c = await this.client.chat.completions.create(u).catch((o) => {
+ throw o.message += `

  Messages:
- ${JSON.stringify(i, null, 2)}`, s;
+ ${JSON.stringify(r, null, 2)}`, o;
  }), e.stream) {
- l ? l = !1 : e.stream({ text: `
+ m ? m = !1 : e.stream({ text: `

- ` }), a.choices = [{ message: { content: "", tool_calls: [] } }];
- for await (const s of a) {
- if (n.signal.aborted) break;
- s.choices[0].delta.content && (a.choices[0].message.content += s.choices[0].delta.content, e.stream({ text: s.choices[0].delta.content })), s.choices[0].delta.tool_calls && (a.choices[0].message.tool_calls = s.choices[0].delta.tool_calls);
+ ` }), c.choices = [{ message: { content: "", tool_calls: [] } }];
+ for await (const o of c) {
+ if (s.signal.aborted) break;
+ o.choices[0].delta.content && (c.choices[0].message.content += o.choices[0].delta.content, e.stream({ text: o.choices[0].delta.content })), o.choices[0].delta.tool_calls && (c.choices[0].message.tool_calls = o.choices[0].delta.tool_calls);
  }
  }
- const u = a.choices[0].message.tool_calls || [];
- if (u.length && !n.signal.aborted) {
- i.push(a.choices[0].message);
- const s = await Promise.all(u.map(async (o) => {
- const p = m?.find(k("name", o.function.name));
- if (!p) return { role: "tool", tool_call_id: o.id, content: '{"error": "Tool not found"}' };
+ const h = c.choices[0].message.tool_calls || [];
+ if (h.length && !s.signal.aborted) {
+ r.push(c.choices[0].message);
+ const o = await Promise.all(h.map(async (a) => {
+ const f = l?.find(k("name", a.function.name));
+ if (!f) return { role: "tool", tool_call_id: a.id, content: '{"error": "Tool not found"}' };
  try {
- const g = _(o.function.arguments, {}), w = await p.fn(g, this.ai);
- return { role: "tool", tool_call_id: o.id, content: y(w) };
+ const g = _(a.function.arguments, {}), w = await f.fn(g, this.ai);
+ return { role: "tool", tool_call_id: a.id, content: b(w) };
  } catch (g) {
- return { role: "tool", tool_call_id: o.id, content: y({ error: g?.message || g?.toString() || "Unknown" }) };
+ return { role: "tool", tool_call_id: a.id, content: b({ error: g?.message || g?.toString() || "Unknown" }) };
  }
  }));
- i.push(...s), h.messages = i;
+ r.push(...o), u.messages = r;
  }
- } while (!n.signal.aborted && a.choices?.[0]?.message?.tool_calls?.length);
- e.stream && e.stream({ done: !0 }), c(this.toStandard([...i, { role: "assistant", content: a.choices[0].message.content || "" }]));
+ } while (!s.signal.aborted && c.choices?.[0]?.message?.tool_calls?.length);
+ e.stream && e.stream({ done: !0 }), i(this.toStandard([...r, { role: "assistant", content: c.choices[0].message.content || "" }]));
  });
- return Object.assign(r, { abort: () => n.abort() });
+ return Object.assign(n, { abort: () => s.abort() });
  }
  }
- class I {
+ class G {
  constructor(t) {
- this.ai = t, this.embedModel = j("feature-extraction", "Xenova/all-MiniLM-L6-v2"), t.options.anthropic?.token && (this.providers.anthropic = new J(this.ai, t.options.anthropic.token, t.options.anthropic.model)), t.options.ollama?.host && (this.providers.ollama = new H(this.ai, t.options.ollama.host, t.options.ollama.model)), t.options.openAi?.token && (this.providers.openAi = new z(this.ai, t.options.openAi.token, t.options.openAi.model));
+ this.ai = t, this.embedWorker = new v(U(L(R(import.meta.url)), "embedder.js")), this.embedWorker.on("message", ({ id: e, embedding: s }) => {
+ const n = this.embedQueue.get(e);
+ n && (n.resolve(s), this.embedQueue.delete(e));
+ }), t.options.anthropic?.token && (this.providers.anthropic = new I(this.ai, t.options.anthropic.token, t.options.anthropic.model)), t.options.ollama?.host && (this.providers.ollama = new z(this.ai, t.options.ollama.host, t.options.ollama.model)), t.options.openAi?.token && (this.providers.openAi = new F(this.ai, t.options.openAi.token, t.options.openAi.model));
  }
- embedModel;
+ embedWorker = null;
+ embedQueue = /* @__PURE__ */ new Map();
+ embedId = 0;
  providers = {};
  /**
  * Chat with LLM
@@ -319,9 +326,9 @@ class I {
  * @returns {{abort: () => void, response: Promise<LLMMessage[]>}} Function to abort response and chat history
  */
  ask(t, e = {}) {
- let n = [null, null];
- if (e.model && (typeof e.model == "object" ? n = e.model : n = [e.model, this.ai.options[e.model]?.model]), (!e.model || n[1] == null) && (typeof this.ai.options.model == "object" ? n = this.ai.options.model : n = [this.ai.options.model, this.ai.options[this.ai.options.model]?.model]), !n[0] || !n[1]) throw new Error(`Unknown LLM provider or model: ${n[0]} / ${n[1]}`);
- return this.providers[n[0]].ask(t, { ...e, model: n[1] });
+ let s = [null, null];
+ if (e.model && (typeof e.model == "object" ? s = e.model : s = [e.model, this.ai.options[e.model]?.model]), (!e.model || s[1] == null) && (typeof this.ai.options.model == "object" ? s = this.ai.options.model : s = [this.ai.options.model, this.ai.options[this.ai.options.model]?.model]), !s[0] || !s[1]) throw new Error(`Unknown LLM provider or model: ${s[0]} / ${s[1]}`);
+ return this.providers[s[0]].ask(t, { ...e, model: s[1] });
  }
  /**
  * Compress chat history to reduce context size
@@ -331,56 +338,57 @@ class I {
  * @param {LLMRequest} options LLM options
  * @returns {Promise<LLMMessage[]>} New chat history will summary at index 0
  */
- async compressHistory(t, e, n, r) {
+ async compressHistory(t, e, s, n) {
  if (this.estimateTokens(t) < e) return t;
- let c = 0, f = 0;
- for (let a of t.toReversed())
- if (f += this.estimateTokens(a.content), f < n) c++;
+ let i = 0, d = 0;
+ for (let c of t.toReversed())
+ if (d += this.estimateTokens(c.content), d < s) i++;
  else break;
- if (t.length <= c) return t;
- const i = c == 0 ? [] : t.slice(-c), m = (c == 0 ? t : t.slice(0, -c)).filter((a) => a.role === "assistant" || a.role === "user");
- return [{ role: "assistant", content: `Conversation Summary: ${await this.summarize(m.map((a) => `${a.role}: ${a.content}`).join(`
+ if (t.length <= i) return t;
+ const r = i == 0 ? [] : t.slice(-i), l = (i == 0 ? t : t.slice(0, -i)).filter((c) => c.role === "assistant" || c.role === "user");
+ return [{ role: "assistant", content: `Conversation Summary: ${await this.summarize(l.map((c) => `${c.role}: ${c.content}`).join(`

- `), 250, r)}`, timestamp: Date.now() }, ...i];
+ `), 250, n)}`, timestamp: Date.now() }, ...r];
  }
  cosineSimilarity(t, e) {
  if (t.length !== e.length) throw new Error("Vectors must be same length");
- let n = 0, r = 0, c = 0;
- for (let i = 0; i < t.length; i++)
- n += t[i] * e[i], r += t[i] * t[i], c += e[i] * e[i];
- const f = Math.sqrt(r) * Math.sqrt(c);
- return f === 0 ? 0 : n / f;
+ let s = 0, n = 0, i = 0;
+ for (let r = 0; r < t.length; r++)
+ s += t[r] * e[r], n += t[r] * t[r], i += e[r] * e[r];
+ const d = Math.sqrt(n) * Math.sqrt(i);
+ return d === 0 ? 0 : s / d;
  }
- embedding(t, e = 500, n = 50) {
- const r = (a, l = "") => a == null ? [] : Object.entries(a).flatMap(([u, s]) => {
- const o = l ? `${l}${isNaN(+u) ? `.${u}` : `[${u}]`}` : u;
- if (typeof s == "object" && s !== null && !Array.isArray(s)) return r(s, o);
- const p = Array.isArray(s) ? s.join(", ") : String(s);
- return `${o}: ${p}`;
- }), c = async (a) => {
- const u = await (await this.embedModel)(a, { pooling: "mean", normalize: !0 });
- return Array.from(u.data);
- }, i = (typeof t == "object" ? r(t) : t.split(`
- `)).flatMap((a) => [...a.split(/\s+/).filter((l) => l.trim()), `
- `]), m = [];
- let h = 0;
- for (; h < i.length; ) {
- let a = h, l = "";
- for (; a < i.length; ) {
- const s = i[a], o = l + (l ? " " : "") + s;
- if (this.estimateTokens(o.replace(/\s*\n\s*/g, `
- `)) > e && l) break;
- l = o, a++;
+ chunk(t, e = 500, s = 50) {
+ const n = (l, u = "") => l ? Object.entries(l).flatMap(([c, m]) => {
+ const h = u ? `${u}${isNaN(+c) ? `.${c}` : `[${c}]`}` : c;
+ return typeof m == "object" && !Array.isArray(m) ? n(m, h) : `${h}: ${Array.isArray(m) ? m.join(", ") : m}`;
+ }) : [], d = (typeof t == "object" ? n(t) : t.split(`
+ `)).flatMap((l) => [...l.split(/\s+/).filter(Boolean), `
+ `]), r = [];
+ for (let l = 0; l < d.length; ) {
+ let u = "", c = l;
+ for (; c < d.length; ) {
+ const h = u + (u ? " " : "") + d[c];
+ if (this.estimateTokens(h.replace(/\s*\n\s*/g, `
+ `)) > e && u) break;
+ u = h, c++;
  }
- const u = l.replace(/\s*\n\s*/g, `
+ const m = u.replace(/\s*\n\s*/g, `
  `).trim();
- u && m.push(u), h = a - n, h <= a - i.length + a && (h = a);
+ m && r.push(m), l = Math.max(c - s, c === l ? l + 1 : c);
  }
- return Promise.all(m.map(async (a, l) => ({
- index: l,
- embedding: await c(a),
- text: a,
- tokens: this.estimateTokens(a)
+ return r;
+ }
+ embedding(t, e = 500, s = 50) {
+ const n = (d) => new Promise((r, l) => {
+ const u = this.embedId++;
+ this.embedQueue.set(u, { resolve: r, reject: l }), this.embedWorker?.postMessage({ id: u, text: d });
+ }), i = this.chunk(t, e, s);
+ return Promise.all(i.map(async (d, r) => ({
+ index: r,
+ embedding: await n(d),
+ text: d,
+ tokens: this.estimateTokens(d)
  })));
  }
  /**
@@ -400,8 +408,8 @@ class I {
  */
  fuzzyMatch(t, ...e) {
  if (e.length < 2) throw new Error("Requires at least 2 strings to compare");
- const n = (f, i = 10) => f.toLowerCase().split("").map((m, h) => m.charCodeAt(0) * (h + 1) % i / i).slice(0, i), r = n(t), c = e.map((f) => n(f)).map((f) => this.cosineSimilarity(r, f));
- return { avg: c.reduce((f, i) => f + i, 0) / c.length, max: Math.max(...c), similarities: c };
+ const s = (d, r = 10) => d.toLowerCase().split("").map((l, u) => l.charCodeAt(0) * (u + 1) % r / r).slice(0, r), n = s(t), i = e.map((d) => s(d)).map((d) => this.cosineSimilarity(n, d));
+ return { avg: i.reduce((d, r) => d + r, 0) / i.length, max: Math.max(...i), similarities: i };
  }
  /**
  * Ask a question with JSON response
@@ -410,11 +418,11 @@ class I {
  * @returns {Promise<{} | {} | RegExpExecArray | null>}
  */
  async json(t, e) {
- let n = await this.ask(t, {
+ let s = await this.ask(t, {
  system: "Respond using a JSON blob",
  ...e
  });
- return n?.[0]?.content ? _(new RegExp("{[sS]*}").exec(n[0].content), {}) : {};
+ return s?.[0]?.content ? _(new RegExp("{[sS]*}").exec(s[0].content), {}) : {};
  }
  /**
  * Create a summary of some text
@@ -423,11 +431,11 @@ class I {
  * @param options LLM request options
  * @returns {Promise<string>} Summary
  */
- summarize(t, e, n) {
- return this.ask(t, { system: `Generate a brief summary <= ${e} tokens. Output nothing else`, temperature: 0.3, ...n }).then((r) => r.pop()?.content || null);
+ summarize(t, e, s) {
+ return this.ask(t, { system: `Generate a brief summary <= ${e} tokens. Output nothing else`, temperature: 0.3, ...s }).then((n) => n.pop()?.content || null);
  }
  }
- class W {
+ class B {
  constructor(t) {
  this.ai = t, t.options.whisper?.binary && (this.whisperModel = t.options.whisper?.model.endsWith(".bin") ? t.options.whisper?.model : t.options.whisper?.model + ".bin", this.downloadAsrModel());
  }
@@ -441,17 +449,17 @@ class W {
  */
  asr(t, e = this.whisperModel) {
  if (!this.ai.options.whisper?.binary) throw new Error("Whisper not configured");
- let n = () => {
+ let s = () => {
  };
- return { response: new Promise((c, f) => {
- this.downloadAsrModel(e).then((i) => {
- let m = "";
- const h = R(this.ai.options.whisper?.binary, ["-nt", "-np", "-m", i, "-f", t], { stdio: ["ignore", "pipe", "ignore"] });
- n = () => h.kill("SIGTERM"), h.on("error", (a) => f(a)), h.stdout.on("data", (a) => m += a.toString()), h.on("close", (a) => {
- a === 0 ? c(m.trim() || null) : f(new Error(`Exit code ${a}`));
+ return { response: new Promise((i, d) => {
+ this.downloadAsrModel(e).then((r) => {
+ let l = "";
+ const u = N(this.ai.options.whisper?.binary, ["-nt", "-np", "-m", r, "-f", t], { stdio: ["ignore", "pipe", "ignore"] });
+ s = () => u.kill("SIGTERM"), u.on("error", (c) => d(c)), u.stdout.on("data", (c) => l += c.toString()), u.on("close", (c) => {
+ c === 0 ? i(l.trim() || null) : d(new Error(`Exit code ${c}`));
  });
  });
- }), abort: n };
+ }), abort: s };
  }
  /**
  * Downloads the specified Whisper model if it is not already present locally.
@@ -462,11 +470,11 @@ class W {
  async downloadAsrModel(t = this.whisperModel) {
  if (!this.ai.options.whisper?.binary) throw new Error("Whisper not configured");
  t.endsWith(".bin") || (t += ".bin");
- const e = U.join(this.ai.options.path, t);
- return await S.stat(e).then(() => !0).catch(() => !1) ? e : this.downloads[t] ? this.downloads[t] : (this.downloads[t] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${t}`).then((n) => n.arrayBuffer()).then((n) => Buffer.from(n)).then(async (n) => (await S.writeFile(e, n), delete this.downloads[t], e)), this.downloads[t]);
+ const e = W.join(this.ai.options.path, t);
+ return await S.stat(e).then(() => !0).catch(() => !1) ? e : this.downloads[t] ? this.downloads[t] : (this.downloads[t] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${t}`).then((s) => s.arrayBuffer()).then((s) => Buffer.from(s)).then(async (s) => (await S.writeFile(e, s), delete this.downloads[t], e)), this.downloads[t]);
  }
  }
- class F {
+ class C {
  constructor(t) {
  this.ai = t;
  }
@@ -481,17 +489,17 @@ class F {
  abort: () => {
  e?.terminate();
  },
- response: new Promise(async (n) => {
- e = await L(this.ai.options.tesseract?.model || "eng", 2, { cachePath: this.ai.options.path });
- const { data: r } = await e.recognize(t);
- await e.terminate(), n(r.text.trim() || null);
+ response: new Promise(async (s) => {
+ e = await D(this.ai.options.tesseract?.model || "eng", 2, { cachePath: this.ai.options.path });
+ const { data: n } = await e.recognize(t);
+ await e.terminate(), s(n.text.trim() || null);
  })
  };
  }
  }
- class oe {
+ class le {
  constructor(t) {
- this.options = t, t.path || (t.path = T.tmpdir()), process.env.TRANSFORMERS_CACHE = t.path, this.audio = new W(this), this.language = new I(this), this.vision = new F(this);
+ this.options = t, t.path || (t.path = j.tmpdir()), process.env.TRANSFORMERS_CACHE = t.path, this.audio = new B(this), this.language = new G(this), this.vision = new C(this);
  }
  /** Audio processing AI */
  audio;
@@ -500,38 +508,38 @@ class oe {
  /** Vision processing AI */
  vision;
  }
- const G = {
+ const Q = {
  name: "cli",
  description: "Use the command line interface, returns any output",
  args: { command: { type: "string", description: "Command to run", required: !0 } },
- fn: (d) => N`${d.command}`
- }, re = {
+ fn: (p) => J`${p.command}`
+ }, me = {
  name: "get_datetime",
  description: "Get current date and time",
  args: {},
  fn: async () => (/* @__PURE__ */ new Date()).toISOString()
- }, ae = {
+ }, de = {
  name: "exec",
  description: "Run code/scripts",
  args: {
  language: { type: "string", description: "Execution language", enum: ["cli", "node", "python"], required: !0 },
  code: { type: "string", description: "Code to execute", required: !0 }
  },
- fn: async (d, t) => {
+ fn: async (p, t) => {
  try {
- switch (d.type) {
+ switch (p.type) {
  case "bash":
- return await G.fn({ command: d.code }, t);
+ return await Q.fn({ command: p.code }, t);
  case "node":
- return await C.fn({ code: d.code }, t);
+ return await K.fn({ code: p.code }, t);
  case "python":
- return await B.fn({ code: d.code }, t);
+ return await V.fn({ code: p.code }, t);
  }
  } catch (e) {
  return { error: e?.message || e.toString() };
  }
  }
- }, ie = {
+ }, ue = {
  name: "fetch",
  description: "Make HTTP request to URL",
  args: {
@@ -540,54 +548,54 @@ const G = {
  headers: { type: "object", description: "HTTP headers to send", default: {} },
  body: { type: "object", description: "HTTP body to send" }
  },
- fn: (d) => new A({ url: d.url, headers: d.headers }).request({ method: d.method || "GET", body: d.body })
- }, C = {
+ fn: (p) => new M({ url: p.url, headers: p.headers }).request({ method: p.method || "GET", body: p.body })
+ }, K = {
  name: "exec_javascript",
  description: "Execute commonjs javascript",
  args: {
  code: { type: "string", description: "CommonJS javascript", required: !0 }
  },
- fn: async (d) => {
- const t = q(null), e = await P({ console: t }, d.code, !0).catch((n) => t.output.error.push(n));
+ fn: async (p) => {
+ const t = A(null), e = await q({ console: t }, p.code, !0).catch((s) => t.output.error.push(s));
  return { ...t.output, return: e, stdout: void 0, stderr: void 0 };
  }
- }, B = {
+ }, V = {
  name: "exec_javascript",
  description: "Execute commonjs javascript",
  args: {
  code: { type: "string", description: "CommonJS javascript", required: !0 }
  },
- fn: async (d) => ({ result: D`python -c "${d.code}"` })
- }, ce = {
+ fn: async (p) => ({ result: H`python -c "${p.code}"` })
+ }, pe = {
  name: "search",
  description: "Use a search engine to find relevant URLs, should be changed with fetch to scrape sources",
  args: {
  query: { type: "string", description: "Search string", required: !0 },
  length: { type: "string", description: "Number of results to return", default: 5 }
  },
- fn: async (d) => {
- const t = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(d.query)}`, {
+ fn: async (p) => {
+ const t = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(p.query)}`, {
  headers: { "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)", "Accept-Language": "en-US,en;q=0.9" }
- }).then((c) => c.text());
- let e, n = /<a .*?href="(.+?)".+?<\/a>/g;
- const r = new O();
- for (; (e = n.exec(t)) !== null; ) {
- let c = /uddg=(.+)&amp?/.exec(decodeURIComponent(e[1]))?.[1];
- if (c && (c = decodeURIComponent(c)), c && r.add(c), r.size >= (d.length || 5)) break;
+ }).then((i) => i.text());
+ let e, s = /<a .*?href="(.+?)".+?<\/a>/g;
+ const n = new P();
+ for (; (e = s.exec(t)) !== null; ) {
+ let i = /uddg=(.+)&amp?/.exec(decodeURIComponent(e[1]))?.[1];
+ if (i && (i = decodeURIComponent(i)), i && n.add(i), n.size >= (p.length || 5)) break;
  }
- return r;
+ return n;
  }
  };
  export {
- oe as Ai,
- J as Anthropic,
- G as CliTool,
- re as DateTimeTool,
- ae as ExecTool,
- ie as FetchTool,
- C as JSTool,
- I as LLM,
- B as PythonTool,
- ce as SearchTool
+ le as Ai,
+ I as Anthropic,
+ Q as CliTool,
+ me as DateTimeTool,
+ de as ExecTool,
+ ue as FetchTool,
+ K as JSTool,
+ G as LLM,
+ V as PythonTool,
+ pe as SearchTool
  };
  //# sourceMappingURL=index.mjs.map
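
Note on the embedding change visible above: the 0.2.7 bundle no longer imports @xenova/transformers directly; the LLM class now spawns a worker_threads Worker from an adjacent embedder.js and exchanges { id, text } / { id, embedding } messages with it (see the new constructor and embedding()). embedder.js itself is not part of this diff, so the following is only a minimal sketch of a compatible worker, assuming it still wraps the same Xenova/all-MiniLM-L6-v2 feature-extraction pipeline the 0.2.5 bundle called in-process (the bundle still setting TRANSFORMERS_CACHE is consistent with, but does not prove, that assumption).

// embedder.js - hypothetical worker sketch; this file is not included in the published diff.
import { parentPort } from "worker_threads";
import { pipeline } from "@xenova/transformers";

// Load the same model the 0.2.5 bundle used on the main thread.
const embedder = pipeline("feature-extraction", "Xenova/all-MiniLM-L6-v2");

parentPort.on("message", async ({ id, text }) => {
  // Mirror the main-thread protocol: { id, text } in, { id, embedding } out.
  const output = await (await embedder)(text, { pooling: "mean", normalize: true });
  parentPort.postMessage({ id, embedding: Array.from(output.data) });
});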