@ztimson/ai-utils 0.1.5 → 0.1.7

This diff compares the published contents of two publicly available versions of the package, exactly as they appear in their public registry, and is provided for informational purposes only.
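The named exports are identical in both versions (Ai, LLM, Anthropic, and the bundled *Tool definitions); the rebuild mostly renames internal identifiers during minification, drops the __publicField helpers in favor of native class fields, and reworks the whisper model/temp-file path handling. For orientation, a minimal usage sketch inferred from the constructors visible in the diff below; the host and model values are placeholders, not package defaults:

    import { Ai } from '@ztimson/ai-utils';

    // Each configured provider block registers an LLM provider; options.model
    // selects the default provider, whose model comes from options[provider].model.
    const ai = new Ai({
        model: 'ollama', // or 'anthropic' / 'openAi', or a [provider, model] pair
        ollama: { host: 'http://localhost:11434', model: 'llama3' } // placeholder values
    });

    // ask() resolves to the updated chat history; the returned promise also
    // carries an abort() helper (see Object.assign(..., { abort }) in the diff).
    const history = await ai.llm.ask('Hello!');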
package/dist/index.mjs CHANGED
@@ -1,387 +1,290 @@
- var __defProp = Object.defineProperty;
- var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
- var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
- import { $, $Sync } from "@ztimson/node-utils";
- import { createWorker } from "tesseract.js";
- import { objectMap, JSONAttemptParse, findByProp, JSONSanitize, Http, consoleInterceptor, fn, ASet } from "@ztimson/utils";
- import { Anthropic as Anthropic$1 } from "@anthropic-ai/sdk";
- import { Ollama as Ollama$1 } from "ollama";
- import { OpenAI } from "openai";
- import fs from "node:fs/promises";
- import Path from "node:path";
- import * as tf from "@tensorflow/tfjs";
- class LLMProvider {
+ import { $ as j, $Sync as T } from "@ztimson/node-utils";
+ import { createWorker as q } from "tesseract.js";
+ import { objectMap as b, JSONAttemptParse as w, findByProp as k, JSONSanitize as _, Http as v, consoleInterceptor as P, fn as A, ASet as O } from "@ztimson/utils";
+ import { Anthropic as $ } from "@anthropic-ai/sdk";
+ import { Ollama as E } from "ollama";
+ import { OpenAI as M } from "openai";
+ import y from "node:fs/promises";
+ import S from "node:path";
+ import * as g from "@tensorflow/tfjs";
+ class x {
  }
- class Anthropic extends LLMProvider {
- constructor(ai, apiToken, model) {
- super();
- __publicField(this, "client");
- this.ai = ai;
- this.apiToken = apiToken;
- this.model = model;
- this.client = new Anthropic$1({ apiKey: apiToken });
+ class U extends x {
+ constructor(t, e, n) {
+ super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new $({ apiKey: e });
  }
- toStandard(history) {
- for (let i = 0; i < history.length; i++) {
- const orgI = i;
- if (typeof history[orgI].content != "string") {
- if (history[orgI].role == "assistant") {
- history[orgI].content.filter((c) => c.type == "tool_use").forEach((c) => {
- i++;
- history.splice(i, 0, { role: "tool", id: c.id, name: c.name, args: c.input });
- });
- } else if (history[orgI].role == "user") {
- history[orgI].content.filter((c) => c.type == "tool_result").forEach((c) => {
- const h = history.find((h2) => h2.id == c.tool_use_id);
- h[c.is_error ? "error" : "content"] = c.content;
- });
- }
- history[orgI].content = history[orgI].content.filter((c) => c.type == "text").map((c) => c.text).join("\n\n");
- }
+ client;
+ toStandard(t) {
+ for (let e = 0; e < t.length; e++) {
+ const n = e;
+ typeof t[n].content != "string" && (t[n].role == "assistant" ? t[n].content.filter((s) => s.type == "tool_use").forEach((s) => {
+ e++, t.splice(e, 0, { role: "tool", id: s.id, name: s.name, args: s.input });
+ }) : t[n].role == "user" && t[n].content.filter((s) => s.type == "tool_result").forEach((s) => {
+ const i = t.find((f) => f.id == s.tool_use_id);
+ i[s.is_error ? "error" : "content"] = s.content;
+ }), t[n].content = t[n].content.filter((s) => s.type == "text").map((s) => s.text).join(`
+
+ `));
  }
- return history.filter((h) => !!h.content);
+ return t.filter((e) => !!e.content);
  }
- fromStandard(history) {
- for (let i = 0; i < history.length; i++) {
- if (history[i].role == "tool") {
- const h = history[i];
- history.splice(
- i,
+ fromStandard(t) {
+ for (let e = 0; e < t.length; e++)
+ if (t[e].role == "tool") {
+ const n = t[e];
+ t.splice(
+ e,
  1,
- { role: "assistant", content: [{ type: "tool_use", id: h.id, name: h.name, input: h.args }] },
- { role: "user", content: [{ type: "tool_result", tool_use_id: h.id, is_error: !!h.error, content: h.error || h.content }] }
- );
- i++;
+ { role: "assistant", content: [{ type: "tool_use", id: n.id, name: n.name, input: n.args }] },
+ { role: "user", content: [{ type: "tool_result", tool_use_id: n.id, is_error: !!n.error, content: n.error || n.content }] }
+ ), e++;
  }
- }
- return history;
+ return t;
  }
- ask(message, options = {}) {
- const controller = new AbortController();
- const response = new Promise(async (res, rej) => {
- let history = this.fromStandard([...options.history || [], { role: "user", content: message }]);
- if (options.compress) history = await this.ai.llm.compress(history, options.compress.max, options.compress.min, options);
- const requestParams = {
- model: options.model || this.model,
- max_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,
- system: options.system || this.ai.options.system || "",
- temperature: options.temperature || this.ai.options.temperature || 0.7,
- tools: (options.tools || this.ai.options.tools || []).map((t) => ({
- name: t.name,
- description: t.description,
+ ask(t, e = {}) {
+ const n = new AbortController(), s = new Promise(async (i, f) => {
+ let c = this.fromStandard([...e.history || [], { role: "user", content: t }]);
+ e.compress && (c = await this.ai.llm.compress(c, e.compress.max, e.compress.min, e));
+ const m = {
+ model: e.model || this.model,
+ max_tokens: e.max_tokens || this.ai.options.max_tokens || 4096,
+ system: e.system || this.ai.options.system || "",
+ temperature: e.temperature || this.ai.options.temperature || 0.7,
+ tools: (e.tools || this.ai.options.tools || []).map((o) => ({
+ name: o.name,
+ description: o.description,
  input_schema: {
  type: "object",
- properties: t.args ? objectMap(t.args, (key, value) => ({ ...value, required: void 0 })) : {},
- required: t.args ? Object.entries(t.args).filter((t2) => t2[1].required).map((t2) => t2[0]) : []
+ properties: o.args ? b(o.args, (r, a) => ({ ...a, required: void 0 })) : {},
+ required: o.args ? Object.entries(o.args).filter((r) => r[1].required).map((r) => r[0]) : []
  },
  fn: void 0
  })),
- messages: history,
- stream: !!options.stream
+ messages: c,
+ stream: !!e.stream
  };
- let resp;
+ let l;
  do {
- resp = await this.client.messages.create(requestParams);
- if (options.stream) {
- resp.content = [];
- for await (const chunk of resp) {
- if (controller.signal.aborted) break;
- if (chunk.type === "content_block_start") {
- if (chunk.content_block.type === "text") {
- resp.content.push({ type: "text", text: "" });
- } else if (chunk.content_block.type === "tool_use") {
- resp.content.push({ type: "tool_use", id: chunk.content_block.id, name: chunk.content_block.name, input: "" });
- }
- } else if (chunk.type === "content_block_delta") {
- if (chunk.delta.type === "text_delta") {
- const text = chunk.delta.text;
- resp.content.at(-1).text += text;
- options.stream({ text });
- } else if (chunk.delta.type === "input_json_delta") {
- resp.content.at(-1).input += chunk.delta.partial_json;
- }
- } else if (chunk.type === "content_block_stop") {
- const last = resp.content.at(-1);
- if (last.input != null) last.input = last.input ? JSONAttemptParse(last.input, {}) : {};
- } else if (chunk.type === "message_stop") {
+ if (l = await this.client.messages.create(m), e.stream) {
+ l.content = [];
+ for await (const r of l) {
+ if (n.signal.aborted) break;
+ if (r.type === "content_block_start")
+ r.content_block.type === "text" ? l.content.push({ type: "text", text: "" }) : r.content_block.type === "tool_use" && l.content.push({ type: "tool_use", id: r.content_block.id, name: r.content_block.name, input: "" });
+ else if (r.type === "content_block_delta")
+ if (r.delta.type === "text_delta") {
+ const a = r.delta.text;
+ l.content.at(-1).text += a, e.stream({ text: a });
+ } else r.delta.type === "input_json_delta" && (l.content.at(-1).input += r.delta.partial_json);
+ else if (r.type === "content_block_stop") {
+ const a = l.content.at(-1);
+ a.input != null && (a.input = a.input ? w(a.input, {}) : {});
+ } else if (r.type === "message_stop")
  break;
- }
  }
  }
- const toolCalls = resp.content.filter((c) => c.type === "tool_use");
- if (toolCalls.length && !controller.signal.aborted) {
- history.push({ role: "assistant", content: resp.content });
- const results = await Promise.all(toolCalls.map(async (toolCall) => {
- var _a;
- const tool = (_a = options.tools) == null ? void 0 : _a.find(findByProp("name", toolCall.name));
- if (!tool) return { tool_use_id: toolCall.id, is_error: true, content: "Tool not found" };
+ const o = l.content.filter((r) => r.type === "tool_use");
+ if (o.length && !n.signal.aborted) {
+ c.push({ role: "assistant", content: l.content });
+ const r = await Promise.all(o.map(async (a) => {
+ const u = e.tools?.find(k("name", a.name));
+ if (!u) return { tool_use_id: a.id, is_error: !0, content: "Tool not found" };
  try {
- const result = await tool.fn(toolCall.input, this.ai);
- return { type: "tool_result", tool_use_id: toolCall.id, content: JSONSanitize(result) };
- } catch (err) {
- return { type: "tool_result", tool_use_id: toolCall.id, is_error: true, content: (err == null ? void 0 : err.message) || (err == null ? void 0 : err.toString()) || "Unknown" };
+ const p = await u.fn(a.input, this.ai);
+ return { type: "tool_result", tool_use_id: a.id, content: _(p) };
+ } catch (p) {
+ return { type: "tool_result", tool_use_id: a.id, is_error: !0, content: p?.message || p?.toString() || "Unknown" };
  }
  }));
- history.push({ role: "user", content: results });
- requestParams.messages = history;
+ c.push({ role: "user", content: r }), m.messages = c;
  }
- } while (!controller.signal.aborted && resp.content.some((c) => c.type === "tool_use"));
- if (options.stream) options.stream({ done: true });
- res(this.toStandard([...history, {
+ } while (!n.signal.aborted && l.content.some((o) => o.type === "tool_use"));
+ e.stream && e.stream({ done: !0 }), i(this.toStandard([...c, {
  role: "assistant",
- content: resp.content.filter((c) => c.type == "text").map((c) => c.text).join("\n\n")
+ content: l.content.filter((o) => o.type == "text").map((o) => o.text).join(`
+
+ `)
  }]));
  });
- return Object.assign(response, { abort: () => controller.abort() });
+ return Object.assign(s, { abort: () => n.abort() });
  }
  }
- class Ollama extends LLMProvider {
- constructor(ai, host, model) {
- super();
- __publicField(this, "client");
- this.ai = ai;
- this.host = host;
- this.model = model;
- this.client = new Ollama$1({ host });
+ class L extends x {
+ constructor(t, e, n) {
+ super(), this.ai = t, this.host = e, this.model = n, this.client = new E({ host: e });
  }
- toStandard(history) {
- for (let i = 0; i < history.length; i++) {
- if (history[i].role == "assistant" && history[i].tool_calls) {
- if (history[i].content) delete history[i].tool_calls;
- else {
- history.splice(i, 1);
- i--;
- }
- } else if (history[i].role == "tool") {
- const error = history[i].content.startsWith('{"error":');
- history[i] = { role: "tool", name: history[i].tool_name, args: history[i].args, [error ? "error" : "content"]: history[i].content };
+ client;
+ toStandard(t) {
+ for (let e = 0; e < t.length; e++)
+ if (t[e].role == "assistant" && t[e].tool_calls)
+ t[e].content ? delete t[e].tool_calls : (t.splice(e, 1), e--);
+ else if (t[e].role == "tool") {
+ const n = t[e].content.startsWith('{"error":');
+ t[e] = { role: "tool", name: t[e].tool_name, args: t[e].args, [n ? "error" : "content"]: t[e].content };
  }
- }
- return history;
+ return t;
  }
- fromStandard(history) {
- return history.map((h) => {
- if (h.role != "tool") return h;
- return { role: "tool", tool_name: h.name, content: h.error || h.content };
- });
+ fromStandard(t) {
+ return t.map((e) => e.role != "tool" ? e : { role: "tool", tool_name: e.name, content: e.error || e.content });
  }
- ask(message, options = {}) {
- const controller = new AbortController();
- const response = new Promise(async (res, rej) => {
- var _a, _b, _c, _d, _e, _f, _g;
- let system = options.system || this.ai.options.system;
- let history = this.fromStandard([...options.history || [], { role: "user", content: message }]);
- if (history[0].roll == "system") {
- if (!system) system = history.shift();
- else history.shift();
- }
- if (options.compress) history = await this.ai.llm.compress(history, options.compress.max, options.compress.min);
- if (options.system) history.unshift({ role: "system", content: system });
- const requestParams = {
- model: options.model || this.model,
- messages: history,
- stream: !!options.stream,
- signal: controller.signal,
+ ask(t, e = {}) {
+ const n = new AbortController(), s = new Promise(async (i, f) => {
+ let c = e.system || this.ai.options.system, m = this.fromStandard([...e.history || [], { role: "user", content: t }]);
+ m[0].roll == "system" && (c ? m.shift() : c = m.shift()), e.compress && (m = await this.ai.llm.compress(m, e.compress.max, e.compress.min)), e.system && m.unshift({ role: "system", content: c });
+ const l = {
+ model: e.model || this.model,
+ messages: m,
+ stream: !!e.stream,
+ signal: n.signal,
  options: {
- temperature: options.temperature || this.ai.options.temperature || 0.7,
- num_predict: options.max_tokens || this.ai.options.max_tokens || 4096
+ temperature: e.temperature || this.ai.options.temperature || 0.7,
+ num_predict: e.max_tokens || this.ai.options.max_tokens || 4096
  },
- tools: (options.tools || this.ai.options.tools || []).map((t) => ({
+ tools: (e.tools || this.ai.options.tools || []).map((r) => ({
  type: "function",
  function: {
- name: t.name,
- description: t.description,
+ name: r.name,
+ description: r.description,
  parameters: {
  type: "object",
- properties: t.args ? objectMap(t.args, (key, value) => ({ ...value, required: void 0 })) : {},
- required: t.args ? Object.entries(t.args).filter((t2) => t2[1].required).map((t2) => t2[0]) : []
+ properties: r.args ? b(r.args, (a, u) => ({ ...u, required: void 0 })) : {},
+ required: r.args ? Object.entries(r.args).filter((a) => a[1].required).map((a) => a[0]) : []
  }
  }
  }))
  };
- let resp;
+ let o;
  do {
- resp = await this.client.chat(requestParams);
- if (options.stream) {
- resp.message = { role: "assistant", content: "", tool_calls: [] };
- for await (const chunk of resp) {
- if (controller.signal.aborted) break;
- if ((_a = chunk.message) == null ? void 0 : _a.content) {
- resp.message.content += chunk.message.content;
- options.stream({ text: chunk.message.content });
- }
- if ((_b = chunk.message) == null ? void 0 : _b.tool_calls) resp.message.tool_calls = chunk.message.tool_calls;
- if (chunk.done) break;
- }
+ if (o = await this.client.chat(l), e.stream) {
+ o.message = { role: "assistant", content: "", tool_calls: [] };
+ for await (const r of o)
+ if (n.signal.aborted || (r.message?.content && (o.message.content += r.message.content, e.stream({ text: r.message.content })), r.message?.tool_calls && (o.message.tool_calls = r.message.tool_calls), r.done)) break;
  }
- if (((_d = (_c = resp.message) == null ? void 0 : _c.tool_calls) == null ? void 0 : _d.length) && !controller.signal.aborted) {
- history.push(resp.message);
- const results = await Promise.all(resp.message.tool_calls.map(async (toolCall) => {
- var _a2;
- const tool = (_a2 = options.tools || this.ai.options.tools) == null ? void 0 : _a2.find(findByProp("name", toolCall.function.name));
- if (!tool) return { role: "tool", tool_name: toolCall.function.name, content: '{"error": "Tool not found"}' };
- const args = typeof toolCall.function.arguments === "string" ? JSONAttemptParse(toolCall.function.arguments, {}) : toolCall.function.arguments;
+ if (o.message?.tool_calls?.length && !n.signal.aborted) {
+ m.push(o.message);
+ const r = await Promise.all(o.message.tool_calls.map(async (a) => {
+ const u = (e.tools || this.ai.options.tools)?.find(k("name", a.function.name));
+ if (!u) return { role: "tool", tool_name: a.function.name, content: '{"error": "Tool not found"}' };
+ const p = typeof a.function.arguments == "string" ? w(a.function.arguments, {}) : a.function.arguments;
  try {
- const result = await tool.fn(args, this.ai);
- return { role: "tool", tool_name: toolCall.function.name, args, content: JSONSanitize(result) };
- } catch (err) {
- return { role: "tool", tool_name: toolCall.function.name, args, content: JSONSanitize({ error: (err == null ? void 0 : err.message) || (err == null ? void 0 : err.toString()) || "Unknown" }) };
+ const h = await u.fn(p, this.ai);
+ return { role: "tool", tool_name: a.function.name, args: p, content: _(h) };
+ } catch (h) {
+ return { role: "tool", tool_name: a.function.name, args: p, content: _({ error: h?.message || h?.toString() || "Unknown" }) };
  }
  }));
- history.push(...results);
- requestParams.messages = history;
+ m.push(...r), l.messages = m;
  }
- } while (!controller.signal.aborted && ((_f = (_e = resp.message) == null ? void 0 : _e.tool_calls) == null ? void 0 : _f.length));
- if (options.stream) options.stream({ done: true });
- res(this.toStandard([...history, { role: "assistant", content: (_g = resp.message) == null ? void 0 : _g.content }]));
+ } while (!n.signal.aborted && o.message?.tool_calls?.length);
+ e.stream && e.stream({ done: !0 }), i(this.toStandard([...m, { role: "assistant", content: o.message?.content }]));
  });
- return Object.assign(response, { abort: () => controller.abort() });
+ return Object.assign(s, { abort: () => n.abort() });
  }
  }
- class OpenAi extends LLMProvider {
- constructor(ai, apiToken, model) {
- super();
- __publicField(this, "client");
- this.ai = ai;
- this.apiToken = apiToken;
- this.model = model;
- this.client = new OpenAI({ apiKey: apiToken });
+ class R extends x {
+ constructor(t, e, n) {
+ super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new M({ apiKey: e });
  }
- toStandard(history) {
- for (let i = 0; i < history.length; i++) {
- const h = history[i];
- if (h.role === "assistant" && h.tool_calls) {
- const tools = h.tool_calls.map((tc) => ({
+ client;
+ toStandard(t) {
+ for (let e = 0; e < t.length; e++) {
+ const n = t[e];
+ if (n.role === "assistant" && n.tool_calls) {
+ const s = n.tool_calls.map((i) => ({
  role: "tool",
- id: tc.id,
- name: tc.function.name,
- args: JSONAttemptParse(tc.function.arguments, {})
+ id: i.id,
+ name: i.function.name,
+ args: w(i.function.arguments, {})
  }));
- history.splice(i, 1, ...tools);
- i += tools.length - 1;
- } else if (h.role === "tool" && h.content) {
- const record = history.find((h2) => h.tool_call_id == h2.id);
- if (record) {
- if (h.content.includes('"error":')) record.error = h.content;
- else record.content = h.content;
- }
- history.splice(i, 1);
- i--;
+ t.splice(e, 1, ...s), e += s.length - 1;
+ } else if (n.role === "tool" && n.content) {
+ const s = t.find((i) => n.tool_call_id == i.id);
+ s && (n.content.includes('"error":') ? s.error = n.content : s.content = n.content), t.splice(e, 1), e--;
  }
  }
- return history;
+ return t;
  }
- fromStandard(history) {
- return history.reduce((result, h) => {
- if (h.role === "tool") {
- result.push({
- role: "assistant",
- content: null,
- tool_calls: [{ id: h.id, type: "function", function: { name: h.name, arguments: JSON.stringify(h.args) } }],
- refusal: null,
- annotations: []
- }, {
- role: "tool",
- tool_call_id: h.id,
- content: h.error || h.content
- });
- } else {
- result.push(h);
- }
- return result;
- }, []);
+ fromStandard(t) {
+ return t.reduce((e, n) => (n.role === "tool" ? e.push({
+ role: "assistant",
+ content: null,
+ tool_calls: [{ id: n.id, type: "function", function: { name: n.name, arguments: JSON.stringify(n.args) } }],
+ refusal: null,
+ annotations: []
+ }, {
+ role: "tool",
+ tool_call_id: n.id,
+ content: n.error || n.content
+ }) : e.push(n), e), []);
  }
- ask(message, options = {}) {
- const controller = new AbortController();
- const response = new Promise(async (res, rej) => {
- var _a, _b, _c, _d;
- let history = this.fromStandard([...options.history || [], { role: "user", content: message }]);
- if (options.compress) history = await this.ai.llm.compress(history, options.compress.max, options.compress.min, options);
- const requestParams = {
- model: options.model || this.model,
- messages: history,
- stream: !!options.stream,
- max_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,
- temperature: options.temperature || this.ai.options.temperature || 0.7,
- tools: (options.tools || this.ai.options.tools || []).map((t) => ({
+ ask(t, e = {}) {
+ const n = new AbortController(), s = new Promise(async (i, f) => {
+ let c = this.fromStandard([...e.history || [], { role: "user", content: t }]);
+ e.compress && (c = await this.ai.llm.compress(c, e.compress.max, e.compress.min, e));
+ const m = {
+ model: e.model || this.model,
+ messages: c,
+ stream: !!e.stream,
+ max_tokens: e.max_tokens || this.ai.options.max_tokens || 4096,
+ temperature: e.temperature || this.ai.options.temperature || 0.7,
+ tools: (e.tools || this.ai.options.tools || []).map((o) => ({
  type: "function",
  function: {
- name: t.name,
- description: t.description,
+ name: o.name,
+ description: o.description,
  parameters: {
  type: "object",
- properties: t.args ? objectMap(t.args, (key, value) => ({ ...value, required: void 0 })) : {},
- required: t.args ? Object.entries(t.args).filter((t2) => t2[1].required).map((t2) => t2[0]) : []
+ properties: o.args ? b(o.args, (r, a) => ({ ...a, required: void 0 })) : {},
+ required: o.args ? Object.entries(o.args).filter((r) => r[1].required).map((r) => r[0]) : []
  }
  }
  }))
  };
- let resp;
+ let l;
  do {
- resp = await this.client.chat.completions.create(requestParams);
- if (options.stream) {
- resp.choices = [];
- for await (const chunk of resp) {
- if (controller.signal.aborted) break;
- if (chunk.choices[0].delta.content) {
- options.stream({ text: chunk.choices[0].delta.content });
- }
+ if (l = await this.client.chat.completions.create(m), e.stream) {
+ l.choices = [];
+ for await (const r of l) {
+ if (n.signal.aborted) break;
+ r.choices[0].delta.content && e.stream({ text: r.choices[0].delta.content });
  }
  }
- const toolCalls = resp.choices[0].message.tool_calls || [];
- if (toolCalls.length && !controller.signal.aborted) {
- history.push(resp.choices[0].message);
- const results = await Promise.all(toolCalls.map(async (toolCall) => {
- var _a2;
- const tool = (_a2 = options.tools) == null ? void 0 : _a2.find(findByProp("name", toolCall.function.name));
- if (!tool) return { role: "tool", tool_call_id: toolCall.id, content: '{"error": "Tool not found"}' };
+ const o = l.choices[0].message.tool_calls || [];
+ if (o.length && !n.signal.aborted) {
+ c.push(l.choices[0].message);
+ const r = await Promise.all(o.map(async (a) => {
+ const u = e.tools?.find(k("name", a.function.name));
+ if (!u) return { role: "tool", tool_call_id: a.id, content: '{"error": "Tool not found"}' };
  try {
- const args = JSONAttemptParse(toolCall.function.arguments, {});
- const result = await tool.fn(args, this.ai);
- return { role: "tool", tool_call_id: toolCall.id, content: JSONSanitize(result) };
- } catch (err) {
- return { role: "tool", tool_call_id: toolCall.id, content: JSONSanitize({ error: (err == null ? void 0 : err.message) || (err == null ? void 0 : err.toString()) || "Unknown" }) };
+ const p = w(a.function.arguments, {}), h = await u.fn(p, this.ai);
+ return { role: "tool", tool_call_id: a.id, content: _(h) };
+ } catch (p) {
+ return { role: "tool", tool_call_id: a.id, content: _({ error: p?.message || p?.toString() || "Unknown" }) };
  }
  }));
- history.push(...results);
- requestParams.messages = history;
+ c.push(...r), m.messages = c;
  }
- } while (!controller.signal.aborted && ((_d = (_c = (_b = (_a = resp.choices) == null ? void 0 : _a[0]) == null ? void 0 : _b.message) == null ? void 0 : _c.tool_calls) == null ? void 0 : _d.length));
- if (options.stream) options.stream({ done: true });
- res(this.toStandard([...history, { role: "assistant", content: resp.choices[0].message.content || "" }]));
+ } while (!n.signal.aborted && l.choices?.[0]?.message?.tool_calls?.length);
+ e.stream && e.stream({ done: !0 }), i(this.toStandard([...c, { role: "assistant", content: l.choices[0].message.content || "" }]));
  });
- return Object.assign(response, { abort: () => controller.abort() });
+ return Object.assign(s, { abort: () => n.abort() });
  }
  }
- class LLM {
- constructor(ai, options) {
- __publicField(this, "providers", {});
- var _a, _b, _c;
- this.ai = ai;
- this.options = options;
- if ((_a = options.anthropic) == null ? void 0 : _a.token) this.providers.anthropic = new Anthropic(this.ai, options.anthropic.token, options.anthropic.model);
- if ((_b = options.ollama) == null ? void 0 : _b.host) this.providers.ollama = new Ollama(this.ai, options.ollama.host, options.ollama.model);
- if ((_c = options.openAi) == null ? void 0 : _c.token) this.providers.openAi = new OpenAi(this.ai, options.openAi.token, options.openAi.model);
+ class J {
+ constructor(t, e) {
+ this.ai = t, this.options = e, e.anthropic?.token && (this.providers.anthropic = new U(this.ai, e.anthropic.token, e.anthropic.model)), e.ollama?.host && (this.providers.ollama = new L(this.ai, e.ollama.host, e.ollama.model)), e.openAi?.token && (this.providers.openAi = new R(this.ai, e.openAi.token, e.openAi.model));
  }
+ providers = {};
  /**
  * Chat with LLM
  * @param {string} message Question
  * @param {LLMRequest} options Configuration options and chat history
  * @returns {{abort: () => void, response: Promise<LLMMessage[]>}} Function to abort response and chat history
  */
- ask(message, options = {}) {
- var _a, _b;
- let model = [null, null];
- if (options.model) {
- if (typeof options.model == "object") model = options.model;
- else model = [options.model, (_a = this.options[options.model]) == null ? void 0 : _a.model];
- }
- if (!options.model || model[1] == null) {
- if (typeof this.options.model == "object") model = this.options.model;
- else model = [this.options.model, (_b = this.options[this.options.model]) == null ? void 0 : _b.model];
- }
- if (!model[0] || !model[1]) throw new Error(`Unknown LLM provider or model: ${model[0]} / ${model[1]}`);
- return this.providers[model[0]].ask(message, { ...options, model: model[1] });
+ ask(t, e = {}) {
+ let n = [null, null];
+ if (e.model && (typeof e.model == "object" ? n = e.model : n = [e.model, this.options[e.model]?.model]), (!e.model || n[1] == null) && (typeof this.options.model == "object" ? n = this.options.model : n = [this.options.model, this.options[this.options.model]?.model]), !n[0] || !n[1]) throw new Error(`Unknown LLM provider or model: ${n[0]} / ${n[1]}`);
+ return this.providers[n[0]].ask(t, { ...e, model: n[1] });
  }
  /**
  * Compress chat history to reduce context size
@@ -391,27 +294,26 @@ class LLM {
  * @param {LLMRequest} options LLM options
  * @returns {Promise<LLMMessage[]>} New chat history will summary at index 0
  */
- async compress(history, max, min, options) {
- if (this.estimateTokens(history) < max) return history;
- let keep = 0, tokens = 0;
- for (let m of history.toReversed()) {
- tokens += this.estimateTokens(m.content);
- if (tokens < min) keep++;
+ async compress(t, e, n, s) {
+ if (this.estimateTokens(t) < e) return t;
+ let i = 0, f = 0;
+ for (let o of t.toReversed())
+ if (f += this.estimateTokens(o.content), f < n) i++;
  else break;
- }
- if (history.length <= keep) return history;
- const recent = keep == 0 ? [] : history.slice(-keep), process = (keep == 0 ? history : history.slice(0, -keep)).filter((h) => h.role === "assistant" || h.role === "user");
- const summary = await this.summarize(process.map((m) => `${m.role}: ${m.content}`).join("\n\n"), 250, options);
- return [{ role: "assistant", content: `Conversation Summary: ${summary}` }, ...recent];
+ if (t.length <= i) return t;
+ const c = i == 0 ? [] : t.slice(-i), m = (i == 0 ? t : t.slice(0, -i)).filter((o) => o.role === "assistant" || o.role === "user");
+ return [{ role: "assistant", content: `Conversation Summary: ${await this.summarize(m.map((o) => `${o.role}: ${o.content}`).join(`
+
+ `), 250, s)}` }, ...c];
  }
  /**
  * Estimate variable as tokens
  * @param history Object to size
  * @returns {number} Rough token count
  */
- estimateTokens(history) {
- const text = JSON.stringify(history);
- return Math.ceil(text.length / 4 * 1.2);
+ estimateTokens(t) {
+ const e = JSON.stringify(t);
+ return Math.ceil(e.length / 4 * 1.2);
  }
  /**
  * Ask a question with JSON response
@@ -419,14 +321,12 @@ class LLM {
  * @param {LLMRequest} options Configuration options and chat history
  * @returns {Promise<{} | {} | RegExpExecArray | null>}
  */
- async json(message, options) {
- var _a;
- let resp = await this.ask(message, {
+ async json(t, e) {
+ let n = await this.ask(t, {
  system: "Respond using a JSON blob",
- ...options
+ ...e
  });
- if (!((_a = resp == null ? void 0 : resp[0]) == null ? void 0 : _a.content)) return {};
- return JSONAttemptParse(new RegExp("{[sS]*}").exec(resp[0].content), {});
+ return n?.[0]?.content ? w(new RegExp("{[sS]*}").exec(n[0].content), {}) : {};
  }
  /**
  * Create a summary of some text
@@ -435,39 +335,29 @@ class LLM {
  * @param options LLM request options
  * @returns {Promise<string>} Summary
  */
- summarize(text, tokens, options) {
- return this.ask(text, { system: `Generate a brief summary <= ${tokens} tokens. Output nothing else`, temperature: 0.3, ...options }).then((history) => {
- var _a;
- return ((_a = history.pop()) == null ? void 0 : _a.content) || null;
- });
+ summarize(t, e, n) {
+ return this.ask(t, { system: `Generate a brief summary <= ${e} tokens. Output nothing else`, temperature: 0.3, ...n }).then((s) => s.pop()?.content || null);
  }
  }
- class Ai {
- constructor(options) {
- __publicField(this, "downloads", {});
- __publicField(this, "whisperModel");
- /** Large Language Models */
- __publicField(this, "llm");
- var _a;
- this.options = options;
- this.llm = new LLM(this, options);
- if ((_a = this.options.whisper) == null ? void 0 : _a.binary) this.downloadAsrModel(this.options.whisper.model);
+ class V {
+ constructor(t) {
+ this.options = t, this.llm = new J(this, t), this.options.whisper?.binary && (this.whisperModel = S.join(this.options.whisper?.path, this.options.whisper?.model + this.options.whisper?.model.endsWith(".bin") ? "" : ".bin"), this.downloadAsrModel());
  }
+ downloads = {};
+ whisperModel;
+ /** Large Language Models */
+ llm;
  /**
  * Convert audio to text using Auditory Speech Recognition
  * @param {string} path Path to audio
  * @param model Whisper model
  * @returns {Promise<any>} Extracted text
  */
- async asr(path, model) {
- var _a;
- if (!((_a = this.options.whisper) == null ? void 0 : _a.binary)) throw new Error("Whisper not configured");
- if (!model) model = this.options.whisper.model;
- await this.downloadAsrModel(model);
- const name = Math.random().toString(36).substring(2, 10) + "-" + path.split("/").pop();
- const output = Path.join(this.options.whisper.path || "/tmp", name);
- await $`rm -f /tmp/${name}.txt && ${this.options.whisper.binary} -nt -np -m ${this.whisperModel} -f ${path} -otxt -of ${output}`;
- return fs.readFile(output, "utf-8").then((text) => (text == null ? void 0 : text.trim()) || null).finally(() => fs.rm(output, { force: true }).catch(() => {
+ async asr(t, e) {
+ if (!this.options.whisper?.binary) throw new Error("Whisper not configured");
+ e || (e = this.options.whisper.model), await this.downloadAsrModel();
+ const n = Math.random().toString(36).substring(2, 10) + "-" + t.split("/").pop() + ".txt", s = S.join(this.options.whisper.temp || "/tmp", n);
+ return await j`rm -f ${s} && ${this.options.whisper.binary} -nt -np -m ${this.whisperModel} -f ${t} -otxt -of ${s}`, y.readFile(s, "utf-8").then((i) => i?.trim() || null).finally(() => y.rm(s, { force: !0 }).catch(() => {
  }));
  }
  /**
@@ -476,34 +366,29 @@ class Ai {
  * @param {string} model Whisper model that will be downloaded
  * @return {Promise<void>} A promise that resolves once the model is downloaded and saved locally.
  */
- async downloadAsrModel(model) {
- var _a, _b, _c, _d;
- if (!((_a = this.options.whisper) == null ? void 0 : _a.binary)) throw new Error("Whisper not configured");
- this.whisperModel = Path.join((_b = this.options.whisper) == null ? void 0 : _b.path, ((_c = this.options.whisper) == null ? void 0 : _c.model) + ".bin");
- if (await fs.stat(this.whisperModel).then(() => true).catch(() => false)) return;
- if (!!this.downloads[model]) return this.downloads[model];
- this.downloads[model] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${(_d = this.options.whisper) == null ? void 0 : _d.model}.bin`).then((resp) => resp.arrayBuffer()).then((arr) => Buffer.from(arr)).then(async (buffer) => {
- await fs.writeFile(this.whisperModel, buffer);
- delete this.downloads[model];
- });
- return this.downloads[model];
+ async downloadAsrModel() {
+ if (!this.options.whisper?.binary) throw new Error("Whisper not configured");
+ if (await y.stat(this.whisperModel).then(() => !0).catch(() => !1)) return;
+ const t = this.whisperModel.split("/").at(-1);
+ return this.downloads[t] ? this.downloads[t] : (this.downloads[t] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${t}`).then((e) => e.arrayBuffer()).then((e) => Buffer.from(e)).then(async (e) => {
+ await y.writeFile(this.whisperModel, e), delete this.downloads[t];
+ }), this.downloads[t]);
  }
  /**
  * Convert image to text using Optical Character Recognition
  * @param {string} path Path to image
  * @returns {{abort: Function, response: Promise<string | null>}} Abort function & Promise of extracted text
  */
- ocr(path) {
- let worker;
+ ocr(t) {
+ let e;
  return {
  abort: () => {
- worker == null ? void 0 : worker.terminate();
+ e?.terminate();
  },
- response: new Promise(async (res) => {
- worker = await createWorker("eng");
- const { data } = await worker.recognize(path);
- await worker.terminate();
- res(data.text.trim() || null);
+ response: new Promise(async (n) => {
+ e = await q("eng");
+ const { data: s } = await e.recognize(t);
+ await e.terminate(), n(s.text.trim() || null);
  })
  };
  }
@@ -513,124 +398,104 @@ class Ai {
  * @param {string} searchTerms Multiple search terms to check against target
  * @returns {{avg: number, max: number, similarities: number[]}} Similarity values 0-1: 0 = unique, 1 = identical
  */
- semanticSimilarity(target, ...searchTerms) {
- if (searchTerms.length < 2) throw new Error("Requires at least 2 strings to compare");
- const vector = (text, dimensions = 10) => {
- return text.toLowerCase().split("").map((char, index) => char.charCodeAt(0) * (index + 1) % dimensions / dimensions).slice(0, dimensions);
- };
- const cosineSimilarity = (v1, v2) => {
- if (v1.length !== v2.length) throw new Error("Vectors must be same length");
- const tensor1 = tf.tensor1d(v1), tensor2 = tf.tensor1d(v2);
- const dotProduct = tf.dot(tensor1, tensor2);
- const magnitude1 = tf.norm(tensor1);
- const magnitude2 = tf.norm(tensor2);
- if (magnitude1.dataSync()[0] === 0 || magnitude2.dataSync()[0] === 0) return 0;
- return dotProduct.dataSync()[0] / (magnitude1.dataSync()[0] * magnitude2.dataSync()[0]);
- };
- const v = vector(target);
- const similarities = searchTerms.map((t) => vector(t)).map((refVector) => cosineSimilarity(v, refVector));
- return { avg: similarities.reduce((acc, s) => acc + s, 0) / similarities.length, max: Math.max(...similarities), similarities };
+ semanticSimilarity(t, ...e) {
+ if (e.length < 2) throw new Error("Requires at least 2 strings to compare");
+ const n = (c, m = 10) => c.toLowerCase().split("").map((l, o) => l.charCodeAt(0) * (o + 1) % m / m).slice(0, m), s = (c, m) => {
+ if (c.length !== m.length) throw new Error("Vectors must be same length");
+ const l = g.tensor1d(c), o = g.tensor1d(m), r = g.dot(l, o), a = g.norm(l), u = g.norm(o);
+ return a.dataSync()[0] === 0 || u.dataSync()[0] === 0 ? 0 : r.dataSync()[0] / (a.dataSync()[0] * u.dataSync()[0]);
+ }, i = n(t), f = e.map((c) => n(c)).map((c) => s(i, c));
+ return { avg: f.reduce((c, m) => c + m, 0) / f.length, max: Math.max(...f), similarities: f };
  }
  }
- const CliTool = {
+ const I = {
  name: "cli",
  description: "Use the command line interface, returns any output",
- args: { command: { type: "string", description: "Command to run", required: true } },
- fn: (args) => $`${args.command}`
- };
- const DateTimeTool = {
+ args: { command: { type: "string", description: "Command to run", required: !0 } },
+ fn: (d) => j`${d.command}`
+ }, Q = {
  name: "get_datetime",
  description: "Get current date and time",
  args: {},
  fn: async () => (/* @__PURE__ */ new Date()).toISOString()
- };
- const ExecTool = {
+ }, X = {
  name: "exec",
  description: "Run code/scripts",
  args: {
- language: { type: "string", description: "Execution language", enum: ["cli", "node", "python"], required: true },
- code: { type: "string", description: "Code to execute", required: true }
+ language: { type: "string", description: "Execution language", enum: ["cli", "node", "python"], required: !0 },
+ code: { type: "string", description: "Code to execute", required: !0 }
  },
- fn: async (args, ai) => {
+ fn: async (d, t) => {
  try {
- switch (args.type) {
+ switch (d.type) {
  case "bash":
- return await CliTool.fn({ command: args.code }, ai);
+ return await I.fn({ command: d.code }, t);
  case "node":
- return await JSTool.fn({ code: args.code }, ai);
- case "python": {
- return await PythonTool.fn({ code: args.code }, ai);
- }
+ return await N.fn({ code: d.code }, t);
+ case "python":
+ return await W.fn({ code: d.code }, t);
  }
- } catch (err) {
- return { error: (err == null ? void 0 : err.message) || err.toString() };
+ } catch (e) {
+ return { error: e?.message || e.toString() };
  }
  }
- };
- const FetchTool = {
+ }, Y = {
  name: "fetch",
  description: "Make HTTP request to URL",
  args: {
- url: { type: "string", description: "URL to fetch", required: true },
+ url: { type: "string", description: "URL to fetch", required: !0 },
  method: { type: "string", description: "HTTP method to use", enum: ["GET", "POST", "PUT", "DELETE"], default: "GET" },
  headers: { type: "object", description: "HTTP headers to send", default: {} },
  body: { type: "object", description: "HTTP body to send" }
  },
- fn: (args) => new Http({ url: args.url, headers: args.headers }).request({ method: args.method || "GET", body: args.body })
- };
- const JSTool = {
+ fn: (d) => new v({ url: d.url, headers: d.headers }).request({ method: d.method || "GET", body: d.body })
+ }, N = {
  name: "exec_javascript",
  description: "Execute commonjs javascript",
  args: {
- code: { type: "string", description: "CommonJS javascript", required: true }
+ code: { type: "string", description: "CommonJS javascript", required: !0 }
  },
- fn: async (args) => {
- const console = consoleInterceptor(null);
- const resp = await fn({ console }, args.code, true).catch((err) => console.output.error.push(err));
- return { ...console.output, return: resp, stdout: void 0, stderr: void 0 };
+ fn: async (d) => {
+ const t = P(null), e = await A({ console: t }, d.code, !0).catch((n) => t.output.error.push(n));
+ return { ...t.output, return: e, stdout: void 0, stderr: void 0 };
  }
- };
- const PythonTool = {
+ }, W = {
  name: "exec_javascript",
  description: "Execute commonjs javascript",
  args: {
- code: { type: "string", description: "CommonJS javascript", required: true }
+ code: { type: "string", description: "CommonJS javascript", required: !0 }
  },
- fn: async (args) => ({ result: $Sync`python -c "${args.code}"` })
- };
- const SearchTool = {
+ fn: async (d) => ({ result: T`python -c "${d.code}"` })
+ }, Z = {
  name: "search",
  description: "Use a search engine to find relevant URLs, should be changed with fetch to scrape sources",
  args: {
- query: { type: "string", description: "Search string", required: true },
+ query: { type: "string", description: "Search string", required: !0 },
  length: { type: "string", description: "Number of results to return", default: 5 }
  },
- fn: async (args) => {
- var _a;
- const html = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(args.query)}`, {
+ fn: async (d) => {
+ const t = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(d.query)}`, {
  headers: { "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)", "Accept-Language": "en-US,en;q=0.9" }
- }).then((resp) => resp.text());
- let match, regex = /<a .*?href="(.+?)".+?<\/a>/g;
- const results = new ASet();
- while ((match = regex.exec(html)) !== null) {
- let url = (_a = /uddg=(.+)&amp?/.exec(decodeURIComponent(match[1]))) == null ? void 0 : _a[1];
- if (url) url = decodeURIComponent(url);
- if (url) results.add(url);
- if (results.size >= (args.length || 5)) break;
+ }).then((i) => i.text());
+ let e, n = /<a .*?href="(.+?)".+?<\/a>/g;
+ const s = new O();
+ for (; (e = n.exec(t)) !== null; ) {
+ let i = /uddg=(.+)&amp?/.exec(decodeURIComponent(e[1]))?.[1];
+ if (i && (i = decodeURIComponent(i)), i && s.add(i), s.size >= (d.length || 5)) break;
  }
- return results;
+ return s;
  }
  };
  export {
- Ai,
- Anthropic,
- CliTool,
- DateTimeTool,
- ExecTool,
- FetchTool,
- JSTool,
- LLM,
- PythonTool,
- SearchTool
+ V as Ai,
+ U as Anthropic,
+ I as CliTool,
+ Q as DateTimeTool,
+ X as ExecTool,
+ Y as FetchTool,
+ N as JSTool,
+ J as LLM,
+ W as PythonTool,
+ Z as SearchTool
  };
  //# sourceMappingURL=index.mjs.map
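One pattern worth noting from both sides of the diff: every provider's ask() maps tools into the provider-specific schema (input_schema for Anthropic, function.parameters for Ollama/OpenAI), strips the required flag from each arg into the schema's required list, and invokes fn(args, ai) for each requested call, serializing the result with JSONSanitize. A sketch of a custom tool following the same shape as the bundled CliTool/FetchTool; the weather tool itself is hypothetical:

    const WeatherTool = {
        name: 'get_weather',
        description: 'Look up the current weather for a city',
        args: {
            // required: true is removed from the property schema and
            // collected into the JSON-schema required array
            city: { type: 'string', description: 'City name', required: true }
        },
        // receives the parsed arguments and the Ai instance
        fn: async (args, ai) => ({ city: args.city, temp: 21 })
    };

    const history = await ai.llm.ask('Weather in Paris?', { tools: [WeatherTool] });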