@ztimson/ai-utils 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,633 @@
1
+ (function(global, factory) {
2
+ typeof exports === "object" && typeof module !== "undefined" ? factory(exports, require("@ztimson/node-utils"), require("tesseract.js"), require("@ztimson/utils"), require("@anthropic-ai/sdk"), require("ollama"), require("openai"), require("node:fs/promises"), require("node:path"), require("@tensorflow/tfjs")) : typeof define === "function" && define.amd ? define(["exports", "@ztimson/node-utils", "tesseract.js", "@ztimson/utils", "@anthropic-ai/sdk", "ollama", "openai", "node:fs/promises", "node:path", "@tensorflow/tfjs"], factory) : (global = typeof globalThis !== "undefined" ? globalThis : global || self, factory(global.utils = {}, global.nodeUtils, global.tesseract_js, global.utils, global.sdk, global.ollama, global.openai, global.fs, global.Path, global.tf));
3
+ })(this, function(exports2, nodeUtils, tesseract_js, utils, sdk, ollama, openai, fs, Path, tf) {
4
+ "use strict";var __defProp = Object.defineProperty;
5
+ var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
6
+ var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
7
+
8
/**
 * Wrap a CommonJS export object in a frozen ES-module-like namespace.
 * Named properties become live getters; the original object is exposed
 * under `default`.
 */
function _interopNamespaceDefault(e) {
  const ns = Object.create(null, { [Symbol.toStringTag]: { value: "Module" } });
  if (e) {
    for (const key in e) {
      if (key === "default") continue;
      const desc = Object.getOwnPropertyDescriptor(e, key);
      // Preserve accessor descriptors; plain values get a forwarding getter
      Object.defineProperty(ns, key, desc.get ? desc : {
        enumerable: true,
        get: () => e[key]
      });
    }
  }
  ns.default = e;
  return Object.freeze(ns);
}
24
+ const tf__namespace = /* @__PURE__ */ _interopNamespaceDefault(tf);
25
/** Abstract base class for chat providers (Anthropic, Ollama, OpenAi). */
class LLMProvider {
}
27
class Anthropic extends LLMProvider {
  /**
   * LLM provider backed by the Anthropic Messages API.
   * @param ai Parent Ai instance (supplies defaults & tool context)
   * @param {string} apiToken Anthropic API key
   * @param {string} model Default model identifier
   */
  constructor(ai, apiToken, model) {
    super();
    __publicField(this, "client");
    this.ai = ai;
    this.apiToken = apiToken;
    this.model = model;
    this.client = new sdk.Anthropic({ apiKey: apiToken });
  }
  /**
   * Convert Anthropic-format history (content-block arrays) into the library's
   * standard flat history. Mutates & returns the given array: tool_use blocks
   * become `role: "tool"` records, tool_result blocks are folded into their
   * matching record, and remaining text blocks are joined into plain strings.
   * @param {any[]} history Anthropic-format messages
   * @returns {any[]} Standardized messages (entries with empty content dropped)
   */
  toStandard(history) {
    for (let i = 0; i < history.length; i++) {
      const orgI = i;
      if (typeof history[orgI].content != "string") {
        if (history[orgI].role == "assistant") {
          // Expand each tool_use block into its own standard "tool" record
          history[orgI].content.filter((c) => c.type == "tool_use").forEach((c) => {
            i++;
            history.splice(i, 0, { role: "tool", id: c.id, name: c.name, args: c.input });
          });
        } else if (history[orgI].role == "user") {
          // Attach tool results/errors to the previously created "tool" record
          history[orgI].content.filter((c) => c.type == "tool_result").forEach((c) => {
            const h = history.find((h2) => h2.id == c.tool_use_id);
            if (h) h[c.is_error ? "error" : "content"] = c.content;
          });
        }
        history[orgI].content = history[orgI].content.filter((c) => c.type == "text").map((c) => c.text).join("\n\n");
      }
    }
    return history.filter((h) => !!h.content);
  }
  /**
   * Convert standard history into Anthropic's wire format. Each standard
   * `role: "tool"` record becomes the assistant tool_use / user tool_result
   * pair the API expects.
   * @param {any[]} history Standardized messages
   * @returns {any[]} Anthropic-format messages
   */
  fromStandard(history) {
    for (let i = 0; i < history.length; i++) {
      if (history[i].role == "tool") {
        const h = history[i];
        // deleteCount must be 1: the published build used splice(i, 0, ...),
        // leaving the invalid `role: "tool"` record in the request payload,
        // which the Messages API rejects.
        history.splice(
          i,
          1,
          { role: "assistant", content: [{ type: "tool_use", id: h.id, name: h.name, input: h.args }] },
          { role: "user", content: [{ type: "tool_result", tool_use_id: h.id, is_error: !!h.error, content: h.error || h.content }] }
        );
        i += 1;
      }
    }
    return history;
  }
  /**
   * Chat with the model, transparently running any requested tools until the
   * model produces a final answer.
   * @param {string} message User prompt
   * @param {object} options History, system prompt, tools, streaming callback, etc.
   * @returns {Promise & {abort: () => void}} Abortable promise of the standardized history
   */
  ask(message, options = {}) {
    const controller = new AbortController();
    const response = new Promise(async (res, rej) => {
      try {
        let history = this.fromStandard([...options.history || [], { role: "user", content: message }]);
        if (options.compress) history = await this.ai.llm.compress(history, options.compress.max, options.compress.min, options);
        const requestParams = {
          model: options.model || this.model,
          max_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,
          system: options.system || this.ai.options.system || "",
          temperature: options.temperature || this.ai.options.temperature || 0.7,
          // Advertise tools in Anthropic's input_schema format; strip the
          // local-only `required` flags and `fn` implementation
          tools: (options.tools || this.ai.options.tools || []).map((t) => ({
            name: t.name,
            description: t.description,
            input_schema: {
              type: "object",
              properties: utils.objectMap(t.args, (key, value) => ({ ...value, required: void 0 })),
              required: Object.entries(t.args || {}).filter((t2) => t2[1].required).map((t2) => t2[0])
            },
            fn: void 0
          })),
          messages: history,
          stream: !!options.stream
        };
        let resp;
        do {
          resp = await this.client.messages.create(requestParams);
          if (options.stream) {
            // Re-assemble the streamed events into normal content blocks
            resp.content = [];
            for await (const chunk of resp) {
              if (controller.signal.aborted) break;
              if (chunk.type === "content_block_start") {
                if (chunk.content_block.type === "text") {
                  resp.content.push({ type: "text", text: "" });
                } else if (chunk.content_block.type === "tool_use") {
                  resp.content.push({ type: "tool_use", id: chunk.content_block.id, name: chunk.content_block.name, input: "" });
                }
              } else if (chunk.type === "content_block_delta") {
                if (chunk.delta.type === "text_delta") {
                  const text = chunk.delta.text;
                  resp.content.at(-1).text += text;
                  options.stream({ text });
                } else if (chunk.delta.type === "input_json_delta") {
                  // Tool arguments arrive as partial JSON fragments
                  resp.content.at(-1).input += chunk.delta.partial_json;
                }
              } else if (chunk.type === "content_block_stop") {
                const last = resp.content.at(-1);
                if (last.input) last.input = utils.JSONAttemptParse(last.input, {});
              } else if (chunk.type === "message_stop") {
                break;
              }
            }
          }
          const toolCalls = resp.content.filter((c) => c.type === "tool_use");
          if (toolCalls.length && !controller.signal.aborted) {
            history.push({ role: "assistant", content: resp.content });
            const results = await Promise.all(toolCalls.map(async (toolCall) => {
              // Fall back to the globally configured tools — the same set that
              // was advertised above. The published build only consulted
              // options.tools, reporting "Tool not found" for global tools.
              const tool = (options.tools || this.ai.options.tools || []).find(utils.findByProp("name", toolCall.name));
              if (!tool) return { type: "tool_result", tool_use_id: toolCall.id, is_error: true, content: "Tool not found" };
              try {
                const result = await tool.fn(toolCall.input, this.ai);
                return { type: "tool_result", tool_use_id: toolCall.id, content: utils.JSONSanitize(result) };
              } catch (err) {
                return { type: "tool_result", tool_use_id: toolCall.id, is_error: true, content: err?.message || err?.toString() || "Unknown" };
              }
            }));
            history.push({ role: "user", content: results });
            requestParams.messages = history;
          }
        } while (!controller.signal.aborted && resp.content.some((c) => c.type === "tool_use"));
        if (options.stream) options.stream({ done: true });
        res(this.toStandard([...history, {
          role: "assistant",
          content: resp.content.filter((c) => c.type == "text").map((c) => c.text).join("\n\n")
        }]));
      } catch (err) {
        // The published build's async executor could leave the promise
        // pending forever on failure; surface the error instead
        rej(err);
      }
    });
    return Object.assign(response, { abort: () => controller.abort() });
  }
}
150
class Ollama extends LLMProvider {
  /**
   * LLM provider backed by an Ollama server.
   * @param ai Parent Ai instance (supplies defaults & tool context)
   * @param {string} host Ollama server URL
   * @param {string} model Default model name
   */
  constructor(ai, host, model) {
    super();
    __publicField(this, "client");
    this.ai = ai;
    this.host = host;
    this.model = model;
    this.client = new ollama.Ollama({ host });
  }
  /**
   * Convert Ollama-format history into the library's standard format:
   * bare assistant tool_call stubs are removed (or their tool_calls stripped
   * when they also carry text) and tool replies become `role: "tool"` records.
   * Mutates & returns the given array.
   */
  toStandard(history) {
    for (let i = 0; i < history.length; i++) {
      if (history[i].role == "assistant" && history[i].tool_calls) {
        if (history[i].content) delete history[i].tool_calls;
        else {
          history.splice(i, 1);
          i--;
        }
      } else if (history[i].role == "tool") {
        // Failed tool runs are serialized as '{"error": ...}' payloads (see ask)
        const error = history[i].content.startsWith('{"error":');
        history[i] = { role: "tool", name: history[i].tool_name, args: history[i].args, [error ? "error" : "content"]: history[i].content };
      }
    }
    return history;
  }
  /** Convert standard history records into the wire format Ollama expects. */
  fromStandard(history) {
    return history.map((h) => {
      if (h.role != "tool") return h;
      return { role: "tool", tool_name: h.name, content: h.error || h.content };
    });
  }
  /**
   * Chat with the model, transparently running any requested tools until the
   * model produces a final answer.
   * @param {string} message User prompt
   * @param {object} options History, system prompt, tools, streaming callback, etc.
   * @returns {Promise & {abort: () => void}} Abortable promise of the standardized history
   */
  ask(message, options = {}) {
    const controller = new AbortController();
    const response = new Promise(async (res, rej) => {
      try {
        let system = options.system || this.ai.options.system;
        let history = this.fromStandard([...options.history || [], { role: "user", content: message }]);
        // Hoist a leading system message out of the history. The published
        // build checked `history[0].roll` (typo — never matched), kept the
        // whole message object instead of its text, and only re-inserted the
        // system prompt when options.system was set, silently dropping
        // globally configured prompts.
        if (history[0]?.role == "system") {
          const first = history.shift();
          if (!system) system = first.content;
        }
        if (options.compress) history = await this.ai.llm.compress(history, options.compress.max, options.compress.min, options);
        if (system) history.unshift({ role: "system", content: system });
        const requestParams = {
          model: options.model || this.model,
          messages: history,
          stream: !!options.stream,
          signal: controller.signal,
          options: {
            temperature: options.temperature || this.ai.options.temperature || 0.7,
            num_predict: options.max_tokens || this.ai.options.max_tokens || 4096
          },
          // Advertise tools using Ollama's OpenAI-style function schema
          tools: (options.tools || this.ai.options.tools || []).map((t) => ({
            type: "function",
            function: {
              name: t.name,
              description: t.description,
              parameters: {
                type: "object",
                properties: utils.objectMap(t.args, (key, value) => ({ ...value, required: void 0 })),
                required: Object.entries(t.args || {}).filter((t2) => t2[1].required).map((t2) => t2[0])
              }
            }
          }))
        };
        let resp;
        do {
          resp = await this.client.chat(requestParams);
          if (options.stream) {
            // Re-assemble streamed chunks into a single response message
            resp.message = { role: "assistant", content: "", tool_calls: [] };
            for await (const chunk of resp) {
              if (controller.signal.aborted) break;
              if (chunk.message?.content) {
                resp.message.content += chunk.message.content;
                options.stream({ text: chunk.message.content });
              }
              if (chunk.message?.tool_calls) resp.message.tool_calls = chunk.message.tool_calls;
              if (chunk.done) break;
            }
          }
          if (resp.message?.tool_calls?.length && !controller.signal.aborted) {
            history.push(resp.message);
            const results = await Promise.all(resp.message.tool_calls.map(async (toolCall) => {
              const tool = (options.tools || this.ai.options.tools)?.find(utils.findByProp("name", toolCall.function.name));
              if (!tool) return { role: "tool", tool_name: toolCall.function.name, content: '{"error": "Tool not found"}' };
              // Arguments may arrive pre-parsed or as a JSON string
              const args = typeof toolCall.function.arguments === "string" ? utils.JSONAttemptParse(toolCall.function.arguments, {}) : toolCall.function.arguments;
              try {
                const result = await tool.fn(args, this.ai);
                return { role: "tool", tool_name: toolCall.function.name, args, content: utils.JSONSanitize(result) };
              } catch (err) {
                return { role: "tool", tool_name: toolCall.function.name, args, content: utils.JSONSanitize({ error: err?.message || err?.toString() || "Unknown" }) };
              }
            }));
            history.push(...results);
            requestParams.messages = history;
          }
        } while (!controller.signal.aborted && resp.message?.tool_calls?.length);
        if (options.stream) options.stream({ done: true });
        res(this.toStandard([...history, { role: "assistant", content: resp.message?.content }]));
      } catch (err) {
        // Surface failures instead of leaving the promise pending
        rej(err);
      }
    });
    return Object.assign(response, { abort: () => controller.abort() });
  }
}
253
class OpenAi extends LLMProvider {
  /**
   * LLM provider backed by the OpenAI chat-completions API.
   * @param ai Parent Ai instance (supplies defaults & tool context)
   * @param {string} apiToken OpenAI API key
   * @param {string} model Default model name
   */
  constructor(ai, apiToken, model) {
    super();
    __publicField(this, "client");
    this.ai = ai;
    this.apiToken = apiToken;
    this.model = model;
    this.client = new openai.OpenAI({ apiKey: apiToken });
  }
  /**
   * Convert OpenAI-format history into the library's standard format:
   * assistant tool_calls are expanded into `role: "tool"` records and the
   * separate tool replies are folded into them. Mutates & returns the array.
   */
  toStandard(history) {
    for (let i = 0; i < history.length; i++) {
      const h = history[i];
      if (h.role === "assistant" && h.tool_calls) {
        const tools = h.tool_calls.map((tc) => ({
          role: "tool",
          id: tc.id,
          name: tc.function.name,
          args: utils.JSONAttemptParse(tc.function.arguments, {})
        }));
        history.splice(i, 1, ...tools);
        i += tools.length - 1;
      } else if (h.role === "tool" && h.content) {
        // Fold the reply into the record created from the matching tool_call
        const record = history.find((h2) => h.tool_call_id == h2.id);
        if (record) {
          if (h.content.includes('"error":')) record.error = h.content;
          else record.content = h.content;
        }
        history.splice(i, 1);
        i--;
      }
    }
    return history;
  }
  /** Convert standard history into OpenAI's assistant/tool message pairs. */
  fromStandard(history) {
    return history.reduce((result, h) => {
      if (h.role === "tool") {
        result.push({
          role: "assistant",
          content: null,
          tool_calls: [{ id: h.id, type: "function", function: { name: h.name, arguments: JSON.stringify(h.args) } }],
          refusal: null,
          annotations: []
        }, {
          role: "tool",
          tool_call_id: h.id,
          content: h.error || h.content
        });
      } else {
        result.push(h);
      }
      return result;
    }, []);
  }
  /**
   * Chat with the model, transparently running any requested tools until the
   * model produces a final answer.
   * @param {string} message User prompt
   * @param {object} options History, system prompt, tools, streaming callback, etc.
   * @returns {Promise & {abort: () => void}} Abortable promise of the standardized history
   */
  ask(message, options = {}) {
    const controller = new AbortController();
    const response = new Promise(async (res, rej) => {
      try {
        let history = this.fromStandard([...options.history || [], { role: "user", content: message }]);
        if (options.compress) history = await this.ai.llm.compress(history, options.compress.max, options.compress.min, options);
        const requestParams = {
          model: options.model || this.model,
          messages: history,
          stream: !!options.stream,
          max_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,
          temperature: options.temperature || this.ai.options.temperature || 0.7,
          tools: (options.tools || this.ai.options.tools || []).map((t) => ({
            type: "function",
            function: {
              name: t.name,
              description: t.description,
              parameters: {
                type: "object",
                properties: utils.objectMap(t.args, (key, value) => ({ ...value, required: void 0 })),
                required: Object.entries(t.args || {}).filter((t2) => t2[1].required).map((t2) => t2[0])
              }
            }
          }))
        };
        let resp;
        do {
          resp = await this.client.chat.completions.create(requestParams);
          if (options.stream) {
            // Re-assemble the streamed deltas into a completion-shaped
            // response. The published build left resp.choices empty, so the
            // tool-call check below crashed on resp.choices[0].message.
            const assembled = { role: "assistant", content: "", tool_calls: [] };
            for await (const chunk of resp) {
              if (controller.signal.aborted) break;
              const delta = chunk.choices?.[0]?.delta;
              if (!delta) continue;
              if (delta.content) {
                assembled.content += delta.content;
                options.stream({ text: delta.content });
              }
              // Tool calls stream as an id-bearing head followed by argument fragments
              (delta.tool_calls || []).forEach((tc) => {
                if (tc.id) assembled.tool_calls.push({ id: tc.id, type: "function", function: { name: tc.function?.name || "", arguments: tc.function?.arguments || "" } });
                else if (assembled.tool_calls.at(-1) && tc.function?.arguments) assembled.tool_calls.at(-1).function.arguments += tc.function.arguments;
              });
            }
            if (!assembled.tool_calls.length) delete assembled.tool_calls;
            resp = { choices: [{ message: assembled }] };
          }
          const toolCalls = resp.choices[0].message.tool_calls || [];
          if (toolCalls.length && !controller.signal.aborted) {
            history.push(resp.choices[0].message);
            const results = await Promise.all(toolCalls.map(async (toolCall) => {
              // Fall back to the globally configured tools, matching the set advertised above
              const tool = (options.tools || this.ai.options.tools || []).find(utils.findByProp("name", toolCall.function.name));
              if (!tool) return { role: "tool", tool_call_id: toolCall.id, content: '{"error": "Tool not found"}' };
              try {
                const args = utils.JSONAttemptParse(toolCall.function.arguments, {});
                const result = await tool.fn(args, this.ai);
                return { role: "tool", tool_call_id: toolCall.id, content: utils.JSONSanitize(result) };
              } catch (err) {
                return { role: "tool", tool_call_id: toolCall.id, content: utils.JSONSanitize({ error: err?.message || err?.toString() || "Unknown" }) };
              }
            }));
            history.push(...results);
            requestParams.messages = history;
          }
        } while (!controller.signal.aborted && resp.choices?.[0]?.message?.tool_calls?.length);
        if (options.stream) options.stream({ done: true });
        res(this.toStandard([...history, { role: "assistant", content: resp.choices[0].message.content || "" }]));
      } catch (err) {
        // Surface failures instead of leaving the promise pending
        rej(err);
      }
    });
    return Object.assign(response, { abort: () => controller.abort() });
  }
}
368
class LLM {
  /**
   * Facade over the configured chat providers.
   * @param ai Parent Ai instance
   * @param options Provider credentials & defaults (anthropic, ollama, openAi)
   */
  constructor(ai, options) {
    __publicField(this, "providers", {});
    this.ai = ai;
    this.options = options;
    if (options.anthropic?.token) this.providers.anthropic = new Anthropic(this.ai, options.anthropic.token, options.anthropic.model);
    if (options.ollama?.host) this.providers.ollama = new Ollama(this.ai, options.ollama.host, options.ollama.model);
    if (options.openAi?.token) this.providers.openAi = new OpenAi(this.ai, options.openAi.token, options.openAi.model);
  }
  /**
   * Chat with LLM
   * @param {string} message Question
   * @param {LLMRequest} options Configuration options and chat history
   * @returns {{abort: () => void, response: Promise<LLMMessage[]>}} Function to abort response and chat history
   */
  ask(message, options = {}) {
    // model is a [providerKey, modelName] tuple; fall back per-element to defaults
    const model = [options.model?.[0] || this.options.model[0], options.model?.[1] || this.options.model[1]];
    if (!this.providers[model[0]]) throw new Error(`Unknown LLM provider: ${model[0]}`);
    return this.providers[model[0]].ask(message, { ...options, model: model[1] });
  }
  /**
   * Compress chat history to reduce context size
   * @param {LLMMessage[]} history Chatlog that will be compressed
   * @param max Trigger compression once context is larger than max
   * @param min Summarize until context size is less than min
   * @param {LLMRequest} options LLM options
   * @returns {Promise<LLMMessage[]>} New chat history with summary at index 0
   */
  async compress(history, max, min, options) {
    if (this.estimateTokens(history) < max) return history;
    // Walk backwards, keeping the most recent messages that fit under `min`
    let keep = 0, tokens = 0;
    for (const m of history.toReversed()) {
      tokens += this.estimateTokens(m.content);
      if (tokens < min) keep++;
      else break;
    }
    if (history.length <= keep) return history;
    const recent = keep == 0 ? [] : history.slice(-keep);
    const older = (keep == 0 ? history : history.slice(0, -keep)).filter((h) => h.role === "assistant" || h.role === "user");
    const summary = await this.summarize(older.map((m) => `${m.role}: ${m.content}`).join("\n\n"), 250, options);
    return [{ role: "assistant", content: `Conversation Summary: ${summary}` }, ...recent];
  }
  /**
   * Estimate variable as tokens
   * @param history Object to size
   * @returns {number} Rough token count (~4 chars per token plus 20% padding)
   */
  estimateTokens(history) {
    const text = JSON.stringify(history);
    return Math.ceil(text.length / 4 * 1.2);
  }
  /**
   * Ask the LLM a question and parse the first JSON object in its answer.
   * @param {string} message Question
   * @param {LLMRequest} options LLM options
   * @returns {Promise<any>} Parsed object, or {} when no JSON was produced
   */
  async json(message, options) {
    const resp = await this.ask(message, {
      system: "",
      ...options
    });
    // The answer is the LAST history entry; the published build read resp[0]
    // (the user's own prompt), used new RegExp("{[sS]*}") whose backslashes
    // were lost in bundling, and parsed the exec() match ARRAY instead of the
    // matched text.
    const content = resp?.at(-1)?.content;
    if (!content) return {};
    const match = /\{[\s\S]*\}/.exec(content);
    return match ? utils.JSONAttemptParse(match[0], {}) : {};
  }
  /**
   * Create a summary of some text
   * @param {string} text Text to summarize
   * @param {number} tokens Max number of tokens
   * @param options LLM request options
   * @returns {Promise<string>} Summary
   */
  summarize(text, tokens, options) {
    return this.ask(text, { system: `Generate a brief summary <= ${tokens} tokens. Output nothing else`, temperature: 0.3, ...options })
      .then((history) => history.pop()?.content || null);
  }
}
443
class Ai {
  /**
   * Entry point bundling LLM access, ASR (whisper.cpp), OCR (tesseract) and
   * tensor-based text similarity.
   * @param options Global configuration (providers, whisper, defaults)
   */
  constructor(options) {
    // In-flight whisper model downloads, keyed by model name
    __publicField(this, "downloads", {});
    // Absolute path of the active whisper model file
    __publicField(this, "whisperModel");
    /** Large Language Models */
    __publicField(this, "llm");
    this.options = options;
    this.llm = new LLM(this, options);
    // Best-effort prefetch of the default model so the first asr() call
    // doesn't stall; swallow failures here - asr() retries the download
    if (this.options.whisper?.binary) this.downloadAsrModel(this.options.whisper.model).catch(() => {
    });
  }
  /**
   * Convert audio to text using Auditory Speech Recognition
   * @param {string} path Path to audio
   * @param model Whisper model
   * @returns {Promise<any>} Extracted text
   */
  async asr(path, model) {
    if (!this.options.whisper?.binary) throw new Error("Whisper not configured");
    if (!model) model = this.options.whisper.model;
    await this.downloadAsrModel(model);
    const name = Math.random().toString(36).substring(2, 10) + "-" + path.split("/").pop();
    const output = Path.join(this.options.whisper.path || "/tmp", name);
    // whisper.cpp's -otxt/-of combination writes `<output>.txt`; the published
    // build read the extension-less path (never created) and rm'd a /tmp path
    // even when whisper.path pointed elsewhere.
    // NOTE(review): verify suffix behavior against the whisper.cpp CLI in use.
    const transcript = `${output}.txt`;
    await nodeUtils.$`rm -f ${transcript} && ${this.options.whisper.binary} -nt -np -m ${this.whisperModel} -f ${path} -otxt -of ${output}`;
    return fs.readFile(transcript, "utf-8").then((text) => text?.trim() || null).finally(() => fs.rm(transcript, { force: true }).catch(() => {
    }));
  }
  /**
   * Downloads the specified Whisper model if it is not already present locally.
   *
   * @param {string} model Whisper model that will be downloaded
   * @return {Promise<void>} A promise that resolves once the model is downloaded and saved locally.
   */
  async downloadAsrModel(model) {
    if (!this.options.whisper?.binary) throw new Error("Whisper not configured");
    if (!model) model = this.options.whisper.model;
    // Track the requested model's path; the published build always used the
    // configured default model here and in the URL, ignoring the argument.
    this.whisperModel = Path.join(this.options.whisper.path, model + ".bin");
    if (await fs.stat(this.whisperModel).then(() => true).catch(() => false)) return;
    // De-duplicate concurrent downloads of the same model
    if (this.downloads[model]) return this.downloads[model];
    this.downloads[model] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${model}.bin`).then((resp) => resp.arrayBuffer()).then((arr) => Buffer.from(arr)).then(async (buffer) => {
      await fs.writeFile(this.whisperModel, buffer);
      delete this.downloads[model];
    });
    return this.downloads[model];
  }
  /**
   * Convert image to text using Optical Character Recognition
   * @param {string} path Path to image
   * @returns {{abort: Function, response: Promise<string | null>}} Abort function & Promise of extracted text
   */
  ocr(path) {
    let worker;
    return {
      abort: () => {
        worker?.terminate();
      },
      response: new Promise(async (res, rej) => {
        try {
          worker = await tesseract_js.createWorker("eng");
          const { data } = await worker.recognize(path);
          await worker.terminate();
          res(data.text.trim() || null);
        } catch (err) {
          // Published build never rejected, leaving callers hanging on failure
          rej(err);
        }
      })
    };
  }
  /**
   * Compare the difference between two strings using tensor math
   * @param target Text that will checked
   * @param {string} searchTerms Multiple search terms to check against target
   * @returns {{avg: number, max: number, similarities: number[]}} Similarity values 0-1: 0 = unique, 1 = identical
   */
  semanticSimilarity(target, ...searchTerms) {
    // Need the target plus at least one term (2 strings total); the published
    // build demanded two search terms, contradicting its own error message.
    if (searchTerms.length < 1) throw new Error("Requires at least 2 strings to compare");
    const vector = (text, dimensions = 10) => {
      return text.toLowerCase().split("").map((char, index) => char.charCodeAt(0) * (index + 1) % dimensions / dimensions).slice(0, dimensions);
    };
    const cosineSimilarity = (v1, v2) => {
      if (v1.length !== v2.length) throw new Error("Vectors must be same length");
      // tf.tidy disposes the intermediate tensors; the published build leaked
      // every tensor it created on each call
      return tf__namespace.tidy(() => {
        const tensor1 = tf__namespace.tensor1d(v1), tensor2 = tf__namespace.tensor1d(v2);
        const dotProduct = tf__namespace.dot(tensor1, tensor2).dataSync()[0];
        const magnitude1 = tf__namespace.norm(tensor1).dataSync()[0];
        const magnitude2 = tf__namespace.norm(tensor2).dataSync()[0];
        if (magnitude1 === 0 || magnitude2 === 0) return 0;
        return dotProduct / (magnitude1 * magnitude2);
      });
    };
    const v = vector(target);
    const similarities = searchTerms.map((t) => vector(t)).map((refVector) => cosineSimilarity(v, refVector));
    return { avg: similarities.reduce((acc, s) => acc + s, 0) / similarities.length, max: Math.max(...similarities), similarities };
  }
}
533
/** Tool definition: run a shell command and return its output. */
const CliTool = {
  name: "cli",
  description: "Use the command line interface, returns any output",
  args: {
    command: { type: "string", description: "Command to run", required: true }
  },
  // Delegates straight to the shell helper; resolves with the command output
  fn: ({ command }) => nodeUtils.$`${command}`
};
539
/** Tool definition: report the current date & time as an ISO-8601 string. */
const DateTimeTool = {
  name: "get_datetime",
  description: "Get current date and time",
  fn: async () => {
    const now = new Date();
    return now.toISOString();
  }
};
544
/** Tool definition: execute a snippet in the requested language runtime. */
const ExecTool = {
  name: "exec",
  description: "Run code/scripts",
  args: {
    language: { type: "string", description: "Execution language", enum: ["cli", "node", "python"], required: true },
    code: { type: "string", description: "Code to execute", required: true }
  },
  fn: async (args, ai) => {
    try {
      // Dispatch on `language` — the declared argument. The published build
      // read the undeclared `args.type` and matched "bash", which is not in
      // the enum, so every call fell through and returned undefined.
      switch (args.language) {
        case "cli":
          return await CliTool.fn({ command: args.code }, ai);
        case "node":
          return await JSTool.fn({ code: args.code }, ai);
        case "python":
          return await PythonTool.fn({ code: args.code }, ai);
        default:
          return { error: `Unsupported language: ${args.language}` };
      }
    } catch (err) {
      return { error: err?.message || err.toString() };
    }
  }
};
567
/** Tool definition: perform an HTTP request and return the response. */
const FetchTool = {
  name: "fetch",
  description: "Make HTTP request to URL",
  args: {
    url: { type: "string", description: "URL to fetch", required: true },
    method: { type: "string", description: "HTTP method to use", enum: ["GET", "POST", "PUT", "DELETE"], default: "GET" },
    headers: { type: "object", description: "HTTP headers to send", default: {} },
    body: { type: "object", description: "HTTP body to send" }
  },
  // Builds an Http client for the target and performs a single request
  fn(args) {
    const client = new utils.Http({ url: args.url, headers: args.headers });
    return client.request({ method: args.method || "GET", body: args.body });
  }
};
578
/** Tool definition: run a CommonJS snippet with a captured console. */
const JSTool = {
  name: "exec_javascript",
  description: "Execute commonjs javascript",
  args: {
    code: { type: "string", description: "CommonJS javascript", required: true }
  },
  // Runs the snippet with an intercepted console so logs are captured and
  // returned alongside the snippet's return value
  fn: async ({ code }) => {
    const console = utils.consoleInterceptor(null);
    const value = await utils.fn({ console }, code, true).catch((err) => console.output.error.push(err));
    return { ...console.output, return: value, stdout: void 0, stderr: void 0 };
  }
};
590
/** Tool definition: run a Python snippet via `python -c`. */
const PythonTool = {
  // Unique name & description: the published build copy-pasted JSTool's
  // "exec_javascript" metadata, so find-by-name tool dispatch always resolved
  // to JSTool and this tool was unreachable.
  name: "exec_python",
  description: "Execute python code",
  args: {
    code: { type: "string", description: "Python code", required: true }
  },
  fn: async (args) => ({ result: nodeUtils.$Sync`python -c "${args.code}"` })
};
598
/**
 * Tool definition: query DuckDuckGo's HTML endpoint and collect unique result
 * URLs extracted from the redirect (`uddg=`) parameter of each anchor.
 */
const SearchTool = {
  name: "search",
  description: "Use a search engine to find relevant URLs, should be changed with fetch to scrape sources",
  args: {
    query: { type: "string", description: "Search string", required: true },
    length: { type: "string", description: "Number of results to return", default: 5 }
  },
  fn: async (args) => {
    const html = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(args.query)}`, {
      headers: { "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)", "Accept-Language": "en-US,en;q=0.9" }
    }).then((resp) => resp.text());
    const anchorRegex = /<a .*?href="(.+?)".+?<\/a>/g;
    const found = new utils.ASet();
    const limit = args.length || 5;
    let hit;
    while ((hit = anchorRegex.exec(html)) !== null) {
      // Result links point at a redirect; the real target is the uddg param
      let url = /uddg=(.+)&amp?/.exec(decodeURIComponent(hit[1]))?.[1];
      if (url) url = decodeURIComponent(url);
      if (url) found.add(url);
      if (found.size >= limit) break;
    }
    return found;
  }
};
621
+ exports2.Ai = Ai;
622
+ exports2.Anthropic = Anthropic;
623
+ exports2.CliTool = CliTool;
624
+ exports2.DateTimeTool = DateTimeTool;
625
+ exports2.ExecTool = ExecTool;
626
+ exports2.FetchTool = FetchTool;
627
+ exports2.JSTool = JSTool;
628
+ exports2.LLM = LLM;
629
+ exports2.PythonTool = PythonTool;
630
+ exports2.SearchTool = SearchTool;
631
+ Object.defineProperty(exports2, Symbol.toStringTag, { value: "Module" });
632
+ });
633
+ //# sourceMappingURL=index.js.map