@ztimson/ai-utils 0.1.13 → 0.1.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/ai.d.ts CHANGED
@@ -7,8 +7,6 @@ export type AiOptions = LLMOptions & {
     model: string;
     /** Path to models */
     path: string;
-    /** Path to storage location for temporary files */
-    temp?: string;
   };
 };
 export declare class Ai {
@@ -24,7 +22,10 @@ export declare class Ai {
    * @param model Whisper model
    * @returns {Promise<any>} Extracted text
    */
-  asr(path: string, model?: string): Promise<string | null>;
+  asr(path: string, model?: string): {
+    abort: () => void;
+    response: Promise<string | null>;
+  };
   /**
    * Downloads the specified Whisper model if it is not already present locally.
    *
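The visible API change in this declaration diff: `asr()` now returns an abortable handle, `{abort, response}`, matching the shape `ocr()` already exposes, instead of a bare `Promise<string | null>`, and the `whisper.temp` option is gone because the rewritten implementation no longer writes temporary transcript files (see the `index.js` diff below). A rough consumer-side sketch of the new call shape; the constructor values are illustrative placeholders, not defaults shipped by this package:

```ts
import {Ai} from '@ztimson/ai-utils';

// Placeholder configuration for illustration only.
const ai = new Ai({
  model: 'ollama',
  ollama: {host: 'http://localhost:11434', model: 'llama3'},
  whisper: {
    binary: '/usr/local/bin/whisper-cli',  // whisper.cpp executable
    model: 'ggml-base.en',                 // resolved to ggml-base.en.bin
    path: '/var/lib/whisper/models',       // where models are cached/downloaded
  },
});

// 0.1.13: const text = await ai.asr('/path/to/audio.wav');
// 0.1.15: destructure the handle, await the response, abort if needed.
const {abort, response} = ai.asr('/path/to/audio.wav');
const timer = setTimeout(abort, 30_000);   // give up after 30s; the promise then rejects
try {
  console.log(await response);             // transcribed text, or null when empty
} catch (err) {
  console.error('Transcription aborted or failed:', err);
} finally {
  clearTimeout(timer);
}
```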
package/dist/index.js CHANGED
@@ -1,8 +1,8 @@
- (function(u,w){typeof exports=="object"&&typeof module<"u"?w(exports,require("@ztimson/node-utils"),require("tesseract.js"),require("@ztimson/utils"),require("@anthropic-ai/sdk"),require("ollama"),require("openai"),require("node:fs/promises"),require("node:path"),require("@tensorflow/tfjs")):typeof define=="function"&&define.amd?define(["exports","@ztimson/node-utils","tesseract.js","@ztimson/utils","@anthropic-ai/sdk","ollama","openai","node:fs/promises","node:path","@tensorflow/tfjs"],w):(u=typeof globalThis<"u"?globalThis:u||self,w(u.utils={},u.nodeUtils,u.tesseract_js,u.utils,u.sdk,u.ollama,u.openai,u.fs,u.Path,u.tf))})(this,(function(u,w,P,p,v,A,M,b,k,$){"use strict";function E(l){const t=Object.create(null,{[Symbol.toStringTag]:{value:"Module"}});if(l){for(const e in l)if(e!=="default"){const n=Object.getOwnPropertyDescriptor(l,e);Object.defineProperty(t,e,n.get?n:{enumerable:!0,get:()=>l[e]})}}return t.default=l,Object.freeze(t)}const _=E($);class S{}class j extends S{constructor(t,e,n){super(),this.ai=t,this.apiToken=e,this.model=n,this.client=new v.Anthropic({apiKey:e})}client;toStandard(t){for(let e=0;e<t.length;e++){const n=e;typeof t[n].content!="string"&&(t[n].role=="assistant"?t[n].content.filter(s=>s.type=="tool_use").forEach(s=>{e++,t.splice(e,0,{role:"tool",id:s.id,name:s.name,args:s.input})}):t[n].role=="user"&&t[n].content.filter(s=>s.type=="tool_result").forEach(s=>{const i=t.find(f=>f.id==s.tool_use_id);i[s.is_error?"error":"content"]=s.content}),t[n].content=t[n].content.filter(s=>s.type=="text").map(s=>s.text).join(`
+ (function(u,_){typeof exports=="object"&&typeof module<"u"?_(exports,require("tesseract.js"),require("@ztimson/utils"),require("@anthropic-ai/sdk"),require("ollama"),require("openai"),require("node:fs/promises"),require("node:path"),require("@tensorflow/tfjs"),require("node:child_process"),require("@ztimson/node-utils")):typeof define=="function"&&define.amd?define(["exports","tesseract.js","@ztimson/utils","@anthropic-ai/sdk","ollama","openai","node:fs/promises","node:path","@tensorflow/tfjs","node:child_process","@ztimson/node-utils"],_):(u=typeof globalThis<"u"?globalThis:u||self,_(u.utils={},u.tesseract_js,u.utils,u.sdk,u.ollama,u.openai,u.fs,u.Path,u.tf,u.node_child_process,u.nodeUtils))})(this,(function(u,_,p,O,v,A,S,E,M,J,k){"use strict";function N(m){const t=Object.create(null,{[Symbol.toStringTag]:{value:"Module"}});if(m){for(const e in m)if(e!=="default"){const n=Object.getOwnPropertyDescriptor(m,e);Object.defineProperty(t,e,n.get?n:{enumerable:!0,get:()=>m[e]})}}return t.default=m,Object.freeze(t)}const y=N(M);class b{}class j extends b{constructor(t,e,n){super(),this.ai=t,this.apiToken=e,this.model=n,this.client=new O.Anthropic({apiKey:e})}client;toStandard(t){for(let e=0;e<t.length;e++){const n=e;typeof t[n].content!="string"&&(t[n].role=="assistant"?t[n].content.filter(s=>s.type=="tool_use").forEach(s=>{e++,t.splice(e,0,{role:"tool",id:s.id,name:s.name,args:s.input})}):t[n].role=="user"&&t[n].content.filter(s=>s.type=="tool_result").forEach(s=>{const l=t.find(f=>f.id==s.tool_use_id);l[s.is_error?"error":"content"]=s.content}),t[n].content=t[n].content.filter(s=>s.type=="text").map(s=>s.text).join(`
 
- `))}return t.filter(e=>!!e.content)}fromStandard(t){for(let e=0;e<t.length;e++)if(t[e].role=="tool"){const n=t[e];t.splice(e,1,{role:"assistant",content:[{type:"tool_use",id:n.id,name:n.name,input:n.args}]},{role:"user",content:[{type:"tool_result",tool_use_id:n.id,is_error:!!n.error,content:n.error||n.content}]}),e++}return t}ask(t,e={}){const n=new AbortController,s=new Promise(async(i,f)=>{let c=this.fromStandard([...e.history||[],{role:"user",content:t}]);e.compress&&(c=await this.ai.llm.compress(c,e.compress.max,e.compress.min,e));const d={model:e.model||this.model,max_tokens:e.max_tokens||this.ai.options.max_tokens||4096,system:e.system||this.ai.options.system||"",temperature:e.temperature||this.ai.options.temperature||.7,tools:(e.tools||this.ai.options.tools||[]).map(o=>({name:o.name,description:o.description,input_schema:{type:"object",properties:o.args?p.objectMap(o.args,(r,a)=>({...a,required:void 0})):{},required:o.args?Object.entries(o.args).filter(r=>r[1].required).map(r=>r[0]):[]},fn:void 0})),messages:c,stream:!!e.stream};let m;do{if(m=await this.client.messages.create(d),e.stream){m.content=[];for await(const r of m){if(n.signal.aborted)break;if(r.type==="content_block_start")r.content_block.type==="text"?m.content.push({type:"text",text:""}):r.content_block.type==="tool_use"&&m.content.push({type:"tool_use",id:r.content_block.id,name:r.content_block.name,input:""});else if(r.type==="content_block_delta")if(r.delta.type==="text_delta"){const a=r.delta.text;m.content.at(-1).text+=a,e.stream({text:a})}else r.delta.type==="input_json_delta"&&(m.content.at(-1).input+=r.delta.partial_json);else if(r.type==="content_block_stop"){const a=m.content.at(-1);a.input!=null&&(a.input=a.input?p.JSONAttemptParse(a.input,{}):{})}else if(r.type==="message_stop")break}}const o=m.content.filter(r=>r.type==="tool_use");if(o.length&&!n.signal.aborted){c.push({role:"assistant",content:m.content});const r=await Promise.all(o.map(async a=>{const h=e.tools?.find(p.findByProp("name",a.name));if(!h)return{tool_use_id:a.id,is_error:!0,content:"Tool not found"};try{const g=await h.fn(a.input,this.ai);return{type:"tool_result",tool_use_id:a.id,content:p.JSONSanitize(g)}}catch(g){return{type:"tool_result",tool_use_id:a.id,is_error:!0,content:g?.message||g?.toString()||"Unknown"}}}));c.push({role:"user",content:r}),d.messages=c}}while(!n.signal.aborted&&m.content.some(o=>o.type==="tool_use"));e.stream&&e.stream({done:!0}),i(this.toStandard([...c,{role:"assistant",content:m.content.filter(o=>o.type=="text").map(o=>o.text).join(`
+ `))}return t.filter(e=>!!e.content)}fromStandard(t){for(let e=0;e<t.length;e++)if(t[e].role=="tool"){const n=t[e];t.splice(e,1,{role:"assistant",content:[{type:"tool_use",id:n.id,name:n.name,input:n.args}]},{role:"user",content:[{type:"tool_result",tool_use_id:n.id,is_error:!!n.error,content:n.error||n.content}]}),e++}return t}ask(t,e={}){const n=new AbortController,s=new Promise(async(l,f)=>{let i=this.fromStandard([...e.history||[],{role:"user",content:t}]);e.compress&&(i=await this.ai.llm.compress(i,e.compress.max,e.compress.min,e));const d={model:e.model||this.model,max_tokens:e.max_tokens||this.ai.options.max_tokens||4096,system:e.system||this.ai.options.system||"",temperature:e.temperature||this.ai.options.temperature||.7,tools:(e.tools||this.ai.options.tools||[]).map(o=>({name:o.name,description:o.description,input_schema:{type:"object",properties:o.args?p.objectMap(o.args,(r,a)=>({...a,required:void 0})):{},required:o.args?Object.entries(o.args).filter(r=>r[1].required).map(r=>r[0]):[]},fn:void 0})),messages:i,stream:!!e.stream};let c;do{if(c=await this.client.messages.create(d),e.stream){c.content=[];for await(const r of c){if(n.signal.aborted)break;if(r.type==="content_block_start")r.content_block.type==="text"?c.content.push({type:"text",text:""}):r.content_block.type==="tool_use"&&c.content.push({type:"tool_use",id:r.content_block.id,name:r.content_block.name,input:""});else if(r.type==="content_block_delta")if(r.delta.type==="text_delta"){const a=r.delta.text;c.content.at(-1).text+=a,e.stream({text:a})}else r.delta.type==="input_json_delta"&&(c.content.at(-1).input+=r.delta.partial_json);else if(r.type==="content_block_stop"){const a=c.content.at(-1);a.input!=null&&(a.input=a.input?p.JSONAttemptParse(a.input,{}):{})}else if(r.type==="message_stop")break}}const o=c.content.filter(r=>r.type==="tool_use");if(o.length&&!n.signal.aborted){i.push({role:"assistant",content:c.content});const r=await Promise.all(o.map(async a=>{const h=e.tools?.find(p.findByProp("name",a.name));if(!h)return{tool_use_id:a.id,is_error:!0,content:"Tool not found"};try{const g=await h.fn(a.input,this.ai);return{type:"tool_result",tool_use_id:a.id,content:p.JSONSanitize(g)}}catch(g){return{type:"tool_result",tool_use_id:a.id,is_error:!0,content:g?.message||g?.toString()||"Unknown"}}}));i.push({role:"user",content:r}),d.messages=i}}while(!n.signal.aborted&&c.content.some(o=>o.type==="tool_use"));e.stream&&e.stream({done:!0}),l(this.toStandard([...i,{role:"assistant",content:c.content.filter(o=>o.type=="text").map(o=>o.text).join(`
 
- `)}]))});return Object.assign(s,{abort:()=>n.abort()})}}class J extends S{constructor(t,e,n){super(),this.ai=t,this.host=e,this.model=n,this.client=new A.Ollama({host:e})}client;toStandard(t){for(let e=0;e<t.length;e++)if(t[e].role=="assistant"&&t[e].tool_calls)t[e].content?delete t[e].tool_calls:(t.splice(e,1),e--);else if(t[e].role=="tool"){const n=t[e].content.startsWith('{"error":');t[e]={role:"tool",name:t[e].tool_name,args:t[e].args,[n?"error":"content"]:t[e].content}}return t}fromStandard(t){return t.map(e=>e.role!="tool"?e:{role:"tool",tool_name:e.name,content:e.error||e.content})}ask(t,e={}){const n=new AbortController,s=new Promise(async(i,f)=>{let c=e.system||this.ai.options.system,d=this.fromStandard([...e.history||[],{role:"user",content:t}]);d[0].roll=="system"&&(c?d.shift():c=d.shift()),e.compress&&(d=await this.ai.llm.compress(d,e.compress.max,e.compress.min)),e.system&&d.unshift({role:"system",content:c});const m={model:e.model||this.model,messages:d,stream:!!e.stream,signal:n.signal,options:{temperature:e.temperature||this.ai.options.temperature||.7,num_predict:e.max_tokens||this.ai.options.max_tokens||4096},tools:(e.tools||this.ai.options.tools||[]).map(r=>({type:"function",function:{name:r.name,description:r.description,parameters:{type:"object",properties:r.args?p.objectMap(r.args,(a,h)=>({...h,required:void 0})):{},required:r.args?Object.entries(r.args).filter(a=>a[1].required).map(a=>a[0]):[]}}}))};let o;do{if(o=await this.client.chat(m),e.stream){o.message={role:"assistant",content:"",tool_calls:[]};for await(const r of o)if(n.signal.aborted||(r.message?.content&&(o.message.content+=r.message.content,e.stream({text:r.message.content})),r.message?.tool_calls&&(o.message.tool_calls=r.message.tool_calls),r.done))break}if(o.message?.tool_calls?.length&&!n.signal.aborted){d.push(o.message);const r=await Promise.all(o.message.tool_calls.map(async a=>{const h=(e.tools||this.ai.options.tools)?.find(p.findByProp("name",a.function.name));if(!h)return{role:"tool",tool_name:a.function.name,content:'{"error": "Tool not found"}'};const g=typeof a.function.arguments=="string"?p.JSONAttemptParse(a.function.arguments,{}):a.function.arguments;try{const y=await h.fn(g,this.ai);return{role:"tool",tool_name:a.function.name,args:g,content:p.JSONSanitize(y)}}catch(y){return{role:"tool",tool_name:a.function.name,args:g,content:p.JSONSanitize({error:y?.message||y?.toString()||"Unknown"})}}}));d.push(...r),m.messages=d}}while(!n.signal.aborted&&o.message?.tool_calls?.length);e.stream&&e.stream({done:!0}),i(this.toStandard([...d,{role:"assistant",content:o.message?.content}]))});return Object.assign(s,{abort:()=>n.abort()})}}class N extends S{constructor(t,e,n){super(),this.ai=t,this.apiToken=e,this.model=n,this.client=new M.OpenAI({apiKey:e})}client;toStandard(t){for(let e=0;e<t.length;e++){const n=t[e];if(n.role==="assistant"&&n.tool_calls){const s=n.tool_calls.map(i=>({role:"tool",id:i.id,name:i.function.name,args:p.JSONAttemptParse(i.function.arguments,{})}));t.splice(e,1,...s),e+=s.length-1}else if(n.role==="tool"&&n.content){const s=t.find(i=>n.tool_call_id==i.id);s&&(n.content.includes('"error":')?s.error=n.content:s.content=n.content),t.splice(e,1),e--}}return t}fromStandard(t){return t.reduce((e,n)=>(n.role==="tool"?e.push({role:"assistant",content:null,tool_calls:[{id:n.id,type:"function",function:{name:n.name,arguments:JSON.stringify(n.args)}}],refusal:null,annotations:[]},{role:"tool",tool_call_id:n.id,content:n.error||n.content}):e.push(n),e),[])}ask(t,e={}){const n=new 
AbortController,s=new Promise(async(i,f)=>{let c=this.fromStandard([...e.history||[],{role:"user",content:t}]);e.compress&&(c=await this.ai.llm.compress(c,e.compress.max,e.compress.min,e));const d={model:e.model||this.model,messages:c,stream:!!e.stream,max_tokens:e.max_tokens||this.ai.options.max_tokens||4096,temperature:e.temperature||this.ai.options.temperature||.7,tools:(e.tools||this.ai.options.tools||[]).map(o=>({type:"function",function:{name:o.name,description:o.description,parameters:{type:"object",properties:o.args?p.objectMap(o.args,(r,a)=>({...a,required:void 0})):{},required:o.args?Object.entries(o.args).filter(r=>r[1].required).map(r=>r[0]):[]}}}))};let m;do{if(m=await this.client.chat.completions.create(d),e.stream){m.choices=[];for await(const r of m){if(n.signal.aborted)break;r.choices[0].delta.content&&e.stream({text:r.choices[0].delta.content})}}const o=m.choices[0].message.tool_calls||[];if(o.length&&!n.signal.aborted){c.push(m.choices[0].message);const r=await Promise.all(o.map(async a=>{const h=e.tools?.find(p.findByProp("name",a.function.name));if(!h)return{role:"tool",tool_call_id:a.id,content:'{"error": "Tool not found"}'};try{const g=p.JSONAttemptParse(a.function.arguments,{}),y=await h.fn(g,this.ai);return{role:"tool",tool_call_id:a.id,content:p.JSONSanitize(y)}}catch(g){return{role:"tool",tool_call_id:a.id,content:p.JSONSanitize({error:g?.message||g?.toString()||"Unknown"})}}}));c.push(...r),d.messages=c}}while(!n.signal.aborted&&m.choices?.[0]?.message?.tool_calls?.length);e.stream&&e.stream({done:!0}),i(this.toStandard([...c,{role:"assistant",content:m.choices[0].message.content||""}]))});return Object.assign(s,{abort:()=>n.abort()})}}class x{constructor(t,e){this.ai=t,this.options=e,e.anthropic?.token&&(this.providers.anthropic=new j(this.ai,e.anthropic.token,e.anthropic.model)),e.ollama?.host&&(this.providers.ollama=new J(this.ai,e.ollama.host,e.ollama.model)),e.openAi?.token&&(this.providers.openAi=new N(this.ai,e.openAi.token,e.openAi.model))}providers={};ask(t,e={}){let n=[null,null];if(e.model&&(typeof e.model=="object"?n=e.model:n=[e.model,this.options[e.model]?.model]),(!e.model||n[1]==null)&&(typeof this.options.model=="object"?n=this.options.model:n=[this.options.model,this.options[this.options.model]?.model]),!n[0]||!n[1])throw new Error(`Unknown LLM provider or model: ${n[0]} / ${n[1]}`);return this.providers[n[0]].ask(t,{...e,model:n[1]})}async compress(t,e,n,s){if(this.estimateTokens(t)<e)return t;let i=0,f=0;for(let o of t.toReversed())if(f+=this.estimateTokens(o.content),f<n)i++;else break;if(t.length<=i)return t;const c=i==0?[]:t.slice(-i),d=(i==0?t:t.slice(0,-i)).filter(o=>o.role==="assistant"||o.role==="user");return[{role:"assistant",content:`Conversation Summary: ${await this.summarize(d.map(o=>`${o.role}: ${o.content}`).join(`
+ `)}]))});return Object.assign(s,{abort:()=>n.abort()})}}class z extends b{constructor(t,e,n){super(),this.ai=t,this.host=e,this.model=n,this.client=new v.Ollama({host:e})}client;toStandard(t){for(let e=0;e<t.length;e++)if(t[e].role=="assistant"&&t[e].tool_calls)t[e].content?delete t[e].tool_calls:(t.splice(e,1),e--);else if(t[e].role=="tool"){const n=t[e].content.startsWith('{"error":');t[e]={role:"tool",name:t[e].tool_name,args:t[e].args,[n?"error":"content"]:t[e].content}}return t}fromStandard(t){return t.map(e=>e.role!="tool"?e:{role:"tool",tool_name:e.name,content:e.error||e.content})}ask(t,e={}){const n=new AbortController,s=new Promise(async(l,f)=>{let i=e.system||this.ai.options.system,d=this.fromStandard([...e.history||[],{role:"user",content:t}]);d[0].roll=="system"&&(i?d.shift():i=d.shift()),e.compress&&(d=await this.ai.llm.compress(d,e.compress.max,e.compress.min)),e.system&&d.unshift({role:"system",content:i});const c={model:e.model||this.model,messages:d,stream:!!e.stream,signal:n.signal,options:{temperature:e.temperature||this.ai.options.temperature||.7,num_predict:e.max_tokens||this.ai.options.max_tokens||4096},tools:(e.tools||this.ai.options.tools||[]).map(r=>({type:"function",function:{name:r.name,description:r.description,parameters:{type:"object",properties:r.args?p.objectMap(r.args,(a,h)=>({...h,required:void 0})):{},required:r.args?Object.entries(r.args).filter(a=>a[1].required).map(a=>a[0]):[]}}}))};let o;do{if(o=await this.client.chat(c),e.stream){o.message={role:"assistant",content:"",tool_calls:[]};for await(const r of o)if(n.signal.aborted||(r.message?.content&&(o.message.content+=r.message.content,e.stream({text:r.message.content})),r.message?.tool_calls&&(o.message.tool_calls=r.message.tool_calls),r.done))break}if(o.message?.tool_calls?.length&&!n.signal.aborted){d.push(o.message);const r=await Promise.all(o.message.tool_calls.map(async a=>{const h=(e.tools||this.ai.options.tools)?.find(p.findByProp("name",a.function.name));if(!h)return{role:"tool",tool_name:a.function.name,content:'{"error": "Tool not found"}'};const g=typeof a.function.arguments=="string"?p.JSONAttemptParse(a.function.arguments,{}):a.function.arguments;try{const w=await h.fn(g,this.ai);return{role:"tool",tool_name:a.function.name,args:g,content:p.JSONSanitize(w)}}catch(w){return{role:"tool",tool_name:a.function.name,args:g,content:p.JSONSanitize({error:w?.message||w?.toString()||"Unknown"})}}}));d.push(...r),c.messages=d}}while(!n.signal.aborted&&o.message?.tool_calls?.length);e.stream&&e.stream({done:!0}),l(this.toStandard([...d,{role:"assistant",content:o.message?.content}]))});return Object.assign(s,{abort:()=>n.abort()})}}class U extends b{constructor(t,e,n){super(),this.ai=t,this.apiToken=e,this.model=n,this.client=new A.OpenAI({apiKey:e})}client;toStandard(t){for(let e=0;e<t.length;e++){const n=t[e];if(n.role==="assistant"&&n.tool_calls){const s=n.tool_calls.map(l=>({role:"tool",id:l.id,name:l.function.name,args:p.JSONAttemptParse(l.function.arguments,{})}));t.splice(e,1,...s),e+=s.length-1}else if(n.role==="tool"&&n.content){const s=t.find(l=>n.tool_call_id==l.id);s&&(n.content.includes('"error":')?s.error=n.content:s.content=n.content),t.splice(e,1),e--}}return t}fromStandard(t){return t.reduce((e,n)=>(n.role==="tool"?e.push({role:"assistant",content:null,tool_calls:[{id:n.id,type:"function",function:{name:n.name,arguments:JSON.stringify(n.args)}}],refusal:null,annotations:[]},{role:"tool",tool_call_id:n.id,content:n.error||n.content}):e.push(n),e),[])}ask(t,e={}){const n=new 
AbortController,s=new Promise(async(l,f)=>{let i=this.fromStandard([...e.history||[],{role:"user",content:t}]);e.compress&&(i=await this.ai.llm.compress(i,e.compress.max,e.compress.min,e));const d={model:e.model||this.model,messages:i,stream:!!e.stream,max_tokens:e.max_tokens||this.ai.options.max_tokens||4096,temperature:e.temperature||this.ai.options.temperature||.7,tools:(e.tools||this.ai.options.tools||[]).map(o=>({type:"function",function:{name:o.name,description:o.description,parameters:{type:"object",properties:o.args?p.objectMap(o.args,(r,a)=>({...a,required:void 0})):{},required:o.args?Object.entries(o.args).filter(r=>r[1].required).map(r=>r[0]):[]}}}))};let c;do{if(c=await this.client.chat.completions.create(d),e.stream){c.choices=[];for await(const r of c){if(n.signal.aborted)break;r.choices[0].delta.content&&e.stream({text:r.choices[0].delta.content})}}const o=c.choices[0].message.tool_calls||[];if(o.length&&!n.signal.aborted){i.push(c.choices[0].message);const r=await Promise.all(o.map(async a=>{const h=e.tools?.find(p.findByProp("name",a.function.name));if(!h)return{role:"tool",tool_call_id:a.id,content:'{"error": "Tool not found"}'};try{const g=p.JSONAttemptParse(a.function.arguments,{}),w=await h.fn(g,this.ai);return{role:"tool",tool_call_id:a.id,content:p.JSONSanitize(w)}}catch(g){return{role:"tool",tool_call_id:a.id,content:p.JSONSanitize({error:g?.message||g?.toString()||"Unknown"})}}}));i.push(...r),d.messages=i}}while(!n.signal.aborted&&c.choices?.[0]?.message?.tool_calls?.length);e.stream&&e.stream({done:!0}),l(this.toStandard([...i,{role:"assistant",content:c.choices[0].message.content||""}]))});return Object.assign(s,{abort:()=>n.abort()})}}class x{constructor(t,e){this.ai=t,this.options=e,e.anthropic?.token&&(this.providers.anthropic=new j(this.ai,e.anthropic.token,e.anthropic.model)),e.ollama?.host&&(this.providers.ollama=new z(this.ai,e.ollama.host,e.ollama.model)),e.openAi?.token&&(this.providers.openAi=new U(this.ai,e.openAi.token,e.openAi.model))}providers={};ask(t,e={}){let n=[null,null];if(e.model&&(typeof e.model=="object"?n=e.model:n=[e.model,this.options[e.model]?.model]),(!e.model||n[1]==null)&&(typeof this.options.model=="object"?n=this.options.model:n=[this.options.model,this.options[this.options.model]?.model]),!n[0]||!n[1])throw new Error(`Unknown LLM provider or model: ${n[0]} / ${n[1]}`);return this.providers[n[0]].ask(t,{...e,model:n[1]})}async compress(t,e,n,s){if(this.estimateTokens(t)<e)return t;let l=0,f=0;for(let o of t.toReversed())if(f+=this.estimateTokens(o.content),f<n)l++;else break;if(t.length<=l)return t;const i=l==0?[]:t.slice(-l),d=(l==0?t:t.slice(0,-l)).filter(o=>o.role==="assistant"||o.role==="user");return[{role:"assistant",content:`Conversation Summary: ${await this.summarize(d.map(o=>`${o.role}: ${o.content}`).join(`
 
- `),250,s)}`},...c]}estimateTokens(t){const e=JSON.stringify(t);return Math.ceil(e.length/4*1.2)}async json(t,e){let n=await this.ask(t,{system:"Respond using a JSON blob",...e});return n?.[0]?.content?p.JSONAttemptParse(new RegExp("{[sS]*}").exec(n[0].content),{}):{}}summarize(t,e,n){return this.ask(t,{system:`Generate a brief summary <= ${e} tokens. Output nothing else`,temperature:.3,...n}).then(s=>s.pop()?.content||null)}}class z{constructor(t){this.options=t,this.llm=new x(this,t),this.options.whisper?.binary&&(this.whisperModel=this.options.whisper?.model.endsWith(".bin")?this.options.whisper?.model:this.options.whisper?.model+".bin",console.log("constructor: "+this.options.whisper.model+" -> "+this.whisperModel),this.downloadAsrModel())}downloads={};whisperModel;llm;async asr(t,e=this.whisperModel){if(!this.options.whisper?.binary)throw new Error("Whisper not configured");const n=await this.downloadAsrModel(e),s=Math.random().toString(36).substring(2,10)+"-"+t.split("/").pop()+".txt",i=k.join(this.options.whisper.temp||"/tmp",s);return console.log("ASR: "+this.options.whisper.model+" -> "+this.whisperModel),console.log(`rm -f ${i} && ${this.options.whisper.binary} -nt -np -m ${n} -f ${t} -otxt -of ${i}`),await w.$`rm -f ${i} && ${this.options.whisper.binary} -nt -np -m ${n} -f ${t} -otxt -of ${i}`,b.readFile(i,"utf-8").then(f=>f?.trim()||null).finally(()=>b.rm(i,{force:!0}).catch(()=>{}))}async downloadAsrModel(t=this.whisperModel){if(!this.options.whisper?.binary)throw new Error("Whisper not configured");t.endsWith(".bin")||(t+=".bin");const e=k.join(this.options.whisper.path,t);return console.log("Download: "+e),await b.stat(e).then(()=>!0).catch(()=>!1)?(console.log("Exists!"),e):this.downloads[t]?this.downloads[t]:(this.downloads[t]=fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${t}`).then(n=>n.arrayBuffer()).then(n=>Buffer.from(n)).then(async n=>(await b.writeFile(e,n),delete this.downloads[t],e)),this.downloads[t])}ocr(t){let e;return{abort:()=>{e?.terminate()},response:new Promise(async n=>{e=await P.createWorker("eng");const{data:s}=await e.recognize(t);await e.terminate(),n(s.text.trim()||null)})}}semanticSimilarity(t,...e){if(e.length<2)throw new Error("Requires at least 2 strings to compare");const n=(c,d=10)=>c.toLowerCase().split("").map((m,o)=>m.charCodeAt(0)*(o+1)%d/d).slice(0,d),s=(c,d)=>{if(c.length!==d.length)throw new Error("Vectors must be same length");const m=_.tensor1d(c),o=_.tensor1d(d),r=_.dot(m,o),a=_.norm(m),h=_.norm(o);return a.dataSync()[0]===0||h.dataSync()[0]===0?0:r.dataSync()[0]/(a.dataSync()[0]*h.dataSync()[0])},i=n(t),f=e.map(c=>n(c)).map(c=>s(i,c));return{avg:f.reduce((c,d)=>c+d,0)/f.length,max:Math.max(...f),similarities:f}}}const T={name:"cli",description:"Use the command line interface, returns any output",args:{command:{type:"string",description:"Command to run",required:!0}},fn:l=>w.$`${l.command}`},L={name:"get_datetime",description:"Get current date and time",args:{},fn:async()=>new Date().toISOString()},U={name:"exec",description:"Run code/scripts",args:{language:{type:"string",description:"Execution language",enum:["cli","node","python"],required:!0},code:{type:"string",description:"Code to execute",required:!0}},fn:async(l,t)=>{try{switch(l.type){case"bash":return await T.fn({command:l.code},t);case"node":return await q.fn({code:l.code},t);case"python":return await O.fn({code:l.code},t)}}catch(e){return{error:e?.message||e.toString()}}}},R={name:"fetch",description:"Make HTTP request to 
URL",args:{url:{type:"string",description:"URL to fetch",required:!0},method:{type:"string",description:"HTTP method to use",enum:["GET","POST","PUT","DELETE"],default:"GET"},headers:{type:"object",description:"HTTP headers to send",default:{}},body:{type:"object",description:"HTTP body to send"}},fn:l=>new p.Http({url:l.url,headers:l.headers}).request({method:l.method||"GET",body:l.body})},q={name:"exec_javascript",description:"Execute commonjs javascript",args:{code:{type:"string",description:"CommonJS javascript",required:!0}},fn:async l=>{const t=p.consoleInterceptor(null),e=await p.fn({console:t},l.code,!0).catch(n=>t.output.error.push(n));return{...t.output,return:e,stdout:void 0,stderr:void 0}}},O={name:"exec_javascript",description:"Execute commonjs javascript",args:{code:{type:"string",description:"CommonJS javascript",required:!0}},fn:async l=>({result:w.$Sync`python -c "${l.code}"`})},W={name:"search",description:"Use a search engine to find relevant URLs, should be changed with fetch to scrape sources",args:{query:{type:"string",description:"Search string",required:!0},length:{type:"string",description:"Number of results to return",default:5}},fn:async l=>{const t=await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(l.query)}`,{headers:{"User-Agent":"Mozilla/5.0 (Windows NT 10.0; Win64; x64)","Accept-Language":"en-US,en;q=0.9"}}).then(i=>i.text());let e,n=/<a .*?href="(.+?)".+?<\/a>/g;const s=new p.ASet;for(;(e=n.exec(t))!==null;){let i=/uddg=(.+)&amp?/.exec(decodeURIComponent(e[1]))?.[1];if(i&&(i=decodeURIComponent(i)),i&&s.add(i),s.size>=(l.length||5))break}return s}};u.Ai=z,u.Anthropic=j,u.CliTool=T,u.DateTimeTool=L,u.ExecTool=U,u.FetchTool=R,u.JSTool=q,u.LLM=x,u.PythonTool=O,u.SearchTool=W,Object.defineProperty(u,Symbol.toStringTag,{value:"Module"})}));
+ `),250,s)}`},...i]}estimateTokens(t){const e=JSON.stringify(t);return Math.ceil(e.length/4*1.2)}async json(t,e){let n=await this.ask(t,{system:"Respond using a JSON blob",...e});return n?.[0]?.content?p.JSONAttemptParse(new RegExp("{[sS]*}").exec(n[0].content),{}):{}}summarize(t,e,n){return this.ask(t,{system:`Generate a brief summary <= ${e} tokens. Output nothing else`,temperature:.3,...n}).then(s=>s.pop()?.content||null)}}class L{constructor(t){this.options=t,this.llm=new x(this,t),this.options.whisper?.binary&&(this.whisperModel=this.options.whisper?.model.endsWith(".bin")?this.options.whisper?.model:this.options.whisper?.model+".bin",this.downloadAsrModel())}downloads={};whisperModel;llm;asr(t,e=this.whisperModel){if(!this.options.whisper?.binary)throw new Error("Whisper not configured");let n=()=>{};return{response:new Promise((l,f)=>{this.downloadAsrModel(e).then(i=>{let d="";const c=J.spawn(this.options.whisper?.binary,["-nt","-np","-m",i,"-f",t],{stdio:["ignore","pipe","ignore"]});n=()=>c.kill("SIGTERM"),c.on("error",o=>f(o)),c.stdout.on("data",o=>d+=o.toString()),c.on("close",o=>{o===0?l(d.trim()||null):f(new Error(`Exit code ${o}`))})})}),abort:n}}async downloadAsrModel(t=this.whisperModel){if(!this.options.whisper?.binary)throw new Error("Whisper not configured");t.endsWith(".bin")||(t+=".bin");const e=E.join(this.options.whisper.path,t);return await S.stat(e).then(()=>!0).catch(()=>!1)?e:this.downloads[t]?this.downloads[t]:(this.downloads[t]=fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${t}`).then(n=>n.arrayBuffer()).then(n=>Buffer.from(n)).then(async n=>(await S.writeFile(e,n),delete this.downloads[t],e)),this.downloads[t])}ocr(t){let e;return{abort:()=>{e?.terminate()},response:new Promise(async n=>{e=await _.createWorker("eng");const{data:s}=await e.recognize(t);await e.terminate(),n(s.text.trim()||null)})}}semanticSimilarity(t,...e){if(e.length<2)throw new Error("Requires at least 2 strings to compare");const n=(i,d=10)=>i.toLowerCase().split("").map((c,o)=>c.charCodeAt(0)*(o+1)%d/d).slice(0,d),s=(i,d)=>{if(i.length!==d.length)throw new Error("Vectors must be same length");const c=y.tensor1d(i),o=y.tensor1d(d),r=y.dot(c,o),a=y.norm(c),h=y.norm(o);return a.dataSync()[0]===0||h.dataSync()[0]===0?0:r.dataSync()[0]/(a.dataSync()[0]*h.dataSync()[0])},l=n(t),f=e.map(i=>n(i)).map(i=>s(l,i));return{avg:f.reduce((i,d)=>i+d,0)/f.length,max:Math.max(...f),similarities:f}}}const T={name:"cli",description:"Use the command line interface, returns any output",args:{command:{type:"string",description:"Command to run",required:!0}},fn:m=>k.$`${m.command}`},$={name:"get_datetime",description:"Get current date and time",args:{},fn:async()=>new Date().toISOString()},R={name:"exec",description:"Run code/scripts",args:{language:{type:"string",description:"Execution language",enum:["cli","node","python"],required:!0},code:{type:"string",description:"Code to execute",required:!0}},fn:async(m,t)=>{try{switch(m.type){case"bash":return await T.fn({command:m.code},t);case"node":return await q.fn({code:m.code},t);case"python":return await P.fn({code:m.code},t)}}catch(e){return{error:e?.message||e.toString()}}}},I={name:"fetch",description:"Make HTTP request to URL",args:{url:{type:"string",description:"URL to fetch",required:!0},method:{type:"string",description:"HTTP method to use",enum:["GET","POST","PUT","DELETE"],default:"GET"},headers:{type:"object",description:"HTTP headers to send",default:{}},body:{type:"object",description:"HTTP body to send"}},fn:m=>new 
p.Http({url:m.url,headers:m.headers}).request({method:m.method||"GET",body:m.body})},q={name:"exec_javascript",description:"Execute commonjs javascript",args:{code:{type:"string",description:"CommonJS javascript",required:!0}},fn:async m=>{const t=p.consoleInterceptor(null),e=await p.fn({console:t},m.code,!0).catch(n=>t.output.error.push(n));return{...t.output,return:e,stdout:void 0,stderr:void 0}}},P={name:"exec_javascript",description:"Execute commonjs javascript",args:{code:{type:"string",description:"CommonJS javascript",required:!0}},fn:async m=>({result:k.$Sync`python -c "${m.code}"`})},W={name:"search",description:"Use a search engine to find relevant URLs, should be changed with fetch to scrape sources",args:{query:{type:"string",description:"Search string",required:!0},length:{type:"string",description:"Number of results to return",default:5}},fn:async m=>{const t=await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(m.query)}`,{headers:{"User-Agent":"Mozilla/5.0 (Windows NT 10.0; Win64; x64)","Accept-Language":"en-US,en;q=0.9"}}).then(l=>l.text());let e,n=/<a .*?href="(.+?)".+?<\/a>/g;const s=new p.ASet;for(;(e=n.exec(t))!==null;){let l=/uddg=(.+)&amp?/.exec(decodeURIComponent(e[1]))?.[1];if(l&&(l=decodeURIComponent(l)),l&&s.add(l),s.size>=(m.length||5))break}return s}};u.Ai=L,u.Anthropic=j,u.CliTool=T,u.DateTimeTool=$,u.ExecTool=R,u.FetchTool=I,u.JSTool=q,u.LLM=x,u.PythonTool=P,u.SearchTool=W,Object.defineProperty(u,Symbol.toStringTag,{value:"Module"})}));
  //# sourceMappingURL=index.js.map
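Within the minified bundle, the substantive change behind that new signature is how transcription runs: 0.1.13 shelled out through `@ztimson/node-utils` with `-otxt`, wrote a transcript into `whisper.temp`, then read and deleted it; 0.1.15 spawns the whisper binary via `node:child_process`, streams stdout, and exposes a kill switch. A simplified, de-minified sketch of that pattern (model download and option plumbing elided; names are illustrative, not the package's internals):

```ts
import {spawn} from 'node:child_process';

// Simplified sketch of the 0.1.15 asr() flow; not the exact source.
function transcribe(binary: string, modelPath: string, audioPath: string): {
  abort: () => void;
  response: Promise<string | null>;
} {
  let kill: () => void = () => {}; // becomes a real kill switch once the process starts
  const response = new Promise<string | null>((resolve, reject) => {
    let output = '';
    // -nt: no timestamps, -np: no progress output, -m: model file, -f: audio input
    const proc = spawn(binary, ['-nt', '-np', '-m', modelPath, '-f', audioPath], {
      stdio: ['ignore', 'pipe', 'ignore'],
    });
    kill = () => proc.kill('SIGTERM');
    proc.on('error', reject);
    proc.stdout?.on('data', chunk => (output += chunk.toString()));
    proc.on('close', code => {
      if (code === 0) resolve(output.trim() || null);
      else reject(new Error(`Exit code ${code}`));
    });
  });
  return {abort: () => kill(), response};
}
```

Because the transcript is read straight from stdout, no temp directory is needed, which is why `temp` was dropped from `AiOptions.whisper`.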
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.js","sources":["../src/provider.ts","../src/antrhopic.ts","../src/ollama.ts","../src/open-ai.ts","../src/llm.ts","../src/ai.ts","../src/tools.ts"],"sourcesContent":["import {LLMMessage, LLMOptions, LLMRequest} from './llm.ts';\n\nexport type AbortablePromise<T> = Promise<T> & {abort: () => void};\n\nexport abstract class LLMProvider {\n\tabstract ask(message: string, options: LLMRequest): AbortablePromise<LLMMessage[]>;\n}\n","import {Anthropic as anthropic} from '@anthropic-ai/sdk';\nimport {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {LLMMessage, LLMRequest} from './llm.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\n\nexport class Anthropic extends LLMProvider {\n\tclient!: anthropic;\n\n\tconstructor(public readonly ai: Ai, public readonly apiToken: string, public model: string) {\n\t\tsuper();\n\t\tthis.client = new anthropic({apiKey: apiToken});\n\t}\n\n\tprivate toStandard(history: any[]): LLMMessage[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tconst orgI = i;\n\t\t\tif(typeof history[orgI].content != 'string') {\n\t\t\t\tif(history[orgI].role == 'assistant') {\n\t\t\t\t\thistory[orgI].content.filter((c: any) => c.type =='tool_use').forEach((c: any) => {\n\t\t\t\t\t\ti++;\n\t\t\t\t\t\thistory.splice(i, 0, {role: 'tool', id: c.id, name: c.name, args: c.input});\n\t\t\t\t\t});\n\t\t\t\t} else if(history[orgI].role == 'user') {\n\t\t\t\t\thistory[orgI].content.filter((c: any) => c.type =='tool_result').forEach((c: any) => {\n\t\t\t\t\t\tconst h = history.find((h: any) => h.id == c.tool_use_id);\n\t\t\t\t\t\th[c.is_error ? 'error' : 'content'] = c.content;\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\thistory[orgI].content = history[orgI].content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\\n\\n');\n\t\t\t}\n\t\t}\n\t\treturn history.filter(h => !!h.content);\n\t}\n\n\tprivate fromStandard(history: LLMMessage[]): any[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tif(history[i].role == 'tool') {\n\t\t\t\tconst h: any = history[i];\n\t\t\t\thistory.splice(i, 1,\n\t\t\t\t\t{role: 'assistant', content: [{type: 'tool_use', id: h.id, name: h.name, input: h.args}]},\n\t\t\t\t\t{role: 'user', content: [{type: 'tool_result', tool_use_id: h.id, is_error: !!h.error, content: h.error || h.content}]}\n\t\t\t\t)\n\t\t\t\ti++;\n\t\t\t}\n\t\t}\n\t\treturn history;\n\t}\n\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tconst controller = new AbortController();\n\t\tconst response = new Promise<any>(async (res, rej) => {\n\t\t\tlet history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);\n\t\t\tif(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);\n\t\t\tconst requestParams: any = {\n\t\t\t\tmodel: options.model || this.model,\n\t\t\t\tmax_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,\n\t\t\t\tsystem: options.system || this.ai.options.system || '',\n\t\t\t\ttemperature: options.temperature || this.ai.options.temperature || 0.7,\n\t\t\t\ttools: (options.tools || this.ai.options.tools || []).map(t => ({\n\t\t\t\t\tname: t.name,\n\t\t\t\t\tdescription: t.description,\n\t\t\t\t\tinput_schema: {\n\t\t\t\t\t\ttype: 'object',\n\t\t\t\t\t\tproperties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},\n\t\t\t\t\t\trequired: t.args ? 
Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []\n\t\t\t\t\t},\n\t\t\t\t\tfn: undefined\n\t\t\t\t})),\n\t\t\t\tmessages: history,\n\t\t\t\tstream: !!options.stream,\n\t\t\t};\n\n\t\t\t// Run tool changes\n\t\t\tlet resp: any;\n\t\t\tdo {\n\t\t\t\tresp = await this.client.messages.create(requestParams);\n\n\t\t\t\t// Streaming mode\n\t\t\t\tif(options.stream) {\n\t\t\t\t\tresp.content = [];\n\t\t\t\t\tfor await (const chunk of resp) {\n\t\t\t\t\t\tif(controller.signal.aborted) break;\n\t\t\t\t\t\tif(chunk.type === 'content_block_start') {\n\t\t\t\t\t\t\tif(chunk.content_block.type === 'text') {\n\t\t\t\t\t\t\t\tresp.content.push({type: 'text', text: ''});\n\t\t\t\t\t\t\t} else if(chunk.content_block.type === 'tool_use') {\n\t\t\t\t\t\t\t\tresp.content.push({type: 'tool_use', id: chunk.content_block.id, name: chunk.content_block.name, input: <any>''});\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t} else if(chunk.type === 'content_block_delta') {\n\t\t\t\t\t\t\tif(chunk.delta.type === 'text_delta') {\n\t\t\t\t\t\t\t\tconst text = chunk.delta.text;\n\t\t\t\t\t\t\t\tresp.content.at(-1).text += text;\n\t\t\t\t\t\t\t\toptions.stream({text});\n\t\t\t\t\t\t\t} else if(chunk.delta.type === 'input_json_delta') {\n\t\t\t\t\t\t\t\tresp.content.at(-1).input += chunk.delta.partial_json;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t} else if(chunk.type === 'content_block_stop') {\n\t\t\t\t\t\t\tconst last = resp.content.at(-1);\n\t\t\t\t\t\t\tif(last.input != null) last.input = last.input ? JSONAttemptParse(last.input, {}) : {};\n\t\t\t\t\t\t} else if(chunk.type === 'message_stop') {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Run tools\n\t\t\t\tconst toolCalls = resp.content.filter((c: any) => c.type === 'tool_use');\n\t\t\t\tif(toolCalls.length && !controller.signal.aborted) {\n\t\t\t\t\thistory.push({role: 'assistant', content: resp.content});\n\t\t\t\t\tconst results = await Promise.all(toolCalls.map(async (toolCall: any) => {\n\t\t\t\t\t\tconst tool = options.tools?.find(findByProp('name', toolCall.name));\n\t\t\t\t\t\tif(!tool) return {tool_use_id: toolCall.id, is_error: true, content: 'Tool not found'};\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst result = await tool.fn(toolCall.input, this.ai);\n\t\t\t\t\t\t\treturn {type: 'tool_result', tool_use_id: toolCall.id, content: JSONSanitize(result)};\n\t\t\t\t\t\t} catch (err: any) {\n\t\t\t\t\t\t\treturn {type: 'tool_result', tool_use_id: toolCall.id, is_error: true, content: err?.message || err?.toString() || 'Unknown'};\n\t\t\t\t\t\t}\n\t\t\t\t\t}));\n\t\t\t\t\thistory.push({role: 'user', content: results});\n\t\t\t\t\trequestParams.messages = history;\n\t\t\t\t}\n\t\t\t} while (!controller.signal.aborted && resp.content.some((c: any) => c.type === 'tool_use'));\n\t\t\tif(options.stream) options.stream({done: true});\n\t\t\tres(this.toStandard([...history, {\n\t\t\t\trole: 'assistant',\n\t\t\t\tcontent: resp.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\\n\\n')\n\t\t\t}]));\n\t\t});\n\t\treturn Object.assign(response, {abort: () => controller.abort()});\n\t}\n}\n","import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {LLMMessage, LLMRequest} from './llm.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\nimport {Ollama as ollama} from 'ollama';\n\nexport class Ollama extends LLMProvider {\n\tclient!: ollama;\n\n\tconstructor(public readonly ai: Ai, public host: string, public model: string) 
{\n\t\tsuper();\n\t\tthis.client = new ollama({host});\n\t}\n\n\tprivate toStandard(history: any[]): LLMMessage[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tif(history[i].role == 'assistant' && history[i].tool_calls) {\n\t\t\t\tif(history[i].content) delete history[i].tool_calls;\n\t\t\t\telse {\n\t\t\t\t\thistory.splice(i, 1);\n\t\t\t\t\ti--;\n\t\t\t\t}\n\t\t\t} else if(history[i].role == 'tool') {\n\t\t\t\tconst error = history[i].content.startsWith('{\"error\":');\n\t\t\t\thistory[i] = {role: 'tool', name: history[i].tool_name, args: history[i].args, [error ? 'error' : 'content']: history[i].content};\n\t\t\t}\n\t\t}\n\t\treturn history;\n\t}\n\n\tprivate fromStandard(history: LLMMessage[]): any[] {\n\t\treturn history.map((h: any) => {\n\t\t\tif(h.role != 'tool') return h;\n\t\t\treturn {role: 'tool', tool_name: h.name, content: h.error || h.content}\n\t\t});\n\t}\n\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tconst controller = new AbortController();\n\t\tconst response = new Promise<any>(async (res, rej) => {\n\t\t\tlet system = options.system || this.ai.options.system;\n\t\t\tlet history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);\n\t\t\tif(history[0].roll == 'system') {\n\t\t\t\tif(!system) system = history.shift();\n\t\t\t\telse history.shift();\n\t\t\t}\n\t\t\tif(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min);\n\t\t\tif(options.system) history.unshift({role: 'system', content: system})\n\n\t\t\tconst requestParams: any = {\n\t\t\t\tmodel: options.model || this.model,\n\t\t\t\tmessages: history,\n\t\t\t\tstream: !!options.stream,\n\t\t\t\tsignal: controller.signal,\n\t\t\t\toptions: {\n\t\t\t\t\ttemperature: options.temperature || this.ai.options.temperature || 0.7,\n\t\t\t\t\tnum_predict: options.max_tokens || this.ai.options.max_tokens || 4096,\n\t\t\t\t},\n\t\t\t\ttools: (options.tools || this.ai.options.tools || []).map(t => ({\n\t\t\t\t\ttype: 'function',\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: t.name,\n\t\t\t\t\t\tdescription: t.description,\n\t\t\t\t\t\tparameters: {\n\t\t\t\t\t\t\ttype: 'object',\n\t\t\t\t\t\t\tproperties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},\n\t\t\t\t\t\t\trequired: t.args ? 
Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}))\n\t\t\t}\n\n\t\t\t// Run tool chains\n\t\t\tlet resp: any;\n\t\t\tdo {\n\t\t\t\tresp = await this.client.chat(requestParams);\n\t\t\t\tif(options.stream) {\n\t\t\t\t\tresp.message = {role: 'assistant', content: '', tool_calls: []};\n\t\t\t\t\tfor await (const chunk of resp) {\n\t\t\t\t\t\tif(controller.signal.aborted) break;\n\t\t\t\t\t\tif(chunk.message?.content) {\n\t\t\t\t\t\t\tresp.message.content += chunk.message.content;\n\t\t\t\t\t\t\toptions.stream({text: chunk.message.content});\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif(chunk.message?.tool_calls) resp.message.tool_calls = chunk.message.tool_calls;\n\t\t\t\t\t\tif(chunk.done) break;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Run tools\n\t\t\t\tif(resp.message?.tool_calls?.length && !controller.signal.aborted) {\n\t\t\t\t\thistory.push(resp.message);\n\t\t\t\t\tconst results = await Promise.all(resp.message.tool_calls.map(async (toolCall: any) => {\n\t\t\t\t\t\tconst tool = (options.tools || this.ai.options.tools)?.find(findByProp('name', toolCall.function.name));\n\t\t\t\t\t\tif(!tool) return {role: 'tool', tool_name: toolCall.function.name, content: '{\"error\": \"Tool not found\"}'};\n\t\t\t\t\t\tconst args = typeof toolCall.function.arguments === 'string' ? JSONAttemptParse(toolCall.function.arguments, {}) : toolCall.function.arguments;\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst result = await tool.fn(args, this.ai);\n\t\t\t\t\t\t\treturn {role: 'tool', tool_name: toolCall.function.name, args, content: JSONSanitize(result)};\n\t\t\t\t\t\t} catch (err: any) {\n\t\t\t\t\t\t\treturn {role: 'tool', tool_name: toolCall.function.name, args, content: JSONSanitize({error: err?.message || err?.toString() || 'Unknown'})};\n\t\t\t\t\t\t}\n\t\t\t\t\t}));\n\t\t\t\t\thistory.push(...results);\n\t\t\t\t\trequestParams.messages = history;\n\t\t\t\t}\n\t\t\t} while (!controller.signal.aborted && resp.message?.tool_calls?.length);\n\t\t\tif(options.stream) options.stream({done: true});\n\t\t\tres(this.toStandard([...history, {role: 'assistant', content: resp.message?.content}]));\n\t\t});\n\t\treturn Object.assign(response, {abort: () => controller.abort()});\n\t}\n}\n","import {OpenAI as openAI} from 'openai';\nimport {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {LLMMessage, LLMRequest} from './llm.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\n\nexport class OpenAi extends LLMProvider {\n\tclient!: openAI;\n\n\tconstructor(public readonly ai: Ai, public readonly apiToken: string, public model: string) {\n\t\tsuper();\n\t\tthis.client = new openAI({apiKey: apiToken});\n\t}\n\n\tprivate toStandard(history: any[]): LLMMessage[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tconst h = history[i];\n\t\t\tif(h.role === 'assistant' && h.tool_calls) {\n\t\t\t\tconst tools = h.tool_calls.map((tc: any) => ({\n\t\t\t\t\trole: 'tool',\n\t\t\t\t\tid: tc.id,\n\t\t\t\t\tname: tc.function.name,\n\t\t\t\t\targs: JSONAttemptParse(tc.function.arguments, {})\n\t\t\t\t}));\n\t\t\t\thistory.splice(i, 1, ...tools);\n\t\t\t\ti += tools.length - 1;\n\t\t\t} else if(h.role === 'tool' && h.content) {\n\t\t\t\tconst record = history.find(h2 => h.tool_call_id == h2.id);\n\t\t\t\tif(record) {\n\t\t\t\t\tif(h.content.includes('\"error\":')) record.error = h.content;\n\t\t\t\t\telse record.content = h.content;\n\t\t\t\t}\n\t\t\t\thistory.splice(i, 
1);\n\t\t\t\ti--;\n\t\t\t}\n\n\t\t}\n\t\treturn history;\n\t}\n\n\tprivate fromStandard(history: LLMMessage[]): any[] {\n\t\treturn history.reduce((result, h) => {\n\t\t\tif(h.role === 'tool') {\n\t\t\t\tresult.push({\n\t\t\t\t\trole: 'assistant',\n\t\t\t\t\tcontent: null,\n\t\t\t\t\ttool_calls: [{ id: h.id, type: 'function', function: { name: h.name, arguments: JSON.stringify(h.args) } }],\n\t\t\t\t\trefusal: null,\n\t\t\t\t\tannotations: [],\n\t\t\t\t}, {\n\t\t\t\t\trole: 'tool',\n\t\t\t\t\ttool_call_id: h.id,\n\t\t\t\t\tcontent: h.error || h.content\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\tresult.push(h);\n\t\t\t}\n\t\t\treturn result;\n\t\t}, [] as any[]);\n\t}\n\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tconst controller = new AbortController();\n\t\tconst response = new Promise<any>(async (res, rej) => {\n\t\t\tlet history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);\n\t\t\tif(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);\n\n\t\t\tconst requestParams: any = {\n\t\t\t\tmodel: options.model || this.model,\n\t\t\t\tmessages: history,\n\t\t\t\tstream: !!options.stream,\n\t\t\t\tmax_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,\n\t\t\t\ttemperature: options.temperature || this.ai.options.temperature || 0.7,\n\t\t\t\ttools: (options.tools || this.ai.options.tools || []).map(t => ({\n\t\t\t\t\ttype: 'function',\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: t.name,\n\t\t\t\t\t\tdescription: t.description,\n\t\t\t\t\t\tparameters: {\n\t\t\t\t\t\t\ttype: 'object',\n\t\t\t\t\t\t\tproperties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},\n\t\t\t\t\t\t\trequired: t.args ? 
Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}))\n\t\t\t};\n\n\t\t\t// Tool call and streaming logic similar to other providers\n\t\t\tlet resp: any;\n\t\t\tdo {\n\t\t\t\tresp = await this.client.chat.completions.create(requestParams);\n\n\t\t\t\t// Implement streaming and tool call handling\n\t\t\t\tif(options.stream) {\n\t\t\t\t\tresp.choices = [];\n\t\t\t\t\tfor await (const chunk of resp) {\n\t\t\t\t\t\tif(controller.signal.aborted) break;\n\t\t\t\t\t\tif(chunk.choices[0].delta.content) {\n\t\t\t\t\t\t\toptions.stream({text: chunk.choices[0].delta.content});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Run tools\n\t\t\t\tconst toolCalls = resp.choices[0].message.tool_calls || [];\n\t\t\t\tif(toolCalls.length && !controller.signal.aborted) {\n\t\t\t\t\thistory.push(resp.choices[0].message);\n\t\t\t\t\tconst results = await Promise.all(toolCalls.map(async (toolCall: any) => {\n\t\t\t\t\t\tconst tool = options.tools?.find(findByProp('name', toolCall.function.name));\n\t\t\t\t\t\tif(!tool) return {role: 'tool', tool_call_id: toolCall.id, content: '{\"error\": \"Tool not found\"}'};\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst args = JSONAttemptParse(toolCall.function.arguments, {});\n\t\t\t\t\t\t\tconst result = await tool.fn(args, this.ai);\n\t\t\t\t\t\t\treturn {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize(result)};\n\t\t\t\t\t\t} catch (err: any) {\n\t\t\t\t\t\t\treturn {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize({error: err?.message || err?.toString() || 'Unknown'})};\n\t\t\t\t\t\t}\n\t\t\t\t\t}));\n\t\t\t\t\thistory.push(...results);\n\t\t\t\t\trequestParams.messages = history;\n\t\t\t\t}\n\t\t\t} while (!controller.signal.aborted && resp.choices?.[0]?.message?.tool_calls?.length);\n\n\t\t\tif(options.stream) options.stream({done: true});\n\t\t\tres(this.toStandard([...history, {role: 'assistant', content: resp.choices[0].message.content || ''}]));\n\t\t});\n\n\t\treturn Object.assign(response, {abort: () => controller.abort()});\n\t}\n}\n","import {JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {Anthropic} from './antrhopic.ts';\nimport {Ollama} from './ollama.ts';\nimport {OpenAi} from './open-ai.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\nimport {AiTool} from './tools.ts';\n\nexport type LLMMessage = {\n\t/** Message originator */\n\trole: 'assistant' | 'system' | 'user';\n\t/** Message content */\n\tcontent: string | any;\n} | {\n\t/** Tool call */\n\trole: 'tool';\n\t/** Unique ID for call */\n\tid: string;\n\t/** Tool that was run */\n\tname: string;\n\t/** Tool arguments */\n\targs: any;\n\t/** Tool result */\n\tcontent: undefined | string;\n\t/** Tool error */\n\terror: undefined | string;\n}\n\nexport type LLMOptions = {\n\t/** Anthropic settings */\n\tanthropic?: {\n\t\t/** API Token */\n\t\ttoken: string;\n\t\t/** Default model */\n\t\tmodel: string;\n\t},\n\t/** Ollama settings */\n\tollama?: {\n\t\t/** connection URL */\n\t\thost: string;\n\t\t/** Default model */\n\t\tmodel: string;\n\t},\n\t/** Open AI settings */\n\topenAi?: {\n\t\t/** API Token */\n\t\ttoken: string;\n\t\t/** Default model */\n\t\tmodel: string;\n\t},\n\t/** Default provider & model */\n\tmodel: string | [string, string];\n} & Omit<LLMRequest, 'model'>;\n\nexport type LLMRequest = {\n\t/** System prompt */\n\tsystem?: string;\n\t/** Message history */\n\thistory?: LLMMessage[];\n\t/** Max tokens for request */\n\tmax_tokens?: number;\n\t/** 0 = 
Rigid Logic, 1 = Balanced, 2 = Hyper Creative **/\n\ttemperature?: number;\n\t/** Available tools */\n\ttools?: AiTool[];\n\t/** LLM model */\n\tmodel?: string | [string, string];\n\t/** Stream response */\n\tstream?: (chunk: {text?: string, done?: true}) => any;\n\t/** Compress old messages in the chat to free up context */\n\tcompress?: {\n\t\t/** Trigger chat compression once context exceeds the token count */\n\t\tmax: number;\n\t\t/** Compress chat until context size smaller than */\n\t\tmin: number\n\t}\n}\n\nexport class LLM {\n\tprivate providers: {[key: string]: LLMProvider} = {};\n\n\tconstructor(public readonly ai: Ai, public readonly options: LLMOptions) {\n\t\tif(options.anthropic?.token) this.providers.anthropic = new Anthropic(this.ai, options.anthropic.token, options.anthropic.model);\n\t\tif(options.ollama?.host) this.providers.ollama = new Ollama(this.ai, options.ollama.host, options.ollama.model);\n\t\tif(options.openAi?.token) this.providers.openAi = new OpenAi(this.ai, options.openAi.token, options.openAi.model);\n\t}\n\n\t/**\n\t * Chat with LLM\n\t * @param {string} message Question\n\t * @param {LLMRequest} options Configuration options and chat history\n\t * @returns {{abort: () => void, response: Promise<LLMMessage[]>}} Function to abort response and chat history\n\t */\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tlet model: any = [null, null];\n\t\tif(options.model) {\n\t\t\tif(typeof options.model == 'object') model = options.model;\n\t\t\telse model = [options.model, (<any>this.options)[options.model]?.model];\n\t\t}\n\t\tif(!options.model || model[1] == null) {\n\t\t\tif(typeof this.options.model == 'object') model = this.options.model;\n\t\t\telse model = [this.options.model, (<any>this.options)[this.options.model]?.model];\n\t\t}\n\t\tif(!model[0] || !model[1]) throw new Error(`Unknown LLM provider or model: ${model[0]} / ${model[1]}`);\n\t\treturn this.providers[model[0]].ask(message, {...options, model: model[1]});\n\t}\n\n\t/**\n\t * Compress chat history to reduce context size\n\t * @param {LLMMessage[]} history Chatlog that will be compressed\n\t * @param max Trigger compression once context is larger than max\n\t * @param min Summarize until context size is less than min\n\t * @param {LLMRequest} options LLM options\n\t * @returns {Promise<LLMMessage[]>} New chat history will summary at index 0\n\t */\n\tasync compress(history: LLMMessage[], max: number, min: number, options?: LLMRequest): Promise<LLMMessage[]> {\n\t\tif(this.estimateTokens(history) < max) return history;\n\t\tlet keep = 0, tokens = 0;\n\t\tfor(let m of history.toReversed()) {\n\t\t\ttokens += this.estimateTokens(m.content);\n\t\t\tif(tokens < min) keep++;\n\t\t\telse break;\n\t\t}\n\t\tif(history.length <= keep) return history;\n\t\tconst recent = keep == 0 ? [] : history.slice(-keep),\n\t\t\tprocess = (keep == 0 ? 
history : history.slice(0, -keep)).filter(h => h.role === 'assistant' || h.role === 'user');\n\t\tconst summary = await this.summarize(process.map(m => `${m.role}: ${m.content}`).join('\\n\\n'), 250, options);\n\t\treturn [{role: 'assistant', content: `Conversation Summary: ${summary}`}, ...recent];\n\t}\n\n\t/**\n\t * Estimate variable as tokens\n\t * @param history Object to size\n\t * @returns {number} Rough token count\n\t */\n\testimateTokens(history: any): number {\n\t\tconst text = JSON.stringify(history);\n\t\treturn Math.ceil((text.length / 4) * 1.2);\n\t}\n\n\t/**\n\t * Ask a question with JSON response\n\t * @param {string} message Question\n\t * @param {LLMRequest} options Configuration options and chat history\n\t * @returns {Promise<{} | {} | RegExpExecArray | null>}\n\t */\n\tasync json(message: string, options?: LLMRequest) {\n\t\tlet resp = await this.ask(message, {\n\t\t\tsystem: 'Respond using a JSON blob',\n\t\t\t...options\n\t\t});\n\t\tif(!resp?.[0]?.content) return {};\n\t\treturn JSONAttemptParse(new RegExp('\\{[\\s\\S]*\\}').exec(resp[0].content), {});\n\t}\n\n\t/**\n\t * Create a summary of some text\n\t * @param {string} text Text to summarize\n\t * @param {number} tokens Max number of tokens\n\t * @param options LLM request options\n\t * @returns {Promise<string>} Summary\n\t */\n\tsummarize(text: string, tokens: number, options?: LLMRequest): Promise<string | null> {\n\t\treturn this.ask(text, {system: `Generate a brief summary <= ${tokens} tokens. Output nothing else`, temperature: 0.3, ...options})\n\t\t\t.then(history => <string>history.pop()?.content || null);\n\t}\n}\n","import {$} from '@ztimson/node-utils';\nimport {createWorker} from 'tesseract.js';\nimport {LLM, LLMOptions} from './llm';\nimport fs from 'node:fs/promises';\nimport Path from 'node:path';\nimport * as tf from '@tensorflow/tfjs';\n\nexport type AiOptions = LLMOptions & {\n\twhisper?: {\n\t\t/** Whisper binary location */\n\t\tbinary: string;\n\t\t/** Model: `ggml-base.en.bin` */\n\t\tmodel: string;\n\t\t/** Path to models */\n\t\tpath: string;\n\t\t/** Path to storage location for temporary files */\n\t\ttemp?: string;\n\t}\n}\n\nexport class Ai {\n\tprivate downloads: {[key: string]: Promise<string>} = {};\n\tprivate whisperModel!: string;\n\n\t/** Large Language Models */\n\tllm!: LLM;\n\n\tconstructor(public readonly options: AiOptions) {\n\t\tthis.llm = new LLM(this, options);\n\t\tif(this.options.whisper?.binary) {\n\t\t\tthis.whisperModel = this.options.whisper?.model.endsWith('.bin') ? 
this.options.whisper?.model : this.options.whisper?.model + '.bin';\n\t\t\tconsole.log('constructor: ' + this.options.whisper.model + ' -> ' + this.whisperModel);\n\t\t\tthis.downloadAsrModel();\n\t\t}\n\t}\n\n\t/**\n\t * Convert audio to text using Auditory Speech Recognition\n\t * @param {string} path Path to audio\n\t * @param model Whisper model\n\t * @returns {Promise<any>} Extracted text\n\t */\n\tasync asr(path: string, model: string = this.whisperModel): Promise<string | null> {\n\t\tif(!this.options.whisper?.binary) throw new Error('Whisper not configured');\n\t\tconst m = await this.downloadAsrModel(model);\n\t\tconst name = Math.random().toString(36).substring(2, 10) + '-' + path.split('/').pop() + '.txt';\n\t\tconst output = Path.join(this.options.whisper.temp || '/tmp', name);\n\t\tconsole.log('ASR: ' + this.options.whisper.model + ' -> ' + this.whisperModel);\n\t\tconsole.log(`rm -f ${output} && ${this.options.whisper.binary} -nt -np -m ${m} -f ${path} -otxt -of ${output}`);\n\t\tawait $`rm -f ${output} && ${this.options.whisper.binary} -nt -np -m ${m} -f ${path} -otxt -of ${output}`;\n\t\treturn fs.readFile(output, 'utf-8').then(text => text?.trim() || null)\n\t\t\t.finally(() => fs.rm(output, {force: true}).catch(() => {}));\n\t}\n\n\t/**\n\t * Downloads the specified Whisper model if it is not already present locally.\n\t *\n\t * @param {string} model Whisper model that will be downloaded\n\t * @return {Promise<string>} Absolute path to model file, resolves once downloaded\n\t */\n\tasync downloadAsrModel(model: string = this.whisperModel): Promise<string> {\n\t\tif(!this.options.whisper?.binary) throw new Error('Whisper not configured');\n\t\tif(!model.endsWith('.bin')) model += '.bin';\n\t\tconst p = Path.join(this.options.whisper.path, model);\n\t\tconsole.log('Download: ' + p);\n\t\tif(await fs.stat(p).then(() => true).catch(() => false)) {\n\t\t\tconsole.log('Exists!');\n\t\t\treturn p;\n\t\t}\n\t\tif(!!this.downloads[model]) return this.downloads[model];\n\t\tthis.downloads[model] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${model}`)\n\t\t\t.then(resp => resp.arrayBuffer())\n\t\t\t.then(arr => Buffer.from(arr)).then(async buffer => {\n\t\t\t\tawait fs.writeFile(p, buffer);\n\t\t\t\tdelete this.downloads[model];\n\t\t\t\treturn p;\n\t\t\t});\n\t\treturn this.downloads[model];\n\t}\n\n\t/**\n\t * Convert image to text using Optical Character Recognition\n\t * @param {string} path Path to image\n\t * @returns {{abort: Function, response: Promise<string | null>}} Abort function & Promise of extracted text\n\t */\n\tocr(path: string): {abort: () => void, response: Promise<string | null>} {\n\t\tlet worker: any;\n\t\treturn {\n\t\t\tabort: () => { worker?.terminate(); },\n\t\t\tresponse: new Promise(async res => {\n\t\t\t\tworker = await createWorker('eng');\n\t\t\t\tconst {data} = await worker.recognize(path);\n\t\t\t\tawait worker.terminate();\n\t\t\t\tres(data.text.trim() || null);\n\t\t\t})\n\t\t}\n\t}\n\n\t/**\n\t * Compare the difference between two strings using tensor math\n\t * @param target Text that will checked\n\t * @param {string} searchTerms Multiple search terms to check against target\n\t * @returns {{avg: number, max: number, similarities: number[]}} Similarity values 0-1: 0 = unique, 1 = identical\n\t */\n\tsemanticSimilarity(target: string, ...searchTerms: string[]) {\n\t\tif(searchTerms.length < 2) throw new Error('Requires at least 2 strings to compare');\n\n\t\tconst vector = (text: string, dimensions: number = 10): number[] 
=> {\n\t\t\treturn text.toLowerCase().split('').map((char, index) =>\n\t\t\t\t(char.charCodeAt(0) * (index + 1)) % dimensions / dimensions).slice(0, dimensions);\n\t\t}\n\n\t\tconst cosineSimilarity = (v1: number[], v2: number[]): number => {\n\t\t\tif (v1.length !== v2.length) throw new Error('Vectors must be same length');\n\t\t\tconst tensor1 = tf.tensor1d(v1), tensor2 = tf.tensor1d(v2)\n\t\t\tconst dotProduct = tf.dot(tensor1, tensor2)\n\t\t\tconst magnitude1 = tf.norm(tensor1)\n\t\t\tconst magnitude2 = tf.norm(tensor2)\n\t\t\tif(magnitude1.dataSync()[0] === 0 || magnitude2.dataSync()[0] === 0) return 0\n\t\t\treturn dotProduct.dataSync()[0] / (magnitude1.dataSync()[0] * magnitude2.dataSync()[0])\n\t\t}\n\n\t\tconst v = vector(target);\n\t\tconst similarities = searchTerms.map(t => vector(t)).map(refVector => cosineSimilarity(v, refVector))\n\t\treturn {avg: similarities.reduce((acc, s) => acc + s, 0) / similarities.length, max: Math.max(...similarities), similarities}\n\t}\n}\n","import {$, $Sync} from '@ztimson/node-utils';\nimport {ASet, consoleInterceptor, Http, fn as Fn} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\n\nexport type AiToolArg = {[key: string]: {\n\t/** Argument type */\n\ttype: 'array' | 'boolean' | 'number' | 'object' | 'string',\n\t/** Argument description */\n\tdescription: string,\n\t/** Required argument */\n\trequired?: boolean;\n\t/** Default value */\n\tdefault?: any,\n\t/** Options */\n\tenum?: string[],\n\t/** Minimum value or length */\n\tmin?: number,\n\t/** Maximum value or length */\n\tmax?: number,\n\t/** Match pattern */\n\tpattern?: string,\n\t/** Child arguments */\n\titems?: {[key: string]: AiToolArg}\n}}\n\nexport type AiTool = {\n\t/** Tool ID / Name - Must be snail_case */\n\tname: string,\n\t/** Tool description / prompt */\n\tdescription: string,\n\t/** Tool arguments */\n\targs?: AiToolArg,\n\t/** Callback function */\n\tfn: (args: any, ai: Ai) => any | Promise<any>,\n};\n\nexport const CliTool: AiTool = {\n\tname: 'cli',\n\tdescription: 'Use the command line interface, returns any output',\n\targs: {command: {type: 'string', description: 'Command to run', required: true}},\n\tfn: (args: {command: string}) => $`${args.command}`\n}\n\nexport const DateTimeTool: AiTool = {\n\tname: 'get_datetime',\n\tdescription: 'Get current date and time',\n\targs: {},\n\tfn: async () => new Date().toISOString()\n}\n\nexport const ExecTool: AiTool = {\n\tname: 'exec',\n\tdescription: 'Run code/scripts',\n\targs: {\n\t\tlanguage: {type: 'string', description: 'Execution language', enum: ['cli', 'node', 'python'], required: true},\n\t\tcode: {type: 'string', description: 'Code to execute', required: true}\n\t},\n\tfn: async (args, ai) => {\n\t\ttry {\n\t\t\tswitch(args.type) {\n\t\t\t\tcase 'bash':\n\t\t\t\t\treturn await CliTool.fn({command: args.code}, ai);\n\t\t\t\tcase 'node':\n\t\t\t\t\treturn await JSTool.fn({code: args.code}, ai);\n\t\t\t\tcase 'python': {\n\t\t\t\t\treturn await PythonTool.fn({code: args.code}, ai);\n\t\t\t\t}\n\t\t\t}\n\t\t} catch(err: any) {\n\t\t\treturn {error: err?.message || err.toString()};\n\t\t}\n\t}\n}\n\nexport const FetchTool: AiTool = {\n\tname: 'fetch',\n\tdescription: 'Make HTTP request to URL',\n\targs: {\n\t\turl: {type: 'string', description: 'URL to fetch', required: true},\n\t\tmethod: {type: 'string', description: 'HTTP method to use', enum: ['GET', 'POST', 'PUT', 'DELETE'], default: 'GET'},\n\t\theaders: {type: 'object', description: 'HTTP headers to send', default: {}},\n\t\tbody: {type: 'object', 
description: 'HTTP body to send'},\n\t},\n\tfn: (args: {\n\t\turl: string;\n\t\tmethod: 'GET' | 'POST' | 'PUT' | 'DELETE';\n\t\theaders: {[key: string]: string};\n\t\tbody: any;\n\t}) => new Http({url: args.url, headers: args.headers}).request({method: args.method || 'GET', body: args.body})\n}\n\nexport const JSTool: AiTool = {\n\tname: 'exec_javascript',\n\tdescription: 'Execute commonjs javascript',\n\targs: {\n\t\tcode: {type: 'string', description: 'CommonJS javascript', required: true}\n\t},\n\tfn: async (args: {code: string}) => {\n\t\tconst console = consoleInterceptor(null);\n\t\tconst resp = await Fn<any>({console}, args.code, true).catch((err: any) => console.output.error.push(err));\n\t\treturn {...console.output, return: resp, stdout: undefined, stderr: undefined};\n\t}\n}\n\nexport const PythonTool: AiTool = {\n\tname: 'exec_javascript',\n\tdescription: 'Execute commonjs javascript',\n\targs: {\n\t\tcode: {type: 'string', description: 'CommonJS javascript', required: true}\n\t},\n\tfn: async (args: {code: string}) => ({result: $Sync`python -c \"${args.code}\"`})\n}\n\nexport const SearchTool: AiTool = {\n\tname: 'search',\n\tdescription: 'Use a search engine to find relevant URLs, should be changed with fetch to scrape sources',\n\targs: {\n\t\tquery: {type: 'string', description: 'Search string', required: true},\n\t\tlength: {type: 'string', description: 'Number of results to return', default: 5},\n\t},\n\tfn: async (args: {\n\t\tquery: string;\n\t\tlength: number;\n\t}) => {\n\t\tconst html = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(args.query)}`, {\n\t\t\theaders: {\"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64)\", \"Accept-Language\": \"en-US,en;q=0.9\"}\n\t\t}).then(resp => resp.text());\n\t\tlet match, regex = /<a .*?href=\"(.+?)\".+?<\\/a>/g;\n\t\tconst results = new ASet<string>();\n\t\twhile((match = regex.exec(html)) !== null) {\n\t\t\tlet url = /uddg=(.+)&amp?/.exec(decodeURIComponent(match[1]))?.[1];\n\t\t\tif(url) url = decodeURIComponent(url);\n\t\t\tif(url) results.add(url);\n\t\t\tif(results.size >= (args.length || 5)) break;\n\t\t}\n\t\treturn 
results;\n\t}\n}\n"],"names":["LLMProvider","Anthropic","ai","apiToken","model","anthropic","history","i","orgI","c","h","message","options","controller","response","res","rej","requestParams","t","objectMap","key","value","resp","chunk","text","last","JSONAttemptParse","toolCalls","results","toolCall","tool","findByProp","result","JSONSanitize","err","Ollama","host","ollama","error","system","args","OpenAi","openAI","tools","tc","record","h2","LLM","max","min","keep","tokens","m","recent","process","Ai","path","name","output","Path","$","fs","p","arr","buffer","worker","createWorker","data","target","searchTerms","vector","dimensions","char","index","cosineSimilarity","v1","v2","tensor1","tf","tensor2","dotProduct","magnitude1","magnitude2","v","similarities","refVector","acc","s","CliTool","DateTimeTool","ExecTool","JSTool","PythonTool","FetchTool","Http","console","consoleInterceptor","Fn","$Sync","SearchTool","html","match","regex","ASet","url"],"mappings":"m8BAIO,MAAeA,CAAY,CAElC,CCAO,MAAMC,UAAkBD,CAAY,CAG1C,YAA4BE,EAAwBC,EAAyBC,EAAe,CAC3F,MAAA,EAD2B,KAAA,GAAAF,EAAwB,KAAA,SAAAC,EAAyB,KAAA,MAAAC,EAE5E,KAAK,OAAS,IAAIC,EAAAA,UAAU,CAAC,OAAQF,EAAS,CAC/C,CALA,OAOQ,WAAWG,EAA8B,CAChD,QAAQC,EAAI,EAAGA,EAAID,EAAQ,OAAQC,IAAK,CACvC,MAAMC,EAAOD,EACV,OAAOD,EAAQE,CAAI,EAAE,SAAW,WAC/BF,EAAQE,CAAI,EAAE,MAAQ,YACxBF,EAAQE,CAAI,EAAE,QAAQ,OAAQC,GAAWA,EAAE,MAAO,UAAU,EAAE,QAASA,GAAW,CACjFF,IACAD,EAAQ,OAAOC,EAAG,EAAG,CAAC,KAAM,OAAQ,GAAIE,EAAE,GAAI,KAAMA,EAAE,KAAM,KAAMA,EAAE,MAAM,CAC3E,CAAC,EACQH,EAAQE,CAAI,EAAE,MAAQ,QAC/BF,EAAQE,CAAI,EAAE,QAAQ,OAAQC,GAAWA,EAAE,MAAO,aAAa,EAAE,QAASA,GAAW,CACpF,MAAMC,EAAIJ,EAAQ,KAAMI,GAAWA,EAAE,IAAMD,EAAE,WAAW,EACxDC,EAAED,EAAE,SAAW,QAAU,SAAS,EAAIA,EAAE,OACzC,CAAC,EAEFH,EAAQE,CAAI,EAAE,QAAUF,EAAQE,CAAI,EAAE,QAAQ,OAAQC,GAAWA,EAAE,MAAQ,MAAM,EAAE,IAAKA,GAAWA,EAAE,IAAI,EAAE,KAAK;AAAA;AAAA,CAAM,EAExH,CACA,OAAOH,EAAQ,OAAOI,GAAK,CAAC,CAACA,EAAE,OAAO,CACvC,CAEQ,aAAaJ,EAA8B,CAClD,QAAQC,EAAI,EAAGA,EAAID,EAAQ,OAAQC,IAClC,GAAGD,EAAQC,CAAC,EAAE,MAAQ,OAAQ,CAC7B,MAAMG,EAASJ,EAAQC,CAAC,EACxBD,EAAQ,OAAOC,EAAG,EACjB,CAAC,KAAM,YAAa,QAAS,CAAC,CAAC,KAAM,WAAY,GAAIG,EAAE,GAAI,KAAMA,EAAE,KAAM,MAAOA,EAAE,IAAA,CAAK,CAAA,EACvF,CAAC,KAAM,OAAQ,QAAS,CAAC,CAAC,KAAM,cAAe,YAAaA,EAAE,GAAI,SAAU,CAAC,CAACA,EAAE,MAAO,QAAUA,EAAE,OAASA,EAAE,QAAQ,CAAA,CAAC,EAExHH,GACD,CAED,OAAOD,CACR,CAEA,IAAIK,EAAiBC,EAAsB,GAAoC,CAC9E,MAAMC,EAAa,IAAI,gBACjBC,EAAW,IAAI,QAAa,MAAOC,EAAKC,IAAQ,CACrD,IAAIV,EAAU,KAAK,aAAa,CAAC,GAAGM,EAAQ,SAAW,CAAA,EAAI,CAAC,KAAM,OAAQ,QAASD,CAAA,CAAQ,CAAC,EACzFC,EAAQ,WAAUN,EAAU,MAAM,KAAK,GAAG,IAAI,SAAcA,EAASM,EAAQ,SAAS,IAAKA,EAAQ,SAAS,IAAKA,CAAO,GAC3H,MAAMK,EAAqB,CAC1B,MAAOL,EAAQ,OAAS,KAAK,MAC7B,WAAYA,EAAQ,YAAc,KAAK,GAAG,QAAQ,YAAc,KAChE,OAAQA,EAAQ,QAAU,KAAK,GAAG,QAAQ,QAAU,GACpD,YAAaA,EAAQ,aAAe,KAAK,GAAG,QAAQ,aAAe,GACnE,OAAQA,EAAQ,OAAS,KAAK,GAAG,QAAQ,OAAS,CAAA,GAAI,IAAIM,IAAM,CAC/D,KAAMA,EAAE,KACR,YAAaA,EAAE,YACf,aAAc,CACb,KAAM,SACN,WAAYA,EAAE,KAAOC,EAAAA,UAAUD,EAAE,KAAM,CAACE,EAAKC,KAAW,CAAC,GAAGA,EAAO,SAAU,MAAA,EAAW,EAAI,CAAA,EAC5F,SAAUH,EAAE,KAAO,OAAO,QAAQA,EAAE,IAAI,EAAE,OAAOA,GAAKA,EAAE,CAAC,EAAE,QAAQ,EAAE,IAAIA,GAAKA,EAAE,CAAC,CAAC,EAAI,CAAA,CAAC,EAExF,GAAI,MAAA,EACH,EACF,SAAUZ,EACV,OAAQ,CAAC,CAACM,EAAQ,MAAA,EAInB,IAAIU,EACJ,EAAG,CAIF,GAHAA,EAAO,MAAM,KAAK,OAAO,SAAS,OAAOL,CAAa,EAGnDL,EAAQ,OAAQ,CAClBU,EAAK,QAAU,CAAA,EACf,gBAAiBC,KAASD,EAAM,CAC/B,GAAGT,EAAW,OAAO,QAAS,MAC9B,GAAGU,EAAM,OAAS,sBACdA,EAAM,cAAc,OAAS,OAC/BD,EAAK,QAAQ,KAAK,CAAC,KAAM,OAAQ,KAAM,GAAG,EACjCC,EAAM,cAAc,OAAS,YACtCD,EAAK,QAAQ,KAAK,CAAC,KAAM,WAAY,GAAIC,EAAM,cAAc,GAAI,KAAMA,EAAM,cAAc,KAAM,MAAY,GAAG,UAExGA,EAAM,OAAS,sBACxB,GAAGA,EAAM,MAAM,
OAAS,aAAc,CACrC,MAAMC,EAAOD,EAAM,MAAM,KACzBD,EAAK,QAAQ,GAAG,EAAE,EAAE,MAAQE,EAC5BZ,EAAQ,OAAO,CAAC,KAAAY,EAAK,CACtB,MAAUD,EAAM,MAAM,OAAS,qBAC9BD,EAAK,QAAQ,GAAG,EAAE,EAAE,OAASC,EAAM,MAAM,sBAEjCA,EAAM,OAAS,qBAAsB,CAC9C,MAAME,EAAOH,EAAK,QAAQ,GAAG,EAAE,EAC5BG,EAAK,OAAS,OAAMA,EAAK,MAAQA,EAAK,MAAQC,EAAAA,iBAAiBD,EAAK,MAAO,CAAA,CAAE,EAAI,CAAA,EACrF,SAAUF,EAAM,OAAS,eACxB,KAEF,CACD,CAGA,MAAMI,EAAYL,EAAK,QAAQ,OAAQb,GAAWA,EAAE,OAAS,UAAU,EACvE,GAAGkB,EAAU,QAAU,CAACd,EAAW,OAAO,QAAS,CAClDP,EAAQ,KAAK,CAAC,KAAM,YAAa,QAASgB,EAAK,QAAQ,EACvD,MAAMM,EAAU,MAAM,QAAQ,IAAID,EAAU,IAAI,MAAOE,GAAkB,CACxE,MAAMC,EAAOlB,EAAQ,OAAO,KAAKmB,EAAAA,WAAW,OAAQF,EAAS,IAAI,CAAC,EAClE,GAAG,CAACC,EAAM,MAAO,CAAC,YAAaD,EAAS,GAAI,SAAU,GAAM,QAAS,gBAAA,EACrE,GAAI,CACH,MAAMG,EAAS,MAAMF,EAAK,GAAGD,EAAS,MAAO,KAAK,EAAE,EACpD,MAAO,CAAC,KAAM,cAAe,YAAaA,EAAS,GAAI,QAASI,eAAaD,CAAM,CAAA,CACpF,OAASE,EAAU,CAClB,MAAO,CAAC,KAAM,cAAe,YAAaL,EAAS,GAAI,SAAU,GAAM,QAASK,GAAK,SAAWA,GAAK,SAAA,GAAc,SAAA,CACpH,CACD,CAAC,CAAC,EACF5B,EAAQ,KAAK,CAAC,KAAM,OAAQ,QAASsB,EAAQ,EAC7CX,EAAc,SAAWX,CAC1B,CACD,OAAS,CAACO,EAAW,OAAO,SAAWS,EAAK,QAAQ,KAAMb,GAAWA,EAAE,OAAS,UAAU,GACvFG,EAAQ,QAAQA,EAAQ,OAAO,CAAC,KAAM,GAAK,EAC9CG,EAAI,KAAK,WAAW,CAAC,GAAGT,EAAS,CAChC,KAAM,YACN,QAASgB,EAAK,QAAQ,OAAQb,GAAWA,EAAE,MAAQ,MAAM,EAAE,IAAKA,GAAWA,EAAE,IAAI,EAAE,KAAK;AAAA;AAAA,CAAM,CAAA,CAC9F,CAAC,CAAC,CACJ,CAAC,EACD,OAAO,OAAO,OAAOK,EAAU,CAAC,MAAO,IAAMD,EAAW,MAAA,EAAQ,CACjE,CACD,CC9HO,MAAMsB,UAAenC,CAAY,CAGvC,YAA4BE,EAAekC,EAAqBhC,EAAe,CAC9E,MAAA,EAD2B,KAAA,GAAAF,EAAe,KAAA,KAAAkC,EAAqB,KAAA,MAAAhC,EAE/D,KAAK,OAAS,IAAIiC,SAAO,CAAC,KAAAD,EAAK,CAChC,CALA,OAOQ,WAAW9B,EAA8B,CAChD,QAAQC,EAAI,EAAGA,EAAID,EAAQ,OAAQC,IAClC,GAAGD,EAAQC,CAAC,EAAE,MAAQ,aAAeD,EAAQC,CAAC,EAAE,WAC5CD,EAAQC,CAAC,EAAE,QAAS,OAAOD,EAAQC,CAAC,EAAE,YAExCD,EAAQ,OAAOC,EAAG,CAAC,EACnBA,aAEQD,EAAQC,CAAC,EAAE,MAAQ,OAAQ,CACpC,MAAM+B,EAAQhC,EAAQC,CAAC,EAAE,QAAQ,WAAW,WAAW,EACvDD,EAAQC,CAAC,EAAI,CAAC,KAAM,OAAQ,KAAMD,EAAQC,CAAC,EAAE,UAAW,KAAMD,EAAQC,CAAC,EAAE,KAAM,CAAC+B,EAAQ,QAAU,SAAS,EAAGhC,EAAQC,CAAC,EAAE,OAAA,CAC1H,CAED,OAAOD,CACR,CAEQ,aAAaA,EAA8B,CAClD,OAAOA,EAAQ,IAAKI,GAChBA,EAAE,MAAQ,OAAeA,EACrB,CAAC,KAAM,OAAQ,UAAWA,EAAE,KAAM,QAASA,EAAE,OAASA,EAAE,OAAA,CAC/D,CACF,CAEA,IAAIC,EAAiBC,EAAsB,GAAoC,CAC9E,MAAMC,EAAa,IAAI,gBACjBC,EAAW,IAAI,QAAa,MAAOC,EAAKC,IAAQ,CACrD,IAAIuB,EAAS3B,EAAQ,QAAU,KAAK,GAAG,QAAQ,OAC3CN,EAAU,KAAK,aAAa,CAAC,GAAGM,EAAQ,SAAW,CAAA,EAAI,CAAC,KAAM,OAAQ,QAASD,CAAA,CAAQ,CAAC,EACzFL,EAAQ,CAAC,EAAE,MAAQ,WACjBiC,IACS,MAAA,EADDA,EAASjC,EAAQ,MAAA,GAG3BM,EAAQ,WAAUN,EAAU,MAAM,KAAK,GAAG,IAAI,SAAcA,EAASM,EAAQ,SAAS,IAAKA,EAAQ,SAAS,GAAG,GAC/GA,EAAQ,QAAQN,EAAQ,QAAQ,CAAC,KAAM,SAAU,QAASiC,EAAO,EAEpE,MAAMtB,EAAqB,CAC1B,MAAOL,EAAQ,OAAS,KAAK,MAC7B,SAAUN,EACV,OAAQ,CAAC,CAACM,EAAQ,OAClB,OAAQC,EAAW,OACnB,QAAS,CACR,YAAaD,EAAQ,aAAe,KAAK,GAAG,QAAQ,aAAe,GACnE,YAAaA,EAAQ,YAAc,KAAK,GAAG,QAAQ,YAAc,IAAA,EAElE,OAAQA,EAAQ,OAAS,KAAK,GAAG,QAAQ,OAAS,CAAA,GAAI,IAAIM,IAAM,CAC/D,KAAM,WACN,SAAU,CACT,KAAMA,EAAE,KACR,YAAaA,EAAE,YACf,WAAY,CACX,KAAM,SACN,WAAYA,EAAE,KAAOC,EAAAA,UAAUD,EAAE,KAAM,CAACE,EAAKC,KAAW,CAAC,GAAGA,EAAO,SAAU,MAAA,EAAW,EAAI,CAAA,EAC5F,SAAUH,EAAE,KAAO,OAAO,QAAQA,EAAE,IAAI,EAAE,OAAOA,GAAKA,EAAE,CAAC,EAAE,QAAQ,EAAE,IAAIA,GAAKA,EAAE,CAAC,CAAC,EAAI,CAAA,CAAC,CACxF,CACD,EACC,CAAA,EAIH,IAAII,EACJ,EAAG,CAEF,GADAA,EAAO,MAAM,KAAK,OAAO,KAAKL,CAAa,EACxCL,EAAQ,OAAQ,CAClBU,EAAK,QAAU,CAAC,KAAM,YAAa,QAAS,GAAI,WAAY,EAAC,EAC7D,gBAAiBC,KAASD,EAOzB,GANGT,EAAW,OAAO,UAClBU,EAAM,SAAS,UACjBD,EAAK,QAAQ,SAAWC,EAAM,QAAQ,QACtCX,EAAQ,OAAO,CAAC,KAAMW,EAAM,QAAQ,QAAQ,GAE1CA,EAAM,SAAS,eAAiB,QAAQ,WAAaA,EAAM,QAAQ,YACnEA,EAAM,MAAM,KAEjB,CAGA,GAAGD,EAAK,SAAS,YAAY,QAAU,CAACT,EAAW,O
AAO,QAAS,CAClEP,EAAQ,KAAKgB,EAAK,OAAO,EACzB,MAAMM,EAAU,MAAM,QAAQ,IAAIN,EAAK,QAAQ,WAAW,IAAI,MAAOO,GAAkB,CACtF,MAAMC,GAAQlB,EAAQ,OAAS,KAAK,GAAG,QAAQ,QAAQ,KAAKmB,EAAAA,WAAW,OAAQF,EAAS,SAAS,IAAI,CAAC,EACtG,GAAG,CAACC,EAAM,MAAO,CAAC,KAAM,OAAQ,UAAWD,EAAS,SAAS,KAAM,QAAS,6BAAA,EAC5E,MAAMW,EAAO,OAAOX,EAAS,SAAS,WAAc,SAAWH,EAAAA,iBAAiBG,EAAS,SAAS,UAAW,CAAA,CAAE,EAAIA,EAAS,SAAS,UACrI,GAAI,CACH,MAAMG,EAAS,MAAMF,EAAK,GAAGU,EAAM,KAAK,EAAE,EAC1C,MAAO,CAAC,KAAM,OAAQ,UAAWX,EAAS,SAAS,KAAM,KAAAW,EAAM,QAASP,EAAAA,aAAaD,CAAM,CAAA,CAC5F,OAASE,EAAU,CAClB,MAAO,CAAC,KAAM,OAAQ,UAAWL,EAAS,SAAS,KAAM,KAAAW,EAAM,QAASP,EAAAA,aAAa,CAAC,MAAOC,GAAK,SAAWA,GAAK,YAAc,SAAA,CAAU,CAAA,CAC3I,CACD,CAAC,CAAC,EACF5B,EAAQ,KAAK,GAAGsB,CAAO,EACvBX,EAAc,SAAWX,CAC1B,CACD,OAAS,CAACO,EAAW,OAAO,SAAWS,EAAK,SAAS,YAAY,QAC9DV,EAAQ,QAAQA,EAAQ,OAAO,CAAC,KAAM,GAAK,EAC9CG,EAAI,KAAK,WAAW,CAAC,GAAGT,EAAS,CAAC,KAAM,YAAa,QAASgB,EAAK,SAAS,OAAA,CAAQ,CAAC,CAAC,CACvF,CAAC,EACD,OAAO,OAAO,OAAOR,EAAU,CAAC,MAAO,IAAMD,EAAW,MAAA,EAAQ,CACjE,CACD,CC1GO,MAAM4B,UAAezC,CAAY,CAGvC,YAA4BE,EAAwBC,EAAyBC,EAAe,CAC3F,MAAA,EAD2B,KAAA,GAAAF,EAAwB,KAAA,SAAAC,EAAyB,KAAA,MAAAC,EAE5E,KAAK,OAAS,IAAIsC,EAAAA,OAAO,CAAC,OAAQvC,EAAS,CAC5C,CALA,OAOQ,WAAWG,EAA8B,CAChD,QAAQC,EAAI,EAAGA,EAAID,EAAQ,OAAQC,IAAK,CACvC,MAAMG,EAAIJ,EAAQC,CAAC,EACnB,GAAGG,EAAE,OAAS,aAAeA,EAAE,WAAY,CAC1C,MAAMiC,EAAQjC,EAAE,WAAW,IAAKkC,IAAa,CAC5C,KAAM,OACN,GAAIA,EAAG,GACP,KAAMA,EAAG,SAAS,KAClB,KAAMlB,EAAAA,iBAAiBkB,EAAG,SAAS,UAAW,CAAA,CAAE,CAAA,EAC/C,EACFtC,EAAQ,OAAOC,EAAG,EAAG,GAAGoC,CAAK,EAC7BpC,GAAKoC,EAAM,OAAS,CACrB,SAAUjC,EAAE,OAAS,QAAUA,EAAE,QAAS,CACzC,MAAMmC,EAASvC,EAAQ,QAAWI,EAAE,cAAgBoC,EAAG,EAAE,EACtDD,IACCnC,EAAE,QAAQ,SAAS,UAAU,EAAGmC,EAAO,MAAQnC,EAAE,QAC/CmC,EAAO,QAAUnC,EAAE,SAEzBJ,EAAQ,OAAOC,EAAG,CAAC,EACnBA,GACD,CAED,CACA,OAAOD,CACR,CAEQ,aAAaA,EAA8B,CAClD,OAAOA,EAAQ,OAAO,CAAC0B,EAAQtB,KAC3BA,EAAE,OAAS,OACbsB,EAAO,KAAK,CACX,KAAM,YACN,QAAS,KACT,WAAY,CAAC,CAAE,GAAItB,EAAE,GAAI,KAAM,WAAY,SAAU,CAAE,KAAMA,EAAE,KAAM,UAAW,KAAK,UAAUA,EAAE,IAAI,CAAA,EAAK,EAC1G,QAAS,KACT,YAAa,CAAA,CAAC,EACZ,CACF,KAAM,OACN,aAAcA,EAAE,GAChB,QAASA,EAAE,OAASA,EAAE,OAAA,CACtB,EAEDsB,EAAO,KAAKtB,CAAC,EAEPsB,GACL,CAAA,CAAW,CACf,CAEA,IAAIrB,EAAiBC,EAAsB,GAAoC,CAC9E,MAAMC,EAAa,IAAI,gBACjBC,EAAW,IAAI,QAAa,MAAOC,EAAKC,IAAQ,CACrD,IAAIV,EAAU,KAAK,aAAa,CAAC,GAAGM,EAAQ,SAAW,CAAA,EAAI,CAAC,KAAM,OAAQ,QAASD,CAAA,CAAQ,CAAC,EACzFC,EAAQ,WAAUN,EAAU,MAAM,KAAK,GAAG,IAAI,SAAcA,EAASM,EAAQ,SAAS,IAAKA,EAAQ,SAAS,IAAKA,CAAO,GAE3H,MAAMK,EAAqB,CAC1B,MAAOL,EAAQ,OAAS,KAAK,MAC7B,SAAUN,EACV,OAAQ,CAAC,CAACM,EAAQ,OAClB,WAAYA,EAAQ,YAAc,KAAK,GAAG,QAAQ,YAAc,KAChE,YAAaA,EAAQ,aAAe,KAAK,GAAG,QAAQ,aAAe,GACnE,OAAQA,EAAQ,OAAS,KAAK,GAAG,QAAQ,OAAS,CAAA,GAAI,IAAIM,IAAM,CAC/D,KAAM,WACN,SAAU,CACT,KAAMA,EAAE,KACR,YAAaA,EAAE,YACf,WAAY,CACX,KAAM,SACN,WAAYA,EAAE,KAAOC,EAAAA,UAAUD,EAAE,KAAM,CAACE,EAAKC,KAAW,CAAC,GAAGA,EAAO,SAAU,MAAA,EAAW,EAAI,CAAA,EAC5F,SAAUH,EAAE,KAAO,OAAO,QAAQA,EAAE,IAAI,EAAE,OAAOA,GAAKA,EAAE,CAAC,EAAE,QAAQ,EAAE,IAAIA,GAAKA,EAAE,CAAC,CAAC,EAAI,CAAA,CAAC,CACxF,CACD,EACC,CAAA,EAIH,IAAII,EACJ,EAAG,CAIF,GAHAA,EAAO,MAAM,KAAK,OAAO,KAAK,YAAY,OAAOL,CAAa,EAG3DL,EAAQ,OAAQ,CAClBU,EAAK,QAAU,CAAA,EACf,gBAAiBC,KAASD,EAAM,CAC/B,GAAGT,EAAW,OAAO,QAAS,MAC3BU,EAAM,QAAQ,CAAC,EAAE,MAAM,SACzBX,EAAQ,OAAO,CAAC,KAAMW,EAAM,QAAQ,CAAC,EAAE,MAAM,QAAQ,CAEvD,CACD,CAGA,MAAMI,EAAYL,EAAK,QAAQ,CAAC,EAAE,QAAQ,YAAc,CAAA,EACxD,GAAGK,EAAU,QAAU,CAACd,EAAW,OAAO,QAAS,CAClDP,EAAQ,KAAKgB,EAAK,QAAQ,CAAC,EAAE,OAAO,EACpC,MAAMM,EAAU,MAAM,QAAQ,IAAID,EAAU,IAAI,MAAOE,GAAkB,CACxE,MAAMC,EAAOlB,EAAQ,OAAO,KAAKmB,aAAW,OAAQF,EAAS,SAAS,IAAI,CAAC,EAC3E,GAAG,CAACC,EAAM,MAAO,CAAC,KAAM,OAAQ,aAAcD,EAAS,GAAI,QAAS,6BAAA,EACpE,GAA
I,CACH,MAAMW,EAAOd,EAAAA,iBAAiBG,EAAS,SAAS,UAAW,CAAA,CAAE,EACvDG,EAAS,MAAMF,EAAK,GAAGU,EAAM,KAAK,EAAE,EAC1C,MAAO,CAAC,KAAM,OAAQ,aAAcX,EAAS,GAAI,QAASI,eAAaD,CAAM,CAAA,CAC9E,OAASE,EAAU,CAClB,MAAO,CAAC,KAAM,OAAQ,aAAcL,EAAS,GAAI,QAASI,EAAAA,aAAa,CAAC,MAAOC,GAAK,SAAWA,GAAK,YAAc,SAAA,CAAU,CAAA,CAC7H,CACD,CAAC,CAAC,EACF5B,EAAQ,KAAK,GAAGsB,CAAO,EACvBX,EAAc,SAAWX,CAC1B,CACD,OAAS,CAACO,EAAW,OAAO,SAAWS,EAAK,UAAU,CAAC,GAAG,SAAS,YAAY,QAE5EV,EAAQ,QAAQA,EAAQ,OAAO,CAAC,KAAM,GAAK,EAC9CG,EAAI,KAAK,WAAW,CAAC,GAAGT,EAAS,CAAC,KAAM,YAAa,QAASgB,EAAK,QAAQ,CAAC,EAAE,QAAQ,SAAW,EAAA,CAAG,CAAC,CAAC,CACvG,CAAC,EAED,OAAO,OAAO,OAAOR,EAAU,CAAC,MAAO,IAAMD,EAAW,MAAA,EAAQ,CACjE,CACD,CCnDO,MAAMkC,CAAI,CAGhB,YAA4B7C,EAAwBU,EAAqB,CAA7C,KAAA,GAAAV,EAAwB,KAAA,QAAAU,EAChDA,EAAQ,WAAW,QAAO,KAAK,UAAU,UAAY,IAAIX,EAAU,KAAK,GAAIW,EAAQ,UAAU,MAAOA,EAAQ,UAAU,KAAK,GAC5HA,EAAQ,QAAQ,OAAM,KAAK,UAAU,OAAS,IAAIuB,EAAO,KAAK,GAAIvB,EAAQ,OAAO,KAAMA,EAAQ,OAAO,KAAK,GAC3GA,EAAQ,QAAQ,QAAO,KAAK,UAAU,OAAS,IAAI6B,EAAO,KAAK,GAAI7B,EAAQ,OAAO,MAAOA,EAAQ,OAAO,KAAK,EACjH,CANQ,UAA0C,CAAA,EAclD,IAAID,EAAiBC,EAAsB,GAAoC,CAC9E,IAAIR,EAAa,CAAC,KAAM,IAAI,EAS5B,GARGQ,EAAQ,QACP,OAAOA,EAAQ,OAAS,WAAkBA,EAAQ,MAChDR,EAAQ,CAACQ,EAAQ,MAAa,KAAK,QAASA,EAAQ,KAAK,GAAG,KAAK,IAEpE,CAACA,EAAQ,OAASR,EAAM,CAAC,GAAK,QAC7B,OAAO,KAAK,QAAQ,OAAS,SAAUA,EAAQ,KAAK,QAAQ,MAC1DA,EAAQ,CAAC,KAAK,QAAQ,MAAa,KAAK,QAAS,KAAK,QAAQ,KAAK,GAAG,KAAK,GAE9E,CAACA,EAAM,CAAC,GAAK,CAACA,EAAM,CAAC,EAAG,MAAM,IAAI,MAAM,kCAAkCA,EAAM,CAAC,CAAC,MAAMA,EAAM,CAAC,CAAC,EAAE,EACrG,OAAO,KAAK,UAAUA,EAAM,CAAC,CAAC,EAAE,IAAIO,EAAS,CAAC,GAAGC,EAAS,MAAOR,EAAM,CAAC,EAAE,CAC3E,CAUA,MAAM,SAASE,EAAuB0C,EAAaC,EAAarC,EAA6C,CAC5G,GAAG,KAAK,eAAeN,CAAO,EAAI0C,EAAK,OAAO1C,EAC9C,IAAI4C,EAAO,EAAGC,EAAS,EACvB,QAAQC,KAAK9C,EAAQ,aAEpB,GADA6C,GAAU,KAAK,eAAeC,EAAE,OAAO,EACpCD,EAASF,EAAKC,QACZ,OAEN,GAAG5C,EAAQ,QAAU4C,EAAM,OAAO5C,EAClC,MAAM+C,EAASH,GAAQ,EAAI,CAAA,EAAK5C,EAAQ,MAAM,CAAC4C,CAAI,EAClDI,GAAWJ,GAAQ,EAAI5C,EAAUA,EAAQ,MAAM,EAAG,CAAC4C,CAAI,GAAG,OAAOxC,GAAKA,EAAE,OAAS,aAAeA,EAAE,OAAS,MAAM,EAElH,MAAO,CAAC,CAAC,KAAM,YAAa,QAAS,yBADrB,MAAM,KAAK,UAAU4C,EAAQ,OAAS,GAAGF,EAAE,IAAI,KAAKA,EAAE,OAAO,EAAE,EAAE,KAAK;AAAA;AAAA,CAAM,EAAG,IAAKxC,CAAO,CACtC,IAAK,GAAGyC,CAAM,CACpF,CAOA,eAAe/C,EAAsB,CACpC,MAAMkB,EAAO,KAAK,UAAUlB,CAAO,EACnC,OAAO,KAAK,KAAMkB,EAAK,OAAS,EAAK,GAAG,CACzC,CAQA,MAAM,KAAKb,EAAiBC,EAAsB,CACjD,IAAIU,EAAO,MAAM,KAAK,IAAIX,EAAS,CAClC,OAAQ,4BACR,GAAGC,CAAA,CACH,EACD,OAAIU,IAAO,CAAC,GAAG,QACRI,mBAAiB,IAAI,OAAO,SAAa,EAAE,KAAKJ,EAAK,CAAC,EAAE,OAAO,EAAG,EAAE,EAD5C,CAAA,CAEhC,CASA,UAAUE,EAAc2B,EAAgBvC,EAA8C,CACrF,OAAO,KAAK,IAAIY,EAAM,CAAC,OAAQ,+BAA+B2B,CAAM,+BAAgC,YAAa,GAAK,GAAGvC,CAAA,CAAQ,EAC/H,KAAKN,GAAmBA,EAAQ,IAAA,GAAO,SAAW,IAAI,CACzD,CACD,CClJO,MAAMiD,CAAG,CAOf,YAA4B3C,EAAoB,CAApB,KAAA,QAAAA,EAC3B,KAAK,IAAM,IAAImC,EAAI,KAAMnC,CAAO,EAC7B,KAAK,QAAQ,SAAS,SACxB,KAAK,aAAe,KAAK,QAAQ,SAAS,MAAM,SAAS,MAAM,EAAI,KAAK,QAAQ,SAAS,MAAQ,KAAK,QAAQ,SAAS,MAAQ,OAC/H,QAAQ,IAAI,gBAAkB,KAAK,QAAQ,QAAQ,MAAQ,OAAS,KAAK,YAAY,EACrF,KAAK,iBAAA,EAEP,CAbQ,UAA8C,CAAA,EAC9C,aAGR,IAiBA,MAAM,IAAI4C,EAAcpD,EAAgB,KAAK,aAAsC,CAClF,GAAG,CAAC,KAAK,QAAQ,SAAS,OAAQ,MAAM,IAAI,MAAM,wBAAwB,EAC1E,MAAMgD,EAAI,MAAM,KAAK,iBAAiBhD,CAAK,EACrCqD,EAAO,KAAK,OAAA,EAAS,SAAS,EAAE,EAAE,UAAU,EAAG,EAAE,EAAI,IAAMD,EAAK,MAAM,GAAG,EAAE,MAAQ,OACnFE,EAASC,EAAK,KAAK,KAAK,QAAQ,QAAQ,MAAQ,OAAQF,CAAI,EAClE,eAAQ,IAAI,QAAU,KAAK,QAAQ,QAAQ,MAAQ,OAAS,KAAK,YAAY,EAC7E,QAAQ,IAAI,SAASC,CAAM,OAAO,KAAK,QAAQ,QAAQ,MAAM,eAAeN,CAAC,OAAOI,CAAI,cAAcE,CAAM,EAAE,EAC9G,MAAME,EAAAA,UAAUF,CAAM,OAAO,KAAK,QAAQ,QAAQ,MAAM,eAAeN,CAAC,OAAOI,CAAI,cAAcE,CAAM,GAChGG,EAAG,SAASH,EAAQ,OAAO,EAAE,KAAKlC,GAAQA,GAAM,KAAA,GAAU,IAAI,EACnE,QAAQ,IAA
MqC,EAAG,GAAGH,EAAQ,CAAC,MAAO,EAAA,CAAK,EAAE,MAAM,IAAM,CAAC,CAAC,CAAC,CAC7D,CAQA,MAAM,iBAAiBtD,EAAgB,KAAK,aAA+B,CAC1E,GAAG,CAAC,KAAK,QAAQ,SAAS,OAAQ,MAAM,IAAI,MAAM,wBAAwB,EACtEA,EAAM,SAAS,MAAM,IAAGA,GAAS,QACrC,MAAM0D,EAAIH,EAAK,KAAK,KAAK,QAAQ,QAAQ,KAAMvD,CAAK,EAEpD,OADA,QAAQ,IAAI,aAAe0D,CAAC,EACzB,MAAMD,EAAG,KAAKC,CAAC,EAAE,KAAK,IAAM,EAAI,EAAE,MAAM,IAAM,EAAK,GACrD,QAAQ,IAAI,SAAS,EACdA,GAEH,KAAK,UAAU1D,CAAK,EAAU,KAAK,UAAUA,CAAK,GACvD,KAAK,UAAUA,CAAK,EAAI,MAAM,6DAA6DA,CAAK,EAAE,EAChG,KAAKkB,GAAQA,EAAK,aAAa,EAC/B,KAAKyC,GAAO,OAAO,KAAKA,CAAG,CAAC,EAAE,KAAK,MAAMC,IACzC,MAAMH,EAAG,UAAUC,EAAGE,CAAM,EAC5B,OAAO,KAAK,UAAU5D,CAAK,EACpB0D,EACP,EACK,KAAK,UAAU1D,CAAK,EAC5B,CAOA,IAAIoD,EAAqE,CACxE,IAAIS,EACJ,MAAO,CACN,MAAO,IAAM,CAAEA,GAAQ,UAAA,CAAa,EACpC,SAAU,IAAI,QAAQ,MAAMlD,GAAO,CAClCkD,EAAS,MAAMC,EAAAA,aAAa,KAAK,EACjC,KAAM,CAAC,KAAAC,CAAA,EAAQ,MAAMF,EAAO,UAAUT,CAAI,EAC1C,MAAMS,EAAO,UAAA,EACblD,EAAIoD,EAAK,KAAK,KAAA,GAAU,IAAI,CAC7B,CAAC,CAAA,CAEH,CAQA,mBAAmBC,KAAmBC,EAAuB,CAC5D,GAAGA,EAAY,OAAS,EAAG,MAAM,IAAI,MAAM,wCAAwC,EAEnF,MAAMC,EAAS,CAAC9C,EAAc+C,EAAqB,KAC3C/C,EAAK,cAAc,MAAM,EAAE,EAAE,IAAI,CAACgD,EAAMC,IAC7CD,EAAK,WAAW,CAAC,GAAKC,EAAQ,GAAMF,EAAaA,CAAU,EAAE,MAAM,EAAGA,CAAU,EAG7EG,EAAmB,CAACC,EAAcC,IAAyB,CAChE,GAAID,EAAG,SAAWC,EAAG,OAAQ,MAAM,IAAI,MAAM,6BAA6B,EAC1E,MAAMC,EAAUC,EAAG,SAASH,CAAE,EAAGI,EAAUD,EAAG,SAASF,CAAE,EACnDI,EAAaF,EAAG,IAAID,EAASE,CAAO,EACpCE,EAAaH,EAAG,KAAKD,CAAO,EAC5BK,EAAaJ,EAAG,KAAKC,CAAO,EAClC,OAAGE,EAAW,WAAW,CAAC,IAAM,GAAKC,EAAW,WAAW,CAAC,IAAM,EAAU,EACrEF,EAAW,SAAA,EAAW,CAAC,GAAKC,EAAW,WAAW,CAAC,EAAIC,EAAW,SAAA,EAAW,CAAC,EACtF,EAEMC,EAAIb,EAAOF,CAAM,EACjBgB,EAAef,EAAY,IAAInD,GAAKoD,EAAOpD,CAAC,CAAC,EAAE,IAAImE,GAAaX,EAAiBS,EAAGE,CAAS,CAAC,EACpG,MAAO,CAAC,IAAKD,EAAa,OAAO,CAACE,EAAKC,IAAMD,EAAMC,EAAG,CAAC,EAAIH,EAAa,OAAQ,IAAK,KAAK,IAAI,GAAGA,CAAY,EAAG,aAAAA,CAAA,CACjH,CACD,CC1FO,MAAMI,EAAkB,CAC9B,KAAM,MACN,YAAa,qDACb,KAAM,CAAC,QAAS,CAAC,KAAM,SAAU,YAAa,iBAAkB,SAAU,GAAI,EAC9E,GAAKhD,GAA4BoB,EAAAA,IAAIpB,EAAK,OAAO,EAClD,EAEaiD,EAAuB,CACnC,KAAM,eACN,YAAa,4BACb,KAAM,CAAA,EACN,GAAI,SAAY,IAAI,KAAA,EAAO,YAAA,CAC5B,EAEaC,EAAmB,CAC/B,KAAM,OACN,YAAa,mBACb,KAAM,CACL,SAAU,CAAC,KAAM,SAAU,YAAa,qBAAsB,KAAM,CAAC,MAAO,OAAQ,QAAQ,EAAG,SAAU,EAAA,EACzG,KAAM,CAAC,KAAM,SAAU,YAAa,kBAAmB,SAAU,EAAA,CAAI,EAEtE,GAAI,MAAOlD,EAAMtC,IAAO,CACvB,GAAI,CACH,OAAOsC,EAAK,KAAA,CACX,IAAK,OACJ,OAAO,MAAMgD,EAAQ,GAAG,CAAC,QAAShD,EAAK,IAAA,EAAOtC,CAAE,EACjD,IAAK,OACJ,OAAO,MAAMyF,EAAO,GAAG,CAAC,KAAMnD,EAAK,IAAA,EAAOtC,CAAE,EAC7C,IAAK,SACJ,OAAO,MAAM0F,EAAW,GAAG,CAAC,KAAMpD,EAAK,IAAA,EAAOtC,CAAE,CACjD,CAEF,OAAQgC,EAAU,CACjB,MAAO,CAAC,MAAOA,GAAK,SAAWA,EAAI,UAAS,CAC7C,CACD,CACD,EAEa2D,EAAoB,CAChC,KAAM,QACN,YAAa,2BACb,KAAM,CACL,IAAK,CAAC,KAAM,SAAU,YAAa,eAAgB,SAAU,EAAA,EAC7D,OAAQ,CAAC,KAAM,SAAU,YAAa,qBAAsB,KAAM,CAAC,MAAO,OAAQ,MAAO,QAAQ,EAAG,QAAS,KAAA,EAC7G,QAAS,CAAC,KAAM,SAAU,YAAa,uBAAwB,QAAS,EAAC,EACzE,KAAM,CAAC,KAAM,SAAU,YAAa,mBAAA,CAAmB,EAExD,GAAKrD,GAKC,IAAIsD,EAAAA,KAAK,CAAC,IAAKtD,EAAK,IAAK,QAASA,EAAK,QAAQ,EAAE,QAAQ,CAAC,OAAQA,EAAK,QAAU,MAAO,KAAMA,EAAK,IAAA,CAAK,CAC/G,EAEamD,EAAiB,CAC7B,KAAM,kBACN,YAAa,8BACb,KAAM,CACL,KAAM,CAAC,KAAM,SAAU,YAAa,sBAAuB,SAAU,EAAA,CAAI,EAE1E,GAAI,MAAOnD,GAAyB,CACnC,MAAMuD,EAAUC,EAAAA,mBAAmB,IAAI,EACjC1E,EAAO,MAAM2E,KAAQ,CAAC,QAAAF,CAAA,EAAUvD,EAAK,KAAM,EAAI,EAAE,MAAON,GAAa6D,EAAQ,OAAO,MAAM,KAAK7D,CAAG,CAAC,EACzG,MAAO,CAAC,GAAG6D,EAAQ,OAAQ,OAAQzE,EAAM,OAAQ,OAAW,OAAQ,MAAA,CACrE,CACD,EAEasE,EAAqB,CACjC,KAAM,kBACN,YAAa,8BACb,KAAM,CACL,KAAM,CAAC,KAAM,SAAU,YAAa,sBAAuB,SAAU,EAAA,CAAI,EAE1E,GAAI,MAAOpD,IAA0B,CAAC,OAAQ0D,EAAAA,mBAAmB1D,EAAK,IAAI,GAAA,EAC3E,EAEa2D,EAAqB,CACjC,KAAM,SACN,YAAa,4FACb,KAAM,CACL
,MAAO,CAAC,KAAM,SAAU,YAAa,gBAAiB,SAAU,EAAA,EAChE,OAAQ,CAAC,KAAM,SAAU,YAAa,8BAA+B,QAAS,CAAA,CAAC,EAEhF,GAAI,MAAO3D,GAGL,CACL,MAAM4D,EAAO,MAAM,MAAM,uCAAuC,mBAAmB5D,EAAK,KAAK,CAAC,GAAI,CACjG,QAAS,CAAC,aAAc,4CAA6C,kBAAmB,gBAAA,CAAgB,CACxG,EAAE,KAAKlB,GAAQA,EAAK,MAAM,EAC3B,IAAI+E,EAAOC,EAAQ,8BACnB,MAAM1E,EAAU,IAAI2E,OACpB,MAAOF,EAAQC,EAAM,KAAKF,CAAI,KAAO,MAAM,CAC1C,IAAII,EAAM,iBAAiB,KAAK,mBAAmBH,EAAM,CAAC,CAAC,CAAC,IAAI,CAAC,EAGjE,GAFGG,IAAKA,EAAM,mBAAmBA,CAAG,GACjCA,GAAK5E,EAAQ,IAAI4E,CAAG,EACpB5E,EAAQ,OAASY,EAAK,QAAU,GAAI,KACxC,CACA,OAAOZ,CACR,CACD"}
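The replacement source map that follows embeds the updated sources, in which Ai.asr() spawns the whisper binary directly and returns an abortable handle ({abort, response}) instead of a bare promise, matching the existing ocr() shape, while llm.ask() keeps returning an AbortablePromise of the chat history. A minimal consumer sketch of that surface, assuming the package root re-exports the Ai class shown in the embedded ai.ts; the binary location, model path, Ollama host, and audio file below are placeholders:

import {Ai} from '@ztimson/ai-utils';

const ai = new Ai({
	model: 'ollama',                                      // default provider; resolves to the ollama.model below
	ollama: {host: 'http://localhost:11434', model: 'llama3'},
	whisper: {
		binary: '/usr/local/bin/whisper-cli',             // placeholder whisper.cpp binary location
		model: 'ggml-base.en',                            // '.bin' is appended automatically
		path: '/var/models/whisper'                       // models are downloaded/cached here
	}
});

// The updated asr() returns {abort, response} (like ocr()) instead of a bare promise,
// so a stalled transcription can be cancelled.
const {abort, response} = ai.asr('./meeting.wav');        // placeholder audio path
const timer = setTimeout(abort, 30_000);
const transcript = await response.catch(() => null).finally(() => clearTimeout(timer));

// llm.ask() resolves to the updated chat history and also exposes abort().
const chat = ai.llm.ask(`Summarize this transcript: ${transcript}`, {temperature: 0.3});
const history = await chat;                               // LLMMessage[]; chat.abort() cancels mid-generation
console.log(history.at(-1)?.content);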
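The embedded tools.ts and llm.ts sources also show the AiTool contract (snake_case name, JSON-schema-style args, fn callback) and the tools/compress options accepted by llm.ask(). A sketch of a user-defined tool under the same export assumption; the weather tool and its stubbed result are hypothetical:

import {Ai, AiTool} from '@ztimson/ai-utils';

// Hypothetical custom tool following the AiTool shape: snake_case name,
// JSON-schema-style args, and an fn whose return value is sanitized and
// handed back to the model as a tool result.
const WeatherTool: AiTool = {
	name: 'get_weather',
	description: 'Look up the current temperature for a city',
	args: {city: {type: 'string', description: 'City name', required: true}},
	fn: async (args: {city: string}) => ({city: args.city, celsius: 21})   // stubbed result
};

const ai = new Ai({model: 'ollama', ollama: {host: 'http://localhost:11434', model: 'llama3'}});
const answer = await ai.llm.ask('What is the weather in Toronto right now?', {
	tools: [WeatherTool],
	compress: {max: 8000, min: 4000}   // summarize older messages once the estimated context exceeds ~8k tokens
});
console.log(answer.at(-1)?.content);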
1
+ {"version":3,"file":"index.js","sources":["../src/provider.ts","../src/antrhopic.ts","../src/ollama.ts","../src/open-ai.ts","../src/llm.ts","../src/ai.ts","../src/tools.ts"],"sourcesContent":["import {LLMMessage, LLMOptions, LLMRequest} from './llm.ts';\n\nexport type AbortablePromise<T> = Promise<T> & {abort: () => void};\n\nexport abstract class LLMProvider {\n\tabstract ask(message: string, options: LLMRequest): AbortablePromise<LLMMessage[]>;\n}\n","import {Anthropic as anthropic} from '@anthropic-ai/sdk';\nimport {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {LLMMessage, LLMRequest} from './llm.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\n\nexport class Anthropic extends LLMProvider {\n\tclient!: anthropic;\n\n\tconstructor(public readonly ai: Ai, public readonly apiToken: string, public model: string) {\n\t\tsuper();\n\t\tthis.client = new anthropic({apiKey: apiToken});\n\t}\n\n\tprivate toStandard(history: any[]): LLMMessage[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tconst orgI = i;\n\t\t\tif(typeof history[orgI].content != 'string') {\n\t\t\t\tif(history[orgI].role == 'assistant') {\n\t\t\t\t\thistory[orgI].content.filter((c: any) => c.type =='tool_use').forEach((c: any) => {\n\t\t\t\t\t\ti++;\n\t\t\t\t\t\thistory.splice(i, 0, {role: 'tool', id: c.id, name: c.name, args: c.input});\n\t\t\t\t\t});\n\t\t\t\t} else if(history[orgI].role == 'user') {\n\t\t\t\t\thistory[orgI].content.filter((c: any) => c.type =='tool_result').forEach((c: any) => {\n\t\t\t\t\t\tconst h = history.find((h: any) => h.id == c.tool_use_id);\n\t\t\t\t\t\th[c.is_error ? 'error' : 'content'] = c.content;\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\thistory[orgI].content = history[orgI].content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\\n\\n');\n\t\t\t}\n\t\t}\n\t\treturn history.filter(h => !!h.content);\n\t}\n\n\tprivate fromStandard(history: LLMMessage[]): any[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tif(history[i].role == 'tool') {\n\t\t\t\tconst h: any = history[i];\n\t\t\t\thistory.splice(i, 1,\n\t\t\t\t\t{role: 'assistant', content: [{type: 'tool_use', id: h.id, name: h.name, input: h.args}]},\n\t\t\t\t\t{role: 'user', content: [{type: 'tool_result', tool_use_id: h.id, is_error: !!h.error, content: h.error || h.content}]}\n\t\t\t\t)\n\t\t\t\ti++;\n\t\t\t}\n\t\t}\n\t\treturn history;\n\t}\n\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tconst controller = new AbortController();\n\t\tconst response = new Promise<any>(async (res, rej) => {\n\t\t\tlet history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);\n\t\t\tif(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);\n\t\t\tconst requestParams: any = {\n\t\t\t\tmodel: options.model || this.model,\n\t\t\t\tmax_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,\n\t\t\t\tsystem: options.system || this.ai.options.system || '',\n\t\t\t\ttemperature: options.temperature || this.ai.options.temperature || 0.7,\n\t\t\t\ttools: (options.tools || this.ai.options.tools || []).map(t => ({\n\t\t\t\t\tname: t.name,\n\t\t\t\t\tdescription: t.description,\n\t\t\t\t\tinput_schema: {\n\t\t\t\t\t\ttype: 'object',\n\t\t\t\t\t\tproperties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},\n\t\t\t\t\t\trequired: t.args ? 
Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []\n\t\t\t\t\t},\n\t\t\t\t\tfn: undefined\n\t\t\t\t})),\n\t\t\t\tmessages: history,\n\t\t\t\tstream: !!options.stream,\n\t\t\t};\n\n\t\t\t// Run tool changes\n\t\t\tlet resp: any;\n\t\t\tdo {\n\t\t\t\tresp = await this.client.messages.create(requestParams);\n\n\t\t\t\t// Streaming mode\n\t\t\t\tif(options.stream) {\n\t\t\t\t\tresp.content = [];\n\t\t\t\t\tfor await (const chunk of resp) {\n\t\t\t\t\t\tif(controller.signal.aborted) break;\n\t\t\t\t\t\tif(chunk.type === 'content_block_start') {\n\t\t\t\t\t\t\tif(chunk.content_block.type === 'text') {\n\t\t\t\t\t\t\t\tresp.content.push({type: 'text', text: ''});\n\t\t\t\t\t\t\t} else if(chunk.content_block.type === 'tool_use') {\n\t\t\t\t\t\t\t\tresp.content.push({type: 'tool_use', id: chunk.content_block.id, name: chunk.content_block.name, input: <any>''});\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t} else if(chunk.type === 'content_block_delta') {\n\t\t\t\t\t\t\tif(chunk.delta.type === 'text_delta') {\n\t\t\t\t\t\t\t\tconst text = chunk.delta.text;\n\t\t\t\t\t\t\t\tresp.content.at(-1).text += text;\n\t\t\t\t\t\t\t\toptions.stream({text});\n\t\t\t\t\t\t\t} else if(chunk.delta.type === 'input_json_delta') {\n\t\t\t\t\t\t\t\tresp.content.at(-1).input += chunk.delta.partial_json;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t} else if(chunk.type === 'content_block_stop') {\n\t\t\t\t\t\t\tconst last = resp.content.at(-1);\n\t\t\t\t\t\t\tif(last.input != null) last.input = last.input ? JSONAttemptParse(last.input, {}) : {};\n\t\t\t\t\t\t} else if(chunk.type === 'message_stop') {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Run tools\n\t\t\t\tconst toolCalls = resp.content.filter((c: any) => c.type === 'tool_use');\n\t\t\t\tif(toolCalls.length && !controller.signal.aborted) {\n\t\t\t\t\thistory.push({role: 'assistant', content: resp.content});\n\t\t\t\t\tconst results = await Promise.all(toolCalls.map(async (toolCall: any) => {\n\t\t\t\t\t\tconst tool = options.tools?.find(findByProp('name', toolCall.name));\n\t\t\t\t\t\tif(!tool) return {tool_use_id: toolCall.id, is_error: true, content: 'Tool not found'};\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst result = await tool.fn(toolCall.input, this.ai);\n\t\t\t\t\t\t\treturn {type: 'tool_result', tool_use_id: toolCall.id, content: JSONSanitize(result)};\n\t\t\t\t\t\t} catch (err: any) {\n\t\t\t\t\t\t\treturn {type: 'tool_result', tool_use_id: toolCall.id, is_error: true, content: err?.message || err?.toString() || 'Unknown'};\n\t\t\t\t\t\t}\n\t\t\t\t\t}));\n\t\t\t\t\thistory.push({role: 'user', content: results});\n\t\t\t\t\trequestParams.messages = history;\n\t\t\t\t}\n\t\t\t} while (!controller.signal.aborted && resp.content.some((c: any) => c.type === 'tool_use'));\n\t\t\tif(options.stream) options.stream({done: true});\n\t\t\tres(this.toStandard([...history, {\n\t\t\t\trole: 'assistant',\n\t\t\t\tcontent: resp.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\\n\\n')\n\t\t\t}]));\n\t\t});\n\t\treturn Object.assign(response, {abort: () => controller.abort()});\n\t}\n}\n","import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {LLMMessage, LLMRequest} from './llm.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\nimport {Ollama as ollama} from 'ollama';\n\nexport class Ollama extends LLMProvider {\n\tclient!: ollama;\n\n\tconstructor(public readonly ai: Ai, public host: string, public model: string) 
{\n\t\tsuper();\n\t\tthis.client = new ollama({host});\n\t}\n\n\tprivate toStandard(history: any[]): LLMMessage[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tif(history[i].role == 'assistant' && history[i].tool_calls) {\n\t\t\t\tif(history[i].content) delete history[i].tool_calls;\n\t\t\t\telse {\n\t\t\t\t\thistory.splice(i, 1);\n\t\t\t\t\ti--;\n\t\t\t\t}\n\t\t\t} else if(history[i].role == 'tool') {\n\t\t\t\tconst error = history[i].content.startsWith('{\"error\":');\n\t\t\t\thistory[i] = {role: 'tool', name: history[i].tool_name, args: history[i].args, [error ? 'error' : 'content']: history[i].content};\n\t\t\t}\n\t\t}\n\t\treturn history;\n\t}\n\n\tprivate fromStandard(history: LLMMessage[]): any[] {\n\t\treturn history.map((h: any) => {\n\t\t\tif(h.role != 'tool') return h;\n\t\t\treturn {role: 'tool', tool_name: h.name, content: h.error || h.content}\n\t\t});\n\t}\n\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tconst controller = new AbortController();\n\t\tconst response = new Promise<any>(async (res, rej) => {\n\t\t\tlet system = options.system || this.ai.options.system;\n\t\t\tlet history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);\n\t\t\tif(history[0].roll == 'system') {\n\t\t\t\tif(!system) system = history.shift();\n\t\t\t\telse history.shift();\n\t\t\t}\n\t\t\tif(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min);\n\t\t\tif(options.system) history.unshift({role: 'system', content: system})\n\n\t\t\tconst requestParams: any = {\n\t\t\t\tmodel: options.model || this.model,\n\t\t\t\tmessages: history,\n\t\t\t\tstream: !!options.stream,\n\t\t\t\tsignal: controller.signal,\n\t\t\t\toptions: {\n\t\t\t\t\ttemperature: options.temperature || this.ai.options.temperature || 0.7,\n\t\t\t\t\tnum_predict: options.max_tokens || this.ai.options.max_tokens || 4096,\n\t\t\t\t},\n\t\t\t\ttools: (options.tools || this.ai.options.tools || []).map(t => ({\n\t\t\t\t\ttype: 'function',\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: t.name,\n\t\t\t\t\t\tdescription: t.description,\n\t\t\t\t\t\tparameters: {\n\t\t\t\t\t\t\ttype: 'object',\n\t\t\t\t\t\t\tproperties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},\n\t\t\t\t\t\t\trequired: t.args ? 
Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}))\n\t\t\t}\n\n\t\t\t// Run tool chains\n\t\t\tlet resp: any;\n\t\t\tdo {\n\t\t\t\tresp = await this.client.chat(requestParams);\n\t\t\t\tif(options.stream) {\n\t\t\t\t\tresp.message = {role: 'assistant', content: '', tool_calls: []};\n\t\t\t\t\tfor await (const chunk of resp) {\n\t\t\t\t\t\tif(controller.signal.aborted) break;\n\t\t\t\t\t\tif(chunk.message?.content) {\n\t\t\t\t\t\t\tresp.message.content += chunk.message.content;\n\t\t\t\t\t\t\toptions.stream({text: chunk.message.content});\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif(chunk.message?.tool_calls) resp.message.tool_calls = chunk.message.tool_calls;\n\t\t\t\t\t\tif(chunk.done) break;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Run tools\n\t\t\t\tif(resp.message?.tool_calls?.length && !controller.signal.aborted) {\n\t\t\t\t\thistory.push(resp.message);\n\t\t\t\t\tconst results = await Promise.all(resp.message.tool_calls.map(async (toolCall: any) => {\n\t\t\t\t\t\tconst tool = (options.tools || this.ai.options.tools)?.find(findByProp('name', toolCall.function.name));\n\t\t\t\t\t\tif(!tool) return {role: 'tool', tool_name: toolCall.function.name, content: '{\"error\": \"Tool not found\"}'};\n\t\t\t\t\t\tconst args = typeof toolCall.function.arguments === 'string' ? JSONAttemptParse(toolCall.function.arguments, {}) : toolCall.function.arguments;\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst result = await tool.fn(args, this.ai);\n\t\t\t\t\t\t\treturn {role: 'tool', tool_name: toolCall.function.name, args, content: JSONSanitize(result)};\n\t\t\t\t\t\t} catch (err: any) {\n\t\t\t\t\t\t\treturn {role: 'tool', tool_name: toolCall.function.name, args, content: JSONSanitize({error: err?.message || err?.toString() || 'Unknown'})};\n\t\t\t\t\t\t}\n\t\t\t\t\t}));\n\t\t\t\t\thistory.push(...results);\n\t\t\t\t\trequestParams.messages = history;\n\t\t\t\t}\n\t\t\t} while (!controller.signal.aborted && resp.message?.tool_calls?.length);\n\t\t\tif(options.stream) options.stream({done: true});\n\t\t\tres(this.toStandard([...history, {role: 'assistant', content: resp.message?.content}]));\n\t\t});\n\t\treturn Object.assign(response, {abort: () => controller.abort()});\n\t}\n}\n","import {OpenAI as openAI} from 'openai';\nimport {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {LLMMessage, LLMRequest} from './llm.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\n\nexport class OpenAi extends LLMProvider {\n\tclient!: openAI;\n\n\tconstructor(public readonly ai: Ai, public readonly apiToken: string, public model: string) {\n\t\tsuper();\n\t\tthis.client = new openAI({apiKey: apiToken});\n\t}\n\n\tprivate toStandard(history: any[]): LLMMessage[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tconst h = history[i];\n\t\t\tif(h.role === 'assistant' && h.tool_calls) {\n\t\t\t\tconst tools = h.tool_calls.map((tc: any) => ({\n\t\t\t\t\trole: 'tool',\n\t\t\t\t\tid: tc.id,\n\t\t\t\t\tname: tc.function.name,\n\t\t\t\t\targs: JSONAttemptParse(tc.function.arguments, {})\n\t\t\t\t}));\n\t\t\t\thistory.splice(i, 1, ...tools);\n\t\t\t\ti += tools.length - 1;\n\t\t\t} else if(h.role === 'tool' && h.content) {\n\t\t\t\tconst record = history.find(h2 => h.tool_call_id == h2.id);\n\t\t\t\tif(record) {\n\t\t\t\t\tif(h.content.includes('\"error\":')) record.error = h.content;\n\t\t\t\t\telse record.content = h.content;\n\t\t\t\t}\n\t\t\t\thistory.splice(i, 
1);\n\t\t\t\ti--;\n\t\t\t}\n\n\t\t}\n\t\treturn history;\n\t}\n\n\tprivate fromStandard(history: LLMMessage[]): any[] {\n\t\treturn history.reduce((result, h) => {\n\t\t\tif(h.role === 'tool') {\n\t\t\t\tresult.push({\n\t\t\t\t\trole: 'assistant',\n\t\t\t\t\tcontent: null,\n\t\t\t\t\ttool_calls: [{ id: h.id, type: 'function', function: { name: h.name, arguments: JSON.stringify(h.args) } }],\n\t\t\t\t\trefusal: null,\n\t\t\t\t\tannotations: [],\n\t\t\t\t}, {\n\t\t\t\t\trole: 'tool',\n\t\t\t\t\ttool_call_id: h.id,\n\t\t\t\t\tcontent: h.error || h.content\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\tresult.push(h);\n\t\t\t}\n\t\t\treturn result;\n\t\t}, [] as any[]);\n\t}\n\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tconst controller = new AbortController();\n\t\tconst response = new Promise<any>(async (res, rej) => {\n\t\t\tlet history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);\n\t\t\tif(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);\n\n\t\t\tconst requestParams: any = {\n\t\t\t\tmodel: options.model || this.model,\n\t\t\t\tmessages: history,\n\t\t\t\tstream: !!options.stream,\n\t\t\t\tmax_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,\n\t\t\t\ttemperature: options.temperature || this.ai.options.temperature || 0.7,\n\t\t\t\ttools: (options.tools || this.ai.options.tools || []).map(t => ({\n\t\t\t\t\ttype: 'function',\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: t.name,\n\t\t\t\t\t\tdescription: t.description,\n\t\t\t\t\t\tparameters: {\n\t\t\t\t\t\t\ttype: 'object',\n\t\t\t\t\t\t\tproperties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},\n\t\t\t\t\t\t\trequired: t.args ? 
Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}))\n\t\t\t};\n\n\t\t\t// Tool call and streaming logic similar to other providers\n\t\t\tlet resp: any;\n\t\t\tdo {\n\t\t\t\tresp = await this.client.chat.completions.create(requestParams);\n\n\t\t\t\t// Implement streaming and tool call handling\n\t\t\t\tif(options.stream) {\n\t\t\t\t\tresp.choices = [];\n\t\t\t\t\tfor await (const chunk of resp) {\n\t\t\t\t\t\tif(controller.signal.aborted) break;\n\t\t\t\t\t\tif(chunk.choices[0].delta.content) {\n\t\t\t\t\t\t\toptions.stream({text: chunk.choices[0].delta.content});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Run tools\n\t\t\t\tconst toolCalls = resp.choices[0].message.tool_calls || [];\n\t\t\t\tif(toolCalls.length && !controller.signal.aborted) {\n\t\t\t\t\thistory.push(resp.choices[0].message);\n\t\t\t\t\tconst results = await Promise.all(toolCalls.map(async (toolCall: any) => {\n\t\t\t\t\t\tconst tool = options.tools?.find(findByProp('name', toolCall.function.name));\n\t\t\t\t\t\tif(!tool) return {role: 'tool', tool_call_id: toolCall.id, content: '{\"error\": \"Tool not found\"}'};\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst args = JSONAttemptParse(toolCall.function.arguments, {});\n\t\t\t\t\t\t\tconst result = await tool.fn(args, this.ai);\n\t\t\t\t\t\t\treturn {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize(result)};\n\t\t\t\t\t\t} catch (err: any) {\n\t\t\t\t\t\t\treturn {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize({error: err?.message || err?.toString() || 'Unknown'})};\n\t\t\t\t\t\t}\n\t\t\t\t\t}));\n\t\t\t\t\thistory.push(...results);\n\t\t\t\t\trequestParams.messages = history;\n\t\t\t\t}\n\t\t\t} while (!controller.signal.aborted && resp.choices?.[0]?.message?.tool_calls?.length);\n\n\t\t\tif(options.stream) options.stream({done: true});\n\t\t\tres(this.toStandard([...history, {role: 'assistant', content: resp.choices[0].message.content || ''}]));\n\t\t});\n\n\t\treturn Object.assign(response, {abort: () => controller.abort()});\n\t}\n}\n","import {JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {Anthropic} from './antrhopic.ts';\nimport {Ollama} from './ollama.ts';\nimport {OpenAi} from './open-ai.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\nimport {AiTool} from './tools.ts';\n\nexport type LLMMessage = {\n\t/** Message originator */\n\trole: 'assistant' | 'system' | 'user';\n\t/** Message content */\n\tcontent: string | any;\n} | {\n\t/** Tool call */\n\trole: 'tool';\n\t/** Unique ID for call */\n\tid: string;\n\t/** Tool that was run */\n\tname: string;\n\t/** Tool arguments */\n\targs: any;\n\t/** Tool result */\n\tcontent: undefined | string;\n\t/** Tool error */\n\terror: undefined | string;\n}\n\nexport type LLMOptions = {\n\t/** Anthropic settings */\n\tanthropic?: {\n\t\t/** API Token */\n\t\ttoken: string;\n\t\t/** Default model */\n\t\tmodel: string;\n\t},\n\t/** Ollama settings */\n\tollama?: {\n\t\t/** connection URL */\n\t\thost: string;\n\t\t/** Default model */\n\t\tmodel: string;\n\t},\n\t/** Open AI settings */\n\topenAi?: {\n\t\t/** API Token */\n\t\ttoken: string;\n\t\t/** Default model */\n\t\tmodel: string;\n\t},\n\t/** Default provider & model */\n\tmodel: string | [string, string];\n} & Omit<LLMRequest, 'model'>;\n\nexport type LLMRequest = {\n\t/** System prompt */\n\tsystem?: string;\n\t/** Message history */\n\thistory?: LLMMessage[];\n\t/** Max tokens for request */\n\tmax_tokens?: number;\n\t/** 0 = 
Rigid Logic, 1 = Balanced, 2 = Hyper Creative **/\n\ttemperature?: number;\n\t/** Available tools */\n\ttools?: AiTool[];\n\t/** LLM model */\n\tmodel?: string | [string, string];\n\t/** Stream response */\n\tstream?: (chunk: {text?: string, done?: true}) => any;\n\t/** Compress old messages in the chat to free up context */\n\tcompress?: {\n\t\t/** Trigger chat compression once context exceeds the token count */\n\t\tmax: number;\n\t\t/** Compress chat until context size smaller than */\n\t\tmin: number\n\t}\n}\n\nexport class LLM {\n\tprivate providers: {[key: string]: LLMProvider} = {};\n\n\tconstructor(public readonly ai: Ai, public readonly options: LLMOptions) {\n\t\tif(options.anthropic?.token) this.providers.anthropic = new Anthropic(this.ai, options.anthropic.token, options.anthropic.model);\n\t\tif(options.ollama?.host) this.providers.ollama = new Ollama(this.ai, options.ollama.host, options.ollama.model);\n\t\tif(options.openAi?.token) this.providers.openAi = new OpenAi(this.ai, options.openAi.token, options.openAi.model);\n\t}\n\n\t/**\n\t * Chat with LLM\n\t * @param {string} message Question\n\t * @param {LLMRequest} options Configuration options and chat history\n\t * @returns {{abort: () => void, response: Promise<LLMMessage[]>}} Function to abort response and chat history\n\t */\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tlet model: any = [null, null];\n\t\tif(options.model) {\n\t\t\tif(typeof options.model == 'object') model = options.model;\n\t\t\telse model = [options.model, (<any>this.options)[options.model]?.model];\n\t\t}\n\t\tif(!options.model || model[1] == null) {\n\t\t\tif(typeof this.options.model == 'object') model = this.options.model;\n\t\t\telse model = [this.options.model, (<any>this.options)[this.options.model]?.model];\n\t\t}\n\t\tif(!model[0] || !model[1]) throw new Error(`Unknown LLM provider or model: ${model[0]} / ${model[1]}`);\n\t\treturn this.providers[model[0]].ask(message, {...options, model: model[1]});\n\t}\n\n\t/**\n\t * Compress chat history to reduce context size\n\t * @param {LLMMessage[]} history Chatlog that will be compressed\n\t * @param max Trigger compression once context is larger than max\n\t * @param min Summarize until context size is less than min\n\t * @param {LLMRequest} options LLM options\n\t * @returns {Promise<LLMMessage[]>} New chat history will summary at index 0\n\t */\n\tasync compress(history: LLMMessage[], max: number, min: number, options?: LLMRequest): Promise<LLMMessage[]> {\n\t\tif(this.estimateTokens(history) < max) return history;\n\t\tlet keep = 0, tokens = 0;\n\t\tfor(let m of history.toReversed()) {\n\t\t\ttokens += this.estimateTokens(m.content);\n\t\t\tif(tokens < min) keep++;\n\t\t\telse break;\n\t\t}\n\t\tif(history.length <= keep) return history;\n\t\tconst recent = keep == 0 ? [] : history.slice(-keep),\n\t\t\tprocess = (keep == 0 ? 
history : history.slice(0, -keep)).filter(h => h.role === 'assistant' || h.role === 'user');\n\t\tconst summary = await this.summarize(process.map(m => `${m.role}: ${m.content}`).join('\\n\\n'), 250, options);\n\t\treturn [{role: 'assistant', content: `Conversation Summary: ${summary}`}, ...recent];\n\t}\n\n\t/**\n\t * Estimate variable as tokens\n\t * @param history Object to size\n\t * @returns {number} Rough token count\n\t */\n\testimateTokens(history: any): number {\n\t\tconst text = JSON.stringify(history);\n\t\treturn Math.ceil((text.length / 4) * 1.2);\n\t}\n\n\t/**\n\t * Ask a question with JSON response\n\t * @param {string} message Question\n\t * @param {LLMRequest} options Configuration options and chat history\n\t * @returns {Promise<{} | {} | RegExpExecArray | null>}\n\t */\n\tasync json(message: string, options?: LLMRequest) {\n\t\tlet resp = await this.ask(message, {\n\t\t\tsystem: 'Respond using a JSON blob',\n\t\t\t...options\n\t\t});\n\t\tif(!resp?.[0]?.content) return {};\n\t\treturn JSONAttemptParse(new RegExp('\\{[\\s\\S]*\\}').exec(resp[0].content), {});\n\t}\n\n\t/**\n\t * Create a summary of some text\n\t * @param {string} text Text to summarize\n\t * @param {number} tokens Max number of tokens\n\t * @param options LLM request options\n\t * @returns {Promise<string>} Summary\n\t */\n\tsummarize(text: string, tokens: number, options?: LLMRequest): Promise<string | null> {\n\t\treturn this.ask(text, {system: `Generate a brief summary <= ${tokens} tokens. Output nothing else`, temperature: 0.3, ...options})\n\t\t\t.then(history => <string>history.pop()?.content || null);\n\t}\n}\n","import {createWorker} from 'tesseract.js';\nimport {LLM, LLMOptions} from './llm';\nimport fs from 'node:fs/promises';\nimport Path from 'node:path';\nimport * as tf from '@tensorflow/tfjs';\nimport {spawn} from 'node:child_process';\n\nexport type AiOptions = LLMOptions & {\n\twhisper?: {\n\t\t/** Whisper binary location */\n\t\tbinary: string;\n\t\t/** Model: `ggml-base.en.bin` */\n\t\tmodel: string;\n\t\t/** Path to models */\n\t\tpath: string;\n\t}\n}\n\nexport class Ai {\n\tprivate downloads: {[key: string]: Promise<string>} = {};\n\tprivate whisperModel!: string;\n\n\t/** Large Language Models */\n\tllm!: LLM;\n\n\tconstructor(public readonly options: AiOptions) {\n\t\tthis.llm = new LLM(this, options);\n\t\tif(this.options.whisper?.binary) {\n\t\t\tthis.whisperModel = this.options.whisper?.model.endsWith('.bin') ? 
this.options.whisper?.model : this.options.whisper?.model + '.bin';\n\t\t\tthis.downloadAsrModel();\n\t\t}\n\t}\n\n\t/**\n\t * Convert audio to text using Auditory Speech Recognition\n\t * @param {string} path Path to audio\n\t * @param model Whisper model\n\t * @returns {Promise<any>} Extracted text\n\t */\n\tasr(path: string, model: string = this.whisperModel): {abort: () => void, response: Promise<string | null>} {\n\t\tif(!this.options.whisper?.binary) throw new Error('Whisper not configured');\n\t\tlet abort: any = () => {};\n\t\tconst response = new Promise<string | null>((resolve, reject) => {\n\t\t\tthis.downloadAsrModel(model).then(m => {\n\t\t\t\tlet output = '';\n\t\t\t\tconst proc = spawn(<string>this.options.whisper?.binary, ['-nt', '-np', '-m', m, '-f', path], {stdio: ['ignore', 'pipe', 'ignore']});\n\t\t\t\tabort = () => proc.kill('SIGTERM');\n\t\t\t\tproc.on('error', (err: Error) => reject(err));\n\t\t\t\tproc.stdout.on('data', (data: Buffer) => output += data.toString());\n\t\t\t\tproc.on('close', (code: number) => {\n\t\t\t\t\tif(code === 0) resolve(output.trim() || null);\n\t\t\t\t\telse reject(new Error(`Exit code ${code}`));\n\t\t\t\t});\n\t\t\t});\n\t\t});\n\t\treturn {response, abort};\n\t}\n\n\t/**\n\t * Downloads the specified Whisper model if it is not already present locally.\n\t *\n\t * @param {string} model Whisper model that will be downloaded\n\t * @return {Promise<string>} Absolute path to model file, resolves once downloaded\n\t */\n\tasync downloadAsrModel(model: string = this.whisperModel): Promise<string> {\n\t\tif(!this.options.whisper?.binary) throw new Error('Whisper not configured');\n\t\tif(!model.endsWith('.bin')) model += '.bin';\n\t\tconst p = Path.join(this.options.whisper.path, model);\n\t\tif(await fs.stat(p).then(() => true).catch(() => false)) return p;\n\t\tif(!!this.downloads[model]) return this.downloads[model];\n\t\tthis.downloads[model] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${model}`)\n\t\t\t.then(resp => resp.arrayBuffer())\n\t\t\t.then(arr => Buffer.from(arr)).then(async buffer => {\n\t\t\t\tawait fs.writeFile(p, buffer);\n\t\t\t\tdelete this.downloads[model];\n\t\t\t\treturn p;\n\t\t\t});\n\t\treturn this.downloads[model];\n\t}\n\n\t/**\n\t * Convert image to text using Optical Character Recognition\n\t * @param {string} path Path to image\n\t * @returns {{abort: Function, response: Promise<string | null>}} Abort function & Promise of extracted text\n\t */\n\tocr(path: string): {abort: () => void, response: Promise<string | null>} {\n\t\tlet worker: any;\n\t\treturn {\n\t\t\tabort: () => { worker?.terminate(); },\n\t\t\tresponse: new Promise(async res => {\n\t\t\t\tworker = await createWorker('eng');\n\t\t\t\tconst {data} = await worker.recognize(path);\n\t\t\t\tawait worker.terminate();\n\t\t\t\tres(data.text.trim() || null);\n\t\t\t})\n\t\t}\n\t}\n\n\t/**\n\t * Compare the difference between two strings using tensor math\n\t * @param target Text that will checked\n\t * @param {string} searchTerms Multiple search terms to check against target\n\t * @returns {{avg: number, max: number, similarities: number[]}} Similarity values 0-1: 0 = unique, 1 = identical\n\t */\n\tsemanticSimilarity(target: string, ...searchTerms: string[]) {\n\t\tif(searchTerms.length < 2) throw new Error('Requires at least 2 strings to compare');\n\n\t\tconst vector = (text: string, dimensions: number = 10): number[] => {\n\t\t\treturn text.toLowerCase().split('').map((char, index) =>\n\t\t\t\t(char.charCodeAt(0) * (index + 1)) % 
dimensions / dimensions).slice(0, dimensions);\n\t\t}\n\n\t\tconst cosineSimilarity = (v1: number[], v2: number[]): number => {\n\t\t\tif (v1.length !== v2.length) throw new Error('Vectors must be same length');\n\t\t\tconst tensor1 = tf.tensor1d(v1), tensor2 = tf.tensor1d(v2)\n\t\t\tconst dotProduct = tf.dot(tensor1, tensor2)\n\t\t\tconst magnitude1 = tf.norm(tensor1)\n\t\t\tconst magnitude2 = tf.norm(tensor2)\n\t\t\tif(magnitude1.dataSync()[0] === 0 || magnitude2.dataSync()[0] === 0) return 0\n\t\t\treturn dotProduct.dataSync()[0] / (magnitude1.dataSync()[0] * magnitude2.dataSync()[0])\n\t\t}\n\n\t\tconst v = vector(target);\n\t\tconst similarities = searchTerms.map(t => vector(t)).map(refVector => cosineSimilarity(v, refVector))\n\t\treturn {avg: similarities.reduce((acc, s) => acc + s, 0) / similarities.length, max: Math.max(...similarities), similarities}\n\t}\n}\n","import {$, $Sync} from '@ztimson/node-utils';\nimport {ASet, consoleInterceptor, Http, fn as Fn} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\n\nexport type AiToolArg = {[key: string]: {\n\t/** Argument type */\n\ttype: 'array' | 'boolean' | 'number' | 'object' | 'string',\n\t/** Argument description */\n\tdescription: string,\n\t/** Required argument */\n\trequired?: boolean;\n\t/** Default value */\n\tdefault?: any,\n\t/** Options */\n\tenum?: string[],\n\t/** Minimum value or length */\n\tmin?: number,\n\t/** Maximum value or length */\n\tmax?: number,\n\t/** Match pattern */\n\tpattern?: string,\n\t/** Child arguments */\n\titems?: {[key: string]: AiToolArg}\n}}\n\nexport type AiTool = {\n\t/** Tool ID / Name - Must be snail_case */\n\tname: string,\n\t/** Tool description / prompt */\n\tdescription: string,\n\t/** Tool arguments */\n\targs?: AiToolArg,\n\t/** Callback function */\n\tfn: (args: any, ai: Ai) => any | Promise<any>,\n};\n\nexport const CliTool: AiTool = {\n\tname: 'cli',\n\tdescription: 'Use the command line interface, returns any output',\n\targs: {command: {type: 'string', description: 'Command to run', required: true}},\n\tfn: (args: {command: string}) => $`${args.command}`\n}\n\nexport const DateTimeTool: AiTool = {\n\tname: 'get_datetime',\n\tdescription: 'Get current date and time',\n\targs: {},\n\tfn: async () => new Date().toISOString()\n}\n\nexport const ExecTool: AiTool = {\n\tname: 'exec',\n\tdescription: 'Run code/scripts',\n\targs: {\n\t\tlanguage: {type: 'string', description: 'Execution language', enum: ['cli', 'node', 'python'], required: true},\n\t\tcode: {type: 'string', description: 'Code to execute', required: true}\n\t},\n\tfn: async (args, ai) => {\n\t\ttry {\n\t\t\tswitch(args.type) {\n\t\t\t\tcase 'bash':\n\t\t\t\t\treturn await CliTool.fn({command: args.code}, ai);\n\t\t\t\tcase 'node':\n\t\t\t\t\treturn await JSTool.fn({code: args.code}, ai);\n\t\t\t\tcase 'python': {\n\t\t\t\t\treturn await PythonTool.fn({code: args.code}, ai);\n\t\t\t\t}\n\t\t\t}\n\t\t} catch(err: any) {\n\t\t\treturn {error: err?.message || err.toString()};\n\t\t}\n\t}\n}\n\nexport const FetchTool: AiTool = {\n\tname: 'fetch',\n\tdescription: 'Make HTTP request to URL',\n\targs: {\n\t\turl: {type: 'string', description: 'URL to fetch', required: true},\n\t\tmethod: {type: 'string', description: 'HTTP method to use', enum: ['GET', 'POST', 'PUT', 'DELETE'], default: 'GET'},\n\t\theaders: {type: 'object', description: 'HTTP headers to send', default: {}},\n\t\tbody: {type: 'object', description: 'HTTP body to send'},\n\t},\n\tfn: (args: {\n\t\turl: string;\n\t\tmethod: 'GET' | 'POST' | 'PUT' | 
'DELETE';\n\t\theaders: {[key: string]: string};\n\t\tbody: any;\n\t}) => new Http({url: args.url, headers: args.headers}).request({method: args.method || 'GET', body: args.body})\n}\n\nexport const JSTool: AiTool = {\n\tname: 'exec_javascript',\n\tdescription: 'Execute commonjs javascript',\n\targs: {\n\t\tcode: {type: 'string', description: 'CommonJS javascript', required: true}\n\t},\n\tfn: async (args: {code: string}) => {\n\t\tconst console = consoleInterceptor(null);\n\t\tconst resp = await Fn<any>({console}, args.code, true).catch((err: any) => console.output.error.push(err));\n\t\treturn {...console.output, return: resp, stdout: undefined, stderr: undefined};\n\t}\n}\n\nexport const PythonTool: AiTool = {\n\tname: 'exec_javascript',\n\tdescription: 'Execute commonjs javascript',\n\targs: {\n\t\tcode: {type: 'string', description: 'CommonJS javascript', required: true}\n\t},\n\tfn: async (args: {code: string}) => ({result: $Sync`python -c \"${args.code}\"`})\n}\n\nexport const SearchTool: AiTool = {\n\tname: 'search',\n\tdescription: 'Use a search engine to find relevant URLs, should be changed with fetch to scrape sources',\n\targs: {\n\t\tquery: {type: 'string', description: 'Search string', required: true},\n\t\tlength: {type: 'string', description: 'Number of results to return', default: 5},\n\t},\n\tfn: async (args: {\n\t\tquery: string;\n\t\tlength: number;\n\t}) => {\n\t\tconst html = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(args.query)}`, {\n\t\t\theaders: {\"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64)\", \"Accept-Language\": \"en-US,en;q=0.9\"}\n\t\t}).then(resp => resp.text());\n\t\tlet match, regex = /<a .*?href=\"(.+?)\".+?<\\/a>/g;\n\t\tconst results = new ASet<string>();\n\t\twhile((match = regex.exec(html)) !== null) {\n\t\t\tlet url = /uddg=(.+)&amp?/.exec(decodeURIComponent(match[1]))?.[1];\n\t\t\tif(url) url = decodeURIComponent(url);\n\t\t\tif(url) results.add(url);\n\t\t\tif(results.size >= (args.length || 5)) break;\n\t\t}\n\t\treturn 
results;\n\t}\n}\n"],"names":["LLMProvider","Anthropic","ai","apiToken","model","anthropic","history","i","orgI","c","h","message","options","controller","response","res","rej","requestParams","t","objectMap","key","value","resp","chunk","text","last","JSONAttemptParse","toolCalls","results","toolCall","tool","findByProp","result","JSONSanitize","err","Ollama","host","ollama","error","system","args","OpenAi","openAI","tools","tc","record","h2","LLM","max","min","keep","tokens","m","recent","process","Ai","path","abort","resolve","reject","output","proc","spawn","data","code","p","Path","fs","arr","buffer","worker","createWorker","target","searchTerms","vector","dimensions","char","index","cosineSimilarity","v1","v2","tensor1","tf","tensor2","dotProduct","magnitude1","magnitude2","v","similarities","refVector","acc","s","CliTool","$","DateTimeTool","ExecTool","JSTool","PythonTool","FetchTool","Http","console","consoleInterceptor","Fn","$Sync","SearchTool","html","match","regex","ASet","url"],"mappings":"6gCAIO,MAAeA,CAAY,CAElC,CCAO,MAAMC,UAAkBD,CAAY,CAG1C,YAA4BE,EAAwBC,EAAyBC,EAAe,CAC3F,MAAA,EAD2B,KAAA,GAAAF,EAAwB,KAAA,SAAAC,EAAyB,KAAA,MAAAC,EAE5E,KAAK,OAAS,IAAIC,EAAAA,UAAU,CAAC,OAAQF,EAAS,CAC/C,CALA,OAOQ,WAAWG,EAA8B,CAChD,QAAQC,EAAI,EAAGA,EAAID,EAAQ,OAAQC,IAAK,CACvC,MAAMC,EAAOD,EACV,OAAOD,EAAQE,CAAI,EAAE,SAAW,WAC/BF,EAAQE,CAAI,EAAE,MAAQ,YACxBF,EAAQE,CAAI,EAAE,QAAQ,OAAQC,GAAWA,EAAE,MAAO,UAAU,EAAE,QAASA,GAAW,CACjFF,IACAD,EAAQ,OAAOC,EAAG,EAAG,CAAC,KAAM,OAAQ,GAAIE,EAAE,GAAI,KAAMA,EAAE,KAAM,KAAMA,EAAE,MAAM,CAC3E,CAAC,EACQH,EAAQE,CAAI,EAAE,MAAQ,QAC/BF,EAAQE,CAAI,EAAE,QAAQ,OAAQC,GAAWA,EAAE,MAAO,aAAa,EAAE,QAASA,GAAW,CACpF,MAAMC,EAAIJ,EAAQ,KAAMI,GAAWA,EAAE,IAAMD,EAAE,WAAW,EACxDC,EAAED,EAAE,SAAW,QAAU,SAAS,EAAIA,EAAE,OACzC,CAAC,EAEFH,EAAQE,CAAI,EAAE,QAAUF,EAAQE,CAAI,EAAE,QAAQ,OAAQC,GAAWA,EAAE,MAAQ,MAAM,EAAE,IAAKA,GAAWA,EAAE,IAAI,EAAE,KAAK;AAAA;AAAA,CAAM,EAExH,CACA,OAAOH,EAAQ,OAAOI,GAAK,CAAC,CAACA,EAAE,OAAO,CACvC,CAEQ,aAAaJ,EAA8B,CAClD,QAAQC,EAAI,EAAGA,EAAID,EAAQ,OAAQC,IAClC,GAAGD,EAAQC,CAAC,EAAE,MAAQ,OAAQ,CAC7B,MAAMG,EAASJ,EAAQC,CAAC,EACxBD,EAAQ,OAAOC,EAAG,EACjB,CAAC,KAAM,YAAa,QAAS,CAAC,CAAC,KAAM,WAAY,GAAIG,EAAE,GAAI,KAAMA,EAAE,KAAM,MAAOA,EAAE,IAAA,CAAK,CAAA,EACvF,CAAC,KAAM,OAAQ,QAAS,CAAC,CAAC,KAAM,cAAe,YAAaA,EAAE,GAAI,SAAU,CAAC,CAACA,EAAE,MAAO,QAAUA,EAAE,OAASA,EAAE,QAAQ,CAAA,CAAC,EAExHH,GACD,CAED,OAAOD,CACR,CAEA,IAAIK,EAAiBC,EAAsB,GAAoC,CAC9E,MAAMC,EAAa,IAAI,gBACjBC,EAAW,IAAI,QAAa,MAAOC,EAAKC,IAAQ,CACrD,IAAIV,EAAU,KAAK,aAAa,CAAC,GAAGM,EAAQ,SAAW,CAAA,EAAI,CAAC,KAAM,OAAQ,QAASD,CAAA,CAAQ,CAAC,EACzFC,EAAQ,WAAUN,EAAU,MAAM,KAAK,GAAG,IAAI,SAAcA,EAASM,EAAQ,SAAS,IAAKA,EAAQ,SAAS,IAAKA,CAAO,GAC3H,MAAMK,EAAqB,CAC1B,MAAOL,EAAQ,OAAS,KAAK,MAC7B,WAAYA,EAAQ,YAAc,KAAK,GAAG,QAAQ,YAAc,KAChE,OAAQA,EAAQ,QAAU,KAAK,GAAG,QAAQ,QAAU,GACpD,YAAaA,EAAQ,aAAe,KAAK,GAAG,QAAQ,aAAe,GACnE,OAAQA,EAAQ,OAAS,KAAK,GAAG,QAAQ,OAAS,CAAA,GAAI,IAAIM,IAAM,CAC/D,KAAMA,EAAE,KACR,YAAaA,EAAE,YACf,aAAc,CACb,KAAM,SACN,WAAYA,EAAE,KAAOC,EAAAA,UAAUD,EAAE,KAAM,CAACE,EAAKC,KAAW,CAAC,GAAGA,EAAO,SAAU,MAAA,EAAW,EAAI,CAAA,EAC5F,SAAUH,EAAE,KAAO,OAAO,QAAQA,EAAE,IAAI,EAAE,OAAOA,GAAKA,EAAE,CAAC,EAAE,QAAQ,EAAE,IAAIA,GAAKA,EAAE,CAAC,CAAC,EAAI,CAAA,CAAC,EAExF,GAAI,MAAA,EACH,EACF,SAAUZ,EACV,OAAQ,CAAC,CAACM,EAAQ,MAAA,EAInB,IAAIU,EACJ,EAAG,CAIF,GAHAA,EAAO,MAAM,KAAK,OAAO,SAAS,OAAOL,CAAa,EAGnDL,EAAQ,OAAQ,CAClBU,EAAK,QAAU,CAAA,EACf,gBAAiBC,KAASD,EAAM,CAC/B,GAAGT,EAAW,OAAO,QAAS,MAC9B,GAAGU,EAAM,OAAS,sBACdA,EAAM,cAAc,OAAS,OAC/BD,EAAK,QAAQ,KAAK,CAAC,KAAM,OAAQ,KAAM,GAAG,EACjCC,EAAM,cAAc,OAAS,YACtCD,EAAK,QAAQ,KAAK,CAAC,KAAM,WAAY,GAAIC,EAAM,cAAc,GAAI,KAAMA,EAAM,cAAc,KAAM,MAAY,GAA
G,UAExGA,EAAM,OAAS,sBACxB,GAAGA,EAAM,MAAM,OAAS,aAAc,CACrC,MAAMC,EAAOD,EAAM,MAAM,KACzBD,EAAK,QAAQ,GAAG,EAAE,EAAE,MAAQE,EAC5BZ,EAAQ,OAAO,CAAC,KAAAY,EAAK,CACtB,MAAUD,EAAM,MAAM,OAAS,qBAC9BD,EAAK,QAAQ,GAAG,EAAE,EAAE,OAASC,EAAM,MAAM,sBAEjCA,EAAM,OAAS,qBAAsB,CAC9C,MAAME,EAAOH,EAAK,QAAQ,GAAG,EAAE,EAC5BG,EAAK,OAAS,OAAMA,EAAK,MAAQA,EAAK,MAAQC,EAAAA,iBAAiBD,EAAK,MAAO,CAAA,CAAE,EAAI,CAAA,EACrF,SAAUF,EAAM,OAAS,eACxB,KAEF,CACD,CAGA,MAAMI,EAAYL,EAAK,QAAQ,OAAQb,GAAWA,EAAE,OAAS,UAAU,EACvE,GAAGkB,EAAU,QAAU,CAACd,EAAW,OAAO,QAAS,CAClDP,EAAQ,KAAK,CAAC,KAAM,YAAa,QAASgB,EAAK,QAAQ,EACvD,MAAMM,EAAU,MAAM,QAAQ,IAAID,EAAU,IAAI,MAAOE,GAAkB,CACxE,MAAMC,EAAOlB,EAAQ,OAAO,KAAKmB,EAAAA,WAAW,OAAQF,EAAS,IAAI,CAAC,EAClE,GAAG,CAACC,EAAM,MAAO,CAAC,YAAaD,EAAS,GAAI,SAAU,GAAM,QAAS,gBAAA,EACrE,GAAI,CACH,MAAMG,EAAS,MAAMF,EAAK,GAAGD,EAAS,MAAO,KAAK,EAAE,EACpD,MAAO,CAAC,KAAM,cAAe,YAAaA,EAAS,GAAI,QAASI,eAAaD,CAAM,CAAA,CACpF,OAASE,EAAU,CAClB,MAAO,CAAC,KAAM,cAAe,YAAaL,EAAS,GAAI,SAAU,GAAM,QAASK,GAAK,SAAWA,GAAK,SAAA,GAAc,SAAA,CACpH,CACD,CAAC,CAAC,EACF5B,EAAQ,KAAK,CAAC,KAAM,OAAQ,QAASsB,EAAQ,EAC7CX,EAAc,SAAWX,CAC1B,CACD,OAAS,CAACO,EAAW,OAAO,SAAWS,EAAK,QAAQ,KAAMb,GAAWA,EAAE,OAAS,UAAU,GACvFG,EAAQ,QAAQA,EAAQ,OAAO,CAAC,KAAM,GAAK,EAC9CG,EAAI,KAAK,WAAW,CAAC,GAAGT,EAAS,CAChC,KAAM,YACN,QAASgB,EAAK,QAAQ,OAAQb,GAAWA,EAAE,MAAQ,MAAM,EAAE,IAAKA,GAAWA,EAAE,IAAI,EAAE,KAAK;AAAA;AAAA,CAAM,CAAA,CAC9F,CAAC,CAAC,CACJ,CAAC,EACD,OAAO,OAAO,OAAOK,EAAU,CAAC,MAAO,IAAMD,EAAW,MAAA,EAAQ,CACjE,CACD,CC9HO,MAAMsB,UAAenC,CAAY,CAGvC,YAA4BE,EAAekC,EAAqBhC,EAAe,CAC9E,MAAA,EAD2B,KAAA,GAAAF,EAAe,KAAA,KAAAkC,EAAqB,KAAA,MAAAhC,EAE/D,KAAK,OAAS,IAAIiC,SAAO,CAAC,KAAAD,EAAK,CAChC,CALA,OAOQ,WAAW9B,EAA8B,CAChD,QAAQC,EAAI,EAAGA,EAAID,EAAQ,OAAQC,IAClC,GAAGD,EAAQC,CAAC,EAAE,MAAQ,aAAeD,EAAQC,CAAC,EAAE,WAC5CD,EAAQC,CAAC,EAAE,QAAS,OAAOD,EAAQC,CAAC,EAAE,YAExCD,EAAQ,OAAOC,EAAG,CAAC,EACnBA,aAEQD,EAAQC,CAAC,EAAE,MAAQ,OAAQ,CACpC,MAAM+B,EAAQhC,EAAQC,CAAC,EAAE,QAAQ,WAAW,WAAW,EACvDD,EAAQC,CAAC,EAAI,CAAC,KAAM,OAAQ,KAAMD,EAAQC,CAAC,EAAE,UAAW,KAAMD,EAAQC,CAAC,EAAE,KAAM,CAAC+B,EAAQ,QAAU,SAAS,EAAGhC,EAAQC,CAAC,EAAE,OAAA,CAC1H,CAED,OAAOD,CACR,CAEQ,aAAaA,EAA8B,CAClD,OAAOA,EAAQ,IAAKI,GAChBA,EAAE,MAAQ,OAAeA,EACrB,CAAC,KAAM,OAAQ,UAAWA,EAAE,KAAM,QAASA,EAAE,OAASA,EAAE,OAAA,CAC/D,CACF,CAEA,IAAIC,EAAiBC,EAAsB,GAAoC,CAC9E,MAAMC,EAAa,IAAI,gBACjBC,EAAW,IAAI,QAAa,MAAOC,EAAKC,IAAQ,CACrD,IAAIuB,EAAS3B,EAAQ,QAAU,KAAK,GAAG,QAAQ,OAC3CN,EAAU,KAAK,aAAa,CAAC,GAAGM,EAAQ,SAAW,CAAA,EAAI,CAAC,KAAM,OAAQ,QAASD,CAAA,CAAQ,CAAC,EACzFL,EAAQ,CAAC,EAAE,MAAQ,WACjBiC,IACS,MAAA,EADDA,EAASjC,EAAQ,MAAA,GAG3BM,EAAQ,WAAUN,EAAU,MAAM,KAAK,GAAG,IAAI,SAAcA,EAASM,EAAQ,SAAS,IAAKA,EAAQ,SAAS,GAAG,GAC/GA,EAAQ,QAAQN,EAAQ,QAAQ,CAAC,KAAM,SAAU,QAASiC,EAAO,EAEpE,MAAMtB,EAAqB,CAC1B,MAAOL,EAAQ,OAAS,KAAK,MAC7B,SAAUN,EACV,OAAQ,CAAC,CAACM,EAAQ,OAClB,OAAQC,EAAW,OACnB,QAAS,CACR,YAAaD,EAAQ,aAAe,KAAK,GAAG,QAAQ,aAAe,GACnE,YAAaA,EAAQ,YAAc,KAAK,GAAG,QAAQ,YAAc,IAAA,EAElE,OAAQA,EAAQ,OAAS,KAAK,GAAG,QAAQ,OAAS,CAAA,GAAI,IAAIM,IAAM,CAC/D,KAAM,WACN,SAAU,CACT,KAAMA,EAAE,KACR,YAAaA,EAAE,YACf,WAAY,CACX,KAAM,SACN,WAAYA,EAAE,KAAOC,EAAAA,UAAUD,EAAE,KAAM,CAACE,EAAKC,KAAW,CAAC,GAAGA,EAAO,SAAU,MAAA,EAAW,EAAI,CAAA,EAC5F,SAAUH,EAAE,KAAO,OAAO,QAAQA,EAAE,IAAI,EAAE,OAAOA,GAAKA,EAAE,CAAC,EAAE,QAAQ,EAAE,IAAIA,GAAKA,EAAE,CAAC,CAAC,EAAI,CAAA,CAAC,CACxF,CACD,EACC,CAAA,EAIH,IAAII,EACJ,EAAG,CAEF,GADAA,EAAO,MAAM,KAAK,OAAO,KAAKL,CAAa,EACxCL,EAAQ,OAAQ,CAClBU,EAAK,QAAU,CAAC,KAAM,YAAa,QAAS,GAAI,WAAY,EAAC,EAC7D,gBAAiBC,KAASD,EAOzB,GANGT,EAAW,OAAO,UAClBU,EAAM,SAAS,UACjBD,EAAK,QAAQ,SAAWC,EAAM,QAAQ,QACtCX,EAAQ,OAAO,CAAC,KAAMW,EAAM,QAAQ,QAAQ,GAE1CA,EAAM,SAAS,eAAiB,QAAQ,WAAaA,EAAM,QAAQ,YACnEA,EAAM,MAAM,KAEjB,C
AGA,GAAGD,EAAK,SAAS,YAAY,QAAU,CAACT,EAAW,OAAO,QAAS,CAClEP,EAAQ,KAAKgB,EAAK,OAAO,EACzB,MAAMM,EAAU,MAAM,QAAQ,IAAIN,EAAK,QAAQ,WAAW,IAAI,MAAOO,GAAkB,CACtF,MAAMC,GAAQlB,EAAQ,OAAS,KAAK,GAAG,QAAQ,QAAQ,KAAKmB,EAAAA,WAAW,OAAQF,EAAS,SAAS,IAAI,CAAC,EACtG,GAAG,CAACC,EAAM,MAAO,CAAC,KAAM,OAAQ,UAAWD,EAAS,SAAS,KAAM,QAAS,6BAAA,EAC5E,MAAMW,EAAO,OAAOX,EAAS,SAAS,WAAc,SAAWH,EAAAA,iBAAiBG,EAAS,SAAS,UAAW,CAAA,CAAE,EAAIA,EAAS,SAAS,UACrI,GAAI,CACH,MAAMG,EAAS,MAAMF,EAAK,GAAGU,EAAM,KAAK,EAAE,EAC1C,MAAO,CAAC,KAAM,OAAQ,UAAWX,EAAS,SAAS,KAAM,KAAAW,EAAM,QAASP,EAAAA,aAAaD,CAAM,CAAA,CAC5F,OAASE,EAAU,CAClB,MAAO,CAAC,KAAM,OAAQ,UAAWL,EAAS,SAAS,KAAM,KAAAW,EAAM,QAASP,EAAAA,aAAa,CAAC,MAAOC,GAAK,SAAWA,GAAK,YAAc,SAAA,CAAU,CAAA,CAC3I,CACD,CAAC,CAAC,EACF5B,EAAQ,KAAK,GAAGsB,CAAO,EACvBX,EAAc,SAAWX,CAC1B,CACD,OAAS,CAACO,EAAW,OAAO,SAAWS,EAAK,SAAS,YAAY,QAC9DV,EAAQ,QAAQA,EAAQ,OAAO,CAAC,KAAM,GAAK,EAC9CG,EAAI,KAAK,WAAW,CAAC,GAAGT,EAAS,CAAC,KAAM,YAAa,QAASgB,EAAK,SAAS,OAAA,CAAQ,CAAC,CAAC,CACvF,CAAC,EACD,OAAO,OAAO,OAAOR,EAAU,CAAC,MAAO,IAAMD,EAAW,MAAA,EAAQ,CACjE,CACD,CC1GO,MAAM4B,UAAezC,CAAY,CAGvC,YAA4BE,EAAwBC,EAAyBC,EAAe,CAC3F,MAAA,EAD2B,KAAA,GAAAF,EAAwB,KAAA,SAAAC,EAAyB,KAAA,MAAAC,EAE5E,KAAK,OAAS,IAAIsC,EAAAA,OAAO,CAAC,OAAQvC,EAAS,CAC5C,CALA,OAOQ,WAAWG,EAA8B,CAChD,QAAQC,EAAI,EAAGA,EAAID,EAAQ,OAAQC,IAAK,CACvC,MAAMG,EAAIJ,EAAQC,CAAC,EACnB,GAAGG,EAAE,OAAS,aAAeA,EAAE,WAAY,CAC1C,MAAMiC,EAAQjC,EAAE,WAAW,IAAKkC,IAAa,CAC5C,KAAM,OACN,GAAIA,EAAG,GACP,KAAMA,EAAG,SAAS,KAClB,KAAMlB,EAAAA,iBAAiBkB,EAAG,SAAS,UAAW,CAAA,CAAE,CAAA,EAC/C,EACFtC,EAAQ,OAAOC,EAAG,EAAG,GAAGoC,CAAK,EAC7BpC,GAAKoC,EAAM,OAAS,CACrB,SAAUjC,EAAE,OAAS,QAAUA,EAAE,QAAS,CACzC,MAAMmC,EAASvC,EAAQ,QAAWI,EAAE,cAAgBoC,EAAG,EAAE,EACtDD,IACCnC,EAAE,QAAQ,SAAS,UAAU,EAAGmC,EAAO,MAAQnC,EAAE,QAC/CmC,EAAO,QAAUnC,EAAE,SAEzBJ,EAAQ,OAAOC,EAAG,CAAC,EACnBA,GACD,CAED,CACA,OAAOD,CACR,CAEQ,aAAaA,EAA8B,CAClD,OAAOA,EAAQ,OAAO,CAAC0B,EAAQtB,KAC3BA,EAAE,OAAS,OACbsB,EAAO,KAAK,CACX,KAAM,YACN,QAAS,KACT,WAAY,CAAC,CAAE,GAAItB,EAAE,GAAI,KAAM,WAAY,SAAU,CAAE,KAAMA,EAAE,KAAM,UAAW,KAAK,UAAUA,EAAE,IAAI,CAAA,EAAK,EAC1G,QAAS,KACT,YAAa,CAAA,CAAC,EACZ,CACF,KAAM,OACN,aAAcA,EAAE,GAChB,QAASA,EAAE,OAASA,EAAE,OAAA,CACtB,EAEDsB,EAAO,KAAKtB,CAAC,EAEPsB,GACL,CAAA,CAAW,CACf,CAEA,IAAIrB,EAAiBC,EAAsB,GAAoC,CAC9E,MAAMC,EAAa,IAAI,gBACjBC,EAAW,IAAI,QAAa,MAAOC,EAAKC,IAAQ,CACrD,IAAIV,EAAU,KAAK,aAAa,CAAC,GAAGM,EAAQ,SAAW,CAAA,EAAI,CAAC,KAAM,OAAQ,QAASD,CAAA,CAAQ,CAAC,EACzFC,EAAQ,WAAUN,EAAU,MAAM,KAAK,GAAG,IAAI,SAAcA,EAASM,EAAQ,SAAS,IAAKA,EAAQ,SAAS,IAAKA,CAAO,GAE3H,MAAMK,EAAqB,CAC1B,MAAOL,EAAQ,OAAS,KAAK,MAC7B,SAAUN,EACV,OAAQ,CAAC,CAACM,EAAQ,OAClB,WAAYA,EAAQ,YAAc,KAAK,GAAG,QAAQ,YAAc,KAChE,YAAaA,EAAQ,aAAe,KAAK,GAAG,QAAQ,aAAe,GACnE,OAAQA,EAAQ,OAAS,KAAK,GAAG,QAAQ,OAAS,CAAA,GAAI,IAAIM,IAAM,CAC/D,KAAM,WACN,SAAU,CACT,KAAMA,EAAE,KACR,YAAaA,EAAE,YACf,WAAY,CACX,KAAM,SACN,WAAYA,EAAE,KAAOC,EAAAA,UAAUD,EAAE,KAAM,CAACE,EAAKC,KAAW,CAAC,GAAGA,EAAO,SAAU,MAAA,EAAW,EAAI,CAAA,EAC5F,SAAUH,EAAE,KAAO,OAAO,QAAQA,EAAE,IAAI,EAAE,OAAOA,GAAKA,EAAE,CAAC,EAAE,QAAQ,EAAE,IAAIA,GAAKA,EAAE,CAAC,CAAC,EAAI,CAAA,CAAC,CACxF,CACD,EACC,CAAA,EAIH,IAAII,EACJ,EAAG,CAIF,GAHAA,EAAO,MAAM,KAAK,OAAO,KAAK,YAAY,OAAOL,CAAa,EAG3DL,EAAQ,OAAQ,CAClBU,EAAK,QAAU,CAAA,EACf,gBAAiBC,KAASD,EAAM,CAC/B,GAAGT,EAAW,OAAO,QAAS,MAC3BU,EAAM,QAAQ,CAAC,EAAE,MAAM,SACzBX,EAAQ,OAAO,CAAC,KAAMW,EAAM,QAAQ,CAAC,EAAE,MAAM,QAAQ,CAEvD,CACD,CAGA,MAAMI,EAAYL,EAAK,QAAQ,CAAC,EAAE,QAAQ,YAAc,CAAA,EACxD,GAAGK,EAAU,QAAU,CAACd,EAAW,OAAO,QAAS,CAClDP,EAAQ,KAAKgB,EAAK,QAAQ,CAAC,EAAE,OAAO,EACpC,MAAMM,EAAU,MAAM,QAAQ,IAAID,EAAU,IAAI,MAAOE,GAAkB,CACxE,MAAMC,EAAOlB,EAAQ,OAAO,KAAKmB,aAAW,OAAQF,EAAS,SAAS,IAAI,CAAC,EAC3E,GAAG,CAACC,EAAM,MAAO,CAAC,KAAM
,OAAQ,aAAcD,EAAS,GAAI,QAAS,6BAAA,EACpE,GAAI,CACH,MAAMW,EAAOd,EAAAA,iBAAiBG,EAAS,SAAS,UAAW,CAAA,CAAE,EACvDG,EAAS,MAAMF,EAAK,GAAGU,EAAM,KAAK,EAAE,EAC1C,MAAO,CAAC,KAAM,OAAQ,aAAcX,EAAS,GAAI,QAASI,eAAaD,CAAM,CAAA,CAC9E,OAASE,EAAU,CAClB,MAAO,CAAC,KAAM,OAAQ,aAAcL,EAAS,GAAI,QAASI,EAAAA,aAAa,CAAC,MAAOC,GAAK,SAAWA,GAAK,YAAc,SAAA,CAAU,CAAA,CAC7H,CACD,CAAC,CAAC,EACF5B,EAAQ,KAAK,GAAGsB,CAAO,EACvBX,EAAc,SAAWX,CAC1B,CACD,OAAS,CAACO,EAAW,OAAO,SAAWS,EAAK,UAAU,CAAC,GAAG,SAAS,YAAY,QAE5EV,EAAQ,QAAQA,EAAQ,OAAO,CAAC,KAAM,GAAK,EAC9CG,EAAI,KAAK,WAAW,CAAC,GAAGT,EAAS,CAAC,KAAM,YAAa,QAASgB,EAAK,QAAQ,CAAC,EAAE,QAAQ,SAAW,EAAA,CAAG,CAAC,CAAC,CACvG,CAAC,EAED,OAAO,OAAO,OAAOR,EAAU,CAAC,MAAO,IAAMD,EAAW,MAAA,EAAQ,CACjE,CACD,CCnDO,MAAMkC,CAAI,CAGhB,YAA4B7C,EAAwBU,EAAqB,CAA7C,KAAA,GAAAV,EAAwB,KAAA,QAAAU,EAChDA,EAAQ,WAAW,QAAO,KAAK,UAAU,UAAY,IAAIX,EAAU,KAAK,GAAIW,EAAQ,UAAU,MAAOA,EAAQ,UAAU,KAAK,GAC5HA,EAAQ,QAAQ,OAAM,KAAK,UAAU,OAAS,IAAIuB,EAAO,KAAK,GAAIvB,EAAQ,OAAO,KAAMA,EAAQ,OAAO,KAAK,GAC3GA,EAAQ,QAAQ,QAAO,KAAK,UAAU,OAAS,IAAI6B,EAAO,KAAK,GAAI7B,EAAQ,OAAO,MAAOA,EAAQ,OAAO,KAAK,EACjH,CANQ,UAA0C,CAAA,EAclD,IAAID,EAAiBC,EAAsB,GAAoC,CAC9E,IAAIR,EAAa,CAAC,KAAM,IAAI,EAS5B,GARGQ,EAAQ,QACP,OAAOA,EAAQ,OAAS,WAAkBA,EAAQ,MAChDR,EAAQ,CAACQ,EAAQ,MAAa,KAAK,QAASA,EAAQ,KAAK,GAAG,KAAK,IAEpE,CAACA,EAAQ,OAASR,EAAM,CAAC,GAAK,QAC7B,OAAO,KAAK,QAAQ,OAAS,SAAUA,EAAQ,KAAK,QAAQ,MAC1DA,EAAQ,CAAC,KAAK,QAAQ,MAAa,KAAK,QAAS,KAAK,QAAQ,KAAK,GAAG,KAAK,GAE9E,CAACA,EAAM,CAAC,GAAK,CAACA,EAAM,CAAC,EAAG,MAAM,IAAI,MAAM,kCAAkCA,EAAM,CAAC,CAAC,MAAMA,EAAM,CAAC,CAAC,EAAE,EACrG,OAAO,KAAK,UAAUA,EAAM,CAAC,CAAC,EAAE,IAAIO,EAAS,CAAC,GAAGC,EAAS,MAAOR,EAAM,CAAC,EAAE,CAC3E,CAUA,MAAM,SAASE,EAAuB0C,EAAaC,EAAarC,EAA6C,CAC5G,GAAG,KAAK,eAAeN,CAAO,EAAI0C,EAAK,OAAO1C,EAC9C,IAAI4C,EAAO,EAAGC,EAAS,EACvB,QAAQC,KAAK9C,EAAQ,aAEpB,GADA6C,GAAU,KAAK,eAAeC,EAAE,OAAO,EACpCD,EAASF,EAAKC,QACZ,OAEN,GAAG5C,EAAQ,QAAU4C,EAAM,OAAO5C,EAClC,MAAM+C,EAASH,GAAQ,EAAI,CAAA,EAAK5C,EAAQ,MAAM,CAAC4C,CAAI,EAClDI,GAAWJ,GAAQ,EAAI5C,EAAUA,EAAQ,MAAM,EAAG,CAAC4C,CAAI,GAAG,OAAOxC,GAAKA,EAAE,OAAS,aAAeA,EAAE,OAAS,MAAM,EAElH,MAAO,CAAC,CAAC,KAAM,YAAa,QAAS,yBADrB,MAAM,KAAK,UAAU4C,EAAQ,OAAS,GAAGF,EAAE,IAAI,KAAKA,EAAE,OAAO,EAAE,EAAE,KAAK;AAAA;AAAA,CAAM,EAAG,IAAKxC,CAAO,CACtC,IAAK,GAAGyC,CAAM,CACpF,CAOA,eAAe/C,EAAsB,CACpC,MAAMkB,EAAO,KAAK,UAAUlB,CAAO,EACnC,OAAO,KAAK,KAAMkB,EAAK,OAAS,EAAK,GAAG,CACzC,CAQA,MAAM,KAAKb,EAAiBC,EAAsB,CACjD,IAAIU,EAAO,MAAM,KAAK,IAAIX,EAAS,CAClC,OAAQ,4BACR,GAAGC,CAAA,CACH,EACD,OAAIU,IAAO,CAAC,GAAG,QACRI,mBAAiB,IAAI,OAAO,SAAa,EAAE,KAAKJ,EAAK,CAAC,EAAE,OAAO,EAAG,EAAE,EAD5C,CAAA,CAEhC,CASA,UAAUE,EAAc2B,EAAgBvC,EAA8C,CACrF,OAAO,KAAK,IAAIY,EAAM,CAAC,OAAQ,+BAA+B2B,CAAM,+BAAgC,YAAa,GAAK,GAAGvC,CAAA,CAAQ,EAC/H,KAAKN,GAAmBA,EAAQ,IAAA,GAAO,SAAW,IAAI,CACzD,CACD,CCpJO,MAAMiD,CAAG,CAOf,YAA4B3C,EAAoB,CAApB,KAAA,QAAAA,EAC3B,KAAK,IAAM,IAAImC,EAAI,KAAMnC,CAAO,EAC7B,KAAK,QAAQ,SAAS,SACxB,KAAK,aAAe,KAAK,QAAQ,SAAS,MAAM,SAAS,MAAM,EAAI,KAAK,QAAQ,SAAS,MAAQ,KAAK,QAAQ,SAAS,MAAQ,OAC/H,KAAK,iBAAA,EAEP,CAZQ,UAA8C,CAAA,EAC9C,aAGR,IAgBA,IAAI4C,EAAcpD,EAAgB,KAAK,aAAqE,CAC3G,GAAG,CAAC,KAAK,QAAQ,SAAS,OAAQ,MAAM,IAAI,MAAM,wBAAwB,EAC1E,IAAIqD,EAAa,IAAM,CAAC,EAcxB,MAAO,CAAC,SAbS,IAAI,QAAuB,CAACC,EAASC,IAAW,CAChE,KAAK,iBAAiBvD,CAAK,EAAE,KAAKgD,GAAK,CACtC,IAAIQ,EAAS,GACb,MAAMC,EAAOC,EAAAA,MAAc,KAAK,QAAQ,SAAS,OAAQ,CAAC,MAAO,MAAO,KAAMV,EAAG,KAAMI,CAAI,EAAG,CAAC,MAAO,CAAC,SAAU,OAAQ,QAAQ,EAAE,EACnIC,EAAQ,IAAMI,EAAK,KAAK,SAAS,EACjCA,EAAK,GAAG,QAAU3B,GAAeyB,EAAOzB,CAAG,CAAC,EAC5C2B,EAAK,OAAO,GAAG,OAASE,GAAiBH,GAAUG,EAAK,UAAU,EAClEF,EAAK,GAAG,QAAUG,GAAiB,CAC/BA,IAAS,EAAGN,EAAQE,EAAO,KAAA,GAAU,IAAI,IAChC,IAAI,MAAM,aAAaI,CAAI,EAAE,CAAC,CAC3C,CAAC,CACF,CAAC
,CACF,CAAC,EACiB,MAAAP,CAAA,CACnB,CAQA,MAAM,iBAAiBrD,EAAgB,KAAK,aAA+B,CAC1E,GAAG,CAAC,KAAK,QAAQ,SAAS,OAAQ,MAAM,IAAI,MAAM,wBAAwB,EACtEA,EAAM,SAAS,MAAM,IAAGA,GAAS,QACrC,MAAM6D,EAAIC,EAAK,KAAK,KAAK,QAAQ,QAAQ,KAAM9D,CAAK,EACpD,OAAG,MAAM+D,EAAG,KAAKF,CAAC,EAAE,KAAK,IAAM,EAAI,EAAE,MAAM,IAAM,EAAK,EAAUA,EAC3D,KAAK,UAAU7D,CAAK,EAAU,KAAK,UAAUA,CAAK,GACvD,KAAK,UAAUA,CAAK,EAAI,MAAM,6DAA6DA,CAAK,EAAE,EAChG,KAAKkB,GAAQA,EAAK,aAAa,EAC/B,KAAK8C,GAAO,OAAO,KAAKA,CAAG,CAAC,EAAE,KAAK,MAAMC,IACzC,MAAMF,EAAG,UAAUF,EAAGI,CAAM,EAC5B,OAAO,KAAK,UAAUjE,CAAK,EACpB6D,EACP,EACK,KAAK,UAAU7D,CAAK,EAC5B,CAOA,IAAIoD,EAAqE,CACxE,IAAIc,EACJ,MAAO,CACN,MAAO,IAAM,CAAEA,GAAQ,UAAA,CAAa,EACpC,SAAU,IAAI,QAAQ,MAAMvD,GAAO,CAClCuD,EAAS,MAAMC,EAAAA,aAAa,KAAK,EACjC,KAAM,CAAC,KAAAR,CAAA,EAAQ,MAAMO,EAAO,UAAUd,CAAI,EAC1C,MAAMc,EAAO,UAAA,EACbvD,EAAIgD,EAAK,KAAK,KAAA,GAAU,IAAI,CAC7B,CAAC,CAAA,CAEH,CAQA,mBAAmBS,KAAmBC,EAAuB,CAC5D,GAAGA,EAAY,OAAS,EAAG,MAAM,IAAI,MAAM,wCAAwC,EAEnF,MAAMC,EAAS,CAAClD,EAAcmD,EAAqB,KAC3CnD,EAAK,cAAc,MAAM,EAAE,EAAE,IAAI,CAACoD,EAAMC,IAC7CD,EAAK,WAAW,CAAC,GAAKC,EAAQ,GAAMF,EAAaA,CAAU,EAAE,MAAM,EAAGA,CAAU,EAG7EG,EAAmB,CAACC,EAAcC,IAAyB,CAChE,GAAID,EAAG,SAAWC,EAAG,OAAQ,MAAM,IAAI,MAAM,6BAA6B,EAC1E,MAAMC,EAAUC,EAAG,SAASH,CAAE,EAAGI,EAAUD,EAAG,SAASF,CAAE,EACnDI,EAAaF,EAAG,IAAID,EAASE,CAAO,EACpCE,EAAaH,EAAG,KAAKD,CAAO,EAC5BK,EAAaJ,EAAG,KAAKC,CAAO,EAClC,OAAGE,EAAW,WAAW,CAAC,IAAM,GAAKC,EAAW,WAAW,CAAC,IAAM,EAAU,EACrEF,EAAW,SAAA,EAAW,CAAC,GAAKC,EAAW,WAAW,CAAC,EAAIC,EAAW,SAAA,EAAW,CAAC,EACtF,EAEMC,EAAIb,EAAOF,CAAM,EACjBgB,EAAef,EAAY,IAAIvD,GAAKwD,EAAOxD,CAAC,CAAC,EAAE,IAAIuE,GAAaX,EAAiBS,EAAGE,CAAS,CAAC,EACpG,MAAO,CAAC,IAAKD,EAAa,OAAO,CAACE,EAAKC,IAAMD,EAAMC,EAAG,CAAC,EAAIH,EAAa,OAAQ,IAAK,KAAK,IAAI,GAAGA,CAAY,EAAG,aAAAA,CAAA,CACjH,CACD,CC1FO,MAAMI,EAAkB,CAC9B,KAAM,MACN,YAAa,qDACb,KAAM,CAAC,QAAS,CAAC,KAAM,SAAU,YAAa,iBAAkB,SAAU,GAAI,EAC9E,GAAKpD,GAA4BqD,EAAAA,IAAIrD,EAAK,OAAO,EAClD,EAEasD,EAAuB,CACnC,KAAM,eACN,YAAa,4BACb,KAAM,CAAA,EACN,GAAI,SAAY,IAAI,KAAA,EAAO,YAAA,CAC5B,EAEaC,EAAmB,CAC/B,KAAM,OACN,YAAa,mBACb,KAAM,CACL,SAAU,CAAC,KAAM,SAAU,YAAa,qBAAsB,KAAM,CAAC,MAAO,OAAQ,QAAQ,EAAG,SAAU,EAAA,EACzG,KAAM,CAAC,KAAM,SAAU,YAAa,kBAAmB,SAAU,EAAA,CAAI,EAEtE,GAAI,MAAOvD,EAAMtC,IAAO,CACvB,GAAI,CACH,OAAOsC,EAAK,KAAA,CACX,IAAK,OACJ,OAAO,MAAMoD,EAAQ,GAAG,CAAC,QAASpD,EAAK,IAAA,EAAOtC,CAAE,EACjD,IAAK,OACJ,OAAO,MAAM8F,EAAO,GAAG,CAAC,KAAMxD,EAAK,IAAA,EAAOtC,CAAE,EAC7C,IAAK,SACJ,OAAO,MAAM+F,EAAW,GAAG,CAAC,KAAMzD,EAAK,IAAA,EAAOtC,CAAE,CACjD,CAEF,OAAQgC,EAAU,CACjB,MAAO,CAAC,MAAOA,GAAK,SAAWA,EAAI,UAAS,CAC7C,CACD,CACD,EAEagE,EAAoB,CAChC,KAAM,QACN,YAAa,2BACb,KAAM,CACL,IAAK,CAAC,KAAM,SAAU,YAAa,eAAgB,SAAU,EAAA,EAC7D,OAAQ,CAAC,KAAM,SAAU,YAAa,qBAAsB,KAAM,CAAC,MAAO,OAAQ,MAAO,QAAQ,EAAG,QAAS,KAAA,EAC7G,QAAS,CAAC,KAAM,SAAU,YAAa,uBAAwB,QAAS,EAAC,EACzE,KAAM,CAAC,KAAM,SAAU,YAAa,mBAAA,CAAmB,EAExD,GAAK1D,GAKC,IAAI2D,EAAAA,KAAK,CAAC,IAAK3D,EAAK,IAAK,QAASA,EAAK,QAAQ,EAAE,QAAQ,CAAC,OAAQA,EAAK,QAAU,MAAO,KAAMA,EAAK,IAAA,CAAK,CAC/G,EAEawD,EAAiB,CAC7B,KAAM,kBACN,YAAa,8BACb,KAAM,CACL,KAAM,CAAC,KAAM,SAAU,YAAa,sBAAuB,SAAU,EAAA,CAAI,EAE1E,GAAI,MAAOxD,GAAyB,CACnC,MAAM4D,EAAUC,EAAAA,mBAAmB,IAAI,EACjC/E,EAAO,MAAMgF,KAAQ,CAAC,QAAAF,CAAA,EAAU5D,EAAK,KAAM,EAAI,EAAE,MAAON,GAAakE,EAAQ,OAAO,MAAM,KAAKlE,CAAG,CAAC,EACzG,MAAO,CAAC,GAAGkE,EAAQ,OAAQ,OAAQ9E,EAAM,OAAQ,OAAW,OAAQ,MAAA,CACrE,CACD,EAEa2E,EAAqB,CACjC,KAAM,kBACN,YAAa,8BACb,KAAM,CACL,KAAM,CAAC,KAAM,SAAU,YAAa,sBAAuB,SAAU,EAAA,CAAI,EAE1E,GAAI,MAAOzD,IAA0B,CAAC,OAAQ+D,EAAAA,mBAAmB/D,EAAK,IAAI,GAAA,EAC3E,EAEagE,EAAqB,CACjC,KAAM,SACN,YAAa,4FACb,KAAM,CACL,MAAO,CAAC,KAAM,SAAU,YAAa,gBAAiB,SAAU,EAAA,EAChE,OAAQ,CAAC,KAAM,SAAU,YAAa,8BAA+B,QAAS,CAA
A,CAAC,EAEhF,GAAI,MAAOhE,GAGL,CACL,MAAMiE,EAAO,MAAM,MAAM,uCAAuC,mBAAmBjE,EAAK,KAAK,CAAC,GAAI,CACjG,QAAS,CAAC,aAAc,4CAA6C,kBAAmB,gBAAA,CAAgB,CACxG,EAAE,KAAKlB,GAAQA,EAAK,MAAM,EAC3B,IAAIoF,EAAOC,EAAQ,8BACnB,MAAM/E,EAAU,IAAIgF,OACpB,MAAOF,EAAQC,EAAM,KAAKF,CAAI,KAAO,MAAM,CAC1C,IAAII,EAAM,iBAAiB,KAAK,mBAAmBH,EAAM,CAAC,CAAC,CAAC,IAAI,CAAC,EAGjE,GAFGG,IAAKA,EAAM,mBAAmBA,CAAG,GACjCA,GAAKjF,EAAQ,IAAIiF,CAAG,EACpBjF,EAAQ,OAASY,EAAK,QAAU,GAAI,KACxC,CACA,OAAOZ,CACR,CACD"}
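Aside from the re-ordered externals in the UMD bundle above (node:child_process is now pulled in and @ztimson/node-utils moved to the end of the dependency list), the chat surface is unchanged apart from minified identifier renames: LLM.ask() still returns a promise augmented with an abort() method. A minimal consumption sketch follows; the Ollama host, model name, and prompt are hypothetical values, not taken from the package.

import { Ai } from '@ztimson/ai-utils';

const ai = new Ai({
  model: 'ollama',                                              // default provider (hypothetical)
  ollama: { host: 'http://localhost:11434', model: 'llama3' }   // hypothetical host/model
});

// ask() returns an abortable promise: the chat history plus an abort() method
const chat = ai.llm.ask('Hello', {
  stream: c => { if (c.text) process.stdout.write(c.text); }    // optional streaming callback
});
// chat.abort();             // cancels streaming and any pending tool-call loop
const history = await chat;  // LLMMessage[], assistant reply appended last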
package/dist/index.mjs CHANGED
@@ -1,28 +1,29 @@
1
- import { $ as j, $Sync as T } from "@ztimson/node-utils";
2
- import { createWorker as q } from "tesseract.js";
3
- import { objectMap as b, JSONAttemptParse as w, findByProp as k, JSONSanitize as _, Http as v, consoleInterceptor as $, fn as A, ASet as P } from "@ztimson/utils";
4
- import { Anthropic as E } from "@anthropic-ai/sdk";
5
- import { Ollama as O } from "ollama";
6
- import { OpenAI as M } from "openai";
7
- import y from "node:fs/promises";
8
- import S from "node:path";
1
+ import { createWorker as S } from "tesseract.js";
2
+ import { objectMap as y, JSONAttemptParse as w, findByProp as b, JSONSanitize as _, Http as j, consoleInterceptor as T, fn as q, ASet as v } from "@ztimson/utils";
3
+ import { Anthropic as P } from "@anthropic-ai/sdk";
4
+ import { Ollama as A } from "ollama";
5
+ import { OpenAI as E } from "openai";
6
+ import x from "node:fs/promises";
7
+ import O from "node:path";
9
8
  import * as g from "@tensorflow/tfjs";
10
- class x {
9
+ import { spawn as M } from "node:child_process";
10
+ import { $ as U, $Sync as $ } from "@ztimson/node-utils";
11
+ class k {
11
12
  }
12
- class U extends x {
13
+ class L extends k {
13
14
  constructor(t, e, n) {
14
- super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new E({ apiKey: e });
15
+ super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new P({ apiKey: e });
15
16
  }
16
17
  client;
17
18
  toStandard(t) {
18
19
  for (let e = 0; e < t.length; e++) {
19
20
  const n = e;
20
- typeof t[n].content != "string" && (t[n].role == "assistant" ? t[n].content.filter((r) => r.type == "tool_use").forEach((r) => {
21
- e++, t.splice(e, 0, { role: "tool", id: r.id, name: r.name, args: r.input });
22
- }) : t[n].role == "user" && t[n].content.filter((r) => r.type == "tool_result").forEach((r) => {
23
- const i = t.find((u) => u.id == r.tool_use_id);
24
- i[r.is_error ? "error" : "content"] = r.content;
25
- }), t[n].content = t[n].content.filter((r) => r.type == "text").map((r) => r.text).join(`
21
+ typeof t[n].content != "string" && (t[n].role == "assistant" ? t[n].content.filter((s) => s.type == "tool_use").forEach((s) => {
22
+ e++, t.splice(e, 0, { role: "tool", id: s.id, name: s.name, args: s.input });
23
+ }) : t[n].role == "user" && t[n].content.filter((s) => s.type == "tool_result").forEach((s) => {
24
+ const l = t.find((u) => u.id == s.tool_use_id);
25
+ l[s.is_error ? "error" : "content"] = s.content;
26
+ }), t[n].content = t[n].content.filter((s) => s.type == "text").map((s) => s.text).join(`
26
27
 
27
28
  `));
28
29
  }
@@ -42,9 +43,9 @@ class U extends x {
42
43
  return t;
43
44
  }
44
45
  ask(t, e = {}) {
45
- const n = new AbortController(), r = new Promise(async (i, u) => {
46
- let c = this.fromStandard([...e.history || [], { role: "user", content: t }]);
47
- e.compress && (c = await this.ai.llm.compress(c, e.compress.max, e.compress.min, e));
46
+ const n = new AbortController(), s = new Promise(async (l, u) => {
47
+ let i = this.fromStandard([...e.history || [], { role: "user", content: t }]);
48
+ e.compress && (i = await this.ai.llm.compress(i, e.compress.max, e.compress.min, e));
48
49
  const m = {
49
50
  model: e.model || this.model,
50
51
  max_tokens: e.max_tokens || this.ai.options.max_tokens || 4096,
@@ -55,39 +56,39 @@ class U extends x {
55
56
  description: o.description,
56
57
  input_schema: {
57
58
  type: "object",
58
- properties: o.args ? b(o.args, (s, a) => ({ ...a, required: void 0 })) : {},
59
- required: o.args ? Object.entries(o.args).filter((s) => s[1].required).map((s) => s[0]) : []
59
+ properties: o.args ? y(o.args, (r, a) => ({ ...a, required: void 0 })) : {},
60
+ required: o.args ? Object.entries(o.args).filter((r) => r[1].required).map((r) => r[0]) : []
60
61
  },
61
62
  fn: void 0
62
63
  })),
63
- messages: c,
64
+ messages: i,
64
65
  stream: !!e.stream
65
66
  };
66
- let l;
67
+ let c;
67
68
  do {
68
- if (l = await this.client.messages.create(m), e.stream) {
69
- l.content = [];
70
- for await (const s of l) {
69
+ if (c = await this.client.messages.create(m), e.stream) {
70
+ c.content = [];
71
+ for await (const r of c) {
71
72
  if (n.signal.aborted) break;
72
- if (s.type === "content_block_start")
73
- s.content_block.type === "text" ? l.content.push({ type: "text", text: "" }) : s.content_block.type === "tool_use" && l.content.push({ type: "tool_use", id: s.content_block.id, name: s.content_block.name, input: "" });
74
- else if (s.type === "content_block_delta")
75
- if (s.delta.type === "text_delta") {
76
- const a = s.delta.text;
77
- l.content.at(-1).text += a, e.stream({ text: a });
78
- } else s.delta.type === "input_json_delta" && (l.content.at(-1).input += s.delta.partial_json);
79
- else if (s.type === "content_block_stop") {
80
- const a = l.content.at(-1);
73
+ if (r.type === "content_block_start")
74
+ r.content_block.type === "text" ? c.content.push({ type: "text", text: "" }) : r.content_block.type === "tool_use" && c.content.push({ type: "tool_use", id: r.content_block.id, name: r.content_block.name, input: "" });
75
+ else if (r.type === "content_block_delta")
76
+ if (r.delta.type === "text_delta") {
77
+ const a = r.delta.text;
78
+ c.content.at(-1).text += a, e.stream({ text: a });
79
+ } else r.delta.type === "input_json_delta" && (c.content.at(-1).input += r.delta.partial_json);
80
+ else if (r.type === "content_block_stop") {
81
+ const a = c.content.at(-1);
81
82
  a.input != null && (a.input = a.input ? w(a.input, {}) : {});
82
- } else if (s.type === "message_stop")
83
+ } else if (r.type === "message_stop")
83
84
  break;
84
85
  }
85
86
  }
86
- const o = l.content.filter((s) => s.type === "tool_use");
87
+ const o = c.content.filter((r) => r.type === "tool_use");
87
88
  if (o.length && !n.signal.aborted) {
88
- c.push({ role: "assistant", content: l.content });
89
- const s = await Promise.all(o.map(async (a) => {
90
- const p = e.tools?.find(k("name", a.name));
89
+ i.push({ role: "assistant", content: c.content });
90
+ const r = await Promise.all(o.map(async (a) => {
91
+ const p = e.tools?.find(b("name", a.name));
91
92
  if (!p) return { tool_use_id: a.id, is_error: !0, content: "Tool not found" };
92
93
  try {
93
94
  const f = await p.fn(a.input, this.ai);
@@ -96,22 +97,22 @@ class U extends x {
96
97
  return { type: "tool_result", tool_use_id: a.id, is_error: !0, content: f?.message || f?.toString() || "Unknown" };
97
98
  }
98
99
  }));
99
- c.push({ role: "user", content: s }), m.messages = c;
100
+ i.push({ role: "user", content: r }), m.messages = i;
100
101
  }
101
- } while (!n.signal.aborted && l.content.some((o) => o.type === "tool_use"));
102
- e.stream && e.stream({ done: !0 }), i(this.toStandard([...c, {
102
+ } while (!n.signal.aborted && c.content.some((o) => o.type === "tool_use"));
103
+ e.stream && e.stream({ done: !0 }), l(this.toStandard([...i, {
103
104
  role: "assistant",
104
- content: l.content.filter((o) => o.type == "text").map((o) => o.text).join(`
105
+ content: c.content.filter((o) => o.type == "text").map((o) => o.text).join(`
105
106
 
106
107
  `)
107
108
  }]));
108
109
  });
109
- return Object.assign(r, { abort: () => n.abort() });
110
+ return Object.assign(s, { abort: () => n.abort() });
110
111
  }
111
112
  }
112
- class L extends x {
113
+ class R extends k {
113
114
  constructor(t, e, n) {
114
- super(), this.ai = t, this.host = e, this.model = n, this.client = new O({ host: e });
115
+ super(), this.ai = t, this.host = e, this.model = n, this.client = new A({ host: e });
115
116
  }
116
117
  client;
117
118
  toStandard(t) {
@@ -128,10 +129,10 @@ class L extends x {
128
129
  return t.map((e) => e.role != "tool" ? e : { role: "tool", tool_name: e.name, content: e.error || e.content });
129
130
  }
130
131
  ask(t, e = {}) {
131
- const n = new AbortController(), r = new Promise(async (i, u) => {
132
- let c = e.system || this.ai.options.system, m = this.fromStandard([...e.history || [], { role: "user", content: t }]);
133
- m[0].roll == "system" && (c ? m.shift() : c = m.shift()), e.compress && (m = await this.ai.llm.compress(m, e.compress.max, e.compress.min)), e.system && m.unshift({ role: "system", content: c });
134
- const l = {
132
+ const n = new AbortController(), s = new Promise(async (l, u) => {
133
+ let i = e.system || this.ai.options.system, m = this.fromStandard([...e.history || [], { role: "user", content: t }]);
134
+ m[0].roll == "system" && (i ? m.shift() : i = m.shift()), e.compress && (m = await this.ai.llm.compress(m, e.compress.max, e.compress.min)), e.system && m.unshift({ role: "system", content: i });
135
+ const c = {
135
136
  model: e.model || this.model,
136
137
  messages: m,
137
138
  stream: !!e.stream,
@@ -140,30 +141,30 @@ class L extends x {
140
141
  temperature: e.temperature || this.ai.options.temperature || 0.7,
141
142
  num_predict: e.max_tokens || this.ai.options.max_tokens || 4096
142
143
  },
143
- tools: (e.tools || this.ai.options.tools || []).map((s) => ({
144
+ tools: (e.tools || this.ai.options.tools || []).map((r) => ({
144
145
  type: "function",
145
146
  function: {
146
- name: s.name,
147
- description: s.description,
147
+ name: r.name,
148
+ description: r.description,
148
149
  parameters: {
149
150
  type: "object",
150
- properties: s.args ? b(s.args, (a, p) => ({ ...p, required: void 0 })) : {},
151
- required: s.args ? Object.entries(s.args).filter((a) => a[1].required).map((a) => a[0]) : []
151
+ properties: r.args ? y(r.args, (a, p) => ({ ...p, required: void 0 })) : {},
152
+ required: r.args ? Object.entries(r.args).filter((a) => a[1].required).map((a) => a[0]) : []
152
153
  }
153
154
  }
154
155
  }))
155
156
  };
156
157
  let o;
157
158
  do {
158
- if (o = await this.client.chat(l), e.stream) {
159
+ if (o = await this.client.chat(c), e.stream) {
159
160
  o.message = { role: "assistant", content: "", tool_calls: [] };
160
- for await (const s of o)
161
- if (n.signal.aborted || (s.message?.content && (o.message.content += s.message.content, e.stream({ text: s.message.content })), s.message?.tool_calls && (o.message.tool_calls = s.message.tool_calls), s.done)) break;
161
+ for await (const r of o)
162
+ if (n.signal.aborted || (r.message?.content && (o.message.content += r.message.content, e.stream({ text: r.message.content })), r.message?.tool_calls && (o.message.tool_calls = r.message.tool_calls), r.done)) break;
162
163
  }
163
164
  if (o.message?.tool_calls?.length && !n.signal.aborted) {
164
165
  m.push(o.message);
165
- const s = await Promise.all(o.message.tool_calls.map(async (a) => {
166
- const p = (e.tools || this.ai.options.tools)?.find(k("name", a.function.name));
166
+ const r = await Promise.all(o.message.tool_calls.map(async (a) => {
167
+ const p = (e.tools || this.ai.options.tools)?.find(b("name", a.function.name));
167
168
  if (!p) return { role: "tool", tool_name: a.function.name, content: '{"error": "Tool not found"}' };
168
169
  const f = typeof a.function.arguments == "string" ? w(a.function.arguments, {}) : a.function.arguments;
169
170
  try {
@@ -173,33 +174,33 @@ class L extends x {
173
174
  return { role: "tool", tool_name: a.function.name, args: f, content: _({ error: h?.message || h?.toString() || "Unknown" }) };
174
175
  }
175
176
  }));
176
- m.push(...s), l.messages = m;
177
+ m.push(...r), c.messages = m;
177
178
  }
178
179
  } while (!n.signal.aborted && o.message?.tool_calls?.length);
179
- e.stream && e.stream({ done: !0 }), i(this.toStandard([...m, { role: "assistant", content: o.message?.content }]));
180
+ e.stream && e.stream({ done: !0 }), l(this.toStandard([...m, { role: "assistant", content: o.message?.content }]));
180
181
  });
181
- return Object.assign(r, { abort: () => n.abort() });
182
+ return Object.assign(s, { abort: () => n.abort() });
182
183
  }
183
184
  }
184
- class R extends x {
185
+ class I extends k {
185
186
  constructor(t, e, n) {
186
- super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new M({ apiKey: e });
187
+ super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new E({ apiKey: e });
187
188
  }
188
189
  client;
189
190
  toStandard(t) {
190
191
  for (let e = 0; e < t.length; e++) {
191
192
  const n = t[e];
192
193
  if (n.role === "assistant" && n.tool_calls) {
193
- const r = n.tool_calls.map((i) => ({
194
+ const s = n.tool_calls.map((l) => ({
194
195
  role: "tool",
195
- id: i.id,
196
- name: i.function.name,
197
- args: w(i.function.arguments, {})
196
+ id: l.id,
197
+ name: l.function.name,
198
+ args: w(l.function.arguments, {})
198
199
  }));
199
- t.splice(e, 1, ...r), e += r.length - 1;
200
+ t.splice(e, 1, ...s), e += s.length - 1;
200
201
  } else if (n.role === "tool" && n.content) {
201
- const r = t.find((i) => n.tool_call_id == i.id);
202
- r && (n.content.includes('"error":') ? r.error = n.content : r.content = n.content), t.splice(e, 1), e--;
202
+ const s = t.find((l) => n.tool_call_id == l.id);
203
+ s && (n.content.includes('"error":') ? s.error = n.content : s.content = n.content), t.splice(e, 1), e--;
203
204
  }
204
205
  }
205
206
  return t;
@@ -218,12 +219,12 @@ class R extends x {
218
219
  }) : e.push(n), e), []);
219
220
  }
220
221
  ask(t, e = {}) {
221
- const n = new AbortController(), r = new Promise(async (i, u) => {
222
- let c = this.fromStandard([...e.history || [], { role: "user", content: t }]);
223
- e.compress && (c = await this.ai.llm.compress(c, e.compress.max, e.compress.min, e));
222
+ const n = new AbortController(), s = new Promise(async (l, u) => {
223
+ let i = this.fromStandard([...e.history || [], { role: "user", content: t }]);
224
+ e.compress && (i = await this.ai.llm.compress(i, e.compress.max, e.compress.min, e));
224
225
  const m = {
225
226
  model: e.model || this.model,
226
- messages: c,
227
+ messages: i,
227
228
  stream: !!e.stream,
228
229
  max_tokens: e.max_tokens || this.ai.options.max_tokens || 4096,
229
230
  temperature: e.temperature || this.ai.options.temperature || 0.7,
@@ -234,26 +235,26 @@ class R extends x {
234
235
  description: o.description,
235
236
  parameters: {
236
237
  type: "object",
237
- properties: o.args ? b(o.args, (s, a) => ({ ...a, required: void 0 })) : {},
238
- required: o.args ? Object.entries(o.args).filter((s) => s[1].required).map((s) => s[0]) : []
238
+ properties: o.args ? y(o.args, (r, a) => ({ ...a, required: void 0 })) : {},
239
+ required: o.args ? Object.entries(o.args).filter((r) => r[1].required).map((r) => r[0]) : []
239
240
  }
240
241
  }
241
242
  }))
242
243
  };
243
- let l;
244
+ let c;
244
245
  do {
245
- if (l = await this.client.chat.completions.create(m), e.stream) {
246
- l.choices = [];
247
- for await (const s of l) {
246
+ if (c = await this.client.chat.completions.create(m), e.stream) {
247
+ c.choices = [];
248
+ for await (const r of c) {
248
249
  if (n.signal.aborted) break;
249
- s.choices[0].delta.content && e.stream({ text: s.choices[0].delta.content });
250
+ r.choices[0].delta.content && e.stream({ text: r.choices[0].delta.content });
250
251
  }
251
252
  }
252
- const o = l.choices[0].message.tool_calls || [];
253
+ const o = c.choices[0].message.tool_calls || [];
253
254
  if (o.length && !n.signal.aborted) {
254
- c.push(l.choices[0].message);
255
- const s = await Promise.all(o.map(async (a) => {
256
- const p = e.tools?.find(k("name", a.function.name));
255
+ i.push(c.choices[0].message);
256
+ const r = await Promise.all(o.map(async (a) => {
257
+ const p = e.tools?.find(b("name", a.function.name));
257
258
  if (!p) return { role: "tool", tool_call_id: a.id, content: '{"error": "Tool not found"}' };
258
259
  try {
259
260
  const f = w(a.function.arguments, {}), h = await p.fn(f, this.ai);
@@ -262,17 +263,17 @@ class R extends x {
262
263
  return { role: "tool", tool_call_id: a.id, content: _({ error: f?.message || f?.toString() || "Unknown" }) };
263
264
  }
264
265
  }));
265
- c.push(...s), m.messages = c;
266
+ i.push(...r), m.messages = i;
266
267
  }
267
- } while (!n.signal.aborted && l.choices?.[0]?.message?.tool_calls?.length);
268
- e.stream && e.stream({ done: !0 }), i(this.toStandard([...c, { role: "assistant", content: l.choices[0].message.content || "" }]));
268
+ } while (!n.signal.aborted && c.choices?.[0]?.message?.tool_calls?.length);
269
+ e.stream && e.stream({ done: !0 }), l(this.toStandard([...i, { role: "assistant", content: c.choices[0].message.content || "" }]));
269
270
  });
270
- return Object.assign(r, { abort: () => n.abort() });
271
+ return Object.assign(s, { abort: () => n.abort() });
271
272
  }
272
273
  }
273
274
  class J {
274
275
  constructor(t, e) {
275
- this.ai = t, this.options = e, e.anthropic?.token && (this.providers.anthropic = new U(this.ai, e.anthropic.token, e.anthropic.model)), e.ollama?.host && (this.providers.ollama = new L(this.ai, e.ollama.host, e.ollama.model)), e.openAi?.token && (this.providers.openAi = new R(this.ai, e.openAi.token, e.openAi.model));
276
+ this.ai = t, this.options = e, e.anthropic?.token && (this.providers.anthropic = new L(this.ai, e.anthropic.token, e.anthropic.model)), e.ollama?.host && (this.providers.ollama = new R(this.ai, e.ollama.host, e.ollama.model)), e.openAi?.token && (this.providers.openAi = new I(this.ai, e.openAi.token, e.openAi.model));
276
277
  }
277
278
  providers = {};
278
279
  /**
@@ -294,17 +295,17 @@ class J {
294
295
  * @param {LLMRequest} options LLM options
295
296
  * @returns {Promise<LLMMessage[]>} New chat history will summary at index 0
296
297
  */
297
- async compress(t, e, n, r) {
298
+ async compress(t, e, n, s) {
298
299
  if (this.estimateTokens(t) < e) return t;
299
- let i = 0, u = 0;
300
+ let l = 0, u = 0;
300
301
  for (let o of t.toReversed())
301
- if (u += this.estimateTokens(o.content), u < n) i++;
302
+ if (u += this.estimateTokens(o.content), u < n) l++;
302
303
  else break;
303
- if (t.length <= i) return t;
304
- const c = i == 0 ? [] : t.slice(-i), m = (i == 0 ? t : t.slice(0, -i)).filter((o) => o.role === "assistant" || o.role === "user");
304
+ if (t.length <= l) return t;
305
+ const i = l == 0 ? [] : t.slice(-l), m = (l == 0 ? t : t.slice(0, -l)).filter((o) => o.role === "assistant" || o.role === "user");
305
306
  return [{ role: "assistant", content: `Conversation Summary: ${await this.summarize(m.map((o) => `${o.role}: ${o.content}`).join(`
306
307
 
307
- `), 250, r)}` }, ...c];
308
+ `), 250, s)}` }, ...i];
308
309
  }
309
310
  /**
310
311
  * Estimate variable as tokens
@@ -336,12 +337,12 @@ class J {
336
337
  * @returns {Promise<string>} Summary
337
338
  */
338
339
  summarize(t, e, n) {
339
- return this.ask(t, { system: `Generate a brief summary <= ${e} tokens. Output nothing else`, temperature: 0.3, ...n }).then((r) => r.pop()?.content || null);
340
+ return this.ask(t, { system: `Generate a brief summary <= ${e} tokens. Output nothing else`, temperature: 0.3, ...n }).then((s) => s.pop()?.content || null);
340
341
  }
341
342
  }
342
- class V {
343
+ class X {
343
344
  constructor(t) {
344
- this.options = t, this.llm = new J(this, t), this.options.whisper?.binary && (this.whisperModel = this.options.whisper?.model.endsWith(".bin") ? this.options.whisper?.model : this.options.whisper?.model + ".bin", console.log("constructor: " + this.options.whisper.model + " -> " + this.whisperModel), this.downloadAsrModel());
345
+ this.options = t, this.llm = new J(this, t), this.options.whisper?.binary && (this.whisperModel = this.options.whisper?.model.endsWith(".bin") ? this.options.whisper?.model : this.options.whisper?.model + ".bin", this.downloadAsrModel());
345
346
  }
346
347
  downloads = {};
347
348
  whisperModel;
@@ -353,11 +354,19 @@ class V {
353
354
  * @param model Whisper model
354
355
  * @returns {Promise<any>} Extracted text
355
356
  */
356
- async asr(t, e = this.whisperModel) {
357
+ asr(t, e = this.whisperModel) {
357
358
  if (!this.options.whisper?.binary) throw new Error("Whisper not configured");
358
- const n = await this.downloadAsrModel(e), r = Math.random().toString(36).substring(2, 10) + "-" + t.split("/").pop() + ".txt", i = S.join(this.options.whisper.temp || "/tmp", r);
359
- return console.log("ASR: " + this.options.whisper.model + " -> " + this.whisperModel), console.log(`rm -f ${i} && ${this.options.whisper.binary} -nt -np -m ${n} -f ${t} -otxt -of ${i}`), await j`rm -f ${i} && ${this.options.whisper.binary} -nt -np -m ${n} -f ${t} -otxt -of ${i}`, y.readFile(i, "utf-8").then((u) => u?.trim() || null).finally(() => y.rm(i, { force: !0 }).catch(() => {
360
- }));
359
+ let n = () => {
360
+ };
361
+ return { response: new Promise((l, u) => {
362
+ this.downloadAsrModel(e).then((i) => {
363
+ let m = "";
364
+ const c = M(this.options.whisper?.binary, ["-nt", "-np", "-m", i, "-f", t], { stdio: ["ignore", "pipe", "ignore"] });
365
+ n = () => c.kill("SIGTERM"), c.on("error", (o) => u(o)), c.stdout.on("data", (o) => m += o.toString()), c.on("close", (o) => {
366
+ o === 0 ? l(m.trim() || null) : u(new Error(`Exit code ${o}`));
367
+ });
368
+ });
369
+ }), abort: n };
361
370
  }
362
371
  /**
363
372
  * Downloads the specified Whisper model if it is not already present locally.
@@ -368,8 +377,8 @@ class V {
368
377
  async downloadAsrModel(t = this.whisperModel) {
369
378
  if (!this.options.whisper?.binary) throw new Error("Whisper not configured");
370
379
  t.endsWith(".bin") || (t += ".bin");
371
- const e = S.join(this.options.whisper.path, t);
372
- return console.log("Download: " + e), await y.stat(e).then(() => !0).catch(() => !1) ? (console.log("Exists!"), e) : this.downloads[t] ? this.downloads[t] : (this.downloads[t] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${t}`).then((n) => n.arrayBuffer()).then((n) => Buffer.from(n)).then(async (n) => (await y.writeFile(e, n), delete this.downloads[t], e)), this.downloads[t]);
380
+ const e = O.join(this.options.whisper.path, t);
381
+ return await x.stat(e).then(() => !0).catch(() => !1) ? e : this.downloads[t] ? this.downloads[t] : (this.downloads[t] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${t}`).then((n) => n.arrayBuffer()).then((n) => Buffer.from(n)).then(async (n) => (await x.writeFile(e, n), delete this.downloads[t], e)), this.downloads[t]);
373
382
  }
374
383
  /**
375
384
  * Convert image to text using Optical Character Recognition
@@ -383,9 +392,9 @@ class V {
383
392
  e?.terminate();
384
393
  },
385
394
  response: new Promise(async (n) => {
386
- e = await q("eng");
387
- const { data: r } = await e.recognize(t);
388
- await e.terminate(), n(r.text.trim() || null);
395
+ e = await S("eng");
396
+ const { data: s } = await e.recognize(t);
397
+ await e.terminate(), n(s.text.trim() || null);
389
398
  })
390
399
  };
391
400
  }
@@ -397,25 +406,25 @@ class V {
397
406
  */
398
407
  semanticSimilarity(t, ...e) {
399
408
  if (e.length < 2) throw new Error("Requires at least 2 strings to compare");
400
- const n = (c, m = 10) => c.toLowerCase().split("").map((l, o) => l.charCodeAt(0) * (o + 1) % m / m).slice(0, m), r = (c, m) => {
401
- if (c.length !== m.length) throw new Error("Vectors must be same length");
402
- const l = g.tensor1d(c), o = g.tensor1d(m), s = g.dot(l, o), a = g.norm(l), p = g.norm(o);
403
- return a.dataSync()[0] === 0 || p.dataSync()[0] === 0 ? 0 : s.dataSync()[0] / (a.dataSync()[0] * p.dataSync()[0]);
404
- }, i = n(t), u = e.map((c) => n(c)).map((c) => r(i, c));
405
- return { avg: u.reduce((c, m) => c + m, 0) / u.length, max: Math.max(...u), similarities: u };
409
+ const n = (i, m = 10) => i.toLowerCase().split("").map((c, o) => c.charCodeAt(0) * (o + 1) % m / m).slice(0, m), s = (i, m) => {
410
+ if (i.length !== m.length) throw new Error("Vectors must be same length");
411
+ const c = g.tensor1d(i), o = g.tensor1d(m), r = g.dot(c, o), a = g.norm(c), p = g.norm(o);
412
+ return a.dataSync()[0] === 0 || p.dataSync()[0] === 0 ? 0 : r.dataSync()[0] / (a.dataSync()[0] * p.dataSync()[0]);
413
+ }, l = n(t), u = e.map((i) => n(i)).map((i) => s(l, i));
414
+ return { avg: u.reduce((i, m) => i + m, 0) / u.length, max: Math.max(...u), similarities: u };
406
415
  }
407
416
  }
408
417
  const W = {
409
418
  name: "cli",
410
419
  description: "Use the command line interface, returns any output",
411
420
  args: { command: { type: "string", description: "Command to run", required: !0 } },
412
- fn: (d) => j`${d.command}`
413
- }, Q = {
421
+ fn: (d) => U`${d.command}`
422
+ }, Y = {
414
423
  name: "get_datetime",
415
424
  description: "Get current date and time",
416
425
  args: {},
417
426
  fn: async () => (/* @__PURE__ */ new Date()).toISOString()
418
- }, X = {
427
+ }, Z = {
419
428
  name: "exec",
420
429
  description: "Run code/scripts",
421
430
  args: {
@@ -428,15 +437,15 @@ const W = {
428
437
  case "bash":
429
438
  return await W.fn({ command: d.code }, t);
430
439
  case "node":
431
- return await I.fn({ code: d.code }, t);
432
- case "python":
433
440
  return await N.fn({ code: d.code }, t);
441
+ case "python":
442
+ return await z.fn({ code: d.code }, t);
434
443
  }
435
444
  } catch (e) {
436
445
  return { error: e?.message || e.toString() };
437
446
  }
438
447
  }
439
- }, Y = {
448
+ }, ee = {
440
449
  name: "fetch",
441
450
  description: "Make HTTP request to URL",
442
451
  args: {
@@ -445,25 +454,25 @@ const W = {
445
454
  headers: { type: "object", description: "HTTP headers to send", default: {} },
446
455
  body: { type: "object", description: "HTTP body to send" }
447
456
  },
448
- fn: (d) => new v({ url: d.url, headers: d.headers }).request({ method: d.method || "GET", body: d.body })
449
- }, I = {
457
+ fn: (d) => new j({ url: d.url, headers: d.headers }).request({ method: d.method || "GET", body: d.body })
458
+ }, N = {
450
459
  name: "exec_javascript",
451
460
  description: "Execute commonjs javascript",
452
461
  args: {
453
462
  code: { type: "string", description: "CommonJS javascript", required: !0 }
454
463
  },
455
464
  fn: async (d) => {
456
- const t = $(null), e = await A({ console: t }, d.code, !0).catch((n) => t.output.error.push(n));
465
+ const t = T(null), e = await q({ console: t }, d.code, !0).catch((n) => t.output.error.push(n));
457
466
  return { ...t.output, return: e, stdout: void 0, stderr: void 0 };
458
467
  }
459
- }, N = {
468
+ }, z = {
460
469
  name: "exec_javascript",
461
470
  description: "Execute commonjs javascript",
462
471
  args: {
463
472
  code: { type: "string", description: "CommonJS javascript", required: !0 }
464
473
  },
465
- fn: async (d) => ({ result: T`python -c "${d.code}"` })
466
- }, Z = {
474
+ fn: async (d) => ({ result: $`python -c "${d.code}"` })
475
+ }, te = {
467
476
  name: "search",
468
477
  description: "Use a search engine to find relevant URLs, should be changed with fetch to scrape sources",
469
478
  args: {
@@ -473,26 +482,26 @@ const W = {
473
482
  fn: async (d) => {
474
483
  const t = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(d.query)}`, {
475
484
  headers: { "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)", "Accept-Language": "en-US,en;q=0.9" }
476
- }).then((i) => i.text());
485
+ }).then((l) => l.text());
477
486
  let e, n = /<a .*?href="(.+?)".+?<\/a>/g;
478
- const r = new P();
487
+ const s = new v();
479
488
  for (; (e = n.exec(t)) !== null; ) {
480
- let i = /uddg=(.+)&amp?/.exec(decodeURIComponent(e[1]))?.[1];
481
- if (i && (i = decodeURIComponent(i)), i && r.add(i), r.size >= (d.length || 5)) break;
489
+ let l = /uddg=(.+)&amp?/.exec(decodeURIComponent(e[1]))?.[1];
490
+ if (l && (l = decodeURIComponent(l)), l && s.add(l), s.size >= (d.length || 5)) break;
482
491
  }
483
- return r;
492
+ return s;
484
493
  }
485
494
  };
486
495
  export {
487
- V as Ai,
488
- U as Anthropic,
496
+ X as Ai,
497
+ L as Anthropic,
489
498
  W as CliTool,
490
- Q as DateTimeTool,
491
- X as ExecTool,
492
- Y as FetchTool,
493
- I as JSTool,
499
+ Y as DateTimeTool,
500
+ Z as ExecTool,
501
+ ee as FetchTool,
502
+ N as JSTool,
494
503
  J as LLM,
495
- N as PythonTool,
496
- Z as SearchTool
504
+ z as PythonTool,
505
+ te as SearchTool
497
506
  };
498
507
  //# sourceMappingURL=index.mjs.map
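The substantive change in index.mjs is asr(): 0.1.13 awaited a $-shell invocation of whisper that wrote a temporary -otxt file (and logged debug output), while 0.1.15 spawns the binary directly, captures stdout, no longer needs a temporary file or the whisper temp setting, and returns an { abort, response } handle matching ocr(). A consumption sketch follows, assuming whisper.cpp is installed locally; the binary/model paths and audio file below are hypothetical.

import { Ai } from '@ztimson/ai-utils';

const ai = new Ai({
  model: 'ollama',                                              // hypothetical LLM defaults
  ollama: { host: 'http://localhost:11434', model: 'llama3' },
  whisper: {
    binary: '/usr/local/bin/whisper-cli',   // hypothetical path to the whisper.cpp CLI
    path: '/var/models',                    // where downloaded .bin models are kept
    model: 'ggml-base.en'                   // fetched from Hugging Face on first use
  }
});

// 0.1.13: const text = await ai.asr('/recordings/call.wav');
// 0.1.15: asr() returns { abort, response } instead of a bare promise
const { abort, response } = ai.asr('/recordings/call.wav');
const timer = setTimeout(abort, 60_000);    // abort() is intended to SIGTERM the spawned process
const text = await response;                // Promise<string | null>
clearTimeout(timer);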
@@ -1 +1 @@
1
- {"version":3,"file":"index.mjs","sources":["../src/provider.ts","../src/antrhopic.ts","../src/ollama.ts","../src/open-ai.ts","../src/llm.ts","../src/ai.ts","../src/tools.ts"],"sourcesContent":["import {LLMMessage, LLMOptions, LLMRequest} from './llm.ts';\n\nexport type AbortablePromise<T> = Promise<T> & {abort: () => void};\n\nexport abstract class LLMProvider {\n\tabstract ask(message: string, options: LLMRequest): AbortablePromise<LLMMessage[]>;\n}\n","import {Anthropic as anthropic} from '@anthropic-ai/sdk';\nimport {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {LLMMessage, LLMRequest} from './llm.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\n\nexport class Anthropic extends LLMProvider {\n\tclient!: anthropic;\n\n\tconstructor(public readonly ai: Ai, public readonly apiToken: string, public model: string) {\n\t\tsuper();\n\t\tthis.client = new anthropic({apiKey: apiToken});\n\t}\n\n\tprivate toStandard(history: any[]): LLMMessage[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tconst orgI = i;\n\t\t\tif(typeof history[orgI].content != 'string') {\n\t\t\t\tif(history[orgI].role == 'assistant') {\n\t\t\t\t\thistory[orgI].content.filter((c: any) => c.type =='tool_use').forEach((c: any) => {\n\t\t\t\t\t\ti++;\n\t\t\t\t\t\thistory.splice(i, 0, {role: 'tool', id: c.id, name: c.name, args: c.input});\n\t\t\t\t\t});\n\t\t\t\t} else if(history[orgI].role == 'user') {\n\t\t\t\t\thistory[orgI].content.filter((c: any) => c.type =='tool_result').forEach((c: any) => {\n\t\t\t\t\t\tconst h = history.find((h: any) => h.id == c.tool_use_id);\n\t\t\t\t\t\th[c.is_error ? 'error' : 'content'] = c.content;\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\thistory[orgI].content = history[orgI].content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\\n\\n');\n\t\t\t}\n\t\t}\n\t\treturn history.filter(h => !!h.content);\n\t}\n\n\tprivate fromStandard(history: LLMMessage[]): any[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tif(history[i].role == 'tool') {\n\t\t\t\tconst h: any = history[i];\n\t\t\t\thistory.splice(i, 1,\n\t\t\t\t\t{role: 'assistant', content: [{type: 'tool_use', id: h.id, name: h.name, input: h.args}]},\n\t\t\t\t\t{role: 'user', content: [{type: 'tool_result', tool_use_id: h.id, is_error: !!h.error, content: h.error || h.content}]}\n\t\t\t\t)\n\t\t\t\ti++;\n\t\t\t}\n\t\t}\n\t\treturn history;\n\t}\n\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tconst controller = new AbortController();\n\t\tconst response = new Promise<any>(async (res, rej) => {\n\t\t\tlet history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);\n\t\t\tif(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);\n\t\t\tconst requestParams: any = {\n\t\t\t\tmodel: options.model || this.model,\n\t\t\t\tmax_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,\n\t\t\t\tsystem: options.system || this.ai.options.system || '',\n\t\t\t\ttemperature: options.temperature || this.ai.options.temperature || 0.7,\n\t\t\t\ttools: (options.tools || this.ai.options.tools || []).map(t => ({\n\t\t\t\t\tname: t.name,\n\t\t\t\t\tdescription: t.description,\n\t\t\t\t\tinput_schema: {\n\t\t\t\t\t\ttype: 'object',\n\t\t\t\t\t\tproperties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},\n\t\t\t\t\t\trequired: t.args ? 
Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []\n\t\t\t\t\t},\n\t\t\t\t\tfn: undefined\n\t\t\t\t})),\n\t\t\t\tmessages: history,\n\t\t\t\tstream: !!options.stream,\n\t\t\t};\n\n\t\t\t// Run tool changes\n\t\t\tlet resp: any;\n\t\t\tdo {\n\t\t\t\tresp = await this.client.messages.create(requestParams);\n\n\t\t\t\t// Streaming mode\n\t\t\t\tif(options.stream) {\n\t\t\t\t\tresp.content = [];\n\t\t\t\t\tfor await (const chunk of resp) {\n\t\t\t\t\t\tif(controller.signal.aborted) break;\n\t\t\t\t\t\tif(chunk.type === 'content_block_start') {\n\t\t\t\t\t\t\tif(chunk.content_block.type === 'text') {\n\t\t\t\t\t\t\t\tresp.content.push({type: 'text', text: ''});\n\t\t\t\t\t\t\t} else if(chunk.content_block.type === 'tool_use') {\n\t\t\t\t\t\t\t\tresp.content.push({type: 'tool_use', id: chunk.content_block.id, name: chunk.content_block.name, input: <any>''});\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t} else if(chunk.type === 'content_block_delta') {\n\t\t\t\t\t\t\tif(chunk.delta.type === 'text_delta') {\n\t\t\t\t\t\t\t\tconst text = chunk.delta.text;\n\t\t\t\t\t\t\t\tresp.content.at(-1).text += text;\n\t\t\t\t\t\t\t\toptions.stream({text});\n\t\t\t\t\t\t\t} else if(chunk.delta.type === 'input_json_delta') {\n\t\t\t\t\t\t\t\tresp.content.at(-1).input += chunk.delta.partial_json;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t} else if(chunk.type === 'content_block_stop') {\n\t\t\t\t\t\t\tconst last = resp.content.at(-1);\n\t\t\t\t\t\t\tif(last.input != null) last.input = last.input ? JSONAttemptParse(last.input, {}) : {};\n\t\t\t\t\t\t} else if(chunk.type === 'message_stop') {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Run tools\n\t\t\t\tconst toolCalls = resp.content.filter((c: any) => c.type === 'tool_use');\n\t\t\t\tif(toolCalls.length && !controller.signal.aborted) {\n\t\t\t\t\thistory.push({role: 'assistant', content: resp.content});\n\t\t\t\t\tconst results = await Promise.all(toolCalls.map(async (toolCall: any) => {\n\t\t\t\t\t\tconst tool = options.tools?.find(findByProp('name', toolCall.name));\n\t\t\t\t\t\tif(!tool) return {tool_use_id: toolCall.id, is_error: true, content: 'Tool not found'};\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst result = await tool.fn(toolCall.input, this.ai);\n\t\t\t\t\t\t\treturn {type: 'tool_result', tool_use_id: toolCall.id, content: JSONSanitize(result)};\n\t\t\t\t\t\t} catch (err: any) {\n\t\t\t\t\t\t\treturn {type: 'tool_result', tool_use_id: toolCall.id, is_error: true, content: err?.message || err?.toString() || 'Unknown'};\n\t\t\t\t\t\t}\n\t\t\t\t\t}));\n\t\t\t\t\thistory.push({role: 'user', content: results});\n\t\t\t\t\trequestParams.messages = history;\n\t\t\t\t}\n\t\t\t} while (!controller.signal.aborted && resp.content.some((c: any) => c.type === 'tool_use'));\n\t\t\tif(options.stream) options.stream({done: true});\n\t\t\tres(this.toStandard([...history, {\n\t\t\t\trole: 'assistant',\n\t\t\t\tcontent: resp.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\\n\\n')\n\t\t\t}]));\n\t\t});\n\t\treturn Object.assign(response, {abort: () => controller.abort()});\n\t}\n}\n","import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {LLMMessage, LLMRequest} from './llm.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\nimport {Ollama as ollama} from 'ollama';\n\nexport class Ollama extends LLMProvider {\n\tclient!: ollama;\n\n\tconstructor(public readonly ai: Ai, public host: string, public model: string) 
{\n\t\tsuper();\n\t\tthis.client = new ollama({host});\n\t}\n\n\tprivate toStandard(history: any[]): LLMMessage[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tif(history[i].role == 'assistant' && history[i].tool_calls) {\n\t\t\t\tif(history[i].content) delete history[i].tool_calls;\n\t\t\t\telse {\n\t\t\t\t\thistory.splice(i, 1);\n\t\t\t\t\ti--;\n\t\t\t\t}\n\t\t\t} else if(history[i].role == 'tool') {\n\t\t\t\tconst error = history[i].content.startsWith('{\"error\":');\n\t\t\t\thistory[i] = {role: 'tool', name: history[i].tool_name, args: history[i].args, [error ? 'error' : 'content']: history[i].content};\n\t\t\t}\n\t\t}\n\t\treturn history;\n\t}\n\n\tprivate fromStandard(history: LLMMessage[]): any[] {\n\t\treturn history.map((h: any) => {\n\t\t\tif(h.role != 'tool') return h;\n\t\t\treturn {role: 'tool', tool_name: h.name, content: h.error || h.content}\n\t\t});\n\t}\n\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tconst controller = new AbortController();\n\t\tconst response = new Promise<any>(async (res, rej) => {\n\t\t\tlet system = options.system || this.ai.options.system;\n\t\t\tlet history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);\n\t\t\tif(history[0].roll == 'system') {\n\t\t\t\tif(!system) system = history.shift();\n\t\t\t\telse history.shift();\n\t\t\t}\n\t\t\tif(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min);\n\t\t\tif(options.system) history.unshift({role: 'system', content: system})\n\n\t\t\tconst requestParams: any = {\n\t\t\t\tmodel: options.model || this.model,\n\t\t\t\tmessages: history,\n\t\t\t\tstream: !!options.stream,\n\t\t\t\tsignal: controller.signal,\n\t\t\t\toptions: {\n\t\t\t\t\ttemperature: options.temperature || this.ai.options.temperature || 0.7,\n\t\t\t\t\tnum_predict: options.max_tokens || this.ai.options.max_tokens || 4096,\n\t\t\t\t},\n\t\t\t\ttools: (options.tools || this.ai.options.tools || []).map(t => ({\n\t\t\t\t\ttype: 'function',\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: t.name,\n\t\t\t\t\t\tdescription: t.description,\n\t\t\t\t\t\tparameters: {\n\t\t\t\t\t\t\ttype: 'object',\n\t\t\t\t\t\t\tproperties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},\n\t\t\t\t\t\t\trequired: t.args ? 
Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}))\n\t\t\t}\n\n\t\t\t// Run tool chains\n\t\t\tlet resp: any;\n\t\t\tdo {\n\t\t\t\tresp = await this.client.chat(requestParams);\n\t\t\t\tif(options.stream) {\n\t\t\t\t\tresp.message = {role: 'assistant', content: '', tool_calls: []};\n\t\t\t\t\tfor await (const chunk of resp) {\n\t\t\t\t\t\tif(controller.signal.aborted) break;\n\t\t\t\t\t\tif(chunk.message?.content) {\n\t\t\t\t\t\t\tresp.message.content += chunk.message.content;\n\t\t\t\t\t\t\toptions.stream({text: chunk.message.content});\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif(chunk.message?.tool_calls) resp.message.tool_calls = chunk.message.tool_calls;\n\t\t\t\t\t\tif(chunk.done) break;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Run tools\n\t\t\t\tif(resp.message?.tool_calls?.length && !controller.signal.aborted) {\n\t\t\t\t\thistory.push(resp.message);\n\t\t\t\t\tconst results = await Promise.all(resp.message.tool_calls.map(async (toolCall: any) => {\n\t\t\t\t\t\tconst tool = (options.tools || this.ai.options.tools)?.find(findByProp('name', toolCall.function.name));\n\t\t\t\t\t\tif(!tool) return {role: 'tool', tool_name: toolCall.function.name, content: '{\"error\": \"Tool not found\"}'};\n\t\t\t\t\t\tconst args = typeof toolCall.function.arguments === 'string' ? JSONAttemptParse(toolCall.function.arguments, {}) : toolCall.function.arguments;\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst result = await tool.fn(args, this.ai);\n\t\t\t\t\t\t\treturn {role: 'tool', tool_name: toolCall.function.name, args, content: JSONSanitize(result)};\n\t\t\t\t\t\t} catch (err: any) {\n\t\t\t\t\t\t\treturn {role: 'tool', tool_name: toolCall.function.name, args, content: JSONSanitize({error: err?.message || err?.toString() || 'Unknown'})};\n\t\t\t\t\t\t}\n\t\t\t\t\t}));\n\t\t\t\t\thistory.push(...results);\n\t\t\t\t\trequestParams.messages = history;\n\t\t\t\t}\n\t\t\t} while (!controller.signal.aborted && resp.message?.tool_calls?.length);\n\t\t\tif(options.stream) options.stream({done: true});\n\t\t\tres(this.toStandard([...history, {role: 'assistant', content: resp.message?.content}]));\n\t\t});\n\t\treturn Object.assign(response, {abort: () => controller.abort()});\n\t}\n}\n","import {OpenAI as openAI} from 'openai';\nimport {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {LLMMessage, LLMRequest} from './llm.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\n\nexport class OpenAi extends LLMProvider {\n\tclient!: openAI;\n\n\tconstructor(public readonly ai: Ai, public readonly apiToken: string, public model: string) {\n\t\tsuper();\n\t\tthis.client = new openAI({apiKey: apiToken});\n\t}\n\n\tprivate toStandard(history: any[]): LLMMessage[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tconst h = history[i];\n\t\t\tif(h.role === 'assistant' && h.tool_calls) {\n\t\t\t\tconst tools = h.tool_calls.map((tc: any) => ({\n\t\t\t\t\trole: 'tool',\n\t\t\t\t\tid: tc.id,\n\t\t\t\t\tname: tc.function.name,\n\t\t\t\t\targs: JSONAttemptParse(tc.function.arguments, {})\n\t\t\t\t}));\n\t\t\t\thistory.splice(i, 1, ...tools);\n\t\t\t\ti += tools.length - 1;\n\t\t\t} else if(h.role === 'tool' && h.content) {\n\t\t\t\tconst record = history.find(h2 => h.tool_call_id == h2.id);\n\t\t\t\tif(record) {\n\t\t\t\t\tif(h.content.includes('\"error\":')) record.error = h.content;\n\t\t\t\t\telse record.content = h.content;\n\t\t\t\t}\n\t\t\t\thistory.splice(i, 
1);\n\t\t\t\ti--;\n\t\t\t}\n\n\t\t}\n\t\treturn history;\n\t}\n\n\tprivate fromStandard(history: LLMMessage[]): any[] {\n\t\treturn history.reduce((result, h) => {\n\t\t\tif(h.role === 'tool') {\n\t\t\t\tresult.push({\n\t\t\t\t\trole: 'assistant',\n\t\t\t\t\tcontent: null,\n\t\t\t\t\ttool_calls: [{ id: h.id, type: 'function', function: { name: h.name, arguments: JSON.stringify(h.args) } }],\n\t\t\t\t\trefusal: null,\n\t\t\t\t\tannotations: [],\n\t\t\t\t}, {\n\t\t\t\t\trole: 'tool',\n\t\t\t\t\ttool_call_id: h.id,\n\t\t\t\t\tcontent: h.error || h.content\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\tresult.push(h);\n\t\t\t}\n\t\t\treturn result;\n\t\t}, [] as any[]);\n\t}\n\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tconst controller = new AbortController();\n\t\tconst response = new Promise<any>(async (res, rej) => {\n\t\t\tlet history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);\n\t\t\tif(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);\n\n\t\t\tconst requestParams: any = {\n\t\t\t\tmodel: options.model || this.model,\n\t\t\t\tmessages: history,\n\t\t\t\tstream: !!options.stream,\n\t\t\t\tmax_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,\n\t\t\t\ttemperature: options.temperature || this.ai.options.temperature || 0.7,\n\t\t\t\ttools: (options.tools || this.ai.options.tools || []).map(t => ({\n\t\t\t\t\ttype: 'function',\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: t.name,\n\t\t\t\t\t\tdescription: t.description,\n\t\t\t\t\t\tparameters: {\n\t\t\t\t\t\t\ttype: 'object',\n\t\t\t\t\t\t\tproperties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},\n\t\t\t\t\t\t\trequired: t.args ? 
Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}))\n\t\t\t};\n\n\t\t\t// Tool call and streaming logic similar to other providers\n\t\t\tlet resp: any;\n\t\t\tdo {\n\t\t\t\tresp = await this.client.chat.completions.create(requestParams);\n\n\t\t\t\t// Implement streaming and tool call handling\n\t\t\t\tif(options.stream) {\n\t\t\t\t\tresp.choices = [];\n\t\t\t\t\tfor await (const chunk of resp) {\n\t\t\t\t\t\tif(controller.signal.aborted) break;\n\t\t\t\t\t\tif(chunk.choices[0].delta.content) {\n\t\t\t\t\t\t\toptions.stream({text: chunk.choices[0].delta.content});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Run tools\n\t\t\t\tconst toolCalls = resp.choices[0].message.tool_calls || [];\n\t\t\t\tif(toolCalls.length && !controller.signal.aborted) {\n\t\t\t\t\thistory.push(resp.choices[0].message);\n\t\t\t\t\tconst results = await Promise.all(toolCalls.map(async (toolCall: any) => {\n\t\t\t\t\t\tconst tool = options.tools?.find(findByProp('name', toolCall.function.name));\n\t\t\t\t\t\tif(!tool) return {role: 'tool', tool_call_id: toolCall.id, content: '{\"error\": \"Tool not found\"}'};\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst args = JSONAttemptParse(toolCall.function.arguments, {});\n\t\t\t\t\t\t\tconst result = await tool.fn(args, this.ai);\n\t\t\t\t\t\t\treturn {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize(result)};\n\t\t\t\t\t\t} catch (err: any) {\n\t\t\t\t\t\t\treturn {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize({error: err?.message || err?.toString() || 'Unknown'})};\n\t\t\t\t\t\t}\n\t\t\t\t\t}));\n\t\t\t\t\thistory.push(...results);\n\t\t\t\t\trequestParams.messages = history;\n\t\t\t\t}\n\t\t\t} while (!controller.signal.aborted && resp.choices?.[0]?.message?.tool_calls?.length);\n\n\t\t\tif(options.stream) options.stream({done: true});\n\t\t\tres(this.toStandard([...history, {role: 'assistant', content: resp.choices[0].message.content || ''}]));\n\t\t});\n\n\t\treturn Object.assign(response, {abort: () => controller.abort()});\n\t}\n}\n","import {JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {Anthropic} from './antrhopic.ts';\nimport {Ollama} from './ollama.ts';\nimport {OpenAi} from './open-ai.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\nimport {AiTool} from './tools.ts';\n\nexport type LLMMessage = {\n\t/** Message originator */\n\trole: 'assistant' | 'system' | 'user';\n\t/** Message content */\n\tcontent: string | any;\n} | {\n\t/** Tool call */\n\trole: 'tool';\n\t/** Unique ID for call */\n\tid: string;\n\t/** Tool that was run */\n\tname: string;\n\t/** Tool arguments */\n\targs: any;\n\t/** Tool result */\n\tcontent: undefined | string;\n\t/** Tool error */\n\terror: undefined | string;\n}\n\nexport type LLMOptions = {\n\t/** Anthropic settings */\n\tanthropic?: {\n\t\t/** API Token */\n\t\ttoken: string;\n\t\t/** Default model */\n\t\tmodel: string;\n\t},\n\t/** Ollama settings */\n\tollama?: {\n\t\t/** connection URL */\n\t\thost: string;\n\t\t/** Default model */\n\t\tmodel: string;\n\t},\n\t/** Open AI settings */\n\topenAi?: {\n\t\t/** API Token */\n\t\ttoken: string;\n\t\t/** Default model */\n\t\tmodel: string;\n\t},\n\t/** Default provider & model */\n\tmodel: string | [string, string];\n} & Omit<LLMRequest, 'model'>;\n\nexport type LLMRequest = {\n\t/** System prompt */\n\tsystem?: string;\n\t/** Message history */\n\thistory?: LLMMessage[];\n\t/** Max tokens for request */\n\tmax_tokens?: number;\n\t/** 0 = 
Rigid Logic, 1 = Balanced, 2 = Hyper Creative **/\n\ttemperature?: number;\n\t/** Available tools */\n\ttools?: AiTool[];\n\t/** LLM model */\n\tmodel?: string | [string, string];\n\t/** Stream response */\n\tstream?: (chunk: {text?: string, done?: true}) => any;\n\t/** Compress old messages in the chat to free up context */\n\tcompress?: {\n\t\t/** Trigger chat compression once context exceeds the token count */\n\t\tmax: number;\n\t\t/** Compress chat until context size smaller than */\n\t\tmin: number\n\t}\n}\n\nexport class LLM {\n\tprivate providers: {[key: string]: LLMProvider} = {};\n\n\tconstructor(public readonly ai: Ai, public readonly options: LLMOptions) {\n\t\tif(options.anthropic?.token) this.providers.anthropic = new Anthropic(this.ai, options.anthropic.token, options.anthropic.model);\n\t\tif(options.ollama?.host) this.providers.ollama = new Ollama(this.ai, options.ollama.host, options.ollama.model);\n\t\tif(options.openAi?.token) this.providers.openAi = new OpenAi(this.ai, options.openAi.token, options.openAi.model);\n\t}\n\n\t/**\n\t * Chat with LLM\n\t * @param {string} message Question\n\t * @param {LLMRequest} options Configuration options and chat history\n\t * @returns {{abort: () => void, response: Promise<LLMMessage[]>}} Function to abort response and chat history\n\t */\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tlet model: any = [null, null];\n\t\tif(options.model) {\n\t\t\tif(typeof options.model == 'object') model = options.model;\n\t\t\telse model = [options.model, (<any>this.options)[options.model]?.model];\n\t\t}\n\t\tif(!options.model || model[1] == null) {\n\t\t\tif(typeof this.options.model == 'object') model = this.options.model;\n\t\t\telse model = [this.options.model, (<any>this.options)[this.options.model]?.model];\n\t\t}\n\t\tif(!model[0] || !model[1]) throw new Error(`Unknown LLM provider or model: ${model[0]} / ${model[1]}`);\n\t\treturn this.providers[model[0]].ask(message, {...options, model: model[1]});\n\t}\n\n\t/**\n\t * Compress chat history to reduce context size\n\t * @param {LLMMessage[]} history Chatlog that will be compressed\n\t * @param max Trigger compression once context is larger than max\n\t * @param min Summarize until context size is less than min\n\t * @param {LLMRequest} options LLM options\n\t * @returns {Promise<LLMMessage[]>} New chat history will summary at index 0\n\t */\n\tasync compress(history: LLMMessage[], max: number, min: number, options?: LLMRequest): Promise<LLMMessage[]> {\n\t\tif(this.estimateTokens(history) < max) return history;\n\t\tlet keep = 0, tokens = 0;\n\t\tfor(let m of history.toReversed()) {\n\t\t\ttokens += this.estimateTokens(m.content);\n\t\t\tif(tokens < min) keep++;\n\t\t\telse break;\n\t\t}\n\t\tif(history.length <= keep) return history;\n\t\tconst recent = keep == 0 ? [] : history.slice(-keep),\n\t\t\tprocess = (keep == 0 ? 
history : history.slice(0, -keep)).filter(h => h.role === 'assistant' || h.role === 'user');\n\t\tconst summary = await this.summarize(process.map(m => `${m.role}: ${m.content}`).join('\\n\\n'), 250, options);\n\t\treturn [{role: 'assistant', content: `Conversation Summary: ${summary}`}, ...recent];\n\t}\n\n\t/**\n\t * Estimate variable as tokens\n\t * @param history Object to size\n\t * @returns {number} Rough token count\n\t */\n\testimateTokens(history: any): number {\n\t\tconst text = JSON.stringify(history);\n\t\treturn Math.ceil((text.length / 4) * 1.2);\n\t}\n\n\t/**\n\t * Ask a question with JSON response\n\t * @param {string} message Question\n\t * @param {LLMRequest} options Configuration options and chat history\n\t * @returns {Promise<{} | {} | RegExpExecArray | null>}\n\t */\n\tasync json(message: string, options?: LLMRequest) {\n\t\tlet resp = await this.ask(message, {\n\t\t\tsystem: 'Respond using a JSON blob',\n\t\t\t...options\n\t\t});\n\t\tif(!resp?.[0]?.content) return {};\n\t\treturn JSONAttemptParse(new RegExp('\\{[\\s\\S]*\\}').exec(resp[0].content), {});\n\t}\n\n\t/**\n\t * Create a summary of some text\n\t * @param {string} text Text to summarize\n\t * @param {number} tokens Max number of tokens\n\t * @param options LLM request options\n\t * @returns {Promise<string>} Summary\n\t */\n\tsummarize(text: string, tokens: number, options?: LLMRequest): Promise<string | null> {\n\t\treturn this.ask(text, {system: `Generate a brief summary <= ${tokens} tokens. Output nothing else`, temperature: 0.3, ...options})\n\t\t\t.then(history => <string>history.pop()?.content || null);\n\t}\n}\n","import {$} from '@ztimson/node-utils';\nimport {createWorker} from 'tesseract.js';\nimport {LLM, LLMOptions} from './llm';\nimport fs from 'node:fs/promises';\nimport Path from 'node:path';\nimport * as tf from '@tensorflow/tfjs';\n\nexport type AiOptions = LLMOptions & {\n\twhisper?: {\n\t\t/** Whisper binary location */\n\t\tbinary: string;\n\t\t/** Model: `ggml-base.en.bin` */\n\t\tmodel: string;\n\t\t/** Path to models */\n\t\tpath: string;\n\t\t/** Path to storage location for temporary files */\n\t\ttemp?: string;\n\t}\n}\n\nexport class Ai {\n\tprivate downloads: {[key: string]: Promise<string>} = {};\n\tprivate whisperModel!: string;\n\n\t/** Large Language Models */\n\tllm!: LLM;\n\n\tconstructor(public readonly options: AiOptions) {\n\t\tthis.llm = new LLM(this, options);\n\t\tif(this.options.whisper?.binary) {\n\t\t\tthis.whisperModel = this.options.whisper?.model.endsWith('.bin') ? 
this.options.whisper?.model : this.options.whisper?.model + '.bin';\n\t\t\tconsole.log('constructor: ' + this.options.whisper.model + ' -> ' + this.whisperModel);\n\t\t\tthis.downloadAsrModel();\n\t\t}\n\t}\n\n\t/**\n\t * Convert audio to text using Auditory Speech Recognition\n\t * @param {string} path Path to audio\n\t * @param model Whisper model\n\t * @returns {Promise<any>} Extracted text\n\t */\n\tasync asr(path: string, model: string = this.whisperModel): Promise<string | null> {\n\t\tif(!this.options.whisper?.binary) throw new Error('Whisper not configured');\n\t\tconst m = await this.downloadAsrModel(model);\n\t\tconst name = Math.random().toString(36).substring(2, 10) + '-' + path.split('/').pop() + '.txt';\n\t\tconst output = Path.join(this.options.whisper.temp || '/tmp', name);\n\t\tconsole.log('ASR: ' + this.options.whisper.model + ' -> ' + this.whisperModel);\n\t\tconsole.log(`rm -f ${output} && ${this.options.whisper.binary} -nt -np -m ${m} -f ${path} -otxt -of ${output}`);\n\t\tawait $`rm -f ${output} && ${this.options.whisper.binary} -nt -np -m ${m} -f ${path} -otxt -of ${output}`;\n\t\treturn fs.readFile(output, 'utf-8').then(text => text?.trim() || null)\n\t\t\t.finally(() => fs.rm(output, {force: true}).catch(() => {}));\n\t}\n\n\t/**\n\t * Downloads the specified Whisper model if it is not already present locally.\n\t *\n\t * @param {string} model Whisper model that will be downloaded\n\t * @return {Promise<string>} Absolute path to model file, resolves once downloaded\n\t */\n\tasync downloadAsrModel(model: string = this.whisperModel): Promise<string> {\n\t\tif(!this.options.whisper?.binary) throw new Error('Whisper not configured');\n\t\tif(!model.endsWith('.bin')) model += '.bin';\n\t\tconst p = Path.join(this.options.whisper.path, model);\n\t\tconsole.log('Download: ' + p);\n\t\tif(await fs.stat(p).then(() => true).catch(() => false)) {\n\t\t\tconsole.log('Exists!');\n\t\t\treturn p;\n\t\t}\n\t\tif(!!this.downloads[model]) return this.downloads[model];\n\t\tthis.downloads[model] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${model}`)\n\t\t\t.then(resp => resp.arrayBuffer())\n\t\t\t.then(arr => Buffer.from(arr)).then(async buffer => {\n\t\t\t\tawait fs.writeFile(p, buffer);\n\t\t\t\tdelete this.downloads[model];\n\t\t\t\treturn p;\n\t\t\t});\n\t\treturn this.downloads[model];\n\t}\n\n\t/**\n\t * Convert image to text using Optical Character Recognition\n\t * @param {string} path Path to image\n\t * @returns {{abort: Function, response: Promise<string | null>}} Abort function & Promise of extracted text\n\t */\n\tocr(path: string): {abort: () => void, response: Promise<string | null>} {\n\t\tlet worker: any;\n\t\treturn {\n\t\t\tabort: () => { worker?.terminate(); },\n\t\t\tresponse: new Promise(async res => {\n\t\t\t\tworker = await createWorker('eng');\n\t\t\t\tconst {data} = await worker.recognize(path);\n\t\t\t\tawait worker.terminate();\n\t\t\t\tres(data.text.trim() || null);\n\t\t\t})\n\t\t}\n\t}\n\n\t/**\n\t * Compare the difference between two strings using tensor math\n\t * @param target Text that will checked\n\t * @param {string} searchTerms Multiple search terms to check against target\n\t * @returns {{avg: number, max: number, similarities: number[]}} Similarity values 0-1: 0 = unique, 1 = identical\n\t */\n\tsemanticSimilarity(target: string, ...searchTerms: string[]) {\n\t\tif(searchTerms.length < 2) throw new Error('Requires at least 2 strings to compare');\n\n\t\tconst vector = (text: string, dimensions: number = 10): number[] 
=> {\n\t\t\treturn text.toLowerCase().split('').map((char, index) =>\n\t\t\t\t(char.charCodeAt(0) * (index + 1)) % dimensions / dimensions).slice(0, dimensions);\n\t\t}\n\n\t\tconst cosineSimilarity = (v1: number[], v2: number[]): number => {\n\t\t\tif (v1.length !== v2.length) throw new Error('Vectors must be same length');\n\t\t\tconst tensor1 = tf.tensor1d(v1), tensor2 = tf.tensor1d(v2)\n\t\t\tconst dotProduct = tf.dot(tensor1, tensor2)\n\t\t\tconst magnitude1 = tf.norm(tensor1)\n\t\t\tconst magnitude2 = tf.norm(tensor2)\n\t\t\tif(magnitude1.dataSync()[0] === 0 || magnitude2.dataSync()[0] === 0) return 0\n\t\t\treturn dotProduct.dataSync()[0] / (magnitude1.dataSync()[0] * magnitude2.dataSync()[0])\n\t\t}\n\n\t\tconst v = vector(target);\n\t\tconst similarities = searchTerms.map(t => vector(t)).map(refVector => cosineSimilarity(v, refVector))\n\t\treturn {avg: similarities.reduce((acc, s) => acc + s, 0) / similarities.length, max: Math.max(...similarities), similarities}\n\t}\n}\n","import {$, $Sync} from '@ztimson/node-utils';\nimport {ASet, consoleInterceptor, Http, fn as Fn} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\n\nexport type AiToolArg = {[key: string]: {\n\t/** Argument type */\n\ttype: 'array' | 'boolean' | 'number' | 'object' | 'string',\n\t/** Argument description */\n\tdescription: string,\n\t/** Required argument */\n\trequired?: boolean;\n\t/** Default value */\n\tdefault?: any,\n\t/** Options */\n\tenum?: string[],\n\t/** Minimum value or length */\n\tmin?: number,\n\t/** Maximum value or length */\n\tmax?: number,\n\t/** Match pattern */\n\tpattern?: string,\n\t/** Child arguments */\n\titems?: {[key: string]: AiToolArg}\n}}\n\nexport type AiTool = {\n\t/** Tool ID / Name - Must be snail_case */\n\tname: string,\n\t/** Tool description / prompt */\n\tdescription: string,\n\t/** Tool arguments */\n\targs?: AiToolArg,\n\t/** Callback function */\n\tfn: (args: any, ai: Ai) => any | Promise<any>,\n};\n\nexport const CliTool: AiTool = {\n\tname: 'cli',\n\tdescription: 'Use the command line interface, returns any output',\n\targs: {command: {type: 'string', description: 'Command to run', required: true}},\n\tfn: (args: {command: string}) => $`${args.command}`\n}\n\nexport const DateTimeTool: AiTool = {\n\tname: 'get_datetime',\n\tdescription: 'Get current date and time',\n\targs: {},\n\tfn: async () => new Date().toISOString()\n}\n\nexport const ExecTool: AiTool = {\n\tname: 'exec',\n\tdescription: 'Run code/scripts',\n\targs: {\n\t\tlanguage: {type: 'string', description: 'Execution language', enum: ['cli', 'node', 'python'], required: true},\n\t\tcode: {type: 'string', description: 'Code to execute', required: true}\n\t},\n\tfn: async (args, ai) => {\n\t\ttry {\n\t\t\tswitch(args.type) {\n\t\t\t\tcase 'bash':\n\t\t\t\t\treturn await CliTool.fn({command: args.code}, ai);\n\t\t\t\tcase 'node':\n\t\t\t\t\treturn await JSTool.fn({code: args.code}, ai);\n\t\t\t\tcase 'python': {\n\t\t\t\t\treturn await PythonTool.fn({code: args.code}, ai);\n\t\t\t\t}\n\t\t\t}\n\t\t} catch(err: any) {\n\t\t\treturn {error: err?.message || err.toString()};\n\t\t}\n\t}\n}\n\nexport const FetchTool: AiTool = {\n\tname: 'fetch',\n\tdescription: 'Make HTTP request to URL',\n\targs: {\n\t\turl: {type: 'string', description: 'URL to fetch', required: true},\n\t\tmethod: {type: 'string', description: 'HTTP method to use', enum: ['GET', 'POST', 'PUT', 'DELETE'], default: 'GET'},\n\t\theaders: {type: 'object', description: 'HTTP headers to send', default: {}},\n\t\tbody: {type: 'object', 
description: 'HTTP body to send'},\n\t},\n\tfn: (args: {\n\t\turl: string;\n\t\tmethod: 'GET' | 'POST' | 'PUT' | 'DELETE';\n\t\theaders: {[key: string]: string};\n\t\tbody: any;\n\t}) => new Http({url: args.url, headers: args.headers}).request({method: args.method || 'GET', body: args.body})\n}\n\nexport const JSTool: AiTool = {\n\tname: 'exec_javascript',\n\tdescription: 'Execute commonjs javascript',\n\targs: {\n\t\tcode: {type: 'string', description: 'CommonJS javascript', required: true}\n\t},\n\tfn: async (args: {code: string}) => {\n\t\tconst console = consoleInterceptor(null);\n\t\tconst resp = await Fn<any>({console}, args.code, true).catch((err: any) => console.output.error.push(err));\n\t\treturn {...console.output, return: resp, stdout: undefined, stderr: undefined};\n\t}\n}\n\nexport const PythonTool: AiTool = {\n\tname: 'exec_javascript',\n\tdescription: 'Execute commonjs javascript',\n\targs: {\n\t\tcode: {type: 'string', description: 'CommonJS javascript', required: true}\n\t},\n\tfn: async (args: {code: string}) => ({result: $Sync`python -c \"${args.code}\"`})\n}\n\nexport const SearchTool: AiTool = {\n\tname: 'search',\n\tdescription: 'Use a search engine to find relevant URLs, should be changed with fetch to scrape sources',\n\targs: {\n\t\tquery: {type: 'string', description: 'Search string', required: true},\n\t\tlength: {type: 'string', description: 'Number of results to return', default: 5},\n\t},\n\tfn: async (args: {\n\t\tquery: string;\n\t\tlength: number;\n\t}) => {\n\t\tconst html = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(args.query)}`, {\n\t\t\theaders: {\"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64)\", \"Accept-Language\": \"en-US,en;q=0.9\"}\n\t\t}).then(resp => resp.text());\n\t\tlet match, regex = /<a .*?href=\"(.+?)\".+?<\\/a>/g;\n\t\tconst results = new ASet<string>();\n\t\twhile((match = regex.exec(html)) !== null) {\n\t\t\tlet url = /uddg=(.+)&amp?/.exec(decodeURIComponent(match[1]))?.[1];\n\t\t\tif(url) url = decodeURIComponent(url);\n\t\t\tif(url) results.add(url);\n\t\t\tif(results.size >= (args.length || 5)) break;\n\t\t}\n\t\treturn 
results;\n\t}\n}\n"],"names":["LLMProvider","Anthropic","ai","apiToken","model","anthropic","history","i","orgI","c","h","message","options","controller","response","res","rej","requestParams","t","objectMap","key","value","resp","chunk","text","last","JSONAttemptParse","toolCalls","results","toolCall","tool","findByProp","result","JSONSanitize","err","Ollama","host","ollama","error","system","args","OpenAi","openAI","tools","tc","record","h2","LLM","max","min","keep","tokens","m","recent","process","Ai","path","name","output","Path","$","fs","p","arr","buffer","worker","createWorker","data","target","searchTerms","vector","dimensions","char","index","cosineSimilarity","v1","v2","tensor1","tf","tensor2","dotProduct","magnitude1","magnitude2","v","similarities","refVector","acc","s","CliTool","DateTimeTool","ExecTool","JSTool","PythonTool","FetchTool","Http","console","consoleInterceptor","Fn","$Sync","SearchTool","html","match","regex","ASet","url"],"mappings":";;;;;;;;;AAIO,MAAeA,EAAY;AAElC;ACAO,MAAMC,UAAkBD,EAAY;AAAA,EAG1C,YAA4BE,GAAwBC,GAAyBC,GAAe;AAC3F,UAAA,GAD2B,KAAA,KAAAF,GAAwB,KAAA,WAAAC,GAAyB,KAAA,QAAAC,GAE5E,KAAK,SAAS,IAAIC,EAAU,EAAC,QAAQF,GAAS;AAAA,EAC/C;AAAA,EALA;AAAA,EAOQ,WAAWG,GAA8B;AAChD,aAAQC,IAAI,GAAGA,IAAID,EAAQ,QAAQC,KAAK;AACvC,YAAMC,IAAOD;AACb,MAAG,OAAOD,EAAQE,CAAI,EAAE,WAAW,aAC/BF,EAAQE,CAAI,EAAE,QAAQ,cACxBF,EAAQE,CAAI,EAAE,QAAQ,OAAO,CAACC,MAAWA,EAAE,QAAO,UAAU,EAAE,QAAQ,CAACA,MAAW;AACjF,QAAAF,KACAD,EAAQ,OAAOC,GAAG,GAAG,EAAC,MAAM,QAAQ,IAAIE,EAAE,IAAI,MAAMA,EAAE,MAAM,MAAMA,EAAE,OAAM;AAAA,MAC3E,CAAC,IACQH,EAAQE,CAAI,EAAE,QAAQ,UAC/BF,EAAQE,CAAI,EAAE,QAAQ,OAAO,CAACC,MAAWA,EAAE,QAAO,aAAa,EAAE,QAAQ,CAACA,MAAW;AACpF,cAAMC,IAAIJ,EAAQ,KAAK,CAACI,MAAWA,EAAE,MAAMD,EAAE,WAAW;AACxD,QAAAC,EAAED,EAAE,WAAW,UAAU,SAAS,IAAIA,EAAE;AAAA,MACzC,CAAC,GAEFH,EAAQE,CAAI,EAAE,UAAUF,EAAQE,CAAI,EAAE,QAAQ,OAAO,CAACC,MAAWA,EAAE,QAAQ,MAAM,EAAE,IAAI,CAACA,MAAWA,EAAE,IAAI,EAAE,KAAK;AAAA;AAAA,CAAM;AAAA,IAExH;AACA,WAAOH,EAAQ,OAAO,CAAAI,MAAK,CAAC,CAACA,EAAE,OAAO;AAAA,EACvC;AAAA,EAEQ,aAAaJ,GAA8B;AAClD,aAAQC,IAAI,GAAGA,IAAID,EAAQ,QAAQC;AAClC,UAAGD,EAAQC,CAAC,EAAE,QAAQ,QAAQ;AAC7B,cAAMG,IAASJ,EAAQC,CAAC;AACxB,QAAAD,EAAQ;AAAA,UAAOC;AAAA,UAAG;AAAA,UACjB,EAAC,MAAM,aAAa,SAAS,CAAC,EAAC,MAAM,YAAY,IAAIG,EAAE,IAAI,MAAMA,EAAE,MAAM,OAAOA,EAAE,KAAA,CAAK,EAAA;AAAA,UACvF,EAAC,MAAM,QAAQ,SAAS,CAAC,EAAC,MAAM,eAAe,aAAaA,EAAE,IAAI,UAAU,CAAC,CAACA,EAAE,OAAO,SAAUA,EAAE,SAASA,EAAE,SAAQ,EAAA;AAAA,QAAC,GAExHH;AAAA,MACD;AAED,WAAOD;AAAA,EACR;AAAA,EAEA,IAAIK,GAAiBC,IAAsB,IAAoC;AAC9E,UAAMC,IAAa,IAAI,gBAAA,GACjBC,IAAW,IAAI,QAAa,OAAOC,GAAKC,MAAQ;AACrD,UAAIV,IAAU,KAAK,aAAa,CAAC,GAAGM,EAAQ,WAAW,CAAA,GAAI,EAAC,MAAM,QAAQ,SAASD,EAAA,CAAQ,CAAC;AAC5F,MAAGC,EAAQ,aAAUN,IAAU,MAAM,KAAK,GAAG,IAAI,SAAcA,GAASM,EAAQ,SAAS,KAAKA,EAAQ,SAAS,KAAKA,CAAO;AAC3H,YAAMK,IAAqB;AAAA,QAC1B,OAAOL,EAAQ,SAAS,KAAK;AAAA,QAC7B,YAAYA,EAAQ,cAAc,KAAK,GAAG,QAAQ,cAAc;AAAA,QAChE,QAAQA,EAAQ,UAAU,KAAK,GAAG,QAAQ,UAAU;AAAA,QACpD,aAAaA,EAAQ,eAAe,KAAK,GAAG,QAAQ,eAAe;AAAA,QACnE,QAAQA,EAAQ,SAAS,KAAK,GAAG,QAAQ,SAAS,CAAA,GAAI,IAAI,CAAAM,OAAM;AAAA,UAC/D,MAAMA,EAAE;AAAA,UACR,aAAaA,EAAE;AAAA,UACf,cAAc;AAAA,YACb,MAAM;AAAA,YACN,YAAYA,EAAE,OAAOC,EAAUD,EAAE,MAAM,CAACE,GAAKC,OAAW,EAAC,GAAGA,GAAO,UAAU,OAAA,EAAW,IAAI,CAAA;AAAA,YAC5F,UAAUH,EAAE,OAAO,OAAO,QAAQA,EAAE,IAAI,EAAE,OAAO,CAAAA,MAAKA,EAAE,CAAC,EAAE,QAAQ,EAAE,IAAI,CAAAA,MAAKA,EAAE,CAAC,CAAC,IAAI,CAAA;AAAA,UAAC;AAAA,UAExF,IAAI;AAAA,QAAA,EACH;AAAA,QACF,UAAUZ;AAAA,QACV,QAAQ,CAAC,CAACM,EAAQ;AAAA,MAAA;AAInB,UAAIU;AACJ,SAAG;AAIF,YAHAA,IAAO,MAAM,KAAK,OAAO,SAAS,OAAOL,CAAa,GAGnDL,EAAQ,QAAQ;AAClB,UAAAU,EAAK,UAAU,CAAA;AACf,2BAAiBC,KAASD,GAAM;AAC/B,gBAAGT,E
AAW,OAAO,QAAS;AAC9B,gBAAGU,EAAM,SAAS;AACjB,cAAGA,EAAM,cAAc,SAAS,SAC/BD,EAAK,QAAQ,KAAK,EAAC,MAAM,QAAQ,MAAM,IAAG,IACjCC,EAAM,cAAc,SAAS,cACtCD,EAAK,QAAQ,KAAK,EAAC,MAAM,YAAY,IAAIC,EAAM,cAAc,IAAI,MAAMA,EAAM,cAAc,MAAM,OAAY,IAAG;AAAA,qBAExGA,EAAM,SAAS;AACxB,kBAAGA,EAAM,MAAM,SAAS,cAAc;AACrC,sBAAMC,IAAOD,EAAM,MAAM;AACzB,gBAAAD,EAAK,QAAQ,GAAG,EAAE,EAAE,QAAQE,GAC5BZ,EAAQ,OAAO,EAAC,MAAAY,GAAK;AAAA,cACtB,MAAA,CAAUD,EAAM,MAAM,SAAS,uBAC9BD,EAAK,QAAQ,GAAG,EAAE,EAAE,SAASC,EAAM,MAAM;AAAA,qBAEjCA,EAAM,SAAS,sBAAsB;AAC9C,oBAAME,IAAOH,EAAK,QAAQ,GAAG,EAAE;AAC/B,cAAGG,EAAK,SAAS,SAAMA,EAAK,QAAQA,EAAK,QAAQC,EAAiBD,EAAK,OAAO,CAAA,CAAE,IAAI,CAAA;AAAA,YACrF,WAAUF,EAAM,SAAS;AACxB;AAAA,UAEF;AAAA,QACD;AAGA,cAAMI,IAAYL,EAAK,QAAQ,OAAO,CAACb,MAAWA,EAAE,SAAS,UAAU;AACvE,YAAGkB,EAAU,UAAU,CAACd,EAAW,OAAO,SAAS;AAClD,UAAAP,EAAQ,KAAK,EAAC,MAAM,aAAa,SAASgB,EAAK,SAAQ;AACvD,gBAAMM,IAAU,MAAM,QAAQ,IAAID,EAAU,IAAI,OAAOE,MAAkB;AACxE,kBAAMC,IAAOlB,EAAQ,OAAO,KAAKmB,EAAW,QAAQF,EAAS,IAAI,CAAC;AAClE,gBAAG,CAACC,EAAM,QAAO,EAAC,aAAaD,EAAS,IAAI,UAAU,IAAM,SAAS,iBAAA;AACrE,gBAAI;AACH,oBAAMG,IAAS,MAAMF,EAAK,GAAGD,EAAS,OAAO,KAAK,EAAE;AACpD,qBAAO,EAAC,MAAM,eAAe,aAAaA,EAAS,IAAI,SAASI,EAAaD,CAAM,EAAA;AAAA,YACpF,SAASE,GAAU;AAClB,qBAAO,EAAC,MAAM,eAAe,aAAaL,EAAS,IAAI,UAAU,IAAM,SAASK,GAAK,WAAWA,GAAK,SAAA,KAAc,UAAA;AAAA,YACpH;AAAA,UACD,CAAC,CAAC;AACF,UAAA5B,EAAQ,KAAK,EAAC,MAAM,QAAQ,SAASsB,GAAQ,GAC7CX,EAAc,WAAWX;AAAA,QAC1B;AAAA,MACD,SAAS,CAACO,EAAW,OAAO,WAAWS,EAAK,QAAQ,KAAK,CAACb,MAAWA,EAAE,SAAS,UAAU;AAC1F,MAAGG,EAAQ,UAAQA,EAAQ,OAAO,EAAC,MAAM,IAAK,GAC9CG,EAAI,KAAK,WAAW,CAAC,GAAGT,GAAS;AAAA,QAChC,MAAM;AAAA,QACN,SAASgB,EAAK,QAAQ,OAAO,CAACb,MAAWA,EAAE,QAAQ,MAAM,EAAE,IAAI,CAACA,MAAWA,EAAE,IAAI,EAAE,KAAK;AAAA;AAAA,CAAM;AAAA,MAAA,CAC9F,CAAC,CAAC;AAAA,IACJ,CAAC;AACD,WAAO,OAAO,OAAOK,GAAU,EAAC,OAAO,MAAMD,EAAW,MAAA,GAAQ;AAAA,EACjE;AACD;AC9HO,MAAMsB,UAAenC,EAAY;AAAA,EAGvC,YAA4BE,GAAekC,GAAqBhC,GAAe;AAC9E,UAAA,GAD2B,KAAA,KAAAF,GAAe,KAAA,OAAAkC,GAAqB,KAAA,QAAAhC,GAE/D,KAAK,SAAS,IAAIiC,EAAO,EAAC,MAAAD,GAAK;AAAA,EAChC;AAAA,EALA;AAAA,EAOQ,WAAW9B,GAA8B;AAChD,aAAQC,IAAI,GAAGA,IAAID,EAAQ,QAAQC;AAClC,UAAGD,EAAQC,CAAC,EAAE,QAAQ,eAAeD,EAAQC,CAAC,EAAE;AAC/C,QAAGD,EAAQC,CAAC,EAAE,UAAS,OAAOD,EAAQC,CAAC,EAAE,cAExCD,EAAQ,OAAOC,GAAG,CAAC,GACnBA;AAAA,eAEQD,EAAQC,CAAC,EAAE,QAAQ,QAAQ;AACpC,cAAM+B,IAAQhC,EAAQC,CAAC,EAAE,QAAQ,WAAW,WAAW;AACvD,QAAAD,EAAQC,CAAC,IAAI,EAAC,MAAM,QAAQ,MAAMD,EAAQC,CAAC,EAAE,WAAW,MAAMD,EAAQC,CAAC,EAAE,MAAM,CAAC+B,IAAQ,UAAU,SAAS,GAAGhC,EAAQC,CAAC,EAAE,QAAA;AAAA,MAC1H;AAED,WAAOD;AAAA,EACR;AAAA,EAEQ,aAAaA,GAA8B;AAClD,WAAOA,EAAQ,IAAI,CAACI,MAChBA,EAAE,QAAQ,SAAeA,IACrB,EAAC,MAAM,QAAQ,WAAWA,EAAE,MAAM,SAASA,EAAE,SAASA,EAAE,QAAA,CAC/D;AAAA,EACF;AAAA,EAEA,IAAIC,GAAiBC,IAAsB,IAAoC;AAC9E,UAAMC,IAAa,IAAI,gBAAA,GACjBC,IAAW,IAAI,QAAa,OAAOC,GAAKC,MAAQ;AACrD,UAAIuB,IAAS3B,EAAQ,UAAU,KAAK,GAAG,QAAQ,QAC3CN,IAAU,KAAK,aAAa,CAAC,GAAGM,EAAQ,WAAW,CAAA,GAAI,EAAC,MAAM,QAAQ,SAASD,EAAA,CAAQ,CAAC;AAC5F,MAAGL,EAAQ,CAAC,EAAE,QAAQ,aACjBiC,MACS,MAAA,IADDA,IAASjC,EAAQ,MAAA,IAG3BM,EAAQ,aAAUN,IAAU,MAAM,KAAK,GAAG,IAAI,SAAcA,GAASM,EAAQ,SAAS,KAAKA,EAAQ,SAAS,GAAG,IAC/GA,EAAQ,UAAQN,EAAQ,QAAQ,EAAC,MAAM,UAAU,SAASiC,GAAO;AAEpE,YAAMtB,IAAqB;AAAA,QAC1B,OAAOL,EAAQ,SAAS,KAAK;AAAA,QAC7B,UAAUN;AAAA,QACV,QAAQ,CAAC,CAACM,EAAQ;AAAA,QAClB,QAAQC,EAAW;AAAA,QACnB,SAAS;AAAA,UACR,aAAaD,EAAQ,eAAe,KAAK,GAAG,QAAQ,eAAe;AAAA,UACnE,aAAaA,EAAQ,cAAc,KAAK,GAAG,QAAQ,cAAc;AAAA,QAAA;AAAA,QAElE,QAAQA,EAAQ,SAAS,KAAK,GAAG,QAAQ,SAAS,CAAA,GAAI,IAAI,CAAAM,OAAM;AAAA,UAC/D,MAAM;AAAA,UACN,UAAU;AAAA,YACT,MAAMA,EAAE;AAAA,YACR,aAAaA,EAAE;AAAA,YACf,YAAY;AAAA,cACX,MAAM;AAAA,cACN,YAAYA,EAAE,OAAOC,EAAUD,EAAE,MAAM,CAACE,GAAKC,OAAW,EAAC,GAAGA,GAAO,UAAU,O
AAA,EAAW,IAAI,CAAA;AAAA,cAC5F,UAAUH,EAAE,OAAO,OAAO,QAAQA,EAAE,IAAI,EAAE,OAAO,CAAAA,MAAKA,EAAE,CAAC,EAAE,QAAQ,EAAE,IAAI,CAAAA,MAAKA,EAAE,CAAC,CAAC,IAAI,CAAA;AAAA,YAAC;AAAA,UACxF;AAAA,QACD,EACC;AAAA,MAAA;AAIH,UAAII;AACJ,SAAG;AAEF,YADAA,IAAO,MAAM,KAAK,OAAO,KAAKL,CAAa,GACxCL,EAAQ,QAAQ;AAClB,UAAAU,EAAK,UAAU,EAAC,MAAM,aAAa,SAAS,IAAI,YAAY,GAAC;AAC7D,2BAAiBC,KAASD;AAOzB,gBANGT,EAAW,OAAO,YAClBU,EAAM,SAAS,YACjBD,EAAK,QAAQ,WAAWC,EAAM,QAAQ,SACtCX,EAAQ,OAAO,EAAC,MAAMW,EAAM,QAAQ,SAAQ,IAE1CA,EAAM,SAAS,iBAAiB,QAAQ,aAAaA,EAAM,QAAQ,aACnEA,EAAM,MAAM;AAAA,QAEjB;AAGA,YAAGD,EAAK,SAAS,YAAY,UAAU,CAACT,EAAW,OAAO,SAAS;AAClE,UAAAP,EAAQ,KAAKgB,EAAK,OAAO;AACzB,gBAAMM,IAAU,MAAM,QAAQ,IAAIN,EAAK,QAAQ,WAAW,IAAI,OAAOO,MAAkB;AACtF,kBAAMC,KAAQlB,EAAQ,SAAS,KAAK,GAAG,QAAQ,QAAQ,KAAKmB,EAAW,QAAQF,EAAS,SAAS,IAAI,CAAC;AACtG,gBAAG,CAACC,EAAM,QAAO,EAAC,MAAM,QAAQ,WAAWD,EAAS,SAAS,MAAM,SAAS,8BAAA;AAC5E,kBAAMW,IAAO,OAAOX,EAAS,SAAS,aAAc,WAAWH,EAAiBG,EAAS,SAAS,WAAW,CAAA,CAAE,IAAIA,EAAS,SAAS;AACrI,gBAAI;AACH,oBAAMG,IAAS,MAAMF,EAAK,GAAGU,GAAM,KAAK,EAAE;AAC1C,qBAAO,EAAC,MAAM,QAAQ,WAAWX,EAAS,SAAS,MAAM,MAAAW,GAAM,SAASP,EAAaD,CAAM,EAAA;AAAA,YAC5F,SAASE,GAAU;AAClB,qBAAO,EAAC,MAAM,QAAQ,WAAWL,EAAS,SAAS,MAAM,MAAAW,GAAM,SAASP,EAAa,EAAC,OAAOC,GAAK,WAAWA,GAAK,cAAc,UAAA,CAAU,EAAA;AAAA,YAC3I;AAAA,UACD,CAAC,CAAC;AACF,UAAA5B,EAAQ,KAAK,GAAGsB,CAAO,GACvBX,EAAc,WAAWX;AAAA,QAC1B;AAAA,MACD,SAAS,CAACO,EAAW,OAAO,WAAWS,EAAK,SAAS,YAAY;AACjE,MAAGV,EAAQ,UAAQA,EAAQ,OAAO,EAAC,MAAM,IAAK,GAC9CG,EAAI,KAAK,WAAW,CAAC,GAAGT,GAAS,EAAC,MAAM,aAAa,SAASgB,EAAK,SAAS,QAAA,CAAQ,CAAC,CAAC;AAAA,IACvF,CAAC;AACD,WAAO,OAAO,OAAOR,GAAU,EAAC,OAAO,MAAMD,EAAW,MAAA,GAAQ;AAAA,EACjE;AACD;AC1GO,MAAM4B,UAAezC,EAAY;AAAA,EAGvC,YAA4BE,GAAwBC,GAAyBC,GAAe;AAC3F,UAAA,GAD2B,KAAA,KAAAF,GAAwB,KAAA,WAAAC,GAAyB,KAAA,QAAAC,GAE5E,KAAK,SAAS,IAAIsC,EAAO,EAAC,QAAQvC,GAAS;AAAA,EAC5C;AAAA,EALA;AAAA,EAOQ,WAAWG,GAA8B;AAChD,aAAQC,IAAI,GAAGA,IAAID,EAAQ,QAAQC,KAAK;AACvC,YAAMG,IAAIJ,EAAQC,CAAC;AACnB,UAAGG,EAAE,SAAS,eAAeA,EAAE,YAAY;AAC1C,cAAMiC,IAAQjC,EAAE,WAAW,IAAI,CAACkC,OAAa;AAAA,UAC5C,MAAM;AAAA,UACN,IAAIA,EAAG;AAAA,UACP,MAAMA,EAAG,SAAS;AAAA,UAClB,MAAMlB,EAAiBkB,EAAG,SAAS,WAAW,CAAA,CAAE;AAAA,QAAA,EAC/C;AACF,QAAAtC,EAAQ,OAAOC,GAAG,GAAG,GAAGoC,CAAK,GAC7BpC,KAAKoC,EAAM,SAAS;AAAA,MACrB,WAAUjC,EAAE,SAAS,UAAUA,EAAE,SAAS;AACzC,cAAMmC,IAASvC,EAAQ,KAAK,OAAMI,EAAE,gBAAgBoC,EAAG,EAAE;AACzD,QAAGD,MACCnC,EAAE,QAAQ,SAAS,UAAU,IAAGmC,EAAO,QAAQnC,EAAE,UAC/CmC,EAAO,UAAUnC,EAAE,UAEzBJ,EAAQ,OAAOC,GAAG,CAAC,GACnBA;AAAA,MACD;AAAA,IAED;AACA,WAAOD;AAAA,EACR;AAAA,EAEQ,aAAaA,GAA8B;AAClD,WAAOA,EAAQ,OAAO,CAAC0B,GAAQtB,OAC3BA,EAAE,SAAS,SACbsB,EAAO,KAAK;AAAA,MACX,MAAM;AAAA,MACN,SAAS;AAAA,MACT,YAAY,CAAC,EAAE,IAAItB,EAAE,IAAI,MAAM,YAAY,UAAU,EAAE,MAAMA,EAAE,MAAM,WAAW,KAAK,UAAUA,EAAE,IAAI,EAAA,GAAK;AAAA,MAC1G,SAAS;AAAA,MACT,aAAa,CAAA;AAAA,IAAC,GACZ;AAAA,MACF,MAAM;AAAA,MACN,cAAcA,EAAE;AAAA,MAChB,SAASA,EAAE,SAASA,EAAE;AAAA,IAAA,CACtB,IAEDsB,EAAO,KAAKtB,CAAC,GAEPsB,IACL,CAAA,CAAW;AAAA,EACf;AAAA,EAEA,IAAIrB,GAAiBC,IAAsB,IAAoC;AAC9E,UAAMC,IAAa,IAAI,gBAAA,GACjBC,IAAW,IAAI,QAAa,OAAOC,GAAKC,MAAQ;AACrD,UAAIV,IAAU,KAAK,aAAa,CAAC,GAAGM,EAAQ,WAAW,CAAA,GAAI,EAAC,MAAM,QAAQ,SAASD,EAAA,CAAQ,CAAC;AAC5F,MAAGC,EAAQ,aAAUN,IAAU,MAAM,KAAK,GAAG,IAAI,SAAcA,GAASM,EAAQ,SAAS,KAAKA,EAAQ,SAAS,KAAKA,CAAO;AAE3H,YAAMK,IAAqB;AAAA,QAC1B,OAAOL,EAAQ,SAAS,KAAK;AAAA,QAC7B,UAAUN;AAAA,QACV,QAAQ,CAAC,CAACM,EAAQ;AAAA,QAClB,YAAYA,EAAQ,cAAc,KAAK,GAAG,QAAQ,cAAc;AAAA,QAChE,aAAaA,EAAQ,eAAe,KAAK,GAAG,QAAQ,eAAe;AAAA,QACnE,QAAQA,EAAQ,SAAS,KAAK,GAAG,QAAQ,SAAS,CAAA,GAAI,IAAI,CAAAM,OAAM;AAAA,UAC/D,MAAM;AAAA,UACN,UAAU;AAAA,YACT,MAAMA,EAAE;AAAA,YACR,aAAaA,EAAE;AAAA,YACf,YAAY;AAAA,cACX,MAAM;AAAA,cACN,YAAYA,
EAAE,OAAOC,EAAUD,EAAE,MAAM,CAACE,GAAKC,OAAW,EAAC,GAAGA,GAAO,UAAU,OAAA,EAAW,IAAI,CAAA;AAAA,cAC5F,UAAUH,EAAE,OAAO,OAAO,QAAQA,EAAE,IAAI,EAAE,OAAO,CAAAA,MAAKA,EAAE,CAAC,EAAE,QAAQ,EAAE,IAAI,CAAAA,MAAKA,EAAE,CAAC,CAAC,IAAI,CAAA;AAAA,YAAC;AAAA,UACxF;AAAA,QACD,EACC;AAAA,MAAA;AAIH,UAAII;AACJ,SAAG;AAIF,YAHAA,IAAO,MAAM,KAAK,OAAO,KAAK,YAAY,OAAOL,CAAa,GAG3DL,EAAQ,QAAQ;AAClB,UAAAU,EAAK,UAAU,CAAA;AACf,2BAAiBC,KAASD,GAAM;AAC/B,gBAAGT,EAAW,OAAO,QAAS;AAC9B,YAAGU,EAAM,QAAQ,CAAC,EAAE,MAAM,WACzBX,EAAQ,OAAO,EAAC,MAAMW,EAAM,QAAQ,CAAC,EAAE,MAAM,SAAQ;AAAA,UAEvD;AAAA,QACD;AAGA,cAAMI,IAAYL,EAAK,QAAQ,CAAC,EAAE,QAAQ,cAAc,CAAA;AACxD,YAAGK,EAAU,UAAU,CAACd,EAAW,OAAO,SAAS;AAClD,UAAAP,EAAQ,KAAKgB,EAAK,QAAQ,CAAC,EAAE,OAAO;AACpC,gBAAMM,IAAU,MAAM,QAAQ,IAAID,EAAU,IAAI,OAAOE,MAAkB;AACxE,kBAAMC,IAAOlB,EAAQ,OAAO,KAAKmB,EAAW,QAAQF,EAAS,SAAS,IAAI,CAAC;AAC3E,gBAAG,CAACC,EAAM,QAAO,EAAC,MAAM,QAAQ,cAAcD,EAAS,IAAI,SAAS,8BAAA;AACpE,gBAAI;AACH,oBAAMW,IAAOd,EAAiBG,EAAS,SAAS,WAAW,CAAA,CAAE,GACvDG,IAAS,MAAMF,EAAK,GAAGU,GAAM,KAAK,EAAE;AAC1C,qBAAO,EAAC,MAAM,QAAQ,cAAcX,EAAS,IAAI,SAASI,EAAaD,CAAM,EAAA;AAAA,YAC9E,SAASE,GAAU;AAClB,qBAAO,EAAC,MAAM,QAAQ,cAAcL,EAAS,IAAI,SAASI,EAAa,EAAC,OAAOC,GAAK,WAAWA,GAAK,cAAc,UAAA,CAAU,EAAA;AAAA,YAC7H;AAAA,UACD,CAAC,CAAC;AACF,UAAA5B,EAAQ,KAAK,GAAGsB,CAAO,GACvBX,EAAc,WAAWX;AAAA,QAC1B;AAAA,MACD,SAAS,CAACO,EAAW,OAAO,WAAWS,EAAK,UAAU,CAAC,GAAG,SAAS,YAAY;AAE/E,MAAGV,EAAQ,UAAQA,EAAQ,OAAO,EAAC,MAAM,IAAK,GAC9CG,EAAI,KAAK,WAAW,CAAC,GAAGT,GAAS,EAAC,MAAM,aAAa,SAASgB,EAAK,QAAQ,CAAC,EAAE,QAAQ,WAAW,GAAA,CAAG,CAAC,CAAC;AAAA,IACvG,CAAC;AAED,WAAO,OAAO,OAAOR,GAAU,EAAC,OAAO,MAAMD,EAAW,MAAA,GAAQ;AAAA,EACjE;AACD;ACnDO,MAAMkC,EAAI;AAAA,EAGhB,YAA4B7C,GAAwBU,GAAqB;AAA7C,SAAA,KAAAV,GAAwB,KAAA,UAAAU,GAChDA,EAAQ,WAAW,UAAO,KAAK,UAAU,YAAY,IAAIX,EAAU,KAAK,IAAIW,EAAQ,UAAU,OAAOA,EAAQ,UAAU,KAAK,IAC5HA,EAAQ,QAAQ,SAAM,KAAK,UAAU,SAAS,IAAIuB,EAAO,KAAK,IAAIvB,EAAQ,OAAO,MAAMA,EAAQ,OAAO,KAAK,IAC3GA,EAAQ,QAAQ,UAAO,KAAK,UAAU,SAAS,IAAI6B,EAAO,KAAK,IAAI7B,EAAQ,OAAO,OAAOA,EAAQ,OAAO,KAAK;AAAA,EACjH;AAAA,EANQ,YAA0C,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAclD,IAAID,GAAiBC,IAAsB,IAAoC;AAC9E,QAAIR,IAAa,CAAC,MAAM,IAAI;AAS5B,QARGQ,EAAQ,UACP,OAAOA,EAAQ,SAAS,eAAkBA,EAAQ,QAChDR,IAAQ,CAACQ,EAAQ,OAAa,KAAK,QAASA,EAAQ,KAAK,GAAG,KAAK,KAEpE,CAACA,EAAQ,SAASR,EAAM,CAAC,KAAK,UAC7B,OAAO,KAAK,QAAQ,SAAS,WAAUA,IAAQ,KAAK,QAAQ,QAC1DA,IAAQ,CAAC,KAAK,QAAQ,OAAa,KAAK,QAAS,KAAK,QAAQ,KAAK,GAAG,KAAK,IAE9E,CAACA,EAAM,CAAC,KAAK,CAACA,EAAM,CAAC,EAAG,OAAM,IAAI,MAAM,kCAAkCA,EAAM,CAAC,CAAC,MAAMA,EAAM,CAAC,CAAC,EAAE;AACrG,WAAO,KAAK,UAAUA,EAAM,CAAC,CAAC,EAAE,IAAIO,GAAS,EAAC,GAAGC,GAAS,OAAOR,EAAM,CAAC,GAAE;AAAA,EAC3E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,SAASE,GAAuB0C,GAAaC,GAAarC,GAA6C;AAC5G,QAAG,KAAK,eAAeN,CAAO,IAAI0C,EAAK,QAAO1C;AAC9C,QAAI4C,IAAO,GAAGC,IAAS;AACvB,aAAQC,KAAK9C,EAAQ;AAEpB,UADA6C,KAAU,KAAK,eAAeC,EAAE,OAAO,GACpCD,IAASF,EAAK,CAAAC;AAAA,UACZ;AAEN,QAAG5C,EAAQ,UAAU4C,EAAM,QAAO5C;AAClC,UAAM+C,IAASH,KAAQ,IAAI,CAAA,IAAK5C,EAAQ,MAAM,CAAC4C,CAAI,GAClDI,KAAWJ,KAAQ,IAAI5C,IAAUA,EAAQ,MAAM,GAAG,CAAC4C,CAAI,GAAG,OAAO,CAAAxC,MAAKA,EAAE,SAAS,eAAeA,EAAE,SAAS,MAAM;AAElH,WAAO,CAAC,EAAC,MAAM,aAAa,SAAS,yBADrB,MAAM,KAAK,UAAU4C,EAAQ,IAAI,OAAK,GAAGF,EAAE,IAAI,KAAKA,EAAE,OAAO,EAAE,EAAE,KAAK;AAAA;AAAA,CAAM,GAAG,KAAKxC,CAAO,CACtC,MAAK,GAAGyC,CAAM;AAAA,EACpF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,eAAe/C,GAAsB;AACpC,UAAMkB,IAAO,KAAK,UAAUlB,CAAO;AACnC,WAAO,KAAK,KAAMkB,EAAK,SAAS,IAAK,GAAG;AAAA,EACzC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,KAAKb,GAAiBC,GAAsB;AACjD,QAAIU,IAAO,MAAM,KAAK,IAAIX,GAAS;AAAA,MAClC,QAAQ;AAAA,MACR,GAAGC;AAAA,IAAA,CACH;AACD,WAAIU,IAAO,CAAC,GAAG,UACRI,EAAiB,IAAI,OAAO,SAAa,EA
AE,KAAKJ,EAAK,CAAC,EAAE,OAAO,GAAG,EAAE,IAD5C,CAAA;AAAA,EAEhC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,UAAUE,GAAc2B,GAAgBvC,GAA8C;AACrF,WAAO,KAAK,IAAIY,GAAM,EAAC,QAAQ,+BAA+B2B,CAAM,gCAAgC,aAAa,KAAK,GAAGvC,EAAA,CAAQ,EAC/H,KAAK,CAAAN,MAAmBA,EAAQ,IAAA,GAAO,WAAW,IAAI;AAAA,EACzD;AACD;AClJO,MAAMiD,EAAG;AAAA,EAOf,YAA4B3C,GAAoB;AAApB,SAAA,UAAAA,GAC3B,KAAK,MAAM,IAAImC,EAAI,MAAMnC,CAAO,GAC7B,KAAK,QAAQ,SAAS,WACxB,KAAK,eAAe,KAAK,QAAQ,SAAS,MAAM,SAAS,MAAM,IAAI,KAAK,QAAQ,SAAS,QAAQ,KAAK,QAAQ,SAAS,QAAQ,QAC/H,QAAQ,IAAI,kBAAkB,KAAK,QAAQ,QAAQ,QAAQ,SAAS,KAAK,YAAY,GACrF,KAAK,iBAAA;AAAA,EAEP;AAAA,EAbQ,YAA8C,CAAA;AAAA,EAC9C;AAAA;AAAA,EAGR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAiBA,MAAM,IAAI4C,GAAcpD,IAAgB,KAAK,cAAsC;AAClF,QAAG,CAAC,KAAK,QAAQ,SAAS,OAAQ,OAAM,IAAI,MAAM,wBAAwB;AAC1E,UAAMgD,IAAI,MAAM,KAAK,iBAAiBhD,CAAK,GACrCqD,IAAO,KAAK,OAAA,EAAS,SAAS,EAAE,EAAE,UAAU,GAAG,EAAE,IAAI,MAAMD,EAAK,MAAM,GAAG,EAAE,QAAQ,QACnFE,IAASC,EAAK,KAAK,KAAK,QAAQ,QAAQ,QAAQ,QAAQF,CAAI;AAClE,mBAAQ,IAAI,UAAU,KAAK,QAAQ,QAAQ,QAAQ,SAAS,KAAK,YAAY,GAC7E,QAAQ,IAAI,SAASC,CAAM,OAAO,KAAK,QAAQ,QAAQ,MAAM,eAAeN,CAAC,OAAOI,CAAI,cAAcE,CAAM,EAAE,GAC9G,MAAME,UAAUF,CAAM,OAAO,KAAK,QAAQ,QAAQ,MAAM,eAAeN,CAAC,OAAOI,CAAI,cAAcE,CAAM,IAChGG,EAAG,SAASH,GAAQ,OAAO,EAAE,KAAK,CAAAlC,MAAQA,GAAM,KAAA,KAAU,IAAI,EACnE,QAAQ,MAAMqC,EAAG,GAAGH,GAAQ,EAAC,OAAO,GAAA,CAAK,EAAE,MAAM,MAAM;AAAA,IAAC,CAAC,CAAC;AAAA,EAC7D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,iBAAiBtD,IAAgB,KAAK,cAA+B;AAC1E,QAAG,CAAC,KAAK,QAAQ,SAAS,OAAQ,OAAM,IAAI,MAAM,wBAAwB;AAC1E,IAAIA,EAAM,SAAS,MAAM,MAAGA,KAAS;AACrC,UAAM0D,IAAIH,EAAK,KAAK,KAAK,QAAQ,QAAQ,MAAMvD,CAAK;AAEpD,WADA,QAAQ,IAAI,eAAe0D,CAAC,GACzB,MAAMD,EAAG,KAAKC,CAAC,EAAE,KAAK,MAAM,EAAI,EAAE,MAAM,MAAM,EAAK,KACrD,QAAQ,IAAI,SAAS,GACdA,KAEH,KAAK,UAAU1D,CAAK,IAAU,KAAK,UAAUA,CAAK,KACvD,KAAK,UAAUA,CAAK,IAAI,MAAM,6DAA6DA,CAAK,EAAE,EAChG,KAAK,CAAAkB,MAAQA,EAAK,aAAa,EAC/B,KAAK,CAAAyC,MAAO,OAAO,KAAKA,CAAG,CAAC,EAAE,KAAK,OAAMC,OACzC,MAAMH,EAAG,UAAUC,GAAGE,CAAM,GAC5B,OAAO,KAAK,UAAU5D,CAAK,GACpB0D,EACP,GACK,KAAK,UAAU1D,CAAK;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,IAAIoD,GAAqE;AACxE,QAAIS;AACJ,WAAO;AAAA,MACN,OAAO,MAAM;AAAE,QAAAA,GAAQ,UAAA;AAAA,MAAa;AAAA,MACpC,UAAU,IAAI,QAAQ,OAAMlD,MAAO;AAClC,QAAAkD,IAAS,MAAMC,EAAa,KAAK;AACjC,cAAM,EAAC,MAAAC,EAAA,IAAQ,MAAMF,EAAO,UAAUT,CAAI;AAC1C,cAAMS,EAAO,UAAA,GACblD,EAAIoD,EAAK,KAAK,KAAA,KAAU,IAAI;AAAA,MAC7B,CAAC;AAAA,IAAA;AAAA,EAEH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,mBAAmBC,MAAmBC,GAAuB;AAC5D,QAAGA,EAAY,SAAS,EAAG,OAAM,IAAI,MAAM,wCAAwC;AAEnF,UAAMC,IAAS,CAAC9C,GAAc+C,IAAqB,OAC3C/C,EAAK,cAAc,MAAM,EAAE,EAAE,IAAI,CAACgD,GAAMC,MAC7CD,EAAK,WAAW,CAAC,KAAKC,IAAQ,KAAMF,IAAaA,CAAU,EAAE,MAAM,GAAGA,CAAU,GAG7EG,IAAmB,CAACC,GAAcC,MAAyB;AAChE,UAAID,EAAG,WAAWC,EAAG,OAAQ,OAAM,IAAI,MAAM,6BAA6B;AAC1E,YAAMC,IAAUC,EAAG,SAASH,CAAE,GAAGI,IAAUD,EAAG,SAASF,CAAE,GACnDI,IAAaF,EAAG,IAAID,GAASE,CAAO,GACpCE,IAAaH,EAAG,KAAKD,CAAO,GAC5BK,IAAaJ,EAAG,KAAKC,CAAO;AAClC,aAAGE,EAAW,WAAW,CAAC,MAAM,KAAKC,EAAW,WAAW,CAAC,MAAM,IAAU,IACrEF,EAAW,SAAA,EAAW,CAAC,KAAKC,EAAW,WAAW,CAAC,IAAIC,EAAW,SAAA,EAAW,CAAC;AAAA,IACtF,GAEMC,IAAIb,EAAOF,CAAM,GACjBgB,IAAef,EAAY,IAAI,CAAAnD,MAAKoD,EAAOpD,CAAC,CAAC,EAAE,IAAI,CAAAmE,MAAaX,EAAiBS,GAAGE,CAAS,CAAC;AACpG,WAAO,EAAC,KAAKD,EAAa,OAAO,CAACE,GAAKC,MAAMD,IAAMC,GAAG,CAAC,IAAIH,EAAa,QAAQ,KAAK,KAAK,IAAI,GAAGA,CAAY,GAAG,cAAAA,EAAA;AAAA,EACjH;AACD;AC1FO,MAAMI,IAAkB;AAAA,EAC9B,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM,EAAC,SAAS,EAAC,MAAM,UAAU,aAAa,kBAAkB,UAAU,KAAI;AAAA,EAC9E,IAAI,CAAChD,MAA4BoB,IAAIpB,EAAK,OAAO;AAClD,GAEaiD,IAAuB;AAAA,EACnC,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM,CAAA;AAAA,EACN,IAAI,aAAY,oBAAI,KAAA,GAAO,YAAA;AAC5B,GAEaC,IAAmB;AAAA,
EAC/B,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM;AAAA,IACL,UAAU,EAAC,MAAM,UAAU,aAAa,sBAAsB,MAAM,CAAC,OAAO,QAAQ,QAAQ,GAAG,UAAU,GAAA;AAAA,IACzG,MAAM,EAAC,MAAM,UAAU,aAAa,mBAAmB,UAAU,GAAA;AAAA,EAAI;AAAA,EAEtE,IAAI,OAAOlD,GAAMtC,MAAO;AACvB,QAAI;AACH,cAAOsC,EAAK,MAAA;AAAA,QACX,KAAK;AACJ,iBAAO,MAAMgD,EAAQ,GAAG,EAAC,SAAShD,EAAK,KAAA,GAAOtC,CAAE;AAAA,QACjD,KAAK;AACJ,iBAAO,MAAMyF,EAAO,GAAG,EAAC,MAAMnD,EAAK,KAAA,GAAOtC,CAAE;AAAA,QAC7C,KAAK;AACJ,iBAAO,MAAM0F,EAAW,GAAG,EAAC,MAAMpD,EAAK,KAAA,GAAOtC,CAAE;AAAA,MACjD;AAAA,IAEF,SAAQgC,GAAU;AACjB,aAAO,EAAC,OAAOA,GAAK,WAAWA,EAAI,WAAS;AAAA,IAC7C;AAAA,EACD;AACD,GAEa2D,IAAoB;AAAA,EAChC,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM;AAAA,IACL,KAAK,EAAC,MAAM,UAAU,aAAa,gBAAgB,UAAU,GAAA;AAAA,IAC7D,QAAQ,EAAC,MAAM,UAAU,aAAa,sBAAsB,MAAM,CAAC,OAAO,QAAQ,OAAO,QAAQ,GAAG,SAAS,MAAA;AAAA,IAC7G,SAAS,EAAC,MAAM,UAAU,aAAa,wBAAwB,SAAS,GAAC;AAAA,IACzE,MAAM,EAAC,MAAM,UAAU,aAAa,oBAAA;AAAA,EAAmB;AAAA,EAExD,IAAI,CAACrD,MAKC,IAAIsD,EAAK,EAAC,KAAKtD,EAAK,KAAK,SAASA,EAAK,SAAQ,EAAE,QAAQ,EAAC,QAAQA,EAAK,UAAU,OAAO,MAAMA,EAAK,KAAA,CAAK;AAC/G,GAEamD,IAAiB;AAAA,EAC7B,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM;AAAA,IACL,MAAM,EAAC,MAAM,UAAU,aAAa,uBAAuB,UAAU,GAAA;AAAA,EAAI;AAAA,EAE1E,IAAI,OAAOnD,MAAyB;AACnC,UAAMuD,IAAUC,EAAmB,IAAI,GACjC1E,IAAO,MAAM2E,EAAQ,EAAC,SAAAF,EAAA,GAAUvD,EAAK,MAAM,EAAI,EAAE,MAAM,CAACN,MAAa6D,EAAQ,OAAO,MAAM,KAAK7D,CAAG,CAAC;AACzG,WAAO,EAAC,GAAG6D,EAAQ,QAAQ,QAAQzE,GAAM,QAAQ,QAAW,QAAQ,OAAA;AAAA,EACrE;AACD,GAEasE,IAAqB;AAAA,EACjC,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM;AAAA,IACL,MAAM,EAAC,MAAM,UAAU,aAAa,uBAAuB,UAAU,GAAA;AAAA,EAAI;AAAA,EAE1E,IAAI,OAAOpD,OAA0B,EAAC,QAAQ0D,eAAmB1D,EAAK,IAAI,IAAA;AAC3E,GAEa2D,IAAqB;AAAA,EACjC,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM;AAAA,IACL,OAAO,EAAC,MAAM,UAAU,aAAa,iBAAiB,UAAU,GAAA;AAAA,IAChE,QAAQ,EAAC,MAAM,UAAU,aAAa,+BAA+B,SAAS,EAAA;AAAA,EAAC;AAAA,EAEhF,IAAI,OAAO3D,MAGL;AACL,UAAM4D,IAAO,MAAM,MAAM,uCAAuC,mBAAmB5D,EAAK,KAAK,CAAC,IAAI;AAAA,MACjG,SAAS,EAAC,cAAc,6CAA6C,mBAAmB,iBAAA;AAAA,IAAgB,CACxG,EAAE,KAAK,CAAAlB,MAAQA,EAAK,MAAM;AAC3B,QAAI+E,GAAOC,IAAQ;AACnB,UAAM1E,IAAU,IAAI2E,EAAA;AACpB,YAAOF,IAAQC,EAAM,KAAKF,CAAI,OAAO,QAAM;AAC1C,UAAII,IAAM,iBAAiB,KAAK,mBAAmBH,EAAM,CAAC,CAAC,CAAC,IAAI,CAAC;AAGjE,UAFGG,MAAKA,IAAM,mBAAmBA,CAAG,IACjCA,KAAK5E,EAAQ,IAAI4E,CAAG,GACpB5E,EAAQ,SAASY,EAAK,UAAU,GAAI;AAAA,IACxC;AACA,WAAOZ;AAAA,EACR;AACD;"}
1
+ {"version":3,"file":"index.mjs","sources":["../src/provider.ts","../src/antrhopic.ts","../src/ollama.ts","../src/open-ai.ts","../src/llm.ts","../src/ai.ts","../src/tools.ts"],"sourcesContent":["import {LLMMessage, LLMOptions, LLMRequest} from './llm.ts';\n\nexport type AbortablePromise<T> = Promise<T> & {abort: () => void};\n\nexport abstract class LLMProvider {\n\tabstract ask(message: string, options: LLMRequest): AbortablePromise<LLMMessage[]>;\n}\n","import {Anthropic as anthropic} from '@anthropic-ai/sdk';\nimport {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {LLMMessage, LLMRequest} from './llm.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\n\nexport class Anthropic extends LLMProvider {\n\tclient!: anthropic;\n\n\tconstructor(public readonly ai: Ai, public readonly apiToken: string, public model: string) {\n\t\tsuper();\n\t\tthis.client = new anthropic({apiKey: apiToken});\n\t}\n\n\tprivate toStandard(history: any[]): LLMMessage[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tconst orgI = i;\n\t\t\tif(typeof history[orgI].content != 'string') {\n\t\t\t\tif(history[orgI].role == 'assistant') {\n\t\t\t\t\thistory[orgI].content.filter((c: any) => c.type =='tool_use').forEach((c: any) => {\n\t\t\t\t\t\ti++;\n\t\t\t\t\t\thistory.splice(i, 0, {role: 'tool', id: c.id, name: c.name, args: c.input});\n\t\t\t\t\t});\n\t\t\t\t} else if(history[orgI].role == 'user') {\n\t\t\t\t\thistory[orgI].content.filter((c: any) => c.type =='tool_result').forEach((c: any) => {\n\t\t\t\t\t\tconst h = history.find((h: any) => h.id == c.tool_use_id);\n\t\t\t\t\t\th[c.is_error ? 'error' : 'content'] = c.content;\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\thistory[orgI].content = history[orgI].content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\\n\\n');\n\t\t\t}\n\t\t}\n\t\treturn history.filter(h => !!h.content);\n\t}\n\n\tprivate fromStandard(history: LLMMessage[]): any[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tif(history[i].role == 'tool') {\n\t\t\t\tconst h: any = history[i];\n\t\t\t\thistory.splice(i, 1,\n\t\t\t\t\t{role: 'assistant', content: [{type: 'tool_use', id: h.id, name: h.name, input: h.args}]},\n\t\t\t\t\t{role: 'user', content: [{type: 'tool_result', tool_use_id: h.id, is_error: !!h.error, content: h.error || h.content}]}\n\t\t\t\t)\n\t\t\t\ti++;\n\t\t\t}\n\t\t}\n\t\treturn history;\n\t}\n\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tconst controller = new AbortController();\n\t\tconst response = new Promise<any>(async (res, rej) => {\n\t\t\tlet history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);\n\t\t\tif(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);\n\t\t\tconst requestParams: any = {\n\t\t\t\tmodel: options.model || this.model,\n\t\t\t\tmax_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,\n\t\t\t\tsystem: options.system || this.ai.options.system || '',\n\t\t\t\ttemperature: options.temperature || this.ai.options.temperature || 0.7,\n\t\t\t\ttools: (options.tools || this.ai.options.tools || []).map(t => ({\n\t\t\t\t\tname: t.name,\n\t\t\t\t\tdescription: t.description,\n\t\t\t\t\tinput_schema: {\n\t\t\t\t\t\ttype: 'object',\n\t\t\t\t\t\tproperties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},\n\t\t\t\t\t\trequired: t.args ? 
Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []\n\t\t\t\t\t},\n\t\t\t\t\tfn: undefined\n\t\t\t\t})),\n\t\t\t\tmessages: history,\n\t\t\t\tstream: !!options.stream,\n\t\t\t};\n\n\t\t\t// Run tool changes\n\t\t\tlet resp: any;\n\t\t\tdo {\n\t\t\t\tresp = await this.client.messages.create(requestParams);\n\n\t\t\t\t// Streaming mode\n\t\t\t\tif(options.stream) {\n\t\t\t\t\tresp.content = [];\n\t\t\t\t\tfor await (const chunk of resp) {\n\t\t\t\t\t\tif(controller.signal.aborted) break;\n\t\t\t\t\t\tif(chunk.type === 'content_block_start') {\n\t\t\t\t\t\t\tif(chunk.content_block.type === 'text') {\n\t\t\t\t\t\t\t\tresp.content.push({type: 'text', text: ''});\n\t\t\t\t\t\t\t} else if(chunk.content_block.type === 'tool_use') {\n\t\t\t\t\t\t\t\tresp.content.push({type: 'tool_use', id: chunk.content_block.id, name: chunk.content_block.name, input: <any>''});\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t} else if(chunk.type === 'content_block_delta') {\n\t\t\t\t\t\t\tif(chunk.delta.type === 'text_delta') {\n\t\t\t\t\t\t\t\tconst text = chunk.delta.text;\n\t\t\t\t\t\t\t\tresp.content.at(-1).text += text;\n\t\t\t\t\t\t\t\toptions.stream({text});\n\t\t\t\t\t\t\t} else if(chunk.delta.type === 'input_json_delta') {\n\t\t\t\t\t\t\t\tresp.content.at(-1).input += chunk.delta.partial_json;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t} else if(chunk.type === 'content_block_stop') {\n\t\t\t\t\t\t\tconst last = resp.content.at(-1);\n\t\t\t\t\t\t\tif(last.input != null) last.input = last.input ? JSONAttemptParse(last.input, {}) : {};\n\t\t\t\t\t\t} else if(chunk.type === 'message_stop') {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Run tools\n\t\t\t\tconst toolCalls = resp.content.filter((c: any) => c.type === 'tool_use');\n\t\t\t\tif(toolCalls.length && !controller.signal.aborted) {\n\t\t\t\t\thistory.push({role: 'assistant', content: resp.content});\n\t\t\t\t\tconst results = await Promise.all(toolCalls.map(async (toolCall: any) => {\n\t\t\t\t\t\tconst tool = options.tools?.find(findByProp('name', toolCall.name));\n\t\t\t\t\t\tif(!tool) return {tool_use_id: toolCall.id, is_error: true, content: 'Tool not found'};\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst result = await tool.fn(toolCall.input, this.ai);\n\t\t\t\t\t\t\treturn {type: 'tool_result', tool_use_id: toolCall.id, content: JSONSanitize(result)};\n\t\t\t\t\t\t} catch (err: any) {\n\t\t\t\t\t\t\treturn {type: 'tool_result', tool_use_id: toolCall.id, is_error: true, content: err?.message || err?.toString() || 'Unknown'};\n\t\t\t\t\t\t}\n\t\t\t\t\t}));\n\t\t\t\t\thistory.push({role: 'user', content: results});\n\t\t\t\t\trequestParams.messages = history;\n\t\t\t\t}\n\t\t\t} while (!controller.signal.aborted && resp.content.some((c: any) => c.type === 'tool_use'));\n\t\t\tif(options.stream) options.stream({done: true});\n\t\t\tres(this.toStandard([...history, {\n\t\t\t\trole: 'assistant',\n\t\t\t\tcontent: resp.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\\n\\n')\n\t\t\t}]));\n\t\t});\n\t\treturn Object.assign(response, {abort: () => controller.abort()});\n\t}\n}\n","import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {LLMMessage, LLMRequest} from './llm.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\nimport {Ollama as ollama} from 'ollama';\n\nexport class Ollama extends LLMProvider {\n\tclient!: ollama;\n\n\tconstructor(public readonly ai: Ai, public host: string, public model: string) 
{\n\t\tsuper();\n\t\tthis.client = new ollama({host});\n\t}\n\n\tprivate toStandard(history: any[]): LLMMessage[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tif(history[i].role == 'assistant' && history[i].tool_calls) {\n\t\t\t\tif(history[i].content) delete history[i].tool_calls;\n\t\t\t\telse {\n\t\t\t\t\thistory.splice(i, 1);\n\t\t\t\t\ti--;\n\t\t\t\t}\n\t\t\t} else if(history[i].role == 'tool') {\n\t\t\t\tconst error = history[i].content.startsWith('{\"error\":');\n\t\t\t\thistory[i] = {role: 'tool', name: history[i].tool_name, args: history[i].args, [error ? 'error' : 'content']: history[i].content};\n\t\t\t}\n\t\t}\n\t\treturn history;\n\t}\n\n\tprivate fromStandard(history: LLMMessage[]): any[] {\n\t\treturn history.map((h: any) => {\n\t\t\tif(h.role != 'tool') return h;\n\t\t\treturn {role: 'tool', tool_name: h.name, content: h.error || h.content}\n\t\t});\n\t}\n\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tconst controller = new AbortController();\n\t\tconst response = new Promise<any>(async (res, rej) => {\n\t\t\tlet system = options.system || this.ai.options.system;\n\t\t\tlet history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);\n\t\t\tif(history[0].roll == 'system') {\n\t\t\t\tif(!system) system = history.shift();\n\t\t\t\telse history.shift();\n\t\t\t}\n\t\t\tif(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min);\n\t\t\tif(options.system) history.unshift({role: 'system', content: system})\n\n\t\t\tconst requestParams: any = {\n\t\t\t\tmodel: options.model || this.model,\n\t\t\t\tmessages: history,\n\t\t\t\tstream: !!options.stream,\n\t\t\t\tsignal: controller.signal,\n\t\t\t\toptions: {\n\t\t\t\t\ttemperature: options.temperature || this.ai.options.temperature || 0.7,\n\t\t\t\t\tnum_predict: options.max_tokens || this.ai.options.max_tokens || 4096,\n\t\t\t\t},\n\t\t\t\ttools: (options.tools || this.ai.options.tools || []).map(t => ({\n\t\t\t\t\ttype: 'function',\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: t.name,\n\t\t\t\t\t\tdescription: t.description,\n\t\t\t\t\t\tparameters: {\n\t\t\t\t\t\t\ttype: 'object',\n\t\t\t\t\t\t\tproperties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},\n\t\t\t\t\t\t\trequired: t.args ? 
Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}))\n\t\t\t}\n\n\t\t\t// Run tool chains\n\t\t\tlet resp: any;\n\t\t\tdo {\n\t\t\t\tresp = await this.client.chat(requestParams);\n\t\t\t\tif(options.stream) {\n\t\t\t\t\tresp.message = {role: 'assistant', content: '', tool_calls: []};\n\t\t\t\t\tfor await (const chunk of resp) {\n\t\t\t\t\t\tif(controller.signal.aborted) break;\n\t\t\t\t\t\tif(chunk.message?.content) {\n\t\t\t\t\t\t\tresp.message.content += chunk.message.content;\n\t\t\t\t\t\t\toptions.stream({text: chunk.message.content});\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif(chunk.message?.tool_calls) resp.message.tool_calls = chunk.message.tool_calls;\n\t\t\t\t\t\tif(chunk.done) break;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Run tools\n\t\t\t\tif(resp.message?.tool_calls?.length && !controller.signal.aborted) {\n\t\t\t\t\thistory.push(resp.message);\n\t\t\t\t\tconst results = await Promise.all(resp.message.tool_calls.map(async (toolCall: any) => {\n\t\t\t\t\t\tconst tool = (options.tools || this.ai.options.tools)?.find(findByProp('name', toolCall.function.name));\n\t\t\t\t\t\tif(!tool) return {role: 'tool', tool_name: toolCall.function.name, content: '{\"error\": \"Tool not found\"}'};\n\t\t\t\t\t\tconst args = typeof toolCall.function.arguments === 'string' ? JSONAttemptParse(toolCall.function.arguments, {}) : toolCall.function.arguments;\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst result = await tool.fn(args, this.ai);\n\t\t\t\t\t\t\treturn {role: 'tool', tool_name: toolCall.function.name, args, content: JSONSanitize(result)};\n\t\t\t\t\t\t} catch (err: any) {\n\t\t\t\t\t\t\treturn {role: 'tool', tool_name: toolCall.function.name, args, content: JSONSanitize({error: err?.message || err?.toString() || 'Unknown'})};\n\t\t\t\t\t\t}\n\t\t\t\t\t}));\n\t\t\t\t\thistory.push(...results);\n\t\t\t\t\trequestParams.messages = history;\n\t\t\t\t}\n\t\t\t} while (!controller.signal.aborted && resp.message?.tool_calls?.length);\n\t\t\tif(options.stream) options.stream({done: true});\n\t\t\tres(this.toStandard([...history, {role: 'assistant', content: resp.message?.content}]));\n\t\t});\n\t\treturn Object.assign(response, {abort: () => controller.abort()});\n\t}\n}\n","import {OpenAI as openAI} from 'openai';\nimport {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {LLMMessage, LLMRequest} from './llm.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\n\nexport class OpenAi extends LLMProvider {\n\tclient!: openAI;\n\n\tconstructor(public readonly ai: Ai, public readonly apiToken: string, public model: string) {\n\t\tsuper();\n\t\tthis.client = new openAI({apiKey: apiToken});\n\t}\n\n\tprivate toStandard(history: any[]): LLMMessage[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tconst h = history[i];\n\t\t\tif(h.role === 'assistant' && h.tool_calls) {\n\t\t\t\tconst tools = h.tool_calls.map((tc: any) => ({\n\t\t\t\t\trole: 'tool',\n\t\t\t\t\tid: tc.id,\n\t\t\t\t\tname: tc.function.name,\n\t\t\t\t\targs: JSONAttemptParse(tc.function.arguments, {})\n\t\t\t\t}));\n\t\t\t\thistory.splice(i, 1, ...tools);\n\t\t\t\ti += tools.length - 1;\n\t\t\t} else if(h.role === 'tool' && h.content) {\n\t\t\t\tconst record = history.find(h2 => h.tool_call_id == h2.id);\n\t\t\t\tif(record) {\n\t\t\t\t\tif(h.content.includes('\"error\":')) record.error = h.content;\n\t\t\t\t\telse record.content = h.content;\n\t\t\t\t}\n\t\t\t\thistory.splice(i, 
1);\n\t\t\t\ti--;\n\t\t\t}\n\n\t\t}\n\t\treturn history;\n\t}\n\n\tprivate fromStandard(history: LLMMessage[]): any[] {\n\t\treturn history.reduce((result, h) => {\n\t\t\tif(h.role === 'tool') {\n\t\t\t\tresult.push({\n\t\t\t\t\trole: 'assistant',\n\t\t\t\t\tcontent: null,\n\t\t\t\t\ttool_calls: [{ id: h.id, type: 'function', function: { name: h.name, arguments: JSON.stringify(h.args) } }],\n\t\t\t\t\trefusal: null,\n\t\t\t\t\tannotations: [],\n\t\t\t\t}, {\n\t\t\t\t\trole: 'tool',\n\t\t\t\t\ttool_call_id: h.id,\n\t\t\t\t\tcontent: h.error || h.content\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\tresult.push(h);\n\t\t\t}\n\t\t\treturn result;\n\t\t}, [] as any[]);\n\t}\n\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tconst controller = new AbortController();\n\t\tconst response = new Promise<any>(async (res, rej) => {\n\t\t\tlet history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);\n\t\t\tif(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);\n\n\t\t\tconst requestParams: any = {\n\t\t\t\tmodel: options.model || this.model,\n\t\t\t\tmessages: history,\n\t\t\t\tstream: !!options.stream,\n\t\t\t\tmax_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,\n\t\t\t\ttemperature: options.temperature || this.ai.options.temperature || 0.7,\n\t\t\t\ttools: (options.tools || this.ai.options.tools || []).map(t => ({\n\t\t\t\t\ttype: 'function',\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: t.name,\n\t\t\t\t\t\tdescription: t.description,\n\t\t\t\t\t\tparameters: {\n\t\t\t\t\t\t\ttype: 'object',\n\t\t\t\t\t\t\tproperties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},\n\t\t\t\t\t\t\trequired: t.args ? 
Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}))\n\t\t\t};\n\n\t\t\t// Tool call and streaming logic similar to other providers\n\t\t\tlet resp: any;\n\t\t\tdo {\n\t\t\t\tresp = await this.client.chat.completions.create(requestParams);\n\n\t\t\t\t// Implement streaming and tool call handling\n\t\t\t\tif(options.stream) {\n\t\t\t\t\tresp.choices = [];\n\t\t\t\t\tfor await (const chunk of resp) {\n\t\t\t\t\t\tif(controller.signal.aborted) break;\n\t\t\t\t\t\tif(chunk.choices[0].delta.content) {\n\t\t\t\t\t\t\toptions.stream({text: chunk.choices[0].delta.content});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Run tools\n\t\t\t\tconst toolCalls = resp.choices[0].message.tool_calls || [];\n\t\t\t\tif(toolCalls.length && !controller.signal.aborted) {\n\t\t\t\t\thistory.push(resp.choices[0].message);\n\t\t\t\t\tconst results = await Promise.all(toolCalls.map(async (toolCall: any) => {\n\t\t\t\t\t\tconst tool = options.tools?.find(findByProp('name', toolCall.function.name));\n\t\t\t\t\t\tif(!tool) return {role: 'tool', tool_call_id: toolCall.id, content: '{\"error\": \"Tool not found\"}'};\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst args = JSONAttemptParse(toolCall.function.arguments, {});\n\t\t\t\t\t\t\tconst result = await tool.fn(args, this.ai);\n\t\t\t\t\t\t\treturn {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize(result)};\n\t\t\t\t\t\t} catch (err: any) {\n\t\t\t\t\t\t\treturn {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize({error: err?.message || err?.toString() || 'Unknown'})};\n\t\t\t\t\t\t}\n\t\t\t\t\t}));\n\t\t\t\t\thistory.push(...results);\n\t\t\t\t\trequestParams.messages = history;\n\t\t\t\t}\n\t\t\t} while (!controller.signal.aborted && resp.choices?.[0]?.message?.tool_calls?.length);\n\n\t\t\tif(options.stream) options.stream({done: true});\n\t\t\tres(this.toStandard([...history, {role: 'assistant', content: resp.choices[0].message.content || ''}]));\n\t\t});\n\n\t\treturn Object.assign(response, {abort: () => controller.abort()});\n\t}\n}\n","import {JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {Anthropic} from './antrhopic.ts';\nimport {Ollama} from './ollama.ts';\nimport {OpenAi} from './open-ai.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\nimport {AiTool} from './tools.ts';\n\nexport type LLMMessage = {\n\t/** Message originator */\n\trole: 'assistant' | 'system' | 'user';\n\t/** Message content */\n\tcontent: string | any;\n} | {\n\t/** Tool call */\n\trole: 'tool';\n\t/** Unique ID for call */\n\tid: string;\n\t/** Tool that was run */\n\tname: string;\n\t/** Tool arguments */\n\targs: any;\n\t/** Tool result */\n\tcontent: undefined | string;\n\t/** Tool error */\n\terror: undefined | string;\n}\n\nexport type LLMOptions = {\n\t/** Anthropic settings */\n\tanthropic?: {\n\t\t/** API Token */\n\t\ttoken: string;\n\t\t/** Default model */\n\t\tmodel: string;\n\t},\n\t/** Ollama settings */\n\tollama?: {\n\t\t/** connection URL */\n\t\thost: string;\n\t\t/** Default model */\n\t\tmodel: string;\n\t},\n\t/** Open AI settings */\n\topenAi?: {\n\t\t/** API Token */\n\t\ttoken: string;\n\t\t/** Default model */\n\t\tmodel: string;\n\t},\n\t/** Default provider & model */\n\tmodel: string | [string, string];\n} & Omit<LLMRequest, 'model'>;\n\nexport type LLMRequest = {\n\t/** System prompt */\n\tsystem?: string;\n\t/** Message history */\n\thistory?: LLMMessage[];\n\t/** Max tokens for request */\n\tmax_tokens?: number;\n\t/** 0 = 
Rigid Logic, 1 = Balanced, 2 = Hyper Creative **/\n\ttemperature?: number;\n\t/** Available tools */\n\ttools?: AiTool[];\n\t/** LLM model */\n\tmodel?: string | [string, string];\n\t/** Stream response */\n\tstream?: (chunk: {text?: string, done?: true}) => any;\n\t/** Compress old messages in the chat to free up context */\n\tcompress?: {\n\t\t/** Trigger chat compression once context exceeds the token count */\n\t\tmax: number;\n\t\t/** Compress chat until context size smaller than */\n\t\tmin: number\n\t}\n}\n\nexport class LLM {\n\tprivate providers: {[key: string]: LLMProvider} = {};\n\n\tconstructor(public readonly ai: Ai, public readonly options: LLMOptions) {\n\t\tif(options.anthropic?.token) this.providers.anthropic = new Anthropic(this.ai, options.anthropic.token, options.anthropic.model);\n\t\tif(options.ollama?.host) this.providers.ollama = new Ollama(this.ai, options.ollama.host, options.ollama.model);\n\t\tif(options.openAi?.token) this.providers.openAi = new OpenAi(this.ai, options.openAi.token, options.openAi.model);\n\t}\n\n\t/**\n\t * Chat with LLM\n\t * @param {string} message Question\n\t * @param {LLMRequest} options Configuration options and chat history\n\t * @returns {{abort: () => void, response: Promise<LLMMessage[]>}} Function to abort response and chat history\n\t */\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tlet model: any = [null, null];\n\t\tif(options.model) {\n\t\t\tif(typeof options.model == 'object') model = options.model;\n\t\t\telse model = [options.model, (<any>this.options)[options.model]?.model];\n\t\t}\n\t\tif(!options.model || model[1] == null) {\n\t\t\tif(typeof this.options.model == 'object') model = this.options.model;\n\t\t\telse model = [this.options.model, (<any>this.options)[this.options.model]?.model];\n\t\t}\n\t\tif(!model[0] || !model[1]) throw new Error(`Unknown LLM provider or model: ${model[0]} / ${model[1]}`);\n\t\treturn this.providers[model[0]].ask(message, {...options, model: model[1]});\n\t}\n\n\t/**\n\t * Compress chat history to reduce context size\n\t * @param {LLMMessage[]} history Chatlog that will be compressed\n\t * @param max Trigger compression once context is larger than max\n\t * @param min Summarize until context size is less than min\n\t * @param {LLMRequest} options LLM options\n\t * @returns {Promise<LLMMessage[]>} New chat history will summary at index 0\n\t */\n\tasync compress(history: LLMMessage[], max: number, min: number, options?: LLMRequest): Promise<LLMMessage[]> {\n\t\tif(this.estimateTokens(history) < max) return history;\n\t\tlet keep = 0, tokens = 0;\n\t\tfor(let m of history.toReversed()) {\n\t\t\ttokens += this.estimateTokens(m.content);\n\t\t\tif(tokens < min) keep++;\n\t\t\telse break;\n\t\t}\n\t\tif(history.length <= keep) return history;\n\t\tconst recent = keep == 0 ? [] : history.slice(-keep),\n\t\t\tprocess = (keep == 0 ? 
history : history.slice(0, -keep)).filter(h => h.role === 'assistant' || h.role === 'user');\n\t\tconst summary = await this.summarize(process.map(m => `${m.role}: ${m.content}`).join('\\n\\n'), 250, options);\n\t\treturn [{role: 'assistant', content: `Conversation Summary: ${summary}`}, ...recent];\n\t}\n\n\t/**\n\t * Estimate variable as tokens\n\t * @param history Object to size\n\t * @returns {number} Rough token count\n\t */\n\testimateTokens(history: any): number {\n\t\tconst text = JSON.stringify(history);\n\t\treturn Math.ceil((text.length / 4) * 1.2);\n\t}\n\n\t/**\n\t * Ask a question with JSON response\n\t * @param {string} message Question\n\t * @param {LLMRequest} options Configuration options and chat history\n\t * @returns {Promise<{} | {} | RegExpExecArray | null>}\n\t */\n\tasync json(message: string, options?: LLMRequest) {\n\t\tlet resp = await this.ask(message, {\n\t\t\tsystem: 'Respond using a JSON blob',\n\t\t\t...options\n\t\t});\n\t\tif(!resp?.[0]?.content) return {};\n\t\treturn JSONAttemptParse(new RegExp('\\{[\\s\\S]*\\}').exec(resp[0].content), {});\n\t}\n\n\t/**\n\t * Create a summary of some text\n\t * @param {string} text Text to summarize\n\t * @param {number} tokens Max number of tokens\n\t * @param options LLM request options\n\t * @returns {Promise<string>} Summary\n\t */\n\tsummarize(text: string, tokens: number, options?: LLMRequest): Promise<string | null> {\n\t\treturn this.ask(text, {system: `Generate a brief summary <= ${tokens} tokens. Output nothing else`, temperature: 0.3, ...options})\n\t\t\t.then(history => <string>history.pop()?.content || null);\n\t}\n}\n","import {createWorker} from 'tesseract.js';\nimport {LLM, LLMOptions} from './llm';\nimport fs from 'node:fs/promises';\nimport Path from 'node:path';\nimport * as tf from '@tensorflow/tfjs';\nimport {spawn} from 'node:child_process';\n\nexport type AiOptions = LLMOptions & {\n\twhisper?: {\n\t\t/** Whisper binary location */\n\t\tbinary: string;\n\t\t/** Model: `ggml-base.en.bin` */\n\t\tmodel: string;\n\t\t/** Path to models */\n\t\tpath: string;\n\t}\n}\n\nexport class Ai {\n\tprivate downloads: {[key: string]: Promise<string>} = {};\n\tprivate whisperModel!: string;\n\n\t/** Large Language Models */\n\tllm!: LLM;\n\n\tconstructor(public readonly options: AiOptions) {\n\t\tthis.llm = new LLM(this, options);\n\t\tif(this.options.whisper?.binary) {\n\t\t\tthis.whisperModel = this.options.whisper?.model.endsWith('.bin') ? 
this.options.whisper?.model : this.options.whisper?.model + '.bin';\n\t\t\tthis.downloadAsrModel();\n\t\t}\n\t}\n\n\t/**\n\t * Convert audio to text using Auditory Speech Recognition\n\t * @param {string} path Path to audio\n\t * @param model Whisper model\n\t * @returns {Promise<any>} Extracted text\n\t */\n\tasr(path: string, model: string = this.whisperModel): {abort: () => void, response: Promise<string | null>} {\n\t\tif(!this.options.whisper?.binary) throw new Error('Whisper not configured');\n\t\tlet abort: any = () => {};\n\t\tconst response = new Promise<string | null>((resolve, reject) => {\n\t\t\tthis.downloadAsrModel(model).then(m => {\n\t\t\t\tlet output = '';\n\t\t\t\tconst proc = spawn(<string>this.options.whisper?.binary, ['-nt', '-np', '-m', m, '-f', path], {stdio: ['ignore', 'pipe', 'ignore']});\n\t\t\t\tabort = () => proc.kill('SIGTERM');\n\t\t\t\tproc.on('error', (err: Error) => reject(err));\n\t\t\t\tproc.stdout.on('data', (data: Buffer) => output += data.toString());\n\t\t\t\tproc.on('close', (code: number) => {\n\t\t\t\t\tif(code === 0) resolve(output.trim() || null);\n\t\t\t\t\telse reject(new Error(`Exit code ${code}`));\n\t\t\t\t});\n\t\t\t});\n\t\t});\n\t\treturn {response, abort};\n\t}\n\n\t/**\n\t * Downloads the specified Whisper model if it is not already present locally.\n\t *\n\t * @param {string} model Whisper model that will be downloaded\n\t * @return {Promise<string>} Absolute path to model file, resolves once downloaded\n\t */\n\tasync downloadAsrModel(model: string = this.whisperModel): Promise<string> {\n\t\tif(!this.options.whisper?.binary) throw new Error('Whisper not configured');\n\t\tif(!model.endsWith('.bin')) model += '.bin';\n\t\tconst p = Path.join(this.options.whisper.path, model);\n\t\tif(await fs.stat(p).then(() => true).catch(() => false)) return p;\n\t\tif(!!this.downloads[model]) return this.downloads[model];\n\t\tthis.downloads[model] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${model}`)\n\t\t\t.then(resp => resp.arrayBuffer())\n\t\t\t.then(arr => Buffer.from(arr)).then(async buffer => {\n\t\t\t\tawait fs.writeFile(p, buffer);\n\t\t\t\tdelete this.downloads[model];\n\t\t\t\treturn p;\n\t\t\t});\n\t\treturn this.downloads[model];\n\t}\n\n\t/**\n\t * Convert image to text using Optical Character Recognition\n\t * @param {string} path Path to image\n\t * @returns {{abort: Function, response: Promise<string | null>}} Abort function & Promise of extracted text\n\t */\n\tocr(path: string): {abort: () => void, response: Promise<string | null>} {\n\t\tlet worker: any;\n\t\treturn {\n\t\t\tabort: () => { worker?.terminate(); },\n\t\t\tresponse: new Promise(async res => {\n\t\t\t\tworker = await createWorker('eng');\n\t\t\t\tconst {data} = await worker.recognize(path);\n\t\t\t\tawait worker.terminate();\n\t\t\t\tres(data.text.trim() || null);\n\t\t\t})\n\t\t}\n\t}\n\n\t/**\n\t * Compare the difference between two strings using tensor math\n\t * @param target Text that will checked\n\t * @param {string} searchTerms Multiple search terms to check against target\n\t * @returns {{avg: number, max: number, similarities: number[]}} Similarity values 0-1: 0 = unique, 1 = identical\n\t */\n\tsemanticSimilarity(target: string, ...searchTerms: string[]) {\n\t\tif(searchTerms.length < 2) throw new Error('Requires at least 2 strings to compare');\n\n\t\tconst vector = (text: string, dimensions: number = 10): number[] => {\n\t\t\treturn text.toLowerCase().split('').map((char, index) =>\n\t\t\t\t(char.charCodeAt(0) * (index + 1)) % 
dimensions / dimensions).slice(0, dimensions);\n\t\t}\n\n\t\tconst cosineSimilarity = (v1: number[], v2: number[]): number => {\n\t\t\tif (v1.length !== v2.length) throw new Error('Vectors must be same length');\n\t\t\tconst tensor1 = tf.tensor1d(v1), tensor2 = tf.tensor1d(v2)\n\t\t\tconst dotProduct = tf.dot(tensor1, tensor2)\n\t\t\tconst magnitude1 = tf.norm(tensor1)\n\t\t\tconst magnitude2 = tf.norm(tensor2)\n\t\t\tif(magnitude1.dataSync()[0] === 0 || magnitude2.dataSync()[0] === 0) return 0\n\t\t\treturn dotProduct.dataSync()[0] / (magnitude1.dataSync()[0] * magnitude2.dataSync()[0])\n\t\t}\n\n\t\tconst v = vector(target);\n\t\tconst similarities = searchTerms.map(t => vector(t)).map(refVector => cosineSimilarity(v, refVector))\n\t\treturn {avg: similarities.reduce((acc, s) => acc + s, 0) / similarities.length, max: Math.max(...similarities), similarities}\n\t}\n}\n","import {$, $Sync} from '@ztimson/node-utils';\nimport {ASet, consoleInterceptor, Http, fn as Fn} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\n\nexport type AiToolArg = {[key: string]: {\n\t/** Argument type */\n\ttype: 'array' | 'boolean' | 'number' | 'object' | 'string',\n\t/** Argument description */\n\tdescription: string,\n\t/** Required argument */\n\trequired?: boolean;\n\t/** Default value */\n\tdefault?: any,\n\t/** Options */\n\tenum?: string[],\n\t/** Minimum value or length */\n\tmin?: number,\n\t/** Maximum value or length */\n\tmax?: number,\n\t/** Match pattern */\n\tpattern?: string,\n\t/** Child arguments */\n\titems?: {[key: string]: AiToolArg}\n}}\n\nexport type AiTool = {\n\t/** Tool ID / Name - Must be snail_case */\n\tname: string,\n\t/** Tool description / prompt */\n\tdescription: string,\n\t/** Tool arguments */\n\targs?: AiToolArg,\n\t/** Callback function */\n\tfn: (args: any, ai: Ai) => any | Promise<any>,\n};\n\nexport const CliTool: AiTool = {\n\tname: 'cli',\n\tdescription: 'Use the command line interface, returns any output',\n\targs: {command: {type: 'string', description: 'Command to run', required: true}},\n\tfn: (args: {command: string}) => $`${args.command}`\n}\n\nexport const DateTimeTool: AiTool = {\n\tname: 'get_datetime',\n\tdescription: 'Get current date and time',\n\targs: {},\n\tfn: async () => new Date().toISOString()\n}\n\nexport const ExecTool: AiTool = {\n\tname: 'exec',\n\tdescription: 'Run code/scripts',\n\targs: {\n\t\tlanguage: {type: 'string', description: 'Execution language', enum: ['cli', 'node', 'python'], required: true},\n\t\tcode: {type: 'string', description: 'Code to execute', required: true}\n\t},\n\tfn: async (args, ai) => {\n\t\ttry {\n\t\t\tswitch(args.type) {\n\t\t\t\tcase 'bash':\n\t\t\t\t\treturn await CliTool.fn({command: args.code}, ai);\n\t\t\t\tcase 'node':\n\t\t\t\t\treturn await JSTool.fn({code: args.code}, ai);\n\t\t\t\tcase 'python': {\n\t\t\t\t\treturn await PythonTool.fn({code: args.code}, ai);\n\t\t\t\t}\n\t\t\t}\n\t\t} catch(err: any) {\n\t\t\treturn {error: err?.message || err.toString()};\n\t\t}\n\t}\n}\n\nexport const FetchTool: AiTool = {\n\tname: 'fetch',\n\tdescription: 'Make HTTP request to URL',\n\targs: {\n\t\turl: {type: 'string', description: 'URL to fetch', required: true},\n\t\tmethod: {type: 'string', description: 'HTTP method to use', enum: ['GET', 'POST', 'PUT', 'DELETE'], default: 'GET'},\n\t\theaders: {type: 'object', description: 'HTTP headers to send', default: {}},\n\t\tbody: {type: 'object', description: 'HTTP body to send'},\n\t},\n\tfn: (args: {\n\t\turl: string;\n\t\tmethod: 'GET' | 'POST' | 'PUT' | 
'DELETE';\n\t\theaders: {[key: string]: string};\n\t\tbody: any;\n\t}) => new Http({url: args.url, headers: args.headers}).request({method: args.method || 'GET', body: args.body})\n}\n\nexport const JSTool: AiTool = {\n\tname: 'exec_javascript',\n\tdescription: 'Execute commonjs javascript',\n\targs: {\n\t\tcode: {type: 'string', description: 'CommonJS javascript', required: true}\n\t},\n\tfn: async (args: {code: string}) => {\n\t\tconst console = consoleInterceptor(null);\n\t\tconst resp = await Fn<any>({console}, args.code, true).catch((err: any) => console.output.error.push(err));\n\t\treturn {...console.output, return: resp, stdout: undefined, stderr: undefined};\n\t}\n}\n\nexport const PythonTool: AiTool = {\n\tname: 'exec_javascript',\n\tdescription: 'Execute commonjs javascript',\n\targs: {\n\t\tcode: {type: 'string', description: 'CommonJS javascript', required: true}\n\t},\n\tfn: async (args: {code: string}) => ({result: $Sync`python -c \"${args.code}\"`})\n}\n\nexport const SearchTool: AiTool = {\n\tname: 'search',\n\tdescription: 'Use a search engine to find relevant URLs, should be changed with fetch to scrape sources',\n\targs: {\n\t\tquery: {type: 'string', description: 'Search string', required: true},\n\t\tlength: {type: 'string', description: 'Number of results to return', default: 5},\n\t},\n\tfn: async (args: {\n\t\tquery: string;\n\t\tlength: number;\n\t}) => {\n\t\tconst html = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(args.query)}`, {\n\t\t\theaders: {\"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64)\", \"Accept-Language\": \"en-US,en;q=0.9\"}\n\t\t}).then(resp => resp.text());\n\t\tlet match, regex = /<a .*?href=\"(.+?)\".+?<\\/a>/g;\n\t\tconst results = new ASet<string>();\n\t\twhile((match = regex.exec(html)) !== null) {\n\t\t\tlet url = /uddg=(.+)&amp?/.exec(decodeURIComponent(match[1]))?.[1];\n\t\t\tif(url) url = decodeURIComponent(url);\n\t\t\tif(url) results.add(url);\n\t\t\tif(results.size >= (args.length || 5)) break;\n\t\t}\n\t\treturn 
results;\n\t}\n}\n"],"names":["LLMProvider","Anthropic","ai","apiToken","model","anthropic","history","i","orgI","c","h","message","options","controller","response","res","rej","requestParams","t","objectMap","key","value","resp","chunk","text","last","JSONAttemptParse","toolCalls","results","toolCall","tool","findByProp","result","JSONSanitize","err","Ollama","host","ollama","error","system","args","OpenAi","openAI","tools","tc","record","h2","LLM","max","min","keep","tokens","m","recent","process","Ai","path","abort","resolve","reject","output","proc","spawn","data","code","p","Path","fs","arr","buffer","worker","createWorker","target","searchTerms","vector","dimensions","char","index","cosineSimilarity","v1","v2","tensor1","tf","tensor2","dotProduct","magnitude1","magnitude2","v","similarities","refVector","acc","s","CliTool","$","DateTimeTool","ExecTool","JSTool","PythonTool","FetchTool","Http","console","consoleInterceptor","Fn","$Sync","SearchTool","html","match","regex","ASet","url"],"mappings":";;;;;;;;;;AAIO,MAAeA,EAAY;AAElC;ACAO,MAAMC,UAAkBD,EAAY;AAAA,EAG1C,YAA4BE,GAAwBC,GAAyBC,GAAe;AAC3F,UAAA,GAD2B,KAAA,KAAAF,GAAwB,KAAA,WAAAC,GAAyB,KAAA,QAAAC,GAE5E,KAAK,SAAS,IAAIC,EAAU,EAAC,QAAQF,GAAS;AAAA,EAC/C;AAAA,EALA;AAAA,EAOQ,WAAWG,GAA8B;AAChD,aAAQC,IAAI,GAAGA,IAAID,EAAQ,QAAQC,KAAK;AACvC,YAAMC,IAAOD;AACb,MAAG,OAAOD,EAAQE,CAAI,EAAE,WAAW,aAC/BF,EAAQE,CAAI,EAAE,QAAQ,cACxBF,EAAQE,CAAI,EAAE,QAAQ,OAAO,CAACC,MAAWA,EAAE,QAAO,UAAU,EAAE,QAAQ,CAACA,MAAW;AACjF,QAAAF,KACAD,EAAQ,OAAOC,GAAG,GAAG,EAAC,MAAM,QAAQ,IAAIE,EAAE,IAAI,MAAMA,EAAE,MAAM,MAAMA,EAAE,OAAM;AAAA,MAC3E,CAAC,IACQH,EAAQE,CAAI,EAAE,QAAQ,UAC/BF,EAAQE,CAAI,EAAE,QAAQ,OAAO,CAACC,MAAWA,EAAE,QAAO,aAAa,EAAE,QAAQ,CAACA,MAAW;AACpF,cAAMC,IAAIJ,EAAQ,KAAK,CAACI,MAAWA,EAAE,MAAMD,EAAE,WAAW;AACxD,QAAAC,EAAED,EAAE,WAAW,UAAU,SAAS,IAAIA,EAAE;AAAA,MACzC,CAAC,GAEFH,EAAQE,CAAI,EAAE,UAAUF,EAAQE,CAAI,EAAE,QAAQ,OAAO,CAACC,MAAWA,EAAE,QAAQ,MAAM,EAAE,IAAI,CAACA,MAAWA,EAAE,IAAI,EAAE,KAAK;AAAA;AAAA,CAAM;AAAA,IAExH;AACA,WAAOH,EAAQ,OAAO,CAAAI,MAAK,CAAC,CAACA,EAAE,OAAO;AAAA,EACvC;AAAA,EAEQ,aAAaJ,GAA8B;AAClD,aAAQC,IAAI,GAAGA,IAAID,EAAQ,QAAQC;AAClC,UAAGD,EAAQC,CAAC,EAAE,QAAQ,QAAQ;AAC7B,cAAMG,IAASJ,EAAQC,CAAC;AACxB,QAAAD,EAAQ;AAAA,UAAOC;AAAA,UAAG;AAAA,UACjB,EAAC,MAAM,aAAa,SAAS,CAAC,EAAC,MAAM,YAAY,IAAIG,EAAE,IAAI,MAAMA,EAAE,MAAM,OAAOA,EAAE,KAAA,CAAK,EAAA;AAAA,UACvF,EAAC,MAAM,QAAQ,SAAS,CAAC,EAAC,MAAM,eAAe,aAAaA,EAAE,IAAI,UAAU,CAAC,CAACA,EAAE,OAAO,SAAUA,EAAE,SAASA,EAAE,SAAQ,EAAA;AAAA,QAAC,GAExHH;AAAA,MACD;AAED,WAAOD;AAAA,EACR;AAAA,EAEA,IAAIK,GAAiBC,IAAsB,IAAoC;AAC9E,UAAMC,IAAa,IAAI,gBAAA,GACjBC,IAAW,IAAI,QAAa,OAAOC,GAAKC,MAAQ;AACrD,UAAIV,IAAU,KAAK,aAAa,CAAC,GAAGM,EAAQ,WAAW,CAAA,GAAI,EAAC,MAAM,QAAQ,SAASD,EAAA,CAAQ,CAAC;AAC5F,MAAGC,EAAQ,aAAUN,IAAU,MAAM,KAAK,GAAG,IAAI,SAAcA,GAASM,EAAQ,SAAS,KAAKA,EAAQ,SAAS,KAAKA,CAAO;AAC3H,YAAMK,IAAqB;AAAA,QAC1B,OAAOL,EAAQ,SAAS,KAAK;AAAA,QAC7B,YAAYA,EAAQ,cAAc,KAAK,GAAG,QAAQ,cAAc;AAAA,QAChE,QAAQA,EAAQ,UAAU,KAAK,GAAG,QAAQ,UAAU;AAAA,QACpD,aAAaA,EAAQ,eAAe,KAAK,GAAG,QAAQ,eAAe;AAAA,QACnE,QAAQA,EAAQ,SAAS,KAAK,GAAG,QAAQ,SAAS,CAAA,GAAI,IAAI,CAAAM,OAAM;AAAA,UAC/D,MAAMA,EAAE;AAAA,UACR,aAAaA,EAAE;AAAA,UACf,cAAc;AAAA,YACb,MAAM;AAAA,YACN,YAAYA,EAAE,OAAOC,EAAUD,EAAE,MAAM,CAACE,GAAKC,OAAW,EAAC,GAAGA,GAAO,UAAU,OAAA,EAAW,IAAI,CAAA;AAAA,YAC5F,UAAUH,EAAE,OAAO,OAAO,QAAQA,EAAE,IAAI,EAAE,OAAO,CAAAA,MAAKA,EAAE,CAAC,EAAE,QAAQ,EAAE,IAAI,CAAAA,MAAKA,EAAE,CAAC,CAAC,IAAI,CAAA;AAAA,UAAC;AAAA,UAExF,IAAI;AAAA,QAAA,EACH;AAAA,QACF,UAAUZ;AAAA,QACV,QAAQ,CAAC,CAACM,EAAQ;AAAA,MAAA;AAInB,UAAIU;AACJ,SAAG;AAIF,YAHAA,IAAO,MAAM,KAAK,OAAO,SAAS,OAAOL,CAAa,GAGnDL,EAAQ,QAAQ;AAClB,UAAAU,EAAK,UAAU,
CAAA;AACf,2BAAiBC,KAASD,GAAM;AAC/B,gBAAGT,EAAW,OAAO,QAAS;AAC9B,gBAAGU,EAAM,SAAS;AACjB,cAAGA,EAAM,cAAc,SAAS,SAC/BD,EAAK,QAAQ,KAAK,EAAC,MAAM,QAAQ,MAAM,IAAG,IACjCC,EAAM,cAAc,SAAS,cACtCD,EAAK,QAAQ,KAAK,EAAC,MAAM,YAAY,IAAIC,EAAM,cAAc,IAAI,MAAMA,EAAM,cAAc,MAAM,OAAY,IAAG;AAAA,qBAExGA,EAAM,SAAS;AACxB,kBAAGA,EAAM,MAAM,SAAS,cAAc;AACrC,sBAAMC,IAAOD,EAAM,MAAM;AACzB,gBAAAD,EAAK,QAAQ,GAAG,EAAE,EAAE,QAAQE,GAC5BZ,EAAQ,OAAO,EAAC,MAAAY,GAAK;AAAA,cACtB,MAAA,CAAUD,EAAM,MAAM,SAAS,uBAC9BD,EAAK,QAAQ,GAAG,EAAE,EAAE,SAASC,EAAM,MAAM;AAAA,qBAEjCA,EAAM,SAAS,sBAAsB;AAC9C,oBAAME,IAAOH,EAAK,QAAQ,GAAG,EAAE;AAC/B,cAAGG,EAAK,SAAS,SAAMA,EAAK,QAAQA,EAAK,QAAQC,EAAiBD,EAAK,OAAO,CAAA,CAAE,IAAI,CAAA;AAAA,YACrF,WAAUF,EAAM,SAAS;AACxB;AAAA,UAEF;AAAA,QACD;AAGA,cAAMI,IAAYL,EAAK,QAAQ,OAAO,CAACb,MAAWA,EAAE,SAAS,UAAU;AACvE,YAAGkB,EAAU,UAAU,CAACd,EAAW,OAAO,SAAS;AAClD,UAAAP,EAAQ,KAAK,EAAC,MAAM,aAAa,SAASgB,EAAK,SAAQ;AACvD,gBAAMM,IAAU,MAAM,QAAQ,IAAID,EAAU,IAAI,OAAOE,MAAkB;AACxE,kBAAMC,IAAOlB,EAAQ,OAAO,KAAKmB,EAAW,QAAQF,EAAS,IAAI,CAAC;AAClE,gBAAG,CAACC,EAAM,QAAO,EAAC,aAAaD,EAAS,IAAI,UAAU,IAAM,SAAS,iBAAA;AACrE,gBAAI;AACH,oBAAMG,IAAS,MAAMF,EAAK,GAAGD,EAAS,OAAO,KAAK,EAAE;AACpD,qBAAO,EAAC,MAAM,eAAe,aAAaA,EAAS,IAAI,SAASI,EAAaD,CAAM,EAAA;AAAA,YACpF,SAASE,GAAU;AAClB,qBAAO,EAAC,MAAM,eAAe,aAAaL,EAAS,IAAI,UAAU,IAAM,SAASK,GAAK,WAAWA,GAAK,SAAA,KAAc,UAAA;AAAA,YACpH;AAAA,UACD,CAAC,CAAC;AACF,UAAA5B,EAAQ,KAAK,EAAC,MAAM,QAAQ,SAASsB,GAAQ,GAC7CX,EAAc,WAAWX;AAAA,QAC1B;AAAA,MACD,SAAS,CAACO,EAAW,OAAO,WAAWS,EAAK,QAAQ,KAAK,CAACb,MAAWA,EAAE,SAAS,UAAU;AAC1F,MAAGG,EAAQ,UAAQA,EAAQ,OAAO,EAAC,MAAM,IAAK,GAC9CG,EAAI,KAAK,WAAW,CAAC,GAAGT,GAAS;AAAA,QAChC,MAAM;AAAA,QACN,SAASgB,EAAK,QAAQ,OAAO,CAACb,MAAWA,EAAE,QAAQ,MAAM,EAAE,IAAI,CAACA,MAAWA,EAAE,IAAI,EAAE,KAAK;AAAA;AAAA,CAAM;AAAA,MAAA,CAC9F,CAAC,CAAC;AAAA,IACJ,CAAC;AACD,WAAO,OAAO,OAAOK,GAAU,EAAC,OAAO,MAAMD,EAAW,MAAA,GAAQ;AAAA,EACjE;AACD;AC9HO,MAAMsB,UAAenC,EAAY;AAAA,EAGvC,YAA4BE,GAAekC,GAAqBhC,GAAe;AAC9E,UAAA,GAD2B,KAAA,KAAAF,GAAe,KAAA,OAAAkC,GAAqB,KAAA,QAAAhC,GAE/D,KAAK,SAAS,IAAIiC,EAAO,EAAC,MAAAD,GAAK;AAAA,EAChC;AAAA,EALA;AAAA,EAOQ,WAAW9B,GAA8B;AAChD,aAAQC,IAAI,GAAGA,IAAID,EAAQ,QAAQC;AAClC,UAAGD,EAAQC,CAAC,EAAE,QAAQ,eAAeD,EAAQC,CAAC,EAAE;AAC/C,QAAGD,EAAQC,CAAC,EAAE,UAAS,OAAOD,EAAQC,CAAC,EAAE,cAExCD,EAAQ,OAAOC,GAAG,CAAC,GACnBA;AAAA,eAEQD,EAAQC,CAAC,EAAE,QAAQ,QAAQ;AACpC,cAAM+B,IAAQhC,EAAQC,CAAC,EAAE,QAAQ,WAAW,WAAW;AACvD,QAAAD,EAAQC,CAAC,IAAI,EAAC,MAAM,QAAQ,MAAMD,EAAQC,CAAC,EAAE,WAAW,MAAMD,EAAQC,CAAC,EAAE,MAAM,CAAC+B,IAAQ,UAAU,SAAS,GAAGhC,EAAQC,CAAC,EAAE,QAAA;AAAA,MAC1H;AAED,WAAOD;AAAA,EACR;AAAA,EAEQ,aAAaA,GAA8B;AAClD,WAAOA,EAAQ,IAAI,CAACI,MAChBA,EAAE,QAAQ,SAAeA,IACrB,EAAC,MAAM,QAAQ,WAAWA,EAAE,MAAM,SAASA,EAAE,SAASA,EAAE,QAAA,CAC/D;AAAA,EACF;AAAA,EAEA,IAAIC,GAAiBC,IAAsB,IAAoC;AAC9E,UAAMC,IAAa,IAAI,gBAAA,GACjBC,IAAW,IAAI,QAAa,OAAOC,GAAKC,MAAQ;AACrD,UAAIuB,IAAS3B,EAAQ,UAAU,KAAK,GAAG,QAAQ,QAC3CN,IAAU,KAAK,aAAa,CAAC,GAAGM,EAAQ,WAAW,CAAA,GAAI,EAAC,MAAM,QAAQ,SAASD,EAAA,CAAQ,CAAC;AAC5F,MAAGL,EAAQ,CAAC,EAAE,QAAQ,aACjBiC,MACS,MAAA,IADDA,IAASjC,EAAQ,MAAA,IAG3BM,EAAQ,aAAUN,IAAU,MAAM,KAAK,GAAG,IAAI,SAAcA,GAASM,EAAQ,SAAS,KAAKA,EAAQ,SAAS,GAAG,IAC/GA,EAAQ,UAAQN,EAAQ,QAAQ,EAAC,MAAM,UAAU,SAASiC,GAAO;AAEpE,YAAMtB,IAAqB;AAAA,QAC1B,OAAOL,EAAQ,SAAS,KAAK;AAAA,QAC7B,UAAUN;AAAA,QACV,QAAQ,CAAC,CAACM,EAAQ;AAAA,QAClB,QAAQC,EAAW;AAAA,QACnB,SAAS;AAAA,UACR,aAAaD,EAAQ,eAAe,KAAK,GAAG,QAAQ,eAAe;AAAA,UACnE,aAAaA,EAAQ,cAAc,KAAK,GAAG,QAAQ,cAAc;AAAA,QAAA;AAAA,QAElE,QAAQA,EAAQ,SAAS,KAAK,GAAG,QAAQ,SAAS,CAAA,GAAI,IAAI,CAAAM,OAAM;AAAA,UAC/D,MAAM;AAAA,UACN,UAAU;AAAA,YACT,MAAMA,EAAE;AAAA,YACR,aAAaA,EAAE;AAAA,YACf,YAAY;AAAA,cACX,MAAM;AAAA,cACN,YAAYA,EAAE,OAAOC,EAAUD,EAAE,M
AAM,CAACE,GAAKC,OAAW,EAAC,GAAGA,GAAO,UAAU,OAAA,EAAW,IAAI,CAAA;AAAA,cAC5F,UAAUH,EAAE,OAAO,OAAO,QAAQA,EAAE,IAAI,EAAE,OAAO,CAAAA,MAAKA,EAAE,CAAC,EAAE,QAAQ,EAAE,IAAI,CAAAA,MAAKA,EAAE,CAAC,CAAC,IAAI,CAAA;AAAA,YAAC;AAAA,UACxF;AAAA,QACD,EACC;AAAA,MAAA;AAIH,UAAII;AACJ,SAAG;AAEF,YADAA,IAAO,MAAM,KAAK,OAAO,KAAKL,CAAa,GACxCL,EAAQ,QAAQ;AAClB,UAAAU,EAAK,UAAU,EAAC,MAAM,aAAa,SAAS,IAAI,YAAY,GAAC;AAC7D,2BAAiBC,KAASD;AAOzB,gBANGT,EAAW,OAAO,YAClBU,EAAM,SAAS,YACjBD,EAAK,QAAQ,WAAWC,EAAM,QAAQ,SACtCX,EAAQ,OAAO,EAAC,MAAMW,EAAM,QAAQ,SAAQ,IAE1CA,EAAM,SAAS,iBAAiB,QAAQ,aAAaA,EAAM,QAAQ,aACnEA,EAAM,MAAM;AAAA,QAEjB;AAGA,YAAGD,EAAK,SAAS,YAAY,UAAU,CAACT,EAAW,OAAO,SAAS;AAClE,UAAAP,EAAQ,KAAKgB,EAAK,OAAO;AACzB,gBAAMM,IAAU,MAAM,QAAQ,IAAIN,EAAK,QAAQ,WAAW,IAAI,OAAOO,MAAkB;AACtF,kBAAMC,KAAQlB,EAAQ,SAAS,KAAK,GAAG,QAAQ,QAAQ,KAAKmB,EAAW,QAAQF,EAAS,SAAS,IAAI,CAAC;AACtG,gBAAG,CAACC,EAAM,QAAO,EAAC,MAAM,QAAQ,WAAWD,EAAS,SAAS,MAAM,SAAS,8BAAA;AAC5E,kBAAMW,IAAO,OAAOX,EAAS,SAAS,aAAc,WAAWH,EAAiBG,EAAS,SAAS,WAAW,CAAA,CAAE,IAAIA,EAAS,SAAS;AACrI,gBAAI;AACH,oBAAMG,IAAS,MAAMF,EAAK,GAAGU,GAAM,KAAK,EAAE;AAC1C,qBAAO,EAAC,MAAM,QAAQ,WAAWX,EAAS,SAAS,MAAM,MAAAW,GAAM,SAASP,EAAaD,CAAM,EAAA;AAAA,YAC5F,SAASE,GAAU;AAClB,qBAAO,EAAC,MAAM,QAAQ,WAAWL,EAAS,SAAS,MAAM,MAAAW,GAAM,SAASP,EAAa,EAAC,OAAOC,GAAK,WAAWA,GAAK,cAAc,UAAA,CAAU,EAAA;AAAA,YAC3I;AAAA,UACD,CAAC,CAAC;AACF,UAAA5B,EAAQ,KAAK,GAAGsB,CAAO,GACvBX,EAAc,WAAWX;AAAA,QAC1B;AAAA,MACD,SAAS,CAACO,EAAW,OAAO,WAAWS,EAAK,SAAS,YAAY;AACjE,MAAGV,EAAQ,UAAQA,EAAQ,OAAO,EAAC,MAAM,IAAK,GAC9CG,EAAI,KAAK,WAAW,CAAC,GAAGT,GAAS,EAAC,MAAM,aAAa,SAASgB,EAAK,SAAS,QAAA,CAAQ,CAAC,CAAC;AAAA,IACvF,CAAC;AACD,WAAO,OAAO,OAAOR,GAAU,EAAC,OAAO,MAAMD,EAAW,MAAA,GAAQ;AAAA,EACjE;AACD;AC1GO,MAAM4B,UAAezC,EAAY;AAAA,EAGvC,YAA4BE,GAAwBC,GAAyBC,GAAe;AAC3F,UAAA,GAD2B,KAAA,KAAAF,GAAwB,KAAA,WAAAC,GAAyB,KAAA,QAAAC,GAE5E,KAAK,SAAS,IAAIsC,EAAO,EAAC,QAAQvC,GAAS;AAAA,EAC5C;AAAA,EALA;AAAA,EAOQ,WAAWG,GAA8B;AAChD,aAAQC,IAAI,GAAGA,IAAID,EAAQ,QAAQC,KAAK;AACvC,YAAMG,IAAIJ,EAAQC,CAAC;AACnB,UAAGG,EAAE,SAAS,eAAeA,EAAE,YAAY;AAC1C,cAAMiC,IAAQjC,EAAE,WAAW,IAAI,CAACkC,OAAa;AAAA,UAC5C,MAAM;AAAA,UACN,IAAIA,EAAG;AAAA,UACP,MAAMA,EAAG,SAAS;AAAA,UAClB,MAAMlB,EAAiBkB,EAAG,SAAS,WAAW,CAAA,CAAE;AAAA,QAAA,EAC/C;AACF,QAAAtC,EAAQ,OAAOC,GAAG,GAAG,GAAGoC,CAAK,GAC7BpC,KAAKoC,EAAM,SAAS;AAAA,MACrB,WAAUjC,EAAE,SAAS,UAAUA,EAAE,SAAS;AACzC,cAAMmC,IAASvC,EAAQ,KAAK,OAAMI,EAAE,gBAAgBoC,EAAG,EAAE;AACzD,QAAGD,MACCnC,EAAE,QAAQ,SAAS,UAAU,IAAGmC,EAAO,QAAQnC,EAAE,UAC/CmC,EAAO,UAAUnC,EAAE,UAEzBJ,EAAQ,OAAOC,GAAG,CAAC,GACnBA;AAAA,MACD;AAAA,IAED;AACA,WAAOD;AAAA,EACR;AAAA,EAEQ,aAAaA,GAA8B;AAClD,WAAOA,EAAQ,OAAO,CAAC0B,GAAQtB,OAC3BA,EAAE,SAAS,SACbsB,EAAO,KAAK;AAAA,MACX,MAAM;AAAA,MACN,SAAS;AAAA,MACT,YAAY,CAAC,EAAE,IAAItB,EAAE,IAAI,MAAM,YAAY,UAAU,EAAE,MAAMA,EAAE,MAAM,WAAW,KAAK,UAAUA,EAAE,IAAI,EAAA,GAAK;AAAA,MAC1G,SAAS;AAAA,MACT,aAAa,CAAA;AAAA,IAAC,GACZ;AAAA,MACF,MAAM;AAAA,MACN,cAAcA,EAAE;AAAA,MAChB,SAASA,EAAE,SAASA,EAAE;AAAA,IAAA,CACtB,IAEDsB,EAAO,KAAKtB,CAAC,GAEPsB,IACL,CAAA,CAAW;AAAA,EACf;AAAA,EAEA,IAAIrB,GAAiBC,IAAsB,IAAoC;AAC9E,UAAMC,IAAa,IAAI,gBAAA,GACjBC,IAAW,IAAI,QAAa,OAAOC,GAAKC,MAAQ;AACrD,UAAIV,IAAU,KAAK,aAAa,CAAC,GAAGM,EAAQ,WAAW,CAAA,GAAI,EAAC,MAAM,QAAQ,SAASD,EAAA,CAAQ,CAAC;AAC5F,MAAGC,EAAQ,aAAUN,IAAU,MAAM,KAAK,GAAG,IAAI,SAAcA,GAASM,EAAQ,SAAS,KAAKA,EAAQ,SAAS,KAAKA,CAAO;AAE3H,YAAMK,IAAqB;AAAA,QAC1B,OAAOL,EAAQ,SAAS,KAAK;AAAA,QAC7B,UAAUN;AAAA,QACV,QAAQ,CAAC,CAACM,EAAQ;AAAA,QAClB,YAAYA,EAAQ,cAAc,KAAK,GAAG,QAAQ,cAAc;AAAA,QAChE,aAAaA,EAAQ,eAAe,KAAK,GAAG,QAAQ,eAAe;AAAA,QACnE,QAAQA,EAAQ,SAAS,KAAK,GAAG,QAAQ,SAAS,CAAA,GAAI,IAAI,CAAAM,OAAM;AAAA,UAC/D,MAAM;AAAA,UACN,UAAU;AAAA,YACT,MAAMA,EAAE;AAAA,YACR,aAAaA,EAAE;AAA
A,YACf,YAAY;AAAA,cACX,MAAM;AAAA,cACN,YAAYA,EAAE,OAAOC,EAAUD,EAAE,MAAM,CAACE,GAAKC,OAAW,EAAC,GAAGA,GAAO,UAAU,OAAA,EAAW,IAAI,CAAA;AAAA,cAC5F,UAAUH,EAAE,OAAO,OAAO,QAAQA,EAAE,IAAI,EAAE,OAAO,CAAAA,MAAKA,EAAE,CAAC,EAAE,QAAQ,EAAE,IAAI,CAAAA,MAAKA,EAAE,CAAC,CAAC,IAAI,CAAA;AAAA,YAAC;AAAA,UACxF;AAAA,QACD,EACC;AAAA,MAAA;AAIH,UAAII;AACJ,SAAG;AAIF,YAHAA,IAAO,MAAM,KAAK,OAAO,KAAK,YAAY,OAAOL,CAAa,GAG3DL,EAAQ,QAAQ;AAClB,UAAAU,EAAK,UAAU,CAAA;AACf,2BAAiBC,KAASD,GAAM;AAC/B,gBAAGT,EAAW,OAAO,QAAS;AAC9B,YAAGU,EAAM,QAAQ,CAAC,EAAE,MAAM,WACzBX,EAAQ,OAAO,EAAC,MAAMW,EAAM,QAAQ,CAAC,EAAE,MAAM,SAAQ;AAAA,UAEvD;AAAA,QACD;AAGA,cAAMI,IAAYL,EAAK,QAAQ,CAAC,EAAE,QAAQ,cAAc,CAAA;AACxD,YAAGK,EAAU,UAAU,CAACd,EAAW,OAAO,SAAS;AAClD,UAAAP,EAAQ,KAAKgB,EAAK,QAAQ,CAAC,EAAE,OAAO;AACpC,gBAAMM,IAAU,MAAM,QAAQ,IAAID,EAAU,IAAI,OAAOE,MAAkB;AACxE,kBAAMC,IAAOlB,EAAQ,OAAO,KAAKmB,EAAW,QAAQF,EAAS,SAAS,IAAI,CAAC;AAC3E,gBAAG,CAACC,EAAM,QAAO,EAAC,MAAM,QAAQ,cAAcD,EAAS,IAAI,SAAS,8BAAA;AACpE,gBAAI;AACH,oBAAMW,IAAOd,EAAiBG,EAAS,SAAS,WAAW,CAAA,CAAE,GACvDG,IAAS,MAAMF,EAAK,GAAGU,GAAM,KAAK,EAAE;AAC1C,qBAAO,EAAC,MAAM,QAAQ,cAAcX,EAAS,IAAI,SAASI,EAAaD,CAAM,EAAA;AAAA,YAC9E,SAASE,GAAU;AAClB,qBAAO,EAAC,MAAM,QAAQ,cAAcL,EAAS,IAAI,SAASI,EAAa,EAAC,OAAOC,GAAK,WAAWA,GAAK,cAAc,UAAA,CAAU,EAAA;AAAA,YAC7H;AAAA,UACD,CAAC,CAAC;AACF,UAAA5B,EAAQ,KAAK,GAAGsB,CAAO,GACvBX,EAAc,WAAWX;AAAA,QAC1B;AAAA,MACD,SAAS,CAACO,EAAW,OAAO,WAAWS,EAAK,UAAU,CAAC,GAAG,SAAS,YAAY;AAE/E,MAAGV,EAAQ,UAAQA,EAAQ,OAAO,EAAC,MAAM,IAAK,GAC9CG,EAAI,KAAK,WAAW,CAAC,GAAGT,GAAS,EAAC,MAAM,aAAa,SAASgB,EAAK,QAAQ,CAAC,EAAE,QAAQ,WAAW,GAAA,CAAG,CAAC,CAAC;AAAA,IACvG,CAAC;AAED,WAAO,OAAO,OAAOR,GAAU,EAAC,OAAO,MAAMD,EAAW,MAAA,GAAQ;AAAA,EACjE;AACD;ACnDO,MAAMkC,EAAI;AAAA,EAGhB,YAA4B7C,GAAwBU,GAAqB;AAA7C,SAAA,KAAAV,GAAwB,KAAA,UAAAU,GAChDA,EAAQ,WAAW,UAAO,KAAK,UAAU,YAAY,IAAIX,EAAU,KAAK,IAAIW,EAAQ,UAAU,OAAOA,EAAQ,UAAU,KAAK,IAC5HA,EAAQ,QAAQ,SAAM,KAAK,UAAU,SAAS,IAAIuB,EAAO,KAAK,IAAIvB,EAAQ,OAAO,MAAMA,EAAQ,OAAO,KAAK,IAC3GA,EAAQ,QAAQ,UAAO,KAAK,UAAU,SAAS,IAAI6B,EAAO,KAAK,IAAI7B,EAAQ,OAAO,OAAOA,EAAQ,OAAO,KAAK;AAAA,EACjH;AAAA,EANQ,YAA0C,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAclD,IAAID,GAAiBC,IAAsB,IAAoC;AAC9E,QAAIR,IAAa,CAAC,MAAM,IAAI;AAS5B,QARGQ,EAAQ,UACP,OAAOA,EAAQ,SAAS,eAAkBA,EAAQ,QAChDR,IAAQ,CAACQ,EAAQ,OAAa,KAAK,QAASA,EAAQ,KAAK,GAAG,KAAK,KAEpE,CAACA,EAAQ,SAASR,EAAM,CAAC,KAAK,UAC7B,OAAO,KAAK,QAAQ,SAAS,WAAUA,IAAQ,KAAK,QAAQ,QAC1DA,IAAQ,CAAC,KAAK,QAAQ,OAAa,KAAK,QAAS,KAAK,QAAQ,KAAK,GAAG,KAAK,IAE9E,CAACA,EAAM,CAAC,KAAK,CAACA,EAAM,CAAC,EAAG,OAAM,IAAI,MAAM,kCAAkCA,EAAM,CAAC,CAAC,MAAMA,EAAM,CAAC,CAAC,EAAE;AACrG,WAAO,KAAK,UAAUA,EAAM,CAAC,CAAC,EAAE,IAAIO,GAAS,EAAC,GAAGC,GAAS,OAAOR,EAAM,CAAC,GAAE;AAAA,EAC3E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,SAASE,GAAuB0C,GAAaC,GAAarC,GAA6C;AAC5G,QAAG,KAAK,eAAeN,CAAO,IAAI0C,EAAK,QAAO1C;AAC9C,QAAI4C,IAAO,GAAGC,IAAS;AACvB,aAAQC,KAAK9C,EAAQ;AAEpB,UADA6C,KAAU,KAAK,eAAeC,EAAE,OAAO,GACpCD,IAASF,EAAK,CAAAC;AAAA,UACZ;AAEN,QAAG5C,EAAQ,UAAU4C,EAAM,QAAO5C;AAClC,UAAM+C,IAASH,KAAQ,IAAI,CAAA,IAAK5C,EAAQ,MAAM,CAAC4C,CAAI,GAClDI,KAAWJ,KAAQ,IAAI5C,IAAUA,EAAQ,MAAM,GAAG,CAAC4C,CAAI,GAAG,OAAO,CAAAxC,MAAKA,EAAE,SAAS,eAAeA,EAAE,SAAS,MAAM;AAElH,WAAO,CAAC,EAAC,MAAM,aAAa,SAAS,yBADrB,MAAM,KAAK,UAAU4C,EAAQ,IAAI,OAAK,GAAGF,EAAE,IAAI,KAAKA,EAAE,OAAO,EAAE,EAAE,KAAK;AAAA;AAAA,CAAM,GAAG,KAAKxC,CAAO,CACtC,MAAK,GAAGyC,CAAM;AAAA,EACpF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,eAAe/C,GAAsB;AACpC,UAAMkB,IAAO,KAAK,UAAUlB,CAAO;AACnC,WAAO,KAAK,KAAMkB,EAAK,SAAS,IAAK,GAAG;AAAA,EACzC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,KAAKb,GAAiBC,GAAsB;AACjD,QAAIU,IAAO,MAAM,KAAK,IAAIX,GAAS;AAAA,MAClC,QAAQ;AAAA,MACR,GAAGC;AAAA,IAAA,CACH;AACD,WAAIU,I
AAO,CAAC,GAAG,UACRI,EAAiB,IAAI,OAAO,SAAa,EAAE,KAAKJ,EAAK,CAAC,EAAE,OAAO,GAAG,EAAE,IAD5C,CAAA;AAAA,EAEhC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,UAAUE,GAAc2B,GAAgBvC,GAA8C;AACrF,WAAO,KAAK,IAAIY,GAAM,EAAC,QAAQ,+BAA+B2B,CAAM,gCAAgC,aAAa,KAAK,GAAGvC,EAAA,CAAQ,EAC/H,KAAK,CAAAN,MAAmBA,EAAQ,IAAA,GAAO,WAAW,IAAI;AAAA,EACzD;AACD;ACpJO,MAAMiD,EAAG;AAAA,EAOf,YAA4B3C,GAAoB;AAApB,SAAA,UAAAA,GAC3B,KAAK,MAAM,IAAImC,EAAI,MAAMnC,CAAO,GAC7B,KAAK,QAAQ,SAAS,WACxB,KAAK,eAAe,KAAK,QAAQ,SAAS,MAAM,SAAS,MAAM,IAAI,KAAK,QAAQ,SAAS,QAAQ,KAAK,QAAQ,SAAS,QAAQ,QAC/H,KAAK,iBAAA;AAAA,EAEP;AAAA,EAZQ,YAA8C,CAAA;AAAA,EAC9C;AAAA;AAAA,EAGR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBA,IAAI4C,GAAcpD,IAAgB,KAAK,cAAqE;AAC3G,QAAG,CAAC,KAAK,QAAQ,SAAS,OAAQ,OAAM,IAAI,MAAM,wBAAwB;AAC1E,QAAIqD,IAAa,MAAM;AAAA,IAAC;AAcxB,WAAO,EAAC,UAbS,IAAI,QAAuB,CAACC,GAASC,MAAW;AAChE,WAAK,iBAAiBvD,CAAK,EAAE,KAAK,CAAAgD,MAAK;AACtC,YAAIQ,IAAS;AACb,cAAMC,IAAOC,EAAc,KAAK,QAAQ,SAAS,QAAQ,CAAC,OAAO,OAAO,MAAMV,GAAG,MAAMI,CAAI,GAAG,EAAC,OAAO,CAAC,UAAU,QAAQ,QAAQ,GAAE;AACnI,QAAAC,IAAQ,MAAMI,EAAK,KAAK,SAAS,GACjCA,EAAK,GAAG,SAAS,CAAC3B,MAAeyB,EAAOzB,CAAG,CAAC,GAC5C2B,EAAK,OAAO,GAAG,QAAQ,CAACE,MAAiBH,KAAUG,EAAK,UAAU,GAClEF,EAAK,GAAG,SAAS,CAACG,MAAiB;AAClC,UAAGA,MAAS,IAAGN,EAAQE,EAAO,KAAA,KAAU,IAAI,MAChC,IAAI,MAAM,aAAaI,CAAI,EAAE,CAAC;AAAA,QAC3C,CAAC;AAAA,MACF,CAAC;AAAA,IACF,CAAC,GACiB,OAAAP,EAAA;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,iBAAiBrD,IAAgB,KAAK,cAA+B;AAC1E,QAAG,CAAC,KAAK,QAAQ,SAAS,OAAQ,OAAM,IAAI,MAAM,wBAAwB;AAC1E,IAAIA,EAAM,SAAS,MAAM,MAAGA,KAAS;AACrC,UAAM6D,IAAIC,EAAK,KAAK,KAAK,QAAQ,QAAQ,MAAM9D,CAAK;AACpD,WAAG,MAAM+D,EAAG,KAAKF,CAAC,EAAE,KAAK,MAAM,EAAI,EAAE,MAAM,MAAM,EAAK,IAAUA,IAC3D,KAAK,UAAU7D,CAAK,IAAU,KAAK,UAAUA,CAAK,KACvD,KAAK,UAAUA,CAAK,IAAI,MAAM,6DAA6DA,CAAK,EAAE,EAChG,KAAK,CAAAkB,MAAQA,EAAK,aAAa,EAC/B,KAAK,CAAA8C,MAAO,OAAO,KAAKA,CAAG,CAAC,EAAE,KAAK,OAAMC,OACzC,MAAMF,EAAG,UAAUF,GAAGI,CAAM,GAC5B,OAAO,KAAK,UAAUjE,CAAK,GACpB6D,EACP,GACK,KAAK,UAAU7D,CAAK;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,IAAIoD,GAAqE;AACxE,QAAIc;AACJ,WAAO;AAAA,MACN,OAAO,MAAM;AAAE,QAAAA,GAAQ,UAAA;AAAA,MAAa;AAAA,MACpC,UAAU,IAAI,QAAQ,OAAMvD,MAAO;AAClC,QAAAuD,IAAS,MAAMC,EAAa,KAAK;AACjC,cAAM,EAAC,MAAAR,EAAA,IAAQ,MAAMO,EAAO,UAAUd,CAAI;AAC1C,cAAMc,EAAO,UAAA,GACbvD,EAAIgD,EAAK,KAAK,KAAA,KAAU,IAAI;AAAA,MAC7B,CAAC;AAAA,IAAA;AAAA,EAEH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,mBAAmBS,MAAmBC,GAAuB;AAC5D,QAAGA,EAAY,SAAS,EAAG,OAAM,IAAI,MAAM,wCAAwC;AAEnF,UAAMC,IAAS,CAAClD,GAAcmD,IAAqB,OAC3CnD,EAAK,cAAc,MAAM,EAAE,EAAE,IAAI,CAACoD,GAAMC,MAC7CD,EAAK,WAAW,CAAC,KAAKC,IAAQ,KAAMF,IAAaA,CAAU,EAAE,MAAM,GAAGA,CAAU,GAG7EG,IAAmB,CAACC,GAAcC,MAAyB;AAChE,UAAID,EAAG,WAAWC,EAAG,OAAQ,OAAM,IAAI,MAAM,6BAA6B;AAC1E,YAAMC,IAAUC,EAAG,SAASH,CAAE,GAAGI,IAAUD,EAAG,SAASF,CAAE,GACnDI,IAAaF,EAAG,IAAID,GAASE,CAAO,GACpCE,IAAaH,EAAG,KAAKD,CAAO,GAC5BK,IAAaJ,EAAG,KAAKC,CAAO;AAClC,aAAGE,EAAW,WAAW,CAAC,MAAM,KAAKC,EAAW,WAAW,CAAC,MAAM,IAAU,IACrEF,EAAW,SAAA,EAAW,CAAC,KAAKC,EAAW,WAAW,CAAC,IAAIC,EAAW,SAAA,EAAW,CAAC;AAAA,IACtF,GAEMC,IAAIb,EAAOF,CAAM,GACjBgB,IAAef,EAAY,IAAI,CAAAvD,MAAKwD,EAAOxD,CAAC,CAAC,EAAE,IAAI,CAAAuE,MAAaX,EAAiBS,GAAGE,CAAS,CAAC;AACpG,WAAO,EAAC,KAAKD,EAAa,OAAO,CAACE,GAAKC,MAAMD,IAAMC,GAAG,CAAC,IAAIH,EAAa,QAAQ,KAAK,KAAK,IAAI,GAAGA,CAAY,GAAG,cAAAA,EAAA;AAAA,EACjH;AACD;AC1FO,MAAMI,IAAkB;AAAA,EAC9B,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM,EAAC,SAAS,EAAC,MAAM,UAAU,aAAa,kBAAkB,UAAU,KAAI;AAAA,EAC9E,IAAI,CAACpD,MAA4BqD,IAAIrD,EAAK,OAAO;AAClD,GAEasD,IAAuB;AAAA,EACnC,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM,CAAA;AAAA,EACN,IAAI,aAAY,oBAAI,KAAA,GAAO,YAAA;AAC5B,GAEaC,IAAmB;AAAA,EAC/B,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM;AAAA,IAC
L,UAAU,EAAC,MAAM,UAAU,aAAa,sBAAsB,MAAM,CAAC,OAAO,QAAQ,QAAQ,GAAG,UAAU,GAAA;AAAA,IACzG,MAAM,EAAC,MAAM,UAAU,aAAa,mBAAmB,UAAU,GAAA;AAAA,EAAI;AAAA,EAEtE,IAAI,OAAOvD,GAAMtC,MAAO;AACvB,QAAI;AACH,cAAOsC,EAAK,MAAA;AAAA,QACX,KAAK;AACJ,iBAAO,MAAMoD,EAAQ,GAAG,EAAC,SAASpD,EAAK,KAAA,GAAOtC,CAAE;AAAA,QACjD,KAAK;AACJ,iBAAO,MAAM8F,EAAO,GAAG,EAAC,MAAMxD,EAAK,KAAA,GAAOtC,CAAE;AAAA,QAC7C,KAAK;AACJ,iBAAO,MAAM+F,EAAW,GAAG,EAAC,MAAMzD,EAAK,KAAA,GAAOtC,CAAE;AAAA,MACjD;AAAA,IAEF,SAAQgC,GAAU;AACjB,aAAO,EAAC,OAAOA,GAAK,WAAWA,EAAI,WAAS;AAAA,IAC7C;AAAA,EACD;AACD,GAEagE,KAAoB;AAAA,EAChC,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM;AAAA,IACL,KAAK,EAAC,MAAM,UAAU,aAAa,gBAAgB,UAAU,GAAA;AAAA,IAC7D,QAAQ,EAAC,MAAM,UAAU,aAAa,sBAAsB,MAAM,CAAC,OAAO,QAAQ,OAAO,QAAQ,GAAG,SAAS,MAAA;AAAA,IAC7G,SAAS,EAAC,MAAM,UAAU,aAAa,wBAAwB,SAAS,GAAC;AAAA,IACzE,MAAM,EAAC,MAAM,UAAU,aAAa,oBAAA;AAAA,EAAmB;AAAA,EAExD,IAAI,CAAC1D,MAKC,IAAI2D,EAAK,EAAC,KAAK3D,EAAK,KAAK,SAASA,EAAK,SAAQ,EAAE,QAAQ,EAAC,QAAQA,EAAK,UAAU,OAAO,MAAMA,EAAK,KAAA,CAAK;AAC/G,GAEawD,IAAiB;AAAA,EAC7B,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM;AAAA,IACL,MAAM,EAAC,MAAM,UAAU,aAAa,uBAAuB,UAAU,GAAA;AAAA,EAAI;AAAA,EAE1E,IAAI,OAAOxD,MAAyB;AACnC,UAAM4D,IAAUC,EAAmB,IAAI,GACjC/E,IAAO,MAAMgF,EAAQ,EAAC,SAAAF,EAAA,GAAU5D,EAAK,MAAM,EAAI,EAAE,MAAM,CAACN,MAAakE,EAAQ,OAAO,MAAM,KAAKlE,CAAG,CAAC;AACzG,WAAO,EAAC,GAAGkE,EAAQ,QAAQ,QAAQ9E,GAAM,QAAQ,QAAW,QAAQ,OAAA;AAAA,EACrE;AACD,GAEa2E,IAAqB;AAAA,EACjC,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM;AAAA,IACL,MAAM,EAAC,MAAM,UAAU,aAAa,uBAAuB,UAAU,GAAA;AAAA,EAAI;AAAA,EAE1E,IAAI,OAAOzD,OAA0B,EAAC,QAAQ+D,eAAmB/D,EAAK,IAAI,IAAA;AAC3E,GAEagE,KAAqB;AAAA,EACjC,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM;AAAA,IACL,OAAO,EAAC,MAAM,UAAU,aAAa,iBAAiB,UAAU,GAAA;AAAA,IAChE,QAAQ,EAAC,MAAM,UAAU,aAAa,+BAA+B,SAAS,EAAA;AAAA,EAAC;AAAA,EAEhF,IAAI,OAAOhE,MAGL;AACL,UAAMiE,IAAO,MAAM,MAAM,uCAAuC,mBAAmBjE,EAAK,KAAK,CAAC,IAAI;AAAA,MACjG,SAAS,EAAC,cAAc,6CAA6C,mBAAmB,iBAAA;AAAA,IAAgB,CACxG,EAAE,KAAK,CAAAlB,MAAQA,EAAK,MAAM;AAC3B,QAAIoF,GAAOC,IAAQ;AACnB,UAAM/E,IAAU,IAAIgF,EAAA;AACpB,YAAOF,IAAQC,EAAM,KAAKF,CAAI,OAAO,QAAM;AAC1C,UAAII,IAAM,iBAAiB,KAAK,mBAAmBH,EAAM,CAAC,CAAC,CAAC,IAAI,CAAC;AAGjE,UAFGG,MAAKA,IAAM,mBAAmBA,CAAG,IACjCA,KAAKjF,EAAQ,IAAIiF,CAAG,GACpBjF,EAAQ,SAASY,EAAK,UAAU,GAAI;AAAA,IACxC;AACA,WAAOZ;AAAA,EACR;AACD;"}
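The bundled ai.ts source embedded above shows that Ai.asr() now returns an abort handle alongside the result promise ({abort, response}) instead of a bare promise, matching the shape ocr() already used, and that missing Whisper models are downloaded on demand. The following is a minimal usage sketch, not part of the published diff: it assumes Ai is exported from the package root, and the whisper.cpp binary path, model storage path, Ollama host and model name are placeholders rather than values taken from the package.

import {Ai} from '@ztimson/ai-utils';

// Configure one LLM provider (LLMOptions requires a default model) plus whisper for ASR.
// All paths, the host and the model names below are illustrative placeholders.
const ai = new Ai({
	model: 'ollama',
	ollama: {host: 'http://localhost:11434', model: 'llama3.1'},
	whisper: {
		binary: '/usr/local/bin/whisper-cli',   // whisper.cpp executable (hypothetical path)
		model: 'ggml-base.en.bin',              // fetched automatically if not present locally
		path: '/var/models/whisper'             // where downloaded models are stored
	}
});

// 0.1.15 shape: an abort handle is returned next to the promise.
const {abort, response} = ai.asr('./audio/meeting.wav');
const timeout = setTimeout(abort, 60_000);                 // kill the whisper process after 60s
const transcript = await response.catch(() => null);       // string | null; rejects on failure or abort
clearTimeout(timeout);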
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@ztimson/ai-utils",
- "version": "0.1.13",
+ "version": "0.1.15",
  "description": "AI Utility library",
  "author": "Zak Timson",
  "license": "MIT",