@ztimson/ai-utils 0.1.15 → 0.1.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +8 -4
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +160 -152
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
@@ -1,8 +1,12 @@
-(function(
+
(function(p,y){typeof exports=="object"&&typeof module<"u"?y(exports,require("tesseract.js"),require("@ztimson/utils"),require("@anthropic-ai/sdk"),require("ollama"),require("openai"),require("node:fs/promises"),require("node:path"),require("@tensorflow/tfjs"),require("node:child_process"),require("@ztimson/node-utils")):typeof define=="function"&&define.amd?define(["exports","tesseract.js","@ztimson/utils","@anthropic-ai/sdk","ollama","openai","node:fs/promises","node:path","@tensorflow/tfjs","node:child_process","@ztimson/node-utils"],y):(p=typeof globalThis<"u"?globalThis:p||self,y(p.utils={},p.tesseract_js,p.utils,p.sdk,p.ollama,p.openai,p.fs,p.Path,p.tf,p.node_child_process,p.nodeUtils))})(this,(function(p,y,f,O,v,A,S,E,M,J,k){"use strict";function N(m){const t=Object.create(null,{[Symbol.toStringTag]:{value:"Module"}});if(m){for(const e in m)if(e!=="default"){const n=Object.getOwnPropertyDescriptor(m,e);Object.defineProperty(t,e,n.get?n:{enumerable:!0,get:()=>m[e]})}}return t.default=m,Object.freeze(t)}const _=N(M);class b{}class j extends b{constructor(t,e,n){super(),this.ai=t,this.apiToken=e,this.model=n,this.client=new O.Anthropic({apiKey:e})}client;toStandard(t){const e=[];for(let n=0;n<t.length;n++){const i=t[n];if(typeof i.content!="string"&&(i.role=="assistant"?i.content.filter(o=>o.type=="tool_use").forEach(o=>{e.push({role:"tool",id:o.id,name:o.name,args:o.input})}):i.role=="user"&&i.content.filter(o=>o.type=="tool_result").forEach(o=>{const h=e.find(c=>c.id==o.tool_use_id);h&&(h[o.is_error?"error":"content"]=o.content)}),i.content=i.content.filter(o=>o.type=="text").map(o=>o.text).join(`
-`))
+
`)),i.content){const o=e.at(-1);o&&o.role=="assistant"&&i.role=="assistant"?o.content+=`
-
+
`+i.content:e.push({role:i.role,content:i.content})}}return e}fromStandard(t){for(let e=0;e<t.length;e++)if(t[e].role=="tool"){const n=t[e];t.splice(e,1,{role:"assistant",content:[{type:"tool_use",id:n.id,name:n.name,input:n.args}]},{role:"user",content:[{type:"tool_result",tool_use_id:n.id,is_error:!!n.error,content:n.error||n.content}]}),e++}return t}ask(t,e={}){const n=new AbortController,i=new Promise(async(o,h)=>{let c=this.fromStandard([...e.history||[],{role:"user",content:t}]);e.compress&&(c=await this.ai.llm.compress(c,e.compress.max,e.compress.min,e));const d={model:e.model||this.model,max_tokens:e.max_tokens||this.ai.options.max_tokens||4096,system:e.system||this.ai.options.system||"",temperature:e.temperature||this.ai.options.temperature||.7,tools:(e.tools||this.ai.options.tools||[]).map(a=>({name:a.name,description:a.description,input_schema:{type:"object",properties:a.args?f.objectMap(a.args,(r,u)=>({...u,required:void 0})):{},required:a.args?Object.entries(a.args).filter(r=>r[1].required).map(r=>r[0]):[]},fn:void 0})),messages:c,stream:!!e.stream};let l,s=!0;do{if(l=await this.client.messages.create(d),e.stream){s||e.stream({text:`
-`),
+
`}),s=!1,l.content=[];for await(const r of l){if(n.signal.aborted)break;if(r.type==="content_block_start")r.content_block.type==="text"?l.content.push({type:"text",text:""}):r.content_block.type==="tool_use"&&l.content.push({type:"tool_use",id:r.content_block.id,name:r.content_block.name,input:""});else if(r.type==="content_block_delta")if(r.delta.type==="text_delta"){const u=r.delta.text;l.content.at(-1).text+=u,e.stream({text:u})}else r.delta.type==="input_json_delta"&&(l.content.at(-1).input+=r.delta.partial_json);else if(r.type==="content_block_stop"){const u=l.content.at(-1);u.input!=null&&(u.input=u.input?f.JSONAttemptParse(u.input,{}):{})}else if(r.type==="message_stop")break}}const a=l.content.filter(r=>r.type==="tool_use");if(a.length&&!n.signal.aborted){c.push({role:"assistant",content:l.content});const r=await Promise.all(a.map(async u=>{const g=e.tools?.find(f.findByProp("name",u.name));if(!g)return{tool_use_id:u.id,is_error:!0,content:"Tool not found"};try{const w=await g.fn(u.input,this.ai);return{type:"tool_result",tool_use_id:u.id,content:f.JSONSanitize(w)}}catch(w){return{type:"tool_result",tool_use_id:u.id,is_error:!0,content:w?.message||w?.toString()||"Unknown"}}}));c.push({role:"user",content:r}),d.messages=c}}while(!n.signal.aborted&&l.content.some(a=>a.type==="tool_use"));e.stream&&e.stream({done:!0}),o(this.toStandard([...c,{role:"assistant",content:l.content.filter(a=>a.type=="text").map(a=>a.text).join(`
+
+
`)}]))});return Object.assign(i,{abort:()=>n.abort()})}}class z extends b{constructor(t,e,n){super(),this.ai=t,this.host=e,this.model=n,this.client=new v.Ollama({host:e})}client;toStandard(t){for(let e=0;e<t.length;e++)if(t[e].role=="assistant"&&t[e].tool_calls)t[e].content?delete t[e].tool_calls:(t.splice(e,1),e--);else if(t[e].role=="tool"){const n=t[e].content.startsWith('{"error":');t[e]={role:"tool",name:t[e].tool_name,args:t[e].args,[n?"error":"content"]:t[e].content}}return t}fromStandard(t){return t.map(e=>e.role!="tool"?e:{role:"tool",tool_name:e.name,content:e.error||e.content})}ask(t,e={}){const n=new AbortController,i=new Promise(async(o,h)=>{let c=e.system||this.ai.options.system,d=this.fromStandard([...e.history||[],{role:"user",content:t}]);d[0].roll=="system"&&(c?d.shift():c=d.shift()),e.compress&&(d=await this.ai.llm.compress(d,e.compress.max,e.compress.min)),e.system&&d.unshift({role:"system",content:c});const l={model:e.model||this.model,messages:d,stream:!!e.stream,signal:n.signal,options:{temperature:e.temperature||this.ai.options.temperature||.7,num_predict:e.max_tokens||this.ai.options.max_tokens||4096},tools:(e.tools||this.ai.options.tools||[]).map(a=>({type:"function",function:{name:a.name,description:a.description,parameters:{type:"object",properties:a.args?f.objectMap(a.args,(r,u)=>({...u,required:void 0})):{},required:a.args?Object.entries(a.args).filter(r=>r[1].required).map(r=>r[0]):[]}}}))};let s;do{if(s=await this.client.chat(l),e.stream){s.message={role:"assistant",content:"",tool_calls:[]};for await(const a of s)if(n.signal.aborted||(a.message?.content&&(s.message.content+=a.message.content,e.stream({text:a.message.content})),a.message?.tool_calls&&(s.message.tool_calls=a.message.tool_calls),a.done))break}if(s.message?.tool_calls?.length&&!n.signal.aborted){d.push(s.message);const a=await Promise.all(s.message.tool_calls.map(async r=>{const u=(e.tools||this.ai.options.tools)?.find(f.findByProp("name",r.function.name));if(!u)return{role:"tool",tool_name:r.function.name,content:'{"error": "Tool not found"}'};const g=typeof r.function.arguments=="string"?f.JSONAttemptParse(r.function.arguments,{}):r.function.arguments;try{const w=await u.fn(g,this.ai);return{role:"tool",tool_name:r.function.name,args:g,content:f.JSONSanitize(w)}}catch(w){return{role:"tool",tool_name:r.function.name,args:g,content:f.JSONSanitize({error:w?.message||w?.toString()||"Unknown"})}}}));d.push(...a),l.messages=d}}while(!n.signal.aborted&&s.message?.tool_calls?.length);e.stream&&e.stream({done:!0}),o(this.toStandard([...d,{role:"assistant",content:s.message?.content}]))});return Object.assign(i,{abort:()=>n.abort()})}}class U extends b{constructor(t,e,n){super(),this.ai=t,this.apiToken=e,this.model=n,this.client=new A.OpenAI({apiKey:e})}client;toStandard(t){for(let e=0;e<t.length;e++){const n=t[e];if(n.role==="assistant"&&n.tool_calls){const i=n.tool_calls.map(o=>({role:"tool",id:o.id,name:o.function.name,args:f.JSONAttemptParse(o.function.arguments,{})}));t.splice(e,1,...i),e+=i.length-1}else if(n.role==="tool"&&n.content){const i=t.find(o=>n.tool_call_id==o.id);i&&(n.content.includes('"error":')?i.error=n.content:i.content=n.content),t.splice(e,1),e--}}return t}fromStandard(t){return t.reduce((e,n)=>(n.role==="tool"?e.push({role:"assistant",content:null,tool_calls:[{id:n.id,type:"function",function:{name:n.name,arguments:JSON.stringify(n.args)}}],refusal:null,annotations:[]},{role:"tool",tool_call_id:n.id,content:n.error||n.content}):e.push(n),e),[])}ask(t,e={}){const n=new 
AbortController,i=new Promise(async(o,h)=>{let c=this.fromStandard([...e.history||[],{role:"user",content:t}]);e.compress&&(c=await this.ai.llm.compress(c,e.compress.max,e.compress.min,e));const d={model:e.model||this.model,messages:c,stream:!!e.stream,max_tokens:e.max_tokens||this.ai.options.max_tokens||4096,temperature:e.temperature||this.ai.options.temperature||.7,tools:(e.tools||this.ai.options.tools||[]).map(s=>({type:"function",function:{name:s.name,description:s.description,parameters:{type:"object",properties:s.args?f.objectMap(s.args,(a,r)=>({...r,required:void 0})):{},required:s.args?Object.entries(s.args).filter(a=>a[1].required).map(a=>a[0]):[]}}}))};let l;do{if(l=await this.client.chat.completions.create(d),e.stream){l.choices=[];for await(const a of l){if(n.signal.aborted)break;a.choices[0].delta.content&&e.stream({text:a.choices[0].delta.content})}}const s=l.choices[0].message.tool_calls||[];if(s.length&&!n.signal.aborted){c.push(l.choices[0].message);const a=await Promise.all(s.map(async r=>{const u=e.tools?.find(f.findByProp("name",r.function.name));if(!u)return{role:"tool",tool_call_id:r.id,content:'{"error": "Tool not found"}'};try{const g=f.JSONAttemptParse(r.function.arguments,{}),w=await u.fn(g,this.ai);return{role:"tool",tool_call_id:r.id,content:f.JSONSanitize(w)}}catch(g){return{role:"tool",tool_call_id:r.id,content:f.JSONSanitize({error:g?.message||g?.toString()||"Unknown"})}}}));c.push(...a),d.messages=c}}while(!n.signal.aborted&&l.choices?.[0]?.message?.tool_calls?.length);e.stream&&e.stream({done:!0}),o(this.toStandard([...c,{role:"assistant",content:l.choices[0].message.content||""}]))});return Object.assign(i,{abort:()=>n.abort()})}}class x{constructor(t,e){this.ai=t,this.options=e,e.anthropic?.token&&(this.providers.anthropic=new j(this.ai,e.anthropic.token,e.anthropic.model)),e.ollama?.host&&(this.providers.ollama=new z(this.ai,e.ollama.host,e.ollama.model)),e.openAi?.token&&(this.providers.openAi=new U(this.ai,e.openAi.token,e.openAi.model))}providers={};ask(t,e={}){let n=[null,null];if(e.model&&(typeof e.model=="object"?n=e.model:n=[e.model,this.options[e.model]?.model]),(!e.model||n[1]==null)&&(typeof this.options.model=="object"?n=this.options.model:n=[this.options.model,this.options[this.options.model]?.model]),!n[0]||!n[1])throw new Error(`Unknown LLM provider or model: ${n[0]} / ${n[1]}`);return this.providers[n[0]].ask(t,{...e,model:n[1]})}async compress(t,e,n,i){if(this.estimateTokens(t)<e)return t;let o=0,h=0;for(let s of t.toReversed())if(h+=this.estimateTokens(s.content),h<n)o++;else break;if(t.length<=o)return t;const c=o==0?[]:t.slice(-o),d=(o==0?t:t.slice(0,-o)).filter(s=>s.role==="assistant"||s.role==="user");return[{role:"assistant",content:`Conversation Summary: ${await this.summarize(d.map(s=>`${s.role}: ${s.content}`).join(`
+
+
`),250,i)}`},...c]}estimateTokens(t){const e=JSON.stringify(t);return Math.ceil(e.length/4*1.2)}async json(t,e){let n=await this.ask(t,{system:"Respond using a JSON blob",...e});return n?.[0]?.content?f.JSONAttemptParse(new RegExp("{[sS]*}").exec(n[0].content),{}):{}}summarize(t,e,n){return this.ask(t,{system:`Generate a brief summary <= ${e} tokens. Output nothing else`,temperature:.3,...n}).then(i=>i.pop()?.content||null)}}class L{constructor(t){this.options=t,this.llm=new x(this,t),this.options.whisper?.binary&&(this.whisperModel=this.options.whisper?.model.endsWith(".bin")?this.options.whisper?.model:this.options.whisper?.model+".bin",this.downloadAsrModel())}downloads={};whisperModel;llm;asr(t,e=this.whisperModel){if(!this.options.whisper?.binary)throw new Error("Whisper not configured");let n=()=>{};return{response:new Promise((o,h)=>{this.downloadAsrModel(e).then(c=>{let d="";const l=J.spawn(this.options.whisper?.binary,["-nt","-np","-m",c,"-f",t],{stdio:["ignore","pipe","ignore"]});n=()=>l.kill("SIGTERM"),l.on("error",s=>h(s)),l.stdout.on("data",s=>d+=s.toString()),l.on("close",s=>{s===0?o(d.trim()||null):h(new Error(`Exit code ${s}`))})})}),abort:n}}async downloadAsrModel(t=this.whisperModel){if(!this.options.whisper?.binary)throw new Error("Whisper not configured");t.endsWith(".bin")||(t+=".bin");const e=E.join(this.options.whisper.path,t);return await S.stat(e).then(()=>!0).catch(()=>!1)?e:this.downloads[t]?this.downloads[t]:(this.downloads[t]=fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${t}`).then(n=>n.arrayBuffer()).then(n=>Buffer.from(n)).then(async n=>(await S.writeFile(e,n),delete this.downloads[t],e)),this.downloads[t])}ocr(t){let e;return{abort:()=>{e?.terminate()},response:new Promise(async n=>{e=await y.createWorker("eng");const{data:i}=await e.recognize(t);await e.terminate(),n(i.text.trim()||null)})}}semanticSimilarity(t,...e){if(e.length<2)throw new Error("Requires at least 2 strings to compare");const n=(c,d=10)=>c.toLowerCase().split("").map((l,s)=>l.charCodeAt(0)*(s+1)%d/d).slice(0,d),i=(c,d)=>{if(c.length!==d.length)throw new Error("Vectors must be same length");const l=_.tensor1d(c),s=_.tensor1d(d),a=_.dot(l,s),r=_.norm(l),u=_.norm(s);return r.dataSync()[0]===0||u.dataSync()[0]===0?0:a.dataSync()[0]/(r.dataSync()[0]*u.dataSync()[0])},o=n(t),h=e.map(c=>n(c)).map(c=>i(o,c));return{avg:h.reduce((c,d)=>c+d,0)/h.length,max:Math.max(...h),similarities:h}}}const T={name:"cli",description:"Use the command line interface, returns any output",args:{command:{type:"string",description:"Command to run",required:!0}},fn:m=>k.$`${m.command}`},$={name:"get_datetime",description:"Get current date and time",args:{},fn:async()=>new Date().toISOString()},R={name:"exec",description:"Run code/scripts",args:{language:{type:"string",description:"Execution language",enum:["cli","node","python"],required:!0},code:{type:"string",description:"Code to execute",required:!0}},fn:async(m,t)=>{try{switch(m.type){case"bash":return await T.fn({command:m.code},t);case"node":return await q.fn({code:m.code},t);case"python":return await P.fn({code:m.code},t)}}catch(e){return{error:e?.message||e.toString()}}}},W={name:"fetch",description:"Make HTTP request to URL",args:{url:{type:"string",description:"URL to fetch",required:!0},method:{type:"string",description:"HTTP method to use",enum:["GET","POST","PUT","DELETE"],default:"GET"},headers:{type:"object",description:"HTTP headers to send",default:{}},body:{type:"object",description:"HTTP body to send"}},fn:m=>new 
f.Http({url:m.url,headers:m.headers}).request({method:m.method||"GET",body:m.body})},q={name:"exec_javascript",description:"Execute commonjs javascript",args:{code:{type:"string",description:"CommonJS javascript",required:!0}},fn:async m=>{const t=f.consoleInterceptor(null),e=await f.fn({console:t},m.code,!0).catch(n=>t.output.error.push(n));return{...t.output,return:e,stdout:void 0,stderr:void 0}}},P={name:"exec_javascript",description:"Execute commonjs javascript",args:{code:{type:"string",description:"CommonJS javascript",required:!0}},fn:async m=>({result:k.$Sync`python -c "${m.code}"`})},I={name:"search",description:"Use a search engine to find relevant URLs, should be changed with fetch to scrape sources",args:{query:{type:"string",description:"Search string",required:!0},length:{type:"string",description:"Number of results to return",default:5}},fn:async m=>{const t=await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(m.query)}`,{headers:{"User-Agent":"Mozilla/5.0 (Windows NT 10.0; Win64; x64)","Accept-Language":"en-US,en;q=0.9"}}).then(o=>o.text());let e,n=/<a .*?href="(.+?)".+?<\/a>/g;const i=new f.ASet;for(;(e=n.exec(t))!==null;){let o=/uddg=(.+)&?/.exec(decodeURIComponent(e[1]))?.[1];if(o&&(o=decodeURIComponent(o)),o&&i.add(o),i.size>=(m.length||5))break}return i}};p.Ai=L,p.Anthropic=j,p.CliTool=T,p.DateTimeTool=$,p.ExecTool=R,p.FetchTool=W,p.JSTool=q,p.LLM=x,p.PythonTool=P,p.SearchTool=I,Object.defineProperty(p,Symbol.toStringTag,{value:"Module"})}));
//# sourceMappingURL=index.js.map
package/dist/index.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.js","sources":["../src/provider.ts","../src/antrhopic.ts","../src/ollama.ts","../src/open-ai.ts","../src/llm.ts","../src/ai.ts","../src/tools.ts"],"sourcesContent":["import {LLMMessage, LLMOptions, LLMRequest} from './llm.ts';\n\nexport type AbortablePromise<T> = Promise<T> & {abort: () => void};\n\nexport abstract class LLMProvider {\n\tabstract ask(message: string, options: LLMRequest): AbortablePromise<LLMMessage[]>;\n}\n","import {Anthropic as anthropic} from '@anthropic-ai/sdk';\nimport {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {LLMMessage, LLMRequest} from './llm.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\n\nexport class Anthropic extends LLMProvider {\n\tclient!: anthropic;\n\n\tconstructor(public readonly ai: Ai, public readonly apiToken: string, public model: string) {\n\t\tsuper();\n\t\tthis.client = new anthropic({apiKey: apiToken});\n\t}\n\n\tprivate toStandard(history: any[]): LLMMessage[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tconst orgI = i;\n\t\t\tif(typeof history[orgI].content != 'string') {\n\t\t\t\tif(history[orgI].role == 'assistant') {\n\t\t\t\t\thistory[orgI].content.filter((c: any) => c.type =='tool_use').forEach((c: any) => {\n\t\t\t\t\t\ti++;\n\t\t\t\t\t\thistory.splice(i, 0, {role: 'tool', id: c.id, name: c.name, args: c.input});\n\t\t\t\t\t});\n\t\t\t\t} else if(history[orgI].role == 'user') {\n\t\t\t\t\thistory[orgI].content.filter((c: any) => c.type =='tool_result').forEach((c: any) => {\n\t\t\t\t\t\tconst h = history.find((h: any) => h.id == c.tool_use_id);\n\t\t\t\t\t\th[c.is_error ? 'error' : 'content'] = c.content;\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\thistory[orgI].content = history[orgI].content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\\n\\n');\n\t\t\t}\n\t\t}\n\t\treturn history.filter(h => !!h.content);\n\t}\n\n\tprivate fromStandard(history: LLMMessage[]): any[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tif(history[i].role == 'tool') {\n\t\t\t\tconst h: any = history[i];\n\t\t\t\thistory.splice(i, 1,\n\t\t\t\t\t{role: 'assistant', content: [{type: 'tool_use', id: h.id, name: h.name, input: h.args}]},\n\t\t\t\t\t{role: 'user', content: [{type: 'tool_result', tool_use_id: h.id, is_error: !!h.error, content: h.error || h.content}]}\n\t\t\t\t)\n\t\t\t\ti++;\n\t\t\t}\n\t\t}\n\t\treturn history;\n\t}\n\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tconst controller = new AbortController();\n\t\tconst response = new Promise<any>(async (res, rej) => {\n\t\t\tlet history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);\n\t\t\tif(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);\n\t\t\tconst requestParams: any = {\n\t\t\t\tmodel: options.model || this.model,\n\t\t\t\tmax_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,\n\t\t\t\tsystem: options.system || this.ai.options.system || '',\n\t\t\t\ttemperature: options.temperature || this.ai.options.temperature || 0.7,\n\t\t\t\ttools: (options.tools || this.ai.options.tools || []).map(t => ({\n\t\t\t\t\tname: t.name,\n\t\t\t\t\tdescription: t.description,\n\t\t\t\t\tinput_schema: {\n\t\t\t\t\t\ttype: 'object',\n\t\t\t\t\t\tproperties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},\n\t\t\t\t\t\trequired: t.args ? 
Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []\n\t\t\t\t\t},\n\t\t\t\t\tfn: undefined\n\t\t\t\t})),\n\t\t\t\tmessages: history,\n\t\t\t\tstream: !!options.stream,\n\t\t\t};\n\n\t\t\t// Run tool changes\n\t\t\tlet resp: any;\n\t\t\tdo {\n\t\t\t\tresp = await this.client.messages.create(requestParams);\n\n\t\t\t\t// Streaming mode\n\t\t\t\tif(options.stream) {\n\t\t\t\t\tresp.content = [];\n\t\t\t\t\tfor await (const chunk of resp) {\n\t\t\t\t\t\tif(controller.signal.aborted) break;\n\t\t\t\t\t\tif(chunk.type === 'content_block_start') {\n\t\t\t\t\t\t\tif(chunk.content_block.type === 'text') {\n\t\t\t\t\t\t\t\tresp.content.push({type: 'text', text: ''});\n\t\t\t\t\t\t\t} else if(chunk.content_block.type === 'tool_use') {\n\t\t\t\t\t\t\t\tresp.content.push({type: 'tool_use', id: chunk.content_block.id, name: chunk.content_block.name, input: <any>''});\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t} else if(chunk.type === 'content_block_delta') {\n\t\t\t\t\t\t\tif(chunk.delta.type === 'text_delta') {\n\t\t\t\t\t\t\t\tconst text = chunk.delta.text;\n\t\t\t\t\t\t\t\tresp.content.at(-1).text += text;\n\t\t\t\t\t\t\t\toptions.stream({text});\n\t\t\t\t\t\t\t} else if(chunk.delta.type === 'input_json_delta') {\n\t\t\t\t\t\t\t\tresp.content.at(-1).input += chunk.delta.partial_json;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t} else if(chunk.type === 'content_block_stop') {\n\t\t\t\t\t\t\tconst last = resp.content.at(-1);\n\t\t\t\t\t\t\tif(last.input != null) last.input = last.input ? JSONAttemptParse(last.input, {}) : {};\n\t\t\t\t\t\t} else if(chunk.type === 'message_stop') {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Run tools\n\t\t\t\tconst toolCalls = resp.content.filter((c: any) => c.type === 'tool_use');\n\t\t\t\tif(toolCalls.length && !controller.signal.aborted) {\n\t\t\t\t\thistory.push({role: 'assistant', content: resp.content});\n\t\t\t\t\tconst results = await Promise.all(toolCalls.map(async (toolCall: any) => {\n\t\t\t\t\t\tconst tool = options.tools?.find(findByProp('name', toolCall.name));\n\t\t\t\t\t\tif(!tool) return {tool_use_id: toolCall.id, is_error: true, content: 'Tool not found'};\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst result = await tool.fn(toolCall.input, this.ai);\n\t\t\t\t\t\t\treturn {type: 'tool_result', tool_use_id: toolCall.id, content: JSONSanitize(result)};\n\t\t\t\t\t\t} catch (err: any) {\n\t\t\t\t\t\t\treturn {type: 'tool_result', tool_use_id: toolCall.id, is_error: true, content: err?.message || err?.toString() || 'Unknown'};\n\t\t\t\t\t\t}\n\t\t\t\t\t}));\n\t\t\t\t\thistory.push({role: 'user', content: results});\n\t\t\t\t\trequestParams.messages = history;\n\t\t\t\t}\n\t\t\t} while (!controller.signal.aborted && resp.content.some((c: any) => c.type === 'tool_use'));\n\t\t\tif(options.stream) options.stream({done: true});\n\t\t\tres(this.toStandard([...history, {\n\t\t\t\trole: 'assistant',\n\t\t\t\tcontent: resp.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\\n\\n')\n\t\t\t}]));\n\t\t});\n\t\treturn Object.assign(response, {abort: () => controller.abort()});\n\t}\n}\n","import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {LLMMessage, LLMRequest} from './llm.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\nimport {Ollama as ollama} from 'ollama';\n\nexport class Ollama extends LLMProvider {\n\tclient!: ollama;\n\n\tconstructor(public readonly ai: Ai, public host: string, public model: string) 
{\n\t\tsuper();\n\t\tthis.client = new ollama({host});\n\t}\n\n\tprivate toStandard(history: any[]): LLMMessage[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tif(history[i].role == 'assistant' && history[i].tool_calls) {\n\t\t\t\tif(history[i].content) delete history[i].tool_calls;\n\t\t\t\telse {\n\t\t\t\t\thistory.splice(i, 1);\n\t\t\t\t\ti--;\n\t\t\t\t}\n\t\t\t} else if(history[i].role == 'tool') {\n\t\t\t\tconst error = history[i].content.startsWith('{\"error\":');\n\t\t\t\thistory[i] = {role: 'tool', name: history[i].tool_name, args: history[i].args, [error ? 'error' : 'content']: history[i].content};\n\t\t\t}\n\t\t}\n\t\treturn history;\n\t}\n\n\tprivate fromStandard(history: LLMMessage[]): any[] {\n\t\treturn history.map((h: any) => {\n\t\t\tif(h.role != 'tool') return h;\n\t\t\treturn {role: 'tool', tool_name: h.name, content: h.error || h.content}\n\t\t});\n\t}\n\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tconst controller = new AbortController();\n\t\tconst response = new Promise<any>(async (res, rej) => {\n\t\t\tlet system = options.system || this.ai.options.system;\n\t\t\tlet history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);\n\t\t\tif(history[0].roll == 'system') {\n\t\t\t\tif(!system) system = history.shift();\n\t\t\t\telse history.shift();\n\t\t\t}\n\t\t\tif(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min);\n\t\t\tif(options.system) history.unshift({role: 'system', content: system})\n\n\t\t\tconst requestParams: any = {\n\t\t\t\tmodel: options.model || this.model,\n\t\t\t\tmessages: history,\n\t\t\t\tstream: !!options.stream,\n\t\t\t\tsignal: controller.signal,\n\t\t\t\toptions: {\n\t\t\t\t\ttemperature: options.temperature || this.ai.options.temperature || 0.7,\n\t\t\t\t\tnum_predict: options.max_tokens || this.ai.options.max_tokens || 4096,\n\t\t\t\t},\n\t\t\t\ttools: (options.tools || this.ai.options.tools || []).map(t => ({\n\t\t\t\t\ttype: 'function',\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: t.name,\n\t\t\t\t\t\tdescription: t.description,\n\t\t\t\t\t\tparameters: {\n\t\t\t\t\t\t\ttype: 'object',\n\t\t\t\t\t\t\tproperties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},\n\t\t\t\t\t\t\trequired: t.args ? 
Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}))\n\t\t\t}\n\n\t\t\t// Run tool chains\n\t\t\tlet resp: any;\n\t\t\tdo {\n\t\t\t\tresp = await this.client.chat(requestParams);\n\t\t\t\tif(options.stream) {\n\t\t\t\t\tresp.message = {role: 'assistant', content: '', tool_calls: []};\n\t\t\t\t\tfor await (const chunk of resp) {\n\t\t\t\t\t\tif(controller.signal.aborted) break;\n\t\t\t\t\t\tif(chunk.message?.content) {\n\t\t\t\t\t\t\tresp.message.content += chunk.message.content;\n\t\t\t\t\t\t\toptions.stream({text: chunk.message.content});\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif(chunk.message?.tool_calls) resp.message.tool_calls = chunk.message.tool_calls;\n\t\t\t\t\t\tif(chunk.done) break;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Run tools\n\t\t\t\tif(resp.message?.tool_calls?.length && !controller.signal.aborted) {\n\t\t\t\t\thistory.push(resp.message);\n\t\t\t\t\tconst results = await Promise.all(resp.message.tool_calls.map(async (toolCall: any) => {\n\t\t\t\t\t\tconst tool = (options.tools || this.ai.options.tools)?.find(findByProp('name', toolCall.function.name));\n\t\t\t\t\t\tif(!tool) return {role: 'tool', tool_name: toolCall.function.name, content: '{\"error\": \"Tool not found\"}'};\n\t\t\t\t\t\tconst args = typeof toolCall.function.arguments === 'string' ? JSONAttemptParse(toolCall.function.arguments, {}) : toolCall.function.arguments;\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst result = await tool.fn(args, this.ai);\n\t\t\t\t\t\t\treturn {role: 'tool', tool_name: toolCall.function.name, args, content: JSONSanitize(result)};\n\t\t\t\t\t\t} catch (err: any) {\n\t\t\t\t\t\t\treturn {role: 'tool', tool_name: toolCall.function.name, args, content: JSONSanitize({error: err?.message || err?.toString() || 'Unknown'})};\n\t\t\t\t\t\t}\n\t\t\t\t\t}));\n\t\t\t\t\thistory.push(...results);\n\t\t\t\t\trequestParams.messages = history;\n\t\t\t\t}\n\t\t\t} while (!controller.signal.aborted && resp.message?.tool_calls?.length);\n\t\t\tif(options.stream) options.stream({done: true});\n\t\t\tres(this.toStandard([...history, {role: 'assistant', content: resp.message?.content}]));\n\t\t});\n\t\treturn Object.assign(response, {abort: () => controller.abort()});\n\t}\n}\n","import {OpenAI as openAI} from 'openai';\nimport {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {LLMMessage, LLMRequest} from './llm.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\n\nexport class OpenAi extends LLMProvider {\n\tclient!: openAI;\n\n\tconstructor(public readonly ai: Ai, public readonly apiToken: string, public model: string) {\n\t\tsuper();\n\t\tthis.client = new openAI({apiKey: apiToken});\n\t}\n\n\tprivate toStandard(history: any[]): LLMMessage[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tconst h = history[i];\n\t\t\tif(h.role === 'assistant' && h.tool_calls) {\n\t\t\t\tconst tools = h.tool_calls.map((tc: any) => ({\n\t\t\t\t\trole: 'tool',\n\t\t\t\t\tid: tc.id,\n\t\t\t\t\tname: tc.function.name,\n\t\t\t\t\targs: JSONAttemptParse(tc.function.arguments, {})\n\t\t\t\t}));\n\t\t\t\thistory.splice(i, 1, ...tools);\n\t\t\t\ti += tools.length - 1;\n\t\t\t} else if(h.role === 'tool' && h.content) {\n\t\t\t\tconst record = history.find(h2 => h.tool_call_id == h2.id);\n\t\t\t\tif(record) {\n\t\t\t\t\tif(h.content.includes('\"error\":')) record.error = h.content;\n\t\t\t\t\telse record.content = h.content;\n\t\t\t\t}\n\t\t\t\thistory.splice(i, 
1);\n\t\t\t\ti--;\n\t\t\t}\n\n\t\t}\n\t\treturn history;\n\t}\n\n\tprivate fromStandard(history: LLMMessage[]): any[] {\n\t\treturn history.reduce((result, h) => {\n\t\t\tif(h.role === 'tool') {\n\t\t\t\tresult.push({\n\t\t\t\t\trole: 'assistant',\n\t\t\t\t\tcontent: null,\n\t\t\t\t\ttool_calls: [{ id: h.id, type: 'function', function: { name: h.name, arguments: JSON.stringify(h.args) } }],\n\t\t\t\t\trefusal: null,\n\t\t\t\t\tannotations: [],\n\t\t\t\t}, {\n\t\t\t\t\trole: 'tool',\n\t\t\t\t\ttool_call_id: h.id,\n\t\t\t\t\tcontent: h.error || h.content\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\tresult.push(h);\n\t\t\t}\n\t\t\treturn result;\n\t\t}, [] as any[]);\n\t}\n\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tconst controller = new AbortController();\n\t\tconst response = new Promise<any>(async (res, rej) => {\n\t\t\tlet history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);\n\t\t\tif(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);\n\n\t\t\tconst requestParams: any = {\n\t\t\t\tmodel: options.model || this.model,\n\t\t\t\tmessages: history,\n\t\t\t\tstream: !!options.stream,\n\t\t\t\tmax_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,\n\t\t\t\ttemperature: options.temperature || this.ai.options.temperature || 0.7,\n\t\t\t\ttools: (options.tools || this.ai.options.tools || []).map(t => ({\n\t\t\t\t\ttype: 'function',\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: t.name,\n\t\t\t\t\t\tdescription: t.description,\n\t\t\t\t\t\tparameters: {\n\t\t\t\t\t\t\ttype: 'object',\n\t\t\t\t\t\t\tproperties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},\n\t\t\t\t\t\t\trequired: t.args ? 
Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}))\n\t\t\t};\n\n\t\t\t// Tool call and streaming logic similar to other providers\n\t\t\tlet resp: any;\n\t\t\tdo {\n\t\t\t\tresp = await this.client.chat.completions.create(requestParams);\n\n\t\t\t\t// Implement streaming and tool call handling\n\t\t\t\tif(options.stream) {\n\t\t\t\t\tresp.choices = [];\n\t\t\t\t\tfor await (const chunk of resp) {\n\t\t\t\t\t\tif(controller.signal.aborted) break;\n\t\t\t\t\t\tif(chunk.choices[0].delta.content) {\n\t\t\t\t\t\t\toptions.stream({text: chunk.choices[0].delta.content});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Run tools\n\t\t\t\tconst toolCalls = resp.choices[0].message.tool_calls || [];\n\t\t\t\tif(toolCalls.length && !controller.signal.aborted) {\n\t\t\t\t\thistory.push(resp.choices[0].message);\n\t\t\t\t\tconst results = await Promise.all(toolCalls.map(async (toolCall: any) => {\n\t\t\t\t\t\tconst tool = options.tools?.find(findByProp('name', toolCall.function.name));\n\t\t\t\t\t\tif(!tool) return {role: 'tool', tool_call_id: toolCall.id, content: '{\"error\": \"Tool not found\"}'};\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst args = JSONAttemptParse(toolCall.function.arguments, {});\n\t\t\t\t\t\t\tconst result = await tool.fn(args, this.ai);\n\t\t\t\t\t\t\treturn {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize(result)};\n\t\t\t\t\t\t} catch (err: any) {\n\t\t\t\t\t\t\treturn {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize({error: err?.message || err?.toString() || 'Unknown'})};\n\t\t\t\t\t\t}\n\t\t\t\t\t}));\n\t\t\t\t\thistory.push(...results);\n\t\t\t\t\trequestParams.messages = history;\n\t\t\t\t}\n\t\t\t} while (!controller.signal.aborted && resp.choices?.[0]?.message?.tool_calls?.length);\n\n\t\t\tif(options.stream) options.stream({done: true});\n\t\t\tres(this.toStandard([...history, {role: 'assistant', content: resp.choices[0].message.content || ''}]));\n\t\t});\n\n\t\treturn Object.assign(response, {abort: () => controller.abort()});\n\t}\n}\n","import {JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {Anthropic} from './antrhopic.ts';\nimport {Ollama} from './ollama.ts';\nimport {OpenAi} from './open-ai.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\nimport {AiTool} from './tools.ts';\n\nexport type LLMMessage = {\n\t/** Message originator */\n\trole: 'assistant' | 'system' | 'user';\n\t/** Message content */\n\tcontent: string | any;\n} | {\n\t/** Tool call */\n\trole: 'tool';\n\t/** Unique ID for call */\n\tid: string;\n\t/** Tool that was run */\n\tname: string;\n\t/** Tool arguments */\n\targs: any;\n\t/** Tool result */\n\tcontent: undefined | string;\n\t/** Tool error */\n\terror: undefined | string;\n}\n\nexport type LLMOptions = {\n\t/** Anthropic settings */\n\tanthropic?: {\n\t\t/** API Token */\n\t\ttoken: string;\n\t\t/** Default model */\n\t\tmodel: string;\n\t},\n\t/** Ollama settings */\n\tollama?: {\n\t\t/** connection URL */\n\t\thost: string;\n\t\t/** Default model */\n\t\tmodel: string;\n\t},\n\t/** Open AI settings */\n\topenAi?: {\n\t\t/** API Token */\n\t\ttoken: string;\n\t\t/** Default model */\n\t\tmodel: string;\n\t},\n\t/** Default provider & model */\n\tmodel: string | [string, string];\n} & Omit<LLMRequest, 'model'>;\n\nexport type LLMRequest = {\n\t/** System prompt */\n\tsystem?: string;\n\t/** Message history */\n\thistory?: LLMMessage[];\n\t/** Max tokens for request */\n\tmax_tokens?: number;\n\t/** 0 = 
Rigid Logic, 1 = Balanced, 2 = Hyper Creative **/\n\ttemperature?: number;\n\t/** Available tools */\n\ttools?: AiTool[];\n\t/** LLM model */\n\tmodel?: string | [string, string];\n\t/** Stream response */\n\tstream?: (chunk: {text?: string, done?: true}) => any;\n\t/** Compress old messages in the chat to free up context */\n\tcompress?: {\n\t\t/** Trigger chat compression once context exceeds the token count */\n\t\tmax: number;\n\t\t/** Compress chat until context size smaller than */\n\t\tmin: number\n\t}\n}\n\nexport class LLM {\n\tprivate providers: {[key: string]: LLMProvider} = {};\n\n\tconstructor(public readonly ai: Ai, public readonly options: LLMOptions) {\n\t\tif(options.anthropic?.token) this.providers.anthropic = new Anthropic(this.ai, options.anthropic.token, options.anthropic.model);\n\t\tif(options.ollama?.host) this.providers.ollama = new Ollama(this.ai, options.ollama.host, options.ollama.model);\n\t\tif(options.openAi?.token) this.providers.openAi = new OpenAi(this.ai, options.openAi.token, options.openAi.model);\n\t}\n\n\t/**\n\t * Chat with LLM\n\t * @param {string} message Question\n\t * @param {LLMRequest} options Configuration options and chat history\n\t * @returns {{abort: () => void, response: Promise<LLMMessage[]>}} Function to abort response and chat history\n\t */\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tlet model: any = [null, null];\n\t\tif(options.model) {\n\t\t\tif(typeof options.model == 'object') model = options.model;\n\t\t\telse model = [options.model, (<any>this.options)[options.model]?.model];\n\t\t}\n\t\tif(!options.model || model[1] == null) {\n\t\t\tif(typeof this.options.model == 'object') model = this.options.model;\n\t\t\telse model = [this.options.model, (<any>this.options)[this.options.model]?.model];\n\t\t}\n\t\tif(!model[0] || !model[1]) throw new Error(`Unknown LLM provider or model: ${model[0]} / ${model[1]}`);\n\t\treturn this.providers[model[0]].ask(message, {...options, model: model[1]});\n\t}\n\n\t/**\n\t * Compress chat history to reduce context size\n\t * @param {LLMMessage[]} history Chatlog that will be compressed\n\t * @param max Trigger compression once context is larger than max\n\t * @param min Summarize until context size is less than min\n\t * @param {LLMRequest} options LLM options\n\t * @returns {Promise<LLMMessage[]>} New chat history will summary at index 0\n\t */\n\tasync compress(history: LLMMessage[], max: number, min: number, options?: LLMRequest): Promise<LLMMessage[]> {\n\t\tif(this.estimateTokens(history) < max) return history;\n\t\tlet keep = 0, tokens = 0;\n\t\tfor(let m of history.toReversed()) {\n\t\t\ttokens += this.estimateTokens(m.content);\n\t\t\tif(tokens < min) keep++;\n\t\t\telse break;\n\t\t}\n\t\tif(history.length <= keep) return history;\n\t\tconst recent = keep == 0 ? [] : history.slice(-keep),\n\t\t\tprocess = (keep == 0 ? 
history : history.slice(0, -keep)).filter(h => h.role === 'assistant' || h.role === 'user');\n\t\tconst summary = await this.summarize(process.map(m => `${m.role}: ${m.content}`).join('\\n\\n'), 250, options);\n\t\treturn [{role: 'assistant', content: `Conversation Summary: ${summary}`}, ...recent];\n\t}\n\n\t/**\n\t * Estimate variable as tokens\n\t * @param history Object to size\n\t * @returns {number} Rough token count\n\t */\n\testimateTokens(history: any): number {\n\t\tconst text = JSON.stringify(history);\n\t\treturn Math.ceil((text.length / 4) * 1.2);\n\t}\n\n\t/**\n\t * Ask a question with JSON response\n\t * @param {string} message Question\n\t * @param {LLMRequest} options Configuration options and chat history\n\t * @returns {Promise<{} | {} | RegExpExecArray | null>}\n\t */\n\tasync json(message: string, options?: LLMRequest) {\n\t\tlet resp = await this.ask(message, {\n\t\t\tsystem: 'Respond using a JSON blob',\n\t\t\t...options\n\t\t});\n\t\tif(!resp?.[0]?.content) return {};\n\t\treturn JSONAttemptParse(new RegExp('\\{[\\s\\S]*\\}').exec(resp[0].content), {});\n\t}\n\n\t/**\n\t * Create a summary of some text\n\t * @param {string} text Text to summarize\n\t * @param {number} tokens Max number of tokens\n\t * @param options LLM request options\n\t * @returns {Promise<string>} Summary\n\t */\n\tsummarize(text: string, tokens: number, options?: LLMRequest): Promise<string | null> {\n\t\treturn this.ask(text, {system: `Generate a brief summary <= ${tokens} tokens. Output nothing else`, temperature: 0.3, ...options})\n\t\t\t.then(history => <string>history.pop()?.content || null);\n\t}\n}\n","import {createWorker} from 'tesseract.js';\nimport {LLM, LLMOptions} from './llm';\nimport fs from 'node:fs/promises';\nimport Path from 'node:path';\nimport * as tf from '@tensorflow/tfjs';\nimport {spawn} from 'node:child_process';\n\nexport type AiOptions = LLMOptions & {\n\twhisper?: {\n\t\t/** Whisper binary location */\n\t\tbinary: string;\n\t\t/** Model: `ggml-base.en.bin` */\n\t\tmodel: string;\n\t\t/** Path to models */\n\t\tpath: string;\n\t}\n}\n\nexport class Ai {\n\tprivate downloads: {[key: string]: Promise<string>} = {};\n\tprivate whisperModel!: string;\n\n\t/** Large Language Models */\n\tllm!: LLM;\n\n\tconstructor(public readonly options: AiOptions) {\n\t\tthis.llm = new LLM(this, options);\n\t\tif(this.options.whisper?.binary) {\n\t\t\tthis.whisperModel = this.options.whisper?.model.endsWith('.bin') ? 
this.options.whisper?.model : this.options.whisper?.model + '.bin';\n\t\t\tthis.downloadAsrModel();\n\t\t}\n\t}\n\n\t/**\n\t * Convert audio to text using Auditory Speech Recognition\n\t * @param {string} path Path to audio\n\t * @param model Whisper model\n\t * @returns {Promise<any>} Extracted text\n\t */\n\tasr(path: string, model: string = this.whisperModel): {abort: () => void, response: Promise<string | null>} {\n\t\tif(!this.options.whisper?.binary) throw new Error('Whisper not configured');\n\t\tlet abort: any = () => {};\n\t\tconst response = new Promise<string | null>((resolve, reject) => {\n\t\t\tthis.downloadAsrModel(model).then(m => {\n\t\t\t\tlet output = '';\n\t\t\t\tconst proc = spawn(<string>this.options.whisper?.binary, ['-nt', '-np', '-m', m, '-f', path], {stdio: ['ignore', 'pipe', 'ignore']});\n\t\t\t\tabort = () => proc.kill('SIGTERM');\n\t\t\t\tproc.on('error', (err: Error) => reject(err));\n\t\t\t\tproc.stdout.on('data', (data: Buffer) => output += data.toString());\n\t\t\t\tproc.on('close', (code: number) => {\n\t\t\t\t\tif(code === 0) resolve(output.trim() || null);\n\t\t\t\t\telse reject(new Error(`Exit code ${code}`));\n\t\t\t\t});\n\t\t\t});\n\t\t});\n\t\treturn {response, abort};\n\t}\n\n\t/**\n\t * Downloads the specified Whisper model if it is not already present locally.\n\t *\n\t * @param {string} model Whisper model that will be downloaded\n\t * @return {Promise<string>} Absolute path to model file, resolves once downloaded\n\t */\n\tasync downloadAsrModel(model: string = this.whisperModel): Promise<string> {\n\t\tif(!this.options.whisper?.binary) throw new Error('Whisper not configured');\n\t\tif(!model.endsWith('.bin')) model += '.bin';\n\t\tconst p = Path.join(this.options.whisper.path, model);\n\t\tif(await fs.stat(p).then(() => true).catch(() => false)) return p;\n\t\tif(!!this.downloads[model]) return this.downloads[model];\n\t\tthis.downloads[model] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${model}`)\n\t\t\t.then(resp => resp.arrayBuffer())\n\t\t\t.then(arr => Buffer.from(arr)).then(async buffer => {\n\t\t\t\tawait fs.writeFile(p, buffer);\n\t\t\t\tdelete this.downloads[model];\n\t\t\t\treturn p;\n\t\t\t});\n\t\treturn this.downloads[model];\n\t}\n\n\t/**\n\t * Convert image to text using Optical Character Recognition\n\t * @param {string} path Path to image\n\t * @returns {{abort: Function, response: Promise<string | null>}} Abort function & Promise of extracted text\n\t */\n\tocr(path: string): {abort: () => void, response: Promise<string | null>} {\n\t\tlet worker: any;\n\t\treturn {\n\t\t\tabort: () => { worker?.terminate(); },\n\t\t\tresponse: new Promise(async res => {\n\t\t\t\tworker = await createWorker('eng');\n\t\t\t\tconst {data} = await worker.recognize(path);\n\t\t\t\tawait worker.terminate();\n\t\t\t\tres(data.text.trim() || null);\n\t\t\t})\n\t\t}\n\t}\n\n\t/**\n\t * Compare the difference between two strings using tensor math\n\t * @param target Text that will checked\n\t * @param {string} searchTerms Multiple search terms to check against target\n\t * @returns {{avg: number, max: number, similarities: number[]}} Similarity values 0-1: 0 = unique, 1 = identical\n\t */\n\tsemanticSimilarity(target: string, ...searchTerms: string[]) {\n\t\tif(searchTerms.length < 2) throw new Error('Requires at least 2 strings to compare');\n\n\t\tconst vector = (text: string, dimensions: number = 10): number[] => {\n\t\t\treturn text.toLowerCase().split('').map((char, index) =>\n\t\t\t\t(char.charCodeAt(0) * (index + 1)) % 
dimensions / dimensions).slice(0, dimensions);\n\t\t}\n\n\t\tconst cosineSimilarity = (v1: number[], v2: number[]): number => {\n\t\t\tif (v1.length !== v2.length) throw new Error('Vectors must be same length');\n\t\t\tconst tensor1 = tf.tensor1d(v1), tensor2 = tf.tensor1d(v2)\n\t\t\tconst dotProduct = tf.dot(tensor1, tensor2)\n\t\t\tconst magnitude1 = tf.norm(tensor1)\n\t\t\tconst magnitude2 = tf.norm(tensor2)\n\t\t\tif(magnitude1.dataSync()[0] === 0 || magnitude2.dataSync()[0] === 0) return 0\n\t\t\treturn dotProduct.dataSync()[0] / (magnitude1.dataSync()[0] * magnitude2.dataSync()[0])\n\t\t}\n\n\t\tconst v = vector(target);\n\t\tconst similarities = searchTerms.map(t => vector(t)).map(refVector => cosineSimilarity(v, refVector))\n\t\treturn {avg: similarities.reduce((acc, s) => acc + s, 0) / similarities.length, max: Math.max(...similarities), similarities}\n\t}\n}\n","import {$, $Sync} from '@ztimson/node-utils';\nimport {ASet, consoleInterceptor, Http, fn as Fn} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\n\nexport type AiToolArg = {[key: string]: {\n\t/** Argument type */\n\ttype: 'array' | 'boolean' | 'number' | 'object' | 'string',\n\t/** Argument description */\n\tdescription: string,\n\t/** Required argument */\n\trequired?: boolean;\n\t/** Default value */\n\tdefault?: any,\n\t/** Options */\n\tenum?: string[],\n\t/** Minimum value or length */\n\tmin?: number,\n\t/** Maximum value or length */\n\tmax?: number,\n\t/** Match pattern */\n\tpattern?: string,\n\t/** Child arguments */\n\titems?: {[key: string]: AiToolArg}\n}}\n\nexport type AiTool = {\n\t/** Tool ID / Name - Must be snail_case */\n\tname: string,\n\t/** Tool description / prompt */\n\tdescription: string,\n\t/** Tool arguments */\n\targs?: AiToolArg,\n\t/** Callback function */\n\tfn: (args: any, ai: Ai) => any | Promise<any>,\n};\n\nexport const CliTool: AiTool = {\n\tname: 'cli',\n\tdescription: 'Use the command line interface, returns any output',\n\targs: {command: {type: 'string', description: 'Command to run', required: true}},\n\tfn: (args: {command: string}) => $`${args.command}`\n}\n\nexport const DateTimeTool: AiTool = {\n\tname: 'get_datetime',\n\tdescription: 'Get current date and time',\n\targs: {},\n\tfn: async () => new Date().toISOString()\n}\n\nexport const ExecTool: AiTool = {\n\tname: 'exec',\n\tdescription: 'Run code/scripts',\n\targs: {\n\t\tlanguage: {type: 'string', description: 'Execution language', enum: ['cli', 'node', 'python'], required: true},\n\t\tcode: {type: 'string', description: 'Code to execute', required: true}\n\t},\n\tfn: async (args, ai) => {\n\t\ttry {\n\t\t\tswitch(args.type) {\n\t\t\t\tcase 'bash':\n\t\t\t\t\treturn await CliTool.fn({command: args.code}, ai);\n\t\t\t\tcase 'node':\n\t\t\t\t\treturn await JSTool.fn({code: args.code}, ai);\n\t\t\t\tcase 'python': {\n\t\t\t\t\treturn await PythonTool.fn({code: args.code}, ai);\n\t\t\t\t}\n\t\t\t}\n\t\t} catch(err: any) {\n\t\t\treturn {error: err?.message || err.toString()};\n\t\t}\n\t}\n}\n\nexport const FetchTool: AiTool = {\n\tname: 'fetch',\n\tdescription: 'Make HTTP request to URL',\n\targs: {\n\t\turl: {type: 'string', description: 'URL to fetch', required: true},\n\t\tmethod: {type: 'string', description: 'HTTP method to use', enum: ['GET', 'POST', 'PUT', 'DELETE'], default: 'GET'},\n\t\theaders: {type: 'object', description: 'HTTP headers to send', default: {}},\n\t\tbody: {type: 'object', description: 'HTTP body to send'},\n\t},\n\tfn: (args: {\n\t\turl: string;\n\t\tmethod: 'GET' | 'POST' | 'PUT' | 
'DELETE';\n\t\theaders: {[key: string]: string};\n\t\tbody: any;\n\t}) => new Http({url: args.url, headers: args.headers}).request({method: args.method || 'GET', body: args.body})\n}\n\nexport const JSTool: AiTool = {\n\tname: 'exec_javascript',\n\tdescription: 'Execute commonjs javascript',\n\targs: {\n\t\tcode: {type: 'string', description: 'CommonJS javascript', required: true}\n\t},\n\tfn: async (args: {code: string}) => {\n\t\tconst console = consoleInterceptor(null);\n\t\tconst resp = await Fn<any>({console}, args.code, true).catch((err: any) => console.output.error.push(err));\n\t\treturn {...console.output, return: resp, stdout: undefined, stderr: undefined};\n\t}\n}\n\nexport const PythonTool: AiTool = {\n\tname: 'exec_javascript',\n\tdescription: 'Execute commonjs javascript',\n\targs: {\n\t\tcode: {type: 'string', description: 'CommonJS javascript', required: true}\n\t},\n\tfn: async (args: {code: string}) => ({result: $Sync`python -c \"${args.code}\"`})\n}\n\nexport const SearchTool: AiTool = {\n\tname: 'search',\n\tdescription: 'Use a search engine to find relevant URLs, should be changed with fetch to scrape sources',\n\targs: {\n\t\tquery: {type: 'string', description: 'Search string', required: true},\n\t\tlength: {type: 'string', description: 'Number of results to return', default: 5},\n\t},\n\tfn: async (args: {\n\t\tquery: string;\n\t\tlength: number;\n\t}) => {\n\t\tconst html = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(args.query)}`, {\n\t\t\theaders: {\"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64)\", \"Accept-Language\": \"en-US,en;q=0.9\"}\n\t\t}).then(resp => resp.text());\n\t\tlet match, regex = /<a .*?href=\"(.+?)\".+?<\\/a>/g;\n\t\tconst results = new ASet<string>();\n\t\twhile((match = regex.exec(html)) !== null) {\n\t\t\tlet url = /uddg=(.+)&?/.exec(decodeURIComponent(match[1]))?.[1];\n\t\t\tif(url) url = decodeURIComponent(url);\n\t\t\tif(url) results.add(url);\n\t\t\tif(results.size >= (args.length || 5)) break;\n\t\t}\n\t\treturn 
results;\n\t}\n}\n"],"names":["LLMProvider","Anthropic","ai","apiToken","model","anthropic","history","i","orgI","c","h","message","options","controller","response","res","rej","requestParams","t","objectMap","key","value","resp","chunk","text","last","JSONAttemptParse","toolCalls","results","toolCall","tool","findByProp","result","JSONSanitize","err","Ollama","host","ollama","error","system","args","OpenAi","openAI","tools","tc","record","h2","LLM","max","min","keep","tokens","m","recent","process","Ai","path","abort","resolve","reject","output","proc","spawn","data","code","p","Path","fs","arr","buffer","worker","createWorker","target","searchTerms","vector","dimensions","char","index","cosineSimilarity","v1","v2","tensor1","tf","tensor2","dotProduct","magnitude1","magnitude2","v","similarities","refVector","acc","s","CliTool","$","DateTimeTool","ExecTool","JSTool","PythonTool","FetchTool","Http","console","consoleInterceptor","Fn","$Sync","SearchTool","html","match","regex","ASet","url"],"mappings":"6gCAIO,MAAeA,CAAY,CAElC,CCAO,MAAMC,UAAkBD,CAAY,CAG1C,YAA4BE,EAAwBC,EAAyBC,EAAe,CAC3F,MAAA,EAD2B,KAAA,GAAAF,EAAwB,KAAA,SAAAC,EAAyB,KAAA,MAAAC,EAE5E,KAAK,OAAS,IAAIC,EAAAA,UAAU,CAAC,OAAQF,EAAS,CAC/C,CALA,OAOQ,WAAWG,EAA8B,CAChD,QAAQC,EAAI,EAAGA,EAAID,EAAQ,OAAQC,IAAK,CACvC,MAAMC,EAAOD,EACV,OAAOD,EAAQE,CAAI,EAAE,SAAW,WAC/BF,EAAQE,CAAI,EAAE,MAAQ,YACxBF,EAAQE,CAAI,EAAE,QAAQ,OAAQC,GAAWA,EAAE,MAAO,UAAU,EAAE,QAASA,GAAW,CACjFF,IACAD,EAAQ,OAAOC,EAAG,EAAG,CAAC,KAAM,OAAQ,GAAIE,EAAE,GAAI,KAAMA,EAAE,KAAM,KAAMA,EAAE,MAAM,CAC3E,CAAC,EACQH,EAAQE,CAAI,EAAE,MAAQ,QAC/BF,EAAQE,CAAI,EAAE,QAAQ,OAAQC,GAAWA,EAAE,MAAO,aAAa,EAAE,QAASA,GAAW,CACpF,MAAMC,EAAIJ,EAAQ,KAAMI,GAAWA,EAAE,IAAMD,EAAE,WAAW,EACxDC,EAAED,EAAE,SAAW,QAAU,SAAS,EAAIA,EAAE,OACzC,CAAC,EAEFH,EAAQE,CAAI,EAAE,QAAUF,EAAQE,CAAI,EAAE,QAAQ,OAAQC,GAAWA,EAAE,MAAQ,MAAM,EAAE,IAAKA,GAAWA,EAAE,IAAI,EAAE,KAAK;AAAA;AAAA,CAAM,EAExH,CACA,OAAOH,EAAQ,OAAOI,GAAK,CAAC,CAACA,EAAE,OAAO,CACvC,CAEQ,aAAaJ,EAA8B,CAClD,QAAQC,EAAI,EAAGA,EAAID,EAAQ,OAAQC,IAClC,GAAGD,EAAQC,CAAC,EAAE,MAAQ,OAAQ,CAC7B,MAAMG,EAASJ,EAAQC,CAAC,EACxBD,EAAQ,OAAOC,EAAG,EACjB,CAAC,KAAM,YAAa,QAAS,CAAC,CAAC,KAAM,WAAY,GAAIG,EAAE,GAAI,KAAMA,EAAE,KAAM,MAAOA,EAAE,IAAA,CAAK,CAAA,EACvF,CAAC,KAAM,OAAQ,QAAS,CAAC,CAAC,KAAM,cAAe,YAAaA,EAAE,GAAI,SAAU,CAAC,CAACA,EAAE,MAAO,QAAUA,EAAE,OAASA,EAAE,QAAQ,CAAA,CAAC,EAExHH,GACD,CAED,OAAOD,CACR,CAEA,IAAIK,EAAiBC,EAAsB,GAAoC,CAC9E,MAAMC,EAAa,IAAI,gBACjBC,EAAW,IAAI,QAAa,MAAOC,EAAKC,IAAQ,CACrD,IAAIV,EAAU,KAAK,aAAa,CAAC,GAAGM,EAAQ,SAAW,CAAA,EAAI,CAAC,KAAM,OAAQ,QAASD,CAAA,CAAQ,CAAC,EACzFC,EAAQ,WAAUN,EAAU,MAAM,KAAK,GAAG,IAAI,SAAcA,EAASM,EAAQ,SAAS,IAAKA,EAAQ,SAAS,IAAKA,CAAO,GAC3H,MAAMK,EAAqB,CAC1B,MAAOL,EAAQ,OAAS,KAAK,MAC7B,WAAYA,EAAQ,YAAc,KAAK,GAAG,QAAQ,YAAc,KAChE,OAAQA,EAAQ,QAAU,KAAK,GAAG,QAAQ,QAAU,GACpD,YAAaA,EAAQ,aAAe,KAAK,GAAG,QAAQ,aAAe,GACnE,OAAQA,EAAQ,OAAS,KAAK,GAAG,QAAQ,OAAS,CAAA,GAAI,IAAIM,IAAM,CAC/D,KAAMA,EAAE,KACR,YAAaA,EAAE,YACf,aAAc,CACb,KAAM,SACN,WAAYA,EAAE,KAAOC,EAAAA,UAAUD,EAAE,KAAM,CAACE,EAAKC,KAAW,CAAC,GAAGA,EAAO,SAAU,MAAA,EAAW,EAAI,CAAA,EAC5F,SAAUH,EAAE,KAAO,OAAO,QAAQA,EAAE,IAAI,EAAE,OAAOA,GAAKA,EAAE,CAAC,EAAE,QAAQ,EAAE,IAAIA,GAAKA,EAAE,CAAC,CAAC,EAAI,CAAA,CAAC,EAExF,GAAI,MAAA,EACH,EACF,SAAUZ,EACV,OAAQ,CAAC,CAACM,EAAQ,MAAA,EAInB,IAAIU,EACJ,EAAG,CAIF,GAHAA,EAAO,MAAM,KAAK,OAAO,SAAS,OAAOL,CAAa,EAGnDL,EAAQ,OAAQ,CAClBU,EAAK,QAAU,CAAA,EACf,gBAAiBC,KAASD,EAAM,CAC/B,GAAGT,EAAW,OAAO,QAAS,MAC9B,GAAGU,EAAM,OAAS,sBACdA,EAAM,cAAc,OAAS,OAC/BD,EAAK,QAAQ,KAAK,CAAC,KAAM,OAAQ,KAAM,GAAG,EACjCC,EAAM,cAAc,OAAS,YACtCD,EAAK,QAAQ,KAAK,CAAC,KAAM,WAAY,GAAIC,EAAM,cAAc,GAAI,KAAMA,EAAM,cAAc,KAAM,MAAY,GAA
G,UAExGA,EAAM,OAAS,sBACxB,GAAGA,EAAM,MAAM,OAAS,aAAc,CACrC,MAAMC,EAAOD,EAAM,MAAM,KACzBD,EAAK,QAAQ,GAAG,EAAE,EAAE,MAAQE,EAC5BZ,EAAQ,OAAO,CAAC,KAAAY,EAAK,CACtB,MAAUD,EAAM,MAAM,OAAS,qBAC9BD,EAAK,QAAQ,GAAG,EAAE,EAAE,OAASC,EAAM,MAAM,sBAEjCA,EAAM,OAAS,qBAAsB,CAC9C,MAAME,EAAOH,EAAK,QAAQ,GAAG,EAAE,EAC5BG,EAAK,OAAS,OAAMA,EAAK,MAAQA,EAAK,MAAQC,EAAAA,iBAAiBD,EAAK,MAAO,CAAA,CAAE,EAAI,CAAA,EACrF,SAAUF,EAAM,OAAS,eACxB,KAEF,CACD,CAGA,MAAMI,EAAYL,EAAK,QAAQ,OAAQb,GAAWA,EAAE,OAAS,UAAU,EACvE,GAAGkB,EAAU,QAAU,CAACd,EAAW,OAAO,QAAS,CAClDP,EAAQ,KAAK,CAAC,KAAM,YAAa,QAASgB,EAAK,QAAQ,EACvD,MAAMM,EAAU,MAAM,QAAQ,IAAID,EAAU,IAAI,MAAOE,GAAkB,CACxE,MAAMC,EAAOlB,EAAQ,OAAO,KAAKmB,EAAAA,WAAW,OAAQF,EAAS,IAAI,CAAC,EAClE,GAAG,CAACC,EAAM,MAAO,CAAC,YAAaD,EAAS,GAAI,SAAU,GAAM,QAAS,gBAAA,EACrE,GAAI,CACH,MAAMG,EAAS,MAAMF,EAAK,GAAGD,EAAS,MAAO,KAAK,EAAE,EACpD,MAAO,CAAC,KAAM,cAAe,YAAaA,EAAS,GAAI,QAASI,eAAaD,CAAM,CAAA,CACpF,OAASE,EAAU,CAClB,MAAO,CAAC,KAAM,cAAe,YAAaL,EAAS,GAAI,SAAU,GAAM,QAASK,GAAK,SAAWA,GAAK,SAAA,GAAc,SAAA,CACpH,CACD,CAAC,CAAC,EACF5B,EAAQ,KAAK,CAAC,KAAM,OAAQ,QAASsB,EAAQ,EAC7CX,EAAc,SAAWX,CAC1B,CACD,OAAS,CAACO,EAAW,OAAO,SAAWS,EAAK,QAAQ,KAAMb,GAAWA,EAAE,OAAS,UAAU,GACvFG,EAAQ,QAAQA,EAAQ,OAAO,CAAC,KAAM,GAAK,EAC9CG,EAAI,KAAK,WAAW,CAAC,GAAGT,EAAS,CAChC,KAAM,YACN,QAASgB,EAAK,QAAQ,OAAQb,GAAWA,EAAE,MAAQ,MAAM,EAAE,IAAKA,GAAWA,EAAE,IAAI,EAAE,KAAK;AAAA;AAAA,CAAM,CAAA,CAC9F,CAAC,CAAC,CACJ,CAAC,EACD,OAAO,OAAO,OAAOK,EAAU,CAAC,MAAO,IAAMD,EAAW,MAAA,EAAQ,CACjE,CACD,CC9HO,MAAMsB,UAAenC,CAAY,CAGvC,YAA4BE,EAAekC,EAAqBhC,EAAe,CAC9E,MAAA,EAD2B,KAAA,GAAAF,EAAe,KAAA,KAAAkC,EAAqB,KAAA,MAAAhC,EAE/D,KAAK,OAAS,IAAIiC,SAAO,CAAC,KAAAD,EAAK,CAChC,CALA,OAOQ,WAAW9B,EAA8B,CAChD,QAAQC,EAAI,EAAGA,EAAID,EAAQ,OAAQC,IAClC,GAAGD,EAAQC,CAAC,EAAE,MAAQ,aAAeD,EAAQC,CAAC,EAAE,WAC5CD,EAAQC,CAAC,EAAE,QAAS,OAAOD,EAAQC,CAAC,EAAE,YAExCD,EAAQ,OAAOC,EAAG,CAAC,EACnBA,aAEQD,EAAQC,CAAC,EAAE,MAAQ,OAAQ,CACpC,MAAM+B,EAAQhC,EAAQC,CAAC,EAAE,QAAQ,WAAW,WAAW,EACvDD,EAAQC,CAAC,EAAI,CAAC,KAAM,OAAQ,KAAMD,EAAQC,CAAC,EAAE,UAAW,KAAMD,EAAQC,CAAC,EAAE,KAAM,CAAC+B,EAAQ,QAAU,SAAS,EAAGhC,EAAQC,CAAC,EAAE,OAAA,CAC1H,CAED,OAAOD,CACR,CAEQ,aAAaA,EAA8B,CAClD,OAAOA,EAAQ,IAAKI,GAChBA,EAAE,MAAQ,OAAeA,EACrB,CAAC,KAAM,OAAQ,UAAWA,EAAE,KAAM,QAASA,EAAE,OAASA,EAAE,OAAA,CAC/D,CACF,CAEA,IAAIC,EAAiBC,EAAsB,GAAoC,CAC9E,MAAMC,EAAa,IAAI,gBACjBC,EAAW,IAAI,QAAa,MAAOC,EAAKC,IAAQ,CACrD,IAAIuB,EAAS3B,EAAQ,QAAU,KAAK,GAAG,QAAQ,OAC3CN,EAAU,KAAK,aAAa,CAAC,GAAGM,EAAQ,SAAW,CAAA,EAAI,CAAC,KAAM,OAAQ,QAASD,CAAA,CAAQ,CAAC,EACzFL,EAAQ,CAAC,EAAE,MAAQ,WACjBiC,IACS,MAAA,EADDA,EAASjC,EAAQ,MAAA,GAG3BM,EAAQ,WAAUN,EAAU,MAAM,KAAK,GAAG,IAAI,SAAcA,EAASM,EAAQ,SAAS,IAAKA,EAAQ,SAAS,GAAG,GAC/GA,EAAQ,QAAQN,EAAQ,QAAQ,CAAC,KAAM,SAAU,QAASiC,EAAO,EAEpE,MAAMtB,EAAqB,CAC1B,MAAOL,EAAQ,OAAS,KAAK,MAC7B,SAAUN,EACV,OAAQ,CAAC,CAACM,EAAQ,OAClB,OAAQC,EAAW,OACnB,QAAS,CACR,YAAaD,EAAQ,aAAe,KAAK,GAAG,QAAQ,aAAe,GACnE,YAAaA,EAAQ,YAAc,KAAK,GAAG,QAAQ,YAAc,IAAA,EAElE,OAAQA,EAAQ,OAAS,KAAK,GAAG,QAAQ,OAAS,CAAA,GAAI,IAAIM,IAAM,CAC/D,KAAM,WACN,SAAU,CACT,KAAMA,EAAE,KACR,YAAaA,EAAE,YACf,WAAY,CACX,KAAM,SACN,WAAYA,EAAE,KAAOC,EAAAA,UAAUD,EAAE,KAAM,CAACE,EAAKC,KAAW,CAAC,GAAGA,EAAO,SAAU,MAAA,EAAW,EAAI,CAAA,EAC5F,SAAUH,EAAE,KAAO,OAAO,QAAQA,EAAE,IAAI,EAAE,OAAOA,GAAKA,EAAE,CAAC,EAAE,QAAQ,EAAE,IAAIA,GAAKA,EAAE,CAAC,CAAC,EAAI,CAAA,CAAC,CACxF,CACD,EACC,CAAA,EAIH,IAAII,EACJ,EAAG,CAEF,GADAA,EAAO,MAAM,KAAK,OAAO,KAAKL,CAAa,EACxCL,EAAQ,OAAQ,CAClBU,EAAK,QAAU,CAAC,KAAM,YAAa,QAAS,GAAI,WAAY,EAAC,EAC7D,gBAAiBC,KAASD,EAOzB,GANGT,EAAW,OAAO,UAClBU,EAAM,SAAS,UACjBD,EAAK,QAAQ,SAAWC,EAAM,QAAQ,QACtCX,EAAQ,OAAO,CAAC,KAAMW,EAAM,QAAQ,QAAQ,GAE1CA,EAAM,SAAS,eAAiB,QAAQ,WAAaA,EAAM,QAAQ,YACnEA,EAAM,MAAM,KAEjB,C
AGA,GAAGD,EAAK,SAAS,YAAY,QAAU,CAACT,EAAW,OAAO,QAAS,CAClEP,EAAQ,KAAKgB,EAAK,OAAO,EACzB,MAAMM,EAAU,MAAM,QAAQ,IAAIN,EAAK,QAAQ,WAAW,IAAI,MAAOO,GAAkB,CACtF,MAAMC,GAAQlB,EAAQ,OAAS,KAAK,GAAG,QAAQ,QAAQ,KAAKmB,EAAAA,WAAW,OAAQF,EAAS,SAAS,IAAI,CAAC,EACtG,GAAG,CAACC,EAAM,MAAO,CAAC,KAAM,OAAQ,UAAWD,EAAS,SAAS,KAAM,QAAS,6BAAA,EAC5E,MAAMW,EAAO,OAAOX,EAAS,SAAS,WAAc,SAAWH,EAAAA,iBAAiBG,EAAS,SAAS,UAAW,CAAA,CAAE,EAAIA,EAAS,SAAS,UACrI,GAAI,CACH,MAAMG,EAAS,MAAMF,EAAK,GAAGU,EAAM,KAAK,EAAE,EAC1C,MAAO,CAAC,KAAM,OAAQ,UAAWX,EAAS,SAAS,KAAM,KAAAW,EAAM,QAASP,EAAAA,aAAaD,CAAM,CAAA,CAC5F,OAASE,EAAU,CAClB,MAAO,CAAC,KAAM,OAAQ,UAAWL,EAAS,SAAS,KAAM,KAAAW,EAAM,QAASP,EAAAA,aAAa,CAAC,MAAOC,GAAK,SAAWA,GAAK,YAAc,SAAA,CAAU,CAAA,CAC3I,CACD,CAAC,CAAC,EACF5B,EAAQ,KAAK,GAAGsB,CAAO,EACvBX,EAAc,SAAWX,CAC1B,CACD,OAAS,CAACO,EAAW,OAAO,SAAWS,EAAK,SAAS,YAAY,QAC9DV,EAAQ,QAAQA,EAAQ,OAAO,CAAC,KAAM,GAAK,EAC9CG,EAAI,KAAK,WAAW,CAAC,GAAGT,EAAS,CAAC,KAAM,YAAa,QAASgB,EAAK,SAAS,OAAA,CAAQ,CAAC,CAAC,CACvF,CAAC,EACD,OAAO,OAAO,OAAOR,EAAU,CAAC,MAAO,IAAMD,EAAW,MAAA,EAAQ,CACjE,CACD,CC1GO,MAAM4B,UAAezC,CAAY,CAGvC,YAA4BE,EAAwBC,EAAyBC,EAAe,CAC3F,MAAA,EAD2B,KAAA,GAAAF,EAAwB,KAAA,SAAAC,EAAyB,KAAA,MAAAC,EAE5E,KAAK,OAAS,IAAIsC,EAAAA,OAAO,CAAC,OAAQvC,EAAS,CAC5C,CALA,OAOQ,WAAWG,EAA8B,CAChD,QAAQC,EAAI,EAAGA,EAAID,EAAQ,OAAQC,IAAK,CACvC,MAAMG,EAAIJ,EAAQC,CAAC,EACnB,GAAGG,EAAE,OAAS,aAAeA,EAAE,WAAY,CAC1C,MAAMiC,EAAQjC,EAAE,WAAW,IAAKkC,IAAa,CAC5C,KAAM,OACN,GAAIA,EAAG,GACP,KAAMA,EAAG,SAAS,KAClB,KAAMlB,EAAAA,iBAAiBkB,EAAG,SAAS,UAAW,CAAA,CAAE,CAAA,EAC/C,EACFtC,EAAQ,OAAOC,EAAG,EAAG,GAAGoC,CAAK,EAC7BpC,GAAKoC,EAAM,OAAS,CACrB,SAAUjC,EAAE,OAAS,QAAUA,EAAE,QAAS,CACzC,MAAMmC,EAASvC,EAAQ,QAAWI,EAAE,cAAgBoC,EAAG,EAAE,EACtDD,IACCnC,EAAE,QAAQ,SAAS,UAAU,EAAGmC,EAAO,MAAQnC,EAAE,QAC/CmC,EAAO,QAAUnC,EAAE,SAEzBJ,EAAQ,OAAOC,EAAG,CAAC,EACnBA,GACD,CAED,CACA,OAAOD,CACR,CAEQ,aAAaA,EAA8B,CAClD,OAAOA,EAAQ,OAAO,CAAC0B,EAAQtB,KAC3BA,EAAE,OAAS,OACbsB,EAAO,KAAK,CACX,KAAM,YACN,QAAS,KACT,WAAY,CAAC,CAAE,GAAItB,EAAE,GAAI,KAAM,WAAY,SAAU,CAAE,KAAMA,EAAE,KAAM,UAAW,KAAK,UAAUA,EAAE,IAAI,CAAA,EAAK,EAC1G,QAAS,KACT,YAAa,CAAA,CAAC,EACZ,CACF,KAAM,OACN,aAAcA,EAAE,GAChB,QAASA,EAAE,OAASA,EAAE,OAAA,CACtB,EAEDsB,EAAO,KAAKtB,CAAC,EAEPsB,GACL,CAAA,CAAW,CACf,CAEA,IAAIrB,EAAiBC,EAAsB,GAAoC,CAC9E,MAAMC,EAAa,IAAI,gBACjBC,EAAW,IAAI,QAAa,MAAOC,EAAKC,IAAQ,CACrD,IAAIV,EAAU,KAAK,aAAa,CAAC,GAAGM,EAAQ,SAAW,CAAA,EAAI,CAAC,KAAM,OAAQ,QAASD,CAAA,CAAQ,CAAC,EACzFC,EAAQ,WAAUN,EAAU,MAAM,KAAK,GAAG,IAAI,SAAcA,EAASM,EAAQ,SAAS,IAAKA,EAAQ,SAAS,IAAKA,CAAO,GAE3H,MAAMK,EAAqB,CAC1B,MAAOL,EAAQ,OAAS,KAAK,MAC7B,SAAUN,EACV,OAAQ,CAAC,CAACM,EAAQ,OAClB,WAAYA,EAAQ,YAAc,KAAK,GAAG,QAAQ,YAAc,KAChE,YAAaA,EAAQ,aAAe,KAAK,GAAG,QAAQ,aAAe,GACnE,OAAQA,EAAQ,OAAS,KAAK,GAAG,QAAQ,OAAS,CAAA,GAAI,IAAIM,IAAM,CAC/D,KAAM,WACN,SAAU,CACT,KAAMA,EAAE,KACR,YAAaA,EAAE,YACf,WAAY,CACX,KAAM,SACN,WAAYA,EAAE,KAAOC,EAAAA,UAAUD,EAAE,KAAM,CAACE,EAAKC,KAAW,CAAC,GAAGA,EAAO,SAAU,MAAA,EAAW,EAAI,CAAA,EAC5F,SAAUH,EAAE,KAAO,OAAO,QAAQA,EAAE,IAAI,EAAE,OAAOA,GAAKA,EAAE,CAAC,EAAE,QAAQ,EAAE,IAAIA,GAAKA,EAAE,CAAC,CAAC,EAAI,CAAA,CAAC,CACxF,CACD,EACC,CAAA,EAIH,IAAII,EACJ,EAAG,CAIF,GAHAA,EAAO,MAAM,KAAK,OAAO,KAAK,YAAY,OAAOL,CAAa,EAG3DL,EAAQ,OAAQ,CAClBU,EAAK,QAAU,CAAA,EACf,gBAAiBC,KAASD,EAAM,CAC/B,GAAGT,EAAW,OAAO,QAAS,MAC3BU,EAAM,QAAQ,CAAC,EAAE,MAAM,SACzBX,EAAQ,OAAO,CAAC,KAAMW,EAAM,QAAQ,CAAC,EAAE,MAAM,QAAQ,CAEvD,CACD,CAGA,MAAMI,EAAYL,EAAK,QAAQ,CAAC,EAAE,QAAQ,YAAc,CAAA,EACxD,GAAGK,EAAU,QAAU,CAACd,EAAW,OAAO,QAAS,CAClDP,EAAQ,KAAKgB,EAAK,QAAQ,CAAC,EAAE,OAAO,EACpC,MAAMM,EAAU,MAAM,QAAQ,IAAID,EAAU,IAAI,MAAOE,GAAkB,CACxE,MAAMC,EAAOlB,EAAQ,OAAO,KAAKmB,aAAW,OAAQF,EAAS,SAAS,IAAI,CAAC,EAC3E,GAAG,CAACC,EAAM,MAAO,CAAC,KAAM
,OAAQ,aAAcD,EAAS,GAAI,QAAS,6BAAA,EACpE,GAAI,CACH,MAAMW,EAAOd,EAAAA,iBAAiBG,EAAS,SAAS,UAAW,CAAA,CAAE,EACvDG,EAAS,MAAMF,EAAK,GAAGU,EAAM,KAAK,EAAE,EAC1C,MAAO,CAAC,KAAM,OAAQ,aAAcX,EAAS,GAAI,QAASI,eAAaD,CAAM,CAAA,CAC9E,OAASE,EAAU,CAClB,MAAO,CAAC,KAAM,OAAQ,aAAcL,EAAS,GAAI,QAASI,EAAAA,aAAa,CAAC,MAAOC,GAAK,SAAWA,GAAK,YAAc,SAAA,CAAU,CAAA,CAC7H,CACD,CAAC,CAAC,EACF5B,EAAQ,KAAK,GAAGsB,CAAO,EACvBX,EAAc,SAAWX,CAC1B,CACD,OAAS,CAACO,EAAW,OAAO,SAAWS,EAAK,UAAU,CAAC,GAAG,SAAS,YAAY,QAE5EV,EAAQ,QAAQA,EAAQ,OAAO,CAAC,KAAM,GAAK,EAC9CG,EAAI,KAAK,WAAW,CAAC,GAAGT,EAAS,CAAC,KAAM,YAAa,QAASgB,EAAK,QAAQ,CAAC,EAAE,QAAQ,SAAW,EAAA,CAAG,CAAC,CAAC,CACvG,CAAC,EAED,OAAO,OAAO,OAAOR,EAAU,CAAC,MAAO,IAAMD,EAAW,MAAA,EAAQ,CACjE,CACD,CCnDO,MAAMkC,CAAI,CAGhB,YAA4B7C,EAAwBU,EAAqB,CAA7C,KAAA,GAAAV,EAAwB,KAAA,QAAAU,EAChDA,EAAQ,WAAW,QAAO,KAAK,UAAU,UAAY,IAAIX,EAAU,KAAK,GAAIW,EAAQ,UAAU,MAAOA,EAAQ,UAAU,KAAK,GAC5HA,EAAQ,QAAQ,OAAM,KAAK,UAAU,OAAS,IAAIuB,EAAO,KAAK,GAAIvB,EAAQ,OAAO,KAAMA,EAAQ,OAAO,KAAK,GAC3GA,EAAQ,QAAQ,QAAO,KAAK,UAAU,OAAS,IAAI6B,EAAO,KAAK,GAAI7B,EAAQ,OAAO,MAAOA,EAAQ,OAAO,KAAK,EACjH,CANQ,UAA0C,CAAA,EAclD,IAAID,EAAiBC,EAAsB,GAAoC,CAC9E,IAAIR,EAAa,CAAC,KAAM,IAAI,EAS5B,GARGQ,EAAQ,QACP,OAAOA,EAAQ,OAAS,WAAkBA,EAAQ,MAChDR,EAAQ,CAACQ,EAAQ,MAAa,KAAK,QAASA,EAAQ,KAAK,GAAG,KAAK,IAEpE,CAACA,EAAQ,OAASR,EAAM,CAAC,GAAK,QAC7B,OAAO,KAAK,QAAQ,OAAS,SAAUA,EAAQ,KAAK,QAAQ,MAC1DA,EAAQ,CAAC,KAAK,QAAQ,MAAa,KAAK,QAAS,KAAK,QAAQ,KAAK,GAAG,KAAK,GAE9E,CAACA,EAAM,CAAC,GAAK,CAACA,EAAM,CAAC,EAAG,MAAM,IAAI,MAAM,kCAAkCA,EAAM,CAAC,CAAC,MAAMA,EAAM,CAAC,CAAC,EAAE,EACrG,OAAO,KAAK,UAAUA,EAAM,CAAC,CAAC,EAAE,IAAIO,EAAS,CAAC,GAAGC,EAAS,MAAOR,EAAM,CAAC,EAAE,CAC3E,CAUA,MAAM,SAASE,EAAuB0C,EAAaC,EAAarC,EAA6C,CAC5G,GAAG,KAAK,eAAeN,CAAO,EAAI0C,EAAK,OAAO1C,EAC9C,IAAI4C,EAAO,EAAGC,EAAS,EACvB,QAAQC,KAAK9C,EAAQ,aAEpB,GADA6C,GAAU,KAAK,eAAeC,EAAE,OAAO,EACpCD,EAASF,EAAKC,QACZ,OAEN,GAAG5C,EAAQ,QAAU4C,EAAM,OAAO5C,EAClC,MAAM+C,EAASH,GAAQ,EAAI,CAAA,EAAK5C,EAAQ,MAAM,CAAC4C,CAAI,EAClDI,GAAWJ,GAAQ,EAAI5C,EAAUA,EAAQ,MAAM,EAAG,CAAC4C,CAAI,GAAG,OAAOxC,GAAKA,EAAE,OAAS,aAAeA,EAAE,OAAS,MAAM,EAElH,MAAO,CAAC,CAAC,KAAM,YAAa,QAAS,yBADrB,MAAM,KAAK,UAAU4C,EAAQ,OAAS,GAAGF,EAAE,IAAI,KAAKA,EAAE,OAAO,EAAE,EAAE,KAAK;AAAA;AAAA,CAAM,EAAG,IAAKxC,CAAO,CACtC,IAAK,GAAGyC,CAAM,CACpF,CAOA,eAAe/C,EAAsB,CACpC,MAAMkB,EAAO,KAAK,UAAUlB,CAAO,EACnC,OAAO,KAAK,KAAMkB,EAAK,OAAS,EAAK,GAAG,CACzC,CAQA,MAAM,KAAKb,EAAiBC,EAAsB,CACjD,IAAIU,EAAO,MAAM,KAAK,IAAIX,EAAS,CAClC,OAAQ,4BACR,GAAGC,CAAA,CACH,EACD,OAAIU,IAAO,CAAC,GAAG,QACRI,mBAAiB,IAAI,OAAO,SAAa,EAAE,KAAKJ,EAAK,CAAC,EAAE,OAAO,EAAG,EAAE,EAD5C,CAAA,CAEhC,CASA,UAAUE,EAAc2B,EAAgBvC,EAA8C,CACrF,OAAO,KAAK,IAAIY,EAAM,CAAC,OAAQ,+BAA+B2B,CAAM,+BAAgC,YAAa,GAAK,GAAGvC,CAAA,CAAQ,EAC/H,KAAKN,GAAmBA,EAAQ,IAAA,GAAO,SAAW,IAAI,CACzD,CACD,CCpJO,MAAMiD,CAAG,CAOf,YAA4B3C,EAAoB,CAApB,KAAA,QAAAA,EAC3B,KAAK,IAAM,IAAImC,EAAI,KAAMnC,CAAO,EAC7B,KAAK,QAAQ,SAAS,SACxB,KAAK,aAAe,KAAK,QAAQ,SAAS,MAAM,SAAS,MAAM,EAAI,KAAK,QAAQ,SAAS,MAAQ,KAAK,QAAQ,SAAS,MAAQ,OAC/H,KAAK,iBAAA,EAEP,CAZQ,UAA8C,CAAA,EAC9C,aAGR,IAgBA,IAAI4C,EAAcpD,EAAgB,KAAK,aAAqE,CAC3G,GAAG,CAAC,KAAK,QAAQ,SAAS,OAAQ,MAAM,IAAI,MAAM,wBAAwB,EAC1E,IAAIqD,EAAa,IAAM,CAAC,EAcxB,MAAO,CAAC,SAbS,IAAI,QAAuB,CAACC,EAASC,IAAW,CAChE,KAAK,iBAAiBvD,CAAK,EAAE,KAAKgD,GAAK,CACtC,IAAIQ,EAAS,GACb,MAAMC,EAAOC,EAAAA,MAAc,KAAK,QAAQ,SAAS,OAAQ,CAAC,MAAO,MAAO,KAAMV,EAAG,KAAMI,CAAI,EAAG,CAAC,MAAO,CAAC,SAAU,OAAQ,QAAQ,EAAE,EACnIC,EAAQ,IAAMI,EAAK,KAAK,SAAS,EACjCA,EAAK,GAAG,QAAU3B,GAAeyB,EAAOzB,CAAG,CAAC,EAC5C2B,EAAK,OAAO,GAAG,OAASE,GAAiBH,GAAUG,EAAK,UAAU,EAClEF,EAAK,GAAG,QAAUG,GAAiB,CAC/BA,IAAS,EAAGN,EAAQE,EAAO,KAAA,GAAU,IAAI,IAChC,IAAI,MAAM,aAAaI,CAAI,EAAE,CAAC,CAC3C,CAAC,CACF,CAAC
,CACF,CAAC,EACiB,MAAAP,CAAA,CACnB,CAQA,MAAM,iBAAiBrD,EAAgB,KAAK,aAA+B,CAC1E,GAAG,CAAC,KAAK,QAAQ,SAAS,OAAQ,MAAM,IAAI,MAAM,wBAAwB,EACtEA,EAAM,SAAS,MAAM,IAAGA,GAAS,QACrC,MAAM6D,EAAIC,EAAK,KAAK,KAAK,QAAQ,QAAQ,KAAM9D,CAAK,EACpD,OAAG,MAAM+D,EAAG,KAAKF,CAAC,EAAE,KAAK,IAAM,EAAI,EAAE,MAAM,IAAM,EAAK,EAAUA,EAC3D,KAAK,UAAU7D,CAAK,EAAU,KAAK,UAAUA,CAAK,GACvD,KAAK,UAAUA,CAAK,EAAI,MAAM,6DAA6DA,CAAK,EAAE,EAChG,KAAKkB,GAAQA,EAAK,aAAa,EAC/B,KAAK8C,GAAO,OAAO,KAAKA,CAAG,CAAC,EAAE,KAAK,MAAMC,IACzC,MAAMF,EAAG,UAAUF,EAAGI,CAAM,EAC5B,OAAO,KAAK,UAAUjE,CAAK,EACpB6D,EACP,EACK,KAAK,UAAU7D,CAAK,EAC5B,CAOA,IAAIoD,EAAqE,CACxE,IAAIc,EACJ,MAAO,CACN,MAAO,IAAM,CAAEA,GAAQ,UAAA,CAAa,EACpC,SAAU,IAAI,QAAQ,MAAMvD,GAAO,CAClCuD,EAAS,MAAMC,EAAAA,aAAa,KAAK,EACjC,KAAM,CAAC,KAAAR,CAAA,EAAQ,MAAMO,EAAO,UAAUd,CAAI,EAC1C,MAAMc,EAAO,UAAA,EACbvD,EAAIgD,EAAK,KAAK,KAAA,GAAU,IAAI,CAC7B,CAAC,CAAA,CAEH,CAQA,mBAAmBS,KAAmBC,EAAuB,CAC5D,GAAGA,EAAY,OAAS,EAAG,MAAM,IAAI,MAAM,wCAAwC,EAEnF,MAAMC,EAAS,CAAClD,EAAcmD,EAAqB,KAC3CnD,EAAK,cAAc,MAAM,EAAE,EAAE,IAAI,CAACoD,EAAMC,IAC7CD,EAAK,WAAW,CAAC,GAAKC,EAAQ,GAAMF,EAAaA,CAAU,EAAE,MAAM,EAAGA,CAAU,EAG7EG,EAAmB,CAACC,EAAcC,IAAyB,CAChE,GAAID,EAAG,SAAWC,EAAG,OAAQ,MAAM,IAAI,MAAM,6BAA6B,EAC1E,MAAMC,EAAUC,EAAG,SAASH,CAAE,EAAGI,EAAUD,EAAG,SAASF,CAAE,EACnDI,EAAaF,EAAG,IAAID,EAASE,CAAO,EACpCE,EAAaH,EAAG,KAAKD,CAAO,EAC5BK,EAAaJ,EAAG,KAAKC,CAAO,EAClC,OAAGE,EAAW,WAAW,CAAC,IAAM,GAAKC,EAAW,WAAW,CAAC,IAAM,EAAU,EACrEF,EAAW,SAAA,EAAW,CAAC,GAAKC,EAAW,WAAW,CAAC,EAAIC,EAAW,SAAA,EAAW,CAAC,EACtF,EAEMC,EAAIb,EAAOF,CAAM,EACjBgB,EAAef,EAAY,IAAIvD,GAAKwD,EAAOxD,CAAC,CAAC,EAAE,IAAIuE,GAAaX,EAAiBS,EAAGE,CAAS,CAAC,EACpG,MAAO,CAAC,IAAKD,EAAa,OAAO,CAACE,EAAKC,IAAMD,EAAMC,EAAG,CAAC,EAAIH,EAAa,OAAQ,IAAK,KAAK,IAAI,GAAGA,CAAY,EAAG,aAAAA,CAAA,CACjH,CACD,CC1FO,MAAMI,EAAkB,CAC9B,KAAM,MACN,YAAa,qDACb,KAAM,CAAC,QAAS,CAAC,KAAM,SAAU,YAAa,iBAAkB,SAAU,GAAI,EAC9E,GAAKpD,GAA4BqD,EAAAA,IAAIrD,EAAK,OAAO,EAClD,EAEasD,EAAuB,CACnC,KAAM,eACN,YAAa,4BACb,KAAM,CAAA,EACN,GAAI,SAAY,IAAI,KAAA,EAAO,YAAA,CAC5B,EAEaC,EAAmB,CAC/B,KAAM,OACN,YAAa,mBACb,KAAM,CACL,SAAU,CAAC,KAAM,SAAU,YAAa,qBAAsB,KAAM,CAAC,MAAO,OAAQ,QAAQ,EAAG,SAAU,EAAA,EACzG,KAAM,CAAC,KAAM,SAAU,YAAa,kBAAmB,SAAU,EAAA,CAAI,EAEtE,GAAI,MAAOvD,EAAMtC,IAAO,CACvB,GAAI,CACH,OAAOsC,EAAK,KAAA,CACX,IAAK,OACJ,OAAO,MAAMoD,EAAQ,GAAG,CAAC,QAASpD,EAAK,IAAA,EAAOtC,CAAE,EACjD,IAAK,OACJ,OAAO,MAAM8F,EAAO,GAAG,CAAC,KAAMxD,EAAK,IAAA,EAAOtC,CAAE,EAC7C,IAAK,SACJ,OAAO,MAAM+F,EAAW,GAAG,CAAC,KAAMzD,EAAK,IAAA,EAAOtC,CAAE,CACjD,CAEF,OAAQgC,EAAU,CACjB,MAAO,CAAC,MAAOA,GAAK,SAAWA,EAAI,UAAS,CAC7C,CACD,CACD,EAEagE,EAAoB,CAChC,KAAM,QACN,YAAa,2BACb,KAAM,CACL,IAAK,CAAC,KAAM,SAAU,YAAa,eAAgB,SAAU,EAAA,EAC7D,OAAQ,CAAC,KAAM,SAAU,YAAa,qBAAsB,KAAM,CAAC,MAAO,OAAQ,MAAO,QAAQ,EAAG,QAAS,KAAA,EAC7G,QAAS,CAAC,KAAM,SAAU,YAAa,uBAAwB,QAAS,EAAC,EACzE,KAAM,CAAC,KAAM,SAAU,YAAa,mBAAA,CAAmB,EAExD,GAAK1D,GAKC,IAAI2D,EAAAA,KAAK,CAAC,IAAK3D,EAAK,IAAK,QAASA,EAAK,QAAQ,EAAE,QAAQ,CAAC,OAAQA,EAAK,QAAU,MAAO,KAAMA,EAAK,IAAA,CAAK,CAC/G,EAEawD,EAAiB,CAC7B,KAAM,kBACN,YAAa,8BACb,KAAM,CACL,KAAM,CAAC,KAAM,SAAU,YAAa,sBAAuB,SAAU,EAAA,CAAI,EAE1E,GAAI,MAAOxD,GAAyB,CACnC,MAAM4D,EAAUC,EAAAA,mBAAmB,IAAI,EACjC/E,EAAO,MAAMgF,KAAQ,CAAC,QAAAF,CAAA,EAAU5D,EAAK,KAAM,EAAI,EAAE,MAAON,GAAakE,EAAQ,OAAO,MAAM,KAAKlE,CAAG,CAAC,EACzG,MAAO,CAAC,GAAGkE,EAAQ,OAAQ,OAAQ9E,EAAM,OAAQ,OAAW,OAAQ,MAAA,CACrE,CACD,EAEa2E,EAAqB,CACjC,KAAM,kBACN,YAAa,8BACb,KAAM,CACL,KAAM,CAAC,KAAM,SAAU,YAAa,sBAAuB,SAAU,EAAA,CAAI,EAE1E,GAAI,MAAOzD,IAA0B,CAAC,OAAQ+D,EAAAA,mBAAmB/D,EAAK,IAAI,GAAA,EAC3E,EAEagE,EAAqB,CACjC,KAAM,SACN,YAAa,4FACb,KAAM,CACL,MAAO,CAAC,KAAM,SAAU,YAAa,gBAAiB,SAAU,EAAA,EAChE,OAAQ,CAAC,KAAM,SAAU,YAAa,8BAA+B,QAAS,CAA
A,CAAC,EAEhF,GAAI,MAAOhE,GAGL,CACL,MAAMiE,EAAO,MAAM,MAAM,uCAAuC,mBAAmBjE,EAAK,KAAK,CAAC,GAAI,CACjG,QAAS,CAAC,aAAc,4CAA6C,kBAAmB,gBAAA,CAAgB,CACxG,EAAE,KAAKlB,GAAQA,EAAK,MAAM,EAC3B,IAAIoF,EAAOC,EAAQ,8BACnB,MAAM/E,EAAU,IAAIgF,OACpB,MAAOF,EAAQC,EAAM,KAAKF,CAAI,KAAO,MAAM,CAC1C,IAAII,EAAM,iBAAiB,KAAK,mBAAmBH,EAAM,CAAC,CAAC,CAAC,IAAI,CAAC,EAGjE,GAFGG,IAAKA,EAAM,mBAAmBA,CAAG,GACjCA,GAAKjF,EAAQ,IAAIiF,CAAG,EACpBjF,EAAQ,OAASY,EAAK,QAAU,GAAI,KACxC,CACA,OAAOZ,CACR,CACD"}
|
|
1
|
+
{"version":3,"file":"index.js","sources":["../src/provider.ts","../src/antrhopic.ts","../src/ollama.ts","../src/open-ai.ts","../src/llm.ts","../src/ai.ts","../src/tools.ts"],"sourcesContent":["import {LLMMessage, LLMOptions, LLMRequest} from './llm.ts';\n\nexport type AbortablePromise<T> = Promise<T> & {abort: () => void};\n\nexport abstract class LLMProvider {\n\tabstract ask(message: string, options: LLMRequest): AbortablePromise<LLMMessage[]>;\n}\n","import {Anthropic as anthropic} from '@anthropic-ai/sdk';\nimport {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {LLMMessage, LLMRequest} from './llm.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\n\nexport class Anthropic extends LLMProvider {\n\tclient!: anthropic;\n\n\tconstructor(public readonly ai: Ai, public readonly apiToken: string, public model: string) {\n\t\tsuper();\n\t\tthis.client = new anthropic({apiKey: apiToken});\n\t}\n\n\tprivate toStandard(history: any[]): LLMMessage[] {\n\t\tconst merged: any[] = [];\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tconst msg = history[i];\n\t\t\tif(typeof msg.content != 'string') {\n\t\t\t\tif(msg.role == 'assistant') {\n\t\t\t\t\tmsg.content.filter((c: any) => c.type == 'tool_use').forEach((c: any) => {\n\t\t\t\t\t\tmerged.push({role: 'tool', id: c.id, name: c.name, args: c.input});\n\t\t\t\t\t});\n\t\t\t\t} else if(msg.role == 'user') {\n\t\t\t\t\tmsg.content.filter((c: any) => c.type == 'tool_result').forEach((c: any) => {\n\t\t\t\t\t\tconst h = merged.find((h: any) => h.id == c.tool_use_id);\n\t\t\t\t\t\tif(h) h[c.is_error ? 'error' : 'content'] = c.content;\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\tmsg.content = msg.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\\n\\n');\n\t\t\t}\n\t\t\tif(msg.content) {\n\t\t\t\tconst last = merged.at(-1);\n\t\t\t\tif(last && last.role == 'assistant' && msg.role == 'assistant') last.content += '\\n\\n' + msg.content;\n\t\t\t\telse merged.push({role: msg.role, content: msg.content});\n\t\t\t}\n\t\t}\n\t\treturn merged;\n\t}\n\n\tprivate fromStandard(history: LLMMessage[]): any[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tif(history[i].role == 'tool') {\n\t\t\t\tconst h: any = history[i];\n\t\t\t\thistory.splice(i, 1,\n\t\t\t\t\t{role: 'assistant', content: [{type: 'tool_use', id: h.id, name: h.name, input: h.args}]},\n\t\t\t\t\t{role: 'user', content: [{type: 'tool_result', tool_use_id: h.id, is_error: !!h.error, content: h.error || h.content}]}\n\t\t\t\t)\n\t\t\t\ti++;\n\t\t\t}\n\t\t}\n\t\treturn history;\n\t}\n\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tconst controller = new AbortController();\n\t\tconst response = new Promise<any>(async (res, rej) => {\n\t\t\tlet history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);\n\t\t\tif(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);\n\t\t\tconst requestParams: any = {\n\t\t\t\tmodel: options.model || this.model,\n\t\t\t\tmax_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,\n\t\t\t\tsystem: options.system || this.ai.options.system || '',\n\t\t\t\ttemperature: options.temperature || this.ai.options.temperature || 0.7,\n\t\t\t\ttools: (options.tools || this.ai.options.tools || []).map(t => ({\n\t\t\t\t\tname: t.name,\n\t\t\t\t\tdescription: t.description,\n\t\t\t\t\tinput_schema: {\n\t\t\t\t\t\ttype: 
'object',\n\t\t\t\t\t\tproperties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},\n\t\t\t\t\t\trequired: t.args ? Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []\n\t\t\t\t\t},\n\t\t\t\t\tfn: undefined\n\t\t\t\t})),\n\t\t\t\tmessages: history,\n\t\t\t\tstream: !!options.stream,\n\t\t\t};\n\n\t\t\tlet resp: any;\n\t\t\tlet isFirstMessage = true;\n\t\t\tdo {\n\t\t\t\tresp = await this.client.messages.create(requestParams);\n\n\t\t\t\tif(options.stream) {\n\t\t\t\t\tif(!isFirstMessage) options.stream({text: '\\n\\n'});\n\t\t\t\t\tisFirstMessage = false;\n\n\t\t\t\t\tresp.content = [];\n\t\t\t\t\tfor await (const chunk of resp) {\n\t\t\t\t\t\tif(controller.signal.aborted) break;\n\t\t\t\t\t\tif(chunk.type === 'content_block_start') {\n\t\t\t\t\t\t\tif(chunk.content_block.type === 'text') {\n\t\t\t\t\t\t\t\tresp.content.push({type: 'text', text: ''});\n\t\t\t\t\t\t\t} else if(chunk.content_block.type === 'tool_use') {\n\t\t\t\t\t\t\t\tresp.content.push({type: 'tool_use', id: chunk.content_block.id, name: chunk.content_block.name, input: <any>''});\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t} else if(chunk.type === 'content_block_delta') {\n\t\t\t\t\t\t\tif(chunk.delta.type === 'text_delta') {\n\t\t\t\t\t\t\t\tconst text = chunk.delta.text;\n\t\t\t\t\t\t\t\tresp.content.at(-1).text += text;\n\t\t\t\t\t\t\t\toptions.stream({text});\n\t\t\t\t\t\t\t} else if(chunk.delta.type === 'input_json_delta') {\n\t\t\t\t\t\t\t\tresp.content.at(-1).input += chunk.delta.partial_json;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t} else if(chunk.type === 'content_block_stop') {\n\t\t\t\t\t\t\tconst last = resp.content.at(-1);\n\t\t\t\t\t\t\tif(last.input != null) last.input = last.input ? JSONAttemptParse(last.input, {}) : {};\n\t\t\t\t\t\t} else if(chunk.type === 'message_stop') {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tconst toolCalls = resp.content.filter((c: any) => c.type === 'tool_use');\n\t\t\t\tif(toolCalls.length && !controller.signal.aborted) {\n\t\t\t\t\thistory.push({role: 'assistant', content: resp.content});\n\t\t\t\t\tconst results = await Promise.all(toolCalls.map(async (toolCall: any) => {\n\t\t\t\t\t\tconst tool = options.tools?.find(findByProp('name', toolCall.name));\n\t\t\t\t\t\tif(!tool) return {tool_use_id: toolCall.id, is_error: true, content: 'Tool not found'};\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst result = await tool.fn(toolCall.input, this.ai);\n\t\t\t\t\t\t\treturn {type: 'tool_result', tool_use_id: toolCall.id, content: JSONSanitize(result)};\n\t\t\t\t\t\t} catch (err: any) {\n\t\t\t\t\t\t\treturn {type: 'tool_result', tool_use_id: toolCall.id, is_error: true, content: err?.message || err?.toString() || 'Unknown'};\n\t\t\t\t\t\t}\n\t\t\t\t\t}));\n\t\t\t\t\thistory.push({role: 'user', content: results});\n\t\t\t\t\trequestParams.messages = history;\n\t\t\t\t}\n\t\t\t} while (!controller.signal.aborted && resp.content.some((c: any) => c.type === 'tool_use'));\n\n\t\t\tif(options.stream) options.stream({done: true});\n\t\t\tres(this.toStandard([...history, {\n\t\t\t\trole: 'assistant',\n\t\t\t\tcontent: resp.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\\n\\n')\n\t\t\t}]));\n\t\t});\n\n\t\treturn Object.assign(response, {abort: () => controller.abort()});\n\t}\n}\n","import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {LLMMessage, LLMRequest} from './llm.ts';\nimport {AbortablePromise, LLMProvider} from 
'./provider.ts';\nimport {Ollama as ollama} from 'ollama';\n\nexport class Ollama extends LLMProvider {\n\tclient!: ollama;\n\n\tconstructor(public readonly ai: Ai, public host: string, public model: string) {\n\t\tsuper();\n\t\tthis.client = new ollama({host});\n\t}\n\n\tprivate toStandard(history: any[]): LLMMessage[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tif(history[i].role == 'assistant' && history[i].tool_calls) {\n\t\t\t\tif(history[i].content) delete history[i].tool_calls;\n\t\t\t\telse {\n\t\t\t\t\thistory.splice(i, 1);\n\t\t\t\t\ti--;\n\t\t\t\t}\n\t\t\t} else if(history[i].role == 'tool') {\n\t\t\t\tconst error = history[i].content.startsWith('{\"error\":');\n\t\t\t\thistory[i] = {role: 'tool', name: history[i].tool_name, args: history[i].args, [error ? 'error' : 'content']: history[i].content};\n\t\t\t}\n\t\t}\n\t\treturn history;\n\t}\n\n\tprivate fromStandard(history: LLMMessage[]): any[] {\n\t\treturn history.map((h: any) => {\n\t\t\tif(h.role != 'tool') return h;\n\t\t\treturn {role: 'tool', tool_name: h.name, content: h.error || h.content}\n\t\t});\n\t}\n\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tconst controller = new AbortController();\n\t\tconst response = new Promise<any>(async (res, rej) => {\n\t\t\tlet system = options.system || this.ai.options.system;\n\t\t\tlet history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);\n\t\t\tif(history[0].roll == 'system') {\n\t\t\t\tif(!system) system = history.shift();\n\t\t\t\telse history.shift();\n\t\t\t}\n\t\t\tif(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min);\n\t\t\tif(options.system) history.unshift({role: 'system', content: system})\n\n\t\t\tconst requestParams: any = {\n\t\t\t\tmodel: options.model || this.model,\n\t\t\t\tmessages: history,\n\t\t\t\tstream: !!options.stream,\n\t\t\t\tsignal: controller.signal,\n\t\t\t\toptions: {\n\t\t\t\t\ttemperature: options.temperature || this.ai.options.temperature || 0.7,\n\t\t\t\t\tnum_predict: options.max_tokens || this.ai.options.max_tokens || 4096,\n\t\t\t\t},\n\t\t\t\ttools: (options.tools || this.ai.options.tools || []).map(t => ({\n\t\t\t\t\ttype: 'function',\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: t.name,\n\t\t\t\t\t\tdescription: t.description,\n\t\t\t\t\t\tparameters: {\n\t\t\t\t\t\t\ttype: 'object',\n\t\t\t\t\t\t\tproperties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},\n\t\t\t\t\t\t\trequired: t.args ? 
Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}))\n\t\t\t}\n\n\t\t\t// Run tool chains\n\t\t\tlet resp: any;\n\t\t\tdo {\n\t\t\t\tresp = await this.client.chat(requestParams);\n\t\t\t\tif(options.stream) {\n\t\t\t\t\tresp.message = {role: 'assistant', content: '', tool_calls: []};\n\t\t\t\t\tfor await (const chunk of resp) {\n\t\t\t\t\t\tif(controller.signal.aborted) break;\n\t\t\t\t\t\tif(chunk.message?.content) {\n\t\t\t\t\t\t\tresp.message.content += chunk.message.content;\n\t\t\t\t\t\t\toptions.stream({text: chunk.message.content});\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif(chunk.message?.tool_calls) resp.message.tool_calls = chunk.message.tool_calls;\n\t\t\t\t\t\tif(chunk.done) break;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Run tools\n\t\t\t\tif(resp.message?.tool_calls?.length && !controller.signal.aborted) {\n\t\t\t\t\thistory.push(resp.message);\n\t\t\t\t\tconst results = await Promise.all(resp.message.tool_calls.map(async (toolCall: any) => {\n\t\t\t\t\t\tconst tool = (options.tools || this.ai.options.tools)?.find(findByProp('name', toolCall.function.name));\n\t\t\t\t\t\tif(!tool) return {role: 'tool', tool_name: toolCall.function.name, content: '{\"error\": \"Tool not found\"}'};\n\t\t\t\t\t\tconst args = typeof toolCall.function.arguments === 'string' ? JSONAttemptParse(toolCall.function.arguments, {}) : toolCall.function.arguments;\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst result = await tool.fn(args, this.ai);\n\t\t\t\t\t\t\treturn {role: 'tool', tool_name: toolCall.function.name, args, content: JSONSanitize(result)};\n\t\t\t\t\t\t} catch (err: any) {\n\t\t\t\t\t\t\treturn {role: 'tool', tool_name: toolCall.function.name, args, content: JSONSanitize({error: err?.message || err?.toString() || 'Unknown'})};\n\t\t\t\t\t\t}\n\t\t\t\t\t}));\n\t\t\t\t\thistory.push(...results);\n\t\t\t\t\trequestParams.messages = history;\n\t\t\t\t}\n\t\t\t} while (!controller.signal.aborted && resp.message?.tool_calls?.length);\n\t\t\tif(options.stream) options.stream({done: true});\n\t\t\tres(this.toStandard([...history, {role: 'assistant', content: resp.message?.content}]));\n\t\t});\n\t\treturn Object.assign(response, {abort: () => controller.abort()});\n\t}\n}\n","import {OpenAI as openAI} from 'openai';\nimport {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {LLMMessage, LLMRequest} from './llm.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\n\nexport class OpenAi extends LLMProvider {\n\tclient!: openAI;\n\n\tconstructor(public readonly ai: Ai, public readonly apiToken: string, public model: string) {\n\t\tsuper();\n\t\tthis.client = new openAI({apiKey: apiToken});\n\t}\n\n\tprivate toStandard(history: any[]): LLMMessage[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tconst h = history[i];\n\t\t\tif(h.role === 'assistant' && h.tool_calls) {\n\t\t\t\tconst tools = h.tool_calls.map((tc: any) => ({\n\t\t\t\t\trole: 'tool',\n\t\t\t\t\tid: tc.id,\n\t\t\t\t\tname: tc.function.name,\n\t\t\t\t\targs: JSONAttemptParse(tc.function.arguments, {})\n\t\t\t\t}));\n\t\t\t\thistory.splice(i, 1, ...tools);\n\t\t\t\ti += tools.length - 1;\n\t\t\t} else if(h.role === 'tool' && h.content) {\n\t\t\t\tconst record = history.find(h2 => h.tool_call_id == h2.id);\n\t\t\t\tif(record) {\n\t\t\t\t\tif(h.content.includes('\"error\":')) record.error = h.content;\n\t\t\t\t\telse record.content = h.content;\n\t\t\t\t}\n\t\t\t\thistory.splice(i, 
1);\n\t\t\t\ti--;\n\t\t\t}\n\n\t\t}\n\t\treturn history;\n\t}\n\n\tprivate fromStandard(history: LLMMessage[]): any[] {\n\t\treturn history.reduce((result, h) => {\n\t\t\tif(h.role === 'tool') {\n\t\t\t\tresult.push({\n\t\t\t\t\trole: 'assistant',\n\t\t\t\t\tcontent: null,\n\t\t\t\t\ttool_calls: [{ id: h.id, type: 'function', function: { name: h.name, arguments: JSON.stringify(h.args) } }],\n\t\t\t\t\trefusal: null,\n\t\t\t\t\tannotations: [],\n\t\t\t\t}, {\n\t\t\t\t\trole: 'tool',\n\t\t\t\t\ttool_call_id: h.id,\n\t\t\t\t\tcontent: h.error || h.content\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\tresult.push(h);\n\t\t\t}\n\t\t\treturn result;\n\t\t}, [] as any[]);\n\t}\n\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tconst controller = new AbortController();\n\t\tconst response = new Promise<any>(async (res, rej) => {\n\t\t\tlet history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);\n\t\t\tif(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);\n\n\t\t\tconst requestParams: any = {\n\t\t\t\tmodel: options.model || this.model,\n\t\t\t\tmessages: history,\n\t\t\t\tstream: !!options.stream,\n\t\t\t\tmax_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,\n\t\t\t\ttemperature: options.temperature || this.ai.options.temperature || 0.7,\n\t\t\t\ttools: (options.tools || this.ai.options.tools || []).map(t => ({\n\t\t\t\t\ttype: 'function',\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: t.name,\n\t\t\t\t\t\tdescription: t.description,\n\t\t\t\t\t\tparameters: {\n\t\t\t\t\t\t\ttype: 'object',\n\t\t\t\t\t\t\tproperties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},\n\t\t\t\t\t\t\trequired: t.args ? 
Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}))\n\t\t\t};\n\n\t\t\t// Tool call and streaming logic similar to other providers\n\t\t\tlet resp: any;\n\t\t\tdo {\n\t\t\t\tresp = await this.client.chat.completions.create(requestParams);\n\n\t\t\t\t// Implement streaming and tool call handling\n\t\t\t\tif(options.stream) {\n\t\t\t\t\tresp.choices = [];\n\t\t\t\t\tfor await (const chunk of resp) {\n\t\t\t\t\t\tif(controller.signal.aborted) break;\n\t\t\t\t\t\tif(chunk.choices[0].delta.content) {\n\t\t\t\t\t\t\toptions.stream({text: chunk.choices[0].delta.content});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Run tools\n\t\t\t\tconst toolCalls = resp.choices[0].message.tool_calls || [];\n\t\t\t\tif(toolCalls.length && !controller.signal.aborted) {\n\t\t\t\t\thistory.push(resp.choices[0].message);\n\t\t\t\t\tconst results = await Promise.all(toolCalls.map(async (toolCall: any) => {\n\t\t\t\t\t\tconst tool = options.tools?.find(findByProp('name', toolCall.function.name));\n\t\t\t\t\t\tif(!tool) return {role: 'tool', tool_call_id: toolCall.id, content: '{\"error\": \"Tool not found\"}'};\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst args = JSONAttemptParse(toolCall.function.arguments, {});\n\t\t\t\t\t\t\tconst result = await tool.fn(args, this.ai);\n\t\t\t\t\t\t\treturn {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize(result)};\n\t\t\t\t\t\t} catch (err: any) {\n\t\t\t\t\t\t\treturn {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize({error: err?.message || err?.toString() || 'Unknown'})};\n\t\t\t\t\t\t}\n\t\t\t\t\t}));\n\t\t\t\t\thistory.push(...results);\n\t\t\t\t\trequestParams.messages = history;\n\t\t\t\t}\n\t\t\t} while (!controller.signal.aborted && resp.choices?.[0]?.message?.tool_calls?.length);\n\n\t\t\tif(options.stream) options.stream({done: true});\n\t\t\tres(this.toStandard([...history, {role: 'assistant', content: resp.choices[0].message.content || ''}]));\n\t\t});\n\n\t\treturn Object.assign(response, {abort: () => controller.abort()});\n\t}\n}\n","import {JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {Anthropic} from './antrhopic.ts';\nimport {Ollama} from './ollama.ts';\nimport {OpenAi} from './open-ai.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\nimport {AiTool} from './tools.ts';\n\nexport type LLMMessage = {\n\t/** Message originator */\n\trole: 'assistant' | 'system' | 'user';\n\t/** Message content */\n\tcontent: string | any;\n} | {\n\t/** Tool call */\n\trole: 'tool';\n\t/** Unique ID for call */\n\tid: string;\n\t/** Tool that was run */\n\tname: string;\n\t/** Tool arguments */\n\targs: any;\n\t/** Tool result */\n\tcontent: undefined | string;\n\t/** Tool error */\n\terror: undefined | string;\n}\n\nexport type LLMOptions = {\n\t/** Anthropic settings */\n\tanthropic?: {\n\t\t/** API Token */\n\t\ttoken: string;\n\t\t/** Default model */\n\t\tmodel: string;\n\t},\n\t/** Ollama settings */\n\tollama?: {\n\t\t/** connection URL */\n\t\thost: string;\n\t\t/** Default model */\n\t\tmodel: string;\n\t},\n\t/** Open AI settings */\n\topenAi?: {\n\t\t/** API Token */\n\t\ttoken: string;\n\t\t/** Default model */\n\t\tmodel: string;\n\t},\n\t/** Default provider & model */\n\tmodel: string | [string, string];\n} & Omit<LLMRequest, 'model'>;\n\nexport type LLMRequest = {\n\t/** System prompt */\n\tsystem?: string;\n\t/** Message history */\n\thistory?: LLMMessage[];\n\t/** Max tokens for request */\n\tmax_tokens?: number;\n\t/** 0 = 
Rigid Logic, 1 = Balanced, 2 = Hyper Creative **/\n\ttemperature?: number;\n\t/** Available tools */\n\ttools?: AiTool[];\n\t/** LLM model */\n\tmodel?: string | [string, string];\n\t/** Stream response */\n\tstream?: (chunk: {text?: string, done?: true}) => any;\n\t/** Compress old messages in the chat to free up context */\n\tcompress?: {\n\t\t/** Trigger chat compression once context exceeds the token count */\n\t\tmax: number;\n\t\t/** Compress chat until context size smaller than */\n\t\tmin: number\n\t}\n}\n\nexport class LLM {\n\tprivate providers: {[key: string]: LLMProvider} = {};\n\n\tconstructor(public readonly ai: Ai, public readonly options: LLMOptions) {\n\t\tif(options.anthropic?.token) this.providers.anthropic = new Anthropic(this.ai, options.anthropic.token, options.anthropic.model);\n\t\tif(options.ollama?.host) this.providers.ollama = new Ollama(this.ai, options.ollama.host, options.ollama.model);\n\t\tif(options.openAi?.token) this.providers.openAi = new OpenAi(this.ai, options.openAi.token, options.openAi.model);\n\t}\n\n\t/**\n\t * Chat with LLM\n\t * @param {string} message Question\n\t * @param {LLMRequest} options Configuration options and chat history\n\t * @returns {{abort: () => void, response: Promise<LLMMessage[]>}} Function to abort response and chat history\n\t */\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tlet model: any = [null, null];\n\t\tif(options.model) {\n\t\t\tif(typeof options.model == 'object') model = options.model;\n\t\t\telse model = [options.model, (<any>this.options)[options.model]?.model];\n\t\t}\n\t\tif(!options.model || model[1] == null) {\n\t\t\tif(typeof this.options.model == 'object') model = this.options.model;\n\t\t\telse model = [this.options.model, (<any>this.options)[this.options.model]?.model];\n\t\t}\n\t\tif(!model[0] || !model[1]) throw new Error(`Unknown LLM provider or model: ${model[0]} / ${model[1]}`);\n\t\treturn this.providers[model[0]].ask(message, {...options, model: model[1]});\n\t}\n\n\t/**\n\t * Compress chat history to reduce context size\n\t * @param {LLMMessage[]} history Chatlog that will be compressed\n\t * @param max Trigger compression once context is larger than max\n\t * @param min Summarize until context size is less than min\n\t * @param {LLMRequest} options LLM options\n\t * @returns {Promise<LLMMessage[]>} New chat history will summary at index 0\n\t */\n\tasync compress(history: LLMMessage[], max: number, min: number, options?: LLMRequest): Promise<LLMMessage[]> {\n\t\tif(this.estimateTokens(history) < max) return history;\n\t\tlet keep = 0, tokens = 0;\n\t\tfor(let m of history.toReversed()) {\n\t\t\ttokens += this.estimateTokens(m.content);\n\t\t\tif(tokens < min) keep++;\n\t\t\telse break;\n\t\t}\n\t\tif(history.length <= keep) return history;\n\t\tconst recent = keep == 0 ? [] : history.slice(-keep),\n\t\t\tprocess = (keep == 0 ? 
history : history.slice(0, -keep)).filter(h => h.role === 'assistant' || h.role === 'user');\n\t\tconst summary = await this.summarize(process.map(m => `${m.role}: ${m.content}`).join('\\n\\n'), 250, options);\n\t\treturn [{role: 'assistant', content: `Conversation Summary: ${summary}`}, ...recent];\n\t}\n\n\t/**\n\t * Estimate variable as tokens\n\t * @param history Object to size\n\t * @returns {number} Rough token count\n\t */\n\testimateTokens(history: any): number {\n\t\tconst text = JSON.stringify(history);\n\t\treturn Math.ceil((text.length / 4) * 1.2);\n\t}\n\n\t/**\n\t * Ask a question with JSON response\n\t * @param {string} message Question\n\t * @param {LLMRequest} options Configuration options and chat history\n\t * @returns {Promise<{} | {} | RegExpExecArray | null>}\n\t */\n\tasync json(message: string, options?: LLMRequest) {\n\t\tlet resp = await this.ask(message, {\n\t\t\tsystem: 'Respond using a JSON blob',\n\t\t\t...options\n\t\t});\n\t\tif(!resp?.[0]?.content) return {};\n\t\treturn JSONAttemptParse(new RegExp('\\{[\\s\\S]*\\}').exec(resp[0].content), {});\n\t}\n\n\t/**\n\t * Create a summary of some text\n\t * @param {string} text Text to summarize\n\t * @param {number} tokens Max number of tokens\n\t * @param options LLM request options\n\t * @returns {Promise<string>} Summary\n\t */\n\tsummarize(text: string, tokens: number, options?: LLMRequest): Promise<string | null> {\n\t\treturn this.ask(text, {system: `Generate a brief summary <= ${tokens} tokens. Output nothing else`, temperature: 0.3, ...options})\n\t\t\t.then(history => <string>history.pop()?.content || null);\n\t}\n}\n","import {createWorker} from 'tesseract.js';\nimport {LLM, LLMOptions} from './llm';\nimport fs from 'node:fs/promises';\nimport Path from 'node:path';\nimport * as tf from '@tensorflow/tfjs';\nimport {spawn} from 'node:child_process';\n\nexport type AiOptions = LLMOptions & {\n\twhisper?: {\n\t\t/** Whisper binary location */\n\t\tbinary: string;\n\t\t/** Model: `ggml-base.en.bin` */\n\t\tmodel: string;\n\t\t/** Path to models */\n\t\tpath: string;\n\t}\n}\n\nexport class Ai {\n\tprivate downloads: {[key: string]: Promise<string>} = {};\n\tprivate whisperModel!: string;\n\n\t/** Large Language Models */\n\tllm!: LLM;\n\n\tconstructor(public readonly options: AiOptions) {\n\t\tthis.llm = new LLM(this, options);\n\t\tif(this.options.whisper?.binary) {\n\t\t\tthis.whisperModel = this.options.whisper?.model.endsWith('.bin') ? 
this.options.whisper?.model : this.options.whisper?.model + '.bin';\n\t\t\tthis.downloadAsrModel();\n\t\t}\n\t}\n\n\t/**\n\t * Convert audio to text using Auditory Speech Recognition\n\t * @param {string} path Path to audio\n\t * @param model Whisper model\n\t * @returns {Promise<any>} Extracted text\n\t */\n\tasr(path: string, model: string = this.whisperModel): {abort: () => void, response: Promise<string | null>} {\n\t\tif(!this.options.whisper?.binary) throw new Error('Whisper not configured');\n\t\tlet abort: any = () => {};\n\t\tconst response = new Promise<string | null>((resolve, reject) => {\n\t\t\tthis.downloadAsrModel(model).then(m => {\n\t\t\t\tlet output = '';\n\t\t\t\tconst proc = spawn(<string>this.options.whisper?.binary, ['-nt', '-np', '-m', m, '-f', path], {stdio: ['ignore', 'pipe', 'ignore']});\n\t\t\t\tabort = () => proc.kill('SIGTERM');\n\t\t\t\tproc.on('error', (err: Error) => reject(err));\n\t\t\t\tproc.stdout.on('data', (data: Buffer) => output += data.toString());\n\t\t\t\tproc.on('close', (code: number) => {\n\t\t\t\t\tif(code === 0) resolve(output.trim() || null);\n\t\t\t\t\telse reject(new Error(`Exit code ${code}`));\n\t\t\t\t});\n\t\t\t});\n\t\t});\n\t\treturn {response, abort};\n\t}\n\n\t/**\n\t * Downloads the specified Whisper model if it is not already present locally.\n\t *\n\t * @param {string} model Whisper model that will be downloaded\n\t * @return {Promise<string>} Absolute path to model file, resolves once downloaded\n\t */\n\tasync downloadAsrModel(model: string = this.whisperModel): Promise<string> {\n\t\tif(!this.options.whisper?.binary) throw new Error('Whisper not configured');\n\t\tif(!model.endsWith('.bin')) model += '.bin';\n\t\tconst p = Path.join(this.options.whisper.path, model);\n\t\tif(await fs.stat(p).then(() => true).catch(() => false)) return p;\n\t\tif(!!this.downloads[model]) return this.downloads[model];\n\t\tthis.downloads[model] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${model}`)\n\t\t\t.then(resp => resp.arrayBuffer())\n\t\t\t.then(arr => Buffer.from(arr)).then(async buffer => {\n\t\t\t\tawait fs.writeFile(p, buffer);\n\t\t\t\tdelete this.downloads[model];\n\t\t\t\treturn p;\n\t\t\t});\n\t\treturn this.downloads[model];\n\t}\n\n\t/**\n\t * Convert image to text using Optical Character Recognition\n\t * @param {string} path Path to image\n\t * @returns {{abort: Function, response: Promise<string | null>}} Abort function & Promise of extracted text\n\t */\n\tocr(path: string): {abort: () => void, response: Promise<string | null>} {\n\t\tlet worker: any;\n\t\treturn {\n\t\t\tabort: () => { worker?.terminate(); },\n\t\t\tresponse: new Promise(async res => {\n\t\t\t\tworker = await createWorker('eng');\n\t\t\t\tconst {data} = await worker.recognize(path);\n\t\t\t\tawait worker.terminate();\n\t\t\t\tres(data.text.trim() || null);\n\t\t\t})\n\t\t}\n\t}\n\n\t/**\n\t * Compare the difference between two strings using tensor math\n\t * @param target Text that will checked\n\t * @param {string} searchTerms Multiple search terms to check against target\n\t * @returns {{avg: number, max: number, similarities: number[]}} Similarity values 0-1: 0 = unique, 1 = identical\n\t */\n\tsemanticSimilarity(target: string, ...searchTerms: string[]) {\n\t\tif(searchTerms.length < 2) throw new Error('Requires at least 2 strings to compare');\n\n\t\tconst vector = (text: string, dimensions: number = 10): number[] => {\n\t\t\treturn text.toLowerCase().split('').map((char, index) =>\n\t\t\t\t(char.charCodeAt(0) * (index + 1)) % 
dimensions / dimensions).slice(0, dimensions);\n\t\t}\n\n\t\tconst cosineSimilarity = (v1: number[], v2: number[]): number => {\n\t\t\tif (v1.length !== v2.length) throw new Error('Vectors must be same length');\n\t\t\tconst tensor1 = tf.tensor1d(v1), tensor2 = tf.tensor1d(v2)\n\t\t\tconst dotProduct = tf.dot(tensor1, tensor2)\n\t\t\tconst magnitude1 = tf.norm(tensor1)\n\t\t\tconst magnitude2 = tf.norm(tensor2)\n\t\t\tif(magnitude1.dataSync()[0] === 0 || magnitude2.dataSync()[0] === 0) return 0\n\t\t\treturn dotProduct.dataSync()[0] / (magnitude1.dataSync()[0] * magnitude2.dataSync()[0])\n\t\t}\n\n\t\tconst v = vector(target);\n\t\tconst similarities = searchTerms.map(t => vector(t)).map(refVector => cosineSimilarity(v, refVector))\n\t\treturn {avg: similarities.reduce((acc, s) => acc + s, 0) / similarities.length, max: Math.max(...similarities), similarities}\n\t}\n}\n","import {$, $Sync} from '@ztimson/node-utils';\nimport {ASet, consoleInterceptor, Http, fn as Fn} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\n\nexport type AiToolArg = {[key: string]: {\n\t/** Argument type */\n\ttype: 'array' | 'boolean' | 'number' | 'object' | 'string',\n\t/** Argument description */\n\tdescription: string,\n\t/** Required argument */\n\trequired?: boolean;\n\t/** Default value */\n\tdefault?: any,\n\t/** Options */\n\tenum?: string[],\n\t/** Minimum value or length */\n\tmin?: number,\n\t/** Maximum value or length */\n\tmax?: number,\n\t/** Match pattern */\n\tpattern?: string,\n\t/** Child arguments */\n\titems?: {[key: string]: AiToolArg}\n}}\n\nexport type AiTool = {\n\t/** Tool ID / Name - Must be snail_case */\n\tname: string,\n\t/** Tool description / prompt */\n\tdescription: string,\n\t/** Tool arguments */\n\targs?: AiToolArg,\n\t/** Callback function */\n\tfn: (args: any, ai: Ai) => any | Promise<any>,\n};\n\nexport const CliTool: AiTool = {\n\tname: 'cli',\n\tdescription: 'Use the command line interface, returns any output',\n\targs: {command: {type: 'string', description: 'Command to run', required: true}},\n\tfn: (args: {command: string}) => $`${args.command}`\n}\n\nexport const DateTimeTool: AiTool = {\n\tname: 'get_datetime',\n\tdescription: 'Get current date and time',\n\targs: {},\n\tfn: async () => new Date().toISOString()\n}\n\nexport const ExecTool: AiTool = {\n\tname: 'exec',\n\tdescription: 'Run code/scripts',\n\targs: {\n\t\tlanguage: {type: 'string', description: 'Execution language', enum: ['cli', 'node', 'python'], required: true},\n\t\tcode: {type: 'string', description: 'Code to execute', required: true}\n\t},\n\tfn: async (args, ai) => {\n\t\ttry {\n\t\t\tswitch(args.type) {\n\t\t\t\tcase 'bash':\n\t\t\t\t\treturn await CliTool.fn({command: args.code}, ai);\n\t\t\t\tcase 'node':\n\t\t\t\t\treturn await JSTool.fn({code: args.code}, ai);\n\t\t\t\tcase 'python': {\n\t\t\t\t\treturn await PythonTool.fn({code: args.code}, ai);\n\t\t\t\t}\n\t\t\t}\n\t\t} catch(err: any) {\n\t\t\treturn {error: err?.message || err.toString()};\n\t\t}\n\t}\n}\n\nexport const FetchTool: AiTool = {\n\tname: 'fetch',\n\tdescription: 'Make HTTP request to URL',\n\targs: {\n\t\turl: {type: 'string', description: 'URL to fetch', required: true},\n\t\tmethod: {type: 'string', description: 'HTTP method to use', enum: ['GET', 'POST', 'PUT', 'DELETE'], default: 'GET'},\n\t\theaders: {type: 'object', description: 'HTTP headers to send', default: {}},\n\t\tbody: {type: 'object', description: 'HTTP body to send'},\n\t},\n\tfn: (args: {\n\t\turl: string;\n\t\tmethod: 'GET' | 'POST' | 'PUT' | 
'DELETE';\n\t\theaders: {[key: string]: string};\n\t\tbody: any;\n\t}) => new Http({url: args.url, headers: args.headers}).request({method: args.method || 'GET', body: args.body})\n}\n\nexport const JSTool: AiTool = {\n\tname: 'exec_javascript',\n\tdescription: 'Execute commonjs javascript',\n\targs: {\n\t\tcode: {type: 'string', description: 'CommonJS javascript', required: true}\n\t},\n\tfn: async (args: {code: string}) => {\n\t\tconst console = consoleInterceptor(null);\n\t\tconst resp = await Fn<any>({console}, args.code, true).catch((err: any) => console.output.error.push(err));\n\t\treturn {...console.output, return: resp, stdout: undefined, stderr: undefined};\n\t}\n}\n\nexport const PythonTool: AiTool = {\n\tname: 'exec_javascript',\n\tdescription: 'Execute commonjs javascript',\n\targs: {\n\t\tcode: {type: 'string', description: 'CommonJS javascript', required: true}\n\t},\n\tfn: async (args: {code: string}) => ({result: $Sync`python -c \"${args.code}\"`})\n}\n\nexport const SearchTool: AiTool = {\n\tname: 'search',\n\tdescription: 'Use a search engine to find relevant URLs, should be changed with fetch to scrape sources',\n\targs: {\n\t\tquery: {type: 'string', description: 'Search string', required: true},\n\t\tlength: {type: 'string', description: 'Number of results to return', default: 5},\n\t},\n\tfn: async (args: {\n\t\tquery: string;\n\t\tlength: number;\n\t}) => {\n\t\tconst html = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(args.query)}`, {\n\t\t\theaders: {\"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64)\", \"Accept-Language\": \"en-US,en;q=0.9\"}\n\t\t}).then(resp => resp.text());\n\t\tlet match, regex = /<a .*?href=\"(.+?)\".+?<\\/a>/g;\n\t\tconst results = new ASet<string>();\n\t\twhile((match = regex.exec(html)) !== null) {\n\t\t\tlet url = /uddg=(.+)&?/.exec(decodeURIComponent(match[1]))?.[1];\n\t\t\tif(url) url = decodeURIComponent(url);\n\t\t\tif(url) results.add(url);\n\t\t\tif(results.size >= (args.length || 5)) break;\n\t\t}\n\t\treturn 
results;\n\t}\n}\n"],"names":["LLMProvider","Anthropic","ai","apiToken","model","anthropic","history","merged","i","msg","c","h","last","message","options","controller","response","res","rej","requestParams","t","objectMap","key","value","resp","isFirstMessage","chunk","text","JSONAttemptParse","toolCalls","results","toolCall","tool","findByProp","result","JSONSanitize","err","Ollama","host","ollama","error","system","args","OpenAi","openAI","tools","tc","record","h2","LLM","max","min","keep","tokens","m","recent","process","Ai","path","abort","resolve","reject","output","proc","spawn","data","code","p","Path","fs","arr","buffer","worker","createWorker","target","searchTerms","vector","dimensions","char","index","cosineSimilarity","v1","v2","tensor1","tf","tensor2","dotProduct","magnitude1","magnitude2","v","similarities","refVector","acc","s","CliTool","$","DateTimeTool","ExecTool","JSTool","PythonTool","FetchTool","Http","console","consoleInterceptor","Fn","$Sync","SearchTool","html","match","regex","ASet","url"],"mappings":"6gCAIO,MAAeA,CAAY,CAElC,CCAO,MAAMC,UAAkBD,CAAY,CAG1C,YAA4BE,EAAwBC,EAAyBC,EAAe,CAC3F,MAAA,EAD2B,KAAA,GAAAF,EAAwB,KAAA,SAAAC,EAAyB,KAAA,MAAAC,EAE5E,KAAK,OAAS,IAAIC,EAAAA,UAAU,CAAC,OAAQF,EAAS,CAC/C,CALA,OAOQ,WAAWG,EAA8B,CAChD,MAAMC,EAAgB,CAAA,EACtB,QAAQC,EAAI,EAAGA,EAAIF,EAAQ,OAAQE,IAAK,CACvC,MAAMC,EAAMH,EAAQE,CAAC,EAcrB,GAbG,OAAOC,EAAI,SAAW,WACrBA,EAAI,MAAQ,YACdA,EAAI,QAAQ,OAAQC,GAAWA,EAAE,MAAQ,UAAU,EAAE,QAASA,GAAW,CACxEH,EAAO,KAAK,CAAC,KAAM,OAAQ,GAAIG,EAAE,GAAI,KAAMA,EAAE,KAAM,KAAMA,EAAE,MAAM,CAClE,CAAC,EACQD,EAAI,MAAQ,QACrBA,EAAI,QAAQ,OAAQC,GAAWA,EAAE,MAAQ,aAAa,EAAE,QAASA,GAAW,CAC3E,MAAM,EAAIH,EAAO,KAAMI,GAAWA,EAAE,IAAMD,EAAE,WAAW,EACpD,IAAG,EAAEA,EAAE,SAAW,QAAU,SAAS,EAAIA,EAAE,QAC/C,CAAC,EAEFD,EAAI,QAAUA,EAAI,QAAQ,OAAQC,GAAWA,EAAE,MAAQ,MAAM,EAAE,IAAKA,GAAWA,EAAE,IAAI,EAAE,KAAK;AAAA;AAAA,CAAM,GAEhGD,EAAI,QAAS,CACf,MAAMG,EAAOL,EAAO,GAAG,EAAE,EACtBK,GAAQA,EAAK,MAAQ,aAAeH,EAAI,MAAQ,YAAaG,EAAK,SAAW;AAAA;AAAA,EAASH,EAAI,QACxFF,EAAO,KAAK,CAAC,KAAME,EAAI,KAAM,QAASA,EAAI,QAAQ,CACxD,CACD,CACA,OAAOF,CACR,CAEQ,aAAaD,EAA8B,CAClD,QAAQE,EAAI,EAAGA,EAAIF,EAAQ,OAAQE,IAClC,GAAGF,EAAQE,CAAC,EAAE,MAAQ,OAAQ,CAC7B,MAAMG,EAASL,EAAQE,CAAC,EACxBF,EAAQ,OAAOE,EAAG,EACjB,CAAC,KAAM,YAAa,QAAS,CAAC,CAAC,KAAM,WAAY,GAAIG,EAAE,GAAI,KAAMA,EAAE,KAAM,MAAOA,EAAE,IAAA,CAAK,CAAA,EACvF,CAAC,KAAM,OAAQ,QAAS,CAAC,CAAC,KAAM,cAAe,YAAaA,EAAE,GAAI,SAAU,CAAC,CAACA,EAAE,MAAO,QAAUA,EAAE,OAASA,EAAE,QAAQ,CAAA,CAAC,EAExHH,GACD,CAED,OAAOF,CACR,CAEA,IAAIO,EAAiBC,EAAsB,GAAoC,CAC9E,MAAMC,EAAa,IAAI,gBACjBC,EAAW,IAAI,QAAa,MAAOC,EAAKC,IAAQ,CACrD,IAAIZ,EAAU,KAAK,aAAa,CAAC,GAAGQ,EAAQ,SAAW,CAAA,EAAI,CAAC,KAAM,OAAQ,QAASD,CAAA,CAAQ,CAAC,EACzFC,EAAQ,WAAUR,EAAU,MAAM,KAAK,GAAG,IAAI,SAAcA,EAASQ,EAAQ,SAAS,IAAKA,EAAQ,SAAS,IAAKA,CAAO,GAC3H,MAAMK,EAAqB,CAC1B,MAAOL,EAAQ,OAAS,KAAK,MAC7B,WAAYA,EAAQ,YAAc,KAAK,GAAG,QAAQ,YAAc,KAChE,OAAQA,EAAQ,QAAU,KAAK,GAAG,QAAQ,QAAU,GACpD,YAAaA,EAAQ,aAAe,KAAK,GAAG,QAAQ,aAAe,GACnE,OAAQA,EAAQ,OAAS,KAAK,GAAG,QAAQ,OAAS,CAAA,GAAI,IAAIM,IAAM,CAC/D,KAAMA,EAAE,KACR,YAAaA,EAAE,YACf,aAAc,CACb,KAAM,SACN,WAAYA,EAAE,KAAOC,EAAAA,UAAUD,EAAE,KAAM,CAACE,EAAKC,KAAW,CAAC,GAAGA,EAAO,SAAU,MAAA,EAAW,EAAI,CAAA,EAC5F,SAAUH,EAAE,KAAO,OAAO,QAAQA,EAAE,IAAI,EAAE,OAAOA,GAAKA,EAAE,CAAC,EAAE,QAAQ,EAAE,IAAIA,GAAKA,EAAE,CAAC,CAAC,EAAI,CAAA,CAAC,EAExF,GAAI,MAAA,EACH,EACF,SAAUd,EACV,OAAQ,CAAC,CAACQ,EAAQ,MAAA,EAGnB,IAAIU,EACAC,EAAiB,GACrB,EAAG,CAGF,GAFAD,EAAO,MAAM,KAAK,OAAO,SAAS,OAAOL,CAAa,EAEnDL,EAAQ,OAAQ,CACdW,GAAgBX,EAAQ,OAAO,CAAC,KAAM;AAAA;AAAA,EAAO,EACjDW,EAAiB,GAEjBD,EAAK,QAAU,CAAA,EACf,gBAAiBE,KAASF,EAAM,CAC/B,GAAGT,EAAW,OAAO,QAAS,MAC9B,G
AAGW,EAAM,OAAS,sBACdA,EAAM,cAAc,OAAS,OAC/BF,EAAK,QAAQ,KAAK,CAAC,KAAM,OAAQ,KAAM,GAAG,EACjCE,EAAM,cAAc,OAAS,YACtCF,EAAK,QAAQ,KAAK,CAAC,KAAM,WAAY,GAAIE,EAAM,cAAc,GAAI,KAAMA,EAAM,cAAc,KAAM,MAAY,GAAG,UAExGA,EAAM,OAAS,sBACxB,GAAGA,EAAM,MAAM,OAAS,aAAc,CACrC,MAAMC,EAAOD,EAAM,MAAM,KACzBF,EAAK,QAAQ,GAAG,EAAE,EAAE,MAAQG,EAC5Bb,EAAQ,OAAO,CAAC,KAAAa,EAAK,CACtB,MAAUD,EAAM,MAAM,OAAS,qBAC9BF,EAAK,QAAQ,GAAG,EAAE,EAAE,OAASE,EAAM,MAAM,sBAEjCA,EAAM,OAAS,qBAAsB,CAC9C,MAAMd,EAAOY,EAAK,QAAQ,GAAG,EAAE,EAC5BZ,EAAK,OAAS,OAAMA,EAAK,MAAQA,EAAK,MAAQgB,EAAAA,iBAAiBhB,EAAK,MAAO,CAAA,CAAE,EAAI,CAAA,EACrF,SAAUc,EAAM,OAAS,eACxB,KAEF,CACD,CAEA,MAAMG,EAAYL,EAAK,QAAQ,OAAQd,GAAWA,EAAE,OAAS,UAAU,EACvE,GAAGmB,EAAU,QAAU,CAACd,EAAW,OAAO,QAAS,CAClDT,EAAQ,KAAK,CAAC,KAAM,YAAa,QAASkB,EAAK,QAAQ,EACvD,MAAMM,EAAU,MAAM,QAAQ,IAAID,EAAU,IAAI,MAAOE,GAAkB,CACxE,MAAMC,EAAOlB,EAAQ,OAAO,KAAKmB,EAAAA,WAAW,OAAQF,EAAS,IAAI,CAAC,EAClE,GAAG,CAACC,EAAM,MAAO,CAAC,YAAaD,EAAS,GAAI,SAAU,GAAM,QAAS,gBAAA,EACrE,GAAI,CACH,MAAMG,EAAS,MAAMF,EAAK,GAAGD,EAAS,MAAO,KAAK,EAAE,EACpD,MAAO,CAAC,KAAM,cAAe,YAAaA,EAAS,GAAI,QAASI,eAAaD,CAAM,CAAA,CACpF,OAASE,EAAU,CAClB,MAAO,CAAC,KAAM,cAAe,YAAaL,EAAS,GAAI,SAAU,GAAM,QAASK,GAAK,SAAWA,GAAK,SAAA,GAAc,SAAA,CACpH,CACD,CAAC,CAAC,EACF9B,EAAQ,KAAK,CAAC,KAAM,OAAQ,QAASwB,EAAQ,EAC7CX,EAAc,SAAWb,CAC1B,CACD,OAAS,CAACS,EAAW,OAAO,SAAWS,EAAK,QAAQ,KAAMd,GAAWA,EAAE,OAAS,UAAU,GAEvFI,EAAQ,QAAQA,EAAQ,OAAO,CAAC,KAAM,GAAK,EAC9CG,EAAI,KAAK,WAAW,CAAC,GAAGX,EAAS,CAChC,KAAM,YACN,QAASkB,EAAK,QAAQ,OAAQd,GAAWA,EAAE,MAAQ,MAAM,EAAE,IAAKA,GAAWA,EAAE,IAAI,EAAE,KAAK;AAAA;AAAA,CAAM,CAAA,CAC9F,CAAC,CAAC,CACJ,CAAC,EAED,OAAO,OAAO,OAAOM,EAAU,CAAC,MAAO,IAAMD,EAAW,MAAA,EAAQ,CACjE,CACD,CCtIO,MAAMsB,UAAerC,CAAY,CAGvC,YAA4BE,EAAeoC,EAAqBlC,EAAe,CAC9E,MAAA,EAD2B,KAAA,GAAAF,EAAe,KAAA,KAAAoC,EAAqB,KAAA,MAAAlC,EAE/D,KAAK,OAAS,IAAImC,SAAO,CAAC,KAAAD,EAAK,CAChC,CALA,OAOQ,WAAWhC,EAA8B,CAChD,QAAQE,EAAI,EAAGA,EAAIF,EAAQ,OAAQE,IAClC,GAAGF,EAAQE,CAAC,EAAE,MAAQ,aAAeF,EAAQE,CAAC,EAAE,WAC5CF,EAAQE,CAAC,EAAE,QAAS,OAAOF,EAAQE,CAAC,EAAE,YAExCF,EAAQ,OAAOE,EAAG,CAAC,EACnBA,aAEQF,EAAQE,CAAC,EAAE,MAAQ,OAAQ,CACpC,MAAMgC,EAAQlC,EAAQE,CAAC,EAAE,QAAQ,WAAW,WAAW,EACvDF,EAAQE,CAAC,EAAI,CAAC,KAAM,OAAQ,KAAMF,EAAQE,CAAC,EAAE,UAAW,KAAMF,EAAQE,CAAC,EAAE,KAAM,CAACgC,EAAQ,QAAU,SAAS,EAAGlC,EAAQE,CAAC,EAAE,OAAA,CAC1H,CAED,OAAOF,CACR,CAEQ,aAAaA,EAA8B,CAClD,OAAOA,EAAQ,IAAKK,GAChBA,EAAE,MAAQ,OAAeA,EACrB,CAAC,KAAM,OAAQ,UAAWA,EAAE,KAAM,QAASA,EAAE,OAASA,EAAE,OAAA,CAC/D,CACF,CAEA,IAAIE,EAAiBC,EAAsB,GAAoC,CAC9E,MAAMC,EAAa,IAAI,gBACjBC,EAAW,IAAI,QAAa,MAAOC,EAAKC,IAAQ,CACrD,IAAIuB,EAAS3B,EAAQ,QAAU,KAAK,GAAG,QAAQ,OAC3CR,EAAU,KAAK,aAAa,CAAC,GAAGQ,EAAQ,SAAW,CAAA,EAAI,CAAC,KAAM,OAAQ,QAASD,CAAA,CAAQ,CAAC,EACzFP,EAAQ,CAAC,EAAE,MAAQ,WACjBmC,IACS,MAAA,EADDA,EAASnC,EAAQ,MAAA,GAG3BQ,EAAQ,WAAUR,EAAU,MAAM,KAAK,GAAG,IAAI,SAAcA,EAASQ,EAAQ,SAAS,IAAKA,EAAQ,SAAS,GAAG,GAC/GA,EAAQ,QAAQR,EAAQ,QAAQ,CAAC,KAAM,SAAU,QAASmC,EAAO,EAEpE,MAAMtB,EAAqB,CAC1B,MAAOL,EAAQ,OAAS,KAAK,MAC7B,SAAUR,EACV,OAAQ,CAAC,CAACQ,EAAQ,OAClB,OAAQC,EAAW,OACnB,QAAS,CACR,YAAaD,EAAQ,aAAe,KAAK,GAAG,QAAQ,aAAe,GACnE,YAAaA,EAAQ,YAAc,KAAK,GAAG,QAAQ,YAAc,IAAA,EAElE,OAAQA,EAAQ,OAAS,KAAK,GAAG,QAAQ,OAAS,CAAA,GAAI,IAAIM,IAAM,CAC/D,KAAM,WACN,SAAU,CACT,KAAMA,EAAE,KACR,YAAaA,EAAE,YACf,WAAY,CACX,KAAM,SACN,WAAYA,EAAE,KAAOC,EAAAA,UAAUD,EAAE,KAAM,CAACE,EAAKC,KAAW,CAAC,GAAGA,EAAO,SAAU,MAAA,EAAW,EAAI,CAAA,EAC5F,SAAUH,EAAE,KAAO,OAAO,QAAQA,EAAE,IAAI,EAAE,OAAOA,GAAKA,EAAE,CAAC,EAAE,QAAQ,EAAE,IAAIA,GAAKA,EAAE,CAAC,CAAC,EAAI,CAAA,CAAC,CACxF,CACD,EACC,CAAA,EAIH,IAAII,EACJ,EAAG,CAEF,GADAA,EAAO,MAAM,KAAK,OAAO,KAAKL,CAAa,EACxCL,EAAQ,OAAQ,CAClBU,EAAK,QAAU,CAAC,KAAM,YAAa,QAAS,GAAI,WAAY,EAAC,EAC7D,gB
AAiBE,KAASF,EAOzB,GANGT,EAAW,OAAO,UAClBW,EAAM,SAAS,UACjBF,EAAK,QAAQ,SAAWE,EAAM,QAAQ,QACtCZ,EAAQ,OAAO,CAAC,KAAMY,EAAM,QAAQ,QAAQ,GAE1CA,EAAM,SAAS,eAAiB,QAAQ,WAAaA,EAAM,QAAQ,YACnEA,EAAM,MAAM,KAEjB,CAGA,GAAGF,EAAK,SAAS,YAAY,QAAU,CAACT,EAAW,OAAO,QAAS,CAClET,EAAQ,KAAKkB,EAAK,OAAO,EACzB,MAAMM,EAAU,MAAM,QAAQ,IAAIN,EAAK,QAAQ,WAAW,IAAI,MAAOO,GAAkB,CACtF,MAAMC,GAAQlB,EAAQ,OAAS,KAAK,GAAG,QAAQ,QAAQ,KAAKmB,EAAAA,WAAW,OAAQF,EAAS,SAAS,IAAI,CAAC,EACtG,GAAG,CAACC,EAAM,MAAO,CAAC,KAAM,OAAQ,UAAWD,EAAS,SAAS,KAAM,QAAS,6BAAA,EAC5E,MAAMW,EAAO,OAAOX,EAAS,SAAS,WAAc,SAAWH,EAAAA,iBAAiBG,EAAS,SAAS,UAAW,CAAA,CAAE,EAAIA,EAAS,SAAS,UACrI,GAAI,CACH,MAAMG,EAAS,MAAMF,EAAK,GAAGU,EAAM,KAAK,EAAE,EAC1C,MAAO,CAAC,KAAM,OAAQ,UAAWX,EAAS,SAAS,KAAM,KAAAW,EAAM,QAASP,EAAAA,aAAaD,CAAM,CAAA,CAC5F,OAASE,EAAU,CAClB,MAAO,CAAC,KAAM,OAAQ,UAAWL,EAAS,SAAS,KAAM,KAAAW,EAAM,QAASP,EAAAA,aAAa,CAAC,MAAOC,GAAK,SAAWA,GAAK,YAAc,SAAA,CAAU,CAAA,CAC3I,CACD,CAAC,CAAC,EACF9B,EAAQ,KAAK,GAAGwB,CAAO,EACvBX,EAAc,SAAWb,CAC1B,CACD,OAAS,CAACS,EAAW,OAAO,SAAWS,EAAK,SAAS,YAAY,QAC9DV,EAAQ,QAAQA,EAAQ,OAAO,CAAC,KAAM,GAAK,EAC9CG,EAAI,KAAK,WAAW,CAAC,GAAGX,EAAS,CAAC,KAAM,YAAa,QAASkB,EAAK,SAAS,OAAA,CAAQ,CAAC,CAAC,CACvF,CAAC,EACD,OAAO,OAAO,OAAOR,EAAU,CAAC,MAAO,IAAMD,EAAW,MAAA,EAAQ,CACjE,CACD,CC1GO,MAAM4B,UAAe3C,CAAY,CAGvC,YAA4BE,EAAwBC,EAAyBC,EAAe,CAC3F,MAAA,EAD2B,KAAA,GAAAF,EAAwB,KAAA,SAAAC,EAAyB,KAAA,MAAAC,EAE5E,KAAK,OAAS,IAAIwC,EAAAA,OAAO,CAAC,OAAQzC,EAAS,CAC5C,CALA,OAOQ,WAAWG,EAA8B,CAChD,QAAQE,EAAI,EAAGA,EAAIF,EAAQ,OAAQE,IAAK,CACvC,MAAMG,EAAIL,EAAQE,CAAC,EACnB,GAAGG,EAAE,OAAS,aAAeA,EAAE,WAAY,CAC1C,MAAMkC,EAAQlC,EAAE,WAAW,IAAKmC,IAAa,CAC5C,KAAM,OACN,GAAIA,EAAG,GACP,KAAMA,EAAG,SAAS,KAClB,KAAMlB,EAAAA,iBAAiBkB,EAAG,SAAS,UAAW,CAAA,CAAE,CAAA,EAC/C,EACFxC,EAAQ,OAAOE,EAAG,EAAG,GAAGqC,CAAK,EAC7BrC,GAAKqC,EAAM,OAAS,CACrB,SAAUlC,EAAE,OAAS,QAAUA,EAAE,QAAS,CACzC,MAAMoC,EAASzC,EAAQ,QAAWK,EAAE,cAAgBqC,EAAG,EAAE,EACtDD,IACCpC,EAAE,QAAQ,SAAS,UAAU,EAAGoC,EAAO,MAAQpC,EAAE,QAC/CoC,EAAO,QAAUpC,EAAE,SAEzBL,EAAQ,OAAOE,EAAG,CAAC,EACnBA,GACD,CAED,CACA,OAAOF,CACR,CAEQ,aAAaA,EAA8B,CAClD,OAAOA,EAAQ,OAAO,CAAC4B,EAAQvB,KAC3BA,EAAE,OAAS,OACbuB,EAAO,KAAK,CACX,KAAM,YACN,QAAS,KACT,WAAY,CAAC,CAAE,GAAIvB,EAAE,GAAI,KAAM,WAAY,SAAU,CAAE,KAAMA,EAAE,KAAM,UAAW,KAAK,UAAUA,EAAE,IAAI,CAAA,EAAK,EAC1G,QAAS,KACT,YAAa,CAAA,CAAC,EACZ,CACF,KAAM,OACN,aAAcA,EAAE,GAChB,QAASA,EAAE,OAASA,EAAE,OAAA,CACtB,EAEDuB,EAAO,KAAKvB,CAAC,EAEPuB,GACL,CAAA,CAAW,CACf,CAEA,IAAIrB,EAAiBC,EAAsB,GAAoC,CAC9E,MAAMC,EAAa,IAAI,gBACjBC,EAAW,IAAI,QAAa,MAAOC,EAAKC,IAAQ,CACrD,IAAIZ,EAAU,KAAK,aAAa,CAAC,GAAGQ,EAAQ,SAAW,CAAA,EAAI,CAAC,KAAM,OAAQ,QAASD,CAAA,CAAQ,CAAC,EACzFC,EAAQ,WAAUR,EAAU,MAAM,KAAK,GAAG,IAAI,SAAcA,EAASQ,EAAQ,SAAS,IAAKA,EAAQ,SAAS,IAAKA,CAAO,GAE3H,MAAMK,EAAqB,CAC1B,MAAOL,EAAQ,OAAS,KAAK,MAC7B,SAAUR,EACV,OAAQ,CAAC,CAACQ,EAAQ,OAClB,WAAYA,EAAQ,YAAc,KAAK,GAAG,QAAQ,YAAc,KAChE,YAAaA,EAAQ,aAAe,KAAK,GAAG,QAAQ,aAAe,GACnE,OAAQA,EAAQ,OAAS,KAAK,GAAG,QAAQ,OAAS,CAAA,GAAI,IAAIM,IAAM,CAC/D,KAAM,WACN,SAAU,CACT,KAAMA,EAAE,KACR,YAAaA,EAAE,YACf,WAAY,CACX,KAAM,SACN,WAAYA,EAAE,KAAOC,EAAAA,UAAUD,EAAE,KAAM,CAACE,EAAKC,KAAW,CAAC,GAAGA,EAAO,SAAU,MAAA,EAAW,EAAI,CAAA,EAC5F,SAAUH,EAAE,KAAO,OAAO,QAAQA,EAAE,IAAI,EAAE,OAAOA,GAAKA,EAAE,CAAC,EAAE,QAAQ,EAAE,IAAIA,GAAKA,EAAE,CAAC,CAAC,EAAI,CAAA,CAAC,CACxF,CACD,EACC,CAAA,EAIH,IAAII,EACJ,EAAG,CAIF,GAHAA,EAAO,MAAM,KAAK,OAAO,KAAK,YAAY,OAAOL,CAAa,EAG3DL,EAAQ,OAAQ,CAClBU,EAAK,QAAU,CAAA,EACf,gBAAiBE,KAASF,EAAM,CAC/B,GAAGT,EAAW,OAAO,QAAS,MAC3BW,EAAM,QAAQ,CAAC,EAAE,MAAM,SACzBZ,EAAQ,OAAO,CAAC,KAAMY,EAAM,QAAQ,CAAC,EAAE,MAAM,QAAQ,CAEvD,CACD,CAGA,MAAMG,EAAYL,EAAK,QAAQ,CAAC,EAAE,QAAQ,YAAc,CAAA,EACxD,GAAGK,EAAU,QAAU,CAACd,EAAW,OAAO,QAAS,CAClDT,
EAAQ,KAAKkB,EAAK,QAAQ,CAAC,EAAE,OAAO,EACpC,MAAMM,EAAU,MAAM,QAAQ,IAAID,EAAU,IAAI,MAAOE,GAAkB,CACxE,MAAMC,EAAOlB,EAAQ,OAAO,KAAKmB,aAAW,OAAQF,EAAS,SAAS,IAAI,CAAC,EAC3E,GAAG,CAACC,EAAM,MAAO,CAAC,KAAM,OAAQ,aAAcD,EAAS,GAAI,QAAS,6BAAA,EACpE,GAAI,CACH,MAAMW,EAAOd,EAAAA,iBAAiBG,EAAS,SAAS,UAAW,CAAA,CAAE,EACvDG,EAAS,MAAMF,EAAK,GAAGU,EAAM,KAAK,EAAE,EAC1C,MAAO,CAAC,KAAM,OAAQ,aAAcX,EAAS,GAAI,QAASI,eAAaD,CAAM,CAAA,CAC9E,OAASE,EAAU,CAClB,MAAO,CAAC,KAAM,OAAQ,aAAcL,EAAS,GAAI,QAASI,EAAAA,aAAa,CAAC,MAAOC,GAAK,SAAWA,GAAK,YAAc,SAAA,CAAU,CAAA,CAC7H,CACD,CAAC,CAAC,EACF9B,EAAQ,KAAK,GAAGwB,CAAO,EACvBX,EAAc,SAAWb,CAC1B,CACD,OAAS,CAACS,EAAW,OAAO,SAAWS,EAAK,UAAU,CAAC,GAAG,SAAS,YAAY,QAE5EV,EAAQ,QAAQA,EAAQ,OAAO,CAAC,KAAM,GAAK,EAC9CG,EAAI,KAAK,WAAW,CAAC,GAAGX,EAAS,CAAC,KAAM,YAAa,QAASkB,EAAK,QAAQ,CAAC,EAAE,QAAQ,SAAW,EAAA,CAAG,CAAC,CAAC,CACvG,CAAC,EAED,OAAO,OAAO,OAAOR,EAAU,CAAC,MAAO,IAAMD,EAAW,MAAA,EAAQ,CACjE,CACD,CCnDO,MAAMkC,CAAI,CAGhB,YAA4B/C,EAAwBY,EAAqB,CAA7C,KAAA,GAAAZ,EAAwB,KAAA,QAAAY,EAChDA,EAAQ,WAAW,QAAO,KAAK,UAAU,UAAY,IAAIb,EAAU,KAAK,GAAIa,EAAQ,UAAU,MAAOA,EAAQ,UAAU,KAAK,GAC5HA,EAAQ,QAAQ,OAAM,KAAK,UAAU,OAAS,IAAIuB,EAAO,KAAK,GAAIvB,EAAQ,OAAO,KAAMA,EAAQ,OAAO,KAAK,GAC3GA,EAAQ,QAAQ,QAAO,KAAK,UAAU,OAAS,IAAI6B,EAAO,KAAK,GAAI7B,EAAQ,OAAO,MAAOA,EAAQ,OAAO,KAAK,EACjH,CANQ,UAA0C,CAAA,EAclD,IAAID,EAAiBC,EAAsB,GAAoC,CAC9E,IAAIV,EAAa,CAAC,KAAM,IAAI,EAS5B,GARGU,EAAQ,QACP,OAAOA,EAAQ,OAAS,WAAkBA,EAAQ,MAChDV,EAAQ,CAACU,EAAQ,MAAa,KAAK,QAASA,EAAQ,KAAK,GAAG,KAAK,IAEpE,CAACA,EAAQ,OAASV,EAAM,CAAC,GAAK,QAC7B,OAAO,KAAK,QAAQ,OAAS,SAAUA,EAAQ,KAAK,QAAQ,MAC1DA,EAAQ,CAAC,KAAK,QAAQ,MAAa,KAAK,QAAS,KAAK,QAAQ,KAAK,GAAG,KAAK,GAE9E,CAACA,EAAM,CAAC,GAAK,CAACA,EAAM,CAAC,EAAG,MAAM,IAAI,MAAM,kCAAkCA,EAAM,CAAC,CAAC,MAAMA,EAAM,CAAC,CAAC,EAAE,EACrG,OAAO,KAAK,UAAUA,EAAM,CAAC,CAAC,EAAE,IAAIS,EAAS,CAAC,GAAGC,EAAS,MAAOV,EAAM,CAAC,EAAE,CAC3E,CAUA,MAAM,SAASE,EAAuB4C,EAAaC,EAAarC,EAA6C,CAC5G,GAAG,KAAK,eAAeR,CAAO,EAAI4C,EAAK,OAAO5C,EAC9C,IAAI8C,EAAO,EAAGC,EAAS,EACvB,QAAQC,KAAKhD,EAAQ,aAEpB,GADA+C,GAAU,KAAK,eAAeC,EAAE,OAAO,EACpCD,EAASF,EAAKC,QACZ,OAEN,GAAG9C,EAAQ,QAAU8C,EAAM,OAAO9C,EAClC,MAAMiD,EAASH,GAAQ,EAAI,CAAA,EAAK9C,EAAQ,MAAM,CAAC8C,CAAI,EAClDI,GAAWJ,GAAQ,EAAI9C,EAAUA,EAAQ,MAAM,EAAG,CAAC8C,CAAI,GAAG,OAAOzC,GAAKA,EAAE,OAAS,aAAeA,EAAE,OAAS,MAAM,EAElH,MAAO,CAAC,CAAC,KAAM,YAAa,QAAS,yBADrB,MAAM,KAAK,UAAU6C,EAAQ,OAAS,GAAGF,EAAE,IAAI,KAAKA,EAAE,OAAO,EAAE,EAAE,KAAK;AAAA;AAAA,CAAM,EAAG,IAAKxC,CAAO,CACtC,IAAK,GAAGyC,CAAM,CACpF,CAOA,eAAejD,EAAsB,CACpC,MAAMqB,EAAO,KAAK,UAAUrB,CAAO,EACnC,OAAO,KAAK,KAAMqB,EAAK,OAAS,EAAK,GAAG,CACzC,CAQA,MAAM,KAAKd,EAAiBC,EAAsB,CACjD,IAAIU,EAAO,MAAM,KAAK,IAAIX,EAAS,CAClC,OAAQ,4BACR,GAAGC,CAAA,CACH,EACD,OAAIU,IAAO,CAAC,GAAG,QACRI,mBAAiB,IAAI,OAAO,SAAa,EAAE,KAAKJ,EAAK,CAAC,EAAE,OAAO,EAAG,EAAE,EAD5C,CAAA,CAEhC,CASA,UAAUG,EAAc0B,EAAgBvC,EAA8C,CACrF,OAAO,KAAK,IAAIa,EAAM,CAAC,OAAQ,+BAA+B0B,CAAM,+BAAgC,YAAa,GAAK,GAAGvC,CAAA,CAAQ,EAC/H,KAAKR,GAAmBA,EAAQ,IAAA,GAAO,SAAW,IAAI,CACzD,CACD,CCpJO,MAAMmD,CAAG,CAOf,YAA4B3C,EAAoB,CAApB,KAAA,QAAAA,EAC3B,KAAK,IAAM,IAAImC,EAAI,KAAMnC,CAAO,EAC7B,KAAK,QAAQ,SAAS,SACxB,KAAK,aAAe,KAAK,QAAQ,SAAS,MAAM,SAAS,MAAM,EAAI,KAAK,QAAQ,SAAS,MAAQ,KAAK,QAAQ,SAAS,MAAQ,OAC/H,KAAK,iBAAA,EAEP,CAZQ,UAA8C,CAAA,EAC9C,aAGR,IAgBA,IAAI4C,EAActD,EAAgB,KAAK,aAAqE,CAC3G,GAAG,CAAC,KAAK,QAAQ,SAAS,OAAQ,MAAM,IAAI,MAAM,wBAAwB,EAC1E,IAAIuD,EAAa,IAAM,CAAC,EAcxB,MAAO,CAAC,SAbS,IAAI,QAAuB,CAACC,EAASC,IAAW,CAChE,KAAK,iBAAiBzD,CAAK,EAAE,KAAKkD,GAAK,CACtC,IAAIQ,EAAS,GACb,MAAMC,EAAOC,EAAAA,MAAc,KAAK,QAAQ,SAAS,OAAQ,CAAC,MAAO,MAAO,KAAMV,EAAG,KAAMI,CAAI,EAAG,CAAC,MAAO,CAAC,SAAU,OAAQ,QAAQ,EAAE,EACnIC,EAAQ,IAAMI,EAAK,KAAK,SAAS,EACjCA,EAAK,GAAG,QAAU3B,GAAeyB,EAAO
zB,CAAG,CAAC,EAC5C2B,EAAK,OAAO,GAAG,OAASE,GAAiBH,GAAUG,EAAK,UAAU,EAClEF,EAAK,GAAG,QAAUG,GAAiB,CAC/BA,IAAS,EAAGN,EAAQE,EAAO,KAAA,GAAU,IAAI,IAChC,IAAI,MAAM,aAAaI,CAAI,EAAE,CAAC,CAC3C,CAAC,CACF,CAAC,CACF,CAAC,EACiB,MAAAP,CAAA,CACnB,CAQA,MAAM,iBAAiBvD,EAAgB,KAAK,aAA+B,CAC1E,GAAG,CAAC,KAAK,QAAQ,SAAS,OAAQ,MAAM,IAAI,MAAM,wBAAwB,EACtEA,EAAM,SAAS,MAAM,IAAGA,GAAS,QACrC,MAAM+D,EAAIC,EAAK,KAAK,KAAK,QAAQ,QAAQ,KAAMhE,CAAK,EACpD,OAAG,MAAMiE,EAAG,KAAKF,CAAC,EAAE,KAAK,IAAM,EAAI,EAAE,MAAM,IAAM,EAAK,EAAUA,EAC3D,KAAK,UAAU/D,CAAK,EAAU,KAAK,UAAUA,CAAK,GACvD,KAAK,UAAUA,CAAK,EAAI,MAAM,6DAA6DA,CAAK,EAAE,EAChG,KAAKoB,GAAQA,EAAK,aAAa,EAC/B,KAAK8C,GAAO,OAAO,KAAKA,CAAG,CAAC,EAAE,KAAK,MAAMC,IACzC,MAAMF,EAAG,UAAUF,EAAGI,CAAM,EAC5B,OAAO,KAAK,UAAUnE,CAAK,EACpB+D,EACP,EACK,KAAK,UAAU/D,CAAK,EAC5B,CAOA,IAAIsD,EAAqE,CACxE,IAAIc,EACJ,MAAO,CACN,MAAO,IAAM,CAAEA,GAAQ,UAAA,CAAa,EACpC,SAAU,IAAI,QAAQ,MAAMvD,GAAO,CAClCuD,EAAS,MAAMC,EAAAA,aAAa,KAAK,EACjC,KAAM,CAAC,KAAAR,CAAA,EAAQ,MAAMO,EAAO,UAAUd,CAAI,EAC1C,MAAMc,EAAO,UAAA,EACbvD,EAAIgD,EAAK,KAAK,KAAA,GAAU,IAAI,CAC7B,CAAC,CAAA,CAEH,CAQA,mBAAmBS,KAAmBC,EAAuB,CAC5D,GAAGA,EAAY,OAAS,EAAG,MAAM,IAAI,MAAM,wCAAwC,EAEnF,MAAMC,EAAS,CAACjD,EAAckD,EAAqB,KAC3ClD,EAAK,cAAc,MAAM,EAAE,EAAE,IAAI,CAACmD,EAAMC,IAC7CD,EAAK,WAAW,CAAC,GAAKC,EAAQ,GAAMF,EAAaA,CAAU,EAAE,MAAM,EAAGA,CAAU,EAG7EG,EAAmB,CAACC,EAAcC,IAAyB,CAChE,GAAID,EAAG,SAAWC,EAAG,OAAQ,MAAM,IAAI,MAAM,6BAA6B,EAC1E,MAAMC,EAAUC,EAAG,SAASH,CAAE,EAAGI,EAAUD,EAAG,SAASF,CAAE,EACnDI,EAAaF,EAAG,IAAID,EAASE,CAAO,EACpCE,EAAaH,EAAG,KAAKD,CAAO,EAC5BK,EAAaJ,EAAG,KAAKC,CAAO,EAClC,OAAGE,EAAW,WAAW,CAAC,IAAM,GAAKC,EAAW,WAAW,CAAC,IAAM,EAAU,EACrEF,EAAW,SAAA,EAAW,CAAC,GAAKC,EAAW,WAAW,CAAC,EAAIC,EAAW,SAAA,EAAW,CAAC,EACtF,EAEMC,EAAIb,EAAOF,CAAM,EACjBgB,EAAef,EAAY,IAAIvD,GAAKwD,EAAOxD,CAAC,CAAC,EAAE,IAAIuE,GAAaX,EAAiBS,EAAGE,CAAS,CAAC,EACpG,MAAO,CAAC,IAAKD,EAAa,OAAO,CAACE,EAAKC,IAAMD,EAAMC,EAAG,CAAC,EAAIH,EAAa,OAAQ,IAAK,KAAK,IAAI,GAAGA,CAAY,EAAG,aAAAA,CAAA,CACjH,CACD,CC1FO,MAAMI,EAAkB,CAC9B,KAAM,MACN,YAAa,qDACb,KAAM,CAAC,QAAS,CAAC,KAAM,SAAU,YAAa,iBAAkB,SAAU,GAAI,EAC9E,GAAKpD,GAA4BqD,EAAAA,IAAIrD,EAAK,OAAO,EAClD,EAEasD,EAAuB,CACnC,KAAM,eACN,YAAa,4BACb,KAAM,CAAA,EACN,GAAI,SAAY,IAAI,KAAA,EAAO,YAAA,CAC5B,EAEaC,EAAmB,CAC/B,KAAM,OACN,YAAa,mBACb,KAAM,CACL,SAAU,CAAC,KAAM,SAAU,YAAa,qBAAsB,KAAM,CAAC,MAAO,OAAQ,QAAQ,EAAG,SAAU,EAAA,EACzG,KAAM,CAAC,KAAM,SAAU,YAAa,kBAAmB,SAAU,EAAA,CAAI,EAEtE,GAAI,MAAOvD,EAAMxC,IAAO,CACvB,GAAI,CACH,OAAOwC,EAAK,KAAA,CACX,IAAK,OACJ,OAAO,MAAMoD,EAAQ,GAAG,CAAC,QAASpD,EAAK,IAAA,EAAOxC,CAAE,EACjD,IAAK,OACJ,OAAO,MAAMgG,EAAO,GAAG,CAAC,KAAMxD,EAAK,IAAA,EAAOxC,CAAE,EAC7C,IAAK,SACJ,OAAO,MAAMiG,EAAW,GAAG,CAAC,KAAMzD,EAAK,IAAA,EAAOxC,CAAE,CACjD,CAEF,OAAQkC,EAAU,CACjB,MAAO,CAAC,MAAOA,GAAK,SAAWA,EAAI,UAAS,CAC7C,CACD,CACD,EAEagE,EAAoB,CAChC,KAAM,QACN,YAAa,2BACb,KAAM,CACL,IAAK,CAAC,KAAM,SAAU,YAAa,eAAgB,SAAU,EAAA,EAC7D,OAAQ,CAAC,KAAM,SAAU,YAAa,qBAAsB,KAAM,CAAC,MAAO,OAAQ,MAAO,QAAQ,EAAG,QAAS,KAAA,EAC7G,QAAS,CAAC,KAAM,SAAU,YAAa,uBAAwB,QAAS,EAAC,EACzE,KAAM,CAAC,KAAM,SAAU,YAAa,mBAAA,CAAmB,EAExD,GAAK1D,GAKC,IAAI2D,EAAAA,KAAK,CAAC,IAAK3D,EAAK,IAAK,QAASA,EAAK,QAAQ,EAAE,QAAQ,CAAC,OAAQA,EAAK,QAAU,MAAO,KAAMA,EAAK,IAAA,CAAK,CAC/G,EAEawD,EAAiB,CAC7B,KAAM,kBACN,YAAa,8BACb,KAAM,CACL,KAAM,CAAC,KAAM,SAAU,YAAa,sBAAuB,SAAU,EAAA,CAAI,EAE1E,GAAI,MAAOxD,GAAyB,CACnC,MAAM4D,EAAUC,EAAAA,mBAAmB,IAAI,EACjC/E,EAAO,MAAMgF,KAAQ,CAAC,QAAAF,CAAA,EAAU5D,EAAK,KAAM,EAAI,EAAE,MAAON,GAAakE,EAAQ,OAAO,MAAM,KAAKlE,CAAG,CAAC,EACzG,MAAO,CAAC,GAAGkE,EAAQ,OAAQ,OAAQ9E,EAAM,OAAQ,OAAW,OAAQ,MAAA,CACrE,CACD,EAEa2E,EAAqB,CACjC,KAAM,kBACN,YAAa,8BACb,KAAM,CACL,KAAM,CAAC,KAAM,SAAU,YAAa,sBAAuB,SAAU,EAAA,CAAI,EAE1E,GAAI,MAAO
zD,IAA0B,CAAC,OAAQ+D,EAAAA,mBAAmB/D,EAAK,IAAI,GAAA,EAC3E,EAEagE,EAAqB,CACjC,KAAM,SACN,YAAa,4FACb,KAAM,CACL,MAAO,CAAC,KAAM,SAAU,YAAa,gBAAiB,SAAU,EAAA,EAChE,OAAQ,CAAC,KAAM,SAAU,YAAa,8BAA+B,QAAS,CAAA,CAAC,EAEhF,GAAI,MAAOhE,GAGL,CACL,MAAMiE,EAAO,MAAM,MAAM,uCAAuC,mBAAmBjE,EAAK,KAAK,CAAC,GAAI,CACjG,QAAS,CAAC,aAAc,4CAA6C,kBAAmB,gBAAA,CAAgB,CACxG,EAAE,KAAKlB,GAAQA,EAAK,MAAM,EAC3B,IAAIoF,EAAOC,EAAQ,8BACnB,MAAM/E,EAAU,IAAIgF,OACpB,MAAOF,EAAQC,EAAM,KAAKF,CAAI,KAAO,MAAM,CAC1C,IAAII,EAAM,iBAAiB,KAAK,mBAAmBH,EAAM,CAAC,CAAC,CAAC,IAAI,CAAC,EAGjE,GAFGG,IAAKA,EAAM,mBAAmBA,CAAG,GACjCA,GAAKjF,EAAQ,IAAIiF,CAAG,EACpBjF,EAAQ,OAASY,EAAK,QAAU,GAAI,KACxC,CACA,OAAOZ,CACR,CACD"}
|
package/dist/index.mjs
CHANGED
|
@@ -1,5 +1,5 @@
 import { createWorker as S } from "tesseract.js";
-import { objectMap as
+import { objectMap as _, JSONAttemptParse as w, findByProp as b, JSONSanitize as y, Http as j, consoleInterceptor as T, fn as q, ASet as v } from "@ztimson/utils";
 import { Anthropic as P } from "@anthropic-ai/sdk";
 import { Ollama as A } from "ollama";
 import { OpenAI as E } from "openai";
@@ -16,18 +16,24 @@ class L extends k {
 }
 client;
 toStandard(t) {
-
-
-
-
-
-
-
-
+const e = [];
+for (let n = 0; n < t.length; n++) {
+const i = t[n];
+if (typeof i.content != "string" && (i.role == "assistant" ? i.content.filter((o) => o.type == "tool_use").forEach((o) => {
+e.push({ role: "tool", id: o.id, name: o.name, args: o.input });
+}) : i.role == "user" && i.content.filter((o) => o.type == "tool_result").forEach((o) => {
+const p = e.find((c) => c.id == o.tool_use_id);
+p && (p[o.is_error ? "error" : "content"] = o.content);
+}), i.content = i.content.filter((o) => o.type == "text").map((o) => o.text).join(`
+
+`)), i.content) {
+const o = e.at(-1);
+o && o.role == "assistant" && i.role == "assistant" ? o.content += `

-`)
+` + i.content : e.push({ role: i.role, content: i.content });
+}
 }
-return
+return e;
 }
 fromStandard(t) {
 for (let e = 0; e < t.length; e++)
@@ -43,71 +49,73 @@ class L extends k {
 return t;
 }
 ask(t, e = {}) {
-const n = new AbortController(),
-let
-e.compress && (
+const n = new AbortController(), i = new Promise(async (o, p) => {
+let c = this.fromStandard([...e.history || [], { role: "user", content: t }]);
+e.compress && (c = await this.ai.llm.compress(c, e.compress.max, e.compress.min, e));
 const m = {
 model: e.model || this.model,
 max_tokens: e.max_tokens || this.ai.options.max_tokens || 4096,
 system: e.system || this.ai.options.system || "",
 temperature: e.temperature || this.ai.options.temperature || 0.7,
-tools: (e.tools || this.ai.options.tools || []).map((
-name:
-description:
+tools: (e.tools || this.ai.options.tools || []).map((a) => ({
+name: a.name,
+description: a.description,
 input_schema: {
 type: "object",
-properties:
-required:
+properties: a.args ? _(a.args, (r, d) => ({ ...d, required: void 0 })) : {},
+required: a.args ? Object.entries(a.args).filter((r) => r[1].required).map((r) => r[0]) : []
 },
 fn: void 0
 })),
-messages:
+messages: c,
 stream: !!e.stream
 };
-let
+let l, s = !0;
 do {
-if (
-
-
+if (l = await this.client.messages.create(m), e.stream) {
+s || e.stream({ text: `
+
+` }), s = !1, l.content = [];
+for await (const r of l) {
 if (n.signal.aborted) break;
 if (r.type === "content_block_start")
-r.content_block.type === "text" ?
+r.content_block.type === "text" ? l.content.push({ type: "text", text: "" }) : r.content_block.type === "tool_use" && l.content.push({ type: "tool_use", id: r.content_block.id, name: r.content_block.name, input: "" });
 else if (r.type === "content_block_delta")
 if (r.delta.type === "text_delta") {
-const
-
-} else r.delta.type === "input_json_delta" && (
+const d = r.delta.text;
+l.content.at(-1).text += d, e.stream({ text: d });
+} else r.delta.type === "input_json_delta" && (l.content.at(-1).input += r.delta.partial_json);
 else if (r.type === "content_block_stop") {
-const
-
+const d = l.content.at(-1);
+d.input != null && (d.input = d.input ? w(d.input, {}) : {});
 } else if (r.type === "message_stop")
 break;
 }
 }
-const
-if (
-
-const r = await Promise.all(
-const
-if (!
+const a = l.content.filter((r) => r.type === "tool_use");
+if (a.length && !n.signal.aborted) {
+c.push({ role: "assistant", content: l.content });
+const r = await Promise.all(a.map(async (d) => {
+const f = e.tools?.find(b("name", d.name));
+if (!f) return { tool_use_id: d.id, is_error: !0, content: "Tool not found" };
 try {
-const
-return { type: "tool_result", tool_use_id:
-} catch (
-return { type: "tool_result", tool_use_id:
+const h = await f.fn(d.input, this.ai);
+return { type: "tool_result", tool_use_id: d.id, content: y(h) };
+} catch (h) {
+return { type: "tool_result", tool_use_id: d.id, is_error: !0, content: h?.message || h?.toString() || "Unknown" };
 }
 }));
-
+c.push({ role: "user", content: r }), m.messages = c;
 }
-} while (!n.signal.aborted &&
-e.stream && e.stream({ done: !0 }),
+} while (!n.signal.aborted && l.content.some((a) => a.type === "tool_use"));
+e.stream && e.stream({ done: !0 }), o(this.toStandard([...c, {
 role: "assistant",
-content:
+content: l.content.filter((a) => a.type == "text").map((a) => a.text).join(`

 `)
 }]));
 });
-return Object.assign(
+return Object.assign(i, { abort: () => n.abort() });
 }
 }
 class R extends k {
@@ -129,10 +137,10 @@ class R extends k {
 return t.map((e) => e.role != "tool" ? e : { role: "tool", tool_name: e.name, content: e.error || e.content });
 }
 ask(t, e = {}) {
-const n = new AbortController(),
-let
-m[0].roll == "system" && (
-const
+const n = new AbortController(), i = new Promise(async (o, p) => {
+let c = e.system || this.ai.options.system, m = this.fromStandard([...e.history || [], { role: "user", content: t }]);
+m[0].roll == "system" && (c ? m.shift() : c = m.shift()), e.compress && (m = await this.ai.llm.compress(m, e.compress.max, e.compress.min)), e.system && m.unshift({ role: "system", content: c });
+const l = {
 model: e.model || this.model,
 messages: m,
 stream: !!e.stream,
@@ -141,48 +149,48 @@ class R extends k {
 temperature: e.temperature || this.ai.options.temperature || 0.7,
 num_predict: e.max_tokens || this.ai.options.max_tokens || 4096
 },
-tools: (e.tools || this.ai.options.tools || []).map((
+tools: (e.tools || this.ai.options.tools || []).map((a) => ({
 type: "function",
 function: {
-name:
-description:
+name: a.name,
+description: a.description,
 parameters: {
 type: "object",
-properties:
-required:
+properties: a.args ? _(a.args, (r, d) => ({ ...d, required: void 0 })) : {},
+required: a.args ? Object.entries(a.args).filter((r) => r[1].required).map((r) => r[0]) : []
 }
 }
 }))
 };
-let
+let s;
 do {
-if (
-
-for await (const
-if (n.signal.aborted || (
+if (s = await this.client.chat(l), e.stream) {
+s.message = { role: "assistant", content: "", tool_calls: [] };
+for await (const a of s)
+if (n.signal.aborted || (a.message?.content && (s.message.content += a.message.content, e.stream({ text: a.message.content })), a.message?.tool_calls && (s.message.tool_calls = a.message.tool_calls), a.done)) break;
 }
-if (
-m.push(
-const
-const
-if (!
-const f = typeof
+if (s.message?.tool_calls?.length && !n.signal.aborted) {
+m.push(s.message);
+const a = await Promise.all(s.message.tool_calls.map(async (r) => {
+const d = (e.tools || this.ai.options.tools)?.find(b("name", r.function.name));
+if (!d) return { role: "tool", tool_name: r.function.name, content: '{"error": "Tool not found"}' };
+const f = typeof r.function.arguments == "string" ? w(r.function.arguments, {}) : r.function.arguments;
 try {
-const h = await
-return { role: "tool", tool_name:
+const h = await d.fn(f, this.ai);
+return { role: "tool", tool_name: r.function.name, args: f, content: y(h) };
 } catch (h) {
-return { role: "tool", tool_name:
+return { role: "tool", tool_name: r.function.name, args: f, content: y({ error: h?.message || h?.toString() || "Unknown" }) };
 }
 }));
-m.push(...
+m.push(...a), l.messages = m;
 }
-} while (!n.signal.aborted &&
-e.stream && e.stream({ done: !0 }),
+} while (!n.signal.aborted && s.message?.tool_calls?.length);
+e.stream && e.stream({ done: !0 }), o(this.toStandard([...m, { role: "assistant", content: s.message?.content }]));
 });
-return Object.assign(
+return Object.assign(i, { abort: () => n.abort() });
 }
 }
-class
+class J extends k {
 constructor(t, e, n) {
 super(), this.ai = t, this.apiToken = e, this.model = n, this.client = new E({ apiKey: e });
 }
@@ -191,16 +199,16 @@ class I extends k {
 for (let e = 0; e < t.length; e++) {
 const n = t[e];
 if (n.role === "assistant" && n.tool_calls) {
-const
+const i = n.tool_calls.map((o) => ({
 role: "tool",
-id:
-name:
-args: w(
+id: o.id,
+name: o.function.name,
+args: w(o.function.arguments, {})
 }));
-t.splice(e, 1, ...
+t.splice(e, 1, ...i), e += i.length - 1;
 } else if (n.role === "tool" && n.content) {
-const
-
+const i = t.find((o) => n.tool_call_id == o.id);
+i && (n.content.includes('"error":') ? i.error = n.content : i.content = n.content), t.splice(e, 1), e--;
 }
 }
 return t;
@@ -219,61 +227,61 @@ class I extends k {
 }) : e.push(n), e), []);
 }
 ask(t, e = {}) {
-const n = new AbortController(),
-let
-e.compress && (
+const n = new AbortController(), i = new Promise(async (o, p) => {
+let c = this.fromStandard([...e.history || [], { role: "user", content: t }]);
+e.compress && (c = await this.ai.llm.compress(c, e.compress.max, e.compress.min, e));
 const m = {
 model: e.model || this.model,
-messages:
+messages: c,
 stream: !!e.stream,
 max_tokens: e.max_tokens || this.ai.options.max_tokens || 4096,
 temperature: e.temperature || this.ai.options.temperature || 0.7,
-tools: (e.tools || this.ai.options.tools || []).map((
+tools: (e.tools || this.ai.options.tools || []).map((s) => ({
 type: "function",
 function: {
-name:
-description:
+name: s.name,
+description: s.description,
 parameters: {
 type: "object",
-properties:
-required:
+properties: s.args ? _(s.args, (a, r) => ({ ...r, required: void 0 })) : {},
+required: s.args ? Object.entries(s.args).filter((a) => a[1].required).map((a) => a[0]) : []
 }
 }
 }))
 };
-let
+let l;
 do {
-if (
-
-for await (const
+if (l = await this.client.chat.completions.create(m), e.stream) {
+l.choices = [];
+for await (const a of l) {
 if (n.signal.aborted) break;
-
+a.choices[0].delta.content && e.stream({ text: a.choices[0].delta.content });
 }
 }
-const
-if (
-
-const
-const
-if (!
+const s = l.choices[0].message.tool_calls || [];
+if (s.length && !n.signal.aborted) {
+c.push(l.choices[0].message);
+const a = await Promise.all(s.map(async (r) => {
+const d = e.tools?.find(b("name", r.function.name));
+if (!d) return { role: "tool", tool_call_id: r.id, content: '{"error": "Tool not found"}' };
 try {
-const f = w(
-return { role: "tool", tool_call_id:
+const f = w(r.function.arguments, {}), h = await d.fn(f, this.ai);
+return { role: "tool", tool_call_id: r.id, content: y(h) };
 } catch (f) {
-return { role: "tool", tool_call_id:
+return { role: "tool", tool_call_id: r.id, content: y({ error: f?.message || f?.toString() || "Unknown" }) };
 }
 }));
-
+c.push(...a), m.messages = c;
 }
-} while (!n.signal.aborted &&
-e.stream && e.stream({ done: !0 }),
+} while (!n.signal.aborted && l.choices?.[0]?.message?.tool_calls?.length);
+e.stream && e.stream({ done: !0 }), o(this.toStandard([...c, { role: "assistant", content: l.choices[0].message.content || "" }]));
 });
-return Object.assign(
+return Object.assign(i, { abort: () => n.abort() });
 }
 }
-class
+class W {
 constructor(t, e) {
-this.ai = t, this.options = e, e.anthropic?.token && (this.providers.anthropic = new L(this.ai, e.anthropic.token, e.anthropic.model)), e.ollama?.host && (this.providers.ollama = new R(this.ai, e.ollama.host, e.ollama.model)), e.openAi?.token && (this.providers.openAi = new
+this.ai = t, this.options = e, e.anthropic?.token && (this.providers.anthropic = new L(this.ai, e.anthropic.token, e.anthropic.model)), e.ollama?.host && (this.providers.ollama = new R(this.ai, e.ollama.host, e.ollama.model)), e.openAi?.token && (this.providers.openAi = new J(this.ai, e.openAi.token, e.openAi.model));
 }
 providers = {};
 /**
@@ -295,17 +303,17 @@ class J {
 * @param {LLMRequest} options LLM options
 * @returns {Promise<LLMMessage[]>} New chat history will summary at index 0
 */
-async compress(t, e, n,
+async compress(t, e, n, i) {
 if (this.estimateTokens(t) < e) return t;
-let
-for (let
-if (
+let o = 0, p = 0;
+for (let s of t.toReversed())
+if (p += this.estimateTokens(s.content), p < n) o++;
 else break;
-if (t.length <=
-const
-return [{ role: "assistant", content: `Conversation Summary: ${await this.summarize(m.map((
+if (t.length <= o) return t;
+const c = o == 0 ? [] : t.slice(-o), m = (o == 0 ? t : t.slice(0, -o)).filter((s) => s.role === "assistant" || s.role === "user");
+return [{ role: "assistant", content: `Conversation Summary: ${await this.summarize(m.map((s) => `${s.role}: ${s.content}`).join(`

-`), 250,
+`), 250, i)}` }, ...c];
 }
 /**
 * Estimate variable as tokens
@@ -337,12 +345,12 @@ class J {
 * @returns {Promise<string>} Summary
 */
 summarize(t, e, n) {
-return this.ask(t, { system: `Generate a brief summary <= ${e} tokens. Output nothing else`, temperature: 0.3, ...n }).then((
+return this.ask(t, { system: `Generate a brief summary <= ${e} tokens. Output nothing else`, temperature: 0.3, ...n }).then((i) => i.pop()?.content || null);
 }
 }
 class X {
 constructor(t) {
-this.options = t, this.llm = new
+this.options = t, this.llm = new W(this, t), this.options.whisper?.binary && (this.whisperModel = this.options.whisper?.model.endsWith(".bin") ? this.options.whisper?.model : this.options.whisper?.model + ".bin", this.downloadAsrModel());
 }
 downloads = {};
 whisperModel;
@@ -358,12 +366,12 @@ class X {
 if (!this.options.whisper?.binary) throw new Error("Whisper not configured");
 let n = () => {
 };
-return { response: new Promise((
-this.downloadAsrModel(e).then((
+return { response: new Promise((o, p) => {
+this.downloadAsrModel(e).then((c) => {
 let m = "";
-const
-n = () =>
-
+const l = M(this.options.whisper?.binary, ["-nt", "-np", "-m", c, "-f", t], { stdio: ["ignore", "pipe", "ignore"] });
+n = () => l.kill("SIGTERM"), l.on("error", (s) => p(s)), l.stdout.on("data", (s) => m += s.toString()), l.on("close", (s) => {
+s === 0 ? o(m.trim() || null) : p(new Error(`Exit code ${s}`));
 });
 });
 }), abort: n };
@@ -393,8 +401,8 @@ class X {
 },
 response: new Promise(async (n) => {
 e = await S("eng");
-const { data:
-await e.terminate(), n(
+const { data: i } = await e.recognize(t);
+await e.terminate(), n(i.text.trim() || null);
 })
 };
 }
@@ -406,19 +414,19 @@ class X {
 */
 semanticSimilarity(t, ...e) {
 if (e.length < 2) throw new Error("Requires at least 2 strings to compare");
-const n = (
-if (
-const
-return
-},
-return { avg:
+const n = (c, m = 10) => c.toLowerCase().split("").map((l, s) => l.charCodeAt(0) * (s + 1) % m / m).slice(0, m), i = (c, m) => {
+if (c.length !== m.length) throw new Error("Vectors must be same length");
+const l = g.tensor1d(c), s = g.tensor1d(m), a = g.dot(l, s), r = g.norm(l), d = g.norm(s);
+return r.dataSync()[0] === 0 || d.dataSync()[0] === 0 ? 0 : a.dataSync()[0] / (r.dataSync()[0] * d.dataSync()[0]);
+}, o = n(t), p = e.map((c) => n(c)).map((c) => i(o, c));
+return { avg: p.reduce((c, m) => c + m, 0) / p.length, max: Math.max(...p), similarities: p };
 }
 }
-const
+const I = {
 name: "cli",
 description: "Use the command line interface, returns any output",
 args: { command: { type: "string", description: "Command to run", required: !0 } },
-fn: (
+fn: (u) => U`${u.command}`
 }, Y = {
 name: "get_datetime",
 description: "Get current date and time",
@@ -431,15 +439,15 @@ const W = {
 language: { type: "string", description: "Execution language", enum: ["cli", "node", "python"], required: !0 },
 code: { type: "string", description: "Code to execute", required: !0 }
 },
-fn: async (
+fn: async (u, t) => {
 try {
-switch (
+switch (u.type) {
 case "bash":
-return await
+return await I.fn({ command: u.code }, t);
 case "node":
-return await N.fn({ code:
+return await N.fn({ code: u.code }, t);
 case "python":
-return await z.fn({ code:
+return await z.fn({ code: u.code }, t);
 }
 } catch (e) {
 return { error: e?.message || e.toString() };
@@ -454,15 +462,15 @@ const W = {
 headers: { type: "object", description: "HTTP headers to send", default: {} },
 body: { type: "object", description: "HTTP body to send" }
 },
-fn: (
+fn: (u) => new j({ url: u.url, headers: u.headers }).request({ method: u.method || "GET", body: u.body })
 }, N = {
 name: "exec_javascript",
 description: "Execute commonjs javascript",
 args: {
 code: { type: "string", description: "CommonJS javascript", required: !0 }
 },
-fn: async (
-const t = T(null), e = await q({ console: t },
+fn: async (u) => {
+const t = T(null), e = await q({ console: t }, u.code, !0).catch((n) => t.output.error.push(n));
 return { ...t.output, return: e, stdout: void 0, stderr: void 0 };
 }
 }, z = {
@@ -471,7 +479,7 @@ const W = {
 args: {
 code: { type: "string", description: "CommonJS javascript", required: !0 }
 },
-fn: async (
+fn: async (u) => ({ result: $`python -c "${u.code}"` })
 }, te = {
 name: "search",
 description: "Use a search engine to find relevant URLs, should be changed with fetch to scrape sources",
@@ -479,28 +487,28 @@ const W = {
 query: { type: "string", description: "Search string", required: !0 },
 length: { type: "string", description: "Number of results to return", default: 5 }
 },
-fn: async (
-const t = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(
+fn: async (u) => {
+const t = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(u.query)}`, {
 headers: { "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)", "Accept-Language": "en-US,en;q=0.9" }
-}).then((
+}).then((o) => o.text());
 let e, n = /<a .*?href="(.+?)".+?<\/a>/g;
-const
+const i = new v();
 for (; (e = n.exec(t)) !== null; ) {
-let
-if (
+let o = /uddg=(.+)&?/.exec(decodeURIComponent(e[1]))?.[1];
+if (o && (o = decodeURIComponent(o)), o && i.add(o), i.size >= (u.length || 5)) break;
 }
-return
+return i;
 }
 };
 export {
 X as Ai,
 L as Anthropic,
-
+I as CliTool,
 Y as DateTimeTool,
 Z as ExecTool,
 ee as FetchTool,
 N as JSTool,
-
+W as LLM,
 z as PythonTool,
 te as SearchTool
 };
|
package/dist/index.mjs.map
CHANGED
|
@@ -1 +1 @@
-
{"version":3,"file":"index.mjs","sources":["../src/provider.ts","../src/antrhopic.ts","../src/ollama.ts","../src/open-ai.ts","../src/llm.ts","../src/ai.ts","../src/tools.ts"],"sourcesContent":["import {LLMMessage, LLMOptions, LLMRequest} from './llm.ts';\n\nexport type AbortablePromise<T> = Promise<T> & {abort: () => void};\n\nexport abstract class LLMProvider {\n\tabstract ask(message: string, options: LLMRequest): AbortablePromise<LLMMessage[]>;\n}\n","import {Anthropic as anthropic} from '@anthropic-ai/sdk';\nimport {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {LLMMessage, LLMRequest} from './llm.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\n\nexport class Anthropic extends LLMProvider {\n\tclient!: anthropic;\n\n\tconstructor(public readonly ai: Ai, public readonly apiToken: string, public model: string) {\n\t\tsuper();\n\t\tthis.client = new anthropic({apiKey: apiToken});\n\t}\n\n\tprivate toStandard(history: any[]): LLMMessage[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tconst orgI = i;\n\t\t\tif(typeof history[orgI].content != 'string') {\n\t\t\t\tif(history[orgI].role == 'assistant') {\n\t\t\t\t\thistory[orgI].content.filter((c: any) => c.type =='tool_use').forEach((c: any) => {\n\t\t\t\t\t\ti++;\n\t\t\t\t\t\thistory.splice(i, 0, {role: 'tool', id: c.id, name: c.name, args: c.input});\n\t\t\t\t\t});\n\t\t\t\t} else if(history[orgI].role == 'user') {\n\t\t\t\t\thistory[orgI].content.filter((c: any) => c.type =='tool_result').forEach((c: any) => {\n\t\t\t\t\t\tconst h = history.find((h: any) => h.id == c.tool_use_id);\n\t\t\t\t\t\th[c.is_error ? 'error' : 'content'] = c.content;\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\thistory[orgI].content = history[orgI].content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\\n\\n');\n\t\t\t}\n\t\t}\n\t\treturn history.filter(h => !!h.content);\n\t}\n\n\tprivate fromStandard(history: LLMMessage[]): any[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tif(history[i].role == 'tool') {\n\t\t\t\tconst h: any = history[i];\n\t\t\t\thistory.splice(i, 1,\n\t\t\t\t\t{role: 'assistant', content: [{type: 'tool_use', id: h.id, name: h.name, input: h.args}]},\n\t\t\t\t\t{role: 'user', content: [{type: 'tool_result', tool_use_id: h.id, is_error: !!h.error, content: h.error || h.content}]}\n\t\t\t\t)\n\t\t\t\ti++;\n\t\t\t}\n\t\t}\n\t\treturn history;\n\t}\n\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tconst controller = new AbortController();\n\t\tconst response = new Promise<any>(async (res, rej) => {\n\t\t\tlet history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);\n\t\t\tif(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);\n\t\t\tconst requestParams: any = {\n\t\t\t\tmodel: options.model || this.model,\n\t\t\t\tmax_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,\n\t\t\t\tsystem: options.system || this.ai.options.system || '',\n\t\t\t\ttemperature: options.temperature || this.ai.options.temperature || 0.7,\n\t\t\t\ttools: (options.tools || this.ai.options.tools || []).map(t => ({\n\t\t\t\t\tname: t.name,\n\t\t\t\t\tdescription: t.description,\n\t\t\t\t\tinput_schema: {\n\t\t\t\t\t\ttype: 'object',\n\t\t\t\t\t\tproperties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},\n\t\t\t\t\t\trequired: t.args ? 
Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []\n\t\t\t\t\t},\n\t\t\t\t\tfn: undefined\n\t\t\t\t})),\n\t\t\t\tmessages: history,\n\t\t\t\tstream: !!options.stream,\n\t\t\t};\n\n\t\t\t// Run tool changes\n\t\t\tlet resp: any;\n\t\t\tdo {\n\t\t\t\tresp = await this.client.messages.create(requestParams);\n\n\t\t\t\t// Streaming mode\n\t\t\t\tif(options.stream) {\n\t\t\t\t\tresp.content = [];\n\t\t\t\t\tfor await (const chunk of resp) {\n\t\t\t\t\t\tif(controller.signal.aborted) break;\n\t\t\t\t\t\tif(chunk.type === 'content_block_start') {\n\t\t\t\t\t\t\tif(chunk.content_block.type === 'text') {\n\t\t\t\t\t\t\t\tresp.content.push({type: 'text', text: ''});\n\t\t\t\t\t\t\t} else if(chunk.content_block.type === 'tool_use') {\n\t\t\t\t\t\t\t\tresp.content.push({type: 'tool_use', id: chunk.content_block.id, name: chunk.content_block.name, input: <any>''});\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t} else if(chunk.type === 'content_block_delta') {\n\t\t\t\t\t\t\tif(chunk.delta.type === 'text_delta') {\n\t\t\t\t\t\t\t\tconst text = chunk.delta.text;\n\t\t\t\t\t\t\t\tresp.content.at(-1).text += text;\n\t\t\t\t\t\t\t\toptions.stream({text});\n\t\t\t\t\t\t\t} else if(chunk.delta.type === 'input_json_delta') {\n\t\t\t\t\t\t\t\tresp.content.at(-1).input += chunk.delta.partial_json;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t} else if(chunk.type === 'content_block_stop') {\n\t\t\t\t\t\t\tconst last = resp.content.at(-1);\n\t\t\t\t\t\t\tif(last.input != null) last.input = last.input ? JSONAttemptParse(last.input, {}) : {};\n\t\t\t\t\t\t} else if(chunk.type === 'message_stop') {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Run tools\n\t\t\t\tconst toolCalls = resp.content.filter((c: any) => c.type === 'tool_use');\n\t\t\t\tif(toolCalls.length && !controller.signal.aborted) {\n\t\t\t\t\thistory.push({role: 'assistant', content: resp.content});\n\t\t\t\t\tconst results = await Promise.all(toolCalls.map(async (toolCall: any) => {\n\t\t\t\t\t\tconst tool = options.tools?.find(findByProp('name', toolCall.name));\n\t\t\t\t\t\tif(!tool) return {tool_use_id: toolCall.id, is_error: true, content: 'Tool not found'};\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst result = await tool.fn(toolCall.input, this.ai);\n\t\t\t\t\t\t\treturn {type: 'tool_result', tool_use_id: toolCall.id, content: JSONSanitize(result)};\n\t\t\t\t\t\t} catch (err: any) {\n\t\t\t\t\t\t\treturn {type: 'tool_result', tool_use_id: toolCall.id, is_error: true, content: err?.message || err?.toString() || 'Unknown'};\n\t\t\t\t\t\t}\n\t\t\t\t\t}));\n\t\t\t\t\thistory.push({role: 'user', content: results});\n\t\t\t\t\trequestParams.messages = history;\n\t\t\t\t}\n\t\t\t} while (!controller.signal.aborted && resp.content.some((c: any) => c.type === 'tool_use'));\n\t\t\tif(options.stream) options.stream({done: true});\n\t\t\tres(this.toStandard([...history, {\n\t\t\t\trole: 'assistant',\n\t\t\t\tcontent: resp.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\\n\\n')\n\t\t\t}]));\n\t\t});\n\t\treturn Object.assign(response, {abort: () => controller.abort()});\n\t}\n}\n","import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {LLMMessage, LLMRequest} from './llm.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\nimport {Ollama as ollama} from 'ollama';\n\nexport class Ollama extends LLMProvider {\n\tclient!: ollama;\n\n\tconstructor(public readonly ai: Ai, public host: string, public model: string) 
{\n\t\tsuper();\n\t\tthis.client = new ollama({host});\n\t}\n\n\tprivate toStandard(history: any[]): LLMMessage[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tif(history[i].role == 'assistant' && history[i].tool_calls) {\n\t\t\t\tif(history[i].content) delete history[i].tool_calls;\n\t\t\t\telse {\n\t\t\t\t\thistory.splice(i, 1);\n\t\t\t\t\ti--;\n\t\t\t\t}\n\t\t\t} else if(history[i].role == 'tool') {\n\t\t\t\tconst error = history[i].content.startsWith('{\"error\":');\n\t\t\t\thistory[i] = {role: 'tool', name: history[i].tool_name, args: history[i].args, [error ? 'error' : 'content']: history[i].content};\n\t\t\t}\n\t\t}\n\t\treturn history;\n\t}\n\n\tprivate fromStandard(history: LLMMessage[]): any[] {\n\t\treturn history.map((h: any) => {\n\t\t\tif(h.role != 'tool') return h;\n\t\t\treturn {role: 'tool', tool_name: h.name, content: h.error || h.content}\n\t\t});\n\t}\n\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tconst controller = new AbortController();\n\t\tconst response = new Promise<any>(async (res, rej) => {\n\t\t\tlet system = options.system || this.ai.options.system;\n\t\t\tlet history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);\n\t\t\tif(history[0].roll == 'system') {\n\t\t\t\tif(!system) system = history.shift();\n\t\t\t\telse history.shift();\n\t\t\t}\n\t\t\tif(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min);\n\t\t\tif(options.system) history.unshift({role: 'system', content: system})\n\n\t\t\tconst requestParams: any = {\n\t\t\t\tmodel: options.model || this.model,\n\t\t\t\tmessages: history,\n\t\t\t\tstream: !!options.stream,\n\t\t\t\tsignal: controller.signal,\n\t\t\t\toptions: {\n\t\t\t\t\ttemperature: options.temperature || this.ai.options.temperature || 0.7,\n\t\t\t\t\tnum_predict: options.max_tokens || this.ai.options.max_tokens || 4096,\n\t\t\t\t},\n\t\t\t\ttools: (options.tools || this.ai.options.tools || []).map(t => ({\n\t\t\t\t\ttype: 'function',\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: t.name,\n\t\t\t\t\t\tdescription: t.description,\n\t\t\t\t\t\tparameters: {\n\t\t\t\t\t\t\ttype: 'object',\n\t\t\t\t\t\t\tproperties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},\n\t\t\t\t\t\t\trequired: t.args ? 
Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}))\n\t\t\t}\n\n\t\t\t// Run tool chains\n\t\t\tlet resp: any;\n\t\t\tdo {\n\t\t\t\tresp = await this.client.chat(requestParams);\n\t\t\t\tif(options.stream) {\n\t\t\t\t\tresp.message = {role: 'assistant', content: '', tool_calls: []};\n\t\t\t\t\tfor await (const chunk of resp) {\n\t\t\t\t\t\tif(controller.signal.aborted) break;\n\t\t\t\t\t\tif(chunk.message?.content) {\n\t\t\t\t\t\t\tresp.message.content += chunk.message.content;\n\t\t\t\t\t\t\toptions.stream({text: chunk.message.content});\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif(chunk.message?.tool_calls) resp.message.tool_calls = chunk.message.tool_calls;\n\t\t\t\t\t\tif(chunk.done) break;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Run tools\n\t\t\t\tif(resp.message?.tool_calls?.length && !controller.signal.aborted) {\n\t\t\t\t\thistory.push(resp.message);\n\t\t\t\t\tconst results = await Promise.all(resp.message.tool_calls.map(async (toolCall: any) => {\n\t\t\t\t\t\tconst tool = (options.tools || this.ai.options.tools)?.find(findByProp('name', toolCall.function.name));\n\t\t\t\t\t\tif(!tool) return {role: 'tool', tool_name: toolCall.function.name, content: '{\"error\": \"Tool not found\"}'};\n\t\t\t\t\t\tconst args = typeof toolCall.function.arguments === 'string' ? JSONAttemptParse(toolCall.function.arguments, {}) : toolCall.function.arguments;\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst result = await tool.fn(args, this.ai);\n\t\t\t\t\t\t\treturn {role: 'tool', tool_name: toolCall.function.name, args, content: JSONSanitize(result)};\n\t\t\t\t\t\t} catch (err: any) {\n\t\t\t\t\t\t\treturn {role: 'tool', tool_name: toolCall.function.name, args, content: JSONSanitize({error: err?.message || err?.toString() || 'Unknown'})};\n\t\t\t\t\t\t}\n\t\t\t\t\t}));\n\t\t\t\t\thistory.push(...results);\n\t\t\t\t\trequestParams.messages = history;\n\t\t\t\t}\n\t\t\t} while (!controller.signal.aborted && resp.message?.tool_calls?.length);\n\t\t\tif(options.stream) options.stream({done: true});\n\t\t\tres(this.toStandard([...history, {role: 'assistant', content: resp.message?.content}]));\n\t\t});\n\t\treturn Object.assign(response, {abort: () => controller.abort()});\n\t}\n}\n","import {OpenAI as openAI} from 'openai';\nimport {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {LLMMessage, LLMRequest} from './llm.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\n\nexport class OpenAi extends LLMProvider {\n\tclient!: openAI;\n\n\tconstructor(public readonly ai: Ai, public readonly apiToken: string, public model: string) {\n\t\tsuper();\n\t\tthis.client = new openAI({apiKey: apiToken});\n\t}\n\n\tprivate toStandard(history: any[]): LLMMessage[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tconst h = history[i];\n\t\t\tif(h.role === 'assistant' && h.tool_calls) {\n\t\t\t\tconst tools = h.tool_calls.map((tc: any) => ({\n\t\t\t\t\trole: 'tool',\n\t\t\t\t\tid: tc.id,\n\t\t\t\t\tname: tc.function.name,\n\t\t\t\t\targs: JSONAttemptParse(tc.function.arguments, {})\n\t\t\t\t}));\n\t\t\t\thistory.splice(i, 1, ...tools);\n\t\t\t\ti += tools.length - 1;\n\t\t\t} else if(h.role === 'tool' && h.content) {\n\t\t\t\tconst record = history.find(h2 => h.tool_call_id == h2.id);\n\t\t\t\tif(record) {\n\t\t\t\t\tif(h.content.includes('\"error\":')) record.error = h.content;\n\t\t\t\t\telse record.content = h.content;\n\t\t\t\t}\n\t\t\t\thistory.splice(i, 
1);\n\t\t\t\ti--;\n\t\t\t}\n\n\t\t}\n\t\treturn history;\n\t}\n\n\tprivate fromStandard(history: LLMMessage[]): any[] {\n\t\treturn history.reduce((result, h) => {\n\t\t\tif(h.role === 'tool') {\n\t\t\t\tresult.push({\n\t\t\t\t\trole: 'assistant',\n\t\t\t\t\tcontent: null,\n\t\t\t\t\ttool_calls: [{ id: h.id, type: 'function', function: { name: h.name, arguments: JSON.stringify(h.args) } }],\n\t\t\t\t\trefusal: null,\n\t\t\t\t\tannotations: [],\n\t\t\t\t}, {\n\t\t\t\t\trole: 'tool',\n\t\t\t\t\ttool_call_id: h.id,\n\t\t\t\t\tcontent: h.error || h.content\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\tresult.push(h);\n\t\t\t}\n\t\t\treturn result;\n\t\t}, [] as any[]);\n\t}\n\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tconst controller = new AbortController();\n\t\tconst response = new Promise<any>(async (res, rej) => {\n\t\t\tlet history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);\n\t\t\tif(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);\n\n\t\t\tconst requestParams: any = {\n\t\t\t\tmodel: options.model || this.model,\n\t\t\t\tmessages: history,\n\t\t\t\tstream: !!options.stream,\n\t\t\t\tmax_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,\n\t\t\t\ttemperature: options.temperature || this.ai.options.temperature || 0.7,\n\t\t\t\ttools: (options.tools || this.ai.options.tools || []).map(t => ({\n\t\t\t\t\ttype: 'function',\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: t.name,\n\t\t\t\t\t\tdescription: t.description,\n\t\t\t\t\t\tparameters: {\n\t\t\t\t\t\t\ttype: 'object',\n\t\t\t\t\t\t\tproperties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},\n\t\t\t\t\t\t\trequired: t.args ? 
Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}))\n\t\t\t};\n\n\t\t\t// Tool call and streaming logic similar to other providers\n\t\t\tlet resp: any;\n\t\t\tdo {\n\t\t\t\tresp = await this.client.chat.completions.create(requestParams);\n\n\t\t\t\t// Implement streaming and tool call handling\n\t\t\t\tif(options.stream) {\n\t\t\t\t\tresp.choices = [];\n\t\t\t\t\tfor await (const chunk of resp) {\n\t\t\t\t\t\tif(controller.signal.aborted) break;\n\t\t\t\t\t\tif(chunk.choices[0].delta.content) {\n\t\t\t\t\t\t\toptions.stream({text: chunk.choices[0].delta.content});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Run tools\n\t\t\t\tconst toolCalls = resp.choices[0].message.tool_calls || [];\n\t\t\t\tif(toolCalls.length && !controller.signal.aborted) {\n\t\t\t\t\thistory.push(resp.choices[0].message);\n\t\t\t\t\tconst results = await Promise.all(toolCalls.map(async (toolCall: any) => {\n\t\t\t\t\t\tconst tool = options.tools?.find(findByProp('name', toolCall.function.name));\n\t\t\t\t\t\tif(!tool) return {role: 'tool', tool_call_id: toolCall.id, content: '{\"error\": \"Tool not found\"}'};\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst args = JSONAttemptParse(toolCall.function.arguments, {});\n\t\t\t\t\t\t\tconst result = await tool.fn(args, this.ai);\n\t\t\t\t\t\t\treturn {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize(result)};\n\t\t\t\t\t\t} catch (err: any) {\n\t\t\t\t\t\t\treturn {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize({error: err?.message || err?.toString() || 'Unknown'})};\n\t\t\t\t\t\t}\n\t\t\t\t\t}));\n\t\t\t\t\thistory.push(...results);\n\t\t\t\t\trequestParams.messages = history;\n\t\t\t\t}\n\t\t\t} while (!controller.signal.aborted && resp.choices?.[0]?.message?.tool_calls?.length);\n\n\t\t\tif(options.stream) options.stream({done: true});\n\t\t\tres(this.toStandard([...history, {role: 'assistant', content: resp.choices[0].message.content || ''}]));\n\t\t});\n\n\t\treturn Object.assign(response, {abort: () => controller.abort()});\n\t}\n}\n","import {JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {Anthropic} from './antrhopic.ts';\nimport {Ollama} from './ollama.ts';\nimport {OpenAi} from './open-ai.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\nimport {AiTool} from './tools.ts';\n\nexport type LLMMessage = {\n\t/** Message originator */\n\trole: 'assistant' | 'system' | 'user';\n\t/** Message content */\n\tcontent: string | any;\n} | {\n\t/** Tool call */\n\trole: 'tool';\n\t/** Unique ID for call */\n\tid: string;\n\t/** Tool that was run */\n\tname: string;\n\t/** Tool arguments */\n\targs: any;\n\t/** Tool result */\n\tcontent: undefined | string;\n\t/** Tool error */\n\terror: undefined | string;\n}\n\nexport type LLMOptions = {\n\t/** Anthropic settings */\n\tanthropic?: {\n\t\t/** API Token */\n\t\ttoken: string;\n\t\t/** Default model */\n\t\tmodel: string;\n\t},\n\t/** Ollama settings */\n\tollama?: {\n\t\t/** connection URL */\n\t\thost: string;\n\t\t/** Default model */\n\t\tmodel: string;\n\t},\n\t/** Open AI settings */\n\topenAi?: {\n\t\t/** API Token */\n\t\ttoken: string;\n\t\t/** Default model */\n\t\tmodel: string;\n\t},\n\t/** Default provider & model */\n\tmodel: string | [string, string];\n} & Omit<LLMRequest, 'model'>;\n\nexport type LLMRequest = {\n\t/** System prompt */\n\tsystem?: string;\n\t/** Message history */\n\thistory?: LLMMessage[];\n\t/** Max tokens for request */\n\tmax_tokens?: number;\n\t/** 0 = 
Rigid Logic, 1 = Balanced, 2 = Hyper Creative **/\n\ttemperature?: number;\n\t/** Available tools */\n\ttools?: AiTool[];\n\t/** LLM model */\n\tmodel?: string | [string, string];\n\t/** Stream response */\n\tstream?: (chunk: {text?: string, done?: true}) => any;\n\t/** Compress old messages in the chat to free up context */\n\tcompress?: {\n\t\t/** Trigger chat compression once context exceeds the token count */\n\t\tmax: number;\n\t\t/** Compress chat until context size smaller than */\n\t\tmin: number\n\t}\n}\n\nexport class LLM {\n\tprivate providers: {[key: string]: LLMProvider} = {};\n\n\tconstructor(public readonly ai: Ai, public readonly options: LLMOptions) {\n\t\tif(options.anthropic?.token) this.providers.anthropic = new Anthropic(this.ai, options.anthropic.token, options.anthropic.model);\n\t\tif(options.ollama?.host) this.providers.ollama = new Ollama(this.ai, options.ollama.host, options.ollama.model);\n\t\tif(options.openAi?.token) this.providers.openAi = new OpenAi(this.ai, options.openAi.token, options.openAi.model);\n\t}\n\n\t/**\n\t * Chat with LLM\n\t * @param {string} message Question\n\t * @param {LLMRequest} options Configuration options and chat history\n\t * @returns {{abort: () => void, response: Promise<LLMMessage[]>}} Function to abort response and chat history\n\t */\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tlet model: any = [null, null];\n\t\tif(options.model) {\n\t\t\tif(typeof options.model == 'object') model = options.model;\n\t\t\telse model = [options.model, (<any>this.options)[options.model]?.model];\n\t\t}\n\t\tif(!options.model || model[1] == null) {\n\t\t\tif(typeof this.options.model == 'object') model = this.options.model;\n\t\t\telse model = [this.options.model, (<any>this.options)[this.options.model]?.model];\n\t\t}\n\t\tif(!model[0] || !model[1]) throw new Error(`Unknown LLM provider or model: ${model[0]} / ${model[1]}`);\n\t\treturn this.providers[model[0]].ask(message, {...options, model: model[1]});\n\t}\n\n\t/**\n\t * Compress chat history to reduce context size\n\t * @param {LLMMessage[]} history Chatlog that will be compressed\n\t * @param max Trigger compression once context is larger than max\n\t * @param min Summarize until context size is less than min\n\t * @param {LLMRequest} options LLM options\n\t * @returns {Promise<LLMMessage[]>} New chat history will summary at index 0\n\t */\n\tasync compress(history: LLMMessage[], max: number, min: number, options?: LLMRequest): Promise<LLMMessage[]> {\n\t\tif(this.estimateTokens(history) < max) return history;\n\t\tlet keep = 0, tokens = 0;\n\t\tfor(let m of history.toReversed()) {\n\t\t\ttokens += this.estimateTokens(m.content);\n\t\t\tif(tokens < min) keep++;\n\t\t\telse break;\n\t\t}\n\t\tif(history.length <= keep) return history;\n\t\tconst recent = keep == 0 ? [] : history.slice(-keep),\n\t\t\tprocess = (keep == 0 ? 
history : history.slice(0, -keep)).filter(h => h.role === 'assistant' || h.role === 'user');\n\t\tconst summary = await this.summarize(process.map(m => `${m.role}: ${m.content}`).join('\\n\\n'), 250, options);\n\t\treturn [{role: 'assistant', content: `Conversation Summary: ${summary}`}, ...recent];\n\t}\n\n\t/**\n\t * Estimate variable as tokens\n\t * @param history Object to size\n\t * @returns {number} Rough token count\n\t */\n\testimateTokens(history: any): number {\n\t\tconst text = JSON.stringify(history);\n\t\treturn Math.ceil((text.length / 4) * 1.2);\n\t}\n\n\t/**\n\t * Ask a question with JSON response\n\t * @param {string} message Question\n\t * @param {LLMRequest} options Configuration options and chat history\n\t * @returns {Promise<{} | {} | RegExpExecArray | null>}\n\t */\n\tasync json(message: string, options?: LLMRequest) {\n\t\tlet resp = await this.ask(message, {\n\t\t\tsystem: 'Respond using a JSON blob',\n\t\t\t...options\n\t\t});\n\t\tif(!resp?.[0]?.content) return {};\n\t\treturn JSONAttemptParse(new RegExp('\\{[\\s\\S]*\\}').exec(resp[0].content), {});\n\t}\n\n\t/**\n\t * Create a summary of some text\n\t * @param {string} text Text to summarize\n\t * @param {number} tokens Max number of tokens\n\t * @param options LLM request options\n\t * @returns {Promise<string>} Summary\n\t */\n\tsummarize(text: string, tokens: number, options?: LLMRequest): Promise<string | null> {\n\t\treturn this.ask(text, {system: `Generate a brief summary <= ${tokens} tokens. Output nothing else`, temperature: 0.3, ...options})\n\t\t\t.then(history => <string>history.pop()?.content || null);\n\t}\n}\n","import {createWorker} from 'tesseract.js';\nimport {LLM, LLMOptions} from './llm';\nimport fs from 'node:fs/promises';\nimport Path from 'node:path';\nimport * as tf from '@tensorflow/tfjs';\nimport {spawn} from 'node:child_process';\n\nexport type AiOptions = LLMOptions & {\n\twhisper?: {\n\t\t/** Whisper binary location */\n\t\tbinary: string;\n\t\t/** Model: `ggml-base.en.bin` */\n\t\tmodel: string;\n\t\t/** Path to models */\n\t\tpath: string;\n\t}\n}\n\nexport class Ai {\n\tprivate downloads: {[key: string]: Promise<string>} = {};\n\tprivate whisperModel!: string;\n\n\t/** Large Language Models */\n\tllm!: LLM;\n\n\tconstructor(public readonly options: AiOptions) {\n\t\tthis.llm = new LLM(this, options);\n\t\tif(this.options.whisper?.binary) {\n\t\t\tthis.whisperModel = this.options.whisper?.model.endsWith('.bin') ? 
this.options.whisper?.model : this.options.whisper?.model + '.bin';\n\t\t\tthis.downloadAsrModel();\n\t\t}\n\t}\n\n\t/**\n\t * Convert audio to text using Auditory Speech Recognition\n\t * @param {string} path Path to audio\n\t * @param model Whisper model\n\t * @returns {Promise<any>} Extracted text\n\t */\n\tasr(path: string, model: string = this.whisperModel): {abort: () => void, response: Promise<string | null>} {\n\t\tif(!this.options.whisper?.binary) throw new Error('Whisper not configured');\n\t\tlet abort: any = () => {};\n\t\tconst response = new Promise<string | null>((resolve, reject) => {\n\t\t\tthis.downloadAsrModel(model).then(m => {\n\t\t\t\tlet output = '';\n\t\t\t\tconst proc = spawn(<string>this.options.whisper?.binary, ['-nt', '-np', '-m', m, '-f', path], {stdio: ['ignore', 'pipe', 'ignore']});\n\t\t\t\tabort = () => proc.kill('SIGTERM');\n\t\t\t\tproc.on('error', (err: Error) => reject(err));\n\t\t\t\tproc.stdout.on('data', (data: Buffer) => output += data.toString());\n\t\t\t\tproc.on('close', (code: number) => {\n\t\t\t\t\tif(code === 0) resolve(output.trim() || null);\n\t\t\t\t\telse reject(new Error(`Exit code ${code}`));\n\t\t\t\t});\n\t\t\t});\n\t\t});\n\t\treturn {response, abort};\n\t}\n\n\t/**\n\t * Downloads the specified Whisper model if it is not already present locally.\n\t *\n\t * @param {string} model Whisper model that will be downloaded\n\t * @return {Promise<string>} Absolute path to model file, resolves once downloaded\n\t */\n\tasync downloadAsrModel(model: string = this.whisperModel): Promise<string> {\n\t\tif(!this.options.whisper?.binary) throw new Error('Whisper not configured');\n\t\tif(!model.endsWith('.bin')) model += '.bin';\n\t\tconst p = Path.join(this.options.whisper.path, model);\n\t\tif(await fs.stat(p).then(() => true).catch(() => false)) return p;\n\t\tif(!!this.downloads[model]) return this.downloads[model];\n\t\tthis.downloads[model] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${model}`)\n\t\t\t.then(resp => resp.arrayBuffer())\n\t\t\t.then(arr => Buffer.from(arr)).then(async buffer => {\n\t\t\t\tawait fs.writeFile(p, buffer);\n\t\t\t\tdelete this.downloads[model];\n\t\t\t\treturn p;\n\t\t\t});\n\t\treturn this.downloads[model];\n\t}\n\n\t/**\n\t * Convert image to text using Optical Character Recognition\n\t * @param {string} path Path to image\n\t * @returns {{abort: Function, response: Promise<string | null>}} Abort function & Promise of extracted text\n\t */\n\tocr(path: string): {abort: () => void, response: Promise<string | null>} {\n\t\tlet worker: any;\n\t\treturn {\n\t\t\tabort: () => { worker?.terminate(); },\n\t\t\tresponse: new Promise(async res => {\n\t\t\t\tworker = await createWorker('eng');\n\t\t\t\tconst {data} = await worker.recognize(path);\n\t\t\t\tawait worker.terminate();\n\t\t\t\tres(data.text.trim() || null);\n\t\t\t})\n\t\t}\n\t}\n\n\t/**\n\t * Compare the difference between two strings using tensor math\n\t * @param target Text that will checked\n\t * @param {string} searchTerms Multiple search terms to check against target\n\t * @returns {{avg: number, max: number, similarities: number[]}} Similarity values 0-1: 0 = unique, 1 = identical\n\t */\n\tsemanticSimilarity(target: string, ...searchTerms: string[]) {\n\t\tif(searchTerms.length < 2) throw new Error('Requires at least 2 strings to compare');\n\n\t\tconst vector = (text: string, dimensions: number = 10): number[] => {\n\t\t\treturn text.toLowerCase().split('').map((char, index) =>\n\t\t\t\t(char.charCodeAt(0) * (index + 1)) % 
dimensions / dimensions).slice(0, dimensions);\n\t\t}\n\n\t\tconst cosineSimilarity = (v1: number[], v2: number[]): number => {\n\t\t\tif (v1.length !== v2.length) throw new Error('Vectors must be same length');\n\t\t\tconst tensor1 = tf.tensor1d(v1), tensor2 = tf.tensor1d(v2)\n\t\t\tconst dotProduct = tf.dot(tensor1, tensor2)\n\t\t\tconst magnitude1 = tf.norm(tensor1)\n\t\t\tconst magnitude2 = tf.norm(tensor2)\n\t\t\tif(magnitude1.dataSync()[0] === 0 || magnitude2.dataSync()[0] === 0) return 0\n\t\t\treturn dotProduct.dataSync()[0] / (magnitude1.dataSync()[0] * magnitude2.dataSync()[0])\n\t\t}\n\n\t\tconst v = vector(target);\n\t\tconst similarities = searchTerms.map(t => vector(t)).map(refVector => cosineSimilarity(v, refVector))\n\t\treturn {avg: similarities.reduce((acc, s) => acc + s, 0) / similarities.length, max: Math.max(...similarities), similarities}\n\t}\n}\n","import {$, $Sync} from '@ztimson/node-utils';\nimport {ASet, consoleInterceptor, Http, fn as Fn} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\n\nexport type AiToolArg = {[key: string]: {\n\t/** Argument type */\n\ttype: 'array' | 'boolean' | 'number' | 'object' | 'string',\n\t/** Argument description */\n\tdescription: string,\n\t/** Required argument */\n\trequired?: boolean;\n\t/** Default value */\n\tdefault?: any,\n\t/** Options */\n\tenum?: string[],\n\t/** Minimum value or length */\n\tmin?: number,\n\t/** Maximum value or length */\n\tmax?: number,\n\t/** Match pattern */\n\tpattern?: string,\n\t/** Child arguments */\n\titems?: {[key: string]: AiToolArg}\n}}\n\nexport type AiTool = {\n\t/** Tool ID / Name - Must be snail_case */\n\tname: string,\n\t/** Tool description / prompt */\n\tdescription: string,\n\t/** Tool arguments */\n\targs?: AiToolArg,\n\t/** Callback function */\n\tfn: (args: any, ai: Ai) => any | Promise<any>,\n};\n\nexport const CliTool: AiTool = {\n\tname: 'cli',\n\tdescription: 'Use the command line interface, returns any output',\n\targs: {command: {type: 'string', description: 'Command to run', required: true}},\n\tfn: (args: {command: string}) => $`${args.command}`\n}\n\nexport const DateTimeTool: AiTool = {\n\tname: 'get_datetime',\n\tdescription: 'Get current date and time',\n\targs: {},\n\tfn: async () => new Date().toISOString()\n}\n\nexport const ExecTool: AiTool = {\n\tname: 'exec',\n\tdescription: 'Run code/scripts',\n\targs: {\n\t\tlanguage: {type: 'string', description: 'Execution language', enum: ['cli', 'node', 'python'], required: true},\n\t\tcode: {type: 'string', description: 'Code to execute', required: true}\n\t},\n\tfn: async (args, ai) => {\n\t\ttry {\n\t\t\tswitch(args.type) {\n\t\t\t\tcase 'bash':\n\t\t\t\t\treturn await CliTool.fn({command: args.code}, ai);\n\t\t\t\tcase 'node':\n\t\t\t\t\treturn await JSTool.fn({code: args.code}, ai);\n\t\t\t\tcase 'python': {\n\t\t\t\t\treturn await PythonTool.fn({code: args.code}, ai);\n\t\t\t\t}\n\t\t\t}\n\t\t} catch(err: any) {\n\t\t\treturn {error: err?.message || err.toString()};\n\t\t}\n\t}\n}\n\nexport const FetchTool: AiTool = {\n\tname: 'fetch',\n\tdescription: 'Make HTTP request to URL',\n\targs: {\n\t\turl: {type: 'string', description: 'URL to fetch', required: true},\n\t\tmethod: {type: 'string', description: 'HTTP method to use', enum: ['GET', 'POST', 'PUT', 'DELETE'], default: 'GET'},\n\t\theaders: {type: 'object', description: 'HTTP headers to send', default: {}},\n\t\tbody: {type: 'object', description: 'HTTP body to send'},\n\t},\n\tfn: (args: {\n\t\turl: string;\n\t\tmethod: 'GET' | 'POST' | 'PUT' | 
'DELETE';\n\t\theaders: {[key: string]: string};\n\t\tbody: any;\n\t}) => new Http({url: args.url, headers: args.headers}).request({method: args.method || 'GET', body: args.body})\n}\n\nexport const JSTool: AiTool = {\n\tname: 'exec_javascript',\n\tdescription: 'Execute commonjs javascript',\n\targs: {\n\t\tcode: {type: 'string', description: 'CommonJS javascript', required: true}\n\t},\n\tfn: async (args: {code: string}) => {\n\t\tconst console = consoleInterceptor(null);\n\t\tconst resp = await Fn<any>({console}, args.code, true).catch((err: any) => console.output.error.push(err));\n\t\treturn {...console.output, return: resp, stdout: undefined, stderr: undefined};\n\t}\n}\n\nexport const PythonTool: AiTool = {\n\tname: 'exec_javascript',\n\tdescription: 'Execute commonjs javascript',\n\targs: {\n\t\tcode: {type: 'string', description: 'CommonJS javascript', required: true}\n\t},\n\tfn: async (args: {code: string}) => ({result: $Sync`python -c \"${args.code}\"`})\n}\n\nexport const SearchTool: AiTool = {\n\tname: 'search',\n\tdescription: 'Use a search engine to find relevant URLs, should be changed with fetch to scrape sources',\n\targs: {\n\t\tquery: {type: 'string', description: 'Search string', required: true},\n\t\tlength: {type: 'string', description: 'Number of results to return', default: 5},\n\t},\n\tfn: async (args: {\n\t\tquery: string;\n\t\tlength: number;\n\t}) => {\n\t\tconst html = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(args.query)}`, {\n\t\t\theaders: {\"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64)\", \"Accept-Language\": \"en-US,en;q=0.9\"}\n\t\t}).then(resp => resp.text());\n\t\tlet match, regex = /<a .*?href=\"(.+?)\".+?<\\/a>/g;\n\t\tconst results = new ASet<string>();\n\t\twhile((match = regex.exec(html)) !== null) {\n\t\t\tlet url = /uddg=(.+)&?/.exec(decodeURIComponent(match[1]))?.[1];\n\t\t\tif(url) url = decodeURIComponent(url);\n\t\t\tif(url) results.add(url);\n\t\t\tif(results.size >= (args.length || 5)) break;\n\t\t}\n\t\treturn 
results;\n\t}\n}\n"],"names":["LLMProvider","Anthropic","ai","apiToken","model","anthropic","history","i","orgI","c","h","message","options","controller","response","res","rej","requestParams","t","objectMap","key","value","resp","chunk","text","last","JSONAttemptParse","toolCalls","results","toolCall","tool","findByProp","result","JSONSanitize","err","Ollama","host","ollama","error","system","args","OpenAi","openAI","tools","tc","record","h2","LLM","max","min","keep","tokens","m","recent","process","Ai","path","abort","resolve","reject","output","proc","spawn","data","code","p","Path","fs","arr","buffer","worker","createWorker","target","searchTerms","vector","dimensions","char","index","cosineSimilarity","v1","v2","tensor1","tf","tensor2","dotProduct","magnitude1","magnitude2","v","similarities","refVector","acc","s","CliTool","$","DateTimeTool","ExecTool","JSTool","PythonTool","FetchTool","Http","console","consoleInterceptor","Fn","$Sync","SearchTool","html","match","regex","ASet","url"],"mappings":";;;;;;;;;;AAIO,MAAeA,EAAY;AAElC;ACAO,MAAMC,UAAkBD,EAAY;AAAA,EAG1C,YAA4BE,GAAwBC,GAAyBC,GAAe;AAC3F,UAAA,GAD2B,KAAA,KAAAF,GAAwB,KAAA,WAAAC,GAAyB,KAAA,QAAAC,GAE5E,KAAK,SAAS,IAAIC,EAAU,EAAC,QAAQF,GAAS;AAAA,EAC/C;AAAA,EALA;AAAA,EAOQ,WAAWG,GAA8B;AAChD,aAAQC,IAAI,GAAGA,IAAID,EAAQ,QAAQC,KAAK;AACvC,YAAMC,IAAOD;AACb,MAAG,OAAOD,EAAQE,CAAI,EAAE,WAAW,aAC/BF,EAAQE,CAAI,EAAE,QAAQ,cACxBF,EAAQE,CAAI,EAAE,QAAQ,OAAO,CAACC,MAAWA,EAAE,QAAO,UAAU,EAAE,QAAQ,CAACA,MAAW;AACjF,QAAAF,KACAD,EAAQ,OAAOC,GAAG,GAAG,EAAC,MAAM,QAAQ,IAAIE,EAAE,IAAI,MAAMA,EAAE,MAAM,MAAMA,EAAE,OAAM;AAAA,MAC3E,CAAC,IACQH,EAAQE,CAAI,EAAE,QAAQ,UAC/BF,EAAQE,CAAI,EAAE,QAAQ,OAAO,CAACC,MAAWA,EAAE,QAAO,aAAa,EAAE,QAAQ,CAACA,MAAW;AACpF,cAAMC,IAAIJ,EAAQ,KAAK,CAACI,MAAWA,EAAE,MAAMD,EAAE,WAAW;AACxD,QAAAC,EAAED,EAAE,WAAW,UAAU,SAAS,IAAIA,EAAE;AAAA,MACzC,CAAC,GAEFH,EAAQE,CAAI,EAAE,UAAUF,EAAQE,CAAI,EAAE,QAAQ,OAAO,CAACC,MAAWA,EAAE,QAAQ,MAAM,EAAE,IAAI,CAACA,MAAWA,EAAE,IAAI,EAAE,KAAK;AAAA;AAAA,CAAM;AAAA,IAExH;AACA,WAAOH,EAAQ,OAAO,CAAAI,MAAK,CAAC,CAACA,EAAE,OAAO;AAAA,EACvC;AAAA,EAEQ,aAAaJ,GAA8B;AAClD,aAAQC,IAAI,GAAGA,IAAID,EAAQ,QAAQC;AAClC,UAAGD,EAAQC,CAAC,EAAE,QAAQ,QAAQ;AAC7B,cAAMG,IAASJ,EAAQC,CAAC;AACxB,QAAAD,EAAQ;AAAA,UAAOC;AAAA,UAAG;AAAA,UACjB,EAAC,MAAM,aAAa,SAAS,CAAC,EAAC,MAAM,YAAY,IAAIG,EAAE,IAAI,MAAMA,EAAE,MAAM,OAAOA,EAAE,KAAA,CAAK,EAAA;AAAA,UACvF,EAAC,MAAM,QAAQ,SAAS,CAAC,EAAC,MAAM,eAAe,aAAaA,EAAE,IAAI,UAAU,CAAC,CAACA,EAAE,OAAO,SAAUA,EAAE,SAASA,EAAE,SAAQ,EAAA;AAAA,QAAC,GAExHH;AAAA,MACD;AAED,WAAOD;AAAA,EACR;AAAA,EAEA,IAAIK,GAAiBC,IAAsB,IAAoC;AAC9E,UAAMC,IAAa,IAAI,gBAAA,GACjBC,IAAW,IAAI,QAAa,OAAOC,GAAKC,MAAQ;AACrD,UAAIV,IAAU,KAAK,aAAa,CAAC,GAAGM,EAAQ,WAAW,CAAA,GAAI,EAAC,MAAM,QAAQ,SAASD,EAAA,CAAQ,CAAC;AAC5F,MAAGC,EAAQ,aAAUN,IAAU,MAAM,KAAK,GAAG,IAAI,SAAcA,GAASM,EAAQ,SAAS,KAAKA,EAAQ,SAAS,KAAKA,CAAO;AAC3H,YAAMK,IAAqB;AAAA,QAC1B,OAAOL,EAAQ,SAAS,KAAK;AAAA,QAC7B,YAAYA,EAAQ,cAAc,KAAK,GAAG,QAAQ,cAAc;AAAA,QAChE,QAAQA,EAAQ,UAAU,KAAK,GAAG,QAAQ,UAAU;AAAA,QACpD,aAAaA,EAAQ,eAAe,KAAK,GAAG,QAAQ,eAAe;AAAA,QACnE,QAAQA,EAAQ,SAAS,KAAK,GAAG,QAAQ,SAAS,CAAA,GAAI,IAAI,CAAAM,OAAM;AAAA,UAC/D,MAAMA,EAAE;AAAA,UACR,aAAaA,EAAE;AAAA,UACf,cAAc;AAAA,YACb,MAAM;AAAA,YACN,YAAYA,EAAE,OAAOC,EAAUD,EAAE,MAAM,CAACE,GAAKC,OAAW,EAAC,GAAGA,GAAO,UAAU,OAAA,EAAW,IAAI,CAAA;AAAA,YAC5F,UAAUH,EAAE,OAAO,OAAO,QAAQA,EAAE,IAAI,EAAE,OAAO,CAAAA,MAAKA,EAAE,CAAC,EAAE,QAAQ,EAAE,IAAI,CAAAA,MAAKA,EAAE,CAAC,CAAC,IAAI,CAAA;AAAA,UAAC;AAAA,UAExF,IAAI;AAAA,QAAA,EACH;AAAA,QACF,UAAUZ;AAAA,QACV,QAAQ,CAAC,CAACM,EAAQ;AAAA,MAAA;AAInB,UAAIU;AACJ,SAAG;AAIF,YAHAA,IAAO,MAAM,KAAK,OAAO,SAAS,OAAOL,CAAa,GAGnDL,EAAQ,QAAQ;AAClB,UAAAU,EAAK,UAAU,
CAAA;AACf,2BAAiBC,KAASD,GAAM;AAC/B,gBAAGT,EAAW,OAAO,QAAS;AAC9B,gBAAGU,EAAM,SAAS;AACjB,cAAGA,EAAM,cAAc,SAAS,SAC/BD,EAAK,QAAQ,KAAK,EAAC,MAAM,QAAQ,MAAM,IAAG,IACjCC,EAAM,cAAc,SAAS,cACtCD,EAAK,QAAQ,KAAK,EAAC,MAAM,YAAY,IAAIC,EAAM,cAAc,IAAI,MAAMA,EAAM,cAAc,MAAM,OAAY,IAAG;AAAA,qBAExGA,EAAM,SAAS;AACxB,kBAAGA,EAAM,MAAM,SAAS,cAAc;AACrC,sBAAMC,IAAOD,EAAM,MAAM;AACzB,gBAAAD,EAAK,QAAQ,GAAG,EAAE,EAAE,QAAQE,GAC5BZ,EAAQ,OAAO,EAAC,MAAAY,GAAK;AAAA,cACtB,MAAA,CAAUD,EAAM,MAAM,SAAS,uBAC9BD,EAAK,QAAQ,GAAG,EAAE,EAAE,SAASC,EAAM,MAAM;AAAA,qBAEjCA,EAAM,SAAS,sBAAsB;AAC9C,oBAAME,IAAOH,EAAK,QAAQ,GAAG,EAAE;AAC/B,cAAGG,EAAK,SAAS,SAAMA,EAAK,QAAQA,EAAK,QAAQC,EAAiBD,EAAK,OAAO,CAAA,CAAE,IAAI,CAAA;AAAA,YACrF,WAAUF,EAAM,SAAS;AACxB;AAAA,UAEF;AAAA,QACD;AAGA,cAAMI,IAAYL,EAAK,QAAQ,OAAO,CAACb,MAAWA,EAAE,SAAS,UAAU;AACvE,YAAGkB,EAAU,UAAU,CAACd,EAAW,OAAO,SAAS;AAClD,UAAAP,EAAQ,KAAK,EAAC,MAAM,aAAa,SAASgB,EAAK,SAAQ;AACvD,gBAAMM,IAAU,MAAM,QAAQ,IAAID,EAAU,IAAI,OAAOE,MAAkB;AACxE,kBAAMC,IAAOlB,EAAQ,OAAO,KAAKmB,EAAW,QAAQF,EAAS,IAAI,CAAC;AAClE,gBAAG,CAACC,EAAM,QAAO,EAAC,aAAaD,EAAS,IAAI,UAAU,IAAM,SAAS,iBAAA;AACrE,gBAAI;AACH,oBAAMG,IAAS,MAAMF,EAAK,GAAGD,EAAS,OAAO,KAAK,EAAE;AACpD,qBAAO,EAAC,MAAM,eAAe,aAAaA,EAAS,IAAI,SAASI,EAAaD,CAAM,EAAA;AAAA,YACpF,SAASE,GAAU;AAClB,qBAAO,EAAC,MAAM,eAAe,aAAaL,EAAS,IAAI,UAAU,IAAM,SAASK,GAAK,WAAWA,GAAK,SAAA,KAAc,UAAA;AAAA,YACpH;AAAA,UACD,CAAC,CAAC;AACF,UAAA5B,EAAQ,KAAK,EAAC,MAAM,QAAQ,SAASsB,GAAQ,GAC7CX,EAAc,WAAWX;AAAA,QAC1B;AAAA,MACD,SAAS,CAACO,EAAW,OAAO,WAAWS,EAAK,QAAQ,KAAK,CAACb,MAAWA,EAAE,SAAS,UAAU;AAC1F,MAAGG,EAAQ,UAAQA,EAAQ,OAAO,EAAC,MAAM,IAAK,GAC9CG,EAAI,KAAK,WAAW,CAAC,GAAGT,GAAS;AAAA,QAChC,MAAM;AAAA,QACN,SAASgB,EAAK,QAAQ,OAAO,CAACb,MAAWA,EAAE,QAAQ,MAAM,EAAE,IAAI,CAACA,MAAWA,EAAE,IAAI,EAAE,KAAK;AAAA;AAAA,CAAM;AAAA,MAAA,CAC9F,CAAC,CAAC;AAAA,IACJ,CAAC;AACD,WAAO,OAAO,OAAOK,GAAU,EAAC,OAAO,MAAMD,EAAW,MAAA,GAAQ;AAAA,EACjE;AACD;AC9HO,MAAMsB,UAAenC,EAAY;AAAA,EAGvC,YAA4BE,GAAekC,GAAqBhC,GAAe;AAC9E,UAAA,GAD2B,KAAA,KAAAF,GAAe,KAAA,OAAAkC,GAAqB,KAAA,QAAAhC,GAE/D,KAAK,SAAS,IAAIiC,EAAO,EAAC,MAAAD,GAAK;AAAA,EAChC;AAAA,EALA;AAAA,EAOQ,WAAW9B,GAA8B;AAChD,aAAQC,IAAI,GAAGA,IAAID,EAAQ,QAAQC;AAClC,UAAGD,EAAQC,CAAC,EAAE,QAAQ,eAAeD,EAAQC,CAAC,EAAE;AAC/C,QAAGD,EAAQC,CAAC,EAAE,UAAS,OAAOD,EAAQC,CAAC,EAAE,cAExCD,EAAQ,OAAOC,GAAG,CAAC,GACnBA;AAAA,eAEQD,EAAQC,CAAC,EAAE,QAAQ,QAAQ;AACpC,cAAM+B,IAAQhC,EAAQC,CAAC,EAAE,QAAQ,WAAW,WAAW;AACvD,QAAAD,EAAQC,CAAC,IAAI,EAAC,MAAM,QAAQ,MAAMD,EAAQC,CAAC,EAAE,WAAW,MAAMD,EAAQC,CAAC,EAAE,MAAM,CAAC+B,IAAQ,UAAU,SAAS,GAAGhC,EAAQC,CAAC,EAAE,QAAA;AAAA,MAC1H;AAED,WAAOD;AAAA,EACR;AAAA,EAEQ,aAAaA,GAA8B;AAClD,WAAOA,EAAQ,IAAI,CAACI,MAChBA,EAAE,QAAQ,SAAeA,IACrB,EAAC,MAAM,QAAQ,WAAWA,EAAE,MAAM,SAASA,EAAE,SAASA,EAAE,QAAA,CAC/D;AAAA,EACF;AAAA,EAEA,IAAIC,GAAiBC,IAAsB,IAAoC;AAC9E,UAAMC,IAAa,IAAI,gBAAA,GACjBC,IAAW,IAAI,QAAa,OAAOC,GAAKC,MAAQ;AACrD,UAAIuB,IAAS3B,EAAQ,UAAU,KAAK,GAAG,QAAQ,QAC3CN,IAAU,KAAK,aAAa,CAAC,GAAGM,EAAQ,WAAW,CAAA,GAAI,EAAC,MAAM,QAAQ,SAASD,EAAA,CAAQ,CAAC;AAC5F,MAAGL,EAAQ,CAAC,EAAE,QAAQ,aACjBiC,MACS,MAAA,IADDA,IAASjC,EAAQ,MAAA,IAG3BM,EAAQ,aAAUN,IAAU,MAAM,KAAK,GAAG,IAAI,SAAcA,GAASM,EAAQ,SAAS,KAAKA,EAAQ,SAAS,GAAG,IAC/GA,EAAQ,UAAQN,EAAQ,QAAQ,EAAC,MAAM,UAAU,SAASiC,GAAO;AAEpE,YAAMtB,IAAqB;AAAA,QAC1B,OAAOL,EAAQ,SAAS,KAAK;AAAA,QAC7B,UAAUN;AAAA,QACV,QAAQ,CAAC,CAACM,EAAQ;AAAA,QAClB,QAAQC,EAAW;AAAA,QACnB,SAAS;AAAA,UACR,aAAaD,EAAQ,eAAe,KAAK,GAAG,QAAQ,eAAe;AAAA,UACnE,aAAaA,EAAQ,cAAc,KAAK,GAAG,QAAQ,cAAc;AAAA,QAAA;AAAA,QAElE,QAAQA,EAAQ,SAAS,KAAK,GAAG,QAAQ,SAAS,CAAA,GAAI,IAAI,CAAAM,OAAM;AAAA,UAC/D,MAAM;AAAA,UACN,UAAU;AAAA,YACT,MAAMA,EAAE;AAAA,YACR,aAAaA,EAAE;AAAA,YACf,YAAY;AAAA,cACX,MAAM;AAAA,cACN,YAAYA,EAAE,OAAOC,EAAUD,EAAE,M
AAM,CAACE,GAAKC,OAAW,EAAC,GAAGA,GAAO,UAAU,OAAA,EAAW,IAAI,CAAA;AAAA,cAC5F,UAAUH,EAAE,OAAO,OAAO,QAAQA,EAAE,IAAI,EAAE,OAAO,CAAAA,MAAKA,EAAE,CAAC,EAAE,QAAQ,EAAE,IAAI,CAAAA,MAAKA,EAAE,CAAC,CAAC,IAAI,CAAA;AAAA,YAAC;AAAA,UACxF;AAAA,QACD,EACC;AAAA,MAAA;AAIH,UAAII;AACJ,SAAG;AAEF,YADAA,IAAO,MAAM,KAAK,OAAO,KAAKL,CAAa,GACxCL,EAAQ,QAAQ;AAClB,UAAAU,EAAK,UAAU,EAAC,MAAM,aAAa,SAAS,IAAI,YAAY,GAAC;AAC7D,2BAAiBC,KAASD;AAOzB,gBANGT,EAAW,OAAO,YAClBU,EAAM,SAAS,YACjBD,EAAK,QAAQ,WAAWC,EAAM,QAAQ,SACtCX,EAAQ,OAAO,EAAC,MAAMW,EAAM,QAAQ,SAAQ,IAE1CA,EAAM,SAAS,iBAAiB,QAAQ,aAAaA,EAAM,QAAQ,aACnEA,EAAM,MAAM;AAAA,QAEjB;AAGA,YAAGD,EAAK,SAAS,YAAY,UAAU,CAACT,EAAW,OAAO,SAAS;AAClE,UAAAP,EAAQ,KAAKgB,EAAK,OAAO;AACzB,gBAAMM,IAAU,MAAM,QAAQ,IAAIN,EAAK,QAAQ,WAAW,IAAI,OAAOO,MAAkB;AACtF,kBAAMC,KAAQlB,EAAQ,SAAS,KAAK,GAAG,QAAQ,QAAQ,KAAKmB,EAAW,QAAQF,EAAS,SAAS,IAAI,CAAC;AACtG,gBAAG,CAACC,EAAM,QAAO,EAAC,MAAM,QAAQ,WAAWD,EAAS,SAAS,MAAM,SAAS,8BAAA;AAC5E,kBAAMW,IAAO,OAAOX,EAAS,SAAS,aAAc,WAAWH,EAAiBG,EAAS,SAAS,WAAW,CAAA,CAAE,IAAIA,EAAS,SAAS;AACrI,gBAAI;AACH,oBAAMG,IAAS,MAAMF,EAAK,GAAGU,GAAM,KAAK,EAAE;AAC1C,qBAAO,EAAC,MAAM,QAAQ,WAAWX,EAAS,SAAS,MAAM,MAAAW,GAAM,SAASP,EAAaD,CAAM,EAAA;AAAA,YAC5F,SAASE,GAAU;AAClB,qBAAO,EAAC,MAAM,QAAQ,WAAWL,EAAS,SAAS,MAAM,MAAAW,GAAM,SAASP,EAAa,EAAC,OAAOC,GAAK,WAAWA,GAAK,cAAc,UAAA,CAAU,EAAA;AAAA,YAC3I;AAAA,UACD,CAAC,CAAC;AACF,UAAA5B,EAAQ,KAAK,GAAGsB,CAAO,GACvBX,EAAc,WAAWX;AAAA,QAC1B;AAAA,MACD,SAAS,CAACO,EAAW,OAAO,WAAWS,EAAK,SAAS,YAAY;AACjE,MAAGV,EAAQ,UAAQA,EAAQ,OAAO,EAAC,MAAM,IAAK,GAC9CG,EAAI,KAAK,WAAW,CAAC,GAAGT,GAAS,EAAC,MAAM,aAAa,SAASgB,EAAK,SAAS,QAAA,CAAQ,CAAC,CAAC;AAAA,IACvF,CAAC;AACD,WAAO,OAAO,OAAOR,GAAU,EAAC,OAAO,MAAMD,EAAW,MAAA,GAAQ;AAAA,EACjE;AACD;AC1GO,MAAM4B,UAAezC,EAAY;AAAA,EAGvC,YAA4BE,GAAwBC,GAAyBC,GAAe;AAC3F,UAAA,GAD2B,KAAA,KAAAF,GAAwB,KAAA,WAAAC,GAAyB,KAAA,QAAAC,GAE5E,KAAK,SAAS,IAAIsC,EAAO,EAAC,QAAQvC,GAAS;AAAA,EAC5C;AAAA,EALA;AAAA,EAOQ,WAAWG,GAA8B;AAChD,aAAQC,IAAI,GAAGA,IAAID,EAAQ,QAAQC,KAAK;AACvC,YAAMG,IAAIJ,EAAQC,CAAC;AACnB,UAAGG,EAAE,SAAS,eAAeA,EAAE,YAAY;AAC1C,cAAMiC,IAAQjC,EAAE,WAAW,IAAI,CAACkC,OAAa;AAAA,UAC5C,MAAM;AAAA,UACN,IAAIA,EAAG;AAAA,UACP,MAAMA,EAAG,SAAS;AAAA,UAClB,MAAMlB,EAAiBkB,EAAG,SAAS,WAAW,CAAA,CAAE;AAAA,QAAA,EAC/C;AACF,QAAAtC,EAAQ,OAAOC,GAAG,GAAG,GAAGoC,CAAK,GAC7BpC,KAAKoC,EAAM,SAAS;AAAA,MACrB,WAAUjC,EAAE,SAAS,UAAUA,EAAE,SAAS;AACzC,cAAMmC,IAASvC,EAAQ,KAAK,OAAMI,EAAE,gBAAgBoC,EAAG,EAAE;AACzD,QAAGD,MACCnC,EAAE,QAAQ,SAAS,UAAU,IAAGmC,EAAO,QAAQnC,EAAE,UAC/CmC,EAAO,UAAUnC,EAAE,UAEzBJ,EAAQ,OAAOC,GAAG,CAAC,GACnBA;AAAA,MACD;AAAA,IAED;AACA,WAAOD;AAAA,EACR;AAAA,EAEQ,aAAaA,GAA8B;AAClD,WAAOA,EAAQ,OAAO,CAAC0B,GAAQtB,OAC3BA,EAAE,SAAS,SACbsB,EAAO,KAAK;AAAA,MACX,MAAM;AAAA,MACN,SAAS;AAAA,MACT,YAAY,CAAC,EAAE,IAAItB,EAAE,IAAI,MAAM,YAAY,UAAU,EAAE,MAAMA,EAAE,MAAM,WAAW,KAAK,UAAUA,EAAE,IAAI,EAAA,GAAK;AAAA,MAC1G,SAAS;AAAA,MACT,aAAa,CAAA;AAAA,IAAC,GACZ;AAAA,MACF,MAAM;AAAA,MACN,cAAcA,EAAE;AAAA,MAChB,SAASA,EAAE,SAASA,EAAE;AAAA,IAAA,CACtB,IAEDsB,EAAO,KAAKtB,CAAC,GAEPsB,IACL,CAAA,CAAW;AAAA,EACf;AAAA,EAEA,IAAIrB,GAAiBC,IAAsB,IAAoC;AAC9E,UAAMC,IAAa,IAAI,gBAAA,GACjBC,IAAW,IAAI,QAAa,OAAOC,GAAKC,MAAQ;AACrD,UAAIV,IAAU,KAAK,aAAa,CAAC,GAAGM,EAAQ,WAAW,CAAA,GAAI,EAAC,MAAM,QAAQ,SAASD,EAAA,CAAQ,CAAC;AAC5F,MAAGC,EAAQ,aAAUN,IAAU,MAAM,KAAK,GAAG,IAAI,SAAcA,GAASM,EAAQ,SAAS,KAAKA,EAAQ,SAAS,KAAKA,CAAO;AAE3H,YAAMK,IAAqB;AAAA,QAC1B,OAAOL,EAAQ,SAAS,KAAK;AAAA,QAC7B,UAAUN;AAAA,QACV,QAAQ,CAAC,CAACM,EAAQ;AAAA,QAClB,YAAYA,EAAQ,cAAc,KAAK,GAAG,QAAQ,cAAc;AAAA,QAChE,aAAaA,EAAQ,eAAe,KAAK,GAAG,QAAQ,eAAe;AAAA,QACnE,QAAQA,EAAQ,SAAS,KAAK,GAAG,QAAQ,SAAS,CAAA,GAAI,IAAI,CAAAM,OAAM;AAAA,UAC/D,MAAM;AAAA,UACN,UAAU;AAAA,YACT,MAAMA,EAAE;AAAA,YACR,aAAaA,EAAE;AAA
A,YACf,YAAY;AAAA,cACX,MAAM;AAAA,cACN,YAAYA,EAAE,OAAOC,EAAUD,EAAE,MAAM,CAACE,GAAKC,OAAW,EAAC,GAAGA,GAAO,UAAU,OAAA,EAAW,IAAI,CAAA;AAAA,cAC5F,UAAUH,EAAE,OAAO,OAAO,QAAQA,EAAE,IAAI,EAAE,OAAO,CAAAA,MAAKA,EAAE,CAAC,EAAE,QAAQ,EAAE,IAAI,CAAAA,MAAKA,EAAE,CAAC,CAAC,IAAI,CAAA;AAAA,YAAC;AAAA,UACxF;AAAA,QACD,EACC;AAAA,MAAA;AAIH,UAAII;AACJ,SAAG;AAIF,YAHAA,IAAO,MAAM,KAAK,OAAO,KAAK,YAAY,OAAOL,CAAa,GAG3DL,EAAQ,QAAQ;AAClB,UAAAU,EAAK,UAAU,CAAA;AACf,2BAAiBC,KAASD,GAAM;AAC/B,gBAAGT,EAAW,OAAO,QAAS;AAC9B,YAAGU,EAAM,QAAQ,CAAC,EAAE,MAAM,WACzBX,EAAQ,OAAO,EAAC,MAAMW,EAAM,QAAQ,CAAC,EAAE,MAAM,SAAQ;AAAA,UAEvD;AAAA,QACD;AAGA,cAAMI,IAAYL,EAAK,QAAQ,CAAC,EAAE,QAAQ,cAAc,CAAA;AACxD,YAAGK,EAAU,UAAU,CAACd,EAAW,OAAO,SAAS;AAClD,UAAAP,EAAQ,KAAKgB,EAAK,QAAQ,CAAC,EAAE,OAAO;AACpC,gBAAMM,IAAU,MAAM,QAAQ,IAAID,EAAU,IAAI,OAAOE,MAAkB;AACxE,kBAAMC,IAAOlB,EAAQ,OAAO,KAAKmB,EAAW,QAAQF,EAAS,SAAS,IAAI,CAAC;AAC3E,gBAAG,CAACC,EAAM,QAAO,EAAC,MAAM,QAAQ,cAAcD,EAAS,IAAI,SAAS,8BAAA;AACpE,gBAAI;AACH,oBAAMW,IAAOd,EAAiBG,EAAS,SAAS,WAAW,CAAA,CAAE,GACvDG,IAAS,MAAMF,EAAK,GAAGU,GAAM,KAAK,EAAE;AAC1C,qBAAO,EAAC,MAAM,QAAQ,cAAcX,EAAS,IAAI,SAASI,EAAaD,CAAM,EAAA;AAAA,YAC9E,SAASE,GAAU;AAClB,qBAAO,EAAC,MAAM,QAAQ,cAAcL,EAAS,IAAI,SAASI,EAAa,EAAC,OAAOC,GAAK,WAAWA,GAAK,cAAc,UAAA,CAAU,EAAA;AAAA,YAC7H;AAAA,UACD,CAAC,CAAC;AACF,UAAA5B,EAAQ,KAAK,GAAGsB,CAAO,GACvBX,EAAc,WAAWX;AAAA,QAC1B;AAAA,MACD,SAAS,CAACO,EAAW,OAAO,WAAWS,EAAK,UAAU,CAAC,GAAG,SAAS,YAAY;AAE/E,MAAGV,EAAQ,UAAQA,EAAQ,OAAO,EAAC,MAAM,IAAK,GAC9CG,EAAI,KAAK,WAAW,CAAC,GAAGT,GAAS,EAAC,MAAM,aAAa,SAASgB,EAAK,QAAQ,CAAC,EAAE,QAAQ,WAAW,GAAA,CAAG,CAAC,CAAC;AAAA,IACvG,CAAC;AAED,WAAO,OAAO,OAAOR,GAAU,EAAC,OAAO,MAAMD,EAAW,MAAA,GAAQ;AAAA,EACjE;AACD;ACnDO,MAAMkC,EAAI;AAAA,EAGhB,YAA4B7C,GAAwBU,GAAqB;AAA7C,SAAA,KAAAV,GAAwB,KAAA,UAAAU,GAChDA,EAAQ,WAAW,UAAO,KAAK,UAAU,YAAY,IAAIX,EAAU,KAAK,IAAIW,EAAQ,UAAU,OAAOA,EAAQ,UAAU,KAAK,IAC5HA,EAAQ,QAAQ,SAAM,KAAK,UAAU,SAAS,IAAIuB,EAAO,KAAK,IAAIvB,EAAQ,OAAO,MAAMA,EAAQ,OAAO,KAAK,IAC3GA,EAAQ,QAAQ,UAAO,KAAK,UAAU,SAAS,IAAI6B,EAAO,KAAK,IAAI7B,EAAQ,OAAO,OAAOA,EAAQ,OAAO,KAAK;AAAA,EACjH;AAAA,EANQ,YAA0C,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAclD,IAAID,GAAiBC,IAAsB,IAAoC;AAC9E,QAAIR,IAAa,CAAC,MAAM,IAAI;AAS5B,QARGQ,EAAQ,UACP,OAAOA,EAAQ,SAAS,eAAkBA,EAAQ,QAChDR,IAAQ,CAACQ,EAAQ,OAAa,KAAK,QAASA,EAAQ,KAAK,GAAG,KAAK,KAEpE,CAACA,EAAQ,SAASR,EAAM,CAAC,KAAK,UAC7B,OAAO,KAAK,QAAQ,SAAS,WAAUA,IAAQ,KAAK,QAAQ,QAC1DA,IAAQ,CAAC,KAAK,QAAQ,OAAa,KAAK,QAAS,KAAK,QAAQ,KAAK,GAAG,KAAK,IAE9E,CAACA,EAAM,CAAC,KAAK,CAACA,EAAM,CAAC,EAAG,OAAM,IAAI,MAAM,kCAAkCA,EAAM,CAAC,CAAC,MAAMA,EAAM,CAAC,CAAC,EAAE;AACrG,WAAO,KAAK,UAAUA,EAAM,CAAC,CAAC,EAAE,IAAIO,GAAS,EAAC,GAAGC,GAAS,OAAOR,EAAM,CAAC,GAAE;AAAA,EAC3E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,SAASE,GAAuB0C,GAAaC,GAAarC,GAA6C;AAC5G,QAAG,KAAK,eAAeN,CAAO,IAAI0C,EAAK,QAAO1C;AAC9C,QAAI4C,IAAO,GAAGC,IAAS;AACvB,aAAQC,KAAK9C,EAAQ;AAEpB,UADA6C,KAAU,KAAK,eAAeC,EAAE,OAAO,GACpCD,IAASF,EAAK,CAAAC;AAAA,UACZ;AAEN,QAAG5C,EAAQ,UAAU4C,EAAM,QAAO5C;AAClC,UAAM+C,IAASH,KAAQ,IAAI,CAAA,IAAK5C,EAAQ,MAAM,CAAC4C,CAAI,GAClDI,KAAWJ,KAAQ,IAAI5C,IAAUA,EAAQ,MAAM,GAAG,CAAC4C,CAAI,GAAG,OAAO,CAAAxC,MAAKA,EAAE,SAAS,eAAeA,EAAE,SAAS,MAAM;AAElH,WAAO,CAAC,EAAC,MAAM,aAAa,SAAS,yBADrB,MAAM,KAAK,UAAU4C,EAAQ,IAAI,OAAK,GAAGF,EAAE,IAAI,KAAKA,EAAE,OAAO,EAAE,EAAE,KAAK;AAAA;AAAA,CAAM,GAAG,KAAKxC,CAAO,CACtC,MAAK,GAAGyC,CAAM;AAAA,EACpF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,eAAe/C,GAAsB;AACpC,UAAMkB,IAAO,KAAK,UAAUlB,CAAO;AACnC,WAAO,KAAK,KAAMkB,EAAK,SAAS,IAAK,GAAG;AAAA,EACzC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,KAAKb,GAAiBC,GAAsB;AACjD,QAAIU,IAAO,MAAM,KAAK,IAAIX,GAAS;AAAA,MAClC,QAAQ;AAAA,MACR,GAAGC;AAAA,IAAA,CACH;AACD,WAAIU,I
AAO,CAAC,GAAG,UACRI,EAAiB,IAAI,OAAO,SAAa,EAAE,KAAKJ,EAAK,CAAC,EAAE,OAAO,GAAG,EAAE,IAD5C,CAAA;AAAA,EAEhC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,UAAUE,GAAc2B,GAAgBvC,GAA8C;AACrF,WAAO,KAAK,IAAIY,GAAM,EAAC,QAAQ,+BAA+B2B,CAAM,gCAAgC,aAAa,KAAK,GAAGvC,EAAA,CAAQ,EAC/H,KAAK,CAAAN,MAAmBA,EAAQ,IAAA,GAAO,WAAW,IAAI;AAAA,EACzD;AACD;ACpJO,MAAMiD,EAAG;AAAA,EAOf,YAA4B3C,GAAoB;AAApB,SAAA,UAAAA,GAC3B,KAAK,MAAM,IAAImC,EAAI,MAAMnC,CAAO,GAC7B,KAAK,QAAQ,SAAS,WACxB,KAAK,eAAe,KAAK,QAAQ,SAAS,MAAM,SAAS,MAAM,IAAI,KAAK,QAAQ,SAAS,QAAQ,KAAK,QAAQ,SAAS,QAAQ,QAC/H,KAAK,iBAAA;AAAA,EAEP;AAAA,EAZQ,YAA8C,CAAA;AAAA,EAC9C;AAAA;AAAA,EAGR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBA,IAAI4C,GAAcpD,IAAgB,KAAK,cAAqE;AAC3G,QAAG,CAAC,KAAK,QAAQ,SAAS,OAAQ,OAAM,IAAI,MAAM,wBAAwB;AAC1E,QAAIqD,IAAa,MAAM;AAAA,IAAC;AAcxB,WAAO,EAAC,UAbS,IAAI,QAAuB,CAACC,GAASC,MAAW;AAChE,WAAK,iBAAiBvD,CAAK,EAAE,KAAK,CAAAgD,MAAK;AACtC,YAAIQ,IAAS;AACb,cAAMC,IAAOC,EAAc,KAAK,QAAQ,SAAS,QAAQ,CAAC,OAAO,OAAO,MAAMV,GAAG,MAAMI,CAAI,GAAG,EAAC,OAAO,CAAC,UAAU,QAAQ,QAAQ,GAAE;AACnI,QAAAC,IAAQ,MAAMI,EAAK,KAAK,SAAS,GACjCA,EAAK,GAAG,SAAS,CAAC3B,MAAeyB,EAAOzB,CAAG,CAAC,GAC5C2B,EAAK,OAAO,GAAG,QAAQ,CAACE,MAAiBH,KAAUG,EAAK,UAAU,GAClEF,EAAK,GAAG,SAAS,CAACG,MAAiB;AAClC,UAAGA,MAAS,IAAGN,EAAQE,EAAO,KAAA,KAAU,IAAI,MAChC,IAAI,MAAM,aAAaI,CAAI,EAAE,CAAC;AAAA,QAC3C,CAAC;AAAA,MACF,CAAC;AAAA,IACF,CAAC,GACiB,OAAAP,EAAA;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,iBAAiBrD,IAAgB,KAAK,cAA+B;AAC1E,QAAG,CAAC,KAAK,QAAQ,SAAS,OAAQ,OAAM,IAAI,MAAM,wBAAwB;AAC1E,IAAIA,EAAM,SAAS,MAAM,MAAGA,KAAS;AACrC,UAAM6D,IAAIC,EAAK,KAAK,KAAK,QAAQ,QAAQ,MAAM9D,CAAK;AACpD,WAAG,MAAM+D,EAAG,KAAKF,CAAC,EAAE,KAAK,MAAM,EAAI,EAAE,MAAM,MAAM,EAAK,IAAUA,IAC3D,KAAK,UAAU7D,CAAK,IAAU,KAAK,UAAUA,CAAK,KACvD,KAAK,UAAUA,CAAK,IAAI,MAAM,6DAA6DA,CAAK,EAAE,EAChG,KAAK,CAAAkB,MAAQA,EAAK,aAAa,EAC/B,KAAK,CAAA8C,MAAO,OAAO,KAAKA,CAAG,CAAC,EAAE,KAAK,OAAMC,OACzC,MAAMF,EAAG,UAAUF,GAAGI,CAAM,GAC5B,OAAO,KAAK,UAAUjE,CAAK,GACpB6D,EACP,GACK,KAAK,UAAU7D,CAAK;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,IAAIoD,GAAqE;AACxE,QAAIc;AACJ,WAAO;AAAA,MACN,OAAO,MAAM;AAAE,QAAAA,GAAQ,UAAA;AAAA,MAAa;AAAA,MACpC,UAAU,IAAI,QAAQ,OAAMvD,MAAO;AAClC,QAAAuD,IAAS,MAAMC,EAAa,KAAK;AACjC,cAAM,EAAC,MAAAR,EAAA,IAAQ,MAAMO,EAAO,UAAUd,CAAI;AAC1C,cAAMc,EAAO,UAAA,GACbvD,EAAIgD,EAAK,KAAK,KAAA,KAAU,IAAI;AAAA,MAC7B,CAAC;AAAA,IAAA;AAAA,EAEH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,mBAAmBS,MAAmBC,GAAuB;AAC5D,QAAGA,EAAY,SAAS,EAAG,OAAM,IAAI,MAAM,wCAAwC;AAEnF,UAAMC,IAAS,CAAClD,GAAcmD,IAAqB,OAC3CnD,EAAK,cAAc,MAAM,EAAE,EAAE,IAAI,CAACoD,GAAMC,MAC7CD,EAAK,WAAW,CAAC,KAAKC,IAAQ,KAAMF,IAAaA,CAAU,EAAE,MAAM,GAAGA,CAAU,GAG7EG,IAAmB,CAACC,GAAcC,MAAyB;AAChE,UAAID,EAAG,WAAWC,EAAG,OAAQ,OAAM,IAAI,MAAM,6BAA6B;AAC1E,YAAMC,IAAUC,EAAG,SAASH,CAAE,GAAGI,IAAUD,EAAG,SAASF,CAAE,GACnDI,IAAaF,EAAG,IAAID,GAASE,CAAO,GACpCE,IAAaH,EAAG,KAAKD,CAAO,GAC5BK,IAAaJ,EAAG,KAAKC,CAAO;AAClC,aAAGE,EAAW,WAAW,CAAC,MAAM,KAAKC,EAAW,WAAW,CAAC,MAAM,IAAU,IACrEF,EAAW,SAAA,EAAW,CAAC,KAAKC,EAAW,WAAW,CAAC,IAAIC,EAAW,SAAA,EAAW,CAAC;AAAA,IACtF,GAEMC,IAAIb,EAAOF,CAAM,GACjBgB,IAAef,EAAY,IAAI,CAAAvD,MAAKwD,EAAOxD,CAAC,CAAC,EAAE,IAAI,CAAAuE,MAAaX,EAAiBS,GAAGE,CAAS,CAAC;AACpG,WAAO,EAAC,KAAKD,EAAa,OAAO,CAACE,GAAKC,MAAMD,IAAMC,GAAG,CAAC,IAAIH,EAAa,QAAQ,KAAK,KAAK,IAAI,GAAGA,CAAY,GAAG,cAAAA,EAAA;AAAA,EACjH;AACD;AC1FO,MAAMI,IAAkB;AAAA,EAC9B,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM,EAAC,SAAS,EAAC,MAAM,UAAU,aAAa,kBAAkB,UAAU,KAAI;AAAA,EAC9E,IAAI,CAACpD,MAA4BqD,IAAIrD,EAAK,OAAO;AAClD,GAEasD,IAAuB;AAAA,EACnC,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM,CAAA;AAAA,EACN,IAAI,aAAY,oBAAI,KAAA,GAAO,YAAA;AAC5B,GAEaC,IAAmB;AAAA,EAC/B,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM;AAAA,IAC
L,UAAU,EAAC,MAAM,UAAU,aAAa,sBAAsB,MAAM,CAAC,OAAO,QAAQ,QAAQ,GAAG,UAAU,GAAA;AAAA,IACzG,MAAM,EAAC,MAAM,UAAU,aAAa,mBAAmB,UAAU,GAAA;AAAA,EAAI;AAAA,EAEtE,IAAI,OAAOvD,GAAMtC,MAAO;AACvB,QAAI;AACH,cAAOsC,EAAK,MAAA;AAAA,QACX,KAAK;AACJ,iBAAO,MAAMoD,EAAQ,GAAG,EAAC,SAASpD,EAAK,KAAA,GAAOtC,CAAE;AAAA,QACjD,KAAK;AACJ,iBAAO,MAAM8F,EAAO,GAAG,EAAC,MAAMxD,EAAK,KAAA,GAAOtC,CAAE;AAAA,QAC7C,KAAK;AACJ,iBAAO,MAAM+F,EAAW,GAAG,EAAC,MAAMzD,EAAK,KAAA,GAAOtC,CAAE;AAAA,MACjD;AAAA,IAEF,SAAQgC,GAAU;AACjB,aAAO,EAAC,OAAOA,GAAK,WAAWA,EAAI,WAAS;AAAA,IAC7C;AAAA,EACD;AACD,GAEagE,KAAoB;AAAA,EAChC,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM;AAAA,IACL,KAAK,EAAC,MAAM,UAAU,aAAa,gBAAgB,UAAU,GAAA;AAAA,IAC7D,QAAQ,EAAC,MAAM,UAAU,aAAa,sBAAsB,MAAM,CAAC,OAAO,QAAQ,OAAO,QAAQ,GAAG,SAAS,MAAA;AAAA,IAC7G,SAAS,EAAC,MAAM,UAAU,aAAa,wBAAwB,SAAS,GAAC;AAAA,IACzE,MAAM,EAAC,MAAM,UAAU,aAAa,oBAAA;AAAA,EAAmB;AAAA,EAExD,IAAI,CAAC1D,MAKC,IAAI2D,EAAK,EAAC,KAAK3D,EAAK,KAAK,SAASA,EAAK,SAAQ,EAAE,QAAQ,EAAC,QAAQA,EAAK,UAAU,OAAO,MAAMA,EAAK,KAAA,CAAK;AAC/G,GAEawD,IAAiB;AAAA,EAC7B,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM;AAAA,IACL,MAAM,EAAC,MAAM,UAAU,aAAa,uBAAuB,UAAU,GAAA;AAAA,EAAI;AAAA,EAE1E,IAAI,OAAOxD,MAAyB;AACnC,UAAM4D,IAAUC,EAAmB,IAAI,GACjC/E,IAAO,MAAMgF,EAAQ,EAAC,SAAAF,EAAA,GAAU5D,EAAK,MAAM,EAAI,EAAE,MAAM,CAACN,MAAakE,EAAQ,OAAO,MAAM,KAAKlE,CAAG,CAAC;AACzG,WAAO,EAAC,GAAGkE,EAAQ,QAAQ,QAAQ9E,GAAM,QAAQ,QAAW,QAAQ,OAAA;AAAA,EACrE;AACD,GAEa2E,IAAqB;AAAA,EACjC,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM;AAAA,IACL,MAAM,EAAC,MAAM,UAAU,aAAa,uBAAuB,UAAU,GAAA;AAAA,EAAI;AAAA,EAE1E,IAAI,OAAOzD,OAA0B,EAAC,QAAQ+D,eAAmB/D,EAAK,IAAI,IAAA;AAC3E,GAEagE,KAAqB;AAAA,EACjC,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM;AAAA,IACL,OAAO,EAAC,MAAM,UAAU,aAAa,iBAAiB,UAAU,GAAA;AAAA,IAChE,QAAQ,EAAC,MAAM,UAAU,aAAa,+BAA+B,SAAS,EAAA;AAAA,EAAC;AAAA,EAEhF,IAAI,OAAOhE,MAGL;AACL,UAAMiE,IAAO,MAAM,MAAM,uCAAuC,mBAAmBjE,EAAK,KAAK,CAAC,IAAI;AAAA,MACjG,SAAS,EAAC,cAAc,6CAA6C,mBAAmB,iBAAA;AAAA,IAAgB,CACxG,EAAE,KAAK,CAAAlB,MAAQA,EAAK,MAAM;AAC3B,QAAIoF,GAAOC,IAAQ;AACnB,UAAM/E,IAAU,IAAIgF,EAAA;AACpB,YAAOF,IAAQC,EAAM,KAAKF,CAAI,OAAO,QAAM;AAC1C,UAAII,IAAM,iBAAiB,KAAK,mBAAmBH,EAAM,CAAC,CAAC,CAAC,IAAI,CAAC;AAGjE,UAFGG,MAAKA,IAAM,mBAAmBA,CAAG,IACjCA,KAAKjF,EAAQ,IAAIiF,CAAG,GACpBjF,EAAQ,SAASY,EAAK,UAAU,GAAI;AAAA,IACxC;AACA,WAAOZ;AAAA,EACR;AACD;"}
|
|
1
|
+
{"version":3,"file":"index.mjs","sources":["../src/provider.ts","../src/antrhopic.ts","../src/ollama.ts","../src/open-ai.ts","../src/llm.ts","../src/ai.ts","../src/tools.ts"],"sourcesContent":["import {LLMMessage, LLMOptions, LLMRequest} from './llm.ts';\n\nexport type AbortablePromise<T> = Promise<T> & {abort: () => void};\n\nexport abstract class LLMProvider {\n\tabstract ask(message: string, options: LLMRequest): AbortablePromise<LLMMessage[]>;\n}\n","import {Anthropic as anthropic} from '@anthropic-ai/sdk';\nimport {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {LLMMessage, LLMRequest} from './llm.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\n\nexport class Anthropic extends LLMProvider {\n\tclient!: anthropic;\n\n\tconstructor(public readonly ai: Ai, public readonly apiToken: string, public model: string) {\n\t\tsuper();\n\t\tthis.client = new anthropic({apiKey: apiToken});\n\t}\n\n\tprivate toStandard(history: any[]): LLMMessage[] {\n\t\tconst merged: any[] = [];\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tconst msg = history[i];\n\t\t\tif(typeof msg.content != 'string') {\n\t\t\t\tif(msg.role == 'assistant') {\n\t\t\t\t\tmsg.content.filter((c: any) => c.type == 'tool_use').forEach((c: any) => {\n\t\t\t\t\t\tmerged.push({role: 'tool', id: c.id, name: c.name, args: c.input});\n\t\t\t\t\t});\n\t\t\t\t} else if(msg.role == 'user') {\n\t\t\t\t\tmsg.content.filter((c: any) => c.type == 'tool_result').forEach((c: any) => {\n\t\t\t\t\t\tconst h = merged.find((h: any) => h.id == c.tool_use_id);\n\t\t\t\t\t\tif(h) h[c.is_error ? 'error' : 'content'] = c.content;\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\tmsg.content = msg.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\\n\\n');\n\t\t\t}\n\t\t\tif(msg.content) {\n\t\t\t\tconst last = merged.at(-1);\n\t\t\t\tif(last && last.role == 'assistant' && msg.role == 'assistant') last.content += '\\n\\n' + msg.content;\n\t\t\t\telse merged.push({role: msg.role, content: msg.content});\n\t\t\t}\n\t\t}\n\t\treturn merged;\n\t}\n\n\tprivate fromStandard(history: LLMMessage[]): any[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tif(history[i].role == 'tool') {\n\t\t\t\tconst h: any = history[i];\n\t\t\t\thistory.splice(i, 1,\n\t\t\t\t\t{role: 'assistant', content: [{type: 'tool_use', id: h.id, name: h.name, input: h.args}]},\n\t\t\t\t\t{role: 'user', content: [{type: 'tool_result', tool_use_id: h.id, is_error: !!h.error, content: h.error || h.content}]}\n\t\t\t\t)\n\t\t\t\ti++;\n\t\t\t}\n\t\t}\n\t\treturn history;\n\t}\n\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tconst controller = new AbortController();\n\t\tconst response = new Promise<any>(async (res, rej) => {\n\t\t\tlet history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);\n\t\t\tif(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);\n\t\t\tconst requestParams: any = {\n\t\t\t\tmodel: options.model || this.model,\n\t\t\t\tmax_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,\n\t\t\t\tsystem: options.system || this.ai.options.system || '',\n\t\t\t\ttemperature: options.temperature || this.ai.options.temperature || 0.7,\n\t\t\t\ttools: (options.tools || this.ai.options.tools || []).map(t => ({\n\t\t\t\t\tname: t.name,\n\t\t\t\t\tdescription: t.description,\n\t\t\t\t\tinput_schema: {\n\t\t\t\t\t\ttype: 
'object',\n\t\t\t\t\t\tproperties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},\n\t\t\t\t\t\trequired: t.args ? Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []\n\t\t\t\t\t},\n\t\t\t\t\tfn: undefined\n\t\t\t\t})),\n\t\t\t\tmessages: history,\n\t\t\t\tstream: !!options.stream,\n\t\t\t};\n\n\t\t\tlet resp: any;\n\t\t\tlet isFirstMessage = true;\n\t\t\tdo {\n\t\t\t\tresp = await this.client.messages.create(requestParams);\n\n\t\t\t\tif(options.stream) {\n\t\t\t\t\tif(!isFirstMessage) options.stream({text: '\\n\\n'});\n\t\t\t\t\tisFirstMessage = false;\n\n\t\t\t\t\tresp.content = [];\n\t\t\t\t\tfor await (const chunk of resp) {\n\t\t\t\t\t\tif(controller.signal.aborted) break;\n\t\t\t\t\t\tif(chunk.type === 'content_block_start') {\n\t\t\t\t\t\t\tif(chunk.content_block.type === 'text') {\n\t\t\t\t\t\t\t\tresp.content.push({type: 'text', text: ''});\n\t\t\t\t\t\t\t} else if(chunk.content_block.type === 'tool_use') {\n\t\t\t\t\t\t\t\tresp.content.push({type: 'tool_use', id: chunk.content_block.id, name: chunk.content_block.name, input: <any>''});\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t} else if(chunk.type === 'content_block_delta') {\n\t\t\t\t\t\t\tif(chunk.delta.type === 'text_delta') {\n\t\t\t\t\t\t\t\tconst text = chunk.delta.text;\n\t\t\t\t\t\t\t\tresp.content.at(-1).text += text;\n\t\t\t\t\t\t\t\toptions.stream({text});\n\t\t\t\t\t\t\t} else if(chunk.delta.type === 'input_json_delta') {\n\t\t\t\t\t\t\t\tresp.content.at(-1).input += chunk.delta.partial_json;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t} else if(chunk.type === 'content_block_stop') {\n\t\t\t\t\t\t\tconst last = resp.content.at(-1);\n\t\t\t\t\t\t\tif(last.input != null) last.input = last.input ? JSONAttemptParse(last.input, {}) : {};\n\t\t\t\t\t\t} else if(chunk.type === 'message_stop') {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tconst toolCalls = resp.content.filter((c: any) => c.type === 'tool_use');\n\t\t\t\tif(toolCalls.length && !controller.signal.aborted) {\n\t\t\t\t\thistory.push({role: 'assistant', content: resp.content});\n\t\t\t\t\tconst results = await Promise.all(toolCalls.map(async (toolCall: any) => {\n\t\t\t\t\t\tconst tool = options.tools?.find(findByProp('name', toolCall.name));\n\t\t\t\t\t\tif(!tool) return {tool_use_id: toolCall.id, is_error: true, content: 'Tool not found'};\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst result = await tool.fn(toolCall.input, this.ai);\n\t\t\t\t\t\t\treturn {type: 'tool_result', tool_use_id: toolCall.id, content: JSONSanitize(result)};\n\t\t\t\t\t\t} catch (err: any) {\n\t\t\t\t\t\t\treturn {type: 'tool_result', tool_use_id: toolCall.id, is_error: true, content: err?.message || err?.toString() || 'Unknown'};\n\t\t\t\t\t\t}\n\t\t\t\t\t}));\n\t\t\t\t\thistory.push({role: 'user', content: results});\n\t\t\t\t\trequestParams.messages = history;\n\t\t\t\t}\n\t\t\t} while (!controller.signal.aborted && resp.content.some((c: any) => c.type === 'tool_use'));\n\n\t\t\tif(options.stream) options.stream({done: true});\n\t\t\tres(this.toStandard([...history, {\n\t\t\t\trole: 'assistant',\n\t\t\t\tcontent: resp.content.filter((c: any) => c.type == 'text').map((c: any) => c.text).join('\\n\\n')\n\t\t\t}]));\n\t\t});\n\n\t\treturn Object.assign(response, {abort: () => controller.abort()});\n\t}\n}\n","import {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {LLMMessage, LLMRequest} from './llm.ts';\nimport {AbortablePromise, LLMProvider} from 
'./provider.ts';\nimport {Ollama as ollama} from 'ollama';\n\nexport class Ollama extends LLMProvider {\n\tclient!: ollama;\n\n\tconstructor(public readonly ai: Ai, public host: string, public model: string) {\n\t\tsuper();\n\t\tthis.client = new ollama({host});\n\t}\n\n\tprivate toStandard(history: any[]): LLMMessage[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tif(history[i].role == 'assistant' && history[i].tool_calls) {\n\t\t\t\tif(history[i].content) delete history[i].tool_calls;\n\t\t\t\telse {\n\t\t\t\t\thistory.splice(i, 1);\n\t\t\t\t\ti--;\n\t\t\t\t}\n\t\t\t} else if(history[i].role == 'tool') {\n\t\t\t\tconst error = history[i].content.startsWith('{\"error\":');\n\t\t\t\thistory[i] = {role: 'tool', name: history[i].tool_name, args: history[i].args, [error ? 'error' : 'content']: history[i].content};\n\t\t\t}\n\t\t}\n\t\treturn history;\n\t}\n\n\tprivate fromStandard(history: LLMMessage[]): any[] {\n\t\treturn history.map((h: any) => {\n\t\t\tif(h.role != 'tool') return h;\n\t\t\treturn {role: 'tool', tool_name: h.name, content: h.error || h.content}\n\t\t});\n\t}\n\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tconst controller = new AbortController();\n\t\tconst response = new Promise<any>(async (res, rej) => {\n\t\t\tlet system = options.system || this.ai.options.system;\n\t\t\tlet history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);\n\t\t\tif(history[0].roll == 'system') {\n\t\t\t\tif(!system) system = history.shift();\n\t\t\t\telse history.shift();\n\t\t\t}\n\t\t\tif(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min);\n\t\t\tif(options.system) history.unshift({role: 'system', content: system})\n\n\t\t\tconst requestParams: any = {\n\t\t\t\tmodel: options.model || this.model,\n\t\t\t\tmessages: history,\n\t\t\t\tstream: !!options.stream,\n\t\t\t\tsignal: controller.signal,\n\t\t\t\toptions: {\n\t\t\t\t\ttemperature: options.temperature || this.ai.options.temperature || 0.7,\n\t\t\t\t\tnum_predict: options.max_tokens || this.ai.options.max_tokens || 4096,\n\t\t\t\t},\n\t\t\t\ttools: (options.tools || this.ai.options.tools || []).map(t => ({\n\t\t\t\t\ttype: 'function',\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: t.name,\n\t\t\t\t\t\tdescription: t.description,\n\t\t\t\t\t\tparameters: {\n\t\t\t\t\t\t\ttype: 'object',\n\t\t\t\t\t\t\tproperties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},\n\t\t\t\t\t\t\trequired: t.args ? 
Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}))\n\t\t\t}\n\n\t\t\t// Run tool chains\n\t\t\tlet resp: any;\n\t\t\tdo {\n\t\t\t\tresp = await this.client.chat(requestParams);\n\t\t\t\tif(options.stream) {\n\t\t\t\t\tresp.message = {role: 'assistant', content: '', tool_calls: []};\n\t\t\t\t\tfor await (const chunk of resp) {\n\t\t\t\t\t\tif(controller.signal.aborted) break;\n\t\t\t\t\t\tif(chunk.message?.content) {\n\t\t\t\t\t\t\tresp.message.content += chunk.message.content;\n\t\t\t\t\t\t\toptions.stream({text: chunk.message.content});\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif(chunk.message?.tool_calls) resp.message.tool_calls = chunk.message.tool_calls;\n\t\t\t\t\t\tif(chunk.done) break;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Run tools\n\t\t\t\tif(resp.message?.tool_calls?.length && !controller.signal.aborted) {\n\t\t\t\t\thistory.push(resp.message);\n\t\t\t\t\tconst results = await Promise.all(resp.message.tool_calls.map(async (toolCall: any) => {\n\t\t\t\t\t\tconst tool = (options.tools || this.ai.options.tools)?.find(findByProp('name', toolCall.function.name));\n\t\t\t\t\t\tif(!tool) return {role: 'tool', tool_name: toolCall.function.name, content: '{\"error\": \"Tool not found\"}'};\n\t\t\t\t\t\tconst args = typeof toolCall.function.arguments === 'string' ? JSONAttemptParse(toolCall.function.arguments, {}) : toolCall.function.arguments;\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst result = await tool.fn(args, this.ai);\n\t\t\t\t\t\t\treturn {role: 'tool', tool_name: toolCall.function.name, args, content: JSONSanitize(result)};\n\t\t\t\t\t\t} catch (err: any) {\n\t\t\t\t\t\t\treturn {role: 'tool', tool_name: toolCall.function.name, args, content: JSONSanitize({error: err?.message || err?.toString() || 'Unknown'})};\n\t\t\t\t\t\t}\n\t\t\t\t\t}));\n\t\t\t\t\thistory.push(...results);\n\t\t\t\t\trequestParams.messages = history;\n\t\t\t\t}\n\t\t\t} while (!controller.signal.aborted && resp.message?.tool_calls?.length);\n\t\t\tif(options.stream) options.stream({done: true});\n\t\t\tres(this.toStandard([...history, {role: 'assistant', content: resp.message?.content}]));\n\t\t});\n\t\treturn Object.assign(response, {abort: () => controller.abort()});\n\t}\n}\n","import {OpenAI as openAI} from 'openai';\nimport {findByProp, objectMap, JSONSanitize, JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {LLMMessage, LLMRequest} from './llm.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\n\nexport class OpenAi extends LLMProvider {\n\tclient!: openAI;\n\n\tconstructor(public readonly ai: Ai, public readonly apiToken: string, public model: string) {\n\t\tsuper();\n\t\tthis.client = new openAI({apiKey: apiToken});\n\t}\n\n\tprivate toStandard(history: any[]): LLMMessage[] {\n\t\tfor(let i = 0; i < history.length; i++) {\n\t\t\tconst h = history[i];\n\t\t\tif(h.role === 'assistant' && h.tool_calls) {\n\t\t\t\tconst tools = h.tool_calls.map((tc: any) => ({\n\t\t\t\t\trole: 'tool',\n\t\t\t\t\tid: tc.id,\n\t\t\t\t\tname: tc.function.name,\n\t\t\t\t\targs: JSONAttemptParse(tc.function.arguments, {})\n\t\t\t\t}));\n\t\t\t\thistory.splice(i, 1, ...tools);\n\t\t\t\ti += tools.length - 1;\n\t\t\t} else if(h.role === 'tool' && h.content) {\n\t\t\t\tconst record = history.find(h2 => h.tool_call_id == h2.id);\n\t\t\t\tif(record) {\n\t\t\t\t\tif(h.content.includes('\"error\":')) record.error = h.content;\n\t\t\t\t\telse record.content = h.content;\n\t\t\t\t}\n\t\t\t\thistory.splice(i, 
1);\n\t\t\t\ti--;\n\t\t\t}\n\n\t\t}\n\t\treturn history;\n\t}\n\n\tprivate fromStandard(history: LLMMessage[]): any[] {\n\t\treturn history.reduce((result, h) => {\n\t\t\tif(h.role === 'tool') {\n\t\t\t\tresult.push({\n\t\t\t\t\trole: 'assistant',\n\t\t\t\t\tcontent: null,\n\t\t\t\t\ttool_calls: [{ id: h.id, type: 'function', function: { name: h.name, arguments: JSON.stringify(h.args) } }],\n\t\t\t\t\trefusal: null,\n\t\t\t\t\tannotations: [],\n\t\t\t\t}, {\n\t\t\t\t\trole: 'tool',\n\t\t\t\t\ttool_call_id: h.id,\n\t\t\t\t\tcontent: h.error || h.content\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\tresult.push(h);\n\t\t\t}\n\t\t\treturn result;\n\t\t}, [] as any[]);\n\t}\n\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tconst controller = new AbortController();\n\t\tconst response = new Promise<any>(async (res, rej) => {\n\t\t\tlet history = this.fromStandard([...options.history || [], {role: 'user', content: message}]);\n\t\t\tif(options.compress) history = await this.ai.llm.compress(<any>history, options.compress.max, options.compress.min, options);\n\n\t\t\tconst requestParams: any = {\n\t\t\t\tmodel: options.model || this.model,\n\t\t\t\tmessages: history,\n\t\t\t\tstream: !!options.stream,\n\t\t\t\tmax_tokens: options.max_tokens || this.ai.options.max_tokens || 4096,\n\t\t\t\ttemperature: options.temperature || this.ai.options.temperature || 0.7,\n\t\t\t\ttools: (options.tools || this.ai.options.tools || []).map(t => ({\n\t\t\t\t\ttype: 'function',\n\t\t\t\t\tfunction: {\n\t\t\t\t\t\tname: t.name,\n\t\t\t\t\t\tdescription: t.description,\n\t\t\t\t\t\tparameters: {\n\t\t\t\t\t\t\ttype: 'object',\n\t\t\t\t\t\t\tproperties: t.args ? objectMap(t.args, (key, value) => ({...value, required: undefined})) : {},\n\t\t\t\t\t\t\trequired: t.args ? 
Object.entries(t.args).filter(t => t[1].required).map(t => t[0]) : []\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}))\n\t\t\t};\n\n\t\t\t// Tool call and streaming logic similar to other providers\n\t\t\tlet resp: any;\n\t\t\tdo {\n\t\t\t\tresp = await this.client.chat.completions.create(requestParams);\n\n\t\t\t\t// Implement streaming and tool call handling\n\t\t\t\tif(options.stream) {\n\t\t\t\t\tresp.choices = [];\n\t\t\t\t\tfor await (const chunk of resp) {\n\t\t\t\t\t\tif(controller.signal.aborted) break;\n\t\t\t\t\t\tif(chunk.choices[0].delta.content) {\n\t\t\t\t\t\t\toptions.stream({text: chunk.choices[0].delta.content});\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// Run tools\n\t\t\t\tconst toolCalls = resp.choices[0].message.tool_calls || [];\n\t\t\t\tif(toolCalls.length && !controller.signal.aborted) {\n\t\t\t\t\thistory.push(resp.choices[0].message);\n\t\t\t\t\tconst results = await Promise.all(toolCalls.map(async (toolCall: any) => {\n\t\t\t\t\t\tconst tool = options.tools?.find(findByProp('name', toolCall.function.name));\n\t\t\t\t\t\tif(!tool) return {role: 'tool', tool_call_id: toolCall.id, content: '{\"error\": \"Tool not found\"}'};\n\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\tconst args = JSONAttemptParse(toolCall.function.arguments, {});\n\t\t\t\t\t\t\tconst result = await tool.fn(args, this.ai);\n\t\t\t\t\t\t\treturn {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize(result)};\n\t\t\t\t\t\t} catch (err: any) {\n\t\t\t\t\t\t\treturn {role: 'tool', tool_call_id: toolCall.id, content: JSONSanitize({error: err?.message || err?.toString() || 'Unknown'})};\n\t\t\t\t\t\t}\n\t\t\t\t\t}));\n\t\t\t\t\thistory.push(...results);\n\t\t\t\t\trequestParams.messages = history;\n\t\t\t\t}\n\t\t\t} while (!controller.signal.aborted && resp.choices?.[0]?.message?.tool_calls?.length);\n\n\t\t\tif(options.stream) options.stream({done: true});\n\t\t\tres(this.toStandard([...history, {role: 'assistant', content: resp.choices[0].message.content || ''}]));\n\t\t});\n\n\t\treturn Object.assign(response, {abort: () => controller.abort()});\n\t}\n}\n","import {JSONAttemptParse} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\nimport {Anthropic} from './antrhopic.ts';\nimport {Ollama} from './ollama.ts';\nimport {OpenAi} from './open-ai.ts';\nimport {AbortablePromise, LLMProvider} from './provider.ts';\nimport {AiTool} from './tools.ts';\n\nexport type LLMMessage = {\n\t/** Message originator */\n\trole: 'assistant' | 'system' | 'user';\n\t/** Message content */\n\tcontent: string | any;\n} | {\n\t/** Tool call */\n\trole: 'tool';\n\t/** Unique ID for call */\n\tid: string;\n\t/** Tool that was run */\n\tname: string;\n\t/** Tool arguments */\n\targs: any;\n\t/** Tool result */\n\tcontent: undefined | string;\n\t/** Tool error */\n\terror: undefined | string;\n}\n\nexport type LLMOptions = {\n\t/** Anthropic settings */\n\tanthropic?: {\n\t\t/** API Token */\n\t\ttoken: string;\n\t\t/** Default model */\n\t\tmodel: string;\n\t},\n\t/** Ollama settings */\n\tollama?: {\n\t\t/** connection URL */\n\t\thost: string;\n\t\t/** Default model */\n\t\tmodel: string;\n\t},\n\t/** Open AI settings */\n\topenAi?: {\n\t\t/** API Token */\n\t\ttoken: string;\n\t\t/** Default model */\n\t\tmodel: string;\n\t},\n\t/** Default provider & model */\n\tmodel: string | [string, string];\n} & Omit<LLMRequest, 'model'>;\n\nexport type LLMRequest = {\n\t/** System prompt */\n\tsystem?: string;\n\t/** Message history */\n\thistory?: LLMMessage[];\n\t/** Max tokens for request */\n\tmax_tokens?: number;\n\t/** 0 = 
Rigid Logic, 1 = Balanced, 2 = Hyper Creative **/\n\ttemperature?: number;\n\t/** Available tools */\n\ttools?: AiTool[];\n\t/** LLM model */\n\tmodel?: string | [string, string];\n\t/** Stream response */\n\tstream?: (chunk: {text?: string, done?: true}) => any;\n\t/** Compress old messages in the chat to free up context */\n\tcompress?: {\n\t\t/** Trigger chat compression once context exceeds the token count */\n\t\tmax: number;\n\t\t/** Compress chat until context size smaller than */\n\t\tmin: number\n\t}\n}\n\nexport class LLM {\n\tprivate providers: {[key: string]: LLMProvider} = {};\n\n\tconstructor(public readonly ai: Ai, public readonly options: LLMOptions) {\n\t\tif(options.anthropic?.token) this.providers.anthropic = new Anthropic(this.ai, options.anthropic.token, options.anthropic.model);\n\t\tif(options.ollama?.host) this.providers.ollama = new Ollama(this.ai, options.ollama.host, options.ollama.model);\n\t\tif(options.openAi?.token) this.providers.openAi = new OpenAi(this.ai, options.openAi.token, options.openAi.model);\n\t}\n\n\t/**\n\t * Chat with LLM\n\t * @param {string} message Question\n\t * @param {LLMRequest} options Configuration options and chat history\n\t * @returns {{abort: () => void, response: Promise<LLMMessage[]>}} Function to abort response and chat history\n\t */\n\task(message: string, options: LLMRequest = {}): AbortablePromise<LLMMessage[]> {\n\t\tlet model: any = [null, null];\n\t\tif(options.model) {\n\t\t\tif(typeof options.model == 'object') model = options.model;\n\t\t\telse model = [options.model, (<any>this.options)[options.model]?.model];\n\t\t}\n\t\tif(!options.model || model[1] == null) {\n\t\t\tif(typeof this.options.model == 'object') model = this.options.model;\n\t\t\telse model = [this.options.model, (<any>this.options)[this.options.model]?.model];\n\t\t}\n\t\tif(!model[0] || !model[1]) throw new Error(`Unknown LLM provider or model: ${model[0]} / ${model[1]}`);\n\t\treturn this.providers[model[0]].ask(message, {...options, model: model[1]});\n\t}\n\n\t/**\n\t * Compress chat history to reduce context size\n\t * @param {LLMMessage[]} history Chatlog that will be compressed\n\t * @param max Trigger compression once context is larger than max\n\t * @param min Summarize until context size is less than min\n\t * @param {LLMRequest} options LLM options\n\t * @returns {Promise<LLMMessage[]>} New chat history will summary at index 0\n\t */\n\tasync compress(history: LLMMessage[], max: number, min: number, options?: LLMRequest): Promise<LLMMessage[]> {\n\t\tif(this.estimateTokens(history) < max) return history;\n\t\tlet keep = 0, tokens = 0;\n\t\tfor(let m of history.toReversed()) {\n\t\t\ttokens += this.estimateTokens(m.content);\n\t\t\tif(tokens < min) keep++;\n\t\t\telse break;\n\t\t}\n\t\tif(history.length <= keep) return history;\n\t\tconst recent = keep == 0 ? [] : history.slice(-keep),\n\t\t\tprocess = (keep == 0 ? 
history : history.slice(0, -keep)).filter(h => h.role === 'assistant' || h.role === 'user');\n\t\tconst summary = await this.summarize(process.map(m => `${m.role}: ${m.content}`).join('\\n\\n'), 250, options);\n\t\treturn [{role: 'assistant', content: `Conversation Summary: ${summary}`}, ...recent];\n\t}\n\n\t/**\n\t * Estimate variable as tokens\n\t * @param history Object to size\n\t * @returns {number} Rough token count\n\t */\n\testimateTokens(history: any): number {\n\t\tconst text = JSON.stringify(history);\n\t\treturn Math.ceil((text.length / 4) * 1.2);\n\t}\n\n\t/**\n\t * Ask a question with JSON response\n\t * @param {string} message Question\n\t * @param {LLMRequest} options Configuration options and chat history\n\t * @returns {Promise<{} | {} | RegExpExecArray | null>}\n\t */\n\tasync json(message: string, options?: LLMRequest) {\n\t\tlet resp = await this.ask(message, {\n\t\t\tsystem: 'Respond using a JSON blob',\n\t\t\t...options\n\t\t});\n\t\tif(!resp?.[0]?.content) return {};\n\t\treturn JSONAttemptParse(new RegExp('\\{[\\s\\S]*\\}').exec(resp[0].content), {});\n\t}\n\n\t/**\n\t * Create a summary of some text\n\t * @param {string} text Text to summarize\n\t * @param {number} tokens Max number of tokens\n\t * @param options LLM request options\n\t * @returns {Promise<string>} Summary\n\t */\n\tsummarize(text: string, tokens: number, options?: LLMRequest): Promise<string | null> {\n\t\treturn this.ask(text, {system: `Generate a brief summary <= ${tokens} tokens. Output nothing else`, temperature: 0.3, ...options})\n\t\t\t.then(history => <string>history.pop()?.content || null);\n\t}\n}\n","import {createWorker} from 'tesseract.js';\nimport {LLM, LLMOptions} from './llm';\nimport fs from 'node:fs/promises';\nimport Path from 'node:path';\nimport * as tf from '@tensorflow/tfjs';\nimport {spawn} from 'node:child_process';\n\nexport type AiOptions = LLMOptions & {\n\twhisper?: {\n\t\t/** Whisper binary location */\n\t\tbinary: string;\n\t\t/** Model: `ggml-base.en.bin` */\n\t\tmodel: string;\n\t\t/** Path to models */\n\t\tpath: string;\n\t}\n}\n\nexport class Ai {\n\tprivate downloads: {[key: string]: Promise<string>} = {};\n\tprivate whisperModel!: string;\n\n\t/** Large Language Models */\n\tllm!: LLM;\n\n\tconstructor(public readonly options: AiOptions) {\n\t\tthis.llm = new LLM(this, options);\n\t\tif(this.options.whisper?.binary) {\n\t\t\tthis.whisperModel = this.options.whisper?.model.endsWith('.bin') ? 
this.options.whisper?.model : this.options.whisper?.model + '.bin';\n\t\t\tthis.downloadAsrModel();\n\t\t}\n\t}\n\n\t/**\n\t * Convert audio to text using Auditory Speech Recognition\n\t * @param {string} path Path to audio\n\t * @param model Whisper model\n\t * @returns {Promise<any>} Extracted text\n\t */\n\tasr(path: string, model: string = this.whisperModel): {abort: () => void, response: Promise<string | null>} {\n\t\tif(!this.options.whisper?.binary) throw new Error('Whisper not configured');\n\t\tlet abort: any = () => {};\n\t\tconst response = new Promise<string | null>((resolve, reject) => {\n\t\t\tthis.downloadAsrModel(model).then(m => {\n\t\t\t\tlet output = '';\n\t\t\t\tconst proc = spawn(<string>this.options.whisper?.binary, ['-nt', '-np', '-m', m, '-f', path], {stdio: ['ignore', 'pipe', 'ignore']});\n\t\t\t\tabort = () => proc.kill('SIGTERM');\n\t\t\t\tproc.on('error', (err: Error) => reject(err));\n\t\t\t\tproc.stdout.on('data', (data: Buffer) => output += data.toString());\n\t\t\t\tproc.on('close', (code: number) => {\n\t\t\t\t\tif(code === 0) resolve(output.trim() || null);\n\t\t\t\t\telse reject(new Error(`Exit code ${code}`));\n\t\t\t\t});\n\t\t\t});\n\t\t});\n\t\treturn {response, abort};\n\t}\n\n\t/**\n\t * Downloads the specified Whisper model if it is not already present locally.\n\t *\n\t * @param {string} model Whisper model that will be downloaded\n\t * @return {Promise<string>} Absolute path to model file, resolves once downloaded\n\t */\n\tasync downloadAsrModel(model: string = this.whisperModel): Promise<string> {\n\t\tif(!this.options.whisper?.binary) throw new Error('Whisper not configured');\n\t\tif(!model.endsWith('.bin')) model += '.bin';\n\t\tconst p = Path.join(this.options.whisper.path, model);\n\t\tif(await fs.stat(p).then(() => true).catch(() => false)) return p;\n\t\tif(!!this.downloads[model]) return this.downloads[model];\n\t\tthis.downloads[model] = fetch(`https://huggingface.co/ggerganov/whisper.cpp/resolve/main/${model}`)\n\t\t\t.then(resp => resp.arrayBuffer())\n\t\t\t.then(arr => Buffer.from(arr)).then(async buffer => {\n\t\t\t\tawait fs.writeFile(p, buffer);\n\t\t\t\tdelete this.downloads[model];\n\t\t\t\treturn p;\n\t\t\t});\n\t\treturn this.downloads[model];\n\t}\n\n\t/**\n\t * Convert image to text using Optical Character Recognition\n\t * @param {string} path Path to image\n\t * @returns {{abort: Function, response: Promise<string | null>}} Abort function & Promise of extracted text\n\t */\n\tocr(path: string): {abort: () => void, response: Promise<string | null>} {\n\t\tlet worker: any;\n\t\treturn {\n\t\t\tabort: () => { worker?.terminate(); },\n\t\t\tresponse: new Promise(async res => {\n\t\t\t\tworker = await createWorker('eng');\n\t\t\t\tconst {data} = await worker.recognize(path);\n\t\t\t\tawait worker.terminate();\n\t\t\t\tres(data.text.trim() || null);\n\t\t\t})\n\t\t}\n\t}\n\n\t/**\n\t * Compare the difference between two strings using tensor math\n\t * @param target Text that will checked\n\t * @param {string} searchTerms Multiple search terms to check against target\n\t * @returns {{avg: number, max: number, similarities: number[]}} Similarity values 0-1: 0 = unique, 1 = identical\n\t */\n\tsemanticSimilarity(target: string, ...searchTerms: string[]) {\n\t\tif(searchTerms.length < 2) throw new Error('Requires at least 2 strings to compare');\n\n\t\tconst vector = (text: string, dimensions: number = 10): number[] => {\n\t\t\treturn text.toLowerCase().split('').map((char, index) =>\n\t\t\t\t(char.charCodeAt(0) * (index + 1)) % 
dimensions / dimensions).slice(0, dimensions);\n\t\t}\n\n\t\tconst cosineSimilarity = (v1: number[], v2: number[]): number => {\n\t\t\tif (v1.length !== v2.length) throw new Error('Vectors must be same length');\n\t\t\tconst tensor1 = tf.tensor1d(v1), tensor2 = tf.tensor1d(v2)\n\t\t\tconst dotProduct = tf.dot(tensor1, tensor2)\n\t\t\tconst magnitude1 = tf.norm(tensor1)\n\t\t\tconst magnitude2 = tf.norm(tensor2)\n\t\t\tif(magnitude1.dataSync()[0] === 0 || magnitude2.dataSync()[0] === 0) return 0\n\t\t\treturn dotProduct.dataSync()[0] / (magnitude1.dataSync()[0] * magnitude2.dataSync()[0])\n\t\t}\n\n\t\tconst v = vector(target);\n\t\tconst similarities = searchTerms.map(t => vector(t)).map(refVector => cosineSimilarity(v, refVector))\n\t\treturn {avg: similarities.reduce((acc, s) => acc + s, 0) / similarities.length, max: Math.max(...similarities), similarities}\n\t}\n}\n","import {$, $Sync} from '@ztimson/node-utils';\nimport {ASet, consoleInterceptor, Http, fn as Fn} from '@ztimson/utils';\nimport {Ai} from './ai.ts';\n\nexport type AiToolArg = {[key: string]: {\n\t/** Argument type */\n\ttype: 'array' | 'boolean' | 'number' | 'object' | 'string',\n\t/** Argument description */\n\tdescription: string,\n\t/** Required argument */\n\trequired?: boolean;\n\t/** Default value */\n\tdefault?: any,\n\t/** Options */\n\tenum?: string[],\n\t/** Minimum value or length */\n\tmin?: number,\n\t/** Maximum value or length */\n\tmax?: number,\n\t/** Match pattern */\n\tpattern?: string,\n\t/** Child arguments */\n\titems?: {[key: string]: AiToolArg}\n}}\n\nexport type AiTool = {\n\t/** Tool ID / Name - Must be snail_case */\n\tname: string,\n\t/** Tool description / prompt */\n\tdescription: string,\n\t/** Tool arguments */\n\targs?: AiToolArg,\n\t/** Callback function */\n\tfn: (args: any, ai: Ai) => any | Promise<any>,\n};\n\nexport const CliTool: AiTool = {\n\tname: 'cli',\n\tdescription: 'Use the command line interface, returns any output',\n\targs: {command: {type: 'string', description: 'Command to run', required: true}},\n\tfn: (args: {command: string}) => $`${args.command}`\n}\n\nexport const DateTimeTool: AiTool = {\n\tname: 'get_datetime',\n\tdescription: 'Get current date and time',\n\targs: {},\n\tfn: async () => new Date().toISOString()\n}\n\nexport const ExecTool: AiTool = {\n\tname: 'exec',\n\tdescription: 'Run code/scripts',\n\targs: {\n\t\tlanguage: {type: 'string', description: 'Execution language', enum: ['cli', 'node', 'python'], required: true},\n\t\tcode: {type: 'string', description: 'Code to execute', required: true}\n\t},\n\tfn: async (args, ai) => {\n\t\ttry {\n\t\t\tswitch(args.type) {\n\t\t\t\tcase 'bash':\n\t\t\t\t\treturn await CliTool.fn({command: args.code}, ai);\n\t\t\t\tcase 'node':\n\t\t\t\t\treturn await JSTool.fn({code: args.code}, ai);\n\t\t\t\tcase 'python': {\n\t\t\t\t\treturn await PythonTool.fn({code: args.code}, ai);\n\t\t\t\t}\n\t\t\t}\n\t\t} catch(err: any) {\n\t\t\treturn {error: err?.message || err.toString()};\n\t\t}\n\t}\n}\n\nexport const FetchTool: AiTool = {\n\tname: 'fetch',\n\tdescription: 'Make HTTP request to URL',\n\targs: {\n\t\turl: {type: 'string', description: 'URL to fetch', required: true},\n\t\tmethod: {type: 'string', description: 'HTTP method to use', enum: ['GET', 'POST', 'PUT', 'DELETE'], default: 'GET'},\n\t\theaders: {type: 'object', description: 'HTTP headers to send', default: {}},\n\t\tbody: {type: 'object', description: 'HTTP body to send'},\n\t},\n\tfn: (args: {\n\t\turl: string;\n\t\tmethod: 'GET' | 'POST' | 'PUT' | 
'DELETE';\n\t\theaders: {[key: string]: string};\n\t\tbody: any;\n\t}) => new Http({url: args.url, headers: args.headers}).request({method: args.method || 'GET', body: args.body})\n}\n\nexport const JSTool: AiTool = {\n\tname: 'exec_javascript',\n\tdescription: 'Execute commonjs javascript',\n\targs: {\n\t\tcode: {type: 'string', description: 'CommonJS javascript', required: true}\n\t},\n\tfn: async (args: {code: string}) => {\n\t\tconst console = consoleInterceptor(null);\n\t\tconst resp = await Fn<any>({console}, args.code, true).catch((err: any) => console.output.error.push(err));\n\t\treturn {...console.output, return: resp, stdout: undefined, stderr: undefined};\n\t}\n}\n\nexport const PythonTool: AiTool = {\n\tname: 'exec_javascript',\n\tdescription: 'Execute commonjs javascript',\n\targs: {\n\t\tcode: {type: 'string', description: 'CommonJS javascript', required: true}\n\t},\n\tfn: async (args: {code: string}) => ({result: $Sync`python -c \"${args.code}\"`})\n}\n\nexport const SearchTool: AiTool = {\n\tname: 'search',\n\tdescription: 'Use a search engine to find relevant URLs, should be changed with fetch to scrape sources',\n\targs: {\n\t\tquery: {type: 'string', description: 'Search string', required: true},\n\t\tlength: {type: 'string', description: 'Number of results to return', default: 5},\n\t},\n\tfn: async (args: {\n\t\tquery: string;\n\t\tlength: number;\n\t}) => {\n\t\tconst html = await fetch(`https://html.duckduckgo.com/html/?q=${encodeURIComponent(args.query)}`, {\n\t\t\theaders: {\"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64)\", \"Accept-Language\": \"en-US,en;q=0.9\"}\n\t\t}).then(resp => resp.text());\n\t\tlet match, regex = /<a .*?href=\"(.+?)\".+?<\\/a>/g;\n\t\tconst results = new ASet<string>();\n\t\twhile((match = regex.exec(html)) !== null) {\n\t\t\tlet url = /uddg=(.+)&?/.exec(decodeURIComponent(match[1]))?.[1];\n\t\t\tif(url) url = decodeURIComponent(url);\n\t\t\tif(url) results.add(url);\n\t\t\tif(results.size >= (args.length || 5)) break;\n\t\t}\n\t\treturn 
results;\n\t}\n}\n"],"names":["LLMProvider","Anthropic","ai","apiToken","model","anthropic","history","merged","i","msg","c","h","last","message","options","controller","response","res","rej","requestParams","t","objectMap","key","value","resp","isFirstMessage","chunk","text","JSONAttemptParse","toolCalls","results","toolCall","tool","findByProp","result","JSONSanitize","err","Ollama","host","ollama","error","system","args","OpenAi","openAI","tools","tc","record","h2","LLM","max","min","keep","tokens","m","recent","process","Ai","path","abort","resolve","reject","output","proc","spawn","data","code","p","Path","fs","arr","buffer","worker","createWorker","target","searchTerms","vector","dimensions","char","index","cosineSimilarity","v1","v2","tensor1","tf","tensor2","dotProduct","magnitude1","magnitude2","v","similarities","refVector","acc","s","CliTool","$","DateTimeTool","ExecTool","JSTool","PythonTool","FetchTool","Http","console","consoleInterceptor","Fn","$Sync","SearchTool","html","match","regex","ASet","url"],"mappings":";;;;;;;;;;AAIO,MAAeA,EAAY;AAElC;ACAO,MAAMC,UAAkBD,EAAY;AAAA,EAG1C,YAA4BE,GAAwBC,GAAyBC,GAAe;AAC3F,UAAA,GAD2B,KAAA,KAAAF,GAAwB,KAAA,WAAAC,GAAyB,KAAA,QAAAC,GAE5E,KAAK,SAAS,IAAIC,EAAU,EAAC,QAAQF,GAAS;AAAA,EAC/C;AAAA,EALA;AAAA,EAOQ,WAAWG,GAA8B;AAChD,UAAMC,IAAgB,CAAA;AACtB,aAAQC,IAAI,GAAGA,IAAIF,EAAQ,QAAQE,KAAK;AACvC,YAAMC,IAAMH,EAAQE,CAAC;AAcrB,UAbG,OAAOC,EAAI,WAAW,aACrBA,EAAI,QAAQ,cACdA,EAAI,QAAQ,OAAO,CAACC,MAAWA,EAAE,QAAQ,UAAU,EAAE,QAAQ,CAACA,MAAW;AACxE,QAAAH,EAAO,KAAK,EAAC,MAAM,QAAQ,IAAIG,EAAE,IAAI,MAAMA,EAAE,MAAM,MAAMA,EAAE,OAAM;AAAA,MAClE,CAAC,IACQD,EAAI,QAAQ,UACrBA,EAAI,QAAQ,OAAO,CAACC,MAAWA,EAAE,QAAQ,aAAa,EAAE,QAAQ,CAACA,MAAW;AAC3E,cAAMC,IAAIJ,EAAO,KAAK,CAACI,MAAWA,EAAE,MAAMD,EAAE,WAAW;AACvD,QAAGC,MAAGA,EAAED,EAAE,WAAW,UAAU,SAAS,IAAIA,EAAE;AAAA,MAC/C,CAAC,GAEFD,EAAI,UAAUA,EAAI,QAAQ,OAAO,CAACC,MAAWA,EAAE,QAAQ,MAAM,EAAE,IAAI,CAACA,MAAWA,EAAE,IAAI,EAAE,KAAK;AAAA;AAAA,CAAM,IAEhGD,EAAI,SAAS;AACf,cAAMG,IAAOL,EAAO,GAAG,EAAE;AACzB,QAAGK,KAAQA,EAAK,QAAQ,eAAeH,EAAI,QAAQ,cAAaG,EAAK,WAAW;AAAA;AAAA,IAASH,EAAI,UACxFF,EAAO,KAAK,EAAC,MAAME,EAAI,MAAM,SAASA,EAAI,SAAQ;AAAA,MACxD;AAAA,IACD;AACA,WAAOF;AAAA,EACR;AAAA,EAEQ,aAAaD,GAA8B;AAClD,aAAQE,IAAI,GAAGA,IAAIF,EAAQ,QAAQE;AAClC,UAAGF,EAAQE,CAAC,EAAE,QAAQ,QAAQ;AAC7B,cAAMG,IAASL,EAAQE,CAAC;AACxB,QAAAF,EAAQ;AAAA,UAAOE;AAAA,UAAG;AAAA,UACjB,EAAC,MAAM,aAAa,SAAS,CAAC,EAAC,MAAM,YAAY,IAAIG,EAAE,IAAI,MAAMA,EAAE,MAAM,OAAOA,EAAE,KAAA,CAAK,EAAA;AAAA,UACvF,EAAC,MAAM,QAAQ,SAAS,CAAC,EAAC,MAAM,eAAe,aAAaA,EAAE,IAAI,UAAU,CAAC,CAACA,EAAE,OAAO,SAAUA,EAAE,SAASA,EAAE,SAAQ,EAAA;AAAA,QAAC,GAExHH;AAAA,MACD;AAED,WAAOF;AAAA,EACR;AAAA,EAEA,IAAIO,GAAiBC,IAAsB,IAAoC;AAC9E,UAAMC,IAAa,IAAI,gBAAA,GACjBC,IAAW,IAAI,QAAa,OAAOC,GAAKC,MAAQ;AACrD,UAAIZ,IAAU,KAAK,aAAa,CAAC,GAAGQ,EAAQ,WAAW,CAAA,GAAI,EAAC,MAAM,QAAQ,SAASD,EAAA,CAAQ,CAAC;AAC5F,MAAGC,EAAQ,aAAUR,IAAU,MAAM,KAAK,GAAG,IAAI,SAAcA,GAASQ,EAAQ,SAAS,KAAKA,EAAQ,SAAS,KAAKA,CAAO;AAC3H,YAAMK,IAAqB;AAAA,QAC1B,OAAOL,EAAQ,SAAS,KAAK;AAAA,QAC7B,YAAYA,EAAQ,cAAc,KAAK,GAAG,QAAQ,cAAc;AAAA,QAChE,QAAQA,EAAQ,UAAU,KAAK,GAAG,QAAQ,UAAU;AAAA,QACpD,aAAaA,EAAQ,eAAe,KAAK,GAAG,QAAQ,eAAe;AAAA,QACnE,QAAQA,EAAQ,SAAS,KAAK,GAAG,QAAQ,SAAS,CAAA,GAAI,IAAI,CAAAM,OAAM;AAAA,UAC/D,MAAMA,EAAE;AAAA,UACR,aAAaA,EAAE;AAAA,UACf,cAAc;AAAA,YACb,MAAM;AAAA,YACN,YAAYA,EAAE,OAAOC,EAAUD,EAAE,MAAM,CAACE,GAAKC,OAAW,EAAC,GAAGA,GAAO,UAAU,OAAA,EAAW,IAAI,CAAA;AAAA,YAC5F,UAAUH,EAAE,OAAO,OAAO,QAAQA,EAAE,IAAI,EAAE,OAAO,CAAAA,MAAKA,EAAE,CAAC,EAAE,QAAQ,EAAE,IAAI,CAAAA,MAAKA,EAAE,CAAC,CAAC,IAAI,CAAA;AAAA,UAAC;AAAA,UAExF,IAAI;AAAA,QAAA,EACH;AAAA,QACF,UAAUd;AAAA,QACV,QAAQ,CAAC,CAACQ,E
AAQ;AAAA,MAAA;AAGnB,UAAIU,GACAC,IAAiB;AACrB,SAAG;AAGF,YAFAD,IAAO,MAAM,KAAK,OAAO,SAAS,OAAOL,CAAa,GAEnDL,EAAQ,QAAQ;AAClB,UAAIW,KAAgBX,EAAQ,OAAO,EAAC,MAAM;AAAA;AAAA,GAAO,GACjDW,IAAiB,IAEjBD,EAAK,UAAU,CAAA;AACf,2BAAiBE,KAASF,GAAM;AAC/B,gBAAGT,EAAW,OAAO,QAAS;AAC9B,gBAAGW,EAAM,SAAS;AACjB,cAAGA,EAAM,cAAc,SAAS,SAC/BF,EAAK,QAAQ,KAAK,EAAC,MAAM,QAAQ,MAAM,IAAG,IACjCE,EAAM,cAAc,SAAS,cACtCF,EAAK,QAAQ,KAAK,EAAC,MAAM,YAAY,IAAIE,EAAM,cAAc,IAAI,MAAMA,EAAM,cAAc,MAAM,OAAY,IAAG;AAAA,qBAExGA,EAAM,SAAS;AACxB,kBAAGA,EAAM,MAAM,SAAS,cAAc;AACrC,sBAAMC,IAAOD,EAAM,MAAM;AACzB,gBAAAF,EAAK,QAAQ,GAAG,EAAE,EAAE,QAAQG,GAC5Bb,EAAQ,OAAO,EAAC,MAAAa,GAAK;AAAA,cACtB,MAAA,CAAUD,EAAM,MAAM,SAAS,uBAC9BF,EAAK,QAAQ,GAAG,EAAE,EAAE,SAASE,EAAM,MAAM;AAAA,qBAEjCA,EAAM,SAAS,sBAAsB;AAC9C,oBAAMd,IAAOY,EAAK,QAAQ,GAAG,EAAE;AAC/B,cAAGZ,EAAK,SAAS,SAAMA,EAAK,QAAQA,EAAK,QAAQgB,EAAiBhB,EAAK,OAAO,CAAA,CAAE,IAAI,CAAA;AAAA,YACrF,WAAUc,EAAM,SAAS;AACxB;AAAA,UAEF;AAAA,QACD;AAEA,cAAMG,IAAYL,EAAK,QAAQ,OAAO,CAACd,MAAWA,EAAE,SAAS,UAAU;AACvE,YAAGmB,EAAU,UAAU,CAACd,EAAW,OAAO,SAAS;AAClD,UAAAT,EAAQ,KAAK,EAAC,MAAM,aAAa,SAASkB,EAAK,SAAQ;AACvD,gBAAMM,IAAU,MAAM,QAAQ,IAAID,EAAU,IAAI,OAAOE,MAAkB;AACxE,kBAAMC,IAAOlB,EAAQ,OAAO,KAAKmB,EAAW,QAAQF,EAAS,IAAI,CAAC;AAClE,gBAAG,CAACC,EAAM,QAAO,EAAC,aAAaD,EAAS,IAAI,UAAU,IAAM,SAAS,iBAAA;AACrE,gBAAI;AACH,oBAAMG,IAAS,MAAMF,EAAK,GAAGD,EAAS,OAAO,KAAK,EAAE;AACpD,qBAAO,EAAC,MAAM,eAAe,aAAaA,EAAS,IAAI,SAASI,EAAaD,CAAM,EAAA;AAAA,YACpF,SAASE,GAAU;AAClB,qBAAO,EAAC,MAAM,eAAe,aAAaL,EAAS,IAAI,UAAU,IAAM,SAASK,GAAK,WAAWA,GAAK,SAAA,KAAc,UAAA;AAAA,YACpH;AAAA,UACD,CAAC,CAAC;AACF,UAAA9B,EAAQ,KAAK,EAAC,MAAM,QAAQ,SAASwB,GAAQ,GAC7CX,EAAc,WAAWb;AAAA,QAC1B;AAAA,MACD,SAAS,CAACS,EAAW,OAAO,WAAWS,EAAK,QAAQ,KAAK,CAACd,MAAWA,EAAE,SAAS,UAAU;AAE1F,MAAGI,EAAQ,UAAQA,EAAQ,OAAO,EAAC,MAAM,IAAK,GAC9CG,EAAI,KAAK,WAAW,CAAC,GAAGX,GAAS;AAAA,QAChC,MAAM;AAAA,QACN,SAASkB,EAAK,QAAQ,OAAO,CAACd,MAAWA,EAAE,QAAQ,MAAM,EAAE,IAAI,CAACA,MAAWA,EAAE,IAAI,EAAE,KAAK;AAAA;AAAA,CAAM;AAAA,MAAA,CAC9F,CAAC,CAAC;AAAA,IACJ,CAAC;AAED,WAAO,OAAO,OAAOM,GAAU,EAAC,OAAO,MAAMD,EAAW,MAAA,GAAQ;AAAA,EACjE;AACD;ACtIO,MAAMsB,UAAerC,EAAY;AAAA,EAGvC,YAA4BE,GAAeoC,GAAqBlC,GAAe;AAC9E,UAAA,GAD2B,KAAA,KAAAF,GAAe,KAAA,OAAAoC,GAAqB,KAAA,QAAAlC,GAE/D,KAAK,SAAS,IAAImC,EAAO,EAAC,MAAAD,GAAK;AAAA,EAChC;AAAA,EALA;AAAA,EAOQ,WAAWhC,GAA8B;AAChD,aAAQE,IAAI,GAAGA,IAAIF,EAAQ,QAAQE;AAClC,UAAGF,EAAQE,CAAC,EAAE,QAAQ,eAAeF,EAAQE,CAAC,EAAE;AAC/C,QAAGF,EAAQE,CAAC,EAAE,UAAS,OAAOF,EAAQE,CAAC,EAAE,cAExCF,EAAQ,OAAOE,GAAG,CAAC,GACnBA;AAAA,eAEQF,EAAQE,CAAC,EAAE,QAAQ,QAAQ;AACpC,cAAMgC,IAAQlC,EAAQE,CAAC,EAAE,QAAQ,WAAW,WAAW;AACvD,QAAAF,EAAQE,CAAC,IAAI,EAAC,MAAM,QAAQ,MAAMF,EAAQE,CAAC,EAAE,WAAW,MAAMF,EAAQE,CAAC,EAAE,MAAM,CAACgC,IAAQ,UAAU,SAAS,GAAGlC,EAAQE,CAAC,EAAE,QAAA;AAAA,MAC1H;AAED,WAAOF;AAAA,EACR;AAAA,EAEQ,aAAaA,GAA8B;AAClD,WAAOA,EAAQ,IAAI,CAACK,MAChBA,EAAE,QAAQ,SAAeA,IACrB,EAAC,MAAM,QAAQ,WAAWA,EAAE,MAAM,SAASA,EAAE,SAASA,EAAE,QAAA,CAC/D;AAAA,EACF;AAAA,EAEA,IAAIE,GAAiBC,IAAsB,IAAoC;AAC9E,UAAMC,IAAa,IAAI,gBAAA,GACjBC,IAAW,IAAI,QAAa,OAAOC,GAAKC,MAAQ;AACrD,UAAIuB,IAAS3B,EAAQ,UAAU,KAAK,GAAG,QAAQ,QAC3CR,IAAU,KAAK,aAAa,CAAC,GAAGQ,EAAQ,WAAW,CAAA,GAAI,EAAC,MAAM,QAAQ,SAASD,EAAA,CAAQ,CAAC;AAC5F,MAAGP,EAAQ,CAAC,EAAE,QAAQ,aACjBmC,MACS,MAAA,IADDA,IAASnC,EAAQ,MAAA,IAG3BQ,EAAQ,aAAUR,IAAU,MAAM,KAAK,GAAG,IAAI,SAAcA,GAASQ,EAAQ,SAAS,KAAKA,EAAQ,SAAS,GAAG,IAC/GA,EAAQ,UAAQR,EAAQ,QAAQ,EAAC,MAAM,UAAU,SAASmC,GAAO;AAEpE,YAAMtB,IAAqB;AAAA,QAC1B,OAAOL,EAAQ,SAAS,KAAK;AAAA,QAC7B,UAAUR;AAAA,QACV,QAAQ,CAAC,CAACQ,EAAQ;AAAA,QAClB,QAAQC,EAAW;AAAA,QACnB,SAAS;AAAA,UACR,aAAaD,EAAQ,eAAe,KAAK,GAAG,QAAQ,eAAe;AAAA,UACnE,aAAaA,EAAQ,cAAc,KAAK,GAAG,QAAQ,cAAc;AAAA,QAAA;AAAA,QAElE,QAAQA
,EAAQ,SAAS,KAAK,GAAG,QAAQ,SAAS,CAAA,GAAI,IAAI,CAAAM,OAAM;AAAA,UAC/D,MAAM;AAAA,UACN,UAAU;AAAA,YACT,MAAMA,EAAE;AAAA,YACR,aAAaA,EAAE;AAAA,YACf,YAAY;AAAA,cACX,MAAM;AAAA,cACN,YAAYA,EAAE,OAAOC,EAAUD,EAAE,MAAM,CAACE,GAAKC,OAAW,EAAC,GAAGA,GAAO,UAAU,OAAA,EAAW,IAAI,CAAA;AAAA,cAC5F,UAAUH,EAAE,OAAO,OAAO,QAAQA,EAAE,IAAI,EAAE,OAAO,CAAAA,MAAKA,EAAE,CAAC,EAAE,QAAQ,EAAE,IAAI,CAAAA,MAAKA,EAAE,CAAC,CAAC,IAAI,CAAA;AAAA,YAAC;AAAA,UACxF;AAAA,QACD,EACC;AAAA,MAAA;AAIH,UAAII;AACJ,SAAG;AAEF,YADAA,IAAO,MAAM,KAAK,OAAO,KAAKL,CAAa,GACxCL,EAAQ,QAAQ;AAClB,UAAAU,EAAK,UAAU,EAAC,MAAM,aAAa,SAAS,IAAI,YAAY,GAAC;AAC7D,2BAAiBE,KAASF;AAOzB,gBANGT,EAAW,OAAO,YAClBW,EAAM,SAAS,YACjBF,EAAK,QAAQ,WAAWE,EAAM,QAAQ,SACtCZ,EAAQ,OAAO,EAAC,MAAMY,EAAM,QAAQ,SAAQ,IAE1CA,EAAM,SAAS,iBAAiB,QAAQ,aAAaA,EAAM,QAAQ,aACnEA,EAAM,MAAM;AAAA,QAEjB;AAGA,YAAGF,EAAK,SAAS,YAAY,UAAU,CAACT,EAAW,OAAO,SAAS;AAClE,UAAAT,EAAQ,KAAKkB,EAAK,OAAO;AACzB,gBAAMM,IAAU,MAAM,QAAQ,IAAIN,EAAK,QAAQ,WAAW,IAAI,OAAOO,MAAkB;AACtF,kBAAMC,KAAQlB,EAAQ,SAAS,KAAK,GAAG,QAAQ,QAAQ,KAAKmB,EAAW,QAAQF,EAAS,SAAS,IAAI,CAAC;AACtG,gBAAG,CAACC,EAAM,QAAO,EAAC,MAAM,QAAQ,WAAWD,EAAS,SAAS,MAAM,SAAS,8BAAA;AAC5E,kBAAMW,IAAO,OAAOX,EAAS,SAAS,aAAc,WAAWH,EAAiBG,EAAS,SAAS,WAAW,CAAA,CAAE,IAAIA,EAAS,SAAS;AACrI,gBAAI;AACH,oBAAMG,IAAS,MAAMF,EAAK,GAAGU,GAAM,KAAK,EAAE;AAC1C,qBAAO,EAAC,MAAM,QAAQ,WAAWX,EAAS,SAAS,MAAM,MAAAW,GAAM,SAASP,EAAaD,CAAM,EAAA;AAAA,YAC5F,SAASE,GAAU;AAClB,qBAAO,EAAC,MAAM,QAAQ,WAAWL,EAAS,SAAS,MAAM,MAAAW,GAAM,SAASP,EAAa,EAAC,OAAOC,GAAK,WAAWA,GAAK,cAAc,UAAA,CAAU,EAAA;AAAA,YAC3I;AAAA,UACD,CAAC,CAAC;AACF,UAAA9B,EAAQ,KAAK,GAAGwB,CAAO,GACvBX,EAAc,WAAWb;AAAA,QAC1B;AAAA,MACD,SAAS,CAACS,EAAW,OAAO,WAAWS,EAAK,SAAS,YAAY;AACjE,MAAGV,EAAQ,UAAQA,EAAQ,OAAO,EAAC,MAAM,IAAK,GAC9CG,EAAI,KAAK,WAAW,CAAC,GAAGX,GAAS,EAAC,MAAM,aAAa,SAASkB,EAAK,SAAS,QAAA,CAAQ,CAAC,CAAC;AAAA,IACvF,CAAC;AACD,WAAO,OAAO,OAAOR,GAAU,EAAC,OAAO,MAAMD,EAAW,MAAA,GAAQ;AAAA,EACjE;AACD;AC1GO,MAAM4B,UAAe3C,EAAY;AAAA,EAGvC,YAA4BE,GAAwBC,GAAyBC,GAAe;AAC3F,UAAA,GAD2B,KAAA,KAAAF,GAAwB,KAAA,WAAAC,GAAyB,KAAA,QAAAC,GAE5E,KAAK,SAAS,IAAIwC,EAAO,EAAC,QAAQzC,GAAS;AAAA,EAC5C;AAAA,EALA;AAAA,EAOQ,WAAWG,GAA8B;AAChD,aAAQE,IAAI,GAAGA,IAAIF,EAAQ,QAAQE,KAAK;AACvC,YAAMG,IAAIL,EAAQE,CAAC;AACnB,UAAGG,EAAE,SAAS,eAAeA,EAAE,YAAY;AAC1C,cAAMkC,IAAQlC,EAAE,WAAW,IAAI,CAACmC,OAAa;AAAA,UAC5C,MAAM;AAAA,UACN,IAAIA,EAAG;AAAA,UACP,MAAMA,EAAG,SAAS;AAAA,UAClB,MAAMlB,EAAiBkB,EAAG,SAAS,WAAW,CAAA,CAAE;AAAA,QAAA,EAC/C;AACF,QAAAxC,EAAQ,OAAOE,GAAG,GAAG,GAAGqC,CAAK,GAC7BrC,KAAKqC,EAAM,SAAS;AAAA,MACrB,WAAUlC,EAAE,SAAS,UAAUA,EAAE,SAAS;AACzC,cAAMoC,IAASzC,EAAQ,KAAK,OAAMK,EAAE,gBAAgBqC,EAAG,EAAE;AACzD,QAAGD,MACCpC,EAAE,QAAQ,SAAS,UAAU,IAAGoC,EAAO,QAAQpC,EAAE,UAC/CoC,EAAO,UAAUpC,EAAE,UAEzBL,EAAQ,OAAOE,GAAG,CAAC,GACnBA;AAAA,MACD;AAAA,IAED;AACA,WAAOF;AAAA,EACR;AAAA,EAEQ,aAAaA,GAA8B;AAClD,WAAOA,EAAQ,OAAO,CAAC4B,GAAQvB,OAC3BA,EAAE,SAAS,SACbuB,EAAO,KAAK;AAAA,MACX,MAAM;AAAA,MACN,SAAS;AAAA,MACT,YAAY,CAAC,EAAE,IAAIvB,EAAE,IAAI,MAAM,YAAY,UAAU,EAAE,MAAMA,EAAE,MAAM,WAAW,KAAK,UAAUA,EAAE,IAAI,EAAA,GAAK;AAAA,MAC1G,SAAS;AAAA,MACT,aAAa,CAAA;AAAA,IAAC,GACZ;AAAA,MACF,MAAM;AAAA,MACN,cAAcA,EAAE;AAAA,MAChB,SAASA,EAAE,SAASA,EAAE;AAAA,IAAA,CACtB,IAEDuB,EAAO,KAAKvB,CAAC,GAEPuB,IACL,CAAA,CAAW;AAAA,EACf;AAAA,EAEA,IAAIrB,GAAiBC,IAAsB,IAAoC;AAC9E,UAAMC,IAAa,IAAI,gBAAA,GACjBC,IAAW,IAAI,QAAa,OAAOC,GAAKC,MAAQ;AACrD,UAAIZ,IAAU,KAAK,aAAa,CAAC,GAAGQ,EAAQ,WAAW,CAAA,GAAI,EAAC,MAAM,QAAQ,SAASD,EAAA,CAAQ,CAAC;AAC5F,MAAGC,EAAQ,aAAUR,IAAU,MAAM,KAAK,GAAG,IAAI,SAAcA,GAASQ,EAAQ,SAAS,KAAKA,EAAQ,SAAS,KAAKA,CAAO;AAE3H,YAAMK,IAAqB;AAAA,QAC1B,OAAOL,EAAQ,SAAS,KAAK;AAAA,QAC7B,UAAUR;AAAA,QACV,QAAQ,CAAC,CAACQ,EAAQ;AAAA,QAClB,YAAYA,EAAQ,cAAc,KAAK,GAAG,QAAQ,cA
Ac;AAAA,QAChE,aAAaA,EAAQ,eAAe,KAAK,GAAG,QAAQ,eAAe;AAAA,QACnE,QAAQA,EAAQ,SAAS,KAAK,GAAG,QAAQ,SAAS,CAAA,GAAI,IAAI,CAAAM,OAAM;AAAA,UAC/D,MAAM;AAAA,UACN,UAAU;AAAA,YACT,MAAMA,EAAE;AAAA,YACR,aAAaA,EAAE;AAAA,YACf,YAAY;AAAA,cACX,MAAM;AAAA,cACN,YAAYA,EAAE,OAAOC,EAAUD,EAAE,MAAM,CAACE,GAAKC,OAAW,EAAC,GAAGA,GAAO,UAAU,OAAA,EAAW,IAAI,CAAA;AAAA,cAC5F,UAAUH,EAAE,OAAO,OAAO,QAAQA,EAAE,IAAI,EAAE,OAAO,CAAAA,MAAKA,EAAE,CAAC,EAAE,QAAQ,EAAE,IAAI,CAAAA,MAAKA,EAAE,CAAC,CAAC,IAAI,CAAA;AAAA,YAAC;AAAA,UACxF;AAAA,QACD,EACC;AAAA,MAAA;AAIH,UAAII;AACJ,SAAG;AAIF,YAHAA,IAAO,MAAM,KAAK,OAAO,KAAK,YAAY,OAAOL,CAAa,GAG3DL,EAAQ,QAAQ;AAClB,UAAAU,EAAK,UAAU,CAAA;AACf,2BAAiBE,KAASF,GAAM;AAC/B,gBAAGT,EAAW,OAAO,QAAS;AAC9B,YAAGW,EAAM,QAAQ,CAAC,EAAE,MAAM,WACzBZ,EAAQ,OAAO,EAAC,MAAMY,EAAM,QAAQ,CAAC,EAAE,MAAM,SAAQ;AAAA,UAEvD;AAAA,QACD;AAGA,cAAMG,IAAYL,EAAK,QAAQ,CAAC,EAAE,QAAQ,cAAc,CAAA;AACxD,YAAGK,EAAU,UAAU,CAACd,EAAW,OAAO,SAAS;AAClD,UAAAT,EAAQ,KAAKkB,EAAK,QAAQ,CAAC,EAAE,OAAO;AACpC,gBAAMM,IAAU,MAAM,QAAQ,IAAID,EAAU,IAAI,OAAOE,MAAkB;AACxE,kBAAMC,IAAOlB,EAAQ,OAAO,KAAKmB,EAAW,QAAQF,EAAS,SAAS,IAAI,CAAC;AAC3E,gBAAG,CAACC,EAAM,QAAO,EAAC,MAAM,QAAQ,cAAcD,EAAS,IAAI,SAAS,8BAAA;AACpE,gBAAI;AACH,oBAAMW,IAAOd,EAAiBG,EAAS,SAAS,WAAW,CAAA,CAAE,GACvDG,IAAS,MAAMF,EAAK,GAAGU,GAAM,KAAK,EAAE;AAC1C,qBAAO,EAAC,MAAM,QAAQ,cAAcX,EAAS,IAAI,SAASI,EAAaD,CAAM,EAAA;AAAA,YAC9E,SAASE,GAAU;AAClB,qBAAO,EAAC,MAAM,QAAQ,cAAcL,EAAS,IAAI,SAASI,EAAa,EAAC,OAAOC,GAAK,WAAWA,GAAK,cAAc,UAAA,CAAU,EAAA;AAAA,YAC7H;AAAA,UACD,CAAC,CAAC;AACF,UAAA9B,EAAQ,KAAK,GAAGwB,CAAO,GACvBX,EAAc,WAAWb;AAAA,QAC1B;AAAA,MACD,SAAS,CAACS,EAAW,OAAO,WAAWS,EAAK,UAAU,CAAC,GAAG,SAAS,YAAY;AAE/E,MAAGV,EAAQ,UAAQA,EAAQ,OAAO,EAAC,MAAM,IAAK,GAC9CG,EAAI,KAAK,WAAW,CAAC,GAAGX,GAAS,EAAC,MAAM,aAAa,SAASkB,EAAK,QAAQ,CAAC,EAAE,QAAQ,WAAW,GAAA,CAAG,CAAC,CAAC;AAAA,IACvG,CAAC;AAED,WAAO,OAAO,OAAOR,GAAU,EAAC,OAAO,MAAMD,EAAW,MAAA,GAAQ;AAAA,EACjE;AACD;ACnDO,MAAMkC,EAAI;AAAA,EAGhB,YAA4B/C,GAAwBY,GAAqB;AAA7C,SAAA,KAAAZ,GAAwB,KAAA,UAAAY,GAChDA,EAAQ,WAAW,UAAO,KAAK,UAAU,YAAY,IAAIb,EAAU,KAAK,IAAIa,EAAQ,UAAU,OAAOA,EAAQ,UAAU,KAAK,IAC5HA,EAAQ,QAAQ,SAAM,KAAK,UAAU,SAAS,IAAIuB,EAAO,KAAK,IAAIvB,EAAQ,OAAO,MAAMA,EAAQ,OAAO,KAAK,IAC3GA,EAAQ,QAAQ,UAAO,KAAK,UAAU,SAAS,IAAI6B,EAAO,KAAK,IAAI7B,EAAQ,OAAO,OAAOA,EAAQ,OAAO,KAAK;AAAA,EACjH;AAAA,EANQ,YAA0C,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAclD,IAAID,GAAiBC,IAAsB,IAAoC;AAC9E,QAAIV,IAAa,CAAC,MAAM,IAAI;AAS5B,QARGU,EAAQ,UACP,OAAOA,EAAQ,SAAS,eAAkBA,EAAQ,QAChDV,IAAQ,CAACU,EAAQ,OAAa,KAAK,QAASA,EAAQ,KAAK,GAAG,KAAK,KAEpE,CAACA,EAAQ,SAASV,EAAM,CAAC,KAAK,UAC7B,OAAO,KAAK,QAAQ,SAAS,WAAUA,IAAQ,KAAK,QAAQ,QAC1DA,IAAQ,CAAC,KAAK,QAAQ,OAAa,KAAK,QAAS,KAAK,QAAQ,KAAK,GAAG,KAAK,IAE9E,CAACA,EAAM,CAAC,KAAK,CAACA,EAAM,CAAC,EAAG,OAAM,IAAI,MAAM,kCAAkCA,EAAM,CAAC,CAAC,MAAMA,EAAM,CAAC,CAAC,EAAE;AACrG,WAAO,KAAK,UAAUA,EAAM,CAAC,CAAC,EAAE,IAAIS,GAAS,EAAC,GAAGC,GAAS,OAAOV,EAAM,CAAC,GAAE;AAAA,EAC3E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,SAASE,GAAuB4C,GAAaC,GAAarC,GAA6C;AAC5G,QAAG,KAAK,eAAeR,CAAO,IAAI4C,EAAK,QAAO5C;AAC9C,QAAI8C,IAAO,GAAGC,IAAS;AACvB,aAAQC,KAAKhD,EAAQ;AAEpB,UADA+C,KAAU,KAAK,eAAeC,EAAE,OAAO,GACpCD,IAASF,EAAK,CAAAC;AAAA,UACZ;AAEN,QAAG9C,EAAQ,UAAU8C,EAAM,QAAO9C;AAClC,UAAMiD,IAASH,KAAQ,IAAI,CAAA,IAAK9C,EAAQ,MAAM,CAAC8C,CAAI,GAClDI,KAAWJ,KAAQ,IAAI9C,IAAUA,EAAQ,MAAM,GAAG,CAAC8C,CAAI,GAAG,OAAO,CAAAzC,MAAKA,EAAE,SAAS,eAAeA,EAAE,SAAS,MAAM;AAElH,WAAO,CAAC,EAAC,MAAM,aAAa,SAAS,yBADrB,MAAM,KAAK,UAAU6C,EAAQ,IAAI,OAAK,GAAGF,EAAE,IAAI,KAAKA,EAAE,OAAO,EAAE,EAAE,KAAK;AAAA;AAAA,CAAM,GAAG,KAAKxC,CAAO,CACtC,MAAK,GAAGyC,CAAM;AAAA,EACpF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,eAAejD,GAAsB;AACpC,UAAMqB,IAAO,KAAK,UAAUrB,CAAO;AACnC,WAAO,KAAK,
KAAMqB,EAAK,SAAS,IAAK,GAAG;AAAA,EACzC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,KAAKd,GAAiBC,GAAsB;AACjD,QAAIU,IAAO,MAAM,KAAK,IAAIX,GAAS;AAAA,MAClC,QAAQ;AAAA,MACR,GAAGC;AAAA,IAAA,CACH;AACD,WAAIU,IAAO,CAAC,GAAG,UACRI,EAAiB,IAAI,OAAO,SAAa,EAAE,KAAKJ,EAAK,CAAC,EAAE,OAAO,GAAG,EAAE,IAD5C,CAAA;AAAA,EAEhC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,UAAUG,GAAc0B,GAAgBvC,GAA8C;AACrF,WAAO,KAAK,IAAIa,GAAM,EAAC,QAAQ,+BAA+B0B,CAAM,gCAAgC,aAAa,KAAK,GAAGvC,EAAA,CAAQ,EAC/H,KAAK,CAAAR,MAAmBA,EAAQ,IAAA,GAAO,WAAW,IAAI;AAAA,EACzD;AACD;ACpJO,MAAMmD,EAAG;AAAA,EAOf,YAA4B3C,GAAoB;AAApB,SAAA,UAAAA,GAC3B,KAAK,MAAM,IAAImC,EAAI,MAAMnC,CAAO,GAC7B,KAAK,QAAQ,SAAS,WACxB,KAAK,eAAe,KAAK,QAAQ,SAAS,MAAM,SAAS,MAAM,IAAI,KAAK,QAAQ,SAAS,QAAQ,KAAK,QAAQ,SAAS,QAAQ,QAC/H,KAAK,iBAAA;AAAA,EAEP;AAAA,EAZQ,YAA8C,CAAA;AAAA,EAC9C;AAAA;AAAA,EAGR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAgBA,IAAI4C,GAActD,IAAgB,KAAK,cAAqE;AAC3G,QAAG,CAAC,KAAK,QAAQ,SAAS,OAAQ,OAAM,IAAI,MAAM,wBAAwB;AAC1E,QAAIuD,IAAa,MAAM;AAAA,IAAC;AAcxB,WAAO,EAAC,UAbS,IAAI,QAAuB,CAACC,GAASC,MAAW;AAChE,WAAK,iBAAiBzD,CAAK,EAAE,KAAK,CAAAkD,MAAK;AACtC,YAAIQ,IAAS;AACb,cAAMC,IAAOC,EAAc,KAAK,QAAQ,SAAS,QAAQ,CAAC,OAAO,OAAO,MAAMV,GAAG,MAAMI,CAAI,GAAG,EAAC,OAAO,CAAC,UAAU,QAAQ,QAAQ,GAAE;AACnI,QAAAC,IAAQ,MAAMI,EAAK,KAAK,SAAS,GACjCA,EAAK,GAAG,SAAS,CAAC3B,MAAeyB,EAAOzB,CAAG,CAAC,GAC5C2B,EAAK,OAAO,GAAG,QAAQ,CAACE,MAAiBH,KAAUG,EAAK,UAAU,GAClEF,EAAK,GAAG,SAAS,CAACG,MAAiB;AAClC,UAAGA,MAAS,IAAGN,EAAQE,EAAO,KAAA,KAAU,IAAI,MAChC,IAAI,MAAM,aAAaI,CAAI,EAAE,CAAC;AAAA,QAC3C,CAAC;AAAA,MACF,CAAC;AAAA,IACF,CAAC,GACiB,OAAAP,EAAA;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,iBAAiBvD,IAAgB,KAAK,cAA+B;AAC1E,QAAG,CAAC,KAAK,QAAQ,SAAS,OAAQ,OAAM,IAAI,MAAM,wBAAwB;AAC1E,IAAIA,EAAM,SAAS,MAAM,MAAGA,KAAS;AACrC,UAAM+D,IAAIC,EAAK,KAAK,KAAK,QAAQ,QAAQ,MAAMhE,CAAK;AACpD,WAAG,MAAMiE,EAAG,KAAKF,CAAC,EAAE,KAAK,MAAM,EAAI,EAAE,MAAM,MAAM,EAAK,IAAUA,IAC3D,KAAK,UAAU/D,CAAK,IAAU,KAAK,UAAUA,CAAK,KACvD,KAAK,UAAUA,CAAK,IAAI,MAAM,6DAA6DA,CAAK,EAAE,EAChG,KAAK,CAAAoB,MAAQA,EAAK,aAAa,EAC/B,KAAK,CAAA8C,MAAO,OAAO,KAAKA,CAAG,CAAC,EAAE,KAAK,OAAMC,OACzC,MAAMF,EAAG,UAAUF,GAAGI,CAAM,GAC5B,OAAO,KAAK,UAAUnE,CAAK,GACpB+D,EACP,GACK,KAAK,UAAU/D,CAAK;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,IAAIsD,GAAqE;AACxE,QAAIc;AACJ,WAAO;AAAA,MACN,OAAO,MAAM;AAAE,QAAAA,GAAQ,UAAA;AAAA,MAAa;AAAA,MACpC,UAAU,IAAI,QAAQ,OAAMvD,MAAO;AAClC,QAAAuD,IAAS,MAAMC,EAAa,KAAK;AACjC,cAAM,EAAC,MAAAR,EAAA,IAAQ,MAAMO,EAAO,UAAUd,CAAI;AAC1C,cAAMc,EAAO,UAAA,GACbvD,EAAIgD,EAAK,KAAK,KAAA,KAAU,IAAI;AAAA,MAC7B,CAAC;AAAA,IAAA;AAAA,EAEH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,mBAAmBS,MAAmBC,GAAuB;AAC5D,QAAGA,EAAY,SAAS,EAAG,OAAM,IAAI,MAAM,wCAAwC;AAEnF,UAAMC,IAAS,CAACjD,GAAckD,IAAqB,OAC3ClD,EAAK,cAAc,MAAM,EAAE,EAAE,IAAI,CAACmD,GAAMC,MAC7CD,EAAK,WAAW,CAAC,KAAKC,IAAQ,KAAMF,IAAaA,CAAU,EAAE,MAAM,GAAGA,CAAU,GAG7EG,IAAmB,CAACC,GAAcC,MAAyB;AAChE,UAAID,EAAG,WAAWC,EAAG,OAAQ,OAAM,IAAI,MAAM,6BAA6B;AAC1E,YAAMC,IAAUC,EAAG,SAASH,CAAE,GAAGI,IAAUD,EAAG,SAASF,CAAE,GACnDI,IAAaF,EAAG,IAAID,GAASE,CAAO,GACpCE,IAAaH,EAAG,KAAKD,CAAO,GAC5BK,IAAaJ,EAAG,KAAKC,CAAO;AAClC,aAAGE,EAAW,WAAW,CAAC,MAAM,KAAKC,EAAW,WAAW,CAAC,MAAM,IAAU,IACrEF,EAAW,SAAA,EAAW,CAAC,KAAKC,EAAW,WAAW,CAAC,IAAIC,EAAW,SAAA,EAAW,CAAC;AAAA,IACtF,GAEMC,IAAIb,EAAOF,CAAM,GACjBgB,IAAef,EAAY,IAAI,CAAAvD,MAAKwD,EAAOxD,CAAC,CAAC,EAAE,IAAI,CAAAuE,MAAaX,EAAiBS,GAAGE,CAAS,CAAC;AACpG,WAAO,EAAC,KAAKD,EAAa,OAAO,CAACE,GAAKC,MAAMD,IAAMC,GAAG,CAAC,IAAIH,EAAa,QAAQ,KAAK,KAAK,IAAI,GAAGA,CAAY,GAAG,cAAAA,EAAA;AAAA,EACjH;AACD;AC1FO,MAAMI,IAAkB;AAAA,EAC9B,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM,EAAC,SAAS,EAAC,MAAM,UAAU,aAAa,kBAAkB,UAAU,KAAI;AAAA,EAC9E,IAAI,CAACpD,MAA4BqD,I
AAIrD,EAAK,OAAO;AAClD,GAEasD,IAAuB;AAAA,EACnC,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM,CAAA;AAAA,EACN,IAAI,aAAY,oBAAI,KAAA,GAAO,YAAA;AAC5B,GAEaC,IAAmB;AAAA,EAC/B,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM;AAAA,IACL,UAAU,EAAC,MAAM,UAAU,aAAa,sBAAsB,MAAM,CAAC,OAAO,QAAQ,QAAQ,GAAG,UAAU,GAAA;AAAA,IACzG,MAAM,EAAC,MAAM,UAAU,aAAa,mBAAmB,UAAU,GAAA;AAAA,EAAI;AAAA,EAEtE,IAAI,OAAOvD,GAAMxC,MAAO;AACvB,QAAI;AACH,cAAOwC,EAAK,MAAA;AAAA,QACX,KAAK;AACJ,iBAAO,MAAMoD,EAAQ,GAAG,EAAC,SAASpD,EAAK,KAAA,GAAOxC,CAAE;AAAA,QACjD,KAAK;AACJ,iBAAO,MAAMgG,EAAO,GAAG,EAAC,MAAMxD,EAAK,KAAA,GAAOxC,CAAE;AAAA,QAC7C,KAAK;AACJ,iBAAO,MAAMiG,EAAW,GAAG,EAAC,MAAMzD,EAAK,KAAA,GAAOxC,CAAE;AAAA,MACjD;AAAA,IAEF,SAAQkC,GAAU;AACjB,aAAO,EAAC,OAAOA,GAAK,WAAWA,EAAI,WAAS;AAAA,IAC7C;AAAA,EACD;AACD,GAEagE,KAAoB;AAAA,EAChC,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM;AAAA,IACL,KAAK,EAAC,MAAM,UAAU,aAAa,gBAAgB,UAAU,GAAA;AAAA,IAC7D,QAAQ,EAAC,MAAM,UAAU,aAAa,sBAAsB,MAAM,CAAC,OAAO,QAAQ,OAAO,QAAQ,GAAG,SAAS,MAAA;AAAA,IAC7G,SAAS,EAAC,MAAM,UAAU,aAAa,wBAAwB,SAAS,GAAC;AAAA,IACzE,MAAM,EAAC,MAAM,UAAU,aAAa,oBAAA;AAAA,EAAmB;AAAA,EAExD,IAAI,CAAC1D,MAKC,IAAI2D,EAAK,EAAC,KAAK3D,EAAK,KAAK,SAASA,EAAK,SAAQ,EAAE,QAAQ,EAAC,QAAQA,EAAK,UAAU,OAAO,MAAMA,EAAK,KAAA,CAAK;AAC/G,GAEawD,IAAiB;AAAA,EAC7B,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM;AAAA,IACL,MAAM,EAAC,MAAM,UAAU,aAAa,uBAAuB,UAAU,GAAA;AAAA,EAAI;AAAA,EAE1E,IAAI,OAAOxD,MAAyB;AACnC,UAAM4D,IAAUC,EAAmB,IAAI,GACjC/E,IAAO,MAAMgF,EAAQ,EAAC,SAAAF,EAAA,GAAU5D,EAAK,MAAM,EAAI,EAAE,MAAM,CAACN,MAAakE,EAAQ,OAAO,MAAM,KAAKlE,CAAG,CAAC;AACzG,WAAO,EAAC,GAAGkE,EAAQ,QAAQ,QAAQ9E,GAAM,QAAQ,QAAW,QAAQ,OAAA;AAAA,EACrE;AACD,GAEa2E,IAAqB;AAAA,EACjC,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM;AAAA,IACL,MAAM,EAAC,MAAM,UAAU,aAAa,uBAAuB,UAAU,GAAA;AAAA,EAAI;AAAA,EAE1E,IAAI,OAAOzD,OAA0B,EAAC,QAAQ+D,eAAmB/D,EAAK,IAAI,IAAA;AAC3E,GAEagE,KAAqB;AAAA,EACjC,MAAM;AAAA,EACN,aAAa;AAAA,EACb,MAAM;AAAA,IACL,OAAO,EAAC,MAAM,UAAU,aAAa,iBAAiB,UAAU,GAAA;AAAA,IAChE,QAAQ,EAAC,MAAM,UAAU,aAAa,+BAA+B,SAAS,EAAA;AAAA,EAAC;AAAA,EAEhF,IAAI,OAAOhE,MAGL;AACL,UAAMiE,IAAO,MAAM,MAAM,uCAAuC,mBAAmBjE,EAAK,KAAK,CAAC,IAAI;AAAA,MACjG,SAAS,EAAC,cAAc,6CAA6C,mBAAmB,iBAAA;AAAA,IAAgB,CACxG,EAAE,KAAK,CAAAlB,MAAQA,EAAK,MAAM;AAC3B,QAAIoF,GAAOC,IAAQ;AACnB,UAAM/E,IAAU,IAAIgF,EAAA;AACpB,YAAOF,IAAQC,EAAM,KAAKF,CAAI,OAAO,QAAM;AAC1C,UAAII,IAAM,iBAAiB,KAAK,mBAAmBH,EAAM,CAAC,CAAC,CAAC,IAAI,CAAC;AAGjE,UAFGG,MAAKA,IAAM,mBAAmBA,CAAG,IACjCA,KAAKjF,EAAQ,IAAIiF,CAAG,GACpBjF,EAAQ,SAASY,EAAK,UAAU,GAAI;AAAA,IACxC;AACA,WAAOZ;AAAA,EACR;AACD;"}