@adaline/google 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +19 -0
- package/README.md +9 -0
- package/dist/index.d.mts +3022 -0
- package/dist/index.d.ts +3022 -0
- package/dist/index.js +135 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +19 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +67 -0
package/dist/index.mjs
ADDED
@@ -0,0 +1,19 @@
import { z as z$1 } from 'zod';
import { RangeConfigItem, CHAT_CONFIG, SelectStringConfigItem, ObjectSchemaConfigItem, ChatModelSchema, EmbeddingModelSchema, MultiStringConfigItem, EMBEDDING_CONFIG, ProviderError, urlWithoutTrailingSlash, InvalidModelRequestError, removeUndefinedEntries, InvalidMessagesError, InvalidConfigError, InvalidToolsError, ModelResponseError, InvalidEmbeddingRequestsError } from '@adaline/provider';
import { SystemRoleLiteral, UserRoleLiteral, AssistantRoleLiteral, ToolRoleLiteral, TextModalityLiteral, ImageModalityLiteral, ToolCallModalityLiteral, ToolResponseModalityLiteral, EmbeddingTextModalityLiteral, Config, Base64ImageContentTypeLiteral, Message, Tool, createTextContent, createToolCallContent, EmbeddingRequests, FloatEmbeddingLiteral, createPartialTextMessage, createPartialToolCallMessage } from '@adaline/types';
var Zo=Object.defineProperty;var po=Object.getOwnPropertySymbols;var et=Object.prototype.hasOwnProperty,ot=Object.prototype.propertyIsEnumerable;var tt=(i,e)=>(e=Symbol[i])?e:Symbol.for("Symbol."+i);var co=(i,e,t)=>e in i?Zo(i,e,{enumerable:!0,configurable:!0,writable:!0,value:t}):i[e]=t,G=(i,e)=>{for(var t in e||(e={}))et.call(e,t)&&co(i,t,e[t]);if(po)for(var t of po(e))ot.call(e,t)&&co(i,t,e[t]);return i};var P=(i,e,t)=>new Promise((o,s)=>{var m=l=>{try{r(t.next(l));}catch(c){s(c);}},n=l=>{try{r(t.throw(l));}catch(c){s(c);}},r=l=>l.done?o(l.value):Promise.resolve(l.value).then(m,n);r((t=t.apply(i,e)).next());}),nt=function(i,e){this[0]=i,this[1]=e;},ho=(i,e,t)=>{var o=(n,r,l,c)=>{try{var p=t[n](r),T=(r=p.value)instanceof nt,b=p.done;Promise.resolve(T?r[0]:r).then(f=>T?o(n==="return"?n:"next",r[1]?{done:f.done,value:f.value}:f,l,c):l({value:f,done:b})).catch(f=>o("throw",f,l,c));}catch(f){c(f);}},s=n=>m[n]=r=>new Promise((l,c)=>o(n,r,l,c)),m={};return t=t.apply(i,e),m[tt("asyncIterator")]=()=>m,s("next"),s("throw"),s("return"),m};var L=(i,e)=>RangeConfigItem({param:"temperature",title:CHAT_CONFIG.TEMPERATURE.title,description:CHAT_CONFIG.TEMPERATURE.description,min:0,max:i,step:.01,default:e}),v=i=>RangeConfigItem({param:"maxOutputTokens",title:CHAT_CONFIG.MAX_TOKENS.title,description:CHAT_CONFIG.MAX_TOKENS.description,min:0,max:i,step:1,default:0}),I=i=>MultiStringConfigItem({param:"stopSequences",title:CHAT_CONFIG.STOP(i).title,description:CHAT_CONFIG.STOP(i).description,max:i}),$=i=>RangeConfigItem({param:"topP",title:CHAT_CONFIG.TOP_P.title,description:CHAT_CONFIG.TOP_P.description,min:0,max:1,step:.01,default:i}),Oe=i=>RangeConfigItem({param:"topK",title:CHAT_CONFIG.TOP_K.title,description:CHAT_CONFIG.TOP_K.description,min:1,max:40,step:1,default:i}),Pe=RangeConfigItem({param:"frequencyPenalty",title:CHAT_CONFIG.FREQUENCY_PENALTY.title,description:CHAT_CONFIG.FREQUENCY_PENALTY.description,min:-2,max:2,step:.01,default:0}),Re=RangeConfigItem({param:"presencePenalty",title:CHAT_CONFIG.PRESENCE_PENALTY.title,description:CHAT_CONFIG.PRESENCE_PENALTY.description,min:-2,max:2,step:.01,default:0}),xe=RangeConfigItem({param:"seed",title:CHAT_CONFIG.SEED.title,description:CHAT_CONFIG.SEED.description,min:0,max:1e6,step:1,default:0}),F=SelectStringConfigItem({param:"toolChoice",title:"Tool choice",description:"Controls which (if any) tool is called by the model. 'none' means the model will not call a function. 
'auto' means the model can pick between generating a message or calling a tool.",default:"auto",choices:["auto","any","none"]}),q=ObjectSchemaConfigItem({param:"safetySettings",title:"Safety settings",description:"The safety rating contains the category of harm and the harm probability level in that category for a piece of content.",objectSchema:z$1.array(z$1.object({threshold:z$1.enum(["HARM_BLOCK_THRESHOLD_UNSPECIFIED","BLOCK_LOW_AND_ABOVE","BLOCK_MEDIUM_AND_ABOVE","BLOCK_ONLY_HIGH","BLOCK_NONE","OFF"]),category:z$1.enum(["HARM_CATEGORY_UNSPECIFIED","HARM_CATEGORY_HARASSMENT","HARM_CATEGORY_HATE_SPEECH","HARM_CATEGORY_SEXUALLY_EXPLICIT","HARM_CATEGORY_DANGEROUS_CONTENT","HARM_CATEGORY_CIVIC_INTEGRITY"])}))});var fo=(i,e,t,o,s)=>z$1.object({temperature:L(i,e).schema,maxTokens:v(t).schema,stop:I(o).schema,topP:$(s).schema,toolChoice:F.schema,safetySettings:q.schema}),go=(i,e,t,o,s)=>({temperature:L(i,e).def,maxTokens:v(t).def,stop:I(o).def,topP:$(s).def,toolChoice:F.def,safetySettings:q.def});var uo=(i,e,t,o,s,m)=>z$1.object({temperature:L(i,e).schema,maxTokens:v(t).schema,stop:I(o).schema,topP:$(s).schema,topK:Oe(m).schema,frequencyPenalty:Pe.schema,presencePenalty:Re.schema,seed:xe.schema.transform(n=>n===0?void 0:n),toolChoice:F.schema,safetySettings:q.schema}),yo=(i,e,t,o,s,m)=>({temperature:L(i,e).def,maxTokens:v(t).def,stop:I(o).def,topP:$(s).def,topK:Oe(m).def,frequencyPenalty:Pe.def,presencePenalty:Re.def,seed:xe.def,toolChoice:F.def,safetySettings:q.def});var we=i=>RangeConfigItem({param:"outputDimensionality",title:EMBEDDING_CONFIG.DIMENSIONS.title,description:EMBEDDING_CONFIG.DIMENSIONS.description,min:1,max:i,step:1,default:i});var To=i=>z$1.object({dimensions:we(i).schema}),Go=i=>({dimensions:we(i).def});var h={base:(i,e,t,o,s)=>({def:go(i,e,t,o,s),schema:fo(i,e,t,o,s)}),c1:(i,e,t,o,s,m)=>({def:yo(i,e,t,o,s,m),schema:uo(i,e,t,o,s,m)})},j={base:i=>({def:Go(i),schema:To(i)})};var u=z$1.enum([SystemRoleLiteral,UserRoleLiteral,AssistantRoleLiteral,ToolRoleLiteral]),gt="model",ut="function",y={system:UserRoleLiteral,user:UserRoleLiteral,assistant:gt,tool:ut};var _=[TextModalityLiteral,ImageModalityLiteral,ToolCallModalityLiteral,ToolResponseModalityLiteral],E=z$1.enum([TextModalityLiteral,ImageModalityLiteral,ToolCallModalityLiteral,ToolResponseModalityLiteral]),Nn=[TextModalityLiteral],An=z$1.enum([TextModalityLiteral]),bo=[TextModalityLiteral,ImageModalityLiteral],Mo=z$1.enum([TextModalityLiteral,ImageModalityLiteral]),z=[TextModalityLiteral,ToolCallModalityLiteral,ToolResponseModalityLiteral],D=z$1.enum([TextModalityLiteral,ToolCallModalityLiteral,ToolResponseModalityLiteral]);var 
yt=z$1.object({text:z$1.string()}),Ct=z$1.object({functionCall:z$1.object({name:z$1.string(),args:z$1.record(z$1.any())})}),_o=z$1.object({candidates:z$1.array(z$1.object({content:z$1.object({role:z$1.string(),parts:z$1.array(z$1.union([yt,Ct]))}).optional(),finishReason:z$1.string(),index:z$1.number().optional(),safetyRatings:z$1.optional(z$1.array(z$1.object({category:z$1.string(),probability:z$1.string(),blocked:z$1.boolean().optional()})))})),promptFeedback:z$1.optional(z$1.object({safetyRatings:z$1.optional(z$1.array(z$1.object({category:z$1.string(),probability:z$1.string()})))})),usageMetadata:z$1.object({promptTokenCount:z$1.number(),cachedContentTokenCount:z$1.number().optional(),candidatesTokenCount:z$1.number().optional(),totalTokenCount:z$1.number()}).optional()}),Tt=z$1.object({text:z$1.string()}),Gt=z$1.object({functionCall:z$1.object({name:z$1.string(),args:z$1.record(z$1.any())})}),Eo=z$1.object({candidates:z$1.array(z$1.object({content:z$1.object({role:z$1.string(),parts:z$1.array(z$1.union([Tt,Gt]))}).optional(),finishReason:z$1.string().optional(),index:z$1.number().optional(),safetyRatings:z$1.optional(z$1.array(z$1.object({category:z$1.string(),probability:z$1.string(),blocked:z$1.boolean().optional()})))})),promptFeedback:z$1.optional(z$1.object({safetyRatings:z$1.optional(z$1.array(z$1.object({category:z$1.string(),probability:z$1.string()})))})),usageMetadata:z$1.object({promptTokenCount:z$1.number(),cachedContentTokenCount:z$1.number().optional(),candidatesTokenCount:z$1.number(),totalTokenCount:z$1.number()}).optional()});var Ro=z$1.object({text:z$1.string().min(1)}),bt=z$1.object({inline_data:z$1.object({mime_type:z$1.string().min(1),data:z$1.string().base64()})}),Mt=z$1.object({function_call:z$1.object({name:z$1.string().min(1),args:z$1.record(z$1.string().min(1))})}),_t=z$1.object({function_response:z$1.object({name:z$1.string().min(1),response:z$1.record(z$1.string().min(1))})}),Et=z$1.object({role:z$1.enum(["user","model","function"]),parts:z$1.array(z$1.union([Ro,bt,Mt,_t]))}),So=z$1.object({parts:z$1.array(Ro)}),St=z$1.object({name:z$1.string().min(1),description:z$1.string().min(1),parameters:z$1.any()}),Oo=z$1.object({function_calling_config:z$1.object({mode:z$1.enum(["ANY","AUTO","NONE"]),allowed_function_names:z$1.array(z$1.string()).optional()})}),Po=z$1.object({stopSequences:z$1.array(z$1.string()).optional(),maxOutputTokens:z$1.number().optional(),temperature:z$1.number().optional(),topP:z$1.number().optional(),topK:z$1.number().optional(),presencePenalty:z$1.number().optional(),frequencyPenalty:z$1.number().optional(),seed:z$1.number().optional()}),xo=z$1.object({model:z$1.string().min(1).optional(),contents:z$1.array(Et),systemInstruction:So.optional(),system_instruction:So.optional(),generationConfig:Po.optional(),generation_config:Po.optional(),tools:z$1.object({function_declarations:z$1.array(St)}).optional(),toolConfig:Oo.optional(),tool_config:Oo.optional()});var 
Ot="google",K=class{constructor(){this.version="v1";this.name=Ot;this.chatModelFactories={[Be]:{model:pe,modelOptions:$o,modelSchema:Ne},[Ae]:{model:ce,modelOptions:Fo,modelSchema:He},[Ve]:{model:he,modelOptions:qo,modelSchema:Ke},[De]:{model:de,modelOptions:Io,modelSchema:Ue},[We]:{model:ge,modelOptions:zo,modelSchema:Xe},[Qe]:{model:ue,modelOptions:Do,modelSchema:Ze},[eo]:{model:ye,modelOptions:Uo,modelSchema:oo},[Ye]:{model:fe,modelOptions:jo,modelSchema:Je},[Le]:{model:ae,modelOptions:wo,modelSchema:ve},[je]:{model:me,modelOptions:vo,modelSchema:ze},[Fe]:{model:le,modelOptions:Lo,modelSchema:qe},[Ie]:{model:re,modelOptions:ko,modelSchema:$e}};this.embeddingModelFactories={[to]:{model:Ce,modelOptions:Bo,modelSchema:no},[so]:{model:Te,modelOptions:No,modelSchema:io}};}chatModelLiterals(){return Object.keys(this.chatModelFactories)}chatModel(e,t){if(!(e in this.chatModelFactories))throw new ProviderError({info:`Google chat model: ${e} not found`,cause:new Error(`Google chat model: ${e} not found, available chat models:
[${this.chatModelLiterals().join(", ")}]`)});let o=this.chatModelFactories[e].model,s=G({},t),m=this.chatModelFactories[e].modelOptions.parse(s);return new o(m)}chatModelSchema(e){if(!(e in this.chatModelFactories))throw new ProviderError({info:`Google chat model: ${e} not found`,cause:new Error(`Google chat model: ${e} not found, available chat models:
[${this.chatModelLiterals().join(", ")}]`)});return this.chatModelFactories[e].modelSchema}chatModelSchemas(){return Object.keys(this.chatModelFactories).reduce((e,t)=>(e[t]=this.chatModelFactories[t].modelSchema,e),{})}embeddingModelLiterals(){return Object.keys(this.embeddingModelFactories)}embeddingModel(e,t){if(!(e in this.embeddingModelFactories))throw new ProviderError({info:`Google embedding model: ${e} not found`,cause:new Error(`Google embedding model: ${e} not found, available embedding models:
[${this.embeddingModelLiterals().join(", ")}]`)});let o=this.embeddingModelFactories[e].model,s=G({},t),m=this.embeddingModelFactories[e].modelOptions.parse(s);return new o(m)}embeddingModelSchema(e){if(!(e in this.embeddingModelFactories))throw new ProviderError({info:`Google embedding model: ${e} not found`,cause:new Error(`Google embedding model: ${e} not found, available embedding models:
[${this.embeddingModelLiterals().join(", ")}]`)});return this.embeddingModelFactories[e].modelSchema}embeddingModelSchemas(){return Object.keys(this.embeddingModelFactories).reduce((e,t)=>(e[t]=this.embeddingModelFactories[t].modelSchema,e),{})}};K.baseUrl="https://generativelanguage.googleapis.com/v1beta";var g=z$1.object({apiKey:z$1.string(),baseUrl:z$1.string().url().optional(),completeChatUrl:z$1.string().url().optional(),streamChatUrl:z$1.string().url().optional()}),C=class{constructor(e,t){this.version="v1";var s;let o=g.parse(t);this.modelSchema=e,this.apiKey=o.apiKey,this.baseUrl=urlWithoutTrailingSlash((s=o.baseUrl)!=null?s:K.baseUrl),this.completeChatUrl=urlWithoutTrailingSlash(o.completeChatUrl||`${this.baseUrl}/models/${this.modelSchema.name}:generateContent?key=${this.apiKey}`),this.streamChatUrl=urlWithoutTrailingSlash(o.streamChatUrl||`${this.baseUrl}/models/${this.modelSchema.name}:streamGenerateContent?key=${this.apiKey}`);}getDefaultBaseUrl(){return this.baseUrl}getDefaultHeaders(){return {"Content-Type":"application/json",source:"adaline.ai"}}getDefaultParams(){return {}}getRetryDelay(e){return {shouldRetry:!1,delayMs:0}}getTokenCount(e){return e.reduce((t,o)=>t+o.content.map(s=>s.modality==="text"?s.value:"").join(" ").length,0)}transformModelRequest(e){let t=xo.safeParse(e);if(!t.success)throw new InvalidModelRequestError({info:"Invalid model request",cause:t.error});let o=t.data,s=o.model;if(o.system_instruction&&o.systemInstruction)throw new InvalidModelRequestError({info:`Invalid model request for model : '${this.modelSchema.name}'`,cause:new Error("'system_instruction' and 'systemInstruction' are not allowed at the same time")});if(o.generation_config&&o.generationConfig)throw new InvalidModelRequestError({info:`Invalid model request for model : '${this.modelSchema.name}'`,cause:new Error("'generation_config' and 'generationConfig' are not allowed at the same time")});if(o.tool_config&&o.toolConfig)throw new InvalidModelRequestError({info:`Invalid model request for model : '${this.modelSchema.name}'`,cause:new Error("'tool_config' and 'toolConfig' are not allowed at the same time")});let m=o.system_instruction||o.systemInstruction,n=o.generation_config||o.generationConfig,r=o.tool_config||o.toolConfig;if(r&&(!o.tools||o.tools.function_declarations.length===0))throw new InvalidModelRequestError({info:`Invalid model request for model : '${this.modelSchema.name}'`,cause:new Error("'tools' are required when 'tool_choice' is specified")});let l={};r&&(r.function_calling_config.mode==="ANY"&&r.function_calling_config.allowed_function_names&&r.function_calling_config.allowed_function_names.length===1?l.toolChoice=r.function_calling_config.allowed_function_names[0]:l.toolChoice=r.function_calling_config.mode.toLowerCase()),l.seed=n==null?void 0:n.seed,l.maxTokens=n==null?void 0:n.maxOutputTokens,l.temperature=n==null?void 0:n.temperature,l.topP=n==null?void 0:n.topP,l.presencePenalty=n==null?void 0:n.presencePenalty,l.frequencyPenalty=n==null?void 0:n.frequencyPenalty,l.stop=n==null?void 0:n.stopSequences;let c=Config().parse(removeUndefinedEntries(l)),p=[];m&&m.parts.forEach(f=>{p.push({role:SystemRoleLiteral,content:[{modality:TextModalityLiteral,value:f.text}]});}),o.contents.forEach(f=>{let A=f.role;switch(A){case"user":{let H=f.parts.map(O=>"text"in 
O?{modality:TextModalityLiteral,value:O.text}:{modality:ImageModalityLiteral,detail:"auto",value:{type:Base64ImageContentTypeLiteral,base64:O.inline_data.data,media_type:O.inline_data.mime_type.split("/")[1]}});p.push({role:A,content:H});}break;case"model":{let H=f.parts.map((O,V)=>"text"in O?{modality:TextModalityLiteral,value:O.text}:{modality:ToolCallModalityLiteral,id:V.toString(),index:V,name:O.function_call.name,arguments:JSON.stringify(O.function_call.args)});p.push({role:AssistantRoleLiteral,content:H});}break;case"function":{let H=f.parts.map((O,V)=>({modality:ToolResponseModalityLiteral,id:V.toString(),index:V,name:O.function_response.name,data:JSON.stringify(O.function_response.response)}));p.push({role:ToolRoleLiteral,content:H});}break;default:throw new InvalidMessagesError({info:`Invalid message 'role' for model : ${this.modelSchema.name}`,cause:new Error(`role : '${f.role}' is not supported for model : ${this.modelSchema.name}`)})}});let b=[];return o.tools&&o.tools.function_declarations.forEach(f=>{b.push({type:"function",definition:{schema:{name:f.name,description:f.description,parameters:f.parameters}}});}),{modelName:s,config:c,messages:p,tools:b.length>0?b:void 0}}transformConfig(e,t,o){let s=e.toolChoice;delete e.toolChoice;let m=this.modelSchema.config.schema.safeParse(e);if(!m.success)throw new InvalidConfigError({info:`Invalid config for model : '${this.modelSchema.name}'`,cause:m.error});let n=m.data;Object.keys(n).forEach(p=>{if(!(p in this.modelSchema.config.def))throw new InvalidConfigError({info:`Invalid config for model : '${this.modelSchema.name}'`,cause:new Error(`Invalid config key : '${p}',
available keys : [${Object.keys(this.modelSchema.config.def).join(", ")}]`)})});let r=Object.keys(n).reduce((p,T)=>{let b=this.modelSchema.config.def[T],f=b.param,A=n[T];return f==="maxOutputTokens"&&b.type==="range"&&A===0?p[f]=b.max:p[f]=A,p},{}),l=r.safetySettings;delete r.safetySettings;let c;if(s!==void 0){let p=s;if(!o||o&&o.length===0)throw new InvalidConfigError({info:`Invalid config for model : '${this.modelSchema.name}'`,cause:new Error("'tools' are required when 'toolChoice' is specified")});if(o&&o.length>0){let T=this.modelSchema.config.def.toolChoice;if(T.choices.includes(p))p==="any"?c={function_calling_config:{mode:"ANY",allowed_function_names:o.map(b=>b.definition.schema.name)}}:c={function_calling_config:{mode:p.toUpperCase()}};else if(o.map(b=>b.definition.schema.name).includes(p))c={function_calling_config:{mode:"ANY",allowed_function_names:[p]}};else throw new InvalidConfigError({info:`Invalid config for model : '${this.modelSchema.name}'`,cause:new Error(`toolChoice : '${p}' is not part of provided 'tools' names or
one of [${T.choices.join(", ")}]`)})}}return G(G({generation_config:r},c?{tool_config:c}:{}),l?{safety_settings:l}:{})}transformMessages(e){if(!e||e&&e.length===0)return {messages:[]};let t=e.map(n=>{let r=Message().safeParse(n);if(!r.success)throw new InvalidMessagesError({info:"Invalid messages",cause:r.error});return r.data});t.forEach(n=>{n.content.forEach(r=>{if(!this.modelSchema.modalities.includes(r.modality))throw new InvalidMessagesError({info:`Invalid message content for model : '${this.modelSchema.name}'`,cause:new Error(`model : '${this.modelSchema.name}' does not support modality : '${r.modality}',
available modalities : [${this.modelSchema.modalities.join(", ")}]`)})});}),t.forEach(n=>{if(!Object.keys(this.modelSchema.roles).includes(n.role))throw new InvalidMessagesError({info:`Invalid message content for model : '${this.modelSchema.name}'`,cause:new Error(`model : '${this.modelSchema.name}' does not support role : '${n.role}',
available roles : [${Object.keys(this.modelSchema.roles).join(", ")}]`)})});let o={parts:[]},s=[];if(t.forEach(n=>{switch(n.role){case SystemRoleLiteral:n.content.forEach(r=>{if(r.modality===TextModalityLiteral)o.parts.push({text:r.value});else throw new InvalidMessagesError({info:`Invalid message 'role' and 'modality' combination for model : ${this.modelSchema.name}`,cause:new Error(`role : '${n.role}' cannot have content with modality : '${r.modality}'`)})});break;case AssistantRoleLiteral:{let r=[];n.content.forEach(l=>{if(l.modality===TextModalityLiteral)r.push({text:l.value});else if(l.modality===ToolCallModalityLiteral)r.push({function_call:{name:l.name,args:JSON.parse(l.arguments)}});else throw new InvalidMessagesError({info:`Invalid message 'role' and 'modality' combination for model : ${this.modelSchema.name}`,cause:new Error(`role : '${n.role}' cannot have content with modality : '${l.modality}'`)})}),s.push({role:this.modelSchema.roles[n.role],parts:r});}break;case UserRoleLiteral:{let r=[];n.content.forEach(l=>{if(l.modality===TextModalityLiteral)r.push({text:l.value});else if(l.modality===ImageModalityLiteral){if(l.value.type==="base64")r.push({inline_data:{mime_type:l.value.media_type,data:l.value.base64}});else if(l.value.type==="url")throw new InvalidMessagesError({info:`Invalid message 'modality' for model : ${this.modelSchema.name}`,cause:new Error(`model: '${this.modelSchema.name}' does not support image content type: '${l.value.type}'`)})}else throw new InvalidMessagesError({info:`Invalid message 'role' and 'modality' combination for model : ${this.modelSchema.name}`,cause:new Error(`role : '${n.role}' cannot have content with modality : '${l.modality}'`)})}),s.push({role:this.modelSchema.roles[n.role],parts:r});}break;case ToolRoleLiteral:{let r=[];n.content.forEach(l=>{if(l.modality===ToolResponseModalityLiteral)r.push({function_response:{name:l.name,response:JSON.parse(l.data)}});else throw new InvalidMessagesError({info:`Invalid message 'role' and 'modality' combination for model : ${this.modelSchema.name}`,cause:new Error(`role : '${n.role}' cannot have content with modality : '${l.modality}'`)})}),s.push({role:this.modelSchema.roles[n.role],parts:r});}break;default:throw new InvalidMessagesError({info:`Invalid message 'role' for model : ${this.modelSchema.name}`,cause:new Error(`role : '${n.role}' is not supported,
available roles : [${Object.keys(this.modelSchema.roles).join(", ")}]`)})}}),s[0].role!==this.modelSchema.roles[UserRoleLiteral])throw new InvalidMessagesError({info:`Invalid message 'role' for model : ${this.modelSchema.name}`,cause:new Error(`model : '${this.modelSchema.name}' requires first message to be from user`)});let m=n=>n===this.modelSchema.roles[UserRoleLiteral]||n===this.modelSchema.roles[ToolRoleLiteral]?[this.modelSchema.roles[AssistantRoleLiteral]]:[this.modelSchema.roles[UserRoleLiteral],this.modelSchema.roles[ToolRoleLiteral]];for(let n=1;n<s.length;n++)if(!m(s[n-1].role).includes(s[n].role))throw new InvalidMessagesError({info:`Invalid message format for model : ${this.modelSchema.name}`,cause:new Error(`model : '${this.modelSchema.name}' cannot have message with role : '${s[n].role}' after message with role : '${s[n-1].role}'`)});if(s[s.length-1].role!==this.modelSchema.roles[UserRoleLiteral])throw new InvalidMessagesError({info:`Invalid message format for model : ${this.modelSchema.name}`,cause:new Error(`model : '${this.modelSchema.name}' requires last message to be from user`)});return G({contents:s},o.parts.length>0?{system_instruction:o}:{})}transformTools(e){if(!this.modelSchema.modalities.includes(ToolCallModalityLiteral))throw new InvalidToolsError({info:`Invalid tool 'modality' for model : ${this.modelSchema.name}`,cause:new Error(`model : '${this.modelSchema.name}' does not support tool modality : '${ToolCallModalityLiteral}'`)});return !e||e&&e.length===0?{tools:[]}:{tools:e.map(s=>{let m=Tool().safeParse(s);if(!m.success)throw new InvalidToolsError({info:"Invalid tools",cause:m.error});return m.data}).map(s=>({function_declarations:[{name:s.definition.schema.name,description:s.definition.schema.description,parameters:s.definition.schema.parameters}]}))}}getCompleteChatUrl(e,t,o){return P(this,null,function*(){return new Promise(s=>{s(this.completeChatUrl);})})}getCompleteChatHeaders(e,t,o){return P(this,null,function*(){return new Promise(s=>{s(this.getDefaultHeaders());})})}getCompleteChatData(e,t,o){return P(this,null,function*(){let s=this.transformConfig(e,t,o),m=this.transformMessages(t);if(m.messages&&m.messages.length===0)throw new InvalidMessagesError({info:"Messages are required",cause:new Error("Messages are required")});let n=o?this.transformTools(o):{};return new Promise(r=>{r(G(G(G(G({},this.getDefaultParams()),s),m),n));})})}transformCompleteChatResponse(e){let t=_o.safeParse(e);if(t.success){if(t.data.candidates.length===0)throw new ModelResponseError({info:"Invalid response from model",cause:new Error(`No choices in response : ${JSON.stringify(t.data)}`)});let o=t.data,s=[],m,n=o.candidates[0].content;if(n){let c=n.parts.map((p,T)=>{if("text"in p&&p.text!==void 0)return createTextContent(p.text);if("functionCall"in p&&p.functionCall!==void 0)return createToolCallContent(T,`${p.functionCall.name}_${T}`,p.functionCall.name,JSON.stringify(p.functionCall.args))});return s.push({role:AssistantRoleLiteral,content:c}),o.usageMetadata&&(m={promptTokens:o.usageMetadata.promptTokenCount,totalTokens:o.usageMetadata.totalTokenCount,completionTokens:o.usageMetadata.candidatesTokenCount||0}),{messages:s,usage:m,logProbs:void 0}}let r=o.candidates[0].safetyRatings;if(r&&r.length>0&&r.forEach(c=>{if(c.blocked)throw new ModelResponseError({info:`Blocked content for category: ${c.category} with probability: ${c.probability}`,cause:new Error(`Blocked content for category: ${c.category} with probability: 
${c.probability}`)})}),o.candidates[0].finishReason==="SAFETY")throw new ModelResponseError({info:"Blocked content, model response finished with safety reason",cause:new Error("Blocked content, model response finished with safety reason")})}throw new ModelResponseError({info:"Invalid response from model",cause:t.error})}getStreamChatUrl(e,t,o){return P(this,null,function*(){return new Promise(s=>{s(this.streamChatUrl);})})}getStreamChatHeaders(e,t,o){return P(this,null,function*(){return new Promise(s=>{s(this.getDefaultHeaders());})})}getStreamChatData(e,t,o){return P(this,null,function*(){let s=this.transformConfig(e,t,o),m=this.transformMessages(t);if(m.messages&&m.messages.length===0)throw new InvalidMessagesError({info:"Messages are required",cause:new Error("Messages are required")});let n=o?this.transformTools(o):{};return new Promise(r=>{r(G(G(G(G({},this.getDefaultParams()),s),m),n));})})}transformStreamChatResponseChunk(e,t){return ho(this,null,function*(){let o=(t+e).split(",\r").filter(s=>s.trim()!=="");for(let s of o){let m=s;if(m=m.replace(/\n/g,""),m.startsWith("["))m=m.slice(1);else if(m.endsWith("]")){if(m==="]")return;m=m.slice(0,-1);}let n;try{n=JSON.parse(m);}catch(l){if(l instanceof SyntaxError){t=m;continue}else throw l}t="";let r=Eo.safeParse(n);if(r.success){let l={partialMessages:[]},c=r.data;if(c.candidates.length>0){let p=c.candidates[0].content;p&&"parts"in p&&p.parts.length>0&&p.parts.forEach((T,b)=>{if("text"in T&&T.text!==void 0&&l.partialMessages.push(createPartialTextMessage(AssistantRoleLiteral,T.text)),"functionCall"in T&&T.functionCall!==void 0){let f=T.functionCall;l.partialMessages.push(createPartialToolCallMessage(AssistantRoleLiteral,b,`${f.name}_${b}`,f.name,JSON.stringify(f.args)));}});}c.usageMetadata&&(l.usage={promptTokens:c.usageMetadata.promptTokenCount,completionTokens:c.usageMetadata.candidatesTokenCount,totalTokens:c.usageMetadata.totalTokenCount}),yield {partialResponse:l,buffer:t};}else throw new ModelResponseError({info:"Invalid response from model",cause:r.error})}yield {partialResponse:{partialMessages:[]},buffer:t};})}};var R=class extends C{transformMessages(e){let t=super.transformMessages(e);if(t.systemInstruction){let o={role:this.modelSchema.roles[UserRoleLiteral],parts:t.systemInstruction.parts};t.contents.unshift(o),delete t.systemInstruction;}return t}};var Le="gemini-1.0-pro",jt="Google's predecessor to Gemini 1.5 Pro, a model for scaling across a wide range of tasks Optimized for natural language tasks, multi-turn text and code chat, and code generation",ve=ChatModelSchema(u,D).parse({name:Le,description:jt,maxInputTokens:30720,maxOutputTokens:2048,roles:y,modalities:z,config:{def:h.base(1,.9,2048,4,1).def,schema:h.base(1,.9,2048,4,1).schema}}),wo=g,ae=class extends R{constructor(e){super(ve,e);}};var Ie="gemini-1.0-pro-001",Dt="Google's predecessor to Gemini 1.5 Pro, a model for scaling across a wide range of tasks Optimized for natural language tasks, multi-turn text and code chat, and code generation",$e=ChatModelSchema(u,D).parse({name:Ie,description:Dt,maxInputTokens:30720,maxOutputTokens:2048,roles:y,modalities:z,config:{def:h.base(1,.9,2048,4,1).def,schema:h.base(1,.9,2048,4,1).schema}}),ko=g,re=class extends R{constructor(e){super($e,e);}};var Fe="gemini-1.0-pro-latest",Bt="Google's latest multimodal model with great performance for high-frequency tasks. 
Optimized for natural language tasks, multi-turn text and code chat, and code generation",qe=ChatModelSchema(u,D).parse({name:Fe,description:Bt,maxInputTokens:30720,maxOutputTokens:2048,roles:y,modalities:z,config:{def:h.base(1,.9,2048,4,1).def,schema:h.base(1,.9,2048,4,1).schema}}),Lo=g,le=class extends R{constructor(e){super(qe,e);}};var je="gemini-1.0-pro-vision",At="Google's predecessor to Gemini 1.5 Pro, an image understanding model to handle a broad range of applications",ze=ChatModelSchema(u,Mo).parse({name:je,description:At,maxInputTokens:12288,maxOutputTokens:4096,roles:y,modalities:bo,config:{def:h.c1(1,.4,4096,4,1,32).def,schema:h.c1(1,.4,4096,4,1,32).schema}}),vo=g,me=class extends R{constructor(e){super(ze,e);}};var De="gemini-1.5-flash",Vt="Google's fastest, most cost-efficient multimodal model with great performance for high-frequency tasks. Optimized for fast and versatile performance across a diverse variety of tasks",Ue=ChatModelSchema(u,E).parse({name:De,description:Vt,maxInputTokens:1e6,maxOutputTokens:8192,roles:y,modalities:_,config:{def:h.c1(2,1,8192,4,.95,64).def,schema:h.c1(2,1,8192,4,.95,64).schema}}),Io=g,de=class extends C{constructor(e){super(Ue,e);}};var Be="gemini-1.5-flash-001",Yt="Google's fastest, most cost-efficient multimodal model with great performance for high-frequency tasks. Optimized for fast and versatile performance across a diverse variety of tasks",Ne=ChatModelSchema(u,E).parse({name:Be,description:Yt,maxInputTokens:1e6,maxOutputTokens:8192,roles:y,modalities:_,config:{def:h.c1(2,1,8192,4,.95,64).def,schema:h.c1(2,1,8192,4,.95,64).schema}}),$o=g,pe=class extends C{constructor(e){super(Ne,e);}};var Ae="gemini-1.5-flash-002",Wt="Google's fastest, most cost-efficient multimodal model with great performance for high-frequency tasks. Optimized for fast and versatile performance across a diverse variety of tasks",He=ChatModelSchema(u,E).parse({name:Ae,description:Wt,maxInputTokens:1e6,maxOutputTokens:8192,roles:y,modalities:_,config:{def:h.c1(2,1,8192,4,.95,40).def,schema:h.c1(2,1,8192,4,.95,40).schema}}),Fo=g,ce=class extends C{constructor(e){super(He,e);}};var Ve="gemini-1.5-flash-latest",Qt="Google's latest multimodal model with great performance for high-frequency tasks. Optimized for fast and versatile performance across a diverse variety of tasks",Ke=ChatModelSchema(u,E).parse({name:Ve,description:Qt,maxInputTokens:1e6,maxOutputTokens:8192,roles:y,modalities:_,config:{def:h.c1(2,1,8192,4,.95,64).def,schema:h.c1(2,1,8192,4,.95,64).schema}}),qo=g,he=class extends C{constructor(e){super(Ke,e);}};var Ye="gemini-1.5-pro",en="Google's best performing multimodal model with features for a wide variety of reasoning tasks. Optimized for complex reasoning tasks requiring more intelligence",Je=ChatModelSchema(u,E).parse({name:Ye,description:en,maxInputTokens:2e6,maxOutputTokens:8192,roles:y,modalities:_,config:{def:h.c1(2,1,8192,4,.95,64).def,schema:h.c1(2,1,8192,4,.95,64).schema}}),jo=g,fe=class extends C{constructor(e){super(Je,e);}};var We="gemini-1.5-pro-001",tn="Google's best performing multimodal model with features for a wide variety of reasoning tasks. 
Optimized for complex reasoning tasks requiring more intelligence",Xe=ChatModelSchema(u,E).parse({name:We,description:tn,maxInputTokens:2e6,maxOutputTokens:8192,roles:y,modalities:_,config:{def:h.c1(2,1,8192,4,.95,64).def,schema:h.c1(2,1,8192,4,.95,64).schema}}),zo=g,ge=class extends C{constructor(e){super(Xe,e);}};var Qe="gemini-1.5-pro-002",sn="Google's best performing multimodal model with features for a wide variety of reasoning tasks. Optimized for complex reasoning tasks requiring more intelligence",Ze=ChatModelSchema(u,E).parse({name:Qe,description:sn,maxInputTokens:2e6,maxOutputTokens:8192,roles:y,modalities:_,config:{def:h.c1(2,1,8192,4,.95,40).def,schema:h.c1(2,1,8192,4,.95,40).schema}}),Do=g,ue=class extends C{constructor(e){super(Ze,e);}};var eo="gemini-1.5-pro-latest",rn="Google's best performing multimodal model with features for a wide variety of reasoning tasks. Optimized for complex reasoning tasks requiring more intelligence",oo=ChatModelSchema(u,E).parse({name:eo,description:rn,maxInputTokens:2e6,maxOutputTokens:8192,roles:y,modalities:_,config:{def:h.c1(2,1,8192,4,.95,64).def,schema:h.c1(2,1,8192,4,.95,64).schema}}),Uo=g,ye=class extends C{constructor(e){super(oo,e);}};var _e=[EmbeddingTextModalityLiteral],Ee=z$1.enum([EmbeddingTextModalityLiteral]);var Jo=z$1.object({embeddings:z$1.array(z$1.object({values:z$1.array(z$1.number())}))});var mn=z$1.object({model:z$1.string().min(1),content:z$1.object({parts:z$1.array(z$1.object({text:z$1.string().min(1)})).min(1)})}),Wo=z$1.object({model:z$1.string().min(1).optional(),requests:z$1.array(mn).min(1),outputDimensionality:z$1.number().int().min(1).optional()});var Z=z$1.object({apiKey:z$1.string(),baseUrl:z$1.string().url(),getEmbeddingsUrl:z$1.string().url().optional()}),N=class{constructor(e,t){this.version="v1";let o=Z.parse(t);this.modelSchema=e,this.apiKey=o.apiKey,this.baseUrl=urlWithoutTrailingSlash(o.baseUrl),this.getEmbeddingsUrl=urlWithoutTrailingSlash(o.getEmbeddingsUrl||`${this.baseUrl}/models/${this.modelSchema.name}:batchEmbedContents?key=${this.apiKey}`);}getDefaultBaseUrl(){return this.baseUrl}getDefaultHeaders(){return {"Content-Type":"application/json",source:"adaline.ai"}}getDefaultParams(){return {model:this.modelSchema.name}}getRetryDelay(e){return {shouldRetry:!1,delayMs:0}}getTokenCount(e){return e.requests.reduce((t,o)=>t+o.length,0)}transformModelRequest(e){let t=Wo.safeParse(e);if(!t.success)throw new InvalidModelRequestError({info:"Invalid model request",cause:t.error});let o=t.data,s=o.model,m={outputDimensionality:o.outputDimensionality},n=Config().parse(removeUndefinedEntries(m)),r={modality:EmbeddingTextModalityLiteral,requests:o.requests.reduce((l,c)=>(l.push(...c.content.parts.map(p=>p.text)),l),[])};return {modelName:s,config:n,embeddingRequests:r}}transformConfig(e,t){let o=this.modelSchema.config.schema.safeParse(e);if(!o.success)throw new InvalidConfigError({info:`Invalid config for model : '${this.modelSchema.name}'`,cause:o.error});let s=o.data;return Object.keys(s).forEach(n=>{if(!this.modelSchema.config.def[n])throw new InvalidConfigError({info:`Invalid config for model : '${this.modelSchema.name}'`,cause:new Error(`Invalid config key : '${n}',
available keys : [${Object.keys(this.modelSchema.config.def).join(", ")}]`)})}),Object.keys(s).reduce((n,r)=>{let c=this.modelSchema.config.def[r].param,p=s[r];return n[c]=p,n},{})}transformEmbeddingRequests(e){let t=EmbeddingRequests().safeParse(e);if(!t.success)throw new InvalidEmbeddingRequestsError({info:"Invalid embedding requests",cause:t.error});if(e.modality!==EmbeddingTextModalityLiteral)throw new InvalidEmbeddingRequestsError({info:`Invalid embedding requests for model : '${this.modelSchema.name}'`,cause:new Error(`Only '${EmbeddingTextModalityLiteral}' modality is supported for model : '${this.modelSchema.name}'`)});return {requests:t.data.requests.map(s=>({model:`models/${this.modelSchema.name}`,content:{parts:[{text:s}]}}))}}getGetEmbeddingsUrl(e,t){return P(this,null,function*(){return new Promise(o=>{o(this.getEmbeddingsUrl);})})}getGetEmbeddingsHeaders(e,t){return P(this,null,function*(){return new Promise(o=>{o(this.getDefaultHeaders());})})}getGetEmbeddingsData(e,t){return P(this,null,function*(){return new Promise(o=>{let s=this.transformConfig(e),m=this.transformEmbeddingRequests(t);if(t.requests.length===0)throw new InvalidEmbeddingRequestsError({info:`Invalid embedding requests for model : '${this.modelSchema.name}'`,cause:new Error("requests cannot be empty")});s.outputDimensionality&&(m.requests.forEach(n=>{n.outputDimensionality=s.outputDimensionality;}),delete s.outputDimensionality),o(G(G(G({},this.getDefaultParams()),s),m));})})}transformGetEmbeddingsResponse(e){let t=Jo.safeParse(e);if(t.success){let s=t.data.embeddings.map((m,n)=>({index:n,embedding:m.values}));return {encodingFormat:FloatEmbeddingLiteral,embeddings:s}}throw new ModelResponseError({info:"Invalid response from model",cause:t.error})}};var to="text-embedding-001",yn="text-embedding-001",no=EmbeddingModelSchema(Ee).parse({name:to,description:yn,modalities:_e,maxInputTokens:2048,maxOutputTokens:768,config:{def:j.base(768).def,schema:j.base(768).schema}}),Bo=Z,Ce=class extends N{constructor(e){super(no,e);}};var so="text-embedding-004",Tn="text-embedding-004",io=EmbeddingModelSchema(Ee).parse({name:so,description:Tn,modalities:_e,maxInputTokens:2048,maxOutputTokens:768,config:{def:j.base(768).def,schema:j.base(768).schema}}),No=Z,Te=class extends N{constructor(e){super(io,e);}};
export { C as BaseChatModel, R as BaseChatModelGemini1, g as BaseChatModelOptions, N as BaseEmbeddingModel, Z as BaseEmbeddingModelOptions, go as ChatModelBaseConfigDef, fo as ChatModelBaseConfigSchema, yo as ChatModelC1ConfigDef, uo as ChatModelC1ConfigSchema, Go as EmbeddingModelBaseConfigDef, To as EmbeddingModelBaseConfigSchema, ae as Gemini1_0Pro, le as Gemini1_0ProLatest, Fe as Gemini1_0ProLatestLiteral, Lo as Gemini1_0ProLatestOptions, qe as Gemini1_0ProLatestSchema, Le as Gemini1_0ProLiteral, wo as Gemini1_0ProOptions, ve as Gemini1_0ProSchema, me as Gemini1_0ProVision, je as Gemini1_0ProVisionLiteral, vo as Gemini1_0ProVisionOptions, ze as Gemini1_0ProVisionSchema, re as Gemini1_0Pro_001, Ie as Gemini1_0Pro_001Literal, ko as Gemini1_0Pro_001Options, $e as Gemini1_0Pro_001Schema, de as Gemini1_5Flash, pe as Gemini1_5Flash001, Be as Gemini1_5Flash001Literal, $o as Gemini1_5Flash001Options, Ne as Gemini1_5Flash001Schema, ce as Gemini1_5Flash002, Ae as Gemini1_5Flash002Literal, Fo as Gemini1_5Flash002Options, He as Gemini1_5Flash002Schema, he as Gemini1_5FlashLatest, Ve as Gemini1_5FlashLatestLiteral, qo as Gemini1_5FlashLatestOptions, Ke as Gemini1_5FlashLatestSchema, De as Gemini1_5FlashLiteral, Io as Gemini1_5FlashOptions, Ue as Gemini1_5FlashSchema, fe as Gemini1_5Pro, ge as Gemini1_5Pro001, We as Gemini1_5Pro001Literal, zo as Gemini1_5Pro001Options, Xe as Gemini1_5Pro001Schema, ue as Gemini1_5Pro002, Qe as Gemini1_5Pro002Literal, Do as Gemini1_5Pro002Options, Ze as Gemini1_5Pro002Schema, ye as Gemini1_5ProLatest, eo as Gemini1_5ProLatestLiteral, Uo as Gemini1_5ProLatestOptions, oo as Gemini1_5ProLatestSchema, Ye as Gemini1_5ProLiteral, jo as Gemini1_5ProOptions, Je as Gemini1_5ProSchema, K as Google, gt as GoogleChatAssistantRoleLiteral, Et as GoogleChatContent, Mt as GoogleChatContentPartFunctionCall, _t as GoogleChatContentPartFunctionResponse, bt as GoogleChatContentPartInlineData, Ro as GoogleChatContentPartText, Po as GoogleChatGenerationConfig, h as GoogleChatModelConfigs, _ as GoogleChatModelModalities, E as GoogleChatModelModalitiesEnum, u as GoogleChatModelRoles, y as GoogleChatModelRolesMap, Nn as GoogleChatModelTextModalities, An as GoogleChatModelTextModalitiesEnum, z as GoogleChatModelTextToolModalities, D as GoogleChatModelTextToolModalitiesEnum, bo as GoogleChatModelTextVisionModalities, Mo as GoogleChatModelTextVisionModalitiesEnum, xo as GoogleChatRequest, So as GoogleChatSystemInstruction, St as GoogleChatTool, Oo as GoogleChatToolConfig, ut as GoogleChatToolRoleLiteral, _o as GoogleCompleteChatResponse, yt as GoogleCompleteChatTextResponse, Ct as GoogleCompleteChatToolResponse, j as GoogleEmbeddingModelConfigs, _e as GoogleEmbeddingModelModalities, Ee as GoogleEmbeddingModelModalitiesEnum, Wo as GoogleEmbeddingRequest, mn as GoogleEmbeddingRequestInput, Jo as GoogleGetEmbeddingsResponse, Eo as GoogleStreamChatResponse, Tt as GoogleStreamChatTextResponse, Gt as GoogleStreamChatToolResponse, Ot as ProviderLiteral, Ce as Text_Embedding_001, to as Text_Embedding_001Literal, Bo as Text_Embedding_001Options, no as Text_Embedding_001Schema, Te as Text_Embedding_004, so as Text_Embedding_004Literal, No as Text_Embedding_004Options, io as Text_Embedding_004Schema, we as dimensions, Pe as frequencyPenalty, v as maxTokens, Re as presencePenalty, q as safetySettings, xe as seed, I as stop, L as temperature, F as toolChoice, Oe as topK, $ as topP };
//# sourceMappingURL=index.mjs.map
//# sourceMappingURL=index.mjs.map
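Note: the bundle above exposes a Google provider class alongside per-model classes, option schemas, and model-name literals. As a rough, untested sketch of how a consumer might wire this up (the API key value is a placeholder; only exported names visible in the bundle are assumed):

    import { Google, Gemini1_5ProLiteral, Text_Embedding_004Literal } from "@adaline/google";

    // Instantiate the provider and list the chat model literals it registers.
    const google = new Google();
    console.log(google.chatModelLiterals());

    // Chat model options are validated with zod: apiKey is required; baseUrl and the
    // complete/stream chat URLs are optional and default to the Generative Language API.
    const gemini = google.chatModel(Gemini1_5ProLiteral, { apiKey: "YOUR_API_KEY" });

    // Embedding model options additionally require an explicit baseUrl.
    const embedder = google.embeddingModel(Text_Embedding_004Literal, {
      apiKey: "YOUR_API_KEY",
      baseUrl: "https://generativelanguage.googleapis.com/v1beta",
    });

Requesting a literal that is not registered throws a ProviderError listing the available model names, and the option objects are parsed by the corresponding modelOptions schema before the model class is constructed.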
package/dist/index.mjs.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../src/configs/chat-model/common.config.chat-model.google.ts","../src/configs/chat-model/base.config.chat-model.google.ts","../src/configs/chat-model/c1.config.chat-model.google.ts","../src/configs/embedding-model/base.config.embedding-model.google.ts","../src/configs/embedding-model/common.config.embedding-model.google.ts","../src/configs/configs.google.ts","../src/models/chat-models/types/roles.chat-model.google.ts","../src/models/chat-models/types/modalities.chat-model.google.ts","../src/models/chat-models/types/response.chat-model.google.ts","../src/models/chat-models/types/request.chat-model.google.ts","../src/provider/provider.google.ts","../src/models/chat-models/base-chat-model.google.ts","../src/models/chat-models/base-gemini-1-chat-model.google.ts","../src/models/chat-models/gemini-1.0-pro.google.ts","../src/models/chat-models/gemini-1.0-pro-001.google.ts","../src/models/chat-models/gemini-1.0-pro-latest.google.ts","../src/models/chat-models/gemini-1.0-pro-vision.google.ts","../src/models/chat-models/gemini-1.5-flash.google.ts","../src/models/chat-models/gemini-1.5-flash-001.google.ts","../src/models/chat-models/gemini-1.5-flash-002.google.ts","../src/models/chat-models/gemini-1.5-flash-latest.google.ts","../src/models/chat-models/gemini-1.5-pro.google.ts","../src/models/chat-models/gemini-1.5-pro-001.google.ts","../src/models/chat-models/gemini-1.5-pro-002.google.ts","../src/models/chat-models/gemini-1.5-pro-latest.google.ts","../src/models/embedding-models/types/modalitites.embedding-model.google.ts","../src/models/embedding-models/types/response.embedding-model.google.ts","../src/models/embedding-models/types/request.embedding-model.google.ts","../src/models/embedding-models/base-embedding-model.google.ts","../src/models/embedding-models/text-embedding-001.google.ts","../src/models/embedding-models/text-embedding-004.google.ts"],"names":["temperature","max","_default","RangeConfigItem","CHAT_CONFIG","maxTokens","maxOutputTokens","stop","maxSequences","MultiStringConfigItem","topP","topK","frequencyPenalty","presencePenalty","seed","toolChoice","SelectStringConfigItem","safetySettings","ObjectSchemaConfigItem","z","ChatModelBaseConfigSchema","maxTemperature","defaultTemperature","defaultTopP","ChatModelBaseConfigDef","ChatModelC1ConfigSchema","defaultTopK","value","ChatModelC1ConfigDef","dimensions","maxDimensions","EMBEDDING_CONFIG","EmbeddingModelBaseConfigSchema","EmbeddingModelBaseConfigDef","GoogleChatModelConfigs","GoogleEmbeddingModelConfigs","GoogleChatModelRoles","SystemRoleLiteral","UserRoleLiteral","AssistantRoleLiteral","ToolRoleLiteral","GoogleChatAssistantRoleLiteral","GoogleChatToolRoleLiteral","GoogleChatModelRolesMap","GoogleChatModelModalities","TextModalityLiteral","ImageModalityLiteral","ToolCallModalityLiteral","ToolResponseModalityLiteral","GoogleChatModelModalitiesEnum","GoogleChatModelTextModalities","GoogleChatModelTextModalitiesEnum","GoogleChatModelTextVisionModalities","GoogleChatModelTextVisionModalitiesEnum","GoogleChatModelTextToolModalities","GoogleChatModelTextToolModalitiesEnum","GoogleCompleteChatTextResponse","GoogleCompleteChatToolResponse","GoogleCompleteChatResponse","GoogleStreamChatTextResponse","GoogleStreamChatToolResponse","GoogleStreamChatResponse","GoogleChatContentPartText","GoogleChatContentPartInlineData","GoogleChatContentPartFunctionCall","GoogleChatContentPartFunctionResponse","GoogleChatContent","GoogleChatSystemInstruction","GoogleChatTool","GoogleChatToolConfig","GoogleChatGenerationConfig","GoogleCh
atRequest","ProviderLiteral","Google","Gemini1_5Flash001Literal","Gemini1_5Flash001","Gemini1_5Flash001Options","Gemini1_5Flash001Schema","Gemini1_5Flash002Literal","Gemini1_5Flash002","Gemini1_5Flash002Options","Gemini1_5Flash002Schema","Gemini1_5FlashLatestLiteral","Gemini1_5FlashLatest","Gemini1_5FlashLatestOptions","Gemini1_5FlashLatestSchema","Gemini1_5FlashLiteral","Gemini1_5Flash","Gemini1_5FlashOptions","Gemini1_5FlashSchema","Gemini1_5Pro001Literal","Gemini1_5Pro001","Gemini1_5Pro001Options","Gemini1_5Pro001Schema","Gemini1_5Pro002Literal","Gemini1_5Pro002","Gemini1_5Pro002Options","Gemini1_5Pro002Schema","Gemini1_5ProLatestLiteral","Gemini1_5ProLatest","Gemini1_5ProLatestOptions","Gemini1_5ProLatestSchema","Gemini1_5ProLiteral","Gemini1_5Pro","Gemini1_5ProOptions","Gemini1_5ProSchema","Gemini1_0ProLiteral","Gemini1_0Pro","Gemini1_0ProOptions","Gemini1_0ProSchema","Gemini1_0ProVisionLiteral","Gemini1_0ProVision","Gemini1_0ProVisionOptions","Gemini1_0ProVisionSchema","Gemini1_0ProLatestLiteral","Gemini1_0ProLatest","Gemini1_0ProLatestOptions","Gemini1_0ProLatestSchema","Gemini1_0Pro_001Literal","Gemini1_0Pro_001","Gemini1_0Pro_001Options","Gemini1_0Pro_001Schema","Text_Embedding_001Literal","Text_Embedding_001","Text_Embedding_001Options","Text_Embedding_001Schema","Text_Embedding_004Literal","Text_Embedding_004","Text_Embedding_004Options","Text_Embedding_004Schema","name","options","ProviderError","model","modelOptions","__spreadValues","parsedOptions","acc","key","BaseChatModelOptions","BaseChatModel","modelSchema","_a","urlWithoutTrailingSlash","responseHeaders","messages","message","content","request","safeRequest","InvalidModelRequestError","parsedRequest","modelName","systemInstruction","generationConfig","toolConfig","_config","config","Config","removeUndefinedEntries","toolCallMap","part","role","_content","c","Base64ImageContentTypeLiteral","index","InvalidMessagesError","tools","tool","_toolChoice","_parsedConfig","InvalidConfigError","parsedConfig","transformedConfig","def","paramKey","paramValue","configToolChoice","parsedMessages","parsedMessage","Message","nonSystemMessages","assistantContent","userContent","toolResponseContent","getNextExpectedRoles","i","InvalidToolsError","parsedTool","Tool","__async","resolve","transformedMessages","transformedTools","response","safe","ModelResponseError","parsedResponse","usage","contentItem","createTextContent","createToolCallContent","safetyRatings","rating","chunk","buffer","__asyncGenerator","lines","line","completeLine","structuredLine","error","partialResponse","messagePart","createPartialTextMessage","toolCall","createPartialToolCallMessage","BaseChatModelGemini1","systemUserMessage","Gemini1_0ProDescription","ChatModelSchema","Gemini1_0Pro_001Description","Gemini1_0ProLatestDescription","Gemini1_0ProVisionDescription","Gemini1_5FlashDescription","Gemini1_5Flash001Description","Gemini1_5Flash002Description","Gemini1_5FlashLatestDescription","Gemini1_5ProDescription","Gemini1_5Pro001Description","Gemini1_5Pro002Description","Gemini1_5ProLatestDescription","GoogleEmbeddingModelModalities","EmbeddingTextModalityLiteral","GoogleEmbeddingModelModalitiesEnum","GoogleGetEmbeddingsResponse","GoogleEmbeddingRequestInput","GoogleEmbeddingRequest","BaseEmbeddingModelOptions","BaseEmbeddingModel","requests","embeddingRequests","_parsedRequests","EmbeddingRequests","InvalidEmbeddingRequestsError","_requests","embeddings","embedding","FloatEmbeddingLiteral","Text_Embedding_001_Description","EmbeddingModelSchema","Text_Embedding_004_Desc
ription"],"mappings":";;;;uhCAUA,IAAMA,EAAc,CAACC,CAAAA,CAAaC,CAAqBC,GAAAA,eAAAA,CAAgB,CACrE,KAAA,CAAO,cACP,KAAOC,CAAAA,WAAAA,CAAY,YAAY,KAC/B,CAAA,WAAA,CAAaA,YAAY,WAAY,CAAA,WAAA,CACrC,IAAK,CACL,CAAA,GAAA,CAAKH,EACL,IAAM,CAAA,GAAA,CACN,QAASC,CACX,CAAC,EAEKG,CAAaC,CAAAA,CAAAA,EACjBH,eAAgB,CAAA,CACd,KAAO,CAAA,iBAAA,CACP,MAAOC,WAAY,CAAA,UAAA,CAAW,MAC9B,WAAaA,CAAAA,WAAAA,CAAY,WAAW,WACpC,CAAA,GAAA,CAAK,EACL,GAAKE,CAAAA,CAAAA,CACL,KAAM,CACN,CAAA,OAAA,CAAS,CACX,CAAC,CAAA,CAEGC,EAAQC,CACZC,EAAAA,qBAAAA,CAAsB,CACpB,KAAA,CAAO,eACP,CAAA,KAAA,CAAOL,YAAY,IAAKI,CAAAA,CAAY,EAAE,KACtC,CAAA,WAAA,CAAaJ,YAAY,IAAKI,CAAAA,CAAY,EAAE,WAC5C,CAAA,GAAA,CAAKA,CACP,CAAC,CAAA,CAEGE,EAAQR,CAAqBC,EAAAA,eAAAA,CAAgB,CACjD,KAAO,CAAA,MAAA,CACP,KAAOC,CAAAA,WAAAA,CAAY,KAAM,CAAA,KAAA,CACzB,YAAaA,WAAY,CAAA,KAAA,CAAM,YAC/B,GAAK,CAAA,CAAA,CACL,IAAK,CACL,CAAA,IAAA,CAAM,IACN,OAASF,CAAAA,CACX,CAAC,CAEKS,CAAAA,EAAAA,CAAQT,GAAqBC,eAAgB,CAAA,CACjD,MAAO,MACP,CAAA,KAAA,CAAOC,WAAY,CAAA,KAAA,CAAM,KACzB,CAAA,WAAA,CAAaA,YAAY,KAAM,CAAA,WAAA,CAC/B,IAAK,CACL,CAAA,GAAA,CAAK,GACL,IAAM,CAAA,CAAA,CACN,QAASF,CACX,CAAC,EAEKU,EAAmBT,CAAAA,eAAAA,CAAgB,CACvC,KAAO,CAAA,kBAAA,CACP,MAAOC,WAAY,CAAA,iBAAA,CAAkB,KACrC,CAAA,WAAA,CAAaA,WAAY,CAAA,iBAAA,CAAkB,YAC3C,GAAK,CAAA,CAAA,CAAA,CACL,IAAK,CACL,CAAA,IAAA,CAAM,IACN,OAAS,CAAA,CACX,CAAC,CAEKS,CAAAA,EAAAA,CAAkBV,gBAAgB,CACtC,KAAA,CAAO,kBACP,KAAOC,CAAAA,WAAAA,CAAY,iBAAiB,KACpC,CAAA,WAAA,CAAaA,WAAY,CAAA,gBAAA,CAAiB,WAC1C,CAAA,GAAA,CAAK,GACL,GAAK,CAAA,CAAA,CACL,KAAM,GACN,CAAA,OAAA,CAAS,CACX,CAAC,CAAA,CAEKU,EAAOX,CAAAA,eAAAA,CAAgB,CAC3B,KAAA,CAAO,OACP,KAAOC,CAAAA,WAAAA,CAAY,KAAK,KACxB,CAAA,WAAA,CAAaA,YAAY,IAAK,CAAA,WAAA,CAC9B,GAAK,CAAA,CAAA,CACL,GAAK,CAAA,GAAA,CACL,KAAM,CACN,CAAA,OAAA,CAAS,CACX,CAAC,CAAA,CAEKW,EAAaC,sBAAuB,CAAA,CACxC,MAAO,YACP,CAAA,KAAA,CAAO,cACP,WACE,CAAA,uLAAA,CACF,QAAS,MACT,CAAA,OAAA,CAAS,CAAC,MAAQ,CAAA,KAAA,CAAO,MAAM,CACjC,CAAC,CAAA,CAEKC,EAAiBC,sBAAuB,CAAA,CAC5C,MAAO,gBACP,CAAA,KAAA,CAAO,kBACP,WAAa,CAAA,yHAAA,CACb,aAAcC,GAAE,CAAA,KAAA,CAAMA,IAAE,MAAO,CAAA,CAC7B,UAAWA,GAAE,CAAA,IAAA,CAAK,CAChB,kCACA,CAAA,qBAAA,CACA,wBACA,CAAA,iBAAA,CACA,YACA,CAAA,KACF,CAAC,CACD,CAAA,QAAA,CAAUA,IAAE,IAAK,CAAA,CACf,4BACA,0BACA,CAAA,2BAAA,CACA,kCACA,iCACA,CAAA,+BACF,CAAC,CACH,CAAC,CAAC,CACJ,CAAC,EC7GKC,IAAAA,EAAAA,CAA4B,CAChCC,CACAC,CAAAA,CAAAA,CACAhB,EACAE,CACAe,CAAAA,CAAAA,GAEAJ,IAAE,MAAO,CAAA,CACP,WAAanB,CAAAA,CAAAA,CAAYqB,CAAgBC,CAAAA,CAAkB,EAAE,MAC7D,CAAA,SAAA,CAAWjB,EAAUC,CAAe,CAAA,CAAE,OACtC,IAAMC,CAAAA,CAAAA,CAAKC,CAAY,CAAA,CAAE,MACzB,CAAA,IAAA,CAAME,EAAKa,CAAW,CAAA,CAAE,OACxB,UAAYR,CAAAA,CAAAA,CAAW,OACvB,cAAgBE,CAAAA,CAAAA,CAAe,MACjC,CAAC,CAAA,CAEGO,GAAyB,CAC7BH,CAAAA,CACAC,EACAhB,CACAE,CAAAA,CAAAA,CACAe,KAEC,CACC,WAAA,CAAavB,CAAYqB,CAAAA,CAAAA,CAAgBC,CAAkB,CAAA,CAAE,IAC7D,SAAWjB,CAAAA,CAAAA,CAAUC,CAAe,CAAE,CAAA,GAAA,CACtC,KAAMC,CAAKC,CAAAA,CAAY,EAAE,GACzB,CAAA,IAAA,CAAME,EAAKa,CAAW,CAAA,CAAE,IACxB,UAAYR,CAAAA,CAAAA,CAAW,IACvB,cAAgBE,CAAAA,CAAAA,CAAe,GACjC,CAAA,EC1BF,IAAMQ,GAA0B,CAC9BJ,CAAAA,CACAC,EACAhB,CACAE,CAAAA,CAAAA,CACAe,EACAG,CAEAP,GAAAA,GAAAA,CAAE,OAAO,CACP,WAAA,CAAanB,EAAYqB,CAAgBC,CAAAA,CAAkB,EAAE,MAC7D,CAAA,SAAA,CAAWjB,CAAUC,CAAAA,CAAe,CAAE,CAAA,MAAA,CACtC,KAAMC,CAAKC,CAAAA,CAAY,EAAE,MACzB,CAAA,IAAA,CAAME,EAAKa,CAAW,CAAA,CAAE,MACxB,CAAA,IAAA,CAAMZ,EAAKe,CAAAA,CAAW,EAAE,MACxB,CAAA,gBAAA,CAAkBd,GAAiB,MACnC,CAAA,eAAA,CAAiBC,GAAgB,MACjC,CAAA,IAAA,CAAMC,EAAK,CAAA,MAAA,CAAO,SAAWa,CAAAA,CAAAA,EAAWA,IAAU,CAAI,CAAA,KAAA,CAAA,CAAYA,CAAM,CACxE,CAAA,UAAA,CAAYZ,EAAW,MACvB,CAAA,cAAA,CAAgBE,EAAe,MACjC,CAAC,EAEGW,EAAuB,CAAA,CAC3BP,EACAC,CACAhB,CAAAA,CAAAA,CACAE,EACAe,CACAG,CAAAA,CAAAA,IAEC,CACC,WAAA,CAAa1B,CAAYq
B,CAAAA,CAAAA,CAAgBC,CAAkB,CAAE,CAAA,GAAA,CAC7D,UAAWjB,CAAUC,CAAAA,CAAe,EAAE,GACtC,CAAA,IAAA,CAAMC,EAAKC,CAAY,CAAA,CAAE,IACzB,IAAME,CAAAA,CAAAA,CAAKa,CAAW,CAAE,CAAA,GAAA,CACxB,KAAMZ,EAAKe,CAAAA,CAAW,CAAE,CAAA,GAAA,CACxB,gBAAkBd,CAAAA,EAAAA,CAAiB,IACnC,eAAiBC,CAAAA,EAAAA,CAAgB,IACjC,IAAMC,CAAAA,EAAAA,CAAK,IACX,UAAYC,CAAAA,CAAAA,CAAW,IACvB,cAAgBE,CAAAA,CAAAA,CAAe,GACjC,CCvDF,ECEA,IAAMY,GAAcC,CAClB3B,EAAAA,eAAAA,CAAgB,CACd,KAAO,CAAA,sBAAA,CACP,MAAO4B,gBAAiB,CAAA,UAAA,CAAW,MACnC,WAAaA,CAAAA,gBAAAA,CAAiB,WAAW,WACzC,CAAA,GAAA,CAAK,EACL,GAAKD,CAAAA,CAAAA,CACL,KAAM,CACN,CAAA,OAAA,CAASA,CACX,CAAC,EDPH,IAAME,GAAkCF,CACtCX,EAAAA,GAAAA,CAAE,OAAO,CACP,UAAA,CAAYU,GAAWC,CAAa,CAAA,CAAE,MACxC,CAAC,CAAA,CAEGG,GAA+BH,CAClC,GAAA,CACC,WAAYD,EAAWC,CAAAA,CAAa,EAAE,GACxC,CAAA,EEDII,IAAAA,CAAAA,CAAyB,CAC7B,IAAA,CAAM,CACJb,CACAC,CAAAA,CAAAA,CACAhB,EACAE,CACAe,CAAAA,CAAAA,IACI,CACJ,GAAKC,CAAAA,EAAAA,CAAuBH,EAAgBC,CAAoBhB,CAAAA,CAAAA,CAAiBE,EAAce,CAAW,CAAA,CAC1G,OAAQH,EAA0BC,CAAAA,CAAAA,CAAgBC,EAAoBhB,CAAiBE,CAAAA,CAAAA,CAAce,CAAW,CAClH,CACA,CAAA,CAAA,EAAA,CAAI,CACFF,CACAC,CAAAA,CAAAA,CACAhB,EACAE,CACAe,CAAAA,CAAAA,CACAG,KACI,CACJ,GAAA,CAAKE,GAAqBP,CAAgBC,CAAAA,CAAAA,CAAoBhB,EAAiBE,CAAce,CAAAA,CAAAA,CAAaG,CAAW,CACrH,CAAA,MAAA,CAAQD,GAAwBJ,CAAgBC,CAAAA,CAAAA,CAAoBhB,CAAiBE,CAAAA,CAAAA,CAAce,CAAaG,CAAAA,CAAW,CAC7H,CACF,CAAA,CAAA,CAEMS,EAA8B,CAClC,IAAA,CAAOL,IAA2B,CAChC,GAAA,CAAKG,EAA4BH,CAAAA,CAAa,CAC9C,CAAA,MAAA,CAAQE,GAA+BF,CAAa,CACtD,EACF,MCpCMM,CAAuBjB,CAAAA,GAAAA,CAAE,KAAK,CAACkB,iBAAAA,CAAmBC,gBAAiBC,oBAAsBC,CAAAA,eAAe,CAAC,CAEzGC,CAAAA,EAAAA,CAAiC,QACjCC,EAA4B,CAAA,UAAA,CAE5BC,CAA0B,CAAA,CAC9B,MAAQL,CAAAA,eAAAA,CACR,KAAMA,eACN,CAAA,SAAA,CAAWG,GACX,IAAMC,CAAAA,EACR,ECTME,IAAAA,CAAAA,CAA+D,CACnEC,mBAAAA,CACAC,qBACAC,uBACAC,CAAAA,2BACF,EAEMC,CAAgC9B,CAAAA,GAAAA,CAAE,KAAK,CAC3C0B,mBAAAA,CACAC,qBACAC,uBACAC,CAAAA,2BACF,CAAC,CAEKE,CAAAA,EAAAA,CAAmE,CACvEL,mBACF,CAAA,CAEMM,GAAoChC,GAAE,CAAA,IAAA,CAAK,CAC/C0B,mBACF,CAAC,CAAA,CAEKO,GAAyE,CAC7EP,mBAAAA,CACAC,oBACF,CAEMO,CAAAA,EAAAA,CAA0ClC,IAAE,IAAK,CAAA,CACrD0B,mBACAC,CAAAA,oBACF,CAAC,CAAA,CAEKQ,EAAuE,CAC3ET,mBAAAA,CACAE,wBACAC,2BACF,CAAA,CAEMO,EAAwCpC,GAAE,CAAA,IAAA,CAAK,CACnD0B,mBAAAA,CACAE,uBACAC,CAAAA,2BACF,CAAC,MC7CKQ,EAAiCrC,CAAAA,GAAAA,CAAE,OAAO,CAC9C,IAAA,CAAMA,IAAE,MAAO,EACjB,CAAC,CAEKsC,CAAAA,EAAAA,CAAiCtC,IAAE,MAAO,CAAA,CAC9C,YAAcA,CAAAA,GAAAA,CAAE,MAAO,CAAA,CACrB,KAAMA,GAAE,CAAA,MAAA,GACR,IAAMA,CAAAA,GAAAA,CAAE,OAAOA,GAAE,CAAA,GAAA,EAAK,CACxB,CAAC,CACH,CAAC,CAAA,CAEKuC,GAA6BvC,GAAE,CAAA,MAAA,CAAO,CAC1C,UAAYA,CAAAA,GAAAA,CAAE,KACZA,CAAAA,GAAAA,CAAE,MAAO,CAAA,CACP,QAASA,GAAE,CAAA,MAAA,CAAO,CAChB,IAAMA,CAAAA,GAAAA,CAAE,QACR,CAAA,KAAA,CAAOA,IAAE,KAAMA,CAAAA,GAAAA,CAAE,MAAM,CAACqC,EAAAA,CAAgCC,EAA8B,CAAC,CAAC,CAC1F,CAAC,CAAA,CAAE,QAAS,EAAA,CACZ,YAActC,CAAAA,GAAAA,CAAE,QAChB,CAAA,KAAA,CAAOA,IAAE,MAAO,EAAA,CAAE,UAClB,CAAA,aAAA,CAAeA,GAAE,CAAA,QAAA,CACfA,GAAE,CAAA,KAAA,CACAA,IAAE,MAAO,CAAA,CACP,SAAUA,GAAE,CAAA,MAAA,GACZ,WAAaA,CAAAA,GAAAA,CAAE,MAAO,EAAA,CACtB,OAASA,CAAAA,GAAAA,CAAE,SAAU,CAAA,QAAA,EACvB,CAAC,CACH,CACF,CACF,CAAC,CACH,CACA,CAAA,cAAA,CAAgBA,IAAE,QAChBA,CAAAA,GAAAA,CAAE,OAAO,CACP,aAAA,CAAeA,IAAE,QACfA,CAAAA,GAAAA,CAAE,KACAA,CAAAA,GAAAA,CAAE,MAAO,CAAA,CACP,SAAUA,GAAE,CAAA,MAAA,GACZ,WAAaA,CAAAA,GAAAA,CAAE,QACjB,CAAC,CACH,CACF,CACF,CAAC,CACH,CAAA,CACA,cAAeA,GAAE,CAAA,MAAA,CAAO,CACtB,gBAAkBA,CAAAA,GAAAA,CAAE,MAAO,EAAA,CAC3B,uBAAyBA,CAAAA,GAAAA,CAAE,QAAS,CAAA,QAAA,GACpC,oBAAsBA,CAAAA,GAAAA,CAAE,QAAS,CAAA,QAAA,GACjC,eAAiBA,CAAAA,GAAAA,CAAE,QACrB,CAAC,EAAE,QAAS,EACd,CAAC,CAGKwC,CAAAA,EAAAA,CAA+BxC,GAAE,CAAA,MAAA,CAAO,CAC5C,IAAA,CAAMA,IAAE,MAAO,EACjB,CAAC,CAEKyC,CA
AAA,EAAAA,CAA+BzC,IAAE,MAAO,CAAA,CAC5C,aAAcA,GAAE,CAAA,MAAA,CAAO,CACrB,IAAMA,CAAAA,GAAAA,CAAE,QACR,CAAA,IAAA,CAAMA,IAAE,MAAOA,CAAAA,GAAAA,CAAE,GAAI,EAAC,CACxB,CAAC,CACH,CAAC,CAAA,CAEK0C,GAA2B1C,GAAE,CAAA,MAAA,CAAO,CACxC,UAAYA,CAAAA,GAAAA,CAAE,MACZA,GAAE,CAAA,MAAA,CAAO,CACP,OAASA,CAAAA,GAAAA,CACN,OAAO,CACN,IAAA,CAAMA,IAAE,MAAO,EAAA,CACf,KAAOA,CAAAA,GAAAA,CAAE,KAAMA,CAAAA,GAAAA,CAAE,MAAM,CAACwC,EAAAA,CAA8BC,EAA4B,CAAC,CAAC,CACtF,CAAC,CAAA,CACA,UACH,CAAA,YAAA,CAAczC,IAAE,MAAO,EAAA,CAAE,UACzB,CAAA,KAAA,CAAOA,IAAE,MAAO,EAAA,CAAE,QAAS,EAAA,CAC3B,aAAeA,CAAAA,GAAAA,CAAE,SACfA,GAAE,CAAA,KAAA,CACAA,IAAE,MAAO,CAAA,CACP,SAAUA,GAAE,CAAA,MAAA,GACZ,WAAaA,CAAAA,GAAAA,CAAE,QACf,CAAA,OAAA,CAASA,IAAE,OAAQ,EAAA,CAAE,UACvB,CAAC,CACH,CACF,CACF,CAAC,CACH,CACA,CAAA,cAAA,CAAgBA,IAAE,QAChBA,CAAAA,GAAAA,CAAE,OAAO,CACP,aAAA,CAAeA,GAAE,CAAA,QAAA,CACfA,GAAE,CAAA,KAAA,CACAA,IAAE,MAAO,CAAA,CACP,SAAUA,GAAE,CAAA,MAAA,GACZ,WAAaA,CAAAA,GAAAA,CAAE,MAAO,EACxB,CAAC,CACH,CACF,CACF,CAAC,CACH,CACA,CAAA,aAAA,CAAeA,IAAE,MAAO,CAAA,CACtB,iBAAkBA,GAAE,CAAA,MAAA,GACpB,uBAAyBA,CAAAA,GAAAA,CAAE,QAAS,CAAA,QAAA,GACpC,oBAAsBA,CAAAA,GAAAA,CAAE,MAAO,EAAA,CAC/B,eAAiBA,CAAAA,GAAAA,CAAE,QACrB,CAAC,EAAE,QAAS,EACd,CAAC,MCvGK2C,EAA4B3C,CAAAA,GAAAA,CAAE,OAAO,CACzC,IAAA,CAAMA,IAAE,MAAO,EAAA,CAAE,GAAI,CAAA,CAAC,CACxB,CAAC,EAGK4C,EAAkC5C,CAAAA,GAAAA,CAAE,OAAO,CAC/C,WAAA,CAAaA,IAAE,MAAO,CAAA,CACpB,UAAWA,GAAE,CAAA,MAAA,GAAS,GAAI,CAAA,CAAC,EAC3B,IAAMA,CAAAA,GAAAA,CAAE,QAAS,CAAA,MAAA,EACnB,CAAC,CACH,CAAC,EAGK6C,EAAoC7C,CAAAA,GAAAA,CAAE,OAAO,CACjD,aAAA,CAAeA,IAAE,MAAO,CAAA,CACtB,IAAMA,CAAAA,GAAAA,CAAE,MAAO,EAAA,CAAE,IAAI,CAAC,CAAA,CACtB,KAAMA,GAAE,CAAA,MAAA,CAAOA,IAAE,MAAO,EAAA,CAAE,GAAI,CAAA,CAAC,CAAC,CAClC,CAAC,CACH,CAAC,EAGK8C,EAAwC9C,CAAAA,GAAAA,CAAE,OAAO,CACrD,iBAAA,CAAmBA,IAAE,MAAO,CAAA,CAC1B,KAAMA,GAAE,CAAA,MAAA,GAAS,GAAI,CAAA,CAAC,EACtB,QAAUA,CAAAA,GAAAA,CAAE,MAAOA,CAAAA,GAAAA,CAAE,MAAO,EAAA,CAAE,IAAI,CAAC,CAAC,CACtC,CAAC,CACH,CAAC,CAGK+C,CAAAA,EAAAA,CAAoB/C,IAAE,MAAO,CAAA,CACjC,KAAMA,GAAE,CAAA,IAAA,CAAK,CAAC,MAAQ,CAAA,OAAA,CAAS,UAAU,CAAC,CAAA,CAC1C,KAAOA,CAAAA,GAAAA,CAAE,KAAMA,CAAAA,GAAAA,CAAE,MAAM,CACrB2C,EAAAA,CACAC,GACAC,EACAC,CAAAA,EACF,CAAC,CAAC,CACJ,CAAC,CAGKE,CAAAA,EAAAA,CAA8BhD,IAAE,MAAO,CAAA,CAC3C,MAAOA,GAAE,CAAA,KAAA,CAAM2C,EAAyB,CAC1C,CAAC,CAGKM,CAAAA,EAAAA,CAAiBjD,GAAE,CAAA,MAAA,CAAO,CAC9B,IAAMA,CAAAA,GAAAA,CAAE,QAAS,CAAA,GAAA,CAAI,CAAC,CACtB,CAAA,WAAA,CAAaA,GAAE,CAAA,MAAA,EAAS,CAAA,GAAA,CAAI,CAAC,CAC7B,CAAA,UAAA,CAAYA,IAAE,GAAI,EACpB,CAAC,CAGKkD,CAAAA,EAAAA,CAAuBlD,GAAE,CAAA,MAAA,CAAO,CACpC,uBAAA,CAAyBA,IAAE,MAAO,CAAA,CAChC,KAAMA,GAAE,CAAA,IAAA,CAAK,CAAC,KAAO,CAAA,MAAA,CAAQ,MAAM,CAAC,CAAA,CACpC,uBAAwBA,GAAE,CAAA,KAAA,CAAMA,IAAE,MAAO,EAAC,EAAE,QAAS,EACvD,CAAC,CACH,CAAC,CAAA,CAGKmD,GAA6BnD,GAAE,CAAA,MAAA,CAAO,CAC1C,aAAeA,CAAAA,GAAAA,CAAE,MAAMA,GAAE,CAAA,MAAA,EAAQ,CAAE,CAAA,QAAA,GACnC,eAAiBA,CAAAA,GAAAA,CAAE,QAAS,CAAA,QAAA,GAC5B,WAAaA,CAAAA,GAAAA,CAAE,MAAO,EAAA,CAAE,QAAS,EAAA,CACjC,KAAMA,GAAE,CAAA,MAAA,GAAS,QAAS,EAAA,CAC1B,KAAMA,GAAE,CAAA,MAAA,GAAS,QAAS,EAAA,CAC1B,gBAAiBA,GAAE,CAAA,MAAA,GAAS,QAAS,EAAA,CACrC,iBAAkBA,GAAE,CAAA,MAAA,EAAS,CAAA,QAAA,EAC7B,CAAA,IAAA,CAAMA,IAAE,MAAO,EAAA,CAAE,UACnB,CAAC,EAGKoD,EAAoBpD,CAAAA,GAAAA,CAAE,MAAO,CAAA,CACjC,KAAOA,CAAAA,GAAAA,CAAE,QAAS,CAAA,GAAA,CAAI,CAAC,CAAE,CAAA,QAAA,GACzB,QAAUA,CAAAA,GAAAA,CAAE,KAAM+C,CAAAA,EAAiB,CACnC,CAAA,iBAAA,CAAmBC,GAA4B,QAAS,EAAA,CACxD,mBAAoBA,EAA4B,CAAA,QAAA,GAChD,gBAAkBG,CAAAA,EAAAA,CAA2B,UAC7C,CAAA,iBAAA,CAAmBA,GAA2B,QAAS,EAAA,CACvD,MAAOnD,GAAE,CAAA,MAAA,CAAO,CACd,qBAAuBA,CAAAA,GAAAA,CAAE,KAAMiD,CAAAA,EAAc,CAC/C,CAAC,EAAE,QAAS,EAAA,CACZ,WAAYC,EAAqB,CAAA,QAAA,GACjC,WAAaA,CAAAA
,EAAAA,CAAqB,UACpC,CAAC,ECbD,IAAMG,EAAkB,CAAA,QAAA,CAClBC,CAAN,CAAA,KAA2F,CAA3F,WACE,EAAA,CAAA,IAAA,CAAS,QAAU,IACnB,CAAA,IAAA,CAAS,KAAOD,EAGhB,CAAA,IAAA,CAAiB,mBAKV,CACH,CAACE,EAAwB,EAAG,CAC1B,MAAOC,EACP,CAAA,YAAA,CAAcC,GACd,WAAaC,CAAAA,EACf,CACA,CAAA,CAACC,EAAwB,EAAG,CAC1B,KAAOC,CAAAA,EAAAA,CACP,aAAcC,EACd,CAAA,WAAA,CAAaC,EACf,CACA,CAAA,CAACC,EAA2B,EAAG,CAC7B,MAAOC,EACP,CAAA,YAAA,CAAcC,GACd,WAAaC,CAAAA,EACf,EACA,CAACC,EAAqB,EAAG,CACvB,KAAOC,CAAAA,EAAAA,CACP,aAAcC,EACd,CAAA,WAAA,CAAaC,EACf,CACA,CAAA,CAACC,EAAsB,EAAG,CACxB,MAAOC,EACP,CAAA,YAAA,CAAcC,GACd,WAAaC,CAAAA,EACf,EACA,CAACC,EAAsB,EAAG,CACxB,KAAA,CAAOC,EACP,CAAA,YAAA,CAAcC,EACd,CAAA,WAAA,CAAaC,EACf,CACA,CAAA,CAACC,EAAyB,EAAG,CAC3B,MAAOC,EACP,CAAA,YAAA,CAAcC,EACd,CAAA,WAAA,CAAaC,EACf,CAAA,CACA,CAACC,EAAmB,EAAG,CACrB,KAAOC,CAAAA,EAAAA,CACP,aAAcC,EACd,CAAA,WAAA,CAAaC,EACf,CAAA,CACA,CAACC,EAAmB,EAAG,CACrB,KAAA,CAAOC,GACP,YAAcC,CAAAA,EAAAA,CACd,YAAaC,EACf,CAAA,CACA,CAACC,EAAyB,EAAG,CAC3B,KAAOC,CAAAA,EAAAA,CACP,aAAcC,EACd,CAAA,WAAA,CAAaC,EACf,CACA,CAAA,CAACC,EAAyB,EAAG,CAC3B,KAAA,CAAOC,GACP,YAAcC,CAAAA,EAAAA,CACd,YAAaC,EACf,CAAA,CACA,CAACC,EAAuB,EAAG,CACzB,KAAOC,CAAAA,EAAAA,CACP,aAAcC,EACd,CAAA,WAAA,CAAaC,EACf,CACJ,CAAA,CAEA,KAAiB,uBAKV,CAAA,CACH,CAACC,EAAyB,EAAG,CAC3B,MAAOC,EACP,CAAA,YAAA,CAAcC,GACd,WAAaC,CAAAA,EACf,EACA,CAACC,EAAyB,EAAG,CAC3B,KAAA,CAAOC,GACP,YAAcC,CAAAA,EAAAA,CACd,YAAaC,EACf,CACJ,GAEA,iBAA8B,EAAA,CAC5B,OAAO,MAAA,CAAO,IAAK,CAAA,IAAA,CAAK,kBAAkB,CAC5C,CAEA,UAAUC,CAAcC,CAAAA,CAAAA,CAAyB,CAC/C,GAAI,EAAED,CAAQ,IAAA,IAAA,CAAK,kBACjB,CAAA,CAAA,MAAM,IAAIE,aAAc,CAAA,CACtB,KAAM,CAAsBF,mBAAAA,EAAAA,CAAI,aAChC,KAAO,CAAA,IAAI,KAAM,CAAA,CAAA,mBAAA,EAAsBA,CAAI,CAAA;AAAA,WACtC,EAAA,IAAA,CAAK,iBAAkB,EAAA,CAAE,IAAK,CAAA,IAAI,CAAC,CAAA,CAAA,CAAG,CAC7C,CAAC,CAGH,CAAA,IAAMG,CAAQ,CAAA,IAAA,CAAK,mBAAmBH,CAAI,CAAA,CAAE,KACtCI,CAAAA,CAAAA,CAAeC,CAAA,CAAA,EAAA,CAChBJ,CAECK,CAAAA,CAAAA,CAAAA,CAAgB,IAAK,CAAA,kBAAA,CAAmBN,CAAI,CAAA,CAAE,YAAa,CAAA,KAAA,CAAMI,CAAY,CACnF,CAAA,OAAO,IAAID,CAAAA,CAAMG,CAAa,CAChC,CAEA,eAAA,CAAgBN,CAAmC,CAAA,CACjD,GAAI,EAAEA,CAAQ,IAAA,IAAA,CAAK,oBACjB,MAAM,IAAIE,aAAc,CAAA,CACtB,IAAM,CAAA,CAAA,mBAAA,EAAsBF,CAAI,CAAA,UAAA,CAAA,CAChC,KAAO,CAAA,IAAI,KAAM,CAAA,CAAA,mBAAA,EAAsBA,CAAI,CAAA;AAAA,WAAA,EACtC,IAAK,CAAA,iBAAA,EAAoB,CAAA,IAAA,CAAK,IAAI,CAAC,CAAA,CAAA,CAAG,CAC7C,CAAC,EAEH,OAAO,IAAA,CAAK,kBAAmBA,CAAAA,CAAI,EAAE,WACvC,CAEA,gBAAwD,EAAA,CACtD,OAAO,MAAA,CAAO,IAAK,CAAA,IAAA,CAAK,kBAAkB,CAAE,CAAA,MAAA,CAAO,CAACO,CAAAA,CAAKC,KACvDD,CAAIC,CAAAA,CAAG,CAAI,CAAA,IAAA,CAAK,mBAAmBA,CAAG,CAAA,CAAE,WACjCD,CAAAA,CAAAA,CAAAA,CACN,EAAyC,CAC9C,CAEA,wBAAmC,CACjC,OAAO,MAAO,CAAA,IAAA,CAAK,KAAK,uBAAuB,CACjD,CAEA,cAAA,CAAeP,EAAcC,CAA8B,CAAA,CACzD,GAAI,EAAED,CAAQ,IAAA,IAAA,CAAK,uBACjB,CAAA,CAAA,MAAM,IAAIE,aAAc,CAAA,CACtB,IAAM,CAAA,CAAA,wBAAA,EAA2BF,CAAI,CACrC,UAAA,CAAA,CAAA,KAAA,CAAO,IAAI,KAAA,CAAM,2BAA2BA,CAAI,CAAA;AAAA,WAC3C,EAAA,IAAA,CAAK,sBAAuB,EAAA,CAAE,IAAK,CAAA,IAAI,CAAC,CAAA,CAAA,CAAG,CAClD,CAAC,CAGH,CAAA,IAAMG,CAAQ,CAAA,IAAA,CAAK,wBAAwBH,CAAI,CAAA,CAAE,KAC3CI,CAAAA,CAAAA,CAAeC,CAAA,CAAA,EAAA,CAChBJ,CAECK,CAAAA,CAAAA,CAAAA,CAAgB,IAAK,CAAA,uBAAA,CAAwBN,CAAI,CAAA,CAAE,YAAa,CAAA,KAAA,CAAMI,CAAY,CACxF,CAAA,OAAO,IAAID,CAAAA,CAAMG,CAAa,CAChC,CAEA,oBAAA,CAAqBN,CAAwC,CAAA,CAC3D,GAAI,EAAEA,CAAQ,IAAA,IAAA,CAAK,yBACjB,MAAM,IAAIE,aAAc,CAAA,CACtB,IAAM,CAAA,CAAA,wBAAA,EAA2BF,CAAI,CAAA,UAAA,CAAA,CACrC,KAAO,CAAA,IAAI,KAAM,CAAA,CAAA,wBAAA,EAA2BA,CAAI,CAAA;AAAA,WAC3C,EAAA,IAAA,CAAK,wBAAyB,CAAA,IAAA,CAAK,IAAI,CAAC,CAAA,CAAA,CAAG,CAClD,CAAC,CAAA,CAEH,OAAO,IAAK,CAAA,uBAAA,CAAwBA,CAAI,CAAE,CAAA,WAC5C,CAEA,qBAAkE,EAAA,CAChE,OAAO,MAAO,CAAA,IAAA,CAAK,KAAK,uBAAuB,C
AAA,CAAE,OAAO,CAACO,CAAAA,CAAKC,KAC5DD,CAAIC,CAAAA,CAAG,EAAI,IAAK,CAAA,uBAAA,CAAwBA,CAAG,CAAE,CAAA,WAAA,CACtCD,GACN,EAA8C,CACnD,CACF,EAxKMhE,CAGY,CAAA,OAAA,CAAkB,kDCVpC,CAAA,IAAMkE,EAAuBxH,GAAE,CAAA,MAAA,CAAO,CACpC,MAAQA,CAAAA,GAAAA,CAAE,QACV,CAAA,OAAA,CAASA,IAAE,MAAO,EAAA,CAAE,KAAM,CAAA,QAAA,GAC1B,eAAiBA,CAAAA,GAAAA,CAAE,QAAS,CAAA,GAAA,GAAM,QAAS,EAAA,CAC3C,cAAeA,GAAE,CAAA,MAAA,GAAS,GAAI,EAAA,CAAE,UAClC,CAAC,EAGKyH,CAAN,CAAA,KAAgE,CAS9D,WAAYC,CAAAA,CAAAA,CAAkCV,EAAmC,CARjF,IAAA,CAAS,QAAU,IA5ErB,CAAA,IAAAW,CAqFI,CAAA,IAAMN,CAAgBG,CAAAA,CAAAA,CAAqB,MAAMR,CAAO,CAAA,CACxD,KAAK,WAAcU,CAAAA,CAAAA,CACnB,KAAK,MAASL,CAAAA,CAAAA,CAAc,OAC5B,IAAK,CAAA,OAAA,CAAUO,yBAAwBD,CAAAN,CAAAA,CAAAA,CAAc,UAAd,IAAAM,CAAAA,CAAAA,CAAyBrE,EAAO,OAAO,CAAA,CAC9E,KAAK,eAAkBsE,CAAAA,uBAAAA,CACrBP,EAAc,eACd,EAAA,CAAA,EAAG,KAAK,OAAO,CAAA,QAAA,EAAW,KAAK,WAAY,CAAA,IAAI,wBAAwB,IAAK,CAAA,MAAM,EACpF,CACA,CAAA,IAAA,CAAK,cAAgBO,uBACnBP,CAAAA,CAAAA,CAAc,eACd,CAAG,EAAA,IAAA,CAAK,OAAO,CAAA,QAAA,EAAW,IAAK,CAAA,WAAA,CAAY,IAAI,CAA8B,2BAAA,EAAA,IAAA,CAAK,MAAM,CAC1F,CAAA,EACF,CAEA,iBAA6B,EAAA,CAC3B,OAAO,IAAK,CAAA,OACd,CAEA,iBAAiC,EAAA,CAC/B,OAAO,CACL,cAAA,CAAgB,mBAChB,MAAQ,CAAA,YACV,CACF,CAEA,gBAAA,EAA+B,CAC7B,OAAO,EACT,CAGA,aAAA,CAAcQ,EAAyE,CACrF,OAAO,CAAE,WAAa,CAAA,CAAA,CAAA,CAAO,QAAS,CAAE,CAC1C,CAGA,aAAcC,CAAAA,CAAAA,CAAiC,CAC7C,OAAOA,CAAAA,CAAS,MAAO,CAAA,CAACR,CAAKS,CAAAA,CAAAA,GACpBT,EAAMS,CAAQ,CAAA,OAAA,CAAQ,IAAKC,CAAaA,EAAAA,CAAAA,CAAQ,WAAa,MAASA,CAAAA,CAAAA,CAAQ,MAAQ,EAAG,CAAA,CAAE,KAAK,GAAG,CAAA,CAAE,OAC3G,CAAC,CACN,CAGA,qBAAsBC,CAAAA,CAAAA,CAKpB,CACA,IAAMC,CAAAA,CAAc9E,GAAkB,SAAU6E,CAAAA,CAAO,EACvD,GAAI,CAACC,EAAY,OACf,CAAA,MAAM,IAAIC,wBAAyB,CAAA,CAAE,KAAM,uBAAyB,CAAA,KAAA,CAAOD,EAAY,KAAM,CAAC,EAGhG,IAAME,CAAAA,CAAgBF,CAAY,CAAA,IAAA,CAE5BG,CAAYD,CAAAA,CAAAA,CAAc,MAEhC,GAAIA,CAAAA,CAAc,oBAAsBA,CAAc,CAAA,iBAAA,CACpD,MAAM,IAAID,wBAAAA,CAAyB,CACjC,IAAM,CAAA,CAAA,mCAAA,EAAsC,KAAK,WAAY,CAAA,IAAI,IACjE,KAAO,CAAA,IAAI,MAAM,+EAA+E,CAClG,CAAC,CAGH,CAAA,GAAIC,EAAc,iBAAqBA,EAAAA,CAAAA,CAAc,iBACnD,MAAM,IAAID,yBAAyB,CACjC,IAAA,CAAM,sCAAsC,IAAK,CAAA,WAAA,CAAY,IAAI,CACjE,CAAA,CAAA,CAAA,KAAA,CAAO,IAAI,KAAM,CAAA,6EAA6E,CAChG,CAAC,CAAA,CAGH,GAAIC,CAAAA,CAAc,WAAeA,EAAAA,CAAAA,CAAc,WAC7C,MAAM,IAAID,yBAAyB,CACjC,IAAA,CAAM,sCAAsC,IAAK,CAAA,WAAA,CAAY,IAAI,CACjE,CAAA,CAAA,CAAA,KAAA,CAAO,IAAI,KAAM,CAAA,iEAAiE,CACpF,CAAC,CAAA,CAGH,IAAMG,CAAoBF,CAAAA,CAAAA,CAAc,oBAAsBA,CAAc,CAAA,iBAAA,CACtEG,EAAmBH,CAAc,CAAA,iBAAA,EAAqBA,EAAc,gBACpEI,CAAAA,CAAAA,CAAaJ,EAAc,WAAeA,EAAAA,CAAAA,CAAc,WAE9D,GAAII,CAAAA,GAAe,CAACJ,CAAc,CAAA,KAAA,EAASA,EAAc,KAAM,CAAA,qBAAA,CAAsB,SAAW,CAC9F,CAAA,CAAA,MAAM,IAAID,wBAAAA,CAAyB,CACjC,IAAA,CAAM,sCAAsC,IAAK,CAAA,WAAA,CAAY,IAAI,CACjE,CAAA,CAAA,CAAA,KAAA,CAAO,IAAI,KAAM,CAAA,sDAAsD,CACzE,CAAC,CAAA,CAGH,IAAMM,CAAsB,CAAA,GAExBD,CACEA,GAAAA,CAAAA,CAAW,wBAAwB,IAAS,GAAA,KAAA,EAC1CA,EAAW,uBAAwB,CAAA,sBAAA,EAA0BA,EAAW,uBAAwB,CAAA,sBAAA,CAAuB,SAAW,CACpIC,CAAAA,CAAAA,CAAQ,WAAaD,CAAW,CAAA,uBAAA,CAAwB,uBAAuB,CAAC,CAAA,CAKlFC,EAAQ,UAAaD,CAAAA,CAAAA,CAAW,wBAAwB,IAAK,CAAA,WAAA,IAIjEC,CAAQ,CAAA,IAAA,CAAOF,CAAA,EAAA,IAAA,CAAA,KAAA,CAAA,CAAAA,CAAkB,CAAA,IAAA,CACjCE,EAAQ,SAAYF,CAAAA,CAAAA,EAAA,YAAAA,CAAkB,CAAA,eAAA,CACtCE,EAAQ,WAAcF,CAAAA,CAAAA,EAAA,YAAAA,CAAkB,CAAA,WAAA,CACxCE,EAAQ,IAAOF,CAAAA,CAAAA,EAAA,YAAAA,CAAkB,CAAA,IAAA,CACjCE,EAAQ,eAAkBF,CAAAA,CAAAA,EAAA,YAAAA,CAAkB,CAAA,eAAA,CAC5CE,EAAQ,gBAAmBF,CAAAA,CAAAA,EAAA,YAAAA,CAAkB,CAAA,gBAAA,CAC7CE,EAAQ,IAAOF,CAAAA,CAAAA,EAAA,YAAAA,CAAkB,CAAA,aAAA,KAC3BG,CAASC,CAAAA,MAAAA,GAAS,KAAMC,CAAAA,sBAAAA,CAAuBH,CAAO,CAAC,CAAA,CAEvDX,CAA0B,CAAA,EAC1Be,CAEFP,CAAAA,EACFA,EAAkB,KAAM,CAAA,OAAA,CAASQ,GAAS,CACxChB,CAAAA,CAA
S,KAAK,CAAE,IAAA,CAAM5G,kBAAmB,OAAS,CAAA,CAAC,CAAE,QAAUQ,CAAAA,mBAAAA,CAAqB,MAAOoH,CAAK,CAAA,IAAK,CAAC,CAAE,CAAC,EAC3G,CAAC,CAAA,CAGHV,EAAc,QAAS,CAAA,OAAA,CAASL,GAAY,CAC1C,IAAMgB,EAAOhB,CAAQ,CAAA,IAAA,CACrB,OAAQgB,CAAM,EACZ,IAAK,MAAQ,CAAA,CAEX,IAAMC,CADUjB,CAAAA,CAAAA,CAAQ,KACC,CAAA,GAAA,CAAKkB,CACxB,EAAA,MAAA,GAAUA,EACL,CAAE,QAAA,CAAUvH,oBAAqB,KAAOuH,CAAAA,CAAAA,CAAE,IAAK,CAE/C,CAAA,CACL,SAAUtH,oBACV,CAAA,MAAA,CAAQ,OACR,KAAO,CAAA,CACL,KAAMuH,6BACN,CAAA,MAAA,CAAQD,EAAE,WAAY,CAAA,IAAA,CACtB,WAAYA,CAAE,CAAA,WAAA,CAAY,UAAU,KAAM,CAAA,GAAG,EAAE,CAAC,CAClD,CACF,CAEH,CAAA,CACDnB,EAAS,IAAK,CAAA,CAAE,KAAMiB,CAAM,CAAA,OAAA,CAASC,CAAS,CAAC,EACjD,CACA,MAEA,IAAK,OAAS,CAAA,CAEZ,IAAMA,CAAAA,CADUjB,EAAQ,KACC,CAAA,GAAA,CAAI,CAACkB,CAAGE,CAAAA,CAAAA,GAC3B,SAAUF,CACL,CAAA,CAAE,SAAUvH,mBAAqB,CAAA,KAAA,CAAOuH,EAAE,IAAK,CAAA,CAE/C,CACL,QAAUrH,CAAAA,uBAAAA,CACV,GAAIuH,CAAM,CAAA,QAAA,GACV,KAAOA,CAAAA,CAAAA,CACP,KAAMF,CAAE,CAAA,aAAA,CAAc,KACtB,SAAW,CAAA,IAAA,CAAK,UAAUA,CAAE,CAAA,aAAA,CAAc,IAAI,CAChD,CAEH,EACDnB,CAAS,CAAA,IAAA,CAAK,CAAE,IAAM1G,CAAAA,oBAAAA,CAAsB,QAAS4H,CAAS,CAAC,EACjE,CACA,MAEA,IAAK,WAAY,CAEf,IAAMA,EADUjB,CAAQ,CAAA,KAAA,CACC,IAAI,CAACkB,CAAAA,CAAGE,KACxB,CACL,QAAA,CAAUtH,4BACV,EAAIsH,CAAAA,CAAAA,CAAM,UACV,CAAA,KAAA,CAAOA,EACP,IAAMF,CAAAA,CAAAA,CAAE,kBAAkB,IAC1B,CAAA,IAAA,CAAM,KAAK,SAAUA,CAAAA,CAAAA,CAAE,kBAAkB,QAAQ,CACnD,EACD,CACDnB,CAAAA,CAAAA,CAAS,KAAK,CAAE,IAAA,CAAMzG,gBAAiB,OAAS2H,CAAAA,CAAS,CAAC,EAC5D,CACA,MAEA,QACE,MAAM,IAAII,oBAAAA,CAAqB,CAC7B,IAAA,CAAM,sCAAsC,IAAK,CAAA,WAAA,CAAY,IAAI,CACjE,CAAA,CAAA,KAAA,CAAO,IAAI,KAAM,CAAA,CAAA,QAAA,EAAWrB,EAAQ,IAAI,CAAA,+BAAA,EAAkC,KAAK,WAAY,CAAA,IAAI,EAAE,CACnG,CAAC,CAEL,CACF,CAAC,EAED,IAAMsB,CAAAA,CAAoB,EAC1B,CAAA,OAAIjB,EAAc,KAChBA,EAAAA,CAAAA,CAAc,MAAM,qBAAsB,CAAA,OAAA,CAASkB,GAA6B,CAC9ED,CAAAA,CAAM,KAAK,CACT,IAAA,CAAM,WACN,UAAY,CAAA,CACV,OAAQ,CACN,IAAA,CAAMC,CAAK,CAAA,IAAA,CACX,WAAaA,CAAAA,CAAAA,CAAK,YAClB,UAAYA,CAAAA,CAAAA,CAAK,UACnB,CACF,CACF,CAAC,EACH,CAAC,EAGI,CACL,SAAA,CAAAjB,EACA,MAAAK,CAAAA,CAAAA,CACA,SAAAZ,CACA,CAAA,KAAA,CAAOuB,EAAM,MAAS,CAAA,CAAA,CAAIA,EAAQ,KACpC,CAAA,CACF,CAGA,eAAgBX,CAAAA,CAAAA,CAAoBZ,EAA0BuB,CAAgC,CAAA,CAC5F,IAAME,CAAcb,CAAAA,CAAAA,CAAO,WAC3B,OAAOA,CAAAA,CAAO,WAEd,IAAMc,CAAAA,CAAgB,KAAK,WAAY,CAAA,MAAA,CAAO,OAAO,SAAUd,CAAAA,CAAM,CACrE,CAAA,GAAI,CAACc,CAAAA,CAAc,QACjB,MAAM,IAAIC,mBAAmB,CAC3B,IAAA,CAAM,+BAA+B,IAAK,CAAA,WAAA,CAAY,IAAI,CAC1D,CAAA,CAAA,CAAA,KAAA,CAAOD,EAAc,KACvB,CAAC,EAGH,IAAME,CAAAA,CAAeF,EAAc,IAEnC,CAAA,MAAA,CAAO,IAAKE,CAAAA,CAAY,CAAE,CAAA,OAAA,CAASnC,GAAQ,CACzC,GAAI,EAAEA,CAAO,IAAA,IAAA,CAAK,YAAY,MAAO,CAAA,GAAA,CAAA,CACnC,MAAM,IAAIkC,kBAAAA,CAAmB,CAC3B,IAAM,CAAA,CAAA,4BAAA,EAA+B,KAAK,WAAY,CAAA,IAAI,IAC1D,KAAO,CAAA,IAAI,KAAM,CAAA,CAAA,sBAAA,EAAyBlC,CAAG,CAAA;AAAA,8BACvB,EAAA,MAAA,CAAO,KAAK,IAAK,CAAA,WAAA,CAAY,OAAO,GAAG,CAAA,CAAE,KAAK,IAAI,CAAC,GAAG,CAC9E,CAAC,CAEL,CAAC,CAAA,CAED,IAAMoC,CAAoB,CAAA,MAAA,CAAO,KAAKD,CAAY,CAAA,CAAE,OAClD,CAACpC,CAAAA,CAAKC,IAAQ,CACZ,IAAMqC,EAAM,IAAK,CAAA,WAAA,CAAY,OAAO,GAAIrC,CAAAA,CAAG,EACrCsC,CAAWD,CAAAA,CAAAA,CAAI,MACfE,CAAcJ,CAAAA,CAAAA,CAA4BnC,CAAG,CAEnD,CAAA,OAAIsC,IAAa,iBAAqBD,EAAAA,CAAAA,CAAI,OAAS,OAAWE,EAAAA,CAAAA,GAAe,EAC3ExC,CAAIuC,CAAAA,CAAQ,EAAID,CAAI,CAAA,GAAA,CAEpBtC,EAAIuC,CAAQ,CAAA,CAAIC,EAGXxC,CACT,CAAA,CACA,EACF,CAAA,CAEMxH,EAAiB6J,CAAkB,CAAA,cAAA,CACzC,OAAOA,CAAkB,CAAA,cAAA,CAEzB,IAAInB,CACJ,CAAA,GAAIe,IAAgB,KAAW,CAAA,CAAA,CAC7B,IAAM3J,CAAa2J,CAAAA,CAAAA,CACnB,GAAI,CAACF,CAAAA,EAAUA,GAASA,CAAM,CAAA,MAAA,GAAW,EACvC,MAAM,IAAII,mBAAmB,CAC3B,IAAA,CAAM,+BAA+B,IAAK,CAAA,WAAA,CAAY,IAAI,CAC1D,CAAA,CAAA,CAAA,KAAA,CAAO,IAAI,KAAM,CAAA,qDAAqD,CACxE,CAAC,CAAA,CACI,GAAIJ,CAAS
A,EAAAA,CAAAA,CAAM,OAAS,CAAG,CAAA,CACpC,IAAMU,CAAmB,CAAA,IAAA,CAAK,YAAY,MAAO,CAAA,GAAA,CAAI,WACrD,GAAKA,CAAAA,CAAiB,QAAQ,QAASnK,CAAAA,CAAU,EAmB3CA,CAAe,GAAA,KAAA,CACjB4I,EAAa,CACX,uBAAA,CAAyB,CACvB,IAAM,CAAA,KAAA,CACN,uBAAwBa,CAAM,CAAA,GAAA,CAAKC,GAASA,CAAK,CAAA,UAAA,CAAW,OAAO,IAAI,CACzE,CACF,CAEAd,CAAAA,CAAAA,CAAa,CACX,uBAAyB,CAAA,CACvB,KAAM5I,CAAW,CAAA,WAAA,EACnB,CACF,CAAA,CAAA,KAAA,GA7BEyJ,EAAM,GAAKC,CAAAA,CAAAA,EAASA,EAAK,UAAW,CAAA,MAAA,CAAO,IAAI,CAAE,CAAA,QAAA,CAAS1J,CAAU,CAEtE4I,CAAAA,CAAAA,CAAa,CACX,uBAAyB,CAAA,CACvB,KAAM,KACN,CAAA,sBAAA,CAAwB,CAAC5I,CAAU,CACrC,CACF,CAEA,CAAA,KAAA,MAAM,IAAI6J,kBAAmB,CAAA,CAC3B,KAAM,CAA+B,4BAAA,EAAA,IAAA,CAAK,YAAY,IAAI,CAAA,CAAA,CAAA,CAC1D,MAAO,IAAI,KAAA,CAAM,iBAAiB7J,CAAU,CAAA;AAAA,wBAChCmK,EAAAA,CAAAA,CAAiB,OAAQ,CAAA,IAAA,CAAK,IAAI,CAAC,CAAG,CAAA,CAAA,CACpD,CAAC,CAmBP,CACF,CAsBA,OAAO3C,CAAAA,CAAAA,CAAAA,CAAA,CACL,iBAAmBuC,CAAAA,CAAAA,CAAAA,CACfnB,CAAa,CAAA,CAAE,WAAaA,CAAAA,CAAW,CAAI,CAAA,EAC3C1I,CAAAA,CAAAA,CAAAA,CAAiB,CAAE,eAAA,CAAiBA,CAAe,CAAA,CAAI,EAE/D,CAAA,CAEA,iBAAkBgI,CAAAA,CAAAA,CAAqC,CACrD,GAAI,CAACA,CAAAA,EAAaA,CAAYA,EAAAA,CAAAA,CAAS,MAAW,GAAA,CAAA,CAChD,OAAO,CAAE,SAAU,EAAG,CAGxB,CAAA,IAAMkC,CAAiBlC,CAAAA,CAAAA,CAAS,GAAKC,CAAAA,CAAAA,EAAY,CAC/C,IAAMkC,CAAgBC,CAAAA,OAAAA,EAAU,CAAA,SAAA,CAAUnC,CAAO,CACjD,CAAA,GAAI,CAACkC,CAAAA,CAAc,OACjB,CAAA,MAAM,IAAIb,oBAAAA,CAAqB,CAAE,IAAA,CAAM,kBAAoB,CAAA,KAAA,CAAOa,CAAc,CAAA,KAAM,CAAC,CAEzF,CAAA,OAAOA,CAAc,CAAA,IACvB,CAAC,CAAA,CAEDD,CAAe,CAAA,OAAA,CAASjC,CAAY,EAAA,CAClCA,CAAQ,CAAA,OAAA,CAAQ,OAASC,CAAAA,CAAAA,EAAY,CACnC,GAAI,CAAC,IAAK,CAAA,WAAA,CAAY,UAAW,CAAA,QAAA,CAASA,CAAQ,CAAA,QAAQ,CACxD,CAAA,MAAM,IAAIoB,oBAAAA,CAAqB,CAC7B,IAAA,CAAM,CAAwC,qCAAA,EAAA,IAAA,CAAK,WAAY,CAAA,IAAI,CACnE,CAAA,CAAA,CAAA,KAAA,CAAO,IAAI,KAAA,CAAM,CAAY,SAAA,EAAA,IAAA,CAAK,WAAY,CAAA,IAAI,CAAkCpB,+BAAAA,EAAAA,CAAAA,CAAQ,QAAQ,CAAA;AAAA,sCACxE,EAAA,IAAA,CAAK,YAAY,UAAW,CAAA,IAAA,CAAK,IAAI,CAAC,CAAA,CAAA,CAAG,CACvE,CAAC,CAEL,CAAC,EACH,CAAC,CAAA,CAEDgC,EAAe,OAASjC,CAAAA,CAAAA,EAAY,CAClC,GAAI,CAAC,MAAO,CAAA,IAAA,CAAK,IAAK,CAAA,WAAA,CAAY,KAAK,CAAE,CAAA,QAAA,CAASA,EAAQ,IAAI,CAAA,CAC5D,MAAM,IAAIqB,oBAAAA,CAAqB,CAC7B,IAAA,CAAM,CAAwC,qCAAA,EAAA,IAAA,CAAK,YAAY,IAAI,CAAA,CAAA,CAAA,CACnE,KAAO,CAAA,IAAI,KAAM,CAAA,CAAA,SAAA,EAAY,KAAK,WAAY,CAAA,IAAI,CAA8BrB,2BAAAA,EAAAA,CAAAA,CAAQ,IAAI,CAAA;AAAA,+BAAA,EACrE,OAAO,IAAK,CAAA,IAAA,CAAK,YAAY,KAAK,CAAA,CAAE,KAAK,IAAI,CAAC,GAAG,CAC1E,CAAC,CAEL,CAAC,CAAA,CAED,IAAIO,CAAqD,CAAA,CAAE,MAAO,EAAG,EAC/D6B,CAA6C,CAAA,GAgHnD,GA9GAH,CAAAA,CAAe,QAASjC,CAAY,EAAA,CAClC,OAAQA,CAAQ,CAAA,IAAA,EACd,KAAK7G,iBAAAA,CACH6G,EAAQ,OAAQ,CAAA,OAAA,CAASC,GAAY,CACnC,GAAIA,EAAQ,QAAatG,GAAAA,mBAAAA,CACvB4G,CAAkB,CAAA,KAAA,CAAM,KAAK,CAAE,IAAA,CAAMN,EAAQ,KAAM,CAAC,OAE9C,MAAA,IAAIoB,qBAAqB,CAC7B,IAAA,CAAM,iEAAiE,IAAK,CAAA,WAAA,CAAY,IAAI,CAC5F,CAAA,CAAA,KAAA,CAAO,IAAI,KAAM,CAAA,CAAA,QAAA,EAAWrB,EAAQ,IAAI,CAAA,uCAAA,EAA0CC,EAAQ,QAAQ,CAAA,CAAA,CAAG,CACvG,CAAC,CAEL,CAAC,CAEH,CAAA,MAEA,KAAK5G,oBAAsB,CAAA,CACzB,IAAMgJ,CAAmD,CAAA,GACzDrC,CAAQ,CAAA,OAAA,CAAQ,QAASC,CAAY,EAAA,CACnC,GAAIA,CAAAA,CAAQ,WAAatG,mBACvB0I,CAAAA,CAAAA,CAAiB,KAAK,CAAE,IAAA,CAAMpC,EAAQ,KAAM,CAAC,UACpCA,CAAQ,CAAA,QAAA,GAAapG,wBAC9BwI,CAAiB,CAAA,IAAA,CAAK,CACpB,aAAe,CAAA,CACb,KAAMpC,CAAQ,CAAA,IAAA,CACd,KAAM,IAAK,CAAA,KAAA,CAAMA,EAAQ,SAAS,CACpC,CACF,CAAC,CAAA,CAAA,WAEK,IAAIoB,oBAAAA,CAAqB,CAC7B,IAAM,CAAA,CAAA,8DAAA,EAAiE,KAAK,WAAY,CAAA,IAAI,GAC5F,KAAO,CAAA,IAAI,MAAM,CAAWrB,QAAAA,EAAAA,CAAAA,CAAQ,IAAI,CAAA,uCAAA,EAA0CC,EAAQ,QAAQ,CAAA,CAAA,CAAG,CACvG,CAAC,CAEL,CAAC,CACDmC,CAAAA,CAAAA,CAAkB,KAAK,CACrB,IAAA,CAAM,KAAK,WAAY,CAAA,KAAA,CAAMpC,EAAQ,IAAI,CAAA,CACzC,MAAOqC,CACT,CAAC,EACH,C
ACA,MAEA,KAAKjJ,eAAiB,CAAA,CACpB,IAAMkJ,CAA8C,CAAA,GACpDtC,CAAQ,CAAA,OAAA,CAAQ,QAASC,CAAY,EAAA,CACnC,GAAIA,CAAQ,CAAA,QAAA,GAAatG,oBACvB2I,CAAY,CAAA,IAAA,CAAK,CAAE,IAAMrC,CAAAA,CAAAA,CAAQ,KAAM,CAAC,UAC/BA,CAAQ,CAAA,QAAA,GAAarG,sBAC9B,GAAIqG,CAAAA,CAAQ,MAAM,IAAS,GAAA,QAAA,CACzBqC,EAAY,IAAK,CAAA,CACf,YAAa,CACX,SAAA,CAAWrC,EAAQ,KAAM,CAAA,UAAA,CACzB,KAAMA,CAAQ,CAAA,KAAA,CAAM,MACtB,CACF,CAAC,UACQA,CAAQ,CAAA,KAAA,CAAM,OAAS,KAEhC,CAAA,MAAM,IAAIoB,oBAAqB,CAAA,CAC7B,KAAM,CAA0C,uCAAA,EAAA,IAAA,CAAK,YAAY,IAAI,CAAA,CAAA,CACrE,MAAO,IAAI,KAAA,CAAM,WAAW,IAAK,CAAA,WAAA,CAAY,IAAI,CAAA,wCAAA,EAA2CpB,EAAQ,KAAM,CAAA,IAAI,GAAG,CACnH,CAAC,OAGG,MAAA,IAAIoB,qBAAqB,CAC7B,IAAA,CAAM,iEAAiE,IAAK,CAAA,WAAA,CAAY,IAAI,CAC5F,CAAA,CAAA,KAAA,CAAO,IAAI,KAAM,CAAA,CAAA,QAAA,EAAWrB,EAAQ,IAAI,CAAA,uCAAA,EAA0CC,EAAQ,QAAQ,CAAA,CAAA,CAAG,CACvG,CAAC,CAEL,CAAC,CACDmC,CAAAA,CAAAA,CAAkB,KAAK,CACrB,IAAA,CAAM,KAAK,WAAY,CAAA,KAAA,CAAMpC,EAAQ,IAAI,CAAA,CACzC,MAAOsC,CACT,CAAC,EACH,CACA,MAEA,KAAKhJ,eAAAA,CAAiB,CACpB,IAAMiJ,CAAAA,CAAmE,EACzEvC,CAAAA,CAAAA,CAAQ,QAAQ,OAASC,CAAAA,CAAAA,EAAY,CACnC,GAAIA,CAAAA,CAAQ,WAAanG,2BACvByI,CAAAA,CAAAA,CAAoB,KAAK,CACvB,iBAAA,CAAmB,CACjB,IAAMtC,CAAAA,CAAAA,CAAQ,KACd,QAAU,CAAA,IAAA,CAAK,MAAMA,CAAQ,CAAA,IAAI,CACnC,CACF,CAAC,OAEK,MAAA,IAAIoB,qBAAqB,CAC7B,IAAA,CAAM,iEAAiE,IAAK,CAAA,WAAA,CAAY,IAAI,CAC5F,CAAA,CAAA,KAAA,CAAO,IAAI,KAAM,CAAA,CAAA,QAAA,EAAWrB,EAAQ,IAAI,CAAA,uCAAA,EAA0CC,EAAQ,QAAQ,CAAA,CAAA,CAAG,CACvG,CAAC,CAEL,CAAC,CACDmC,CAAAA,CAAAA,CAAkB,KAAK,CACrB,IAAA,CAAM,KAAK,WAAY,CAAA,KAAA,CAAMpC,EAAQ,IAAI,CAAA,CACzC,MAAOuC,CACT,CAAC,EACH,CACA,MAEA,QACE,MAAM,IAAIlB,qBAAqB,CAC7B,IAAA,CAAM,sCAAsC,IAAK,CAAA,WAAA,CAAY,IAAI,CACjE,CAAA,CAAA,KAAA,CAAO,IAAI,KAAM,CAAA,CAAA,QAAA,EAAWrB,EAAQ,IAAI,CAAA;AAAA,iCAAA,EACjB,MAAO,CAAA,IAAA,CAAK,IAAK,CAAA,WAAA,CAAY,KAAK,CAAA,CAAE,IAAK,CAAA,IAAI,CAAC,CAAA,CAAA,CAAG,CAC1E,CAAC,CAEL,CACF,CAAC,CAEGoC,CAAAA,CAAAA,CAAkB,CAAC,CAAA,CAAE,IAAS,GAAA,IAAA,CAAK,YAAY,KAAMhJ,CAAAA,eAAe,CACtE,CAAA,MAAM,IAAIiI,oBAAAA,CAAqB,CAC7B,IAAA,CAAM,sCAAsC,IAAK,CAAA,WAAA,CAAY,IAAI,CAAA,CAAA,CACjE,KAAO,CAAA,IAAI,KAAM,CAAA,CAAA,SAAA,EAAY,IAAK,CAAA,WAAA,CAAY,IAAI,CAAA,wCAAA,CAA0C,CAC9F,CAAC,CAGH,CAAA,IAAMmB,EAAwBxB,CACxBA,EAAAA,CAAAA,GAAS,IAAK,CAAA,WAAA,CAAY,KAAM5H,CAAAA,eAAe,CAAK4H,EAAAA,CAAAA,GAAS,KAAK,WAAY,CAAA,KAAA,CAAM1H,eAAe,CAAA,CAC9F,CAAC,IAAA,CAAK,WAAY,CAAA,KAAA,CAAMD,oBAAoB,CAAW,CAAA,CAEzD,CAAC,IAAA,CAAK,WAAY,CAAA,KAAA,CAAMD,eAAe,CAAA,CAAa,IAAK,CAAA,WAAA,CAAY,KAAME,CAAAA,eAAe,CAAW,CAAA,CAG9G,IAASmJ,IAAAA,CAAAA,CAAI,EAAGA,CAAIL,CAAAA,CAAAA,CAAkB,MAAQK,CAAAA,CAAAA,EAAAA,CAC5C,GAAI,CAACD,CAAqBJ,CAAAA,CAAAA,CAAkBK,EAAI,CAAC,CAAA,CAAE,IAAI,CAAA,CAAE,QAASL,CAAAA,CAAAA,CAAkBK,CAAC,CAAA,CAAE,IAAI,CACzF,CAAA,MAAM,IAAIpB,oBAAAA,CAAqB,CAC7B,IAAA,CAAM,CAAsC,mCAAA,EAAA,IAAA,CAAK,YAAY,IAAI,CAAA,CAAA,CACjE,KAAO,CAAA,IAAI,KAAM,CAAA,CAAA,SAAA,EAAY,IAAK,CAAA,WAAA,CAAY,IAAI,CAAsCe,mCAAAA,EAAAA,CAAAA,CAAkBK,CAAC,CAAA,CAAE,IAAI,CAAA,6BAAA,EAAgCL,CAAkBK,CAAAA,CAAAA,CAAI,CAAC,CAAE,CAAA,IAAI,CAAG,CAAA,CAAA,CACnL,CAAC,CAAA,CAIL,GAAIL,CAAAA,CAAkBA,EAAkB,MAAS,CAAA,CAAC,CAAE,CAAA,IAAA,GAAS,IAAK,CAAA,WAAA,CAAY,KAAMhJ,CAAAA,eAAe,CACjG,CAAA,MAAM,IAAIiI,oBAAAA,CAAqB,CAC7B,IAAA,CAAM,CAAsC,mCAAA,EAAA,IAAA,CAAK,YAAY,IAAI,CAAA,CAAA,CACjE,KAAO,CAAA,IAAI,KAAM,CAAA,CAAA,SAAA,EAAY,IAAK,CAAA,WAAA,CAAY,IAAI,CAAyC,uCAAA,CAAA,CAC7F,CAAC,CAAA,CAGH,OAAOhC,CAAAA,CAAA,CACL,QAAA,CAAU+C,GACN7B,CAAkB,CAAA,KAAA,CAAM,MAAS,CAAA,CAAA,CAAI,CAAE,kBAAA,CAAoBA,CAAkB,CAAA,CAAI,EAAC,CAE1F,CAEA,cAAA,CAAee,CAA+B,CAAA,CAC5C,GAAI,CAAC,KAAK,WAAY,CAAA,UAAA,CAAW,QAASzH,CAAAA,uBAAuB,CAC/D,CAAA,MAAM,IAAI6I,iBAAAA,CAAkB,CAC1B,IAAM,CAAA,CAAA,oCAAA,EAAuC,IAAK,CAAA,WAAA,CAA
Y,IAAI,CAAA,CAAA,CAClE,KAAO,CAAA,IAAI,MAAM,CAAY,SAAA,EAAA,IAAA,CAAK,WAAY,CAAA,IAAI,CAAuC7I,oCAAAA,EAAAA,uBAAuB,CAAG,CAAA,CAAA,CACrH,CAAC,CAAA,CAGH,OAAI,CAACyH,CAAUA,EAAAA,CAAAA,EAASA,CAAM,CAAA,MAAA,GAAW,EAChC,CAAE,KAAA,CAAO,EAAiB,CAmB5B,CAAA,CAAE,KAhBWA,CAAAA,CAAAA,CAAM,IAAKC,CAAS,EAAA,CACtC,IAAMoB,CAAAA,CAAaC,IAAK,EAAA,CAAE,SAAUrB,CAAAA,CAAI,EACxC,GAAI,CAACoB,CAAW,CAAA,OAAA,CACd,MAAM,IAAID,iBAAkB,CAAA,CAAE,KAAM,eAAiB,CAAA,KAAA,CAAOC,CAAW,CAAA,KAAM,CAAC,CAAA,CAEhF,OAAOA,CAAAA,CAAW,IACpB,CAAC,CAAA,CAEoC,GAAKpB,CAAAA,CAAAA,GAAU,CAClD,qBAAA,CAAuB,CAAC,CACtB,KAAMA,CAAK,CAAA,UAAA,CAAW,MAAO,CAAA,IAAA,CAC7B,WAAaA,CAAAA,CAAAA,CAAK,UAAW,CAAA,MAAA,CAAO,YACpC,UAAYA,CAAAA,CAAAA,CAAK,UAAW,CAAA,MAAA,CAAO,UACrC,CAAC,CACH,CAAA,CAAE,CAE+B,CACnC,CAGM,kBAAA,CAAmBZ,CAAqBZ,CAAAA,CAAAA,CAA0BuB,CAAsC,CAAA,CAAA,OAAAuB,EAAA,IAC5G,CAAA,IAAA,CAAA,WAAA,CAAA,OAAO,IAAI,OAAA,CAASC,CAAY,EAAA,CAC9BA,CAAQ,CAAA,IAAA,CAAK,eAAe,EAC9B,CAAC,CACH,CAAA,CAAA,CAGM,sBAAuBnC,CAAAA,CAAAA,CAAqBZ,CAA0BuB,CAAAA,CAAAA,CAA0C,QAAAuB,CAAA,CAAA,IAAA,CAAA,IAAA,CAAA,WAAA,CACpH,OAAO,IAAI,OAASC,CAAAA,CAAAA,EAAY,CAC9BA,CAAAA,CAAQ,IAAK,CAAA,iBAAA,EAAmB,EAClC,CAAC,CACH,CAEM,CAAA,CAAA,mBAAA,CAAoBnC,EAAoBZ,CAAyBuB,CAAAA,CAAAA,CAAyC,CAAAuB,OAAAA,CAAAA,CAAA,IAC9G,CAAA,IAAA,CAAA,WAAA,CAAA,IAAMjB,CAAoB,CAAA,IAAA,CAAK,gBAAgBjB,CAAQZ,CAAAA,CAAAA,CAAUuB,CAAK,CAAA,CAChEyB,CAAsB,CAAA,IAAA,CAAK,iBAAkBhD,CAAAA,CAAQ,EAC3D,GAAIgD,CAAAA,CAAoB,QAAaA,EAAAA,CAAAA,CAAoB,QAA2B,CAAA,MAAA,GAAW,CAC7F,CAAA,MAAM,IAAI1B,oBAAAA,CAAqB,CAC7B,IAAA,CAAM,uBACN,CAAA,KAAA,CAAO,IAAI,KAAA,CAAM,uBAAuB,CAC1C,CAAC,CAGH,CAAA,IAAM2B,CAAmB1B,CAAAA,CAAAA,CAAQ,IAAK,CAAA,cAAA,CAAeA,CAAK,CAAI,CAAA,EAE9D,CAAA,OAAO,IAAI,OAAA,CAASwB,CAAY,EAAA,CAC9BA,EAAQzD,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,CAAA,EAAA,CACH,IAAK,CAAA,gBAAA,EACLuC,CAAAA,CAAAA,CAAAA,CAAAA,CACAmB,CACAC,CAAAA,CAAAA,CAAAA,CACJ,EACH,CAAC,CACH,CAEA,CAAA,CAAA,6BAAA,CAA8BC,CAAiC,CAAA,CAC7D,IAAMC,CAAAA,CAAO1I,GAA2B,SAAUyI,CAAAA,CAAQ,CAC1D,CAAA,GAAIC,CAAK,CAAA,OAAA,CAAS,CAChB,GAAIA,EAAK,IAAK,CAAA,UAAA,CAAW,MAAW,GAAA,CAAA,CAClC,MAAM,IAAIC,kBAAmB,CAAA,CAC3B,KAAM,6BACN,CAAA,KAAA,CAAO,IAAI,KAAA,CAAM,CAA4B,yBAAA,EAAA,IAAA,CAAK,SAAUD,CAAAA,CAAAA,CAAK,IAAI,CAAC,CAAE,CAAA,CAC1E,CAAC,CAAA,CAGH,IAAME,CAAAA,CAAiDF,EAAK,IACtDnD,CAAAA,CAAAA,CAA0B,EAAC,CAC7BsD,CACEpC,CAAAA,CAAAA,CAAWmC,CAAe,CAAA,UAAA,CAAW,CAAC,CAAE,CAAA,OAAA,CAC9C,GAAInC,CAAAA,CAAU,CACZ,IAAMhB,CAAUgB,CAAAA,CAAAA,CAAS,MAAM,GAAI,CAAA,CAACqC,CAAkBlC,CAAAA,CAAAA,GAAe,CACnE,GAAI,MAAUkC,GAAAA,CAAAA,EAAeA,EAAY,IAAS,GAAA,KAAA,CAAA,CAChD,OAAOC,iBAAAA,CAAkBD,CAAY,CAAA,IAAI,CACpC,CAAA,GAAI,iBAAkBA,CAAeA,EAAAA,CAAAA,CAAY,YAAiB,GAAA,KAAA,CAAA,CACvE,OAAOE,qBAAAA,CACLpC,CACA,CAAA,CAAA,EAAGkC,EAAY,YAAa,CAAA,IAAI,CAAIlC,CAAAA,EAAAA,CAAK,CACzCkC,CAAAA,CAAAA,CAAAA,CAAY,YAAa,CAAA,IAAA,CACzB,KAAK,SAAUA,CAAAA,CAAAA,CAAY,YAAa,CAAA,IAAI,CAC9C,CAEJ,CAAC,CAAA,CAED,OAAAvD,CAAAA,CAAS,IAAK,CAAA,CACZ,IAAM1G,CAAAA,oBAAAA,CACN,OAAS4G,CAAAA,CACX,CAAC,CAEGmD,CAAAA,CAAAA,CAAe,aACjBC,GAAAA,CAAAA,CAAQ,CACN,YAAA,CAAcD,CAAe,CAAA,aAAA,CAAc,iBAC3C,WAAaA,CAAAA,CAAAA,CAAe,aAAc,CAAA,eAAA,CAC1C,gBAAkBA,CAAAA,CAAAA,CAAe,aAAc,CAAA,oBAAA,EAAwB,CACzE,CAGK,CAAA,CAAA,CACL,QAAUrD,CAAAA,CAAAA,CACV,KAAOsD,CAAAA,CAAAA,CACP,QAAU,CAAA,KAAA,CACZ,CACF,CAEA,IAAMI,CAAgBL,CAAAA,CAAAA,CAAe,UAAW,CAAA,CAAC,CAAE,CAAA,aAAA,CAanD,GAZIK,CAAiBA,EAAAA,CAAAA,CAAc,MAAS,CAAA,CAAA,EAC1CA,CAAc,CAAA,OAAA,CAASC,CAAW,EAAA,CAChC,GAAIA,CAAO,CAAA,OAAA,CACT,MAAM,IAAIP,kBAAmB,CAAA,CAC3B,IAAM,CAAA,CAAA,8BAAA,EAAiCO,EAAO,QAAQ,CAAA,mBAAA,EAAsBA,CAAO,CAAA,WAAW,CAC9F,CAAA,CAAA,KAAA,CAAO,IAAI,KAAA,CAAM,CAAiCA,8BAAAA,EAAAA,CAAAA,CAAO,QAAQ,CAAA,mBAAA,EAAsBA,CAAO,CAAA,WAAW,CAAE,CAAA,
CAC7G,CAAC,CAEL,CAAC,CAGkBN,CAAAA,CAAAA,CAAe,UAAW,CAAA,CAAC,CAAE,CAAA,YAAA,GAC7B,SACnB,MAAM,IAAID,kBAAmB,CAAA,CAC3B,IAAM,CAAA,6DAAA,CACN,KAAO,CAAA,IAAI,MAAM,6DAA6D,CAChF,CAAC,CAEL,CAEA,MAAM,IAAIA,kBAAAA,CAAmB,CAAE,IAAA,CAAM,6BAA+B,CAAA,KAAA,CAAOD,CAAK,CAAA,KAAM,CAAC,CACzF,CAGM,gBAAiBvC,CAAAA,CAAAA,CAAqBZ,CAA0BuB,CAAAA,CAAAA,CAAsC,CAAAuB,OAAAA,CAAAA,CAAA,IAC1G,CAAA,IAAA,CAAA,WAAA,CAAA,OAAO,IAAI,OAASC,CAAAA,CAAAA,EAAY,CAC9BA,CAAAA,CAAQ,IAAK,CAAA,aAAa,EAC5B,CAAC,CACH,CAGM,CAAA,CAAA,oBAAA,CAAqBnC,CAAqBZ,CAAAA,CAAAA,CAA0BuB,CAA0C,CAAA,CAAA,OAAAuB,CAAA,CAAA,IAAA,CAAA,IAAA,CAAA,WAAA,CAClH,OAAO,IAAI,OAASC,CAAAA,CAAAA,EAAY,CAC9BA,CAAAA,CAAQ,IAAK,CAAA,iBAAA,EAAmB,EAClC,CAAC,CACH,CAAA,CAAA,CAEM,iBAAkBnC,CAAAA,CAAAA,CAAoBZ,CAAyBuB,CAAAA,CAAAA,CAAyC,QAAAuB,CAAA,CAAA,IAAA,CAAA,IAAA,CAAA,WAAA,CAC5G,IAAMjB,CAAAA,CAAoB,IAAK,CAAA,eAAA,CAAgBjB,CAAQZ,CAAAA,CAAAA,CAAUuB,CAAK,CAChEyB,CAAAA,CAAAA,CAAsB,IAAK,CAAA,iBAAA,CAAkBhD,CAAQ,CAAA,CAC3D,GAAIgD,CAAAA,CAAoB,UAAaA,CAAoB,CAAA,QAAA,CAA2B,MAAW,GAAA,CAAA,CAC7F,MAAM,IAAI1B,oBAAqB,CAAA,CAC7B,KAAM,uBACN,CAAA,KAAA,CAAO,IAAI,KAAA,CAAM,uBAAuB,CAC1C,CAAC,CAAA,CAGH,IAAM2B,CAAmB1B,CAAAA,CAAAA,CAAQ,IAAK,CAAA,cAAA,CAAeA,CAAK,CAAA,CAAI,EAAC,CAE/D,OAAO,IAAI,OAAA,CAASwB,CAAY,EAAA,CAC9BA,CAAQzD,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAA,EACH,CAAA,IAAA,CAAK,gBAAiB,EAAA,CAAA,CACtBuC,CACAmB,CAAAA,CAAAA,CAAAA,CAAAA,CACAC,CACJ,CAAA,EACH,CAAC,CACH,GAEO,gCACLW,CAAAA,CAAAA,CACAC,CAC8E,CAAA,CAAA,OAAAC,EAAA,CAAA,IAAA,CAAA,IAAA,CAAA,WAAA,CAE9E,IAAMC,CAAAA,CAAAA,CAASF,EAASD,CAAO,EAAA,KAAA,CAAM,KAAK,CAAA,CAAE,MAAQI,CAAAA,CAAAA,EAASA,CAAK,CAAA,IAAA,KAAW,EAAE,CAAA,CAC/E,IAAWA,IAAAA,CAAAA,IAAQD,CAAO,CAAA,CACxB,IAAIE,CAAAA,CAAeD,EAGnB,GADAC,CAAAA,CAAeA,CAAa,CAAA,OAAA,CAAQ,KAAO,CAAA,EAAE,CACzCA,CAAAA,CAAAA,CAAa,WAAW,GAAG,CAAA,CAE7BA,CAAeA,CAAAA,CAAAA,CAAa,KAAM,CAAA,CAAC,CAC1BA,CAAAA,KAAAA,GAAAA,CAAAA,CAAa,SAAS,GAAG,CAAA,CAAG,CACrC,GAAIA,CAAiB,GAAA,GAAA,CAEnB,OAGAA,CAAAA,CAAeA,EAAa,KAAM,CAAA,CAAA,CAAG,CAAE,CAAA,EAE3C,CAEA,IAAIC,CACJ,CAAA,GAAI,CACFA,CAAAA,CAAiB,IAAK,CAAA,KAAA,CAAMD,CAAY,EAC1C,CAASE,MAAAA,CAAAA,CAAO,CAEd,GAAIA,CAAAA,YAAiB,WAAa,CAAA,CAChCN,CAASI,CAAAA,CAAAA,CACT,QACF,CAAA,WAEQE,CAEV,CAGAN,CAAS,CAAA,EAAA,CACT,IAAMV,CAAAA,CAAOvI,EAAyB,CAAA,SAAA,CAAUsJ,CAAc,CAC9D,CAAA,GAAIf,CAAK,CAAA,OAAA,CAAS,CAChB,IAAMiB,CAA2C,CAAA,CAAE,gBAAiB,EAAG,CACjEf,CAAAA,CAAAA,CAA+CF,CAAK,CAAA,IAAA,CAC1D,GAAIE,CAAAA,CAAe,WAAW,MAAS,CAAA,CAAA,CAAG,CACxC,IAAMpD,CAAUoD,CAAAA,CAAAA,CAAe,UAAW,CAAA,CAAC,EAAE,OACzCpD,CAAAA,CAAAA,EAAW,OAAWA,GAAAA,CAAAA,EAAWA,CAAQ,CAAA,KAAA,CAAM,MAAS,CAAA,CAAA,EAC1DA,EAAQ,KAAM,CAAA,OAAA,CAAQ,CAACoE,CAAAA,CAAahD,CAAU,GAAA,CAK5C,GAJI,MAAA,GAAUgD,CAAeA,EAAAA,CAAAA,CAAY,IAAS,GAAA,KAAA,CAAA,EAChDD,CAAgB,CAAA,eAAA,CAAgB,IAAKE,CAAAA,wBAAAA,CAAyBhL,qBAAsB+K,CAAY,CAAA,IAAI,CAAC,CAAA,CAGnG,cAAkBA,GAAAA,CAAAA,EAAeA,CAAY,CAAA,YAAA,GAAiB,OAAW,CAC3E,IAAME,CAAWF,CAAAA,CAAAA,CAAY,YAC7BD,CAAAA,CAAAA,CAAgB,eAAgB,CAAA,IAAA,CAAKI,6BACnClL,oBACA+H,CAAAA,CAAAA,CACA,CAAGkD,EAAAA,CAAAA,CAAS,IAAI,CAAA,CAAA,EAAIlD,CAAK,CAAA,CAAA,CACzBkD,CAAS,CAAA,IAAA,CACT,IAAK,CAAA,SAAA,CAAUA,CAAS,CAAA,IAAI,CAC9B,CAAC,EACH,CACF,CAAC,EAEL,CAEIlB,CAAe,CAAA,aAAA,GACjBe,CAAgB,CAAA,KAAA,CAAQ,CACtB,YAAcf,CAAAA,CAAAA,CAAe,aAAc,CAAA,gBAAA,CAC3C,gBAAkBA,CAAAA,CAAAA,CAAe,aAAc,CAAA,oBAAA,CAC/C,YAAaA,CAAe,CAAA,aAAA,CAAc,eAC5C,CAAA,CAAA,CAGF,MAAM,CAAE,eAAiBe,CAAAA,CAAAA,CAAiB,MAAQP,CAAAA,CAAO,EAC3D,CAAA,KACQ,MAAA,IAAIT,kBAAmB,CAAA,CAAE,KAAM,6BAA+B,CAAA,KAAA,CAAOD,CAAK,CAAA,KAAM,CAAC,CAE3F,CAEA,MAAM,CAAE,eAAiB,CAAA,CAAE,eAAiB,CAAA,EAAG,CAAA,CAAG,MAAQU,CAAAA,CAAO,EACnE,CACF,CAAA,CAAA,ECx0BMY,IAAAA,CAAAA,CAAN,cAAmC9E,CAAc,CAC/C,iBAAkBK
,CAAAA,CAAAA,CAAqC,CACrD,IAAMgD,CAAsB,CAAA,KAAA,CAAM,kBAAkBhD,CAAQ,CAAA,CAK5D,GAAIgD,CAAAA,CAAoB,iBAAmB,CAAA,CACzC,IAAM0B,CAAAA,CAAoB,CACxB,IAAM,CAAA,IAAA,CAAK,WAAY,CAAA,KAAA,CAAMrL,eAAe,CAAA,CAC5C,KAAO2J,CAAAA,CAAAA,CAAoB,kBAAkB,KAC/C,CAAA,CACAA,CAAoB,CAAA,QAAA,CAAS,OAAQ0B,CAAAA,CAAiB,CACtD,CAAA,OAAO1B,CAAoB,CAAA,kBAC7B,CAEA,OAAOA,CACT,CACF,EClBMvF,IAAAA,EAAAA,CAAsB,gBACtBkH,CAAAA,EAAAA,CACJ,qLAGI/G,CAAAA,EAAAA,CAAqBgH,gBAAgBzL,CAAsBmB,CAAAA,CAAqC,CAAE,CAAA,KAAA,CAAM,CAC5G,IAAA,CAAMmD,EACN,CAAA,WAAA,CAAakH,GACb,cAAgB,CAAA,KAAA,CAChB,eAAiB,CAAA,IAAA,CACjB,KAAOjL,CAAAA,CAAAA,CACP,UAAYW,CAAAA,CAAAA,CACZ,OAAQ,CACN,GAAA,CAAKpB,CAAuB,CAAA,IAAA,CAAK,CAAK,CAAA,EAAA,CAAK,IAAM,CAAA,CAAA,CAAG,CAAG,CAAE,CAAA,GAAA,CACzD,MAAQA,CAAAA,CAAAA,CAAuB,IAAK,CAAA,CAAA,CAAK,EAAK,CAAA,IAAA,CAAM,EAAG,CAAG,CAAA,CAAE,MAC9D,CACF,CAAC,CAAA,CAEK0E,EAAsB+B,CAAAA,CAAAA,CAGtBhC,GAAN,cAA2B+G,CAAqB,CAC9C,WAAA,CAAYvF,CAAkC,CAAA,CAC5C,KAAMtB,CAAAA,EAAAA,CAAoBsB,CAAO,EACnC,CACF,MCzBMb,EAA0B,CAAA,oBAAA,CAC1BwG,EACJ,CAAA,qLAAA,CAGIrG,EAAyBoG,CAAAA,eAAAA,CAAgBzL,CAAsBmB,CAAAA,CAAqC,EAAE,KAAM,CAAA,CAChH,IAAM+D,CAAAA,EAAAA,CACN,WAAawG,CAAAA,EAAAA,CACb,cAAgB,CAAA,KAAA,CAChB,gBAAiB,IACjB,CAAA,KAAA,CAAOnL,CACP,CAAA,UAAA,CAAYW,CACZ,CAAA,MAAA,CAAQ,CACN,GAAA,CAAKpB,EAAuB,IAAK,CAAA,CAAA,CAAK,EAAK,CAAA,IAAA,CAAM,CAAG,CAAA,CAAG,CAAE,CAAA,GAAA,CACzD,OAAQA,CAAuB,CAAA,IAAA,CAAK,CAAK,CAAA,EAAA,CAAK,IAAM,CAAA,CAAA,CAAG,CAAG,CAAA,CAAE,MAC9D,CACF,CAAC,CAEKsF,CAAAA,EAAAA,CAA0BmB,CAG1BpB,CAAAA,EAAAA,CAAN,cAA+BmG,CAAqB,CAClD,WAAYvF,CAAAA,CAAAA,CAAsC,CAChD,KAAA,CAAMV,EAAwBU,CAAAA,CAAO,EACvC,CACF,ECzBA,IAAMjB,EAA4B,CAAA,uBAAA,CAC5B6G,GACJ,8KAGI1G,CAAAA,EAAAA,CAA2BwG,eAAgBzL,CAAAA,CAAAA,CAAsBmB,CAAqC,CAAA,CAAE,KAAM,CAAA,CAClH,KAAM2D,EACN,CAAA,WAAA,CAAa6G,EACb,CAAA,cAAA,CAAgB,KAChB,CAAA,eAAA,CAAiB,IACjB,CAAA,KAAA,CAAOpL,EACP,UAAYW,CAAAA,CAAAA,CACZ,MAAQ,CAAA,CACN,GAAKpB,CAAAA,CAAAA,CAAuB,IAAK,CAAA,CAAA,CAAK,EAAK,CAAA,IAAA,CAAM,CAAG,CAAA,CAAG,CAAE,CAAA,GAAA,CACzD,MAAQA,CAAAA,CAAAA,CAAuB,KAAK,CAAK,CAAA,EAAA,CAAK,IAAM,CAAA,CAAA,CAAG,CAAG,CAAA,CAAE,MAC9D,CACF,CAAC,CAEKkF,CAAAA,EAAAA,CAA4BuB,CAG5BxB,CAAAA,EAAAA,CAAN,cAAiCuG,CAAqB,CACpD,WAAA,CAAYvF,EAAwC,CAClD,KAAA,CAAMd,EAA0Bc,CAAAA,CAAO,EACzC,CACF,EC1BA,IAAMrB,EAA4B,CAAA,uBAAA,CAC5BkH,EACJ,CAAA,8GAAA,CAEI/G,GAA2B4G,eAAgBzL,CAAAA,CAAAA,CAAsBiB,EAAuC,CAAA,CAAE,KAAM,CAAA,CACpH,IAAMyD,CAAAA,EAAAA,CACN,YAAakH,EACb,CAAA,cAAA,CAAgB,KAChB,CAAA,eAAA,CAAiB,IACjB,CAAA,KAAA,CAAOrL,CACP,CAAA,UAAA,CAAYS,GACZ,MAAQ,CAAA,CACN,GAAKlB,CAAAA,CAAAA,CAAuB,EAAG,CAAA,CAAA,CAAK,EAAK,CAAA,IAAA,CAAM,EAAG,CAAK,CAAA,EAAE,CAAE,CAAA,GAAA,CAC3D,MAAQA,CAAAA,CAAAA,CAAuB,EAAG,CAAA,CAAA,CAAK,GAAK,IAAM,CAAA,CAAA,CAAG,CAAK,CAAA,EAAE,CAAE,CAAA,MAChE,CACF,CAAC,EAEK8E,EAA4B2B,CAAAA,CAAAA,CAG5B5B,EAAN,CAAA,cAAiC2G,CAAqB,CACpD,WAAYvF,CAAAA,CAAAA,CAAwC,CAClD,KAAMlB,CAAAA,EAAAA,CAA0BkB,CAAO,EACzC,CACF,EC1BM7C,IAAAA,EAAAA,CAAwB,kBACxB2I,CAAAA,EAAAA,CACJ,0LAGIxI,CAAAA,EAAAA,CAAuBoI,gBAAgBzL,CAAsBa,CAAAA,CAA6B,CAAE,CAAA,KAAA,CAAM,CACtG,IAAA,CAAMqC,EACN,CAAA,WAAA,CAAa2I,GACb,cAAgB,CAAA,GAAA,CAChB,eAAiB,CAAA,IAAA,CACjB,KAAOtL,CAAAA,CAAAA,CACP,UAAYC,CAAAA,CAAAA,CACZ,OAAQ,CACN,GAAA,CAAKV,CAAuB,CAAA,EAAA,CAAG,CAAK,CAAA,CAAA,CAAK,IAAM,CAAA,CAAA,CAAG,IAAM,EAAE,CAAA,CAAE,GAC5D,CAAA,MAAA,CAAQA,CAAuB,CAAA,EAAA,CAAG,CAAK,CAAA,CAAA,CAAK,KAAM,CAAG,CAAA,GAAA,CAAM,EAAE,CAAA,CAAE,MACjE,CACF,CAAC,CAAA,CAEKsD,GAAwBmD,CAGxBpD,CAAAA,EAAAA,CAAN,cAA6BqD,CAAc,CACzC,WAAA,CAAYT,CAAoC,CAAA,CAC9C,MAAM1C,EAAsB0C,CAAAA,CAAO,EACrC,CACF,ECzBA,IAAMzD,EAA2B,CAAA,sBAAA,CAC3BwJ,EACJ,CAAA,0LAAA,CAGIrJ,EAA0BgJ,CAAAA,eAAAA,CAAgBzL,EAAsBa,CAA6B,CAAA,CAAE,KAAM,CAAA,CACzG,IAAMyB,CAAAA,EAAAA,CACN,W
AAawJ,CAAAA,EAAAA,CACb,eAAgB,GAChB,CAAA,eAAA,CAAiB,IACjB,CAAA,KAAA,CAAOvL,CACP,CAAA,UAAA,CAAYC,CACZ,CAAA,MAAA,CAAQ,CACN,GAAKV,CAAAA,CAAAA,CAAuB,EAAG,CAAA,CAAA,CAAK,CAAK,CAAA,IAAA,CAAM,CAAG,CAAA,GAAA,CAAM,EAAE,CAAE,CAAA,GAAA,CAC5D,MAAQA,CAAAA,CAAAA,CAAuB,EAAG,CAAA,CAAA,CAAK,CAAK,CAAA,IAAA,CAAM,EAAG,GAAM,CAAA,EAAE,CAAE,CAAA,MACjE,CACF,CAAC,CAEK0C,CAAAA,EAAAA,CAA2B+D,EAG3BhE,EAAN,CAAA,cAAgCiE,CAAc,CAC5C,WAAYT,CAAAA,CAAAA,CAAuC,CACjD,KAAA,CAAMtD,GAAyBsD,CAAO,EACxC,CACF,ECzBMrD,IAAAA,EAAAA,CAA2B,sBAC3BqJ,CAAAA,EAAAA,CACJ,0LAGIlJ,CAAAA,EAAAA,CAA0B4I,eAAgBzL,CAAAA,CAAAA,CAAsBa,CAA6B,CAAE,CAAA,KAAA,CAAM,CACzG,IAAA,CAAM6B,EACN,CAAA,WAAA,CAAaqJ,EACb,CAAA,cAAA,CAAgB,IAChB,eAAiB,CAAA,IAAA,CACjB,KAAOxL,CAAAA,CAAAA,CACP,UAAYC,CAAAA,CAAAA,CACZ,MAAQ,CAAA,CACN,IAAKV,CAAuB,CAAA,EAAA,CAAG,CAAK,CAAA,CAAA,CAAK,IAAM,CAAA,CAAA,CAAG,GAAM,CAAA,EAAE,CAAE,CAAA,GAAA,CAC5D,MAAQA,CAAAA,CAAAA,CAAuB,EAAG,CAAA,CAAA,CAAK,CAAK,CAAA,IAAA,CAAM,EAAG,GAAM,CAAA,EAAE,CAAE,CAAA,MACjE,CACF,CAAC,CAEK8C,CAAAA,EAAAA,CAA2B2D,EAG3B5D,EAAN,CAAA,cAAgC6D,CAAc,CAC5C,WAAYT,CAAAA,CAAAA,CAAuC,CACjD,KAAA,CAAMlD,GAAyBkD,CAAO,EACxC,CACF,ECzBMjD,IAAAA,EAAAA,CAA8B,yBAC9BkJ,CAAAA,EAAAA,CACJ,oKAGI/I,CAAAA,EAAAA,CAA6BwI,eAAgBzL,CAAAA,CAAAA,CAAsBa,CAA6B,CAAE,CAAA,KAAA,CAAM,CAC5G,IAAA,CAAMiC,EACN,CAAA,WAAA,CAAakJ,EACb,CAAA,cAAA,CAAgB,IAChB,eAAiB,CAAA,IAAA,CACjB,KAAOzL,CAAAA,CAAAA,CACP,UAAYC,CAAAA,CAAAA,CACZ,MAAQ,CAAA,CACN,IAAKV,CAAuB,CAAA,EAAA,CAAG,CAAK,CAAA,CAAA,CAAK,IAAM,CAAA,CAAA,CAAG,GAAM,CAAA,EAAE,EAAE,GAC5D,CAAA,MAAA,CAAQA,CAAuB,CAAA,EAAA,CAAG,CAAK,CAAA,CAAA,CAAK,IAAM,CAAA,CAAA,CAAG,IAAM,EAAE,CAAA,CAAE,MACjE,CACF,CAAC,CAAA,CAEKkD,EAA8BuD,CAAAA,CAAAA,CAG9BxD,GAAN,cAAmCyD,CAAc,CAC/C,WAAA,CAAYT,CAA0C,CAAA,CACpD,KAAM9C,CAAAA,EAAAA,CAA4B8C,CAAO,EAC3C,CACF,ECzBA,IAAM7B,EAAsB,CAAA,gBAAA,CACtB+H,EACJ,CAAA,oKAAA,CAGI5H,EAAqBoH,CAAAA,eAAAA,CAAgBzL,CAAsBa,CAAAA,CAA6B,EAAE,KAAM,CAAA,CACpG,IAAMqD,CAAAA,EAAAA,CACN,WAAa+H,CAAAA,EAAAA,CACb,cAAgB,CAAA,GAAA,CAChB,gBAAiB,IACjB,CAAA,KAAA,CAAO1L,CACP,CAAA,UAAA,CAAYC,CACZ,CAAA,MAAA,CAAQ,CACN,GAAA,CAAKV,EAAuB,EAAG,CAAA,CAAA,CAAK,CAAK,CAAA,IAAA,CAAM,CAAG,CAAA,GAAA,CAAM,EAAE,CAAA,CAAE,IAC5D,MAAQA,CAAAA,CAAAA,CAAuB,EAAG,CAAA,CAAA,CAAK,CAAK,CAAA,IAAA,CAAM,CAAG,CAAA,GAAA,CAAM,EAAE,CAAE,CAAA,MACjE,CACF,CAAC,CAEKsE,CAAAA,EAAAA,CAAsBmC,CAGtBpC,CAAAA,EAAAA,CAAN,cAA2BqC,CAAc,CACvC,WAAYT,CAAAA,CAAAA,CAAkC,CAC5C,KAAA,CAAM1B,EAAoB0B,CAAAA,CAAO,EACnC,CACF,ECzBMzC,IAAAA,EAAAA,CAAyB,oBACzB4I,CAAAA,EAAAA,CACJ,oKAGIzI,CAAAA,EAAAA,CAAwBgI,eAAgBzL,CAAAA,CAAAA,CAAsBa,CAA6B,CAAA,CAAE,MAAM,CACvG,IAAA,CAAMyC,EACN,CAAA,WAAA,CAAa4I,EACb,CAAA,cAAA,CAAgB,GAChB,CAAA,eAAA,CAAiB,KACjB,KAAO3L,CAAAA,CAAAA,CACP,UAAYC,CAAAA,CAAAA,CACZ,MAAQ,CAAA,CACN,GAAKV,CAAAA,CAAAA,CAAuB,GAAG,CAAK,CAAA,CAAA,CAAK,IAAM,CAAA,CAAA,CAAG,GAAM,CAAA,EAAE,CAAE,CAAA,GAAA,CAC5D,OAAQA,CAAuB,CAAA,EAAA,CAAG,CAAK,CAAA,CAAA,CAAK,IAAM,CAAA,CAAA,CAAG,GAAM,CAAA,EAAE,EAAE,MACjE,CACF,CAAC,CAAA,CAEK0D,EAAyB+C,CAAAA,CAAAA,CAGzBhD,EAAN,CAAA,cAA8BiD,CAAc,CAC1C,WAAA,CAAYT,CAAqC,CAAA,CAC/C,KAAMtC,CAAAA,EAAAA,CAAuBsC,CAAO,EACtC,CACF,ECzBA,IAAMrC,EAAyB,CAAA,oBAAA,CACzByI,EACJ,CAAA,oKAAA,CAGItI,EAAwB4H,CAAAA,eAAAA,CAAgBzL,CAAsBa,CAAAA,CAA6B,CAAE,CAAA,KAAA,CAAM,CACvG,IAAM6C,CAAAA,EAAAA,CACN,WAAayI,CAAAA,EAAAA,CACb,cAAgB,CAAA,GAAA,CAChB,eAAiB,CAAA,IAAA,CACjB,MAAO5L,CACP,CAAA,UAAA,CAAYC,CACZ,CAAA,MAAA,CAAQ,CACN,GAAA,CAAKV,CAAuB,CAAA,EAAA,CAAG,EAAK,CAAK,CAAA,IAAA,CAAM,CAAG,CAAA,GAAA,CAAM,EAAE,CAAA,CAAE,GAC5D,CAAA,MAAA,CAAQA,CAAuB,CAAA,EAAA,CAAG,CAAK,CAAA,CAAA,CAAK,IAAM,CAAA,CAAA,CAAG,GAAM,CAAA,EAAE,EAAE,MACjE,CACF,CAAC,CAAA,CAEK8D,EAAyB2C,CAAAA,CAAAA,CAGzB5C,EAAN,CAAA,cAA8B6C,CAAc,CAC1C,WAAA,CAAYT,CAAqC
,CAAA,CAC/C,KAAMlC,CAAAA,EAAAA,CAAuBkC,CAAO,EACtC,CACF,ECzBA,IAAMjC,EAA4B,CAAA,uBAAA,CAC5BsI,EACJ,CAAA,oKAAA,CAGInI,EAA2BwH,CAAAA,eAAAA,CAAgBzL,CAAsBa,CAAAA,CAA6B,CAAE,CAAA,KAAA,CAAM,CAC1G,IAAMiD,CAAAA,EAAAA,CACN,WAAasI,CAAAA,EAAAA,CACb,cAAgB,CAAA,GAAA,CAChB,eAAiB,CAAA,IAAA,CACjB,MAAO7L,CACP,CAAA,UAAA,CAAYC,CACZ,CAAA,MAAA,CAAQ,CACN,GAAA,CAAKV,CAAuB,CAAA,EAAA,CAAG,EAAK,CAAK,CAAA,IAAA,CAAM,CAAG,CAAA,GAAA,CAAM,EAAE,CAAA,CAAE,GAC5D,CAAA,MAAA,CAAQA,EAAuB,EAAG,CAAA,CAAA,CAAK,CAAK,CAAA,IAAA,CAAM,CAAG,CAAA,GAAA,CAAM,EAAE,CAAA,CAAE,MACjE,CACF,CAAC,CAEKkE,CAAAA,EAAAA,CAA4BuC,CAG5BxC,CAAAA,EAAAA,CAAN,cAAiCyC,CAAc,CAC7C,WAAYT,CAAAA,CAAAA,CAAwC,CAClD,KAAA,CAAM9B,EAA0B8B,CAAAA,CAAO,EACzC,CACF,ECjCMsG,IAAAA,EAAAA,CAAyE,CAC7EC,4BACF,CAEMC,CAAAA,EAAAA,CAAqCxN,GAAE,CAAA,IAAA,CAAK,CAChDuN,4BACF,CAAC,ECTD,IAAME,GAA8BzN,GAAE,CAAA,MAAA,CAAO,CAC3C,UAAA,CAAYA,GAAE,CAAA,KAAA,CAAMA,GAAE,CAAA,MAAA,CAAO,CAC3B,MAAQA,CAAAA,GAAAA,CAAE,KAAMA,CAAAA,GAAAA,CAAE,MAAO,EAAC,CAC5B,CAAC,CAAC,CACJ,CAAC,ECJD,IAAM0N,GAA8B1N,GAAE,CAAA,MAAA,CAAO,CAC3C,KAAA,CAAOA,GAAE,CAAA,MAAA,EAAS,CAAA,GAAA,CAAI,CAAC,CACvB,CAAA,OAAA,CAASA,GAAE,CAAA,MAAA,CAAO,CAChB,KAAA,CAAOA,GAAE,CAAA,KAAA,CAAMA,IAAE,MAAO,CAAA,CACtB,IAAMA,CAAAA,GAAAA,CAAE,MAAO,EAAA,CAAE,GAAI,CAAA,CAAC,CACxB,CAAC,CAAC,CAAA,CAAE,GAAI,CAAA,CAAC,CACX,CAAC,CACH,CAAC,CAAA,CAGK2N,EAAyB3N,CAAAA,GAAAA,CAAE,MAAO,CAAA,CACtC,KAAOA,CAAAA,GAAAA,CAAE,QAAS,CAAA,GAAA,CAAI,CAAC,CAAA,CAAE,QAAS,EAAA,CAClC,QAAUA,CAAAA,GAAAA,CAAE,MAAM0N,EAA2B,CAAA,CAAE,GAAI,CAAA,CAAC,CACpD,CAAA,oBAAA,CAAsB1N,GAAE,CAAA,MAAA,GAAS,GAAI,EAAA,CAAE,GAAI,CAAA,CAAC,CAAE,CAAA,QAAA,EAChD,CAAC,ECqBK4N,IAAAA,CAAAA,CAA4B5N,GAAE,CAAA,MAAA,CAAO,CACzC,MAAA,CAAQA,IAAE,MAAO,EAAA,CACjB,OAASA,CAAAA,GAAAA,CAAE,MAAO,EAAA,CAAE,GAAI,EAAA,CACxB,iBAAkBA,GAAE,CAAA,MAAA,EAAS,CAAA,GAAA,EAAM,CAAA,QAAA,EACrC,CAAC,EAGK6N,CAAN,CAAA,KAA+E,CAQ7E,WAAA,CAAYnG,CAAuCV,CAAAA,CAAAA,CAAwC,CAP3F,IAAA,CAAS,OAAU,CAAA,IAAA,CAQjB,IAAMK,CAAAA,CAAgBuG,CAA0B,CAAA,KAAA,CAAM5G,CAAO,CAAA,CAC7D,KAAK,WAAcU,CAAAA,CAAAA,CACnB,IAAK,CAAA,MAAA,CAASL,CAAc,CAAA,MAAA,CAC5B,IAAK,CAAA,OAAA,CAAUO,wBAAwBP,CAAc,CAAA,OAAO,CAC5D,CAAA,IAAA,CAAK,gBAAmBO,CAAAA,uBAAAA,CACtBP,CAAc,CAAA,gBAAA,EACd,GAAG,IAAK,CAAA,OAAO,CAAW,QAAA,EAAA,IAAA,CAAK,WAAY,CAAA,IAAI,CAA2B,wBAAA,EAAA,IAAA,CAAK,MAAM,CAAA,CACvF,EACF,CAEA,iBAA6B,EAAA,CAC3B,OAAO,IAAA,CAAK,OACd,CAEA,iBAAA,EAAiC,CAC/B,OAAO,CACL,cAAA,CAAgB,kBAChB,CAAA,MAAA,CAAQ,YACV,CACF,CAEA,gBAA+B,EAAA,CAC7B,OAAO,CACL,KAAO,CAAA,IAAA,CAAK,YAAY,IAC1B,CACF,CAGA,aAAA,CAAcQ,CAAyE,CAAA,CACrF,OAAO,CAAE,YAAa,CAAO,CAAA,CAAA,OAAA,CAAS,CAAE,CAC1C,CAGA,aAAA,CAAciG,CAAyC,CAAA,CACrD,OAAOA,CAAS,CAAA,QAAA,CAAS,MAAO,CAAA,CAACxG,CAAKW,CAAAA,CAAAA,GAAYX,CAAMW,CAAAA,CAAAA,CAAQ,OAAQ,CAAC,CAC3E,CAEA,qBAAA,CAAsBA,CAIpB,CAAA,CACA,IAAMC,CAAAA,CAAcyF,GAAuB,SAAU1F,CAAAA,CAAO,CAC5D,CAAA,GAAI,CAACC,CAAAA,CAAY,OACf,CAAA,MAAM,IAAIC,wBAAAA,CAAyB,CAAE,IAAA,CAAM,uBAAyB,CAAA,KAAA,CAAOD,CAAY,CAAA,KAAM,CAAC,CAGhG,CAAA,IAAME,CAAgBF,CAAAA,CAAAA,CAAY,IAE5BG,CAAAA,CAAAA,CAAYD,CAAc,CAAA,KAAA,CAE1BK,EAAU,CACd,oBAAA,CAAsBL,CAAc,CAAA,oBACtC,CACMM,CAAAA,CAAAA,CAASC,MAAO,EAAA,CAAE,MAAMC,sBAAuBH,CAAAA,CAAO,CAAC,CAAA,CAEvDsF,CAA2C,CAAA,CAC/C,QAAUR,CAAAA,4BAAAA,CACV,SAAUnF,CAAc,CAAA,QAAA,CAAS,MAAO,CAAA,CAACd,CAAKW,CAAAA,CAAAA,IAC5CX,CAAI,CAAA,IAAA,CAAK,GAAGW,CAAQ,CAAA,OAAA,CAAQ,KAAM,CAAA,GAAA,CAAK,CAAM,EAAA,CAAA,CAAE,IAAI,CAAC,EAC7CX,CACN,CAAA,CAAA,EAAc,CACnB,CAEA,CAAA,OAAO,CACL,SAAA,CAAAe,EACA,MAAAK,CAAAA,CAAAA,CACA,iBAAAqF,CAAAA,CACF,CACF,CAGA,eAAgBrF,CAAAA,CAAAA,CAAoBoF,CAA8C,CAAA,CAChF,IAAMtE,CAAAA,CAAgB,IAAK,CAAA,WAAA,CAAY,MAAO,CAAA,MAAA,CAAO,UAAUd,CAAM,CAAA,CACrE,GAAI,CAACc,CAAc,CAAA,OAAA,CACjB,MAAM,IAAIC
,mBAAmB,CAC3B,IAAA,CAAM,CAA+B,4BAAA,EAAA,IAAA,CAAK,WAAY,CAAA,IAAI,CAC1D,CAAA,CAAA,CAAA,KAAA,CAAOD,EAAc,KACvB,CAAC,CAGH,CAAA,IAAME,CAAeF,CAAAA,CAAAA,CAAc,IACnC,CAAA,OAAA,MAAA,CAAO,KAAKE,CAA0B,CAAA,CAAE,OAASnC,CAAAA,CAAAA,EAAQ,CACvD,GAAI,CAAC,IAAA,CAAK,YAAY,MAAO,CAAA,GAAA,CAAIA,CAAG,CAAA,CAClC,MAAM,IAAIkC,kBAAmB,CAAA,CAC3B,KAAM,CAA+B,4BAAA,EAAA,IAAA,CAAK,WAAY,CAAA,IAAI,CAC1D,CAAA,CAAA,CAAA,KAAA,CAAO,IAAI,KAAA,CAAM,yBAAyBlC,CAAG,CAAA;AAAA,8BACvB,EAAA,MAAA,CAAO,KAAK,IAAK,CAAA,WAAA,CAAY,OAAO,GAAG,CAAA,CAAE,KAAK,IAAI,CAAC,GAAG,CAC9E,CAAC,CAEL,CAAC,CAAA,CAEyB,OAAO,IAAKmC,CAAAA,CAAY,CAAE,CAAA,MAAA,CAClD,CAACpC,CAAAA,CAAKC,IAAQ,CAEZ,IAAMsC,EADM,IAAK,CAAA,WAAA,CAAY,OAAO,GAAItC,CAAAA,CAAG,EACtB,KACfuC,CAAAA,CAAAA,CAAaJ,EAAanC,CAAG,CAAA,CACnC,OAAAD,CAAIuC,CAAAA,CAAQ,EAAIC,CACTxC,CAAAA,CACT,CACA,CAAA,EACF,CAGF,CAEA,0BAA2BwG,CAAAA,CAAAA,CAA6C,CACtE,IAAME,CAAAA,CAAkBC,mBAAoB,CAAA,SAAA,CAAUH,CAAQ,CAC9D,CAAA,GAAI,CAACE,CAAgB,CAAA,OAAA,CACnB,MAAM,IAAIE,6BAAAA,CAA8B,CAAE,IAAM,CAAA,4BAAA,CAA8B,KAAOF,CAAAA,CAAAA,CAAgB,KAAM,CAAC,EAG9G,GAAIF,CAAAA,CAAS,WAAaP,4BACxB,CAAA,MAAM,IAAIW,6BAA8B,CAAA,CACtC,IAAM,CAAA,CAAA,wCAAA,EAA2C,IAAK,CAAA,WAAA,CAAY,IAAI,CACtE,CAAA,CAAA,CAAA,KAAA,CAAO,IAAI,KAAM,CAAA,CAAA,MAAA,EAASX,4BAA4B,CAAwC,qCAAA,EAAA,IAAA,CAAK,WAAY,CAAA,IAAI,CAAG,CAAA,CAAA,CACxH,CAAC,CAUH,CAAA,OAAO,CACL,QARmDS,CAAAA,CAAAA,CAAgB,KAAK,QAAS,CAAA,GAAA,CAAK/F,IAC/E,CACL,KAAA,CAAO,UAAU,IAAK,CAAA,WAAA,CAAY,IAAI,CACtC,CAAA,CAAA,OAAA,CAAS,CAAE,KAAO,CAAA,CAAC,CAAE,IAAA,CAAMA,CAAkB,CAAC,CAAE,CAClD,CAAA,CACD,CAID,CACF,CAGM,oBAAoBS,CAAqBoF,CAAAA,CAAAA,CAAoD,CAAAlD,OAAAA,CAAAA,CAAA,IACjG,CAAA,IAAA,CAAA,WAAA,CAAA,OAAO,IAAI,OAASC,CAAAA,CAAAA,EAAY,CAC9BA,CAAQ,CAAA,IAAA,CAAK,gBAAgB,EAC/B,CAAC,CACH,CAAA,CAAA,CAGM,uBAAwBnC,CAAAA,CAAAA,CAAqBoF,EAAwD,CAAAlD,OAAAA,CAAAA,CAAA,sBACzG,OAAO,IAAI,QAASC,CAAY,EAAA,CAC9BA,EAAQ,IAAK,CAAA,iBAAA,EAAmB,EAClC,CAAC,CACH,CAEM,CAAA,CAAA,oBAAA,CAAqBnC,EAAoBoF,CAAsD,CAAA,CAAA,OAAAlD,CAAA,CAAA,IAAA,CAAA,IAAA,CAAA,WAAA,CACnG,OAAO,IAAI,QAASC,CAAY,EAAA,CAC9B,IAAMpC,CAAU,CAAA,IAAA,CAAK,gBAAgBC,CAAM,CAAA,CACrCyF,CAAY,CAAA,IAAA,CAAK,0BAA2BL,CAAAA,CAAQ,EAE1D,GAAIA,CAAAA,CAAS,SAAS,MAAW,GAAA,CAAA,CAC/B,MAAM,IAAII,6BAAAA,CAA8B,CACtC,IAAA,CAAM,CAA2C,wCAAA,EAAA,IAAA,CAAK,YAAY,IAAI,CAAA,CAAA,CAAA,CACtE,MAAO,IAAI,KAAA,CAAM,0BAA0B,CAC7C,CAAC,EAGCzF,CAAQ,CAAA,oBAAA,GACT0F,EAAkB,QAAS,CAAA,OAAA,CAASlG,GAAiB,CACpDA,CAAAA,CAAQ,qBAAuBQ,CAAQ,CAAA,qBACzC,CAAC,CAAA,CACD,OAAOA,CAAAA,CAAQ,sBAGjBoC,CAAQzD,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAAA,CAAA,GACH,IAAK,CAAA,gBAAA,IACLqB,CACA0F,CAAAA,CAAAA,CAAAA,CACJ,EACH,CAAC,CACH,GAEA,8BAA+BnD,CAAAA,CAAAA,CAAsC,CACnE,IAAMC,CAAAA,CAAOwC,GAA4B,SAAUzC,CAAAA,CAAQ,CAC3D,CAAA,GAAIC,CAAK,CAAA,OAAA,CAAS,CAEhB,IAAMmD,CAAAA,CADiBnD,EAAK,IACM,CAAA,UAAA,CAAW,IAAI,CAACoD,CAAAA,CAAWlF,KACpD,CACL,KAAA,CAAAA,EACA,SAAWkF,CAAAA,CAAAA,CAAU,MACvB,CACD,CAAA,CAAA,CAED,OAAO,CACL,cAAA,CAAgBC,qBAChB,CAAA,UAAA,CAAYF,CACd,CACF,CAEA,MAAM,IAAIlD,mBAAmB,CAAE,IAAA,CAAM,8BAA+B,KAAOD,CAAAA,CAAAA,CAAK,KAAM,CAAC,CACzF,CACF,EC5OA,IAAM1E,GAA4B,oBAC5BgI,CAAAA,EAAAA,CAAiC,oBAEjC7H,CAAAA,EAAAA,CAA2B8H,oBAAqBhB,CAAAA,EAAkC,EAAE,KAAM,CAAA,CAC9F,KAAMjH,EACN,CAAA,WAAA,CAAagI,GACb,UAAYjB,CAAAA,EAAAA,CACZ,eAAgB,IAChB,CAAA,eAAA,CAAiB,IACjB,MAAQ,CAAA,CACN,IAAKtM,CAA4B,CAAA,IAAA,CAAK,GAAG,CAAE,CAAA,GAAA,CAC3C,MAAQA,CAAAA,CAAAA,CAA4B,IAAK,CAAA,GAAG,EAAE,MAChD,CACF,CAAC,CAEKyF,CAAAA,EAAAA,CAA4BmH,EAG5BpH,EAAN,CAAA,cAAiCqH,CAAmB,CAClD,WAAY7G,CAAAA,CAAAA,CAAwC,CAClD,KAAMN,CAAAA,EAAAA,CAA0BM,CAAO,EACzC,CACF,ECtBML,IAAAA,EAAAA,CAA4B,qBAC5B8H,EAAiC,CAAA,oBAAA,CAEjC3H,GAA2B0H,oBAAqBhB,CAAAA,EAAkC,EAAE,KAAM,CAAA,CAC9F,KAAM7G,EACN,CAAA,WAAA,CAAa8H,GAC
b,UAAYnB,CAAAA,EAAAA,CACZ,eAAgB,IAChB,CAAA,eAAA,CAAiB,IACjB,MAAQ,CAAA,CACN,GAAKtM,CAAAA,CAAAA,CAA4B,IAAK,CAAA,GAAG,EAAE,GAC3C,CAAA,MAAA,CAAQA,EAA4B,IAAK,CAAA,GAAG,EAAE,MAChD,CACF,CAAC,CAAA,CAEK6F,EAA4B+G,CAAAA,CAAAA,CAG5BhH,GAAN,cAAiCiH,CAAmB,CAClD,WAAY7G,CAAAA,CAAAA,CAAwC,CAClD,KAAMF,CAAAA,EAAAA,CAA0BE,CAAO,EACzC,CACF","file":"index.mjs","sourcesContent":["import { z } from \"zod\";\n\nimport { \n CHAT_CONFIG, \n MultiStringConfigItem, \n RangeConfigItem, \n SelectStringConfigItem, \n ObjectSchemaConfigItem\n} from \"@adaline/provider\";\n\nconst temperature = (max: number, _default: number) => RangeConfigItem({\n param: \"temperature\",\n title: CHAT_CONFIG.TEMPERATURE.title,\n description: CHAT_CONFIG.TEMPERATURE.description,\n min: 0.0,\n max: max,\n step: 0.01,\n default: _default,\n});\n\nconst maxTokens = (maxOutputTokens: number) =>\n RangeConfigItem({\n param: \"maxOutputTokens\",\n title: CHAT_CONFIG.MAX_TOKENS.title,\n description: CHAT_CONFIG.MAX_TOKENS.description,\n min: 0,\n max: maxOutputTokens,\n step: 1,\n default: 0,\n });\n\nconst stop = (maxSequences: number) =>\n MultiStringConfigItem({\n param: \"stopSequences\",\n title: CHAT_CONFIG.STOP(maxSequences).title,\n description: CHAT_CONFIG.STOP(maxSequences).description,\n max: maxSequences,\n });\n\nconst topP = (_default: number) => RangeConfigItem({\n param: \"topP\",\n title: CHAT_CONFIG.TOP_P.title,\n description: CHAT_CONFIG.TOP_P.description,\n min: 0,\n max: 1,\n step: 0.01,\n default: _default,\n});\n\nconst topK = (_default: number) => RangeConfigItem({\n param: \"topK\",\n title: CHAT_CONFIG.TOP_K.title,\n description: CHAT_CONFIG.TOP_K.description,\n min: 1,\n max: 40,\n step: 1,\n default: _default,\n});\n\nconst frequencyPenalty = RangeConfigItem({\n param: \"frequencyPenalty\",\n title: CHAT_CONFIG.FREQUENCY_PENALTY.title,\n description: CHAT_CONFIG.FREQUENCY_PENALTY.description,\n min: -2,\n max: 2,\n step: 0.01,\n default: 0,\n});\n\nconst presencePenalty = RangeConfigItem({\n param: \"presencePenalty\",\n title: CHAT_CONFIG.PRESENCE_PENALTY.title,\n description: CHAT_CONFIG.PRESENCE_PENALTY.description,\n min: -2,\n max: 2,\n step: 0.01,\n default: 0,\n});\n\nconst seed = RangeConfigItem({\n param: \"seed\",\n title: CHAT_CONFIG.SEED.title,\n description: CHAT_CONFIG.SEED.description,\n min: 0,\n max: 1000000,\n step: 1,\n default: 0,\n});\n\nconst toolChoice = SelectStringConfigItem({\n param: \"toolChoice\",\n title: \"Tool choice\",\n description:\n \"Controls which (if any) tool is called by the model. 'none' means the model will not call a function. 
'auto' means the model can pick between generating a message or calling a tool.\",\n default: \"auto\",\n choices: [\"auto\", \"any\", \"none\"],\n});\n\nconst safetySettings = ObjectSchemaConfigItem({\n param: \"safetySettings\",\n title: \"Safety settings\",\n description: \"The safety rating contains the category of harm and the harm probability level in that category for a piece of content.\",\n objectSchema: z.array(z.object({\n threshold: z.enum([\n \"HARM_BLOCK_THRESHOLD_UNSPECIFIED\",\n \"BLOCK_LOW_AND_ABOVE\",\n \"BLOCK_MEDIUM_AND_ABOVE\",\n \"BLOCK_ONLY_HIGH\",\n \"BLOCK_NONE\",\n \"OFF\"\n ]),\n category: z.enum([\n \"HARM_CATEGORY_UNSPECIFIED\",\n \"HARM_CATEGORY_HARASSMENT\",\n \"HARM_CATEGORY_HATE_SPEECH\",\n \"HARM_CATEGORY_SEXUALLY_EXPLICIT\",\n \"HARM_CATEGORY_DANGEROUS_CONTENT\",\n \"HARM_CATEGORY_CIVIC_INTEGRITY\",\n ]),\n })),\n});\n\nexport { \n maxTokens, \n stop, \n temperature, \n toolChoice, \n topK, \n topP, \n seed, \n frequencyPenalty, \n presencePenalty, \n safetySettings \n};\n","import { z } from \"zod\";\n\nimport {\n maxTokens,\n stop,\n temperature,\n toolChoice,\n topP,\n safetySettings,\n} from \"./common.config.chat-model.google\";\n\nconst ChatModelBaseConfigSchema = (\n maxTemperature: number,\n defaultTemperature: number,\n maxOutputTokens: number,\n maxSequences: number,\n defaultTopP: number,\n) =>\n z.object({\n temperature: temperature(maxTemperature, defaultTemperature).schema,\n maxTokens: maxTokens(maxOutputTokens).schema,\n stop: stop(maxSequences).schema,\n topP: topP(defaultTopP).schema,\n toolChoice: toolChoice.schema,\n safetySettings: safetySettings.schema,\n });\n\nconst ChatModelBaseConfigDef = (\n maxTemperature: number,\n defaultTemperature: number,\n maxOutputTokens: number,\n maxSequences: number,\n defaultTopP: number,\n) =>\n ({\n temperature: temperature(maxTemperature, defaultTemperature).def,\n maxTokens: maxTokens(maxOutputTokens).def,\n stop: stop(maxSequences).def,\n topP: topP(defaultTopP).def,\n toolChoice: toolChoice.def,\n safetySettings: safetySettings.def,\n }) as const;\n\nexport { ChatModelBaseConfigDef, ChatModelBaseConfigSchema };\n","import { z } from \"zod\";\n\nimport {\n frequencyPenalty,\n maxTokens,\n presencePenalty,\n seed,\n stop,\n temperature,\n toolChoice,\n topP,\n topK,\n safetySettings,\n} from \"./common.config.chat-model.google\";\n\nconst ChatModelC1ConfigSchema = (\n maxTemperature: number,\n defaultTemperature: number,\n maxOutputTokens: number,\n maxSequences: number,\n defaultTopP: number,\n defaultTopK: number,\n) =>\n z.object({\n temperature: temperature(maxTemperature, defaultTemperature).schema,\n maxTokens: maxTokens(maxOutputTokens).schema,\n stop: stop(maxSequences).schema,\n topP: topP(defaultTopP).schema,\n topK: topK(defaultTopK).schema,\n frequencyPenalty: frequencyPenalty.schema,\n presencePenalty: presencePenalty.schema,\n seed: seed.schema.transform((value) => (value === 0 ? 
undefined : value)),\n toolChoice: toolChoice.schema,\n safetySettings: safetySettings.schema,\n });\n\nconst ChatModelC1ConfigDef = (\n maxTemperature: number,\n defaultTemperature: number,\n maxOutputTokens: number,\n maxSequences: number,\n defaultTopP: number,\n defaultTopK: number,\n) =>\n ({\n temperature: temperature(maxTemperature, defaultTemperature).def,\n maxTokens: maxTokens(maxOutputTokens).def,\n stop: stop(maxSequences).def,\n topP: topP(defaultTopP).def,\n topK: topK(defaultTopK).def,\n frequencyPenalty: frequencyPenalty.def,\n presencePenalty: presencePenalty.def,\n seed: seed.def,\n toolChoice: toolChoice.def,\n safetySettings: safetySettings.def,\n }) as const;\n\nexport { ChatModelC1ConfigDef, ChatModelC1ConfigSchema };\n","import { z } from \"zod\";\n\nimport {dimensions } from \"./common.config.embedding-model.google\";\n\nconst EmbeddingModelBaseConfigSchema = (maxDimensions: number) =>\n z.object({\n dimensions: dimensions(maxDimensions).schema,\n });\n\nconst EmbeddingModelBaseConfigDef = (maxDimensions: number) =>\n ({\n dimensions: dimensions(maxDimensions).def,\n }) as const;\n\nexport { EmbeddingModelBaseConfigDef, EmbeddingModelBaseConfigSchema };\n","import { EMBEDDING_CONFIG, RangeConfigItem } from \"@adaline/provider\";\n\nconst dimensions = (maxDimensions: number) =>\n RangeConfigItem({\n param: \"outputDimensionality\",\n title: EMBEDDING_CONFIG.DIMENSIONS.title,\n description: EMBEDDING_CONFIG.DIMENSIONS.description,\n min: 1,\n max: maxDimensions,\n step: 1,\n default: maxDimensions,\n });\n\nexport { \n dimensions \n};\n","import {\n ChatModelBaseConfigDef,\n ChatModelBaseConfigSchema,\n ChatModelC1ConfigDef,\n ChatModelC1ConfigSchema,\n} from \"./chat-model\";\nimport { \n EmbeddingModelBaseConfigDef, \n EmbeddingModelBaseConfigSchema,\n} from \"./embedding-model\";\n\nconst GoogleChatModelConfigs = {\n base: ( \n maxTemperature: number,\n defaultTemperature: number,\n maxOutputTokens: number,\n maxSequences: number,\n defaultTopP: number,\n ) => ({\n def: ChatModelBaseConfigDef(maxTemperature, defaultTemperature, maxOutputTokens, maxSequences, defaultTopP),\n schema: ChatModelBaseConfigSchema(maxTemperature, defaultTemperature, maxOutputTokens, maxSequences, defaultTopP),\n }),\n c1: (\n maxTemperature: number,\n defaultTemperature: number,\n maxOutputTokens: number,\n maxSequences: number,\n defaultTopP: number,\n defaultTopK: number,\n ) => ({\n def: ChatModelC1ConfigDef(maxTemperature, defaultTemperature, maxOutputTokens, maxSequences, defaultTopP, defaultTopK),\n schema: ChatModelC1ConfigSchema(maxTemperature, defaultTemperature, maxOutputTokens, maxSequences, defaultTopP, defaultTopK),\n }),\n} as const;\n\nconst GoogleEmbeddingModelConfigs = {\n base: (maxDimensions: number) => ({\n def: EmbeddingModelBaseConfigDef(maxDimensions),\n schema: EmbeddingModelBaseConfigSchema(maxDimensions),\n }),\n} as const;\n\nexport { GoogleChatModelConfigs, GoogleEmbeddingModelConfigs };\n","import { z } from \"zod\";\n\nimport { AssistantRoleLiteral, SystemRoleLiteral, ToolRoleLiteral, UserRoleLiteral } from \"@adaline/types\";\n\nconst GoogleChatModelRoles = z.enum([SystemRoleLiteral, UserRoleLiteral, AssistantRoleLiteral, ToolRoleLiteral]);\n\nconst GoogleChatAssistantRoleLiteral = \"model\";\nconst GoogleChatToolRoleLiteral = \"function\";\n\nconst GoogleChatModelRolesMap = {\n system: UserRoleLiteral,\n user: UserRoleLiteral,\n assistant: GoogleChatAssistantRoleLiteral,\n tool: GoogleChatToolRoleLiteral,\n} as const;\n\nexport { \n 
GoogleChatAssistantRoleLiteral,\n GoogleChatToolRoleLiteral,\n GoogleChatModelRoles, \n GoogleChatModelRolesMap,\n};\n","import { z } from \"zod\";\n\nimport { ChatModelSchemaType } from \"@adaline/provider\";\nimport { ImageModalityLiteral, TextModalityLiteral, ToolCallModalityLiteral, ToolResponseModalityLiteral } from \"@adaline/types\";\n\nconst GoogleChatModelModalities: ChatModelSchemaType[\"modalities\"] = [\n TextModalityLiteral,\n ImageModalityLiteral,\n ToolCallModalityLiteral,\n ToolResponseModalityLiteral,\n];\n\nconst GoogleChatModelModalitiesEnum = z.enum([\n TextModalityLiteral,\n ImageModalityLiteral,\n ToolCallModalityLiteral,\n ToolResponseModalityLiteral,\n]);\n\nconst GoogleChatModelTextModalities: ChatModelSchemaType[\"modalities\"] = [\n TextModalityLiteral,\n];\n\nconst GoogleChatModelTextModalitiesEnum = z.enum([\n TextModalityLiteral,\n]);\n\nconst GoogleChatModelTextVisionModalities: ChatModelSchemaType[\"modalities\"] = [\n TextModalityLiteral,\n ImageModalityLiteral,\n];\n\nconst GoogleChatModelTextVisionModalitiesEnum = z.enum([\n TextModalityLiteral,\n ImageModalityLiteral,\n]);\n\nconst GoogleChatModelTextToolModalities: ChatModelSchemaType[\"modalities\"] = [\n TextModalityLiteral,\n ToolCallModalityLiteral,\n ToolResponseModalityLiteral,\n];\n\nconst GoogleChatModelTextToolModalitiesEnum = z.enum([\n TextModalityLiteral,\n ToolCallModalityLiteral,\n ToolResponseModalityLiteral,\n]);\n\nexport {\n GoogleChatModelModalitiesEnum,\n GoogleChatModelModalities,\n GoogleChatModelTextModalitiesEnum,\n GoogleChatModelTextModalities,\n GoogleChatModelTextToolModalitiesEnum,\n GoogleChatModelTextToolModalities,\n GoogleChatModelTextVisionModalitiesEnum,\n GoogleChatModelTextVisionModalities,\n};\n","import { z } from \"zod\";\n\nconst GoogleCompleteChatTextResponse = z.object({\n text: z.string(),\n});\n\nconst GoogleCompleteChatToolResponse = z.object({\n functionCall: z.object({\n name: z.string(),\n args: z.record(z.any()),\n }),\n});\n\nconst GoogleCompleteChatResponse = z.object({\n candidates: z.array(\n z.object({\n content: z.object({\n role: z.string(),\n parts: z.array(z.union([GoogleCompleteChatTextResponse, GoogleCompleteChatToolResponse])),\n }).optional(),\n finishReason: z.string(),\n index: z.number().optional(),\n safetyRatings: z.optional(\n z.array(\n z.object({\n category: z.string(),\n probability: z.string(),\n blocked: z.boolean().optional(),\n })\n )\n ),\n })\n ),\n promptFeedback: z.optional(\n z.object({\n safetyRatings: z.optional(\n z.array(\n z.object({\n category: z.string(),\n probability: z.string(),\n })\n )\n ),\n })\n ),\n usageMetadata: z.object({\n promptTokenCount: z.number(),\n cachedContentTokenCount: z.number().optional(),\n candidatesTokenCount: z.number().optional(),\n totalTokenCount: z.number(),\n }).optional(),\n});\ntype GoogleCompleteChatResponseType = z.infer<typeof GoogleCompleteChatResponse>;\n\nconst GoogleStreamChatTextResponse = z.object({\n text: z.string(),\n});\n\nconst GoogleStreamChatToolResponse = z.object({\n functionCall: z.object({\n name: z.string(),\n args: z.record(z.any()),\n }),\n});\n\nconst GoogleStreamChatResponse = z.object({\n candidates: z.array(\n z.object({\n content: z\n .object({\n role: z.string(),\n parts: z.array(z.union([GoogleStreamChatTextResponse, GoogleStreamChatToolResponse])),\n })\n .optional(),\n finishReason: z.string().optional(),\n index: z.number().optional(),\n safetyRatings: z.optional(\n z.array(\n z.object({\n category: z.string(),\n probability: z.string(),\n blocked: 
z.boolean().optional(),\n })\n )\n ),\n })\n ),\n promptFeedback: z.optional(\n z.object({\n safetyRatings: z.optional(\n z.array(\n z.object({\n category: z.string(),\n probability: z.string(),\n })\n )\n ),\n })\n ),\n usageMetadata: z.object({\n promptTokenCount: z.number(),\n cachedContentTokenCount: z.number().optional(),\n candidatesTokenCount: z.number(),\n totalTokenCount: z.number(),\n }).optional(),\n});\ntype GoogleStreamChatResponseType = z.infer<typeof GoogleStreamChatResponse>;\n\nexport {\n GoogleCompleteChatResponse,\n GoogleCompleteChatTextResponse,\n GoogleCompleteChatToolResponse,\n GoogleStreamChatResponse,\n GoogleStreamChatTextResponse,\n GoogleStreamChatToolResponse,\n type GoogleStreamChatResponseType,\n type GoogleCompleteChatResponseType,\n};","import { z } from \"zod\";\n\nconst GoogleChatContentPartText = z.object({\n text: z.string().min(1),\n});\ntype GoogleChatContentPartTextType = z.infer<typeof GoogleChatContentPartText>;\n\nconst GoogleChatContentPartInlineData = z.object({\n inline_data: z.object({\n mime_type: z.string().min(1),\n data: z.string().base64(),\n }),\n});\ntype GoogleChatContentPartInlineDataType = z.infer<typeof GoogleChatContentPartInlineData>;\n\nconst GoogleChatContentPartFunctionCall = z.object({\n function_call: z.object({\n name: z.string().min(1),\n args: z.record(z.string().min(1)),\n }),\n});\ntype GoogleChatContentPartFunctionCallType = z.infer<typeof GoogleChatContentPartFunctionCall>;\n\nconst GoogleChatContentPartFunctionResponse = z.object({\n function_response: z.object({\n name: z.string().min(1),\n response: z.record(z.string().min(1)),\n }),\n});\ntype GoogleChatContentPartFunctionResponseType = z.infer<typeof GoogleChatContentPartFunctionResponse>;\n\nconst GoogleChatContent = z.object({\n role: z.enum([\"user\", \"model\", \"function\"]),\n parts: z.array(z.union([\n GoogleChatContentPartText,\n GoogleChatContentPartInlineData,\n GoogleChatContentPartFunctionCall,\n GoogleChatContentPartFunctionResponse,\n ])),\n});\ntype GoogleChatContentType = z.infer<typeof GoogleChatContent>;\n\nconst GoogleChatSystemInstruction = z.object({\n parts: z.array(GoogleChatContentPartText),\n});\ntype GoogleChatSystemInstructionType = z.infer<typeof GoogleChatSystemInstruction>;\n\nconst GoogleChatTool = z.object({\n name: z.string().min(1),\n description: z.string().min(1),\n parameters: z.any(),\n});\ntype GoogleChatToolType = z.infer<typeof GoogleChatTool>;\n\nconst GoogleChatToolConfig = z.object({\n function_calling_config: z.object({\n mode: z.enum([\"ANY\", \"AUTO\", \"NONE\"]),\n allowed_function_names: z.array(z.string()).optional(),\n }),\n});\ntype GoogleChatToolConfigType = z.infer<typeof GoogleChatToolConfig>;\n\nconst GoogleChatGenerationConfig = z.object({\n stopSequences: z.array(z.string()).optional(),\n maxOutputTokens: z.number().optional(),\n temperature: z.number().optional(),\n topP: z.number().optional(),\n topK: z.number().optional(),\n presencePenalty: z.number().optional(),\n frequencyPenalty: z.number().optional(),\n seed: z.number().optional(),\n});\ntype GoogleChatGenerationConfigType = z.infer<typeof GoogleChatGenerationConfig>;\n\nconst GoogleChatRequest = z.object({\n model: z.string().min(1).optional(),\n contents: z.array(GoogleChatContent),\n systemInstruction: GoogleChatSystemInstruction.optional(),\n system_instruction: GoogleChatSystemInstruction.optional(),\n generationConfig: GoogleChatGenerationConfig.optional(),\n generation_config: GoogleChatGenerationConfig.optional(),\n tools: z.object({\n 
function_declarations: z.array(GoogleChatTool),\n }).optional(),\n toolConfig: GoogleChatToolConfig.optional(),\n tool_config: GoogleChatToolConfig.optional(),\n});\ntype GoogleChatRequestType = z.infer<typeof GoogleChatRequest>;\n\nexport {\n GoogleChatContent,\n GoogleChatContentPartFunctionCall,\n GoogleChatContentPartFunctionResponse,\n GoogleChatContentPartInlineData,\n GoogleChatContentPartText,\n GoogleChatGenerationConfig,\n GoogleChatRequest,\n GoogleChatSystemInstruction,\n GoogleChatTool,\n GoogleChatToolConfig,\n type GoogleChatContentPartTextType,\n type GoogleChatContentPartFunctionCallType,\n type GoogleChatContentPartFunctionResponseType,\n type GoogleChatContentPartInlineDataType,\n type GoogleChatToolType,\n type GoogleChatToolConfigType,\n type GoogleChatGenerationConfigType,\n type GoogleChatRequestType,\n type GoogleChatContentType,\n type GoogleChatSystemInstructionType,\n};","import { z } from \"zod\";\n\nimport { \n ChatModelV1, \n ChatModelSchemaType, \n EmbeddingModelV1, \n EmbeddingModelSchemaType, \n ProviderV1, \n ProviderError \n} from \"@adaline/provider\";\n\nimport {\n Gemini1_5Flash001,\n Gemini1_5Flash001Options,\n Gemini1_5Flash001Schema,\n Gemini1_5Flash001Literal,\n Gemini1_5Flash002,\n Gemini1_5Flash002Options,\n Gemini1_5Flash002Schema,\n Gemini1_5Flash002Literal,\n Gemini1_5FlashLatest,\n Gemini1_5FlashLatestOptions,\n Gemini1_5FlashLatestSchema,\n Gemini1_5FlashLatestLiteral,\n Gemini1_5Flash,\n Gemini1_5FlashOptions,\n Gemini1_5FlashSchema,\n Gemini1_5FlashLiteral,\n Gemini1_5Pro001,\n Gemini1_5Pro001Options,\n Gemini1_5Pro001Schema,\n Gemini1_5Pro001Literal,\n Gemini1_5Pro002,\n Gemini1_5Pro002Options,\n Gemini1_5Pro002Schema,\n Gemini1_5Pro002Literal,\n Gemini1_5ProLatest,\n Gemini1_5ProLatestOptions,\n Gemini1_5ProLatestSchema,\n Gemini1_5ProLatestLiteral,\n Gemini1_5Pro,\n Gemini1_5ProOptions,\n Gemini1_5ProSchema,\n Gemini1_5ProLiteral,\n Gemini1_0Pro,\n Gemini1_0ProOptions,\n Gemini1_0ProSchema,\n Gemini1_0ProLiteral,\n Gemini1_0ProVision,\n Gemini1_0ProVisionOptions,\n Gemini1_0ProVisionSchema,\n Gemini1_0ProVisionLiteral,\n Gemini1_0ProLatest,\n Gemini1_0ProLatestOptions,\n Gemini1_0ProLatestSchema,\n Gemini1_0ProLatestLiteral,\n Gemini1_0Pro_001,\n Gemini1_0Pro_001Options,\n Gemini1_0Pro_001Schema,\n Gemini1_0Pro_001Literal,\n} from \"../models\";\n\nimport {\n Text_Embedding_001,\n Text_Embedding_001Options,\n Text_Embedding_001Schema,\n Text_Embedding_001Literal,\n Text_Embedding_004,\n Text_Embedding_004Options,\n Text_Embedding_004Schema,\n Text_Embedding_004Literal\n} from \"../models\";\n\nconst ProviderLiteral = \"google\";\nclass Google<O extends Record<string, any> = Record<string, any>> implements ProviderV1<O> {\n readonly version = \"v1\" as const;\n readonly name = ProviderLiteral;\n static readonly baseUrl: string = \"https://generativelanguage.googleapis.com/v1beta\";\n\n private readonly chatModelFactories: \n Record<string, { \n model: { new (options: any): ChatModelV1 },\n modelOptions: z.ZodType<any>,\n modelSchema: ChatModelSchemaType\n }> = {\n [Gemini1_5Flash001Literal]: {\n model: Gemini1_5Flash001,\n modelOptions: Gemini1_5Flash001Options,\n modelSchema: Gemini1_5Flash001Schema,\n },\n [Gemini1_5Flash002Literal]: {\n model: Gemini1_5Flash002,\n modelOptions: Gemini1_5Flash002Options,\n modelSchema: Gemini1_5Flash002Schema,\n },\n [Gemini1_5FlashLatestLiteral]: {\n model: Gemini1_5FlashLatest,\n modelOptions: Gemini1_5FlashLatestOptions,\n modelSchema: Gemini1_5FlashLatestSchema,\n },\n [Gemini1_5FlashLiteral]: {\n 
model: Gemini1_5Flash,\n modelOptions: Gemini1_5FlashOptions,\n modelSchema: Gemini1_5FlashSchema,\n },\n [Gemini1_5Pro001Literal]: {\n model: Gemini1_5Pro001,\n modelOptions: Gemini1_5Pro001Options,\n modelSchema: Gemini1_5Pro001Schema,\n },\n [Gemini1_5Pro002Literal]: {\n model: Gemini1_5Pro002,\n modelOptions: Gemini1_5Pro002Options,\n modelSchema: Gemini1_5Pro002Schema,\n },\n [Gemini1_5ProLatestLiteral]: {\n model: Gemini1_5ProLatest,\n modelOptions: Gemini1_5ProLatestOptions,\n modelSchema: Gemini1_5ProLatestSchema,\n },\n [Gemini1_5ProLiteral]: {\n model: Gemini1_5Pro,\n modelOptions: Gemini1_5ProOptions,\n modelSchema: Gemini1_5ProSchema,\n },\n [Gemini1_0ProLiteral]: {\n model: Gemini1_0Pro,\n modelOptions: Gemini1_0ProOptions,\n modelSchema: Gemini1_0ProSchema,\n },\n [Gemini1_0ProVisionLiteral]: {\n model: Gemini1_0ProVision,\n modelOptions: Gemini1_0ProVisionOptions,\n modelSchema: Gemini1_0ProVisionSchema,\n },\n [Gemini1_0ProLatestLiteral]: {\n model: Gemini1_0ProLatest,\n modelOptions: Gemini1_0ProLatestOptions,\n modelSchema: Gemini1_0ProLatestSchema,\n },\n [Gemini1_0Pro_001Literal]: {\n model: Gemini1_0Pro_001,\n modelOptions: Gemini1_0Pro_001Options,\n modelSchema: Gemini1_0Pro_001Schema,\n },\n };\n\n private readonly embeddingModelFactories: \n Record<string, { \n model: { new (options: any): EmbeddingModelV1 },\n modelOptions: z.ZodType<any>,\n modelSchema: EmbeddingModelSchemaType\n }> = {\n [Text_Embedding_001Literal]: {\n model: Text_Embedding_001,\n modelOptions: Text_Embedding_001Options,\n modelSchema: Text_Embedding_001Schema,\n },\n [Text_Embedding_004Literal]: {\n model: Text_Embedding_004,\n modelOptions: Text_Embedding_004Options,\n modelSchema: Text_Embedding_004Schema,\n },\n };\n\n chatModelLiterals(): string[] {\n return Object.keys(this.chatModelFactories);\n }\n\n chatModel(name: string, options: O): ChatModelV1 {\n if (!(name in this.chatModelFactories)) {\n throw new ProviderError({\n info: `Google chat model: ${name} not found`,\n cause: new Error(`Google chat model: ${name} not found, available chat models: \n [${this.chatModelLiterals().join(\", \")}]`),\n });\n }\n\n const model = this.chatModelFactories[name].model;\n const modelOptions = {\n ...options,\n };\n const parsedOptions = this.chatModelFactories[name].modelOptions.parse(modelOptions);\n return new model(parsedOptions);\n }\n\n chatModelSchema(name: string): ChatModelSchemaType {\n if (!(name in this.chatModelFactories)) {\n throw new ProviderError({\n info: `Google chat model: ${name} not found`,\n cause: new Error(`Google chat model: ${name} not found, available chat models: \n [${this.chatModelLiterals().join(\", \")}]`),\n });\n }\n return this.chatModelFactories[name].modelSchema;\n }\n\n chatModelSchemas(): Record<string, ChatModelSchemaType> {\n return Object.keys(this.chatModelFactories).reduce((acc, key) => {\n acc[key] = this.chatModelFactories[key].modelSchema;\n return acc;\n }, {} as Record<string, ChatModelSchemaType>);\n }\n\n embeddingModelLiterals(): string[] {\n return Object.keys(this.embeddingModelFactories);\n }\n\n embeddingModel(name: string, options: O): EmbeddingModelV1 {\n if (!(name in this.embeddingModelFactories)) {\n throw new ProviderError({\n info: `Google embedding model: ${name} not found`,\n cause: new Error(`Google embedding model: ${name} not found, available embedding models: \n [${this.embeddingModelLiterals().join(\", \")}]`),\n });\n }\n\n const model = this.embeddingModelFactories[name].model;\n const modelOptions = { \n ...options,\n };\n 
const parsedOptions = this.embeddingModelFactories[name].modelOptions.parse(modelOptions);\n return new model(parsedOptions);\n }\n\n embeddingModelSchema(name: string): EmbeddingModelSchemaType {\n if (!(name in this.embeddingModelFactories)) {\n throw new ProviderError({\n info: `Google embedding model: ${name} not found`,\n cause: new Error(`Google embedding model: ${name} not found, available embedding models: \n [${this.embeddingModelLiterals().join(\", \")}]`),\n });\n }\n return this.embeddingModelFactories[name].modelSchema;\n }\n\n embeddingModelSchemas(): Record<string, EmbeddingModelSchemaType> {\n return Object.keys(this.embeddingModelFactories).reduce((acc, key) => {\n acc[key] = this.embeddingModelFactories[key].modelSchema;\n return acc;\n }, {} as Record<string, EmbeddingModelSchemaType>);\n }\n}\n\nexport { Google, ProviderLiteral };\n","import { z } from \"zod\";\n\nimport {\n HeadersType,\n ParamsType,\n UrlType,\n ChatModelV1,\n ChatModelSchemaType,\n SelectStringConfigItemDefType,\n ModelResponseError,\n InvalidConfigError,\n InvalidMessagesError,\n InvalidToolsError,\n InvalidModelRequestError,\n urlWithoutTrailingSlash,\n removeUndefinedEntries,\n} from \"@adaline/provider\";\n\nimport {\n SystemRoleLiteral,\n UserRoleLiteral,\n AssistantRoleLiteral,\n ToolRoleLiteral,\n TextModalityLiteral,\n ToolCallModalityLiteral,\n ToolResponseModalityLiteral,\n ImageModalityLiteral,\n Base64ImageContentTypeLiteral,\n Base64ImageContentValueType,\n Config,\n Message,\n Tool,\n ConfigType,\n ContentType,\n MessageType,\n ToolType,\n ToolCallContentType,\n ChatUsageType,\n ChatResponseType,\n PartialChatResponseType,\n createPartialTextMessage,\n createPartialToolCallMessage,\n createTextContent,\n createToolCallContent,\n} from \"@adaline/types\";\n\nimport { Google } from \"../../provider/provider.google\";\n\nimport { \n GoogleCompleteChatResponse, \n GoogleCompleteChatResponseType,\n GoogleStreamChatResponse,\n GoogleStreamChatResponseType,\n} from \"./types\";\n\nimport { \n GoogleChatToolType,\n GoogleChatRequest, \n GoogleChatRequestType,\n GoogleChatSystemInstructionType,\n GoogleChatContentType,\n GoogleChatContentPartTextType,\n GoogleChatContentPartInlineDataType,\n GoogleChatContentPartFunctionCallType,\n GoogleChatContentPartFunctionResponseType,\n} from \"./types\";\n\nconst BaseChatModelOptions = z.object({\n apiKey: z.string(),\n baseUrl: z.string().url().optional(),\n completeChatUrl: z.string().url().optional(),\n streamChatUrl: z.string().url().optional(),\n});\ntype BaseChatModelOptionsType = z.infer<typeof BaseChatModelOptions>;\n\nclass BaseChatModel implements ChatModelV1<ChatModelSchemaType> {\n readonly version = \"v1\" as const;\n modelSchema: ChatModelSchemaType;\n\n private readonly apiKey: string;\n private readonly baseUrl: string;\n private readonly streamChatUrl: string;\n private readonly completeChatUrl: string;\n\n constructor(modelSchema: ChatModelSchemaType, options: BaseChatModelOptionsType) {\n const parsedOptions = BaseChatModelOptions.parse(options);\n this.modelSchema = modelSchema;\n this.apiKey = parsedOptions.apiKey;\n this.baseUrl = urlWithoutTrailingSlash(parsedOptions.baseUrl ?? 
Google.baseUrl);\n this.completeChatUrl = urlWithoutTrailingSlash(\n parsedOptions.completeChatUrl || \n `${this.baseUrl}/models/${this.modelSchema.name}:generateContent?key=${this.apiKey}`\n );\n this.streamChatUrl = urlWithoutTrailingSlash(\n parsedOptions.streamChatUrl || \n `${this.baseUrl}/models/${this.modelSchema.name}:streamGenerateContent?key=${this.apiKey}`\n );\n };\n\n getDefaultBaseUrl(): UrlType {\n return this.baseUrl;\n };\n\n getDefaultHeaders(): HeadersType {\n return {\n \"Content-Type\": \"application/json\",\n source: \"adaline.ai\",\n };\n };\n\n getDefaultParams(): ParamsType {\n return {};\n };\n\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n getRetryDelay(responseHeaders: HeadersType): { shouldRetry: boolean, delayMs: number } {\n return { shouldRetry: false, delayMs: 0 };\n };\n\n // TODO: unused method, not tested; should add support for non-text modalities, tools\n getTokenCount(messages: MessageType[]): number {\n return messages.reduce((acc, message) => {\n return acc + message.content.map((content) => (content.modality === \"text\" ? content.value : \"\")).join(\" \").length;\n }, 0);\n };\n\n // TODO: add safetySettings parsing\n transformModelRequest(request: GoogleChatRequestType): { \n modelName: string | undefined, \n config: ConfigType, \n messages: MessageType[], \n tools: ToolType[] | undefined \n } {\n const safeRequest = GoogleChatRequest.safeParse(request);\n if (!safeRequest.success) {\n throw new InvalidModelRequestError({ info: \"Invalid model request\", cause: safeRequest.error });\n }\n\n const parsedRequest = safeRequest.data;\n\n const modelName = parsedRequest.model;\n\n if (parsedRequest.system_instruction && parsedRequest.systemInstruction) {\n throw new InvalidModelRequestError({\n info: `Invalid model request for model : '${this.modelSchema.name}'`,\n cause: new Error(\"'system_instruction' and 'systemInstruction' are not allowed at the same time\"),\n });\n }\n\n if (parsedRequest.generation_config && parsedRequest.generationConfig) {\n throw new InvalidModelRequestError({\n info: `Invalid model request for model : '${this.modelSchema.name}'`,\n cause: new Error(\"'generation_config' and 'generationConfig' are not allowed at the same time\"),\n });\n }\n\n if (parsedRequest.tool_config && parsedRequest.toolConfig) {\n throw new InvalidModelRequestError({\n info: `Invalid model request for model : '${this.modelSchema.name}'`,\n cause: new Error(\"'tool_config' and 'toolConfig' are not allowed at the same time\"),\n });\n }\n\n const systemInstruction = parsedRequest.system_instruction || parsedRequest.systemInstruction;\n const generationConfig = parsedRequest.generation_config || parsedRequest.generationConfig;\n const toolConfig = parsedRequest.tool_config || parsedRequest.toolConfig;\n\n if (toolConfig && (!parsedRequest.tools || parsedRequest.tools.function_declarations.length === 0)) {\n throw new InvalidModelRequestError({\n info: `Invalid model request for model : '${this.modelSchema.name}'`,\n cause: new Error(\"'tools' are required when 'tool_choice' is specified\"),\n });\n }\n\n const _config: ConfigType = {};\n\n if (toolConfig) {\n if (toolConfig.function_calling_config.mode === \"ANY\") {\n if (toolConfig.function_calling_config.allowed_function_names && toolConfig.function_calling_config.allowed_function_names.length === 1) {\n _config.toolChoice = toolConfig.function_calling_config.allowed_function_names[0];\n } else {\n _config.toolChoice = toolConfig.function_calling_config.mode.toLowerCase();\n }\n 
} else {\n _config.toolChoice = toolConfig.function_calling_config.mode.toLowerCase();\n }\n } \n\n _config.seed = generationConfig?.seed;\n _config.maxTokens = generationConfig?.maxOutputTokens;\n _config.temperature = generationConfig?.temperature;\n _config.topP = generationConfig?.topP;\n _config.presencePenalty = generationConfig?.presencePenalty;\n _config.frequencyPenalty = generationConfig?.frequencyPenalty;\n _config.stop = generationConfig?.stopSequences;\n const config = Config().parse(removeUndefinedEntries(_config));\n\n const messages: MessageType[] = [];\n const toolCallMap: { [id: string] : ToolCallContentType } = {};\n\n if (systemInstruction) {\n systemInstruction.parts.forEach((part) => {\n messages.push({ role: SystemRoleLiteral, content: [{ modality: TextModalityLiteral, value: part.text }] });\n });\n }\n\n parsedRequest.contents.forEach((message) => {\n const role = message.role;\n switch (role) {\n case \"user\": {\n const content = message.parts as (GoogleChatContentPartTextType | GoogleChatContentPartInlineDataType)[];\n const _content = content.map((c) => {\n if (\"text\" in c) {\n return { modality: TextModalityLiteral, value: c.text }\n } else {\n return {\n modality: ImageModalityLiteral,\n detail: \"auto\" as \"auto\",\n value: { \n type: Base64ImageContentTypeLiteral, \n base64: c.inline_data.data,\n media_type: c.inline_data.mime_type.split(\"/\")[1] as Base64ImageContentValueType[\"media_type\"],\n }\n }\n }\n });\n messages.push({ role: role, content: _content });\n }\n break;\n\n case \"model\": {\n const content = message.parts as (GoogleChatContentPartTextType | GoogleChatContentPartFunctionCallType)[];\n const _content = content.map((c, index) => {\n if (\"text\" in c) {\n return { modality: TextModalityLiteral, value: c.text }\n } else {\n return {\n modality: ToolCallModalityLiteral,\n id: index.toString(),\n index: index,\n name: c.function_call.name,\n arguments: JSON.stringify(c.function_call.args),\n }\n }\n });\n messages.push({ role: AssistantRoleLiteral, content: _content });\n }\n break;\n\n case \"function\": {\n const content = message.parts as GoogleChatContentPartFunctionResponseType[];\n const _content = content.map((c, index) => {\n return {\n modality: ToolResponseModalityLiteral,\n id: index.toString(),\n index: index,\n name: c.function_response.name,\n data: JSON.stringify(c.function_response.response),\n }\n });\n messages.push({ role: ToolRoleLiteral, content: _content });\n }\n break;\n\n default: {\n throw new InvalidMessagesError({\n info: `Invalid message 'role' for model : ${this.modelSchema.name}`,\n cause: new Error(`role : '${message.role}' is not supported for model : ${this.modelSchema.name}`),\n });\n }\n }\n });\n\n const tools: ToolType[] = [];\n if (parsedRequest.tools) {\n parsedRequest.tools.function_declarations.forEach((tool: GoogleChatToolType) => {\n tools.push({\n type: \"function\",\n definition: {\n schema: {\n name: tool.name,\n description: tool.description,\n parameters: tool.parameters,\n }\n }\n });\n });\n }\n\n return {\n modelName,\n config,\n messages,\n tools: tools.length > 0 ? 
tools : undefined,\n };\n };\n\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n transformConfig(config: ConfigType, messages?: MessageType[], tools?: ToolType[]): ParamsType {\n const _toolChoice = config.toolChoice;\n delete config.toolChoice; // can have a specific tool name that is not in the model schema, validated at transformation \n\n const _parsedConfig = this.modelSchema.config.schema.safeParse(config);\n if (!_parsedConfig.success) {\n throw new InvalidConfigError({ \n info: `Invalid config for model : '${this.modelSchema.name}'`, \n cause: _parsedConfig.error \n });\n }\n\n const parsedConfig = _parsedConfig.data as ConfigType;\n\n Object.keys(parsedConfig).forEach((key) => {\n if (!(key in this.modelSchema.config.def)) {\n throw new InvalidConfigError({\n info: `Invalid config for model : '${this.modelSchema.name}'`, \n cause: new Error(`Invalid config key : '${key}', \n available keys : [${Object.keys(this.modelSchema.config.def).join(\", \")}]`),\n });\n }\n });\n\n const transformedConfig = Object.keys(parsedConfig).reduce(\n (acc, key) => {\n const def = this.modelSchema.config.def[key];\n const paramKey = def.param;\n const paramValue = (parsedConfig as ConfigType)[key];\n\n if (paramKey === \"maxOutputTokens\" && def.type === \"range\" && paramValue === 0) {\n acc[paramKey] = def.max;\n } else {\n acc[paramKey] = paramValue;\n }\n\n return acc;\n },\n {} as ParamsType\n );\n\n const safetySettings = transformedConfig.safetySettings;\n delete transformedConfig.safetySettings;\n\n let toolConfig;\n if (_toolChoice !== undefined) {\n const toolChoice = _toolChoice as string;\n if (!tools || (tools && tools.length === 0)) {\n throw new InvalidConfigError({\n info: `Invalid config for model : '${this.modelSchema.name}'`, \n cause: new Error(\"'tools' are required when 'toolChoice' is specified\"),\n });\n } else if (tools && tools.length > 0) {\n const configToolChoice = this.modelSchema.config.def.toolChoice as SelectStringConfigItemDefType;\n if (!configToolChoice.choices.includes(toolChoice)) {\n // toolChoice not in model schema choices\n if (tools.map((tool) => tool.definition.schema.name).includes(toolChoice)) {\n // toolChoice is a specific tool name\n toolConfig = { \n function_calling_config: {\n mode: \"ANY\",\n allowed_function_names: [toolChoice],\n }\n };\n } else {\n throw new InvalidConfigError({\n info: `Invalid config for model : '${this.modelSchema.name}'`,\n cause: new Error(`toolChoice : '${toolChoice}' is not part of provided 'tools' names or \n one of [${configToolChoice.choices.join(\", \")}]`),\n });\n }\n } else {\n // toolChoice is in model schema choices\n if (toolChoice === \"any\") {\n toolConfig = {\n function_calling_config: {\n mode: \"ANY\",\n allowed_function_names: tools.map((tool) => tool.definition.schema.name),\n }\n };\n } else {\n toolConfig = {\n function_calling_config: {\n mode: toolChoice.toUpperCase(), // Google uses uppercase for toolChoice\n }\n };\n }\n }\n }\n }\n\n // if (\"response_format\" in transformedConfig && transformedConfig.response_format !== undefined) {\n // const responseFormat = transformedConfig.response_format as string;\n // if (responseFormat === \"json_schema\") {\n // if (!(\"response_schema\" in transformedConfig)) {\n // throw new InvalidConfigError({ \n // info: `Invalid config for model : '${this.modelSchema.name}'`, \n // cause: new Error(\"'responseSchema' is required in config when 'responseFormat' is 'json_schema'\") \n // });\n // } else {\n // transformedConfig.response_format = { 
\n // type: \"json_schema\",\n // json_schema: transformedConfig.response_schema,\n // };\n // delete transformedConfig.response_schema;\n // }\n // } else {\n // transformedConfig.response_format = { type: responseFormat };\n // }\n // }\n\n return { \n generation_config: transformedConfig,\n ...(toolConfig ? { tool_config: toolConfig } : {}),\n ...(safetySettings ? { safety_settings: safetySettings } : {}),\n };\n };\n\n transformMessages(messages: MessageType[]): ParamsType {\n if (!messages || (messages && messages.length === 0)) {\n return { messages: [] };\n }\n\n const parsedMessages = messages.map((message) => {\n const parsedMessage = Message().safeParse(message);\n if (!parsedMessage.success) {\n throw new InvalidMessagesError({ info: \"Invalid messages\", cause: parsedMessage.error });\n }\n return parsedMessage.data;\n });\n\n parsedMessages.forEach((message) => {\n message.content.forEach((content) => {\n if (!this.modelSchema.modalities.includes(content.modality)) {\n throw new InvalidMessagesError({\n info: `Invalid message content for model : '${this.modelSchema.name}'`,\n cause: new Error(`model : '${this.modelSchema.name}' does not support modality : '${content.modality}', \n available modalities : [${this.modelSchema.modalities.join(\", \")}]`),\n });\n }\n });\n });\n\n parsedMessages.forEach((message) => {\n if (!Object.keys(this.modelSchema.roles).includes(message.role)) {\n throw new InvalidMessagesError({\n info: `Invalid message content for model : '${this.modelSchema.name}'`,\n cause: new Error(`model : '${this.modelSchema.name}' does not support role : '${message.role}', \n available roles : [${Object.keys(this.modelSchema.roles).join(\", \")}]`),\n });\n }\n });\n\n let systemInstruction: GoogleChatSystemInstructionType = { parts: [] };\n const nonSystemMessages: GoogleChatContentType[] = [];\n\n parsedMessages.forEach((message) => {\n switch (message.role) {\n case SystemRoleLiteral: {\n message.content.forEach((content) => {\n if (content.modality === TextModalityLiteral) {\n systemInstruction.parts.push({ text: content.value });\n } else {\n throw new InvalidMessagesError({\n info: `Invalid message 'role' and 'modality' combination for model : ${this.modelSchema.name}`,\n cause: new Error(`role : '${message.role}' cannot have content with modality : '${content.modality}'`),\n });\n }\n });\n }\n break;\n\n case AssistantRoleLiteral: {\n const assistantContent: GoogleChatContentType['parts'] = [];\n message.content.forEach((content) => {\n if (content.modality === TextModalityLiteral) {\n assistantContent.push({ text: content.value });\n } else if (content.modality === ToolCallModalityLiteral) {\n assistantContent.push({\n function_call: {\n name: content.name,\n args: JSON.parse(content.arguments),\n }\n });\n } else {\n throw new InvalidMessagesError({\n info: `Invalid message 'role' and 'modality' combination for model : ${this.modelSchema.name}`,\n cause: new Error(`role : '${message.role}' cannot have content with modality : '${content.modality}'`),\n });\n }\n });\n nonSystemMessages.push({\n role: this.modelSchema.roles[message.role] as GoogleChatContentType['role'],\n parts: assistantContent,\n });\n }\n break;\n\n case UserRoleLiteral: {\n const userContent: GoogleChatContentType['parts'] = [];\n message.content.forEach((content) => {\n if (content.modality === TextModalityLiteral) {\n userContent.push({ text: content.value });\n } else if (content.modality === ImageModalityLiteral) {\n if (content.value.type === \"base64\") {\n userContent.push({ \n 
inline_data: {\n mime_type: content.value.media_type,\n data: content.value.base64\n }\n });\n } else if (content.value.type === \"url\") {\n // TODO: add logic to fetch image from url, remove this error\n throw new InvalidMessagesError({\n info: `Invalid message 'modality' for model : ${this.modelSchema.name}`,\n cause: new Error(`model: '${this.modelSchema.name}' does not support image content type: '${content.value.type}'`),\n });\n }\n } else {\n throw new InvalidMessagesError({\n info: `Invalid message 'role' and 'modality' combination for model : ${this.modelSchema.name}`,\n cause: new Error(`role : '${message.role}' cannot have content with modality : '${content.modality}'`),\n });\n }\n });\n nonSystemMessages.push({\n role: this.modelSchema.roles[message.role] as GoogleChatContentType['role'],\n parts: userContent,\n });\n }\n break;\n\n case ToolRoleLiteral: {\n const toolResponseContent: GoogleChatContentPartFunctionResponseType[] = [];\n message.content.forEach((content) => {\n if (content.modality === ToolResponseModalityLiteral) {\n toolResponseContent.push({\n function_response: {\n name: content.name,\n response: JSON.parse(content.data),\n }\n });\n } else {\n throw new InvalidMessagesError({\n info: `Invalid message 'role' and 'modality' combination for model : ${this.modelSchema.name}`,\n cause: new Error(`role : '${message.role}' cannot have content with modality : '${content.modality}'`),\n });\n }\n });\n nonSystemMessages.push({\n role: this.modelSchema.roles[message.role] as GoogleChatContentType['role'],\n parts: toolResponseContent,\n });\n }\n break;\n\n default: {\n throw new InvalidMessagesError({\n info: `Invalid message 'role' for model : ${this.modelSchema.name}`,\n cause: new Error(`role : '${message.role}' is not supported, \n available roles : [${Object.keys(this.modelSchema.roles).join(\", \")}]`),\n });\n }\n }\n });\n\n if (nonSystemMessages[0].role !== this.modelSchema.roles[UserRoleLiteral]) {\n throw new InvalidMessagesError({\n info: `Invalid message 'role' for model : ${this.modelSchema.name}`,\n cause: new Error(`model : '${this.modelSchema.name}' requires first message to be from user`),\n });\n }\n\n const getNextExpectedRoles = (role: string): string[] => {\n if (role === this.modelSchema.roles[UserRoleLiteral] || role === this.modelSchema.roles[ToolRoleLiteral]) {\n return [this.modelSchema.roles[AssistantRoleLiteral] as string];\n }\n return [this.modelSchema.roles[UserRoleLiteral] as string, this.modelSchema.roles[ToolRoleLiteral] as string];\n };\n\n for (let i = 1; i < nonSystemMessages.length; i++) {\n if (!getNextExpectedRoles(nonSystemMessages[i - 1].role).includes(nonSystemMessages[i].role)) {\n throw new InvalidMessagesError({\n info: `Invalid message format for model : ${this.modelSchema.name}`,\n cause: new Error(`model : '${this.modelSchema.name}' cannot have message with role : '${nonSystemMessages[i].role}' after message with role : '${nonSystemMessages[i - 1].role}'`),\n });\n }\n }\n\n if (nonSystemMessages[nonSystemMessages.length - 1].role !== this.modelSchema.roles[UserRoleLiteral]) {\n throw new InvalidMessagesError({\n info: `Invalid message format for model : ${this.modelSchema.name}`,\n cause: new Error(`model : '${this.modelSchema.name}' requires last message to be from user`),\n });\n }\n\n return { \n contents: nonSystemMessages,\n ...(systemInstruction.parts.length > 0 ? 
{ system_instruction: systemInstruction } : {}),\n };\n };\n\n transformTools(tools: ToolType[]): ParamsType {\n if (!this.modelSchema.modalities.includes(ToolCallModalityLiteral)) {\n throw new InvalidToolsError({\n info: `Invalid tool 'modality' for model : ${this.modelSchema.name}`,\n cause: new Error(`model : '${this.modelSchema.name}' does not support tool modality : '${ToolCallModalityLiteral}'`),\n });\n }\n\n if (!tools || (tools && tools.length === 0)) {\n return { tools: [] as ToolType[] };\n }\n\n const parsedTools = tools.map((tool) => {\n const parsedTool = Tool().safeParse(tool);\n if (!parsedTool.success) {\n throw new InvalidToolsError({ info: \"Invalid tools\", cause: parsedTool.error });\n }\n return parsedTool.data;\n });\n\n const transformedTools = parsedTools.map((tool) => ({\n function_declarations: [{\n name: tool.definition.schema.name,\n description: tool.definition.schema.description,\n parameters: tool.definition.schema.parameters,\n }],\n }));\n\n return { tools: transformedTools };\n };\n\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n async getCompleteChatUrl(config?: ConfigType, messages?: MessageType[], tools?: ToolType[]): Promise<UrlType> {\n return new Promise((resolve) => {\n resolve(this.completeChatUrl);\n });\n };\n\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n async getCompleteChatHeaders(config?: ConfigType, messages?: MessageType[], tools?: ToolType[]): Promise<HeadersType> {\n return new Promise((resolve) => {\n resolve(this.getDefaultHeaders());\n });\n };\n\n async getCompleteChatData(config: ConfigType, messages: MessageType[], tools?: ToolType[]): Promise<ParamsType> {\n const transformedConfig = this.transformConfig(config, messages, tools);\n const transformedMessages = this.transformMessages(messages);\n if (transformedMessages.messages && (transformedMessages.messages as MessageType[]).length === 0) {\n throw new InvalidMessagesError({\n info: \"Messages are required\",\n cause: new Error(\"Messages are required\"),\n });\n }\n\n const transformedTools = tools ? 
this.transformTools(tools) : {};\n\n return new Promise((resolve) => {\n resolve({\n ...this.getDefaultParams(),\n ...transformedConfig,\n ...transformedMessages,\n ...transformedTools,\n });\n });\n };\n\n transformCompleteChatResponse(response: any): ChatResponseType {\n const safe = GoogleCompleteChatResponse.safeParse(response);\n if (safe.success) {\n if (safe.data.candidates.length === 0) {\n throw new ModelResponseError({ \n info: \"Invalid response from model\", \n cause: new Error(`No choices in response : ${JSON.stringify(safe.data)}`) \n });\n }\n\n const parsedResponse: GoogleCompleteChatResponseType = safe.data;\n const messages: MessageType[] = [];\n let usage: ChatUsageType | undefined;\n const _content = parsedResponse.candidates[0].content;\n if (_content) {\n const content = _content.parts.map((contentItem: any, index: any) => {\n if (\"text\" in contentItem && contentItem.text !== undefined) {\n return createTextContent(contentItem.text);\n } else if (\"functionCall\" in contentItem && contentItem.functionCall !== undefined) {\n return createToolCallContent(\n index, \n `${contentItem.functionCall.name}_${index}`, \n contentItem.functionCall.name,\n JSON.stringify(contentItem.functionCall.args),\n );\n }\n }) as ContentType[];\n\n messages.push({\n role: AssistantRoleLiteral,\n content: content,\n });\n\n if (parsedResponse.usageMetadata) {\n usage = {\n promptTokens: parsedResponse.usageMetadata.promptTokenCount,\n totalTokens: parsedResponse.usageMetadata.totalTokenCount,\n completionTokens: parsedResponse.usageMetadata.candidatesTokenCount || 0,\n };\n }\n\n return { \n messages: messages,\n usage: usage,\n logProbs: undefined\n };\n }\n\n const safetyRatings = parsedResponse.candidates[0].safetyRatings;\n if (safetyRatings && safetyRatings.length > 0) {\n safetyRatings.forEach((rating) => {\n if (rating.blocked) {\n throw new ModelResponseError({ \n info: `Blocked content for category: ${rating.category} with probability: ${rating.probability}`, \n cause: new Error(`Blocked content for category: ${rating.category} with probability: ${rating.probability}`) \n });\n }\n });\n }\n\n const finishReason = parsedResponse.candidates[0].finishReason;\n if (finishReason === \"SAFETY\") {\n throw new ModelResponseError({ \n info: \"Blocked content, model response finished with safety reason\", \n cause: new Error(\"Blocked content, model response finished with safety reason\") \n });\n }\n }\n\n throw new ModelResponseError({ info: \"Invalid response from model\", cause: safe.error });\n } \n\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n async getStreamChatUrl(config?: ConfigType, messages?: MessageType[], tools?: ToolType[]): Promise<UrlType> {\n return new Promise((resolve) => {\n resolve(this.streamChatUrl);\n });\n };\n\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n async getStreamChatHeaders(config?: ConfigType, messages?: MessageType[], tools?: ToolType[]): Promise<HeadersType> {\n return new Promise((resolve) => {\n resolve(this.getDefaultHeaders());\n });\n };\n\n async getStreamChatData(config: ConfigType, messages: MessageType[], tools?: ToolType[]): Promise<ParamsType> {\n const transformedConfig = this.transformConfig(config, messages, tools);\n const transformedMessages = this.transformMessages(messages);\n if (transformedMessages.messages && (transformedMessages.messages as MessageType[]).length === 0) {\n throw new InvalidMessagesError({\n info: \"Messages are required\",\n cause: new Error(\"Messages are required\"),\n 
});\n }\n\n const transformedTools = tools ? this.transformTools(tools) : {};\n\n return new Promise((resolve) => {\n resolve({\n ...this.getDefaultParams(),\n ...transformedConfig,\n ...transformedMessages,\n ...transformedTools,\n });\n });\n };\n\n async *transformStreamChatResponseChunk(\n chunk: string, \n buffer: string\n ): AsyncGenerator<{ partialResponse: PartialChatResponseType; buffer: string }> {\n // merge last buffer message and split into lines\n const lines = (buffer + chunk).split(\",\\r\").filter((line) => line.trim() !== \"\");\n for (const line of lines) {\n let completeLine = line;\n // remove all '\\n' from string JSON\n completeLine = completeLine.replace(/\\n/g, \"\");\n if (completeLine.startsWith(\"[\")) {\n // start of stream, remove '['\n completeLine = completeLine.slice(1);\n } else if (completeLine.endsWith(\"]\")) {\n if (completeLine === \"]\") {\n // strict end of stream\n return;\n } else {\n // remaining message and then end of stream, remove ']'\n completeLine = completeLine.slice(0, -1);\n }\n }\n\n let structuredLine: any;\n try {\n structuredLine = JSON.parse(completeLine);\n } catch (error) {\n // malformed JSON error, must be the end of loop\n if (error instanceof SyntaxError) {\n buffer = completeLine;\n continue;\n } else {\n // non JSON parse error, re-raise\n throw error;\n }\n }\n\n // reset buffer\n buffer = \"\";\n const safe = GoogleStreamChatResponse.safeParse(structuredLine);\n if (safe.success) {\n const partialResponse: PartialChatResponseType = { partialMessages: [] };\n const parsedResponse: GoogleStreamChatResponseType = safe.data;\n if (parsedResponse.candidates.length > 0) {\n const message = parsedResponse.candidates[0].content;\n if (message && \"parts\" in message && message.parts.length > 0) {\n message.parts.forEach((messagePart, index) => {\n if (\"text\" in messagePart && messagePart.text !== undefined) {\n partialResponse.partialMessages.push(createPartialTextMessage(AssistantRoleLiteral, messagePart.text));\n }\n \n if (\"functionCall\" in messagePart && messagePart.functionCall !== undefined) {\n const toolCall = messagePart.functionCall;\n partialResponse.partialMessages.push(createPartialToolCallMessage(\n AssistantRoleLiteral,\n index,\n `${toolCall.name}_${index}`,\n toolCall.name,\n JSON.stringify(toolCall.args)\n ));\n }\n });\n }\n }\n\n if (parsedResponse.usageMetadata) {\n partialResponse.usage = {\n promptTokens: parsedResponse.usageMetadata.promptTokenCount,\n completionTokens: parsedResponse.usageMetadata.candidatesTokenCount,\n totalTokens: parsedResponse.usageMetadata.totalTokenCount,\n };\n }\n\n yield { partialResponse: partialResponse, buffer: buffer };\n } else {\n throw new ModelResponseError({ info: \"Invalid response from model\", cause: safe.error });\n }\n }\n\n yield { partialResponse: { partialMessages: [] }, buffer: buffer };\n };\n}\n\nexport { \n BaseChatModel, \n BaseChatModelOptions, \n type BaseChatModelOptionsType \n};\n","import { BaseChatModel } from \"./base-chat-model.google\";\n\nimport {\n GoogleChatContentType,\n GoogleChatSystemInstructionType,\n} from \"./types\";\n\nimport {\n ParamsType,\n} from \"@adaline/provider\";\n\nimport {\n MessageType,\n UserRoleLiteral,\n} from \"@adaline/types\";\n\nclass BaseChatModelGemini1 extends BaseChatModel {\n transformMessages(messages: MessageType[]): ParamsType {\n const transformedMessages = super.transformMessages(messages) as {\n contents: GoogleChatContentType[],\n systemInstruction?: GoogleChatSystemInstructionType\n };\n\n if 
(transformedMessages.systemInstruction) {\n const systemUserMessage = {\n role: this.modelSchema.roles[UserRoleLiteral] as GoogleChatContentType['role'],\n parts: transformedMessages.systemInstruction.parts,\n };\n transformedMessages.contents.unshift(systemUserMessage);\n delete transformedMessages.systemInstruction;\n }\n\n return transformedMessages;\n }\n}\n\nexport { BaseChatModelGemini1 };\n","import { z } from \"zod\";\n\nimport { MessageType, UserRoleLiteral } from \"@adaline/types\";\nimport { ChatModelSchema, ParamsType } from \"@adaline/provider\";\n\nimport { GoogleChatModelConfigs } from \"../../configs\";\nimport { BaseChatModelOptions } from \"./base-chat-model.google\";\nimport { BaseChatModelGemini1 } from \"./base-gemini-1-chat-model.google\";\n\nimport {\n GoogleChatModelTextToolModalities,\n GoogleChatModelTextToolModalitiesEnum,\n GoogleChatModelRoles,\n GoogleChatModelRolesMap,\n} from \"./types\";\n\nconst Gemini1_0ProLiteral = \"gemini-1.0-pro\";\nconst Gemini1_0ProDescription =\n \"Google's predecessor to Gemini 1.5 Pro, a model for scaling across a wide range of tasks \\\n Optimized for natural language tasks, multi-turn text and code chat, and code generation\";\n\nconst Gemini1_0ProSchema = ChatModelSchema(GoogleChatModelRoles, GoogleChatModelTextToolModalitiesEnum).parse({\n name: Gemini1_0ProLiteral,\n description: Gemini1_0ProDescription,\n maxInputTokens: 30720,\n maxOutputTokens: 2048,\n roles: GoogleChatModelRolesMap,\n modalities: GoogleChatModelTextToolModalities,\n config: {\n def: GoogleChatModelConfigs.base(1.0, 0.9, 2048, 4, 1.0).def,\n schema: GoogleChatModelConfigs.base(1.0, 0.9, 2048, 4, 1.0).schema,\n },\n});\n\nconst Gemini1_0ProOptions = BaseChatModelOptions;\ntype Gemini1_0ProOptionsType = z.infer<typeof Gemini1_0ProOptions>;\n\nclass Gemini1_0Pro extends BaseChatModelGemini1 {\n constructor(options: Gemini1_0ProOptionsType) {\n super(Gemini1_0ProSchema, options);\n }\n}\n\nexport { \n Gemini1_0Pro, \n Gemini1_0ProOptions, \n Gemini1_0ProSchema, \n Gemini1_0ProLiteral, \n type Gemini1_0ProOptionsType \n};\n","import { z } from \"zod\";\n\nimport { MessageType, UserRoleLiteral } from \"@adaline/types\";\nimport { ChatModelSchema, ParamsType } from \"@adaline/provider\";\n\nimport { GoogleChatModelConfigs } from \"../../configs\";\nimport { BaseChatModelOptions } from \"./base-chat-model.google\";\nimport { BaseChatModelGemini1 } from \"./base-gemini-1-chat-model.google\";\n\nimport {\n GoogleChatModelTextToolModalities,\n GoogleChatModelTextToolModalitiesEnum,\n GoogleChatModelRoles,\n GoogleChatModelRolesMap,\n} from \"./types\";\n\nconst Gemini1_0Pro_001Literal = \"gemini-1.0-pro-001\";\nconst Gemini1_0Pro_001Description =\n \"Google's predecessor to Gemini 1.5 Pro, a model for scaling across a wide range of tasks \\\n Optimized for natural language tasks, multi-turn text and code chat, and code generation\";\n\nconst Gemini1_0Pro_001Schema = ChatModelSchema(GoogleChatModelRoles, GoogleChatModelTextToolModalitiesEnum).parse({\n name: Gemini1_0Pro_001Literal,\n description: Gemini1_0Pro_001Description,\n maxInputTokens: 30720,\n maxOutputTokens: 2048,\n roles: GoogleChatModelRolesMap,\n modalities: GoogleChatModelTextToolModalities,\n config: {\n def: GoogleChatModelConfigs.base(1.0, 0.9, 2048, 4, 1.0).def,\n schema: GoogleChatModelConfigs.base(1.0, 0.9, 2048, 4, 1.0).schema,\n },\n});\n\nconst Gemini1_0Pro_001Options = BaseChatModelOptions;\ntype Gemini1_0Pro_001OptionsType = z.infer<typeof Gemini1_0Pro_001Options>;\n\nclass Gemini1_0Pro_001 
extends BaseChatModelGemini1 {\n constructor(options: Gemini1_0Pro_001OptionsType) {\n super(Gemini1_0Pro_001Schema, options);\n }\n}\n\nexport { \n Gemini1_0Pro_001, \n Gemini1_0Pro_001Options, \n Gemini1_0Pro_001Schema, \n Gemini1_0Pro_001Literal, \n type Gemini1_0Pro_001OptionsType \n};\n","import { z } from \"zod\";\n\nimport { MessageType, UserRoleLiteral } from \"@adaline/types\";\nimport { ChatModelSchema, ParamsType } from \"@adaline/provider\";\n\nimport { GoogleChatModelConfigs } from \"../../configs\";\nimport { BaseChatModelOptions } from \"./base-chat-model.google\";\nimport { BaseChatModelGemini1 } from \"./base-gemini-1-chat-model.google\";\n\nimport {\n GoogleChatModelTextToolModalities,\n GoogleChatModelTextToolModalitiesEnum,\n GoogleChatModelRoles,\n GoogleChatModelRolesMap,\n} from \"./types\";\n\nconst Gemini1_0ProLatestLiteral = \"gemini-1.0-pro-latest\";\nconst Gemini1_0ProLatestDescription =\n \"Google's latest multimodal model with great performance for high-frequency tasks. \\\n Optimized for natural language tasks, multi-turn text and code chat, and code generation\";\n\nconst Gemini1_0ProLatestSchema = ChatModelSchema(GoogleChatModelRoles, GoogleChatModelTextToolModalitiesEnum).parse({\n name: Gemini1_0ProLatestLiteral,\n description: Gemini1_0ProLatestDescription,\n maxInputTokens: 30720,\n maxOutputTokens: 2048,\n roles: GoogleChatModelRolesMap,\n modalities: GoogleChatModelTextToolModalities,\n config: {\n def: GoogleChatModelConfigs.base(1.0, 0.9, 2048, 4, 1.0).def,\n schema: GoogleChatModelConfigs.base(1.0, 0.9, 2048, 4, 1.0).schema,\n },\n});\n\nconst Gemini1_0ProLatestOptions = BaseChatModelOptions;\ntype Gemini1_0ProLatestOptionsType = z.infer<typeof Gemini1_0ProLatestOptions>;\n\nclass Gemini1_0ProLatest extends BaseChatModelGemini1 {\n constructor(options: Gemini1_0ProLatestOptionsType) {\n super(Gemini1_0ProLatestSchema, options);\n }\n}\n\nexport { \n Gemini1_0ProLatest, \n Gemini1_0ProLatestOptions, \n Gemini1_0ProLatestSchema, \n Gemini1_0ProLatestLiteral, \n type Gemini1_0ProLatestOptionsType \n};\n","import { z } from \"zod\";\n\nimport { ChatModelSchema } from \"@adaline/provider\";\n\nimport { GoogleChatModelConfigs } from \"../../configs\";\nimport { BaseChatModelOptions } from \"./base-chat-model.google\";\nimport { BaseChatModelGemini1 } from \"./base-gemini-1-chat-model.google\";\n\nimport {\n GoogleChatModelTextVisionModalities,\n GoogleChatModelTextVisionModalitiesEnum,\n GoogleChatModelRoles,\n GoogleChatModelRolesMap,\n} from \"./types\";\n\nconst Gemini1_0ProVisionLiteral = \"gemini-1.0-pro-vision\";\nconst Gemini1_0ProVisionDescription =\n \"Google's predecessor to Gemini 1.5 Pro, an image understanding model to handle a broad range of applications\";\n\nconst Gemini1_0ProVisionSchema = ChatModelSchema(GoogleChatModelRoles, GoogleChatModelTextVisionModalitiesEnum).parse({\n name: Gemini1_0ProVisionLiteral,\n description: Gemini1_0ProVisionDescription,\n maxInputTokens: 12288,\n maxOutputTokens: 4096,\n roles: GoogleChatModelRolesMap,\n modalities: GoogleChatModelTextVisionModalities,\n config: {\n def: GoogleChatModelConfigs.c1(1.0, 0.4, 4096, 4, 1.0, 32).def,\n schema: GoogleChatModelConfigs.c1(1.0, 0.4, 4096, 4, 1.0, 32).schema,\n },\n});\n\nconst Gemini1_0ProVisionOptions = BaseChatModelOptions;\ntype Gemini1_0ProVisionOptionsType = z.infer<typeof Gemini1_0ProVisionOptions>;\n\nclass Gemini1_0ProVision extends BaseChatModelGemini1 {\n constructor(options: Gemini1_0ProVisionOptionsType) {\n super(Gemini1_0ProVisionSchema, 
options);\n }\n}\n\nexport { \n Gemini1_0ProVision, \n Gemini1_0ProVisionOptions, \n Gemini1_0ProVisionSchema, \n Gemini1_0ProVisionLiteral, \n type Gemini1_0ProVisionOptionsType \n};\n''","import { z } from \"zod\";\n\nimport { ChatModelSchema } from \"@adaline/provider\";\n\nimport { GoogleChatModelConfigs } from \"../../configs\";\nimport { BaseChatModel, BaseChatModelOptions } from \"./base-chat-model.google\";\nimport {\n GoogleChatModelModalities,\n GoogleChatModelModalitiesEnum,\n GoogleChatModelRoles,\n GoogleChatModelRolesMap,\n} from \"./types\";\n\nconst Gemini1_5FlashLiteral = \"gemini-1.5-flash\";\nconst Gemini1_5FlashDescription =\n \"Google's fastest, most cost-efficient multimodal model with great performance for high-frequency tasks. \\\n Optimized for fast and versatile performance across a diverse variety of tasks\";\n\nconst Gemini1_5FlashSchema = ChatModelSchema(GoogleChatModelRoles, GoogleChatModelModalitiesEnum).parse({\n name: Gemini1_5FlashLiteral,\n description: Gemini1_5FlashDescription,\n maxInputTokens: 1000000,\n maxOutputTokens: 8192,\n roles: GoogleChatModelRolesMap,\n modalities: GoogleChatModelModalities,\n config: {\n def: GoogleChatModelConfigs.c1(2.0, 1.0, 8192, 4, 0.95, 64).def,\n schema: GoogleChatModelConfigs.c1(2.0, 1.0, 8192, 4, 0.95, 64).schema,\n },\n});\n\nconst Gemini1_5FlashOptions = BaseChatModelOptions;\ntype Gemini1_5FlashOptionsType = z.infer<typeof Gemini1_5FlashOptions>;\n\nclass Gemini1_5Flash extends BaseChatModel {\n constructor(options: Gemini1_5FlashOptionsType) {\n super(Gemini1_5FlashSchema, options);\n }\n}\n\nexport { \n Gemini1_5Flash, \n Gemini1_5FlashOptions, \n Gemini1_5FlashSchema, \n Gemini1_5FlashLiteral, \n type Gemini1_5FlashOptionsType \n};\n","import { z } from \"zod\";\n\nimport { ChatModelSchema } from \"@adaline/provider\";\n\nimport { GoogleChatModelConfigs } from \"../../configs\";\nimport { BaseChatModel, BaseChatModelOptions } from \"./base-chat-model.google\";\nimport {\n GoogleChatModelModalities,\n GoogleChatModelModalitiesEnum,\n GoogleChatModelRoles,\n GoogleChatModelRolesMap,\n} from \"./types\";\n\nconst Gemini1_5Flash001Literal = \"gemini-1.5-flash-001\";\nconst Gemini1_5Flash001Description =\n \"Google's fastest, most cost-efficient multimodal model with great performance for high-frequency tasks. 
\\\n Optimized for fast and versatile performance across a diverse variety of tasks\";\n\nconst Gemini1_5Flash001Schema = ChatModelSchema(GoogleChatModelRoles, GoogleChatModelModalitiesEnum).parse({\n name: Gemini1_5Flash001Literal,\n description: Gemini1_5Flash001Description,\n maxInputTokens: 1000000,\n maxOutputTokens: 8192,\n roles: GoogleChatModelRolesMap,\n modalities: GoogleChatModelModalities,\n config: {\n def: GoogleChatModelConfigs.c1(2.0, 1.0, 8192, 4, 0.95, 64).def,\n schema: GoogleChatModelConfigs.c1(2.0, 1.0, 8192, 4, 0.95, 64).schema,\n },\n});\n\nconst Gemini1_5Flash001Options = BaseChatModelOptions;\ntype Gemini1_5Flash001OptionsType = z.infer<typeof Gemini1_5Flash001Options>;\n\nclass Gemini1_5Flash001 extends BaseChatModel {\n constructor(options: Gemini1_5Flash001OptionsType) {\n super(Gemini1_5Flash001Schema, options);\n }\n}\n\nexport { \n Gemini1_5Flash001, \n Gemini1_5Flash001Options, \n Gemini1_5Flash001Schema, \n Gemini1_5Flash001Literal, \n type Gemini1_5Flash001OptionsType \n};\n","import { z } from \"zod\";\n\nimport { ChatModelSchema } from \"@adaline/provider\";\n\nimport { GoogleChatModelConfigs } from \"../../configs\";\nimport { BaseChatModel, BaseChatModelOptions } from \"./base-chat-model.google\";\nimport {\n GoogleChatModelModalities,\n GoogleChatModelModalitiesEnum,\n GoogleChatModelRoles,\n GoogleChatModelRolesMap,\n} from \"./types\";\n\nconst Gemini1_5Flash002Literal = \"gemini-1.5-flash-002\";\nconst Gemini1_5Flash002Description =\n \"Google's fastest, most cost-efficient multimodal model with great performance for high-frequency tasks. \\\n Optimized for fast and versatile performance across a diverse variety of tasks\";\n\nconst Gemini1_5Flash002Schema = ChatModelSchema(GoogleChatModelRoles, GoogleChatModelModalitiesEnum).parse({\n name: Gemini1_5Flash002Literal,\n description: Gemini1_5Flash002Description,\n maxInputTokens: 1000000,\n maxOutputTokens: 8192,\n roles: GoogleChatModelRolesMap,\n modalities: GoogleChatModelModalities,\n config: {\n def: GoogleChatModelConfigs.c1(2.0, 1.0, 8192, 4, 0.95, 40).def,\n schema: GoogleChatModelConfigs.c1(2.0, 1.0, 8192, 4, 0.95, 40).schema,\n },\n});\n\nconst Gemini1_5Flash002Options = BaseChatModelOptions;\ntype Gemini1_5Flash002OptionsType = z.infer<typeof Gemini1_5Flash002Options>;\n\nclass Gemini1_5Flash002 extends BaseChatModel {\n constructor(options: Gemini1_5Flash002OptionsType) {\n super(Gemini1_5Flash002Schema, options);\n }\n}\n\nexport { \n Gemini1_5Flash002, \n Gemini1_5Flash002Options, \n Gemini1_5Flash002Schema, \n Gemini1_5Flash002Literal, \n type Gemini1_5Flash002OptionsType \n};\n","import { z } from \"zod\";\n\nimport { ChatModelSchema } from \"@adaline/provider\";\n\nimport { GoogleChatModelConfigs } from \"../../configs\";\nimport { BaseChatModel, BaseChatModelOptions } from \"./base-chat-model.google\";\nimport {\n GoogleChatModelModalities,\n GoogleChatModelModalitiesEnum,\n GoogleChatModelRoles,\n GoogleChatModelRolesMap,\n} from \"./types\";\n\nconst Gemini1_5FlashLatestLiteral = \"gemini-1.5-flash-latest\";\nconst Gemini1_5FlashLatestDescription =\n \"Google's latest multimodal model with great performance for high-frequency tasks. 
\\\n Optimized for fast and versatile performance across a diverse variety of tasks\";\n\nconst Gemini1_5FlashLatestSchema = ChatModelSchema(GoogleChatModelRoles, GoogleChatModelModalitiesEnum).parse({\n name: Gemini1_5FlashLatestLiteral,\n description: Gemini1_5FlashLatestDescription,\n maxInputTokens: 1000000,\n maxOutputTokens: 8192,\n roles: GoogleChatModelRolesMap,\n modalities: GoogleChatModelModalities,\n config: {\n def: GoogleChatModelConfigs.c1(2.0, 1.0, 8192, 4, 0.95, 64).def,\n schema: GoogleChatModelConfigs.c1(2.0, 1.0, 8192, 4, 0.95, 64).schema,\n },\n});\n\nconst Gemini1_5FlashLatestOptions = BaseChatModelOptions;\ntype Gemini1_5FlashLatestOptionsType = z.infer<typeof Gemini1_5FlashLatestOptions>;\n\nclass Gemini1_5FlashLatest extends BaseChatModel {\n constructor(options: Gemini1_5FlashLatestOptionsType) {\n super(Gemini1_5FlashLatestSchema, options);\n }\n}\n\nexport { \n Gemini1_5FlashLatest, \n Gemini1_5FlashLatestOptions, \n Gemini1_5FlashLatestSchema, \n Gemini1_5FlashLatestLiteral, \n type Gemini1_5FlashLatestOptionsType \n};\n","import { z } from \"zod\";\n\nimport { ChatModelSchema } from \"@adaline/provider\";\n\nimport { GoogleChatModelConfigs } from \"../../configs\";\nimport { BaseChatModel, BaseChatModelOptions } from \"./base-chat-model.google\";\nimport {\n GoogleChatModelModalities,\n GoogleChatModelModalitiesEnum,\n GoogleChatModelRoles,\n GoogleChatModelRolesMap,\n} from \"./types\";\n\nconst Gemini1_5ProLiteral = \"gemini-1.5-pro\";\nconst Gemini1_5ProDescription =\n \"Google's best performing multimodal model with features for a wide variety of reasoning tasks. \\\n Optimized for complex reasoning tasks requiring more intelligence\";\n\nconst Gemini1_5ProSchema = ChatModelSchema(GoogleChatModelRoles, GoogleChatModelModalitiesEnum).parse({\n name: Gemini1_5ProLiteral,\n description: Gemini1_5ProDescription,\n maxInputTokens: 2000000,\n maxOutputTokens: 8192,\n roles: GoogleChatModelRolesMap,\n modalities: GoogleChatModelModalities,\n config: {\n def: GoogleChatModelConfigs.c1(2.0, 1.0, 8192, 4, 0.95, 64).def,\n schema: GoogleChatModelConfigs.c1(2.0, 1.0, 8192, 4, 0.95, 64).schema,\n },\n});\n\nconst Gemini1_5ProOptions = BaseChatModelOptions;\ntype Gemini1_5ProOptionsType = z.infer<typeof Gemini1_5ProOptions>;\n\nclass Gemini1_5Pro extends BaseChatModel {\n constructor(options: Gemini1_5ProOptionsType) {\n super(Gemini1_5ProSchema, options);\n }\n}\n\nexport { \n Gemini1_5Pro, \n Gemini1_5ProOptions, \n Gemini1_5ProSchema, \n Gemini1_5ProLiteral, \n type Gemini1_5ProOptionsType \n};\n","import { z } from \"zod\";\n\nimport { ChatModelSchema } from \"@adaline/provider\";\n\nimport { GoogleChatModelConfigs } from \"../../configs\";\nimport { BaseChatModel, BaseChatModelOptions } from \"./base-chat-model.google\";\nimport {\n GoogleChatModelModalities,\n GoogleChatModelModalitiesEnum,\n GoogleChatModelRoles,\n GoogleChatModelRolesMap,\n} from \"./types\";\n\nconst Gemini1_5Pro001Literal = \"gemini-1.5-pro-001\";\nconst Gemini1_5Pro001Description =\n \"Google's best performing multimodal model with features for a wide variety of reasoning tasks. 
\\\n Optimized for complex reasoning tasks requiring more intelligence\";\n\nconst Gemini1_5Pro001Schema = ChatModelSchema(GoogleChatModelRoles, GoogleChatModelModalitiesEnum).parse({\n name: Gemini1_5Pro001Literal,\n description: Gemini1_5Pro001Description,\n maxInputTokens: 2000000,\n maxOutputTokens: 8192,\n roles: GoogleChatModelRolesMap,\n modalities: GoogleChatModelModalities,\n config: {\n def: GoogleChatModelConfigs.c1(2.0, 1.0, 8192, 4, 0.95, 64).def,\n schema: GoogleChatModelConfigs.c1(2.0, 1.0, 8192, 4, 0.95, 64).schema,\n },\n});\n\nconst Gemini1_5Pro001Options = BaseChatModelOptions;\ntype Gemini1_5Pro001OptionsType = z.infer<typeof Gemini1_5Pro001Options>;\n\nclass Gemini1_5Pro001 extends BaseChatModel {\n constructor(options: Gemini1_5Pro001OptionsType) {\n super(Gemini1_5Pro001Schema, options);\n }\n}\n\nexport { \n Gemini1_5Pro001, \n Gemini1_5Pro001Options, \n Gemini1_5Pro001Schema, \n Gemini1_5Pro001Literal, \n type Gemini1_5Pro001OptionsType \n};\n","import { z } from \"zod\";\n\nimport { ChatModelSchema } from \"@adaline/provider\";\n\nimport { GoogleChatModelConfigs } from \"../../configs\";\nimport { BaseChatModel, BaseChatModelOptions } from \"./base-chat-model.google\";\nimport {\n GoogleChatModelModalities,\n GoogleChatModelModalitiesEnum,\n GoogleChatModelRoles,\n GoogleChatModelRolesMap,\n} from \"./types\";\n\nconst Gemini1_5Pro002Literal = \"gemini-1.5-pro-002\";\nconst Gemini1_5Pro002Description =\n \"Google's best performing multimodal model with features for a wide variety of reasoning tasks. \\\n Optimized for complex reasoning tasks requiring more intelligence\";\n\nconst Gemini1_5Pro002Schema = ChatModelSchema(GoogleChatModelRoles, GoogleChatModelModalitiesEnum).parse({\n name: Gemini1_5Pro002Literal,\n description: Gemini1_5Pro002Description,\n maxInputTokens: 2000000,\n maxOutputTokens: 8192,\n roles: GoogleChatModelRolesMap,\n modalities: GoogleChatModelModalities,\n config: {\n def: GoogleChatModelConfigs.c1(2.0, 1.0, 8192, 4, 0.95, 40).def,\n schema: GoogleChatModelConfigs.c1(2.0, 1.0, 8192, 4, 0.95, 40).schema,\n },\n});\n\nconst Gemini1_5Pro002Options = BaseChatModelOptions;\ntype Gemini1_5Pro002OptionsType = z.infer<typeof Gemini1_5Pro002Options>;\n\nclass Gemini1_5Pro002 extends BaseChatModel {\n constructor(options: Gemini1_5Pro002OptionsType) {\n super(Gemini1_5Pro002Schema, options);\n }\n}\n\nexport { \n Gemini1_5Pro002, \n Gemini1_5Pro002Options, \n Gemini1_5Pro002Schema, \n Gemini1_5Pro002Literal, \n type Gemini1_5Pro002OptionsType \n};\n","import { z } from \"zod\";\n\nimport { ChatModelSchema } from \"@adaline/provider\";\n\nimport { GoogleChatModelConfigs } from \"../../configs\";\nimport { BaseChatModel, BaseChatModelOptions } from \"./base-chat-model.google\";\nimport {\n GoogleChatModelModalities,\n GoogleChatModelModalitiesEnum,\n GoogleChatModelRoles,\n GoogleChatModelRolesMap,\n} from \"./types\";\n\nconst Gemini1_5ProLatestLiteral = \"gemini-1.5-pro-latest\";\nconst Gemini1_5ProLatestDescription =\n \"Google's best performing multimodal model with features for a wide variety of reasoning tasks. 
\\\n Optimized for complex reasoning tasks requiring more intelligence\";\n\nconst Gemini1_5ProLatestSchema = ChatModelSchema(GoogleChatModelRoles, GoogleChatModelModalitiesEnum).parse({\n name: Gemini1_5ProLatestLiteral,\n description: Gemini1_5ProLatestDescription,\n maxInputTokens: 2000000,\n maxOutputTokens: 8192,\n roles: GoogleChatModelRolesMap,\n modalities: GoogleChatModelModalities,\n config: {\n def: GoogleChatModelConfigs.c1(2.0, 1.0, 8192, 4, 0.95, 64).def,\n schema: GoogleChatModelConfigs.c1(2.0, 1.0, 8192, 4, 0.95, 64).schema,\n },\n});\n\nconst Gemini1_5ProLatestOptions = BaseChatModelOptions;\ntype Gemini1_5ProLatestOptionsType = z.infer<typeof Gemini1_5ProLatestOptions>;\n\nclass Gemini1_5ProLatest extends BaseChatModel {\n constructor(options: Gemini1_5ProLatestOptionsType) {\n super(Gemini1_5ProLatestSchema, options);\n }\n}\n\nexport { \n Gemini1_5ProLatest, \n Gemini1_5ProLatestOptions, \n Gemini1_5ProLatestSchema, \n Gemini1_5ProLatestLiteral, \n type Gemini1_5ProLatestOptionsType \n};\n","import { z } from \"zod\";\n\nimport { EmbeddingModelSchemaType } from \"@adaline/provider\";\nimport { EmbeddingTextModalityLiteral } from \"@adaline/types\";\n\nconst GoogleEmbeddingModelModalities: EmbeddingModelSchemaType[\"modalities\"] = [\n EmbeddingTextModalityLiteral,\n];\n\nconst GoogleEmbeddingModelModalitiesEnum = z.enum([\n EmbeddingTextModalityLiteral,\n]);\n\nexport { GoogleEmbeddingModelModalitiesEnum, GoogleEmbeddingModelModalities };\n","import { z } from \"zod\";\n\nconst GoogleGetEmbeddingsResponse = z.object({\n embeddings: z.array(z.object({\n values: z.array(z.number()),\n })),\n});\ntype GoogleGetEmbeddingsResponseType = z.infer<typeof GoogleGetEmbeddingsResponse>;\n\nexport { GoogleGetEmbeddingsResponse, type GoogleGetEmbeddingsResponseType };","import { z } from \"zod\";\n\nconst GoogleEmbeddingRequestInput = z.object({\n model: z.string().min(1),\n content: z.object({ \n parts: z.array(z.object({ \n text: z.string().min(1) \n })).min(1),\n })\n});\ntype GoogleEmbeddingRequestInputType = z.infer<typeof GoogleEmbeddingRequestInput>;\n\nconst GoogleEmbeddingRequest = z.object({\n model: z.string().min(1).optional(),\n requests: z.array(GoogleEmbeddingRequestInput).min(1),\n outputDimensionality: z.number().int().min(1).optional(),\n});\ntype GoogleEmbeddingRequestType = z.infer<typeof GoogleEmbeddingRequest>;\n\nexport {\n GoogleEmbeddingRequest,\n GoogleEmbeddingRequestInput,\n type GoogleEmbeddingRequestType,\n type GoogleEmbeddingRequestInputType,\n};","import { z } from \"zod\";\n\nimport { \n Config,\n ConfigType, \n FloatEmbeddingType,\n EmbeddingRequests,\n EmbeddingRequestsType,\n EmbeddingResponseType,\n FloatEmbeddingLiteral,\n EmbeddingTextModalityLiteral,\n EmbeddingTokenModalityLiteral,\n} from \"@adaline/types\";\n\nimport { \n EmbeddingModelV1,\n EmbeddingModelSchemaType,\n HeadersType, \n ParamsType, \n UrlType, \n ModelResponseError,\n InvalidConfigError,\n InvalidModelRequestError,\n InvalidEmbeddingRequestsError, \n urlWithoutTrailingSlash,\n removeUndefinedEntries,\n} from \"@adaline/provider\";\n\nimport { \n GoogleGetEmbeddingsResponse, \n} from \"./types\";\n\nimport { \n GoogleEmbeddingRequest,\n GoogleEmbeddingRequestInputType\n} from \"./types\";\n\nconst BaseEmbeddingModelOptions = z.object({\n apiKey: z.string(),\n baseUrl: z.string().url(),\n getEmbeddingsUrl: z.string().url().optional(),\n});\ntype BaseEmbeddingModelOptionsType = z.infer<typeof BaseEmbeddingModelOptions>;\n\nclass BaseEmbeddingModel implements 
EmbeddingModelV1<EmbeddingModelSchemaType> {\n readonly version = \"v1\" as const;\n modelSchema: EmbeddingModelSchemaType;\n\n private readonly apiKey: string;\n private readonly baseUrl: string;\n private readonly getEmbeddingsUrl: string;\n\n constructor(modelSchema: EmbeddingModelSchemaType, options: BaseEmbeddingModelOptionsType) {\n const parsedOptions = BaseEmbeddingModelOptions.parse(options);\n this.modelSchema = modelSchema;\n this.apiKey = parsedOptions.apiKey;\n this.baseUrl = urlWithoutTrailingSlash(parsedOptions.baseUrl);\n this.getEmbeddingsUrl = urlWithoutTrailingSlash(\n parsedOptions.getEmbeddingsUrl || \n `${this.baseUrl}/models/${this.modelSchema.name}:batchEmbedContents?key=${this.apiKey}`\n );\n };\n\n getDefaultBaseUrl(): UrlType {\n return this.baseUrl;\n };\n\n getDefaultHeaders(): HeadersType {\n return {\n \"Content-Type\": \"application/json\",\n source: \"adaline.ai\",\n };\n };\n\n getDefaultParams(): ParamsType {\n return {\n model: this.modelSchema.name,\n };\n };\n\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n getRetryDelay(responseHeaders: HeadersType): { shouldRetry: boolean, delayMs: number } {\n return { shouldRetry: false, delayMs: 0 };\n };\n\n // TODO: unused method, not tested\n getTokenCount(requests: EmbeddingRequestsType): number {\n return requests.requests.reduce((acc, request) => acc + request.length, 0);\n };\n\n transformModelRequest(request: any): { \n modelName: string | undefined, \n config: ConfigType, \n embeddingRequests: EmbeddingRequestsType, \n } {\n const safeRequest = GoogleEmbeddingRequest.safeParse(request);\n if (!safeRequest.success) {\n throw new InvalidModelRequestError({ info: \"Invalid model request\", cause: safeRequest.error });\n }\n\n const parsedRequest = safeRequest.data;\n\n const modelName = parsedRequest.model;\n\n const _config = {\n outputDimensionality: parsedRequest.outputDimensionality,\n };\n const config = Config().parse(removeUndefinedEntries(_config));\n\n const embeddingRequests: EmbeddingRequestsType = {\n modality: EmbeddingTextModalityLiteral,\n requests: parsedRequest.requests.reduce((acc, request) => {\n acc.push(...request.content.parts.map((p) => p.text))\n return acc;\n }, [] as string[])\n }\n\n return {\n modelName,\n config,\n embeddingRequests,\n };\n };\n\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n transformConfig(config: ConfigType, requests?: EmbeddingRequestsType): ParamsType {\n const _parsedConfig = this.modelSchema.config.schema.safeParse(config);\n if (!_parsedConfig.success) {\n throw new InvalidConfigError({ \n info: `Invalid config for model : '${this.modelSchema.name}'`, \n cause: _parsedConfig.error \n });\n }\n\n const parsedConfig = _parsedConfig.data as ConfigType;\n Object.keys(parsedConfig as ConfigType).forEach((key) => {\n if (!this.modelSchema.config.def[key]) {\n throw new InvalidConfigError({ \n info: `Invalid config for model : '${this.modelSchema.name}'`, \n cause: new Error(`Invalid config key : '${key}', \n available keys : [${Object.keys(this.modelSchema.config.def).join(\", \")}]`) \n });\n }\n });\n\n const transformedConfig = Object.keys(parsedConfig).reduce(\n (acc, key) => {\n const def = this.modelSchema.config.def[key];\n const paramKey = def.param;\n const paramValue = parsedConfig[key];\n acc[paramKey] = paramValue;\n return acc;\n },\n {} as ParamsType\n );\n\n return transformedConfig;\n };\n\n transformEmbeddingRequests(requests: EmbeddingRequestsType): ParamsType {\n const _parsedRequests = 
EmbeddingRequests().safeParse(requests);\n if (!_parsedRequests.success) {\n throw new InvalidEmbeddingRequestsError({ info: \"Invalid embedding requests\", cause: _parsedRequests.error });\n }\n\n if (requests.modality !== EmbeddingTextModalityLiteral) {\n throw new InvalidEmbeddingRequestsError({\n info: `Invalid embedding requests for model : '${this.modelSchema.name}'`,\n cause: new Error(`Only '${EmbeddingTextModalityLiteral}' modality is supported for model : '${this.modelSchema.name}'`)\n });\n }\n\n const _requests: GoogleEmbeddingRequestInputType[] = _parsedRequests.data.requests.map((request) => {\n return {\n model: `models/${this.modelSchema.name}`,\n content: { parts: [{ text: request as string }] },\n }\n });\n\n return {\n requests: _requests,\n }\n };\n\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n async getGetEmbeddingsUrl(config?: ConfigType, requests?: EmbeddingRequestsType): Promise<UrlType> {\n return new Promise((resolve) => {\n resolve(this.getEmbeddingsUrl);\n });\n };\n\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n async getGetEmbeddingsHeaders(config?: ConfigType, requests?: EmbeddingRequestsType): Promise<HeadersType> {\n return new Promise((resolve) => {\n resolve(this.getDefaultHeaders());\n });\n };\n\n async getGetEmbeddingsData(config: ConfigType, requests: EmbeddingRequestsType): Promise<ParamsType> {\n return new Promise((resolve) => {\n const _config = this.transformConfig(config);\n const _requests = this.transformEmbeddingRequests(requests);\n\n if (requests.requests.length === 0) {\n throw new InvalidEmbeddingRequestsError({\n info: `Invalid embedding requests for model : '${this.modelSchema.name}'`,\n cause: new Error(\"requests cannot be empty\")\n });\n }\n\n if (_config.outputDimensionality) {\n (_requests as any).requests.forEach((request: any) => {\n request.outputDimensionality = _config.outputDimensionality;\n });\n delete _config.outputDimensionality;\n }\n\n resolve({\n ...this.getDefaultParams(),\n ..._config,\n ..._requests,\n });\n });\n };\n\n transformGetEmbeddingsResponse(response: any): EmbeddingResponseType {\n const safe = GoogleGetEmbeddingsResponse.safeParse(response);\n if (safe.success) {\n const parsedResponse = safe.data;\n const embeddings = parsedResponse.embeddings.map((embedding, index) => {\n return {\n index,\n embedding: embedding.values,\n } as FloatEmbeddingType;\n });\n\n return {\n encodingFormat: FloatEmbeddingLiteral,\n embeddings: embeddings,\n } as EmbeddingResponseType;\n }\n\n throw new ModelResponseError({ info: \"Invalid response from model\", cause: safe.error });\n };\n}\n\nexport { \n BaseEmbeddingModel, \n BaseEmbeddingModelOptions,\n type BaseEmbeddingModelOptionsType \n};","import { z } from \"zod\";\n\nimport { EmbeddingModelSchema } from \"@adaline/provider\";\n\nimport { GoogleEmbeddingModelConfigs } from \"../../configs\";\nimport { BaseEmbeddingModel, BaseEmbeddingModelOptions } from \"./base-embedding-model.google\";\nimport { GoogleEmbeddingModelModalities, GoogleEmbeddingModelModalitiesEnum } from \"./types\";\n\nconst Text_Embedding_001Literal = \"text-embedding-001\";\nconst Text_Embedding_001_Description = \"text-embedding-001\";\n\nconst Text_Embedding_001Schema = EmbeddingModelSchema(GoogleEmbeddingModelModalitiesEnum).parse({\n name: Text_Embedding_001Literal,\n description: Text_Embedding_001_Description,\n modalities: GoogleEmbeddingModelModalities,\n maxInputTokens: 2048,\n maxOutputTokens: 768,\n config: {\n def: 
GoogleEmbeddingModelConfigs.base(768).def,\n schema: GoogleEmbeddingModelConfigs.base(768).schema,\n },\n});\n\nconst Text_Embedding_001Options = BaseEmbeddingModelOptions;\ntype Text_Embedding_001OptionsType = z.infer<typeof Text_Embedding_001Options>;\n\nclass Text_Embedding_001 extends BaseEmbeddingModel {\n constructor(options: Text_Embedding_001OptionsType) {\n super(Text_Embedding_001Schema, options);\n }\n}\n\nexport { \n Text_Embedding_001, \n Text_Embedding_001Options, \n Text_Embedding_001Schema, \n Text_Embedding_001Literal, \n type Text_Embedding_001OptionsType \n};","import { z } from \"zod\";\n\nimport { EmbeddingModelSchema } from \"@adaline/provider\";\n\nimport { GoogleEmbeddingModelConfigs } from \"../../configs\";\nimport { BaseEmbeddingModel, BaseEmbeddingModelOptions } from \"./base-embedding-model.google\";\nimport { GoogleEmbeddingModelModalities, GoogleEmbeddingModelModalitiesEnum } from \"./types\";\n\nconst Text_Embedding_004Literal = \"text-embedding-004\";\nconst Text_Embedding_004_Description = \"text-embedding-004\";\n\nconst Text_Embedding_004Schema = EmbeddingModelSchema(GoogleEmbeddingModelModalitiesEnum).parse({\n name: Text_Embedding_004Literal,\n description: Text_Embedding_004_Description,\n modalities: GoogleEmbeddingModelModalities,\n maxInputTokens: 2048,\n maxOutputTokens: 768,\n config: {\n def: GoogleEmbeddingModelConfigs.base(768).def,\n schema: GoogleEmbeddingModelConfigs.base(768).schema,\n },\n});\n\nconst Text_Embedding_004Options = BaseEmbeddingModelOptions;\ntype Text_Embedding_004OptionsType = z.infer<typeof Text_Embedding_004Options>;\n\nclass Text_Embedding_004 extends BaseEmbeddingModel {\n constructor(options: Text_Embedding_004OptionsType) {\n super(Text_Embedding_004Schema, options);\n }\n}\n\nexport { \n Text_Embedding_004, \n Text_Embedding_004Options, \n Text_Embedding_004Schema, \n Text_Embedding_004Literal, \n type Text_Embedding_004OptionsType \n};"]}
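
For orientation, the bundled sources above define a BaseEmbeddingModel plus concrete models such as Text_Embedding_004 that target Google's batchEmbedContents endpoint. The following is a minimal usage sketch and is not part of the package diff: it assumes the model classes are re-exported from the package entry point, that an empty config object satisfies the model's config schema, and that GEMINI_API_KEY and the generativelanguage.googleapis.com base URL are illustrative values only.

import { EmbeddingTextModalityLiteral } from "@adaline/types";
import { Text_Embedding_004 } from "@adaline/google"; // assumed re-export from the package root

async function embed(texts: string[]) {
  // BaseEmbeddingModelOptions requires apiKey and baseUrl; getEmbeddingsUrl is optional and
  // otherwise defaults to `${baseUrl}/models/<model>:batchEmbedContents?key=<apiKey>`.
  const model = new Text_Embedding_004({
    apiKey: process.env.GEMINI_API_KEY ?? "", // hypothetical env var
    baseUrl: "https://generativelanguage.googleapis.com/v1beta", // assumed Google API base URL
  });

  // Provider-agnostic request shape accepted by transformEmbeddingRequests.
  const requests = { modality: EmbeddingTextModalityLiteral, requests: texts };

  const url = await model.getGetEmbeddingsUrl();
  const headers = await model.getGetEmbeddingsHeaders();
  const body = await model.getGetEmbeddingsData({}, requests); // {} assumes all config keys are optional

  const response = await fetch(url, { method: "POST", headers, body: JSON.stringify(body) });
  // Normalizes { embeddings: [{ values: number[] }] } into indexed float embeddings.
  return model.transformGetEmbeddingsResponse(await response.json());
}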
|
package/package.json
ADDED
|
@@ -0,0 +1,67 @@
+{
+  "name": "@adaline/google",
+  "version": "0.1.0",
+  "license": "MIT",
+  "sideEffects": false,
+  "private": false,
+  "description": "Adaline Google",
+  "keywords": [
+    "AI",
+    "Adaline",
+    "LLM",
+    "Prompt Engineering",
+    "Prompt",
+    "GenAI"
+  ],
+  "author": "Adaline <support@adaline.ai>",
+  "homepage": "https://www.adaline.ai",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/adaline/gateway.git"
+  },
+  "bugs": {
+    "url": "https://github.com/adaline/gateway/issues"
+  },
+  "engines": {
+    "node": ">=18.0.0"
+  },
+  "publishConfig": {
+    "access": "public"
+  },
+  "main": "./dist/index.js",
+  "module": "./dist/index.mjs",
+  "types": "./dist/index.d.ts",
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "types": "./dist/index.d.ts",
+      "import": "./dist/index.mjs",
+      "require": "./dist/index.js"
+    }
+  },
+  "files": [
+    "dist/**/*",
+    "LICENSE",
+    "README.md"
+  ],
+  "dependencies": {
+    "zod": "^3.23.8",
+    "@adaline/provider": "0.11.0",
+    "@adaline/types": "0.9.0"
+  },
+  "devDependencies": {
+    "tsup": "^8.2.2",
+    "tsup-node": "^0.1.0",
+    "@adaline/tsconfig": "0.6.0",
+    "eslint-config-adaline": "0.3.0"
+  },
+  "scripts": {
+    "clean": "rimraf node_modules .turbo dist",
+    "build": "tsup",
+    "lint": "eslint . --ext js,jsx,ts,tsx",
+    "format": "prettier --write .",
+    "post": "npm publish --access public",
+    "test": "jest",
+    "test:watch": "jest --watch"
+  }
+}
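
Note on consumption: per the exports map above, ESM consumers resolve to ./dist/index.mjs and CommonJS consumers to ./dist/index.js, with types served from ./dist/index.d.ts, on Node 18 or newer. A minimal sketch of the two entry styles, assuming the model classes are re-exported from the package entry point (use one or the other, not both in the same module):

// ESM (resolved through the "import" condition → ./dist/index.mjs)
import { Text_Embedding_004 } from "@adaline/google";

// CommonJS alternative (resolved through the "require" condition → ./dist/index.js)
// const { Text_Embedding_004 } = require("@adaline/google");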