@adaline/google 0.9.0 → 0.11.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.mts CHANGED
@@ -1056,20 +1056,20 @@ declare const GoogleStreamChatResponse: z.ZodObject<{
  }[] | undefined;
  }>>;
  usageMetadata: z.ZodOptional<z.ZodObject<{
- promptTokenCount: z.ZodNumber;
+ promptTokenCount: z.ZodOptional<z.ZodNumber>;
  cachedContentTokenCount: z.ZodOptional<z.ZodNumber>;
- candidatesTokenCount: z.ZodNumber;
- totalTokenCount: z.ZodNumber;
+ candidatesTokenCount: z.ZodOptional<z.ZodNumber>;
+ totalTokenCount: z.ZodOptional<z.ZodNumber>;
  }, "strip", z.ZodTypeAny, {
- promptTokenCount: number;
- candidatesTokenCount: number;
- totalTokenCount: number;
+ promptTokenCount?: number | undefined;
  cachedContentTokenCount?: number | undefined;
+ candidatesTokenCount?: number | undefined;
+ totalTokenCount?: number | undefined;
  }, {
- promptTokenCount: number;
- candidatesTokenCount: number;
- totalTokenCount: number;
+ promptTokenCount?: number | undefined;
  cachedContentTokenCount?: number | undefined;
+ candidatesTokenCount?: number | undefined;
+ totalTokenCount?: number | undefined;
  }>>;
  }, "strip", z.ZodTypeAny, {
  candidates: {
@@ -1099,10 +1099,10 @@ declare const GoogleStreamChatResponse: z.ZodObject<{
  }[] | undefined;
  } | undefined;
  usageMetadata?: {
- promptTokenCount: number;
- candidatesTokenCount: number;
- totalTokenCount: number;
+ promptTokenCount?: number | undefined;
  cachedContentTokenCount?: number | undefined;
+ candidatesTokenCount?: number | undefined;
+ totalTokenCount?: number | undefined;
  } | undefined;
  }, {
  candidates: {
@@ -1132,10 +1132,10 @@ declare const GoogleStreamChatResponse: z.ZodObject<{
  }[] | undefined;
  } | undefined;
  usageMetadata?: {
- promptTokenCount: number;
- candidatesTokenCount: number;
- totalTokenCount: number;
+ promptTokenCount?: number | undefined;
  cachedContentTokenCount?: number | undefined;
+ candidatesTokenCount?: number | undefined;
+ totalTokenCount?: number | undefined;
  } | undefined;
  }>;
  type GoogleStreamChatResponseType = z.infer<typeof GoogleStreamChatResponse>;
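
Both declaration files (index.d.mts above and index.d.ts below) record the same change to GoogleStreamChatResponse: promptTokenCount, candidatesTokenCount, and totalTokenCount join cachedContentTokenCount in being wrapped in z.ZodOptional, so the inferred type reports number | undefined for every count under usageMetadata. A minimal sketch of what that means for consuming code, using a trimmed stand-in schema and a hypothetical toUsage helper rather than the package's actual exports:

import { z } from "zod";

// Trimmed stand-in for the usageMetadata portion of GoogleStreamChatResponse as of 0.11.0.
const UsageMetadata = z
  .object({
    promptTokenCount: z.number().optional(),
    cachedContentTokenCount: z.number().optional(),
    candidatesTokenCount: z.number().optional(),
    totalTokenCount: z.number().optional(),
  })
  .optional();

type UsageMetadata = z.infer<typeof UsageMetadata>;

// Hypothetical helper: read the counts defensively now that any of them may be absent
// from an individual stream chunk.
function toUsage(meta: UsageMetadata) {
  return {
    promptTokens: meta?.promptTokenCount ?? 0,
    completionTokens: meta?.candidatesTokenCount ?? 0,
    totalTokens: meta?.totalTokenCount ?? 0,
  };
}

Code that previously read these fields as required numbers will now see them typed as possibly undefined and should guard accordingly.
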
package/dist/index.d.ts CHANGED
@@ -1056,20 +1056,20 @@ declare const GoogleStreamChatResponse: z.ZodObject<{
  }[] | undefined;
  }>>;
  usageMetadata: z.ZodOptional<z.ZodObject<{
- promptTokenCount: z.ZodNumber;
+ promptTokenCount: z.ZodOptional<z.ZodNumber>;
  cachedContentTokenCount: z.ZodOptional<z.ZodNumber>;
- candidatesTokenCount: z.ZodNumber;
- totalTokenCount: z.ZodNumber;
+ candidatesTokenCount: z.ZodOptional<z.ZodNumber>;
+ totalTokenCount: z.ZodOptional<z.ZodNumber>;
  }, "strip", z.ZodTypeAny, {
- promptTokenCount: number;
- candidatesTokenCount: number;
- totalTokenCount: number;
+ promptTokenCount?: number | undefined;
  cachedContentTokenCount?: number | undefined;
+ candidatesTokenCount?: number | undefined;
+ totalTokenCount?: number | undefined;
  }, {
- promptTokenCount: number;
- candidatesTokenCount: number;
- totalTokenCount: number;
+ promptTokenCount?: number | undefined;
  cachedContentTokenCount?: number | undefined;
+ candidatesTokenCount?: number | undefined;
+ totalTokenCount?: number | undefined;
  }>>;
  }, "strip", z.ZodTypeAny, {
  candidates: {
@@ -1099,10 +1099,10 @@ declare const GoogleStreamChatResponse: z.ZodObject<{
  }[] | undefined;
  } | undefined;
  usageMetadata?: {
- promptTokenCount: number;
- candidatesTokenCount: number;
- totalTokenCount: number;
+ promptTokenCount?: number | undefined;
  cachedContentTokenCount?: number | undefined;
+ candidatesTokenCount?: number | undefined;
+ totalTokenCount?: number | undefined;
  } | undefined;
  }, {
  candidates: {
@@ -1132,10 +1132,10 @@ declare const GoogleStreamChatResponse: z.ZodObject<{
  }[] | undefined;
  } | undefined;
  usageMetadata?: {
- promptTokenCount: number;
- candidatesTokenCount: number;
- totalTokenCount: number;
+ promptTokenCount?: number | undefined;
  cachedContentTokenCount?: number | undefined;
+ candidatesTokenCount?: number | undefined;
+ totalTokenCount?: number | undefined;
  } | undefined;
  }>;
  type GoogleStreamChatResponseType = z.infer<typeof GoogleStreamChatResponse>;
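
The compiled bundle below carries the matching runtime change, as far as the minified output can be read: the streaming-response Zod schema marks all four token counts optional, the stream-chunk splitter additionally trims fragments that begin with ",{", and usage is only attached to a partial response once the prompt, candidate, and total counts are all present. A rough, hedged sketch of that flow, with hypothetical names (parseChunkLine, buildUsage) standing in for the minified internals:

// Illustrative reconstruction only; these names and types are not exports of @adaline/google.
type UsageMetadata = {
  promptTokenCount?: number;
  candidatesTokenCount?: number;
  totalTokenCount?: number;
};

// Google streams a JSON array, so fragments can arrive prefixed with "[" or ",{".
function parseChunkLine(raw: string): unknown {
  let line = raw.replace(/\n/g, "");
  if (line.startsWith("[") || line.startsWith(",{")) {
    line = line.slice(1); // drop the leading "[" or ","
  } else if (line.endsWith("]")) {
    if (line === "]") return undefined; // end of the streamed array
    line = line.slice(0, -1);
  }
  try {
    return JSON.parse(line);
  } catch {
    return undefined; // incomplete fragment; the real code keeps it buffered
  }
}

// Usage is only reported once prompt, candidate, and total counts have all arrived (and are non-zero).
function buildUsage(meta?: UsageMetadata) {
  if (!meta?.promptTokenCount || !meta.candidatesTokenCount || !meta.totalTokenCount) {
    return undefined;
  }
  return {
    promptTokens: meta.promptTokenCount,
    completionTokens: meta.candidatesTokenCount,
    totalTokens: meta.totalTokenCount,
  };
}
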
package/dist/index.js CHANGED
@@ -4,14 +4,14 @@ var zod = require('zod');
  var provider = require('@adaline/provider');
  var types = require('@adaline/types');
 
- var ct=Object.defineProperty;var Go=Object.getOwnPropertySymbols;var ht=Object.prototype.hasOwnProperty,ft=Object.prototype.propertyIsEnumerable;var gt=(i,e)=>(e=Symbol[i])?e:Symbol.for("Symbol."+i);var bo=(i,e,o)=>e in i?ct(i,e,{enumerable:!0,configurable:!0,writable:!0,value:o}):i[e]=o,_=(i,e)=>{for(var o in e||(e={}))ht.call(e,o)&&bo(i,o,e[o]);if(Go)for(var o of Go(e))ft.call(e,o)&&bo(i,o,e[o]);return i};var x=(i,e,o)=>new Promise((t,s)=>{var l=r=>{try{m(o.next(r));}catch(p){s(p);}},n=r=>{try{m(o.throw(r));}catch(p){s(p);}},m=r=>r.done?t(r.value):Promise.resolve(r.value).then(l,n);m((o=o.apply(i,e)).next());}),ut=function(i,e){this[0]=i,this[1]=e;},_o=(i,e,o)=>{var t=(n,m,r,p)=>{try{var h=o[n](m),C=(m=h.value)instanceof ut,b=h.done;Promise.resolve(C?m[0]:m).then(y=>C?t(n==="return"?n:"next",m[1]?{done:y.done,value:y.value}:y,r,p):r({value:y,done:b})).catch(y=>t("throw",y,r,p));}catch(y){p(y);}},s=n=>l[n]=m=>new Promise((r,p)=>t(n,m,r,p)),l={};return o=o.apply(i,e),l[gt("asyncIterator")]=()=>l,s("next"),s("throw"),s("return"),l};var v=(i,e)=>provider.RangeConfigItem({param:"temperature",title:provider.CHAT_CONFIG.TEMPERATURE.title,description:provider.CHAT_CONFIG.TEMPERATURE.description,min:0,max:i,step:.01,default:e}),N=i=>provider.RangeConfigItem({param:"maxOutputTokens",title:provider.CHAT_CONFIG.MAX_TOKENS.title,description:provider.CHAT_CONFIG.MAX_TOKENS.description,min:0,max:i,step:1,default:0}),F=i=>provider.MultiStringConfigItem({param:"stopSequences",title:provider.CHAT_CONFIG.STOP(i).title,description:provider.CHAT_CONFIG.STOP(i).description,max:i}),q=i=>provider.RangeConfigItem({param:"topP",title:provider.CHAT_CONFIG.TOP_P.title,description:provider.CHAT_CONFIG.TOP_P.description,min:0,max:1,step:.01,default:i}),we=i=>provider.RangeConfigItem({param:"topK",title:provider.CHAT_CONFIG.TOP_K.title,description:provider.CHAT_CONFIG.TOP_K.description,min:1,max:40,step:1,default:i}),Ie=provider.RangeConfigItem({param:"frequencyPenalty",title:provider.CHAT_CONFIG.FREQUENCY_PENALTY.title,description:provider.CHAT_CONFIG.FREQUENCY_PENALTY.description,min:-2,max:2,step:.01,default:0}),Le=provider.RangeConfigItem({param:"presencePenalty",title:provider.CHAT_CONFIG.PRESENCE_PENALTY.title,description:provider.CHAT_CONFIG.PRESENCE_PENALTY.description,min:-2,max:2,step:.01,default:0}),ke=provider.RangeConfigItem({param:"seed",title:provider.CHAT_CONFIG.SEED.title,description:provider.CHAT_CONFIG.SEED.description,min:0,max:1e6,step:1,default:0}),$=provider.SelectStringConfigItem({param:"toolChoice",title:"Tool choice",description:"Controls which (if any) tool is called by the model. 'none' means the model will not call a function. 
'auto' means the model can pick between generating a message or calling a tool.",default:"auto",choices:["auto","any","none"]}),z=provider.ObjectSchemaConfigItem({param:"safetySettings",title:"Safety settings",description:"The safety rating contains the category of harm and the harm probability level in that category for a piece of content.",objectSchema:zod.z.array(zod.z.object({threshold:zod.z.enum(["HARM_BLOCK_THRESHOLD_UNSPECIFIED","BLOCK_LOW_AND_ABOVE","BLOCK_MEDIUM_AND_ABOVE","BLOCK_ONLY_HIGH","BLOCK_NONE","OFF"]),category:zod.z.enum(["HARM_CATEGORY_UNSPECIFIED","HARM_CATEGORY_HARASSMENT","HARM_CATEGORY_HATE_SPEECH","HARM_CATEGORY_SEXUALLY_EXPLICIT","HARM_CATEGORY_DANGEROUS_CONTENT","HARM_CATEGORY_CIVIC_INTEGRITY"])}))});var Eo=(i,e,o,t,s)=>zod.z.object({temperature:v(i,e).schema,maxTokens:N(o).schema,stop:F(t).schema,topP:q(s).schema,toolChoice:$.schema,safetySettings:z.schema}),Oo=(i,e,o,t,s)=>({temperature:v(i,e).def,maxTokens:N(o).def,stop:F(t).def,topP:q(s).def,toolChoice:$.def,safetySettings:z.def});var Po=(i,e,o,t,s,l)=>zod.z.object({temperature:v(i,e).schema,maxTokens:N(o).schema,stop:F(t).schema,topP:q(s).schema,topK:we(l).schema,frequencyPenalty:Ie.schema,presencePenalty:Le.schema,seed:ke.schema.transform(n=>n===0?void 0:n),toolChoice:$.schema,safetySettings:z.schema}),So=(i,e,o,t,s,l)=>({temperature:v(i,e).def,maxTokens:N(o).def,stop:F(t).def,topP:q(s).def,topK:we(l).def,frequencyPenalty:Ie.def,presencePenalty:Le.def,seed:ke.def,toolChoice:$.def,safetySettings:z.def});var ve=i=>provider.RangeConfigItem({param:"outputDimensionality",title:provider.EMBEDDING_CONFIG.DIMENSIONS.title,description:provider.EMBEDDING_CONFIG.DIMENSIONS.description,min:1,max:i,step:1,default:i});var Ro=i=>zod.z.object({dimensions:ve(i).schema}),wo=i=>({dimensions:ve(i).def});var c={base:(i,e,o,t,s)=>({def:Oo(i,e,o,t,s),schema:Eo(i,e,o,t,s)}),c1:(i,e,o,t,s,l)=>({def:So(i,e,o,t,s,l),schema:Po(i,e,o,t,s,l)})},B={base:i=>({def:wo(i),schema:Ro(i)})};var Et="google",Y=class{constructor(){this.version="v1";this.name=Et;this.chatModelFactories={[mo]:{model:ye,modelOptions:Ho,modelSchema:po},[ro]:{model:ue,modelOptions:Vo,modelSchema:lo},[Ue]:{model:re,modelOptions:Fo,modelSchema:Ae},[Ve]:{model:le,modelOptions:qo,modelSchema:He},[Ke]:{model:me,modelOptions:$o,modelSchema:Ye},[Je]:{model:de,modelOptions:zo,modelSchema:We},[Xe]:{model:pe,modelOptions:Bo,modelSchema:Qe},[Ze]:{model:ce,modelOptions:jo,modelSchema:eo},[oo]:{model:he,modelOptions:Do,modelSchema:to},[so]:{model:fe,modelOptions:Uo,modelSchema:no},[je]:{model:ae,modelOptions:No,modelSchema:De},[ze]:{model:ie,modelOptions:vo,modelSchema:Be},[qe]:{model:ne,modelOptions:ko,modelSchema:$e},[Ne]:{model:se,modelOptions:Lo,modelSchema:Fe},[io]:{model:ge,modelOptions:Ao,modelSchema:ao}};this.embeddingModelFactories={[co]:{model:Ce,modelOptions:Ko,modelSchema:ho},[fo]:{model:Te,modelOptions:Yo,modelSchema:go}};}chatModelLiterals(){return Object.keys(this.chatModelFactories)}chatModelSchemas(){return Object.keys(this.chatModelFactories).reduce((e,o)=>(e[o]=this.chatModelFactories[o].modelSchema,e),{})}chatModel(e){let o=e.modelName;if(!(o in this.chatModelFactories))throw new provider.ProviderError({info:`Google chat model: ${o} not found`,cause:new Error(`Google chat model: ${o} not found, available chat models:
+ var ct=Object.defineProperty;var Go=Object.getOwnPropertySymbols;var ht=Object.prototype.hasOwnProperty,ft=Object.prototype.propertyIsEnumerable;var gt=(i,e)=>(e=Symbol[i])?e:Symbol.for("Symbol."+i);var bo=(i,e,o)=>e in i?ct(i,e,{enumerable:!0,configurable:!0,writable:!0,value:o}):i[e]=o,_=(i,e)=>{for(var o in e||(e={}))ht.call(e,o)&&bo(i,o,e[o]);if(Go)for(var o of Go(e))ft.call(e,o)&&bo(i,o,e[o]);return i};var x=(i,e,o)=>new Promise((t,s)=>{var l=r=>{try{m(o.next(r));}catch(p){s(p);}},n=r=>{try{m(o.throw(r));}catch(p){s(p);}},m=r=>r.done?t(r.value):Promise.resolve(r.value).then(l,n);m((o=o.apply(i,e)).next());}),ut=function(i,e){this[0]=i,this[1]=e;},_o=(i,e,o)=>{var t=(n,m,r,p)=>{try{var h=o[n](m),C=(m=h.value)instanceof ut,b=h.done;Promise.resolve(C?m[0]:m).then(y=>C?t(n==="return"?n:"next",m[1]?{done:y.done,value:y.value}:y,r,p):r({value:y,done:b})).catch(y=>t("throw",y,r,p));}catch(y){p(y);}},s=n=>l[n]=m=>new Promise((r,p)=>t(n,m,r,p)),l={};return o=o.apply(i,e),l[gt("asyncIterator")]=()=>l,s("next"),s("throw"),s("return"),l};var v=(i,e)=>provider.RangeConfigItem({param:"temperature",title:provider.CHAT_CONFIG.TEMPERATURE.title,description:provider.CHAT_CONFIG.TEMPERATURE.description,min:0,max:i,step:.01,default:e}),N=i=>provider.RangeConfigItem({param:"maxOutputTokens",title:provider.CHAT_CONFIG.MAX_TOKENS.title,description:provider.CHAT_CONFIG.MAX_TOKENS.description,min:0,max:i,step:1,default:0}),F=i=>provider.MultiStringConfigItem({param:"stopSequences",title:provider.CHAT_CONFIG.STOP(i).title,description:provider.CHAT_CONFIG.STOP(i).description,max:i}),q=i=>provider.RangeConfigItem({param:"topP",title:provider.CHAT_CONFIG.TOP_P.title,description:provider.CHAT_CONFIG.TOP_P.description,min:0,max:1,step:.01,default:i}),we=i=>provider.RangeConfigItem({param:"topK",title:provider.CHAT_CONFIG.TOP_K.title,description:provider.CHAT_CONFIG.TOP_K.description,min:1,max:40,step:1,default:i}),ke=provider.RangeConfigItem({param:"frequencyPenalty",title:provider.CHAT_CONFIG.FREQUENCY_PENALTY.title,description:provider.CHAT_CONFIG.FREQUENCY_PENALTY.description,min:-2,max:2,step:.01,default:0}),Ie=provider.RangeConfigItem({param:"presencePenalty",title:provider.CHAT_CONFIG.PRESENCE_PENALTY.title,description:provider.CHAT_CONFIG.PRESENCE_PENALTY.description,min:-2,max:2,step:.01,default:0}),Le=provider.RangeConfigItem({param:"seed",title:provider.CHAT_CONFIG.SEED.title,description:provider.CHAT_CONFIG.SEED.description,min:0,max:1e6,step:1,default:0}),$=provider.SelectStringConfigItem({param:"toolChoice",title:"Tool choice",description:"Controls which (if any) tool is called by the model. 'none' means the model will not call a function. 
'auto' means the model can pick between generating a message or calling a tool.",default:"auto",choices:["auto","any","none"]}),z=provider.ObjectSchemaConfigItem({param:"safetySettings",title:"Safety settings",description:"The safety rating contains the category of harm and the harm probability level in that category for a piece of content.",objectSchema:zod.z.array(zod.z.object({threshold:zod.z.enum(["HARM_BLOCK_THRESHOLD_UNSPECIFIED","BLOCK_LOW_AND_ABOVE","BLOCK_MEDIUM_AND_ABOVE","BLOCK_ONLY_HIGH","BLOCK_NONE","OFF"]),category:zod.z.enum(["HARM_CATEGORY_UNSPECIFIED","HARM_CATEGORY_HARASSMENT","HARM_CATEGORY_HATE_SPEECH","HARM_CATEGORY_SEXUALLY_EXPLICIT","HARM_CATEGORY_DANGEROUS_CONTENT","HARM_CATEGORY_CIVIC_INTEGRITY"])}))});var Eo=(i,e,o,t,s)=>zod.z.object({temperature:v(i,e).schema,maxTokens:N(o).schema,stop:F(t).schema,topP:q(s).schema,toolChoice:$.schema,safetySettings:z.schema}),Oo=(i,e,o,t,s)=>({temperature:v(i,e).def,maxTokens:N(o).def,stop:F(t).def,topP:q(s).def,toolChoice:$.def,safetySettings:z.def});var Po=(i,e,o,t,s,l)=>zod.z.object({temperature:v(i,e).schema,maxTokens:N(o).schema,stop:F(t).schema,topP:q(s).schema,topK:we(l).schema,frequencyPenalty:ke.schema,presencePenalty:Ie.schema,seed:Le.schema.transform(n=>n===0?void 0:n),toolChoice:$.schema,safetySettings:z.schema}),So=(i,e,o,t,s,l)=>({temperature:v(i,e).def,maxTokens:N(o).def,stop:F(t).def,topP:q(s).def,topK:we(l).def,frequencyPenalty:ke.def,presencePenalty:Ie.def,seed:Le.def,toolChoice:$.def,safetySettings:z.def});var ve=i=>provider.RangeConfigItem({param:"outputDimensionality",title:provider.EMBEDDING_CONFIG.DIMENSIONS.title,description:provider.EMBEDDING_CONFIG.DIMENSIONS.description,min:1,max:i,step:1,default:i});var Ro=i=>zod.z.object({dimensions:ve(i).schema}),wo=i=>({dimensions:ve(i).def});var c={base:(i,e,o,t,s)=>({def:Oo(i,e,o,t,s),schema:Eo(i,e,o,t,s)}),c1:(i,e,o,t,s,l)=>({def:So(i,e,o,t,s,l),schema:Po(i,e,o,t,s,l)})},B={base:i=>({def:wo(i),schema:Ro(i)})};var Et="google",Y=class{constructor(){this.version="v1";this.name=Et;this.chatModelFactories={[mo]:{model:ye,modelOptions:Ho,modelSchema:po},[ro]:{model:ue,modelOptions:Vo,modelSchema:lo},[Ue]:{model:re,modelOptions:Fo,modelSchema:Ae},[Ve]:{model:le,modelOptions:qo,modelSchema:He},[Ke]:{model:me,modelOptions:$o,modelSchema:Ye},[Je]:{model:de,modelOptions:zo,modelSchema:We},[Xe]:{model:pe,modelOptions:Bo,modelSchema:Qe},[Ze]:{model:ce,modelOptions:jo,modelSchema:eo},[oo]:{model:he,modelOptions:Do,modelSchema:to},[so]:{model:fe,modelOptions:Uo,modelSchema:no},[je]:{model:ae,modelOptions:No,modelSchema:De},[ze]:{model:ie,modelOptions:vo,modelSchema:Be},[qe]:{model:ne,modelOptions:Lo,modelSchema:$e},[Ne]:{model:se,modelOptions:Io,modelSchema:Fe},[io]:{model:ge,modelOptions:Ao,modelSchema:ao}};this.embeddingModelFactories={[co]:{model:Ce,modelOptions:Ko,modelSchema:ho},[fo]:{model:Te,modelOptions:Yo,modelSchema:go}};}chatModelLiterals(){return Object.keys(this.chatModelFactories)}chatModelSchemas(){return Object.keys(this.chatModelFactories).reduce((e,o)=>(e[o]=this.chatModelFactories[o].modelSchema,e),{})}chatModel(e){let o=e.modelName;if(!(o in this.chatModelFactories))throw new provider.ProviderError({info:`Google chat model: ${o} not found`,cause:new Error(`Google chat model: ${o} not found, available chat models:
  [${this.chatModelLiterals().join(", ")}]`)});let t=this.chatModelFactories[o].model,s=this.chatModelFactories[o].modelOptions.parse(e);return new t(s)}embeddingModelLiterals(){return Object.keys(this.embeddingModelFactories)}embeddingModelSchemas(){return Object.keys(this.embeddingModelFactories).reduce((e,o)=>(e[o]=this.embeddingModelFactories[o].modelSchema,e),{})}embeddingModel(e){let o=e.modelName;if(!(o in this.embeddingModelFactories))throw new provider.ProviderError({info:`Google embedding model: ${o} not found`,cause:new Error(`Google embedding model: ${o} not found, available embedding models:
- [${this.embeddingModelLiterals().join(", ")}]`)});let t=this.embeddingModelFactories[o].model,s=this.embeddingModelFactories[o].modelOptions.parse(e);return new t(s)}};Y.baseUrl="https://generativelanguage.googleapis.com/v1beta";var g=zod.z.enum([types.SystemRoleLiteral,types.UserRoleLiteral,types.AssistantRoleLiteral,types.ToolRoleLiteral]),Rt="model",wt="function",u={system:types.UserRoleLiteral,user:types.UserRoleLiteral,assistant:Rt,tool:wt};var M=[types.TextModalityLiteral,types.ImageModalityLiteral,types.ToolCallModalityLiteral,types.ToolResponseModalityLiteral],G=zod.z.enum([types.TextModalityLiteral,types.ImageModalityLiteral,types.ToolCallModalityLiteral,types.ToolResponseModalityLiteral]),un=[types.TextModalityLiteral],yn=zod.z.enum([types.TextModalityLiteral]),Ee=[types.TextModalityLiteral,types.ImageModalityLiteral],Oe=zod.z.enum([types.TextModalityLiteral,types.ImageModalityLiteral]),I=[types.TextModalityLiteral,types.ToolCallModalityLiteral,types.ToolResponseModalityLiteral],L=zod.z.enum([types.TextModalityLiteral,types.ToolCallModalityLiteral,types.ToolResponseModalityLiteral]);var It=zod.z.object({text:zod.z.string()}),Lt=zod.z.object({functionCall:zod.z.object({name:zod.z.string(),args:zod.z.record(zod.z.any())})}),Jo=zod.z.object({candidates:zod.z.array(zod.z.object({content:zod.z.object({role:zod.z.string(),parts:zod.z.array(zod.z.union([It,Lt]))}).optional(),finishReason:zod.z.string(),index:zod.z.number().optional(),safetyRatings:zod.z.optional(zod.z.array(zod.z.object({category:zod.z.string(),probability:zod.z.string(),blocked:zod.z.boolean().optional()})))})),promptFeedback:zod.z.optional(zod.z.object({safetyRatings:zod.z.optional(zod.z.array(zod.z.object({category:zod.z.string(),probability:zod.z.string()})))})),usageMetadata:zod.z.object({promptTokenCount:zod.z.number(),cachedContentTokenCount:zod.z.number().optional(),candidatesTokenCount:zod.z.number().optional(),totalTokenCount:zod.z.number()}).optional()}),kt=zod.z.object({text:zod.z.string()}),vt=zod.z.object({functionCall:zod.z.object({name:zod.z.string(),args:zod.z.record(zod.z.any())})}),Wo=zod.z.object({candidates:zod.z.array(zod.z.object({content:zod.z.object({role:zod.z.string(),parts:zod.z.array(zod.z.union([kt,vt]))}).optional(),finishReason:zod.z.string().optional(),index:zod.z.number().optional(),safetyRatings:zod.z.optional(zod.z.array(zod.z.object({category:zod.z.string(),probability:zod.z.string(),blocked:zod.z.boolean().optional()})))})),promptFeedback:zod.z.optional(zod.z.object({safetyRatings:zod.z.optional(zod.z.array(zod.z.object({category:zod.z.string(),probability:zod.z.string()})))})),usageMetadata:zod.z.object({promptTokenCount:zod.z.number(),cachedContentTokenCount:zod.z.number().optional(),candidatesTokenCount:zod.z.number(),totalTokenCount:zod.z.number()}).optional()});var 
ot=zod.z.object({text:zod.z.string().min(1)}),Nt=zod.z.object({inline_data:zod.z.object({mime_type:zod.z.string().min(1),data:zod.z.string().base64()})}),Ft=zod.z.object({function_call:zod.z.object({name:zod.z.string().min(1),args:zod.z.record(zod.z.string().min(1))})}),qt=zod.z.object({function_response:zod.z.object({name:zod.z.string().min(1),response:zod.z.record(zod.z.string().min(1))})}),$t=zod.z.object({role:zod.z.enum(["user","model","function"]),parts:zod.z.array(zod.z.union([ot,Nt,Ft,qt]))}),Xo=zod.z.object({parts:zod.z.array(ot)}),zt=zod.z.object({name:zod.z.string().min(1),description:zod.z.string().min(1),parameters:zod.z.any()}),Qo=zod.z.object({function_calling_config:zod.z.object({mode:zod.z.enum(["ANY","AUTO","NONE"]),allowed_function_names:zod.z.array(zod.z.string()).optional()})}),Zo=zod.z.object({stopSequences:zod.z.array(zod.z.string()).optional(),maxOutputTokens:zod.z.number().optional(),temperature:zod.z.number().optional(),topP:zod.z.number().optional(),topK:zod.z.number().optional(),presencePenalty:zod.z.number().optional(),frequencyPenalty:zod.z.number().optional(),seed:zod.z.number().optional()}),et=zod.z.object({category:zod.z.enum(["HARM_CATEGORY_HARASSMENT","HARM_CATEGORY_HATE_SPEECH","HARM_CATEGORY_SEXUALLY_EXPLICIT","HARM_CATEGORY_DANGEROUS_CONTENT","HARM_CATEGORY_CIVIC_INTEGRITY"]),threshold:zod.z.enum(["HARM_BLOCK_THRESHOLD_UNSPECIFIED","BLOCK_LOW_AND_ABOVE","BLOCK_MEDIUM_AND_ABOVE","BLOCK_ONLY_HIGH","BLOCK_NONE","OFF"])}),tt=zod.z.object({model:zod.z.string().min(1).optional(),contents:zod.z.array($t),systemInstruction:Xo.optional(),system_instruction:Xo.optional(),generationConfig:Zo.optional(),generation_config:Zo.optional(),safetySettings:zod.z.array(et).optional(),safety_settings:zod.z.array(et).optional(),tools:zod.z.object({function_declarations:zod.z.array(zt)}).optional(),toolConfig:Qo.optional(),tool_config:Qo.optional()});var f=zod.z.object({modelName:zod.z.string(),apiKey:zod.z.string(),baseUrl:zod.z.string().url().optional(),completeChatUrl:zod.z.string().url().optional(),streamChatUrl:zod.z.string().url().optional()}),T=class{constructor(e,o){this.version="v1";var s;let t=f.parse(o);this.modelSchema=e,this.modelName=t.modelName,this.apiKey=t.apiKey,this.baseUrl=provider.urlWithoutTrailingSlash((s=t.baseUrl)!=null?s:Y.baseUrl),this.completeChatUrl=provider.urlWithoutTrailingSlash(t.completeChatUrl||`${this.baseUrl}/models/${this.modelName}:generateContent?key=${this.apiKey}`),this.streamChatUrl=provider.urlWithoutTrailingSlash(t.streamChatUrl||`${this.baseUrl}/models/${this.modelName}:streamGenerateContent?key=${this.apiKey}`);}getDefaultBaseUrl(){return this.baseUrl}getDefaultHeaders(){return {"Content-Type":"application/json"}}getDefaultParams(){return {}}getRetryDelay(e){return {shouldRetry:!1,delayMs:0}}getTokenCount(e){return e.reduce((o,t)=>o+t.content.map(s=>s.modality==="text"?s.value:"").join(" ").length,0)}transformModelRequest(e){let o=tt.safeParse(e);if(!o.success)throw new provider.InvalidModelRequestError({info:"Invalid model request",cause:o.error});let t=o.data,s=t.model;if(t.system_instruction&&t.systemInstruction)throw new provider.InvalidModelRequestError({info:`Invalid model request for model : '${this.modelName}'`,cause:new Error("'system_instruction' and 'systemInstruction' are not allowed at the same time")});if(t.generation_config&&t.generationConfig)throw new provider.InvalidModelRequestError({info:`Invalid model request for model : '${this.modelName}'`,cause:new Error("'generation_config' and 'generationConfig' are not 
allowed at the same time")});if(t.tool_config&&t.toolConfig)throw new provider.InvalidModelRequestError({info:`Invalid model request for model : '${this.modelName}'`,cause:new Error("'tool_config' and 'toolConfig' are not allowed at the same time")});let l=t.system_instruction||t.systemInstruction,n=t.generation_config||t.generationConfig,m=t.safety_settings||t.safetySettings,r=t.tool_config||t.toolConfig;if(r&&(!t.tools||t.tools.function_declarations.length===0))throw new provider.InvalidModelRequestError({info:`Invalid model request for model : '${this.modelName}'`,cause:new Error("'tools' are required when 'tool_choice' is specified")});let p={};r&&(r.function_calling_config.mode==="ANY"&&r.function_calling_config.allowed_function_names&&r.function_calling_config.allowed_function_names.length===1?p.toolChoice=r.function_calling_config.allowed_function_names[0]:p.toolChoice=r.function_calling_config.mode.toLowerCase()),p.seed=n==null?void 0:n.seed,p.maxTokens=n==null?void 0:n.maxOutputTokens,p.temperature=n==null?void 0:n.temperature,p.topP=n==null?void 0:n.topP,p.presencePenalty=n==null?void 0:n.presencePenalty,p.frequencyPenalty=n==null?void 0:n.frequencyPenalty,p.stop=n==null?void 0:n.stopSequences,p.safetySettings=m;let h=types.Config().parse(provider.removeUndefinedEntries(p)),C=[];l&&l.parts.forEach(y=>{C.push({role:types.SystemRoleLiteral,content:[{modality:types.TextModalityLiteral,value:y.text}]});}),t.contents.forEach(y=>{let V=y.role;switch(V){case"user":{let H=y.parts.map(S=>"text"in S?{modality:types.TextModalityLiteral,value:S.text}:{modality:types.ImageModalityLiteral,detail:"auto",value:{type:types.Base64ImageContentTypeLiteral,base64:S.inline_data.data,media_type:S.inline_data.mime_type.split("/")[1]}});C.push({role:V,content:H});}break;case"model":{let H=y.parts.map((S,K)=>"text"in S?{modality:types.TextModalityLiteral,value:S.text}:{modality:types.ToolCallModalityLiteral,id:K.toString(),index:K,name:S.function_call.name,arguments:JSON.stringify(S.function_call.args)});C.push({role:types.AssistantRoleLiteral,content:H});}break;case"function":{let H=y.parts.map((S,K)=>({modality:types.ToolResponseModalityLiteral,id:K.toString(),index:K,name:S.function_response.name,data:JSON.stringify(S.function_response.response)}));C.push({role:types.ToolRoleLiteral,content:H});}break;default:throw new provider.InvalidMessagesError({info:`Invalid message 'role' for model : ${this.modelName}`,cause:new Error(`role : '${y.role}' is not supported for model : ${this.modelName}`)})}});let b=[];return t.tools&&t.tools.function_declarations.forEach(y=>{b.push({type:"function",definition:{schema:{name:y.name,description:y.description,parameters:y.parameters}}});}),{modelName:s,config:h,messages:C,tools:b.length>0?b:void 0}}transformConfig(e,o,t){let s=e.toolChoice;delete e.toolChoice;let l=this.modelSchema.config.schema.safeParse(e);if(!l.success)throw new provider.InvalidConfigError({info:`Invalid config for model : '${this.modelName}'`,cause:l.error});let n=l.data;Object.keys(n).forEach(h=>{if(!(h in this.modelSchema.config.def))throw new provider.InvalidConfigError({info:`Invalid config for model : '${this.modelName}'`,cause:new Error(`Invalid config key : '${h}',
+ [${this.embeddingModelLiterals().join(", ")}]`)});let t=this.embeddingModelFactories[o].model,s=this.embeddingModelFactories[o].modelOptions.parse(e);return new t(s)}};Y.baseUrl="https://generativelanguage.googleapis.com/v1beta";var g=zod.z.enum([types.SystemRoleLiteral,types.UserRoleLiteral,types.AssistantRoleLiteral,types.ToolRoleLiteral]),Rt="model",wt="function",u={system:types.UserRoleLiteral,user:types.UserRoleLiteral,assistant:Rt,tool:wt};var M=[types.TextModalityLiteral,types.ImageModalityLiteral,types.ToolCallModalityLiteral,types.ToolResponseModalityLiteral],G=zod.z.enum([types.TextModalityLiteral,types.ImageModalityLiteral,types.ToolCallModalityLiteral,types.ToolResponseModalityLiteral]),un=[types.TextModalityLiteral],yn=zod.z.enum([types.TextModalityLiteral]),Ee=[types.TextModalityLiteral,types.ImageModalityLiteral],Oe=zod.z.enum([types.TextModalityLiteral,types.ImageModalityLiteral]),k=[types.TextModalityLiteral,types.ToolCallModalityLiteral,types.ToolResponseModalityLiteral],I=zod.z.enum([types.TextModalityLiteral,types.ToolCallModalityLiteral,types.ToolResponseModalityLiteral]);var kt=zod.z.object({text:zod.z.string()}),It=zod.z.object({functionCall:zod.z.object({name:zod.z.string(),args:zod.z.record(zod.z.any())})}),Jo=zod.z.object({candidates:zod.z.array(zod.z.object({content:zod.z.object({role:zod.z.string(),parts:zod.z.array(zod.z.union([kt,It]))}).optional(),finishReason:zod.z.string(),index:zod.z.number().optional(),safetyRatings:zod.z.optional(zod.z.array(zod.z.object({category:zod.z.string(),probability:zod.z.string(),blocked:zod.z.boolean().optional()})))})),promptFeedback:zod.z.optional(zod.z.object({safetyRatings:zod.z.optional(zod.z.array(zod.z.object({category:zod.z.string(),probability:zod.z.string()})))})),usageMetadata:zod.z.object({promptTokenCount:zod.z.number(),cachedContentTokenCount:zod.z.number().optional(),candidatesTokenCount:zod.z.number().optional(),totalTokenCount:zod.z.number()}).optional()}),Lt=zod.z.object({text:zod.z.string()}),vt=zod.z.object({functionCall:zod.z.object({name:zod.z.string(),args:zod.z.record(zod.z.any())})}),Wo=zod.z.object({candidates:zod.z.array(zod.z.object({content:zod.z.object({role:zod.z.string(),parts:zod.z.array(zod.z.union([Lt,vt]))}).optional(),finishReason:zod.z.string().optional(),index:zod.z.number().optional(),safetyRatings:zod.z.optional(zod.z.array(zod.z.object({category:zod.z.string(),probability:zod.z.string(),blocked:zod.z.boolean().optional()})))})),promptFeedback:zod.z.optional(zod.z.object({safetyRatings:zod.z.optional(zod.z.array(zod.z.object({category:zod.z.string(),probability:zod.z.string()})))})),usageMetadata:zod.z.object({promptTokenCount:zod.z.number().optional(),cachedContentTokenCount:zod.z.number().optional(),candidatesTokenCount:zod.z.number().optional(),totalTokenCount:zod.z.number().optional()}).optional()});var 
ot=zod.z.object({text:zod.z.string().min(1)}),Nt=zod.z.object({inline_data:zod.z.object({mime_type:zod.z.string().min(1),data:zod.z.string().base64()})}),Ft=zod.z.object({function_call:zod.z.object({name:zod.z.string().min(1),args:zod.z.record(zod.z.string().min(1))})}),qt=zod.z.object({function_response:zod.z.object({name:zod.z.string().min(1),response:zod.z.record(zod.z.string().min(1))})}),$t=zod.z.object({role:zod.z.enum(["user","model","function"]),parts:zod.z.array(zod.z.union([ot,Nt,Ft,qt]))}),Xo=zod.z.object({parts:zod.z.array(ot)}),zt=zod.z.object({name:zod.z.string().min(1),description:zod.z.string().min(1),parameters:zod.z.any()}),Qo=zod.z.object({function_calling_config:zod.z.object({mode:zod.z.enum(["ANY","AUTO","NONE"]),allowed_function_names:zod.z.array(zod.z.string()).optional()})}),Zo=zod.z.object({stopSequences:zod.z.array(zod.z.string()).optional(),maxOutputTokens:zod.z.number().optional(),temperature:zod.z.number().optional(),topP:zod.z.number().optional(),topK:zod.z.number().optional(),presencePenalty:zod.z.number().optional(),frequencyPenalty:zod.z.number().optional(),seed:zod.z.number().optional()}),et=zod.z.object({category:zod.z.enum(["HARM_CATEGORY_HARASSMENT","HARM_CATEGORY_HATE_SPEECH","HARM_CATEGORY_SEXUALLY_EXPLICIT","HARM_CATEGORY_DANGEROUS_CONTENT","HARM_CATEGORY_CIVIC_INTEGRITY"]),threshold:zod.z.enum(["HARM_BLOCK_THRESHOLD_UNSPECIFIED","BLOCK_LOW_AND_ABOVE","BLOCK_MEDIUM_AND_ABOVE","BLOCK_ONLY_HIGH","BLOCK_NONE","OFF"])}),tt=zod.z.object({model:zod.z.string().min(1).optional(),contents:zod.z.array($t),systemInstruction:Xo.optional(),system_instruction:Xo.optional(),generationConfig:Zo.optional(),generation_config:Zo.optional(),safetySettings:zod.z.array(et).optional(),safety_settings:zod.z.array(et).optional(),tools:zod.z.object({function_declarations:zod.z.array(zt)}).optional(),toolConfig:Qo.optional(),tool_config:Qo.optional()});var f=zod.z.object({modelName:zod.z.string(),apiKey:zod.z.string(),baseUrl:zod.z.string().url().optional(),completeChatUrl:zod.z.string().url().optional(),streamChatUrl:zod.z.string().url().optional()}),T=class{constructor(e,o){this.version="v1";var s;let t=f.parse(o);this.modelSchema=e,this.modelName=t.modelName,this.apiKey=t.apiKey,this.baseUrl=provider.urlWithoutTrailingSlash((s=t.baseUrl)!=null?s:Y.baseUrl),this.completeChatUrl=provider.urlWithoutTrailingSlash(t.completeChatUrl||`${this.baseUrl}/models/${this.modelName}:generateContent?key=${this.apiKey}`),this.streamChatUrl=provider.urlWithoutTrailingSlash(t.streamChatUrl||`${this.baseUrl}/models/${this.modelName}:streamGenerateContent?key=${this.apiKey}`);}getDefaultBaseUrl(){return this.baseUrl}getDefaultHeaders(){return {"Content-Type":"application/json"}}getDefaultParams(){return {}}getRetryDelay(e){return {shouldRetry:!1,delayMs:0}}getTokenCount(e){return e.reduce((o,t)=>o+t.content.map(s=>s.modality==="text"?s.value:"").join(" ").length,0)}transformModelRequest(e){let o=tt.safeParse(e);if(!o.success)throw new provider.InvalidModelRequestError({info:"Invalid model request",cause:o.error});let t=o.data,s=t.model;if(t.system_instruction&&t.systemInstruction)throw new provider.InvalidModelRequestError({info:`Invalid model request for model : '${this.modelName}'`,cause:new Error("'system_instruction' and 'systemInstruction' are not allowed at the same time")});if(t.generation_config&&t.generationConfig)throw new provider.InvalidModelRequestError({info:`Invalid model request for model : '${this.modelName}'`,cause:new Error("'generation_config' and 'generationConfig' are not 
allowed at the same time")});if(t.tool_config&&t.toolConfig)throw new provider.InvalidModelRequestError({info:`Invalid model request for model : '${this.modelName}'`,cause:new Error("'tool_config' and 'toolConfig' are not allowed at the same time")});let l=t.system_instruction||t.systemInstruction,n=t.generation_config||t.generationConfig,m=t.safety_settings||t.safetySettings,r=t.tool_config||t.toolConfig;if(r&&(!t.tools||t.tools.function_declarations.length===0))throw new provider.InvalidModelRequestError({info:`Invalid model request for model : '${this.modelName}'`,cause:new Error("'tools' are required when 'tool_choice' is specified")});let p={};r&&(r.function_calling_config.mode==="ANY"&&r.function_calling_config.allowed_function_names&&r.function_calling_config.allowed_function_names.length===1?p.toolChoice=r.function_calling_config.allowed_function_names[0]:p.toolChoice=r.function_calling_config.mode.toLowerCase()),p.seed=n==null?void 0:n.seed,p.maxTokens=n==null?void 0:n.maxOutputTokens,p.temperature=n==null?void 0:n.temperature,p.topP=n==null?void 0:n.topP,p.presencePenalty=n==null?void 0:n.presencePenalty,p.frequencyPenalty=n==null?void 0:n.frequencyPenalty,p.stop=n==null?void 0:n.stopSequences,p.safetySettings=m;let h=types.Config().parse(provider.removeUndefinedEntries(p)),C=[];l&&l.parts.forEach(y=>{C.push({role:types.SystemRoleLiteral,content:[{modality:types.TextModalityLiteral,value:y.text}]});}),t.contents.forEach(y=>{let V=y.role;switch(V){case"user":{let H=y.parts.map(S=>"text"in S?{modality:types.TextModalityLiteral,value:S.text}:{modality:types.ImageModalityLiteral,detail:"auto",value:{type:types.Base64ImageContentTypeLiteral,base64:S.inline_data.data,media_type:S.inline_data.mime_type.split("/")[1]}});C.push({role:V,content:H});}break;case"model":{let H=y.parts.map((S,K)=>"text"in S?{modality:types.TextModalityLiteral,value:S.text}:{modality:types.ToolCallModalityLiteral,id:K.toString(),index:K,name:S.function_call.name,arguments:JSON.stringify(S.function_call.args)});C.push({role:types.AssistantRoleLiteral,content:H});}break;case"function":{let H=y.parts.map((S,K)=>({modality:types.ToolResponseModalityLiteral,id:K.toString(),index:K,name:S.function_response.name,data:JSON.stringify(S.function_response.response)}));C.push({role:types.ToolRoleLiteral,content:H});}break;default:throw new provider.InvalidMessagesError({info:`Invalid message 'role' for model : ${this.modelName}`,cause:new Error(`role : '${y.role}' is not supported for model : ${this.modelName}`)})}});let b=[];return t.tools&&t.tools.function_declarations.forEach(y=>{b.push({type:"function",definition:{schema:{name:y.name,description:y.description,parameters:y.parameters}}});}),{modelName:s,config:h,messages:C,tools:b.length>0?b:void 0}}transformConfig(e,o,t){let s=e.toolChoice;delete e.toolChoice;let l=this.modelSchema.config.schema.safeParse(e);if(!l.success)throw new provider.InvalidConfigError({info:`Invalid config for model : '${this.modelName}'`,cause:l.error});let n=l.data;Object.keys(n).forEach(h=>{if(!(h in this.modelSchema.config.def))throw new provider.InvalidConfigError({info:`Invalid config for model : '${this.modelName}'`,cause:new Error(`Invalid config key : '${h}',
  available keys : [${Object.keys(this.modelSchema.config.def).join(", ")}]`)})});let m=Object.keys(n).reduce((h,C)=>{let b=this.modelSchema.config.def[C],y=b.param,V=n[C];return y==="maxOutputTokens"&&b.type==="range"&&V===0?h[y]=b.max:h[y]=V,h},{}),r=m.safetySettings;delete m.safetySettings;let p;if(s!==void 0){let h=s;if(!t||t&&t.length===0)throw new provider.InvalidConfigError({info:`Invalid config for model : '${this.modelName}'`,cause:new Error("'tools' are required when 'toolChoice' is specified")});if(t&&t.length>0){let C=this.modelSchema.config.def.toolChoice;if(C.choices.includes(h))h==="any"?p={function_calling_config:{mode:"ANY",allowed_function_names:t.map(b=>b.definition.schema.name)}}:p={function_calling_config:{mode:h.toUpperCase()}};else if(t.map(b=>b.definition.schema.name).includes(h))p={function_calling_config:{mode:"ANY",allowed_function_names:[h]}};else throw new provider.InvalidConfigError({info:`Invalid config for model : '${this.modelName}'`,cause:new Error(`toolChoice : '${h}' is not part of provided 'tools' names or
  one of [${C.choices.join(", ")}]`)})}}return _(_({generation_config:m},p?{tool_config:p}:{}),r?{safety_settings:r}:{})}transformMessages(e){if(!e||e&&e.length===0)return {messages:[]};let o=e.map(n=>{let m=types.Message().safeParse(n);if(!m.success)throw new provider.InvalidMessagesError({info:"Invalid messages",cause:m.error});return m.data});o.forEach(n=>{n.content.forEach(m=>{if(!this.modelSchema.modalities.includes(m.modality))throw new provider.InvalidMessagesError({info:`Invalid message content for model : '${this.modelName}'`,cause:new Error(`model : '${this.modelName}' does not support modality : '${m.modality}',
  available modalities : [${this.modelSchema.modalities.join(", ")}]`)})});}),o.forEach(n=>{if(!Object.keys(this.modelSchema.roles).includes(n.role))throw new provider.InvalidMessagesError({info:`Invalid message content for model : '${this.modelName}'`,cause:new Error(`model : '${this.modelName}' does not support role : '${n.role}',
  available roles : [${Object.keys(this.modelSchema.roles).join(", ")}]`)})});let t={parts:[]},s=[];if(o.forEach(n=>{switch(n.role){case types.SystemRoleLiteral:n.content.forEach(m=>{if(m.modality===types.TextModalityLiteral)t.parts.push({text:m.value});else throw new provider.InvalidMessagesError({info:`Invalid message 'role' and 'modality' combination for model : ${this.modelName}`,cause:new Error(`role : '${n.role}' cannot have content with modality : '${m.modality}'`)})});break;case types.AssistantRoleLiteral:{let m=[];n.content.forEach(r=>{if(r.modality===types.TextModalityLiteral)m.push({text:r.value});else if(r.modality===types.ToolCallModalityLiteral)m.push({function_call:{name:r.name,args:JSON.parse(r.arguments)}});else throw new provider.InvalidMessagesError({info:`Invalid message 'role' and 'modality' combination for model : ${this.modelName}`,cause:new Error(`role : '${n.role}' cannot have content with modality : '${r.modality}'`)})}),s.push({role:this.modelSchema.roles[n.role],parts:m});}break;case types.UserRoleLiteral:{let m=[];n.content.forEach(r=>{if(r.modality===types.TextModalityLiteral)m.push({text:r.value});else if(r.modality===types.ImageModalityLiteral){if(r.value.type==="base64")m.push({inline_data:{mime_type:r.value.media_type,data:r.value.base64}});else if(r.value.type==="url")throw new provider.InvalidMessagesError({info:`Invalid message 'modality' for model : ${this.modelName}`,cause:new Error(`model: '${this.modelName}' does not support image content type: '${r.value.type}'`)})}else throw new provider.InvalidMessagesError({info:`Invalid message 'role' and 'modality' combination for model : ${this.modelName}`,cause:new Error(`role : '${n.role}' cannot have content with modality : '${r.modality}'`)})}),s.push({role:this.modelSchema.roles[n.role],parts:m});}break;case types.ToolRoleLiteral:{let m=[];n.content.forEach(r=>{if(r.modality===types.ToolResponseModalityLiteral)m.push({function_response:{name:r.name,response:JSON.parse(r.data)}});else throw new provider.InvalidMessagesError({info:`Invalid message 'role' and 'modality' combination for model : ${this.modelName}`,cause:new Error(`role : '${n.role}' cannot have content with modality : '${r.modality}'`)})}),s.push({role:this.modelSchema.roles[n.role],parts:m});}break;default:throw new provider.InvalidMessagesError({info:`Invalid message 'role' for model : ${this.modelName}`,cause:new Error(`role : '${n.role}' is not supported,
- available roles : [${Object.keys(this.modelSchema.roles).join(", ")}]`)})}}),s[0].role!==this.modelSchema.roles[types.UserRoleLiteral])throw new provider.InvalidMessagesError({info:`Invalid message 'role' for model : ${this.modelName}`,cause:new Error(`model : '${this.modelName}' requires first message to be from user`)});let l=n=>n===this.modelSchema.roles[types.UserRoleLiteral]||n===this.modelSchema.roles[types.ToolRoleLiteral]?[this.modelSchema.roles[types.AssistantRoleLiteral]]:[this.modelSchema.roles[types.UserRoleLiteral],this.modelSchema.roles[types.ToolRoleLiteral]];for(let n=1;n<s.length;n++)if(!l(s[n-1].role).includes(s[n].role))throw new provider.InvalidMessagesError({info:`Invalid message format for model : ${this.modelName}`,cause:new Error(`model : '${this.modelName}' cannot have message with role : '${s[n].role}' after message with role : '${s[n-1].role}'`)});if(s[s.length-1].role!==this.modelSchema.roles[types.UserRoleLiteral]&&s[s.length-1].role!==this.modelSchema.roles[types.ToolRoleLiteral])throw new provider.InvalidMessagesError({info:`Invalid message format for model : ${this.modelName}`,cause:new Error(`model : '${this.modelName}' requires last message to be from user`)});return _({contents:s},t.parts.length>0?{system_instruction:t}:{})}transformTools(e){if(!this.modelSchema.modalities.includes(types.ToolCallModalityLiteral))throw new provider.InvalidToolsError({info:`Invalid tool 'modality' for model : ${this.modelName}`,cause:new Error(`model : '${this.modelName}' does not support tool modality : '${types.ToolCallModalityLiteral}'`)});return !e||e&&e.length===0?{tools:[]}:{tools:[{function_declarations:e.map(s=>{let l=types.Tool().safeParse(s);if(!l.success)throw new provider.InvalidToolsError({info:"Invalid tools",cause:l.error});return l.data}).map(s=>({name:s.definition.schema.name,description:s.definition.schema.description,parameters:s.definition.schema.parameters}))}]}}getCompleteChatUrl(e,o,t){return x(this,null,function*(){return new Promise(s=>{s(this.completeChatUrl);})})}getCompleteChatHeaders(e,o,t){return x(this,null,function*(){return new Promise(s=>{s(this.getDefaultHeaders());})})}getCompleteChatData(e,o,t){return x(this,null,function*(){let s=this.transformConfig(e,o,t),l=this.transformMessages(o);if(l.messages&&l.messages.length===0)throw new provider.InvalidMessagesError({info:"Messages are required",cause:new Error("Messages are required")});let n=t?this.transformTools(t):{};return new Promise(m=>{m(_(_(_(_({},this.getDefaultParams()),s),l),n));})})}transformCompleteChatResponse(e){let o=Jo.safeParse(e);if(o.success){if(o.data.candidates.length===0)throw new provider.ModelResponseError({info:"Invalid response from model",cause:new Error(`No choices in response : ${JSON.stringify(o.data)}`)});let t=o.data,s=[],l,n=t.candidates[0].content;if(n){let p=n.parts.map((h,C)=>{if("text"in h&&h.text!==void 0)return types.createTextContent(h.text);if("functionCall"in h&&h.functionCall!==void 0)return types.createToolCallContent(C,`${h.functionCall.name}_${C}`,h.functionCall.name,JSON.stringify(h.functionCall.args))});return s.push({role:types.AssistantRoleLiteral,content:p}),t.usageMetadata&&(l={promptTokens:t.usageMetadata.promptTokenCount,totalTokens:t.usageMetadata.totalTokenCount,completionTokens:t.usageMetadata.candidatesTokenCount||0}),{messages:s,usage:l,logProbs:void 0}}let m=t.candidates[0].safetyRatings;if(m&&m.length>0&&m.forEach(p=>{if(p.blocked)throw new provider.ModelResponseError({info:`Blocked content for category: ${p.category} with 
probability: ${p.probability}`,cause:new Error(`Blocked content for category: ${p.category} with probability: ${p.probability}`)})}),t.candidates[0].finishReason==="SAFETY")throw new provider.ModelResponseError({info:"Blocked content, model response finished with safety reason",cause:new Error("Blocked content, model response finished with safety reason")})}throw new provider.ModelResponseError({info:"Invalid response from model",cause:o.error})}getStreamChatUrl(e,o,t){return x(this,null,function*(){return new Promise(s=>{s(this.streamChatUrl);})})}getStreamChatHeaders(e,o,t){return x(this,null,function*(){return new Promise(s=>{s(this.getDefaultHeaders());})})}getStreamChatData(e,o,t){return x(this,null,function*(){let s=this.transformConfig(e,o,t),l=this.transformMessages(o);if(l.messages&&l.messages.length===0)throw new provider.InvalidMessagesError({info:"Messages are required",cause:new Error("Messages are required")});let n=t?this.transformTools(t):{};return new Promise(m=>{m(_(_(_(_({},this.getDefaultParams()),s),l),n));})})}transformStreamChatResponseChunk(e,o){return _o(this,null,function*(){let t=(o+e).split(",\r").filter(s=>s.trim()!=="");for(let s of t){let l=s;if(l=l.replace(/\n/g,""),l.startsWith("["))l=l.slice(1);else if(l.endsWith("]")){if(l==="]")return;l=l.slice(0,-1);}let n;try{n=JSON.parse(l);}catch(r){if(r instanceof SyntaxError){o=l;continue}else throw r}o="";let m=Wo.safeParse(n);if(m.success){let r={partialMessages:[]},p=m.data;if(p.candidates.length>0){let h=p.candidates[0].content;h&&"parts"in h&&h.parts.length>0&&h.parts.forEach((C,b)=>{if("text"in C&&C.text!==void 0&&r.partialMessages.push(types.createPartialTextMessage(types.AssistantRoleLiteral,C.text)),"functionCall"in C&&C.functionCall!==void 0){let y=C.functionCall;r.partialMessages.push(types.createPartialToolCallMessage(types.AssistantRoleLiteral,b,`${y.name}_${b}`,y.name,JSON.stringify(y.args)));}});}p.usageMetadata&&(r.usage={promptTokens:p.usageMetadata.promptTokenCount,completionTokens:p.usageMetadata.candidatesTokenCount,totalTokens:p.usageMetadata.totalTokenCount}),yield {partialResponse:r,buffer:o};}else throw new provider.ModelResponseError({info:"Invalid response from model",cause:m.error})}yield {partialResponse:{partialMessages:[]},buffer:o};})}};var P=class extends T{transformMessages(e){let o=super.transformMessages(e);if(o.systemInstruction){let t={role:this.modelSchema.roles[types.UserRoleLiteral],parts:o.systemInstruction.parts};o.contents.unshift(t),delete o.systemInstruction;}return o}};var Ne="gemini-1.0-pro-001",Xt="Google's predecessor to Gemini 1.5 Pro, a model for scaling across a wide range of tasks Optimized for natural language tasks, multi-turn text and code chat, and code generation",Fe=provider.ChatModelSchema(g,L).parse({name:Ne,description:Xt,maxInputTokens:30720,maxOutputTokens:2048,roles:u,modalities:I,config:{def:c.base(1,.9,2048,4,1).def,schema:c.base(1,.9,2048,4,1).schema}}),Lo=f,se=class extends P{constructor(e){super(Fe,e);}};var qe="gemini-1.0-pro-latest",Zt="Google's latest multimodal model with great performance for high-frequency tasks. 
Optimized for natural language tasks, multi-turn text and code chat, and code generation",$e=provider.ChatModelSchema(g,L).parse({name:qe,description:Zt,maxInputTokens:30720,maxOutputTokens:2048,roles:u,modalities:I,config:{def:c.base(1,.9,2048,4,1).def,schema:c.base(1,.9,2048,4,1).schema}}),ko=f,ne=class extends P{constructor(e){super($e,e);}};var ze="gemini-1.0-pro-vision",os="Google's predecessor to Gemini 1.5 Pro, an image understanding model to handle a broad range of applications",Be=provider.ChatModelSchema(g,Oe).parse({name:ze,description:os,maxInputTokens:12288,maxOutputTokens:4096,roles:u,modalities:Ee,config:{def:c.c1(1,.4,4096,4,1,32).def,schema:c.c1(1,.4,4096,4,1,32).schema}}),vo=f,ie=class extends P{constructor(e){super(Be,e);}};var je="gemini-1.0-pro",ss="Google's predecessor to Gemini 1.5 Pro, a model for scaling across a wide range of tasks Optimized for natural language tasks, multi-turn text and code chat, and code generation",De=provider.ChatModelSchema(g,L).parse({name:je,description:ss,maxInputTokens:30720,maxOutputTokens:2048,roles:u,modalities:I,config:{def:c.base(1,.9,2048,4,1).def,schema:c.base(1,.9,2048,4,1).schema}}),No=f,ae=class extends P{constructor(e){super(De,e);}};var Ue="gemini-1.5-flash-001",is="Google's fastest, most cost-efficient multimodal model with great performance for high-frequency tasks. Optimized for fast and versatile performance across a diverse variety of tasks",Ae=provider.ChatModelSchema(g,G).parse({name:Ue,description:is,maxInputTokens:1e6,maxOutputTokens:8192,roles:u,modalities:M,config:{def:c.c1(2,1,8192,4,.95,64).def,schema:c.c1(2,1,8192,4,.95,64).schema}}),Fo=f,re=class extends T{constructor(e){super(Ae,e);}};var Ve="gemini-1.5-flash-002",rs="Google's fastest, most cost-efficient multimodal model with great performance for high-frequency tasks. Optimized for fast and versatile performance across a diverse variety of tasks",He=provider.ChatModelSchema(g,G).parse({name:Ve,description:rs,maxInputTokens:1e6,maxOutputTokens:8192,roles:u,modalities:M,config:{def:c.c1(2,1,8192,4,.95,40).def,schema:c.c1(2,1,8192,4,.95,40).schema}}),qo=f,le=class extends T{constructor(e){super(He,e);}};var Ke="gemini-1.5-flash-latest",ms="Google's latest multimodal model with great performance for high-frequency tasks. Optimized for fast and versatile performance across a diverse variety of tasks",Ye=provider.ChatModelSchema(g,G).parse({name:Ke,description:ms,maxInputTokens:1e6,maxOutputTokens:8192,roles:u,modalities:M,config:{def:c.c1(2,1,8192,4,.95,64).def,schema:c.c1(2,1,8192,4,.95,64).schema}}),$o=f,me=class extends T{constructor(e){super(Ye,e);}};var Je="gemini-1.5-flash",ps="Google's fastest, most cost-efficient multimodal model with great performance for high-frequency tasks. Optimized for fast and versatile performance across a diverse variety of tasks",We=provider.ChatModelSchema(g,G).parse({name:Je,description:ps,maxInputTokens:1e6,maxOutputTokens:8192,roles:u,modalities:M,config:{def:c.c1(2,1,8192,4,.95,64).def,schema:c.c1(2,1,8192,4,.95,64).schema}}),zo=f,de=class extends T{constructor(e){super(We,e);}};var Xe="gemini-1.5-pro-001",hs="Google's best performing multimodal model with features for a wide variety of reasoning tasks. 
Optimized for complex reasoning tasks requiring more intelligence",Qe=provider.ChatModelSchema(g,G).parse({name:Xe,description:hs,maxInputTokens:2e6,maxOutputTokens:8192,roles:u,modalities:M,config:{def:c.c1(2,1,8192,4,.95,64).def,schema:c.c1(2,1,8192,4,.95,64).schema}}),Bo=f,pe=class extends T{constructor(e){super(Qe,e);}};var Ze="gemini-1.5-pro-002",gs="Google's best performing multimodal model with features for a wide variety of reasoning tasks. Optimized for complex reasoning tasks requiring more intelligence",eo=provider.ChatModelSchema(g,G).parse({name:Ze,description:gs,maxInputTokens:2e6,maxOutputTokens:8192,roles:u,modalities:M,config:{def:c.c1(2,1,8192,4,.95,40).def,schema:c.c1(2,1,8192,4,.95,40).schema}}),jo=f,ce=class extends T{constructor(e){super(eo,e);}};var oo="gemini-1.5-pro-latest",ys="Google's best performing multimodal model with features for a wide variety of reasoning tasks. Optimized for complex reasoning tasks requiring more intelligence",to=provider.ChatModelSchema(g,G).parse({name:oo,description:ys,maxInputTokens:2e6,maxOutputTokens:8192,roles:u,modalities:M,config:{def:c.c1(2,1,8192,4,.95,64).def,schema:c.c1(2,1,8192,4,.95,64).schema}}),Do=f,he=class extends T{constructor(e){super(to,e);}};var so="gemini-1.5-pro",Ts="Google's best performing multimodal model with features for a wide variety of reasoning tasks. Optimized for complex reasoning tasks requiring more intelligence",no=provider.ChatModelSchema(g,G).parse({name:so,description:Ts,maxInputTokens:2e6,maxOutputTokens:8192,roles:u,modalities:M,config:{def:c.c1(2,1,8192,4,.95,64).def,schema:c.c1(2,1,8192,4,.95,64).schema}}),Uo=f,fe=class extends T{constructor(e){super(no,e);}};var io="gemini-2.0-flash-exp",Gs="Google's experimental multimodal model with enhanced capabilities. 
Designed for cutting-edge performance across complex and high-frequency tasks.",ao=provider.ChatModelSchema(g,G).parse({name:io,description:Gs,maxInputTokens:1e6,maxOutputTokens:8192,roles:u,modalities:M,config:{def:c.c1(2,1,8192,4,.95,64).def,schema:c.c1(2,1,8192,4,.95,64).schema}}),Ao=f,ge=class extends T{constructor(e){super(ao,e);}};var ro="gemini-pro-vision",_s="An image understanding model to handle a broad range of applications",lo=provider.ChatModelSchema(g,Oe).parse({name:ro,description:_s,maxInputTokens:12288,maxOutputTokens:4096,roles:u,modalities:Ee,config:{def:c.c1(1,.4,4096,4,1,32).def,schema:c.c1(1,.4,4096,4,1,32).schema}}),Vo=f,ue=class extends P{constructor(e){super(lo,e);}};var mo="gemini-pro",Os="A model for scaling across a wide range of tasks Optimized for natural language tasks, multi-turn text and code chat, and code generation",po=provider.ChatModelSchema(g,L).parse({name:mo,description:Os,maxInputTokens:30720,maxOutputTokens:2048,roles:u,modalities:I,config:{def:c.base(1,.9,2048,4,1).def,schema:c.base(1,.9,2048,4,1).schema}}),Ho=f,ye=class extends P{constructor(e){super(po,e);}};var xe=[types.EmbeddingTextModalityLiteral],Re=zod.z.enum([types.EmbeddingTextModalityLiteral]);var lt=zod.z.object({embeddings:zod.z.array(zod.z.object({values:zod.z.array(zod.z.number())}))});var Ss=zod.z.object({model:zod.z.string().min(1),content:zod.z.object({parts:zod.z.array(zod.z.object({text:zod.z.string().min(1)})).min(1)})}),mt=zod.z.object({model:zod.z.string().min(1).optional(),requests:zod.z.array(Ss).min(1),outputDimensionality:zod.z.number().int().min(1).optional()});var oe=zod.z.object({modelName:zod.z.string(),apiKey:zod.z.string(),baseUrl:zod.z.string().url(),getEmbeddingsUrl:zod.z.string().url().optional()}),A=class{constructor(e,o){this.version="v1";let t=oe.parse(o);this.modelSchema=e,this.modelName=t.modelName,this.apiKey=t.apiKey,this.baseUrl=provider.urlWithoutTrailingSlash(t.baseUrl),this.getEmbeddingsUrl=provider.urlWithoutTrailingSlash(t.getEmbeddingsUrl||`${this.baseUrl}/models/${this.modelName}:batchEmbedContents?key=${this.apiKey}`);}getDefaultBaseUrl(){return this.baseUrl}getDefaultHeaders(){return {"Content-Type":"application/json"}}getDefaultParams(){return {model:this.modelName}}getRetryDelay(e){return {shouldRetry:!1,delayMs:0}}getTokenCount(e){return e.requests.reduce((o,t)=>o+t.length,0)}transformModelRequest(e){let o=mt.safeParse(e);if(!o.success)throw new provider.InvalidModelRequestError({info:"Invalid model request",cause:o.error});let t=o.data,s=t.model,l={outputDimensionality:t.outputDimensionality},n=types.Config().parse(provider.removeUndefinedEntries(l)),m={modality:types.EmbeddingTextModalityLiteral,requests:t.requests.reduce((r,p)=>(r.push(...p.content.parts.map(h=>h.text)),r),[])};return {modelName:s,config:n,embeddingRequests:m}}transformConfig(e,o){let t=this.modelSchema.config.schema.safeParse(e);if(!t.success)throw new provider.InvalidConfigError({info:`Invalid config for model : '${this.modelName}'`,cause:t.error});let s=t.data;return Object.keys(s).forEach(n=>{if(!this.modelSchema.config.def[n])throw new provider.InvalidConfigError({info:`Invalid config for model : '${this.modelName}'`,cause:new Error(`Invalid config key : '${n}',
+ available roles : [${Object.keys(this.modelSchema.roles).join(", ")}]`)})}}),s[0].role!==this.modelSchema.roles[types.UserRoleLiteral])throw new provider.InvalidMessagesError({info:`Invalid message 'role' for model : ${this.modelName}`,cause:new Error(`model : '${this.modelName}' requires first message to be from user`)});let l=n=>n===this.modelSchema.roles[types.UserRoleLiteral]||n===this.modelSchema.roles[types.ToolRoleLiteral]?[this.modelSchema.roles[types.AssistantRoleLiteral]]:[this.modelSchema.roles[types.UserRoleLiteral],this.modelSchema.roles[types.ToolRoleLiteral]];for(let n=1;n<s.length;n++)if(!l(s[n-1].role).includes(s[n].role))throw new provider.InvalidMessagesError({info:`Invalid message format for model : ${this.modelName}`,cause:new Error(`model : '${this.modelName}' cannot have message with role : '${s[n].role}' after message with role : '${s[n-1].role}'`)});if(s[s.length-1].role!==this.modelSchema.roles[types.UserRoleLiteral]&&s[s.length-1].role!==this.modelSchema.roles[types.ToolRoleLiteral])throw new provider.InvalidMessagesError({info:`Invalid message format for model : ${this.modelName}`,cause:new Error(`model : '${this.modelName}' requires last message to be from user`)});return _({contents:s},t.parts.length>0?{system_instruction:t}:{})}transformTools(e){if(!this.modelSchema.modalities.includes(types.ToolCallModalityLiteral))throw new provider.InvalidToolsError({info:`Invalid tool 'modality' for model : ${this.modelName}`,cause:new Error(`model : '${this.modelName}' does not support tool modality : '${types.ToolCallModalityLiteral}'`)});return !e||e&&e.length===0?{tools:[]}:{tools:[{function_declarations:e.map(s=>{let l=types.Tool().safeParse(s);if(!l.success)throw new provider.InvalidToolsError({info:"Invalid tools",cause:l.error});return l.data}).map(s=>({name:s.definition.schema.name,description:s.definition.schema.description,parameters:s.definition.schema.parameters}))}]}}getCompleteChatUrl(e,o,t){return x(this,null,function*(){return new Promise(s=>{s(this.completeChatUrl);})})}getCompleteChatHeaders(e,o,t){return x(this,null,function*(){return new Promise(s=>{s(this.getDefaultHeaders());})})}getCompleteChatData(e,o,t){return x(this,null,function*(){let s=this.transformConfig(e,o,t),l=this.transformMessages(o);if(l.messages&&l.messages.length===0)throw new provider.InvalidMessagesError({info:"Messages are required",cause:new Error("Messages are required")});let n=t?this.transformTools(t):{};return new Promise(m=>{m(_(_(_(_({},this.getDefaultParams()),s),l),n));})})}transformCompleteChatResponse(e){let o=Jo.safeParse(e);if(o.success){if(o.data.candidates.length===0)throw new provider.ModelResponseError({info:"Invalid response from model",cause:new Error(`No choices in response : ${JSON.stringify(o.data)}`)});let t=o.data,s=[],l,n=t.candidates[0].content;if(n){let p=n.parts.map((h,C)=>{if("text"in h&&h.text!==void 0)return types.createTextContent(h.text);if("functionCall"in h&&h.functionCall!==void 0)return types.createToolCallContent(C,`${h.functionCall.name}_${C}`,h.functionCall.name,JSON.stringify(h.functionCall.args))});return s.push({role:types.AssistantRoleLiteral,content:p}),t.usageMetadata&&(l={promptTokens:t.usageMetadata.promptTokenCount,totalTokens:t.usageMetadata.totalTokenCount,completionTokens:t.usageMetadata.candidatesTokenCount||0}),{messages:s,usage:l,logProbs:void 0}}let m=t.candidates[0].safetyRatings;if(m&&m.length>0&&m.forEach(p=>{if(p.blocked)throw new provider.ModelResponseError({info:`Blocked content for category: ${p.category} with 
probability: ${p.probability}`,cause:new Error(`Blocked content for category: ${p.category} with probability: ${p.probability}`)})}),t.candidates[0].finishReason==="SAFETY")throw new provider.ModelResponseError({info:"Blocked content, model response finished with safety reason",cause:new Error("Blocked content, model response finished with safety reason")})}throw new provider.ModelResponseError({info:"Invalid response from model",cause:o.error})}getStreamChatUrl(e,o,t){return x(this,null,function*(){return new Promise(s=>{s(this.streamChatUrl);})})}getStreamChatHeaders(e,o,t){return x(this,null,function*(){return new Promise(s=>{s(this.getDefaultHeaders());})})}getStreamChatData(e,o,t){return x(this,null,function*(){let s=this.transformConfig(e,o,t),l=this.transformMessages(o);if(l.messages&&l.messages.length===0)throw new provider.InvalidMessagesError({info:"Messages are required",cause:new Error("Messages are required")});let n=t?this.transformTools(t):{};return new Promise(m=>{m(_(_(_(_({},this.getDefaultParams()),s),l),n));})})}transformStreamChatResponseChunk(e,o){return _o(this,null,function*(){let t=(o+e).split(",\r").filter(s=>s.trim()!=="");for(let s of t){let l=s;if(l=l.replace(/\n/g,""),l.startsWith("[")||l.startsWith(",{"))l=l.slice(1);else if(l.endsWith("]")){if(l==="]")return;l=l.slice(0,-1);}let n;try{n=JSON.parse(l);}catch(r){if(r instanceof SyntaxError){o=l;continue}else throw r}o="";let m=Wo.safeParse(n);if(m.success){let r={partialMessages:[]},p=m.data;if(p.candidates.length>0){let h=p.candidates[0].content;h&&"parts"in h&&h.parts.length>0&&h.parts.forEach((C,b)=>{if("text"in C&&C.text!==void 0&&r.partialMessages.push(types.createPartialTextMessage(types.AssistantRoleLiteral,C.text)),"functionCall"in C&&C.functionCall!==void 0){let y=C.functionCall;r.partialMessages.push(types.createPartialToolCallMessage(types.AssistantRoleLiteral,b,`${y.name}_${b}`,y.name,JSON.stringify(y.args)));}});}p.usageMetadata&&p.usageMetadata.totalTokenCount&&p.usageMetadata.promptTokenCount&&p.usageMetadata.candidatesTokenCount&&(r.usage={promptTokens:p.usageMetadata.promptTokenCount,completionTokens:p.usageMetadata.candidatesTokenCount,totalTokens:p.usageMetadata.totalTokenCount}),yield {partialResponse:r,buffer:o};}else throw new provider.ModelResponseError({info:"Invalid response from model",cause:m.error})}yield {partialResponse:{partialMessages:[]},buffer:o};})}};var P=class extends T{transformMessages(e){let o=super.transformMessages(e);if(o.systemInstruction){let t={role:this.modelSchema.roles[types.UserRoleLiteral],parts:o.systemInstruction.parts};o.contents.unshift(t),delete o.systemInstruction;}return o}};var Ne="gemini-1.0-pro-001",Xt="Google's predecessor to Gemini 1.5 Pro, a model for scaling across a wide range of tasks Optimized for natural language tasks, multi-turn text and code chat, and code generation",Fe=provider.ChatModelSchema(g,I).parse({name:Ne,description:Xt,maxInputTokens:30720,maxOutputTokens:2048,roles:u,modalities:k,config:{def:c.base(1,.9,2048,4,1).def,schema:c.base(1,.9,2048,4,1).schema}}),Io=f,se=class extends P{constructor(e){super(Fe,e);}};var qe="gemini-1.0-pro-latest",Zt="Google's latest multimodal model with great performance for high-frequency tasks. 
Optimized for natural language tasks, multi-turn text and code chat, and code generation",$e=provider.ChatModelSchema(g,I).parse({name:qe,description:Zt,maxInputTokens:30720,maxOutputTokens:2048,roles:u,modalities:k,config:{def:c.base(1,.9,2048,4,1).def,schema:c.base(1,.9,2048,4,1).schema}}),Lo=f,ne=class extends P{constructor(e){super($e,e);}};var ze="gemini-1.0-pro-vision",os="Google's predecessor to Gemini 1.5 Pro, an image understanding model to handle a broad range of applications",Be=provider.ChatModelSchema(g,Oe).parse({name:ze,description:os,maxInputTokens:12288,maxOutputTokens:4096,roles:u,modalities:Ee,config:{def:c.c1(1,.4,4096,4,1,32).def,schema:c.c1(1,.4,4096,4,1,32).schema}}),vo=f,ie=class extends P{constructor(e){super(Be,e);}};var je="gemini-1.0-pro",ss="Google's predecessor to Gemini 1.5 Pro, a model for scaling across a wide range of tasks Optimized for natural language tasks, multi-turn text and code chat, and code generation",De=provider.ChatModelSchema(g,I).parse({name:je,description:ss,maxInputTokens:30720,maxOutputTokens:2048,roles:u,modalities:k,config:{def:c.base(1,.9,2048,4,1).def,schema:c.base(1,.9,2048,4,1).schema}}),No=f,ae=class extends P{constructor(e){super(De,e);}};var Ue="gemini-1.5-flash-001",is="Google's fastest, most cost-efficient multimodal model with great performance for high-frequency tasks. Optimized for fast and versatile performance across a diverse variety of tasks",Ae=provider.ChatModelSchema(g,G).parse({name:Ue,description:is,maxInputTokens:1e6,maxOutputTokens:8192,roles:u,modalities:M,config:{def:c.c1(2,1,8192,4,.95,64).def,schema:c.c1(2,1,8192,4,.95,64).schema}}),Fo=f,re=class extends T{constructor(e){super(Ae,e);}};var Ve="gemini-1.5-flash-002",rs="Google's fastest, most cost-efficient multimodal model with great performance for high-frequency tasks. Optimized for fast and versatile performance across a diverse variety of tasks",He=provider.ChatModelSchema(g,G).parse({name:Ve,description:rs,maxInputTokens:1e6,maxOutputTokens:8192,roles:u,modalities:M,config:{def:c.c1(2,1,8192,4,.95,40).def,schema:c.c1(2,1,8192,4,.95,40).schema}}),qo=f,le=class extends T{constructor(e){super(He,e);}};var Ke="gemini-1.5-flash-latest",ms="Google's latest multimodal model with great performance for high-frequency tasks. Optimized for fast and versatile performance across a diverse variety of tasks",Ye=provider.ChatModelSchema(g,G).parse({name:Ke,description:ms,maxInputTokens:1e6,maxOutputTokens:8192,roles:u,modalities:M,config:{def:c.c1(2,1,8192,4,.95,64).def,schema:c.c1(2,1,8192,4,.95,64).schema}}),$o=f,me=class extends T{constructor(e){super(Ye,e);}};var Je="gemini-1.5-flash",ps="Google's fastest, most cost-efficient multimodal model with great performance for high-frequency tasks. Optimized for fast and versatile performance across a diverse variety of tasks",We=provider.ChatModelSchema(g,G).parse({name:Je,description:ps,maxInputTokens:1e6,maxOutputTokens:8192,roles:u,modalities:M,config:{def:c.c1(2,1,8192,4,.95,64).def,schema:c.c1(2,1,8192,4,.95,64).schema}}),zo=f,de=class extends T{constructor(e){super(We,e);}};var Xe="gemini-1.5-pro-001",hs="Google's best performing multimodal model with features for a wide variety of reasoning tasks. 
Optimized for complex reasoning tasks requiring more intelligence",Qe=provider.ChatModelSchema(g,G).parse({name:Xe,description:hs,maxInputTokens:2e6,maxOutputTokens:8192,roles:u,modalities:M,config:{def:c.c1(2,1,8192,4,.95,64).def,schema:c.c1(2,1,8192,4,.95,64).schema}}),Bo=f,pe=class extends T{constructor(e){super(Qe,e);}};var Ze="gemini-1.5-pro-002",gs="Google's best performing multimodal model with features for a wide variety of reasoning tasks. Optimized for complex reasoning tasks requiring more intelligence",eo=provider.ChatModelSchema(g,G).parse({name:Ze,description:gs,maxInputTokens:2e6,maxOutputTokens:8192,roles:u,modalities:M,config:{def:c.c1(2,1,8192,4,.95,40).def,schema:c.c1(2,1,8192,4,.95,40).schema}}),jo=f,ce=class extends T{constructor(e){super(eo,e);}};var oo="gemini-1.5-pro-latest",ys="Google's best performing multimodal model with features for a wide variety of reasoning tasks. Optimized for complex reasoning tasks requiring more intelligence",to=provider.ChatModelSchema(g,G).parse({name:oo,description:ys,maxInputTokens:2e6,maxOutputTokens:8192,roles:u,modalities:M,config:{def:c.c1(2,1,8192,4,.95,64).def,schema:c.c1(2,1,8192,4,.95,64).schema}}),Do=f,he=class extends T{constructor(e){super(to,e);}};var so="gemini-1.5-pro",Ts="Google's best performing multimodal model with features for a wide variety of reasoning tasks. Optimized for complex reasoning tasks requiring more intelligence",no=provider.ChatModelSchema(g,G).parse({name:so,description:Ts,maxInputTokens:2e6,maxOutputTokens:8192,roles:u,modalities:M,config:{def:c.c1(2,1,8192,4,.95,64).def,schema:c.c1(2,1,8192,4,.95,64).schema}}),Uo=f,fe=class extends T{constructor(e){super(no,e);}};var io="gemini-2.0-flash-exp",Gs="Google's experimental multimodal model with enhanced capabilities. 
Designed for cutting-edge performance across complex and high-frequency tasks.",ao=provider.ChatModelSchema(g,G).parse({name:io,description:Gs,maxInputTokens:1e6,maxOutputTokens:8192,roles:u,modalities:M,config:{def:c.c1(2,1,8192,4,.95,64).def,schema:c.c1(2,1,8192,4,.95,64).schema}}),Ao=f,ge=class extends T{constructor(e){super(ao,e);}};var ro="gemini-pro-vision",_s="An image understanding model to handle a broad range of applications",lo=provider.ChatModelSchema(g,Oe).parse({name:ro,description:_s,maxInputTokens:12288,maxOutputTokens:4096,roles:u,modalities:Ee,config:{def:c.c1(1,.4,4096,4,1,32).def,schema:c.c1(1,.4,4096,4,1,32).schema}}),Vo=f,ue=class extends P{constructor(e){super(lo,e);}};var mo="gemini-pro",Os="A model for scaling across a wide range of tasks Optimized for natural language tasks, multi-turn text and code chat, and code generation",po=provider.ChatModelSchema(g,I).parse({name:mo,description:Os,maxInputTokens:30720,maxOutputTokens:2048,roles:u,modalities:k,config:{def:c.base(1,.9,2048,4,1).def,schema:c.base(1,.9,2048,4,1).schema}}),Ho=f,ye=class extends P{constructor(e){super(po,e);}};var xe=[types.EmbeddingTextModalityLiteral],Re=zod.z.enum([types.EmbeddingTextModalityLiteral]);var lt=zod.z.object({embeddings:zod.z.array(zod.z.object({values:zod.z.array(zod.z.number())}))});var Ss=zod.z.object({model:zod.z.string().min(1),content:zod.z.object({parts:zod.z.array(zod.z.object({text:zod.z.string().min(1)})).min(1)})}),mt=zod.z.object({model:zod.z.string().min(1).optional(),requests:zod.z.array(Ss).min(1),outputDimensionality:zod.z.number().int().min(1).optional()});var oe=zod.z.object({modelName:zod.z.string(),apiKey:zod.z.string(),baseUrl:zod.z.string().url(),getEmbeddingsUrl:zod.z.string().url().optional()}),A=class{constructor(e,o){this.version="v1";let t=oe.parse(o);this.modelSchema=e,this.modelName=t.modelName,this.apiKey=t.apiKey,this.baseUrl=provider.urlWithoutTrailingSlash(t.baseUrl),this.getEmbeddingsUrl=provider.urlWithoutTrailingSlash(t.getEmbeddingsUrl||`${this.baseUrl}/models/${this.modelName}:batchEmbedContents?key=${this.apiKey}`);}getDefaultBaseUrl(){return this.baseUrl}getDefaultHeaders(){return {"Content-Type":"application/json"}}getDefaultParams(){return {model:this.modelName}}getRetryDelay(e){return {shouldRetry:!1,delayMs:0}}getTokenCount(e){return e.requests.reduce((o,t)=>o+t.length,0)}transformModelRequest(e){let o=mt.safeParse(e);if(!o.success)throw new provider.InvalidModelRequestError({info:"Invalid model request",cause:o.error});let t=o.data,s=t.model,l={outputDimensionality:t.outputDimensionality},n=types.Config().parse(provider.removeUndefinedEntries(l)),m={modality:types.EmbeddingTextModalityLiteral,requests:t.requests.reduce((r,p)=>(r.push(...p.content.parts.map(h=>h.text)),r),[])};return {modelName:s,config:n,embeddingRequests:m}}transformConfig(e,o){let t=this.modelSchema.config.schema.safeParse(e);if(!t.success)throw new provider.InvalidConfigError({info:`Invalid config for model : '${this.modelName}'`,cause:t.error});let s=t.data;return Object.keys(s).forEach(n=>{if(!this.modelSchema.config.def[n])throw new provider.InvalidConfigError({info:`Invalid config for model : '${this.modelName}'`,cause:new Error(`Invalid config key : '${n}',
  available keys : [${Object.keys(this.modelSchema.config.def).join(", ")}]`)})}),Object.keys(s).reduce((n,m)=>{let p=this.modelSchema.config.def[m].param,h=s[m];return n[p]=h,n},{})}transformEmbeddingRequests(e){let o=types.EmbeddingRequests().safeParse(e);if(!o.success)throw new provider.InvalidEmbeddingRequestsError({info:"Invalid embedding requests",cause:o.error});if(e.modality!==types.EmbeddingTextModalityLiteral)throw new provider.InvalidEmbeddingRequestsError({info:`Invalid embedding requests for model : '${this.modelName}'`,cause:new Error(`Only '${types.EmbeddingTextModalityLiteral}' modality is supported for model : '${this.modelName}'`)});return {requests:o.data.requests.map(s=>({model:`models/${this.modelName}`,content:{parts:[{text:s}]}}))}}getGetEmbeddingsUrl(e,o){return x(this,null,function*(){return new Promise(t=>{t(this.getEmbeddingsUrl);})})}getGetEmbeddingsHeaders(e,o){return x(this,null,function*(){return new Promise(t=>{t(this.getDefaultHeaders());})})}getGetEmbeddingsData(e,o){return x(this,null,function*(){return new Promise(t=>{let s=this.transformConfig(e),l=this.transformEmbeddingRequests(o);if(o.requests.length===0)throw new provider.InvalidEmbeddingRequestsError({info:`Invalid embedding requests for model : '${this.modelName}'`,cause:new Error("requests cannot be empty")});s.outputDimensionality&&(l.requests.forEach(n=>{n.outputDimensionality=s.outputDimensionality;}),delete s.outputDimensionality),t(_(_(_({},this.getDefaultParams()),s),l));})})}transformGetEmbeddingsResponse(e){let o=lt.safeParse(e);if(o.success){let s=o.data.embeddings.map((l,n)=>({index:n,embedding:l.values}));return {encodingFormat:types.FloatEmbeddingLiteral,embeddings:s}}throw new provider.ModelResponseError({info:"Invalid response from model",cause:o.error})}};var co="text-embedding-001",Ns="text-embedding-001",ho=provider.EmbeddingModelSchema(Re).parse({name:co,description:Ns,modalities:xe,maxInputTokens:2048,maxOutputTokens:768,config:{def:B.base(768).def,schema:B.base(768).schema}}),Ko=oe,Ce=class extends A{constructor(e){super(ho,e);}};var fo="text-embedding-004",qs="text-embedding-004",go=provider.EmbeddingModelSchema(Re).parse({name:fo,description:qs,modalities:xe,maxInputTokens:2048,maxOutputTokens:768,config:{def:B.base(768).def,schema:B.base(768).schema}}),Yo=oe,Te=class extends A{constructor(e){super(go,e);}};
 
  exports.BaseChatModel = T;
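
The bundled code in the hunk above includes the embedding model class, which by default posts to <baseUrl>/models/<modelName>:batchEmbedContents?key=<apiKey>, wraps each input string as a single-part content entry under "models/<modelName>", and copies outputDimensionality onto every request when it is configured. The TypeScript sketch below mirrors that payload shape; the helper name buildBatchEmbedPayload and the sample model name, texts, and dimension are illustrative assumptions, not part of the package.

// Sketch only: mirrors the request shape assembled by the bundled embedding class.
// buildBatchEmbedPayload and the sample values are assumptions for illustration.
type GoogleBatchEmbedRequest = {
  model: string; // "models/<modelName>"
  content: { parts: { text: string }[] };
  outputDimensionality?: number;
};

function buildBatchEmbedPayload(
  modelName: string,
  texts: string[],
  outputDimensionality?: number,
): { model: string; requests: GoogleBatchEmbedRequest[] } {
  const requests: GoogleBatchEmbedRequest[] = texts.map((text) => ({
    model: `models/${modelName}`,
    content: { parts: [{ text }] },
    ...(outputDimensionality !== undefined ? { outputDimensionality } : {}),
  }));
  return { model: modelName, requests };
}

// Illustrative usage; the resulting payload would be POSTed to
// <baseUrl>/models/text-embedding-004:batchEmbedContents?key=<apiKey>
const payload = buildBatchEmbedPayload("text-embedding-004", ["hello", "world"], 256);

The corresponding response is validated against an { embeddings: [{ values: number[] }] } shape before being flattened into indexed float embeddings.
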
@@ -28,7 +28,7 @@ exports.EmbeddingModelBaseConfigSchema = Ro;
  exports.Gemini1_0Pro = ae;
  exports.Gemini1_0ProLatest = ne;
  exports.Gemini1_0ProLatestLiteral = qe;
- exports.Gemini1_0ProLatestOptions = ko;
+ exports.Gemini1_0ProLatestOptions = Lo;
  exports.Gemini1_0ProLatestSchema = $e;
  exports.Gemini1_0ProLiteral = je;
  exports.Gemini1_0ProOptions = No;
@@ -39,7 +39,7 @@ exports.Gemini1_0ProVisionOptions = vo;
  exports.Gemini1_0ProVisionSchema = Be;
  exports.Gemini1_0Pro_001 = se;
  exports.Gemini1_0Pro_001Literal = Ne;
- exports.Gemini1_0Pro_001Options = Lo;
+ exports.Gemini1_0Pro_001Options = Io;
  exports.Gemini1_0Pro_001Schema = Fe;
  exports.Gemini1_5Flash = de;
  exports.Gemini1_5Flash001 = re;
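
The two hunks above, like the similar ones further down, only remap minified internal identifiers (ko to Lo, Lo to Io, and so on); the exported names themselves do not change. The sketch below shows a consumer import that is therefore unaffected; it assumes these named exports resolve from the package root.

// Sketch only: assumes the named exports resolve from the package entry point.
import { Gemini1_0ProLatestOptions, Gemini1_0Pro_001Options } from "@adaline/google";

console.log(typeof Gemini1_0ProLatestOptions, typeof Gemini1_0Pro_001Options);
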
@@ -100,8 +100,8 @@ exports.GoogleChatModelRoles = g;
  exports.GoogleChatModelRolesMap = u;
  exports.GoogleChatModelTextModalities = un;
  exports.GoogleChatModelTextModalitiesEnum = yn;
- exports.GoogleChatModelTextToolModalities = I;
- exports.GoogleChatModelTextToolModalitiesEnum = L;
+ exports.GoogleChatModelTextToolModalities = k;
+ exports.GoogleChatModelTextToolModalitiesEnum = I;
  exports.GoogleChatModelTextVisionModalities = Ee;
  exports.GoogleChatModelTextVisionModalitiesEnum = Oe;
  exports.GoogleChatRequest = tt;
@@ -111,8 +111,8 @@ exports.GoogleChatTool = zt;
  exports.GoogleChatToolConfig = Qo;
  exports.GoogleChatToolRoleLiteral = wt;
  exports.GoogleCompleteChatResponse = Jo;
- exports.GoogleCompleteChatTextResponse = It;
- exports.GoogleCompleteChatToolResponse = Lt;
+ exports.GoogleCompleteChatTextResponse = kt;
+ exports.GoogleCompleteChatToolResponse = It;
  exports.GoogleEmbeddingModelConfigs = B;
  exports.GoogleEmbeddingModelModalities = xe;
  exports.GoogleEmbeddingModelModalitiesEnum = Re;
@@ -120,7 +120,7 @@ exports.GoogleEmbeddingRequest = mt;
  exports.GoogleEmbeddingRequestInput = Ss;
  exports.GoogleGetEmbeddingsResponse = lt;
  exports.GoogleStreamChatResponse = Wo;
- exports.GoogleStreamChatTextResponse = kt;
+ exports.GoogleStreamChatTextResponse = Lt;
  exports.GoogleStreamChatToolResponse = vt;
  exports.ProviderLiteral = Et;
  exports.Text_Embedding_001 = Ce;
@@ -132,11 +132,11 @@ exports.Text_Embedding_004Literal = fo;
  exports.Text_Embedding_004Options = Yo;
  exports.Text_Embedding_004Schema = go;
  exports.dimensions = ve;
- exports.frequencyPenalty = Ie;
+ exports.frequencyPenalty = ke;
  exports.maxTokens = N;
- exports.presencePenalty = Le;
+ exports.presencePenalty = Ie;
  exports.safetySettings = z;
- exports.seed = ke;
+ exports.seed = Le;
  exports.stop = F;
  exports.temperature = v;
  exports.toolChoice = $;
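
The stream chunk handler in the bundled chat model above only attaches usage information to a partial response when promptTokenCount, candidatesTokenCount, and totalTokenCount are all present on usageMetadata, so consumers of parsed GoogleStreamChatResponse chunks should treat those counts as optional. The sketch below shows one defensive way to read them; the readUsage helper is an assumption for illustration, not part of the package.

// Sketch only: the usageMetadata field names follow the parsed stream response;
// readUsage itself is a hypothetical consumer-side helper.
type UsageMetadata = {
  promptTokenCount?: number;
  candidatesTokenCount?: number;
  totalTokenCount?: number;
  cachedContentTokenCount?: number;
};

function readUsage(usageMetadata?: UsageMetadata) {
  if (!usageMetadata) return undefined;
  const { promptTokenCount, candidatesTokenCount, totalTokenCount } = usageMetadata;
  if (
    promptTokenCount === undefined ||
    candidatesTokenCount === undefined ||
    totalTokenCount === undefined
  ) {
    return undefined; // interim chunks may omit some or all counts
  }
  return {
    promptTokens: promptTokenCount,
    completionTokens: candidatesTokenCount,
    totalTokens: totalTokenCount,
  };
}

// Illustrative usage with a final chunk that carries complete counts:
const usage = readUsage({ promptTokenCount: 12, candidatesTokenCount: 34, totalTokenCount: 46 });
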