@adaline/custom 1.4.2 → 1.5.1
This diff reflects the contents of publicly released package versions as published to their respective registries, and is provided for informational purposes only.
- package/dist/index.d.mts +23 -0
- package/dist/index.d.ts +23 -0
- package/dist/index.js +13 -13
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +12 -12
- package/dist/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/index.d.mts
CHANGED

@@ -24,6 +24,29 @@ declare const BaseChatModelSchema: {
         title: string;
         description: string;
         objectSchema?: any;
+    } | {
+        type: "paired-select";
+        param: string;
+        title: string;
+        description: string;
+        fields: [{
+            label: string;
+            key: string;
+            choices: {
+                value: string;
+                label: string;
+            }[];
+            description?: string | undefined;
+        }, {
+            label: string;
+            key: string;
+            choices: {
+                value: string;
+                label: string;
+            }[];
+            description?: string | undefined;
+        }];
+        uniqueByField?: string | undefined;
     } | {
        type: "range";
        param: string;
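For orientation, a config entry matching the newly declared "paired-select" union member could look like the sketch below. Every value here (parameter name, labels, choices) is illustrative and not taken from the package; only the shape follows the declaration above.

```ts
// Hypothetical object satisfying the new "paired-select" member of the
// BaseChatModelSchema config-definition union (illustrative values only).
const pairedLocationDef = {
  type: "paired-select" as const,
  param: "user_location",        // provider parameter name (assumed for illustration)
  title: "User location",
  description: "Pair a country with a city to localize results.",
  fields: [
    {
      label: "Country",
      key: "country",
      choices: [
        { value: "US", label: "United States" },
        { value: "DE", label: "Germany" },
      ],
      description: "ISO country code", // optional per the declaration
    },
    {
      label: "City",
      key: "city",
      choices: [
        { value: "nyc", label: "New York" },
        { value: "ber", label: "Berlin" },
      ],
    },
  ],
  uniqueByField: "country",      // optional per the declaration
};
```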
package/dist/index.d.ts
CHANGED

@@ -24,6 +24,29 @@ declare const BaseChatModelSchema: {
         title: string;
         description: string;
         objectSchema?: any;
+    } | {
+        type: "paired-select";
+        param: string;
+        title: string;
+        description: string;
+        fields: [{
+            label: string;
+            key: string;
+            choices: {
+                value: string;
+                label: string;
+            }[];
+            description?: string | undefined;
+        }, {
+            label: string;
+            key: string;
+            choices: {
+                value: string;
+                label: string;
+            }[];
+            description?: string | undefined;
+        }];
+        uniqueByField?: string | undefined;
     } | {
        type: "range";
        param: string;
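Both declaration files expose the same discriminated union, so downstream TypeScript can narrow on the type field to handle the new variant. A minimal sketch follows; the ConfigDef alias and renderField helper are assumptions for illustration, not exports of @adaline/custom.

```ts
// Minimal sketch of narrowing the config-definition union on its "type"
// discriminant. ConfigDef and renderField are illustrative names only.
type PairedSelectField = {
  label: string;
  key: string;
  choices: { value: string; label: string }[];
  description?: string | undefined;
};

type ConfigDef =
  | {
      type: "paired-select"; param: string; title: string; description: string;
      fields: [PairedSelectField, PairedSelectField]; uniqueByField?: string | undefined;
    }
  | {
      type: "range"; param: string; title: string; description: string;
      min: number; max: number; step: number; default: number;
    };

function renderField(def: ConfigDef): string[] {
  if (def.type === "paired-select") {
    // Both tuple entries are fully typed after narrowing.
    return def.fields.map((f) => `${f.label}: ${f.choices.map((c) => c.label).join(" / ")}`);
  }
  return [`${def.title}: ${def.min}-${def.max} (step ${def.step})`];
}
```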
package/dist/index.js
CHANGED

@@ -2,21 +2,21 @@

 var zod = require('zod');

-[old lines 5-15: previous minified CJS bundle]
+[new lines 5 onward: regenerated minified CJS bundle. Aside from bundler-renamed internal identifiers, the rebuilt bundle adds the "paired-select" config schema: a zod object with type: z.literal("paired-select"), param, title, description, a fields tuple of two field definitions (key, label, optional description, choices of { value, label }), and an optional uniqueByField, and it registers "paired-select" in the config-type enum. The rest of the minified diff is truncated in this view.]
ln=zod.z.discriminatedUnion("type",[on,Qt,rn,an,en,Ga]),g=(e=he,n=Mt)=>zod.z.object({name:zod.z.string().min(1),description:zod.z.string().min(1),roles:zod.z.record(e,zod.z.string().min(1).optional()),modalities:zod.z.array(n).nonempty(),maxInputTokens:zod.z.number().int().positive().min(1),maxOutputTokens:zod.z.number().int().positive().min(1),maxReasoningTokens:zod.z.number().int().positive().min(1).optional(),config:zod.z.object({def:zod.z.record(zod.z.string().min(1),ln),schema:zod.z.instanceof(zod.z.ZodObject)}).refine(o=>{var i,s;let m=Object.keys(o.def),l=Object.keys((s=(i=o.schema)==null?void 0:i.shape)!=null?s:{});return m.every(d=>l.includes(d))&&l.every(d=>m.includes(d))},{message:"Keys in 'config.def' must exactly match keys in 'config.schema'"}),price:zod.z.custom()}),ne=(e=Ct)=>zod.z.object({name:zod.z.string().min(1),description:zod.z.string().min(1),modalities:zod.z.array(e).nonempty(),maxInputTokens:zod.z.number().int().positive().min(1),maxOutputTokens:zod.z.number().int().positive().min(1),config:zod.z.object({def:zod.z.record(zod.z.string().min(1),ln),schema:zod.z.instanceof(zod.z.ZodObject)}).refine(n=>{var o,i;let s=Object.keys(n.def),m=Object.keys((i=(o=n.schema)==null?void 0:o.shape)!=null?i:{});return s.every(l=>m.includes(l))&&m.every(l=>s.includes(l))},{message:"Keys in 'config.def' must exactly match keys in 'config.schema'"})});zod.z.record(zod.z.string());zod.z.record(zod.z.union([zod.z.boolean(),zod.z.string(),zod.z.number(),zod.z.object({}),zod.z.array(zod.z.any()),zod.z.null(),zod.z.undefined()]));zod.z.string().url();var Da={type:"range",title:"Temperature",description:"Adjusts the model's creativity level. With a setting of 0, the model strictly picks the most probable next word. For endeavors that benefit from a dash of inventiveness, consider dialing it up to 0.7 or higher, enabling the model to produce text that's unexpectedly fresh."},$a={type:"range",title:"Max tokens",description:"Specify the total tokens for generation, where one token approximates four English characters. Setting this to 0 defaults to the model's maximum capacity."},Ba={type:"range",title:"Max reasoning tokens",description:"Specify the total tokens for reasoning, where one token approximates four English characters."},za=e=>({type:"multi",title:"Stop sequence",description:`Enter up to ${e} sequences that will halt additional text output. The generated text will exclude these sequences.`}),Ha={type:"range",title:"Top A",description:"Considers only the top tokens that have 'sufficiently high' probabilities relative to the most likely token, functioning like a dynamic Top-P. A lower Top-A value narrows down the token choices based on the highest probability token, while a higher Top-A value refines the filtering without necessarily impacting the creativity of the output."},Va={type:"range",title:"Top P",description:"Selects a subset of likely tokens for generation, restricting choices to the top-P fraction of possibilities, such as the top 10% when P=0.1. This approach can limit the variety of the output. By default, it's set to 1, indicating no restriction. 
It's advised to adjust this parameter or temperature to modulate output diversity, but not to modify both simultaneously."},Ja={type:"range",title:"Top K",description:"Select only from the highest K probabilities for each following word, effectively eliminating the less likely 'long tail' options."},Ka={type:"range",title:"Min P",description:"Specifies the minimum probability a token must have to be considered, in relation to the probability of the most likely token. (This value varies based on the confidence level of the top token.) For example, if Min-P is set to 0.1, only tokens with at least 1/10th the probability of the highest-ranked token will be considered."},Wa={type:"range",title:"Frequency penalty",description:"Minimize redundancy. By assigning a penalty to frequently used tokens within the text, the likelihood of repeating identical phrases is reduced. The default setting for this penalty is zero."},Ya={type:"range",title:"Presence penalty",description:"Enhance the introduction of novel subjects by reducing the preference for tokens that have already appeared in the text, thus boosting the chances of exploring fresh topics. The standard setting for this is zero."},Za={type:"range",title:"Seed",description:"When seed is fixed to a specific value, the model makes a best effort to provide the same response for repeated requests. Deterministic output isn't guaranteed. Also, changing the model or parameter settings, such as the temperature, can cause variations in the response even when you use the same seed value. By default, a random seed value is used."},Qa={type:"range",title:"Repetition penalty",description:"Reduces the likelihood of repeating tokens from the input. Increasing this value makes the model less prone to repetition, but setting it too high may lead to less coherent output, often resulting in run-on sentences missing smaller words. The token penalty is scaled according to the original token's probability."},Xa={type:"boolean",title:"Log probs",description:"Whether to return log probabilities of the output tokens or not. If true, returns the log probabilities of each output token returned."},ei={type:"range",title:"Top log probs",description:"The number of most likely tokens to return at each token position, each with an associated log probability. 'logprobs' must be set to true if this parameter is used."},ti={type:"boolean",title:"Echo",description:"If true, the response will contain the prompt."},ni={type:"select",title:"Response format",description:"Choose the response format of your model. For JSON, you must include the string 'JSON' in some form within your system / user prompt."},oi={type:"select",title:"Response format",description:"Choose the response format of your model. 'json_object' colloquially known as JSON mode, instructs the model to respond with a valid JSON (must include the term 'json' in prompt). 
'json_schema' colloquially known as structured outputs, allows you to specify a strict response schema that the model will adhere to."},ai={type:"object",title:"Response schema",description:"When response format is set to 'json_schema', the model will return a JSON object of the specified schema."},ii={type:"object",title:"MCP servers",description:"MCP servers to use for the model."},P={TEMPERATURE:Da,MAX_TOKENS:$a,STOP:za,TOP_A:Ha,TOP_P:Va,TOP_K:Ja,MIN_P:Ka,FREQUENCY_PENALTY:Wa,PRESENCE_PENALTY:Ya,REPETITION_PENALTY:Qa,SEED:Za,LOG_PROBS:Xa,TOP_LOG_PROBS:ei,ECHO:ti,RESPONSE_FORMAT:ni,RESPONSE_FORMAT_WITH_SCHEMA:oi,RESPONSE_SCHEMA:ai,MAX_REASONING_TOKENS:Ba,MCP_SERVERS:ii};var je=e=>Object.fromEntries(Object.entries(e).filter(([n,o])=>o!=null));var mn=e=>e.split(";")[0].split("/")[1],H=e=>e==null?void 0:e.replace(/\/$/,"");var si=Object.defineProperty,ri=Object.defineProperties,li=Object.getOwnPropertyDescriptors,dn=Object.getOwnPropertySymbols,mi=Object.prototype.hasOwnProperty,di=Object.prototype.propertyIsEnumerable,ae=(e,n)=>(n=Symbol[e])?n:Symbol.for("Symbol."+e),ci=e=>{throw TypeError(e)},cn=(e,n,o)=>n in e?si(e,n,{enumerable:!0,configurable:!0,writable:!0,value:o}):e[n]=o,C=(e,n)=>{for(var o in n||(n={}))mi.call(n,o)&&cn(e,o,n[o]);if(dn)for(var o of dn(n))di.call(n,o)&&cn(e,o,n[o]);return e},ee=(e,n)=>ri(e,li(n)),E=(e,n,o)=>new Promise((i,s)=>{var m=p=>{try{d(o.next(p));}catch(u){s(u);}},l=p=>{try{d(o.throw(p));}catch(u){s(u);}},d=p=>p.done?i(p.value):Promise.resolve(p.value).then(m,l);d((o=o.apply(e,n)).next());}),un=function(e,n){this[0]=e,this[1]=n;},pn=(e,n,o)=>{var i=(l,d,p,u)=>{try{var k=o[l](d),T=(d=k.value)instanceof un,x=k.done;Promise.resolve(T?d[0]:d).then(_=>T?i(l==="return"?l:"next",d[1]?{done:_.done,value:_.value}:_,p,u):p({value:_,done:x})).catch(_=>i("throw",_,p,u));}catch(_){u(_);}},s=l=>m[l]=d=>new Promise((p,u)=>i(l,d,p,u)),m={};return o=o.apply(e,n),m[ae("asyncIterator")]=()=>m,s("next"),s("throw"),s("return"),m},pi=e=>{var n=e[ae("asyncIterator")],o=!1,i,s={};return n==null?(n=e[ae("iterator")](),i=m=>s[m]=l=>n[m](l)):(n=n.call(e),i=m=>s[m]=l=>{if(o){if(o=!1,m==="throw")throw l;return l}return o=!0,{done:!1,value:new un(new Promise(d=>{var p=n[m](l);p instanceof Object||ci("Object expected"),d(p);}),1)}}),s[ae("iterator")]=()=>s,i("next"),"throw"in n?i("throw"):s.throw=m=>{throw m},"return"in n&&i("return"),s},gn=G({param:"temperature",title:P.TEMPERATURE.title,description:P.TEMPERATURE.description,min:0,max:2,step:.01,default:1}),hn=e=>G({param:"max_completion_tokens",title:P.MAX_TOKENS.title,description:P.MAX_TOKENS.description,min:0,max:e,step:1,default:0}),fn=e=>Xt({param:"stop",title:P.STOP(e).title,description:P.STOP(e).description,max:e}),bn=G({param:"top_p",title:P.TOP_P.title,description:P.TOP_P.description,min:0,max:1,step:.01,default:1}),yn=G({param:"frequency_penalty",title:P.FREQUENCY_PENALTY.title,description:P.FREQUENCY_PENALTY.description,min:-2,max:2,step:.01,default:0}),Tn=G({param:"presence_penalty",title:P.PRESENCE_PENALTY.title,description:P.PRESENCE_PENALTY.description,min:-2,max:2,step:.01,default:0}),_n=G({param:"seed",title:P.SEED.title,description:P.SEED.description,min:0,max:1e6,step:1,default:0}),Pn=sn({param:"logprobs",title:P.LOG_PROBS.title,description:P.LOG_PROBS.description,default:!1}),vn=G({param:"top_logprobs",title:P.TOP_LOG_PROBS.title,description:P.TOP_LOG_PROBS.description,min:0,max:20,step:1,default:0}),kn=B({param:"tool_choice",title:"Tool choice",description:"Controls which (if any) tool is called by the model. 
'none' means the model will not call a function. 'auto' means the model can pick between generating a message or calling a tool.",default:"auto",choices:["auto","required","none"]}),xn=B({param:"reasoning_effort",title:"Reasoning Effort",description:"Controls the depth of the model's reasoning before delivering an answer. 'minimal' prioritizes speed, 'high' engages in deep reasoning.",default:"medium",choices:["minimal","low","medium","high"]}),Mn=B({param:"verbosity",title:"Verbosity",description:"Controls the length and detail of the model's responses, independent of reasoning depth. 'low' generates concise answers, 'high' provides comprehensive responses.",default:"medium",choices:["low","medium","high"]}),re=(e,n)=>zod.z.object({temperature:gn.schema,maxTokens:hn(e).schema,stop:fn(n).schema,topP:bn.schema,frequencyPenalty:yn.schema,presencePenalty:Tn.schema,seed:_n.schema.transform(o=>o===0?void 0:o),logProbs:Pn.schema,topLogProbs:vn.schema,toolChoice:kn.schema}),le=(e,n)=>({temperature:gn.def,maxTokens:hn(e).def,stop:fn(n).def,topP:bn.def,frequencyPenalty:yn.def,presencePenalty:Tn.def,seed:_n.def,logProbs:Pn.def,topLogProbs:vn.def,toolChoice:kn.def}),Sn=tn({param:"response_schema",title:P.RESPONSE_SCHEMA.title,description:P.RESPONSE_SCHEMA.description,objectSchema:Ot}),On=B({param:"response_format",title:P.RESPONSE_FORMAT_WITH_SCHEMA.title,description:P.RESPONSE_FORMAT_WITH_SCHEMA.description,default:"text",choices:["text","json_object","json_schema"]}),ie=(e,n)=>ee(C({},le(e,n)),{responseFormat:On.def,responseSchema:Sn.def}),se=(e,n)=>re(e,n).extend({responseFormat:On.schema,responseSchema:Sn.schema}),ui=(e,n)=>ee(C({},le(e,n)),{reasoningEffort:xn.def,verbosity:Mn.def,responseFormat:ie(e,n).responseFormat,responseSchema:ie(e,n).responseSchema}),gi=(e,n)=>re(e,n).extend({reasoningEffort:xn.schema,verbosity:Mn.schema,responseFormat:se(e,n).shape.responseFormat,responseSchema:se(e,n).shape.responseSchema}),Cn=G({param:"temperature",title:P.TEMPERATURE.title,description:P.TEMPERATURE.description,min:1,max:1,step:.01,default:1}),wn=B({param:"reasoning_effort",title:"Reasoning Effort",description:"Constrains effort on reasoning for reasoning models. 
Reducing reasoning effort can result in faster responses and fewer tokens used on reasoning in a response.",default:"medium",choices:["low","medium","high"]}),hi=(e,n)=>ee(C({},ie(e,n)),{temperature:Cn.def,reasoningEffort:wn.def}),fi=(e,n)=>se(e,n).extend({temperature:Cn.schema,reasoningEffort:wn.schema}),En=B({param:"response_format",title:P.RESPONSE_FORMAT.title,description:P.RESPONSE_FORMAT.description,default:"text",choices:["text","json_object"]}),bi=(e,n)=>ee(C({},le(e,n)),{responseFormat:En.def}),yi=(e,n)=>re(e,n).extend({responseFormat:En.schema}),Rn=B({param:"encoding_format",title:"Encoding format",description:"Select the encoding format for the word embedding.",default:"float",choices:["float","base64"]}),In=e=>G({param:"dimensions",title:"Dimensions",description:"Select the number of dimensions for the word embedding.",min:1,max:e,step:1,default:e}),jn=()=>zod.z.object({encodingFormat:Rn.schema}),Ln=()=>({encodingFormat:Rn.def}),Ti=e=>jn().extend({dimensions:In(e).schema}),_i=e=>ee(C({},Ln()),{dimensions:In(e).def}),c={base:(e,n)=>({def:le(e,n),schema:re(e,n)}),responseFormat:(e,n)=>({def:bi(e,n),schema:yi(e,n)}),responseSchema:(e,n)=>({def:ie(e,n),schema:se(e,n)}),oSeries:(e,n)=>({def:hi(e,n),schema:fi(e,n)}),gpt5:(e,n)=>({def:ui(e,n),schema:gi(e,n)})},V={base:()=>({def:Ln(),schema:jn()}),dimensions:e=>({def:_i(e),schema:Ti(e)})},v={"gpt-3.5-turbo-0125":{modelName:"gpt-3.5-turbo-0125",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:.5,outputPricePerMillion:1.5}}}]},"gpt-3.5-turbo-1106":{modelName:"gpt-3.5-turbo-1106",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:.5,outputPricePerMillion:1.5}}}]},"gpt-3.5-turbo":{modelName:"gpt-3.5-turbo",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:.5,outputPricePerMillion:1.5}}}]},"gpt-4-0125-preview":{modelName:"gpt-4-0125-preview",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:30,outputPricePerMillion:60}}}]},"gpt-4-0613":{modelName:"gpt-4-0613",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:30,outputPricePerMillion:60}}}]},"gpt-4-1106-preview":{modelName:"gpt-4-1106-preview",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:30,outputPricePerMillion:60}}}]},"gpt-4-turbo-2024-04-09":{modelName:"gpt-4-turbo-2024-04-09",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:10,outputPricePerMillion:30}}}]},"gpt-4-turbo-preview":{modelName:"gpt-4-turbo-preview",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:10,outputPricePerMillion:30}}}]},"gpt-4-turbo":{modelName:"gpt-4-turbo",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:10,outputPricePerMillion:30}}}]},"gpt-4":{modelName:"gpt-4",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:30,outputPricePerMillion:60}}}]},"gpt-4o-2024-05-13":{modelName:"gpt-4o-2024-05-13",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:5,outputPricePerMillion:20}}}]},"gpt-4o-2024-08-06":{modelName:"gpt-4o-2024-08-06",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:2.5,outputPricePerMillion:10}}}]},"gpt-4o-mini-2024-07-18":{modelName:"gpt-4o-mini-2024-07-18",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,pri
ces:{base:{inputPricePerMillion:.6,outputPricePerMillion:2.4}}}]},"gpt-4o-mini":{modelName:"gpt-4o-mini",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:.6,outputPricePerMillion:2.4}}}]},"gpt-4o":{modelName:"gpt-4o",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:5,outputPricePerMillion:20}}}]},"o1-2024-12-17":{modelName:"o1-2024-12-17",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:15,outputPricePerMillion:60}}}]},o1:{modelName:"o1",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:15,outputPricePerMillion:60}}}]},"o3-mini-2025-01-31":{modelName:"o3-mini-2025-01-31",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:1.1,outputPricePerMillion:4.4}}}]},"o3-mini":{modelName:"o3-mini",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:1.1,outputPricePerMillion:4.4}}}]},"o3-2025-04-16":{modelName:"o3-2025-04-16",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:10,outputPricePerMillion:40}}}]},o3:{modelName:"o3",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:10,outputPricePerMillion:40}}}]},"o4-mini-2025-04-16":{modelName:"o4-mini-2025-04-16",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:1.1,outputPricePerMillion:4.4}}}]},"o4-mini":{modelName:"o4-mini",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:1.1,outputPricePerMillion:4.4}}}]},"gpt-4.1":{modelName:"gpt-4.1",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:2,outputPricePerMillion:8}}}]},"gpt-4.1-mini":{modelName:"gpt-4.1-mini",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:.4,outputPricePerMillion:1.6}}}]},"gpt-4.1-nano":{modelName:"gpt-4.1-nano",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:.1,outputPricePerMillion:.4}}}]},"gpt-5":{modelName:"gpt-5",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:1.25,outputPricePerMillion:10}}}]},"gpt-5-mini":{modelName:"gpt-5-mini",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:.25,outputPricePerMillion:2}}}]},"gpt-5-nano":{modelName:"gpt-5-nano",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:.05,outputPricePerMillion:.4}}}]},"gpt-5-chat-latest":{modelName:"gpt-5-chat-latest",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:1.25,outputPricePerMillion:10}}}]},"gpt-5.1":{modelName:"gpt-5.1",currency:"USD",tokenRanges:[{minTokens:0,maxTokens:null,prices:{base:{inputPricePerMillion:1.5,outputPricePerMillion:12}}}]}},Pi="openai",it=class{constructor(){this.version="v1",this.name=Pi,this.chatModelFactories={[Ge]:{model:Vi,modelOptions:Hi,modelSchema:An},[Le]:{model:qi,modelOptions:Fi,modelSchema:Gn},[Ne]:{model:Bi,modelOptions:$i,modelSchema:Un},[Ue]:{model:Wi,modelOptions:Ki,modelSchema:Fn},[Ae]:{model:Qi,modelOptions:Zi,modelSchema:qn},[Fe]:{model:ts,modelOptions:es,modelSchema:Dn},[qe]:{model:as,modelOptions:os,modelSchema:$n},[De]:{model:rs,modelOptions:ss,modelSchema:Bn},[$e]:{model:ds,modelOptions:ms,modelSchema:zn},[ze]:{model:fs,modelOptions:hs,modelSchema:Vn},[Be]:{model:us,modelOption
s:ps,modelSchema:Hn},[He]:{model:Ts,modelOptions:ys,modelSchema:Jn},[Ve]:{model:vs,modelOptions:Ps,modelSchema:Kn},[Je]:{model:Os,modelOptions:Ss,modelSchema:Wn},[Ke]:{model:Es,modelOptions:ws,modelSchema:Yn},[We]:{model:js,modelOptions:Is,modelSchema:Zn},[Ye]:{model:Gs,modelOptions:Ns,modelSchema:Qn},[Ze]:{model:Fs,modelOptions:As,modelSchema:Xn},[Xe]:{model:Hs,modelOptions:zs,modelSchema:to},[tt]:{model:Zs,modelOptions:Ys,modelSchema:oo},[nt]:{model:er,modelOptions:Xs,modelSchema:ao},[et]:{model:Ks,modelOptions:Js,modelSchema:no},[Qe]:{model:$s,modelOptions:Ds,modelSchema:eo},[at]:{model:sr,modelOptions:ir,modelSchema:so},[ot]:{model:or,modelOptions:nr,modelSchema:io},[go]:{model:yr,modelOptions:br,modelSchema:ho},[po]:{model:hr,modelOptions:gr,modelSchema:uo},[ro]:{model:mr,modelOptions:lr,modelSchema:lo},[mo]:{model:pr,modelOptions:cr,modelSchema:co},[fo]:{model:Pr,modelOptions:_r,modelSchema:bo},[yo]:{model:xr,modelOptions:kr,modelSchema:To}},this.embeddingModelFactories={[_o]:{model:Er,modelOptions:wr,modelSchema:Po},[vo]:{model:jr,modelOptions:Ir,modelSchema:ko},[xo]:{model:Gr,modelOptions:Nr,modelSchema:Mo}};}chatModelLiterals(){return Object.keys(this.chatModelFactories)}chatModelSchemas(){return Object.keys(this.chatModelFactories).reduce((e,n)=>(e[n]=this.chatModelFactories[n].modelSchema,e),{})}chatModel(e){let n=e.modelName;if(!(n in this.chatModelFactories))throw new Q({info:`OpenAI chat model: ${n} not found`,cause:new Error(`OpenAI chat model: ${n} not found, available chat models:
[${this.chatModelLiterals().join(", ")}]`)});let o=this.chatModelFactories[n].model,i=this.chatModelFactories[n].modelOptions.parse(e);return new o(i)}embeddingModelLiterals(){return Object.keys(this.embeddingModelFactories)}embeddingModelSchemas(){return Object.keys(this.embeddingModelFactories).reduce((e,n)=>(e[n]=this.embeddingModelFactories[n].modelSchema,e),{})}embeddingModel(e){let n=e.modelName;if(!(n in this.embeddingModelFactories))throw new Q({info:`OpenAI embedding model: ${n} not found`,cause:new Error(`OpenAI embedding model: ${n} not found, available embedding models:
[${this.embeddingModelLiterals().join(", ")}]`)});let o=this.embeddingModelFactories[n].model,i=this.embeddingModelFactories[n].modelOptions.parse(e);return new o(i)}};it.baseUrl="https://api.openai.com/v1";var h=zod.z.enum([K,W,j,Y]),f={system:K,user:W,assistant:j,tool:Y},M=[O,L,R,N],S=zod.z.enum([O,L,R,N]);zod.z.enum([O]);var U=[O,R,N],A=zod.z.enum([O,R,N]),oe=zod.z.object({token:zod.z.string(),logprob:zod.z.number(),bytes:zod.z.array(zod.z.number()).nullable()}),Nn=zod.z.object({content:zod.z.array(oe.extend({top_logprobs:zod.z.array(oe)})).nullable().optional(),refusal:zod.z.array(oe.extend({top_logprobs:zod.z.array(oe)})).nullable().optional()}).nullable(),vi=zod.z.array(zod.z.object({id:zod.z.string().min(1),type:zod.z.enum(["function"]),function:zod.z.object({name:zod.z.string(),arguments:zod.z.string()})})),ki=zod.z.object({id:zod.z.string(),object:zod.z.literal("chat.completion"),created:zod.z.number(),model:zod.z.string(),system_fingerprint:zod.z.string().nullable(),choices:zod.z.array(zod.z.object({index:zod.z.number(),message:zod.z.object({role:zod.z.string(),content:zod.z.string().nullable().optional(),tool_calls:vi.optional(),refusal:zod.z.string().nullable().optional()}),logprobs:Nn.optional(),finish_reason:zod.z.string()})),usage:zod.z.object({prompt_tokens:zod.z.number(),completion_tokens:zod.z.number(),total_tokens:zod.z.number()})}),xi=zod.z.array(zod.z.object({index:zod.z.number().int(),id:zod.z.string().min(1).optional(),type:zod.z.enum(["function"]).optional(),function:zod.z.object({name:zod.z.string().min(1).optional(),arguments:zod.z.string().optional()}).optional()})),Mi=zod.z.object({id:zod.z.string(),object:zod.z.string(),created:zod.z.number(),model:zod.z.string(),system_fingerprint:zod.z.string().nullable().optional(),choices:zod.z.array(zod.z.object({index:zod.z.number(),delta:zod.z.object({content:zod.z.string().nullable().optional(),tool_calls:xi.optional(),refusal:zod.z.string().nullable().optional()}).or(zod.z.object({})),logprobs:Nn.optional(),finish_reason:zod.z.string().nullable()})),usage:zod.z.object({prompt_tokens:zod.z.number(),completion_tokens:zod.z.number(),total_tokens:zod.z.number()}).nullable().optional()}),Si=zod.z.object({type:zod.z.literal("function"),function:zod.z.object({name:zod.z.string().min(1),description:zod.z.string().min(1).optional(),strict:zod.z.boolean().optional(),parameters:zod.z.any()})}),Oi=zod.z.enum(["none","auto","required"]),Ci=zod.z.object({type:zod.z.literal("function"),function:zod.z.object({name:zod.z.string().min(1)})}),wi=zod.z.object({type:zod.z.enum(["text","json_object"])}).or(zod.z.object({type:zod.z.literal("json_schema"),json_schema:zod.z.object({name:zod.z.string().min(1),description:zod.z.string().min(1).optional(),strict:zod.z.boolean().optional(),schema:zod.z.any()})})),st=zod.z.object({text:zod.z.string().min(1),type:zod.z.literal("text")}),Ei=zod.z.object({type:zod.z.literal("image_url"),image_url:zod.z.object({url:zod.z.string().url().min(1),detail:zod.z.enum(["low","high","auto"]).optional()})}),Ri=zod.z.object({id:zod.z.string().min(1),type:zod.z.literal("function"),function:zod.z.object({name:zod.z.string().min(1),arguments:zod.z.string().min(1)})}),Ii=zod.z.object({role:zod.z.literal("system"),content:zod.z.string().min(1).or(zod.z.array(st).min(1))}),ji=zod.z.object({role:zod.z.literal("user"),content:zod.z.string().min(1).or(zod.z.array(zod.z.union([st,Ei])).min(1))}),Li=zod.z.object({role:zod.z.literal("assistant"),content:zod.z.string().min(1).or(zod.z.array(st).min(1)).optional(),tool_calls:zo
d.z.array(Ri).min(1).optional()}),Ni=zod.z.object({role:zod.z.literal("tool"),tool_call_id:zod.z.string().min(1),content:zod.z.string().min(1)}),Gi=zod.z.union([Ii,ji,Li,Ni]),Ui=zod.z.object({model:zod.z.string().min(1).optional(),messages:zod.z.array(Gi).min(1),frequency_penalty:zod.z.number().min(-2).max(2).nullable().optional(),logprobs:zod.z.boolean().nullable().optional(),top_logprobs:zod.z.number().min(0).max(20).nullable().optional(),max_completion_tokens:zod.z.number().min(0).nullable().optional(),presence_penalty:zod.z.number().min(-2).max(2).nullable().optional(),response_format:wi.optional(),seed:zod.z.number().nullable().optional(),stop:zod.z.string().or(zod.z.array(zod.z.string()).max(4)).nullable().optional(),temperature:zod.z.number().min(0).max(2).nullable().optional(),top_p:zod.z.number().min(0).max(1).nullable().optional(),tools:zod.z.array(Si).optional(),tool_choice:Oi.or(Ci).optional(),reasoning_effort:zod.z.enum(["minimal","low","medium","high"]).optional(),verbosity:zod.z.enum(["low","medium","high"]).optional()}),y=zod.z.object({modelName:zod.z.string(),apiKey:zod.z.string(),baseUrl:zod.z.string().url().optional(),completeChatUrl:zod.z.string().url().optional(),streamChatUrl:zod.z.string().url().optional(),organization:zod.z.string().optional()}),b=class{constructor(e,n){this.version="v1";let o=y.parse(n);this.modelSchema=e,this.modelName=o.modelName,this.apiKey=o.apiKey,this.baseUrl=H(o.baseUrl||it.baseUrl),this.streamChatUrl=H(o.streamChatUrl||`${this.baseUrl}/chat/completions`),this.completeChatUrl=H(o.completeChatUrl||`${this.baseUrl}/chat/completions`),this.organization=o.organization;}getDefaultBaseUrl(){return this.baseUrl}getDefaultHeaders(){return C({Authorization:`Bearer ${this.apiKey}`,"Content-Type":"application/json"},this.organization?{"OpenAI-Organization":this.organization}:{})}getDefaultParams(){return {model:this.modelName}}getRetryDelay(e){let n=l=>{let d=/(\d+)(h|m|s|ms)/g,p={h:36e5,m:6e4,s:1e3,ms:1},u,k=0;for(;(u=d.exec(l))!==null;){let T=parseInt(u[1]),x=u[2];k+=T*p[x];}return k},o=0,i=0,s=!0;e["x-ratelimit-reset-requests"]&&(o=n(e["x-ratelimit-reset-requests"])),e["x-ratelimit-reset-tokens"]&&(i=n(e["x-ratelimit-reset-tokens"]));let m=Math.max(o,i);return {shouldRetry:s,delayMs:m}}getTokenCount(e){return e.reduce((n,o)=>n+o.content.map(i=>i.modality==="text"?i.value:"").join(" ").length,0)}transformModelRequest(e){let n=Ui.safeParse(e);if(!n.success)throw new X({info:"Invalid model request",cause:n.error});let o=n.data,i=o.model;if(o.tool_choice&&(!o.tools||o.tools.length===0))throw new X({info:`Invalid model request for model : '${this.modelName}'`,cause:new Error("'tools' are required when 'tool_choice' is specified")});let s={};o.response_format&&(s.responseFormat=o.response_format.type,o.response_format.type==="json_schema"&&(s.responseSchema={name:o.response_format.json_schema.name,description:o.response_format.json_schema.description||"",strict:o.response_format.json_schema.strict,schema:o.response_format.json_schema.schema})),o.tool_choice&&(typeof o.tool_choice=="string"?s.toolChoice=o.tool_choice:s.toolChoice=o.tool_choice.function.name),s.seed=o.seed,s.maxTokens=o.max_completion_tokens,s.temperature=o.temperature,s.topP=o.top_p,s.presencePenalty=o.presence_penalty,s.frequencyPenalty=o.frequency_penalty,s.stop=o.stop,s.logProbs=o.logprobs,s.topLogProbs=o.top_logprobs,s.reasoningEffort=o.reasoning_effort,s.verbosity=o.verbosity;let m=ve().parse(je(s)),l=[],d={};o.messages.forEach(u=>{let k=u.role;switch(k){case"system":{let 
T=u.content;if(typeof T=="string")l.push({role:k,content:[{modality:O,value:T}]});else {let x=T.map(_=>({modality:O,value:_.text}));l.push({role:k,content:x});}}break;case"user":{let T=u.content;if(typeof T=="string")l.push({role:k,content:[{modality:O,value:T}]});else {let x=T.map(_=>_.type==="text"?{modality:O,value:_.text}:_.image_url.url.startsWith("data:")?{modality:L,detail:_.image_url.detail||"auto",value:{type:fe,base64:_.image_url.url,mediaType:mn(_.image_url.url)}}:{modality:L,detail:_.image_url.detail||"auto",value:{type:be,url:_.image_url.url}});l.push({role:k,content:x});}}break;case"assistant":{let T=[];if(!u.content&&!u.tool_calls)throw new X({info:`Invalid model request for model : '${this.modelName}'`,cause:new Error("one of'content' or 'tool_calls' must be provided")});if(u.content){let x=u.content;typeof x=="string"?T.push({modality:O,value:x}):x.forEach(_=>{T.push({modality:O,value:_.text});});}u.tool_calls&&u.tool_calls.forEach((x,_)=>{let w={modality:R,id:x.id,index:_,name:x.function.name,arguments:x.function.arguments};T.push(w),d[w.id]=w;}),l.push({role:k,content:T});}break;case"tool":{let T=u;l.push({role:k,content:[{modality:N,id:T.tool_call_id,index:d[T.tool_call_id].index,name:d[T.tool_call_id].name,data:T.content}]});}break}});let p=[];return o.tools&&o.tools.forEach(u=>{p.push({type:"function",definition:{schema:{name:u.function.name,description:u.function.description||"",strict:u.function.strict,parameters:u.function.parameters}}});}),{modelName:i,config:m,messages:l,tools:p.length>0?p:void 0}}transformConfig(e,n,o){let i=e.toolChoice;delete e.toolChoice;let s=this.modelSchema.config.schema.safeParse(e);if(!s.success)throw new q({info:`Invalid config for model : '${this.modelName}'`,cause:s.error});let m=s.data;i!==void 0&&(m.toolChoice=i),Object.keys(m).forEach(d=>{if(!(d in this.modelSchema.config.def))throw new q({info:`Invalid config for model : '${this.modelName}'`,cause:new Error(`Invalid config key : '${d}',
available keys : [${Object.keys(this.modelSchema.config.def).join(", ")}]`)})});let l=Object.keys(m).reduce((d,p)=>{let u=this.modelSchema.config.def[p],k=u.param,T=m[p];return k==="max_completion_tokens"&&u.type==="range"&&T===0?d[k]=u.max:d[k]=T,d},{});if(l.top_logprobs&&!l.logprobs)throw new q({info:`Invalid config for model : '${this.modelName}'`,cause:new Error("'logprobs' must be 'true' when 'top_logprobs' is specified")});if("tool_choice"in l&&l.tool_choice!==void 0){let d=l.tool_choice;if(!o||o&&o.length===0)throw new q({info:`Invalid config for model : '${this.modelName}'`,cause:new Error("'tools' are required when 'toolChoice' is specified")});if(o&&o.length>0){let p=this.modelSchema.config.def.toolChoice;if(!p.choices.includes(d))if(o.map(u=>u.definition.schema.name).includes(d))l.tool_choice={type:"function",function:{name:d}};else throw new q({info:`Invalid config for model : '${this.modelName}'`,cause:new Error(`toolChoice : '${d}' is not part of provided 'tools' names or
one of [${p.choices.join(", ")}]`)})}}if("response_format"in l&&l.response_format!==void 0){let d=l.response_format;if(d==="json_schema")if("response_schema"in l)l.response_format={type:"json_schema",json_schema:l.response_schema},delete l.response_schema;else throw new q({info:`Invalid config for model : '${this.modelName}'`,cause:new Error("'responseSchema' is required in config when 'responseFormat' is 'json_schema'")});else l.response_format={type:d};}return l}transformMessages(e){if(!e||e&&e.length===0)return {messages:[]};let n=e.map(o=>{let i=_e().safeParse(o);if(!i.success)throw new I({info:"Invalid messages",cause:i.error});return i.data});return n.forEach(o=>{o.content.forEach(i=>{if(!this.modelSchema.modalities.includes(i.modality))throw new I({info:`Invalid message content for model : '${this.modelName}'`,cause:new Error(`model : '${this.modelName}' does not support modality : '${i.modality}',
available modalities : [${this.modelSchema.modalities.join(", ")}]`)})});}),n.forEach(o=>{if(!Object.keys(this.modelSchema.roles).includes(o.role))throw new I({info:`Invalid message content for model : '${this.modelName}'`,cause:new Error(`model : '${this.modelName}' does not support role : '${o.role}',
available roles : [${Object.keys(this.modelSchema.roles).join(", ")}]`)})}),{messages:n.map(o=>{switch(o.role){case K:{let i=[];return o.content.forEach(s=>{if(s.modality===O)i.push({type:"text",text:s.value});else throw new I({info:`Invalid message 'role' and 'modality' combination for model : ${this.modelName}`,cause:new Error(`role : '${o.role}' cannot have content with modality : '${s.modality}'`)})}),{role:this.modelSchema.roles[o.role],content:i}}case j:{let i=[],s=[];return o.content.forEach(m=>{if(m.modality===O)i.push({type:"text",text:m.value});else if(m.modality===R)s.push({id:m.id,type:"function",function:{name:m.name,arguments:m.arguments}});else throw new I({info:`Invalid message 'role' and 'modality' combination for model : ${this.modelName}`,cause:new Error(`role : '${o.role}' cannot have content with modality : '${m.modality}'`)})}),C({role:this.modelSchema.roles[o.role],content:i},s.length>0?{tool_calls:s}:{})}case W:{let i=[],s=[];o.content.forEach(l=>{if(l.modality===O)i.push({type:"text",text:l.value});else if(l.modality===L)s.push({type:"image_url",image_url:{url:l.value.type==="url"?l.value.url:l.value.base64,detail:l.detail}});else throw new I({info:`Invalid message 'role' and 'modality' combination for model : ${this.modelName}`,cause:new Error(`role : '${o.role}' cannot have content with modality : '${l.modality}'`)})});let m=[...i,...s];return {role:this.modelSchema.roles[o.role],content:m}}case Y:{if(o.content.length!==1)throw new I({info:`Invalid message for role : '${o.role}'`,cause:new Error(`role : '${o.role}' must have exactly one content item`)});if(o.content[0].modality!==N)throw new I({info:`Invalid message 'role' and 'modality' combination for model : ${this.modelName}`,cause:new Error(`role : '${o.role}' must have content with modality : '${N}'`)});let i=o.content[0];return {role:this.modelSchema.roles[o.role],tool_call_id:i.id,content:i.data}}default:throw new I({info:`Invalid message 'role' for model : ${this.modelName}`,cause:new Error(`role : '${o.role}' is not supported,
available roles : [${Object.keys(this.modelSchema.roles).join(", ")}]`)})}})}}transformTools(e){if(!this.modelSchema.modalities.includes(R))throw new Oe({info:`Invalid tool 'modality' for model : ${this.modelName}`,cause:new Error(`model : '${this.modelName}' does not support tool modality : '${R}'`)});return !e||e&&e.length===0?{tools:[]}:{tools:e.map(n=>{let o=Rt().safeParse(n);if(!o.success)throw new Oe({info:"Invalid tools",cause:o.error});return o.data}).map(n=>({type:"function",function:n.definition.schema}))}}getCompleteChatUrl(e,n,o){return E(this,null,function*(){return new Promise(i=>{i(this.completeChatUrl);})})}getCompleteChatHeaders(e,n,o){return E(this,null,function*(){return new Promise(i=>{i(this.getDefaultHeaders());})})}getCompleteChatData(e,n,o){return E(this,null,function*(){let i=this.transformConfig(e,n,o),s=this.transformMessages(n);if(s.messages&&s.messages.length===0)throw new I({info:"Messages are required",cause:new Error("Messages are required")});let m=o?this.transformTools(o):{};return new Promise(l=>{l(C(C(C(C({},this.getDefaultParams()),i),s),m));})})}transformCompleteChatResponse(e){let n=ki.safeParse(e);if(n.success){if(n.data.choices.length===0)throw new F({info:"Invalid response from model",cause:new Error(`No choices in response : ${JSON.stringify(n.data)}`)});let o=n.data,i=[{role:j,content:[]}],s=o.choices[0].message;s.content&&i[0].content.push(Me(s.content)),s.refusal&&i[0].content.push(Me(s.refusal)),s.tool_calls&&s.tool_calls.forEach((p,u)=>{i[0].content.push(It(u,p.id,p.function.name,p.function.arguments));});let m={promptTokens:o.usage.prompt_tokens,completionTokens:o.usage.completion_tokens,totalTokens:o.usage.total_tokens},l=[],d=o.choices[0].logprobs;return d&&(d.content&&l.push(...d.content.map(p=>({token:p.token,logProb:p.logprob,bytes:p.bytes,topLogProbs:p.top_logprobs.map(u=>({token:u.token,logProb:u.logprob,bytes:u.bytes}))}))),d.refusal&&l.push(...d.refusal.map(p=>({token:p.token,logProb:p.logprob,bytes:p.bytes,topLogProbs:p.top_logprobs.map(u=>({token:u.token,logProb:u.logprob,bytes:u.bytes}))})))),{messages:i,usage:m,logProbs:l}}throw new F({info:"Invalid response from model",cause:n.error})}getStreamChatUrl(e,n,o){return E(this,null,function*(){return new Promise(i=>{i(this.streamChatUrl);})})}getStreamChatHeaders(e,n,o){return E(this,null,function*(){return new Promise(i=>{i(this.getDefaultHeaders());})})}getStreamChatData(e,n,o){return E(this,null,function*(){let i=this.transformConfig(e,n,o),s=this.transformMessages(n);if(s.messages&&s.messages.length===0)throw new I({info:"Messages are required",cause:new Error("Messages are required")});let m=o?this.transformTools(o):{};return new Promise(l=>{l(C(C(C(C({stream:!0,stream_options:{include_usage:!0}},this.getDefaultParams()),i),s),m));})})}transformStreamChatResponseChunk(e,n){return pn(this,null,function*(){var o,i;let s=n+e,m=[],l="",d=0;for(;d<s.length;){let p=s.indexOf(`
`,d);if(p===-1){l=s.substring(d);break}else {let u=s.substring(d,p).trim();u&&m.push(u),d=p+1;}}for(let p of m){if(p==="data: [DONE]")return;if(p.startsWith("data: ")){let u=p.substring(6);try{let k=JSON.parse(u),T=Mi.safeParse(k);if(T.success){let x={partialMessages:[]},_=T.data;if(_.choices.length>0){let w=_.choices[0].delta;if(w!==void 0&&Object.keys(w).length!==0){if("content"in w&&w.content!==null)x.partialMessages.push(Se(j,w.content));else if("refusal"in w&&w.refusal!==null)x.partialMessages.push(Se(j,w.refusal));else if("tool_calls"in w&&w.tool_calls!==void 0){let te=w.tool_calls.at(0);x.partialMessages.push(jt(j,te.index,te.id,(o=te.function)==null?void 0:o.name,(i=te.function)==null?void 0:i.arguments));}}}_.usage&&(x.usage={promptTokens:_.usage.prompt_tokens,completionTokens:_.usage.completion_tokens,totalTokens:_.usage.total_tokens}),yield {partialResponse:x,buffer:l};}else throw new F({info:"Invalid response from model",cause:T.error})}catch(k){throw new F({info:`Malformed JSON received in stream: ${u}`,cause:k})}}}yield {partialResponse:{partialMessages:[]},buffer:l};})}transformProxyStreamChatResponseChunk(e,n,o,i,s){return pn(this,null,function*(){yield*pt(pi(this.transformStreamChatResponseChunk(e,n)));})}getProxyStreamChatUrl(e,n,o){return E(this,null,function*(){return new Promise(i=>{i(this.streamChatUrl);})})}getProxyCompleteChatUrl(e,n,o){return E(this,null,function*(){return new Promise(i=>{i(this.completeChatUrl);})})}getProxyCompleteChatHeaders(e,n,o){return E(this,null,function*(){if(!n)return {};let i=C({},n);return delete i.host,delete i["content-length"],i})}getProxyStreamChatHeaders(e,n,o){return E(this,null,function*(){return yield this.getProxyCompleteChatHeaders(e,n,o)})}getModelPricing(){if(!(this.modelName in v))throw new F({info:`Invalid model pricing for model : '${this.modelName}'`,cause:new Error(`No pricing configuration found for model "${this.modelName}"`)});return v[this.modelName]}},Le="gpt-3.5-turbo-0125",Ai="The latest GPT-3.5 Turbo model with higher accuracy at responding in requested formats and a fix for a bug which caused a text encoding issue for non-English language function calls. Training data up to Sept 2021.",Gn=g(h,A).parse({name:Le,description:Ai,maxInputTokens:4092,maxOutputTokens:4092,roles:f,modalities:U,config:{def:c.responseFormat(4092,4).def,schema:c.responseFormat(4092,4).schema},price:v[Le]}),Fi=y,qi=class extends b{constructor(e){super(Gn,e);}},Ne="gpt-3.5-turbo-1106",Di="The latest GPT-3.5 Turbo model with improved instruction following, JSON mode, reproducible outputs, parallel function calling, and more. Returns a maximum of 4,096 output tokens. Training data up to Sept 2021.",Un=g(h,A).parse({name:Ne,description:Di,maxInputTokens:4092,maxOutputTokens:16385,roles:f,modalities:U,config:{def:c.responseFormat(16385,4).def,schema:c.responseFormat(16385,4).schema},price:v[Ne]}),$i=y,Bi=class extends b{constructor(e){super(Un,e);}},Ge="gpt-3.5-turbo",zi="Currently points to gpt-3.5-turbo-0125. Training data up to Sept 2021.",An=g(h,A).parse({name:Ge,description:zi,maxInputTokens:4092,maxOutputTokens:4092,roles:f,modalities:U,config:{def:c.responseFormat(4092,4).def,schema:c.responseFormat(4092,4).schema},price:v[Ge]}),Hi=y,Vi=class extends b{constructor(e){super(An,e);}},Ue="gpt-4-0125-preview",Ji="The latest GPT-4 model intended to reduce cases of \u201Claziness\u201D where the model doesn\u2019t complete a task. 
Training data up to Apr 2023.",Fn=g(h,A).parse({name:Ue,description:Ji,maxInputTokens:128e3,maxOutputTokens:4092,roles:f,modalities:U,config:{def:c.base(4092,4).def,schema:c.base(4092,4).schema},price:v[Ue]}),Ki=y,Wi=class extends b{constructor(e){super(Fn,e);}},Ae="gpt-4-0613",Yi="Snapshot of gpt-4 from June 13th 2023 with improved function calling support. Training data up to Sept 2021.",qn=g(h,A).parse({name:Ae,description:Yi,maxInputTokens:8192,maxOutputTokens:4092,roles:f,modalities:U,config:{def:c.base(4092,4).def,schema:c.base(4092,4).schema},price:v[Ae]}),Zi=y,Qi=class extends b{constructor(e){super(qn,e);}},Fe="gpt-4-1106-preview",Xi="GPT-4 Turbo model featuring improved instruction following, JSON mode, reproducible outputs, parallel function calling, and more. Returns a maximum of 4,096 output tokens. This preview model is not yet suited for production traffic. Training data up to Apr 2023.",Dn=g(h,A).parse({name:Fe,description:Xi,maxInputTokens:128e3,maxOutputTokens:4092,roles:f,modalities:U,config:{def:c.base(4092,4).def,schema:c.base(4092,4).schema},price:v[Fe]}),es=y,ts=class extends b{constructor(e){super(Dn,e);}},qe="gpt-4.1",ns="Flagship model for complex tasks. It is well suited for problem solving across domains. Training data up to May 2024.",$n=g(h,S).parse({name:qe,description:ns,maxInputTokens:1047576,maxOutputTokens:32768,roles:f,modalities:M,config:{def:c.responseSchema(32768,4).def,schema:c.responseSchema(32768,4).schema},price:v[qe]}),os=y,as=class extends b{constructor(e){super($n,e);}},De="gpt-4.1-mini",is="Provides a balance between intelligence, speed, and cost that makes it an attractive model for many use cases. Training data up to May 2024.",Bn=g(h,S).parse({name:De,description:is,maxInputTokens:1047576,maxOutputTokens:32768,roles:f,modalities:M,config:{def:c.responseSchema(32768,4).def,schema:c.responseSchema(32768,4).schema},price:v[De]}),ss=y,rs=class extends b{constructor(e){super(Bn,e);}},$e="gpt-4.1-nano",ls="Fastest, most cost-effective GPT-4.1 model. Training data up to May 2024.",zn=g(h,S).parse({name:$e,description:ls,maxInputTokens:1047576,maxOutputTokens:32768,roles:f,modalities:M,config:{def:c.responseSchema(32768,4).def,schema:c.responseSchema(32768,4).schema},price:v[$e]}),ms=y,ds=class extends b{constructor(e){super(zn,e);}},Be="gpt-5.1",cs="Flagship GPT-5.1 model for coding and agentic tasks with configurable reasoning effort. Training data up to September 2024.",Hn=g(h,S).parse({name:Be,description:cs,maxInputTokens:4e5,maxOutputTokens:128e3,roles:f,modalities:M,config:{def:c.gpt5(128e3,4).def,schema:c.gpt5(128e3,4).schema},price:v[Be]}),ps=y,us=class extends b{constructor(e){super(Hn,e);}},ze="gpt-5",gs="Most advanced GPT-5 model for complex reasoning and problem-solving tasks. Training data up to October 2024.",Vn=g(h,S).parse({name:ze,description:gs,maxInputTokens:4e5,maxOutputTokens:131072,roles:f,modalities:M,config:{def:c.gpt5(131072,4).def,schema:c.gpt5(131072,4).schema},price:v[ze]}),hs=y,fs=class extends b{constructor(e){super(Vn,e);}},He="gpt-5-mini",bs="Faster, more cost-effective GPT-5 model that balances intelligence and efficiency. Training data up to October 2024.",Jn=g(h,S).parse({name:He,description:bs,maxInputTokens:4e5,maxOutputTokens:131072,roles:f,modalities:M,config:{def:c.gpt5(131072,4).def,schema:c.gpt5(131072,4).schema},price:v[He]}),ys=y,Ts=class extends b{constructor(e){super(Jn,e);}},Ve="gpt-5-nano",_s="Most cost-effective GPT-5 model optimized for speed and efficiency. 
Training data up to October 2024.",Kn=g(h,S).parse({name:Ve,description:_s,maxInputTokens:4e5,maxOutputTokens:131072,roles:f,modalities:M,config:{def:c.gpt5(131072,4).def,schema:c.gpt5(131072,4).schema},price:v[Ve]}),Ps=y,vs=class extends b{constructor(e){super(Kn,e);}},Je="gpt-5-chat-latest",ks="Latest GPT-5 model optimized for conversational use. Does not support function calling or structured outputs. Training data up to October 2024.",xs=[O,L],Ms=zod.z.enum([O,L]),Wn=g(h,Ms).parse({name:Je,description:ks,maxInputTokens:4e5,maxOutputTokens:131072,roles:f,modalities:xs,config:{def:c.gpt5(131072,4).def,schema:c.gpt5(131072,4).schema},price:v[Je]}),Ss=y,Os=class extends b{constructor(e){super(Wn,e);}},Ke="gpt-4-turbo-2024-04-09",Cs="GPT-4 Turbo with Vision model. Vision requests can now use JSON mode and function calling. gpt-4-turbo currently points to this version. Training data up to Dec 2023.",Yn=g(h,S).parse({name:Ke,description:Cs,maxInputTokens:128e3,maxOutputTokens:4096,roles:f,modalities:M,config:{def:c.responseFormat(4096,4).def,schema:c.responseFormat(4096,4).schema},price:v[Ke]}),ws=y,Es=class extends b{constructor(e){super(Yn,e);}},We="gpt-4-turbo-preview",Rs="Currently points to gpt-4-0125-preview. Training data up to Apr 2023.",Zn=g(h,A).parse({name:We,description:Rs,maxInputTokens:128e3,maxOutputTokens:4092,roles:f,modalities:U,config:{def:c.responseFormat(4092,4).def,schema:c.responseFormat(4092,4).schema},price:v[We]}),Is=y,js=class extends b{constructor(e){super(Zn,e);}},Ye="gpt-4-turbo",Ls="The latest GPT-4 Turbo model with vision capabilities. Vision requests can now use JSON mode and function calling. Currently points to gpt-4-turbo-2024-04-09. Training data up to Dec 2023.",Qn=g(h,S).parse({name:Ye,description:Ls,maxInputTokens:128e3,maxOutputTokens:4092,roles:f,modalities:M,config:{def:c.responseFormat(4092,4).def,schema:c.responseFormat(4092,4).schema},price:v[Ye]}),Ns=y,Gs=class extends b{constructor(e){super(Qn,e);}},Ze="gpt-4",Us="Currently points to gpt-4-0613. Training data up to Sept 2021.",Xn=g(h,A).parse({name:Ze,description:Us,maxInputTokens:8192,maxOutputTokens:4092,roles:f,modalities:U,config:{def:c.base(4092,4).def,schema:c.base(4092,4).schema},price:v[Ze]}),As=y,Fs=class extends b{constructor(e){super(Xn,e);}},Qe="gpt-4o-2024-05-13",qs="Latest snapshot of gpt-4o that supports Structured Outputs. Training data up to Oct 2023.",eo=g(h,S).parse({name:Qe,description:qs,maxInputTokens:128e3,maxOutputTokens:4092,roles:f,modalities:M,config:{def:c.responseSchema(4092,4).def,schema:c.responseSchema(4092,4).schema},price:v[Qe]}),Ds=y,$s=class extends b{constructor(e){super(eo,e);}},Xe="gpt-4o-2024-08-06",Bs="Latest snapshot of gpt-4o that supports Structured Outputs. Training data up to Oct 2023.",to=g(h,S).parse({name:Xe,description:Bs,maxInputTokens:128e3,maxOutputTokens:4092,roles:f,modalities:M,config:{def:c.responseSchema(4092,4).def,schema:c.responseSchema(4092,4).schema},price:v[Xe]}),zs=y,Hs=class extends b{constructor(e){super(to,e);}},et="gpt-4o-mini-2024-07-18",Vs="Most advanced, multimodal flagship model that is cheaper and faster than GPT-4 Turbo. Currently points to gpt-4o-2024-05-13. 
Training data up to Oct 2023.",no=g(h,S).parse({name:et,description:Vs,maxInputTokens:128e3,maxOutputTokens:4092,roles:f,modalities:M,config:{def:c.responseSchema(4092,4).def,schema:c.responseSchema(4092,4).schema},price:v[et]}),Js=y,Ks=class extends b{constructor(e){super(no,e);}},tt="gpt-4o-mini",Ws="Most advanced, multimodal flagship model that is cheaper and faster than GPT-4 Turbo. Currently points to gpt-4o-2024-05-13. Training data up to Oct 2023.",oo=g(h,S).parse({name:tt,description:Ws,maxInputTokens:128e3,maxOutputTokens:4092,roles:f,modalities:M,config:{def:c.responseSchema(4092,4).def,schema:c.responseSchema(4092,4).schema},price:v[tt]}),Ys=y,Zs=class extends b{constructor(e){super(oo,e);}},nt="gpt-4o",Qs="Most advanced, multimodal flagship model that is cheaper and faster than GPT-4 Turbo. Currently points to gpt-4o-2024-05-13. Training data up to Oct 2023.",ao=g(h,S).parse({name:nt,description:Qs,maxInputTokens:128e3,maxOutputTokens:4092,roles:f,modalities:M,config:{def:c.responseSchema(4092,4).def,schema:c.responseSchema(4092,4).schema},price:v[nt]}),Xs=y,er=class extends b{constructor(e){super(ao,e);}},ot="o1-2024-12-17",tr="A stable release model for production use, offering robust performance and advanced features. Training data up to December 2024.",io=g(h,S).parse({name:ot,description:tr,maxInputTokens:2e5,maxOutputTokens:1e5,roles:f,modalities:M,config:{def:c.oSeries(1e5,4).def,schema:c.oSeries(1e5,4).schema},price:v[ot]}),nr=y,or=class extends b{constructor(e){super(io,e);}},at="o1",ar="Highly capable general-purpose reasoning model with advanced capabilities in language, coding, and reasoning. Training data up to Oct 2023.",so=g(h,S).parse({name:at,description:ar,maxInputTokens:2e5,maxOutputTokens:1e5,roles:f,modalities:M,config:{def:c.oSeries(1e5,4).def,schema:c.oSeries(1e5,4).schema},price:v[at]}),ir=y,sr=class extends b{constructor(e){super(so,e);}},ro="o3-2025-04-16",rr="A new standard for math, science, coding, and visual reasoning tasks. Training data up to Jun 2024.",lo=g(h,S).parse({name:ro,description:rr,maxInputTokens:2e5,maxOutputTokens:1e5,roles:f,modalities:M,config:{def:c.oSeries(1e5,4).def,schema:c.oSeries(1e5,4).schema}}),lr=y,mr=class extends b{constructor(e){super(lo,e);}},mo="o3",dr="A new standard for math, science, coding, and visual reasoning tasks. Training data up to Jun 2024.",co=g(h,S).parse({name:mo,description:dr,maxInputTokens:2e5,maxOutputTokens:1e5,roles:f,modalities:M,config:{def:c.oSeries(1e5,4).def,schema:c.oSeries(1e5,4).schema}}),cr=y,pr=class extends b{constructor(e){super(co,e);}},po="o3-mini",ur="o3-mini is the newest small reasoning model, providing high intelligence at the same cost and latency targets of o1-mini. Training data up to Sep 2023.",uo=g(h,A).parse({name:po,description:ur,maxInputTokens:2e5,maxOutputTokens:1e5,roles:f,modalities:U,config:{def:c.oSeries(1e5,4).def,schema:c.oSeries(1e5,4).schema}}),gr=y,hr=class extends b{constructor(e){super(uo,e);}},go="o3-mini-2025-01-31",fr="o3-mini is the newest small reasoning model, providing high intelligence at the same cost and latency targets of o1-mini. Training data up to Sep 2023.",ho=g(h,A).parse({name:go,description:fr,maxInputTokens:2e5,maxOutputTokens:1e5,roles:f,modalities:U,config:{def:c.oSeries(1e5,4).def,schema:c.oSeries(1e5,4).schema}}),br=y,yr=class extends b{constructor(e){super(ho,e);}},fo="o4-mini-2025-04-16",Tr="Optimized for fast, effective reasoning with exceptionally efficient performance in coding and visual tasks. 
Training data up to Jun 2024.",bo=g(h,S).parse({name:fo,description:Tr,maxInputTokens:2e5,maxOutputTokens:1e5,roles:f,modalities:M,config:{def:c.oSeries(1e5,4).def,schema:c.oSeries(1e5,4).schema}}),_r=y,Pr=class extends b{constructor(e){super(bo,e);}},yo="o4-mini",vr="Optimized for fast, effective reasoning with exceptionally efficient performance in coding and visual tasks. Training data up to Jun 2024.",To=g(h,S).parse({name:yo,description:vr,maxInputTokens:2e5,maxOutputTokens:1e5,roles:f,modalities:M,config:{def:c.oSeries(1e5,4).def,schema:c.oSeries(1e5,4).schema}}),kr=y,xr=class extends b{constructor(e){super(To,e);}},rt=[D,z],lt=zod.z.enum([D,z]),Mr=zod.z.object({object:zod.z.literal("list"),model:zod.z.string(),data:zod.z.array(zod.z.object({index:zod.z.number(),object:zod.z.literal("embedding"),embedding:zod.z.array(zod.z.number()).or(zod.z.string().base64())})),usage:zod.z.object({prompt_tokens:zod.z.number().nonnegative(),total_tokens:zod.z.number().nonnegative()})}),Sr=zod.z.string().min(1).or(zod.z.array(zod.z.string().min(1)).min(1)).or(zod.z.array(zod.z.number().int().nonnegative()).min(1)).or(zod.z.array(zod.z.array(zod.z.number().int().nonnegative()).min(1)).min(1)),Or=zod.z.object({model:zod.z.string().min(1).optional(),input:Sr,encoding_format:zod.z.enum(["float","base64"]).optional(),dimensions:zod.z.number().int().min(1).optional()}),me=zod.z.object({modelName:zod.z.string(),apiKey:zod.z.string(),baseUrl:zod.z.string().url().optional(),getEmbeddingsUrl:zod.z.string().url().optional()}),mt=class{constructor(e,n){this.version="v1";let o=me.parse(n);this.modelSchema=e,this.modelName=o.modelName,this.apiKey=o.apiKey,this.baseUrl=H(o.baseUrl||it.baseUrl),this.getEmbeddingsUrl=H(o.getEmbeddingsUrl||`${this.baseUrl}/embeddings`);}getDefaultBaseUrl(){return this.baseUrl}getDefaultHeaders(){return {Authorization:`Bearer ${this.apiKey}`,"Content-Type":"application/json"}}getDefaultParams(){return {model:this.modelSchema.name}}getRetryDelay(e){let n=l=>{let d=/(\d+)(h|m|s|ms)/g,p={h:36e5,m:6e4,s:1e3,ms:1},u,k=0;for(;(u=d.exec(l))!==null;){let T=parseInt(u[1]),x=u[2];k+=T*p[x];}return k},o=0,i=0,s=!0;e["x-ratelimit-reset-requests"]&&(o=n(e["x-ratelimit-reset-requests"])),e["x-ratelimit-reset-tokens"]&&(i=n(e["x-ratelimit-reset-tokens"]));let m=Math.max(o,i);return {shouldRetry:s,delayMs:m}}getTokenCount(e){return e.requests.reduce((n,o)=>n+o.length,0)}transformModelRequest(e){let n=Or.safeParse(e);if(!n.success)throw new X({info:"Invalid model request",cause:n.error});let o=n.data,i=o.model,s={encodingFormat:o.encoding_format,dimensions:o.dimensions},m=ve().parse(je(s)),l,d;return typeof o.input=="string"?d=D:typeof o.input[0]=="string"?d=D:d=z,d===D?typeof o.input=="string"?l={modality:d,requests:[o.input]}:l={modality:d,requests:o.input}:typeof o.input[0]=="number"?l={modality:d,requests:[o.input]}:l={modality:d,requests:o.input},{modelName:i,config:m,embeddingRequests:l}}transformConfig(e,n){let o=this.modelSchema.config.schema.safeParse(e);if(!o.success)throw new q({info:`Invalid config for model : '${this.modelSchema.name}'`,cause:o.error});let i=o.data;return Object.keys(i).forEach(s=>{if(!this.modelSchema.config.def[s])throw new q({info:`Invalid config for model : '${this.modelSchema.name}'`,cause:new Error(`Invalid config key : '${s}',
   15 | +
available keys : [${Object.keys(this.modelSchema.config.def).join(", ")}]`)})}),Object.keys(i).reduce((s,m)=>{let l=this.modelSchema.config.def[m].param,d=i[m];return s[l]=d,s},{})}transformEmbeddingRequests(e){let n=wt().safeParse(e);if(!n.success)throw new Yt({info:"Invalid embedding requests",cause:n.error});return {input:n.data.requests}}getGetEmbeddingsUrl(e,n){return E(this,null,function*(){return new Promise(o=>{o(this.getEmbeddingsUrl);})})}getGetEmbeddingsHeaders(e,n){return E(this,null,function*(){return new Promise(o=>{o(this.getDefaultHeaders());})})}getGetEmbeddingsData(e,n){return E(this,null,function*(){return new Promise(o=>{o(C(C(C({},this.getDefaultParams()),this.transformConfig(e,n)),this.transformEmbeddingRequests(n)));})})}transformGetEmbeddingsResponse(e){let n,o=Mr.safeParse(e);if(o.success){let i=o.data;n=typeof i.data[0].embedding=="string"?xe:ke;let s=i.data.map(m=>typeof m.embedding=="string"?{index:m.index,embedding:m.embedding}:{index:m.index,embedding:m.embedding});return {encodingFormat:n,embeddings:s,usage:{totalTokens:i.usage.total_tokens}}}throw new F({info:"Invalid response from model",cause:o.error})}},_o="text-embedding-ada-002",Cr="Most capable 2nd generation embedding model, replacing 16 first generation models",Po=ne(lt).parse({name:_o,description:Cr,modalities:rt,maxInputTokens:8192,maxOutputTokens:1536,config:{def:V.base().def,schema:V.base().schema}}),wr=me,Er=class extends mt{constructor(e){super(Po,e);}},vo="text-embedding-3-small",Rr="Increased performance over 2nd generation ada embedding model",ko=ne(lt).parse({name:vo,description:Rr,modalities:rt,maxInputTokens:8192,maxOutputTokens:1536,config:{def:V.dimensions(1536).def,schema:V.dimensions(1536).schema}}),Ir=me,jr=class extends mt{constructor(e){super(ko,e);}},xo="text-embedding-3-large",Lr="Most capable embedding model for both english and non-english tasks",Mo=ne(lt).parse({name:xo,description:Lr,modalities:rt,maxInputTokens:8192,maxOutputTokens:3072,config:{def:V.dimensions(3072).def,schema:V.dimensions(3072).schema}}),Nr=me,Gr=class extends mt{constructor(e){super(Mo,e);}};var dt=g(h,S).parse({name:"__base__",description:"Base chat model for Custom",maxInputTokens:128e3,maxOutputTokens:128e3,roles:f,modalities:M,config:{def:c.base(128e3,4).def,schema:c.base(128e3,4).schema}});var de=zod.z.object({apiKey:zod.z.string().min(1),modelName:zod.z.string(),baseUrl:zod.z.string().optional(),completeChatUrl:zod.z.string().optional(),streamChatUrl:zod.z.string().optional()});var ce=class extends b{constructor(o,i){let s=de.parse(i);super(o,{modelName:s.modelName,apiKey:s.apiKey,baseUrl:s.baseUrl,completeChatUrl:s.completeChatUrl,streamChatUrl:s.streamChatUrl});this.version="v1";}getModelPricing(){throw new F({info:`Invalid model pricing for model : '${this.modelName}'`,cause:new Error("No pricing configuration for custom provider")})}};var Ur="custom",ct=class{constructor(){this.version="v1";this.name=Ur;this.embeddingModelFactories={};}chatModelLiterals(){return ["__base__"]}chatModelSchemas(){return {__base__:dt}}chatModel(n){let o=ce,i=de.parse(n);return new o(dt,i)}embeddingModelLiterals(){return Object.keys(this.embeddingModelFactories)}embeddingModelSchemas(){return Object.keys(this.embeddingModelFactories).reduce((n,o)=>(n[o]=this.embeddingModelFactories[o].modelSchema,n),{})}embeddingModel(n){throw new Q({info:"Custom embedding models not supported yet",cause:new Error("Custom embedding models not supported yet")})}};ct.customUrl=n=>`${n}`;
16 16 |
17 17 | exports.BaseChatModelOpenAI = ce;
18 18 | exports.BaseChatModelOptions = de;
19    | - exports.BaseChatModelSchema =
20    | - exports.Custom =
   19 | + exports.BaseChatModelSchema = dt;
   20 | + exports.Custom = ct;
21 21 | //# sourceMappingURL=index.js.map
22 22 | //# sourceMappingURL=index.js.map
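For context on the exports touched in this hunk (`Custom`, `BaseChatModelSchema`, `BaseChatModelOptions`, `BaseChatModelOpenAI`), the sketch below shows how the custom provider appears intended to be used, inferred only from the minified dist code above. The option names come from the `BaseChatModelOptions` zod schema; the model identifier and endpoint URL are placeholder assumptions, and the gateway wiring around the provider is outside this package diff.

```ts
// Usage sketch inferred from the dist code in this diff; not an official example.
// "my-openai-compatible-model" and the baseUrl are placeholder assumptions.
import { Custom, BaseChatModelOptions } from "@adaline/custom";

const provider = new Custom();

// The provider registers a single "__base__" chat model literal and schema.
console.log(provider.chatModelLiterals()); // ["__base__"]

// chatModel() validates its argument against BaseChatModelOptions
// (apiKey, modelName, optional baseUrl / completeChatUrl / streamChatUrl)
// and returns a BaseChatModelOpenAI bound to the "__base__" schema.
const model = provider.chatModel({
  apiKey: "sk-placeholder",                // any non-empty string passes the schema
  modelName: "my-openai-compatible-model", // assumed model identifier
  baseUrl: "https://example.com/v1",       // assumed OpenAI-compatible endpoint
});

// Embedding models are explicitly unsupported and throw:
// provider.embeddingModel({ ... });
```

Since `chatModel()` re-validates its options internally, callers can pass a plain object; the exported `BaseChatModelOptions` schema can also be used for ahead-of-time validation. The visible change in this hunk only rebinds `exports.BaseChatModelSchema` and `exports.Custom` to the re-minified identifiers `dt` and `ct`; the exported names themselves are unchanged.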