@adaline/openai 0.18.0 → 0.19.0

This diff compares publicly available package versions as released to their respective public registries. It is provided for informational purposes only and reflects the packages exactly as they appear in those registries.
package/dist/index.d.mts CHANGED
@@ -907,15 +907,7 @@ declare const toolChoice: {
  schema: zod.ZodOptional<zod.ZodDefault<zod.ZodNullable<zod.ZodEnum<[string, ...string[]]>>>>;
  };

- declare const ChatModelResponseFormatConfigDef: (maxOutputTokens: number, maxSequences: number) => {
- responseFormat: {
- type: "select-string";
- param: string;
- title: string;
- description: string;
- default: string | null;
- choices: string[];
- };
+ declare const ChatModelOSeriesConfigDef: (maxOutputTokens: number, maxSequences: number) => {
  temperature: {
  type: "range";
  param: string;
@@ -936,6 +928,21 @@ declare const ChatModelResponseFormatConfigDef: (maxOutputTokens: number, maxSeq
  min: number;
  step: number;
  };
+ responseFormat: {
+ type: "select-string";
+ param: string;
+ title: string;
+ description: string;
+ default: string | null;
+ choices: string[];
+ };
+ responseSchema: {
+ type: "object-schema";
+ param: string;
+ title: string;
+ description: string;
+ objectSchema?: any;
+ };
  stop: {
  type: "multi-string";
  param: string;
@@ -1009,7 +1016,7 @@ declare const ChatModelResponseFormatConfigDef: (maxOutputTokens: number, maxSeq
  choices: string[];
  };
  };
- declare const ChatModelResponseFormatConfigSchema: (maxOutputTokens: number, maxSequences: number) => zod.ZodObject<zod.objectUtil.extendShape<{
+ declare const ChatModelOSeriesConfigSchema: (maxOutputTokens: number, maxSequences: number) => zod.ZodObject<zod.objectUtil.extendShape<zod.objectUtil.extendShape<{
  temperature: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
  maxTokens: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
  stop: zod.ZodOptional<zod.ZodDefault<zod.ZodArray<zod.ZodString, "many">>>;
@@ -1022,6 +1029,14 @@ declare const ChatModelResponseFormatConfigSchema: (maxOutputTokens: number, max
  toolChoice: zod.ZodOptional<zod.ZodDefault<zod.ZodNullable<zod.ZodEnum<[string, ...string[]]>>>>;
  }, {
  responseFormat: zod.ZodOptional<zod.ZodDefault<zod.ZodNullable<zod.ZodEnum<[string, ...string[]]>>>>;
+ responseSchema: zod.ZodOptional<zod.ZodObject<zod.ZodRawShape, zod.UnknownKeysParam, zod.ZodTypeAny, {
+ [x: string]: any;
+ }, {
+ [x: string]: any;
+ }>>;
+ }>, {
+ temperature: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
+ maxTokens: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
  }>, "strip", zod.ZodTypeAny, {
  temperature?: number | undefined;
  seed?: number | undefined;
@@ -1034,6 +1049,9 @@ declare const ChatModelResponseFormatConfigSchema: (maxOutputTokens: number, max
  topLogProbs?: number | undefined;
  toolChoice?: string | null | undefined;
  responseFormat?: string | null | undefined;
+ responseSchema?: {
+ [x: string]: any;
+ } | undefined;
  }, {
  temperature?: number | undefined;
  seed?: number | undefined;
@@ -1046,9 +1064,12 @@ declare const ChatModelResponseFormatConfigSchema: (maxOutputTokens: number, max
  topLogProbs?: number | undefined;
  toolChoice?: string | null | undefined;
  responseFormat?: string | null | undefined;
+ responseSchema?: {
+ [x: string]: any;
+ } | undefined;
  }>;

- declare const ChatModelResponseSchemaConfigDef: (maxOutputTokens: number, maxSequences: number) => {
+ declare const ChatModelResponseFormatConfigDef: (maxOutputTokens: number, maxSequences: number) => {
  responseFormat: {
  type: "select-string";
  param: string;
@@ -1057,13 +1078,6 @@ declare const ChatModelResponseSchemaConfigDef: (maxOutputTokens: number, maxSeq
  default: string | null;
  choices: string[];
  };
- responseSchema: {
- type: "object-schema";
- param: string;
- title: string;
- description: string;
- objectSchema?: any;
- };
  temperature: {
  type: "range";
  param: string;
@@ -1157,25 +1171,20 @@ declare const ChatModelResponseSchemaConfigDef: (maxOutputTokens: number, maxSeq
  choices: string[];
  };
  };
- declare const ChatModelResponseSchemaConfigSchema: (maxOutputTokens: number, maxSequences: number) => z.ZodObject<z.objectUtil.extendShape<{
- temperature: z.ZodOptional<z.ZodDefault<z.ZodNumber>>;
- maxTokens: z.ZodOptional<z.ZodDefault<z.ZodNumber>>;
- stop: z.ZodOptional<z.ZodDefault<z.ZodArray<z.ZodString, "many">>>;
- topP: z.ZodOptional<z.ZodDefault<z.ZodNumber>>;
- frequencyPenalty: z.ZodOptional<z.ZodDefault<z.ZodNumber>>;
- presencePenalty: z.ZodOptional<z.ZodDefault<z.ZodNumber>>;
- seed: z.ZodEffects<z.ZodOptional<z.ZodDefault<z.ZodNumber>>, number | undefined, number | undefined>;
- logProbs: z.ZodOptional<z.ZodDefault<z.ZodNullable<z.ZodBoolean>>>;
- topLogProbs: z.ZodOptional<z.ZodDefault<z.ZodNumber>>;
- toolChoice: z.ZodOptional<z.ZodDefault<z.ZodNullable<z.ZodEnum<[string, ...string[]]>>>>;
+ declare const ChatModelResponseFormatConfigSchema: (maxOutputTokens: number, maxSequences: number) => zod.ZodObject<zod.objectUtil.extendShape<{
+ temperature: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
+ maxTokens: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
+ stop: zod.ZodOptional<zod.ZodDefault<zod.ZodArray<zod.ZodString, "many">>>;
+ topP: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
+ frequencyPenalty: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
+ presencePenalty: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
+ seed: zod.ZodEffects<zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>, number | undefined, number | undefined>;
+ logProbs: zod.ZodOptional<zod.ZodDefault<zod.ZodNullable<zod.ZodBoolean>>>;
+ topLogProbs: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
+ toolChoice: zod.ZodOptional<zod.ZodDefault<zod.ZodNullable<zod.ZodEnum<[string, ...string[]]>>>>;
  }, {
- responseFormat: z.ZodOptional<z.ZodDefault<z.ZodNullable<z.ZodEnum<[string, ...string[]]>>>>;
- responseSchema: z.ZodOptional<z.ZodObject<z.ZodRawShape, z.UnknownKeysParam, z.ZodTypeAny, {
- [x: string]: any;
- }, {
- [x: string]: any;
- }>>;
- }>, "strip", z.ZodTypeAny, {
+ responseFormat: zod.ZodOptional<zod.ZodDefault<zod.ZodNullable<zod.ZodEnum<[string, ...string[]]>>>>;
+ }>, "strip", zod.ZodTypeAny, {
  temperature?: number | undefined;
  seed?: number | undefined;
  stop?: string[] | undefined;
@@ -1187,9 +1196,6 @@ declare const ChatModelResponseSchemaConfigSchema: (maxOutputTokens: number, max
  topLogProbs?: number | undefined;
  toolChoice?: string | null | undefined;
  responseFormat?: string | null | undefined;
- responseSchema?: {
- [x: string]: any;
- } | undefined;
  }, {
  temperature?: number | undefined;
  seed?: number | undefined;
@@ -1202,12 +1208,24 @@ declare const ChatModelResponseSchemaConfigSchema: (maxOutputTokens: number, max
  topLogProbs?: number | undefined;
  toolChoice?: string | null | undefined;
  responseFormat?: string | null | undefined;
- responseSchema?: {
- [x: string]: any;
- } | undefined;
  }>;

- declare const ChatModelOSeriesConfigDef: (maxOutputTokens: number, maxSequences: number) => {
+ declare const ChatModelResponseSchemaConfigDef: (maxOutputTokens: number, maxSequences: number) => {
+ responseFormat: {
+ type: "select-string";
+ param: string;
+ title: string;
+ description: string;
+ default: string | null;
+ choices: string[];
+ };
+ responseSchema: {
+ type: "object-schema";
+ param: string;
+ title: string;
+ description: string;
+ objectSchema?: any;
+ };
  temperature: {
  type: "range";
  param: string;
@@ -1228,21 +1246,6 @@ declare const ChatModelOSeriesConfigDef: (maxOutputTokens: number, maxSequences:
  min: number;
  step: number;
  };
- responseFormat: {
- type: "select-string";
- param: string;
- title: string;
- description: string;
- default: string | null;
- choices: string[];
- };
- responseSchema: {
- type: "object-schema";
- param: string;
- title: string;
- description: string;
- objectSchema?: any;
- };
  stop: {
  type: "multi-string";
  param: string;
@@ -1316,7 +1319,7 @@ declare const ChatModelOSeriesConfigDef: (maxOutputTokens: number, maxSequences:
  choices: string[];
  };
  };
- declare const ChatModelOSeriesConfigSchema: (maxOutputTokens: number, maxSequences: number) => zod.ZodObject<zod.objectUtil.extendShape<zod.objectUtil.extendShape<{
+ declare const ChatModelResponseSchemaConfigSchema: (maxOutputTokens: number, maxSequences: number) => zod.ZodObject<zod.objectUtil.extendShape<{
  temperature: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
  maxTokens: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
  stop: zod.ZodOptional<zod.ZodDefault<zod.ZodArray<zod.ZodString, "many">>>;
@@ -1334,9 +1337,6 @@ declare const ChatModelOSeriesConfigSchema: (maxOutputTokens: number, maxSequenc
  }, {
  [x: string]: any;
  }>>;
- }>, {
- temperature: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
- maxTokens: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
  }>, "strip", zod.ZodTypeAny, {
  temperature?: number | undefined;
  seed?: number | undefined;
@@ -1412,11 +1412,11 @@ declare const dimensions: (maxDimensions: number) => {
  schema: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
  };

- declare const EmbeddingModelDimensionsConfigSchema: (maxDimensions: number) => z.ZodObject<z.objectUtil.extendShape<{
- encodingFormat: z.ZodOptional<z.ZodDefault<z.ZodNullable<z.ZodEnum<[string, ...string[]]>>>>;
+ declare const EmbeddingModelDimensionsConfigSchema: (maxDimensions: number) => zod.ZodObject<zod.objectUtil.extendShape<{
+ encodingFormat: zod.ZodOptional<zod.ZodDefault<zod.ZodNullable<zod.ZodEnum<[string, ...string[]]>>>>;
  }, {
- dimensions: z.ZodOptional<z.ZodDefault<z.ZodNumber>>;
- }>, "strip", z.ZodTypeAny, {
+ dimensions: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
+ }>, "strip", zod.ZodTypeAny, {
  dimensions?: number | undefined;
  encodingFormat?: string | null | undefined;
  }, {
@@ -5293,7 +5293,7 @@ declare class Text_Embedding_3_Large extends BaseEmbeddingModel {
  }

  declare const ProviderLiteral = "openai";
- declare class OpenAI<O extends Record<string, any> = Record<string, any>> implements ProviderV1<O> {
+ declare class OpenAI<C extends BaseChatModelOptionsType, E extends BaseEmbeddingModelOptionsType> implements ProviderV1<C, E> {
  readonly version: "v1";
  readonly name = "openai";
  private readonly baseUrl;
@@ -5301,10 +5301,10 @@ declare class OpenAI<O extends Record<string, any> = Record<string, any>> implem
  private readonly embeddingModelFactories;
  chatModelLiterals(): string[];
  chatModelSchemas(): Record<string, ChatModelSchemaType>;
- chatModel(options: O): ChatModelV1;
+ chatModel(options: C): ChatModelV1;
  embeddingModelLiterals(): string[];
  embeddingModelSchemas(): Record<string, EmbeddingModelSchemaType>;
- embeddingModel(options: O): EmbeddingModelV1;
+ embeddingModel(options: E): EmbeddingModelV1;
  }

  export { BaseChatModel, BaseChatModelOptions, type BaseChatModelOptionsType, BaseEmbeddingModel, BaseEmbeddingModelOptions, type BaseEmbeddingModelOptionsType, BaseOSeriesChatModel, ChatModelBaseConfigDef, ChatModelBaseConfigSchema, ChatModelOSeriesConfigDef, ChatModelOSeriesConfigSchema, ChatModelResponseFormatConfigDef, ChatModelResponseFormatConfigSchema, ChatModelResponseSchemaConfigDef, ChatModelResponseSchemaConfigSchema, EmbeddingModelBaseConfigDef, EmbeddingModelBaseConfigSchema, EmbeddingModelDimensionsConfigDef, EmbeddingModelDimensionsConfigSchema, GPT_3_5_Turbo, GPT_3_5_TurboLiteral, GPT_3_5_TurboOptions, type GPT_3_5_TurboOptionsType, GPT_3_5_TurboSchema, GPT_3_5_Turbo_0125, GPT_3_5_Turbo_0125Literal, GPT_3_5_Turbo_0125Options, type GPT_3_5_Turbo_0125OptionsType, GPT_3_5_Turbo_0125Schema, GPT_3_5_Turbo_1106, GPT_3_5_Turbo_1106Literal, GPT_3_5_Turbo_1106Options, type GPT_3_5_Turbo_1106OptionsType, GPT_3_5_Turbo_1106Schema, GPT_4, GPT_4Literal, GPT_4Options, type GPT_4OptionsType, GPT_4Schema, GPT_4_0125_Preview, GPT_4_0125_PreviewLiteral, GPT_4_0125_PreviewOptions, type GPT_4_0125_PreviewOptionsType, GPT_4_0125_PreviewSchema, GPT_4_0613, GPT_4_0613Literal, GPT_4_0613Options, type GPT_4_0613OptionsType, GPT_4_0613Schema, GPT_4_1106_Preview, GPT_4_1106_PreviewLiteral, GPT_4_1106_PreviewOptions, type GPT_4_1106_PreviewOptionsType, GPT_4_1106_PreviewSchema, GPT_4_Turbo, GPT_4_TurboLiteral, GPT_4_TurboOptions, type GPT_4_TurboOptionsType, GPT_4_TurboSchema, GPT_4_Turbo_2024_04_09, GPT_4_Turbo_2024_04_09Literal, GPT_4_Turbo_2024_04_09Options, type GPT_4_Turbo_2024_04_09OptionsType, GPT_4_Turbo_2024_04_09Schema, GPT_4_Turbo_Preview, GPT_4_Turbo_PreviewLiteral, GPT_4_Turbo_PreviewOptions, type GPT_4_Turbo_PreviewOptionsType, GPT_4_Turbo_PreviewSchema, GPT_4o, GPT_4oLiteral, GPT_4oOptions, type GPT_4oOptionsType, GPT_4oSchema, GPT_4o_2024_05_13, GPT_4o_2024_05_13Literal, GPT_4o_2024_05_13Options, type GPT_4o_2024_05_13OptionsType, GPT_4o_2024_05_13Schema, GPT_4o_2024_08_06, GPT_4o_2024_08_06Literal, GPT_4o_2024_08_06Options, type GPT_4o_2024_08_06OptionsType, GPT_4o_2024_08_06Schema, GPT_4o_Mini, GPT_4o_MiniLiteral, GPT_4o_MiniOptions, type GPT_4o_MiniOptionsType, GPT_4o_MiniSchema, GPT_4o_Mini_2024_07_18, GPT_4o_Mini_2024_07_18Literal, GPT_4o_Mini_2024_07_18Options, type GPT_4o_Mini_2024_07_18OptionsType, GPT_4o_Mini_2024_07_18Schema, O1_Mini, O1_MiniLiteral, O1_MiniOptions, type O1_MiniOptionsType, O1_MiniSchema, O1_Preview, O1_PreviewLiteral, O1_PreviewOptions, type O1_PreviewOptionsType, O1_PreviewSchema, OpenAI, OpenAIChatModelConfigs, OpenAIChatModelModalities, OpenAIChatModelModalitiesEnum, OpenAIChatModelOSSeriesRoles, OpenAIChatModelOSSeriesRolesMap, OpenAIChatModelRoles, OpenAIChatModelRolesMap, OpenAIChatModelTextModalities, OpenAIChatModelTextModalitiesEnum, OpenAIChatModelTextToolModalities, OpenAIChatModelTextToolModalitiesEnum, OpenAIChatOSeriesRequest, type OpenAIChatOSeriesRequestType, OpenAIChatRequest, OpenAIChatRequestAssistantMessage, type OpenAIChatRequestAssistantMessageType, OpenAIChatRequestImageContent, type OpenAIChatRequestImageContentType, OpenAIChatRequestMessage, type OpenAIChatRequestMessageType, OpenAIChatRequestResponseFormat, type OpenAIChatRequestResponseFormatType, OpenAIChatRequestSystemMessage, type OpenAIChatRequestSystemMessageType, OpenAIChatRequestTextContent, type OpenAIChatRequestTextContentType, OpenAIChatRequestTool, OpenAIChatRequestToolCallContent, type OpenAIChatRequestToolCallContentType, OpenAIChatRequestToolChoiceEnum, type 
OpenAIChatRequestToolChoiceEnumType, OpenAIChatRequestToolChoiceFunction, type OpenAIChatRequestToolChoiceFunctionType, OpenAIChatRequestToolMessage, type OpenAIChatRequestToolMessageType, type OpenAIChatRequestToolType, type OpenAIChatRequestType, OpenAIChatRequestUserMessage, type OpenAIChatRequestUserMessageType, OpenAICompleteChatResponse, type OpenAICompleteChatResponseType, OpenAIEmbeddingModelConfigs, OpenAIEmbeddingModelModalities, OpenAIEmbeddingModelModalitiesEnum, OpenAIEmbeddingRequest, OpenAIEmbeddingRequestInput, type OpenAIEmbeddingRequestInputType, type OpenAIEmbeddingRequestType, OpenAIGetEmbeddingsResponse, OpenAIStreamChatResponse, type OpenAIStreamChatResponseType, OpenAIToolCallsCompleteChatResponse, OpenAIToolCallsStreamChatResponse, ProviderLiteral, Text_Embedding_3_Large, Text_Embedding_3_LargeLiteral, Text_Embedding_3_LargeSchema, Text_Embedding_3_Large_Options, type Text_Embedding_3_Large_OptionsType, Text_Embedding_3_Small, Text_Embedding_3_SmallLiteral, Text_Embedding_3_SmallSchema, Text_Embedding_3_Small_Options, type Text_Embedding_3_Small_OptionsType, Text_Embedding_Ada002, Text_Embedding_Ada002Literal, Text_Embedding_Ada002Schema, Text_Embedding_Ada002_Options, type Text_Embedding_Ada002_OptionsType, dimensions, encodingFormat, frequencyPenalty, logProbs, maxTokens, presencePenalty, seed, stop, temperature, toolChoice, topLogProbs, topP };
package/dist/index.d.ts CHANGED
@@ -907,15 +907,7 @@ declare const toolChoice: {
  schema: zod.ZodOptional<zod.ZodDefault<zod.ZodNullable<zod.ZodEnum<[string, ...string[]]>>>>;
  };

- declare const ChatModelResponseFormatConfigDef: (maxOutputTokens: number, maxSequences: number) => {
- responseFormat: {
- type: "select-string";
- param: string;
- title: string;
- description: string;
- default: string | null;
- choices: string[];
- };
+ declare const ChatModelOSeriesConfigDef: (maxOutputTokens: number, maxSequences: number) => {
  temperature: {
  type: "range";
  param: string;
@@ -936,6 +928,21 @@ declare const ChatModelResponseFormatConfigDef: (maxOutputTokens: number, maxSeq
  min: number;
  step: number;
  };
+ responseFormat: {
+ type: "select-string";
+ param: string;
+ title: string;
+ description: string;
+ default: string | null;
+ choices: string[];
+ };
+ responseSchema: {
+ type: "object-schema";
+ param: string;
+ title: string;
+ description: string;
+ objectSchema?: any;
+ };
  stop: {
  type: "multi-string";
  param: string;
@@ -1009,7 +1016,7 @@ declare const ChatModelResponseFormatConfigDef: (maxOutputTokens: number, maxSeq
  choices: string[];
  };
  };
- declare const ChatModelResponseFormatConfigSchema: (maxOutputTokens: number, maxSequences: number) => zod.ZodObject<zod.objectUtil.extendShape<{
+ declare const ChatModelOSeriesConfigSchema: (maxOutputTokens: number, maxSequences: number) => zod.ZodObject<zod.objectUtil.extendShape<zod.objectUtil.extendShape<{
  temperature: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
  maxTokens: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
  stop: zod.ZodOptional<zod.ZodDefault<zod.ZodArray<zod.ZodString, "many">>>;
@@ -1022,6 +1029,14 @@ declare const ChatModelResponseFormatConfigSchema: (maxOutputTokens: number, max
  toolChoice: zod.ZodOptional<zod.ZodDefault<zod.ZodNullable<zod.ZodEnum<[string, ...string[]]>>>>;
  }, {
  responseFormat: zod.ZodOptional<zod.ZodDefault<zod.ZodNullable<zod.ZodEnum<[string, ...string[]]>>>>;
+ responseSchema: zod.ZodOptional<zod.ZodObject<zod.ZodRawShape, zod.UnknownKeysParam, zod.ZodTypeAny, {
+ [x: string]: any;
+ }, {
+ [x: string]: any;
+ }>>;
+ }>, {
+ temperature: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
+ maxTokens: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
  }>, "strip", zod.ZodTypeAny, {
  temperature?: number | undefined;
  seed?: number | undefined;
@@ -1034,6 +1049,9 @@ declare const ChatModelResponseFormatConfigSchema: (maxOutputTokens: number, max
  topLogProbs?: number | undefined;
  toolChoice?: string | null | undefined;
  responseFormat?: string | null | undefined;
+ responseSchema?: {
+ [x: string]: any;
+ } | undefined;
  }, {
  temperature?: number | undefined;
  seed?: number | undefined;
@@ -1046,9 +1064,12 @@ declare const ChatModelResponseFormatConfigSchema: (maxOutputTokens: number, max
  topLogProbs?: number | undefined;
  toolChoice?: string | null | undefined;
  responseFormat?: string | null | undefined;
+ responseSchema?: {
+ [x: string]: any;
+ } | undefined;
  }>;

- declare const ChatModelResponseSchemaConfigDef: (maxOutputTokens: number, maxSequences: number) => {
+ declare const ChatModelResponseFormatConfigDef: (maxOutputTokens: number, maxSequences: number) => {
  responseFormat: {
  type: "select-string";
  param: string;
@@ -1057,13 +1078,6 @@ declare const ChatModelResponseSchemaConfigDef: (maxOutputTokens: number, maxSeq
  default: string | null;
  choices: string[];
  };
- responseSchema: {
- type: "object-schema";
- param: string;
- title: string;
- description: string;
- objectSchema?: any;
- };
  temperature: {
  type: "range";
  param: string;
@@ -1157,25 +1171,20 @@ declare const ChatModelResponseSchemaConfigDef: (maxOutputTokens: number, maxSeq
  choices: string[];
  };
  };
- declare const ChatModelResponseSchemaConfigSchema: (maxOutputTokens: number, maxSequences: number) => z.ZodObject<z.objectUtil.extendShape<{
- temperature: z.ZodOptional<z.ZodDefault<z.ZodNumber>>;
- maxTokens: z.ZodOptional<z.ZodDefault<z.ZodNumber>>;
- stop: z.ZodOptional<z.ZodDefault<z.ZodArray<z.ZodString, "many">>>;
- topP: z.ZodOptional<z.ZodDefault<z.ZodNumber>>;
- frequencyPenalty: z.ZodOptional<z.ZodDefault<z.ZodNumber>>;
- presencePenalty: z.ZodOptional<z.ZodDefault<z.ZodNumber>>;
- seed: z.ZodEffects<z.ZodOptional<z.ZodDefault<z.ZodNumber>>, number | undefined, number | undefined>;
- logProbs: z.ZodOptional<z.ZodDefault<z.ZodNullable<z.ZodBoolean>>>;
- topLogProbs: z.ZodOptional<z.ZodDefault<z.ZodNumber>>;
- toolChoice: z.ZodOptional<z.ZodDefault<z.ZodNullable<z.ZodEnum<[string, ...string[]]>>>>;
+ declare const ChatModelResponseFormatConfigSchema: (maxOutputTokens: number, maxSequences: number) => zod.ZodObject<zod.objectUtil.extendShape<{
+ temperature: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
+ maxTokens: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
+ stop: zod.ZodOptional<zod.ZodDefault<zod.ZodArray<zod.ZodString, "many">>>;
+ topP: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
+ frequencyPenalty: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
+ presencePenalty: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
+ seed: zod.ZodEffects<zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>, number | undefined, number | undefined>;
+ logProbs: zod.ZodOptional<zod.ZodDefault<zod.ZodNullable<zod.ZodBoolean>>>;
+ topLogProbs: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
+ toolChoice: zod.ZodOptional<zod.ZodDefault<zod.ZodNullable<zod.ZodEnum<[string, ...string[]]>>>>;
  }, {
- responseFormat: z.ZodOptional<z.ZodDefault<z.ZodNullable<z.ZodEnum<[string, ...string[]]>>>>;
- responseSchema: z.ZodOptional<z.ZodObject<z.ZodRawShape, z.UnknownKeysParam, z.ZodTypeAny, {
- [x: string]: any;
- }, {
- [x: string]: any;
- }>>;
- }>, "strip", z.ZodTypeAny, {
+ responseFormat: zod.ZodOptional<zod.ZodDefault<zod.ZodNullable<zod.ZodEnum<[string, ...string[]]>>>>;
+ }>, "strip", zod.ZodTypeAny, {
  temperature?: number | undefined;
  seed?: number | undefined;
  stop?: string[] | undefined;
@@ -1187,9 +1196,6 @@ declare const ChatModelResponseSchemaConfigSchema: (maxOutputTokens: number, max
  topLogProbs?: number | undefined;
  toolChoice?: string | null | undefined;
  responseFormat?: string | null | undefined;
- responseSchema?: {
- [x: string]: any;
- } | undefined;
  }, {
  temperature?: number | undefined;
  seed?: number | undefined;
@@ -1202,12 +1208,24 @@ declare const ChatModelResponseSchemaConfigSchema: (maxOutputTokens: number, max
  topLogProbs?: number | undefined;
  toolChoice?: string | null | undefined;
  responseFormat?: string | null | undefined;
- responseSchema?: {
- [x: string]: any;
- } | undefined;
  }>;

- declare const ChatModelOSeriesConfigDef: (maxOutputTokens: number, maxSequences: number) => {
+ declare const ChatModelResponseSchemaConfigDef: (maxOutputTokens: number, maxSequences: number) => {
+ responseFormat: {
+ type: "select-string";
+ param: string;
+ title: string;
+ description: string;
+ default: string | null;
+ choices: string[];
+ };
+ responseSchema: {
+ type: "object-schema";
+ param: string;
+ title: string;
+ description: string;
+ objectSchema?: any;
+ };
  temperature: {
  type: "range";
  param: string;
@@ -1228,21 +1246,6 @@ declare const ChatModelOSeriesConfigDef: (maxOutputTokens: number, maxSequences:
  min: number;
  step: number;
  };
- responseFormat: {
- type: "select-string";
- param: string;
- title: string;
- description: string;
- default: string | null;
- choices: string[];
- };
- responseSchema: {
- type: "object-schema";
- param: string;
- title: string;
- description: string;
- objectSchema?: any;
- };
  stop: {
  type: "multi-string";
  param: string;
@@ -1316,7 +1319,7 @@ declare const ChatModelOSeriesConfigDef: (maxOutputTokens: number, maxSequences:
  choices: string[];
  };
  };
- declare const ChatModelOSeriesConfigSchema: (maxOutputTokens: number, maxSequences: number) => zod.ZodObject<zod.objectUtil.extendShape<zod.objectUtil.extendShape<{
+ declare const ChatModelResponseSchemaConfigSchema: (maxOutputTokens: number, maxSequences: number) => zod.ZodObject<zod.objectUtil.extendShape<{
  temperature: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
  maxTokens: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
  stop: zod.ZodOptional<zod.ZodDefault<zod.ZodArray<zod.ZodString, "many">>>;
@@ -1334,9 +1337,6 @@ declare const ChatModelOSeriesConfigSchema: (maxOutputTokens: number, maxSequenc
  }, {
  [x: string]: any;
  }>>;
- }>, {
- temperature: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
- maxTokens: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
  }>, "strip", zod.ZodTypeAny, {
  temperature?: number | undefined;
  seed?: number | undefined;
@@ -1412,11 +1412,11 @@ declare const dimensions: (maxDimensions: number) => {
  schema: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
  };

- declare const EmbeddingModelDimensionsConfigSchema: (maxDimensions: number) => z.ZodObject<z.objectUtil.extendShape<{
- encodingFormat: z.ZodOptional<z.ZodDefault<z.ZodNullable<z.ZodEnum<[string, ...string[]]>>>>;
+ declare const EmbeddingModelDimensionsConfigSchema: (maxDimensions: number) => zod.ZodObject<zod.objectUtil.extendShape<{
+ encodingFormat: zod.ZodOptional<zod.ZodDefault<zod.ZodNullable<zod.ZodEnum<[string, ...string[]]>>>>;
  }, {
- dimensions: z.ZodOptional<z.ZodDefault<z.ZodNumber>>;
- }>, "strip", z.ZodTypeAny, {
+ dimensions: zod.ZodOptional<zod.ZodDefault<zod.ZodNumber>>;
+ }>, "strip", zod.ZodTypeAny, {
  dimensions?: number | undefined;
  encodingFormat?: string | null | undefined;
  }, {
@@ -5293,7 +5293,7 @@ declare class Text_Embedding_3_Large extends BaseEmbeddingModel {
  }

  declare const ProviderLiteral = "openai";
- declare class OpenAI<O extends Record<string, any> = Record<string, any>> implements ProviderV1<O> {
+ declare class OpenAI<C extends BaseChatModelOptionsType, E extends BaseEmbeddingModelOptionsType> implements ProviderV1<C, E> {
  readonly version: "v1";
  readonly name = "openai";
  private readonly baseUrl;
@@ -5301,10 +5301,10 @@ declare class OpenAI<O extends Record<string, any> = Record<string, any>> implem
  private readonly embeddingModelFactories;
  chatModelLiterals(): string[];
  chatModelSchemas(): Record<string, ChatModelSchemaType>;
- chatModel(options: O): ChatModelV1;
+ chatModel(options: C): ChatModelV1;
  embeddingModelLiterals(): string[];
  embeddingModelSchemas(): Record<string, EmbeddingModelSchemaType>;
- embeddingModel(options: O): EmbeddingModelV1;
+ embeddingModel(options: E): EmbeddingModelV1;
  }

  export { BaseChatModel, BaseChatModelOptions, type BaseChatModelOptionsType, BaseEmbeddingModel, BaseEmbeddingModelOptions, type BaseEmbeddingModelOptionsType, BaseOSeriesChatModel, ChatModelBaseConfigDef, ChatModelBaseConfigSchema, ChatModelOSeriesConfigDef, ChatModelOSeriesConfigSchema, ChatModelResponseFormatConfigDef, ChatModelResponseFormatConfigSchema, ChatModelResponseSchemaConfigDef, ChatModelResponseSchemaConfigSchema, EmbeddingModelBaseConfigDef, EmbeddingModelBaseConfigSchema, EmbeddingModelDimensionsConfigDef, EmbeddingModelDimensionsConfigSchema, GPT_3_5_Turbo, GPT_3_5_TurboLiteral, GPT_3_5_TurboOptions, type GPT_3_5_TurboOptionsType, GPT_3_5_TurboSchema, GPT_3_5_Turbo_0125, GPT_3_5_Turbo_0125Literal, GPT_3_5_Turbo_0125Options, type GPT_3_5_Turbo_0125OptionsType, GPT_3_5_Turbo_0125Schema, GPT_3_5_Turbo_1106, GPT_3_5_Turbo_1106Literal, GPT_3_5_Turbo_1106Options, type GPT_3_5_Turbo_1106OptionsType, GPT_3_5_Turbo_1106Schema, GPT_4, GPT_4Literal, GPT_4Options, type GPT_4OptionsType, GPT_4Schema, GPT_4_0125_Preview, GPT_4_0125_PreviewLiteral, GPT_4_0125_PreviewOptions, type GPT_4_0125_PreviewOptionsType, GPT_4_0125_PreviewSchema, GPT_4_0613, GPT_4_0613Literal, GPT_4_0613Options, type GPT_4_0613OptionsType, GPT_4_0613Schema, GPT_4_1106_Preview, GPT_4_1106_PreviewLiteral, GPT_4_1106_PreviewOptions, type GPT_4_1106_PreviewOptionsType, GPT_4_1106_PreviewSchema, GPT_4_Turbo, GPT_4_TurboLiteral, GPT_4_TurboOptions, type GPT_4_TurboOptionsType, GPT_4_TurboSchema, GPT_4_Turbo_2024_04_09, GPT_4_Turbo_2024_04_09Literal, GPT_4_Turbo_2024_04_09Options, type GPT_4_Turbo_2024_04_09OptionsType, GPT_4_Turbo_2024_04_09Schema, GPT_4_Turbo_Preview, GPT_4_Turbo_PreviewLiteral, GPT_4_Turbo_PreviewOptions, type GPT_4_Turbo_PreviewOptionsType, GPT_4_Turbo_PreviewSchema, GPT_4o, GPT_4oLiteral, GPT_4oOptions, type GPT_4oOptionsType, GPT_4oSchema, GPT_4o_2024_05_13, GPT_4o_2024_05_13Literal, GPT_4o_2024_05_13Options, type GPT_4o_2024_05_13OptionsType, GPT_4o_2024_05_13Schema, GPT_4o_2024_08_06, GPT_4o_2024_08_06Literal, GPT_4o_2024_08_06Options, type GPT_4o_2024_08_06OptionsType, GPT_4o_2024_08_06Schema, GPT_4o_Mini, GPT_4o_MiniLiteral, GPT_4o_MiniOptions, type GPT_4o_MiniOptionsType, GPT_4o_MiniSchema, GPT_4o_Mini_2024_07_18, GPT_4o_Mini_2024_07_18Literal, GPT_4o_Mini_2024_07_18Options, type GPT_4o_Mini_2024_07_18OptionsType, GPT_4o_Mini_2024_07_18Schema, O1_Mini, O1_MiniLiteral, O1_MiniOptions, type O1_MiniOptionsType, O1_MiniSchema, O1_Preview, O1_PreviewLiteral, O1_PreviewOptions, type O1_PreviewOptionsType, O1_PreviewSchema, OpenAI, OpenAIChatModelConfigs, OpenAIChatModelModalities, OpenAIChatModelModalitiesEnum, OpenAIChatModelOSSeriesRoles, OpenAIChatModelOSSeriesRolesMap, OpenAIChatModelRoles, OpenAIChatModelRolesMap, OpenAIChatModelTextModalities, OpenAIChatModelTextModalitiesEnum, OpenAIChatModelTextToolModalities, OpenAIChatModelTextToolModalitiesEnum, OpenAIChatOSeriesRequest, type OpenAIChatOSeriesRequestType, OpenAIChatRequest, OpenAIChatRequestAssistantMessage, type OpenAIChatRequestAssistantMessageType, OpenAIChatRequestImageContent, type OpenAIChatRequestImageContentType, OpenAIChatRequestMessage, type OpenAIChatRequestMessageType, OpenAIChatRequestResponseFormat, type OpenAIChatRequestResponseFormatType, OpenAIChatRequestSystemMessage, type OpenAIChatRequestSystemMessageType, OpenAIChatRequestTextContent, type OpenAIChatRequestTextContentType, OpenAIChatRequestTool, OpenAIChatRequestToolCallContent, type OpenAIChatRequestToolCallContentType, OpenAIChatRequestToolChoiceEnum, type 
OpenAIChatRequestToolChoiceEnumType, OpenAIChatRequestToolChoiceFunction, type OpenAIChatRequestToolChoiceFunctionType, OpenAIChatRequestToolMessage, type OpenAIChatRequestToolMessageType, type OpenAIChatRequestToolType, type OpenAIChatRequestType, OpenAIChatRequestUserMessage, type OpenAIChatRequestUserMessageType, OpenAICompleteChatResponse, type OpenAICompleteChatResponseType, OpenAIEmbeddingModelConfigs, OpenAIEmbeddingModelModalities, OpenAIEmbeddingModelModalitiesEnum, OpenAIEmbeddingRequest, OpenAIEmbeddingRequestInput, type OpenAIEmbeddingRequestInputType, type OpenAIEmbeddingRequestType, OpenAIGetEmbeddingsResponse, OpenAIStreamChatResponse, type OpenAIStreamChatResponseType, OpenAIToolCallsCompleteChatResponse, OpenAIToolCallsStreamChatResponse, ProviderLiteral, Text_Embedding_3_Large, Text_Embedding_3_LargeLiteral, Text_Embedding_3_LargeSchema, Text_Embedding_3_Large_Options, type Text_Embedding_3_Large_OptionsType, Text_Embedding_3_Small, Text_Embedding_3_SmallLiteral, Text_Embedding_3_SmallSchema, Text_Embedding_3_Small_Options, type Text_Embedding_3_Small_OptionsType, Text_Embedding_Ada002, Text_Embedding_Ada002Literal, Text_Embedding_Ada002Schema, Text_Embedding_Ada002_Options, type Text_Embedding_Ada002_OptionsType, dimensions, encodingFormat, frequencyPenalty, logProbs, maxTokens, presencePenalty, seed, stop, temperature, toolChoice, topLogProbs, topP };
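
The most visible API change in this release is the OpenAI provider class: the single options generic O extends Record<string, any> is replaced by separate chat (C) and embedding (E) generics constrained to BaseChatModelOptionsType and BaseEmbeddingModelOptionsType. The following is a minimal TypeScript sketch of what that implies for callers, based only on the declarations visible in this diff; the concrete option fields and the constructor are not shown here and are deliberately left abstract.

import { OpenAI, type BaseChatModelOptionsType, type BaseEmbeddingModelOptionsType } from "@adaline/openai";

// Placeholder aliases for illustration only; real option shapes come from the
// package's BaseChatModelOptions / BaseEmbeddingModelOptions schemas.
type ChatOptions = BaseChatModelOptionsType;
type EmbeddingOptions = BaseEmbeddingModelOptionsType;

// In 0.18.0 both chatModel(options) and embeddingModel(options) took the same O;
// in 0.19.0 the provider is parameterized separately for chat and embedding options.
declare const provider: OpenAI<ChatOptions, EmbeddingOptions>;
declare const chatOptions: ChatOptions;
declare const embeddingOptions: EmbeddingOptions;

const chatModel = provider.chatModel(chatOptions);                 // ChatModelV1
const embeddingModel = provider.embeddingModel(embeddingOptions);  // EmbeddingModelV1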