@ai-sdk/openai 0.0.68 → 0.0.71
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +28 -0
- package/dist/index.js +153 -116
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +149 -110
- package/dist/index.mjs.map +1 -1
- package/internal/dist/index.js +153 -116
- package/internal/dist/index.js.map +1 -1
- package/internal/dist/index.mjs +149 -110
- package/internal/dist/index.mjs.map +1 -1
- package/package.json +3 -3
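
The hunks below are compiled dist output, so the intent is easier to see up front: tool conversion moves out of the chat model into a shared prepareTools helper that reports provider-defined tools as warnings instead of silently dropping them, and doGenerate/doStream now echo the serialized request body back in their results. The following is a minimal sketch of the helper's return shape as it can be read from the diff; the type names (PreparedTools, ToolWarning) are illustrative assumptions, not identifiers from the package.

// Sketch only: the shape returned by the new internal prepareTools helper,
// reconstructed from the compiled output in this diff. Names of these types
// are assumptions for illustration.
type ToolWarning = { type: "unsupported-tool"; tool: unknown };

interface PreparedTools {
  // standard tool-calling path
  tools?: Array<{
    type: "function";
    function: {
      name: string;
      description?: string;
      parameters: unknown;
      strict?: boolean; // only set when structuredOutputs is enabled
    };
  }>;
  tool_choice?:
    | "auto"
    | "none"
    | "required"
    | { type: "function"; function: { name: string } };
  // legacy function-calling path returns these instead
  functions?: Array<{ name: string; description?: string; parameters: unknown }>;
  function_call?: { name: string };
  // provider-defined tools end up here rather than throwing
  toolWarnings: ToolWarning[];
}

The chat model spreads this result into its request arguments and merges toolWarnings into the call's warnings array (see the @@ -336,23 +437,27 @@ hunk).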
package/internal/dist/index.js
CHANGED
@@ -27,7 +27,7 @@ __export(internal_exports, {
 module.exports = __toCommonJS(internal_exports);
 
 // src/openai-chat-language-model.ts
-var
+var import_provider3 = require("@ai-sdk/provider");
 var import_provider_utils3 = require("@ai-sdk/provider-utils");
 var import_zod2 = require("zod");
 
@@ -235,6 +235,107 @@ function getResponseMetadata({
   };
 }
 
+// src/openai-prepare-tools.ts
+var import_provider2 = require("@ai-sdk/provider");
+function prepareTools({
+  mode,
+  useLegacyFunctionCalling = false,
+  structuredOutputs = false
+}) {
+  var _a;
+  const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
+  const toolWarnings = [];
+  if (tools == null) {
+    return { tools: void 0, tool_choice: void 0, toolWarnings };
+  }
+  const toolChoice = mode.toolChoice;
+  if (useLegacyFunctionCalling) {
+    const openaiFunctions = [];
+    for (const tool of tools) {
+      if (tool.type === "provider-defined") {
+        toolWarnings.push({ type: "unsupported-tool", tool });
+      } else {
+        openaiFunctions.push({
+          name: tool.name,
+          description: tool.description,
+          parameters: tool.parameters
+        });
+      }
+    }
+    if (toolChoice == null) {
+      return {
+        functions: openaiFunctions,
+        function_call: void 0,
+        toolWarnings
+      };
+    }
+    const type2 = toolChoice.type;
+    switch (type2) {
+      case "auto":
+      case "none":
+      case void 0:
+        return {
+          functions: openaiFunctions,
+          function_call: void 0,
+          toolWarnings
+        };
+      case "required":
+        throw new import_provider2.UnsupportedFunctionalityError({
+          functionality: "useLegacyFunctionCalling and toolChoice: required"
+        });
+      default:
+        return {
+          functions: openaiFunctions,
+          function_call: { name: toolChoice.toolName },
+          toolWarnings
+        };
+    }
+  }
+  const openaiTools = [];
+  for (const tool of tools) {
+    if (tool.type === "provider-defined") {
+      toolWarnings.push({ type: "unsupported-tool", tool });
+    } else {
+      openaiTools.push({
+        type: "function",
+        function: {
+          name: tool.name,
+          description: tool.description,
+          parameters: tool.parameters,
+          strict: structuredOutputs === true ? true : void 0
+        }
+      });
+    }
+  }
+  if (toolChoice == null) {
+    return { tools: openaiTools, tool_choice: void 0, toolWarnings };
+  }
+  const type = toolChoice.type;
+  switch (type) {
+    case "auto":
+    case "none":
+    case "required":
+      return { tools: openaiTools, tool_choice: type, toolWarnings };
+    case "tool":
+      return {
+        tools: openaiTools,
+        tool_choice: {
+          type: "function",
+          function: {
+            name: toolChoice.toolName
+          }
+        },
+        toolWarnings
+      };
+    default: {
+      const _exhaustiveCheck = type;
+      throw new import_provider2.UnsupportedFunctionalityError({
+        functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
+      });
+    }
+  }
+}
+
 // src/openai-chat-language-model.ts
 var OpenAIChatLanguageModel = class {
   constructor(modelId, settings, config) {
@@ -290,12 +391,12 @@ var OpenAIChatLanguageModel = class {
     }
     const useLegacyFunctionCalling = this.settings.useLegacyFunctionCalling;
     if (useLegacyFunctionCalling && this.settings.parallelToolCalls === true) {
-      throw new
+      throw new import_provider3.UnsupportedFunctionalityError({
        functionality: "useLegacyFunctionCalling with parallelToolCalls"
      });
    }
    if (useLegacyFunctionCalling && this.settings.structuredOutputs === true) {
-      throw new
+      throw new import_provider3.UnsupportedFunctionalityError({
        functionality: "structuredOutputs with useLegacyFunctionCalling"
      });
    }
@@ -336,23 +437,27 @@ var OpenAIChatLanguageModel = class {
     }
     switch (type) {
       case "regular": {
+        const { tools, tool_choice, functions, function_call, toolWarnings } = prepareTools({
+          mode,
+          useLegacyFunctionCalling,
+          structuredOutputs: this.settings.structuredOutputs
+        });
         return {
           args: {
             ...baseArgs,
-
-
-
-
-            })
+            tools,
+            tool_choice,
+            functions,
+            function_call
           },
-          warnings
+          warnings: [...warnings, ...toolWarnings]
         };
       }
       case "object-json": {
         return {
           args: {
             ...baseArgs,
-            response_format: this.settings.structuredOutputs === true ? {
+            response_format: this.settings.structuredOutputs === true && mode.schema != null ? {
               type: "json_schema",
               json_schema: {
                 schema: mode.schema,
@@ -408,14 +513,14 @@ var OpenAIChatLanguageModel = class {
   }
   async doGenerate(options) {
     var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
-    const { args, warnings } = this.getArgs(options);
+    const { args: body, warnings } = this.getArgs(options);
     const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
       url: this.config.url({
         path: "/chat/completions",
         modelId: this.modelId
       }),
       headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
-      body
+      body,
       failedResponseHandler: openaiFailedResponseHandler,
       successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
         openAIChatResponseSchema
@@ -423,7 +528,7 @@ var OpenAIChatLanguageModel = class {
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
-    const { messages: rawPrompt, ...rawSettings } =
+    const { messages: rawPrompt, ...rawSettings } = body;
     const choice = response.choices[0];
     let providerMetadata;
     if (((_b = (_a = response.usage) == null ? void 0 : _a.completion_tokens_details) == null ? void 0 : _b.reasoning_tokens) != null || ((_d = (_c = response.usage) == null ? void 0 : _c.prompt_tokens_details) == null ? void 0 : _d.cached_tokens) != null) {
@@ -460,6 +565,7 @@ var OpenAIChatLanguageModel = class {
       },
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
+      request: { body: JSON.stringify(body) },
       response: getResponseMetadata(response),
       warnings,
       logprobs: mapOpenAIChatLogProbsOutput(choice.logprobs),
@@ -504,18 +610,19 @@ var OpenAIChatLanguageModel = class {
       };
     }
     const { args, warnings } = this.getArgs(options);
+    const body = {
+      ...args,
+      stream: true,
+      // only include stream_options when in strict compatibility mode:
+      stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
+    };
     const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
       url: this.config.url({
         path: "/chat/completions",
         modelId: this.modelId
       }),
       headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
-      body
-        ...args,
-        stream: true,
-        // only include stream_options when in strict compatibility mode:
-        stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
-      },
+      body,
       failedResponseHandler: openaiFailedResponseHandler,
       successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(
         openaiChatChunkSchema
@@ -604,19 +711,19 @@ var OpenAIChatLanguageModel = class {
             const index = toolCallDelta.index;
             if (toolCalls[index] == null) {
              if (toolCallDelta.type !== "function") {
-                throw new
+                throw new import_provider3.InvalidResponseDataError({
                  data: toolCallDelta,
                  message: `Expected 'function' type.`
                });
              }
              if (toolCallDelta.id == null) {
-                throw new
+                throw new import_provider3.InvalidResponseDataError({
                  data: toolCallDelta,
                  message: `Expected 'id' to be a string.`
                });
              }
              if (((_e = toolCallDelta.function) == null ? void 0 : _e.name) == null) {
-                throw new
+                throw new import_provider3.InvalidResponseDataError({
                  data: toolCallDelta,
                  message: `Expected 'function.name' to be a string.`
                });
@@ -692,6 +799,7 @@ var OpenAIChatLanguageModel = class {
       ),
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
+      request: { body: JSON.stringify(body) },
       warnings
     };
   }
@@ -798,80 +906,6 @@ var openaiChatChunkSchema = import_zod2.z.union([
   }),
   openAIErrorDataSchema
 ]);
-function prepareToolsAndToolChoice({
-  mode,
-  useLegacyFunctionCalling = false,
-  structuredOutputs = false
-}) {
-  var _a;
-  const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
-  if (tools == null) {
-    return { tools: void 0, tool_choice: void 0 };
-  }
-  const toolChoice = mode.toolChoice;
-  if (useLegacyFunctionCalling) {
-    const mappedFunctions = tools.map((tool) => ({
-      name: tool.name,
-      description: tool.description,
-      parameters: tool.parameters
-    }));
-    if (toolChoice == null) {
-      return { functions: mappedFunctions, function_call: void 0 };
-    }
-    const type2 = toolChoice.type;
-    switch (type2) {
-      case "auto":
-      case "none":
-      case void 0:
-        return {
-          functions: mappedFunctions,
-          function_call: void 0
-        };
-      case "required":
-        throw new import_provider2.UnsupportedFunctionalityError({
-          functionality: "useLegacyFunctionCalling and toolChoice: required"
-        });
-      default:
-        return {
-          functions: mappedFunctions,
-          function_call: { name: toolChoice.toolName }
-        };
-    }
-  }
-  const mappedTools = tools.map((tool) => ({
-    type: "function",
-    function: {
-      name: tool.name,
-      description: tool.description,
-      parameters: tool.parameters,
-      strict: structuredOutputs === true ? true : void 0
-    }
-  }));
-  if (toolChoice == null) {
-    return { tools: mappedTools, tool_choice: void 0 };
-  }
-  const type = toolChoice.type;
-  switch (type) {
-    case "auto":
-    case "none":
-    case "required":
-      return { tools: mappedTools, tool_choice: type };
-    case "tool":
-      return {
-        tools: mappedTools,
-        tool_choice: {
-          type: "function",
-          function: {
-            name: toolChoice.toolName
-          }
-        }
-      };
-    default: {
-      const _exhaustiveCheck = type;
-      throw new Error(`Unsupported tool choice type: ${_exhaustiveCheck}`);
-    }
-  }
-}
 function isReasoningModel(modelId) {
   return modelId.startsWith("o1-");
 }
@@ -880,12 +914,12 @@ function isAudioModel(modelId) {
 }
 
 // src/openai-completion-language-model.ts
-var
+var import_provider5 = require("@ai-sdk/provider");
 var import_provider_utils4 = require("@ai-sdk/provider-utils");
 var import_zod3 = require("zod");
 
 // src/convert-to-openai-completion-prompt.ts
-var
+var import_provider4 = require("@ai-sdk/provider");
 function convertToOpenAICompletionPrompt({
   prompt,
   inputFormat,
@@ -905,7 +939,7 @@ function convertToOpenAICompletionPrompt({
   for (const { role, content } of prompt) {
     switch (role) {
       case "system": {
-        throw new
+        throw new import_provider4.InvalidPromptError({
          message: "Unexpected system message in prompt: ${content}",
          prompt
        });
@@ -917,7 +951,7 @@ function convertToOpenAICompletionPrompt({
               return part.text;
             }
             case "image": {
-              throw new
+              throw new import_provider4.UnsupportedFunctionalityError({
                functionality: "images"
              });
            }
@@ -936,7 +970,7 @@ ${userMessage}
               return part.text;
             }
             case "tool-call": {
-              throw new
+              throw new import_provider4.UnsupportedFunctionalityError({
                functionality: "tool-call messages"
              });
            }
@@ -949,7 +983,7 @@ ${assistantMessage}
         break;
       }
       case "tool": {
-        throw new
+        throw new import_provider4.UnsupportedFunctionalityError({
          functionality: "tool messages"
        });
      }
@@ -1050,24 +1084,24 @@ var OpenAICompletionLanguageModel = class {
     switch (type) {
       case "regular": {
         if ((_a = mode.tools) == null ? void 0 : _a.length) {
-          throw new
+          throw new import_provider5.UnsupportedFunctionalityError({
            functionality: "tools"
          });
        }
        if (mode.toolChoice) {
-          throw new
+          throw new import_provider5.UnsupportedFunctionalityError({
            functionality: "toolChoice"
          });
        }
        return { args: baseArgs, warnings };
      }
      case "object-json": {
-        throw new
+        throw new import_provider5.UnsupportedFunctionalityError({
          functionality: "object-json mode"
        });
      }
      case "object-tool": {
-        throw new
+        throw new import_provider5.UnsupportedFunctionalityError({
          functionality: "object-tool mode"
        });
      }
@@ -1106,23 +1140,25 @@ var OpenAICompletionLanguageModel = class {
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
       response: getResponseMetadata(response),
-      warnings
+      warnings,
+      request: { body: JSON.stringify(args) }
     };
   }
   async doStream(options) {
     const { args, warnings } = this.getArgs(options);
+    const body = {
+      ...args,
+      stream: true,
+      // only include stream_options when in strict compatibility mode:
+      stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
+    };
     const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
       url: this.config.url({
         path: "/completions",
         modelId: this.modelId
       }),
       headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), options.headers),
-      body
-        ...args,
-        stream: true,
-        // only include stream_options when in strict compatibility mode:
-        stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
-      },
+      body,
       failedResponseHandler: openaiFailedResponseHandler,
       successfulResponseHandler: (0, import_provider_utils4.createEventSourceResponseHandler)(
         openaiCompletionChunkSchema
@@ -1196,7 +1232,8 @@ var OpenAICompletionLanguageModel = class {
       ),
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
-      warnings
+      warnings,
+      request: { body: JSON.stringify(body) }
     };
   }
 };
@@ -1246,7 +1283,7 @@ var openaiCompletionChunkSchema = import_zod3.z.union([
 ]);
 
 // src/openai-embedding-model.ts
-var
+var import_provider6 = require("@ai-sdk/provider");
 var import_provider_utils5 = require("@ai-sdk/provider-utils");
 var import_zod4 = require("zod");
 var OpenAIEmbeddingModel = class {
@@ -1273,7 +1310,7 @@ var OpenAIEmbeddingModel = class {
     abortSignal
   }) {
     if (values.length > this.maxEmbeddingsPerCall) {
-      throw new
+      throw new import_provider6.TooManyEmbeddingValuesForCallError({
        provider: this.provider,
        modelId: this.modelId,
        maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,