@ai-sdk/openai 0.0.68 → 0.0.70
This diff compares the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
- package/CHANGELOG.md +22 -0
- package/dist/index.js +150 -115
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +146 -109
- package/dist/index.mjs.map +1 -1
- package/internal/dist/index.js +150 -115
- package/internal/dist/index.js.map +1 -1
- package/internal/dist/index.mjs +146 -109
- package/internal/dist/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/index.mjs (CHANGED)
```diff
@@ -4,7 +4,7 @@ import { loadApiKey, withoutTrailingSlash } from "@ai-sdk/provider-utils";
 // src/openai-chat-language-model.ts
 import {
   InvalidResponseDataError,
-  UnsupportedFunctionalityError as UnsupportedFunctionalityError2
+  UnsupportedFunctionalityError as UnsupportedFunctionalityError3
 } from "@ai-sdk/provider";
 import {
   combineHeaders,
@@ -222,6 +222,107 @@ function getResponseMetadata({
   };
 }
 
+// src/openai-prepare-tools.ts
+import {
+  UnsupportedFunctionalityError as UnsupportedFunctionalityError2
+} from "@ai-sdk/provider";
+function prepareTools({
+  mode,
+  useLegacyFunctionCalling = false,
+  structuredOutputs = false
+}) {
+  var _a;
+  const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
+  const toolWarnings = [];
+  if (tools == null) {
+    return { tools: void 0, tool_choice: void 0, toolWarnings };
+  }
+  const toolChoice = mode.toolChoice;
+  if (useLegacyFunctionCalling) {
+    const openaiFunctions = [];
+    for (const tool of tools) {
+      if (tool.type === "provider-defined") {
+        toolWarnings.push({ type: "unsupported-tool", tool });
+      } else {
+        openaiFunctions.push({
+          name: tool.name,
+          description: tool.description,
+          parameters: tool.parameters
+        });
+      }
+    }
+    if (toolChoice == null) {
+      return {
+        functions: openaiFunctions,
+        function_call: void 0,
+        toolWarnings
+      };
+    }
+    const type2 = toolChoice.type;
+    switch (type2) {
+      case "auto":
+      case "none":
+      case void 0:
+        return {
+          functions: openaiFunctions,
+          function_call: void 0,
+          toolWarnings
+        };
+      case "required":
+        throw new UnsupportedFunctionalityError2({
+          functionality: "useLegacyFunctionCalling and toolChoice: required"
+        });
+      default:
+        return {
+          functions: openaiFunctions,
+          function_call: { name: toolChoice.toolName },
+          toolWarnings
+        };
+    }
+  }
+  const openaiTools = [];
+  for (const tool of tools) {
+    if (tool.type === "provider-defined") {
+      toolWarnings.push({ type: "unsupported-tool", tool });
+    } else {
+      openaiTools.push({
+        type: "function",
+        function: {
+          name: tool.name,
+          description: tool.description,
+          parameters: tool.parameters,
+          strict: structuredOutputs === true ? true : void 0
+        }
+      });
+    }
+  }
+  if (toolChoice == null) {
+    return { tools: openaiTools, tool_choice: void 0, toolWarnings };
+  }
+  const type = toolChoice.type;
+  switch (type) {
+    case "auto":
+    case "none":
+    case "required":
+      return { tools: openaiTools, tool_choice: type, toolWarnings };
+    case "tool":
+      return {
+        tools: openaiTools,
+        tool_choice: {
+          type: "function",
+          function: {
+            name: toolChoice.toolName
+          }
+        },
+        toolWarnings
+      };
+    default: {
+      const _exhaustiveCheck = type;
+      throw new Error(`Unsupported tool choice type: ${_exhaustiveCheck}`);
+    }
+  }
+}
+
 // src/openai-chat-language-model.ts
 var OpenAIChatLanguageModel = class {
   constructor(modelId, settings, config) {
@@ -277,12 +378,12 @@ var OpenAIChatLanguageModel = class {
     }
     const useLegacyFunctionCalling = this.settings.useLegacyFunctionCalling;
     if (useLegacyFunctionCalling && this.settings.parallelToolCalls === true) {
-      throw new UnsupportedFunctionalityError2({
+      throw new UnsupportedFunctionalityError3({
         functionality: "useLegacyFunctionCalling with parallelToolCalls"
       });
     }
     if (useLegacyFunctionCalling && this.settings.structuredOutputs === true) {
-      throw new UnsupportedFunctionalityError2({
+      throw new UnsupportedFunctionalityError3({
         functionality: "structuredOutputs with useLegacyFunctionCalling"
       });
     }
@@ -323,16 +424,20 @@ var OpenAIChatLanguageModel = class {
     }
     switch (type) {
       case "regular": {
+        const { tools, tool_choice, functions, function_call, toolWarnings } = prepareTools({
+          mode,
+          useLegacyFunctionCalling,
+          structuredOutputs: this.settings.structuredOutputs
+        });
         return {
           args: {
             ...baseArgs,
-            ...prepareToolsAndToolChoice({
-              mode,
-              useLegacyFunctionCalling,
-              structuredOutputs: this.settings.structuredOutputs
-            })
+            tools,
+            tool_choice,
+            functions,
+            function_call
           },
-          warnings
+          warnings: [...warnings, ...toolWarnings]
         };
       }
       case "object-json": {
```
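The new `prepareTools` helper (replacing `prepareToolsAndToolChoice`, which is removed further down) now returns `toolWarnings` for tools it cannot map, and `getArgs` merges those into the call warnings instead of dropping them. Below is a hedged sketch of how this surfaces through the provider's `LanguageModelV1` interface; the `getWeather` tool, the model id, and the prompt are made up for illustration, and an `OPENAI_API_KEY` environment variable is assumed.

```js
// Illustrative sketch only (not from the package docs). Drives doGenerate directly
// to show how mode.tools / mode.toolChoice are mapped by prepareTools.
import { createOpenAI } from "@ai-sdk/openai";

const openai = createOpenAI({ apiKey: process.env.OPENAI_API_KEY });
// structuredOutputs: true makes prepareTools set strict: true on each function tool.
const model = openai.chat("gpt-4o-mini", { structuredOutputs: true });

const result = await model.doGenerate({
  inputFormat: "prompt",
  mode: {
    type: "regular",
    tools: [
      {
        type: "function",
        name: "getWeather", // hypothetical tool, for illustration
        description: "Look up the weather for a city",
        parameters: {
          type: "object",
          properties: { city: { type: "string" } },
          required: ["city"]
        }
      }
    ],
    toolChoice: { type: "tool", toolName: "getWeather" }
  },
  prompt: [{ role: "user", content: [{ type: "text", text: "Weather in Berlin?" }] }]
});

// Per the hunk above, the request carried
//   tools: [{ type: "function", function: { name: "getWeather", ..., strict: true } }]
//   tool_choice: { type: "function", function: { name: "getWeather" } }
// and any provider-defined tools would have been reported as
//   { type: "unsupported-tool", tool } entries here:
console.log(result.warnings);
```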
```diff
@@ -395,14 +500,14 @@ var OpenAIChatLanguageModel = class {
   }
   async doGenerate(options) {
     var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
-    const { args, warnings } = this.getArgs(options);
+    const { args: body, warnings } = this.getArgs(options);
     const { responseHeaders, value: response } = await postJsonToApi({
       url: this.config.url({
         path: "/chat/completions",
         modelId: this.modelId
       }),
       headers: combineHeaders(this.config.headers(), options.headers),
-      body: args,
+      body,
       failedResponseHandler: openaiFailedResponseHandler,
       successfulResponseHandler: createJsonResponseHandler(
         openAIChatResponseSchema
@@ -410,7 +515,7 @@ var OpenAIChatLanguageModel = class {
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
-    const { messages: rawPrompt, ...rawSettings } = args;
+    const { messages: rawPrompt, ...rawSettings } = body;
     const choice = response.choices[0];
     let providerMetadata;
     if (((_b = (_a = response.usage) == null ? void 0 : _a.completion_tokens_details) == null ? void 0 : _b.reasoning_tokens) != null || ((_d = (_c = response.usage) == null ? void 0 : _c.prompt_tokens_details) == null ? void 0 : _d.cached_tokens) != null) {
@@ -447,6 +552,7 @@ var OpenAIChatLanguageModel = class {
       },
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
+      request: { body: JSON.stringify(body) },
       response: getResponseMetadata(response),
       warnings,
       logprobs: mapOpenAIChatLogProbsOutput(choice.logprobs),
@@ -491,18 +597,19 @@ var OpenAIChatLanguageModel = class {
       };
     }
     const { args, warnings } = this.getArgs(options);
+    const body = {
+      ...args,
+      stream: true,
+      // only include stream_options when in strict compatibility mode:
+      stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
+    };
     const { responseHeaders, value: response } = await postJsonToApi({
       url: this.config.url({
         path: "/chat/completions",
         modelId: this.modelId
       }),
       headers: combineHeaders(this.config.headers(), options.headers),
-      body: {
-        ...args,
-        stream: true,
-        // only include stream_options when in strict compatibility mode:
-        stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
-      },
+      body,
       failedResponseHandler: openaiFailedResponseHandler,
       successfulResponseHandler: createEventSourceResponseHandler(
         openaiChatChunkSchema
@@ -679,6 +786,7 @@ var OpenAIChatLanguageModel = class {
       ),
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
+      request: { body: JSON.stringify(body) },
       warnings
     };
   }
```
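With this refactor, `doGenerate` and `doStream` build the outgoing payload once and report it back as `request.body` (a JSON string), while `stream_options: { include_usage: true }` is only sent when the provider is configured with `compatibility: "strict"`. A minimal sketch against the raw `LanguageModelV1` interface, assuming an `OPENAI_API_KEY` and an arbitrary model id:

```js
// Illustrative sketch only. Shows the request body that doStream now reports,
// and the stream_options toggle tied to compatibility: "strict".
import { createOpenAI } from "@ai-sdk/openai";

const model = createOpenAI({
  apiKey: process.env.OPENAI_API_KEY,
  compatibility: "strict" // opt in to stream_options: { include_usage: true }
}).chat("gpt-4o-mini");

const { stream, request } = await model.doStream({
  inputFormat: "prompt",
  mode: { type: "regular" },
  prompt: [{ role: "user", content: [{ type: "text", text: "Hello!" }] }]
});

console.log(JSON.parse(request.body).stream_options); // { include_usage: true }

// Consume the stream via a reader and print the text deltas.
const reader = stream.getReader();
for (;;) {
  const { done, value } = await reader.read();
  if (done) break;
  if (value.type === "text-delta") process.stdout.write(value.textDelta);
}
```

The non-streaming `doGenerate` result carries the same `request.body` field, which makes it straightforward to log or replay the exact `/chat/completions` payload.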
```diff
@@ -785,80 +893,6 @@ var openaiChatChunkSchema = z2.union([
   }),
   openAIErrorDataSchema
 ]);
-function prepareToolsAndToolChoice({
-  mode,
-  useLegacyFunctionCalling = false,
-  structuredOutputs = false
-}) {
-  var _a;
-  const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
-  if (tools == null) {
-    return { tools: void 0, tool_choice: void 0 };
-  }
-  const toolChoice = mode.toolChoice;
-  if (useLegacyFunctionCalling) {
-    const mappedFunctions = tools.map((tool) => ({
-      name: tool.name,
-      description: tool.description,
-      parameters: tool.parameters
-    }));
-    if (toolChoice == null) {
-      return { functions: mappedFunctions, function_call: void 0 };
-    }
-    const type2 = toolChoice.type;
-    switch (type2) {
-      case "auto":
-      case "none":
-      case void 0:
-        return {
-          functions: mappedFunctions,
-          function_call: void 0
-        };
-      case "required":
-        throw new UnsupportedFunctionalityError2({
-          functionality: "useLegacyFunctionCalling and toolChoice: required"
-        });
-      default:
-        return {
-          functions: mappedFunctions,
-          function_call: { name: toolChoice.toolName }
-        };
-    }
-  }
-  const mappedTools = tools.map((tool) => ({
-    type: "function",
-    function: {
-      name: tool.name,
-      description: tool.description,
-      parameters: tool.parameters,
-      strict: structuredOutputs === true ? true : void 0
-    }
-  }));
-  if (toolChoice == null) {
-    return { tools: mappedTools, tool_choice: void 0 };
-  }
-  const type = toolChoice.type;
-  switch (type) {
-    case "auto":
-    case "none":
-    case "required":
-      return { tools: mappedTools, tool_choice: type };
-    case "tool":
-      return {
-        tools: mappedTools,
-        tool_choice: {
-          type: "function",
-          function: {
-            name: toolChoice.toolName
-          }
-        }
-      };
-    default: {
-      const _exhaustiveCheck = type;
-      throw new Error(`Unsupported tool choice type: ${_exhaustiveCheck}`);
-    }
-  }
-}
 function isReasoningModel(modelId) {
   return modelId.startsWith("o1-");
 }
@@ -868,7 +902,7 @@ function isAudioModel(modelId) {
 
 // src/openai-completion-language-model.ts
 import {
-  UnsupportedFunctionalityError as
+  UnsupportedFunctionalityError as UnsupportedFunctionalityError5
 } from "@ai-sdk/provider";
 import {
   combineHeaders as combineHeaders2,
@@ -881,7 +915,7 @@ import { z as z3 } from "zod";
 // src/convert-to-openai-completion-prompt.ts
 import {
   InvalidPromptError,
-  UnsupportedFunctionalityError as
+  UnsupportedFunctionalityError as UnsupportedFunctionalityError4
 } from "@ai-sdk/provider";
 function convertToOpenAICompletionPrompt({
   prompt,
@@ -914,7 +948,7 @@ function convertToOpenAICompletionPrompt({
             return part.text;
           }
           case "image": {
-            throw new
+            throw new UnsupportedFunctionalityError4({
               functionality: "images"
             });
           }
@@ -933,7 +967,7 @@ ${userMessage}
             return part.text;
           }
           case "tool-call": {
-            throw new
+            throw new UnsupportedFunctionalityError4({
               functionality: "tool-call messages"
             });
          }
@@ -946,7 +980,7 @@ ${assistantMessage}
         break;
       }
       case "tool": {
-        throw new
+        throw new UnsupportedFunctionalityError4({
           functionality: "tool messages"
         });
       }
@@ -1047,24 +1081,24 @@ var OpenAICompletionLanguageModel = class {
     switch (type) {
       case "regular": {
         if ((_a = mode.tools) == null ? void 0 : _a.length) {
-          throw new
+          throw new UnsupportedFunctionalityError5({
             functionality: "tools"
           });
         }
         if (mode.toolChoice) {
-          throw new
+          throw new UnsupportedFunctionalityError5({
             functionality: "toolChoice"
          });
        }
        return { args: baseArgs, warnings };
      }
      case "object-json": {
-        throw new
+        throw new UnsupportedFunctionalityError5({
          functionality: "object-json mode"
        });
      }
      case "object-tool": {
-        throw new
+        throw new UnsupportedFunctionalityError5({
          functionality: "object-tool mode"
        });
      }
@@ -1103,23 +1137,25 @@ var OpenAICompletionLanguageModel = class {
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
       response: getResponseMetadata(response),
-      warnings
+      warnings,
+      request: { body: JSON.stringify(args) }
     };
   }
   async doStream(options) {
     const { args, warnings } = this.getArgs(options);
+    const body = {
+      ...args,
+      stream: true,
+      // only include stream_options when in strict compatibility mode:
+      stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
+    };
     const { responseHeaders, value: response } = await postJsonToApi2({
       url: this.config.url({
         path: "/completions",
         modelId: this.modelId
       }),
       headers: combineHeaders2(this.config.headers(), options.headers),
-      body: {
-        ...args,
-        stream: true,
-        // only include stream_options when in strict compatibility mode:
-        stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
-      },
+      body,
       failedResponseHandler: openaiFailedResponseHandler,
       successfulResponseHandler: createEventSourceResponseHandler2(
         openaiCompletionChunkSchema
@@ -1193,7 +1229,8 @@ var OpenAICompletionLanguageModel = class {
       ),
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
-      warnings
+      warnings,
+      request: { body: JSON.stringify(body) }
     };
   }
 };
```
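The completion model gets the same treatment: its `doGenerate` reports `request.body` as `JSON.stringify(args)`, and its `doStream` now assembles the streaming payload up front. A hedged sketch using the completion factory; the model id is arbitrary and an `OPENAI_API_KEY` is assumed.

```js
// Illustrative sketch only: the completion model's doGenerate now also reports
// the exact JSON payload it posted to /completions.
import { createOpenAI } from "@ai-sdk/openai";

const model = createOpenAI({ apiKey: process.env.OPENAI_API_KEY })
  .completion("gpt-3.5-turbo-instruct");

const { text, request } = await model.doGenerate({
  inputFormat: "prompt",
  mode: { type: "regular" },
  prompt: [{ role: "user", content: [{ type: "text", text: "Say hello." }] }]
});

console.log(text);
console.log(JSON.parse(request.body)); // { model: "gpt-3.5-turbo-instruct", prompt: "...", ... }
```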