@ai-sdk/openai 0.0.68 → 0.0.70
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +22 -0
- package/dist/index.js +150 -115
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +146 -109
- package/dist/index.mjs.map +1 -1
- package/internal/dist/index.js +150 -115
- package/internal/dist/index.js.map +1 -1
- package/internal/dist/index.mjs +146 -109
- package/internal/dist/index.mjs.map +1 -1
- package/package.json +3 -3
package/internal/dist/index.mjs
CHANGED
@@ -1,7 +1,7 @@
 // src/openai-chat-language-model.ts
 import {
   InvalidResponseDataError,
-  UnsupportedFunctionalityError as
+  UnsupportedFunctionalityError as UnsupportedFunctionalityError3
 } from "@ai-sdk/provider";
 import {
   combineHeaders,
@@ -219,6 +219,107 @@ function getResponseMetadata({
   };
 }

+// src/openai-prepare-tools.ts
+import {
+  UnsupportedFunctionalityError as UnsupportedFunctionalityError2
+} from "@ai-sdk/provider";
+function prepareTools({
+  mode,
+  useLegacyFunctionCalling = false,
+  structuredOutputs = false
+}) {
+  var _a;
+  const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
+  const toolWarnings = [];
+  if (tools == null) {
+    return { tools: void 0, tool_choice: void 0, toolWarnings };
+  }
+  const toolChoice = mode.toolChoice;
+  if (useLegacyFunctionCalling) {
+    const openaiFunctions = [];
+    for (const tool of tools) {
+      if (tool.type === "provider-defined") {
+        toolWarnings.push({ type: "unsupported-tool", tool });
+      } else {
+        openaiFunctions.push({
+          name: tool.name,
+          description: tool.description,
+          parameters: tool.parameters
+        });
+      }
+    }
+    if (toolChoice == null) {
+      return {
+        functions: openaiFunctions,
+        function_call: void 0,
+        toolWarnings
+      };
+    }
+    const type2 = toolChoice.type;
+    switch (type2) {
+      case "auto":
+      case "none":
+      case void 0:
+        return {
+          functions: openaiFunctions,
+          function_call: void 0,
+          toolWarnings
+        };
+      case "required":
+        throw new UnsupportedFunctionalityError2({
+          functionality: "useLegacyFunctionCalling and toolChoice: required"
+        });
+      default:
+        return {
+          functions: openaiFunctions,
+          function_call: { name: toolChoice.toolName },
+          toolWarnings
+        };
+    }
+  }
+  const openaiTools = [];
+  for (const tool of tools) {
+    if (tool.type === "provider-defined") {
+      toolWarnings.push({ type: "unsupported-tool", tool });
+    } else {
+      openaiTools.push({
+        type: "function",
+        function: {
+          name: tool.name,
+          description: tool.description,
+          parameters: tool.parameters,
+          strict: structuredOutputs === true ? true : void 0
+        }
+      });
+    }
+  }
+  if (toolChoice == null) {
+    return { tools: openaiTools, tool_choice: void 0, toolWarnings };
+  }
+  const type = toolChoice.type;
+  switch (type) {
+    case "auto":
+    case "none":
+    case "required":
+      return { tools: openaiTools, tool_choice: type, toolWarnings };
+    case "tool":
+      return {
+        tools: openaiTools,
+        tool_choice: {
+          type: "function",
+          function: {
+            name: toolChoice.toolName
+          }
+        },
+        toolWarnings
+      };
+    default: {
+      const _exhaustiveCheck = type;
+      throw new Error(`Unsupported tool choice type: ${_exhaustiveCheck}`);
+    }
+  }
+}
+
 // src/openai-chat-language-model.ts
 var OpenAIChatLanguageModel = class {
   constructor(modelId, settings, config) {
@@ -274,12 +375,12 @@ var OpenAIChatLanguageModel = class {
     }
     const useLegacyFunctionCalling = this.settings.useLegacyFunctionCalling;
     if (useLegacyFunctionCalling && this.settings.parallelToolCalls === true) {
-      throw new
+      throw new UnsupportedFunctionalityError3({
         functionality: "useLegacyFunctionCalling with parallelToolCalls"
       });
     }
     if (useLegacyFunctionCalling && this.settings.structuredOutputs === true) {
-      throw new
+      throw new UnsupportedFunctionalityError3({
         functionality: "structuredOutputs with useLegacyFunctionCalling"
       });
     }
@@ -320,16 +421,20 @@ var OpenAIChatLanguageModel = class {
     }
     switch (type) {
       case "regular": {
+        const { tools, tool_choice, functions, function_call, toolWarnings } = prepareTools({
+          mode,
+          useLegacyFunctionCalling,
+          structuredOutputs: this.settings.structuredOutputs
+        });
         return {
           args: {
             ...baseArgs,
-
-
-
-
-            })
+            tools,
+            tool_choice,
+            functions,
+            function_call
           },
-          warnings
+          warnings: [...warnings, ...toolWarnings]
         };
       }
       case "object-json": {
@@ -392,14 +497,14 @@ var OpenAIChatLanguageModel = class {
   }
   async doGenerate(options) {
     var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
-    const { args, warnings } = this.getArgs(options);
+    const { args: body, warnings } = this.getArgs(options);
     const { responseHeaders, value: response } = await postJsonToApi({
       url: this.config.url({
         path: "/chat/completions",
         modelId: this.modelId
       }),
       headers: combineHeaders(this.config.headers(), options.headers),
-      body
+      body,
       failedResponseHandler: openaiFailedResponseHandler,
       successfulResponseHandler: createJsonResponseHandler(
         openAIChatResponseSchema
@@ -407,7 +512,7 @@ var OpenAIChatLanguageModel = class {
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
-    const { messages: rawPrompt, ...rawSettings } =
+    const { messages: rawPrompt, ...rawSettings } = body;
     const choice = response.choices[0];
     let providerMetadata;
     if (((_b = (_a = response.usage) == null ? void 0 : _a.completion_tokens_details) == null ? void 0 : _b.reasoning_tokens) != null || ((_d = (_c = response.usage) == null ? void 0 : _c.prompt_tokens_details) == null ? void 0 : _d.cached_tokens) != null) {
@@ -444,6 +549,7 @@ var OpenAIChatLanguageModel = class {
       },
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
+      request: { body: JSON.stringify(body) },
       response: getResponseMetadata(response),
       warnings,
       logprobs: mapOpenAIChatLogProbsOutput(choice.logprobs),
@@ -488,18 +594,19 @@ var OpenAIChatLanguageModel = class {
     };
     }
     const { args, warnings } = this.getArgs(options);
+    const body = {
+      ...args,
+      stream: true,
+      // only include stream_options when in strict compatibility mode:
+      stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
+    };
     const { responseHeaders, value: response } = await postJsonToApi({
       url: this.config.url({
         path: "/chat/completions",
         modelId: this.modelId
       }),
       headers: combineHeaders(this.config.headers(), options.headers),
-      body
-        ...args,
-        stream: true,
-        // only include stream_options when in strict compatibility mode:
-        stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
-      },
+      body,
       failedResponseHandler: openaiFailedResponseHandler,
       successfulResponseHandler: createEventSourceResponseHandler(
         openaiChatChunkSchema
@@ -676,6 +783,7 @@ var OpenAIChatLanguageModel = class {
       ),
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
+      request: { body: JSON.stringify(body) },
       warnings
     };
   }
@@ -782,80 +890,6 @@ var openaiChatChunkSchema = z2.union([
   }),
   openAIErrorDataSchema
 ]);
-function prepareToolsAndToolChoice({
-  mode,
-  useLegacyFunctionCalling = false,
-  structuredOutputs = false
-}) {
-  var _a;
-  const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
-  if (tools == null) {
-    return { tools: void 0, tool_choice: void 0 };
-  }
-  const toolChoice = mode.toolChoice;
-  if (useLegacyFunctionCalling) {
-    const mappedFunctions = tools.map((tool) => ({
-      name: tool.name,
-      description: tool.description,
-      parameters: tool.parameters
-    }));
-    if (toolChoice == null) {
-      return { functions: mappedFunctions, function_call: void 0 };
-    }
-    const type2 = toolChoice.type;
-    switch (type2) {
-      case "auto":
-      case "none":
-      case void 0:
-        return {
-          functions: mappedFunctions,
-          function_call: void 0
-        };
-      case "required":
-        throw new UnsupportedFunctionalityError2({
-          functionality: "useLegacyFunctionCalling and toolChoice: required"
-        });
-      default:
-        return {
-          functions: mappedFunctions,
-          function_call: { name: toolChoice.toolName }
-        };
-    }
-  }
-  const mappedTools = tools.map((tool) => ({
-    type: "function",
-    function: {
-      name: tool.name,
-      description: tool.description,
-      parameters: tool.parameters,
-      strict: structuredOutputs === true ? true : void 0
-    }
-  }));
-  if (toolChoice == null) {
-    return { tools: mappedTools, tool_choice: void 0 };
-  }
-  const type = toolChoice.type;
-  switch (type) {
-    case "auto":
-    case "none":
-    case "required":
-      return { tools: mappedTools, tool_choice: type };
-    case "tool":
-      return {
-        tools: mappedTools,
-        tool_choice: {
-          type: "function",
-          function: {
-            name: toolChoice.toolName
-          }
-        }
-      };
-    default: {
-      const _exhaustiveCheck = type;
-      throw new Error(`Unsupported tool choice type: ${_exhaustiveCheck}`);
-    }
-  }
-}
 function isReasoningModel(modelId) {
   return modelId.startsWith("o1-");
 }
@@ -865,7 +899,7 @@ function isAudioModel(modelId) {

 // src/openai-completion-language-model.ts
 import {
-  UnsupportedFunctionalityError as
+  UnsupportedFunctionalityError as UnsupportedFunctionalityError5
 } from "@ai-sdk/provider";
 import {
   combineHeaders as combineHeaders2,
@@ -878,7 +912,7 @@ import { z as z3 } from "zod";
 // src/convert-to-openai-completion-prompt.ts
 import {
   InvalidPromptError,
-  UnsupportedFunctionalityError as
+  UnsupportedFunctionalityError as UnsupportedFunctionalityError4
 } from "@ai-sdk/provider";
 function convertToOpenAICompletionPrompt({
   prompt,
@@ -911,7 +945,7 @@ function convertToOpenAICompletionPrompt({
             return part.text;
           }
           case "image": {
-            throw new
+            throw new UnsupportedFunctionalityError4({
               functionality: "images"
             });
           }
@@ -930,7 +964,7 @@ ${userMessage}
             return part.text;
           }
           case "tool-call": {
-            throw new
+            throw new UnsupportedFunctionalityError4({
               functionality: "tool-call messages"
             });
           }
@@ -943,7 +977,7 @@ ${assistantMessage}
       break;
     }
     case "tool": {
-      throw new
+      throw new UnsupportedFunctionalityError4({
        functionality: "tool messages"
      });
    }
@@ -1044,24 +1078,24 @@ var OpenAICompletionLanguageModel = class {
     switch (type) {
       case "regular": {
         if ((_a = mode.tools) == null ? void 0 : _a.length) {
-          throw new
+          throw new UnsupportedFunctionalityError5({
            functionality: "tools"
          });
        }
        if (mode.toolChoice) {
-          throw new
+          throw new UnsupportedFunctionalityError5({
            functionality: "toolChoice"
          });
        }
        return { args: baseArgs, warnings };
      }
      case "object-json": {
-        throw new
+        throw new UnsupportedFunctionalityError5({
          functionality: "object-json mode"
        });
      }
      case "object-tool": {
-        throw new
+        throw new UnsupportedFunctionalityError5({
          functionality: "object-tool mode"
        });
      }
@@ -1100,23 +1134,25 @@ var OpenAICompletionLanguageModel = class {
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
       response: getResponseMetadata(response),
-      warnings
+      warnings,
+      request: { body: JSON.stringify(args) }
     };
   }
   async doStream(options) {
     const { args, warnings } = this.getArgs(options);
+    const body = {
+      ...args,
+      stream: true,
+      // only include stream_options when in strict compatibility mode:
+      stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
+    };
     const { responseHeaders, value: response } = await postJsonToApi2({
       url: this.config.url({
         path: "/completions",
         modelId: this.modelId
       }),
       headers: combineHeaders2(this.config.headers(), options.headers),
-      body
-        ...args,
-        stream: true,
-        // only include stream_options when in strict compatibility mode:
-        stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
-      },
+      body,
       failedResponseHandler: openaiFailedResponseHandler,
       successfulResponseHandler: createEventSourceResponseHandler2(
         openaiCompletionChunkSchema
@@ -1190,7 +1226,8 @@ var OpenAICompletionLanguageModel = class {
       ),
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
-      warnings
+      warnings,
+      request: { body: JSON.stringify(body) }
     };
   }
 };
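In short, this diff moves tool and tool_choice preparation into a shared prepareTools helper that also collects tool warnings, and it adds the serialized JSON request body to doGenerate/doStream results. A minimal sketch of observing both changes, assuming the provider's LanguageModelV1 doGenerate interface is called directly; the model id, prompt, and API key handling below are illustrative and not taken from this diff:

// Sketch only: assumes @ai-sdk/openai's createOpenAI export and the
// LanguageModelV1 doGenerate call shape; model id and prompt are placeholders.
import { createOpenAI } from "@ai-sdk/openai";

const openai = createOpenAI({ apiKey: process.env.OPENAI_API_KEY });

const result = await openai.chat("gpt-4o-mini").doGenerate({
  inputFormat: "prompt",
  mode: { type: "regular" },
  prompt: [{ role: "user", content: [{ type: "text", text: "Hello!" }] }]
});

// After this change, the exact JSON body sent to /chat/completions is
// surfaced on the result (see `request: { body: JSON.stringify(body) }` above).
console.log(result.request?.body);

// Warnings now also include entries collected by prepareTools, e.g.
// { type: "unsupported-tool", tool } for provider-defined tools.
console.log(result.warnings);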