@ai-sdk/openai 0.0.68 → 0.0.71
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +28 -0
- package/dist/index.js +153 -116
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +149 -110
- package/dist/index.mjs.map +1 -1
- package/internal/dist/index.js +153 -116
- package/internal/dist/index.js.map +1 -1
- package/internal/dist/index.mjs +149 -110
- package/internal/dist/index.mjs.map +1 -1
- package/package.json +3 -3
package/internal/dist/index.mjs
CHANGED
@@ -1,7 +1,7 @@
 // src/openai-chat-language-model.ts
 import {
   InvalidResponseDataError,
-  UnsupportedFunctionalityError as UnsupportedFunctionalityError2
+  UnsupportedFunctionalityError as UnsupportedFunctionalityError3
 } from "@ai-sdk/provider";
 import {
   combineHeaders,
@@ -219,6 +219,109 @@ function getResponseMetadata({
   };
 }
 
+// src/openai-prepare-tools.ts
+import {
+  UnsupportedFunctionalityError as UnsupportedFunctionalityError2
+} from "@ai-sdk/provider";
+function prepareTools({
+  mode,
+  useLegacyFunctionCalling = false,
+  structuredOutputs = false
+}) {
+  var _a;
+  const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
+  const toolWarnings = [];
+  if (tools == null) {
+    return { tools: void 0, tool_choice: void 0, toolWarnings };
+  }
+  const toolChoice = mode.toolChoice;
+  if (useLegacyFunctionCalling) {
+    const openaiFunctions = [];
+    for (const tool of tools) {
+      if (tool.type === "provider-defined") {
+        toolWarnings.push({ type: "unsupported-tool", tool });
+      } else {
+        openaiFunctions.push({
+          name: tool.name,
+          description: tool.description,
+          parameters: tool.parameters
+        });
+      }
+    }
+    if (toolChoice == null) {
+      return {
+        functions: openaiFunctions,
+        function_call: void 0,
+        toolWarnings
+      };
+    }
+    const type2 = toolChoice.type;
+    switch (type2) {
+      case "auto":
+      case "none":
+      case void 0:
+        return {
+          functions: openaiFunctions,
+          function_call: void 0,
+          toolWarnings
+        };
+      case "required":
+        throw new UnsupportedFunctionalityError2({
+          functionality: "useLegacyFunctionCalling and toolChoice: required"
+        });
+      default:
+        return {
+          functions: openaiFunctions,
+          function_call: { name: toolChoice.toolName },
+          toolWarnings
+        };
+    }
+  }
+  const openaiTools = [];
+  for (const tool of tools) {
+    if (tool.type === "provider-defined") {
+      toolWarnings.push({ type: "unsupported-tool", tool });
+    } else {
+      openaiTools.push({
+        type: "function",
+        function: {
+          name: tool.name,
+          description: tool.description,
+          parameters: tool.parameters,
+          strict: structuredOutputs === true ? true : void 0
+        }
+      });
+    }
+  }
+  if (toolChoice == null) {
+    return { tools: openaiTools, tool_choice: void 0, toolWarnings };
+  }
+  const type = toolChoice.type;
+  switch (type) {
+    case "auto":
+    case "none":
+    case "required":
+      return { tools: openaiTools, tool_choice: type, toolWarnings };
+    case "tool":
+      return {
+        tools: openaiTools,
+        tool_choice: {
+          type: "function",
+          function: {
+            name: toolChoice.toolName
+          }
+        },
+        toolWarnings
+      };
+    default: {
+      const _exhaustiveCheck = type;
+      throw new UnsupportedFunctionalityError2({
+        functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
+      });
+    }
+  }
+}
+
 // src/openai-chat-language-model.ts
 var OpenAIChatLanguageModel = class {
   constructor(modelId, settings, config) {
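The hunk above extracts the old `prepareToolsAndToolChoice` helper into `src/openai-prepare-tools.ts` as `prepareTools`, which now also collects `toolWarnings` for tools it cannot express (for example provider-defined tools) instead of dropping them silently; `getArgs` merges those warnings into the call warnings a few hunks below. A minimal standalone sketch of the shapes involved, assuming the LanguageModelV1 `"regular"` mode layout of this SDK generation — the `getWeather` tool and the provider-defined tool are purely hypothetical:

```js
// Hypothetical input in the assumed LanguageModelV1 "regular" mode shape
// (illustration only, not code from the package).
const mode = {
  type: "regular",
  tools: [
    {
      type: "function",
      name: "getWeather",
      description: "Get the weather for a city",
      parameters: { type: "object", properties: { city: { type: "string" } } }
    },
    // A provider-defined tool the Chat Completions API cannot express;
    // prepareTools now reports it as an "unsupported-tool" warning.
    { type: "provider-defined", id: "example.tool", name: "exampleTool", args: {} }
  ],
  toolChoice: { type: "tool", toolName: "getWeather" }
};

// Roughly what prepareTools({ mode }) returns for that input:
const expected = {
  tools: [
    {
      type: "function",
      function: {
        name: "getWeather",
        description: "Get the weather for a city",
        parameters: mode.tools[0].parameters,
        strict: void 0 // only set when structuredOutputs === true
      }
    }
  ],
  tool_choice: { type: "function", function: { name: "getWeather" } },
  toolWarnings: [{ type: "unsupported-tool", tool: mode.tools[1] }]
};

console.log(JSON.stringify(expected, null, 2));
```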
@@ -274,12 +377,12 @@ var OpenAIChatLanguageModel = class {
     }
     const useLegacyFunctionCalling = this.settings.useLegacyFunctionCalling;
     if (useLegacyFunctionCalling && this.settings.parallelToolCalls === true) {
-      throw new UnsupportedFunctionalityError2({
+      throw new UnsupportedFunctionalityError3({
         functionality: "useLegacyFunctionCalling with parallelToolCalls"
       });
     }
     if (useLegacyFunctionCalling && this.settings.structuredOutputs === true) {
-      throw new UnsupportedFunctionalityError2({
+      throw new UnsupportedFunctionalityError3({
         functionality: "structuredOutputs with useLegacyFunctionCalling"
       });
     }
@@ -320,23 +423,27 @@ var OpenAIChatLanguageModel = class {
     }
     switch (type) {
       case "regular": {
+        const { tools, tool_choice, functions, function_call, toolWarnings } = prepareTools({
+          mode,
+          useLegacyFunctionCalling,
+          structuredOutputs: this.settings.structuredOutputs
+        });
         return {
           args: {
             ...baseArgs,
-            ...prepareToolsAndToolChoice({
-              mode,
-              useLegacyFunctionCalling,
-              structuredOutputs: this.settings.structuredOutputs
-            })
+            tools,
+            tool_choice,
+            functions,
+            function_call
           },
-          warnings
+          warnings: [...warnings, ...toolWarnings]
         };
       }
       case "object-json": {
         return {
           args: {
             ...baseArgs,
-            response_format: this.settings.structuredOutputs === true ? {
+            response_format: this.settings.structuredOutputs === true && mode.schema != null ? {
               type: "json_schema",
               json_schema: {
                 schema: mode.schema,
@@ -392,14 +499,14 @@ var OpenAIChatLanguageModel = class {
   }
   async doGenerate(options) {
     var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
-    const { args, warnings } = this.getArgs(options);
+    const { args: body, warnings } = this.getArgs(options);
     const { responseHeaders, value: response } = await postJsonToApi({
       url: this.config.url({
         path: "/chat/completions",
         modelId: this.modelId
       }),
       headers: combineHeaders(this.config.headers(), options.headers),
-      body: args,
+      body,
       failedResponseHandler: openaiFailedResponseHandler,
       successfulResponseHandler: createJsonResponseHandler(
         openAIChatResponseSchema
@@ -407,7 +514,7 @@
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
-    const { messages: rawPrompt, ...rawSettings } = args;
+    const { messages: rawPrompt, ...rawSettings } = body;
     const choice = response.choices[0];
     let providerMetadata;
     if (((_b = (_a = response.usage) == null ? void 0 : _a.completion_tokens_details) == null ? void 0 : _b.reasoning_tokens) != null || ((_d = (_c = response.usage) == null ? void 0 : _c.prompt_tokens_details) == null ? void 0 : _d.cached_tokens) != null) {
@@ -444,6 +551,7 @@
       },
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
+      request: { body: JSON.stringify(body) },
       response: getResponseMetadata(response),
       warnings,
       logprobs: mapOpenAIChatLogProbsOutput(choice.logprobs),
@@ -488,18 +596,19 @@
       };
     }
     const { args, warnings } = this.getArgs(options);
+    const body = {
+      ...args,
+      stream: true,
+      // only include stream_options when in strict compatibility mode:
+      stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
+    };
     const { responseHeaders, value: response } = await postJsonToApi({
       url: this.config.url({
         path: "/chat/completions",
         modelId: this.modelId
       }),
       headers: combineHeaders(this.config.headers(), options.headers),
-      body: {
-        ...args,
-        stream: true,
-        // only include stream_options when in strict compatibility mode:
-        stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
-      },
+      body,
       failedResponseHandler: openaiFailedResponseHandler,
       successfulResponseHandler: createEventSourceResponseHandler(
         openaiChatChunkSchema
@@ -676,6 +785,7 @@ var OpenAIChatLanguageModel = class {
       ),
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
+      request: { body: JSON.stringify(body) },
       warnings
     };
   }
@@ -782,80 +892,6 @@ var openaiChatChunkSchema = z2.union([
   }),
   openAIErrorDataSchema
 ]);
-function prepareToolsAndToolChoice({
-  mode,
-  useLegacyFunctionCalling = false,
-  structuredOutputs = false
-}) {
-  var _a;
-  const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
-  if (tools == null) {
-    return { tools: void 0, tool_choice: void 0 };
-  }
-  const toolChoice = mode.toolChoice;
-  if (useLegacyFunctionCalling) {
-    const mappedFunctions = tools.map((tool) => ({
-      name: tool.name,
-      description: tool.description,
-      parameters: tool.parameters
-    }));
-    if (toolChoice == null) {
-      return { functions: mappedFunctions, function_call: void 0 };
-    }
-    const type2 = toolChoice.type;
-    switch (type2) {
-      case "auto":
-      case "none":
-      case void 0:
-        return {
-          functions: mappedFunctions,
-          function_call: void 0
-        };
-      case "required":
-        throw new UnsupportedFunctionalityError2({
-          functionality: "useLegacyFunctionCalling and toolChoice: required"
-        });
-      default:
-        return {
-          functions: mappedFunctions,
-          function_call: { name: toolChoice.toolName }
-        };
-    }
-  }
-  const mappedTools = tools.map((tool) => ({
-    type: "function",
-    function: {
-      name: tool.name,
-      description: tool.description,
-      parameters: tool.parameters,
-      strict: structuredOutputs === true ? true : void 0
-    }
-  }));
-  if (toolChoice == null) {
-    return { tools: mappedTools, tool_choice: void 0 };
-  }
-  const type = toolChoice.type;
-  switch (type) {
-    case "auto":
-    case "none":
-    case "required":
-      return { tools: mappedTools, tool_choice: type };
-    case "tool":
-      return {
-        tools: mappedTools,
-        tool_choice: {
-          type: "function",
-          function: {
-            name: toolChoice.toolName
-          }
-        }
-      };
-    default: {
-      const _exhaustiveCheck = type;
-      throw new Error(`Unsupported tool choice type: ${_exhaustiveCheck}`);
-    }
-  }
-}
 function isReasoningModel(modelId) {
   return modelId.startsWith("o1-");
 }
@@ -865,7 +901,7 @@ function isAudioModel(modelId) {
 
 // src/openai-completion-language-model.ts
 import {
-  UnsupportedFunctionalityError as UnsupportedFunctionalityError4
+  UnsupportedFunctionalityError as UnsupportedFunctionalityError5
 } from "@ai-sdk/provider";
 import {
   combineHeaders as combineHeaders2,
@@ -878,7 +914,7 @@ import { z as z3 } from "zod";
|
|
|
878
914
|
// src/convert-to-openai-completion-prompt.ts
|
|
879
915
|
import {
|
|
880
916
|
InvalidPromptError,
|
|
881
|
-
UnsupportedFunctionalityError as
|
|
917
|
+
UnsupportedFunctionalityError as UnsupportedFunctionalityError4
|
|
882
918
|
} from "@ai-sdk/provider";
|
|
883
919
|
function convertToOpenAICompletionPrompt({
|
|
884
920
|
prompt,
|
|
@@ -911,7 +947,7 @@ function convertToOpenAICompletionPrompt({
             return part.text;
           }
           case "image": {
-            throw new UnsupportedFunctionalityError3({
+            throw new UnsupportedFunctionalityError4({
              functionality: "images"
            });
          }
@@ -930,7 +966,7 @@ ${userMessage}
             return part.text;
           }
           case "tool-call": {
-            throw new UnsupportedFunctionalityError3({
+            throw new UnsupportedFunctionalityError4({
              functionality: "tool-call messages"
            });
          }
@@ -943,7 +979,7 @@ ${assistantMessage}
         break;
       }
       case "tool": {
-        throw new UnsupportedFunctionalityError3({
+        throw new UnsupportedFunctionalityError4({
          functionality: "tool messages"
        });
      }
@@ -1044,24 +1080,24 @@ var OpenAICompletionLanguageModel = class {
     switch (type) {
       case "regular": {
         if ((_a = mode.tools) == null ? void 0 : _a.length) {
-          throw new UnsupportedFunctionalityError4({
+          throw new UnsupportedFunctionalityError5({
             functionality: "tools"
           });
         }
         if (mode.toolChoice) {
-          throw new UnsupportedFunctionalityError4({
+          throw new UnsupportedFunctionalityError5({
             functionality: "toolChoice"
           });
         }
         return { args: baseArgs, warnings };
       }
       case "object-json": {
-        throw new UnsupportedFunctionalityError4({
+        throw new UnsupportedFunctionalityError5({
           functionality: "object-json mode"
         });
       }
       case "object-tool": {
-        throw new UnsupportedFunctionalityError4({
+        throw new UnsupportedFunctionalityError5({
           functionality: "object-tool mode"
         });
       }
@@ -1100,23 +1136,25 @@ var OpenAICompletionLanguageModel = class {
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
       response: getResponseMetadata(response),
-      warnings
+      warnings,
+      request: { body: JSON.stringify(args) }
     };
   }
   async doStream(options) {
     const { args, warnings } = this.getArgs(options);
+    const body = {
+      ...args,
+      stream: true,
+      // only include stream_options when in strict compatibility mode:
+      stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
+    };
     const { responseHeaders, value: response } = await postJsonToApi2({
       url: this.config.url({
         path: "/completions",
         modelId: this.modelId
       }),
       headers: combineHeaders2(this.config.headers(), options.headers),
-      body: {
-        ...args,
-        stream: true,
-        // only include stream_options when in strict compatibility mode:
-        stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
-      },
+      body,
       failedResponseHandler: openaiFailedResponseHandler,
       successfulResponseHandler: createEventSourceResponseHandler2(
         openaiCompletionChunkSchema
@@ -1190,7 +1228,8 @@ var OpenAICompletionLanguageModel = class {
       ),
       rawCall: { rawPrompt, rawSettings },
       rawResponse: { headers: responseHeaders },
-      warnings
+      warnings,
+      request: { body: JSON.stringify(body) }
     };
   }
 };
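Across both the chat and completion models, `doGenerate` and `doStream` now also return the serialized request body as `request: { body }`. A rough sketch of reading it through the provider, assuming the public `createOpenAI` entry point and the LanguageModelV1 `doGenerate` call shape of this SDK generation; the model id, prompt text, and environment variable are placeholders:

```js
import { createOpenAI } from "@ai-sdk/openai";

const openai = createOpenAI({ apiKey: process.env.OPENAI_API_KEY });
const model = openai.chat("gpt-4o-mini"); // placeholder model id

const result = await model.doGenerate({
  inputFormat: "prompt",
  mode: { type: "regular" },
  prompt: [{ role: "user", content: [{ type: "text", text: "Hello!" }] }]
});

// New in this version range: the JSON body that was POSTed to /chat/completions,
// handy for debugging and telemetry.
console.log(result.request?.body);
```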