@ai-sdk/openai 2.0.0-canary.1 → 2.0.0-canary.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +24 -0
- package/dist/index.d.mts +7 -1
- package/dist/index.d.ts +7 -1
- package/dist/index.js +507 -465
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +474 -428
- package/dist/index.mjs.map +1 -1
- package/internal/dist/index.d.mts +67 -2
- package/internal/dist/index.d.ts +67 -2
- package/internal/dist/index.js +498 -462
- package/internal/dist/index.js.map +1 -1
- package/internal/dist/index.mjs +467 -428
- package/internal/dist/index.mjs.map +1 -1
- package/package.json +4 -4
package/internal/dist/index.mjs
CHANGED
@@ -17,7 +17,6 @@ import { z as z2 } from "zod";
 import {
   UnsupportedFunctionalityError
 } from "@ai-sdk/provider";
-import { convertUint8ArrayToBase64 } from "@ai-sdk/provider-utils";
 function convertToOpenAIChatMessages({
   prompt,
   useLegacyFunctionCalling = false,
@@ -61,55 +60,65 @@ function convertToOpenAIChatMessages({
         messages.push({
           role: "user",
           content: content.map((part, index) => {
-            var _a, _b, _c
+            var _a, _b, _c;
             switch (part.type) {
               case "text": {
                 return { type: "text", text: part.text };
               }
-              case "image": {
-                return {
-                  type: "image_url",
-                  image_url: {
-                    url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${convertUint8ArrayToBase64(part.image)}`,
-                    // OpenAI specific extension: image detail
-                    detail: (_c = (_b = part.providerMetadata) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
-                  }
-                };
-              }
               case "file": {
-                if (part.
-                …
-                  type: "input_audio",
-                  input_audio: { data: part.data, format: "mp3" }
-                };
+                if (part.mediaType.startsWith("image/")) {
+                  const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
+                  return {
+                    type: "image_url",
+                    image_url: {
+                      url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${part.data}`,
+                      // OpenAI specific extension: image detail
+                      detail: (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.imageDetail
+                    }
+                  };
+                } else if (part.mediaType.startsWith("audio/")) {
+                  if (part.data instanceof URL) {
+                    throw new UnsupportedFunctionalityError({
+                      functionality: "audio file parts with URLs"
+                    });
                 }
-                …
+                  switch (part.mediaType) {
+                    case "audio/wav": {
+                      return {
+                        type: "input_audio",
+                        input_audio: { data: part.data, format: "wav" }
+                      };
+                    }
+                    case "audio/mp3":
+                    case "audio/mpeg": {
+                      return {
+                        type: "input_audio",
+                        input_audio: { data: part.data, format: "mp3" }
+                      };
+                    }
+                    default: {
+                      throw new UnsupportedFunctionalityError({
+                        functionality: `audio content parts with media type ${part.mediaType}`
+                      });
+                    }
                 }
-                …
+                } else if (part.mediaType === "application/pdf") {
+                  if (part.data instanceof URL) {
                     throw new UnsupportedFunctionalityError({
-                      functionality:
+                      functionality: "PDF file parts with URLs"
                     });
                   }
+                  return {
+                    type: "file",
+                    file: {
+                      filename: (_c = part.filename) != null ? _c : `part-${index}.pdf`,
+                      file_data: `data:application/pdf;base64,${part.data}`
+                    }
+                  };
+                } else {
+                  throw new UnsupportedFunctionalityError({
+                    functionality: `file part media type ${part.mediaType}`
+                  });
                 }
               }
             }
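Note: the chat conversion above now routes images, audio, and PDFs through a single `file` part keyed on `part.mediaType` and `part.data`, replacing the old `image` part type. A minimal sketch of a prompt fragment this code expects (field names come from the code above; the surrounding message shape and the `base64Audio` variable are assumptions for illustration):

// Hypothetical user message using the file-part shape handled above.
const userMessage = {
  role: "user",
  content: [
    { type: "text", text: "Transcribe and summarize this clip." },
    {
      type: "file",
      mediaType: "audio/mpeg", // mapped to input_audio with format "mp3"
      data: base64Audio // base64-encoded string; URLs throw for audio parts
    }
  ]
};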
@@ -253,17 +262,16 @@ import {
   UnsupportedFunctionalityError as UnsupportedFunctionalityError2
 } from "@ai-sdk/provider";
 function prepareTools({
-  …
+  tools,
+  toolChoice,
   useLegacyFunctionCalling = false,
   structuredOutputs
 }) {
-  …
-  const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
+  tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
   const toolWarnings = [];
   if (tools == null) {
-    return { tools: void 0,
+    return { tools: void 0, toolChoice: void 0, toolWarnings };
   }
-  const toolChoice = mode.toolChoice;
   if (useLegacyFunctionCalling) {
     const openaiFunctions = [];
     for (const tool of tools) {
@@ -323,18 +331,18 @@ function prepareTools({
     }
   }
   if (toolChoice == null) {
-    return { tools: openaiTools,
+    return { tools: openaiTools, toolChoice: void 0, toolWarnings };
   }
   const type = toolChoice.type;
   switch (type) {
     case "auto":
     case "none":
     case "required":
-      return { tools: openaiTools,
+      return { tools: openaiTools, toolChoice: type, toolWarnings };
     case "tool":
       return {
         tools: openaiTools,
-        …
+        toolChoice: {
           type: "function",
           function: {
             name: toolChoice.toolName
@@ -345,7 +353,7 @@ function prepareTools({
     default: {
       const _exhaustiveCheck = type;
       throw new UnsupportedFunctionalityError2({
-        functionality: `
+        functionality: `tool choice type: ${_exhaustiveCheck}`
       });
     }
   }
@@ -376,7 +384,6 @@ var OpenAIChatLanguageModel = class {
     return !this.settings.downloadImages;
   }
   getArgs({
-    mode,
     prompt,
     maxTokens,
     temperature,
@@ -387,10 +394,11 @@ var OpenAIChatLanguageModel = class {
     stopSequences,
     responseFormat,
     seed,
-    …
+    tools,
+    toolChoice,
+    providerOptions
   }) {
-    var _a, _b, _c, _d, _e, _f, _g
-    const type = mode.type;
+    var _a, _b, _c, _d, _e, _f, _g;
     const warnings = [];
     if (topK != null) {
       warnings.push({
@@ -439,6 +447,7 @@ var OpenAIChatLanguageModel = class {
       top_p: topP,
       frequency_penalty: frequencyPenalty,
       presence_penalty: presencePenalty,
+      // TODO improve below:
       response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? this.supportsStructuredOutputs && responseFormat.schema != null ? {
         type: "json_schema",
         json_schema: {
@@ -452,11 +461,11 @@ var OpenAIChatLanguageModel = class {
       seed,
       // openai specific settings:
       // TODO remove in next major version; we auto-map maxTokens now
-      max_completion_tokens: (_b =
-      store: (_c =
-      metadata: (_d =
-      prediction: (_e =
-      reasoning_effort: (_g = (_f =
+      max_completion_tokens: (_b = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _b.maxCompletionTokens,
+      store: (_c = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _c.store,
+      metadata: (_d = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _d.metadata,
+      prediction: (_e = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _e.prediction,
+      reasoning_effort: (_g = (_f = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _f.reasoningEffort) != null ? _g : this.settings.reasoningEffort,
       // messages:
       messages
     };
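Note: these OpenAI-specific request fields are now read from `providerOptions.openai` rather than from per-call mode settings, with camelCase keys mapped to the snake_case body fields shown above. A rough sketch of the options object implied by that mapping (values are illustrative):

// Illustrative provider options; key names follow the mapping above.
const providerOptions = {
  openai: {
    maxCompletionTokens: 1024, // -> max_completion_tokens
    store: true, // -> store
    metadata: { traceId: "abc" }, // -> metadata (example value only)
    reasoningEffort: "low" // -> reasoning_effort, falling back to settings.reasoningEffort
  }
};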
@@ -521,81 +530,28 @@ var OpenAIChatLanguageModel = class {
         baseArgs.max_tokens = void 0;
       }
     }
-    …
-            response_format: this.supportsStructuredOutputs && mode.schema != null ? {
-              type: "json_schema",
-              json_schema: {
-                schema: mode.schema,
-                strict: true,
-                name: (_h = mode.name) != null ? _h : "response",
-                description: mode.description
-              }
-            } : { type: "json_object" }
-          },
-          warnings
-        };
-      }
-      case "object-tool": {
-        return {
-          args: useLegacyFunctionCalling ? {
-            ...baseArgs,
-            function_call: {
-              name: mode.tool.name
-            },
-            functions: [
-              {
-                name: mode.tool.name,
-                description: mode.tool.description,
-                parameters: mode.tool.parameters
-              }
-            ]
-          } : {
-            ...baseArgs,
-            tool_choice: {
-              type: "function",
-              function: { name: mode.tool.name }
-            },
-            tools: [
-              {
-                type: "function",
-                function: {
-                  name: mode.tool.name,
-                  description: mode.tool.description,
-                  parameters: mode.tool.parameters,
-                  strict: this.supportsStructuredOutputs ? true : void 0
-                }
-              }
-            ]
-          },
-          warnings
-        };
-      }
-      default: {
-        const _exhaustiveCheck = type;
-        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
-      }
-    }
+    const {
+      tools: openaiTools,
+      toolChoice: openaiToolChoice,
+      functions,
+      function_call,
+      toolWarnings
+    } = prepareTools({
+      tools,
+      toolChoice,
+      useLegacyFunctionCalling,
+      structuredOutputs: this.supportsStructuredOutputs
+    });
+    return {
+      args: {
+        ...baseArgs,
+        tools: openaiTools,
+        tool_choice: openaiToolChoice,
+        functions,
+        function_call
+      },
+      warnings: [...warnings, ...toolWarnings]
+    };
   }
   async doGenerate(options) {
     var _a, _b, _c, _d, _e, _f, _g, _h;
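Note: `getArgs` no longer branches on a `mode` object; it forwards `tools` and `toolChoice` straight to `prepareTools` and spreads the result into the request body. A sketch of that call and the destructured result, mirroring the code above (the tool definition is illustrative):

// Illustrative only: mirrors the destructuring used above.
const { tools, toolChoice, functions, function_call, toolWarnings } = prepareTools({
  tools: [{ type: "function", name: "getWeather", description: "Get the weather", parameters: { type: "object", properties: {} } }],
  toolChoice: { type: "auto" },
  useLegacyFunctionCalling: false,
  structuredOutputs: true
});
// tools / toolChoice feed the tools / tool_choice body fields;
// functions / function_call are only populated on the legacy function-calling path.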
@@ -1062,9 +1018,6 @@ var reasoningModels = {
 };

 // src/openai-completion-language-model.ts
-import {
-  UnsupportedFunctionalityError as UnsupportedFunctionalityError5
-} from "@ai-sdk/provider";
 import {
   combineHeaders as combineHeaders2,
   createEventSourceResponseHandler as createEventSourceResponseHandler2,
@@ -1108,13 +1061,8 @@ function convertToOpenAICompletionPrompt({
           case "text": {
             return part.text;
           }
-          case "image": {
-            throw new UnsupportedFunctionalityError4({
-              functionality: "images"
-            });
-          }
         }
-      }).join("");
+      }).filter(Boolean).join("");
       text += `${user}:
 ${userMessage}

@@ -1187,7 +1135,6 @@ var OpenAICompletionLanguageModel = class {
     return this.config.provider;
   }
   getArgs({
-    mode,
     inputFormat,
     prompt,
     maxTokens,
@@ -1198,16 +1145,19 @@ var OpenAICompletionLanguageModel = class {
     presencePenalty,
     stopSequences: userStopSequences,
     responseFormat,
+    tools,
+    toolChoice,
     seed
   }) {
-    var _a;
-    const type = mode.type;
     const warnings = [];
     if (topK != null) {
-      warnings.push({
-        …
-      });
+      warnings.push({ type: "unsupported-setting", setting: "topK" });
+    }
+    if (tools == null ? void 0 : tools.length) {
+      warnings.push({ type: "unsupported-setting", setting: "tools" });
+    }
+    if (toolChoice != null) {
+      warnings.push({ type: "unsupported-setting", setting: "toolChoice" });
     }
     if (responseFormat != null && responseFormat.type !== "text") {
       warnings.push({
@@ -1218,56 +1168,30 @@ var OpenAICompletionLanguageModel = class {
     }
     const { prompt: completionPrompt, stopSequences } = convertToOpenAICompletionPrompt({ prompt, inputFormat });
     const stop = [...stopSequences != null ? stopSequences : [], ...userStopSequences != null ? userStopSequences : []];
-    …
+    return {
+      args: {
+        // model id:
+        model: this.modelId,
+        // model specific settings:
+        echo: this.settings.echo,
+        logit_bias: this.settings.logitBias,
+        logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
+        suffix: this.settings.suffix,
+        user: this.settings.user,
+        // standardized settings:
+        max_tokens: maxTokens,
+        temperature,
+        top_p: topP,
+        frequency_penalty: frequencyPenalty,
+        presence_penalty: presencePenalty,
+        seed,
+        // prompt:
+        prompt: completionPrompt,
+        // stop sequences:
+        stop: stop.length > 0 ? stop : void 0
+      },
+      warnings
     };
-    switch (type) {
-      case "regular": {
-        if ((_a = mode.tools) == null ? void 0 : _a.length) {
-          throw new UnsupportedFunctionalityError5({
-            functionality: "tools"
-          });
-        }
-        if (mode.toolChoice) {
-          throw new UnsupportedFunctionalityError5({
-            functionality: "toolChoice"
-          });
-        }
-        return { args: baseArgs, warnings };
-      }
-      case "object-json": {
-        throw new UnsupportedFunctionalityError5({
-          functionality: "object-json mode"
-        });
-      }
-      case "object-tool": {
-        throw new UnsupportedFunctionalityError5({
-          functionality: "object-tool mode"
-        });
-      }
-      default: {
-        const _exhaustiveCheck = type;
-        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
-      }
-    }
   }
   async doGenerate(options) {
     const { args, warnings } = this.getArgs(options);
@@ -1605,22 +1529,201 @@ var openaiImageResponseSchema = z5.object({
   data: z5.array(z5.object({ b64_json: z5.string() }))
 });

-// src/
+// src/openai-transcription-model.ts
 import {
   combineHeaders as combineHeaders5,
-  …
+  convertBase64ToUint8Array,
   createJsonResponseHandler as createJsonResponseHandler5,
-  generateId as generateId2,
   parseProviderOptions,
-  …
+  postFormDataToApi
 } from "@ai-sdk/provider-utils";
 import { z as z6 } from "zod";
+var OpenAIProviderOptionsSchema = z6.object({
+  include: z6.array(z6.string()).optional().describe(
+    "Additional information to include in the transcription response."
+  ),
+  language: z6.string().optional().describe("The language of the input audio in ISO-639-1 format."),
+  prompt: z6.string().optional().describe(
+    "An optional text to guide the model's style or continue a previous audio segment."
+  ),
+  temperature: z6.number().min(0).max(1).optional().default(0).describe("The sampling temperature, between 0 and 1."),
+  timestampGranularities: z6.array(z6.enum(["word", "segment"])).optional().default(["segment"]).describe(
+    "The timestamp granularities to populate for this transcription."
+  )
+});
+var languageMap = {
+  afrikaans: "af",
+  arabic: "ar",
+  armenian: "hy",
+  azerbaijani: "az",
+  belarusian: "be",
+  bosnian: "bs",
+  bulgarian: "bg",
+  catalan: "ca",
+  chinese: "zh",
+  croatian: "hr",
+  czech: "cs",
+  danish: "da",
+  dutch: "nl",
+  english: "en",
+  estonian: "et",
+  finnish: "fi",
+  french: "fr",
+  galician: "gl",
+  german: "de",
+  greek: "el",
+  hebrew: "he",
+  hindi: "hi",
+  hungarian: "hu",
+  icelandic: "is",
+  indonesian: "id",
+  italian: "it",
+  japanese: "ja",
+  kannada: "kn",
+  kazakh: "kk",
+  korean: "ko",
+  latvian: "lv",
+  lithuanian: "lt",
+  macedonian: "mk",
+  malay: "ms",
+  marathi: "mr",
+  maori: "mi",
+  nepali: "ne",
+  norwegian: "no",
+  persian: "fa",
+  polish: "pl",
+  portuguese: "pt",
+  romanian: "ro",
+  russian: "ru",
+  serbian: "sr",
+  slovak: "sk",
+  slovenian: "sl",
+  spanish: "es",
+  swahili: "sw",
+  swedish: "sv",
+  tagalog: "tl",
+  tamil: "ta",
+  thai: "th",
+  turkish: "tr",
+  ukrainian: "uk",
+  urdu: "ur",
+  vietnamese: "vi",
+  welsh: "cy"
+};
+var OpenAITranscriptionModel = class {
+  constructor(modelId, config) {
+    this.modelId = modelId;
+    this.config = config;
+    this.specificationVersion = "v1";
+  }
+  get provider() {
+    return this.config.provider;
+  }
+  getArgs({
+    audio,
+    mediaType,
+    providerOptions
+  }) {
+    const warnings = [];
+    const openAIOptions = parseProviderOptions({
+      provider: "openai",
+      providerOptions,
+      schema: OpenAIProviderOptionsSchema
+    });
+    const formData = new FormData();
+    const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([convertBase64ToUint8Array(audio)]);
+    formData.append("model", this.modelId);
+    formData.append("file", new File([blob], "audio", { type: mediaType }));
+    if (openAIOptions) {
+      const transcriptionModelOptions = {
+        include: openAIOptions.include,
+        language: openAIOptions.language,
+        prompt: openAIOptions.prompt,
+        temperature: openAIOptions.temperature,
+        timestamp_granularities: openAIOptions.timestampGranularities
+      };
+      for (const key in transcriptionModelOptions) {
+        const value = transcriptionModelOptions[key];
+        if (value !== void 0) {
+          formData.append(key, value);
+        }
+      }
+    }
+    return {
+      formData,
+      warnings
+    };
+  }
+  async doGenerate(options) {
+    var _a, _b, _c, _d, _e, _f;
+    const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
+    const { formData, warnings } = this.getArgs(options);
+    const {
+      value: response,
+      responseHeaders,
+      rawValue: rawResponse
+    } = await postFormDataToApi({
+      url: this.config.url({
+        path: "/audio/transcriptions",
+        modelId: this.modelId
+      }),
+      headers: combineHeaders5(this.config.headers(), options.headers),
+      formData,
+      failedResponseHandler: openaiFailedResponseHandler,
+      successfulResponseHandler: createJsonResponseHandler5(
+        openaiTranscriptionResponseSchema
+      ),
+      abortSignal: options.abortSignal,
+      fetch: this.config.fetch
+    });
+    const language = response.language != null && response.language in languageMap ? languageMap[response.language] : void 0;
+    return {
+      text: response.text,
+      segments: (_e = (_d = response.words) == null ? void 0 : _d.map((word) => ({
+        text: word.word,
+        startSecond: word.start,
+        endSecond: word.end
+      }))) != null ? _e : [],
+      language,
+      durationInSeconds: (_f = response.duration) != null ? _f : void 0,
+      warnings,
+      response: {
+        timestamp: currentDate,
+        modelId: this.modelId,
+        headers: responseHeaders,
+        body: rawResponse
+      }
+    };
+  }
+};
+var openaiTranscriptionResponseSchema = z6.object({
+  text: z6.string(),
+  language: z6.string().nullish(),
+  duration: z6.number().nullish(),
+  words: z6.array(
+    z6.object({
+      word: z6.string(),
+      start: z6.number(),
+      end: z6.number()
+    })
+  ).nullish()
+});
+
+// src/responses/openai-responses-language-model.ts
+import {
+  combineHeaders as combineHeaders6,
+  createEventSourceResponseHandler as createEventSourceResponseHandler3,
+  createJsonResponseHandler as createJsonResponseHandler6,
+  generateId as generateId2,
+  parseProviderOptions as parseProviderOptions2,
+  postJsonToApi as postJsonToApi5
+} from "@ai-sdk/provider-utils";
+import { z as z7 } from "zod";

 // src/responses/convert-to-openai-responses-messages.ts
 import {
-  UnsupportedFunctionalityError as
+  UnsupportedFunctionalityError as UnsupportedFunctionalityError5
 } from "@ai-sdk/provider";
-import { convertUint8ArrayToBase64 as convertUint8ArrayToBase642 } from "@ai-sdk/provider-utils";
 function convertToOpenAIResponsesMessages({
   prompt,
   systemMessageMode
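Note: the new `OpenAITranscriptionModel` posts a multipart form to `/audio/transcriptions`, appending the audio plus any provider options that pass the zod schema above. A sketch of options accepted by that schema (values are examples only):

// Example provider options matching OpenAIProviderOptionsSchema above.
const providerOptions = {
  openai: {
    language: "en", // ISO-639-1 input language
    prompt: "Vercel AI SDK", // style / context hint for the model
    temperature: 0, // 0..1, defaults to 0
    timestampGranularities: ["word", "segment"]
  }
};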
@@ -1659,38 +1762,35 @@ function convertToOpenAIResponsesMessages({
       messages.push({
         role: "user",
         content: content.map((part, index) => {
-          var _a, _b, _c
+          var _a, _b, _c;
           switch (part.type) {
             case "text": {
               return { type: "input_text", text: part.text };
             }
-            case "image": {
-              return {
-                type: "input_image",
-                image_url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${convertUint8ArrayToBase642(part.image)}`,
-                // OpenAI specific extension: image detail
-                detail: (_c = (_b = part.providerMetadata) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
-              };
-            }
             case "file": {
-              if (part.
-              …
-              }
-              default: {
-                throw new UnsupportedFunctionalityError6({
-                  functionality: "Only PDF files are supported in user messages"
+              if (part.mediaType.startsWith("image/")) {
+                const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
+                return {
+                  type: "input_image",
+                  image_url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${part.data}`,
+                  // OpenAI specific extension: image detail
+                  detail: (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.imageDetail
+                };
+              } else if (part.mediaType === "application/pdf") {
+                if (part.data instanceof URL) {
+                  throw new UnsupportedFunctionalityError5({
+                    functionality: "PDF file parts with URLs"
                   });
                 }
+                return {
+                  type: "input_file",
+                  filename: (_c = part.filename) != null ? _c : `part-${index}.pdf`,
+                  file_data: `data:application/pdf;base64,${part.data}`
+                };
+              } else {
+                throw new UnsupportedFunctionalityError5({
+                  functionality: `file part media type ${part.mediaType}`
+                });
               }
             }
           }
@@ -1760,19 +1860,18 @@ function mapOpenAIResponseFinishReason({

 // src/responses/openai-responses-prepare-tools.ts
 import {
-  UnsupportedFunctionalityError as
+  UnsupportedFunctionalityError as UnsupportedFunctionalityError6
 } from "@ai-sdk/provider";
 function prepareResponsesTools({
-  …
+  tools,
+  toolChoice,
   strict
 }) {
-  …
-  const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
+  tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
   const toolWarnings = [];
   if (tools == null) {
-    return { tools: void 0,
+    return { tools: void 0, toolChoice: void 0, toolWarnings };
   }
-  const toolChoice = mode.toolChoice;
   const openaiTools = [];
   for (const tool of tools) {
     switch (tool.type) {
@@ -1805,37 +1904,24 @@ function prepareResponsesTools({
     }
   }
   if (toolChoice == null) {
-    return { tools: openaiTools,
+    return { tools: openaiTools, toolChoice: void 0, toolWarnings };
   }
   const type = toolChoice.type;
   switch (type) {
     case "auto":
     case "none":
     case "required":
-      return { tools: openaiTools,
-    case "tool":
-      if (toolChoice.toolName === "web_search_preview") {
-        return {
-          tools: openaiTools,
-          tool_choice: {
-            type: "web_search_preview"
-          },
-          toolWarnings
-        };
-      }
+      return { tools: openaiTools, toolChoice: type, toolWarnings };
+    case "tool":
       return {
         tools: openaiTools,
-        …
-          type: "function",
-          name: toolChoice.toolName
-        },
+        toolChoice: toolChoice.toolName === "web_search_preview" ? { type: "web_search_preview" } : { type: "function", name: toolChoice.toolName },
         toolWarnings
       };
-      }
     default: {
       const _exhaustiveCheck = type;
-      throw new
-        functionality: `
+      throw new UnsupportedFunctionalityError6({
+        functionality: `tool choice type: ${_exhaustiveCheck}`
       });
     }
   }
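Note: in `prepareResponsesTools`, a `"tool"` choice now maps in one expression either to the built-in web-search selector or to a function selector. Illustrative sketch of that mapping (tool definitions omitted; names are examples):

// toolChoice input on the left, request tool_choice produced above on the right.
const choice = { type: "tool", toolName: "web_search_preview" };
// -> { type: "web_search_preview" }
const functionChoice = { type: "tool", toolName: "getWeather" };
// -> { type: "function", name: "getWeather" }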
@@ -1853,7 +1939,6 @@ var OpenAIResponsesLanguageModel = class {
     return this.config.provider;
   }
   getArgs({
-    mode,
     maxTokens,
     temperature,
     stopSequences,
@@ -1863,24 +1948,19 @@ var OpenAIResponsesLanguageModel = class {
     frequencyPenalty,
     seed,
     prompt,
-    …
+    providerOptions,
+    tools,
+    toolChoice,
     responseFormat
   }) {
-    var _a, _b
+    var _a, _b;
     const warnings = [];
     const modelConfig = getResponsesModelConfig(this.modelId);
-    const type = mode.type;
     if (topK != null) {
-      warnings.push({
-        type: "unsupported-setting",
-        setting: "topK"
-      });
+      warnings.push({ type: "unsupported-setting", setting: "topK" });
     }
     if (seed != null) {
-      warnings.push({
-        type: "unsupported-setting",
-        setting: "seed"
-      });
+      warnings.push({ type: "unsupported-setting", setting: "seed" });
     }
     if (presencePenalty != null) {
       warnings.push({
@@ -1895,19 +1975,16 @@ var OpenAIResponsesLanguageModel = class {
       });
     }
     if (stopSequences != null) {
-      warnings.push({
-        type: "unsupported-setting",
-        setting: "stopSequences"
-      });
+      warnings.push({ type: "unsupported-setting", setting: "stopSequences" });
     }
     const { messages, warnings: messageWarnings } = convertToOpenAIResponsesMessages({
       prompt,
       systemMessageMode: modelConfig.systemMessageMode
     });
     warnings.push(...messageWarnings);
-    const openaiOptions =
+    const openaiOptions = parseProviderOptions2({
       provider: "openai",
-      providerOptions
+      providerOptions,
       schema: openaiResponsesProviderOptionsSchema
     });
     const isStrict = (_a = openaiOptions == null ? void 0 : openaiOptions.strictSchemas) != null ? _a : true;
@@ -1961,62 +2038,23 @@ var OpenAIResponsesLanguageModel = class {
       });
     }
   }
-    …
-        return {
-          args: {
-            ...baseArgs,
-            text: {
-              format: mode.schema != null ? {
-                type: "json_schema",
-                strict: isStrict,
-                name: (_c = mode.name) != null ? _c : "response",
-                description: mode.description,
-                schema: mode.schema
-              } : { type: "json_object" }
-            }
-          },
-          warnings
-        };
-      }
-      case "object-tool": {
-        return {
-          args: {
-            ...baseArgs,
-            tool_choice: { type: "function", name: mode.tool.name },
-            tools: [
-              {
-                type: "function",
-                name: mode.tool.name,
-                description: mode.tool.description,
-                parameters: mode.tool.parameters,
-                strict: isStrict
-              }
-            ]
-          },
-          warnings
-        };
-      }
-      default: {
-        const _exhaustiveCheck = type;
-        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
-      }
-    }
+    const {
+      tools: openaiTools,
+      toolChoice: openaiToolChoice,
+      toolWarnings
+    } = prepareResponsesTools({
+      tools,
+      toolChoice,
+      strict: isStrict
+    });
+    return {
+      args: {
+        ...baseArgs,
+        tools: openaiTools,
+        tool_choice: openaiToolChoice
+      },
+      warnings: [...warnings, ...toolWarnings]
+    };
   }
   async doGenerate(options) {
     var _a, _b, _c, _d, _e;
@@ -2030,53 +2068,53 @@ var OpenAIResponsesLanguageModel = class {
         path: "/responses",
         modelId: this.modelId
       }),
-      headers:
+      headers: combineHeaders6(this.config.headers(), options.headers),
       body,
       failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler:
-        …
-          id:
-          created_at:
-          model:
-          output:
-          …
-              type:
-              role:
-              content:
-              …
-                  type:
-                  text:
-                  annotations:
-                  …
-                      type:
-                      start_index:
-                      end_index:
-                      url:
-                      title:
+      successfulResponseHandler: createJsonResponseHandler6(
+        z7.object({
+          id: z7.string(),
+          created_at: z7.number(),
+          model: z7.string(),
+          output: z7.array(
+            z7.discriminatedUnion("type", [
+              z7.object({
+                type: z7.literal("message"),
+                role: z7.literal("assistant"),
+                content: z7.array(
+                  z7.object({
+                    type: z7.literal("output_text"),
+                    text: z7.string(),
+                    annotations: z7.array(
+                      z7.object({
+                        type: z7.literal("url_citation"),
+                        start_index: z7.number(),
+                        end_index: z7.number(),
+                        url: z7.string(),
+                        title: z7.string()
                       })
                     )
                   })
                 )
               }),
-              …
-                type:
-                call_id:
-                name:
-                arguments:
+              z7.object({
+                type: z7.literal("function_call"),
+                call_id: z7.string(),
+                name: z7.string(),
+                arguments: z7.string()
              }),
-              …
-                type:
+              z7.object({
+                type: z7.literal("web_search_call")
              }),
-              …
-                type:
+              z7.object({
+                type: z7.literal("computer_call")
              }),
-              …
-                type:
+              z7.object({
+                type: z7.literal("reasoning")
              })
            ])
          ),
-          incomplete_details:
+          incomplete_details: z7.object({ reason: z7.string() }).nullable(),
           usage: usageSchema
         })
       ),
@@ -2145,7 +2183,7 @@ var OpenAIResponsesLanguageModel = class {
         path: "/responses",
         modelId: this.modelId
       }),
-      headers:
+      headers: combineHeaders6(this.config.headers(), options.headers),
       body: {
         ...body,
         stream: true
@@ -2274,79 +2312,79 @@ var OpenAIResponsesLanguageModel = class {
     };
   }
 };
-var usageSchema =
-  input_tokens:
-  input_tokens_details:
-  output_tokens:
-  output_tokens_details:
+var usageSchema = z7.object({
+  input_tokens: z7.number(),
+  input_tokens_details: z7.object({ cached_tokens: z7.number().nullish() }).nullish(),
+  output_tokens: z7.number(),
+  output_tokens_details: z7.object({ reasoning_tokens: z7.number().nullish() }).nullish()
 });
-var textDeltaChunkSchema =
-  type:
-  delta:
+var textDeltaChunkSchema = z7.object({
+  type: z7.literal("response.output_text.delta"),
+  delta: z7.string()
 });
-var responseFinishedChunkSchema =
-  type:
-  response:
-    incomplete_details:
+var responseFinishedChunkSchema = z7.object({
+  type: z7.enum(["response.completed", "response.incomplete"]),
+  response: z7.object({
+    incomplete_details: z7.object({ reason: z7.string() }).nullish(),
     usage: usageSchema
   })
 });
-var responseCreatedChunkSchema =
-  type:
-  response:
-    id:
-    created_at:
-    model:
+var responseCreatedChunkSchema = z7.object({
+  type: z7.literal("response.created"),
+  response: z7.object({
+    id: z7.string(),
+    created_at: z7.number(),
+    model: z7.string()
   })
 });
-var responseOutputItemDoneSchema =
-  type:
-  output_index:
-  item:
-    …
-      type:
+var responseOutputItemDoneSchema = z7.object({
+  type: z7.literal("response.output_item.done"),
+  output_index: z7.number(),
+  item: z7.discriminatedUnion("type", [
+    z7.object({
+      type: z7.literal("message")
     }),
-    …
-      type:
-      id:
-      call_id:
-      name:
-      arguments:
-      status:
+    z7.object({
+      type: z7.literal("function_call"),
+      id: z7.string(),
+      call_id: z7.string(),
+      name: z7.string(),
+      arguments: z7.string(),
+      status: z7.literal("completed")
    })
  ])
 });
-var responseFunctionCallArgumentsDeltaSchema =
-  type:
-  item_id:
-  output_index:
-  delta:
+var responseFunctionCallArgumentsDeltaSchema = z7.object({
+  type: z7.literal("response.function_call_arguments.delta"),
+  item_id: z7.string(),
+  output_index: z7.number(),
+  delta: z7.string()
 });
-var responseOutputItemAddedSchema =
-  type:
-  output_index:
-  item:
-    …
-      type:
+var responseOutputItemAddedSchema = z7.object({
+  type: z7.literal("response.output_item.added"),
+  output_index: z7.number(),
+  item: z7.discriminatedUnion("type", [
+    z7.object({
+      type: z7.literal("message")
    }),
-    …
-      type:
-      id:
-      call_id:
-      name:
-      arguments:
+    z7.object({
+      type: z7.literal("function_call"),
+      id: z7.string(),
+      call_id: z7.string(),
+      name: z7.string(),
+      arguments: z7.string()
    })
  ])
 });
-var responseAnnotationAddedSchema =
-  type:
-  annotation:
-    type:
-    url:
-    title:
+var responseAnnotationAddedSchema = z7.object({
+  type: z7.literal("response.output_text.annotation.added"),
+  annotation: z7.object({
+    type: z7.literal("url_citation"),
+    url: z7.string(),
+    title: z7.string()
  })
 });
-var openaiResponsesChunkSchema =
+var openaiResponsesChunkSchema = z7.union([
   textDeltaChunkSchema,
   responseFinishedChunkSchema,
   responseCreatedChunkSchema,
@@ -2354,7 +2392,7 @@ var openaiResponsesChunkSchema = z6.union([
   responseFunctionCallArgumentsDeltaSchema,
   responseOutputItemAddedSchema,
   responseAnnotationAddedSchema,
-  …
+  z7.object({ type: z7.string() }).passthrough()
   // fallback for unknown chunks
 ]);
 function isTextDeltaChunk(chunk) {
@@ -2399,15 +2437,15 @@ function getResponsesModelConfig(modelId) {
     requiredAutoTruncation: false
   };
 }
-var openaiResponsesProviderOptionsSchema =
-  metadata:
-  parallelToolCalls:
-  previousResponseId:
-  store:
-  user:
-  reasoningEffort:
-  strictSchemas:
-  instructions:
+var openaiResponsesProviderOptionsSchema = z7.object({
+  metadata: z7.any().nullish(),
+  parallelToolCalls: z7.boolean().nullish(),
+  previousResponseId: z7.string().nullish(),
+  store: z7.boolean().nullish(),
+  user: z7.string().nullish(),
+  reasoningEffort: z7.string().nullish(),
+  strictSchemas: z7.boolean().nullish(),
+  instructions: z7.string().nullish()
 });
 export {
   OpenAIChatLanguageModel,
@@ -2415,6 +2453,7 @@ export {
   OpenAIEmbeddingModel,
   OpenAIImageModel,
   OpenAIResponsesLanguageModel,
+  OpenAITranscriptionModel,
   modelMaxImagesPerCall
 };
 //# sourceMappingURL=index.mjs.map