@ai-sdk/openai 2.0.8 → 2.0.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +13 -0
- package/dist/index.d.mts +36 -53
- package/dist/index.d.ts +36 -53
- package/dist/index.js +503 -443
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +514 -454
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +317 -256
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +302 -241
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -4,7 +4,7 @@ import {
   withoutTrailingSlash
 } from "@ai-sdk/provider-utils";
 
-// src/openai-chat-language-model.ts
+// src/chat/openai-chat-language-model.ts
 import {
   InvalidResponseDataError
 } from "@ai-sdk/provider";
@@ -19,7 +19,26 @@ import {
 } from "@ai-sdk/provider-utils";
 import { z as z5 } from "zod/v4";
 
-// src/
+// src/openai-error.ts
+import { z } from "zod/v4";
+import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
+var openaiErrorDataSchema = z.object({
+  error: z.object({
+    message: z.string(),
+    // The additional information below is handled loosely to support
+    // OpenAI-compatible providers that have slightly different error
+    // responses:
+    type: z.string().nullish(),
+    param: z.any().nullish(),
+    code: z.union([z.string(), z.number()]).nullish()
+  })
+});
+var openaiFailedResponseHandler = createJsonErrorResponseHandler({
+  errorSchema: openaiErrorDataSchema,
+  errorToMessage: (data) => data.error.message
+});
+
+// src/chat/convert-to-openai-chat-messages.ts
 import {
   UnsupportedFunctionalityError
 } from "@ai-sdk/provider";
@@ -199,7 +218,7 @@ function convertToOpenAIChatMessages({
   return { messages, warnings };
 }
 
-// src/get-response-metadata.ts
+// src/chat/get-response-metadata.ts
 function getResponseMetadata({
   id,
   model,
@@ -212,7 +231,7 @@ function getResponseMetadata({
   };
 }
 
-// src/map-openai-finish-reason.ts
+// src/chat/map-openai-finish-reason.ts
 function mapOpenAIFinishReason(finishReason) {
   switch (finishReason) {
     case "stop":
@@ -229,16 +248,16 @@ function mapOpenAIFinishReason(finishReason) {
   }
 }
 
-// src/openai-chat-options.ts
-import { z } from "zod/v4";
-var openaiProviderOptions =
+// src/chat/openai-chat-options.ts
+import { z as z2 } from "zod/v4";
+var openaiProviderOptions = z2.object({
   /**
    * Modify the likelihood of specified tokens appearing in the completion.
    *
    * Accepts a JSON object that maps tokens (specified by their token ID in
    * the GPT tokenizer) to an associated bias value from -100 to 100.
    */
-  logitBias:
+  logitBias: z2.record(z2.coerce.number(), z2.number()).optional(),
   /**
    * Return the log probabilities of the tokens.
    *
@@ -248,42 +267,42 @@ var openaiProviderOptions = z.object({
   * Setting to a number will return the log probabilities of the top n
   * tokens that were generated.
   */
-  logprobs:
+  logprobs: z2.union([z2.boolean(), z2.number()]).optional(),
   /**
    * Whether to enable parallel function calling during tool use. Default to true.
    */
-  parallelToolCalls:
+  parallelToolCalls: z2.boolean().optional(),
   /**
    * A unique identifier representing your end-user, which can help OpenAI to
    * monitor and detect abuse.
   */
-  user:
+  user: z2.string().optional(),
   /**
    * Reasoning effort for reasoning models. Defaults to `medium`.
   */
-  reasoningEffort:
+  reasoningEffort: z2.enum(["minimal", "low", "medium", "high"]).optional(),
   /**
    * Maximum number of completion tokens to generate. Useful for reasoning models.
   */
-  maxCompletionTokens:
+  maxCompletionTokens: z2.number().optional(),
   /**
    * Whether to enable persistence in responses API.
   */
-  store:
+  store: z2.boolean().optional(),
   /**
    * Metadata to associate with the request.
   */
-  metadata:
+  metadata: z2.record(z2.string().max(64), z2.string().max(512)).optional(),
   /**
    * Parameters for prediction mode.
   */
-  prediction:
+  prediction: z2.record(z2.string(), z2.any()).optional(),
   /**
    * Whether to use structured outputs.
    *
    * @default true
   */
-  structuredOutputs:
+  structuredOutputs: z2.boolean().optional(),
   /**
    * Service tier for the request.
    * - 'auto': Default service tier
@@ -292,40 +311,21 @@ var openaiProviderOptions = z.object({
   *
   * @default 'auto'
   */
-  serviceTier:
+  serviceTier: z2.enum(["auto", "flex", "priority"]).optional(),
   /**
    * Whether to use strict JSON schema validation.
    *
    * @default false
   */
-  strictJsonSchema:
+  strictJsonSchema: z2.boolean().optional(),
   /**
    * Controls the verbosity of the model's responses.
    * Lower values will result in more concise responses, while higher values will result in more verbose responses.
   */
-  textVerbosity:
+  textVerbosity: z2.enum(["low", "medium", "high"]).optional()
 });
 
-// src/openai-
-import { z as z2 } from "zod/v4";
-import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
-var openaiErrorDataSchema = z2.object({
-  error: z2.object({
-    message: z2.string(),
-    // The additional information below is handled loosely to support
-    // OpenAI-compatible providers that have slightly different error
-    // responses:
-    type: z2.string().nullish(),
-    param: z2.any().nullish(),
-    code: z2.union([z2.string(), z2.number()]).nullish()
-  })
-});
-var openaiFailedResponseHandler = createJsonErrorResponseHandler({
-  errorSchema: openaiErrorDataSchema,
-  errorToMessage: (data) => data.error.message
-});
-
-// src/openai-prepare-tools.ts
+// src/chat/openai-chat-prepare-tools.ts
 import {
   UnsupportedFunctionalityError as UnsupportedFunctionalityError2
 } from "@ai-sdk/provider";
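
Note: the `openaiProviderOptions` schema above defines the chat model's provider options. A minimal usage sketch, assuming the AI SDK's documented `generateText` / `providerOptions.openai` call shape; the model id and option values are illustrative, not part of this diff:

```js
// Sketch only: option names come from the openaiProviderOptions schema in this diff;
// the generateText call shape is standard AI SDK usage and the model id is a placeholder.
import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

const { text } = await generateText({
  model: openai.chat("gpt-5-mini"),
  prompt: "Summarize this changelog entry in one sentence.",
  providerOptions: {
    openai: {
      reasoningEffort: "low",   // "minimal" | "low" | "medium" | "high"
      textVerbosity: "low",     // "low" | "medium" | "high"
      serviceTier: "flex",      // "auto" | "flex" | "priority"
      maxCompletionTokens: 1024
    }
  }
});
```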
@@ -416,8 +416,8 @@ var webSearchPreview = createProviderDefinedToolFactory2({
   inputSchema: z4.object({})
 });
 
-// src/openai-prepare-tools.ts
-function
+// src/chat/openai-chat-prepare-tools.ts
+function prepareChatTools({
   tools,
   toolChoice,
   structuredOutputs,
@@ -503,7 +503,7 @@ function prepareTools({
   }
 }
 
-// src/openai-chat-language-model.ts
+// src/chat/openai-chat-language-model.ts
 var OpenAIChatLanguageModel = class {
   constructor(modelId, config) {
     this.specificationVersion = "v2";
@@ -588,7 +588,7 @@ var OpenAIChatLanguageModel = class {
       seed,
       verbosity: openaiOptions.textVerbosity,
       // openai specific settings:
-      // TODO
+      // TODO AI SDK 6: remove, we auto-map maxOutputTokens now
       max_completion_tokens: openaiOptions.maxCompletionTokens,
       store: openaiOptions.store,
       metadata: openaiOptions.metadata,
@@ -688,7 +688,7 @@ var OpenAIChatLanguageModel = class {
       tools: openaiTools2,
       toolChoice: openaiToolChoice,
       toolWarnings
-    } =
+    } = prepareChatTools({
       tools,
       toolChoice,
       structuredOutputs,
@@ -1158,7 +1158,7 @@ var reasoningModels = {
   }
 };
 
-// src/openai-completion-language-model.ts
+// src/completion/openai-completion-language-model.ts
 import {
   combineHeaders as combineHeaders2,
   createEventSourceResponseHandler as createEventSourceResponseHandler2,
@@ -1168,7 +1168,7 @@ import {
 } from "@ai-sdk/provider-utils";
 import { z as z7 } from "zod/v4";
 
-// src/convert-to-openai-completion-prompt.ts
+// src/completion/convert-to-openai-completion-prompt.ts
 import {
   InvalidPromptError,
   UnsupportedFunctionalityError as UnsupportedFunctionalityError3
@@ -1246,7 +1246,37 @@ ${user}:`]
   };
 }
 
-// src/
+// src/completion/get-response-metadata.ts
+function getResponseMetadata2({
+  id,
+  model,
+  created
+}) {
+  return {
+    id: id != null ? id : void 0,
+    modelId: model != null ? model : void 0,
+    timestamp: created != null ? new Date(created * 1e3) : void 0
+  };
+}
+
+// src/completion/map-openai-finish-reason.ts
+function mapOpenAIFinishReason2(finishReason) {
+  switch (finishReason) {
+    case "stop":
+      return "stop";
+    case "length":
+      return "length";
+    case "content_filter":
+      return "content-filter";
+    case "function_call":
+    case "tool_calls":
+      return "tool-calls";
+    default:
+      return "unknown";
+  }
+}
+
+// src/completion/openai-completion-options.ts
 import { z as z6 } from "zod/v4";
 var openaiCompletionProviderOptions = z6.object({
   /**
@@ -1289,7 +1319,7 @@ var openaiCompletionProviderOptions = z6.object({
   logprobs: z6.union([z6.boolean(), z6.number()]).optional()
 });
 
-// src/openai-completion-language-model.ts
+// src/completion/openai-completion-language-model.ts
 var OpenAICompletionLanguageModel = class {
   constructor(modelId, config) {
     this.specificationVersion = "v2";
@@ -1409,10 +1439,10 @@ var OpenAICompletionLanguageModel = class {
         outputTokens: (_b = response.usage) == null ? void 0 : _b.completion_tokens,
         totalTokens: (_c = response.usage) == null ? void 0 : _c.total_tokens
       },
-      finishReason:
+      finishReason: mapOpenAIFinishReason2(choice.finish_reason),
       request: { body: args },
       response: {
-        ...
+        ...getResponseMetadata2(response),
         headers: responseHeaders,
         body: rawResponse
       },
@@ -1476,7 +1506,7 @@ var OpenAICompletionLanguageModel = class {
             isFirstChunk = false;
             controller.enqueue({
               type: "response-metadata",
-              ...
+              ...getResponseMetadata2(value)
             });
             controller.enqueue({ type: "text-start", id: "0" });
           }
@@ -1487,7 +1517,7 @@ var OpenAICompletionLanguageModel = class {
           }
           const choice = value.choices[0];
           if ((choice == null ? void 0 : choice.finish_reason) != null) {
-            finishReason =
+            finishReason = mapOpenAIFinishReason2(choice.finish_reason);
           }
           if ((choice == null ? void 0 : choice.logprobs) != null) {
             providerMetadata.openai.logprobs = choice.logprobs;
@@ -1562,7 +1592,7 @@ var openaiCompletionChunkSchema = z7.union([
   openaiErrorDataSchema
 ]);
 
-// src/openai-embedding-model.ts
+// src/embedding/openai-embedding-model.ts
 import {
   TooManyEmbeddingValuesForCallError
 } from "@ai-sdk/provider";
@@ -1574,7 +1604,7 @@ import {
 } from "@ai-sdk/provider-utils";
 import { z as z9 } from "zod/v4";
 
-// src/openai-embedding-options.ts
+// src/embedding/openai-embedding-options.ts
 import { z as z8 } from "zod/v4";
 var openaiEmbeddingProviderOptions = z8.object({
   /**
@@ -1589,7 +1619,7 @@ var openaiEmbeddingProviderOptions = z8.object({
   user: z8.string().optional()
 });
 
-// src/openai-embedding-model.ts
+// src/embedding/openai-embedding-model.ts
 var OpenAIEmbeddingModel = class {
   constructor(modelId, config) {
     this.specificationVersion = "v2";
@@ -1657,7 +1687,7 @@ var openaiTextEmbeddingResponseSchema = z9.object({
   usage: z9.object({ prompt_tokens: z9.number() }).nullish()
 });
 
-// src/openai-image-model.ts
+// src/image/openai-image-model.ts
 import {
   combineHeaders as combineHeaders4,
   createJsonResponseHandler as createJsonResponseHandler4,
@@ -1665,7 +1695,7 @@ import {
 } from "@ai-sdk/provider-utils";
 import { z as z10 } from "zod/v4";
 
-// src/openai-image-
+// src/image/openai-image-options.ts
 var modelMaxImagesPerCall = {
   "dall-e-3": 1,
   "dall-e-2": 10,
@@ -1673,7 +1703,7 @@ var modelMaxImagesPerCall = {
 };
 var hasDefaultResponseFormat = /* @__PURE__ */ new Set(["gpt-image-1"]);
 
-// src/openai-image-model.ts
+// src/image/openai-image-model.ts
 var OpenAIImageModel = class {
   constructor(modelId, config) {
     this.modelId = modelId;
@@ -1757,227 +1787,50 @@ var openaiImageResponseSchema = z10.object({
   )
 });
 
+// src/tool/code-interpreter.ts
+import { createProviderDefinedToolFactory as createProviderDefinedToolFactory3 } from "@ai-sdk/provider-utils";
+import { z as z11 } from "zod/v4";
+var codeInterpreterArgsSchema = z11.object({
+  container: z11.union([
+    z11.string(),
+    z11.object({
+      fileIds: z11.array(z11.string()).optional()
+    })
+  ]).optional()
+});
+var codeInterpreter = createProviderDefinedToolFactory3({
+  id: "openai.code_interpreter",
+  name: "code_interpreter",
+  inputSchema: z11.object({})
+});
+
 // src/openai-tools.ts
 var openaiTools = {
+  codeInterpreter,
   fileSearch,
   webSearchPreview
 };
 
-// src/openai-transcription-model.ts
-import {
-  combineHeaders as combineHeaders5,
-  convertBase64ToUint8Array,
-  createJsonResponseHandler as createJsonResponseHandler5,
-  parseProviderOptions as parseProviderOptions4,
-  postFormDataToApi
-} from "@ai-sdk/provider-utils";
-import { z as z12 } from "zod/v4";
-
-// src/openai-transcription-options.ts
-import { z as z11 } from "zod/v4";
-var openAITranscriptionProviderOptions = z11.object({
-  /**
-   * Additional information to include in the transcription response.
-   */
-  include: z11.array(z11.string()).optional(),
-  /**
-   * The language of the input audio in ISO-639-1 format.
-   */
-  language: z11.string().optional(),
-  /**
-   * An optional text to guide the model's style or continue a previous audio segment.
-   */
-  prompt: z11.string().optional(),
-  /**
-   * The sampling temperature, between 0 and 1.
-   * @default 0
-   */
-  temperature: z11.number().min(0).max(1).default(0).optional(),
-  /**
-   * The timestamp granularities to populate for this transcription.
-   * @default ['segment']
-   */
-  timestampGranularities: z11.array(z11.enum(["word", "segment"])).default(["segment"]).optional()
-});
-
-// src/openai-transcription-model.ts
-var languageMap = {
-  afrikaans: "af",
-  arabic: "ar",
-  armenian: "hy",
-  azerbaijani: "az",
-  belarusian: "be",
-  bosnian: "bs",
-  bulgarian: "bg",
-  catalan: "ca",
-  chinese: "zh",
-  croatian: "hr",
-  czech: "cs",
-  danish: "da",
-  dutch: "nl",
-  english: "en",
-  estonian: "et",
-  finnish: "fi",
-  french: "fr",
-  galician: "gl",
-  german: "de",
-  greek: "el",
-  hebrew: "he",
-  hindi: "hi",
-  hungarian: "hu",
-  icelandic: "is",
-  indonesian: "id",
-  italian: "it",
-  japanese: "ja",
-  kannada: "kn",
-  kazakh: "kk",
-  korean: "ko",
-  latvian: "lv",
-  lithuanian: "lt",
-  macedonian: "mk",
-  malay: "ms",
-  marathi: "mr",
-  maori: "mi",
-  nepali: "ne",
-  norwegian: "no",
-  persian: "fa",
-  polish: "pl",
-  portuguese: "pt",
-  romanian: "ro",
-  russian: "ru",
-  serbian: "sr",
-  slovak: "sk",
-  slovenian: "sl",
-  spanish: "es",
-  swahili: "sw",
-  swedish: "sv",
-  tagalog: "tl",
-  tamil: "ta",
-  thai: "th",
-  turkish: "tr",
-  ukrainian: "uk",
-  urdu: "ur",
-  vietnamese: "vi",
-  welsh: "cy"
-};
-var OpenAITranscriptionModel = class {
-  constructor(modelId, config) {
-    this.modelId = modelId;
-    this.config = config;
-    this.specificationVersion = "v2";
-  }
-  get provider() {
-    return this.config.provider;
-  }
-  async getArgs({
-    audio,
-    mediaType,
-    providerOptions
-  }) {
-    const warnings = [];
-    const openAIOptions = await parseProviderOptions4({
-      provider: "openai",
-      providerOptions,
-      schema: openAITranscriptionProviderOptions
-    });
-    const formData = new FormData();
-    const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([convertBase64ToUint8Array(audio)]);
-    formData.append("model", this.modelId);
-    formData.append("file", new File([blob], "audio", { type: mediaType }));
-    if (openAIOptions) {
-      const transcriptionModelOptions = {
-        include: openAIOptions.include,
-        language: openAIOptions.language,
-        prompt: openAIOptions.prompt,
-        temperature: openAIOptions.temperature,
-        timestamp_granularities: openAIOptions.timestampGranularities
-      };
-      for (const [key, value] of Object.entries(transcriptionModelOptions)) {
-        if (value != null) {
-          formData.append(key, String(value));
-        }
-      }
-    }
-    return {
-      formData,
-      warnings
-    };
-  }
-  async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f;
-    const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
-    const { formData, warnings } = await this.getArgs(options);
-    const {
-      value: response,
-      responseHeaders,
-      rawValue: rawResponse
-    } = await postFormDataToApi({
-      url: this.config.url({
-        path: "/audio/transcriptions",
-        modelId: this.modelId
-      }),
-      headers: combineHeaders5(this.config.headers(), options.headers),
-      formData,
-      failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: createJsonResponseHandler5(
-        openaiTranscriptionResponseSchema
-      ),
-      abortSignal: options.abortSignal,
-      fetch: this.config.fetch
-    });
-    const language = response.language != null && response.language in languageMap ? languageMap[response.language] : void 0;
-    return {
-      text: response.text,
-      segments: (_e = (_d = response.words) == null ? void 0 : _d.map((word) => ({
-        text: word.word,
-        startSecond: word.start,
-        endSecond: word.end
-      }))) != null ? _e : [],
-      language,
-      durationInSeconds: (_f = response.duration) != null ? _f : void 0,
-      warnings,
-      response: {
-        timestamp: currentDate,
-        modelId: this.modelId,
-        headers: responseHeaders,
-        body: rawResponse
-      }
-    };
-  }
-};
-var openaiTranscriptionResponseSchema = z12.object({
-  text: z12.string(),
-  language: z12.string().nullish(),
-  duration: z12.number().nullish(),
-  words: z12.array(
-    z12.object({
-      word: z12.string(),
-      start: z12.number(),
-      end: z12.number()
-    })
-  ).nullish()
-});
-
 // src/responses/openai-responses-language-model.ts
 import {
   APICallError
 } from "@ai-sdk/provider";
 import {
-  combineHeaders as
+  combineHeaders as combineHeaders5,
   createEventSourceResponseHandler as createEventSourceResponseHandler3,
-  createJsonResponseHandler as
+  createJsonResponseHandler as createJsonResponseHandler5,
   generateId as generateId2,
-  parseProviderOptions as
+  parseProviderOptions as parseProviderOptions5,
   postJsonToApi as postJsonToApi5
 } from "@ai-sdk/provider-utils";
-import { z as
+import { z as z13 } from "zod/v4";
 
 // src/responses/convert-to-openai-responses-messages.ts
 import {
   UnsupportedFunctionalityError as UnsupportedFunctionalityError4
 } from "@ai-sdk/provider";
-import { parseProviderOptions as
-import { z as
+import { parseProviderOptions as parseProviderOptions4 } from "@ai-sdk/provider-utils";
+import { z as z12 } from "zod/v4";
 import { convertToBase64 as convertToBase642 } from "@ai-sdk/provider-utils";
 async function convertToOpenAIResponsesMessages({
   prompt,
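
The hunk above introduces `codeInterpreter` as a provider-defined tool factory and registers it in `openaiTools` alongside `fileSearch` and `webSearchPreview`. A hedged usage sketch, assuming the factory is exposed as `openai.tools.codeInterpreter` in the same way the existing `openaiTools` entries are surfaced; the model id, tool key, and prompt are illustrative:

```js
// Sketch only: assumes openaiTools is surfaced as openai.tools, as it already is
// for the existing fileSearch and webSearchPreview factories.
import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

const result = await generateText({
  model: openai.responses("gpt-5"), // code interpreter is wired up in the Responses API path
  prompt: "Compute the standard deviation of [3, 7, 8, 12].",
  tools: {
    code_interpreter: openai.tools.codeInterpreter({})
  }
});
```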
@@ -2029,7 +1882,7 @@ async function convertToOpenAIResponsesMessages({
             return {
               type: "input_image",
               ...part.data instanceof URL ? { image_url: part.data.toString() } : typeof part.data === "string" && part.data.startsWith("file-") ? { file_id: part.data } : {
-                image_url: `data:${mediaType};base64,${part.data}`
+                image_url: `data:${mediaType};base64,${convertToBase642(part.data)}`
               },
               detail: (_b2 = (_a2 = part.providerOptions) == null ? void 0 : _a2.openai) == null ? void 0 : _b2.imageDetail
             };
@@ -2090,7 +1943,7 @@ async function convertToOpenAIResponsesMessages({
           break;
         }
         case "reasoning": {
-          const providerOptions = await
+          const providerOptions = await parseProviderOptions4({
             provider: "openai",
             providerOptions: part.providerOptions,
             schema: openaiResponsesReasoningProviderOptionsSchema
@@ -2161,9 +2014,9 @@ async function convertToOpenAIResponsesMessages({
   }
   return { messages, warnings };
 }
-var openaiResponsesReasoningProviderOptionsSchema =
-  itemId:
-  reasoningEncryptedContent:
+var openaiResponsesReasoningProviderOptionsSchema = z12.object({
+  itemId: z12.string().nullish(),
+  reasoningEncryptedContent: z12.string().nullish()
 });
 
 // src/responses/map-openai-responses-finish-reason.ts
@@ -2210,7 +2063,7 @@ function prepareResponsesTools({
           strict: strictJsonSchema
         });
         break;
-      case "provider-defined":
+      case "provider-defined": {
         switch (tool.id) {
           case "openai.file_search": {
             const args = fileSearchArgsSchema.parse(tool.args);
@@ -2223,18 +2076,30 @@ function prepareResponsesTools({
             });
             break;
           }
-          case "openai.web_search_preview":
+          case "openai.web_search_preview": {
+            const args = webSearchPreviewArgsSchema.parse(tool.args);
             openaiTools2.push({
               type: "web_search_preview",
-              search_context_size:
-              user_location:
+              search_context_size: args.searchContextSize,
+              user_location: args.userLocation
             });
             break;
-
+          }
+          case "openai.code_interpreter": {
+            const args = codeInterpreterArgsSchema.parse(tool.args);
+            openaiTools2.push({
+              type: "code_interpreter",
+              container: args.container == null ? { type: "auto", file_ids: void 0 } : typeof args.container === "string" ? args.container : { type: "auto", file_ids: args.container.fileIds }
+            });
+            break;
+          }
+          default: {
             toolWarnings.push({ type: "unsupported-tool", tool });
             break;
+          }
         }
         break;
+      }
       default:
         toolWarnings.push({ type: "unsupported-tool", tool });
         break;
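
The `openai.code_interpreter` branch above maps the tool's `container` argument onto the Responses API tool payload. The three input forms and their resulting `container` values, read directly off the ternary (identifiers are illustrative placeholders):

```js
// Derived from the container ternary in the hunk above; ids are illustrative only.
// 1. no container given            -> { type: "code_interpreter", container: { type: "auto", file_ids: undefined } }
// 2. container: "cntr_abc123"      -> { type: "code_interpreter", container: "cntr_abc123" }
// 3. container: { fileIds: [...] } -> { type: "code_interpreter", container: { type: "auto", file_ids: [...] } }
```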
@@ -2252,7 +2117,7 @@ function prepareResponsesTools({
     case "tool":
       return {
         tools: openaiTools2,
-        toolChoice: toolChoice.toolName === "
+        toolChoice: toolChoice.toolName === "code_interpreter" || toolChoice.toolName === "file_search" || toolChoice.toolName === "web_search_preview" ? { type: toolChoice.toolName } : { type: "function", name: toolChoice.toolName },
         toolWarnings
       };
     default: {
@@ -2321,7 +2186,7 @@ var OpenAIResponsesLanguageModel = class {
       systemMessageMode: modelConfig.systemMessageMode
     });
     warnings.push(...messageWarnings);
-    const openaiOptions = await
+    const openaiOptions = await parseProviderOptions5({
       provider: "openai",
       providerOptions,
       schema: openaiResponsesProviderOptionsSchema
@@ -2453,76 +2318,76 @@ var OpenAIResponsesLanguageModel = class {
       rawValue: rawResponse
     } = await postJsonToApi5({
       url,
-      headers:
+      headers: combineHeaders5(this.config.headers(), options.headers),
       body,
       failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler:
-
-          id:
-          created_at:
-          error:
-            code:
-            message:
+      successfulResponseHandler: createJsonResponseHandler5(
+        z13.object({
+          id: z13.string(),
+          created_at: z13.number(),
+          error: z13.object({
+            code: z13.string(),
+            message: z13.string()
           }).nullish(),
-          model:
-          output:
-
-
-              type:
-              role:
-              id:
-              content:
-
-                  type:
-                  text:
-                  annotations:
-
-                      type:
-                      start_index:
-                      end_index:
-                      url:
-                      title:
+          model: z13.string(),
+          output: z13.array(
+            z13.discriminatedUnion("type", [
+              z13.object({
+                type: z13.literal("message"),
+                role: z13.literal("assistant"),
+                id: z13.string(),
+                content: z13.array(
+                  z13.object({
+                    type: z13.literal("output_text"),
+                    text: z13.string(),
+                    annotations: z13.array(
+                      z13.object({
+                        type: z13.literal("url_citation"),
+                        start_index: z13.number(),
+                        end_index: z13.number(),
+                        url: z13.string(),
+                        title: z13.string()
                       })
                     )
                   })
                 )
               }),
-
-              type:
-              call_id:
-              name:
-              arguments:
-              id:
+              z13.object({
+                type: z13.literal("function_call"),
+                call_id: z13.string(),
+                name: z13.string(),
+                arguments: z13.string(),
+                id: z13.string()
              }),
-
-              type:
-              id:
-              status:
+              z13.object({
+                type: z13.literal("web_search_call"),
+                id: z13.string(),
+                status: z13.string().optional()
              }),
-
-              type:
-              id:
-              status:
+              z13.object({
+                type: z13.literal("computer_call"),
+                id: z13.string(),
+                status: z13.string().optional()
              }),
-
-              type:
-              id:
-              status:
+              z13.object({
+                type: z13.literal("file_search_call"),
+                id: z13.string(),
+                status: z13.string().optional()
              }),
-
-              type:
-              id:
-              encrypted_content:
-              summary:
-
-                  type:
-                  text:
+              z13.object({
+                type: z13.literal("reasoning"),
+                id: z13.string(),
+                encrypted_content: z13.string().nullish(),
+                summary: z13.array(
+                  z13.object({
+                    type: z13.literal("summary_text"),
+                    text: z13.string()
                   })
                 )
              })
            ])
          ),
-          incomplete_details:
+          incomplete_details: z13.object({ reason: z13.string() }).nullable(),
          usage: usageSchema2
        })
      ),
@@ -2693,7 +2558,7 @@ var OpenAIResponsesLanguageModel = class {
         path: "/responses",
         modelId: this.modelId
       }),
-      headers:
+      headers: combineHeaders5(this.config.headers(), options.headers),
       body: {
         ...body,
         stream: true
@@ -2969,140 +2834,140 @@ var OpenAIResponsesLanguageModel = class {
     };
   }
 };
-var usageSchema2 =
-  input_tokens:
-  input_tokens_details:
-  output_tokens:
-  output_tokens_details:
+var usageSchema2 = z13.object({
+  input_tokens: z13.number(),
+  input_tokens_details: z13.object({ cached_tokens: z13.number().nullish() }).nullish(),
+  output_tokens: z13.number(),
+  output_tokens_details: z13.object({ reasoning_tokens: z13.number().nullish() }).nullish()
 });
-var textDeltaChunkSchema =
-  type:
-  item_id:
-  delta:
+var textDeltaChunkSchema = z13.object({
+  type: z13.literal("response.output_text.delta"),
+  item_id: z13.string(),
+  delta: z13.string()
 });
-var errorChunkSchema =
-  type:
-  code:
-  message:
-  param:
-  sequence_number:
+var errorChunkSchema = z13.object({
+  type: z13.literal("error"),
+  code: z13.string(),
+  message: z13.string(),
+  param: z13.string().nullish(),
+  sequence_number: z13.number()
 });
-var responseFinishedChunkSchema =
-  type:
-  response:
-    incomplete_details:
+var responseFinishedChunkSchema = z13.object({
+  type: z13.enum(["response.completed", "response.incomplete"]),
+  response: z13.object({
+    incomplete_details: z13.object({ reason: z13.string() }).nullish(),
     usage: usageSchema2
   })
 });
-var responseCreatedChunkSchema =
-  type:
-  response:
-    id:
-    created_at:
-    model:
+var responseCreatedChunkSchema = z13.object({
+  type: z13.literal("response.created"),
+  response: z13.object({
+    id: z13.string(),
+    created_at: z13.number(),
+    model: z13.string()
   })
 });
-var responseOutputItemAddedSchema =
-  type:
-  output_index:
-  item:
-
-      type:
-      id:
+var responseOutputItemAddedSchema = z13.object({
+  type: z13.literal("response.output_item.added"),
+  output_index: z13.number(),
+  item: z13.discriminatedUnion("type", [
+    z13.object({
+      type: z13.literal("message"),
+      id: z13.string()
    }),
-
-      type:
-      id:
-      encrypted_content:
+    z13.object({
+      type: z13.literal("reasoning"),
+      id: z13.string(),
+      encrypted_content: z13.string().nullish()
    }),
-
-      type:
-      id:
-      call_id:
-      name:
-      arguments:
+    z13.object({
+      type: z13.literal("function_call"),
+      id: z13.string(),
+      call_id: z13.string(),
+      name: z13.string(),
+      arguments: z13.string()
    }),
-
-      type:
-      id:
-      status:
+    z13.object({
+      type: z13.literal("web_search_call"),
+      id: z13.string(),
+      status: z13.string()
    }),
-
-      type:
-      id:
-      status:
+    z13.object({
+      type: z13.literal("computer_call"),
+      id: z13.string(),
+      status: z13.string()
    }),
-
-      type:
-      id:
-      status:
+    z13.object({
+      type: z13.literal("file_search_call"),
+      id: z13.string(),
+      status: z13.string()
    })
  ])
 });
-var responseOutputItemDoneSchema =
-  type:
-  output_index:
-  item:
-
-      type:
-      id:
+var responseOutputItemDoneSchema = z13.object({
+  type: z13.literal("response.output_item.done"),
+  output_index: z13.number(),
+  item: z13.discriminatedUnion("type", [
+    z13.object({
+      type: z13.literal("message"),
+      id: z13.string()
    }),
-
-      type:
-      id:
-      encrypted_content:
+    z13.object({
+      type: z13.literal("reasoning"),
+      id: z13.string(),
+      encrypted_content: z13.string().nullish()
    }),
-
-      type:
-      id:
-      call_id:
-      name:
-      arguments:
-      status:
+    z13.object({
+      type: z13.literal("function_call"),
+      id: z13.string(),
+      call_id: z13.string(),
+      name: z13.string(),
+      arguments: z13.string(),
+      status: z13.literal("completed")
    }),
-
-      type:
-      id:
-      status:
+    z13.object({
+      type: z13.literal("web_search_call"),
+      id: z13.string(),
+      status: z13.literal("completed")
    }),
-
-      type:
-      id:
-      status:
+    z13.object({
+      type: z13.literal("computer_call"),
+      id: z13.string(),
+      status: z13.literal("completed")
    }),
-
-      type:
-      id:
-      status:
+    z13.object({
+      type: z13.literal("file_search_call"),
+      id: z13.string(),
+      status: z13.literal("completed")
    })
  ])
 });
-var responseFunctionCallArgumentsDeltaSchema =
-  type:
-  item_id:
-  output_index:
-  delta:
+var responseFunctionCallArgumentsDeltaSchema = z13.object({
+  type: z13.literal("response.function_call_arguments.delta"),
+  item_id: z13.string(),
+  output_index: z13.number(),
+  delta: z13.string()
 });
-var responseAnnotationAddedSchema =
-  type:
-  annotation:
-    type:
-    url:
-    title:
+var responseAnnotationAddedSchema = z13.object({
+  type: z13.literal("response.output_text.annotation.added"),
+  annotation: z13.object({
+    type: z13.literal("url_citation"),
+    url: z13.string(),
+    title: z13.string()
  })
 });
-var responseReasoningSummaryPartAddedSchema =
-  type:
-  item_id:
-  summary_index:
+var responseReasoningSummaryPartAddedSchema = z13.object({
+  type: z13.literal("response.reasoning_summary_part.added"),
+  item_id: z13.string(),
+  summary_index: z13.number()
 });
-var responseReasoningSummaryTextDeltaSchema =
-  type:
-  item_id:
-  summary_index:
-  delta:
+var responseReasoningSummaryTextDeltaSchema = z13.object({
+  type: z13.literal("response.reasoning_summary_text.delta"),
+  item_id: z13.string(),
+  summary_index: z13.number(),
+  delta: z13.string()
 });
-var openaiResponsesChunkSchema =
+var openaiResponsesChunkSchema = z13.union([
   textDeltaChunkSchema,
   responseFinishedChunkSchema,
   responseCreatedChunkSchema,
@@ -3113,7 +2978,7 @@ var openaiResponsesChunkSchema = z14.union([
   responseReasoningSummaryPartAddedSchema,
   responseReasoningSummaryTextDeltaSchema,
   errorChunkSchema,
-
+  z13.object({ type: z13.string() }).loose()
   // fallback for unknown chunks
 ]);
 function isTextDeltaChunk(chunk) {
@@ -3179,32 +3044,32 @@ function supportsFlexProcessing2(modelId) {
 function supportsPriorityProcessing2(modelId) {
   return modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
 }
-var openaiResponsesProviderOptionsSchema =
-  metadata:
-  parallelToolCalls:
-  previousResponseId:
-  store:
-  user:
-  reasoningEffort:
-  strictJsonSchema:
-  instructions:
-  reasoningSummary:
-  serviceTier:
-  include:
-  textVerbosity:
+var openaiResponsesProviderOptionsSchema = z13.object({
+  metadata: z13.any().nullish(),
+  parallelToolCalls: z13.boolean().nullish(),
+  previousResponseId: z13.string().nullish(),
+  store: z13.boolean().nullish(),
+  user: z13.string().nullish(),
+  reasoningEffort: z13.string().nullish(),
+  strictJsonSchema: z13.boolean().nullish(),
+  instructions: z13.string().nullish(),
+  reasoningSummary: z13.string().nullish(),
+  serviceTier: z13.enum(["auto", "flex", "priority"]).nullish(),
+  include: z13.array(z13.enum(["reasoning.encrypted_content", "file_search_call.results"])).nullish(),
+  textVerbosity: z13.enum(["low", "medium", "high"]).nullish()
 });
 
-// src/openai-speech-model.ts
+// src/speech/openai-speech-model.ts
 import {
-  combineHeaders as
+  combineHeaders as combineHeaders6,
   createBinaryResponseHandler,
-  parseProviderOptions as
+  parseProviderOptions as parseProviderOptions6,
   postJsonToApi as postJsonToApi6
 } from "@ai-sdk/provider-utils";
-import { z as
-var OpenAIProviderOptionsSchema =
-  instructions:
-  speed:
+import { z as z14 } from "zod/v4";
+var OpenAIProviderOptionsSchema = z14.object({
+  instructions: z14.string().nullish(),
+  speed: z14.number().min(0.25).max(4).default(1).nullish()
 });
 var OpenAISpeechModel = class {
   constructor(modelId, config) {
@@ -3225,7 +3090,7 @@ var OpenAISpeechModel = class {
     providerOptions
   }) {
     const warnings = [];
-    const openAIOptions = await
+    const openAIOptions = await parseProviderOptions6({
       provider: "openai",
       providerOptions,
       schema: OpenAIProviderOptionsSchema
@@ -3283,7 +3148,7 @@ var OpenAISpeechModel = class {
         path: "/audio/speech",
         modelId: this.modelId
       }),
-      headers:
+      headers: combineHeaders6(this.config.headers(), options.headers),
       body: requestBody,
       failedResponseHandler: openaiFailedResponseHandler,
       successfulResponseHandler: createBinaryResponseHandler(),
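
For the Responses API model, `openaiResponsesProviderOptionsSchema` above lists the accepted provider options. A minimal sketch, assuming standard AI SDK `generateText` usage with `openai.responses(...)`; the model id and option values are illustrative:

```js
// Sketch only: option names come from openaiResponsesProviderOptionsSchema in this diff;
// the call shape is standard AI SDK usage and the model id is a placeholder.
import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

await generateText({
  model: openai.responses("o4-mini"),
  prompt: "Explain this change in two sentences.",
  providerOptions: {
    openai: {
      store: false,
      reasoningSummary: "auto",
      include: ["reasoning.encrypted_content"],
      serviceTier: "flex"
    }
  }
});
```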
@@ -3306,6 +3171,201 @@ var OpenAISpeechModel = class {
   }
 };
 
+// src/transcription/openai-transcription-model.ts
+import {
+  combineHeaders as combineHeaders7,
+  convertBase64ToUint8Array,
+  createJsonResponseHandler as createJsonResponseHandler6,
+  parseProviderOptions as parseProviderOptions7,
+  postFormDataToApi
+} from "@ai-sdk/provider-utils";
+import { z as z16 } from "zod/v4";
+
+// src/transcription/openai-transcription-options.ts
+import { z as z15 } from "zod/v4";
+var openAITranscriptionProviderOptions = z15.object({
+  /**
+   * Additional information to include in the transcription response.
+   */
+  include: z15.array(z15.string()).optional(),
+  /**
+   * The language of the input audio in ISO-639-1 format.
+   */
+  language: z15.string().optional(),
+  /**
+   * An optional text to guide the model's style or continue a previous audio segment.
+   */
+  prompt: z15.string().optional(),
+  /**
+   * The sampling temperature, between 0 and 1.
+   * @default 0
+   */
+  temperature: z15.number().min(0).max(1).default(0).optional(),
+  /**
+   * The timestamp granularities to populate for this transcription.
+   * @default ['segment']
+   */
+  timestampGranularities: z15.array(z15.enum(["word", "segment"])).default(["segment"]).optional()
+});
+
+// src/transcription/openai-transcription-model.ts
+var languageMap = {
+  afrikaans: "af",
+  arabic: "ar",
+  armenian: "hy",
+  azerbaijani: "az",
+  belarusian: "be",
+  bosnian: "bs",
+  bulgarian: "bg",
+  catalan: "ca",
+  chinese: "zh",
+  croatian: "hr",
+  czech: "cs",
+  danish: "da",
+  dutch: "nl",
+  english: "en",
+  estonian: "et",
+  finnish: "fi",
+  french: "fr",
+  galician: "gl",
+  german: "de",
+  greek: "el",
+  hebrew: "he",
+  hindi: "hi",
+  hungarian: "hu",
+  icelandic: "is",
+  indonesian: "id",
+  italian: "it",
+  japanese: "ja",
+  kannada: "kn",
+  kazakh: "kk",
+  korean: "ko",
+  latvian: "lv",
+  lithuanian: "lt",
+  macedonian: "mk",
+  malay: "ms",
+  marathi: "mr",
+  maori: "mi",
+  nepali: "ne",
+  norwegian: "no",
+  persian: "fa",
+  polish: "pl",
+  portuguese: "pt",
+  romanian: "ro",
+  russian: "ru",
+  serbian: "sr",
+  slovak: "sk",
+  slovenian: "sl",
+  spanish: "es",
+  swahili: "sw",
+  swedish: "sv",
+  tagalog: "tl",
+  tamil: "ta",
+  thai: "th",
+  turkish: "tr",
+  ukrainian: "uk",
+  urdu: "ur",
+  vietnamese: "vi",
+  welsh: "cy"
+};
+var OpenAITranscriptionModel = class {
+  constructor(modelId, config) {
+    this.modelId = modelId;
+    this.config = config;
+    this.specificationVersion = "v2";
+  }
+  get provider() {
+    return this.config.provider;
+  }
+  async getArgs({
+    audio,
+    mediaType,
+    providerOptions
+  }) {
+    const warnings = [];
+    const openAIOptions = await parseProviderOptions7({
+      provider: "openai",
+      providerOptions,
+      schema: openAITranscriptionProviderOptions
+    });
+    const formData = new FormData();
+    const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([convertBase64ToUint8Array(audio)]);
+    formData.append("model", this.modelId);
+    formData.append("file", new File([blob], "audio", { type: mediaType }));
+    if (openAIOptions) {
+      const transcriptionModelOptions = {
+        include: openAIOptions.include,
+        language: openAIOptions.language,
+        prompt: openAIOptions.prompt,
+        temperature: openAIOptions.temperature,
+        timestamp_granularities: openAIOptions.timestampGranularities
+      };
+      for (const [key, value] of Object.entries(transcriptionModelOptions)) {
+        if (value != null) {
+          formData.append(key, String(value));
+        }
+      }
+    }
+    return {
+      formData,
+      warnings
+    };
+  }
+  async doGenerate(options) {
+    var _a, _b, _c, _d, _e, _f;
+    const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
+    const { formData, warnings } = await this.getArgs(options);
+    const {
+      value: response,
+      responseHeaders,
+      rawValue: rawResponse
+    } = await postFormDataToApi({
+      url: this.config.url({
+        path: "/audio/transcriptions",
+        modelId: this.modelId
+      }),
+      headers: combineHeaders7(this.config.headers(), options.headers),
+      formData,
+      failedResponseHandler: openaiFailedResponseHandler,
+      successfulResponseHandler: createJsonResponseHandler6(
+        openaiTranscriptionResponseSchema
+      ),
+      abortSignal: options.abortSignal,
+      fetch: this.config.fetch
+    });
+    const language = response.language != null && response.language in languageMap ? languageMap[response.language] : void 0;
+    return {
+      text: response.text,
+      segments: (_e = (_d = response.words) == null ? void 0 : _d.map((word) => ({
+        text: word.word,
+        startSecond: word.start,
+        endSecond: word.end
+      }))) != null ? _e : [],
+      language,
+      durationInSeconds: (_f = response.duration) != null ? _f : void 0,
+      warnings,
+      response: {
+        timestamp: currentDate,
+        modelId: this.modelId,
+        headers: responseHeaders,
+        body: rawResponse
+      }
+    };
+  }
+};
+var openaiTranscriptionResponseSchema = z16.object({
+  text: z16.string(),
+  language: z16.string().nullish(),
+  duration: z16.number().nullish(),
+  words: z16.array(
+    z16.object({
+      word: z16.string(),
+      start: z16.number(),
+      end: z16.number()
+    })
+  ).nullish()
+});
+
 // src/openai-provider.ts
 function createOpenAI(options = {}) {
   var _a, _b;