@ai-sdk/openai 2.0.9 → 2.0.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +25 -0
- package/dist/index.d.mts +40 -56
- package/dist/index.d.ts +40 -56
- package/dist/index.js +605 -448
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +616 -459
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +15 -3
- package/dist/internal/index.d.ts +15 -3
- package/dist/internal/index.js +417 -260
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +402 -245
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +2 -2
package/dist/index.mjs
CHANGED
@@ -4,7 +4,7 @@ import {
   withoutTrailingSlash
 } from "@ai-sdk/provider-utils";
 
-// src/openai-chat-language-model.ts
+// src/chat/openai-chat-language-model.ts
 import {
   InvalidResponseDataError
 } from "@ai-sdk/provider";
@@ -19,7 +19,26 @@ import {
 } from "@ai-sdk/provider-utils";
 import { z as z5 } from "zod/v4";
 
-// src/
+// src/openai-error.ts
+import { z } from "zod/v4";
+import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
+var openaiErrorDataSchema = z.object({
+  error: z.object({
+    message: z.string(),
+    // The additional information below is handled loosely to support
+    // OpenAI-compatible providers that have slightly different error
+    // responses:
+    type: z.string().nullish(),
+    param: z.any().nullish(),
+    code: z.union([z.string(), z.number()]).nullish()
+  })
+});
+var openaiFailedResponseHandler = createJsonErrorResponseHandler({
+  errorSchema: openaiErrorDataSchema,
+  errorToMessage: (data) => data.error.message
+});
+
+// src/chat/convert-to-openai-chat-messages.ts
 import {
   UnsupportedFunctionalityError
 } from "@ai-sdk/provider";
@@ -199,7 +218,7 @@ function convertToOpenAIChatMessages({
   return { messages, warnings };
 }
 
-// src/get-response-metadata.ts
+// src/chat/get-response-metadata.ts
 function getResponseMetadata({
   id,
   model,
@@ -212,7 +231,7 @@ function getResponseMetadata({
   };
 }
 
-// src/map-openai-finish-reason.ts
+// src/chat/map-openai-finish-reason.ts
 function mapOpenAIFinishReason(finishReason) {
   switch (finishReason) {
     case "stop":
@@ -229,16 +248,16 @@ function mapOpenAIFinishReason(finishReason) {
   }
 }
 
-// src/openai-chat-options.ts
-import { z } from "zod/v4";
-var openaiProviderOptions =
+// src/chat/openai-chat-options.ts
+import { z as z2 } from "zod/v4";
+var openaiProviderOptions = z2.object({
   /**
    * Modify the likelihood of specified tokens appearing in the completion.
    *
    * Accepts a JSON object that maps tokens (specified by their token ID in
    * the GPT tokenizer) to an associated bias value from -100 to 100.
    */
-  logitBias:
+  logitBias: z2.record(z2.coerce.number(), z2.number()).optional(),
   /**
    * Return the log probabilities of the tokens.
    *
@@ -248,42 +267,42 @@ var openaiProviderOptions = z.object({
    * Setting to a number will return the log probabilities of the top n
    * tokens that were generated.
    */
-  logprobs:
+  logprobs: z2.union([z2.boolean(), z2.number()]).optional(),
   /**
    * Whether to enable parallel function calling during tool use. Default to true.
    */
-  parallelToolCalls:
+  parallelToolCalls: z2.boolean().optional(),
   /**
    * A unique identifier representing your end-user, which can help OpenAI to
    * monitor and detect abuse.
    */
-  user:
+  user: z2.string().optional(),
   /**
    * Reasoning effort for reasoning models. Defaults to `medium`.
    */
-  reasoningEffort:
+  reasoningEffort: z2.enum(["minimal", "low", "medium", "high"]).optional(),
   /**
    * Maximum number of completion tokens to generate. Useful for reasoning models.
    */
-  maxCompletionTokens:
+  maxCompletionTokens: z2.number().optional(),
   /**
    * Whether to enable persistence in responses API.
    */
-  store:
+  store: z2.boolean().optional(),
   /**
    * Metadata to associate with the request.
    */
-  metadata:
+  metadata: z2.record(z2.string().max(64), z2.string().max(512)).optional(),
   /**
    * Parameters for prediction mode.
    */
-  prediction:
+  prediction: z2.record(z2.string(), z2.any()).optional(),
   /**
    * Whether to use structured outputs.
    *
    * @default true
    */
-  structuredOutputs:
+  structuredOutputs: z2.boolean().optional(),
   /**
    * Service tier for the request.
    * - 'auto': Default service tier
@@ -292,40 +311,34 @@ var openaiProviderOptions = z.object({
    *
    * @default 'auto'
    */
-  serviceTier:
+  serviceTier: z2.enum(["auto", "flex", "priority"]).optional(),
   /**
    * Whether to use strict JSON schema validation.
    *
    * @default false
    */
-  strictJsonSchema:
+  strictJsonSchema: z2.boolean().optional(),
   /**
    * Controls the verbosity of the model's responses.
    * Lower values will result in more concise responses, while higher values will result in more verbose responses.
    */
-  textVerbosity:
-
-
-
-
-
-
-
-
-
-
-
-
-
-  code: z2.union([z2.string(), z2.number()]).nullish()
-  })
-});
-var openaiFailedResponseHandler = createJsonErrorResponseHandler({
-  errorSchema: openaiErrorDataSchema,
-  errorToMessage: (data) => data.error.message
+  textVerbosity: z2.enum(["low", "medium", "high"]).optional(),
+  /**
+   * A cache key for prompt caching. Allows manual control over prompt caching behavior.
+   * Useful for improving cache hit rates and working around automatic caching issues.
+   */
+  promptCacheKey: z2.string().optional(),
+  /**
+   * A stable identifier used to help detect users of your application
+   * that may be violating OpenAI's usage policies. The IDs should be a
+   * string that uniquely identifies each user. We recommend hashing their
+   * username or email address, in order to avoid sending us any identifying
+   * information.
+   */
+  safetyIdentifier: z2.string().optional()
 });
 
-// src/openai-prepare-tools.ts
+// src/chat/openai-chat-prepare-tools.ts
 import {
   UnsupportedFunctionalityError as UnsupportedFunctionalityError2
 } from "@ai-sdk/provider";
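The chat provider options above gain a "minimal" reasoning-effort level plus promptCacheKey and safetyIdentifier. A rough usage sketch (not part of the diff; model id and values are illustrative assumptions, generateText and providerOptions are the standard AI SDK entry points):

import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

// Sketch only: field names come from openaiProviderOptions above; values are made up.
const { text } = await generateText({
  model: openai("gpt-5-mini"),
  prompt: "Summarize the changelog.",
  providerOptions: {
    openai: {
      reasoningEffort: "minimal", // new enum value in this release
      textVerbosity: "low",
      promptCacheKey: "changelog-summaries", // manual prompt-cache control
      safetyIdentifier: "user-1234-hashed" // stable, hashed end-user id
    }
  }
});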
@@ -416,8 +429,8 @@ var webSearchPreview = createProviderDefinedToolFactory2({
   inputSchema: z4.object({})
 });
 
-// src/openai-prepare-tools.ts
-function
+// src/chat/openai-chat-prepare-tools.ts
+function prepareChatTools({
   tools,
   toolChoice,
   structuredOutputs,
@@ -503,7 +516,7 @@ function prepareTools({
   }
 }
 
-// src/openai-chat-language-model.ts
+// src/chat/openai-chat-language-model.ts
 var OpenAIChatLanguageModel = class {
   constructor(modelId, config) {
     this.specificationVersion = "v2";
@@ -588,13 +601,15 @@ var OpenAIChatLanguageModel = class {
       seed,
       verbosity: openaiOptions.textVerbosity,
       // openai specific settings:
-      // TODO
+      // TODO AI SDK 6: remove, we auto-map maxOutputTokens now
       max_completion_tokens: openaiOptions.maxCompletionTokens,
       store: openaiOptions.store,
       metadata: openaiOptions.metadata,
       prediction: openaiOptions.prediction,
       reasoning_effort: openaiOptions.reasoningEffort,
       service_tier: openaiOptions.serviceTier,
+      prompt_cache_key: openaiOptions.promptCacheKey,
+      safety_identifier: openaiOptions.safetyIdentifier,
       // messages:
       messages
     };
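With those options set, the hunk above simply maps the camelCase provider options onto OpenAI's snake_case request fields. A rough sketch of the resulting chat request body (illustrative values, not generated output):

// Approximate shape of the outgoing chat request body:
const body = {
  model: "gpt-5-mini",
  reasoning_effort: "minimal",
  service_tier: "auto",
  prompt_cache_key: "changelog-summaries",
  safety_identifier: "user-1234-hashed",
  messages: [{ role: "user", content: "Summarize the changelog." }]
};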
@@ -688,7 +703,7 @@ var OpenAIChatLanguageModel = class {
       tools: openaiTools2,
       toolChoice: openaiToolChoice,
       toolWarnings
-    } =
+    } = prepareChatTools({
       tools,
       toolChoice,
       structuredOutputs,
@@ -1158,7 +1173,7 @@ var reasoningModels = {
   }
 };
 
-// src/openai-completion-language-model.ts
+// src/completion/openai-completion-language-model.ts
 import {
   combineHeaders as combineHeaders2,
   createEventSourceResponseHandler as createEventSourceResponseHandler2,
@@ -1168,7 +1183,7 @@ import {
 } from "@ai-sdk/provider-utils";
 import { z as z7 } from "zod/v4";
 
-// src/convert-to-openai-completion-prompt.ts
+// src/completion/convert-to-openai-completion-prompt.ts
 import {
   InvalidPromptError,
   UnsupportedFunctionalityError as UnsupportedFunctionalityError3
@@ -1246,7 +1261,37 @@ ${user}:`]
   };
 }
 
-// src/
+// src/completion/get-response-metadata.ts
+function getResponseMetadata2({
+  id,
+  model,
+  created
+}) {
+  return {
+    id: id != null ? id : void 0,
+    modelId: model != null ? model : void 0,
+    timestamp: created != null ? new Date(created * 1e3) : void 0
+  };
+}
+
+// src/completion/map-openai-finish-reason.ts
+function mapOpenAIFinishReason2(finishReason) {
+  switch (finishReason) {
+    case "stop":
+      return "stop";
+    case "length":
+      return "length";
+    case "content_filter":
+      return "content-filter";
+    case "function_call":
+    case "tool_calls":
+      return "tool-calls";
+    default:
+      return "unknown";
+  }
+}
+
+// src/completion/openai-completion-options.ts
 import { z as z6 } from "zod/v4";
 var openaiCompletionProviderOptions = z6.object({
   /**
@@ -1289,7 +1334,7 @@ var openaiCompletionProviderOptions = z6.object({
   logprobs: z6.union([z6.boolean(), z6.number()]).optional()
 });
 
-// src/openai-completion-language-model.ts
+// src/completion/openai-completion-language-model.ts
 var OpenAICompletionLanguageModel = class {
   constructor(modelId, config) {
     this.specificationVersion = "v2";
@@ -1409,10 +1454,10 @@ var OpenAICompletionLanguageModel = class {
         outputTokens: (_b = response.usage) == null ? void 0 : _b.completion_tokens,
         totalTokens: (_c = response.usage) == null ? void 0 : _c.total_tokens
       },
-      finishReason:
+      finishReason: mapOpenAIFinishReason2(choice.finish_reason),
       request: { body: args },
       response: {
-        ...
+        ...getResponseMetadata2(response),
         headers: responseHeaders,
         body: rawResponse
       },
@@ -1476,7 +1521,7 @@ var OpenAICompletionLanguageModel = class {
           isFirstChunk = false;
           controller.enqueue({
             type: "response-metadata",
-            ...
+            ...getResponseMetadata2(value)
           });
           controller.enqueue({ type: "text-start", id: "0" });
         }
@@ -1487,7 +1532,7 @@ var OpenAICompletionLanguageModel = class {
         }
         const choice = value.choices[0];
         if ((choice == null ? void 0 : choice.finish_reason) != null) {
-          finishReason =
+          finishReason = mapOpenAIFinishReason2(choice.finish_reason);
         }
         if ((choice == null ? void 0 : choice.logprobs) != null) {
          providerMetadata.openai.logprobs = choice.logprobs;
@@ -1562,7 +1607,7 @@ var openaiCompletionChunkSchema = z7.union([
   openaiErrorDataSchema
 ]);
 
-// src/openai-embedding-model.ts
+// src/embedding/openai-embedding-model.ts
 import {
   TooManyEmbeddingValuesForCallError
 } from "@ai-sdk/provider";
@@ -1574,7 +1619,7 @@ import {
 } from "@ai-sdk/provider-utils";
 import { z as z9 } from "zod/v4";
 
-// src/openai-embedding-options.ts
+// src/embedding/openai-embedding-options.ts
 import { z as z8 } from "zod/v4";
 var openaiEmbeddingProviderOptions = z8.object({
   /**
@@ -1589,7 +1634,7 @@ var openaiEmbeddingProviderOptions = z8.object({
   user: z8.string().optional()
 });
 
-// src/openai-embedding-model.ts
+// src/embedding/openai-embedding-model.ts
 var OpenAIEmbeddingModel = class {
   constructor(modelId, config) {
     this.specificationVersion = "v2";
@@ -1657,7 +1702,7 @@ var openaiTextEmbeddingResponseSchema = z9.object({
   usage: z9.object({ prompt_tokens: z9.number() }).nullish()
 });
 
-// src/openai-image-model.ts
+// src/image/openai-image-model.ts
 import {
   combineHeaders as combineHeaders4,
   createJsonResponseHandler as createJsonResponseHandler4,
@@ -1665,7 +1710,7 @@ import {
 } from "@ai-sdk/provider-utils";
 import { z as z10 } from "zod/v4";
 
-// src/openai-image-
+// src/image/openai-image-options.ts
 var modelMaxImagesPerCall = {
   "dall-e-3": 1,
   "dall-e-2": 10,
@@ -1673,7 +1718,7 @@ var modelMaxImagesPerCall = {
 };
 var hasDefaultResponseFormat = /* @__PURE__ */ new Set(["gpt-image-1"]);
 
-// src/openai-image-model.ts
+// src/image/openai-image-model.ts
 var OpenAIImageModel = class {
   constructor(modelId, config) {
     this.modelId = modelId;
@@ -1757,231 +1802,59 @@ var openaiImageResponseSchema = z10.object({
   )
 });
 
+// src/tool/code-interpreter.ts
+import { createProviderDefinedToolFactory as createProviderDefinedToolFactory3 } from "@ai-sdk/provider-utils";
+import { z as z11 } from "zod/v4";
+var codeInterpreterArgsSchema = z11.object({
+  container: z11.union([
+    z11.string(),
+    z11.object({
+      fileIds: z11.array(z11.string()).optional()
+    })
+  ]).optional()
+});
+var codeInterpreter = createProviderDefinedToolFactory3({
+  id: "openai.code_interpreter",
+  name: "code_interpreter",
+  inputSchema: z11.object({})
+});
+
 // src/openai-tools.ts
 var openaiTools = {
+  codeInterpreter,
   fileSearch,
   webSearchPreview
 };
 
-// src/openai-transcription-model.ts
-import {
-  combineHeaders as combineHeaders5,
-  convertBase64ToUint8Array,
-  createJsonResponseHandler as createJsonResponseHandler5,
-  parseProviderOptions as parseProviderOptions4,
-  postFormDataToApi
-} from "@ai-sdk/provider-utils";
-import { z as z12 } from "zod/v4";
-
-// src/openai-transcription-options.ts
-import { z as z11 } from "zod/v4";
-var openAITranscriptionProviderOptions = z11.object({
-  /**
-   * Additional information to include in the transcription response.
-   */
-  include: z11.array(z11.string()).optional(),
-  /**
-   * The language of the input audio in ISO-639-1 format.
-   */
-  language: z11.string().optional(),
-  /**
-   * An optional text to guide the model's style or continue a previous audio segment.
-   */
-  prompt: z11.string().optional(),
-  /**
-   * The sampling temperature, between 0 and 1.
-   * @default 0
-   */
-  temperature: z11.number().min(0).max(1).default(0).optional(),
-  /**
-   * The timestamp granularities to populate for this transcription.
-   * @default ['segment']
-   */
-  timestampGranularities: z11.array(z11.enum(["word", "segment"])).default(["segment"]).optional()
-});
-
-// src/openai-transcription-model.ts
-var languageMap = {
-  afrikaans: "af",
-  arabic: "ar",
-  armenian: "hy",
-  azerbaijani: "az",
-  belarusian: "be",
-  bosnian: "bs",
-  bulgarian: "bg",
-  catalan: "ca",
-  chinese: "zh",
-  croatian: "hr",
-  czech: "cs",
-  danish: "da",
-  dutch: "nl",
-  english: "en",
-  estonian: "et",
-  finnish: "fi",
-  french: "fr",
-  galician: "gl",
-  german: "de",
-  greek: "el",
-  hebrew: "he",
-  hindi: "hi",
-  hungarian: "hu",
-  icelandic: "is",
-  indonesian: "id",
-  italian: "it",
-  japanese: "ja",
-  kannada: "kn",
-  kazakh: "kk",
-  korean: "ko",
-  latvian: "lv",
-  lithuanian: "lt",
-  macedonian: "mk",
-  malay: "ms",
-  marathi: "mr",
-  maori: "mi",
-  nepali: "ne",
-  norwegian: "no",
-  persian: "fa",
-  polish: "pl",
-  portuguese: "pt",
-  romanian: "ro",
-  russian: "ru",
-  serbian: "sr",
-  slovak: "sk",
-  slovenian: "sl",
-  spanish: "es",
-  swahili: "sw",
-  swedish: "sv",
-  tagalog: "tl",
-  tamil: "ta",
-  thai: "th",
-  turkish: "tr",
-  ukrainian: "uk",
-  urdu: "ur",
-  vietnamese: "vi",
-  welsh: "cy"
-};
-var OpenAITranscriptionModel = class {
-  constructor(modelId, config) {
-    this.modelId = modelId;
-    this.config = config;
-    this.specificationVersion = "v2";
-  }
-  get provider() {
-    return this.config.provider;
-  }
-  async getArgs({
-    audio,
-    mediaType,
-    providerOptions
-  }) {
-    const warnings = [];
-    const openAIOptions = await parseProviderOptions4({
-      provider: "openai",
-      providerOptions,
-      schema: openAITranscriptionProviderOptions
-    });
-    const formData = new FormData();
-    const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([convertBase64ToUint8Array(audio)]);
-    formData.append("model", this.modelId);
-    formData.append("file", new File([blob], "audio", { type: mediaType }));
-    if (openAIOptions) {
-      const transcriptionModelOptions = {
-        include: openAIOptions.include,
-        language: openAIOptions.language,
-        prompt: openAIOptions.prompt,
-        temperature: openAIOptions.temperature,
-        timestamp_granularities: openAIOptions.timestampGranularities
-      };
-      for (const [key, value] of Object.entries(transcriptionModelOptions)) {
-        if (value != null) {
-          formData.append(key, String(value));
-        }
-      }
-    }
-    return {
-      formData,
-      warnings
-    };
-  }
-  async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f;
-    const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
-    const { formData, warnings } = await this.getArgs(options);
-    const {
-      value: response,
-      responseHeaders,
-      rawValue: rawResponse
-    } = await postFormDataToApi({
-      url: this.config.url({
-        path: "/audio/transcriptions",
-        modelId: this.modelId
-      }),
-      headers: combineHeaders5(this.config.headers(), options.headers),
-      formData,
-      failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: createJsonResponseHandler5(
-        openaiTranscriptionResponseSchema
-      ),
-      abortSignal: options.abortSignal,
-      fetch: this.config.fetch
-    });
-    const language = response.language != null && response.language in languageMap ? languageMap[response.language] : void 0;
-    return {
-      text: response.text,
-      segments: (_e = (_d = response.words) == null ? void 0 : _d.map((word) => ({
-        text: word.word,
-        startSecond: word.start,
-        endSecond: word.end
-      }))) != null ? _e : [],
-      language,
-      durationInSeconds: (_f = response.duration) != null ? _f : void 0,
-      warnings,
-      response: {
-        timestamp: currentDate,
-        modelId: this.modelId,
-        headers: responseHeaders,
-        body: rawResponse
-      }
-    };
-  }
-};
-var openaiTranscriptionResponseSchema = z12.object({
-  text: z12.string(),
-  language: z12.string().nullish(),
-  duration: z12.number().nullish(),
-  words: z12.array(
-    z12.object({
-      word: z12.string(),
-      start: z12.number(),
-      end: z12.number()
-    })
-  ).nullish()
-});
-
 // src/responses/openai-responses-language-model.ts
 import {
   APICallError
 } from "@ai-sdk/provider";
 import {
-  combineHeaders as
+  combineHeaders as combineHeaders5,
   createEventSourceResponseHandler as createEventSourceResponseHandler3,
-  createJsonResponseHandler as
+  createJsonResponseHandler as createJsonResponseHandler5,
   generateId as generateId2,
-  parseProviderOptions as
+  parseProviderOptions as parseProviderOptions5,
   postJsonToApi as postJsonToApi5
 } from "@ai-sdk/provider-utils";
-import { z as
+import { z as z13 } from "zod/v4";
 
 // src/responses/convert-to-openai-responses-messages.ts
 import {
   UnsupportedFunctionalityError as UnsupportedFunctionalityError4
 } from "@ai-sdk/provider";
-import { parseProviderOptions as
-import { z as
+import { parseProviderOptions as parseProviderOptions4 } from "@ai-sdk/provider-utils";
+import { z as z12 } from "zod/v4";
 import { convertToBase64 as convertToBase642 } from "@ai-sdk/provider-utils";
+function isFileId(data, prefixes) {
+  if (!prefixes) return false;
+  return prefixes.some((prefix) => data.startsWith(prefix));
+}
 async function convertToOpenAIResponsesMessages({
   prompt,
-  systemMessageMode
+  systemMessageMode,
+  fileIdPrefixes
 }) {
   var _a, _b, _c, _d, _e, _f;
   const messages = [];
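The new codeInterpreter factory registered above sits alongside fileSearch and webSearchPreview. A hedged sketch of using it with the Responses API model, assuming the factories are surfaced as openai.tools.* (prompt and container are illustrative):

import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

const result = await generateText({
  model: openai.responses("gpt-5"),
  prompt: "Compute 2 ** 32 and show the code you ran.",
  tools: {
    // Provider-executed tool; per codeInterpreterArgsSchema, `container` may be
    // a container id string or { fileIds: [...] }, and defaults to { type: "auto" }.
    code_interpreter: openai.tools.codeInterpreter({})
  }
});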
@@ -2028,7 +1901,7 @@ async function convertToOpenAIResponsesMessages({
           const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
           return {
             type: "input_image",
-            ...part.data instanceof URL ? { image_url: part.data.toString() } : typeof part.data === "string" && part.data
+            ...part.data instanceof URL ? { image_url: part.data.toString() } : typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
               image_url: `data:${mediaType};base64,${convertToBase642(part.data)}`
             },
             detail: (_b2 = (_a2 = part.providerOptions) == null ? void 0 : _a2.openai) == null ? void 0 : _b2.imageDetail
@@ -2041,7 +1914,7 @@ async function convertToOpenAIResponsesMessages({
           }
           return {
             type: "input_file",
-            ...typeof part.data === "string" && part.data
+            ...typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
               filename: (_c2 = part.filename) != null ? _c2 : `part-${index}.pdf`,
               file_data: `data:application/pdf;base64,${convertToBase642(part.data)}`
             }
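Together with fileIdPrefixes (the provider passes ["file-"], see the final hunk of this diff), the two conversions above now forward string parts that look like uploaded-file IDs as file_id references instead of inlining base64 data. A sketch with a placeholder ID:

import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

const { text } = await generateText({
  model: openai.responses("gpt-5-mini"),
  messages: [
    {
      role: "user",
      content: [
        { type: "text", text: "What does this document say?" },
        // "file-abc123" is a placeholder for an OpenAI Files API id; because it
        // starts with "file-", it is sent as { file_id: ... }.
        { type: "file", data: "file-abc123", mediaType: "application/pdf" }
      ]
    }
  ]
});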
@@ -2090,7 +1963,7 @@ async function convertToOpenAIResponsesMessages({
           break;
         }
         case "reasoning": {
-          const providerOptions = await
+          const providerOptions = await parseProviderOptions4({
             provider: "openai",
             providerOptions: part.providerOptions,
             schema: openaiResponsesReasoningProviderOptionsSchema
@@ -2161,9 +2034,9 @@ async function convertToOpenAIResponsesMessages({
   }
   return { messages, warnings };
 }
-var openaiResponsesReasoningProviderOptionsSchema =
-  itemId:
-  reasoningEncryptedContent:
+var openaiResponsesReasoningProviderOptionsSchema = z12.object({
+  itemId: z12.string().nullish(),
+  reasoningEncryptedContent: z12.string().nullish()
 });
 
 // src/responses/map-openai-responses-finish-reason.ts
@@ -2210,7 +2083,7 @@ function prepareResponsesTools({
         strict: strictJsonSchema
       });
       break;
-    case "provider-defined":
+    case "provider-defined": {
       switch (tool.id) {
         case "openai.file_search": {
           const args = fileSearchArgsSchema.parse(tool.args);
@@ -2223,18 +2096,30 @@ function prepareResponsesTools({
           });
           break;
         }
-        case "openai.web_search_preview":
+        case "openai.web_search_preview": {
+          const args = webSearchPreviewArgsSchema.parse(tool.args);
           openaiTools2.push({
             type: "web_search_preview",
-            search_context_size:
-            user_location:
+            search_context_size: args.searchContextSize,
+            user_location: args.userLocation
           });
           break;
-
+        }
+        case "openai.code_interpreter": {
+          const args = codeInterpreterArgsSchema.parse(tool.args);
+          openaiTools2.push({
+            type: "code_interpreter",
+            container: args.container == null ? { type: "auto", file_ids: void 0 } : typeof args.container === "string" ? args.container : { type: "auto", file_ids: args.container.fileIds }
+          });
+          break;
+        }
+        default: {
           toolWarnings.push({ type: "unsupported-tool", tool });
           break;
+        }
       }
       break;
+    }
     default:
       toolWarnings.push({ type: "unsupported-tool", tool });
       break;
@@ -2252,7 +2137,7 @@ function prepareResponsesTools({
     case "tool":
       return {
         tools: openaiTools2,
-        toolChoice: toolChoice.toolName === "
+        toolChoice: toolChoice.toolName === "code_interpreter" || toolChoice.toolName === "file_search" || toolChoice.toolName === "web_search_preview" ? { type: toolChoice.toolName } : { type: "function", name: toolChoice.toolName },
         toolWarnings
       };
     default: {
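Because the toolChoice mapping above now recognizes all three provider-executed tools, a forced tool choice can name them directly. Sketch (the vectorStoreIds argument name is an assumption, not shown in this diff):

import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

const result = await generateText({
  model: openai.responses("gpt-5"),
  prompt: "Find the refund policy in the knowledge base.",
  tools: {
    file_search: openai.tools.fileSearch({ vectorStoreIds: ["vs_123"] })
  },
  // Emits { type: "file_search" } in the request, per the mapping above.
  toolChoice: { type: "tool", toolName: "file_search" }
});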
@@ -2318,10 +2203,11 @@ var OpenAIResponsesLanguageModel = class {
     }
     const { messages, warnings: messageWarnings } = await convertToOpenAIResponsesMessages({
       prompt,
-      systemMessageMode: modelConfig.systemMessageMode
+      systemMessageMode: modelConfig.systemMessageMode,
+      fileIdPrefixes: this.config.fileIdPrefixes
     });
     warnings.push(...messageWarnings);
-    const openaiOptions = await
+    const openaiOptions = await parseProviderOptions5({
       provider: "openai",
       providerOptions,
       schema: openaiResponsesProviderOptionsSchema
@@ -2358,6 +2244,8 @@ var OpenAIResponsesLanguageModel = class {
       instructions: openaiOptions == null ? void 0 : openaiOptions.instructions,
       service_tier: openaiOptions == null ? void 0 : openaiOptions.serviceTier,
       include: openaiOptions == null ? void 0 : openaiOptions.include,
+      prompt_cache_key: openaiOptions == null ? void 0 : openaiOptions.promptCacheKey,
+      safety_identifier: openaiOptions == null ? void 0 : openaiOptions.safetyIdentifier,
       // model-specific settings:
       ...modelConfig.isReasoningModel && ((openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null || (openaiOptions == null ? void 0 : openaiOptions.reasoningSummary) != null) && {
         reasoning: {
@@ -2453,76 +2341,87 @@ var OpenAIResponsesLanguageModel = class {
       rawValue: rawResponse
     } = await postJsonToApi5({
       url,
-      headers:
+      headers: combineHeaders5(this.config.headers(), options.headers),
       body,
       failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler:
-
-        id:
-        created_at:
-        error:
-          code:
-          message:
+      successfulResponseHandler: createJsonResponseHandler5(
+        z13.object({
+          id: z13.string(),
+          created_at: z13.number(),
+          error: z13.object({
+            code: z13.string(),
+            message: z13.string()
           }).nullish(),
-        model:
-        output:
-
-
-          type:
-          role:
-          id:
-          content:
-
-            type:
-            text:
-            annotations:
-
-              type:
-              start_index:
-              end_index:
-              url:
-              title:
+          model: z13.string(),
+          output: z13.array(
+            z13.discriminatedUnion("type", [
+              z13.object({
+                type: z13.literal("message"),
+                role: z13.literal("assistant"),
+                id: z13.string(),
+                content: z13.array(
+                  z13.object({
+                    type: z13.literal("output_text"),
+                    text: z13.string(),
+                    annotations: z13.array(
+                      z13.object({
+                        type: z13.literal("url_citation"),
+                        start_index: z13.number(),
+                        end_index: z13.number(),
+                        url: z13.string(),
+                        title: z13.string()
                       })
                     )
                   })
                 )
              }),
-
-          type:
-          call_id:
-          name:
-          arguments:
-          id:
+              z13.object({
+                type: z13.literal("function_call"),
+                call_id: z13.string(),
+                name: z13.string(),
+                arguments: z13.string(),
+                id: z13.string()
              }),
-
-          type:
-          id:
-          status:
+              z13.object({
+                type: z13.literal("web_search_call"),
+                id: z13.string(),
+                status: z13.string().optional()
              }),
-
-          type:
-          id:
-          status:
+              z13.object({
+                type: z13.literal("computer_call"),
+                id: z13.string(),
+                status: z13.string().optional()
              }),
-
-          type:
-          id:
-          status:
+              z13.object({
+                type: z13.literal("file_search_call"),
+                id: z13.string(),
+                status: z13.string().optional(),
+                queries: z13.array(z13.string()).nullish(),
+                results: z13.array(
+                  z13.object({
+                    attributes: z13.object({
+                      file_id: z13.string(),
+                      filename: z13.string(),
+                      score: z13.number(),
+                      text: z13.string()
+                    })
+                  })
+                ).nullish()
              }),
-
-          type:
-          id:
-          encrypted_content:
-          summary:
-
-            type:
-            text:
+              z13.object({
+                type: z13.literal("reasoning"),
+                id: z13.string(),
+                encrypted_content: z13.string().nullish(),
+                summary: z13.array(
+                  z13.object({
+                    type: z13.literal("summary_text"),
+                    text: z13.string()
                  })
                )
              })
            ])
          ),
-          incomplete_details:
+          incomplete_details: z13.object({ reason: z13.string() }).nullable(),
           usage: usageSchema2
         })
       ),
@@ -2649,7 +2548,9 @@ var OpenAIResponsesLanguageModel = class {
           toolName: "file_search",
           result: {
             type: "file_search_tool_result",
-            status: part.status || "completed"
+            status: part.status || "completed",
+            ...part.queries && { queries: part.queries },
+            ...part.results && { results: part.results }
           },
           providerExecuted: true
         });
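File-search tool results now carry the executed queries and, when the API returns them, the matched results. Getting results back requires asking the Responses API for them via include (accepted by the responses provider options later in this diff). Sketch (tool argument name assumed):

import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

const result = await generateText({
  model: openai.responses("gpt-5-mini"),
  prompt: "What do the onboarding docs say about VPN access?",
  tools: {
    file_search: openai.tools.fileSearch({ vectorStoreIds: ["vs_123"] }) // assumed arg name
  },
  providerOptions: {
    openai: { include: ["file_search_call.results"] }
  }
});
// Provider-executed results then surface as tool results shaped like:
// { type: "file_search_tool_result", status: "completed", queries: [...], results: [...] }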
@@ -2693,7 +2594,7 @@ var OpenAIResponsesLanguageModel = class {
         path: "/responses",
         modelId: this.modelId
       }),
-      headers:
+      headers: combineHeaders5(this.config.headers(), options.headers),
      body: {
        ...body,
        stream: true
@@ -2764,6 +2665,16 @@ var OpenAIResponsesLanguageModel = class {
               id: value.item.id,
               toolName: "computer_use"
             });
+          } else if (value.item.type === "file_search_call") {
+            ongoingToolCalls[value.output_index] = {
+              toolName: "file_search",
+              toolCallId: value.item.id
+            };
+            controller.enqueue({
+              type: "tool-input-start",
+              id: value.item.id,
+              toolName: "file_search"
+            });
           } else if (value.item.type === "message") {
             controller.enqueue({
               type: "text-start",
@@ -2857,6 +2768,32 @@ var OpenAIResponsesLanguageModel = class {
               },
               providerExecuted: true
             });
+          } else if (value.item.type === "file_search_call") {
+            ongoingToolCalls[value.output_index] = void 0;
+            hasToolCalls = true;
+            controller.enqueue({
+              type: "tool-input-end",
+              id: value.item.id
+            });
+            controller.enqueue({
+              type: "tool-call",
+              toolCallId: value.item.id,
+              toolName: "file_search",
+              input: "",
+              providerExecuted: true
+            });
+            controller.enqueue({
+              type: "tool-result",
+              toolCallId: value.item.id,
+              toolName: "file_search",
+              result: {
+                type: "file_search_tool_result",
+                status: value.item.status || "completed",
+                ...value.item.queries && { queries: value.item.queries },
+                ...value.item.results && { results: value.item.results }
+              },
+              providerExecuted: true
+            });
           } else if (value.item.type === "message") {
             controller.enqueue({
               type: "text-end",
@@ -2969,140 +2906,162 @@ var OpenAIResponsesLanguageModel = class {
     };
   }
 };
-var usageSchema2 =
-  input_tokens:
-  input_tokens_details:
-  output_tokens:
-  output_tokens_details:
+var usageSchema2 = z13.object({
+  input_tokens: z13.number(),
+  input_tokens_details: z13.object({ cached_tokens: z13.number().nullish() }).nullish(),
+  output_tokens: z13.number(),
+  output_tokens_details: z13.object({ reasoning_tokens: z13.number().nullish() }).nullish()
 });
-var textDeltaChunkSchema =
-  type:
-  item_id:
-  delta:
+var textDeltaChunkSchema = z13.object({
+  type: z13.literal("response.output_text.delta"),
+  item_id: z13.string(),
+  delta: z13.string()
 });
-var errorChunkSchema =
-  type:
-  code:
-  message:
-  param:
-  sequence_number:
+var errorChunkSchema = z13.object({
+  type: z13.literal("error"),
+  code: z13.string(),
+  message: z13.string(),
+  param: z13.string().nullish(),
+  sequence_number: z13.number()
 });
-var responseFinishedChunkSchema =
-  type:
-  response:
-    incomplete_details:
+var responseFinishedChunkSchema = z13.object({
+  type: z13.enum(["response.completed", "response.incomplete"]),
+  response: z13.object({
+    incomplete_details: z13.object({ reason: z13.string() }).nullish(),
     usage: usageSchema2
   })
 });
-var responseCreatedChunkSchema =
-  type:
-  response:
-    id:
-    created_at:
-    model:
+var responseCreatedChunkSchema = z13.object({
+  type: z13.literal("response.created"),
+  response: z13.object({
+    id: z13.string(),
+    created_at: z13.number(),
+    model: z13.string()
   })
 });
-var responseOutputItemAddedSchema =
-  type:
-  output_index:
-  item:
-
-    type:
-    id:
+var responseOutputItemAddedSchema = z13.object({
+  type: z13.literal("response.output_item.added"),
+  output_index: z13.number(),
+  item: z13.discriminatedUnion("type", [
+    z13.object({
+      type: z13.literal("message"),
+      id: z13.string()
     }),
-
-    type:
-    id:
-    encrypted_content:
+    z13.object({
+      type: z13.literal("reasoning"),
+      id: z13.string(),
+      encrypted_content: z13.string().nullish()
    }),
-
-    type:
-    id:
-    call_id:
-    name:
-    arguments:
+    z13.object({
+      type: z13.literal("function_call"),
+      id: z13.string(),
+      call_id: z13.string(),
+      name: z13.string(),
+      arguments: z13.string()
    }),
-
-    type:
-    id:
-    status:
+    z13.object({
+      type: z13.literal("web_search_call"),
+      id: z13.string(),
+      status: z13.string()
    }),
-
-    type:
-    id:
-    status:
+    z13.object({
+      type: z13.literal("computer_call"),
+      id: z13.string(),
+      status: z13.string()
    }),
-
-    type:
-    id:
-    status:
+    z13.object({
+      type: z13.literal("file_search_call"),
+      id: z13.string(),
+      status: z13.string(),
+      queries: z13.array(z13.string()).nullish(),
+      results: z13.array(
+        z13.object({
+          attributes: z13.object({
+            file_id: z13.string(),
+            filename: z13.string(),
+            score: z13.number(),
+            text: z13.string()
+          })
+        })
+      ).optional()
    })
  ])
 });
-var responseOutputItemDoneSchema =
-  type:
-  output_index:
-  item:
-
-    type:
-    id:
+var responseOutputItemDoneSchema = z13.object({
+  type: z13.literal("response.output_item.done"),
+  output_index: z13.number(),
+  item: z13.discriminatedUnion("type", [
+    z13.object({
+      type: z13.literal("message"),
+      id: z13.string()
    }),
-
-    type:
-    id:
-    encrypted_content:
+    z13.object({
+      type: z13.literal("reasoning"),
+      id: z13.string(),
+      encrypted_content: z13.string().nullish()
    }),
-
-    type:
-    id:
-    call_id:
-    name:
-    arguments:
-    status:
+    z13.object({
+      type: z13.literal("function_call"),
+      id: z13.string(),
+      call_id: z13.string(),
+      name: z13.string(),
+      arguments: z13.string(),
+      status: z13.literal("completed")
    }),
-
-    type:
-    id:
-    status:
+    z13.object({
+      type: z13.literal("web_search_call"),
+      id: z13.string(),
+      status: z13.literal("completed")
    }),
-
-    type:
-    id:
-    status:
+    z13.object({
+      type: z13.literal("computer_call"),
+      id: z13.string(),
+      status: z13.literal("completed")
    }),
-
-    type:
-    id:
-    status:
+    z13.object({
+      type: z13.literal("file_search_call"),
+      id: z13.string(),
+      status: z13.literal("completed"),
+      queries: z13.array(z13.string()).nullish(),
+      results: z13.array(
+        z13.object({
+          attributes: z13.object({
+            file_id: z13.string(),
+            filename: z13.string(),
+            score: z13.number(),
+            text: z13.string()
+          })
+        })
+      ).nullish()
    })
  ])
 });
-var responseFunctionCallArgumentsDeltaSchema =
-  type:
-  item_id:
-  output_index:
-  delta:
+var responseFunctionCallArgumentsDeltaSchema = z13.object({
+  type: z13.literal("response.function_call_arguments.delta"),
+  item_id: z13.string(),
+  output_index: z13.number(),
+  delta: z13.string()
 });
-var responseAnnotationAddedSchema =
-  type:
-  annotation:
-    type:
-    url:
-    title:
+var responseAnnotationAddedSchema = z13.object({
+  type: z13.literal("response.output_text.annotation.added"),
+  annotation: z13.object({
+    type: z13.literal("url_citation"),
+    url: z13.string(),
+    title: z13.string()
   })
 });
-var responseReasoningSummaryPartAddedSchema =
-  type:
-  item_id:
-  summary_index:
+var responseReasoningSummaryPartAddedSchema = z13.object({
+  type: z13.literal("response.reasoning_summary_part.added"),
+  item_id: z13.string(),
+  summary_index: z13.number()
 });
-var responseReasoningSummaryTextDeltaSchema =
-  type:
-  item_id:
-  summary_index:
-  delta:
+var responseReasoningSummaryTextDeltaSchema = z13.object({
+  type: z13.literal("response.reasoning_summary_text.delta"),
+  item_id: z13.string(),
+  summary_index: z13.number(),
+  delta: z13.string()
 });
-var openaiResponsesChunkSchema =
+var openaiResponsesChunkSchema = z13.union([
   textDeltaChunkSchema,
   responseFinishedChunkSchema,
   responseCreatedChunkSchema,
@@ -3113,7 +3072,7 @@ var openaiResponsesChunkSchema = z14.union([
   responseReasoningSummaryPartAddedSchema,
   responseReasoningSummaryTextDeltaSchema,
   errorChunkSchema,
-
+  z13.object({ type: z13.string() }).loose()
   // fallback for unknown chunks
 ]);
 function isTextDeltaChunk(chunk) {
@@ -3179,32 +3138,34 @@ function supportsFlexProcessing2(modelId) {
 function supportsPriorityProcessing2(modelId) {
   return modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
 }
-var openaiResponsesProviderOptionsSchema =
-  metadata:
-  parallelToolCalls:
-  previousResponseId:
-  store:
-  user:
-  reasoningEffort:
-  strictJsonSchema:
-  instructions:
-  reasoningSummary:
-  serviceTier:
-  include:
-  textVerbosity:
+var openaiResponsesProviderOptionsSchema = z13.object({
+  metadata: z13.any().nullish(),
+  parallelToolCalls: z13.boolean().nullish(),
+  previousResponseId: z13.string().nullish(),
+  store: z13.boolean().nullish(),
+  user: z13.string().nullish(),
+  reasoningEffort: z13.string().nullish(),
+  strictJsonSchema: z13.boolean().nullish(),
+  instructions: z13.string().nullish(),
+  reasoningSummary: z13.string().nullish(),
+  serviceTier: z13.enum(["auto", "flex", "priority"]).nullish(),
+  include: z13.array(z13.enum(["reasoning.encrypted_content", "file_search_call.results"])).nullish(),
+  textVerbosity: z13.enum(["low", "medium", "high"]).nullish(),
+  promptCacheKey: z13.string().nullish(),
+  safetyIdentifier: z13.string().nullish()
 });
 
-// src/openai-speech-model.ts
+// src/speech/openai-speech-model.ts
 import {
-  combineHeaders as
+  combineHeaders as combineHeaders6,
   createBinaryResponseHandler,
-  parseProviderOptions as
+  parseProviderOptions as parseProviderOptions6,
   postJsonToApi as postJsonToApi6
 } from "@ai-sdk/provider-utils";
-import { z as
-var OpenAIProviderOptionsSchema =
-  instructions:
-  speed:
+import { z as z14 } from "zod/v4";
+var OpenAIProviderOptionsSchema = z14.object({
+  instructions: z14.string().nullish(),
+  speed: z14.number().min(0.25).max(4).default(1).nullish()
 });
 var OpenAISpeechModel = class {
   constructor(modelId, config) {
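The responses provider options pick up the same two new fields as the chat options (promptCacheKey, safetyIdentifier). A sketch of passing them to a Responses API model (values illustrative):

import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

const { text } = await generateText({
  model: openai.responses("gpt-5"),
  prompt: "Draft a short changelog entry.",
  providerOptions: {
    openai: {
      promptCacheKey: "changelog-drafts",
      safetyIdentifier: "user-1234-hashed",
      serviceTier: "flex"
    }
  }
});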
@@ -3225,7 +3186,7 @@ var OpenAISpeechModel = class {
     providerOptions
   }) {
     const warnings = [];
-    const openAIOptions = await
+    const openAIOptions = await parseProviderOptions6({
       provider: "openai",
       providerOptions,
       schema: OpenAIProviderOptionsSchema
@@ -3283,7 +3244,7 @@ var OpenAISpeechModel = class {
         path: "/audio/speech",
         modelId: this.modelId
       }),
-      headers:
+      headers: combineHeaders6(this.config.headers(), options.headers),
       body: requestBody,
       failedResponseHandler: openaiFailedResponseHandler,
       successfulResponseHandler: createBinaryResponseHandler(),
@@ -3306,6 +3267,201 @@ var OpenAISpeechModel = class {
   }
 };
 
+// src/transcription/openai-transcription-model.ts
+import {
+  combineHeaders as combineHeaders7,
+  convertBase64ToUint8Array,
+  createJsonResponseHandler as createJsonResponseHandler6,
+  parseProviderOptions as parseProviderOptions7,
+  postFormDataToApi
+} from "@ai-sdk/provider-utils";
+import { z as z16 } from "zod/v4";
+
+// src/transcription/openai-transcription-options.ts
+import { z as z15 } from "zod/v4";
+var openAITranscriptionProviderOptions = z15.object({
+  /**
+   * Additional information to include in the transcription response.
+   */
+  include: z15.array(z15.string()).optional(),
+  /**
+   * The language of the input audio in ISO-639-1 format.
+   */
+  language: z15.string().optional(),
+  /**
+   * An optional text to guide the model's style or continue a previous audio segment.
+   */
+  prompt: z15.string().optional(),
+  /**
+   * The sampling temperature, between 0 and 1.
+   * @default 0
+   */
+  temperature: z15.number().min(0).max(1).default(0).optional(),
+  /**
+   * The timestamp granularities to populate for this transcription.
+   * @default ['segment']
+   */
+  timestampGranularities: z15.array(z15.enum(["word", "segment"])).default(["segment"]).optional()
+});
+
+// src/transcription/openai-transcription-model.ts
+var languageMap = {
+  afrikaans: "af",
+  arabic: "ar",
+  armenian: "hy",
+  azerbaijani: "az",
+  belarusian: "be",
+  bosnian: "bs",
+  bulgarian: "bg",
+  catalan: "ca",
+  chinese: "zh",
+  croatian: "hr",
+  czech: "cs",
+  danish: "da",
+  dutch: "nl",
+  english: "en",
+  estonian: "et",
+  finnish: "fi",
+  french: "fr",
+  galician: "gl",
+  german: "de",
+  greek: "el",
+  hebrew: "he",
+  hindi: "hi",
+  hungarian: "hu",
+  icelandic: "is",
+  indonesian: "id",
+  italian: "it",
+  japanese: "ja",
+  kannada: "kn",
+  kazakh: "kk",
+  korean: "ko",
+  latvian: "lv",
+  lithuanian: "lt",
+  macedonian: "mk",
+  malay: "ms",
+  marathi: "mr",
+  maori: "mi",
+  nepali: "ne",
+  norwegian: "no",
+  persian: "fa",
+  polish: "pl",
+  portuguese: "pt",
+  romanian: "ro",
+  russian: "ru",
+  serbian: "sr",
+  slovak: "sk",
+  slovenian: "sl",
+  spanish: "es",
+  swahili: "sw",
+  swedish: "sv",
+  tagalog: "tl",
+  tamil: "ta",
+  thai: "th",
+  turkish: "tr",
+  ukrainian: "uk",
+  urdu: "ur",
+  vietnamese: "vi",
+  welsh: "cy"
+};
+var OpenAITranscriptionModel = class {
+  constructor(modelId, config) {
+    this.modelId = modelId;
+    this.config = config;
+    this.specificationVersion = "v2";
+  }
+  get provider() {
+    return this.config.provider;
+  }
+  async getArgs({
+    audio,
+    mediaType,
+    providerOptions
+  }) {
+    const warnings = [];
+    const openAIOptions = await parseProviderOptions7({
+      provider: "openai",
+      providerOptions,
+      schema: openAITranscriptionProviderOptions
+    });
+    const formData = new FormData();
+    const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([convertBase64ToUint8Array(audio)]);
+    formData.append("model", this.modelId);
+    formData.append("file", new File([blob], "audio", { type: mediaType }));
+    if (openAIOptions) {
+      const transcriptionModelOptions = {
+        include: openAIOptions.include,
+        language: openAIOptions.language,
+        prompt: openAIOptions.prompt,
+        temperature: openAIOptions.temperature,
+        timestamp_granularities: openAIOptions.timestampGranularities
+      };
+      for (const [key, value] of Object.entries(transcriptionModelOptions)) {
+        if (value != null) {
+          formData.append(key, String(value));
+        }
+      }
+    }
+    return {
+      formData,
+      warnings
+    };
+  }
+  async doGenerate(options) {
+    var _a, _b, _c, _d, _e, _f;
+    const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
+    const { formData, warnings } = await this.getArgs(options);
+    const {
+      value: response,
+      responseHeaders,
+      rawValue: rawResponse
+    } = await postFormDataToApi({
+      url: this.config.url({
+        path: "/audio/transcriptions",
+        modelId: this.modelId
+      }),
+      headers: combineHeaders7(this.config.headers(), options.headers),
+      formData,
+      failedResponseHandler: openaiFailedResponseHandler,
+      successfulResponseHandler: createJsonResponseHandler6(
+        openaiTranscriptionResponseSchema
+      ),
+      abortSignal: options.abortSignal,
+      fetch: this.config.fetch
+    });
+    const language = response.language != null && response.language in languageMap ? languageMap[response.language] : void 0;
+    return {
+      text: response.text,
+      segments: (_e = (_d = response.words) == null ? void 0 : _d.map((word) => ({
+        text: word.word,
+        startSecond: word.start,
+        endSecond: word.end
+      }))) != null ? _e : [],
+      language,
+      durationInSeconds: (_f = response.duration) != null ? _f : void 0,
+      warnings,
+      response: {
+        timestamp: currentDate,
+        modelId: this.modelId,
+        headers: responseHeaders,
+        body: rawResponse
+      }
+    };
+  }
+};
+var openaiTranscriptionResponseSchema = z16.object({
+  text: z16.string(),
+  language: z16.string().nullish(),
+  duration: z16.number().nullish(),
+  words: z16.array(
+    z16.object({
+      word: z16.string(),
+      start: z16.number(),
+      end: z16.number()
+    })
+  ).nullish()
+});
+
 // src/openai-provider.ts
 function createOpenAI(options = {}) {
   var _a, _b;
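The transcription model itself is unchanged apart from moving to src/transcription/. For context, a sketch of calling it through the AI SDK's transcription helper (file path and option values are illustrative):

import { readFile } from "node:fs/promises";
import { experimental_transcribe as transcribe } from "ai";
import { openai } from "@ai-sdk/openai";

const result = await transcribe({
  model: openai.transcription("whisper-1"),
  audio: await readFile("./meeting.mp3"),
  providerOptions: {
    openai: { timestampGranularities: ["word"] }
  }
});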
@@ -3370,7 +3526,8 @@ function createOpenAI(options = {}) {
       provider: `${providerName}.responses`,
       url: ({ path }) => `${baseURL}${path}`,
       headers: getHeaders,
-      fetch: options.fetch
+      fetch: options.fetch,
+      fileIdPrefixes: ["file-"]
     });
   };
   const provider = function(modelId) {
|