@ai-sdk/openai 2.0.0-canary.12 → 2.0.0-canary.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +24 -0
- package/dist/index.d.mts +8 -26
- package/dist/index.d.ts +8 -26
- package/dist/index.js +200 -180
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +200 -180
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +31 -47
- package/dist/internal/index.d.ts +31 -47
- package/dist/internal/index.js +196 -174
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +195 -174
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/internal/index.mjs
CHANGED
@@ -248,7 +248,13 @@ var openaiProviderOptions = z.object({
   /**
    * Parameters for prediction mode.
    */
-  prediction: z.record(z.any()).optional()
+  prediction: z.record(z.any()).optional(),
+  /**
+   * Whether to use structured outputs.
+   *
+   * @default true
+   */
+  structuredOutputs: z.boolean().optional()
 });
 
 // src/openai-error.ts
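
Note: the new `structuredOutputs` chat provider option defaults to true and can be disabled per call. A minimal usage sketch, assuming the AI SDK v5 canary call shape where chat options are read from `providerOptions.openai` (illustrative only, not part of the diff):

import { openai } from '@ai-sdk/openai';
import { generateText } from 'ai';

// Sketch: opt out of strict json_schema response formatting for a single call.
const { text } = await generateText({
  model: openai('gpt-4o'),
  prompt: 'Summarize the changelog.',
  providerOptions: {
    openai: { structuredOutputs: false },
  },
});
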
@@ -331,20 +337,17 @@ function prepareTools({
 
 // src/openai-chat-language-model.ts
 var OpenAIChatLanguageModel = class {
-  constructor(modelId,
+  constructor(modelId, config) {
     this.specificationVersion = "v2";
+    this.supportedUrls = {
+      "image/*": [/^https?:\/\/.*$/]
+    };
     this.modelId = modelId;
-    this.settings = settings;
     this.config = config;
   }
   get provider() {
     return this.config.provider;
   }
-  async getSupportedUrls() {
-    return {
-      "image/*": [/^https?:\/\/.*$/]
-    };
-  }
   async getArgs({
     prompt,
     maxOutputTokens,
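
Note: the chat model now exposes `supportedUrls` as a plain instance property instead of the removed `getSupportedUrls()` method. The shape is unchanged: a map from media-type pattern to regexes for URLs the provider can accept directly. A small sketch of that shape, mirroring the value set in the constructor above:

// Media-type pattern -> URL patterns the provider accepts directly.
const supportedUrls: Record<string, RegExp[]> = {
  'image/*': [/^https?:\/\/.*$/],
};
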
@@ -367,13 +370,14 @@ var OpenAIChatLanguageModel = class {
       providerOptions,
       schema: openaiProviderOptions
     })) != null ? _a : {};
+    const structuredOutputs = (_b = openaiOptions.structuredOutputs) != null ? _b : true;
     if (topK != null) {
       warnings.push({
         type: "unsupported-setting",
         setting: "topK"
       });
     }
-    if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !
+    if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !structuredOutputs) {
       warnings.push({
         type: "unsupported-setting",
         setting: "responseFormat",
@@ -402,12 +406,12 @@ var OpenAIChatLanguageModel = class {
         presence_penalty: presencePenalty,
         response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? (
           // TODO convert into provider option
-
+          structuredOutputs && responseFormat.schema != null ? {
             type: "json_schema",
             json_schema: {
               schema: responseFormat.schema,
               strict: true,
-              name: (
+              name: (_c = responseFormat.name) != null ? _c : "response",
               description: responseFormat.description
             }
           } : { type: "json_object" }
@@ -487,7 +491,7 @@ var OpenAIChatLanguageModel = class {
     } = prepareTools({
       tools,
       toolChoice,
-      structuredOutputs
+      structuredOutputs
     });
     return {
       args: {
@@ -847,11 +851,23 @@ var reasoningModels = {
   "o1-preview-2024-09-12": {
     systemMessageMode: "remove"
   },
+  o3: {
+    systemMessageMode: "developer"
+  },
+  "o3-2025-04-16": {
+    systemMessageMode: "developer"
+  },
   "o3-mini": {
     systemMessageMode: "developer"
   },
   "o3-mini-2025-01-31": {
     systemMessageMode: "developer"
+  },
+  "o4-mini": {
+    systemMessageMode: "developer"
+  },
+  "o4-mini-2025-04-16": {
+    systemMessageMode: "developer"
   }
 };
 
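
Note: the o3 and o4-mini model families join the reasoning-model table with systemMessageMode: "developer", meaning system prompts are forwarded under the developer role rather than removed. A hypothetical sketch of what the modes imply (not the package's actual conversion code):

type SystemMessageMode = 'remove' | 'system' | 'developer';

// Hypothetical mapping of a system prompt according to the mode.
function mapSystemMessage(content: string, mode: SystemMessageMode) {
  switch (mode) {
    case 'remove':
      return undefined; // dropped, typically with a warning
    case 'developer':
      return { role: 'developer' as const, content };
    case 'system':
      return { role: 'system' as const, content };
  }
}
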
@@ -872,13 +888,9 @@ import {
 } from "@ai-sdk/provider";
 function convertToOpenAICompletionPrompt({
   prompt,
-  inputFormat,
   user = "user",
   assistant = "assistant"
 }) {
-  if (inputFormat === "prompt" && prompt.length === 1 && prompt[0].role === "user" && prompt[0].content.length === 1 && prompt[0].content[0].type === "text") {
-    return { prompt: prompt[0].content[0].text };
-  }
   let text = "";
   if (prompt[0].role === "system") {
     text += `${prompt[0].content}
@@ -984,6 +996,9 @@ var openaiCompletionProviderOptions = z4.object({
 var OpenAICompletionLanguageModel = class {
   constructor(modelId, config) {
     this.specificationVersion = "v2";
+    this.supportedUrls = {
+      // No URLs are supported for completion models.
+    };
     this.modelId = modelId;
     this.config = config;
   }
@@ -993,13 +1008,7 @@ var OpenAICompletionLanguageModel = class {
   get provider() {
     return this.config.provider;
   }
-  async getSupportedUrls() {
-    return {
-      // no supported urls for completion models
-    };
-  }
   async getArgs({
-    inputFormat,
     prompt,
     maxOutputTokens,
     temperature,
@@ -1043,7 +1052,7 @@ var OpenAICompletionLanguageModel = class {
         details: "JSON response format is not supported."
       });
     }
-    const { prompt: completionPrompt, stopSequences } = convertToOpenAICompletionPrompt({ prompt
+    const { prompt: completionPrompt, stopSequences } = convertToOpenAICompletionPrompt({ prompt });
     const stop = [...stopSequences != null ? stopSequences : [], ...userStopSequences != null ? userStopSequences : []];
     return {
       args: {
@@ -1252,23 +1261,16 @@ var openaiEmbeddingProviderOptions = z6.object({
 
 // src/openai-embedding-model.ts
 var OpenAIEmbeddingModel = class {
-  constructor(modelId,
+  constructor(modelId, config) {
     this.specificationVersion = "v2";
+    this.maxEmbeddingsPerCall = 2048;
+    this.supportsParallelCalls = true;
     this.modelId = modelId;
-    this.settings = settings;
     this.config = config;
   }
   get provider() {
     return this.config.provider;
   }
-  get maxEmbeddingsPerCall() {
-    var _a;
-    return (_a = this.settings.maxEmbeddingsPerCall) != null ? _a : 2048;
-  }
-  get supportsParallelCalls() {
-    var _a;
-    return (_a = this.settings.supportsParallelCalls) != null ? _a : true;
-  }
   async doEmbed({
     values,
     headers,
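
Note: the embedding model now hardcodes maxEmbeddingsPerCall = 2048 and supportsParallelCalls = true instead of reading them from settings. A hypothetical helper showing how a caller could batch inputs against that fixed limit:

// Hypothetical batching helper for the fixed 2048-values-per-call limit.
function chunkValues<T>(values: T[], maxPerCall = 2048): T[][] {
  const batches: T[][] = [];
  for (let i = 0; i < values.length; i += maxPerCall) {
    batches.push(values.slice(i, i + maxPerCall));
  }
  return batches;
}
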
@@ -1347,7 +1349,7 @@ var OpenAIImageModel = class {
     this.modelId = modelId;
     this.settings = settings;
     this.config = config;
-    this.specificationVersion = "
+    this.specificationVersion = "v2";
   }
   get maxImagesPerCall() {
     var _a, _b;
@@ -1423,14 +1425,36 @@ import {
   parseProviderOptions as parseProviderOptions4,
   postFormDataToApi
 } from "@ai-sdk/provider-utils";
+import { z as z10 } from "zod";
+
+// src/openai-transcription-options.ts
 import { z as z9 } from "zod";
-var
+var openAITranscriptionProviderOptions = z9.object({
+  /**
+   * Additional information to include in the transcription response.
+   */
   include: z9.array(z9.string()).nullish(),
+  /**
+   * The language of the input audio in ISO-639-1 format.
+   */
   language: z9.string().nullish(),
+  /**
+   * An optional text to guide the model's style or continue a previous audio segment.
+   */
   prompt: z9.string().nullish(),
-
-
+  /**
+   * The sampling temperature, between 0 and 1.
+   * @default 0
+   */
+  temperature: z9.number().min(0).max(1).default(0).nullish(),
+  /**
+   * The timestamp granularities to populate for this transcription.
+   * @default ['segment']
+   */
+  timestampGranularities: z9.array(z9.enum(["word", "segment"])).default(["segment"]).nullish()
 });
+
+// src/openai-transcription-model.ts
 var languageMap = {
   afrikaans: "af",
   arabic: "ar",
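
Note: the transcription provider options now have a documented zod schema (openAITranscriptionProviderOptions), which is also re-exported further down. A hedged usage sketch, assuming the AI SDK's experimental transcribe helper and that these options are read from providerOptions.openai:

import { openai } from '@ai-sdk/openai';
import { experimental_transcribe as transcribe } from 'ai';
import { readFile } from 'node:fs/promises';

// Sketch: request word-level timestamps with a deterministic temperature.
const result = await transcribe({
  model: openai.transcription('whisper-1'),
  audio: await readFile('recording.mp3'),
  providerOptions: {
    openai: {
      timestampGranularities: ['word'],
      temperature: 0,
    },
  },
});
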
@@ -1504,12 +1528,11 @@ var OpenAITranscriptionModel = class {
     mediaType,
     providerOptions
   }) {
-    var _a, _b, _c, _d, _e;
     const warnings = [];
     const openAIOptions = await parseProviderOptions4({
       provider: "openai",
       providerOptions,
-      schema:
+      schema: openAITranscriptionProviderOptions
     });
     const formData = new FormData();
     const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([convertBase64ToUint8Array(audio)]);
@@ -1517,15 +1540,14 @@ var OpenAITranscriptionModel = class {
     formData.append("file", new File([blob], "audio", { type: mediaType }));
     if (openAIOptions) {
       const transcriptionModelOptions = {
-        include:
-        language:
-        prompt:
-        temperature:
-        timestamp_granularities:
+        include: openAIOptions.include,
+        language: openAIOptions.language,
+        prompt: openAIOptions.prompt,
+        temperature: openAIOptions.temperature,
+        timestamp_granularities: openAIOptions.timestampGranularities
       };
-      for (const key
-
-      if (value !== void 0) {
+      for (const [key, value] of Object.entries(transcriptionModelOptions)) {
+        if (value != null) {
           formData.append(key, String(value));
         }
       }
@@ -1577,15 +1599,15 @@ var OpenAITranscriptionModel = class {
     };
   }
 };
-var openaiTranscriptionResponseSchema =
-  text:
-  language:
-  duration:
-  words:
-
-      word:
-      start:
-      end:
+var openaiTranscriptionResponseSchema = z10.object({
+  text: z10.string(),
+  language: z10.string().nullish(),
+  duration: z10.number().nullish(),
+  words: z10.array(
+    z10.object({
+      word: z10.string(),
+      start: z10.number(),
+      end: z10.number()
     })
   ).nullish()
 });
@@ -1597,10 +1619,10 @@ import {
   parseProviderOptions as parseProviderOptions5,
   postJsonToApi as postJsonToApi5
 } from "@ai-sdk/provider-utils";
-import { z as
-var OpenAIProviderOptionsSchema =
-  instructions:
-  speed:
+import { z as z11 } from "zod";
+var OpenAIProviderOptionsSchema = z11.object({
+  instructions: z11.string().nullish(),
+  speed: z11.number().min(0.25).max(4).default(1).nullish()
 });
 var OpenAISpeechModel = class {
   constructor(modelId, config) {
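
Note: the speech provider options schema is rebuilt on z11 with instructions and a speed clamped to 0.25–4 (default 1). A hedged usage sketch, assuming the AI SDK's experimental speech helper and providerOptions.openai routing:

import { openai } from '@ai-sdk/openai';
import { experimental_generateSpeech as generateSpeech } from 'ai';

// Sketch: pass speech-specific options through provider options.
const { audio } = await generateSpeech({
  model: openai.speech('tts-1'),
  text: 'The canary build is out.',
  providerOptions: {
    openai: {
      instructions: 'Speak calmly and clearly.',
      speed: 1.25,
    },
  },
});
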
@@ -1703,7 +1725,7 @@ import {
   parseProviderOptions as parseProviderOptions6,
   postJsonToApi as postJsonToApi6
 } from "@ai-sdk/provider-utils";
-import { z as
+import { z as z12 } from "zod";
 
 // src/responses/convert-to-openai-responses-messages.ts
 import {
@@ -1916,13 +1938,11 @@ function prepareResponsesTools({
 var OpenAIResponsesLanguageModel = class {
   constructor(modelId, config) {
     this.specificationVersion = "v2";
-    this.
-    this.config = config;
-  }
-  async getSupportedUrls() {
-    return {
+    this.supportedUrls = {
       "image/*": [/^https?:\/\/.*$/]
     };
+    this.modelId = modelId;
+    this.config = config;
   }
   get provider() {
     return this.config.provider;
@@ -2068,55 +2088,55 @@ var OpenAIResponsesLanguageModel = class {
       body,
       failedResponseHandler: openaiFailedResponseHandler,
       successfulResponseHandler: createJsonResponseHandler6(
-
-          id:
-          created_at:
-          model:
-          output:
-
-
-              type:
-              role:
-              content:
-
-                  type:
-                  text:
-                  annotations:
-
-                      type:
-                      start_index:
-                      end_index:
-                      url:
-                      title:
+        z12.object({
+          id: z12.string(),
+          created_at: z12.number(),
+          model: z12.string(),
+          output: z12.array(
+            z12.discriminatedUnion("type", [
+              z12.object({
+                type: z12.literal("message"),
+                role: z12.literal("assistant"),
+                content: z12.array(
+                  z12.object({
+                    type: z12.literal("output_text"),
+                    text: z12.string(),
+                    annotations: z12.array(
+                      z12.object({
+                        type: z12.literal("url_citation"),
+                        start_index: z12.number(),
+                        end_index: z12.number(),
+                        url: z12.string(),
+                        title: z12.string()
                       })
                     )
                   })
                 )
               }),
-
-              type:
-              call_id:
-              name:
-              arguments:
+              z12.object({
+                type: z12.literal("function_call"),
+                call_id: z12.string(),
+                name: z12.string(),
+                arguments: z12.string()
              }),
-
-              type:
+              z12.object({
+                type: z12.literal("web_search_call")
              }),
-
-              type:
+              z12.object({
+                type: z12.literal("computer_call")
              }),
-
-              type:
-              summary:
-
-                  type:
-                  text:
+              z12.object({
+                type: z12.literal("reasoning"),
+                summary: z12.array(
+                  z12.object({
+                    type: z12.literal("summary_text"),
+                    text: z12.string()
                  })
                )
              })
            ])
          ),
-          incomplete_details:
+          incomplete_details: z12.object({ reason: z12.string() }).nullable(),
          usage: usageSchema
        })
      ),
@@ -2330,86 +2350,86 @@ var OpenAIResponsesLanguageModel = class {
     };
   }
 };
-var usageSchema =
-  input_tokens:
-  input_tokens_details:
-  output_tokens:
-  output_tokens_details:
+var usageSchema = z12.object({
+  input_tokens: z12.number(),
+  input_tokens_details: z12.object({ cached_tokens: z12.number().nullish() }).nullish(),
+  output_tokens: z12.number(),
+  output_tokens_details: z12.object({ reasoning_tokens: z12.number().nullish() }).nullish()
 });
-var textDeltaChunkSchema =
-  type:
-  delta:
+var textDeltaChunkSchema = z12.object({
+  type: z12.literal("response.output_text.delta"),
+  delta: z12.string()
 });
-var responseFinishedChunkSchema =
-  type:
-  response:
-    incomplete_details:
+var responseFinishedChunkSchema = z12.object({
+  type: z12.enum(["response.completed", "response.incomplete"]),
+  response: z12.object({
+    incomplete_details: z12.object({ reason: z12.string() }).nullish(),
     usage: usageSchema
   })
 });
-var responseCreatedChunkSchema =
-  type:
-  response:
-    id:
-    created_at:
-    model:
+var responseCreatedChunkSchema = z12.object({
+  type: z12.literal("response.created"),
+  response: z12.object({
+    id: z12.string(),
+    created_at: z12.number(),
+    model: z12.string()
   })
 });
-var responseOutputItemDoneSchema =
-  type:
-  output_index:
-  item:
-
-      type:
+var responseOutputItemDoneSchema = z12.object({
+  type: z12.literal("response.output_item.done"),
+  output_index: z12.number(),
+  item: z12.discriminatedUnion("type", [
+    z12.object({
+      type: z12.literal("message")
     }),
-
-      type:
-      id:
-      call_id:
-      name:
-      arguments:
-      status:
+    z12.object({
+      type: z12.literal("function_call"),
+      id: z12.string(),
+      call_id: z12.string(),
+      name: z12.string(),
+      arguments: z12.string(),
+      status: z12.literal("completed")
    })
  ])
 });
-var responseFunctionCallArgumentsDeltaSchema =
-  type:
-  item_id:
-  output_index:
-  delta:
+var responseFunctionCallArgumentsDeltaSchema = z12.object({
+  type: z12.literal("response.function_call_arguments.delta"),
+  item_id: z12.string(),
+  output_index: z12.number(),
+  delta: z12.string()
 });
-var responseOutputItemAddedSchema =
-  type:
-  output_index:
-  item:
-
-      type:
+var responseOutputItemAddedSchema = z12.object({
+  type: z12.literal("response.output_item.added"),
+  output_index: z12.number(),
+  item: z12.discriminatedUnion("type", [
+    z12.object({
+      type: z12.literal("message")
    }),
-
-      type:
-      id:
-      call_id:
-      name:
-      arguments:
+    z12.object({
+      type: z12.literal("function_call"),
+      id: z12.string(),
+      call_id: z12.string(),
+      name: z12.string(),
+      arguments: z12.string()
    })
  ])
 });
-var responseAnnotationAddedSchema =
-  type:
-  annotation:
-    type:
-    url:
-    title:
+var responseAnnotationAddedSchema = z12.object({
+  type: z12.literal("response.output_text.annotation.added"),
+  annotation: z12.object({
+    type: z12.literal("url_citation"),
+    url: z12.string(),
+    title: z12.string()
  })
 });
-var responseReasoningSummaryTextDeltaSchema =
-  type:
-  item_id:
-  output_index:
-  summary_index:
-  delta:
+var responseReasoningSummaryTextDeltaSchema = z12.object({
+  type: z12.literal("response.reasoning_summary_text.delta"),
+  item_id: z12.string(),
+  output_index: z12.number(),
+  summary_index: z12.number(),
+  delta: z12.string()
 });
-var openaiResponsesChunkSchema =
+var openaiResponsesChunkSchema = z12.union([
   textDeltaChunkSchema,
   responseFinishedChunkSchema,
   responseCreatedChunkSchema,
@@ -2418,7 +2438,7 @@ var openaiResponsesChunkSchema = z11.union([
   responseOutputItemAddedSchema,
   responseAnnotationAddedSchema,
   responseReasoningSummaryTextDeltaSchema,
-
+  z12.object({ type: z12.string() }).passthrough()
   // fallback for unknown chunks
 ]);
 function isTextDeltaChunk(chunk) {
@@ -2466,16 +2486,16 @@ function getResponsesModelConfig(modelId) {
     requiredAutoTruncation: false
   };
 }
-var openaiResponsesProviderOptionsSchema =
-  metadata:
-  parallelToolCalls:
-  previousResponseId:
-  store:
-  user:
-  reasoningEffort:
-  strictSchemas:
-  instructions:
-  reasoningSummary:
+var openaiResponsesProviderOptionsSchema = z12.object({
+  metadata: z12.any().nullish(),
+  parallelToolCalls: z12.boolean().nullish(),
+  previousResponseId: z12.string().nullish(),
+  store: z12.boolean().nullish(),
+  user: z12.string().nullish(),
+  reasoningEffort: z12.string().nullish(),
+  strictSchemas: z12.boolean().nullish(),
+  instructions: z12.string().nullish(),
+  reasoningSummary: z12.string().nullish()
 });
 export {
   OpenAIChatLanguageModel,
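
Note: openaiResponsesProviderOptionsSchema keeps the same fields, now built on z12. A hedged usage sketch for the Responses API model, assuming these options are read from providerOptions.openai:

import { openai } from '@ai-sdk/openai';
import { generateText } from 'ai';

// Sketch: enable stored responses and a reasoning summary via provider options.
const { text } = await generateText({
  model: openai.responses('o4-mini'),
  prompt: 'Outline the migration steps.',
  providerOptions: {
    openai: {
      store: true,
      reasoningSummary: 'auto',
      parallelToolCalls: false,
    },
  },
});
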
@@ -2487,6 +2507,7 @@ export {
   OpenAITranscriptionModel,
   hasDefaultResponseFormat,
   modelMaxImagesPerCall,
+  openAITranscriptionProviderOptions,
   openaiCompletionProviderOptions,
   openaiEmbeddingProviderOptions,
   openaiProviderOptions