@ai-sdk/openai 2.0.0-canary.12 → 2.0.0-canary.13
- package/CHANGELOG.md +14 -0
- package/dist/index.d.mts +7 -25
- package/dist/index.d.ts +7 -25
- package/dist/index.js +178 -164
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +178 -164
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +25 -43
- package/dist/internal/index.d.ts +25 -43
- package/dist/internal/index.js +174 -158
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +173 -158
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/internal/index.mjs
CHANGED
@@ -248,7 +248,13 @@ var openaiProviderOptions = z.object({
   /**
    * Parameters for prediction mode.
    */
-  prediction: z.record(z.any()).optional()
+  prediction: z.record(z.any()).optional(),
+  /**
+   * Whether to use structured outputs.
+   *
+   * @default true
+   */
+  structuredOutputs: z.boolean().optional()
 });

 // src/openai-error.ts
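The chat provider options gain a `structuredOutputs` flag (default `true`), replacing the former model-level setting. A minimal usage sketch, assuming the AI SDK `generateObject` call shape and the `openai.chat` factory from `@ai-sdk/openai` (only the option name and default come from this diff):

```ts
// Sketch: disabling structured outputs for one call via provider options.
// `generateObject`, `openai.chat`, and the model id are assumptions;
// the `structuredOutputs` key and its default of true come from this diff.
import { generateObject } from 'ai';
import { openai } from '@ai-sdk/openai';
import { z } from 'zod';

const { object } = await generateObject({
  model: openai.chat('gpt-4o-mini'),
  schema: z.object({ city: z.string(), country: z.string() }),
  prompt: 'Name a city and its country.',
  providerOptions: {
    // defaults to true; false falls back to json_object mode (see the hunks below)
    openai: { structuredOutputs: false },
  },
});

console.log(object);
```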
@@ -331,10 +337,9 @@ function prepareTools({

 // src/openai-chat-language-model.ts
 var OpenAIChatLanguageModel = class {
-  constructor(modelId,
+  constructor(modelId, config) {
     this.specificationVersion = "v2";
     this.modelId = modelId;
-    this.settings = settings;
     this.config = config;
   }
   get provider() {
@@ -367,13 +372,14 @@ var OpenAIChatLanguageModel = class {
      providerOptions,
      schema: openaiProviderOptions
    })) != null ? _a : {};
+    const structuredOutputs = (_b = openaiOptions.structuredOutputs) != null ? _b : true;
    if (topK != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "topK"
      });
    }
-    if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !
+    if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !structuredOutputs) {
      warnings.push({
        type: "unsupported-setting",
        setting: "responseFormat",
@@ -402,12 +408,12 @@ var OpenAIChatLanguageModel = class {
        presence_penalty: presencePenalty,
        response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? (
          // TODO convert into provider option
-
+          structuredOutputs && responseFormat.schema != null ? {
            type: "json_schema",
            json_schema: {
              schema: responseFormat.schema,
              strict: true,
-              name: (
+              name: (_c = responseFormat.name) != null ? _c : "response",
              description: responseFormat.description
            }
          } : { type: "json_object" }
@@ -487,7 +493,7 @@ var OpenAIChatLanguageModel = class {
    } = prepareTools({
      tools,
      toolChoice,
-      structuredOutputs
+      structuredOutputs
    });
    return {
      args: {
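With `structuredOutputs` now resolved from provider options (falling back to `true`), a JSON response format that carries a schema is sent as `json_schema` with `strict: true`; disabling the flag drops back to plain `json_object`. A standalone sketch of the resulting `response_format` value (the `citySchema` object is hypothetical; names and defaults follow the hunks above):

```ts
// Sketch of the response_format the chat model builds (mirrors the diff).
const structuredOutputs: boolean = true; // providerOptions.openai.structuredOutputs ?? true

const citySchema: Record<string, unknown> | undefined = {
  type: 'object',
  properties: { city: { type: 'string' } },
  required: ['city'],
};

const response_format =
  structuredOutputs && citySchema != null
    ? {
        type: 'json_schema',
        json_schema: {
          schema: citySchema,
          strict: true,
          name: 'response', // responseFormat.name ?? "response"
        },
      }
    : { type: 'json_object' };

console.log(response_format);
```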
@@ -872,13 +878,9 @@ import {
 } from "@ai-sdk/provider";
 function convertToOpenAICompletionPrompt({
   prompt,
-  inputFormat,
   user = "user",
   assistant = "assistant"
 }) {
-  if (inputFormat === "prompt" && prompt.length === 1 && prompt[0].role === "user" && prompt[0].content.length === 1 && prompt[0].content[0].type === "text") {
-    return { prompt: prompt[0].content[0].text };
-  }
   let text = "";
   if (prompt[0].role === "system") {
     text += `${prompt[0].content}
@@ -999,7 +1001,6 @@ var OpenAICompletionLanguageModel = class {
    };
  }
  async getArgs({
-    inputFormat,
    prompt,
    maxOutputTokens,
    temperature,
@@ -1043,7 +1044,7 @@ var OpenAICompletionLanguageModel = class {
        details: "JSON response format is not supported."
      });
    }
-    const { prompt: completionPrompt, stopSequences } = convertToOpenAICompletionPrompt({ prompt
+    const { prompt: completionPrompt, stopSequences } = convertToOpenAICompletionPrompt({ prompt });
    const stop = [...stopSequences != null ? stopSequences : [], ...userStopSequences != null ? userStopSequences : []];
    return {
      args: {
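The completion path drops the `inputFormat` flag: `convertToOpenAICompletionPrompt` now receives only `{ prompt }`, so even a single user text message goes through the role-prefixed formatting instead of being passed through verbatim. For reference, a standalone reconstruction of the removed shortcut (types here are illustrative, not the SDK's):

```ts
// Reconstruction of the deleted shortcut: a lone user message with one text
// part used to be returned as the raw prompt string. Illustrative types only.
type Part = { type: string; text: string };
type Message = { role: string; content: Part[] };

function removedPassthrough(prompt: Message[]): string | undefined {
  if (
    prompt.length === 1 &&
    prompt[0].role === 'user' &&
    prompt[0].content.length === 1 &&
    prompt[0].content[0].type === 'text'
  ) {
    return prompt[0].content[0].text; // previously returned as { prompt: ... }
  }
  return undefined; // now every prompt is formatted with user/assistant prefixes
}

console.log(removedPassthrough([{ role: 'user', content: [{ type: 'text', text: 'Say hi.' }] }]));
```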
@@ -1252,23 +1253,16 @@ var openaiEmbeddingProviderOptions = z6.object({

 // src/openai-embedding-model.ts
 var OpenAIEmbeddingModel = class {
-  constructor(modelId,
+  constructor(modelId, config) {
     this.specificationVersion = "v2";
+    this.maxEmbeddingsPerCall = 2048;
+    this.supportsParallelCalls = true;
     this.modelId = modelId;
-    this.settings = settings;
     this.config = config;
   }
   get provider() {
     return this.config.provider;
   }
-  get maxEmbeddingsPerCall() {
-    var _a;
-    return (_a = this.settings.maxEmbeddingsPerCall) != null ? _a : 2048;
-  }
-  get supportsParallelCalls() {
-    var _a;
-    return (_a = this.settings.supportsParallelCalls) != null ? _a : true;
-  }
   async doEmbed({
     values,
     headers,
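The embedding model now reports fixed capabilities instead of reading them from settings: `maxEmbeddingsPerCall` is always 2048 and `supportsParallelCalls` is always true. A small sketch of what the fixed per-call cap implies for batching (the AI SDK's `embedMany` already batches using this value, so the helper below is only illustrative):

```ts
// Illustrative batching against the fixed 2048-per-call limit from this release.
const MAX_EMBEDDINGS_PER_CALL = 2048;

function toBatches<T>(values: T[], size = MAX_EMBEDDINGS_PER_CALL): T[][] {
  const batches: T[][] = [];
  for (let i = 0; i < values.length; i += size) {
    batches.push(values.slice(i, i + size));
  }
  return batches;
}

const docs = Array.from({ length: 5000 }, (_, i) => `doc ${i}`);
console.log(toBatches(docs).map((batch) => batch.length)); // [2048, 2048, 904]
```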
@@ -1423,14 +1417,36 @@ import {
   parseProviderOptions as parseProviderOptions4,
   postFormDataToApi
 } from "@ai-sdk/provider-utils";
+import { z as z10 } from "zod";
+
+// src/openai-transcription-options.ts
 import { z as z9 } from "zod";
-var
+var openAITranscriptionProviderOptions = z9.object({
+  /**
+   * Additional information to include in the transcription response.
+   */
   include: z9.array(z9.string()).nullish(),
+  /**
+   * The language of the input audio in ISO-639-1 format.
+   */
   language: z9.string().nullish(),
+  /**
+   * An optional text to guide the model's style or continue a previous audio segment.
+   */
   prompt: z9.string().nullish(),
-
-
+  /**
+   * The sampling temperature, between 0 and 1.
+   * @default 0
+   */
+  temperature: z9.number().min(0).max(1).default(0).nullish(),
+  /**
+   * The timestamp granularities to populate for this transcription.
+   * @default ['segment']
+   */
+  timestampGranularities: z9.array(z9.enum(["word", "segment"])).default(["segment"]).nullish()
 });
+
+// src/openai-transcription-model.ts
 var languageMap = {
   afrikaans: "af",
   arabic: "ar",
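Transcription options move into a dedicated, documented `openAITranscriptionProviderOptions` schema. A usage sketch, assuming the AI SDK's `experimental_transcribe` helper and the `openai.transcription` factory (neither is shown in this diff; only the option names, types, and defaults are):

```ts
// Sketch: passing the OpenAI transcription provider options defined above.
// `experimental_transcribe` and `openai.transcription` are assumptions;
// the option names and defaults come from the schema in this diff.
import { experimental_transcribe as transcribe } from 'ai';
import { openai } from '@ai-sdk/openai';
import { readFile } from 'node:fs/promises';

const result = await transcribe({
  model: openai.transcription('whisper-1'),
  audio: await readFile('./audio.mp3'),
  providerOptions: {
    openai: {
      language: 'en', // ISO-639-1
      temperature: 0, // 0..1, default 0
      timestampGranularities: ['word'], // default ['segment']
    },
  },
});

console.log(result.text);
```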
@@ -1504,12 +1520,11 @@ var OpenAITranscriptionModel = class {
    mediaType,
    providerOptions
  }) {
-    var _a, _b, _c, _d, _e;
    const warnings = [];
    const openAIOptions = await parseProviderOptions4({
      provider: "openai",
      providerOptions,
-      schema:
+      schema: openAITranscriptionProviderOptions
    });
    const formData = new FormData();
    const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([convertBase64ToUint8Array(audio)]);
@@ -1517,15 +1532,14 @@ var OpenAITranscriptionModel = class {
    formData.append("file", new File([blob], "audio", { type: mediaType }));
    if (openAIOptions) {
      const transcriptionModelOptions = {
-        include:
-        language:
-        prompt:
-        temperature:
-        timestamp_granularities:
+        include: openAIOptions.include,
+        language: openAIOptions.language,
+        prompt: openAIOptions.prompt,
+        temperature: openAIOptions.temperature,
+        timestamp_granularities: openAIOptions.timestampGranularities
      };
-      for (const key
-
-      if (value !== void 0) {
+      for (const [key, value] of Object.entries(transcriptionModelOptions)) {
+        if (value != null) {
          formData.append(key, String(value));
        }
      }
@@ -1577,15 +1591,15 @@ var OpenAITranscriptionModel = class {
    };
  }
};
-var openaiTranscriptionResponseSchema =
-  text:
-  language:
-  duration:
-  words:
-
-    word:
-    start:
-    end:
+var openaiTranscriptionResponseSchema = z10.object({
+  text: z10.string(),
+  language: z10.string().nullish(),
+  duration: z10.number().nullish(),
+  words: z10.array(
+    z10.object({
+      word: z10.string(),
+      start: z10.number(),
+      end: z10.number()
    })
  ).nullish()
});
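When provider options are present, the camelCase keys are flattened into the multipart form as snake_case fields (`timestampGranularities` becomes `timestamp_granularities`), and the filter changed from `!== void 0` to `!= null`, so explicit `null` values are now skipped as well. A standalone sketch of that loop:

```ts
// Sketch: mirrors the Object.entries loop above. Option values are illustrative.
const transcriptionModelOptions = {
  include: undefined,                // skipped (== null)
  language: 'en',
  prompt: null,                      // now skipped as well (!= null check)
  temperature: 0,                    // kept: 0 is neither null nor undefined
  timestamp_granularities: ['word'],
};

const formData = new FormData();
for (const [key, value] of Object.entries(transcriptionModelOptions)) {
  if (value != null) {
    formData.append(key, String(value));
  }
}

console.log([...formData.keys()]); // ["language", "temperature", "timestamp_granularities"]
```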
@@ -1597,10 +1611,10 @@ import {
   parseProviderOptions as parseProviderOptions5,
   postJsonToApi as postJsonToApi5
 } from "@ai-sdk/provider-utils";
-import { z as
-var OpenAIProviderOptionsSchema =
-  instructions:
-  speed:
+import { z as z11 } from "zod";
+var OpenAIProviderOptionsSchema = z11.object({
+  instructions: z11.string().nullish(),
+  speed: z11.number().min(0.25).max(4).default(1).nullish()
 });
 var OpenAISpeechModel = class {
   constructor(modelId, config) {
@@ -1703,7 +1717,7 @@ import {
   parseProviderOptions as parseProviderOptions6,
   postJsonToApi as postJsonToApi6
 } from "@ai-sdk/provider-utils";
-import { z as
+import { z as z12 } from "zod";

 // src/responses/convert-to-openai-responses-messages.ts
 import {
@@ -2068,55 +2082,55 @@ var OpenAIResponsesLanguageModel = class {
       body,
       failedResponseHandler: openaiFailedResponseHandler,
       successfulResponseHandler: createJsonResponseHandler6(
-
-          id:
-          created_at:
-          model:
-          output:
-
-
-              type:
-              role:
-              content:
-
-                  type:
-                  text:
-                  annotations:
-
-                      type:
-                      start_index:
-                      end_index:
-                      url:
-                      title:
+        z12.object({
+          id: z12.string(),
+          created_at: z12.number(),
+          model: z12.string(),
+          output: z12.array(
+            z12.discriminatedUnion("type", [
+              z12.object({
+                type: z12.literal("message"),
+                role: z12.literal("assistant"),
+                content: z12.array(
+                  z12.object({
+                    type: z12.literal("output_text"),
+                    text: z12.string(),
+                    annotations: z12.array(
+                      z12.object({
+                        type: z12.literal("url_citation"),
+                        start_index: z12.number(),
+                        end_index: z12.number(),
+                        url: z12.string(),
+                        title: z12.string()
                      })
                    )
                  })
                )
              }),
-
-              type:
-              call_id:
-              name:
-              arguments:
+              z12.object({
+                type: z12.literal("function_call"),
+                call_id: z12.string(),
+                name: z12.string(),
+                arguments: z12.string()
              }),
-
-              type:
+              z12.object({
+                type: z12.literal("web_search_call")
              }),
-
-              type:
+              z12.object({
+                type: z12.literal("computer_call")
              }),
-
-              type:
-              summary:
-
-                  type:
-                  text:
+              z12.object({
+                type: z12.literal("reasoning"),
+                summary: z12.array(
+                  z12.object({
+                    type: z12.literal("summary_text"),
+                    text: z12.string()
                  })
                )
              })
            ])
          ),
-          incomplete_details:
+          incomplete_details: z12.object({ reason: z12.string() }).nullable(),
          usage: usageSchema
        })
      ),
@@ -2330,86 +2344,86 @@ var OpenAIResponsesLanguageModel = class {
    };
  }
};
-var usageSchema =
-  input_tokens:
-  input_tokens_details:
-  output_tokens:
-  output_tokens_details:
+var usageSchema = z12.object({
+  input_tokens: z12.number(),
+  input_tokens_details: z12.object({ cached_tokens: z12.number().nullish() }).nullish(),
+  output_tokens: z12.number(),
+  output_tokens_details: z12.object({ reasoning_tokens: z12.number().nullish() }).nullish()
});
-var textDeltaChunkSchema =
-  type:
-  delta:
+var textDeltaChunkSchema = z12.object({
+  type: z12.literal("response.output_text.delta"),
+  delta: z12.string()
});
-var responseFinishedChunkSchema =
-  type:
-  response:
-    incomplete_details:
+var responseFinishedChunkSchema = z12.object({
+  type: z12.enum(["response.completed", "response.incomplete"]),
+  response: z12.object({
+    incomplete_details: z12.object({ reason: z12.string() }).nullish(),
    usage: usageSchema
  })
});
-var responseCreatedChunkSchema =
-  type:
-  response:
-    id:
-    created_at:
-    model:
+var responseCreatedChunkSchema = z12.object({
+  type: z12.literal("response.created"),
+  response: z12.object({
+    id: z12.string(),
+    created_at: z12.number(),
+    model: z12.string()
  })
});
-var responseOutputItemDoneSchema =
-  type:
-  output_index:
-  item:
-
-    type:
+var responseOutputItemDoneSchema = z12.object({
+  type: z12.literal("response.output_item.done"),
+  output_index: z12.number(),
+  item: z12.discriminatedUnion("type", [
+    z12.object({
+      type: z12.literal("message")
    }),
-
-    type:
-    id:
-    call_id:
-    name:
-    arguments:
-    status:
+    z12.object({
+      type: z12.literal("function_call"),
+      id: z12.string(),
+      call_id: z12.string(),
+      name: z12.string(),
+      arguments: z12.string(),
+      status: z12.literal("completed")
    })
  ])
});
-var responseFunctionCallArgumentsDeltaSchema =
-  type:
-  item_id:
-  output_index:
-  delta:
+var responseFunctionCallArgumentsDeltaSchema = z12.object({
+  type: z12.literal("response.function_call_arguments.delta"),
+  item_id: z12.string(),
+  output_index: z12.number(),
+  delta: z12.string()
});
-var responseOutputItemAddedSchema =
-  type:
-  output_index:
-  item:
-
-    type:
+var responseOutputItemAddedSchema = z12.object({
+  type: z12.literal("response.output_item.added"),
+  output_index: z12.number(),
+  item: z12.discriminatedUnion("type", [
+    z12.object({
+      type: z12.literal("message")
    }),
-
-    type:
-    id:
-    call_id:
-    name:
-    arguments:
+    z12.object({
+      type: z12.literal("function_call"),
+      id: z12.string(),
+      call_id: z12.string(),
+      name: z12.string(),
+      arguments: z12.string()
    })
  ])
});
-var responseAnnotationAddedSchema =
-  type:
-  annotation:
-    type:
-    url:
-    title:
+var responseAnnotationAddedSchema = z12.object({
+  type: z12.literal("response.output_text.annotation.added"),
+  annotation: z12.object({
+    type: z12.literal("url_citation"),
+    url: z12.string(),
+    title: z12.string()
  })
});
-var responseReasoningSummaryTextDeltaSchema =
-  type:
-  item_id:
-  output_index:
-  summary_index:
-  delta:
+var responseReasoningSummaryTextDeltaSchema = z12.object({
+  type: z12.literal("response.reasoning_summary_text.delta"),
+  item_id: z12.string(),
+  output_index: z12.number(),
+  summary_index: z12.number(),
+  delta: z12.string()
});
-var openaiResponsesChunkSchema =
+var openaiResponsesChunkSchema = z12.union([
  textDeltaChunkSchema,
  responseFinishedChunkSchema,
  responseCreatedChunkSchema,
@@ -2418,7 +2432,7 @@ var openaiResponsesChunkSchema = z11.union([
  responseOutputItemAddedSchema,
  responseAnnotationAddedSchema,
  responseReasoningSummaryTextDeltaSchema,
-
+  z12.object({ type: z12.string() }).passthrough()
  // fallback for unknown chunks
]);
function isTextDeltaChunk(chunk) {
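The Responses streaming chunk schemas are spelled out against the renumbered `z12` alias, with a passthrough object as the fallback for unknown chunk types. A standalone sketch of the same union-plus-fallback pattern in plain zod (shapes mirror the diff, but this is not the package's exported schema):

```ts
// Sketch: a known chunk parses against its literal schema; anything else
// falls through to the passthrough object, which keeps unrecognized fields.
import { z } from 'zod';

const textDeltaChunkSchema = z.object({
  type: z.literal('response.output_text.delta'),
  delta: z.string(),
});

const chunkSchema = z.union([
  textDeltaChunkSchema,
  z.object({ type: z.string() }).passthrough(), // fallback for unknown chunks
]);

const known = chunkSchema.parse({ type: 'response.output_text.delta', delta: 'Hel' });
const unknown = chunkSchema.parse({ type: 'response.queued', sequence_number: 1 });

console.log(known, unknown);
```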
@@ -2466,16 +2480,16 @@ function getResponsesModelConfig(modelId) {
    requiredAutoTruncation: false
  };
}
-var openaiResponsesProviderOptionsSchema =
-  metadata:
-  parallelToolCalls:
-  previousResponseId:
-  store:
-  user:
-  reasoningEffort:
-  strictSchemas:
-  instructions:
-  reasoningSummary:
+var openaiResponsesProviderOptionsSchema = z12.object({
+  metadata: z12.any().nullish(),
+  parallelToolCalls: z12.boolean().nullish(),
+  previousResponseId: z12.string().nullish(),
+  store: z12.boolean().nullish(),
+  user: z12.string().nullish(),
+  reasoningEffort: z12.string().nullish(),
+  strictSchemas: z12.boolean().nullish(),
+  instructions: z12.string().nullish(),
+  reasoningSummary: z12.string().nullish()
});
export {
  OpenAIChatLanguageModel,
@@ -2487,6 +2501,7 @@ export {
  OpenAITranscriptionModel,
  hasDefaultResponseFormat,
  modelMaxImagesPerCall,
+  openAITranscriptionProviderOptions,
  openaiCompletionProviderOptions,
  openaiEmbeddingProviderOptions,
  openaiProviderOptions