@ai-sdk/openai 2.0.0-beta.3 → 2.0.0-beta.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +7 -0
- package/dist/index.d.mts +5 -1
- package/dist/index.d.ts +5 -1
- package/dist/index.js +289 -190
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +277 -178
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +5 -1
- package/dist/internal/index.d.ts +5 -1
- package/dist/internal/index.js +277 -178
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +271 -172
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
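The substantive change in this release is in the Responses API language model: reasoning output items now round-trip through provider options and provider metadata (`openai.reasoning.id` and `openai.reasoning.encryptedContent`), a new `include` provider option (accepting `"reasoning.encrypted_content"`) is forwarded to the API, non-reasoning models get `unsupported-setting` warnings for `reasoningEffort` and `reasoningSummary`, and `codex-`/`computer-use` model ids are treated as reasoning models. A minimal consumer-side sketch of the new options follows; the `generateText` call from the `ai` package, the model id, and the option values are assumptions for illustration, not part of this diff:

```ts
// Sketch only, assuming AI SDK v5 beta usage with the Responses API model.
// Assumed for illustration: the `ai` package's generateText, the model id,
// and the option values; only `include` and the reasoning provider options
// are taken from this diff.
import { generateText } from 'ai';
import { createOpenAI } from '@ai-sdk/openai';

const openai = createOpenAI({ apiKey: process.env.OPENAI_API_KEY });

const result = await generateText({
  // openai.responses() maps to the OpenAIResponsesLanguageModel changed below
  model: openai.responses('o4-mini'),
  prompt: 'Explain the tradeoffs of flex processing.',
  providerOptions: {
    openai: {
      reasoningEffort: 'low',
      reasoningSummary: 'auto',
      // new in 2.0.0-beta.4: request encrypted reasoning content so that
      // reasoning items can be sent back on later turns
      include: ['reasoning.encrypted_content'],
    },
  },
});

// Reasoning parts in the result now carry providerMetadata.openai.reasoning
// ({ id, encryptedContent }), which convertToOpenAIResponsesMessages reads
// back when the conversation is continued.
console.log(result.text);
```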
package/dist/index.js
CHANGED
@@ -26,7 +26,7 @@ __export(src_exports, {
 module.exports = __toCommonJS(src_exports);
 
 // src/openai-provider.ts
-var …
+var import_provider_utils13 = require("@ai-sdk/provider-utils");
 
 // src/openai-chat-language-model.ts
 var import_provider3 = require("@ai-sdk/provider");
@@ -1867,15 +1867,18 @@ var openaiTranscriptionResponseSchema = import_v412.z.object({
 });
 
 // src/responses/openai-responses-language-model.ts
-var …
-var …
+var import_provider_utils11 = require("@ai-sdk/provider-utils");
+var import_v414 = require("zod/v4");
 
 // src/responses/convert-to-openai-responses-messages.ts
 var import_provider6 = require("@ai-sdk/provider");
-…
+var import_provider_utils10 = require("@ai-sdk/provider-utils");
+var import_v413 = require("zod/v4");
+async function convertToOpenAIResponsesMessages({
   prompt,
   systemMessageMode
 }) {
+  var _a, _b;
   const messages = [];
   const warnings = [];
   for (const { role, content } of prompt) {
@@ -1910,7 +1913,7 @@ function convertToOpenAIResponsesMessages({
         messages.push({
           role: "user",
           content: content.map((part, index) => {
-            var …
+            var _a2, _b2, _c;
             switch (part.type) {
               case "text": {
                 return { type: "input_text", text: part.text };
@@ -1922,7 +1925,7 @@ function convertToOpenAIResponsesMessages({
                   type: "input_image",
                   image_url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${part.data}`,
                   // OpenAI specific extension: image detail
-                  detail: ( …
+                  detail: (_b2 = (_a2 = part.providerOptions) == null ? void 0 : _a2.openai) == null ? void 0 : _b2.imageDetail
                 };
               } else if (part.mediaType === "application/pdf") {
                 if (part.data instanceof URL) {
@@ -1947,6 +1950,7 @@ function convertToOpenAIResponsesMessages({
         break;
       }
       case "assistant": {
+        const reasoningMessages = {};
         for (const part of content) {
           switch (part.type) {
             case "text": {
@@ -1975,6 +1979,43 @@ function convertToOpenAIResponsesMessages({
               });
               break;
             }
+            case "reasoning": {
+              const providerOptions = await (0, import_provider_utils10.parseProviderOptions)({
+                provider: "openai",
+                providerOptions: part.providerOptions,
+                schema: openaiResponsesReasoningProviderOptionsSchema
+              });
+              const reasoningId = (_a = providerOptions == null ? void 0 : providerOptions.reasoning) == null ? void 0 : _a.id;
+              if (reasoningId != null) {
+                const existingReasoningMessage = reasoningMessages[reasoningId];
+                const summaryParts = [];
+                if (part.text.length > 0) {
+                  summaryParts.push({ type: "summary_text", text: part.text });
+                } else {
+                  warnings.push({
+                    type: "other",
+                    message: `Cannot append empty reasoning part to existing reasoning sequence. Skipping reasoning part: ${JSON.stringify(part)}.`
+                  });
+                }
+                if (existingReasoningMessage === void 0) {
+                  reasoningMessages[reasoningId] = {
+                    type: "reasoning",
+                    id: reasoningId,
+                    encrypted_content: (_b = providerOptions == null ? void 0 : providerOptions.reasoning) == null ? void 0 : _b.encryptedContent,
+                    summary: summaryParts
+                  };
+                  messages.push(reasoningMessages[reasoningId]);
+                } else {
+                  existingReasoningMessage.summary.push(...summaryParts);
+                }
+              } else {
+                warnings.push({
+                  type: "other",
+                  message: `Non-OpenAI reasoning parts are not supported. Skipping reasoning part: ${JSON.stringify(part)}.`
+                });
+              }
+              break;
+            }
           }
         }
         break;
@@ -2010,6 +2051,12 @@ function convertToOpenAIResponsesMessages({
   }
   return { messages, warnings };
 }
+var openaiResponsesReasoningProviderOptionsSchema = import_v413.z.object({
+  reasoning: import_v413.z.object({
+    id: import_v413.z.string().nullish(),
+    encryptedContent: import_v413.z.string().nullish()
+  }).nullish()
+});
 
 // src/responses/map-openai-responses-finish-reason.ts
 function mapOpenAIResponseFinishReason({
@@ -2148,12 +2195,12 @@ var OpenAIResponsesLanguageModel = class {
     if (stopSequences != null) {
       warnings.push({ type: "unsupported-setting", setting: "stopSequences" });
     }
-    const { messages, warnings: messageWarnings } = convertToOpenAIResponsesMessages({
+    const { messages, warnings: messageWarnings } = await convertToOpenAIResponsesMessages({
       prompt,
       systemMessageMode: modelConfig.systemMessageMode
     });
     warnings.push(...messageWarnings);
-    const openaiOptions = await (0, …
+    const openaiOptions = await (0, import_provider_utils11.parseProviderOptions)({
       provider: "openai",
       providerOptions,
       schema: openaiResponsesProviderOptionsSchema
@@ -2184,6 +2231,7 @@ var OpenAIResponsesLanguageModel = class {
       user: openaiOptions == null ? void 0 : openaiOptions.user,
       instructions: openaiOptions == null ? void 0 : openaiOptions.instructions,
       service_tier: openaiOptions == null ? void 0 : openaiOptions.serviceTier,
+      include: openaiOptions == null ? void 0 : openaiOptions.include,
       // model-specific settings:
       ...modelConfig.isReasoningModel && ((openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null || (openaiOptions == null ? void 0 : openaiOptions.reasoningSummary) != null) && {
         reasoning: {
@@ -2216,6 +2264,21 @@ var OpenAIResponsesLanguageModel = class {
           details: "topP is not supported for reasoning models"
         });
       }
+    } else {
+      if ((openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null) {
+        warnings.push({
+          type: "unsupported-setting",
+          setting: "reasoningEffort",
+          details: "reasoningEffort is not supported for non-reasoning models"
+        });
+      }
+      if ((openaiOptions == null ? void 0 : openaiOptions.reasoningSummary) != null) {
+        warnings.push({
+          type: "unsupported-setting",
+          setting: "reasoningSummary",
+          details: "reasoningSummary is not supported for non-reasoning models"
+        });
+      }
     }
     if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "flex" && !supportsFlexProcessing2(this.modelId)) {
       warnings.push({
@@ -2244,74 +2307,76 @@ var OpenAIResponsesLanguageModel = class {
     };
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g, _h;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i;
     const { args: body, warnings } = await this.getArgs(options);
     const {
       responseHeaders,
       value: response,
       rawValue: rawResponse
-    } = await (0, …
+    } = await (0, import_provider_utils11.postJsonToApi)({
       url: this.config.url({
         path: "/responses",
         modelId: this.modelId
       }),
-      headers: (0, …
+      headers: (0, import_provider_utils11.combineHeaders)(this.config.headers(), options.headers),
       body,
       failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0, …
-…
-          id: …
-          created_at: …
-          model: …
-          output: …
-…
-…
-              type: …
-              role: …
-              content: …
-…
-                    type: …
-                    text: …
-                    annotations: …
-…
-                        type: …
-                        start_index: …
-                        end_index: …
-                        url: …
-                        title: …
+      successfulResponseHandler: (0, import_provider_utils11.createJsonResponseHandler)(
+        import_v414.z.object({
+          id: import_v414.z.string(),
+          created_at: import_v414.z.number(),
+          model: import_v414.z.string(),
+          output: import_v414.z.array(
+            import_v414.z.discriminatedUnion("type", [
+              import_v414.z.object({
+                type: import_v414.z.literal("message"),
+                role: import_v414.z.literal("assistant"),
+                content: import_v414.z.array(
+                  import_v414.z.object({
+                    type: import_v414.z.literal("output_text"),
+                    text: import_v414.z.string(),
+                    annotations: import_v414.z.array(
+                      import_v414.z.object({
+                        type: import_v414.z.literal("url_citation"),
+                        start_index: import_v414.z.number(),
+                        end_index: import_v414.z.number(),
+                        url: import_v414.z.string(),
+                        title: import_v414.z.string()
                       })
                     )
                   })
                 )
               }),
-…
-                type: …
-                call_id: …
-                name: …
-                arguments: …
+              import_v414.z.object({
+                type: import_v414.z.literal("function_call"),
+                call_id: import_v414.z.string(),
+                name: import_v414.z.string(),
+                arguments: import_v414.z.string()
               }),
-…
-                type: …
-                id: …
-                status: …
+              import_v414.z.object({
+                type: import_v414.z.literal("web_search_call"),
+                id: import_v414.z.string(),
+                status: import_v414.z.string().optional()
              }),
-…
-                type: …
-                id: …
-                status: …
+              import_v414.z.object({
+                type: import_v414.z.literal("computer_call"),
+                id: import_v414.z.string(),
+                status: import_v414.z.string().optional()
              }),
-…
-                type: …
-…
-…
-…
-…
+              import_v414.z.object({
+                type: import_v414.z.literal("reasoning"),
+                id: import_v414.z.string(),
+                encrypted_content: import_v414.z.string().nullish(),
+                summary: import_v414.z.array(
+                  import_v414.z.object({
+                    type: import_v414.z.literal("summary_text"),
+                    text: import_v414.z.string()
                   })
                 )
               })
             ])
           ),
-          incomplete_details: …
+          incomplete_details: import_v414.z.object({ reason: import_v414.z.string() }).nullable(),
           usage: usageSchema2
         })
       ),
@@ -2322,10 +2387,23 @@ var OpenAIResponsesLanguageModel = class {
     for (const part of response.output) {
       switch (part.type) {
         case "reasoning": {
-…
-          type: "…
-…
-…
+          if (part.summary.length === 0) {
+            part.summary.push({ type: "summary_text", text: "" });
+          }
+          for (const summary of part.summary) {
+            content.push({
+              type: "reasoning",
+              text: summary.text,
+              providerMetadata: {
+                openai: {
+                  reasoning: {
+                    id: part.id,
+                    encryptedContent: (_a = part.encrypted_content) != null ? _a : null
+                  }
+                }
+              }
+            });
+          }
           break;
         }
         case "message": {
@@ -2338,7 +2416,7 @@ var OpenAIResponsesLanguageModel = class {
             content.push({
               type: "source",
               sourceType: "url",
-              id: ( …
+              id: (_d = (_c = (_b = this.config).generateId) == null ? void 0 : _c.call(_b)) != null ? _d : (0, import_provider_utils11.generateId)(),
               url: annotation.url,
               title: annotation.title
             });
@@ -2397,15 +2475,15 @@ var OpenAIResponsesLanguageModel = class {
     return {
       content,
       finishReason: mapOpenAIResponseFinishReason({
-        finishReason: ( …
+        finishReason: (_e = response.incomplete_details) == null ? void 0 : _e.reason,
         hasToolCalls: content.some((part) => part.type === "tool-call")
       }),
       usage: {
         inputTokens: response.usage.input_tokens,
         outputTokens: response.usage.output_tokens,
         totalTokens: response.usage.input_tokens + response.usage.output_tokens,
-        reasoningTokens: ( …
-        cachedInputTokens: ( …
+        reasoningTokens: (_g = (_f = response.usage.output_tokens_details) == null ? void 0 : _f.reasoning_tokens) != null ? _g : void 0,
+        cachedInputTokens: (_i = (_h = response.usage.input_tokens_details) == null ? void 0 : _h.cached_tokens) != null ? _i : void 0
       },
       request: { body },
       response: {
@@ -2425,18 +2503,18 @@ var OpenAIResponsesLanguageModel = class {
   }
   async doStream(options) {
     const { args: body, warnings } = await this.getArgs(options);
-    const { responseHeaders, value: response } = await (0, …
+    const { responseHeaders, value: response } = await (0, import_provider_utils11.postJsonToApi)({
       url: this.config.url({
         path: "/responses",
         modelId: this.modelId
       }),
-      headers: (0, …
+      headers: (0, import_provider_utils11.combineHeaders)(this.config.headers(), options.headers),
       body: {
         ...body,
         stream: true
       },
       failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0, …
+      successfulResponseHandler: (0, import_provider_utils11.createEventSourceResponseHandler)(
        openaiResponsesChunkSchema
      ),
      abortSignal: options.abortSignal,
@@ -2459,7 +2537,7 @@ var OpenAIResponsesLanguageModel = class {
           controller.enqueue({ type: "stream-start", warnings });
         },
         transform(chunk, controller) {
-          var _a, _b, _c, _d, _e, _f, _g, _h;
+          var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
           if (options.includeRawChunks) {
             controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
           }
@@ -2508,7 +2586,15 @@ var OpenAIResponsesLanguageModel = class {
           } else if (value.item.type === "reasoning") {
             controller.enqueue({
               type: "reasoning-start",
-              id: value.item.id
+              id: value.item.id,
+              providerMetadata: {
+                openai: {
+                  reasoning: {
+                    id: value.item.id,
+                    encryptedContent: (_a = value.item.encrypted_content) != null ? _a : null
+                  }
+                }
+              }
             });
           }
         } else if (isResponseOutputItemDoneChunk(value)) {
@@ -2581,7 +2667,15 @@ var OpenAIResponsesLanguageModel = class {
           } else if (value.item.type === "reasoning") {
             controller.enqueue({
               type: "reasoning-end",
-              id: value.item.id
+              id: value.item.id,
+              providerMetadata: {
+                openai: {
+                  reasoning: {
+                    id: value.item.id,
+                    encryptedContent: (_b = value.item.encrypted_content) != null ? _b : null
+                  }
+                }
+              }
             });
           }
         } else if (isResponseFunctionCallArgumentsDeltaChunk(value)) {
@@ -2610,24 +2704,24 @@ var OpenAIResponsesLanguageModel = class {
         } else if (isResponseReasoningSummaryTextDeltaChunk(value)) {
           controller.enqueue({
             type: "reasoning-delta",
-…
-…
+            id: value.item_id,
+            delta: value.delta
           });
         } else if (isResponseFinishedChunk(value)) {
           finishReason = mapOpenAIResponseFinishReason({
-            finishReason: ( …
+            finishReason: (_c = value.response.incomplete_details) == null ? void 0 : _c.reason,
             hasToolCalls
           });
           usage.inputTokens = value.response.usage.input_tokens;
           usage.outputTokens = value.response.usage.output_tokens;
           usage.totalTokens = value.response.usage.input_tokens + value.response.usage.output_tokens;
-          usage.reasoningTokens = ( …
-          usage.cachedInputTokens = ( …
+          usage.reasoningTokens = (_e = (_d = value.response.usage.output_tokens_details) == null ? void 0 : _d.reasoning_tokens) != null ? _e : void 0;
+          usage.cachedInputTokens = (_g = (_f = value.response.usage.input_tokens_details) == null ? void 0 : _f.cached_tokens) != null ? _g : void 0;
         } else if (isResponseAnnotationAddedChunk(value)) {
           controller.enqueue({
             type: "source",
             sourceType: "url",
-            id: ( …
+            id: (_j = (_i = (_h = self.config).generateId) == null ? void 0 : _i.call(_h)) != null ? _j : (0, import_provider_utils11.generateId)(),
             url: value.annotation.url,
             title: value.annotation.title
           });
@@ -2652,124 +2746,129 @@ var OpenAIResponsesLanguageModel = class {
     };
   }
 };
-var usageSchema2 = …
-  input_tokens: …
-  input_tokens_details: …
-  output_tokens: …
-  output_tokens_details: …
+var usageSchema2 = import_v414.z.object({
+  input_tokens: import_v414.z.number(),
+  input_tokens_details: import_v414.z.object({ cached_tokens: import_v414.z.number().nullish() }).nullish(),
+  output_tokens: import_v414.z.number(),
+  output_tokens_details: import_v414.z.object({ reasoning_tokens: import_v414.z.number().nullish() }).nullish()
 });
-var textDeltaChunkSchema = …
-  type: …
-  item_id: …
-  delta: …
+var textDeltaChunkSchema = import_v414.z.object({
+  type: import_v414.z.literal("response.output_text.delta"),
+  item_id: import_v414.z.string(),
+  delta: import_v414.z.string()
 });
-var responseFinishedChunkSchema = …
-  type: …
-  response: …
-    incomplete_details: …
+var responseFinishedChunkSchema = import_v414.z.object({
+  type: import_v414.z.enum(["response.completed", "response.incomplete"]),
+  response: import_v414.z.object({
+    incomplete_details: import_v414.z.object({ reason: import_v414.z.string() }).nullish(),
     usage: usageSchema2
   })
 });
-var responseCreatedChunkSchema = …
-  type: …
-  response: …
-    id: …
-    created_at: …
-    model: …
+var responseCreatedChunkSchema = import_v414.z.object({
+  type: import_v414.z.literal("response.created"),
+  response: import_v414.z.object({
+    id: import_v414.z.string(),
+    created_at: import_v414.z.number(),
+    model: import_v414.z.string()
   })
 });
-var responseOutputItemAddedSchema = …
-  type: …
-  output_index: …
-  item: …
-…
-      type: …
-      id: …
+var responseOutputItemAddedSchema = import_v414.z.object({
+  type: import_v414.z.literal("response.output_item.added"),
+  output_index: import_v414.z.number(),
+  item: import_v414.z.discriminatedUnion("type", [
+    import_v414.z.object({
+      type: import_v414.z.literal("message"),
+      id: import_v414.z.string()
     }),
-…
-      type: …
-      id: …
+    import_v414.z.object({
+      type: import_v414.z.literal("reasoning"),
+      id: import_v414.z.string(),
+      encrypted_content: import_v414.z.string().nullish(),
+      summary: import_v414.z.array(
+        import_v414.z.object({
+          type: import_v414.z.literal("summary_text"),
+          text: import_v414.z.string()
+        })
+      )
     }),
-…
-      type: …
-      id: …
-      call_id: …
-      name: …
-      arguments: …
+    import_v414.z.object({
+      type: import_v414.z.literal("function_call"),
+      id: import_v414.z.string(),
+      call_id: import_v414.z.string(),
+      name: import_v414.z.string(),
+      arguments: import_v414.z.string()
     }),
-…
-      type: …
-      id: …
-      status: …
+    import_v414.z.object({
+      type: import_v414.z.literal("web_search_call"),
+      id: import_v414.z.string(),
+      status: import_v414.z.string()
    }),
-…
-      type: …
-      id: …
-      status: …
+    import_v414.z.object({
+      type: import_v414.z.literal("computer_call"),
+      id: import_v414.z.string(),
+      status: import_v414.z.string()
    })
  ])
 });
-var responseOutputItemDoneSchema = …
-  type: …
-  output_index: …
-  item: …
-…
-      type: …
-      id: …
+var responseOutputItemDoneSchema = import_v414.z.object({
+  type: import_v414.z.literal("response.output_item.done"),
+  output_index: import_v414.z.number(),
+  item: import_v414.z.discriminatedUnion("type", [
+    import_v414.z.object({
+      type: import_v414.z.literal("message"),
+      id: import_v414.z.string()
     }),
-…
-      type: …
-      id: …
+    import_v414.z.object({
+      type: import_v414.z.literal("reasoning"),
+      id: import_v414.z.string(),
+      encrypted_content: import_v414.z.string().nullish(),
+      summary: import_v414.z.array(
+        import_v414.z.object({
+          type: import_v414.z.literal("summary_text"),
+          text: import_v414.z.string()
+        })
+      )
     }),
-…
-      type: …
-      id: …
-      call_id: …
-      name: …
-      arguments: …
-      status: …
+    import_v414.z.object({
+      type: import_v414.z.literal("function_call"),
+      id: import_v414.z.string(),
+      call_id: import_v414.z.string(),
+      name: import_v414.z.string(),
+      arguments: import_v414.z.string(),
+      status: import_v414.z.literal("completed")
     }),
-…
-      type: …
-      id: …
-      status: …
+    import_v414.z.object({
+      type: import_v414.z.literal("web_search_call"),
+      id: import_v414.z.string(),
+      status: import_v414.z.literal("completed")
    }),
-…
-      type: …
-      id: …
-      status: …
+    import_v414.z.object({
+      type: import_v414.z.literal("computer_call"),
+      id: import_v414.z.string(),
+      status: import_v414.z.literal("completed")
    })
  ])
 });
-var responseFunctionCallArgumentsDeltaSchema = …
-  type: …
-  item_id: …
-  output_index: …
-  delta: …
+var responseFunctionCallArgumentsDeltaSchema = import_v414.z.object({
+  type: import_v414.z.literal("response.function_call_arguments.delta"),
+  item_id: import_v414.z.string(),
+  output_index: import_v414.z.number(),
+  delta: import_v414.z.string()
 });
-var responseAnnotationAddedSchema = …
-  type: …
-  annotation: …
-    type: …
-    url: …
-    title: …
+var responseAnnotationAddedSchema = import_v414.z.object({
+  type: import_v414.z.literal("response.output_text.annotation.added"),
+  annotation: import_v414.z.object({
+    type: import_v414.z.literal("url_citation"),
+    url: import_v414.z.string(),
+    title: import_v414.z.string()
   })
 });
-var responseReasoningSummaryTextDeltaSchema = …
-  type: …
-  item_id: …
-…
-  summary_index: import_v413.z.number(),
-  delta: import_v413.z.string()
+var responseReasoningSummaryTextDeltaSchema = import_v414.z.object({
+  type: import_v414.z.literal("response.reasoning_summary_text.delta"),
+  item_id: import_v414.z.string(),
+  delta: import_v414.z.string()
 });
-var …
-  type: import_v413.z.literal("response.reasoning_summary_part.done"),
-  item_id: import_v413.z.string(),
-  output_index: import_v413.z.number(),
-  summary_index: import_v413.z.number(),
-  part: import_v413.z.unknown().nullish()
-});
-var openaiResponsesChunkSchema = import_v413.z.union([
+var openaiResponsesChunkSchema = import_v414.z.union([
   textDeltaChunkSchema,
   responseFinishedChunkSchema,
   responseCreatedChunkSchema,
@@ -2778,8 +2877,7 @@ var openaiResponsesChunkSchema = import_v413.z.union([
   responseFunctionCallArgumentsDeltaSchema,
   responseAnnotationAddedSchema,
   responseReasoningSummaryTextDeltaSchema,
-…
-  import_v413.z.object({ type: import_v413.z.string() }).passthrough()
+  import_v414.z.object({ type: import_v414.z.string() }).passthrough()
   // fallback for unknown chunks
 ]);
 function isTextDeltaChunk(chunk) {
@@ -2807,7 +2905,7 @@ function isResponseReasoningSummaryTextDeltaChunk(chunk) {
   return chunk.type === "response.reasoning_summary_text.delta";
 }
 function getResponsesModelConfig(modelId) {
-  if (modelId.startsWith("o")) {
+  if (modelId.startsWith("o") || modelId.startsWith("codex-") || modelId.startsWith("computer-use")) {
     if (modelId.startsWith("o1-mini") || modelId.startsWith("o1-preview")) {
       return {
         isReasoningModel: true,
@@ -2830,25 +2928,26 @@ function getResponsesModelConfig(modelId) {
 function supportsFlexProcessing2(modelId) {
   return modelId.startsWith("o3") || modelId.startsWith("o4-mini");
 }
-var openaiResponsesProviderOptionsSchema = …
-  metadata: …
-  parallelToolCalls: …
-  previousResponseId: …
-  store: …
-  user: …
-  reasoningEffort: …
-  strictJsonSchema: …
-  instructions: …
-  reasoningSummary: …
-  serviceTier: …
+var openaiResponsesProviderOptionsSchema = import_v414.z.object({
+  metadata: import_v414.z.any().nullish(),
+  parallelToolCalls: import_v414.z.boolean().nullish(),
+  previousResponseId: import_v414.z.string().nullish(),
+  store: import_v414.z.boolean().nullish(),
+  user: import_v414.z.string().nullish(),
+  reasoningEffort: import_v414.z.string().nullish(),
+  strictJsonSchema: import_v414.z.boolean().nullish(),
+  instructions: import_v414.z.string().nullish(),
+  reasoningSummary: import_v414.z.string().nullish(),
+  serviceTier: import_v414.z.enum(["auto", "flex"]).nullish(),
+  include: import_v414.z.array(import_v414.z.enum(["reasoning.encrypted_content"])).nullish()
 });
 
 // src/openai-speech-model.ts
-var …
-var …
-var OpenAIProviderOptionsSchema = …
-  instructions: …
-  speed: …
+var import_provider_utils12 = require("@ai-sdk/provider-utils");
+var import_v415 = require("zod/v4");
+var OpenAIProviderOptionsSchema = import_v415.z.object({
+  instructions: import_v415.z.string().nullish(),
+  speed: import_v415.z.number().min(0.25).max(4).default(1).nullish()
 });
 var OpenAISpeechModel = class {
   constructor(modelId, config) {
@@ -2869,7 +2968,7 @@ var OpenAISpeechModel = class {
     providerOptions
   }) {
     const warnings = [];
-    const openAIOptions = await (0, …
+    const openAIOptions = await (0, import_provider_utils12.parseProviderOptions)({
       provider: "openai",
       providerOptions,
       schema: OpenAIProviderOptionsSchema
@@ -2922,15 +3021,15 @@ var OpenAISpeechModel = class {
       value: audio,
       responseHeaders,
       rawValue: rawResponse
-    } = await (0, …
+    } = await (0, import_provider_utils12.postJsonToApi)({
       url: this.config.url({
         path: "/audio/speech",
         modelId: this.modelId
       }),
-      headers: (0, …
+      headers: (0, import_provider_utils12.combineHeaders)(this.config.headers(), options.headers),
       body: requestBody,
       failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0, …
+      successfulResponseHandler: (0, import_provider_utils12.createBinaryResponseHandler)(),
       abortSignal: options.abortSignal,
       fetch: this.config.fetch
     });
@@ -2953,10 +3052,10 @@ var OpenAISpeechModel = class {
 // src/openai-provider.ts
 function createOpenAI(options = {}) {
   var _a, _b;
-  const baseURL = (_a = (0, …
+  const baseURL = (_a = (0, import_provider_utils13.withoutTrailingSlash)(options.baseURL)) != null ? _a : "https://api.openai.com/v1";
   const providerName = (_b = options.name) != null ? _b : "openai";
   const getHeaders = () => ({
-    Authorization: `Bearer ${(0, …
+    Authorization: `Bearer ${(0, import_provider_utils13.loadApiKey)({
       apiKey: options.apiKey,
       environmentVariableName: "OPENAI_API_KEY",
       description: "OpenAI"