@ai-sdk/openai 2.0.0-beta.2 → 2.0.0-beta.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +13 -0
- package/dist/index.d.mts +6 -2
- package/dist/index.d.ts +6 -2
- package/dist/index.js +317 -212
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +305 -200
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +7 -2
- package/dist/internal/index.d.ts +7 -2
- package/dist/internal/index.js +305 -200
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +299 -194
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
@@ -26,7 +26,7 @@ __export(src_exports, {
 module.exports = __toCommonJS(src_exports);

 // src/openai-provider.ts
-var …
+var import_provider_utils13 = require("@ai-sdk/provider-utils");

 // src/openai-chat-language-model.ts
 var import_provider3 = require("@ai-sdk/provider");
@@ -302,7 +302,13 @@ var openaiProviderOptions = import_v4.z.object({
 *
 * @default 'auto'
 */
-serviceTier: import_v4.z.enum(["auto", "flex"]).optional()
+serviceTier: import_v4.z.enum(["auto", "flex"]).optional(),
+/**
+* Whether to use strict JSON schema validation.
+*
+* @default true
+*/
+strictJsonSchema: import_v4.z.boolean().optional()
 });

 // src/openai-error.ts
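Note: the new strictJsonSchema chat provider option above can be passed through providerOptions. A minimal sketch, not taken from the package, assuming the AI SDK's generateObject call, the gpt-4o model id, and zod:

import { generateObject } from 'ai';
import { openai } from '@ai-sdk/openai';
import { z } from 'zod';

const { object } = await generateObject({
  model: openai('gpt-4o'),
  schema: z.object({ city: z.string() }),
  prompt: 'Name a city in France.',
  providerOptions: {
    openai: {
      // new option in this release; the chat model applies a runtime default of false
      strictJsonSchema: true,
    },
  },
});

The hunks that follow forward the same flag as strict on function tools and on the json_schema response format.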
@@ -399,7 +405,8 @@ var webSearchPreview = (0, import_provider_utils4.createProviderDefinedToolFacto
 function prepareTools({
 tools,
 toolChoice,
-structuredOutputs
+structuredOutputs,
+strictJsonSchema
 }) {
 tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
 const toolWarnings = [];
@@ -416,7 +423,7 @@ function prepareTools({
 name: tool.name,
 description: tool.description,
 parameters: tool.inputSchema,
-strict: structuredOutputs ? …
+strict: structuredOutputs ? strictJsonSchema : void 0
 }
 });
 break;
@@ -508,7 +515,7 @@ var OpenAIChatLanguageModel = class {
 toolChoice,
 providerOptions
 }) {
-var _a, _b, _c;
+var _a, _b, _c, _d;
 const warnings = [];
 const openaiOptions = (_a = await (0, import_provider_utils5.parseProviderOptions)({
 provider: "openai",
@@ -536,6 +543,7 @@ var OpenAIChatLanguageModel = class {
 }
 );
 warnings.push(...messageWarnings);
+const strictJsonSchema = (_c = openaiOptions.strictJsonSchema) != null ? _c : false;
 const baseArgs = {
 // model id:
 model: this.modelId,
@@ -551,18 +559,15 @@ var OpenAIChatLanguageModel = class {
 top_p: topP,
 frequency_penalty: frequencyPenalty,
 presence_penalty: presencePenalty,
-response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? …
- …
- …
- …
- …
- …
- …
- …
- …
-}
-} : { type: "json_object" }
-) : void 0,
+response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? structuredOutputs && responseFormat.schema != null ? {
+type: "json_schema",
+json_schema: {
+schema: responseFormat.schema,
+strict: strictJsonSchema,
+name: (_d = responseFormat.name) != null ? _d : "response",
+description: responseFormat.description
+}
+} : { type: "json_object" } : void 0,
 stop: stopSequences,
 seed,
 // openai specific settings:
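Note: for reference, the response_format branch above now produces a request payload of this shape when a JSON response format with a schema is requested (a sketch with illustrative values, not output captured from the package):

const responseFormat = {
  type: 'json_schema',
  json_schema: {
    schema: { /* responseFormat.schema */ },
    strict: false,          // strictJsonSchema, runtime default false
    name: 'response',       // responseFormat.name ?? 'response'
    description: undefined, // responseFormat.description
  },
};

Without a schema it still falls back to { type: 'json_object' }.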
@@ -661,7 +666,8 @@ var OpenAIChatLanguageModel = class {
 } = prepareTools({
 tools,
 toolChoice,
-structuredOutputs
+structuredOutputs,
+strictJsonSchema
 });
 return {
 args: {
@@ -1861,15 +1867,18 @@ var openaiTranscriptionResponseSchema = import_v412.z.object({
 });

 // src/responses/openai-responses-language-model.ts
-var …
-var …
+var import_provider_utils11 = require("@ai-sdk/provider-utils");
+var import_v414 = require("zod/v4");

 // src/responses/convert-to-openai-responses-messages.ts
 var import_provider6 = require("@ai-sdk/provider");
- …
+var import_provider_utils10 = require("@ai-sdk/provider-utils");
+var import_v413 = require("zod/v4");
+async function convertToOpenAIResponsesMessages({
 prompt,
 systemMessageMode
 }) {
+var _a, _b;
 const messages = [];
 const warnings = [];
 for (const { role, content } of prompt) {
@@ -1904,7 +1913,7 @@ function convertToOpenAIResponsesMessages({
 messages.push({
 role: "user",
 content: content.map((part, index) => {
-var …
+var _a2, _b2, _c;
 switch (part.type) {
 case "text": {
 return { type: "input_text", text: part.text };
@@ -1916,7 +1925,7 @@ function convertToOpenAIResponsesMessages({
 type: "input_image",
 image_url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${part.data}`,
 // OpenAI specific extension: image detail
-detail: ( …
+detail: (_b2 = (_a2 = part.providerOptions) == null ? void 0 : _a2.openai) == null ? void 0 : _b2.imageDetail
 };
 } else if (part.mediaType === "application/pdf") {
 if (part.data instanceof URL) {
@@ -1941,6 +1950,7 @@ function convertToOpenAIResponsesMessages({
 break;
 }
 case "assistant": {
+const reasoningMessages = {};
 for (const part of content) {
 switch (part.type) {
 case "text": {
@@ -1969,6 +1979,43 @@ function convertToOpenAIResponsesMessages({
 });
 break;
 }
+case "reasoning": {
+const providerOptions = await (0, import_provider_utils10.parseProviderOptions)({
+provider: "openai",
+providerOptions: part.providerOptions,
+schema: openaiResponsesReasoningProviderOptionsSchema
+});
+const reasoningId = (_a = providerOptions == null ? void 0 : providerOptions.reasoning) == null ? void 0 : _a.id;
+if (reasoningId != null) {
+const existingReasoningMessage = reasoningMessages[reasoningId];
+const summaryParts = [];
+if (part.text.length > 0) {
+summaryParts.push({ type: "summary_text", text: part.text });
+} else {
+warnings.push({
+type: "other",
+message: `Cannot append empty reasoning part to existing reasoning sequence. Skipping reasoning part: ${JSON.stringify(part)}.`
+});
+}
+if (existingReasoningMessage === void 0) {
+reasoningMessages[reasoningId] = {
+type: "reasoning",
+id: reasoningId,
+encrypted_content: (_b = providerOptions == null ? void 0 : providerOptions.reasoning) == null ? void 0 : _b.encryptedContent,
+summary: summaryParts
+};
+messages.push(reasoningMessages[reasoningId]);
+} else {
+existingReasoningMessage.summary.push(...summaryParts);
+}
+} else {
+warnings.push({
+type: "other",
+message: `Non-OpenAI reasoning parts are not supported. Skipping reasoning part: ${JSON.stringify(part)}.`
+});
+}
+break;
+}
 }
 }
 break;
@@ -2004,6 +2051,12 @@ function convertToOpenAIResponsesMessages({
 }
 return { messages, warnings };
 }
+var openaiResponsesReasoningProviderOptionsSchema = import_v413.z.object({
+reasoning: import_v413.z.object({
+id: import_v413.z.string().nullish(),
+encryptedContent: import_v413.z.string().nullish()
+}).nullish()
+});

 // src/responses/map-openai-responses-finish-reason.ts
 function mapOpenAIResponseFinishReason({
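Note: the reasoning handling added above groups assistant reasoning parts by an OpenAI-specific id and replays them to the Responses API as reasoning items. A sketch of the part shape it consumes, with hypothetical values; the providerOptions keys match openaiResponsesReasoningProviderOptionsSchema:

const reasoningPart = {
  type: 'reasoning' as const,
  text: 'Summarized reasoning for this step...',
  providerOptions: {
    openai: {
      reasoning: {
        id: 'rs_123',                 // groups consecutive parts into one reasoning message
        encryptedContent: 'gAAAA...', // optional; sent back as encrypted_content
      },
    },
  },
};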
@@ -2028,7 +2081,7 @@ var import_provider7 = require("@ai-sdk/provider");
 function prepareResponsesTools({
 tools,
 toolChoice,
- …
+strictJsonSchema
 }) {
 tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
 const toolWarnings = [];
@@ -2044,7 +2097,7 @@ function prepareResponsesTools({
 name: tool.name,
 description: tool.description,
 parameters: tool.inputSchema,
-strict: …
+strict: strictJsonSchema
 });
 break;
 case "provider-defined":
@@ -2142,17 +2195,17 @@ var OpenAIResponsesLanguageModel = class {
 if (stopSequences != null) {
 warnings.push({ type: "unsupported-setting", setting: "stopSequences" });
 }
-const { messages, warnings: messageWarnings } = convertToOpenAIResponsesMessages({
+const { messages, warnings: messageWarnings } = await convertToOpenAIResponsesMessages({
 prompt,
 systemMessageMode: modelConfig.systemMessageMode
 });
 warnings.push(...messageWarnings);
-const openaiOptions = await (0, …
+const openaiOptions = await (0, import_provider_utils11.parseProviderOptions)({
 provider: "openai",
 providerOptions,
 schema: openaiResponsesProviderOptionsSchema
 });
-const …
+const strictJsonSchema = (_a = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _a : false;
 const baseArgs = {
 model: this.modelId,
 input: messages,
@@ -2163,7 +2216,7 @@ var OpenAIResponsesLanguageModel = class {
 text: {
 format: responseFormat.schema != null ? {
 type: "json_schema",
-strict: …
+strict: strictJsonSchema,
 name: (_b = responseFormat.name) != null ? _b : "response",
 description: responseFormat.description,
 schema: responseFormat.schema
@@ -2178,6 +2231,7 @@ var OpenAIResponsesLanguageModel = class {
 user: openaiOptions == null ? void 0 : openaiOptions.user,
 instructions: openaiOptions == null ? void 0 : openaiOptions.instructions,
 service_tier: openaiOptions == null ? void 0 : openaiOptions.serviceTier,
+include: openaiOptions == null ? void 0 : openaiOptions.include,
 // model-specific settings:
 ...modelConfig.isReasoningModel && ((openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null || (openaiOptions == null ? void 0 : openaiOptions.reasoningSummary) != null) && {
 reasoning: {
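Note: the include value above is forwarded verbatim to the Responses API request body. A minimal sketch of requesting encrypted reasoning content, assuming the AI SDK's generateText call and the openai.responses() model factory:

import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';

const result = await generateText({
  model: openai.responses('o4-mini'),
  prompt: 'Solve the riddle step by step.',
  providerOptions: {
    openai: {
      // currently the only value accepted by the schema further below
      include: ['reasoning.encrypted_content'],
    },
  },
});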
@@ -2210,6 +2264,21 @@ var OpenAIResponsesLanguageModel = class {
 details: "topP is not supported for reasoning models"
 });
 }
+} else {
+if ((openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null) {
+warnings.push({
+type: "unsupported-setting",
+setting: "reasoningEffort",
+details: "reasoningEffort is not supported for non-reasoning models"
+});
+}
+if ((openaiOptions == null ? void 0 : openaiOptions.reasoningSummary) != null) {
+warnings.push({
+type: "unsupported-setting",
+setting: "reasoningSummary",
+details: "reasoningSummary is not supported for non-reasoning models"
+});
+}
 }
 if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "flex" && !supportsFlexProcessing2(this.modelId)) {
 warnings.push({
@@ -2226,7 +2295,7 @@ var OpenAIResponsesLanguageModel = class {
 } = prepareResponsesTools({
 tools,
 toolChoice,
- …
+strictJsonSchema
 });
 return {
 args: {
@@ -2238,74 +2307,76 @@ var OpenAIResponsesLanguageModel = class {
 };
 }
 async doGenerate(options) {
-var _a, _b, _c, _d, _e, _f, _g, _h;
+var _a, _b, _c, _d, _e, _f, _g, _h, _i;
 const { args: body, warnings } = await this.getArgs(options);
 const {
 responseHeaders,
 value: response,
 rawValue: rawResponse
-} = await (0, …
+} = await (0, import_provider_utils11.postJsonToApi)({
 url: this.config.url({
 path: "/responses",
 modelId: this.modelId
 }),
-headers: (0, …
+headers: (0, import_provider_utils11.combineHeaders)(this.config.headers(), options.headers),
 body,
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0, …
- …
-id: …
-created_at: …
-model: …
-output: …
- …
- …
-type: …
-role: …
-content: …
- …
-type: …
-text: …
-annotations: …
- …
-type: …
-start_index: …
-end_index: …
-url: …
-title: …
+successfulResponseHandler: (0, import_provider_utils11.createJsonResponseHandler)(
+import_v414.z.object({
+id: import_v414.z.string(),
+created_at: import_v414.z.number(),
+model: import_v414.z.string(),
+output: import_v414.z.array(
+import_v414.z.discriminatedUnion("type", [
+import_v414.z.object({
+type: import_v414.z.literal("message"),
+role: import_v414.z.literal("assistant"),
+content: import_v414.z.array(
+import_v414.z.object({
+type: import_v414.z.literal("output_text"),
+text: import_v414.z.string(),
+annotations: import_v414.z.array(
+import_v414.z.object({
+type: import_v414.z.literal("url_citation"),
+start_index: import_v414.z.number(),
+end_index: import_v414.z.number(),
+url: import_v414.z.string(),
+title: import_v414.z.string()
 })
 )
 })
 )
 }),
- …
-type: …
-call_id: …
-name: …
-arguments: …
+import_v414.z.object({
+type: import_v414.z.literal("function_call"),
+call_id: import_v414.z.string(),
+name: import_v414.z.string(),
+arguments: import_v414.z.string()
 }),
- …
-type: …
-id: …
-status: …
+import_v414.z.object({
+type: import_v414.z.literal("web_search_call"),
+id: import_v414.z.string(),
+status: import_v414.z.string().optional()
 }),
- …
-type: …
-id: …
-status: …
+import_v414.z.object({
+type: import_v414.z.literal("computer_call"),
+id: import_v414.z.string(),
+status: import_v414.z.string().optional()
 }),
- …
-type: …
- …
- …
- …
- …
+import_v414.z.object({
+type: import_v414.z.literal("reasoning"),
+id: import_v414.z.string(),
+encrypted_content: import_v414.z.string().nullish(),
+summary: import_v414.z.array(
+import_v414.z.object({
+type: import_v414.z.literal("summary_text"),
+text: import_v414.z.string()
 })
 )
 })
 ])
 ),
-incomplete_details: …
+incomplete_details: import_v414.z.object({ reason: import_v414.z.string() }).nullable(),
 usage: usageSchema2
 })
 ),
@@ -2316,10 +2387,23 @@ var OpenAIResponsesLanguageModel = class {
 for (const part of response.output) {
 switch (part.type) {
 case "reasoning": {
- …
-type: "…
- …
- …
+if (part.summary.length === 0) {
+part.summary.push({ type: "summary_text", text: "" });
+}
+for (const summary of part.summary) {
+content.push({
+type: "reasoning",
+text: summary.text,
+providerMetadata: {
+openai: {
+reasoning: {
+id: part.id,
+encryptedContent: (_a = part.encrypted_content) != null ? _a : null
+}
+}
+}
+});
+}
 break;
 }
 case "message": {
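Note: after the change above, every generated reasoning part carries providerMetadata.openai.reasoning with the reasoning item id and, when requested via include, its encrypted content. A sketch of reading it back, assuming generateText's result.content parts:

for (const part of result.content) {
  if (part.type === 'reasoning') {
    const reasoning = part.providerMetadata?.openai?.reasoning as
      | { id: string; encryptedContent: string | null }
      | undefined;
    console.log(part.text, reasoning?.id, reasoning?.encryptedContent);
  }
}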
@@ -2332,7 +2416,7 @@ var OpenAIResponsesLanguageModel = class {
 content.push({
 type: "source",
 sourceType: "url",
-id: ( …
+id: (_d = (_c = (_b = this.config).generateId) == null ? void 0 : _c.call(_b)) != null ? _d : (0, import_provider_utils11.generateId)(),
 url: annotation.url,
 title: annotation.title
 });
@@ -2391,15 +2475,15 @@ var OpenAIResponsesLanguageModel = class {
 return {
 content,
 finishReason: mapOpenAIResponseFinishReason({
-finishReason: ( …
+finishReason: (_e = response.incomplete_details) == null ? void 0 : _e.reason,
 hasToolCalls: content.some((part) => part.type === "tool-call")
 }),
 usage: {
 inputTokens: response.usage.input_tokens,
 outputTokens: response.usage.output_tokens,
 totalTokens: response.usage.input_tokens + response.usage.output_tokens,
-reasoningTokens: ( …
-cachedInputTokens: ( …
+reasoningTokens: (_g = (_f = response.usage.output_tokens_details) == null ? void 0 : _f.reasoning_tokens) != null ? _g : void 0,
+cachedInputTokens: (_i = (_h = response.usage.input_tokens_details) == null ? void 0 : _h.cached_tokens) != null ? _i : void 0
 },
 request: { body },
 response: {
@@ -2419,18 +2503,18 @@ var OpenAIResponsesLanguageModel = class {
 }
 async doStream(options) {
 const { args: body, warnings } = await this.getArgs(options);
-const { responseHeaders, value: response } = await (0, …
+const { responseHeaders, value: response } = await (0, import_provider_utils11.postJsonToApi)({
 url: this.config.url({
 path: "/responses",
 modelId: this.modelId
 }),
-headers: (0, …
+headers: (0, import_provider_utils11.combineHeaders)(this.config.headers(), options.headers),
 body: {
 ...body,
 stream: true
 },
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0, …
+successfulResponseHandler: (0, import_provider_utils11.createEventSourceResponseHandler)(
 openaiResponsesChunkSchema
 ),
 abortSignal: options.abortSignal,
@@ -2453,7 +2537,7 @@ var OpenAIResponsesLanguageModel = class {
 controller.enqueue({ type: "stream-start", warnings });
 },
 transform(chunk, controller) {
-var _a, _b, _c, _d, _e, _f, _g, _h;
+var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
 if (options.includeRawChunks) {
 controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
 }
@@ -2502,7 +2586,15 @@ var OpenAIResponsesLanguageModel = class {
 } else if (value.item.type === "reasoning") {
 controller.enqueue({
 type: "reasoning-start",
-id: value.item.id
+id: value.item.id,
+providerMetadata: {
+openai: {
+reasoning: {
+id: value.item.id,
+encryptedContent: (_a = value.item.encrypted_content) != null ? _a : null
+}
+}
+}
 });
 }
 } else if (isResponseOutputItemDoneChunk(value)) {
@@ -2575,7 +2667,15 @@ var OpenAIResponsesLanguageModel = class {
 } else if (value.item.type === "reasoning") {
 controller.enqueue({
 type: "reasoning-end",
-id: value.item.id
+id: value.item.id,
+providerMetadata: {
+openai: {
+reasoning: {
+id: value.item.id,
+encryptedContent: (_b = value.item.encrypted_content) != null ? _b : null
+}
+}
+}
 });
 }
 } else if (isResponseFunctionCallArgumentsDeltaChunk(value)) {
@@ -2604,24 +2704,24 @@ var OpenAIResponsesLanguageModel = class {
 } else if (isResponseReasoningSummaryTextDeltaChunk(value)) {
 controller.enqueue({
 type: "reasoning-delta",
- …
- …
+id: value.item_id,
+delta: value.delta
 });
 } else if (isResponseFinishedChunk(value)) {
 finishReason = mapOpenAIResponseFinishReason({
-finishReason: ( …
+finishReason: (_c = value.response.incomplete_details) == null ? void 0 : _c.reason,
 hasToolCalls
 });
 usage.inputTokens = value.response.usage.input_tokens;
 usage.outputTokens = value.response.usage.output_tokens;
 usage.totalTokens = value.response.usage.input_tokens + value.response.usage.output_tokens;
-usage.reasoningTokens = ( …
-usage.cachedInputTokens = ( …
+usage.reasoningTokens = (_e = (_d = value.response.usage.output_tokens_details) == null ? void 0 : _d.reasoning_tokens) != null ? _e : void 0;
+usage.cachedInputTokens = (_g = (_f = value.response.usage.input_tokens_details) == null ? void 0 : _f.cached_tokens) != null ? _g : void 0;
 } else if (isResponseAnnotationAddedChunk(value)) {
 controller.enqueue({
 type: "source",
 sourceType: "url",
-id: ( …
+id: (_j = (_i = (_h = self.config).generateId) == null ? void 0 : _i.call(_h)) != null ? _j : (0, import_provider_utils11.generateId)(),
 url: value.annotation.url,
 title: value.annotation.title
 });
@@ -2646,124 +2746,129 @@ var OpenAIResponsesLanguageModel = class {
 };
 }
 };
-var usageSchema2 = …
-input_tokens: …
-input_tokens_details: …
-output_tokens: …
-output_tokens_details: …
+var usageSchema2 = import_v414.z.object({
+input_tokens: import_v414.z.number(),
+input_tokens_details: import_v414.z.object({ cached_tokens: import_v414.z.number().nullish() }).nullish(),
+output_tokens: import_v414.z.number(),
+output_tokens_details: import_v414.z.object({ reasoning_tokens: import_v414.z.number().nullish() }).nullish()
 });
-var textDeltaChunkSchema = …
-type: …
-item_id: …
-delta: …
+var textDeltaChunkSchema = import_v414.z.object({
+type: import_v414.z.literal("response.output_text.delta"),
+item_id: import_v414.z.string(),
+delta: import_v414.z.string()
 });
-var responseFinishedChunkSchema = …
-type: …
-response: …
-incomplete_details: …
+var responseFinishedChunkSchema = import_v414.z.object({
+type: import_v414.z.enum(["response.completed", "response.incomplete"]),
+response: import_v414.z.object({
+incomplete_details: import_v414.z.object({ reason: import_v414.z.string() }).nullish(),
 usage: usageSchema2
 })
 });
-var responseCreatedChunkSchema = …
-type: …
-response: …
-id: …
-created_at: …
-model: …
+var responseCreatedChunkSchema = import_v414.z.object({
+type: import_v414.z.literal("response.created"),
+response: import_v414.z.object({
+id: import_v414.z.string(),
+created_at: import_v414.z.number(),
+model: import_v414.z.string()
 })
 });
-var responseOutputItemAddedSchema = …
-type: …
-output_index: …
-item: …
- …
-type: …
-id: …
+var responseOutputItemAddedSchema = import_v414.z.object({
+type: import_v414.z.literal("response.output_item.added"),
+output_index: import_v414.z.number(),
+item: import_v414.z.discriminatedUnion("type", [
+import_v414.z.object({
+type: import_v414.z.literal("message"),
+id: import_v414.z.string()
 }),
- …
-type: …
-id: …
+import_v414.z.object({
+type: import_v414.z.literal("reasoning"),
+id: import_v414.z.string(),
+encrypted_content: import_v414.z.string().nullish(),
+summary: import_v414.z.array(
+import_v414.z.object({
+type: import_v414.z.literal("summary_text"),
+text: import_v414.z.string()
+})
+)
 }),
- …
-type: …
-id: …
-call_id: …
-name: …
-arguments: …
+import_v414.z.object({
+type: import_v414.z.literal("function_call"),
+id: import_v414.z.string(),
+call_id: import_v414.z.string(),
+name: import_v414.z.string(),
+arguments: import_v414.z.string()
 }),
- …
-type: …
-id: …
-status: …
+import_v414.z.object({
+type: import_v414.z.literal("web_search_call"),
+id: import_v414.z.string(),
+status: import_v414.z.string()
 }),
- …
-type: …
-id: …
-status: …
+import_v414.z.object({
+type: import_v414.z.literal("computer_call"),
+id: import_v414.z.string(),
+status: import_v414.z.string()
 })
 ])
 });
-var responseOutputItemDoneSchema = …
-type: …
-output_index: …
-item: …
- …
-type: …
-id: …
+var responseOutputItemDoneSchema = import_v414.z.object({
+type: import_v414.z.literal("response.output_item.done"),
+output_index: import_v414.z.number(),
+item: import_v414.z.discriminatedUnion("type", [
+import_v414.z.object({
+type: import_v414.z.literal("message"),
+id: import_v414.z.string()
 }),
- …
-type: …
-id: …
+import_v414.z.object({
+type: import_v414.z.literal("reasoning"),
+id: import_v414.z.string(),
+encrypted_content: import_v414.z.string().nullish(),
+summary: import_v414.z.array(
+import_v414.z.object({
+type: import_v414.z.literal("summary_text"),
+text: import_v414.z.string()
+})
+)
 }),
- …
-type: …
-id: …
-call_id: …
-name: …
-arguments: …
-status: …
+import_v414.z.object({
+type: import_v414.z.literal("function_call"),
+id: import_v414.z.string(),
+call_id: import_v414.z.string(),
+name: import_v414.z.string(),
+arguments: import_v414.z.string(),
+status: import_v414.z.literal("completed")
 }),
- …
-type: …
-id: …
-status: …
+import_v414.z.object({
+type: import_v414.z.literal("web_search_call"),
+id: import_v414.z.string(),
+status: import_v414.z.literal("completed")
 }),
- …
-type: …
-id: …
-status: …
+import_v414.z.object({
+type: import_v414.z.literal("computer_call"),
+id: import_v414.z.string(),
+status: import_v414.z.literal("completed")
 })
 ])
 });
-var responseFunctionCallArgumentsDeltaSchema = …
-type: …
-item_id: …
-output_index: …
-delta: …
+var responseFunctionCallArgumentsDeltaSchema = import_v414.z.object({
+type: import_v414.z.literal("response.function_call_arguments.delta"),
+item_id: import_v414.z.string(),
+output_index: import_v414.z.number(),
+delta: import_v414.z.string()
 });
-var responseAnnotationAddedSchema = …
-type: …
-annotation: …
-type: …
-url: …
-title: …
+var responseAnnotationAddedSchema = import_v414.z.object({
+type: import_v414.z.literal("response.output_text.annotation.added"),
+annotation: import_v414.z.object({
+type: import_v414.z.literal("url_citation"),
+url: import_v414.z.string(),
+title: import_v414.z.string()
 })
 });
-var responseReasoningSummaryTextDeltaSchema = …
-type: …
-item_id: …
- …
-summary_index: import_v413.z.number(),
-delta: import_v413.z.string()
-});
-var responseReasoningSummaryPartDoneSchema = import_v413.z.object({
-type: import_v413.z.literal("response.reasoning_summary_part.done"),
-item_id: import_v413.z.string(),
-output_index: import_v413.z.number(),
-summary_index: import_v413.z.number(),
-part: import_v413.z.unknown().nullish()
+var responseReasoningSummaryTextDeltaSchema = import_v414.z.object({
+type: import_v414.z.literal("response.reasoning_summary_text.delta"),
+item_id: import_v414.z.string(),
+delta: import_v414.z.string()
 });
-var openaiResponsesChunkSchema = …
+var openaiResponsesChunkSchema = import_v414.z.union([
 textDeltaChunkSchema,
 responseFinishedChunkSchema,
 responseCreatedChunkSchema,
@@ -2772,8 +2877,7 @@ var openaiResponsesChunkSchema = import_v413.z.union([
 responseFunctionCallArgumentsDeltaSchema,
 responseAnnotationAddedSchema,
 responseReasoningSummaryTextDeltaSchema,
- …
-import_v413.z.object({ type: import_v413.z.string() }).passthrough()
+import_v414.z.object({ type: import_v414.z.string() }).passthrough()
 // fallback for unknown chunks
 ]);
 function isTextDeltaChunk(chunk) {
@@ -2801,7 +2905,7 @@ function isResponseReasoningSummaryTextDeltaChunk(chunk) {
 return chunk.type === "response.reasoning_summary_text.delta";
 }
 function getResponsesModelConfig(modelId) {
-if (modelId.startsWith("o")) {
+if (modelId.startsWith("o") || modelId.startsWith("codex-") || modelId.startsWith("computer-use")) {
 if (modelId.startsWith("o1-mini") || modelId.startsWith("o1-preview")) {
 return {
 isReasoningModel: true,
@@ -2824,25 +2928,26 @@ function getResponsesModelConfig(modelId) {
 function supportsFlexProcessing2(modelId) {
 return modelId.startsWith("o3") || modelId.startsWith("o4-mini");
 }
-var openaiResponsesProviderOptionsSchema = …
-metadata: …
-parallelToolCalls: …
-previousResponseId: …
-store: …
-user: …
-reasoningEffort: …
- …
-instructions: …
-reasoningSummary: …
-serviceTier: …
+var openaiResponsesProviderOptionsSchema = import_v414.z.object({
+metadata: import_v414.z.any().nullish(),
+parallelToolCalls: import_v414.z.boolean().nullish(),
+previousResponseId: import_v414.z.string().nullish(),
+store: import_v414.z.boolean().nullish(),
+user: import_v414.z.string().nullish(),
+reasoningEffort: import_v414.z.string().nullish(),
+strictJsonSchema: import_v414.z.boolean().nullish(),
+instructions: import_v414.z.string().nullish(),
+reasoningSummary: import_v414.z.string().nullish(),
+serviceTier: import_v414.z.enum(["auto", "flex"]).nullish(),
+include: import_v414.z.array(import_v414.z.enum(["reasoning.encrypted_content"])).nullish()
 });

 // src/openai-speech-model.ts
-var …
-var …
-var OpenAIProviderOptionsSchema = …
-instructions: …
-speed: …
+var import_provider_utils12 = require("@ai-sdk/provider-utils");
+var import_v415 = require("zod/v4");
+var OpenAIProviderOptionsSchema = import_v415.z.object({
+instructions: import_v415.z.string().nullish(),
+speed: import_v415.z.number().min(0.25).max(4).default(1).nullish()
 });
 var OpenAISpeechModel = class {
 constructor(modelId, config) {
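Note: taken together, openaiResponsesProviderOptionsSchema above now accepts provider options of the following shape (a sketch with illustrative values; every field is optional and metadata is omitted):

const responsesProviderOptions = {
  parallelToolCalls: false,
  previousResponseId: 'resp_123',
  store: false,
  user: 'user-123',
  instructions: 'Answer concisely.',
  reasoningEffort: 'medium',
  reasoningSummary: 'auto',
  serviceTier: 'auto' as const,
  strictJsonSchema: true,                   // new in this release
  include: ['reasoning.encrypted_content'], // new in this release
};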
@@ -2863,7 +2968,7 @@ var OpenAISpeechModel = class {
 providerOptions
 }) {
 const warnings = [];
-const openAIOptions = await (0, …
+const openAIOptions = await (0, import_provider_utils12.parseProviderOptions)({
 provider: "openai",
 providerOptions,
 schema: OpenAIProviderOptionsSchema
@@ -2916,15 +3021,15 @@ var OpenAISpeechModel = class {
 value: audio,
 responseHeaders,
 rawValue: rawResponse
-} = await (0, …
+} = await (0, import_provider_utils12.postJsonToApi)({
 url: this.config.url({
 path: "/audio/speech",
 modelId: this.modelId
 }),
-headers: (0, …
+headers: (0, import_provider_utils12.combineHeaders)(this.config.headers(), options.headers),
 body: requestBody,
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0, …
+successfulResponseHandler: (0, import_provider_utils12.createBinaryResponseHandler)(),
 abortSignal: options.abortSignal,
 fetch: this.config.fetch
 });
@@ -2947,10 +3052,10 @@ var OpenAISpeechModel = class {
 // src/openai-provider.ts
 function createOpenAI(options = {}) {
 var _a, _b;
-const baseURL = (_a = (0, …
+const baseURL = (_a = (0, import_provider_utils13.withoutTrailingSlash)(options.baseURL)) != null ? _a : "https://api.openai.com/v1";
 const providerName = (_b = options.name) != null ? _b : "openai";
 const getHeaders = () => ({
-Authorization: `Bearer ${(0, …
+Authorization: `Bearer ${(0, import_provider_utils13.loadApiKey)({
 apiKey: options.apiKey,
 environmentVariableName: "OPENAI_API_KEY",
 description: "OpenAI"