@ai-sdk/openai 2.0.25 → 2.0.27
This diff shows the changes between publicly released versions of the package as they appear in the public registry, and is provided for informational purposes only.
- package/CHANGELOG.md +13 -0
- package/dist/index.d.mts +114 -1
- package/dist/index.d.ts +114 -1
- package/dist/index.js +413 -332
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +382 -301
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +310 -275
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +298 -263
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
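The headline change in this range is a new `openai.web_search` provider-defined tool (added on `openaiTools` as `webSearch`, with `webSearchPreview` now marked deprecated), plus a finish-reason tweak so that provider-executed tools (web search, file search, computer use) no longer force a `tool-calls` finish reason. A minimal usage sketch in TypeScript, assuming the AI SDK `generateText` API, the `openai.responses(...)` model factory, a placeholder model id, and that the tool is reachable as `openai.tools.webSearch` — none of which appear in this diff:

import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';

// Sketch only: option names mirror the webSearchArgsSchema added below;
// filters, searchContextSize, and userLocation are all optional.
async function main() {
  const result = await generateText({
    model: openai.responses('gpt-5'), // hypothetical model id
    prompt: 'What is new in the latest AI SDK release?',
    tools: {
      // The tool key must be "web_search" (per the JSDoc added in this diff).
      web_search: openai.tools.webSearch({
        searchContextSize: 'medium',
        filters: { allowedDomains: ['vercel.com'] },
        userLocation: { type: 'approximate', country: 'US' },
      }),
    },
  });
  console.log(result.text);
}
main();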
package/dist/index.js
CHANGED
@@ -26,7 +26,7 @@ __export(src_exports, {
 module.exports = __toCommonJS(src_exports);
 
 // src/openai-provider.ts
-var
+var import_provider_utils16 = require("@ai-sdk/provider-utils");
 
 // src/chat/openai-chat-language-model.ts
 var import_provider3 = require("@ai-sdk/provider");
@@ -369,23 +369,11 @@ var compoundFilterSchema = import_v43.z.object({
 });
 var filtersSchema = import_v43.z.union([comparisonFilterSchema, compoundFilterSchema]);
 var fileSearchArgsSchema = import_v43.z.object({
-/**
-* List of vector store IDs to search through. If not provided, searches all available vector stores.
-*/
 vectorStoreIds: import_v43.z.array(import_v43.z.string()).optional(),
-/**
-* Maximum number of search results to return. Defaults to 10.
-*/
 maxNumResults: import_v43.z.number().optional(),
-/**
-* Ranking options for the search.
-*/
 ranking: import_v43.z.object({
 ranker: import_v43.z.enum(["auto", "default-2024-08-21"]).optional()
 }).optional(),
-/**
-* A filter to apply based on file attributes.
-*/
 filters: filtersSchema.optional()
 });
 var fileSearch = (0, import_provider_utils3.createProviderDefinedToolFactory)({
@@ -1825,23 +1813,107 @@ var codeInterpreter = (0, import_provider_utils9.createProviderDefinedToolFactor
 inputSchema: import_v411.z.object({})
 });
 
+// src/tool/web-search.ts
+var import_provider_utils10 = require("@ai-sdk/provider-utils");
+var import_v412 = require("zod/v4");
+var webSearchArgsSchema = import_v412.z.object({
+filters: import_v412.z.object({
+allowedDomains: import_v412.z.array(import_v412.z.string()).optional()
+}).optional(),
+searchContextSize: import_v412.z.enum(["low", "medium", "high"]).optional(),
+userLocation: import_v412.z.object({
+type: import_v412.z.literal("approximate"),
+country: import_v412.z.string().optional(),
+city: import_v412.z.string().optional(),
+region: import_v412.z.string().optional(),
+timezone: import_v412.z.string().optional()
+}).optional()
+});
+var factory = (0, import_provider_utils10.createProviderDefinedToolFactory)({
+id: "openai.web_search",
+name: "web_search",
+inputSchema: import_v412.z.object({
+action: import_v412.z.discriminatedUnion("type", [
+import_v412.z.object({
+type: import_v412.z.literal("search"),
+query: import_v412.z.string().nullish()
+}),
+import_v412.z.object({
+type: import_v412.z.literal("open_page"),
+url: import_v412.z.string()
+}),
+import_v412.z.object({
+type: import_v412.z.literal("find"),
+url: import_v412.z.string(),
+pattern: import_v412.z.string()
+})
+]).nullish()
+})
+});
+var webSearch = (args = {}) => {
+return factory(args);
+};
+
 // src/openai-tools.ts
 var openaiTools = {
+/**
+* The Code Interpreter tool allows models to write and run Python code in a
+* sandboxed environment to solve complex problems in domains like data analysis,
+* coding, and math.
+*
+* @param container - The container to use for the code interpreter.
+*
+* Must have name `code_interpreter`.
+*/
 codeInterpreter,
+/**
+* File search is a tool available in the Responses API. It enables models to
+* retrieve information in a knowledge base of previously uploaded files through
+* semantic and keyword search.
+*
+* Must have name `file_search`.
+*
+* @param vectorStoreIds - The vector store IDs to use for the file search.
+* @param maxNumResults - The maximum number of results to return.
+* @param ranking - The ranking options to use for the file search.
+* @param filters - The filters to use for the file search.
+*/
 fileSearch,
-
+/**
+* Web search allows models to access up-to-date information from the internet
+* and provide answers with sourced citations.
+*
+* Must have name `web_search_preview`.
+*
+* @param searchContextSize - The search context size to use for the web search.
+* @param userLocation - The user location to use for the web search.
+*
+* @deprecated Use `webSearch` instead.
+*/
+webSearchPreview,
+/**
+* Web search allows models to access up-to-date information from the internet
+* and provide answers with sourced citations.
+*
+* Must have name `web_search`.
+*
+* @param filters - The filters to use for the web search.
+* @param searchContextSize - The search context size to use for the web search.
+* @param userLocation - The user location to use for the web search.
+*/
+webSearch
 };
 
 // src/responses/openai-responses-language-model.ts
 var import_provider8 = require("@ai-sdk/provider");
-var
-var
+var import_provider_utils13 = require("@ai-sdk/provider-utils");
+var import_v414 = require("zod/v4");
 
 // src/responses/convert-to-openai-responses-messages.ts
 var import_provider6 = require("@ai-sdk/provider");
-var import_provider_utils10 = require("@ai-sdk/provider-utils");
-var import_v412 = require("zod/v4");
 var import_provider_utils11 = require("@ai-sdk/provider-utils");
+var import_v413 = require("zod/v4");
+var import_provider_utils12 = require("@ai-sdk/provider-utils");
 function isFileId(data, prefixes) {
 if (!prefixes) return false;
 return prefixes.some((prefix) => data.startsWith(prefix));
@@ -1897,7 +1969,7 @@ async function convertToOpenAIResponsesMessages({
 return {
 type: "input_image",
 ...part.data instanceof URL ? { image_url: part.data.toString() } : typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
-image_url: `data:${mediaType};base64,${(0,
+image_url: `data:${mediaType};base64,${(0, import_provider_utils12.convertToBase64)(part.data)}`
 },
 detail: (_b2 = (_a2 = part.providerOptions) == null ? void 0 : _a2.openai) == null ? void 0 : _b2.imageDetail
 };
@@ -1912,7 +1984,7 @@ async function convertToOpenAIResponsesMessages({
 type: "input_file",
 ...typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
 filename: (_c2 = part.filename) != null ? _c2 : `part-${index}.pdf`,
-file_data: `data:application/pdf;base64,${(0,
+file_data: `data:application/pdf;base64,${(0, import_provider_utils12.convertToBase64)(part.data)}`
 }
 };
 } else {
@@ -1959,7 +2031,7 @@ async function convertToOpenAIResponsesMessages({
 break;
 }
 case "reasoning": {
-const providerOptions = await (0,
+const providerOptions = await (0, import_provider_utils11.parseProviderOptions)({
 provider: "openai",
 providerOptions: part.providerOptions,
 schema: openaiResponsesReasoningProviderOptionsSchema
@@ -2030,26 +2102,26 @@ async function convertToOpenAIResponsesMessages({
 }
 return { messages, warnings };
 }
-var openaiResponsesReasoningProviderOptionsSchema =
-itemId:
-reasoningEncryptedContent:
+var openaiResponsesReasoningProviderOptionsSchema = import_v413.z.object({
+itemId: import_v413.z.string().nullish(),
+reasoningEncryptedContent: import_v413.z.string().nullish()
 });
 
 // src/responses/map-openai-responses-finish-reason.ts
 function mapOpenAIResponseFinishReason({
 finishReason,
-
+hasFunctionCall
 }) {
 switch (finishReason) {
 case void 0:
 case null:
-return
+return hasFunctionCall ? "tool-calls" : "stop";
 case "max_output_tokens":
 return "length";
 case "content_filter":
 return "content-filter";
 default:
-return
+return hasFunctionCall ? "tool-calls" : "unknown";
 }
 }
 
@@ -2099,6 +2171,16 @@ function prepareResponsesTools({
 });
 break;
 }
+case "openai.web_search": {
+const args = webSearchArgsSchema.parse(tool.args);
+openaiTools2.push({
+type: "web_search",
+filters: args.filters != null ? { allowed_domains: args.filters.allowedDomains } : void 0,
+search_context_size: args.searchContextSize,
+user_location: args.userLocation
+});
+break;
+}
 case "openai.code_interpreter": {
 const args = codeInterpreterArgsSchema.parse(tool.args);
 openaiTools2.push({
@@ -2131,7 +2213,7 @@ function prepareResponsesTools({
 case "tool":
 return {
 tools: openaiTools2,
-toolChoice: toolChoice.toolName === "code_interpreter" || toolChoice.toolName === "file_search" || toolChoice.toolName === "web_search_preview" ? { type: toolChoice.toolName } : { type: "function", name: toolChoice.toolName },
+toolChoice: toolChoice.toolName === "code_interpreter" || toolChoice.toolName === "file_search" || toolChoice.toolName === "web_search_preview" || toolChoice.toolName === "web_search" ? { type: toolChoice.toolName } : { type: "function", name: toolChoice.toolName },
 toolWarnings
 };
 default: {
@@ -2144,35 +2226,35 @@ function prepareResponsesTools({
 }
 
 // src/responses/openai-responses-language-model.ts
-var webSearchCallItem =
-type:
-id:
-status:
-action:
-
-type:
-query:
+var webSearchCallItem = import_v414.z.object({
+type: import_v414.z.literal("web_search_call"),
+id: import_v414.z.string(),
+status: import_v414.z.string(),
+action: import_v414.z.discriminatedUnion("type", [
+import_v414.z.object({
+type: import_v414.z.literal("search"),
+query: import_v414.z.string().nullish()
 }),
-
-type:
-url:
+import_v414.z.object({
+type: import_v414.z.literal("open_page"),
+url: import_v414.z.string()
 }),
-
-type:
-url:
-pattern:
+import_v414.z.object({
+type: import_v414.z.literal("find"),
+url: import_v414.z.string(),
+pattern: import_v414.z.string()
 })
 ]).nullish()
 });
 var TOP_LOGPROBS_MAX = 20;
-var LOGPROBS_SCHEMA =
-
-token:
-logprob:
-top_logprobs:
-
-token:
-logprob:
+var LOGPROBS_SCHEMA = import_v414.z.array(
+import_v414.z.object({
+token: import_v414.z.string(),
+logprob: import_v414.z.number(),
+top_logprobs: import_v414.z.array(
+import_v414.z.object({
+token: import_v414.z.string(),
+logprob: import_v414.z.number()
 })
 )
 })
@@ -2235,7 +2317,7 @@ var OpenAIResponsesLanguageModel = class {
 fileIdPrefixes: this.config.fileIdPrefixes
 });
 warnings.push(...messageWarnings);
-const openaiOptions = await (0,
+const openaiOptions = await (0, import_provider_utils13.parseProviderOptions)({
 provider: "openai",
 providerOptions,
 schema: openaiResponsesProviderOptionsSchema
@@ -2370,98 +2452,98 @@ var OpenAIResponsesLanguageModel = class {
 responseHeaders,
 value: response,
 rawValue: rawResponse
-} = await (0,
+} = await (0, import_provider_utils13.postJsonToApi)({
 url,
-headers: (0,
+headers: (0, import_provider_utils13.combineHeaders)(this.config.headers(), options.headers),
 body,
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
-
-id:
-created_at:
-error:
-code:
-message:
+successfulResponseHandler: (0, import_provider_utils13.createJsonResponseHandler)(
+import_v414.z.object({
+id: import_v414.z.string(),
+created_at: import_v414.z.number(),
+error: import_v414.z.object({
+code: import_v414.z.string(),
+message: import_v414.z.string()
 }).nullish(),
-model:
-output:
-
-
-type:
-role:
-id:
-content:
-
-type:
-text:
+model: import_v414.z.string(),
+output: import_v414.z.array(
+import_v414.z.discriminatedUnion("type", [
+import_v414.z.object({
+type: import_v414.z.literal("message"),
+role: import_v414.z.literal("assistant"),
+id: import_v414.z.string(),
+content: import_v414.z.array(
+import_v414.z.object({
+type: import_v414.z.literal("output_text"),
+text: import_v414.z.string(),
 logprobs: LOGPROBS_SCHEMA.nullish(),
-annotations:
-
-
-type:
-start_index:
-end_index:
-url:
-title:
+annotations: import_v414.z.array(
+import_v414.z.discriminatedUnion("type", [
+import_v414.z.object({
+type: import_v414.z.literal("url_citation"),
+start_index: import_v414.z.number(),
+end_index: import_v414.z.number(),
+url: import_v414.z.string(),
+title: import_v414.z.string()
 }),
-
-type:
-file_id:
-filename:
-index:
-start_index:
-end_index:
-quote:
+import_v414.z.object({
+type: import_v414.z.literal("file_citation"),
+file_id: import_v414.z.string(),
+filename: import_v414.z.string().nullish(),
+index: import_v414.z.number().nullish(),
+start_index: import_v414.z.number().nullish(),
+end_index: import_v414.z.number().nullish(),
+quote: import_v414.z.string().nullish()
 })
 ])
 )
 })
 )
 }),
-
-type:
-call_id:
-name:
-arguments:
-id:
+import_v414.z.object({
+type: import_v414.z.literal("function_call"),
+call_id: import_v414.z.string(),
+name: import_v414.z.string(),
+arguments: import_v414.z.string(),
+id: import_v414.z.string()
 }),
 webSearchCallItem,
-
-type:
-id:
-status:
+import_v414.z.object({
+type: import_v414.z.literal("computer_call"),
+id: import_v414.z.string(),
+status: import_v414.z.string().optional()
 }),
-
-type:
-id:
-status:
-queries:
-results:
-
-attributes:
-file_id:
-filename:
-score:
-text:
+import_v414.z.object({
+type: import_v414.z.literal("file_search_call"),
+id: import_v414.z.string(),
+status: import_v414.z.string().optional(),
+queries: import_v414.z.array(import_v414.z.string()).nullish(),
+results: import_v414.z.array(
+import_v414.z.object({
+attributes: import_v414.z.object({
+file_id: import_v414.z.string(),
+filename: import_v414.z.string(),
+score: import_v414.z.number(),
+text: import_v414.z.string()
 })
 })
 ).nullish()
 }),
-
-type:
-id:
-encrypted_content:
-summary:
-
-type:
-text:
+import_v414.z.object({
+type: import_v414.z.literal("reasoning"),
+id: import_v414.z.string(),
+encrypted_content: import_v414.z.string().nullish(),
+summary: import_v414.z.array(
+import_v414.z.object({
+type: import_v414.z.literal("summary_text"),
+text: import_v414.z.string()
 })
 )
 })
 ])
 ),
-service_tier:
-incomplete_details:
+service_tier: import_v414.z.string().nullish(),
+incomplete_details: import_v414.z.object({ reason: import_v414.z.string() }).nullable(),
 usage: usageSchema2
 })
 ),
@@ -2481,6 +2563,7 @@ var OpenAIResponsesLanguageModel = class {
 }
 const content = [];
 const logprobs = [];
+let hasFunctionCall = false;
 for (const part of response.output) {
 switch (part.type) {
 case "reasoning": {
@@ -2520,7 +2603,7 @@ var OpenAIResponsesLanguageModel = class {
 content.push({
 type: "source",
 sourceType: "url",
-id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : (0,
+id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : (0, import_provider_utils13.generateId)(),
 url: annotation.url,
 title: annotation.title
 });
@@ -2528,7 +2611,7 @@ var OpenAIResponsesLanguageModel = class {
 content.push({
 type: "source",
 sourceType: "document",
-id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0,
+id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0, import_provider_utils13.generateId)(),
 mediaType: "text/plain",
 title: (_k = (_j = annotation.quote) != null ? _j : annotation.filename) != null ? _k : "Document",
 filename: (_l = annotation.filename) != null ? _l : annotation.file_id
@@ -2539,6 +2622,7 @@ var OpenAIResponsesLanguageModel = class {
 break;
 }
 case "function_call": {
+hasFunctionCall = true;
 content.push({
 type: "tool-call",
 toolCallId: part.call_id,
@@ -2626,7 +2710,7 @@ var OpenAIResponsesLanguageModel = class {
 content,
 finishReason: mapOpenAIResponseFinishReason({
 finishReason: (_m = response.incomplete_details) == null ? void 0 : _m.reason,
-
+hasFunctionCall
 }),
 usage: {
 inputTokens: response.usage.input_tokens,
@@ -2649,18 +2733,18 @@ var OpenAIResponsesLanguageModel = class {
 }
 async doStream(options) {
 const { args: body, warnings } = await this.getArgs(options);
-const { responseHeaders, value: response } = await (0,
+const { responseHeaders, value: response } = await (0, import_provider_utils13.postJsonToApi)({
 url: this.config.url({
 path: "/responses",
 modelId: this.modelId
 }),
-headers: (0,
+headers: (0, import_provider_utils13.combineHeaders)(this.config.headers(), options.headers),
 body: {
 ...body,
 stream: true
 },
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
+successfulResponseHandler: (0, import_provider_utils13.createEventSourceResponseHandler)(
 openaiResponsesChunkSchema
 ),
 abortSignal: options.abortSignal,
@@ -2676,7 +2760,7 @@ var OpenAIResponsesLanguageModel = class {
 const logprobs = [];
 let responseId = null;
 const ongoingToolCalls = {};
-let
+let hasFunctionCall = false;
 const activeReasoning = {};
 let serviceTier;
 return {
@@ -2766,7 +2850,7 @@ var OpenAIResponsesLanguageModel = class {
 } else if (isResponseOutputItemDoneChunk(value)) {
 if (value.item.type === "function_call") {
 ongoingToolCalls[value.output_index] = void 0;
-
+hasFunctionCall = true;
 controller.enqueue({
 type: "tool-input-end",
 id: value.item.call_id
@@ -2784,7 +2868,6 @@ var OpenAIResponsesLanguageModel = class {
 });
 } else if (value.item.type === "web_search_call") {
 ongoingToolCalls[value.output_index] = void 0;
-hasToolCalls = true;
 controller.enqueue({
 type: "tool-input-end",
 id: value.item.id
@@ -2792,20 +2875,19 @@ var OpenAIResponsesLanguageModel = class {
 controller.enqueue({
 type: "tool-call",
 toolCallId: value.item.id,
-toolName: "
+toolName: "web_search",
 input: JSON.stringify({ action: value.item.action }),
 providerExecuted: true
 });
 controller.enqueue({
 type: "tool-result",
 toolCallId: value.item.id,
-toolName: "
+toolName: "web_search",
 result: { status: value.item.status },
 providerExecuted: true
 });
 } else if (value.item.type === "computer_call") {
 ongoingToolCalls[value.output_index] = void 0;
-hasToolCalls = true;
 controller.enqueue({
 type: "tool-input-end",
 id: value.item.id
@@ -2829,7 +2911,6 @@ var OpenAIResponsesLanguageModel = class {
 });
 } else if (value.item.type === "file_search_call") {
 ongoingToolCalls[value.output_index] = void 0;
-hasToolCalls = true;
 controller.enqueue({
 type: "tool-input-end",
 id: value.item.id
@@ -2930,7 +3011,7 @@ var OpenAIResponsesLanguageModel = class {
 } else if (isResponseFinishedChunk(value)) {
 finishReason = mapOpenAIResponseFinishReason({
 finishReason: (_h = value.response.incomplete_details) == null ? void 0 : _h.reason,
-
+hasFunctionCall
 });
 usage.inputTokens = value.response.usage.input_tokens;
 usage.outputTokens = value.response.usage.output_tokens;
@@ -2945,7 +3026,7 @@ var OpenAIResponsesLanguageModel = class {
 controller.enqueue({
 type: "source",
 sourceType: "url",
-id: (_o = (_n = (_m = self.config).generateId) == null ? void 0 : _n.call(_m)) != null ? _o : (0,
+id: (_o = (_n = (_m = self.config).generateId) == null ? void 0 : _n.call(_m)) != null ? _o : (0, import_provider_utils13.generateId)(),
 url: value.annotation.url,
 title: value.annotation.title
 });
@@ -2953,7 +3034,7 @@ var OpenAIResponsesLanguageModel = class {
 controller.enqueue({
 type: "source",
 sourceType: "document",
-id: (_r = (_q = (_p = self.config).generateId) == null ? void 0 : _q.call(_p)) != null ? _r : (0,
+id: (_r = (_q = (_p = self.config).generateId) == null ? void 0 : _q.call(_p)) != null ? _r : (0, import_provider_utils13.generateId)(),
 mediaType: "text/plain",
 title: (_t = (_s = value.annotation.quote) != null ? _s : value.annotation.filename) != null ? _t : "Document",
 filename: (_u = value.annotation.filename) != null ? _u : value.annotation.file_id
@@ -2989,176 +3070,176 @@ var OpenAIResponsesLanguageModel = class {
 };
 }
 };
-var usageSchema2 =
-input_tokens:
-input_tokens_details:
-output_tokens:
-output_tokens_details:
+var usageSchema2 = import_v414.z.object({
+input_tokens: import_v414.z.number(),
+input_tokens_details: import_v414.z.object({ cached_tokens: import_v414.z.number().nullish() }).nullish(),
+output_tokens: import_v414.z.number(),
+output_tokens_details: import_v414.z.object({ reasoning_tokens: import_v414.z.number().nullish() }).nullish()
 });
-var textDeltaChunkSchema =
-type:
-item_id:
-delta:
+var textDeltaChunkSchema = import_v414.z.object({
+type: import_v414.z.literal("response.output_text.delta"),
+item_id: import_v414.z.string(),
+delta: import_v414.z.string(),
 logprobs: LOGPROBS_SCHEMA.nullish()
 });
-var errorChunkSchema =
-type:
-code:
-message:
-param:
-sequence_number:
+var errorChunkSchema = import_v414.z.object({
+type: import_v414.z.literal("error"),
+code: import_v414.z.string(),
+message: import_v414.z.string(),
+param: import_v414.z.string().nullish(),
+sequence_number: import_v414.z.number()
 });
-var responseFinishedChunkSchema =
-type:
-response:
-incomplete_details:
+var responseFinishedChunkSchema = import_v414.z.object({
+type: import_v414.z.enum(["response.completed", "response.incomplete"]),
+response: import_v414.z.object({
+incomplete_details: import_v414.z.object({ reason: import_v414.z.string() }).nullish(),
 usage: usageSchema2,
-service_tier:
+service_tier: import_v414.z.string().nullish()
 })
 });
-var responseCreatedChunkSchema =
-type:
-response:
-id:
-created_at:
-model:
-service_tier:
+var responseCreatedChunkSchema = import_v414.z.object({
+type: import_v414.z.literal("response.created"),
+response: import_v414.z.object({
+id: import_v414.z.string(),
+created_at: import_v414.z.number(),
+model: import_v414.z.string(),
+service_tier: import_v414.z.string().nullish()
 })
 });
-var responseOutputItemAddedSchema =
-type:
-output_index:
-item:
-
-type:
-id:
+var responseOutputItemAddedSchema = import_v414.z.object({
+type: import_v414.z.literal("response.output_item.added"),
+output_index: import_v414.z.number(),
+item: import_v414.z.discriminatedUnion("type", [
+import_v414.z.object({
+type: import_v414.z.literal("message"),
+id: import_v414.z.string()
 }),
-
-type:
-id:
-encrypted_content:
+import_v414.z.object({
+type: import_v414.z.literal("reasoning"),
+id: import_v414.z.string(),
+encrypted_content: import_v414.z.string().nullish()
 }),
-
-type:
-id:
-call_id:
-name:
-arguments:
+import_v414.z.object({
+type: import_v414.z.literal("function_call"),
+id: import_v414.z.string(),
+call_id: import_v414.z.string(),
+name: import_v414.z.string(),
+arguments: import_v414.z.string()
 }),
-
-type:
-id:
-status:
-action:
-type:
-query:
+import_v414.z.object({
+type: import_v414.z.literal("web_search_call"),
+id: import_v414.z.string(),
+status: import_v414.z.string(),
+action: import_v414.z.object({
+type: import_v414.z.literal("search"),
+query: import_v414.z.string().optional()
 }).nullish()
 }),
-
-type:
-id:
-status:
+import_v414.z.object({
+type: import_v414.z.literal("computer_call"),
+id: import_v414.z.string(),
+status: import_v414.z.string()
 }),
-
-type:
-id:
-status:
-queries:
-results:
-
-attributes:
-file_id:
-filename:
-score:
-text:
+import_v414.z.object({
+type: import_v414.z.literal("file_search_call"),
+id: import_v414.z.string(),
+status: import_v414.z.string(),
+queries: import_v414.z.array(import_v414.z.string()).nullish(),
+results: import_v414.z.array(
+import_v414.z.object({
+attributes: import_v414.z.object({
+file_id: import_v414.z.string(),
+filename: import_v414.z.string(),
+score: import_v414.z.number(),
+text: import_v414.z.string()
 })
 })
 ).optional()
 })
 ])
 });
-var responseOutputItemDoneSchema =
-type:
-output_index:
-item:
-
-type:
-id:
+var responseOutputItemDoneSchema = import_v414.z.object({
+type: import_v414.z.literal("response.output_item.done"),
+output_index: import_v414.z.number(),
+item: import_v414.z.discriminatedUnion("type", [
+import_v414.z.object({
+type: import_v414.z.literal("message"),
+id: import_v414.z.string()
 }),
-
-type:
-id:
-encrypted_content:
+import_v414.z.object({
+type: import_v414.z.literal("reasoning"),
+id: import_v414.z.string(),
+encrypted_content: import_v414.z.string().nullish()
 }),
-
-type:
-id:
-call_id:
-name:
-arguments:
-status:
+import_v414.z.object({
+type: import_v414.z.literal("function_call"),
+id: import_v414.z.string(),
+call_id: import_v414.z.string(),
+name: import_v414.z.string(),
+arguments: import_v414.z.string(),
+status: import_v414.z.literal("completed")
 }),
 webSearchCallItem,
-
-type:
-id:
-status:
+import_v414.z.object({
+type: import_v414.z.literal("computer_call"),
+id: import_v414.z.string(),
+status: import_v414.z.literal("completed")
 }),
-
-type:
-id:
-status:
-queries:
-results:
-
-attributes:
-file_id:
-filename:
-score:
-text:
+import_v414.z.object({
+type: import_v414.z.literal("file_search_call"),
+id: import_v414.z.string(),
+status: import_v414.z.literal("completed"),
+queries: import_v414.z.array(import_v414.z.string()).nullish(),
+results: import_v414.z.array(
+import_v414.z.object({
+attributes: import_v414.z.object({
+file_id: import_v414.z.string(),
+filename: import_v414.z.string(),
+score: import_v414.z.number(),
+text: import_v414.z.string()
 })
 })
 ).nullish()
 })
 ])
 });
-var responseFunctionCallArgumentsDeltaSchema =
-type:
-item_id:
-output_index:
-delta:
+var responseFunctionCallArgumentsDeltaSchema = import_v414.z.object({
+type: import_v414.z.literal("response.function_call_arguments.delta"),
+item_id: import_v414.z.string(),
+output_index: import_v414.z.number(),
+delta: import_v414.z.string()
 });
-var responseAnnotationAddedSchema =
-type:
-annotation:
-
-type:
-url:
-title:
+var responseAnnotationAddedSchema = import_v414.z.object({
+type: import_v414.z.literal("response.output_text.annotation.added"),
+annotation: import_v414.z.discriminatedUnion("type", [
+import_v414.z.object({
+type: import_v414.z.literal("url_citation"),
+url: import_v414.z.string(),
+title: import_v414.z.string()
 }),
-
-type:
-file_id:
-filename:
-index:
-start_index:
-end_index:
-quote:
+import_v414.z.object({
+type: import_v414.z.literal("file_citation"),
+file_id: import_v414.z.string(),
+filename: import_v414.z.string().nullish(),
+index: import_v414.z.number().nullish(),
+start_index: import_v414.z.number().nullish(),
+end_index: import_v414.z.number().nullish(),
+quote: import_v414.z.string().nullish()
 })
 ])
 });
-var responseReasoningSummaryPartAddedSchema =
-type:
-item_id:
-summary_index:
+var responseReasoningSummaryPartAddedSchema = import_v414.z.object({
+type: import_v414.z.literal("response.reasoning_summary_part.added"),
+item_id: import_v414.z.string(),
+summary_index: import_v414.z.number()
 });
-var responseReasoningSummaryTextDeltaSchema =
-type:
-item_id:
-summary_index:
-delta:
+var responseReasoningSummaryTextDeltaSchema = import_v414.z.object({
+type: import_v414.z.literal("response.reasoning_summary_text.delta"),
+item_id: import_v414.z.string(),
+summary_index: import_v414.z.number(),
+delta: import_v414.z.string()
 });
-var openaiResponsesChunkSchema =
+var openaiResponsesChunkSchema = import_v414.z.union([
 textDeltaChunkSchema,
 responseFinishedChunkSchema,
 responseCreatedChunkSchema,
@@ -3169,7 +3250,7 @@ var openaiResponsesChunkSchema = import_v413.z.union([
 responseReasoningSummaryPartAddedSchema,
 responseReasoningSummaryTextDeltaSchema,
 errorChunkSchema,
-
+import_v414.z.object({ type: import_v414.z.string() }).loose()
 // fallback for unknown chunks
 ]);
 function isTextDeltaChunk(chunk) {
@@ -3242,27 +3323,27 @@ function getResponsesModelConfig(modelId) {
 isReasoningModel: false
 };
 }
-var openaiResponsesProviderOptionsSchema =
-metadata:
-parallelToolCalls:
-previousResponseId:
-store:
-user:
-reasoningEffort:
-strictJsonSchema:
-instructions:
-reasoningSummary:
-serviceTier:
-include:
-
+var openaiResponsesProviderOptionsSchema = import_v414.z.object({
+metadata: import_v414.z.any().nullish(),
+parallelToolCalls: import_v414.z.boolean().nullish(),
+previousResponseId: import_v414.z.string().nullish(),
+store: import_v414.z.boolean().nullish(),
+user: import_v414.z.string().nullish(),
+reasoningEffort: import_v414.z.string().nullish(),
+strictJsonSchema: import_v414.z.boolean().nullish(),
+instructions: import_v414.z.string().nullish(),
+reasoningSummary: import_v414.z.string().nullish(),
+serviceTier: import_v414.z.enum(["auto", "flex", "priority"]).nullish(),
+include: import_v414.z.array(
+import_v414.z.enum([
 "reasoning.encrypted_content",
 "file_search_call.results",
 "message.output_text.logprobs"
 ])
 ).nullish(),
-textVerbosity:
-promptCacheKey:
-safetyIdentifier:
+textVerbosity: import_v414.z.enum(["low", "medium", "high"]).nullish(),
+promptCacheKey: import_v414.z.string().nullish(),
+safetyIdentifier: import_v414.z.string().nullish(),
 /**
 * Return the log probabilities of the tokens.
 *
@@ -3275,15 +3356,15 @@ var openaiResponsesProviderOptionsSchema = import_v413.z.object({
 * @see https://platform.openai.com/docs/api-reference/responses/create
 * @see https://cookbook.openai.com/examples/using_logprobs
 */
-logprobs:
+logprobs: import_v414.z.union([import_v414.z.boolean(), import_v414.z.number().min(1).max(TOP_LOGPROBS_MAX)]).optional()
 });
 
 // src/speech/openai-speech-model.ts
-var
-var
-var OpenAIProviderOptionsSchema =
-instructions:
-speed:
+var import_provider_utils14 = require("@ai-sdk/provider-utils");
+var import_v415 = require("zod/v4");
+var OpenAIProviderOptionsSchema = import_v415.z.object({
+instructions: import_v415.z.string().nullish(),
+speed: import_v415.z.number().min(0.25).max(4).default(1).nullish()
 });
 var OpenAISpeechModel = class {
 constructor(modelId, config) {
@@ -3304,7 +3385,7 @@ var OpenAISpeechModel = class {
 providerOptions
 }) {
 const warnings = [];
-const openAIOptions = await (0,
+const openAIOptions = await (0, import_provider_utils14.parseProviderOptions)({
 provider: "openai",
 providerOptions,
 schema: OpenAIProviderOptionsSchema
@@ -3357,15 +3438,15 @@ var OpenAISpeechModel = class {
 value: audio,
 responseHeaders,
 rawValue: rawResponse
-} = await (0,
+} = await (0, import_provider_utils14.postJsonToApi)({
 url: this.config.url({
 path: "/audio/speech",
 modelId: this.modelId
 }),
-headers: (0,
+headers: (0, import_provider_utils14.combineHeaders)(this.config.headers(), options.headers),
 body: requestBody,
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
+successfulResponseHandler: (0, import_provider_utils14.createBinaryResponseHandler)(),
 abortSignal: options.abortSignal,
 fetch: this.config.fetch
 });
@@ -3386,34 +3467,34 @@ var OpenAISpeechModel = class {
 };
 
 // src/transcription/openai-transcription-model.ts
-var
-var
+var import_provider_utils15 = require("@ai-sdk/provider-utils");
+var import_v417 = require("zod/v4");
 
 // src/transcription/openai-transcription-options.ts
-var
-var openAITranscriptionProviderOptions =
+var import_v416 = require("zod/v4");
+var openAITranscriptionProviderOptions = import_v416.z.object({
 /**
 * Additional information to include in the transcription response.
 */
-include:
+include: import_v416.z.array(import_v416.z.string()).optional(),
 /**
 * The language of the input audio in ISO-639-1 format.
 */
-language:
+language: import_v416.z.string().optional(),
 /**
 * An optional text to guide the model's style or continue a previous audio segment.
 */
-prompt:
+prompt: import_v416.z.string().optional(),
 /**
 * The sampling temperature, between 0 and 1.
 * @default 0
 */
-temperature:
+temperature: import_v416.z.number().min(0).max(1).default(0).optional(),
 /**
 * The timestamp granularities to populate for this transcription.
 * @default ['segment']
 */
-timestampGranularities:
+timestampGranularities: import_v416.z.array(import_v416.z.enum(["word", "segment"])).default(["segment"]).optional()
 });
 
 // src/transcription/openai-transcription-model.ts
@@ -3491,15 +3572,15 @@ var OpenAITranscriptionModel = class {
 providerOptions
 }) {
 const warnings = [];
-const openAIOptions = await (0,
+const openAIOptions = await (0, import_provider_utils15.parseProviderOptions)({
 provider: "openai",
 providerOptions,
 schema: openAITranscriptionProviderOptions
 });
 const formData = new FormData();
-const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([(0,
+const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([(0, import_provider_utils15.convertBase64ToUint8Array)(audio)]);
 formData.append("model", this.modelId);
-const fileExtension = (0,
+const fileExtension = (0, import_provider_utils15.mediaTypeToExtension)(mediaType);
 formData.append(
 "file",
 new File([blob], "audio", { type: mediaType }),
@@ -3544,15 +3625,15 @@ var OpenAITranscriptionModel = class {
 value: response,
 responseHeaders,
 rawValue: rawResponse
-} = await (0,
+} = await (0, import_provider_utils15.postFormDataToApi)({
 url: this.config.url({
 path: "/audio/transcriptions",
 modelId: this.modelId
 }),
-headers: (0,
+headers: (0, import_provider_utils15.combineHeaders)(this.config.headers(), options.headers),
 formData,
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
+successfulResponseHandler: (0, import_provider_utils15.createJsonResponseHandler)(
 openaiTranscriptionResponseSchema
 ),
 abortSignal: options.abortSignal,
@@ -3582,29 +3663,29 @@ var OpenAITranscriptionModel = class {
 };
 }
 };
-var openaiTranscriptionResponseSchema =
-text:
-language:
-duration:
-words:
-
-word:
-start:
-end:
+var openaiTranscriptionResponseSchema = import_v417.z.object({
+text: import_v417.z.string(),
+language: import_v417.z.string().nullish(),
+duration: import_v417.z.number().nullish(),
+words: import_v417.z.array(
+import_v417.z.object({
+word: import_v417.z.string(),
+start: import_v417.z.number(),
+end: import_v417.z.number()
 })
 ).nullish(),
-segments:
-
-id:
-seek:
-start:
-end:
-text:
-tokens:
-temperature:
-avg_logprob:
-compression_ratio:
-no_speech_prob:
+segments: import_v417.z.array(
+import_v417.z.object({
+id: import_v417.z.number(),
+seek: import_v417.z.number(),
+start: import_v417.z.number(),
+end: import_v417.z.number(),
+text: import_v417.z.string(),
+tokens: import_v417.z.array(import_v417.z.number()),
+temperature: import_v417.z.number(),
+avg_logprob: import_v417.z.number(),
+compression_ratio: import_v417.z.number(),
+no_speech_prob: import_v417.z.number()
 })
 ).nullish()
 });
@@ -3612,10 +3693,10 @@ var openaiTranscriptionResponseSchema = import_v416.z.object({
 // src/openai-provider.ts
 function createOpenAI(options = {}) {
 var _a, _b;
-const baseURL = (_a = (0,
+const baseURL = (_a = (0, import_provider_utils16.withoutTrailingSlash)(options.baseURL)) != null ? _a : "https://api.openai.com/v1";
 const providerName = (_b = options.name) != null ? _b : "openai";
 const getHeaders = () => ({
-Authorization: `Bearer ${(0,
+Authorization: `Bearer ${(0, import_provider_utils16.loadApiKey)({
 apiKey: options.apiKey,
 environmentVariableName: "OPENAI_API_KEY",
 description: "OpenAI"