@ai-sdk/openai 2.0.27 → 2.0.28
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +7 -0
- package/dist/index.d.mts +15 -32
- package/dist/index.d.ts +15 -32
- package/dist/index.js +36 -13
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +36 -13
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +1 -1
- package/dist/internal/index.d.ts +1 -1
- package/dist/internal/index.js +32 -12
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +32 -12
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
package/dist/index.d.mts
CHANGED
@@ -11,7 +11,7 @@ type OpenAIEmbeddingModelId = 'text-embedding-3-small' | 'text-embedding-3-large
 
 type OpenAIImageModelId = 'gpt-image-1' | 'dall-e-3' | 'dall-e-2' | (string & {});
 
-declare const factory: _ai_sdk_provider_utils.ProviderDefinedToolFactory<{}, {
+declare const webSearchToolFactory: _ai_sdk_provider_utils.ProviderDefinedToolFactory<{}, {
     /**
      * Filters for the search.
      */
@@ -57,6 +57,17 @@ declare const factory: _ai_sdk_provider_utils.ProviderDefinedToolFactory<{}, {
     };
 }>;
 
+declare const codeInterpreterToolFactory: _ai_sdk_provider_utils.ProviderDefinedToolFactory<{}, {
+    /**
+     * The code interpreter container.
+     * Can be a container ID
+     * or an object that specifies uploaded file IDs to make available to your code.
+     */
+    container?: string | {
+        fileIds?: string[];
+    };
+}>;
+
 declare const openaiTools: {
     /**
      * The Code Interpreter tool allows models to write and run Python code in a
@@ -67,35 +78,7 @@ declare const openaiTools: {
      *
     * Must have name `code_interpreter`.
      */
-    codeInterpreter: _ai_sdk_provider_utils.
-        container?: string | {
-            fileIds
-            /**
-             * File search is a tool available in the Responses API. It enables models to
-             * retrieve information in a knowledge base of previously uploaded files through
-             * semantic and keyword search.
-             *
-             * Must have name `file_search`.
-             *
-             * @param vectorStoreIds - The vector store IDs to use for the file search.
-             * @param maxNumResults - The maximum number of results to return.
-             * @param ranking - The ranking options to use for the file search.
-             * @param filters - The filters to use for the file search.
-             */
-            ? /**
-             * File search is a tool available in the Responses API. It enables models to
-             * retrieve information in a knowledge base of previously uploaded files through
-             * semantic and keyword search.
-             *
-             * Must have name `file_search`.
-             *
-             * @param vectorStoreIds - The vector store IDs to use for the file search.
-             * @param maxNumResults - The maximum number of results to return.
-             * @param ranking - The ranking options to use for the file search.
-             * @param filters - The filters to use for the file search.
-             */: string[];
-        };
-    }>;
+    codeInterpreter: (args?: Parameters<typeof codeInterpreterToolFactory>[0]) => _ai_sdk_provider_utils.Tool<{}, unknown>;
     /**
      * File search is a tool available in the Responses API. It enables models to
      * retrieve information in a knowledge base of previously uploaded files through
@@ -156,7 +139,7 @@ declare const openaiTools: {
      * @param searchContextSize - The search context size to use for the web search.
      * @param userLocation - The user location to use for the web search.
      */
-    webSearch: (args?: Parameters<typeof factory>[0]) => _ai_sdk_provider_utils.Tool<{}, unknown>;
+    webSearch: (args?: Parameters<typeof webSearchToolFactory>[0]) => _ai_sdk_provider_utils.Tool<{}, unknown>;
 };
 
 type OpenAIResponsesModelId = 'o1' | 'o1-2024-12-17' | 'o3-mini' | 'o3-mini-2025-01-31' | 'o3' | 'o3-2025-04-16' | 'gpt-5' | 'gpt-5-2025-08-07' | 'gpt-5-mini' | 'gpt-5-mini-2025-08-07' | 'gpt-5-nano' | 'gpt-5-nano-2025-08-07' | 'gpt-5-chat-latest' | 'gpt-4.1' | 'gpt-4.1-2025-04-14' | 'gpt-4.1-mini' | 'gpt-4.1-mini-2025-04-14' | 'gpt-4.1-nano' | 'gpt-4.1-nano-2025-04-14' | 'gpt-4o' | 'gpt-4o-2024-05-13' | 'gpt-4o-2024-08-06' | 'gpt-4o-2024-11-20' | 'gpt-4o-mini' | 'gpt-4o-mini-2024-07-18' | 'gpt-4-turbo' | 'gpt-4-turbo-2024-04-09' | 'gpt-4' | 'gpt-4-0613' | 'gpt-3.5-turbo-0125' | 'gpt-3.5-turbo' | 'gpt-3.5-turbo-1106' | 'chatgpt-4o-latest' | (string & {});
@@ -272,9 +255,9 @@ declare const openaiResponsesProviderOptionsSchema: z.ZodObject<{
         priority: "priority";
     }>>>;
     include: z.ZodOptional<z.ZodNullable<z.ZodArray<z.ZodEnum<{
-        "reasoning.encrypted_content": "reasoning.encrypted_content";
         "file_search_call.results": "file_search_call.results";
         "message.output_text.logprobs": "message.output_text.logprobs";
+        "reasoning.encrypted_content": "reasoning.encrypted_content";
     }>>>>;
     textVerbosity: z.ZodOptional<z.ZodNullable<z.ZodEnum<{
         low: "low";
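For orientation, a minimal usage sketch based on the declarations above (assuming the AI SDK's `generateText` from the `ai` package and that these factories are exposed as `openai.tools`; the prompt, model choice, and file ID are illustrative, not taken from this package):

import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';

// Tool keys must match the names required by the jsdoc above:
// `web_search` and `code_interpreter`.
const result = await generateText({
  model: openai.responses('gpt-5-mini'),
  prompt: 'Find a current source and run a quick calculation on it.',
  tools: {
    web_search: openai.tools.webSearch({}),
    code_interpreter: openai.tools.codeInterpreter({
      // container may be a container ID string or an object listing
      // uploaded file IDs; 'file-abc123' is a placeholder.
      container: { fileIds: ['file-abc123'] },
    }),
  },
});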
package/dist/index.d.ts
CHANGED
(The hunks are identical to the package/dist/index.d.mts changes shown above.)
package/dist/index.js
CHANGED
@@ -1807,11 +1807,14 @@ var codeInterpreterArgsSchema = import_v411.z.object({
     })
   ]).optional()
 });
-var codeInterpreter = (0, import_provider_utils9.createProviderDefinedToolFactory)({
+var codeInterpreterToolFactory = (0, import_provider_utils9.createProviderDefinedToolFactory)({
   id: "openai.code_interpreter",
   name: "code_interpreter",
   inputSchema: import_v411.z.object({})
 });
+var codeInterpreter = (args = {}) => {
+  return codeInterpreterToolFactory(args);
+};
 
 // src/tool/web-search.ts
 var import_provider_utils10 = require("@ai-sdk/provider-utils");
@@ -1829,7 +1832,7 @@ var webSearchArgsSchema = import_v412.z.object({
     timezone: import_v412.z.string().optional()
   }).optional()
 });
-var factory = (0, import_provider_utils10.createProviderDefinedToolFactory)({
+var webSearchToolFactory = (0, import_provider_utils10.createProviderDefinedToolFactory)({
   id: "openai.web_search",
   name: "web_search",
   inputSchema: import_v412.z.object({
@@ -1851,7 +1854,7 @@ var factory = (0, import_provider_utils10.createProviderDefinedToolFactory)({
   })
 });
 var webSearch = (args = {}) => {
-  return factory(args);
+  return webSearchToolFactory(args);
 };
 
 // src/openai-tools.ts
@@ -2287,7 +2290,7 @@ var OpenAIResponsesLanguageModel = class {
     toolChoice,
     responseFormat
   }) {
-    var _a, _b;
+    var _a, _b, _c;
     const warnings = [];
     const modelConfig = getResponsesModelConfig(this.modelId);
     if (topK != null) {
@@ -2323,8 +2326,13 @@ var OpenAIResponsesLanguageModel = class {
       schema: openaiResponsesProviderOptionsSchema
     });
     const strictJsonSchema = (_a = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _a : false;
+    let include = openaiOptions == null ? void 0 : openaiOptions.include;
     const topLogprobs = typeof (openaiOptions == null ? void 0 : openaiOptions.logprobs) === "number" ? openaiOptions == null ? void 0 : openaiOptions.logprobs : (openaiOptions == null ? void 0 : openaiOptions.logprobs) === true ? TOP_LOGPROBS_MAX : void 0;
-
+    include = topLogprobs ? Array.isArray(include) ? [...include, "message.output_text.logprobs"] : ["message.output_text.logprobs"] : include;
+    const webSearchToolName = (_b = tools == null ? void 0 : tools.find(
+      (tool) => tool.type === "provider-defined" && (tool.id === "openai.web_search" || tool.id === "openai.web_search_preview")
+    )) == null ? void 0 : _b.name;
+    include = webSearchToolName ? Array.isArray(include) ? [...include, "web_search_call.action.sources"] : ["web_search_call.action.sources"] : include;
     const baseArgs = {
       model: this.modelId,
       input: messages,
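Restated from the compiled hunk above as a standalone TypeScript helper (an illustrative sketch; the function name and signature are not part of the package), the new `include` handling amounts to:

// Sketch of the include-augmentation added in getArgs (not the package source).
type Include = string[] | null | undefined;

function augmentInclude(
  include: Include,
  topLogprobs: number | undefined,
  webSearchToolName: string | undefined,
): Include {
  // Ask the Responses API for logprobs output when the caller requested logprobs.
  if (topLogprobs) {
    include = Array.isArray(include)
      ? [...include, 'message.output_text.logprobs']
      : ['message.output_text.logprobs'];
  }
  // Ask for web search action sources when a web_search / web_search_preview
  // provider-defined tool is configured.
  if (webSearchToolName) {
    include = Array.isArray(include)
      ? [...include, 'web_search_call.action.sources']
      : ['web_search_call.action.sources'];
  }
  return include;
}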
@@ -2337,7 +2345,7 @@ var OpenAIResponsesLanguageModel = class {
         format: responseFormat.schema != null ? {
           type: "json_schema",
           strict: strictJsonSchema,
-          name: (_b = responseFormat.name) != null ? _b : "response",
+          name: (_c = responseFormat.name) != null ? _c : "response",
           description: responseFormat.description,
           schema: responseFormat.schema
         } : { type: "json_object" }
@@ -2355,7 +2363,7 @@ var OpenAIResponsesLanguageModel = class {
       user: openaiOptions == null ? void 0 : openaiOptions.user,
       instructions: openaiOptions == null ? void 0 : openaiOptions.instructions,
       service_tier: openaiOptions == null ? void 0 : openaiOptions.serviceTier,
-      include
+      include,
       prompt_cache_key: openaiOptions == null ? void 0 : openaiOptions.promptCacheKey,
       safety_identifier: openaiOptions == null ? void 0 : openaiOptions.safetyIdentifier,
       top_logprobs: topLogprobs,
@@ -2433,6 +2441,7 @@ var OpenAIResponsesLanguageModel = class {
       strictJsonSchema
     });
     return {
+      webSearchToolName,
       args: {
         ...baseArgs,
         tools: openaiTools2,
@@ -2443,7 +2452,11 @@ var OpenAIResponsesLanguageModel = class {
   }
   async doGenerate(options) {
     var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
-    const { args: body, warnings } = await this.getArgs(options);
+    const {
+      args: body,
+      warnings,
+      webSearchToolName
+    } = await this.getArgs(options);
     const url = this.config.url({
       path: "/responses",
       modelId: this.modelId
@@ -2494,12 +2507,18 @@ var OpenAIResponsesLanguageModel = class {
               start_index: import_v414.z.number().nullish(),
               end_index: import_v414.z.number().nullish(),
               quote: import_v414.z.string().nullish()
+            }),
+            import_v414.z.object({
+              type: import_v414.z.literal("container_file_citation")
             })
           ])
         )
       })
     )
   }),
+  import_v414.z.object({
+    type: import_v414.z.literal("code_interpreter_call")
+  }),
   import_v414.z.object({
     type: import_v414.z.literal("function_call"),
     call_id: import_v414.z.string(),
@@ -2640,14 +2659,14 @@ var OpenAIResponsesLanguageModel = class {
         content.push({
           type: "tool-call",
           toolCallId: part.id,
-          toolName: "
+          toolName: webSearchToolName != null ? webSearchToolName : "web_search",
           input: JSON.stringify({ action: part.action }),
           providerExecuted: true
         });
         content.push({
           type: "tool-result",
           toolCallId: part.id,
-          toolName: "
+          toolName: webSearchToolName != null ? webSearchToolName : "web_search",
           result: { status: part.status },
           providerExecuted: true
         });
@@ -2732,7 +2751,11 @@ var OpenAIResponsesLanguageModel = class {
     };
   }
   async doStream(options) {
-    const { args: body, warnings } = await this.getArgs(options);
+    const {
+      args: body,
+      warnings,
+      webSearchToolName
+    } = await this.getArgs(options);
     const { responseHeaders, value: response } = await (0, import_provider_utils13.postJsonToApi)({
       url: this.config.url({
         path: "/responses",
@@ -2793,13 +2816,13 @@ var OpenAIResponsesLanguageModel = class {
             });
           } else if (value.item.type === "web_search_call") {
             ongoingToolCalls[value.output_index] = {
-              toolName: "
+              toolName: webSearchToolName != null ? webSearchToolName : "web_search",
               toolCallId: value.item.id
             };
             controller.enqueue({
               type: "tool-input-start",
               id: value.item.id,
-              toolName: "
+              toolName: webSearchToolName != null ? webSearchToolName : "web_search"
             });
           } else if (value.item.type === "computer_call") {
             ongoingToolCalls[value.output_index] = {