@ai-sdk/openai 2.0.8 → 2.0.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +13 -0
- package/dist/index.d.mts +36 -53
- package/dist/index.d.ts +36 -53
- package/dist/index.js +503 -443
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +514 -454
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +317 -256
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +302 -241
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
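Most of the changes in the internal bundle below are a reorganization of the generated sources into per-feature directories (chat/, completion/, embedding/, image/, transcription/, speech/, responses/, tool/), plus reworked zod schemas for the chat and responses provider options and a new code_interpreter tool for the Responses API. For orientation, the chat provider options touched in this diff (reasoningEffort, serviceTier, textVerbosity) are passed from application code via providerOptions.openai. The sketch below uses the standard AI SDK call shape; the model id and option values are illustrative assumptions, not something shown in this diff.

import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

// Option names/values mirror the openaiProviderOptions schema in this diff:
// reasoningEffort: "minimal" | "low" | "medium" | "high"
// serviceTier:     "auto" | "flex" | "priority"
// textVerbosity:   "low" | "medium" | "high"
const { text } = await generateText({
  model: openai.chat("gpt-5-mini"), // model id is an illustrative assumption
  prompt: "Summarize this changelog entry in one sentence.",
  providerOptions: {
    openai: {
      reasoningEffort: "minimal",
      serviceTier: "flex",
      textVerbosity: "low"
    }
  }
});
console.log(text);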
package/dist/internal/index.mjs
CHANGED
@@ -1,4 +1,4 @@
-// src/openai-chat-language-model.ts
+// src/chat/openai-chat-language-model.ts
 import {
   InvalidResponseDataError
 } from "@ai-sdk/provider";
@@ -13,7 +13,26 @@ import {
 } from "@ai-sdk/provider-utils";
 import { z as z5 } from "zod/v4";

-// src/
+// src/openai-error.ts
+import { z } from "zod/v4";
+import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
+var openaiErrorDataSchema = z.object({
+  error: z.object({
+    message: z.string(),
+    // The additional information below is handled loosely to support
+    // OpenAI-compatible providers that have slightly different error
+    // responses:
+    type: z.string().nullish(),
+    param: z.any().nullish(),
+    code: z.union([z.string(), z.number()]).nullish()
+  })
+});
+var openaiFailedResponseHandler = createJsonErrorResponseHandler({
+  errorSchema: openaiErrorDataSchema,
+  errorToMessage: (data) => data.error.message
+});
+
+// src/chat/convert-to-openai-chat-messages.ts
 import {
   UnsupportedFunctionalityError
 } from "@ai-sdk/provider";
@@ -193,7 +212,7 @@ function convertToOpenAIChatMessages({
   return { messages, warnings };
 }

-// src/get-response-metadata.ts
+// src/chat/get-response-metadata.ts
 function getResponseMetadata({
   id,
   model,
@@ -206,7 +225,7 @@ function getResponseMetadata({
   };
 }

-// src/map-openai-finish-reason.ts
+// src/chat/map-openai-finish-reason.ts
 function mapOpenAIFinishReason(finishReason) {
   switch (finishReason) {
     case "stop":
@@ -223,16 +242,16 @@ function mapOpenAIFinishReason(finishReason) {
   }
 }

-// src/openai-chat-options.ts
-import { z } from "zod/v4";
-var openaiProviderOptions =
+// src/chat/openai-chat-options.ts
+import { z as z2 } from "zod/v4";
+var openaiProviderOptions = z2.object({
   /**
    * Modify the likelihood of specified tokens appearing in the completion.
    *
    * Accepts a JSON object that maps tokens (specified by their token ID in
    * the GPT tokenizer) to an associated bias value from -100 to 100.
    */
-  logitBias:
+  logitBias: z2.record(z2.coerce.number(), z2.number()).optional(),
   /**
    * Return the log probabilities of the tokens.
    *
@@ -242,42 +261,42 @@ var openaiProviderOptions = z.object({
    * Setting to a number will return the log probabilities of the top n
    * tokens that were generated.
    */
-  logprobs:
+  logprobs: z2.union([z2.boolean(), z2.number()]).optional(),
   /**
    * Whether to enable parallel function calling during tool use. Default to true.
    */
-  parallelToolCalls:
+  parallelToolCalls: z2.boolean().optional(),
   /**
    * A unique identifier representing your end-user, which can help OpenAI to
    * monitor and detect abuse.
    */
-  user:
+  user: z2.string().optional(),
   /**
    * Reasoning effort for reasoning models. Defaults to `medium`.
    */
-  reasoningEffort:
+  reasoningEffort: z2.enum(["minimal", "low", "medium", "high"]).optional(),
   /**
    * Maximum number of completion tokens to generate. Useful for reasoning models.
    */
-  maxCompletionTokens:
+  maxCompletionTokens: z2.number().optional(),
   /**
    * Whether to enable persistence in responses API.
    */
-  store:
+  store: z2.boolean().optional(),
   /**
    * Metadata to associate with the request.
    */
-  metadata:
+  metadata: z2.record(z2.string().max(64), z2.string().max(512)).optional(),
   /**
    * Parameters for prediction mode.
    */
-  prediction:
+  prediction: z2.record(z2.string(), z2.any()).optional(),
   /**
    * Whether to use structured outputs.
    *
    * @default true
    */
-  structuredOutputs:
+  structuredOutputs: z2.boolean().optional(),
   /**
    * Service tier for the request.
    * - 'auto': Default service tier
@@ -286,40 +305,21 @@ var openaiProviderOptions = z.object({
    *
    * @default 'auto'
    */
-  serviceTier:
+  serviceTier: z2.enum(["auto", "flex", "priority"]).optional(),
   /**
    * Whether to use strict JSON schema validation.
    *
    * @default false
    */
-  strictJsonSchema:
+  strictJsonSchema: z2.boolean().optional(),
   /**
    * Controls the verbosity of the model's responses.
    * Lower values will result in more concise responses, while higher values will result in more verbose responses.
    */
-  textVerbosity:
+  textVerbosity: z2.enum(["low", "medium", "high"]).optional()
 });

-// src/openai-
-import { z as z2 } from "zod/v4";
-import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
-var openaiErrorDataSchema = z2.object({
-  error: z2.object({
-    message: z2.string(),
-    // The additional information below is handled loosely to support
-    // OpenAI-compatible providers that have slightly different error
-    // responses:
-    type: z2.string().nullish(),
-    param: z2.any().nullish(),
-    code: z2.union([z2.string(), z2.number()]).nullish()
-  })
-});
-var openaiFailedResponseHandler = createJsonErrorResponseHandler({
-  errorSchema: openaiErrorDataSchema,
-  errorToMessage: (data) => data.error.message
-});
-
-// src/openai-prepare-tools.ts
+// src/chat/openai-chat-prepare-tools.ts
 import {
   UnsupportedFunctionalityError as UnsupportedFunctionalityError2
 } from "@ai-sdk/provider";
@@ -410,8 +410,8 @@ var webSearchPreview = createProviderDefinedToolFactory2({
   inputSchema: z4.object({})
 });

-// src/openai-prepare-tools.ts
-function
+// src/chat/openai-chat-prepare-tools.ts
+function prepareChatTools({
   tools,
   toolChoice,
   structuredOutputs,
@@ -497,7 +497,7 @@ function prepareTools({
   }
 }

-// src/openai-chat-language-model.ts
+// src/chat/openai-chat-language-model.ts
 var OpenAIChatLanguageModel = class {
   constructor(modelId, config) {
     this.specificationVersion = "v2";
@@ -582,7 +582,7 @@ var OpenAIChatLanguageModel = class {
       seed,
       verbosity: openaiOptions.textVerbosity,
       // openai specific settings:
-      // TODO
+      // TODO AI SDK 6: remove, we auto-map maxOutputTokens now
       max_completion_tokens: openaiOptions.maxCompletionTokens,
       store: openaiOptions.store,
       metadata: openaiOptions.metadata,
@@ -682,7 +682,7 @@ var OpenAIChatLanguageModel = class {
       tools: openaiTools,
       toolChoice: openaiToolChoice,
       toolWarnings
-    } =
+    } = prepareChatTools({
       tools,
       toolChoice,
       structuredOutputs,
@@ -1152,7 +1152,7 @@ var reasoningModels = {
   }
 };

-// src/openai-completion-language-model.ts
+// src/completion/openai-completion-language-model.ts
 import {
   combineHeaders as combineHeaders2,
   createEventSourceResponseHandler as createEventSourceResponseHandler2,
@@ -1162,7 +1162,7 @@ import {
 } from "@ai-sdk/provider-utils";
 import { z as z7 } from "zod/v4";

-// src/convert-to-openai-completion-prompt.ts
+// src/completion/convert-to-openai-completion-prompt.ts
 import {
   InvalidPromptError,
   UnsupportedFunctionalityError as UnsupportedFunctionalityError3
@@ -1240,7 +1240,37 @@ ${user}:`]
   };
 }

-// src/
+// src/completion/get-response-metadata.ts
+function getResponseMetadata2({
+  id,
+  model,
+  created
+}) {
+  return {
+    id: id != null ? id : void 0,
+    modelId: model != null ? model : void 0,
+    timestamp: created != null ? new Date(created * 1e3) : void 0
+  };
+}
+
+// src/completion/map-openai-finish-reason.ts
+function mapOpenAIFinishReason2(finishReason) {
+  switch (finishReason) {
+    case "stop":
+      return "stop";
+    case "length":
+      return "length";
+    case "content_filter":
+      return "content-filter";
+    case "function_call":
+    case "tool_calls":
+      return "tool-calls";
+    default:
+      return "unknown";
+  }
+}
+
+// src/completion/openai-completion-options.ts
 import { z as z6 } from "zod/v4";
 var openaiCompletionProviderOptions = z6.object({
   /**
@@ -1283,7 +1313,7 @@ var openaiCompletionProviderOptions = z6.object({
   logprobs: z6.union([z6.boolean(), z6.number()]).optional()
 });

-// src/openai-completion-language-model.ts
+// src/completion/openai-completion-language-model.ts
 var OpenAICompletionLanguageModel = class {
   constructor(modelId, config) {
     this.specificationVersion = "v2";
@@ -1403,10 +1433,10 @@ var OpenAICompletionLanguageModel = class {
         outputTokens: (_b = response.usage) == null ? void 0 : _b.completion_tokens,
         totalTokens: (_c = response.usage) == null ? void 0 : _c.total_tokens
       },
-      finishReason:
+      finishReason: mapOpenAIFinishReason2(choice.finish_reason),
       request: { body: args },
       response: {
-        ...
+        ...getResponseMetadata2(response),
         headers: responseHeaders,
         body: rawResponse
       },
@@ -1470,7 +1500,7 @@ var OpenAICompletionLanguageModel = class {
           isFirstChunk = false;
           controller.enqueue({
             type: "response-metadata",
-            ...
+            ...getResponseMetadata2(value)
           });
           controller.enqueue({ type: "text-start", id: "0" });
         }
@@ -1481,7 +1511,7 @@ var OpenAICompletionLanguageModel = class {
         }
         const choice = value.choices[0];
         if ((choice == null ? void 0 : choice.finish_reason) != null) {
-          finishReason =
+          finishReason = mapOpenAIFinishReason2(choice.finish_reason);
         }
         if ((choice == null ? void 0 : choice.logprobs) != null) {
           providerMetadata.openai.logprobs = choice.logprobs;
@@ -1556,7 +1586,7 @@ var openaiCompletionChunkSchema = z7.union([
   openaiErrorDataSchema
 ]);

-// src/openai-embedding-model.ts
+// src/embedding/openai-embedding-model.ts
 import {
   TooManyEmbeddingValuesForCallError
 } from "@ai-sdk/provider";
@@ -1568,7 +1598,7 @@ import {
 } from "@ai-sdk/provider-utils";
 import { z as z9 } from "zod/v4";

-// src/openai-embedding-options.ts
+// src/embedding/openai-embedding-options.ts
 import { z as z8 } from "zod/v4";
 var openaiEmbeddingProviderOptions = z8.object({
   /**
@@ -1583,7 +1613,7 @@ var openaiEmbeddingProviderOptions = z8.object({
   user: z8.string().optional()
 });

-// src/openai-embedding-model.ts
+// src/embedding/openai-embedding-model.ts
 var OpenAIEmbeddingModel = class {
   constructor(modelId, config) {
     this.specificationVersion = "v2";
@@ -1651,7 +1681,7 @@ var openaiTextEmbeddingResponseSchema = z9.object({
   usage: z9.object({ prompt_tokens: z9.number() }).nullish()
 });

-// src/openai-image-model.ts
+// src/image/openai-image-model.ts
 import {
   combineHeaders as combineHeaders4,
   createJsonResponseHandler as createJsonResponseHandler4,
@@ -1659,7 +1689,7 @@ import {
 } from "@ai-sdk/provider-utils";
 import { z as z10 } from "zod/v4";

-// src/openai-image-
+// src/image/openai-image-options.ts
 var modelMaxImagesPerCall = {
   "dall-e-3": 1,
   "dall-e-2": 10,
@@ -1667,7 +1697,7 @@ var modelMaxImagesPerCall = {
 };
 var hasDefaultResponseFormat = /* @__PURE__ */ new Set(["gpt-image-1"]);

-// src/openai-image-model.ts
+// src/image/openai-image-model.ts
 var OpenAIImageModel = class {
   constructor(modelId, config) {
     this.modelId = modelId;
@@ -1751,7 +1781,7 @@ var openaiImageResponseSchema = z10.object({
   )
 });

-// src/openai-transcription-model.ts
+// src/transcription/openai-transcription-model.ts
 import {
   combineHeaders as combineHeaders5,
   convertBase64ToUint8Array,
@@ -1761,7 +1791,7 @@ import {
 } from "@ai-sdk/provider-utils";
 import { z as z12 } from "zod/v4";

-// src/openai-transcription-options.ts
+// src/transcription/openai-transcription-options.ts
 import { z as z11 } from "zod/v4";
 var openAITranscriptionProviderOptions = z11.object({
   /**
@@ -1788,7 +1818,7 @@ var openAITranscriptionProviderOptions = z11.object({
   timestampGranularities: z11.array(z11.enum(["word", "segment"])).default(["segment"]).optional()
 });

-// src/openai-transcription-model.ts
+// src/transcription/openai-transcription-model.ts
 var languageMap = {
   afrikaans: "af",
   arabic: "ar",
@@ -1946,7 +1976,7 @@ var openaiTranscriptionResponseSchema = z12.object({
   ).nullish()
 });

-// src/openai-speech-model.ts
+// src/speech/openai-speech-model.ts
 import {
   combineHeaders as combineHeaders6,
   createBinaryResponseHandler,
@@ -2070,7 +2100,7 @@ import {
   parseProviderOptions as parseProviderOptions7,
   postJsonToApi as postJsonToApi6
 } from "@ai-sdk/provider-utils";
-import { z as
+import { z as z16 } from "zod/v4";

 // src/responses/convert-to-openai-responses-messages.ts
 import {
@@ -2129,7 +2159,7 @@ async function convertToOpenAIResponsesMessages({
           return {
             type: "input_image",
             ...part.data instanceof URL ? { image_url: part.data.toString() } : typeof part.data === "string" && part.data.startsWith("file-") ? { file_id: part.data } : {
-              image_url: `data:${mediaType};base64,${part.data}`
+              image_url: `data:${mediaType};base64,${convertToBase642(part.data)}`
             },
             detail: (_b2 = (_a2 = part.providerOptions) == null ? void 0 : _a2.openai) == null ? void 0 : _b2.imageDetail
           };
@@ -2288,6 +2318,25 @@ function mapOpenAIResponseFinishReason({
 import {
   UnsupportedFunctionalityError as UnsupportedFunctionalityError5
 } from "@ai-sdk/provider";
+
+// src/tool/code-interpreter.ts
+import { createProviderDefinedToolFactory as createProviderDefinedToolFactory3 } from "@ai-sdk/provider-utils";
+import { z as z15 } from "zod/v4";
+var codeInterpreterArgsSchema = z15.object({
+  container: z15.union([
+    z15.string(),
+    z15.object({
+      fileIds: z15.array(z15.string()).optional()
+    })
+  ]).optional()
+});
+var codeInterpreter = createProviderDefinedToolFactory3({
+  id: "openai.code_interpreter",
+  name: "code_interpreter",
+  inputSchema: z15.object({})
+});
+
+// src/responses/openai-responses-prepare-tools.ts
 function prepareResponsesTools({
   tools,
   toolChoice,
@@ -2310,7 +2359,7 @@ function prepareResponsesTools({
           strict: strictJsonSchema
         });
         break;
-      case "provider-defined":
+      case "provider-defined": {
         switch (tool.id) {
           case "openai.file_search": {
             const args = fileSearchArgsSchema.parse(tool.args);
@@ -2323,18 +2372,30 @@ function prepareResponsesTools({
             });
             break;
           }
-          case "openai.web_search_preview":
+          case "openai.web_search_preview": {
+            const args = webSearchPreviewArgsSchema.parse(tool.args);
             openaiTools.push({
               type: "web_search_preview",
-              search_context_size:
-              user_location:
+              search_context_size: args.searchContextSize,
+              user_location: args.userLocation
             });
             break;
-
+          }
+          case "openai.code_interpreter": {
+            const args = codeInterpreterArgsSchema.parse(tool.args);
+            openaiTools.push({
+              type: "code_interpreter",
+              container: args.container == null ? { type: "auto", file_ids: void 0 } : typeof args.container === "string" ? args.container : { type: "auto", file_ids: args.container.fileIds }
+            });
+            break;
+          }
+          default: {
            toolWarnings.push({ type: "unsupported-tool", tool });
            break;
+          }
         }
         break;
+      }
       default:
         toolWarnings.push({ type: "unsupported-tool", tool });
         break;
@@ -2352,7 +2413,7 @@ function prepareResponsesTools({
     case "tool":
       return {
         tools: openaiTools,
-        toolChoice: toolChoice.toolName === "
+        toolChoice: toolChoice.toolName === "code_interpreter" || toolChoice.toolName === "file_search" || toolChoice.toolName === "web_search_preview" ? { type: toolChoice.toolName } : { type: "function", name: toolChoice.toolName },
         toolWarnings
       };
     default: {
@@ -2557,72 +2618,72 @@ var OpenAIResponsesLanguageModel = class {
   body,
   failedResponseHandler: openaiFailedResponseHandler,
   successfulResponseHandler: createJsonResponseHandler6(
-
-      id:
-      created_at:
-      error:
-        code:
-        message:
+    z16.object({
+      id: z16.string(),
+      created_at: z16.number(),
+      error: z16.object({
+        code: z16.string(),
+        message: z16.string()
       }).nullish(),
-      model:
-      output:
-
-
-            type:
-            role:
-            id:
-            content:
-
-                type:
-                text:
-                annotations:
-
-                    type:
-                    start_index:
-                    end_index:
-                    url:
-                    title:
+      model: z16.string(),
+      output: z16.array(
+        z16.discriminatedUnion("type", [
+          z16.object({
+            type: z16.literal("message"),
+            role: z16.literal("assistant"),
+            id: z16.string(),
+            content: z16.array(
+              z16.object({
+                type: z16.literal("output_text"),
+                text: z16.string(),
+                annotations: z16.array(
+                  z16.object({
+                    type: z16.literal("url_citation"),
+                    start_index: z16.number(),
+                    end_index: z16.number(),
+                    url: z16.string(),
+                    title: z16.string()
                   })
                 )
              })
            )
          }),
-
-            type:
-            call_id:
-            name:
-            arguments:
-            id:
+          z16.object({
+            type: z16.literal("function_call"),
+            call_id: z16.string(),
+            name: z16.string(),
+            arguments: z16.string(),
+            id: z16.string()
          }),
-
-            type:
-            id:
-            status:
+          z16.object({
+            type: z16.literal("web_search_call"),
+            id: z16.string(),
+            status: z16.string().optional()
          }),
-
-            type:
-            id:
-            status:
+          z16.object({
+            type: z16.literal("computer_call"),
+            id: z16.string(),
+            status: z16.string().optional()
          }),
-
-            type:
-            id:
-            status:
+          z16.object({
+            type: z16.literal("file_search_call"),
+            id: z16.string(),
+            status: z16.string().optional()
          }),
-
-            type:
-            id:
-            encrypted_content:
-            summary:
-
-                type:
-                text:
+          z16.object({
+            type: z16.literal("reasoning"),
+            id: z16.string(),
+            encrypted_content: z16.string().nullish(),
+            summary: z16.array(
+              z16.object({
+                type: z16.literal("summary_text"),
+                text: z16.string()
              })
            )
          })
        ])
      ),
-      incomplete_details:
+      incomplete_details: z16.object({ reason: z16.string() }).nullable(),
       usage: usageSchema2
     })
   ),
@@ -3069,140 +3130,140 @@ var OpenAIResponsesLanguageModel = class {
     };
   }
 };
-var usageSchema2 =
-  input_tokens:
-  input_tokens_details:
-  output_tokens:
-  output_tokens_details:
+var usageSchema2 = z16.object({
+  input_tokens: z16.number(),
+  input_tokens_details: z16.object({ cached_tokens: z16.number().nullish() }).nullish(),
+  output_tokens: z16.number(),
+  output_tokens_details: z16.object({ reasoning_tokens: z16.number().nullish() }).nullish()
 });
-var textDeltaChunkSchema =
-  type:
-  item_id:
-  delta:
+var textDeltaChunkSchema = z16.object({
+  type: z16.literal("response.output_text.delta"),
+  item_id: z16.string(),
+  delta: z16.string()
 });
-var errorChunkSchema =
-  type:
-  code:
-  message:
-  param:
-  sequence_number:
+var errorChunkSchema = z16.object({
+  type: z16.literal("error"),
+  code: z16.string(),
+  message: z16.string(),
+  param: z16.string().nullish(),
+  sequence_number: z16.number()
 });
-var responseFinishedChunkSchema =
-  type:
-  response:
-    incomplete_details:
+var responseFinishedChunkSchema = z16.object({
+  type: z16.enum(["response.completed", "response.incomplete"]),
+  response: z16.object({
+    incomplete_details: z16.object({ reason: z16.string() }).nullish(),
     usage: usageSchema2
   })
 });
-var responseCreatedChunkSchema =
-  type:
-  response:
-    id:
-    created_at:
-    model:
+var responseCreatedChunkSchema = z16.object({
+  type: z16.literal("response.created"),
+  response: z16.object({
+    id: z16.string(),
+    created_at: z16.number(),
+    model: z16.string()
  })
 });
-var responseOutputItemAddedSchema =
-  type:
-  output_index:
-  item:
-
-      type:
-      id:
+var responseOutputItemAddedSchema = z16.object({
+  type: z16.literal("response.output_item.added"),
+  output_index: z16.number(),
+  item: z16.discriminatedUnion("type", [
+    z16.object({
+      type: z16.literal("message"),
+      id: z16.string()
    }),
-
-      type:
-      id:
-      encrypted_content:
+    z16.object({
+      type: z16.literal("reasoning"),
+      id: z16.string(),
+      encrypted_content: z16.string().nullish()
    }),
-
-      type:
-      id:
-      call_id:
-      name:
-      arguments:
+    z16.object({
+      type: z16.literal("function_call"),
+      id: z16.string(),
+      call_id: z16.string(),
+      name: z16.string(),
+      arguments: z16.string()
    }),
-
-      type:
-      id:
-      status:
+    z16.object({
+      type: z16.literal("web_search_call"),
+      id: z16.string(),
+      status: z16.string()
    }),
-
-      type:
-      id:
-      status:
+    z16.object({
+      type: z16.literal("computer_call"),
+      id: z16.string(),
+      status: z16.string()
    }),
-
-      type:
-      id:
-      status:
+    z16.object({
+      type: z16.literal("file_search_call"),
+      id: z16.string(),
+      status: z16.string()
    })
  ])
 });
-var responseOutputItemDoneSchema =
-  type:
-  output_index:
-  item:
-
-      type:
-      id:
+var responseOutputItemDoneSchema = z16.object({
+  type: z16.literal("response.output_item.done"),
+  output_index: z16.number(),
+  item: z16.discriminatedUnion("type", [
+    z16.object({
+      type: z16.literal("message"),
+      id: z16.string()
    }),
-
-      type:
-      id:
-      encrypted_content:
+    z16.object({
+      type: z16.literal("reasoning"),
+      id: z16.string(),
+      encrypted_content: z16.string().nullish()
    }),
-
-      type:
-      id:
-      call_id:
-      name:
-      arguments:
-      status:
+    z16.object({
+      type: z16.literal("function_call"),
+      id: z16.string(),
+      call_id: z16.string(),
+      name: z16.string(),
+      arguments: z16.string(),
+      status: z16.literal("completed")
    }),
-
-      type:
-      id:
-      status:
+    z16.object({
+      type: z16.literal("web_search_call"),
+      id: z16.string(),
+      status: z16.literal("completed")
    }),
-
-      type:
-      id:
-      status:
+    z16.object({
+      type: z16.literal("computer_call"),
+      id: z16.string(),
+      status: z16.literal("completed")
    }),
-
-      type:
-      id:
-      status:
+    z16.object({
+      type: z16.literal("file_search_call"),
+      id: z16.string(),
+      status: z16.literal("completed")
    })
  ])
 });
-var responseFunctionCallArgumentsDeltaSchema =
-  type:
-  item_id:
-  output_index:
-  delta:
+var responseFunctionCallArgumentsDeltaSchema = z16.object({
+  type: z16.literal("response.function_call_arguments.delta"),
+  item_id: z16.string(),
+  output_index: z16.number(),
+  delta: z16.string()
 });
-var responseAnnotationAddedSchema =
-  type:
-  annotation:
-    type:
-    url:
-    title:
+var responseAnnotationAddedSchema = z16.object({
+  type: z16.literal("response.output_text.annotation.added"),
+  annotation: z16.object({
+    type: z16.literal("url_citation"),
+    url: z16.string(),
+    title: z16.string()
  })
 });
-var responseReasoningSummaryPartAddedSchema =
-  type:
-  item_id:
-  summary_index:
+var responseReasoningSummaryPartAddedSchema = z16.object({
+  type: z16.literal("response.reasoning_summary_part.added"),
+  item_id: z16.string(),
+  summary_index: z16.number()
 });
-var responseReasoningSummaryTextDeltaSchema =
-  type:
-  item_id:
-  summary_index:
-  delta:
+var responseReasoningSummaryTextDeltaSchema = z16.object({
+  type: z16.literal("response.reasoning_summary_text.delta"),
+  item_id: z16.string(),
+  summary_index: z16.number(),
+  delta: z16.string()
 });
-var openaiResponsesChunkSchema =
+var openaiResponsesChunkSchema = z16.union([
   textDeltaChunkSchema,
   responseFinishedChunkSchema,
   responseCreatedChunkSchema,
@@ -3213,7 +3274,7 @@ var openaiResponsesChunkSchema = z15.union([
   responseReasoningSummaryPartAddedSchema,
   responseReasoningSummaryTextDeltaSchema,
   errorChunkSchema,
-
+  z16.object({ type: z16.string() }).loose()
   // fallback for unknown chunks
 ]);
 function isTextDeltaChunk(chunk) {
@@ -3279,19 +3340,19 @@ function supportsFlexProcessing2(modelId) {
 function supportsPriorityProcessing2(modelId) {
   return modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
 }
-var openaiResponsesProviderOptionsSchema =
-  metadata:
-  parallelToolCalls:
-  previousResponseId:
-  store:
-  user:
-  reasoningEffort:
-  strictJsonSchema:
-  instructions:
-  reasoningSummary:
-  serviceTier:
-  include:
-  textVerbosity:
+var openaiResponsesProviderOptionsSchema = z16.object({
+  metadata: z16.any().nullish(),
+  parallelToolCalls: z16.boolean().nullish(),
+  previousResponseId: z16.string().nullish(),
+  store: z16.boolean().nullish(),
+  user: z16.string().nullish(),
+  reasoningEffort: z16.string().nullish(),
+  strictJsonSchema: z16.boolean().nullish(),
+  instructions: z16.string().nullish(),
+  reasoningSummary: z16.string().nullish(),
+  serviceTier: z16.enum(["auto", "flex", "priority"]).nullish(),
+  include: z16.array(z16.enum(["reasoning.encrypted_content", "file_search_call.results"])).nullish(),
+  textVerbosity: z16.enum(["low", "medium", "high"]).nullish()
 });
 export {
   OpenAIChatLanguageModel,