@ai-sdk/openai 2.0.9 → 2.0.11
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- package/CHANGELOG.md +25 -0
- package/dist/index.d.mts +40 -56
- package/dist/index.d.ts +40 -56
- package/dist/index.js +605 -448
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +616 -459
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +15 -3
- package/dist/internal/index.d.ts +15 -3
- package/dist/internal/index.js +417 -260
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +402 -245
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +2 -2
package/dist/internal/index.mjs
CHANGED
@@ -1,4 +1,4 @@
-// src/openai-chat-language-model.ts
+// src/chat/openai-chat-language-model.ts
 import {
   InvalidResponseDataError
 } from "@ai-sdk/provider";
@@ -13,7 +13,26 @@ import {
 } from "@ai-sdk/provider-utils";
 import { z as z5 } from "zod/v4";
 
-// src/
+// src/openai-error.ts
+import { z } from "zod/v4";
+import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
+var openaiErrorDataSchema = z.object({
+  error: z.object({
+    message: z.string(),
+    // The additional information below is handled loosely to support
+    // OpenAI-compatible providers that have slightly different error
+    // responses:
+    type: z.string().nullish(),
+    param: z.any().nullish(),
+    code: z.union([z.string(), z.number()]).nullish()
+  })
+});
+var openaiFailedResponseHandler = createJsonErrorResponseHandler({
+  errorSchema: openaiErrorDataSchema,
+  errorToMessage: (data) => data.error.message
+});
+
+// src/chat/convert-to-openai-chat-messages.ts
 import {
   UnsupportedFunctionalityError
 } from "@ai-sdk/provider";
@@ -193,7 +212,7 @@ function convertToOpenAIChatMessages({
   return { messages, warnings };
 }
 
-// src/get-response-metadata.ts
+// src/chat/get-response-metadata.ts
 function getResponseMetadata({
   id,
   model,
@@ -206,7 +225,7 @@ function getResponseMetadata({
   };
 }
 
-// src/map-openai-finish-reason.ts
+// src/chat/map-openai-finish-reason.ts
 function mapOpenAIFinishReason(finishReason) {
   switch (finishReason) {
     case "stop":
@@ -223,16 +242,16 @@ function mapOpenAIFinishReason(finishReason) {
   }
 }
 
-// src/openai-chat-options.ts
-import { z } from "zod/v4";
-var openaiProviderOptions =
+// src/chat/openai-chat-options.ts
+import { z as z2 } from "zod/v4";
+var openaiProviderOptions = z2.object({
   /**
    * Modify the likelihood of specified tokens appearing in the completion.
    *
    * Accepts a JSON object that maps tokens (specified by their token ID in
    * the GPT tokenizer) to an associated bias value from -100 to 100.
    */
-  logitBias:
+  logitBias: z2.record(z2.coerce.number(), z2.number()).optional(),
   /**
    * Return the log probabilities of the tokens.
    *
@@ -242,42 +261,42 @@ var openaiProviderOptions = z.object({
   * Setting to a number will return the log probabilities of the top n
   * tokens that were generated.
   */
-  logprobs:
+  logprobs: z2.union([z2.boolean(), z2.number()]).optional(),
   /**
    * Whether to enable parallel function calling during tool use. Default to true.
    */
-  parallelToolCalls:
+  parallelToolCalls: z2.boolean().optional(),
   /**
    * A unique identifier representing your end-user, which can help OpenAI to
    * monitor and detect abuse.
   */
-  user:
+  user: z2.string().optional(),
   /**
    * Reasoning effort for reasoning models. Defaults to `medium`.
   */
-  reasoningEffort:
+  reasoningEffort: z2.enum(["minimal", "low", "medium", "high"]).optional(),
   /**
    * Maximum number of completion tokens to generate. Useful for reasoning models.
   */
-  maxCompletionTokens:
+  maxCompletionTokens: z2.number().optional(),
   /**
    * Whether to enable persistence in responses API.
   */
-  store:
+  store: z2.boolean().optional(),
   /**
    * Metadata to associate with the request.
   */
-  metadata:
+  metadata: z2.record(z2.string().max(64), z2.string().max(512)).optional(),
   /**
    * Parameters for prediction mode.
   */
-  prediction:
+  prediction: z2.record(z2.string(), z2.any()).optional(),
   /**
    * Whether to use structured outputs.
    *
    * @default true
   */
-  structuredOutputs:
+  structuredOutputs: z2.boolean().optional(),
   /**
    * Service tier for the request.
    * - 'auto': Default service tier
@@ -286,40 +305,34 @@ var openaiProviderOptions = z.object({
   *
   * @default 'auto'
   */
-  serviceTier:
+  serviceTier: z2.enum(["auto", "flex", "priority"]).optional(),
   /**
    * Whether to use strict JSON schema validation.
    *
    * @default false
   */
-  strictJsonSchema:
+  strictJsonSchema: z2.boolean().optional(),
   /**
    * Controls the verbosity of the model's responses.
    * Lower values will result in more concise responses, while higher values will result in more verbose responses.
   */
-  textVerbosity:
-
-
-
-
-
-
-
-
-
-
-
-
-
-    code: z2.union([z2.string(), z2.number()]).nullish()
-  })
-});
-var openaiFailedResponseHandler = createJsonErrorResponseHandler({
-  errorSchema: openaiErrorDataSchema,
-  errorToMessage: (data) => data.error.message
+  textVerbosity: z2.enum(["low", "medium", "high"]).optional(),
+  /**
+   * A cache key for prompt caching. Allows manual control over prompt caching behavior.
+   * Useful for improving cache hit rates and working around automatic caching issues.
+   */
+  promptCacheKey: z2.string().optional(),
+  /**
+   * A stable identifier used to help detect users of your application
+   * that may be violating OpenAI's usage policies. The IDs should be a
+   * string that uniquely identifies each user. We recommend hashing their
+   * username or email address, in order to avoid sending us any identifying
+   * information.
+   */
+  safetyIdentifier: z2.string().optional()
 });
 
-// src/openai-prepare-tools.ts
+// src/chat/openai-chat-prepare-tools.ts
 import {
   UnsupportedFunctionalityError as UnsupportedFunctionalityError2
 } from "@ai-sdk/provider";
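The hunk above introduces two new chat provider options, promptCacheKey and safetyIdentifier. A minimal sketch of passing them through providerOptions.openai; the generateText call, model id, and option values are illustrative and not taken from this diff (both options are plain strings that the provider forwards to the API, see the request-body hunk further down):

  import { openai } from "@ai-sdk/openai";
  import { generateText } from "ai";

  const result = await generateText({
    model: openai("gpt-5-mini"), // illustrative model id
    prompt: "Summarize the changelog.",
    providerOptions: {
      openai: {
        promptCacheKey: "my-app-system-prompt-v1", // hypothetical cache key
        safetyIdentifier: "hashed-user-id" // hashed end-user identifier (illustrative)
      }
    }
  });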
@@ -410,8 +423,8 @@ var webSearchPreview = createProviderDefinedToolFactory2({
   inputSchema: z4.object({})
 });
 
-// src/openai-prepare-tools.ts
-function
+// src/chat/openai-chat-prepare-tools.ts
+function prepareChatTools({
   tools,
   toolChoice,
   structuredOutputs,
@@ -497,7 +510,7 @@ function prepareTools({
   }
 }
 
-// src/openai-chat-language-model.ts
+// src/chat/openai-chat-language-model.ts
 var OpenAIChatLanguageModel = class {
   constructor(modelId, config) {
     this.specificationVersion = "v2";
@@ -582,13 +595,15 @@ var OpenAIChatLanguageModel = class {
       seed,
       verbosity: openaiOptions.textVerbosity,
       // openai specific settings:
-      // TODO
+      // TODO AI SDK 6: remove, we auto-map maxOutputTokens now
      max_completion_tokens: openaiOptions.maxCompletionTokens,
      store: openaiOptions.store,
      metadata: openaiOptions.metadata,
      prediction: openaiOptions.prediction,
      reasoning_effort: openaiOptions.reasoningEffort,
      service_tier: openaiOptions.serviceTier,
+      prompt_cache_key: openaiOptions.promptCacheKey,
+      safety_identifier: openaiOptions.safetyIdentifier,
      // messages:
      messages
    };
@@ -682,7 +697,7 @@ var OpenAIChatLanguageModel = class {
       tools: openaiTools,
       toolChoice: openaiToolChoice,
       toolWarnings
-    } =
+    } = prepareChatTools({
      tools,
      toolChoice,
      structuredOutputs,
@@ -1152,7 +1167,7 @@ var reasoningModels = {
   }
 };
 
-// src/openai-completion-language-model.ts
+// src/completion/openai-completion-language-model.ts
 import {
   combineHeaders as combineHeaders2,
   createEventSourceResponseHandler as createEventSourceResponseHandler2,
@@ -1162,7 +1177,7 @@ import {
 } from "@ai-sdk/provider-utils";
 import { z as z7 } from "zod/v4";
 
-// src/convert-to-openai-completion-prompt.ts
+// src/completion/convert-to-openai-completion-prompt.ts
 import {
   InvalidPromptError,
   UnsupportedFunctionalityError as UnsupportedFunctionalityError3
@@ -1240,7 +1255,37 @@ ${user}:`]
   };
 }
 
-// src/
+// src/completion/get-response-metadata.ts
+function getResponseMetadata2({
+  id,
+  model,
+  created
+}) {
+  return {
+    id: id != null ? id : void 0,
+    modelId: model != null ? model : void 0,
+    timestamp: created != null ? new Date(created * 1e3) : void 0
+  };
+}
+
+// src/completion/map-openai-finish-reason.ts
+function mapOpenAIFinishReason2(finishReason) {
+  switch (finishReason) {
+    case "stop":
+      return "stop";
+    case "length":
+      return "length";
+    case "content_filter":
+      return "content-filter";
+    case "function_call":
+    case "tool_calls":
+      return "tool-calls";
+    default:
+      return "unknown";
+  }
+}
+
+// src/completion/openai-completion-options.ts
 import { z as z6 } from "zod/v4";
 var openaiCompletionProviderOptions = z6.object({
   /**
@@ -1283,7 +1328,7 @@ var openaiCompletionProviderOptions = z6.object({
   logprobs: z6.union([z6.boolean(), z6.number()]).optional()
 });
 
-// src/openai-completion-language-model.ts
+// src/completion/openai-completion-language-model.ts
 var OpenAICompletionLanguageModel = class {
   constructor(modelId, config) {
     this.specificationVersion = "v2";
@@ -1403,10 +1448,10 @@ var OpenAICompletionLanguageModel = class {
        outputTokens: (_b = response.usage) == null ? void 0 : _b.completion_tokens,
        totalTokens: (_c = response.usage) == null ? void 0 : _c.total_tokens
      },
-      finishReason:
+      finishReason: mapOpenAIFinishReason2(choice.finish_reason),
      request: { body: args },
      response: {
-        ...
+        ...getResponseMetadata2(response),
        headers: responseHeaders,
        body: rawResponse
      },
@@ -1470,7 +1515,7 @@ var OpenAICompletionLanguageModel = class {
            isFirstChunk = false;
            controller.enqueue({
              type: "response-metadata",
-              ...
+              ...getResponseMetadata2(value)
            });
            controller.enqueue({ type: "text-start", id: "0" });
          }
@@ -1481,7 +1526,7 @@ var OpenAICompletionLanguageModel = class {
          }
          const choice = value.choices[0];
          if ((choice == null ? void 0 : choice.finish_reason) != null) {
-            finishReason =
+            finishReason = mapOpenAIFinishReason2(choice.finish_reason);
          }
          if ((choice == null ? void 0 : choice.logprobs) != null) {
            providerMetadata.openai.logprobs = choice.logprobs;
@@ -1556,7 +1601,7 @@ var openaiCompletionChunkSchema = z7.union([
   openaiErrorDataSchema
 ]);
 
-// src/openai-embedding-model.ts
+// src/embedding/openai-embedding-model.ts
 import {
   TooManyEmbeddingValuesForCallError
 } from "@ai-sdk/provider";
@@ -1568,7 +1613,7 @@ import {
 } from "@ai-sdk/provider-utils";
 import { z as z9 } from "zod/v4";
 
-// src/openai-embedding-options.ts
+// src/embedding/openai-embedding-options.ts
 import { z as z8 } from "zod/v4";
 var openaiEmbeddingProviderOptions = z8.object({
   /**
@@ -1583,7 +1628,7 @@ var openaiEmbeddingProviderOptions = z8.object({
   user: z8.string().optional()
 });
 
-// src/openai-embedding-model.ts
+// src/embedding/openai-embedding-model.ts
 var OpenAIEmbeddingModel = class {
   constructor(modelId, config) {
     this.specificationVersion = "v2";
@@ -1651,7 +1696,7 @@ var openaiTextEmbeddingResponseSchema = z9.object({
   usage: z9.object({ prompt_tokens: z9.number() }).nullish()
 });
 
-// src/openai-image-model.ts
+// src/image/openai-image-model.ts
 import {
   combineHeaders as combineHeaders4,
   createJsonResponseHandler as createJsonResponseHandler4,
@@ -1659,7 +1704,7 @@ import {
 } from "@ai-sdk/provider-utils";
 import { z as z10 } from "zod/v4";
 
-// src/openai-image-
+// src/image/openai-image-options.ts
 var modelMaxImagesPerCall = {
   "dall-e-3": 1,
   "dall-e-2": 10,
@@ -1667,7 +1712,7 @@ var modelMaxImagesPerCall = {
 };
 var hasDefaultResponseFormat = /* @__PURE__ */ new Set(["gpt-image-1"]);
 
-// src/openai-image-model.ts
+// src/image/openai-image-model.ts
 var OpenAIImageModel = class {
   constructor(modelId, config) {
     this.modelId = modelId;
@@ -1751,7 +1796,7 @@ var openaiImageResponseSchema = z10.object({
   )
 });
 
-// src/openai-transcription-model.ts
+// src/transcription/openai-transcription-model.ts
 import {
   combineHeaders as combineHeaders5,
   convertBase64ToUint8Array,
@@ -1761,7 +1806,7 @@ import {
 } from "@ai-sdk/provider-utils";
 import { z as z12 } from "zod/v4";
 
-// src/openai-transcription-options.ts
+// src/transcription/openai-transcription-options.ts
 import { z as z11 } from "zod/v4";
 var openAITranscriptionProviderOptions = z11.object({
   /**
@@ -1788,7 +1833,7 @@ var openAITranscriptionProviderOptions = z11.object({
   timestampGranularities: z11.array(z11.enum(["word", "segment"])).default(["segment"]).optional()
 });
 
-// src/openai-transcription-model.ts
+// src/transcription/openai-transcription-model.ts
 var languageMap = {
   afrikaans: "af",
   arabic: "ar",
@@ -1946,7 +1991,7 @@ var openaiTranscriptionResponseSchema = z12.object({
   ).nullish()
 });
 
-// src/openai-speech-model.ts
+// src/speech/openai-speech-model.ts
 import {
   combineHeaders as combineHeaders6,
   createBinaryResponseHandler,
@@ -2070,7 +2115,7 @@ import {
   parseProviderOptions as parseProviderOptions7,
   postJsonToApi as postJsonToApi6
 } from "@ai-sdk/provider-utils";
-import { z as
+import { z as z16 } from "zod/v4";
 
 // src/responses/convert-to-openai-responses-messages.ts
 import {
@@ -2079,9 +2124,14 @@ import {
 import { parseProviderOptions as parseProviderOptions6 } from "@ai-sdk/provider-utils";
 import { z as z14 } from "zod/v4";
 import { convertToBase64 as convertToBase642 } from "@ai-sdk/provider-utils";
+function isFileId(data, prefixes) {
+  if (!prefixes) return false;
+  return prefixes.some((prefix) => data.startsWith(prefix));
+}
 async function convertToOpenAIResponsesMessages({
   prompt,
-  systemMessageMode
+  systemMessageMode,
+  fileIdPrefixes
 }) {
   var _a, _b, _c, _d, _e, _f;
   const messages = [];
@@ -2128,7 +2178,7 @@ async function convertToOpenAIResponsesMessages({
            const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
            return {
              type: "input_image",
-              ...part.data instanceof URL ? { image_url: part.data.toString() } : typeof part.data === "string" && part.data
+              ...part.data instanceof URL ? { image_url: part.data.toString() } : typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
                image_url: `data:${mediaType};base64,${convertToBase642(part.data)}`
              },
              detail: (_b2 = (_a2 = part.providerOptions) == null ? void 0 : _a2.openai) == null ? void 0 : _b2.imageDetail
@@ -2141,7 +2191,7 @@ async function convertToOpenAIResponsesMessages({
            }
            return {
              type: "input_file",
-              ...typeof part.data === "string" && part.data
+              ...typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
                filename: (_c2 = part.filename) != null ? _c2 : `part-${index}.pdf`,
                file_data: `data:application/pdf;base64,${convertToBase642(part.data)}`
              }
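The new isFileId helper decides whether a string image/file payload should be sent to the Responses API as a file_id reference instead of being inlined as a base64 data URL, based on the fileIdPrefixes supplied in the provider config. A small sketch of the contract (prefix and payload values are illustrative):

  // Mirrors the added helper above.
  function isFileId(data: string, prefixes?: string[]): boolean {
    if (!prefixes) return false;
    return prefixes.some((prefix) => data.startsWith(prefix));
  }

  isFileId("file-abc123", ["file-"]); // true  -> sent as { file_id: "file-abc123" }
  isFileId("JVBERi0xLjQK", ["file-"]); // false -> inlined as a data: URL
  isFileId("file-abc123", undefined); // false -> no prefixes configured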
@@ -2288,6 +2338,25 @@ function mapOpenAIResponseFinishReason({
 import {
   UnsupportedFunctionalityError as UnsupportedFunctionalityError5
 } from "@ai-sdk/provider";
+
+// src/tool/code-interpreter.ts
+import { createProviderDefinedToolFactory as createProviderDefinedToolFactory3 } from "@ai-sdk/provider-utils";
+import { z as z15 } from "zod/v4";
+var codeInterpreterArgsSchema = z15.object({
+  container: z15.union([
+    z15.string(),
+    z15.object({
+      fileIds: z15.array(z15.string()).optional()
+    })
+  ]).optional()
+});
+var codeInterpreter = createProviderDefinedToolFactory3({
+  id: "openai.code_interpreter",
+  name: "code_interpreter",
+  inputSchema: z15.object({})
+});
+
+// src/responses/openai-responses-prepare-tools.ts
 function prepareResponsesTools({
   tools,
   toolChoice,
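This hunk bundles a new provider-defined code_interpreter tool for the Responses API, with an optional container argument that is either a container id string or an object with fileIds. Assuming it is exposed the same way as the existing provider-defined tools (the openai.tools.codeInterpreter path is an assumption; this internal bundle does not show the public export), usage might look like:

  import { openai } from "@ai-sdk/openai";
  import { generateText } from "ai";

  // Sketch only: export path and model id are assumptions.
  const result = await generateText({
    model: openai.responses("gpt-5"),
    prompt: "Run a quick calculation of 2^32 in Python.",
    tools: {
      code_interpreter: openai.tools.codeInterpreter({
        // a container id string, or { fileIds } for an auto-created container
        container: { fileIds: [] }
      })
    }
  });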
@@ -2310,7 +2379,7 @@ function prepareResponsesTools({
          strict: strictJsonSchema
        });
        break;
-      case "provider-defined":
+      case "provider-defined": {
        switch (tool.id) {
          case "openai.file_search": {
            const args = fileSearchArgsSchema.parse(tool.args);
@@ -2323,18 +2392,30 @@ function prepareResponsesTools({
            });
            break;
          }
-          case "openai.web_search_preview":
+          case "openai.web_search_preview": {
+            const args = webSearchPreviewArgsSchema.parse(tool.args);
            openaiTools.push({
              type: "web_search_preview",
-              search_context_size:
-              user_location:
+              search_context_size: args.searchContextSize,
+              user_location: args.userLocation
            });
            break;
-
+          }
+          case "openai.code_interpreter": {
+            const args = codeInterpreterArgsSchema.parse(tool.args);
+            openaiTools.push({
+              type: "code_interpreter",
+              container: args.container == null ? { type: "auto", file_ids: void 0 } : typeof args.container === "string" ? args.container : { type: "auto", file_ids: args.container.fileIds }
+            });
+            break;
+          }
+          default: {
            toolWarnings.push({ type: "unsupported-tool", tool });
            break;
+          }
        }
        break;
+      }
      default:
        toolWarnings.push({ type: "unsupported-tool", tool });
        break;
@@ -2352,7 +2433,7 @@ function prepareResponsesTools({
    case "tool":
      return {
        tools: openaiTools,
-        toolChoice: toolChoice.toolName === "
+        toolChoice: toolChoice.toolName === "code_interpreter" || toolChoice.toolName === "file_search" || toolChoice.toolName === "web_search_preview" ? { type: toolChoice.toolName } : { type: "function", name: toolChoice.toolName },
        toolWarnings
      };
    default: {
@@ -2418,7 +2499,8 @@ var OpenAIResponsesLanguageModel = class {
    }
    const { messages, warnings: messageWarnings } = await convertToOpenAIResponsesMessages({
      prompt,
-      systemMessageMode: modelConfig.systemMessageMode
+      systemMessageMode: modelConfig.systemMessageMode,
+      fileIdPrefixes: this.config.fileIdPrefixes
    });
    warnings.push(...messageWarnings);
    const openaiOptions = await parseProviderOptions7({
@@ -2458,6 +2540,8 @@ var OpenAIResponsesLanguageModel = class {
      instructions: openaiOptions == null ? void 0 : openaiOptions.instructions,
      service_tier: openaiOptions == null ? void 0 : openaiOptions.serviceTier,
      include: openaiOptions == null ? void 0 : openaiOptions.include,
+      prompt_cache_key: openaiOptions == null ? void 0 : openaiOptions.promptCacheKey,
+      safety_identifier: openaiOptions == null ? void 0 : openaiOptions.safetyIdentifier,
      // model-specific settings:
      ...modelConfig.isReasoningModel && ((openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null || (openaiOptions == null ? void 0 : openaiOptions.reasoningSummary) != null) && {
        reasoning: {
@@ -2557,72 +2641,83 @@ var OpenAIResponsesLanguageModel = class {
      body,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createJsonResponseHandler6(
-
-          id:
-          created_at:
-          error:
-            code:
-            message:
+        z16.object({
+          id: z16.string(),
+          created_at: z16.number(),
+          error: z16.object({
+            code: z16.string(),
+            message: z16.string()
          }).nullish(),
-          model:
-          output:
-
-
-              type:
-              role:
-              id:
-              content:
-
-                  type:
-                  text:
-                  annotations:
-
-                      type:
-                      start_index:
-                      end_index:
-                      url:
-                      title:
+          model: z16.string(),
+          output: z16.array(
+            z16.discriminatedUnion("type", [
+              z16.object({
+                type: z16.literal("message"),
+                role: z16.literal("assistant"),
+                id: z16.string(),
+                content: z16.array(
+                  z16.object({
+                    type: z16.literal("output_text"),
+                    text: z16.string(),
+                    annotations: z16.array(
+                      z16.object({
+                        type: z16.literal("url_citation"),
+                        start_index: z16.number(),
+                        end_index: z16.number(),
+                        url: z16.string(),
+                        title: z16.string()
                      })
                    )
                  })
                )
              }),
-
-              type:
-              call_id:
-              name:
-              arguments:
-              id:
+              z16.object({
+                type: z16.literal("function_call"),
+                call_id: z16.string(),
+                name: z16.string(),
+                arguments: z16.string(),
+                id: z16.string()
              }),
-
-              type:
-              id:
-              status:
+              z16.object({
+                type: z16.literal("web_search_call"),
+                id: z16.string(),
+                status: z16.string().optional()
              }),
-
-              type:
-              id:
-              status:
+              z16.object({
+                type: z16.literal("computer_call"),
+                id: z16.string(),
+                status: z16.string().optional()
              }),
-
-              type:
-              id:
-              status:
+              z16.object({
+                type: z16.literal("file_search_call"),
+                id: z16.string(),
+                status: z16.string().optional(),
+                queries: z16.array(z16.string()).nullish(),
+                results: z16.array(
+                  z16.object({
+                    attributes: z16.object({
+                      file_id: z16.string(),
+                      filename: z16.string(),
+                      score: z16.number(),
+                      text: z16.string()
+                    })
+                  })
+                ).nullish()
              }),
-
-              type:
-              id:
-              encrypted_content:
-              summary:
-
-              type:
-                  text:
+              z16.object({
+                type: z16.literal("reasoning"),
+                id: z16.string(),
+                encrypted_content: z16.string().nullish(),
+                summary: z16.array(
+                  z16.object({
+                    type: z16.literal("summary_text"),
+                    text: z16.string()
                  })
                )
              })
            ])
          ),
-          incomplete_details:
+          incomplete_details: z16.object({ reason: z16.string() }).nullable(),
          usage: usageSchema2
        })
      ),
@@ -2749,7 +2844,9 @@ var OpenAIResponsesLanguageModel = class {
              toolName: "file_search",
              result: {
                type: "file_search_tool_result",
-                status: part.status || "completed"
+                status: part.status || "completed",
+                ...part.queries && { queries: part.queries },
+                ...part.results && { results: part.results }
              },
              providerExecuted: true
            });
@@ -2864,6 +2961,16 @@ var OpenAIResponsesLanguageModel = class {
              id: value.item.id,
              toolName: "computer_use"
            });
+          } else if (value.item.type === "file_search_call") {
+            ongoingToolCalls[value.output_index] = {
+              toolName: "file_search",
+              toolCallId: value.item.id
+            };
+            controller.enqueue({
+              type: "tool-input-start",
+              id: value.item.id,
+              toolName: "file_search"
+            });
          } else if (value.item.type === "message") {
            controller.enqueue({
              type: "text-start",
@@ -2957,6 +3064,32 @@ var OpenAIResponsesLanguageModel = class {
              },
              providerExecuted: true
            });
+          } else if (value.item.type === "file_search_call") {
+            ongoingToolCalls[value.output_index] = void 0;
+            hasToolCalls = true;
+            controller.enqueue({
+              type: "tool-input-end",
+              id: value.item.id
+            });
+            controller.enqueue({
+              type: "tool-call",
+              toolCallId: value.item.id,
+              toolName: "file_search",
+              input: "",
+              providerExecuted: true
+            });
+            controller.enqueue({
+              type: "tool-result",
+              toolCallId: value.item.id,
+              toolName: "file_search",
+              result: {
+                type: "file_search_tool_result",
+                status: value.item.status || "completed",
+                ...value.item.queries && { queries: value.item.queries },
+                ...value.item.results && { results: value.item.results }
+              },
+              providerExecuted: true
+            });
          } else if (value.item.type === "message") {
            controller.enqueue({
              type: "text-end",
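Together with the tool-input-start handling added above, a provider-executed file_search call in the Responses stream now surfaces as ordinary tool stream parts. A sketch of the part sequence a stream consumer could observe for one call (the id and field values are illustrative):

  // Sketch: order of parts emitted for a single file_search call during streaming.
  const expectedParts = [
    { type: "tool-input-start", id: "fs_123", toolName: "file_search" },
    { type: "tool-input-end", id: "fs_123" },
    { type: "tool-call", toolCallId: "fs_123", toolName: "file_search", input: "", providerExecuted: true },
    {
      type: "tool-result",
      toolCallId: "fs_123",
      toolName: "file_search",
      result: { type: "file_search_tool_result", status: "completed" /* plus queries/results when present */ },
      providerExecuted: true
    }
  ];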
@@ -3069,140 +3202,162 @@ var OpenAIResponsesLanguageModel = class {
    };
  }
 };
-var usageSchema2 =
-  input_tokens:
-  input_tokens_details:
-  output_tokens:
-  output_tokens_details:
+var usageSchema2 = z16.object({
+  input_tokens: z16.number(),
+  input_tokens_details: z16.object({ cached_tokens: z16.number().nullish() }).nullish(),
+  output_tokens: z16.number(),
+  output_tokens_details: z16.object({ reasoning_tokens: z16.number().nullish() }).nullish()
 });
-var textDeltaChunkSchema =
-  type:
-  item_id:
-  delta:
+var textDeltaChunkSchema = z16.object({
+  type: z16.literal("response.output_text.delta"),
+  item_id: z16.string(),
+  delta: z16.string()
 });
-var errorChunkSchema =
-  type:
-  code:
-  message:
-  param:
-  sequence_number:
+var errorChunkSchema = z16.object({
+  type: z16.literal("error"),
+  code: z16.string(),
+  message: z16.string(),
+  param: z16.string().nullish(),
+  sequence_number: z16.number()
 });
-var responseFinishedChunkSchema =
-  type:
-  response:
-    incomplete_details:
+var responseFinishedChunkSchema = z16.object({
+  type: z16.enum(["response.completed", "response.incomplete"]),
+  response: z16.object({
+    incomplete_details: z16.object({ reason: z16.string() }).nullish(),
    usage: usageSchema2
  })
 });
-var responseCreatedChunkSchema =
-  type:
-  response:
-    id:
-    created_at:
-    model:
+var responseCreatedChunkSchema = z16.object({
+  type: z16.literal("response.created"),
+  response: z16.object({
+    id: z16.string(),
+    created_at: z16.number(),
+    model: z16.string()
  })
 });
-var responseOutputItemAddedSchema =
-  type:
-  output_index:
-  item:
-
-      type:
-      id:
+var responseOutputItemAddedSchema = z16.object({
+  type: z16.literal("response.output_item.added"),
+  output_index: z16.number(),
+  item: z16.discriminatedUnion("type", [
+    z16.object({
+      type: z16.literal("message"),
+      id: z16.string()
    }),
-
-      type:
-      id:
-      encrypted_content:
+    z16.object({
+      type: z16.literal("reasoning"),
+      id: z16.string(),
+      encrypted_content: z16.string().nullish()
    }),
-
-      type:
-      id:
-      call_id:
-      name:
-      arguments:
+    z16.object({
+      type: z16.literal("function_call"),
+      id: z16.string(),
+      call_id: z16.string(),
+      name: z16.string(),
+      arguments: z16.string()
    }),
-
-      type:
-      id:
-      status:
+    z16.object({
+      type: z16.literal("web_search_call"),
+      id: z16.string(),
+      status: z16.string()
    }),
-
-      type:
-      id:
-      status:
+    z16.object({
+      type: z16.literal("computer_call"),
+      id: z16.string(),
+      status: z16.string()
    }),
-
-      type:
-      id:
-      status:
+    z16.object({
+      type: z16.literal("file_search_call"),
+      id: z16.string(),
+      status: z16.string(),
+      queries: z16.array(z16.string()).nullish(),
+      results: z16.array(
+        z16.object({
+          attributes: z16.object({
+            file_id: z16.string(),
+            filename: z16.string(),
+            score: z16.number(),
+            text: z16.string()
+          })
+        })
+      ).optional()
    })
  ])
 });
-var responseOutputItemDoneSchema =
-  type:
-  output_index:
-  item:
-
-      type:
-      id:
+var responseOutputItemDoneSchema = z16.object({
+  type: z16.literal("response.output_item.done"),
+  output_index: z16.number(),
+  item: z16.discriminatedUnion("type", [
+    z16.object({
+      type: z16.literal("message"),
+      id: z16.string()
    }),
-
-      type:
-      id:
-      encrypted_content:
+    z16.object({
+      type: z16.literal("reasoning"),
+      id: z16.string(),
+      encrypted_content: z16.string().nullish()
    }),
-
-      type:
-      id:
-      call_id:
-      name:
-      arguments:
-      status:
+    z16.object({
+      type: z16.literal("function_call"),
+      id: z16.string(),
+      call_id: z16.string(),
+      name: z16.string(),
+      arguments: z16.string(),
+      status: z16.literal("completed")
    }),
-
-      type:
-      id:
-      status:
+    z16.object({
+      type: z16.literal("web_search_call"),
+      id: z16.string(),
+      status: z16.literal("completed")
    }),
-
-      type:
-      id:
-      status:
+    z16.object({
+      type: z16.literal("computer_call"),
+      id: z16.string(),
+      status: z16.literal("completed")
    }),
-
-      type:
-      id:
-      status:
+    z16.object({
+      type: z16.literal("file_search_call"),
+      id: z16.string(),
+      status: z16.literal("completed"),
+      queries: z16.array(z16.string()).nullish(),
+      results: z16.array(
+        z16.object({
+          attributes: z16.object({
+            file_id: z16.string(),
+            filename: z16.string(),
+            score: z16.number(),
+            text: z16.string()
+          })
+        })
+      ).nullish()
    })
  ])
 });
-var responseFunctionCallArgumentsDeltaSchema =
-  type:
-  item_id:
-  output_index:
-  delta:
+var responseFunctionCallArgumentsDeltaSchema = z16.object({
+  type: z16.literal("response.function_call_arguments.delta"),
+  item_id: z16.string(),
+  output_index: z16.number(),
+  delta: z16.string()
 });
-var responseAnnotationAddedSchema =
-  type:
-  annotation:
-    type:
-    url:
-    title:
+var responseAnnotationAddedSchema = z16.object({
+  type: z16.literal("response.output_text.annotation.added"),
+  annotation: z16.object({
+    type: z16.literal("url_citation"),
+    url: z16.string(),
+    title: z16.string()
  })
 });
-var responseReasoningSummaryPartAddedSchema =
-  type:
-  item_id:
-  summary_index:
+var responseReasoningSummaryPartAddedSchema = z16.object({
+  type: z16.literal("response.reasoning_summary_part.added"),
+  item_id: z16.string(),
+  summary_index: z16.number()
 });
-var responseReasoningSummaryTextDeltaSchema =
-  type:
-  item_id:
-  summary_index:
-  delta:
+var responseReasoningSummaryTextDeltaSchema = z16.object({
+  type: z16.literal("response.reasoning_summary_text.delta"),
+  item_id: z16.string(),
+  summary_index: z16.number(),
+  delta: z16.string()
 });
-var openaiResponsesChunkSchema =
+var openaiResponsesChunkSchema = z16.union([
  textDeltaChunkSchema,
  responseFinishedChunkSchema,
  responseCreatedChunkSchema,
@@ -3213,7 +3368,7 @@ var openaiResponsesChunkSchema = z15.union([
  responseReasoningSummaryPartAddedSchema,
  responseReasoningSummaryTextDeltaSchema,
  errorChunkSchema,
-
+  z16.object({ type: z16.string() }).loose()
  // fallback for unknown chunks
 ]);
 function isTextDeltaChunk(chunk) {
@@ -3279,19 +3434,21 @@ function supportsFlexProcessing2(modelId) {
 function supportsPriorityProcessing2(modelId) {
   return modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
 }
-var openaiResponsesProviderOptionsSchema =
-  metadata:
-  parallelToolCalls:
-  previousResponseId:
-  store:
-  user:
-  reasoningEffort:
-  strictJsonSchema:
-  instructions:
-  reasoningSummary:
-  serviceTier:
-  include:
-  textVerbosity:
+var openaiResponsesProviderOptionsSchema = z16.object({
+  metadata: z16.any().nullish(),
+  parallelToolCalls: z16.boolean().nullish(),
+  previousResponseId: z16.string().nullish(),
+  store: z16.boolean().nullish(),
+  user: z16.string().nullish(),
+  reasoningEffort: z16.string().nullish(),
+  strictJsonSchema: z16.boolean().nullish(),
+  instructions: z16.string().nullish(),
+  reasoningSummary: z16.string().nullish(),
+  serviceTier: z16.enum(["auto", "flex", "priority"]).nullish(),
+  include: z16.array(z16.enum(["reasoning.encrypted_content", "file_search_call.results"])).nullish(),
+  textVerbosity: z16.enum(["low", "medium", "high"]).nullish(),
+  promptCacheKey: z16.string().nullish(),
+  safetyIdentifier: z16.string().nullish()
 });
 export {
   OpenAIChatLanguageModel,