@ai-sdk/openai 2.0.9 → 2.0.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +25 -0
- package/dist/index.d.mts +40 -56
- package/dist/index.d.ts +40 -56
- package/dist/index.js +605 -448
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +616 -459
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +15 -3
- package/dist/internal/index.d.ts +15 -3
- package/dist/internal/index.js +417 -260
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +402 -245
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +2 -2
package/dist/internal/index.js
CHANGED
@@ -36,14 +36,33 @@ __export(internal_exports, {
 });
 module.exports = __toCommonJS(internal_exports);
 
-// src/openai-chat-language-model.ts
+// src/chat/openai-chat-language-model.ts
 var import_provider3 = require("@ai-sdk/provider");
 var import_provider_utils5 = require("@ai-sdk/provider-utils");
 var import_v45 = require("zod/v4");
 
-// src/
-var
+// src/openai-error.ts
+var import_v4 = require("zod/v4");
 var import_provider_utils = require("@ai-sdk/provider-utils");
+var openaiErrorDataSchema = import_v4.z.object({
+error: import_v4.z.object({
+message: import_v4.z.string(),
+// The additional information below is handled loosely to support
+// OpenAI-compatible providers that have slightly different error
+// responses:
+type: import_v4.z.string().nullish(),
+param: import_v4.z.any().nullish(),
+code: import_v4.z.union([import_v4.z.string(), import_v4.z.number()]).nullish()
+})
+});
+var openaiFailedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)({
+errorSchema: openaiErrorDataSchema,
+errorToMessage: (data) => data.error.message
+});
+
+// src/chat/convert-to-openai-chat-messages.ts
+var import_provider = require("@ai-sdk/provider");
+var import_provider_utils2 = require("@ai-sdk/provider-utils");
 function convertToOpenAIChatMessages({
 prompt,
 systemMessageMode = "system"
@@ -97,7 +116,7 @@ function convertToOpenAIChatMessages({
 return {
 type: "image_url",
 image_url: {
-url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${(0,
+url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${(0, import_provider_utils2.convertToBase64)(part.data)}`,
 // OpenAI specific extension: image detail
 detail: (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.imageDetail
 }
@@ -113,7 +132,7 @@ function convertToOpenAIChatMessages({
 return {
 type: "input_audio",
 input_audio: {
-data: (0,
+data: (0, import_provider_utils2.convertToBase64)(part.data),
 format: "wav"
 }
 };
@@ -123,7 +142,7 @@ function convertToOpenAIChatMessages({
 return {
 type: "input_audio",
 input_audio: {
-data: (0,
+data: (0, import_provider_utils2.convertToBase64)(part.data),
 format: "mp3"
 }
 };
@@ -144,7 +163,7 @@ function convertToOpenAIChatMessages({
 type: "file",
 file: typeof part.data === "string" && part.data.startsWith("file-") ? { file_id: part.data } : {
 filename: (_c = part.filename) != null ? _c : `part-${index}.pdf`,
-file_data: `data:application/pdf;base64,${(0,
+file_data: `data:application/pdf;base64,${(0, import_provider_utils2.convertToBase64)(part.data)}`
 }
 };
 } else {
@@ -219,7 +238,7 @@ function convertToOpenAIChatMessages({
 return { messages, warnings };
 }
 
-// src/get-response-metadata.ts
+// src/chat/get-response-metadata.ts
 function getResponseMetadata({
 id,
 model,
@@ -232,7 +251,7 @@ function getResponseMetadata({
 };
 }
 
-// src/map-openai-finish-reason.ts
+// src/chat/map-openai-finish-reason.ts
 function mapOpenAIFinishReason(finishReason) {
 switch (finishReason) {
 case "stop":
@@ -249,16 +268,16 @@ function mapOpenAIFinishReason(finishReason) {
 }
 }
 
-// src/openai-chat-options.ts
-var
-var openaiProviderOptions =
+// src/chat/openai-chat-options.ts
+var import_v42 = require("zod/v4");
+var openaiProviderOptions = import_v42.z.object({
 /**
 * Modify the likelihood of specified tokens appearing in the completion.
 *
 * Accepts a JSON object that maps tokens (specified by their token ID in
 * the GPT tokenizer) to an associated bias value from -100 to 100.
 */
-logitBias:
+logitBias: import_v42.z.record(import_v42.z.coerce.number(), import_v42.z.number()).optional(),
 /**
 * Return the log probabilities of the tokens.
 *
@@ -268,42 +287,42 @@ var openaiProviderOptions = import_v4.z.object({
 * Setting to a number will return the log probabilities of the top n
 * tokens that were generated.
 */
-logprobs:
+logprobs: import_v42.z.union([import_v42.z.boolean(), import_v42.z.number()]).optional(),
 /**
 * Whether to enable parallel function calling during tool use. Default to true.
 */
-parallelToolCalls:
+parallelToolCalls: import_v42.z.boolean().optional(),
 /**
 * A unique identifier representing your end-user, which can help OpenAI to
 * monitor and detect abuse.
 */
-user:
+user: import_v42.z.string().optional(),
 /**
 * Reasoning effort for reasoning models. Defaults to `medium`.
 */
-reasoningEffort:
+reasoningEffort: import_v42.z.enum(["minimal", "low", "medium", "high"]).optional(),
 /**
 * Maximum number of completion tokens to generate. Useful for reasoning models.
 */
-maxCompletionTokens:
+maxCompletionTokens: import_v42.z.number().optional(),
 /**
 * Whether to enable persistence in responses API.
 */
-store:
+store: import_v42.z.boolean().optional(),
 /**
 * Metadata to associate with the request.
 */
-metadata:
+metadata: import_v42.z.record(import_v42.z.string().max(64), import_v42.z.string().max(512)).optional(),
 /**
 * Parameters for prediction mode.
 */
-prediction:
+prediction: import_v42.z.record(import_v42.z.string(), import_v42.z.any()).optional(),
 /**
 * Whether to use structured outputs.
 *
 * @default true
 */
-structuredOutputs:
+structuredOutputs: import_v42.z.boolean().optional(),
 /**
 * Service tier for the request.
 * - 'auto': Default service tier
@@ -312,40 +331,34 @@ var openaiProviderOptions = import_v4.z.object({
 *
 * @default 'auto'
 */
-serviceTier:
+serviceTier: import_v42.z.enum(["auto", "flex", "priority"]).optional(),
 /**
 * Whether to use strict JSON schema validation.
 *
 * @default false
 */
-strictJsonSchema:
+strictJsonSchema: import_v42.z.boolean().optional(),
 /**
 * Controls the verbosity of the model's responses.
 * Lower values will result in more concise responses, while higher values will result in more verbose responses.
 */
-textVerbosity:
-
-
-
-
-
-
-
-
-
-
-
-
-
-code: import_v42.z.union([import_v42.z.string(), import_v42.z.number()]).nullish()
-})
-});
-var openaiFailedResponseHandler = (0, import_provider_utils2.createJsonErrorResponseHandler)({
-errorSchema: openaiErrorDataSchema,
-errorToMessage: (data) => data.error.message
+textVerbosity: import_v42.z.enum(["low", "medium", "high"]).optional(),
+/**
+* A cache key for prompt caching. Allows manual control over prompt caching behavior.
+* Useful for improving cache hit rates and working around automatic caching issues.
+*/
+promptCacheKey: import_v42.z.string().optional(),
+/**
+* A stable identifier used to help detect users of your application
+* that may be violating OpenAI's usage policies. The IDs should be a
+* string that uniquely identifies each user. We recommend hashing their
+* username or email address, in order to avoid sending us any identifying
+* information.
+*/
+safetyIdentifier: import_v42.z.string().optional()
 });
 
-// src/openai-prepare-tools.ts
+// src/chat/openai-chat-prepare-tools.ts
 var import_provider2 = require("@ai-sdk/provider");
 
 // src/tool/file-search.ts
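Note: the hunk above adds two new chat provider options, promptCacheKey and safetyIdentifier; a later hunk in this file maps them onto the prompt_cache_key and safety_identifier request fields. A minimal usage sketch follows (not part of this diff; it assumes the AI SDK v5 generateText call shape and the callable openai provider):

    // Hypothetical sketch: passing the new options through providerOptions.openai.
    import { openai } from '@ai-sdk/openai';
    import { generateText } from 'ai';

    const { text } = await generateText({
      model: openai('gpt-5-mini'),
      prompt: 'Summarize the release notes.',
      providerOptions: {
        openai: {
          // sent as prompt_cache_key in the chat request body
          promptCacheKey: 'release-notes-v1',
          // sent as safety_identifier; hash user identifiers before sending them
          safetyIdentifier: 'user-1234-hashed',
        },
      },
    });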
@@ -434,8 +447,8 @@ var webSearchPreview = (0, import_provider_utils4.createProviderDefinedToolFacto
 inputSchema: import_v44.z.object({})
 });
 
-// src/openai-prepare-tools.ts
-function
+// src/chat/openai-chat-prepare-tools.ts
+function prepareChatTools({
 tools,
 toolChoice,
 structuredOutputs,
@@ -521,7 +534,7 @@ function prepareTools({
 }
 }
 
-// src/openai-chat-language-model.ts
+// src/chat/openai-chat-language-model.ts
 var OpenAIChatLanguageModel = class {
 constructor(modelId, config) {
 this.specificationVersion = "v2";
@@ -606,13 +619,15 @@ var OpenAIChatLanguageModel = class {
 seed,
 verbosity: openaiOptions.textVerbosity,
 // openai specific settings:
-// TODO
+// TODO AI SDK 6: remove, we auto-map maxOutputTokens now
 max_completion_tokens: openaiOptions.maxCompletionTokens,
 store: openaiOptions.store,
 metadata: openaiOptions.metadata,
 prediction: openaiOptions.prediction,
 reasoning_effort: openaiOptions.reasoningEffort,
 service_tier: openaiOptions.serviceTier,
+prompt_cache_key: openaiOptions.promptCacheKey,
+safety_identifier: openaiOptions.safetyIdentifier,
 // messages:
 messages
 };
@@ -706,7 +721,7 @@ var OpenAIChatLanguageModel = class {
 tools: openaiTools,
 toolChoice: openaiToolChoice,
 toolWarnings
-} =
+} = prepareChatTools({
 tools,
 toolChoice,
 structuredOutputs,
@@ -1176,11 +1191,11 @@ var reasoningModels = {
 }
 };
 
-// src/openai-completion-language-model.ts
+// src/completion/openai-completion-language-model.ts
 var import_provider_utils6 = require("@ai-sdk/provider-utils");
 var import_v47 = require("zod/v4");
 
-// src/convert-to-openai-completion-prompt.ts
+// src/completion/convert-to-openai-completion-prompt.ts
 var import_provider4 = require("@ai-sdk/provider");
 function convertToOpenAICompletionPrompt({
 prompt,
@@ -1255,7 +1270,37 @@ ${user}:`]
 };
 }
 
-// src/
+// src/completion/get-response-metadata.ts
+function getResponseMetadata2({
+id,
+model,
+created
+}) {
+return {
+id: id != null ? id : void 0,
+modelId: model != null ? model : void 0,
+timestamp: created != null ? new Date(created * 1e3) : void 0
+};
+}
+
+// src/completion/map-openai-finish-reason.ts
+function mapOpenAIFinishReason2(finishReason) {
+switch (finishReason) {
+case "stop":
+return "stop";
+case "length":
+return "length";
+case "content_filter":
+return "content-filter";
+case "function_call":
+case "tool_calls":
+return "tool-calls";
+default:
+return "unknown";
+}
+}
+
+// src/completion/openai-completion-options.ts
 var import_v46 = require("zod/v4");
 var openaiCompletionProviderOptions = import_v46.z.object({
 /**
@@ -1298,7 +1343,7 @@ var openaiCompletionProviderOptions = import_v46.z.object({
 logprobs: import_v46.z.union([import_v46.z.boolean(), import_v46.z.number()]).optional()
 });
 
-// src/openai-completion-language-model.ts
+// src/completion/openai-completion-language-model.ts
 var OpenAICompletionLanguageModel = class {
 constructor(modelId, config) {
 this.specificationVersion = "v2";
@@ -1418,10 +1463,10 @@ var OpenAICompletionLanguageModel = class {
 outputTokens: (_b = response.usage) == null ? void 0 : _b.completion_tokens,
 totalTokens: (_c = response.usage) == null ? void 0 : _c.total_tokens
 },
-finishReason:
+finishReason: mapOpenAIFinishReason2(choice.finish_reason),
 request: { body: args },
 response: {
-...
+...getResponseMetadata2(response),
 headers: responseHeaders,
 body: rawResponse
 },
@@ -1485,7 +1530,7 @@ var OpenAICompletionLanguageModel = class {
 isFirstChunk = false;
 controller.enqueue({
 type: "response-metadata",
-...
+...getResponseMetadata2(value)
 });
 controller.enqueue({ type: "text-start", id: "0" });
 }
@@ -1496,7 +1541,7 @@ var OpenAICompletionLanguageModel = class {
 }
 const choice = value.choices[0];
 if ((choice == null ? void 0 : choice.finish_reason) != null) {
-finishReason =
+finishReason = mapOpenAIFinishReason2(choice.finish_reason);
 }
 if ((choice == null ? void 0 : choice.logprobs) != null) {
 providerMetadata.openai.logprobs = choice.logprobs;
@@ -1571,12 +1616,12 @@ var openaiCompletionChunkSchema = import_v47.z.union([
 openaiErrorDataSchema
 ]);
 
-// src/openai-embedding-model.ts
+// src/embedding/openai-embedding-model.ts
 var import_provider5 = require("@ai-sdk/provider");
 var import_provider_utils7 = require("@ai-sdk/provider-utils");
 var import_v49 = require("zod/v4");
 
-// src/openai-embedding-options.ts
+// src/embedding/openai-embedding-options.ts
 var import_v48 = require("zod/v4");
 var openaiEmbeddingProviderOptions = import_v48.z.object({
 /**
@@ -1591,7 +1636,7 @@ var openaiEmbeddingProviderOptions = import_v48.z.object({
 user: import_v48.z.string().optional()
 });
 
-// src/openai-embedding-model.ts
+// src/embedding/openai-embedding-model.ts
 var OpenAIEmbeddingModel = class {
 constructor(modelId, config) {
 this.specificationVersion = "v2";
@@ -1659,11 +1704,11 @@ var openaiTextEmbeddingResponseSchema = import_v49.z.object({
 usage: import_v49.z.object({ prompt_tokens: import_v49.z.number() }).nullish()
 });
 
-// src/openai-image-model.ts
+// src/image/openai-image-model.ts
 var import_provider_utils8 = require("@ai-sdk/provider-utils");
 var import_v410 = require("zod/v4");
 
-// src/openai-image-
+// src/image/openai-image-options.ts
 var modelMaxImagesPerCall = {
 "dall-e-3": 1,
 "dall-e-2": 10,
@@ -1671,7 +1716,7 @@ var modelMaxImagesPerCall = {
 };
 var hasDefaultResponseFormat = /* @__PURE__ */ new Set(["gpt-image-1"]);
 
-// src/openai-image-model.ts
+// src/image/openai-image-model.ts
 var OpenAIImageModel = class {
 constructor(modelId, config) {
 this.modelId = modelId;
@@ -1755,11 +1800,11 @@ var openaiImageResponseSchema = import_v410.z.object({
 )
 });
 
-// src/openai-transcription-model.ts
+// src/transcription/openai-transcription-model.ts
 var import_provider_utils9 = require("@ai-sdk/provider-utils");
 var import_v412 = require("zod/v4");
 
-// src/openai-transcription-options.ts
+// src/transcription/openai-transcription-options.ts
 var import_v411 = require("zod/v4");
 var openAITranscriptionProviderOptions = import_v411.z.object({
 /**
@@ -1786,7 +1831,7 @@ var openAITranscriptionProviderOptions = import_v411.z.object({
 timestampGranularities: import_v411.z.array(import_v411.z.enum(["word", "segment"])).default(["segment"]).optional()
 });
 
-// src/openai-transcription-model.ts
+// src/transcription/openai-transcription-model.ts
 var languageMap = {
 afrikaans: "af",
 arabic: "ar",
@@ -1944,7 +1989,7 @@ var openaiTranscriptionResponseSchema = import_v412.z.object({
 ).nullish()
 });
 
-// src/openai-speech-model.ts
+// src/speech/openai-speech-model.ts
 var import_provider_utils10 = require("@ai-sdk/provider-utils");
 var import_v413 = require("zod/v4");
 var OpenAIProviderOptionsSchema = import_v413.z.object({
@@ -2053,17 +2098,22 @@ var OpenAISpeechModel = class {
 
 // src/responses/openai-responses-language-model.ts
 var import_provider8 = require("@ai-sdk/provider");
-var
-var
+var import_provider_utils14 = require("@ai-sdk/provider-utils");
+var import_v416 = require("zod/v4");
 
 // src/responses/convert-to-openai-responses-messages.ts
 var import_provider6 = require("@ai-sdk/provider");
 var import_provider_utils11 = require("@ai-sdk/provider-utils");
 var import_v414 = require("zod/v4");
 var import_provider_utils12 = require("@ai-sdk/provider-utils");
+function isFileId(data, prefixes) {
+if (!prefixes) return false;
+return prefixes.some((prefix) => data.startsWith(prefix));
+}
 async function convertToOpenAIResponsesMessages({
 prompt,
-systemMessageMode
+systemMessageMode,
+fileIdPrefixes
 }) {
 var _a, _b, _c, _d, _e, _f;
 const messages = [];
@@ -2110,7 +2160,7 @@ async function convertToOpenAIResponsesMessages({
 const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
 return {
 type: "input_image",
-...part.data instanceof URL ? { image_url: part.data.toString() } : typeof part.data === "string" && part.data
+...part.data instanceof URL ? { image_url: part.data.toString() } : typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
 image_url: `data:${mediaType};base64,${(0, import_provider_utils12.convertToBase64)(part.data)}`
 },
 detail: (_b2 = (_a2 = part.providerOptions) == null ? void 0 : _a2.openai) == null ? void 0 : _b2.imageDetail
@@ -2123,7 +2173,7 @@ async function convertToOpenAIResponsesMessages({
 }
 return {
 type: "input_file",
-...typeof part.data === "string" && part.data
+...typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
 filename: (_c2 = part.filename) != null ? _c2 : `part-${index}.pdf`,
 file_data: `data:application/pdf;base64,${(0, import_provider_utils12.convertToBase64)(part.data)}`
 }
@@ -2268,6 +2318,25 @@ function mapOpenAIResponseFinishReason({
 
 // src/responses/openai-responses-prepare-tools.ts
 var import_provider7 = require("@ai-sdk/provider");
+
+// src/tool/code-interpreter.ts
+var import_provider_utils13 = require("@ai-sdk/provider-utils");
+var import_v415 = require("zod/v4");
+var codeInterpreterArgsSchema = import_v415.z.object({
+container: import_v415.z.union([
+import_v415.z.string(),
+import_v415.z.object({
+fileIds: import_v415.z.array(import_v415.z.string()).optional()
+})
+]).optional()
+});
+var codeInterpreter = (0, import_provider_utils13.createProviderDefinedToolFactory)({
+id: "openai.code_interpreter",
+name: "code_interpreter",
+inputSchema: import_v415.z.object({})
+});
+
+// src/responses/openai-responses-prepare-tools.ts
 function prepareResponsesTools({
 tools,
 toolChoice,
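Note: the hunk above registers a code_interpreter provider-defined tool factory (id "openai.code_interpreter") whose optional container argument is either a container id string or { fileIds }. A hedged usage sketch, assuming the factory is re-exported as openai.tools.codeInterpreter alongside the existing webSearchPreview and fileSearch tools:

    // Hypothetical sketch: enabling the new code interpreter tool on the Responses API.
    import { openai } from '@ai-sdk/openai';
    import { generateText } from 'ai';

    const result = await generateText({
      model: openai.responses('gpt-5'),
      prompt: 'Compute the standard deviation of [3, 7, 7, 19].',
      tools: {
        // omitting container lets the provider fall back to { type: "auto" }
        code_interpreter: openai.tools.codeInterpreter({}),
      },
    });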
@@ -2290,7 +2359,7 @@ function prepareResponsesTools({
 strict: strictJsonSchema
 });
 break;
-case "provider-defined":
+case "provider-defined": {
 switch (tool.id) {
 case "openai.file_search": {
 const args = fileSearchArgsSchema.parse(tool.args);
@@ -2303,18 +2372,30 @@ function prepareResponsesTools({
 });
 break;
 }
-case "openai.web_search_preview":
+case "openai.web_search_preview": {
+const args = webSearchPreviewArgsSchema.parse(tool.args);
 openaiTools.push({
 type: "web_search_preview",
-search_context_size:
-user_location:
+search_context_size: args.searchContextSize,
+user_location: args.userLocation
 });
 break;
-
+}
+case "openai.code_interpreter": {
+const args = codeInterpreterArgsSchema.parse(tool.args);
+openaiTools.push({
+type: "code_interpreter",
+container: args.container == null ? { type: "auto", file_ids: void 0 } : typeof args.container === "string" ? args.container : { type: "auto", file_ids: args.container.fileIds }
+});
+break;
+}
+default: {
 toolWarnings.push({ type: "unsupported-tool", tool });
 break;
+}
 }
 break;
+}
 default:
 toolWarnings.push({ type: "unsupported-tool", tool });
 break;
@@ -2332,7 +2413,7 @@ function prepareResponsesTools({
 case "tool":
 return {
 tools: openaiTools,
-toolChoice: toolChoice.toolName === "
+toolChoice: toolChoice.toolName === "code_interpreter" || toolChoice.toolName === "file_search" || toolChoice.toolName === "web_search_preview" ? { type: toolChoice.toolName } : { type: "function", name: toolChoice.toolName },
 toolWarnings
 };
 default: {
@@ -2398,10 +2479,11 @@ var OpenAIResponsesLanguageModel = class {
 }
 const { messages, warnings: messageWarnings } = await convertToOpenAIResponsesMessages({
 prompt,
-systemMessageMode: modelConfig.systemMessageMode
+systemMessageMode: modelConfig.systemMessageMode,
+fileIdPrefixes: this.config.fileIdPrefixes
 });
 warnings.push(...messageWarnings);
-const openaiOptions = await (0,
+const openaiOptions = await (0, import_provider_utils14.parseProviderOptions)({
 provider: "openai",
 providerOptions,
 schema: openaiResponsesProviderOptionsSchema
@@ -2438,6 +2520,8 @@ var OpenAIResponsesLanguageModel = class {
 instructions: openaiOptions == null ? void 0 : openaiOptions.instructions,
 service_tier: openaiOptions == null ? void 0 : openaiOptions.serviceTier,
 include: openaiOptions == null ? void 0 : openaiOptions.include,
+prompt_cache_key: openaiOptions == null ? void 0 : openaiOptions.promptCacheKey,
+safety_identifier: openaiOptions == null ? void 0 : openaiOptions.safetyIdentifier,
 // model-specific settings:
 ...modelConfig.isReasoningModel && ((openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null || (openaiOptions == null ? void 0 : openaiOptions.reasoningSummary) != null) && {
 reasoning: {
@@ -2531,78 +2615,89 @@ var OpenAIResponsesLanguageModel = class {
 responseHeaders,
 value: response,
 rawValue: rawResponse
-} = await (0,
+} = await (0, import_provider_utils14.postJsonToApi)({
 url,
-headers: (0,
+headers: (0, import_provider_utils14.combineHeaders)(this.config.headers(), options.headers),
 body,
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
-
-id:
-created_at:
-error:
-code:
-message:
+successfulResponseHandler: (0, import_provider_utils14.createJsonResponseHandler)(
+import_v416.z.object({
+id: import_v416.z.string(),
+created_at: import_v416.z.number(),
+error: import_v416.z.object({
+code: import_v416.z.string(),
+message: import_v416.z.string()
 }).nullish(),
-model:
-output:
-
-
-type:
-role:
-id:
-content:
-
-type:
-text:
-annotations:
-
-type:
-start_index:
-end_index:
-url:
-title:
+model: import_v416.z.string(),
+output: import_v416.z.array(
+import_v416.z.discriminatedUnion("type", [
+import_v416.z.object({
+type: import_v416.z.literal("message"),
+role: import_v416.z.literal("assistant"),
+id: import_v416.z.string(),
+content: import_v416.z.array(
+import_v416.z.object({
+type: import_v416.z.literal("output_text"),
+text: import_v416.z.string(),
+annotations: import_v416.z.array(
+import_v416.z.object({
+type: import_v416.z.literal("url_citation"),
+start_index: import_v416.z.number(),
+end_index: import_v416.z.number(),
+url: import_v416.z.string(),
+title: import_v416.z.string()
 })
 )
 })
 )
 }),
-
-type:
-call_id:
-name:
-arguments:
-id:
+import_v416.z.object({
+type: import_v416.z.literal("function_call"),
+call_id: import_v416.z.string(),
+name: import_v416.z.string(),
+arguments: import_v416.z.string(),
+id: import_v416.z.string()
 }),
-
-type:
-id:
-status:
+import_v416.z.object({
+type: import_v416.z.literal("web_search_call"),
+id: import_v416.z.string(),
+status: import_v416.z.string().optional()
 }),
-
-type:
-id:
-status:
+import_v416.z.object({
+type: import_v416.z.literal("computer_call"),
+id: import_v416.z.string(),
+status: import_v416.z.string().optional()
 }),
-
-type:
-id:
-status:
+import_v416.z.object({
+type: import_v416.z.literal("file_search_call"),
+id: import_v416.z.string(),
+status: import_v416.z.string().optional(),
+queries: import_v416.z.array(import_v416.z.string()).nullish(),
+results: import_v416.z.array(
+import_v416.z.object({
+attributes: import_v416.z.object({
+file_id: import_v416.z.string(),
+filename: import_v416.z.string(),
+score: import_v416.z.number(),
+text: import_v416.z.string()
+})
+})
+).nullish()
 }),
-
-type:
-id:
-encrypted_content:
-summary:
-
-type:
-text:
+import_v416.z.object({
+type: import_v416.z.literal("reasoning"),
+id: import_v416.z.string(),
+encrypted_content: import_v416.z.string().nullish(),
+summary: import_v416.z.array(
+import_v416.z.object({
+type: import_v416.z.literal("summary_text"),
+text: import_v416.z.string()
 })
 )
 })
 ])
 ),
-incomplete_details:
+incomplete_details: import_v416.z.object({ reason: import_v416.z.string() }).nullable(),
 usage: usageSchema2
 })
 ),
@@ -2656,7 +2751,7 @@ var OpenAIResponsesLanguageModel = class {
 content.push({
 type: "source",
 sourceType: "url",
-id: (_d = (_c = (_b = this.config).generateId) == null ? void 0 : _c.call(_b)) != null ? _d : (0,
+id: (_d = (_c = (_b = this.config).generateId) == null ? void 0 : _c.call(_b)) != null ? _d : (0, import_provider_utils14.generateId)(),
 url: annotation.url,
 title: annotation.title
 });
@@ -2729,7 +2824,9 @@ var OpenAIResponsesLanguageModel = class {
 toolName: "file_search",
 result: {
 type: "file_search_tool_result",
-status: part.status || "completed"
+status: part.status || "completed",
+...part.queries && { queries: part.queries },
+...part.results && { results: part.results }
 },
 providerExecuted: true
 });
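Note: the hunk above forwards queries and results from file_search_call items into the file_search tool result (streaming support follows in later hunks), and the responses provider options schema at the end of this file now accepts "file_search_call.results" in include. A hedged sketch of requesting those details (the fileSearch tool arguments such as vectorStoreIds are assumptions, not shown in this diff):

    // Hypothetical sketch: file search with result details included.
    import { openai } from '@ai-sdk/openai';
    import { generateText } from 'ai';

    const { toolResults } = await generateText({
      model: openai.responses('gpt-5-mini'),
      prompt: 'What does the onboarding guide say about SSO?',
      tools: {
        file_search: openai.tools.fileSearch({
          vectorStoreIds: ['vs_...'], // assumed argument name; use your vector store id
        }),
      },
      providerOptions: {
        openai: {
          // surfaces file_search_call.results so queries/results appear in the tool result
          include: ['file_search_call.results'],
        },
      },
    });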
@@ -2768,18 +2865,18 @@ var OpenAIResponsesLanguageModel = class {
 }
 async doStream(options) {
 const { args: body, warnings } = await this.getArgs(options);
-const { responseHeaders, value: response } = await (0,
+const { responseHeaders, value: response } = await (0, import_provider_utils14.postJsonToApi)({
 url: this.config.url({
 path: "/responses",
 modelId: this.modelId
 }),
-headers: (0,
+headers: (0, import_provider_utils14.combineHeaders)(this.config.headers(), options.headers),
 body: {
 ...body,
 stream: true
 },
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
+successfulResponseHandler: (0, import_provider_utils14.createEventSourceResponseHandler)(
 openaiResponsesChunkSchema
 ),
 abortSignal: options.abortSignal,
@@ -2844,6 +2941,16 @@ var OpenAIResponsesLanguageModel = class {
 id: value.item.id,
 toolName: "computer_use"
 });
+} else if (value.item.type === "file_search_call") {
+ongoingToolCalls[value.output_index] = {
+toolName: "file_search",
+toolCallId: value.item.id
+};
+controller.enqueue({
+type: "tool-input-start",
+id: value.item.id,
+toolName: "file_search"
+});
 } else if (value.item.type === "message") {
 controller.enqueue({
 type: "text-start",
@@ -2937,6 +3044,32 @@ var OpenAIResponsesLanguageModel = class {
 },
 providerExecuted: true
 });
+} else if (value.item.type === "file_search_call") {
+ongoingToolCalls[value.output_index] = void 0;
+hasToolCalls = true;
+controller.enqueue({
+type: "tool-input-end",
+id: value.item.id
+});
+controller.enqueue({
+type: "tool-call",
+toolCallId: value.item.id,
+toolName: "file_search",
+input: "",
+providerExecuted: true
+});
+controller.enqueue({
+type: "tool-result",
+toolCallId: value.item.id,
+toolName: "file_search",
+result: {
+type: "file_search_tool_result",
+status: value.item.status || "completed",
+...value.item.queries && { queries: value.item.queries },
+...value.item.results && { results: value.item.results }
+},
+providerExecuted: true
+});
 } else if (value.item.type === "message") {
 controller.enqueue({
 type: "text-end",
@@ -3022,7 +3155,7 @@ var OpenAIResponsesLanguageModel = class {
 controller.enqueue({
 type: "source",
 sourceType: "url",
-id: (_m = (_l = (_k = self.config).generateId) == null ? void 0 : _l.call(_k)) != null ? _m : (0,
+id: (_m = (_l = (_k = self.config).generateId) == null ? void 0 : _l.call(_k)) != null ? _m : (0, import_provider_utils14.generateId)(),
 url: value.annotation.url,
 title: value.annotation.title
 });
@@ -3049,140 +3182,162 @@ var OpenAIResponsesLanguageModel = class {
 };
 }
 };
-var usageSchema2 =
-input_tokens:
-input_tokens_details:
-output_tokens:
-output_tokens_details:
+var usageSchema2 = import_v416.z.object({
+input_tokens: import_v416.z.number(),
+input_tokens_details: import_v416.z.object({ cached_tokens: import_v416.z.number().nullish() }).nullish(),
+output_tokens: import_v416.z.number(),
+output_tokens_details: import_v416.z.object({ reasoning_tokens: import_v416.z.number().nullish() }).nullish()
 });
-var textDeltaChunkSchema =
-type:
-item_id:
-delta:
+var textDeltaChunkSchema = import_v416.z.object({
+type: import_v416.z.literal("response.output_text.delta"),
+item_id: import_v416.z.string(),
+delta: import_v416.z.string()
 });
-var errorChunkSchema =
-type:
-code:
-message:
-param:
-sequence_number:
+var errorChunkSchema = import_v416.z.object({
+type: import_v416.z.literal("error"),
+code: import_v416.z.string(),
+message: import_v416.z.string(),
+param: import_v416.z.string().nullish(),
+sequence_number: import_v416.z.number()
 });
-var responseFinishedChunkSchema =
-type:
-response:
-incomplete_details:
+var responseFinishedChunkSchema = import_v416.z.object({
+type: import_v416.z.enum(["response.completed", "response.incomplete"]),
+response: import_v416.z.object({
+incomplete_details: import_v416.z.object({ reason: import_v416.z.string() }).nullish(),
 usage: usageSchema2
 })
 });
-var responseCreatedChunkSchema =
-type:
-response:
-id:
-created_at:
-model:
+var responseCreatedChunkSchema = import_v416.z.object({
+type: import_v416.z.literal("response.created"),
+response: import_v416.z.object({
+id: import_v416.z.string(),
+created_at: import_v416.z.number(),
+model: import_v416.z.string()
 })
 });
-var responseOutputItemAddedSchema =
-type:
-output_index:
-item:
-
-type:
-id:
+var responseOutputItemAddedSchema = import_v416.z.object({
+type: import_v416.z.literal("response.output_item.added"),
+output_index: import_v416.z.number(),
+item: import_v416.z.discriminatedUnion("type", [
+import_v416.z.object({
+type: import_v416.z.literal("message"),
+id: import_v416.z.string()
 }),
-
-type:
-id:
-encrypted_content:
+import_v416.z.object({
+type: import_v416.z.literal("reasoning"),
+id: import_v416.z.string(),
+encrypted_content: import_v416.z.string().nullish()
 }),
-
-type:
-id:
-call_id:
-name:
-arguments:
+import_v416.z.object({
+type: import_v416.z.literal("function_call"),
+id: import_v416.z.string(),
+call_id: import_v416.z.string(),
+name: import_v416.z.string(),
+arguments: import_v416.z.string()
 }),
-
-type:
-id:
-status:
+import_v416.z.object({
+type: import_v416.z.literal("web_search_call"),
+id: import_v416.z.string(),
+status: import_v416.z.string()
 }),
-
-type:
-id:
-status:
+import_v416.z.object({
+type: import_v416.z.literal("computer_call"),
+id: import_v416.z.string(),
+status: import_v416.z.string()
 }),
-
-type:
-id:
-status:
+import_v416.z.object({
+type: import_v416.z.literal("file_search_call"),
+id: import_v416.z.string(),
+status: import_v416.z.string(),
+queries: import_v416.z.array(import_v416.z.string()).nullish(),
+results: import_v416.z.array(
+import_v416.z.object({
+attributes: import_v416.z.object({
+file_id: import_v416.z.string(),
+filename: import_v416.z.string(),
+score: import_v416.z.number(),
+text: import_v416.z.string()
+})
+})
+).optional()
 })
 ])
 });
-var responseOutputItemDoneSchema =
-type:
-output_index:
-item:
-
-type:
-id:
+var responseOutputItemDoneSchema = import_v416.z.object({
+type: import_v416.z.literal("response.output_item.done"),
+output_index: import_v416.z.number(),
+item: import_v416.z.discriminatedUnion("type", [
+import_v416.z.object({
+type: import_v416.z.literal("message"),
+id: import_v416.z.string()
 }),
-
-type:
-id:
-encrypted_content:
+import_v416.z.object({
+type: import_v416.z.literal("reasoning"),
+id: import_v416.z.string(),
+encrypted_content: import_v416.z.string().nullish()
 }),
-
-type:
-id:
-call_id:
-name:
-arguments:
-status:
+import_v416.z.object({
+type: import_v416.z.literal("function_call"),
+id: import_v416.z.string(),
+call_id: import_v416.z.string(),
+name: import_v416.z.string(),
+arguments: import_v416.z.string(),
+status: import_v416.z.literal("completed")
 }),
-
-type:
-id:
-status:
+import_v416.z.object({
+type: import_v416.z.literal("web_search_call"),
+id: import_v416.z.string(),
+status: import_v416.z.literal("completed")
 }),
-
-type:
-id:
-status:
+import_v416.z.object({
+type: import_v416.z.literal("computer_call"),
+id: import_v416.z.string(),
+status: import_v416.z.literal("completed")
 }),
-
-type:
-id:
-status:
+import_v416.z.object({
+type: import_v416.z.literal("file_search_call"),
+id: import_v416.z.string(),
+status: import_v416.z.literal("completed"),
+queries: import_v416.z.array(import_v416.z.string()).nullish(),
+results: import_v416.z.array(
+import_v416.z.object({
+attributes: import_v416.z.object({
+file_id: import_v416.z.string(),
+filename: import_v416.z.string(),
+score: import_v416.z.number(),
+text: import_v416.z.string()
+})
+})
+).nullish()
 })
 ])
 });
-var responseFunctionCallArgumentsDeltaSchema =
-type:
-item_id:
-output_index:
-delta:
+var responseFunctionCallArgumentsDeltaSchema = import_v416.z.object({
+type: import_v416.z.literal("response.function_call_arguments.delta"),
+item_id: import_v416.z.string(),
+output_index: import_v416.z.number(),
+delta: import_v416.z.string()
 });
-var responseAnnotationAddedSchema =
-type:
-annotation:
-type:
-url:
-title:
+var responseAnnotationAddedSchema = import_v416.z.object({
+type: import_v416.z.literal("response.output_text.annotation.added"),
+annotation: import_v416.z.object({
+type: import_v416.z.literal("url_citation"),
+url: import_v416.z.string(),
+title: import_v416.z.string()
 })
 });
-var responseReasoningSummaryPartAddedSchema =
-type:
-item_id:
-summary_index:
+var responseReasoningSummaryPartAddedSchema = import_v416.z.object({
+type: import_v416.z.literal("response.reasoning_summary_part.added"),
+item_id: import_v416.z.string(),
+summary_index: import_v416.z.number()
 });
-var responseReasoningSummaryTextDeltaSchema =
-type:
-item_id:
-summary_index:
-delta:
+var responseReasoningSummaryTextDeltaSchema = import_v416.z.object({
+type: import_v416.z.literal("response.reasoning_summary_text.delta"),
+item_id: import_v416.z.string(),
+summary_index: import_v416.z.number(),
+delta: import_v416.z.string()
 });
-var openaiResponsesChunkSchema =
+var openaiResponsesChunkSchema = import_v416.z.union([
 textDeltaChunkSchema,
 responseFinishedChunkSchema,
 responseCreatedChunkSchema,
@@ -3193,7 +3348,7 @@ var openaiResponsesChunkSchema = import_v415.z.union([
 responseReasoningSummaryPartAddedSchema,
 responseReasoningSummaryTextDeltaSchema,
 errorChunkSchema,
-
+import_v416.z.object({ type: import_v416.z.string() }).loose()
 // fallback for unknown chunks
 ]);
 function isTextDeltaChunk(chunk) {
@@ -3259,19 +3414,21 @@ function supportsFlexProcessing2(modelId) {
 function supportsPriorityProcessing2(modelId) {
 return modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
 }
-var openaiResponsesProviderOptionsSchema =
-metadata:
-parallelToolCalls:
-previousResponseId:
-store:
-user:
-reasoningEffort:
-strictJsonSchema:
-instructions:
-reasoningSummary:
-serviceTier:
-include:
-textVerbosity:
+var openaiResponsesProviderOptionsSchema = import_v416.z.object({
+metadata: import_v416.z.any().nullish(),
+parallelToolCalls: import_v416.z.boolean().nullish(),
+previousResponseId: import_v416.z.string().nullish(),
+store: import_v416.z.boolean().nullish(),
+user: import_v416.z.string().nullish(),
+reasoningEffort: import_v416.z.string().nullish(),
+strictJsonSchema: import_v416.z.boolean().nullish(),
+instructions: import_v416.z.string().nullish(),
+reasoningSummary: import_v416.z.string().nullish(),
+serviceTier: import_v416.z.enum(["auto", "flex", "priority"]).nullish(),
+include: import_v416.z.array(import_v416.z.enum(["reasoning.encrypted_content", "file_search_call.results"])).nullish(),
+textVerbosity: import_v416.z.enum(["low", "medium", "high"]).nullish(),
+promptCacheKey: import_v416.z.string().nullish(),
+safetyIdentifier: import_v416.z.string().nullish()
 });
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {