@ai-sdk/openai 3.0.0-beta.17 → 3.0.0-beta.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +10 -0
- package/dist/index.d.mts +38 -65
- package/dist/index.d.ts +38 -65
- package/dist/index.js +1339 -1033
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +1293 -942
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +101 -183
- package/dist/internal/index.d.ts +101 -183
- package/dist/internal/index.js +1336 -1028
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +1305 -953
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +2 -2
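Most of the churn in `dist/index.js` below comes from one mechanical change: schemas that beta.17 built eagerly with `zod/v4` at module load (`import_v43.z.object(...)` and friends) are wrapped in beta.18 behind `lazyValidator`/`zodSchema` (and, for the provider-defined tools, `lazySchema`) from `@ai-sdk/provider-utils`, so the zod objects are only constructed on first use. A minimal sketch of the pattern, using only the helpers visible in this diff; the schema shape is illustrative, not the package's full schema:

```ts
import { lazyValidator, zodSchema } from '@ai-sdk/provider-utils';
import * as z from 'zod/v4';

// beta.17 style: the zod object is built as soon as the module is loaded.
const eagerErrorSchema = z.object({
  error: z.object({ message: z.string() }),
});

// beta.18 style: construction is deferred until the validator is first used.
const lazyErrorSchema = lazyValidator(() =>
  zodSchema(
    z.object({
      error: z.object({ message: z.string() }),
    }),
  ),
);
```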
package/dist/index.js
CHANGED
@@ -1,7 +1,9 @@
 "use strict";
+var __create = Object.create;
 var __defProp = Object.defineProperty;
 var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
 var __hasOwnProp = Object.prototype.hasOwnProperty;
 var __export = (target, all) => {
   for (var name in all)
@@ -15,6 +17,14 @@ var __copyProps = (to, from, except, desc) => {
   }
   return to;
 };
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

 // src/index.ts
@@ -27,25 +37,24 @@ __export(src_exports, {
 module.exports = __toCommonJS(src_exports);

 // src/openai-provider.ts
-var
+var import_provider_utils30 = require("@ai-sdk/provider-utils");

 // src/chat/openai-chat-language-model.ts
 var import_provider3 = require("@ai-sdk/provider");
-var
-var import_v43 = require("zod/v4");
+var import_provider_utils5 = require("@ai-sdk/provider-utils");

 // src/openai-error.ts
-var
+var z = __toESM(require("zod/v4"));
 var import_provider_utils = require("@ai-sdk/provider-utils");
-var openaiErrorDataSchema =
-  error:
-    message:
+var openaiErrorDataSchema = z.object({
+  error: z.object({
+    message: z.string(),
     // The additional information below is handled loosely to support
     // OpenAI-compatible providers that have slightly different error
     // responses:
-    type:
-    param:
-    code:
+    type: z.string().nullish(),
+    param: z.any().nullish(),
+    code: z.union([z.string(), z.number()]).nullish()
   })
 });
 var openaiFailedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)({
@@ -265,95 +274,238 @@ function mapOpenAIFinishReason(finishReason) {
   }
 }

+// src/chat/openai-chat-api.ts
+var import_provider_utils3 = require("@ai-sdk/provider-utils");
+var z2 = __toESM(require("zod/v4"));
+var openaiChatResponseSchema = (0, import_provider_utils3.lazyValidator)(
+  () => (0, import_provider_utils3.zodSchema)(
+    z2.object({
+      id: z2.string().nullish(),
+      created: z2.number().nullish(),
+      model: z2.string().nullish(),
+      choices: z2.array(
+        z2.object({
+          message: z2.object({
+            role: z2.literal("assistant").nullish(),
+            content: z2.string().nullish(),
+            tool_calls: z2.array(
+              z2.object({
+                id: z2.string().nullish(),
+                type: z2.literal("function"),
+                function: z2.object({
+                  name: z2.string(),
+                  arguments: z2.string()
+                })
+              })
+            ).nullish(),
+            annotations: z2.array(
+              z2.object({
+                type: z2.literal("url_citation"),
+                start_index: z2.number(),
+                end_index: z2.number(),
+                url: z2.string(),
+                title: z2.string()
+              })
+            ).nullish()
+          }),
+          index: z2.number(),
+          logprobs: z2.object({
+            content: z2.array(
+              z2.object({
+                token: z2.string(),
+                logprob: z2.number(),
+                top_logprobs: z2.array(
+                  z2.object({
+                    token: z2.string(),
+                    logprob: z2.number()
+                  })
+                )
+              })
+            ).nullish()
+          }).nullish(),
+          finish_reason: z2.string().nullish()
+        })
+      ),
+      usage: z2.object({
+        prompt_tokens: z2.number().nullish(),
+        completion_tokens: z2.number().nullish(),
+        total_tokens: z2.number().nullish(),
+        prompt_tokens_details: z2.object({
+          cached_tokens: z2.number().nullish()
+        }).nullish(),
+        completion_tokens_details: z2.object({
+          reasoning_tokens: z2.number().nullish(),
+          accepted_prediction_tokens: z2.number().nullish(),
+          rejected_prediction_tokens: z2.number().nullish()
+        }).nullish()
+      }).nullish()
+    })
+  )
+);
+var openaiChatChunkSchema = (0, import_provider_utils3.lazyValidator)(
+  () => (0, import_provider_utils3.zodSchema)(
+    z2.union([
+      z2.object({
+        id: z2.string().nullish(),
+        created: z2.number().nullish(),
+        model: z2.string().nullish(),
+        choices: z2.array(
+          z2.object({
+            delta: z2.object({
+              role: z2.enum(["assistant"]).nullish(),
+              content: z2.string().nullish(),
+              tool_calls: z2.array(
+                z2.object({
+                  index: z2.number(),
+                  id: z2.string().nullish(),
+                  type: z2.literal("function").nullish(),
+                  function: z2.object({
+                    name: z2.string().nullish(),
+                    arguments: z2.string().nullish()
+                  })
+                })
+              ).nullish(),
+              annotations: z2.array(
+                z2.object({
+                  type: z2.literal("url_citation"),
+                  start_index: z2.number(),
+                  end_index: z2.number(),
+                  url: z2.string(),
+                  title: z2.string()
+                })
+              ).nullish()
+            }).nullish(),
+            logprobs: z2.object({
+              content: z2.array(
+                z2.object({
+                  token: z2.string(),
+                  logprob: z2.number(),
+                  top_logprobs: z2.array(
+                    z2.object({
+                      token: z2.string(),
+                      logprob: z2.number()
+                    })
+                  )
+                })
+              ).nullish()
+            }).nullish(),
+            finish_reason: z2.string().nullish(),
+            index: z2.number()
+          })
+        ),
+        usage: z2.object({
+          prompt_tokens: z2.number().nullish(),
+          completion_tokens: z2.number().nullish(),
+          total_tokens: z2.number().nullish(),
+          prompt_tokens_details: z2.object({
+            cached_tokens: z2.number().nullish()
+          }).nullish(),
+          completion_tokens_details: z2.object({
+            reasoning_tokens: z2.number().nullish(),
+            accepted_prediction_tokens: z2.number().nullish(),
+            rejected_prediction_tokens: z2.number().nullish()
+          }).nullish()
+        }).nullish()
+      }),
+      openaiErrorDataSchema
+    ])
+  )
+);
+
 // src/chat/openai-chat-options.ts
-var
-var
-… (86 removed lines, old 271-356; content not captured in this diff view)
+var import_provider_utils4 = require("@ai-sdk/provider-utils");
+var z3 = __toESM(require("zod/v4"));
+var openaiChatLanguageModelOptions = (0, import_provider_utils4.lazyValidator)(
+  () => (0, import_provider_utils4.zodSchema)(
+    z3.object({
+      /**
+       * Modify the likelihood of specified tokens appearing in the completion.
+       *
+       * Accepts a JSON object that maps tokens (specified by their token ID in
+       * the GPT tokenizer) to an associated bias value from -100 to 100.
+       */
+      logitBias: z3.record(z3.coerce.number(), z3.number()).optional(),
+      /**
+       * Return the log probabilities of the tokens.
+       *
+       * Setting to true will return the log probabilities of the tokens that
+       * were generated.
+       *
+       * Setting to a number will return the log probabilities of the top n
+       * tokens that were generated.
+       */
+      logprobs: z3.union([z3.boolean(), z3.number()]).optional(),
+      /**
+       * Whether to enable parallel function calling during tool use. Default to true.
+       */
+      parallelToolCalls: z3.boolean().optional(),
+      /**
+       * A unique identifier representing your end-user, which can help OpenAI to
+       * monitor and detect abuse.
+       */
+      user: z3.string().optional(),
+      /**
+       * Reasoning effort for reasoning models. Defaults to `medium`.
+       */
+      reasoningEffort: z3.enum(["minimal", "low", "medium", "high"]).optional(),
+      /**
+       * Maximum number of completion tokens to generate. Useful for reasoning models.
+       */
+      maxCompletionTokens: z3.number().optional(),
+      /**
+       * Whether to enable persistence in responses API.
+       */
+      store: z3.boolean().optional(),
+      /**
+       * Metadata to associate with the request.
+       */
+      metadata: z3.record(z3.string().max(64), z3.string().max(512)).optional(),
+      /**
+       * Parameters for prediction mode.
+       */
+      prediction: z3.record(z3.string(), z3.any()).optional(),
+      /**
+       * Whether to use structured outputs.
+       *
+       * @default true
+       */
+      structuredOutputs: z3.boolean().optional(),
+      /**
+       * Service tier for the request.
+       * - 'auto': Default service tier
+       * - 'flex': 50% cheaper processing at the cost of increased latency. Only available for o3 and o4-mini models.
+       * - 'priority': Higher-speed processing with predictably low latency at premium cost. Available for Enterprise customers.
+       *
+       * @default 'auto'
+       */
+      serviceTier: z3.enum(["auto", "flex", "priority"]).optional(),
+      /**
+       * Whether to use strict JSON schema validation.
+       *
+       * @default false
+       */
+      strictJsonSchema: z3.boolean().optional(),
+      /**
+       * Controls the verbosity of the model's responses.
+       * Lower values will result in more concise responses, while higher values will result in more verbose responses.
+       */
+      textVerbosity: z3.enum(["low", "medium", "high"]).optional(),
+      /**
+       * A cache key for prompt caching. Allows manual control over prompt caching behavior.
+       * Useful for improving cache hit rates and working around automatic caching issues.
+       */
+      promptCacheKey: z3.string().optional(),
+      /**
+       * A stable identifier used to help detect users of your application
+       * that may be violating OpenAI's usage policies. The IDs should be a
+       * string that uniquely identifies each user. We recommend hashing their
+       * username or email address, in order to avoid sending us any identifying
+       * information.
+       */
+      safetyIdentifier: z3.string().optional()
+    })
+  )
+);

 // src/chat/openai-chat-prepare-tools.ts
 var import_provider2 = require("@ai-sdk/provider");
@@ -446,7 +598,7 @@ var OpenAIChatLanguageModel = class {
   }) {
     var _a, _b, _c, _d;
     const warnings = [];
-    const openaiOptions = (_a = await (0,
+    const openaiOptions = (_a = await (0, import_provider_utils5.parseProviderOptions)({
       provider: "openai",
       providerOptions,
       schema: openaiChatLanguageModelOptions
@@ -625,15 +777,15 @@ var OpenAIChatLanguageModel = class {
       responseHeaders,
       value: response,
       rawValue: rawResponse
-    } = await (0,
+    } = await (0, import_provider_utils5.postJsonToApi)({
       url: this.config.url({
         path: "/chat/completions",
         modelId: this.modelId
       }),
-      headers: (0,
+      headers: (0, import_provider_utils5.combineHeaders)(this.config.headers(), options.headers),
       body,
       failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils5.createJsonResponseHandler)(
        openaiChatResponseSchema
      ),
      abortSignal: options.abortSignal,
@@ -648,7 +800,7 @@ var OpenAIChatLanguageModel = class {
     for (const toolCall of (_a = choice.message.tool_calls) != null ? _a : []) {
       content.push({
         type: "tool-call",
-        toolCallId: (_b = toolCall.id) != null ? _b : (0,
+        toolCallId: (_b = toolCall.id) != null ? _b : (0, import_provider_utils5.generateId)(),
         toolName: toolCall.function.name,
         input: toolCall.function.arguments
       });
@@ -657,7 +809,7 @@ var OpenAIChatLanguageModel = class {
       content.push({
         type: "source",
         sourceType: "url",
-        id: (0,
+        id: (0, import_provider_utils5.generateId)(),
         url: annotation.url,
         title: annotation.title
       });
@@ -703,15 +855,15 @@ var OpenAIChatLanguageModel = class {
         include_usage: true
       }
     };
-    const { responseHeaders, value: response } = await (0,
+    const { responseHeaders, value: response } = await (0, import_provider_utils5.postJsonToApi)({
       url: this.config.url({
         path: "/chat/completions",
         modelId: this.modelId
       }),
-      headers: (0,
+      headers: (0, import_provider_utils5.combineHeaders)(this.config.headers(), options.headers),
       body,
       failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils5.createEventSourceResponseHandler)(
        openaiChatChunkSchema
      ),
      abortSignal: options.abortSignal,
@@ -836,14 +988,14 @@ var OpenAIChatLanguageModel = class {
              delta: toolCall2.function.arguments
            });
          }
-         if ((0,
+         if ((0, import_provider_utils5.isParsableJson)(toolCall2.function.arguments)) {
            controller.enqueue({
              type: "tool-input-end",
              id: toolCall2.id
            });
            controller.enqueue({
              type: "tool-call",
-             toolCallId: (_q = toolCall2.id) != null ? _q : (0,
+             toolCallId: (_q = toolCall2.id) != null ? _q : (0, import_provider_utils5.generateId)(),
              toolName: toolCall2.function.name,
              input: toolCall2.function.arguments
            });
@@ -864,14 +1016,14 @@ var OpenAIChatLanguageModel = class {
              id: toolCall.id,
              delta: (_u = toolCallDelta.function.arguments) != null ? _u : ""
            });
-           if (((_v = toolCall.function) == null ? void 0 : _v.name) != null && ((_w = toolCall.function) == null ? void 0 : _w.arguments) != null && (0,
+           if (((_v = toolCall.function) == null ? void 0 : _v.name) != null && ((_w = toolCall.function) == null ? void 0 : _w.arguments) != null && (0, import_provider_utils5.isParsableJson)(toolCall.function.arguments)) {
             controller.enqueue({
               type: "tool-input-end",
               id: toolCall.id
             });
             controller.enqueue({
               type: "tool-call",
-              toolCallId: (_x = toolCall.id) != null ? _x : (0,
+              toolCallId: (_x = toolCall.id) != null ? _x : (0, import_provider_utils5.generateId)(),
               toolName: toolCall.function.name,
               input: toolCall.function.arguments
             });
@@ -884,7 +1036,7 @@ var OpenAIChatLanguageModel = class {
            controller.enqueue({
              type: "source",
              sourceType: "url",
-             id: (0,
+             id: (0, import_provider_utils5.generateId)(),
              url: annotation.url,
              title: annotation.title
            });
@@ -909,121 +1061,6 @@ var OpenAIChatLanguageModel = class {
    };
  }
 };
-var openaiTokenUsageSchema = import_v43.z.object({
-  prompt_tokens: import_v43.z.number().nullish(),
-  completion_tokens: import_v43.z.number().nullish(),
-  total_tokens: import_v43.z.number().nullish(),
-  prompt_tokens_details: import_v43.z.object({
-    cached_tokens: import_v43.z.number().nullish()
-  }).nullish(),
-  completion_tokens_details: import_v43.z.object({
-    reasoning_tokens: import_v43.z.number().nullish(),
-    accepted_prediction_tokens: import_v43.z.number().nullish(),
-    rejected_prediction_tokens: import_v43.z.number().nullish()
-  }).nullish()
-}).nullish();
-var openaiChatResponseSchema = import_v43.z.object({
-  id: import_v43.z.string().nullish(),
-  created: import_v43.z.number().nullish(),
-  model: import_v43.z.string().nullish(),
-  choices: import_v43.z.array(
-    import_v43.z.object({
-      message: import_v43.z.object({
-        role: import_v43.z.literal("assistant").nullish(),
-        content: import_v43.z.string().nullish(),
-        tool_calls: import_v43.z.array(
-          import_v43.z.object({
-            id: import_v43.z.string().nullish(),
-            type: import_v43.z.literal("function"),
-            function: import_v43.z.object({
-              name: import_v43.z.string(),
-              arguments: import_v43.z.string()
-            })
-          })
-        ).nullish(),
-        annotations: import_v43.z.array(
-          import_v43.z.object({
-            type: import_v43.z.literal("url_citation"),
-            start_index: import_v43.z.number(),
-            end_index: import_v43.z.number(),
-            url: import_v43.z.string(),
-            title: import_v43.z.string()
-          })
-        ).nullish()
-      }),
-      index: import_v43.z.number(),
-      logprobs: import_v43.z.object({
-        content: import_v43.z.array(
-          import_v43.z.object({
-            token: import_v43.z.string(),
-            logprob: import_v43.z.number(),
-            top_logprobs: import_v43.z.array(
-              import_v43.z.object({
-                token: import_v43.z.string(),
-                logprob: import_v43.z.number()
-              })
-            )
-          })
-        ).nullish()
-      }).nullish(),
-      finish_reason: import_v43.z.string().nullish()
-    })
-  ),
-  usage: openaiTokenUsageSchema
-});
-var openaiChatChunkSchema = import_v43.z.union([
-  import_v43.z.object({
-    id: import_v43.z.string().nullish(),
-    created: import_v43.z.number().nullish(),
-    model: import_v43.z.string().nullish(),
-    choices: import_v43.z.array(
-      import_v43.z.object({
-        delta: import_v43.z.object({
-          role: import_v43.z.enum(["assistant"]).nullish(),
-          content: import_v43.z.string().nullish(),
-          tool_calls: import_v43.z.array(
-            import_v43.z.object({
-              index: import_v43.z.number(),
-              id: import_v43.z.string().nullish(),
-              type: import_v43.z.literal("function").nullish(),
-              function: import_v43.z.object({
-                name: import_v43.z.string().nullish(),
-                arguments: import_v43.z.string().nullish()
-              })
-            })
-          ).nullish(),
-          annotations: import_v43.z.array(
-            import_v43.z.object({
-              type: import_v43.z.literal("url_citation"),
-              start_index: import_v43.z.number(),
-              end_index: import_v43.z.number(),
-              url: import_v43.z.string(),
-              title: import_v43.z.string()
-            })
-          ).nullish()
-        }).nullish(),
-        logprobs: import_v43.z.object({
-          content: import_v43.z.array(
-            import_v43.z.object({
-              token: import_v43.z.string(),
-              logprob: import_v43.z.number(),
-              top_logprobs: import_v43.z.array(
-                import_v43.z.object({
-                  token: import_v43.z.string(),
-                  logprob: import_v43.z.number()
-                })
-              )
-            })
-          ).nullish()
-        }).nullish(),
-        finish_reason: import_v43.z.string().nullish(),
-        index: import_v43.z.number()
-      })
-    ),
-    usage: openaiTokenUsageSchema
-  }),
-  openaiErrorDataSchema
-]);
 function isReasoningModel(modelId) {
   return (modelId.startsWith("o") || modelId.startsWith("gpt-5")) && !modelId.startsWith("gpt-5-chat");
 }
@@ -1074,8 +1111,7 @@ var reasoningModels = {
 };

 // src/completion/openai-completion-language-model.ts
-var
-var import_v45 = require("zod/v4");
+var import_provider_utils8 = require("@ai-sdk/provider-utils");

 // src/completion/convert-to-openai-completion-prompt.ts
 var import_provider4 = require("@ai-sdk/provider");
@@ -1182,48 +1218,111 @@ function mapOpenAIFinishReason2(finishReason) {
   }
 }

+// src/completion/openai-completion-api.ts
+var z4 = __toESM(require("zod/v4"));
+var import_provider_utils6 = require("@ai-sdk/provider-utils");
+var openaiCompletionResponseSchema = (0, import_provider_utils6.lazyValidator)(
+  () => (0, import_provider_utils6.zodSchema)(
+    z4.object({
+      id: z4.string().nullish(),
+      created: z4.number().nullish(),
+      model: z4.string().nullish(),
+      choices: z4.array(
+        z4.object({
+          text: z4.string(),
+          finish_reason: z4.string(),
+          logprobs: z4.object({
+            tokens: z4.array(z4.string()),
+            token_logprobs: z4.array(z4.number()),
+            top_logprobs: z4.array(z4.record(z4.string(), z4.number())).nullish()
+          }).nullish()
+        })
+      ),
+      usage: z4.object({
+        prompt_tokens: z4.number(),
+        completion_tokens: z4.number(),
+        total_tokens: z4.number()
+      }).nullish()
+    })
+  )
+);
+var openaiCompletionChunkSchema = (0, import_provider_utils6.lazyValidator)(
+  () => (0, import_provider_utils6.zodSchema)(
+    z4.union([
+      z4.object({
+        id: z4.string().nullish(),
+        created: z4.number().nullish(),
+        model: z4.string().nullish(),
+        choices: z4.array(
+          z4.object({
+            text: z4.string(),
+            finish_reason: z4.string().nullish(),
+            index: z4.number(),
+            logprobs: z4.object({
+              tokens: z4.array(z4.string()),
+              token_logprobs: z4.array(z4.number()),
+              top_logprobs: z4.array(z4.record(z4.string(), z4.number())).nullish()
+            }).nullish()
+          })
+        ),
+        usage: z4.object({
+          prompt_tokens: z4.number(),
+          completion_tokens: z4.number(),
+          total_tokens: z4.number()
+        }).nullish()
+      }),
+      openaiErrorDataSchema
+    ])
+  )
+);
+
 // src/completion/openai-completion-options.ts
-var
-var
-… (39 removed lines, old 1188-1226; content not captured in this diff view)
+var import_provider_utils7 = require("@ai-sdk/provider-utils");
+var z5 = __toESM(require("zod/v4"));
+var openaiCompletionProviderOptions = (0, import_provider_utils7.lazyValidator)(
+  () => (0, import_provider_utils7.zodSchema)(
+    z5.object({
+      /**
+      Echo back the prompt in addition to the completion.
+       */
+      echo: z5.boolean().optional(),
+      /**
+      Modify the likelihood of specified tokens appearing in the completion.
+
+      Accepts a JSON object that maps tokens (specified by their token ID in
+      the GPT tokenizer) to an associated bias value from -100 to 100. You
+      can use this tokenizer tool to convert text to token IDs. Mathematically,
+      the bias is added to the logits generated by the model prior to sampling.
+      The exact effect will vary per model, but values between -1 and 1 should
+      decrease or increase likelihood of selection; values like -100 or 100
+      should result in a ban or exclusive selection of the relevant token.
+
+      As an example, you can pass {"50256": -100} to prevent the <|endoftext|>
+      token from being generated.
+       */
+      logitBias: z5.record(z5.string(), z5.number()).optional(),
+      /**
+      The suffix that comes after a completion of inserted text.
+       */
+      suffix: z5.string().optional(),
+      /**
+      A unique identifier representing your end-user, which can help OpenAI to
+      monitor and detect abuse. Learn more.
+       */
+      user: z5.string().optional(),
+      /**
+      Return the log probabilities of the tokens. Including logprobs will increase
+      the response size and can slow down response times. However, it can
+      be useful to better understand how the model is behaving.
+      Setting to true will return the log probabilities of the tokens that
+      were generated.
+      Setting to a number will return the log probabilities of the top n
+      tokens that were generated.
+       */
+      logprobs: z5.union([z5.boolean(), z5.number()]).optional()
+    })
+  )
+);

 // src/completion/openai-completion-language-model.ts
 var OpenAICompletionLanguageModel = class {
@@ -1258,12 +1357,12 @@ var OpenAICompletionLanguageModel = class {
   }) {
     const warnings = [];
     const openaiOptions = {
-      ...await (0,
+      ...await (0, import_provider_utils8.parseProviderOptions)({
        provider: "openai",
        providerOptions,
        schema: openaiCompletionProviderOptions
      }),
-      ...await (0,
+      ...await (0, import_provider_utils8.parseProviderOptions)({
        provider: this.providerOptionsName,
        providerOptions,
        schema: openaiCompletionProviderOptions
@@ -1319,15 +1418,15 @@ var OpenAICompletionLanguageModel = class {
       responseHeaders,
       value: response,
       rawValue: rawResponse
-    } = await (0,
+    } = await (0, import_provider_utils8.postJsonToApi)({
       url: this.config.url({
         path: "/completions",
         modelId: this.modelId
       }),
-      headers: (0,
+      headers: (0, import_provider_utils8.combineHeaders)(this.config.headers(), options.headers),
       body: args,
       failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils8.createJsonResponseHandler)(
        openaiCompletionResponseSchema
      ),
      abortSignal: options.abortSignal,
@@ -1365,15 +1464,15 @@ var OpenAICompletionLanguageModel = class {
         include_usage: true
       }
     };
-    const { responseHeaders, value: response } = await (0,
+    const { responseHeaders, value: response } = await (0, import_provider_utils8.postJsonToApi)({
       url: this.config.url({
         path: "/completions",
         modelId: this.modelId
       }),
-      headers: (0,
+      headers: (0, import_provider_utils8.combineHeaders)(this.config.headers(), options.headers),
       body,
       failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils8.createEventSourceResponseHandler)(
        openaiCompletionChunkSchema
      ),
      abortSignal: options.abortSignal,
@@ -1454,69 +1553,42 @@ var OpenAICompletionLanguageModel = class {
    };
  }
 };
-var usageSchema = import_v45.z.object({
-  prompt_tokens: import_v45.z.number(),
-  completion_tokens: import_v45.z.number(),
-  total_tokens: import_v45.z.number()
-});
-var openaiCompletionResponseSchema = import_v45.z.object({
-  id: import_v45.z.string().nullish(),
-  created: import_v45.z.number().nullish(),
-  model: import_v45.z.string().nullish(),
-  choices: import_v45.z.array(
-    import_v45.z.object({
-      text: import_v45.z.string(),
-      finish_reason: import_v45.z.string(),
-      logprobs: import_v45.z.object({
-        tokens: import_v45.z.array(import_v45.z.string()),
-        token_logprobs: import_v45.z.array(import_v45.z.number()),
-        top_logprobs: import_v45.z.array(import_v45.z.record(import_v45.z.string(), import_v45.z.number())).nullish()
-      }).nullish()
-    })
-  ),
-  usage: usageSchema.nullish()
-});
-var openaiCompletionChunkSchema = import_v45.z.union([
-  import_v45.z.object({
-    id: import_v45.z.string().nullish(),
-    created: import_v45.z.number().nullish(),
-    model: import_v45.z.string().nullish(),
-    choices: import_v45.z.array(
-      import_v45.z.object({
-        text: import_v45.z.string(),
-        finish_reason: import_v45.z.string().nullish(),
-        index: import_v45.z.number(),
-        logprobs: import_v45.z.object({
-          tokens: import_v45.z.array(import_v45.z.string()),
-          token_logprobs: import_v45.z.array(import_v45.z.number()),
-          top_logprobs: import_v45.z.array(import_v45.z.record(import_v45.z.string(), import_v45.z.number())).nullish()
-        }).nullish()
-      })
-    ),
-    usage: usageSchema.nullish()
-  }),
-  openaiErrorDataSchema
-]);

 // src/embedding/openai-embedding-model.ts
 var import_provider5 = require("@ai-sdk/provider");
-var
-var import_v47 = require("zod/v4");
+var import_provider_utils11 = require("@ai-sdk/provider-utils");

 // src/embedding/openai-embedding-options.ts
-var
-var
-… (11 removed lines, old 1509-1519; content not captured in this diff view)
+var import_provider_utils9 = require("@ai-sdk/provider-utils");
+var z6 = __toESM(require("zod/v4"));
+var openaiEmbeddingProviderOptions = (0, import_provider_utils9.lazyValidator)(
+  () => (0, import_provider_utils9.zodSchema)(
+    z6.object({
+      /**
+      The number of dimensions the resulting output embeddings should have.
+      Only supported in text-embedding-3 and later models.
+       */
+      dimensions: z6.number().optional(),
+      /**
+      A unique identifier representing your end-user, which can help OpenAI to
+      monitor and detect abuse. Learn more.
+       */
+      user: z6.string().optional()
+    })
+  )
+);
+
+// src/embedding/openai-embedding-api.ts
+var import_provider_utils10 = require("@ai-sdk/provider-utils");
+var z7 = __toESM(require("zod/v4"));
+var openaiTextEmbeddingResponseSchema = (0, import_provider_utils10.lazyValidator)(
+  () => (0, import_provider_utils10.zodSchema)(
+    z7.object({
+      data: z7.array(z7.object({ embedding: z7.array(z7.number()) })),
+      usage: z7.object({ prompt_tokens: z7.number() }).nullish()
+    })
+  )
+);

 // src/embedding/openai-embedding-model.ts
 var OpenAIEmbeddingModel = class {
@@ -1545,7 +1617,7 @@ var OpenAIEmbeddingModel = class {
        values
      });
    }
-    const openaiOptions = (_a = await (0,
+    const openaiOptions = (_a = await (0, import_provider_utils11.parseProviderOptions)({
      provider: "openai",
      providerOptions,
      schema: openaiEmbeddingProviderOptions
@@ -1554,12 +1626,12 @@ var OpenAIEmbeddingModel = class {
      responseHeaders,
      value: response,
      rawValue
-    } = await (0,
+    } = await (0, import_provider_utils11.postJsonToApi)({
      url: this.config.url({
        path: "/embeddings",
        modelId: this.modelId
      }),
-      headers: (0,
+      headers: (0, import_provider_utils11.combineHeaders)(this.config.headers(), headers),
      body: {
        model: this.modelId,
        input: values,
@@ -1568,7 +1640,7 @@ var OpenAIEmbeddingModel = class {
        user: openaiOptions.user
      },
      failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils11.createJsonResponseHandler)(
        openaiTextEmbeddingResponseSchema
      ),
      abortSignal,
@@ -1581,14 +1653,25 @@ var OpenAIEmbeddingModel = class {
    };
  }
 };
-var openaiTextEmbeddingResponseSchema = import_v47.z.object({
-  data: import_v47.z.array(import_v47.z.object({ embedding: import_v47.z.array(import_v47.z.number()) })),
-  usage: import_v47.z.object({ prompt_tokens: import_v47.z.number() }).nullish()
-});

 // src/image/openai-image-model.ts
-var
-
+var import_provider_utils13 = require("@ai-sdk/provider-utils");
+
+// src/image/openai-image-api.ts
+var import_provider_utils12 = require("@ai-sdk/provider-utils");
+var z8 = __toESM(require("zod/v4"));
+var openaiImageResponseSchema = (0, import_provider_utils12.lazyValidator)(
+  () => (0, import_provider_utils12.zodSchema)(
+    z8.object({
+      data: z8.array(
+        z8.object({
+          b64_json: z8.string(),
+          revised_prompt: z8.string().optional()
+        })
+      )
+    })
+  )
+);

 // src/image/openai-image-options.ts
 var modelMaxImagesPerCall = {
@@ -1639,12 +1722,12 @@ var OpenAIImageModel = class {
      warnings.push({ type: "unsupported-setting", setting: "seed" });
    }
    const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
-    const { value: response, responseHeaders } = await (0,
+    const { value: response, responseHeaders } = await (0, import_provider_utils13.postJsonToApi)({
      url: this.config.url({
        path: "/images/generations",
        modelId: this.modelId
      }),
-      headers: (0,
+      headers: (0, import_provider_utils13.combineHeaders)(this.config.headers(), headers),
      body: {
        model: this.modelId,
        prompt,
@@ -1654,7 +1737,7 @@ var OpenAIImageModel = class {
        ...!hasDefaultResponseFormat.has(this.modelId) ? { response_format: "b64_json" } : {}
      },
      failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils13.createJsonResponseHandler)(
        openaiImageResponseSchema
      ),
      abortSignal,
@@ -1680,36 +1763,43 @@ var OpenAIImageModel = class {
    };
  }
 };
-var openaiImageResponseSchema = import_v48.z.object({
-  data: import_v48.z.array(
-    import_v48.z.object({ b64_json: import_v48.z.string(), revised_prompt: import_v48.z.string().optional() })
-  )
-});

 // src/tool/code-interpreter.ts
-var
-var
-var codeInterpreterInputSchema =
-… (4 removed lines, old 1693-1696; content not captured in this diff view)
-  outputs: import_v49.z.array(
-    import_v49.z.discriminatedUnion("type", [
-      import_v49.z.object({ type: import_v49.z.literal("logs"), logs: import_v49.z.string() }),
-      import_v49.z.object({ type: import_v49.z.literal("image"), url: import_v49.z.string() })
-    ])
-  ).nullish()
-});
-var codeInterpreterArgsSchema = import_v49.z.object({
-  container: import_v49.z.union([
-    import_v49.z.string(),
-    import_v49.z.object({
-      fileIds: import_v49.z.array(import_v49.z.string()).optional()
+var import_provider_utils14 = require("@ai-sdk/provider-utils");
+var z9 = __toESM(require("zod/v4"));
+var codeInterpreterInputSchema = (0, import_provider_utils14.lazySchema)(
+  () => (0, import_provider_utils14.zodSchema)(
+    z9.object({
+      code: z9.string().nullish(),
+      containerId: z9.string()
     })
-
-
-var
+  )
+);
+var codeInterpreterOutputSchema = (0, import_provider_utils14.lazySchema)(
+  () => (0, import_provider_utils14.zodSchema)(
+    z9.object({
+      outputs: z9.array(
+        z9.discriminatedUnion("type", [
+          z9.object({ type: z9.literal("logs"), logs: z9.string() }),
+          z9.object({ type: z9.literal("image"), url: z9.string() })
+        ])
+      ).nullish()
+    })
+  )
+);
+var codeInterpreterArgsSchema = (0, import_provider_utils14.lazySchema)(
+  () => (0, import_provider_utils14.zodSchema)(
+    z9.object({
+      container: z9.union([
+        z9.string(),
+        z9.object({
+          fileIds: z9.array(z9.string()).optional()
+        })
+      ]).optional()
+    })
+  )
+);
+var codeInterpreterToolFactory = (0, import_provider_utils14.createProviderDefinedToolFactoryWithOutputSchema)({
  id: "openai.code_interpreter",
  name: "code_interpreter",
  inputSchema: codeInterpreterInputSchema,
@@ -1720,72 +1810,85 @@ var codeInterpreter = (args = {}) => {
 };

 // src/tool/file-search.ts
-var
-var
-var comparisonFilterSchema =
-  key:
-  type:
-  value:
+var import_provider_utils15 = require("@ai-sdk/provider-utils");
+var z10 = __toESM(require("zod/v4"));
+var comparisonFilterSchema = z10.object({
+  key: z10.string(),
+  type: z10.enum(["eq", "ne", "gt", "gte", "lt", "lte"]),
+  value: z10.union([z10.string(), z10.number(), z10.boolean()])
 });
-var compoundFilterSchema =
-  type:
-  filters:
-
+var compoundFilterSchema = z10.object({
+  type: z10.enum(["and", "or"]),
+  filters: z10.array(
+    z10.union([comparisonFilterSchema, z10.lazy(() => compoundFilterSchema)])
  )
 });
-var fileSearchArgsSchema =
-… (7 removed lines, old 1737-1743; content not captured in this diff view)
-  })
-
-  queries: import_v410.z.array(import_v410.z.string()),
-  results: import_v410.z.array(
-    import_v410.z.object({
-      attributes: import_v410.z.record(import_v410.z.string(), import_v410.z.unknown()),
-      fileId: import_v410.z.string(),
-      filename: import_v410.z.string(),
-      score: import_v410.z.number(),
-      text: import_v410.z.string()
+var fileSearchArgsSchema = (0, import_provider_utils15.lazySchema)(
+  () => (0, import_provider_utils15.zodSchema)(
+    z10.object({
+      vectorStoreIds: z10.array(z10.string()),
+      maxNumResults: z10.number().optional(),
+      ranking: z10.object({
+        ranker: z10.string().optional(),
+        scoreThreshold: z10.number().optional()
+      }).optional(),
+      filters: z10.union([comparisonFilterSchema, compoundFilterSchema]).optional()
     })
-  )
-
-var
+  )
+);
+var fileSearchOutputSchema = (0, import_provider_utils15.lazySchema)(
+  () => (0, import_provider_utils15.zodSchema)(
+    z10.object({
+      queries: z10.array(z10.string()),
+      results: z10.array(
+        z10.object({
+          attributes: z10.record(z10.string(), z10.unknown()),
+          fileId: z10.string(),
+          filename: z10.string(),
+          score: z10.number(),
+          text: z10.string()
+        })
+      ).nullable()
+    })
+  )
+);
+var fileSearch = (0, import_provider_utils15.createProviderDefinedToolFactoryWithOutputSchema)({
  id: "openai.file_search",
  name: "file_search",
-  inputSchema:
+  inputSchema: z10.object({}),
  outputSchema: fileSearchOutputSchema
 });

 // src/tool/image-generation.ts
-var
-var
-var imageGenerationArgsSchema =
-… (18 removed lines, old 1768-1785; content not captured in this diff view)
+var import_provider_utils16 = require("@ai-sdk/provider-utils");
+var z11 = __toESM(require("zod/v4"));
+var imageGenerationArgsSchema = (0, import_provider_utils16.lazySchema)(
+  () => (0, import_provider_utils16.zodSchema)(
+    z11.object({
+      background: z11.enum(["auto", "opaque", "transparent"]).optional(),
+      inputFidelity: z11.enum(["low", "high"]).optional(),
+      inputImageMask: z11.object({
+        fileId: z11.string().optional(),
+        imageUrl: z11.string().optional()
+      }).optional(),
+      model: z11.string().optional(),
+      moderation: z11.enum(["auto"]).optional(),
+      outputCompression: z11.number().int().min(0).max(100).optional(),
+      outputFormat: z11.enum(["png", "jpeg", "webp"]).optional(),
+      partialImages: z11.number().int().min(0).max(3).optional(),
+      quality: z11.enum(["auto", "low", "medium", "high"]).optional(),
+      size: z11.enum(["1024x1024", "1024x1536", "1536x1024", "auto"]).optional()
+    }).strict()
+  )
+);
+var imageGenerationInputSchema = (0, import_provider_utils16.lazySchema)(() => (0, import_provider_utils16.zodSchema)(z11.object({})));
+var imageGenerationOutputSchema = (0, import_provider_utils16.lazySchema)(
+  () => (0, import_provider_utils16.zodSchema)(z11.object({ result: z11.string() }))
+);
+var imageGenerationToolFactory = (0, import_provider_utils16.createProviderDefinedToolFactoryWithOutputSchema)({
  id: "openai.image_generation",
  name: "image_generation",
-  inputSchema:
+  inputSchema: imageGenerationInputSchema,
  outputSchema: imageGenerationOutputSchema
 });
 var imageGeneration = (args = {}) => {
@@ -1793,22 +1896,26 @@ var imageGeneration = (args = {}) => {
 };

 // src/tool/local-shell.ts
-var
-var
-var localShellInputSchema =
-… (13 removed lines, old 1799-1811; content not captured in this diff view)
+var import_provider_utils17 = require("@ai-sdk/provider-utils");
+var z12 = __toESM(require("zod/v4"));
+var localShellInputSchema = (0, import_provider_utils17.lazySchema)(
+  () => (0, import_provider_utils17.zodSchema)(
+    z12.object({
+      action: z12.object({
+        type: z12.literal("exec"),
+        command: z12.array(z12.string()),
+        timeoutMs: z12.number().optional(),
+        user: z12.string().optional(),
+        workingDirectory: z12.string().optional(),
+        env: z12.record(z12.string(), z12.string()).optional()
+      })
+    })
+  )
+);
+var localShellOutputSchema = (0, import_provider_utils17.lazySchema)(
+  () => (0, import_provider_utils17.zodSchema)(z12.object({ output: z12.string() }))
+);
+var localShell = (0, import_provider_utils17.createProviderDefinedToolFactoryWithOutputSchema)({
  id: "openai.local_shell",
  name: "local_shell",
  inputSchema: localShellInputSchema,
@@ -1816,103 +1923,121 @@ var localShell = (0, import_provider_utils10.createProviderDefinedToolFactoryWit
 });

 // src/tool/web-search.ts
-var
-var
-var webSearchArgsSchema =
-… (13 removed lines, old 1822-1834; content not captured in this diff view)
+var import_provider_utils18 = require("@ai-sdk/provider-utils");
+var z13 = __toESM(require("zod/v4"));
+var webSearchArgsSchema = (0, import_provider_utils18.lazySchema)(
+  () => (0, import_provider_utils18.zodSchema)(
+    z13.object({
+      filters: z13.object({
+        allowedDomains: z13.array(z13.string()).optional()
+      }).optional(),
+      searchContextSize: z13.enum(["low", "medium", "high"]).optional(),
+      userLocation: z13.object({
+        type: z13.literal("approximate"),
+        country: z13.string().optional(),
+        city: z13.string().optional(),
+        region: z13.string().optional(),
+        timezone: z13.string().optional()
+      }).optional()
+    })
+  )
+);
+var webSearchInputSchema = (0, import_provider_utils18.lazySchema)(
+  () => (0, import_provider_utils18.zodSchema)(
+    z13.object({
+      action: z13.discriminatedUnion("type", [
+        z13.object({
+          type: z13.literal("search"),
+          query: z13.string().nullish()
+        }),
+        z13.object({
+          type: z13.literal("open_page"),
+          url: z13.string()
+        }),
+        z13.object({
+          type: z13.literal("find"),
+          url: z13.string(),
+          pattern: z13.string()
+        })
+      ]).nullish()
+    })
+  )
+);
+var webSearchToolFactory = (0, import_provider_utils18.createProviderDefinedToolFactory)({
  id: "openai.web_search",
  name: "web_search",
-  inputSchema:
-    action: import_v413.z.discriminatedUnion("type", [
-      import_v413.z.object({
-        type: import_v413.z.literal("search"),
-        query: import_v413.z.string().nullish()
-      }),
-      import_v413.z.object({
-        type: import_v413.z.literal("open_page"),
-        url: import_v413.z.string()
-      }),
-      import_v413.z.object({
-        type: import_v413.z.literal("find"),
-        url: import_v413.z.string(),
-        pattern: import_v413.z.string()
-      })
-    ]).nullish()
-  })
+  inputSchema: webSearchInputSchema
 });
 var webSearch = (args = {}) => {
  return webSearchToolFactory(args);
 };

 // src/tool/web-search-preview.ts
-var
-var
-var webSearchPreviewArgsSchema =
-… (34 removed lines, old 1863-1896; content not captured in this diff view)
+var import_provider_utils19 = require("@ai-sdk/provider-utils");
+var z14 = __toESM(require("zod/v4"));
+var webSearchPreviewArgsSchema = (0, import_provider_utils19.lazySchema)(
+  () => (0, import_provider_utils19.zodSchema)(
+    z14.object({
+      /**
+       * Search context size to use for the web search.
+       * - high: Most comprehensive context, highest cost, slower response
+       * - medium: Balanced context, cost, and latency (default)
+       * - low: Least context, lowest cost, fastest response
+       */
+      searchContextSize: z14.enum(["low", "medium", "high"]).optional(),
+      /**
+       * User location information to provide geographically relevant search results.
+       */
+      userLocation: z14.object({
+        /**
+         * Type of location (always 'approximate')
+         */
+        type: z14.literal("approximate"),
+        /**
+         * Two-letter ISO country code (e.g., 'US', 'GB')
+         */
+        country: z14.string().optional(),
+        /**
+         * City name (free text, e.g., 'Minneapolis')
+         */
+        city: z14.string().optional(),
+        /**
+         * Region name (free text, e.g., 'Minnesota')
+         */
+        region: z14.string().optional(),
+        /**
+         * IANA timezone (e.g., 'America/Chicago')
+         */
+        timezone: z14.string().optional()
+      }).optional()
+    })
+  )
+);
+var webSearchPreviewInputSchema = (0, import_provider_utils19.lazySchema)(
+  () => (0, import_provider_utils19.zodSchema)(
+    z14.object({
+      action: z14.discriminatedUnion("type", [
+        z14.object({
+          type: z14.literal("search"),
+          query: z14.string().nullish()
+        }),
+        z14.object({
+          type: z14.literal("open_page"),
+          url: z14.string()
+        }),
+        z14.object({
+          type: z14.literal("find"),
+          url: z14.string(),
+          pattern: z14.string()
+        })
+      ]).nullish()
+    })
+  )
+);
+var webSearchPreview = (0, import_provider_utils19.createProviderDefinedToolFactory)({
  id: "openai.web_search_preview",
  name: "web_search_preview",
-  inputSchema:
-    action: import_v414.z.discriminatedUnion("type", [
-      import_v414.z.object({
-        type: import_v414.z.literal("search"),
-        query: import_v414.z.string().nullish()
-      }),
-      import_v414.z.object({
-        type: import_v414.z.literal("open_page"),
-        url: import_v414.z.string()
-      }),
-      import_v414.z.object({
-        type: import_v414.z.literal("find"),
-        url: import_v414.z.string(),
-        pattern: import_v414.z.string()
-      })
-    ]).nullish()
-  })
+  inputSchema: webSearchPreviewInputSchema
 });

 // src/openai-tools.ts
@@ -1995,13 +2120,12 @@ var openaiTools = {
|
|
|
1995
2120
|
|
|
1996
2121
|
// src/responses/openai-responses-language-model.ts
|
|
1997
2122
|
var import_provider8 = require("@ai-sdk/provider");
|
|
1998
|
-
var
|
|
1999
|
-
var import_v416 = require("zod/v4");
|
|
2123
|
+
var import_provider_utils24 = require("@ai-sdk/provider-utils");
|
|
2000
2124
|
|
|
2001
2125
|
// src/responses/convert-to-openai-responses-input.ts
|
|
2002
2126
|
var import_provider6 = require("@ai-sdk/provider");
|
|
2003
|
-
var
|
|
2004
|
-
var
|
|
2127
|
+
var import_provider_utils20 = require("@ai-sdk/provider-utils");
|
|
2128
|
+
var z15 = __toESM(require("zod/v4"));
|
|
2005
2129
|
function isFileId(data, prefixes) {
|
|
2006
2130
|
if (!prefixes) return false;
|
|
2007
2131
|
return prefixes.some((prefix) => data.startsWith(prefix));
|
|
@@ -2059,7 +2183,7 @@ async function convertToOpenAIResponsesInput({
|
|
|
2059
2183
|
return {
|
|
2060
2184
|
type: "input_image",
|
|
2061
2185
|
...part.data instanceof URL ? { image_url: part.data.toString() } : typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
|
|
2062
|
-
image_url: `data:${mediaType};base64,${(0,
|
|
2186
|
+
image_url: `data:${mediaType};base64,${(0, import_provider_utils20.convertToBase64)(part.data)}`
|
|
2063
2187
|
},
|
|
2064
2188
|
detail: (_b2 = (_a2 = part.providerOptions) == null ? void 0 : _a2.openai) == null ? void 0 : _b2.imageDetail
|
|
2065
2189
|
};
|
|
@@ -2074,7 +2198,7 @@ async function convertToOpenAIResponsesInput({
|
|
|
2074
2198
|
type: "input_file",
|
|
2075
2199
|
...typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
|
|
2076
2200
|
filename: (_c2 = part.filename) != null ? _c2 : `part-${index}.pdf`,
|
|
2077
|
-
file_data: `data:application/pdf;base64,${(0,
|
|
2201
|
+
file_data: `data:application/pdf;base64,${(0, import_provider_utils20.convertToBase64)(part.data)}`
|
|
2078
2202
|
}
|
|
2079
2203
|
};
|
|
2080
2204
|
} else {
|
|
@@ -2107,7 +2231,10 @@ async function convertToOpenAIResponsesInput({
|
|
|
2107
2231
|
break;
|
|
2108
2232
|
}
|
|
2109
2233
|
if (hasLocalShellTool && part.toolName === "local_shell") {
|
|
2110
|
-
const parsedInput =
|
|
2234
|
+
const parsedInput = await (0, import_provider_utils20.validateTypes)({
|
|
2235
|
+
value: part.input,
|
|
2236
|
+
schema: localShellInputSchema
|
|
2237
|
+
});
|
|
2111
2238
|
input.push({
|
|
2112
2239
|
type: "local_shell_call",
|
|
2113
2240
|
call_id: part.toolCallId,
|
|
@@ -2145,7 +2272,7 @@ async function convertToOpenAIResponsesInput({
|
|
|
2145
2272
|
break;
|
|
2146
2273
|
}
|
|
2147
2274
|
case "reasoning": {
|
|
2148
|
-
const providerOptions = await (0,
|
|
2275
|
+
const providerOptions = await (0, import_provider_utils20.parseProviderOptions)({
|
|
2149
2276
|
provider: "openai",
|
|
2150
2277
|
providerOptions: part.providerOptions,
|
|
2151
2278
|
schema: openaiResponsesReasoningProviderOptionsSchema
|
|
@@ -2203,10 +2330,14 @@ async function convertToOpenAIResponsesInput({
|
|
|
2203
2330
|
for (const part of content) {
|
|
2204
2331
|
const output = part.output;
|
|
2205
2332
|
if (hasLocalShellTool && part.toolName === "local_shell" && output.type === "json") {
|
|
2333
|
+
const parsedOutput = await (0, import_provider_utils20.validateTypes)({
|
|
2334
|
+
value: output.value,
|
|
2335
|
+
schema: localShellOutputSchema
|
|
2336
|
+
});
|
|
2206
2337
|
input.push({
|
|
2207
2338
|
type: "local_shell_call_output",
|
|
2208
2339
|
call_id: part.toolCallId,
|
|
2209
|
-
output:
|
|
2340
|
+
output: parsedOutput.output
|
|
2210
2341
|
});
|
|
2211
2342
|
break;
|
|
2212
2343
|
}
|
|
@@ -2241,9 +2372,9 @@ async function convertToOpenAIResponsesInput({
|
|
|
2241
2372
|
}
|
|
2242
2373
|
return { input, warnings };
|
|
2243
2374
|
}
|
|
2244
|
-
var openaiResponsesReasoningProviderOptionsSchema =
|
|
2245
|
-
itemId:
|
|
2246
|
-
reasoningEncryptedContent:
|
|
2375
|
+
var openaiResponsesReasoningProviderOptionsSchema = z15.object({
|
|
2376
|
+
itemId: z15.string().nullish(),
|
|
2377
|
+
reasoningEncryptedContent: z15.string().nullish()
|
|
2247
2378
|
});
|
|
2248
2379
|
|
|
2249
2380
|
// src/responses/map-openai-responses-finish-reason.ts
|
|
@@ -2264,9 +2395,539 @@ function mapOpenAIResponseFinishReason({
|
|
|
2264
2395
|
}
|
|
2265
2396
|
}
|
|
2266
2397
|
|
|
2398
|
+
// src/responses/openai-responses-api.ts
|
|
2399
|
+
var import_provider_utils21 = require("@ai-sdk/provider-utils");
|
|
2400
|
+
var z16 = __toESM(require("zod/v4"));
|
|
2401
|
+
var openaiResponsesChunkSchema = (0, import_provider_utils21.lazyValidator)(
|
|
2402
|
+
() => (0, import_provider_utils21.zodSchema)(
|
|
2403
|
+
z16.union([
|
|
2404
|
+
z16.object({
|
|
2405
|
+
type: z16.literal("response.output_text.delta"),
|
|
2406
|
+
item_id: z16.string(),
|
|
2407
|
+
delta: z16.string(),
|
|
2408
|
+
logprobs: z16.array(
|
|
2409
|
+
z16.object({
|
|
2410
|
+
token: z16.string(),
|
|
2411
|
+
logprob: z16.number(),
|
|
2412
|
+
top_logprobs: z16.array(
|
|
2413
|
+
z16.object({
|
|
2414
|
+
token: z16.string(),
|
|
2415
|
+
logprob: z16.number()
|
|
2416
|
+
})
|
|
2417
|
+
)
|
|
2418
|
+
})
|
|
2419
|
+
).nullish()
|
|
2420
|
+
}),
|
|
2421
|
+
z16.object({
|
|
2422
|
+
type: z16.enum(["response.completed", "response.incomplete"]),
|
|
2423
|
+
response: z16.object({
|
|
2424
|
+
incomplete_details: z16.object({ reason: z16.string() }).nullish(),
|
|
2425
|
+
usage: z16.object({
|
|
2426
|
+
input_tokens: z16.number(),
|
|
2427
|
+
input_tokens_details: z16.object({ cached_tokens: z16.number().nullish() }).nullish(),
|
|
2428
|
+
output_tokens: z16.number(),
|
|
2429
|
+
output_tokens_details: z16.object({ reasoning_tokens: z16.number().nullish() }).nullish()
|
|
2430
|
+
}),
|
|
2431
|
+
service_tier: z16.string().nullish()
|
|
2432
|
+
})
|
|
2433
|
+
}),
|
|
2434
|
+
z16.object({
|
|
2435
|
+
type: z16.literal("response.created"),
|
|
2436
|
+
response: z16.object({
|
|
2437
|
+
id: z16.string(),
|
|
2438
|
+
created_at: z16.number(),
|
|
2439
|
+
model: z16.string(),
|
|
2440
|
+
service_tier: z16.string().nullish()
|
|
2441
|
+
})
|
|
2442
|
+
}),
|
|
2443
|
+
z16.object({
|
|
2444
|
+
type: z16.literal("response.output_item.added"),
|
|
2445
|
+
output_index: z16.number(),
|
|
2446
|
+
item: z16.discriminatedUnion("type", [
|
|
2447
|
+
z16.object({
|
|
2448
|
+
type: z16.literal("message"),
|
|
2449
|
+
id: z16.string()
|
|
2450
|
+
}),
|
|
2451
|
+
z16.object({
|
|
2452
|
+
type: z16.literal("reasoning"),
|
|
2453
|
+
id: z16.string(),
|
|
2454
|
+
encrypted_content: z16.string().nullish()
|
|
2455
|
+
}),
|
|
2456
|
+
z16.object({
|
|
2457
|
+
type: z16.literal("function_call"),
|
|
2458
|
+
id: z16.string(),
|
|
2459
|
+
call_id: z16.string(),
|
|
2460
|
+
name: z16.string(),
|
|
2461
|
+
arguments: z16.string()
|
|
2462
|
+
}),
|
|
2463
|
+
z16.object({
|
|
2464
|
+
type: z16.literal("web_search_call"),
|
|
2465
|
+
id: z16.string(),
|
|
2466
|
+
status: z16.string(),
|
|
2467
|
+
action: z16.object({
|
|
2468
|
+
type: z16.literal("search"),
|
|
2469
|
+
query: z16.string().optional()
|
|
2470
|
+
}).nullish()
|
|
2471
|
+
}),
|
|
2472
|
+
z16.object({
|
|
2473
|
+
type: z16.literal("computer_call"),
|
|
2474
|
+
id: z16.string(),
|
|
2475
|
+
status: z16.string()
|
|
2476
|
+
}),
|
|
2477
|
+
z16.object({
|
|
2478
|
+
type: z16.literal("file_search_call"),
|
|
2479
|
+
id: z16.string()
|
|
2480
|
+
}),
|
|
2481
|
+
z16.object({
|
|
2482
|
+
type: z16.literal("image_generation_call"),
|
|
2483
|
+
id: z16.string()
|
|
2484
|
+
}),
|
|
2485
|
+
z16.object({
|
|
2486
|
+
type: z16.literal("code_interpreter_call"),
|
|
2487
|
+
id: z16.string(),
|
|
2488
|
+
container_id: z16.string(),
|
|
2489
|
+
code: z16.string().nullable(),
|
|
2490
|
+
outputs: z16.array(
|
|
2491
|
+
z16.discriminatedUnion("type", [
|
|
2492
|
+
z16.object({ type: z16.literal("logs"), logs: z16.string() }),
|
|
2493
|
+
z16.object({ type: z16.literal("image"), url: z16.string() })
|
|
2494
|
+
])
|
|
2495
|
+
).nullable(),
|
|
2496
|
+
status: z16.string()
|
|
2497
|
+
})
|
|
2498
|
+
])
|
|
2499
|
+
}),
|
|
2500
|
+
z16.object({
|
|
2501
|
+
type: z16.literal("response.output_item.done"),
|
|
2502
|
+
output_index: z16.number(),
|
|
2503
|
+
item: z16.discriminatedUnion("type", [
|
|
2504
|
+
z16.object({
|
|
2505
|
+
type: z16.literal("message"),
|
|
2506
|
+
id: z16.string()
|
|
2507
|
+
}),
|
|
2508
|
+
z16.object({
|
|
2509
|
+
type: z16.literal("reasoning"),
|
|
2510
|
+
id: z16.string(),
|
|
2511
|
+
encrypted_content: z16.string().nullish()
|
|
2512
|
+
}),
|
|
2513
|
+
z16.object({
|
|
2514
|
+
type: z16.literal("function_call"),
|
|
2515
|
+
id: z16.string(),
|
|
2516
|
+
call_id: z16.string(),
|
|
2517
|
+
name: z16.string(),
|
|
2518
|
+
arguments: z16.string(),
|
|
2519
|
+
status: z16.literal("completed")
|
|
2520
|
+
}),
|
|
2521
|
+
z16.object({
|
|
2522
|
+
type: z16.literal("code_interpreter_call"),
|
|
2523
|
+
id: z16.string(),
|
|
2524
|
+
code: z16.string().nullable(),
|
|
2525
|
+
container_id: z16.string(),
|
|
2526
|
+
outputs: z16.array(
|
|
2527
|
+
z16.discriminatedUnion("type", [
|
|
2528
|
+
z16.object({ type: z16.literal("logs"), logs: z16.string() }),
|
|
2529
|
+
z16.object({ type: z16.literal("image"), url: z16.string() })
|
|
2530
|
+
])
|
|
2531
|
+
).nullable()
|
|
2532
|
+
}),
|
|
2533
|
+
z16.object({
|
|
2534
|
+
type: z16.literal("image_generation_call"),
|
|
2535
|
+
id: z16.string(),
|
|
2536
|
+
result: z16.string()
|
|
2537
|
+
}),
|
|
2538
|
+
z16.object({
|
|
2539
|
+
type: z16.literal("web_search_call"),
|
|
2540
|
+
id: z16.string(),
|
|
2541
|
+
status: z16.string(),
|
|
2542
|
+
action: z16.discriminatedUnion("type", [
|
|
2543
|
+
z16.object({
|
|
2544
|
+
type: z16.literal("search"),
|
|
2545
|
+
query: z16.string().nullish()
|
|
2546
|
+
}),
|
|
2547
|
+
z16.object({
|
|
2548
|
+
type: z16.literal("open_page"),
|
|
2549
|
+
url: z16.string()
|
|
2550
|
+
}),
|
|
2551
|
+
z16.object({
|
|
2552
|
+
type: z16.literal("find"),
|
|
2553
|
+
url: z16.string(),
|
|
2554
|
+
pattern: z16.string()
|
|
2555
|
+
})
|
|
2556
|
+
]).nullish()
|
|
2557
|
+
}),
|
|
2558
|
+
z16.object({
|
|
2559
|
+
type: z16.literal("file_search_call"),
|
|
2560
|
+
id: z16.string(),
|
|
2561
|
+
queries: z16.array(z16.string()),
|
|
2562
|
+
results: z16.array(
|
|
2563
|
+
z16.object({
|
|
2564
|
+
attributes: z16.record(z16.string(), z16.unknown()),
|
|
2565
|
+
file_id: z16.string(),
|
|
2566
|
+
filename: z16.string(),
|
|
2567
|
+
score: z16.number(),
|
|
2568
|
+
text: z16.string()
|
|
2569
|
+
})
|
|
2570
|
+
).nullish()
|
|
2571
|
+
}),
|
|
2572
|
+
z16.object({
|
|
2573
|
+
type: z16.literal("local_shell_call"),
|
|
2574
|
+
id: z16.string(),
|
|
2575
|
+
call_id: z16.string(),
|
|
2576
|
+
action: z16.object({
|
|
2577
|
+
type: z16.literal("exec"),
|
|
2578
|
+
command: z16.array(z16.string()),
|
|
2579
|
+
timeout_ms: z16.number().optional(),
|
|
2580
|
+
user: z16.string().optional(),
|
|
2581
|
+
working_directory: z16.string().optional(),
|
|
2582
|
+
env: z16.record(z16.string(), z16.string()).optional()
|
|
2583
|
+
})
|
|
2584
|
+
}),
|
|
2585
|
+
z16.object({
|
|
2586
|
+
type: z16.literal("computer_call"),
|
|
2587
|
+
id: z16.string(),
|
|
2588
|
+
status: z16.literal("completed")
|
|
2589
|
+
})
|
|
2590
|
+
])
|
|
2591
|
+
}),
|
|
2592
|
+
z16.object({
|
|
2593
|
+
type: z16.literal("response.function_call_arguments.delta"),
|
|
2594
|
+
item_id: z16.string(),
|
|
2595
|
+
output_index: z16.number(),
|
|
2596
|
+
delta: z16.string()
|
|
2597
|
+
}),
|
|
2598
|
+
z16.object({
|
|
2599
|
+
type: z16.literal("response.image_generation_call.partial_image"),
|
|
2600
|
+
item_id: z16.string(),
|
|
2601
|
+
output_index: z16.number(),
|
|
2602
|
+
partial_image_b64: z16.string()
|
|
2603
|
+
}),
|
|
2604
|
+
z16.object({
|
|
2605
|
+
type: z16.literal("response.code_interpreter_call_code.delta"),
|
|
2606
|
+
item_id: z16.string(),
|
|
2607
|
+
output_index: z16.number(),
|
|
2608
|
+
delta: z16.string()
|
|
2609
|
+
}),
|
|
2610
|
+
z16.object({
|
|
2611
|
+
type: z16.literal("response.code_interpreter_call_code.done"),
|
|
2612
|
+
item_id: z16.string(),
|
|
2613
|
+
output_index: z16.number(),
|
|
2614
|
+
code: z16.string()
|
|
2615
|
+
}),
|
|
2616
|
+
z16.object({
|
|
2617
|
+
type: z16.literal("response.output_text.annotation.added"),
|
|
2618
|
+
annotation: z16.discriminatedUnion("type", [
|
|
2619
|
+
z16.object({
|
|
2620
|
+
type: z16.literal("url_citation"),
|
|
2621
|
+
url: z16.string(),
|
|
2622
|
+
title: z16.string()
|
|
2623
|
+
}),
|
|
2624
|
+
z16.object({
|
|
2625
|
+
type: z16.literal("file_citation"),
|
|
2626
|
+
file_id: z16.string(),
|
|
2627
|
+
filename: z16.string().nullish(),
|
|
2628
|
+
index: z16.number().nullish(),
|
|
2629
|
+
start_index: z16.number().nullish(),
|
|
2630
|
+
end_index: z16.number().nullish(),
|
|
2631
|
+
quote: z16.string().nullish()
|
|
2632
|
+
})
|
|
2633
|
+
])
|
|
2634
|
+
}),
|
|
2635
|
+
z16.object({
|
|
2636
|
+
type: z16.literal("response.reasoning_summary_part.added"),
|
|
2637
|
+
item_id: z16.string(),
|
|
2638
|
+
summary_index: z16.number()
|
|
2639
|
+
}),
|
|
2640
|
+
z16.object({
|
|
2641
|
+
type: z16.literal("response.reasoning_summary_text.delta"),
|
|
2642
|
+
item_id: z16.string(),
|
|
2643
|
+
summary_index: z16.number(),
|
|
2644
|
+
delta: z16.string()
|
|
2645
|
+
}),
|
|
2646
|
+
z16.object({
|
|
2647
|
+
type: z16.literal("error"),
|
|
2648
|
+
code: z16.string(),
|
|
2649
|
+
message: z16.string(),
|
|
2650
|
+
param: z16.string().nullish(),
|
|
2651
|
+
sequence_number: z16.number()
|
|
2652
|
+
}),
|
|
2653
|
+
z16.object({ type: z16.string() }).loose().transform((value) => ({
|
|
2654
|
+
type: "unknown_chunk",
|
|
2655
|
+
message: value.type
|
|
2656
|
+
}))
|
|
2657
|
+
// fallback for unknown chunks
|
|
2658
|
+
])
|
|
2659
|
+
)
|
|
2660
|
+
);
|
|
2661
|
+
var openaiResponsesResponseSchema = (0, import_provider_utils21.lazyValidator)(
|
|
2662
|
+
() => (0, import_provider_utils21.zodSchema)(
|
|
2663
|
+
z16.object({
|
|
2664
|
+
id: z16.string(),
|
|
2665
|
+
created_at: z16.number(),
|
|
2666
|
+
error: z16.object({
|
|
2667
|
+
code: z16.string(),
|
|
2668
|
+
message: z16.string()
|
|
2669
|
+
}).nullish(),
|
|
2670
|
+
model: z16.string(),
|
|
2671
|
+
output: z16.array(
|
|
2672
|
+
z16.discriminatedUnion("type", [
|
|
2673
|
+
z16.object({
|
|
2674
|
+
type: z16.literal("message"),
|
|
2675
|
+
role: z16.literal("assistant"),
|
|
2676
|
+
id: z16.string(),
|
|
2677
|
+
content: z16.array(
|
|
2678
|
+
z16.object({
|
|
2679
|
+
type: z16.literal("output_text"),
|
|
2680
|
+
text: z16.string(),
|
|
2681
|
+
logprobs: z16.array(
|
|
2682
|
+
z16.object({
|
|
2683
|
+
token: z16.string(),
|
|
2684
|
+
logprob: z16.number(),
|
|
2685
|
+
top_logprobs: z16.array(
|
|
2686
|
+
z16.object({
|
|
2687
|
+
token: z16.string(),
|
|
2688
|
+
logprob: z16.number()
|
|
2689
|
+
})
|
|
2690
|
+
)
|
|
2691
|
+
})
|
|
2692
|
+
).nullish(),
|
|
2693
|
+
annotations: z16.array(
|
|
2694
|
+
z16.discriminatedUnion("type", [
|
|
2695
|
+
z16.object({
|
|
2696
|
+
type: z16.literal("url_citation"),
|
|
2697
|
+
start_index: z16.number(),
|
|
2698
|
+
end_index: z16.number(),
|
|
2699
|
+
url: z16.string(),
|
|
2700
|
+
title: z16.string()
|
|
2701
|
+
}),
|
|
2702
|
+
z16.object({
|
|
2703
|
+
type: z16.literal("file_citation"),
|
|
2704
|
+
file_id: z16.string(),
|
|
2705
|
+
filename: z16.string().nullish(),
|
|
2706
|
+
index: z16.number().nullish(),
|
|
2707
|
+
start_index: z16.number().nullish(),
|
|
2708
|
+
end_index: z16.number().nullish(),
|
|
2709
|
+
quote: z16.string().nullish()
|
|
2710
|
+
}),
|
|
2711
|
+
z16.object({
|
|
2712
|
+
type: z16.literal("container_file_citation")
|
|
2713
|
+
})
|
|
2714
|
+
])
|
|
2715
|
+
)
|
|
2716
|
+
})
|
|
2717
|
+
)
|
|
2718
|
+
}),
|
|
2719
|
+
z16.object({
|
|
2720
|
+
type: z16.literal("web_search_call"),
|
|
2721
|
+
id: z16.string(),
|
|
2722
|
+
status: z16.string(),
|
|
2723
|
+
action: z16.discriminatedUnion("type", [
|
|
2724
|
+
z16.object({
|
|
2725
|
+
type: z16.literal("search"),
|
|
2726
|
+
query: z16.string().nullish()
|
|
2727
|
+
}),
|
|
2728
|
+
z16.object({
|
|
2729
|
+
type: z16.literal("open_page"),
|
|
2730
|
+
url: z16.string()
|
|
2731
|
+
}),
|
|
2732
|
+
z16.object({
|
|
2733
|
+
type: z16.literal("find"),
|
|
2734
|
+
url: z16.string(),
|
|
2735
|
+
pattern: z16.string()
|
|
2736
|
+
})
|
|
2737
|
+
]).nullish()
|
|
2738
|
+
}),
|
|
2739
|
+
z16.object({
|
|
2740
|
+
type: z16.literal("file_search_call"),
|
|
2741
|
+
id: z16.string(),
|
|
2742
|
+
queries: z16.array(z16.string()),
|
|
2743
|
+
results: z16.array(
|
|
2744
|
+
z16.object({
|
|
2745
|
+
attributes: z16.record(z16.string(), z16.unknown()),
|
|
2746
|
+
file_id: z16.string(),
|
|
2747
|
+
filename: z16.string(),
|
|
2748
|
+
score: z16.number(),
|
|
2749
|
+
text: z16.string()
|
|
2750
|
+
})
|
|
2751
|
+
).nullish()
|
|
2752
|
+
}),
|
|
2753
|
+
z16.object({
|
|
2754
|
+
type: z16.literal("code_interpreter_call"),
|
|
2755
|
+
id: z16.string(),
|
|
2756
|
+
code: z16.string().nullable(),
|
|
2757
|
+
container_id: z16.string(),
|
|
2758
|
+
outputs: z16.array(
|
|
2759
|
+
z16.discriminatedUnion("type", [
|
|
2760
|
+
z16.object({ type: z16.literal("logs"), logs: z16.string() }),
|
|
2761
|
+
z16.object({ type: z16.literal("image"), url: z16.string() })
|
|
2762
|
+
])
|
|
2763
|
+
).nullable()
|
|
2764
|
+
}),
|
|
2765
|
+
z16.object({
|
|
2766
|
+
type: z16.literal("image_generation_call"),
|
|
2767
|
+
id: z16.string(),
|
|
2768
|
+
result: z16.string()
|
|
2769
|
+
}),
|
|
2770
|
+
z16.object({
|
|
2771
|
+
type: z16.literal("local_shell_call"),
|
|
2772
|
+
id: z16.string(),
|
|
2773
|
+
call_id: z16.string(),
|
|
2774
|
+
action: z16.object({
|
|
2775
|
+
type: z16.literal("exec"),
|
|
2776
|
+
command: z16.array(z16.string()),
|
|
2777
|
+
timeout_ms: z16.number().optional(),
|
|
2778
|
+
user: z16.string().optional(),
|
|
2779
|
+
working_directory: z16.string().optional(),
|
|
2780
|
+
env: z16.record(z16.string(), z16.string()).optional()
|
|
2781
|
+
})
|
|
2782
|
+
}),
|
|
2783
|
+
z16.object({
|
|
2784
|
+
type: z16.literal("function_call"),
|
|
2785
|
+
call_id: z16.string(),
|
|
2786
|
+
name: z16.string(),
|
|
2787
|
+
arguments: z16.string(),
|
|
2788
|
+
id: z16.string()
|
|
2789
|
+
}),
|
|
2790
|
+
z16.object({
|
|
2791
|
+
type: z16.literal("computer_call"),
|
|
2792
|
+
id: z16.string(),
|
|
2793
|
+
status: z16.string().optional()
|
|
2794
|
+
}),
|
|
2795
|
+
z16.object({
|
|
2796
|
+
type: z16.literal("reasoning"),
|
|
2797
|
+
id: z16.string(),
|
|
2798
|
+
encrypted_content: z16.string().nullish(),
|
|
2799
|
+
summary: z16.array(
|
|
2800
|
+
z16.object({
|
|
2801
|
+
type: z16.literal("summary_text"),
|
|
2802
|
+
text: z16.string()
|
|
2803
|
+
})
|
|
2804
|
+
)
|
|
2805
|
+
})
|
|
2806
|
+
])
|
|
2807
|
+
),
|
|
2808
|
+
service_tier: z16.string().nullish(),
|
|
2809
|
+
incomplete_details: z16.object({ reason: z16.string() }).nullish(),
|
|
2810
|
+
usage: z16.object({
|
|
2811
|
+
input_tokens: z16.number(),
|
|
2812
|
+
input_tokens_details: z16.object({ cached_tokens: z16.number().nullish() }).nullish(),
|
|
2813
|
+
output_tokens: z16.number(),
|
|
2814
|
+
output_tokens_details: z16.object({ reasoning_tokens: z16.number().nullish() }).nullish()
|
|
2815
|
+
})
|
|
2816
|
+
})
|
|
2817
|
+
)
|
|
2818
|
+
);
|
|
2819
|
+
|
|
2820
|
+
// src/responses/openai-responses-options.ts
|
|
2821
|
+
var import_provider_utils22 = require("@ai-sdk/provider-utils");
|
|
2822
|
+
var z17 = __toESM(require("zod/v4"));
|
|
2823
|
+
var TOP_LOGPROBS_MAX = 20;
|
|
2824
|
+
var openaiResponsesReasoningModelIds = [
|
|
2825
|
+
"o1",
|
|
2826
|
+
"o1-2024-12-17",
|
|
2827
|
+
"o3-mini",
|
|
2828
|
+
"o3-mini-2025-01-31",
|
|
2829
|
+
"o3",
|
|
2830
|
+
"o3-2025-04-16",
|
|
2831
|
+
"o4-mini",
|
|
2832
|
+
"o4-mini-2025-04-16",
|
|
2833
|
+
"codex-mini-latest",
|
|
2834
|
+
"computer-use-preview",
|
|
2835
|
+
"gpt-5",
|
|
2836
|
+
"gpt-5-2025-08-07",
|
|
2837
|
+
"gpt-5-codex",
|
|
2838
|
+
"gpt-5-mini",
|
|
2839
|
+
"gpt-5-mini-2025-08-07",
|
|
2840
|
+
"gpt-5-nano",
|
|
2841
|
+
"gpt-5-nano-2025-08-07",
|
|
2842
|
+
"gpt-5-pro",
|
|
2843
|
+
"gpt-5-pro-2025-10-06"
|
|
2844
|
+
];
|
|
2845
|
+
var openaiResponsesModelIds = [
|
|
2846
|
+
"gpt-4.1",
|
|
2847
|
+
"gpt-4.1-2025-04-14",
|
|
2848
|
+
"gpt-4.1-mini",
|
|
2849
|
+
"gpt-4.1-mini-2025-04-14",
|
|
2850
|
+
"gpt-4.1-nano",
|
|
2851
|
+
"gpt-4.1-nano-2025-04-14",
|
|
2852
|
+
"gpt-4o",
|
|
2853
|
+
"gpt-4o-2024-05-13",
|
|
2854
|
+
"gpt-4o-2024-08-06",
|
|
2855
|
+
"gpt-4o-2024-11-20",
|
|
2856
|
+
"gpt-4o-audio-preview",
|
|
2857
|
+
"gpt-4o-audio-preview-2024-10-01",
|
|
2858
|
+
"gpt-4o-audio-preview-2024-12-17",
|
|
2859
|
+
"gpt-4o-search-preview",
|
|
2860
|
+
"gpt-4o-search-preview-2025-03-11",
|
|
2861
|
+
"gpt-4o-mini-search-preview",
|
|
2862
|
+
"gpt-4o-mini-search-preview-2025-03-11",
|
|
2863
|
+
"gpt-4o-mini",
|
|
2864
|
+
"gpt-4o-mini-2024-07-18",
|
|
2865
|
+
"gpt-4-turbo",
|
|
2866
|
+
"gpt-4-turbo-2024-04-09",
|
|
2867
|
+
"gpt-4-turbo-preview",
|
|
2868
|
+
"gpt-4-0125-preview",
|
|
2869
|
+
"gpt-4-1106-preview",
|
|
2870
|
+
"gpt-4",
|
|
2871
|
+
"gpt-4-0613",
|
|
2872
|
+
"gpt-4.5-preview",
|
|
2873
|
+
"gpt-4.5-preview-2025-02-27",
|
|
2874
|
+
"gpt-3.5-turbo-0125",
|
|
2875
|
+
"gpt-3.5-turbo",
|
|
2876
|
+
"gpt-3.5-turbo-1106",
|
|
2877
|
+
"chatgpt-4o-latest",
|
|
2878
|
+
"gpt-5-chat-latest",
|
|
2879
|
+
...openaiResponsesReasoningModelIds
|
|
2880
|
+
];
|
|
2881
|
+
var openaiResponsesProviderOptionsSchema = (0, import_provider_utils22.lazyValidator)(
|
|
2882
|
+
() => (0, import_provider_utils22.zodSchema)(
|
|
2883
|
+
z17.object({
|
|
2884
|
+
include: z17.array(
|
|
2885
|
+
z17.enum([
|
|
2886
|
+
"reasoning.encrypted_content",
|
|
2887
|
+
"file_search_call.results",
|
|
2888
|
+
"message.output_text.logprobs"
|
|
2889
|
+
])
|
|
2890
|
+
).nullish(),
|
|
2891
|
+
instructions: z17.string().nullish(),
|
|
2892
|
+
/**
|
|
2893
|
+
* Return the log probabilities of the tokens.
|
|
2894
|
+
*
|
|
2895
|
+
* Setting to true will return the log probabilities of the tokens that
|
|
2896
|
+
* were generated.
|
|
2897
|
+
*
|
|
2898
|
+
* Setting to a number will return the log probabilities of the top n
|
|
2899
|
+
* tokens that were generated.
|
|
2900
|
+
*
|
|
2901
|
+
* @see https://platform.openai.com/docs/api-reference/responses/create
|
|
2902
|
+
* @see https://cookbook.openai.com/examples/using_logprobs
|
|
2903
|
+
*/
|
|
2904
|
+
logprobs: z17.union([z17.boolean(), z17.number().min(1).max(TOP_LOGPROBS_MAX)]).optional(),
|
|
2905
|
+
/**
|
|
2906
|
+
* The maximum number of total calls to built-in tools that can be processed in a response.
|
|
2907
|
+
* This maximum number applies across all built-in tool calls, not per individual tool.
|
|
2908
|
+
* Any further attempts to call a tool by the model will be ignored.
|
|
2909
|
+
*/
|
|
2910
|
+
maxToolCalls: z17.number().nullish(),
|
|
2911
|
+
metadata: z17.any().nullish(),
|
|
2912
|
+
parallelToolCalls: z17.boolean().nullish(),
|
|
2913
|
+
previousResponseId: z17.string().nullish(),
|
|
2914
|
+
promptCacheKey: z17.string().nullish(),
|
|
2915
|
+
reasoningEffort: z17.string().nullish(),
|
|
2916
|
+
reasoningSummary: z17.string().nullish(),
|
|
2917
|
+
safetyIdentifier: z17.string().nullish(),
|
|
2918
|
+
serviceTier: z17.enum(["auto", "flex", "priority"]).nullish(),
|
|
2919
|
+
store: z17.boolean().nullish(),
|
|
2920
|
+
strictJsonSchema: z17.boolean().nullish(),
|
|
2921
|
+
textVerbosity: z17.enum(["low", "medium", "high"]).nullish(),
|
|
2922
|
+
user: z17.string().nullish()
|
|
2923
|
+
})
|
|
2924
|
+
)
|
|
2925
|
+
);
|
|
2926
|
+
|
|
2267
2927
|
// src/responses/openai-responses-prepare-tools.ts
|
|
2268
2928
|
var import_provider7 = require("@ai-sdk/provider");
|
|
2269
|
-
|
|
2929
|
+
var import_provider_utils23 = require("@ai-sdk/provider-utils");
|
|
2930
|
+
async function prepareResponsesTools({
|
|
2270
2931
|
tools,
|
|
2271
2932
|
toolChoice,
|
|
2272
2933
|
strictJsonSchema
|
|
@@ -2291,7 +2952,10 @@ function prepareResponsesTools({
|
|
|
2291
2952
|
case "provider-defined": {
|
|
2292
2953
|
switch (tool.id) {
|
|
2293
2954
|
case "openai.file_search": {
|
|
2294
|
-
const args =
|
|
2955
|
+
const args = await (0, import_provider_utils23.validateTypes)({
|
|
2956
|
+
value: tool.args,
|
|
2957
|
+
schema: fileSearchArgsSchema
|
|
2958
|
+
});
|
|
2295
2959
|
openaiTools2.push({
|
|
2296
2960
|
type: "file_search",
|
|
2297
2961
|
vector_store_ids: args.vectorStoreIds,
|
|
@@ -2311,7 +2975,10 @@ function prepareResponsesTools({
|
|
|
2311
2975
|
break;
|
|
2312
2976
|
}
|
|
2313
2977
|
case "openai.web_search_preview": {
|
|
2314
|
-
const args =
|
|
2978
|
+
const args = await (0, import_provider_utils23.validateTypes)({
|
|
2979
|
+
value: tool.args,
|
|
2980
|
+
schema: webSearchPreviewArgsSchema
|
|
2981
|
+
});
|
|
2315
2982
|
openaiTools2.push({
|
|
2316
2983
|
type: "web_search_preview",
|
|
2317
2984
|
search_context_size: args.searchContextSize,
|
|
@@ -2320,7 +2987,10 @@ function prepareResponsesTools({
|
|
|
2320
2987
|
break;
|
|
2321
2988
|
}
|
|
2322
2989
|
case "openai.web_search": {
|
|
2323
|
-
const args =
|
|
2990
|
+
const args = await (0, import_provider_utils23.validateTypes)({
|
|
2991
|
+
value: tool.args,
|
|
2992
|
+
schema: webSearchArgsSchema
|
|
2993
|
+
});
|
|
2324
2994
|
openaiTools2.push({
|
|
2325
2995
|
type: "web_search",
|
|
2326
2996
|
filters: args.filters != null ? { allowed_domains: args.filters.allowedDomains } : void 0,
|
|
@@ -2330,7 +3000,10 @@ function prepareResponsesTools({
|
|
|
2330
3000
|
break;
|
|
2331
3001
|
}
|
|
2332
3002
|
case "openai.code_interpreter": {
|
|
2333
|
-
const args =
|
|
3003
|
+
const args = await (0, import_provider_utils23.validateTypes)({
|
|
3004
|
+
value: tool.args,
|
|
3005
|
+
schema: codeInterpreterArgsSchema
|
|
3006
|
+
});
|
|
2334
3007
|
openaiTools2.push({
|
|
2335
3008
|
type: "code_interpreter",
|
|
2336
3009
|
container: args.container == null ? { type: "auto", file_ids: void 0 } : typeof args.container === "string" ? args.container : { type: "auto", file_ids: args.container.fileIds }
|
|
@@ -2338,7 +3011,10 @@ function prepareResponsesTools({
|
|
|
2338
3011
|
break;
|
|
2339
3012
|
}
|
|
2340
3013
|
case "openai.image_generation": {
|
|
2341
|
-
const args =
|
|
3014
|
+
const args = await (0, import_provider_utils23.validateTypes)({
|
|
3015
|
+
value: tool.args,
|
|
3016
|
+
schema: imageGenerationArgsSchema
|
|
3017
|
+
});
|
|
2342
3018
|
openaiTools2.push({
|
|
2343
3019
|
type: "image_generation",
|
|
2344
3020
|
background: args.background,
|
|
@@ -2390,83 +3066,6 @@ function prepareResponsesTools({
|
|
|
2390
3066
|
}
|
|
2391
3067
|
|
|
2392
3068
|
// src/responses/openai-responses-language-model.ts
|
|
2393
|
-
var webSearchCallItem = import_v416.z.object({
|
|
2394
|
-
type: import_v416.z.literal("web_search_call"),
|
|
2395
|
-
id: import_v416.z.string(),
|
|
2396
|
-
status: import_v416.z.string(),
|
|
2397
|
-
action: import_v416.z.discriminatedUnion("type", [
|
|
2398
|
-
import_v416.z.object({
|
|
2399
|
-
type: import_v416.z.literal("search"),
|
|
2400
|
-
query: import_v416.z.string().nullish()
|
|
2401
|
-
}),
|
|
2402
|
-
import_v416.z.object({
|
|
2403
|
-
type: import_v416.z.literal("open_page"),
|
|
2404
|
-
url: import_v416.z.string()
|
|
2405
|
-
}),
|
|
2406
|
-
import_v416.z.object({
|
|
2407
|
-
type: import_v416.z.literal("find"),
|
|
2408
|
-
url: import_v416.z.string(),
|
|
2409
|
-
pattern: import_v416.z.string()
|
|
2410
|
-
})
|
|
2411
|
-
]).nullish()
|
|
2412
|
-
});
|
|
2413
|
-
var fileSearchCallItem = import_v416.z.object({
|
|
2414
|
-
type: import_v416.z.literal("file_search_call"),
|
|
2415
|
-
id: import_v416.z.string(),
|
|
2416
|
-
queries: import_v416.z.array(import_v416.z.string()),
|
|
2417
|
-
results: import_v416.z.array(
|
|
2418
|
-
import_v416.z.object({
|
|
2419
|
-
attributes: import_v416.z.record(import_v416.z.string(), import_v416.z.unknown()),
|
|
2420
|
-
file_id: import_v416.z.string(),
|
|
2421
|
-
filename: import_v416.z.string(),
|
|
2422
|
-
score: import_v416.z.number(),
|
|
2423
|
-
text: import_v416.z.string()
|
|
2424
|
-
})
|
|
2425
|
-
).nullish()
|
|
2426
|
-
});
|
|
2427
|
-
var codeInterpreterCallItem = import_v416.z.object({
|
|
2428
|
-
type: import_v416.z.literal("code_interpreter_call"),
|
|
2429
|
-
id: import_v416.z.string(),
|
|
2430
|
-
code: import_v416.z.string().nullable(),
|
|
2431
|
-
container_id: import_v416.z.string(),
|
|
2432
|
-
outputs: import_v416.z.array(
|
|
2433
|
-
import_v416.z.discriminatedUnion("type", [
|
|
2434
|
-
import_v416.z.object({ type: import_v416.z.literal("logs"), logs: import_v416.z.string() }),
|
|
2435
|
-
import_v416.z.object({ type: import_v416.z.literal("image"), url: import_v416.z.string() })
|
|
2436
|
-
])
|
|
2437
|
-
).nullable()
|
|
2438
|
-
});
|
|
2439
|
-
var localShellCallItem = import_v416.z.object({
|
|
2440
|
-
type: import_v416.z.literal("local_shell_call"),
|
|
2441
|
-
id: import_v416.z.string(),
|
|
2442
|
-
call_id: import_v416.z.string(),
|
|
2443
|
-
action: import_v416.z.object({
|
|
2444
|
-
type: import_v416.z.literal("exec"),
|
|
2445
|
-
command: import_v416.z.array(import_v416.z.string()),
|
|
2446
|
-
timeout_ms: import_v416.z.number().optional(),
|
|
2447
|
-
user: import_v416.z.string().optional(),
|
|
2448
|
-
working_directory: import_v416.z.string().optional(),
|
|
2449
|
-
env: import_v416.z.record(import_v416.z.string(), import_v416.z.string()).optional()
|
|
2450
|
-
})
|
|
2451
|
-
});
|
|
2452
|
-
var imageGenerationCallItem = import_v416.z.object({
|
|
2453
|
-
type: import_v416.z.literal("image_generation_call"),
|
|
2454
|
-
id: import_v416.z.string(),
|
|
2455
|
-
result: import_v416.z.string()
|
|
2456
|
-
});
|
|
2457
|
-
var TOP_LOGPROBS_MAX = 20;
|
|
2458
|
-
var LOGPROBS_SCHEMA = import_v416.z.array(
|
|
2459
|
-
import_v416.z.object({
|
|
2460
|
-
token: import_v416.z.string(),
|
|
2461
|
-
logprob: import_v416.z.number(),
|
|
2462
|
-
top_logprobs: import_v416.z.array(
|
|
2463
|
-
import_v416.z.object({
|
|
2464
|
-
token: import_v416.z.string(),
|
|
2465
|
-
logprob: import_v416.z.number()
|
|
2466
|
-
})
|
|
2467
|
-
)
|
|
2468
|
-
})
|
|
2469
|
-
);
|
|
2470
3069
|
var OpenAIResponsesLanguageModel = class {
|
|
2471
3070
|
constructor(modelId, config) {
|
|
2472
3071
|
this.specificationVersion = "v3";
|
|
@@ -2519,7 +3118,7 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
2519
3118
|
if (stopSequences != null) {
|
|
2520
3119
|
warnings.push({ type: "unsupported-setting", setting: "stopSequences" });
|
|
2521
3120
|
}
|
|
2522
|
-
const openaiOptions = await (0,
|
|
3121
|
+
const openaiOptions = await (0, import_provider_utils24.parseProviderOptions)({
|
|
2523
3122
|
provider: "openai",
|
|
2524
3123
|
providerOptions,
|
|
2525
3124
|
schema: openaiResponsesProviderOptionsSchema
|
|
@@ -2658,7 +3257,7 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
2658
3257
|
tools: openaiTools2,
|
|
2659
3258
|
toolChoice: openaiToolChoice,
|
|
2660
3259
|
toolWarnings
|
|
2661
|
-
} = prepareResponsesTools({
|
|
3260
|
+
} = await prepareResponsesTools({
|
|
2662
3261
|
tools,
|
|
2663
3262
|
toolChoice,
|
|
2664
3263
|
strictJsonSchema
|
|
@@ -2688,91 +3287,13 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
2688
3287
|
responseHeaders,
|
|
2689
3288
|
value: response,
|
|
2690
3289
|
rawValue: rawResponse
|
|
2691
|
-
} = await (0,
|
|
3290
|
+
} = await (0, import_provider_utils24.postJsonToApi)({
|
|
2692
3291
|
url,
|
|
2693
|
-
headers: (0,
|
|
3292
|
+
headers: (0, import_provider_utils24.combineHeaders)(this.config.headers(), options.headers),
|
|
2694
3293
|
body,
|
|
2695
3294
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
2696
|
-
successfulResponseHandler: (0,
|
|
2697
|
-
|
|
2698
|
-
id: import_v416.z.string(),
|
|
2699
|
-
created_at: import_v416.z.number(),
|
|
2700
|
-
error: import_v416.z.object({
|
|
2701
|
-
code: import_v416.z.string(),
|
|
2702
|
-
message: import_v416.z.string()
|
|
2703
|
-
}).nullish(),
|
|
2704
|
-
model: import_v416.z.string(),
|
|
2705
|
-
output: import_v416.z.array(
|
|
2706
|
-
import_v416.z.discriminatedUnion("type", [
|
|
2707
|
-
import_v416.z.object({
|
|
2708
|
-
type: import_v416.z.literal("message"),
|
|
2709
|
-
role: import_v416.z.literal("assistant"),
|
|
2710
|
-
id: import_v416.z.string(),
|
|
2711
|
-
content: import_v416.z.array(
|
|
2712
|
-
import_v416.z.object({
|
|
2713
|
-
type: import_v416.z.literal("output_text"),
|
|
2714
|
-
text: import_v416.z.string(),
|
|
2715
|
-
logprobs: LOGPROBS_SCHEMA.nullish(),
|
|
2716
|
-
annotations: import_v416.z.array(
|
|
2717
|
-
import_v416.z.discriminatedUnion("type", [
|
|
2718
|
-
import_v416.z.object({
|
|
2719
|
-
type: import_v416.z.literal("url_citation"),
|
|
2720
|
-
start_index: import_v416.z.number(),
|
|
2721
|
-
end_index: import_v416.z.number(),
|
|
2722
|
-
url: import_v416.z.string(),
|
|
2723
|
-
title: import_v416.z.string()
|
|
2724
|
-
}),
|
|
2725
|
-
import_v416.z.object({
|
|
2726
|
-
type: import_v416.z.literal("file_citation"),
|
|
2727
|
-
file_id: import_v416.z.string(),
|
|
2728
|
-
filename: import_v416.z.string().nullish(),
|
|
2729
|
-
index: import_v416.z.number().nullish(),
|
|
2730
|
-
start_index: import_v416.z.number().nullish(),
|
|
2731
|
-
end_index: import_v416.z.number().nullish(),
|
|
2732
|
-
quote: import_v416.z.string().nullish()
|
|
2733
|
-
}),
|
|
2734
|
-
import_v416.z.object({
|
|
2735
|
-
type: import_v416.z.literal("container_file_citation")
|
|
2736
|
-
})
|
|
2737
|
-
])
|
|
2738
|
-
)
|
|
2739
|
-
})
|
|
2740
|
-
)
|
|
2741
|
-
}),
|
|
2742
|
-
webSearchCallItem,
|
|
2743
|
-
fileSearchCallItem,
|
|
2744
|
-
codeInterpreterCallItem,
|
|
2745
|
-
imageGenerationCallItem,
|
|
2746
|
-
localShellCallItem,
|
|
2747
|
-
import_v416.z.object({
|
|
2748
|
-
type: import_v416.z.literal("function_call"),
|
|
2749
|
-
call_id: import_v416.z.string(),
|
|
2750
|
-
name: import_v416.z.string(),
|
|
2751
|
-
arguments: import_v416.z.string(),
|
|
2752
|
-
id: import_v416.z.string()
|
|
2753
|
-
}),
|
|
2754
|
-
import_v416.z.object({
|
|
2755
|
-
type: import_v416.z.literal("computer_call"),
|
|
2756
|
-
id: import_v416.z.string(),
|
|
2757
|
-
status: import_v416.z.string().optional()
|
|
2758
|
-
}),
|
|
2759
|
-
import_v416.z.object({
|
|
2760
|
-
type: import_v416.z.literal("reasoning"),
|
|
2761
|
-
id: import_v416.z.string(),
|
|
2762
|
-
encrypted_content: import_v416.z.string().nullish(),
|
|
2763
|
-
summary: import_v416.z.array(
|
|
2764
|
-
import_v416.z.object({
|
|
2765
|
-
type: import_v416.z.literal("summary_text"),
|
|
2766
|
-
text: import_v416.z.string()
|
|
2767
|
-
})
|
|
2768
|
-
)
|
|
2769
|
-
})
|
|
2770
|
-
])
|
|
2771
|
-
),
|
|
2772
|
-
service_tier: import_v416.z.string().nullish(),
|
|
2773
|
-
incomplete_details: import_v416.z.object({ reason: import_v416.z.string() }).nullish(),
|
|
2774
|
-
usage: usageSchema2
|
|
2775
|
-
})
|
|
3295
|
+
successfulResponseHandler: (0, import_provider_utils24.createJsonResponseHandler)(
|
|
3296
|
+
openaiResponsesResponseSchema
|
|
2776
3297
|
),
|
|
2777
3298
|
abortSignal: options.abortSignal,
|
|
2778
3299
|
fetch: this.config.fetch
|
|
@@ -2835,7 +3356,9 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
2835
3356
|
type: "tool-call",
|
|
2836
3357
|
toolCallId: part.call_id,
|
|
2837
3358
|
toolName: "local_shell",
|
|
2838
|
-
input: JSON.stringify({
|
|
3359
|
+
input: JSON.stringify({
|
|
3360
|
+
action: part.action
|
|
3361
|
+
}),
|
|
2839
3362
|
providerMetadata: {
|
|
2840
3363
|
openai: {
|
|
2841
3364
|
itemId: part.id
|
|
@@ -2863,7 +3386,7 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
2863
3386
|
content.push({
|
|
2864
3387
|
type: "source",
|
|
2865
3388
|
sourceType: "url",
|
|
2866
|
-
id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : (0,
|
|
3389
|
+
id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : (0, import_provider_utils24.generateId)(),
|
|
2867
3390
|
url: annotation.url,
|
|
2868
3391
|
title: annotation.title
|
|
2869
3392
|
});
|
|
@@ -2871,7 +3394,7 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
2871
3394
|
content.push({
|
|
2872
3395
|
type: "source",
|
|
2873
3396
|
sourceType: "document",
|
|
2874
|
-
id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0,
|
|
3397
|
+
id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0, import_provider_utils24.generateId)(),
|
|
2875
3398
|
mediaType: "text/plain",
|
|
2876
3399
|
title: (_k = (_j = annotation.quote) != null ? _j : annotation.filename) != null ? _k : "Document",
|
|
2877
3400
|
filename: (_l = annotation.filename) != null ? _l : annotation.file_id
|
|
@@ -3023,18 +3546,18 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
3023
3546
|
warnings,
|
|
3024
3547
|
webSearchToolName
|
|
3025
3548
|
} = await this.getArgs(options);
|
|
3026
|
-
const { responseHeaders, value: response } = await (0,
|
|
3549
|
+
const { responseHeaders, value: response } = await (0, import_provider_utils24.postJsonToApi)({
|
|
3027
3550
|
url: this.config.url({
|
|
3028
3551
|
path: "/responses",
|
|
3029
3552
|
modelId: this.modelId
|
|
3030
3553
|
}),
|
|
3031
|
-
headers: (0,
|
|
3554
|
+
headers: (0, import_provider_utils24.combineHeaders)(this.config.headers(), options.headers),
|
|
3032
3555
|
body: {
|
|
3033
3556
|
...body,
|
|
3034
3557
|
stream: true
|
|
3035
3558
|
},
|
|
3036
3559
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
3037
|
-
successfulResponseHandler: (0,
|
|
3560
|
+
successfulResponseHandler: (0, import_provider_utils24.createEventSourceResponseHandler)(
|
|
3038
3561
|
openaiResponsesChunkSchema
|
|
3039
3562
|
),
|
|
3040
3563
|
abortSignal: options.abortSignal,
|
|
@@ -3422,7 +3945,7 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
3422
3945
|
controller.enqueue({
|
|
3423
3946
|
type: "source",
|
|
3424
3947
|
sourceType: "url",
|
|
3425
|
-
id: (_q = (_p = (_o = self.config).generateId) == null ? void 0 : _p.call(_o)) != null ? _q : (0,
|
|
3948
|
+
id: (_q = (_p = (_o = self.config).generateId) == null ? void 0 : _p.call(_o)) != null ? _q : (0, import_provider_utils24.generateId)(),
|
|
3426
3949
|
url: value.annotation.url,
|
|
3427
3950
|
title: value.annotation.title
|
|
3428
3951
|
});
|
|
@@ -3430,7 +3953,7 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
3430
3953
|
controller.enqueue({
|
|
3431
3954
|
type: "source",
|
|
3432
3955
|
sourceType: "document",
|
|
3433
|
-
id: (_t = (_s = (_r = self.config).generateId) == null ? void 0 : _s.call(_r)) != null ? _t : (0,
|
|
3956
|
+
id: (_t = (_s = (_r = self.config).generateId) == null ? void 0 : _s.call(_r)) != null ? _t : (0, import_provider_utils24.generateId)(),
|
|
3434
3957
|
mediaType: "text/plain",
|
|
3435
3958
|
title: (_v = (_u = value.annotation.quote) != null ? _u : value.annotation.filename) != null ? _v : "Document",
|
|
3436
3959
|
filename: (_w = value.annotation.filename) != null ? _w : value.annotation.file_id
|
|
@@ -3466,203 +3989,6 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
3466
3989
|
};
|
|
3467
3990
|
}
|
|
3468
3991
|
};
|
|
3469
|
-
var usageSchema2 = import_v416.z.object({
|
|
3470
|
-
input_tokens: import_v416.z.number(),
|
|
3471
|
-
input_tokens_details: import_v416.z.object({ cached_tokens: import_v416.z.number().nullish() }).nullish(),
|
|
3472
|
-
output_tokens: import_v416.z.number(),
|
|
3473
|
-
output_tokens_details: import_v416.z.object({ reasoning_tokens: import_v416.z.number().nullish() }).nullish()
|
|
3474
|
-
});
|
|
3475
|
-
var textDeltaChunkSchema = import_v416.z.object({
|
|
3476
|
-
type: import_v416.z.literal("response.output_text.delta"),
|
|
3477
|
-
item_id: import_v416.z.string(),
|
|
3478
|
-
delta: import_v416.z.string(),
|
|
3479
|
-
logprobs: LOGPROBS_SCHEMA.nullish()
|
|
3480
|
-
});
|
|
3481
|
-
var errorChunkSchema = import_v416.z.object({
|
|
3482
|
-
type: import_v416.z.literal("error"),
|
|
3483
|
-
code: import_v416.z.string(),
|
|
3484
|
-
message: import_v416.z.string(),
|
|
3485
|
-
param: import_v416.z.string().nullish(),
|
|
3486
|
-
sequence_number: import_v416.z.number()
|
|
3487
|
-
});
|
|
3488
|
-
var responseFinishedChunkSchema = import_v416.z.object({
|
|
3489
|
-
type: import_v416.z.enum(["response.completed", "response.incomplete"]),
|
|
3490
|
-
response: import_v416.z.object({
|
|
3491
|
-
incomplete_details: import_v416.z.object({ reason: import_v416.z.string() }).nullish(),
|
|
3492
|
-
usage: usageSchema2,
|
|
3493
|
-
service_tier: import_v416.z.string().nullish()
|
|
3494
|
-
})
|
|
3495
|
-
});
|
|
3496
|
-
var responseCreatedChunkSchema = import_v416.z.object({
|
|
3497
|
-
type: import_v416.z.literal("response.created"),
|
|
3498
|
-
response: import_v416.z.object({
|
|
3499
|
-
id: import_v416.z.string(),
|
|
3500
|
-
created_at: import_v416.z.number(),
|
|
3501
|
-
model: import_v416.z.string(),
|
|
3502
|
-
service_tier: import_v416.z.string().nullish()
|
|
3503
|
-
})
|
|
3504
|
-
});
|
|
3505
|
-
var responseOutputItemAddedSchema = import_v416.z.object({
|
|
3506
|
-
type: import_v416.z.literal("response.output_item.added"),
|
|
3507
|
-
output_index: import_v416.z.number(),
|
|
3508
|
-
item: import_v416.z.discriminatedUnion("type", [
|
|
3509
|
-
import_v416.z.object({
|
|
3510
|
-
type: import_v416.z.literal("message"),
|
|
3511
|
-
id: import_v416.z.string()
|
|
3512
|
-
}),
|
|
3513
|
-
import_v416.z.object({
|
|
3514
|
-
type: import_v416.z.literal("reasoning"),
|
|
3515
|
-
id: import_v416.z.string(),
|
|
3516
|
-
encrypted_content: import_v416.z.string().nullish()
|
|
3517
|
-
}),
|
|
3518
|
-
import_v416.z.object({
|
|
3519
|
-
type: import_v416.z.literal("function_call"),
|
|
3520
|
-
id: import_v416.z.string(),
|
|
3521
|
-
call_id: import_v416.z.string(),
|
|
3522
|
-
name: import_v416.z.string(),
|
|
3523
|
-
arguments: import_v416.z.string()
|
|
3524
|
-
}),
|
|
3525
|
-
import_v416.z.object({
|
|
3526
|
-
type: import_v416.z.literal("web_search_call"),
|
|
3527
|
-
id: import_v416.z.string(),
|
|
3528
|
-
status: import_v416.z.string(),
|
|
3529
|
-
action: import_v416.z.object({
|
|
3530
|
-
type: import_v416.z.literal("search"),
|
|
3531
|
-
query: import_v416.z.string().optional()
|
|
3532
|
-
}).nullish()
|
|
3533
|
-
}),
|
|
3534
|
-
import_v416.z.object({
|
|
3535
|
-
type: import_v416.z.literal("computer_call"),
|
|
3536
|
-
id: import_v416.z.string(),
|
|
3537
|
-
status: import_v416.z.string()
|
|
3538
|
-
}),
|
|
3539
|
-
import_v416.z.object({
|
|
3540
|
-
type: import_v416.z.literal("file_search_call"),
|
|
3541
|
-
id: import_v416.z.string()
|
|
3542
|
-
}),
|
|
3543
|
-
import_v416.z.object({
|
|
3544
|
-
type: import_v416.z.literal("image_generation_call"),
|
|
3545
|
-
id: import_v416.z.string()
|
|
3546
|
-
}),
|
|
3547
|
-
import_v416.z.object({
|
|
3548
|
-
type: import_v416.z.literal("code_interpreter_call"),
|
|
3549
|
-
id: import_v416.z.string(),
|
|
3550
|
-
container_id: import_v416.z.string(),
|
|
3551
|
-
code: import_v416.z.string().nullable(),
|
|
3552
|
-
outputs: import_v416.z.array(
|
|
3553
|
-
import_v416.z.discriminatedUnion("type", [
|
|
3554
|
-
import_v416.z.object({ type: import_v416.z.literal("logs"), logs: import_v416.z.string() }),
|
|
3555
|
-
import_v416.z.object({ type: import_v416.z.literal("image"), url: import_v416.z.string() })
|
|
3556
|
-
])
|
|
3557
|
-
).nullable(),
|
|
3558
|
-
status: import_v416.z.string()
|
|
3559
|
-
})
|
|
3560
|
-
])
|
|
3561
|
-
});
|
|
3562
|
-
var responseOutputItemDoneSchema = import_v416.z.object({
|
|
3563
|
-
type: import_v416.z.literal("response.output_item.done"),
|
|
3564
|
-
output_index: import_v416.z.number(),
|
|
3565
|
-
item: import_v416.z.discriminatedUnion("type", [
|
|
3566
|
-
import_v416.z.object({
|
|
3567
|
-
type: import_v416.z.literal("message"),
|
|
3568
|
-
id: import_v416.z.string()
|
|
3569
|
-
}),
|
|
3570
|
-
import_v416.z.object({
|
|
3571
|
-
type: import_v416.z.literal("reasoning"),
|
|
3572
|
-
id: import_v416.z.string(),
|
|
3573
|
-
encrypted_content: import_v416.z.string().nullish()
|
|
3574
|
-
}),
|
|
3575
|
-
import_v416.z.object({
|
|
3576
|
-
type: import_v416.z.literal("function_call"),
|
|
3577
|
-
id: import_v416.z.string(),
|
|
3578
|
-
call_id: import_v416.z.string(),
|
|
3579
|
-
name: import_v416.z.string(),
|
|
3580
|
-
arguments: import_v416.z.string(),
|
|
3581
|
-
status: import_v416.z.literal("completed")
|
|
3582
|
-
}),
|
|
3583
|
-
codeInterpreterCallItem,
|
|
3584
|
-
imageGenerationCallItem,
|
|
3585
|
-
webSearchCallItem,
|
|
3586
|
-
fileSearchCallItem,
|
|
3587
|
-
localShellCallItem,
|
|
3588
|
-
import_v416.z.object({
|
|
3589
|
-
type: import_v416.z.literal("computer_call"),
|
|
3590
|
-
id: import_v416.z.string(),
|
|
3591
|
-
status: import_v416.z.literal("completed")
|
|
3592
|
-
})
|
|
3593
|
-
])
|
|
3594
|
-
});
|
|
3595
|
-
var responseFunctionCallArgumentsDeltaSchema = import_v416.z.object({
|
|
3596
|
-
type: import_v416.z.literal("response.function_call_arguments.delta"),
|
|
3597
|
-
item_id: import_v416.z.string(),
|
|
3598
|
-
output_index: import_v416.z.number(),
|
|
3599
|
-
delta: import_v416.z.string()
|
|
3600
|
-
});
|
|
3601
|
-
var responseImageGenerationCallPartialImageSchema = import_v416.z.object({
|
|
3602
|
-
type: import_v416.z.literal("response.image_generation_call.partial_image"),
|
|
3603
|
-
item_id: import_v416.z.string(),
|
|
3604
|
-
output_index: import_v416.z.number(),
|
|
3605
|
-
partial_image_b64: import_v416.z.string()
|
|
3606
|
-
});
|
|
3607
|
-
var responseCodeInterpreterCallCodeDeltaSchema = import_v416.z.object({
|
|
3608
|
-
type: import_v416.z.literal("response.code_interpreter_call_code.delta"),
|
|
3609
|
-
item_id: import_v416.z.string(),
|
|
3610
|
-
output_index: import_v416.z.number(),
|
|
3611
|
-
delta: import_v416.z.string()
|
|
3612
|
-
});
|
|
3613
|
-
var responseCodeInterpreterCallCodeDoneSchema = import_v416.z.object({
|
|
3614
|
-
type: import_v416.z.literal("response.code_interpreter_call_code.done"),
|
|
3615
|
-
item_id: import_v416.z.string(),
|
|
3616
|
-
output_index: import_v416.z.number(),
|
|
3617
|
-
code: import_v416.z.string()
|
|
3618
|
-
});
|
|
3619
|
-
var responseAnnotationAddedSchema = import_v416.z.object({
|
|
3620
|
-
type: import_v416.z.literal("response.output_text.annotation.added"),
|
|
3621
|
-
annotation: import_v416.z.discriminatedUnion("type", [
|
|
3622
|
-
import_v416.z.object({
|
|
3623
|
-
type: import_v416.z.literal("url_citation"),
|
|
3624
|
-
url: import_v416.z.string(),
|
|
3625
|
-
title: import_v416.z.string()
|
|
3626
|
-
}),
|
|
3627
|
-
import_v416.z.object({
|
|
3628
|
-
type: import_v416.z.literal("file_citation"),
|
|
3629
|
-
file_id: import_v416.z.string(),
|
|
3630
|
-
filename: import_v416.z.string().nullish(),
|
|
3631
|
-
index: import_v416.z.number().nullish(),
|
|
3632
|
-
start_index: import_v416.z.number().nullish(),
|
|
3633
|
-
end_index: import_v416.z.number().nullish(),
|
|
3634
|
-
quote: import_v416.z.string().nullish()
|
|
3635
|
-
})
|
|
3636
|
-
])
|
|
3637
|
-
});
|
|
3638
|
-
var responseReasoningSummaryPartAddedSchema = import_v416.z.object({
|
|
3639
|
-
type: import_v416.z.literal("response.reasoning_summary_part.added"),
|
|
3640
|
-
item_id: import_v416.z.string(),
|
|
3641
|
-
summary_index: import_v416.z.number()
|
|
3642
|
-
});
|
|
3643
|
-
var responseReasoningSummaryTextDeltaSchema = import_v416.z.object({
|
|
3644
|
-
type: import_v416.z.literal("response.reasoning_summary_text.delta"),
|
|
3645
|
-
item_id: import_v416.z.string(),
|
|
3646
|
-
summary_index: import_v416.z.number(),
|
|
3647
|
-
delta: import_v416.z.string()
|
|
3648
|
-
});
|
|
3649
|
-
var openaiResponsesChunkSchema = import_v416.z.union([
|
|
3650
|
-
textDeltaChunkSchema,
|
|
3651
|
-
responseFinishedChunkSchema,
|
|
3652
|
-
responseCreatedChunkSchema,
|
|
3653
|
-
responseOutputItemAddedSchema,
|
|
3654
|
-
responseOutputItemDoneSchema,
|
|
3655
|
-
responseFunctionCallArgumentsDeltaSchema,
|
|
3656
|
-
responseImageGenerationCallPartialImageSchema,
|
|
3657
|
-
responseCodeInterpreterCallCodeDeltaSchema,
|
|
3658
|
-
responseCodeInterpreterCallCodeDoneSchema,
|
|
3659
|
-
responseAnnotationAddedSchema,
|
|
3660
|
-
responseReasoningSummaryPartAddedSchema,
|
|
3661
|
-
responseReasoningSummaryTextDeltaSchema,
|
|
3662
|
-
errorChunkSchema,
|
|
3663
|
-
import_v416.z.object({ type: import_v416.z.string() }).loose()
|
|
3664
|
-
// fallback for unknown chunks
|
|
3665
|
-
]);
|
|
3666
3992
|
function isTextDeltaChunk(chunk) {
|
|
3667
3993
|
return chunk.type === "response.output_text.delta";
|
|
3668
3994
|
}
|
|
@@ -3742,55 +4068,23 @@ function getResponsesModelConfig(modelId) {
|
|
|
3742
4068
|
isReasoningModel: false
|
|
3743
4069
|
};
|
|
3744
4070
|
}
|
|
3745
|
-
var openaiResponsesProviderOptionsSchema = import_v416.z.object({
|
|
3746
|
-
include: import_v416.z.array(
|
|
3747
|
-
import_v416.z.enum([
|
|
3748
|
-
"reasoning.encrypted_content",
|
|
3749
|
-
"file_search_call.results",
|
|
3750
|
-
"message.output_text.logprobs"
|
|
3751
|
-
])
|
|
3752
|
-
).nullish(),
|
|
3753
|
-
instructions: import_v416.z.string().nullish(),
|
|
3754
|
-
/**
|
|
3755
|
-
* Return the log probabilities of the tokens.
|
|
3756
|
-
*
|
|
3757
|
-
* Setting to true will return the log probabilities of the tokens that
|
|
3758
|
-
* were generated.
|
|
3759
|
-
*
|
|
3760
|
-
* Setting to a number will return the log probabilities of the top n
|
|
3761
|
-
* tokens that were generated.
|
|
3762
|
-
*
|
|
3763
|
-
* @see https://platform.openai.com/docs/api-reference/responses/create
|
|
3764
|
-
* @see https://cookbook.openai.com/examples/using_logprobs
|
|
3765
|
-
*/
|
|
3766
|
-
logprobs: import_v416.z.union([import_v416.z.boolean(), import_v416.z.number().min(1).max(TOP_LOGPROBS_MAX)]).optional(),
|
|
3767
|
-
/**
|
|
3768
|
-
* The maximum number of total calls to built-in tools that can be processed in a response.
|
|
3769
|
-
* This maximum number applies across all built-in tool calls, not per individual tool.
|
|
3770
|
-
* Any further attempts to call a tool by the model will be ignored.
|
|
3771
|
-
*/
|
|
3772
|
-
maxToolCalls: import_v416.z.number().nullish(),
|
|
3773
|
-
metadata: import_v416.z.any().nullish(),
|
|
3774
|
-
parallelToolCalls: import_v416.z.boolean().nullish(),
|
|
3775
|
-
previousResponseId: import_v416.z.string().nullish(),
|
|
3776
|
-
promptCacheKey: import_v416.z.string().nullish(),
|
|
3777
|
-
reasoningEffort: import_v416.z.string().nullish(),
|
|
3778
|
-
reasoningSummary: import_v416.z.string().nullish(),
|
|
3779
|
-
safetyIdentifier: import_v416.z.string().nullish(),
|
|
3780
|
-
serviceTier: import_v416.z.enum(["auto", "flex", "priority"]).nullish(),
|
|
3781
|
-
store: import_v416.z.boolean().nullish(),
|
|
3782
|
-
strictJsonSchema: import_v416.z.boolean().nullish(),
|
|
3783
|
-
textVerbosity: import_v416.z.enum(["low", "medium", "high"]).nullish(),
|
|
3784
|
-
user: import_v416.z.string().nullish()
|
|
3785
|
-
});
|
|
3786
4071
|
|
|
3787
4072
|
// src/speech/openai-speech-model.ts
|
|
3788
|
-
var
|
|
3789
|
-
|
|
3790
|
-
|
|
3791
|
-
|
|
3792
|
-
|
|
3793
|
-
|
|
4073
|
+
var import_provider_utils26 = require("@ai-sdk/provider-utils");
|
|
4074
|
+
|
|
4075
|
+
// src/speech/openai-speech-options.ts
|
|
4076
|
+
var import_provider_utils25 = require("@ai-sdk/provider-utils");
|
|
4077
|
+
var z18 = __toESM(require("zod/v4"));
|
|
4078
|
+
var openaiSpeechProviderOptionsSchema = (0, import_provider_utils25.lazyValidator)(
|
|
4079
|
+
() => (0, import_provider_utils25.zodSchema)(
|
|
4080
|
+
z18.object({
|
|
4081
|
+
instructions: z18.string().nullish(),
|
|
4082
|
+
speed: z18.number().min(0.25).max(4).default(1).nullish()
|
|
4083
|
+
})
|
|
4084
|
+
)
|
|
4085
|
+
);
|
|
4086
|
+
|
|
4087
|
+
// src/speech/openai-speech-model.ts
|
|
3794
4088
|
var OpenAISpeechModel = class {
|
|
3795
4089
|
constructor(modelId, config) {
|
|
3796
4090
|
this.modelId = modelId;
|
|
@@ -3810,10 +4104,10 @@ var OpenAISpeechModel = class {
|
|
|
3810
4104
|
providerOptions
|
|
3811
4105
|
}) {
|
|
3812
4106
|
const warnings = [];
|
|
3813
|
-
const openAIOptions = await (0,
|
|
4107
|
+
const openAIOptions = await (0, import_provider_utils26.parseProviderOptions)({
|
|
3814
4108
|
provider: "openai",
|
|
3815
4109
|
providerOptions,
|
|
3816
|
-
schema:
|
|
4110
|
+
schema: openaiSpeechProviderOptionsSchema
|
|
3817
4111
|
});
|
|
3818
4112
|
const requestBody = {
|
|
3819
4113
|
model: this.modelId,
|
|
@@ -3863,15 +4157,15 @@ var OpenAISpeechModel = class {
|
|
|
3863
4157
|
value: audio,
|
|
3864
4158
|
responseHeaders,
|
|
3865
4159
|
rawValue: rawResponse
|
|
3866
|
-
} = await (0,
|
|
4160
|
+
} = await (0, import_provider_utils26.postJsonToApi)({
|
|
3867
4161
|
url: this.config.url({
|
|
3868
4162
|
path: "/audio/speech",
|
|
3869
4163
|
modelId: this.modelId
|
|
3870
4164
|
}),
|
|
3871
|
-
headers: (0,
|
|
4165
|
+
headers: (0, import_provider_utils26.combineHeaders)(this.config.headers(), options.headers),
|
|
3872
4166
|
body: requestBody,
|
|
3873
4167
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
3874
|
-
successfulResponseHandler: (0,
|
|
4168
|
+
successfulResponseHandler: (0, import_provider_utils26.createBinaryResponseHandler)(),
|
|
3875
4169
|
abortSignal: options.abortSignal,
|
|
3876
4170
|
fetch: this.config.fetch
|
|
3877
4171
|
});
|
|
@@ -3892,35 +4186,73 @@ var OpenAISpeechModel = class {
|
|
|
3892
4186
|
};
|
|
3893
4187
|
|
|
3894
4188
|
// src/transcription/openai-transcription-model.ts
|
|
3895
|
-
var
|
|
3896
|
-
|
|
4189
|
+
var import_provider_utils29 = require("@ai-sdk/provider-utils");
|
|
4190
|
+
|
|
4191
|
+
// src/transcription/openai-transcription-api.ts
|
|
4192
|
+
var import_provider_utils27 = require("@ai-sdk/provider-utils");
|
|
4193
|
+
var z19 = __toESM(require("zod/v4"));
|
|
4194
|
+
var openaiTranscriptionResponseSchema = (0, import_provider_utils27.lazyValidator)(
|
|
4195
|
+
() => (0, import_provider_utils27.zodSchema)(
|
|
4196
|
+
z19.object({
|
|
4197
|
+
text: z19.string(),
|
|
4198
|
+
language: z19.string().nullish(),
|
|
4199
|
+
duration: z19.number().nullish(),
|
|
4200
|
+
words: z19.array(
|
|
4201
|
+
z19.object({
|
|
4202
|
+
word: z19.string(),
|
|
4203
|
+
start: z19.number(),
|
|
4204
|
+
end: z19.number()
|
|
4205
|
+
})
|
|
4206
|
+
).nullish(),
|
|
4207
|
+
segments: z19.array(
|
|
4208
|
+
z19.object({
|
|
4209
|
+
id: z19.number(),
|
|
4210
|
+
seek: z19.number(),
|
|
4211
|
+
start: z19.number(),
|
|
4212
|
+
end: z19.number(),
|
|
4213
|
+
text: z19.string(),
|
|
4214
|
+
tokens: z19.array(z19.number()),
|
|
4215
|
+
temperature: z19.number(),
|
|
4216
|
+
avg_logprob: z19.number(),
|
|
4217
|
+
compression_ratio: z19.number(),
|
|
4218
|
+
no_speech_prob: z19.number()
|
|
4219
|
+
})
|
|
4220
|
+
).nullish()
|
|
4221
|
+
})
|
|
4222
|
+
)
|
|
4223
|
+
);
|
|
3897
4224
|
|
|
3898
4225
|
// src/transcription/openai-transcription-options.ts
|
|
3899
|
-
var
|
|
3900
|
-
var
|
|
3901
|
-
|
|
3902
|
-
|
|
3903
|
-
|
|
3904
|
-
|
|
3905
|
-
|
|
3906
|
-
|
|
3907
|
-
|
|
3908
|
-
|
|
3909
|
-
|
|
3910
|
-
|
|
3911
|
-
|
|
3912
|
-
|
|
3913
|
-
|
|
3914
|
-
|
|
3915
|
-
|
|
3916
|
-
|
|
3917
|
-
|
|
3918
|
-
|
|
3919
|
-
|
|
3920
|
-
|
|
3921
|
-
|
|
3922
|
-
|
|
3923
|
-
|
|
4226
|
+
var import_provider_utils28 = require("@ai-sdk/provider-utils");
|
|
4227
|
+
var z20 = __toESM(require("zod/v4"));
|
|
4228
|
+
var openAITranscriptionProviderOptions = (0, import_provider_utils28.lazyValidator)(
|
|
4229
|
+
() => (0, import_provider_utils28.zodSchema)(
|
|
4230
|
+
z20.object({
|
|
4231
|
+
/**
|
|
4232
|
+
* Additional information to include in the transcription response.
|
|
4233
|
+
*/
|
|
4234
|
+
include: z20.array(z20.string()).optional(),
|
|
4235
|
+
/**
|
|
4236
|
+
* The language of the input audio in ISO-639-1 format.
|
|
4237
|
+
*/
|
|
4238
|
+
language: z20.string().optional(),
|
|
4239
|
+
/**
|
|
4240
|
+
* An optional text to guide the model's style or continue a previous audio segment.
|
|
4241
|
+
*/
|
|
4242
|
+
prompt: z20.string().optional(),
|
|
4243
|
+
/**
|
|
4244
|
+
* The sampling temperature, between 0 and 1.
|
|
4245
|
+
* @default 0
|
|
4246
|
+
*/
|
|
4247
|
+
temperature: z20.number().min(0).max(1).default(0).optional(),
|
|
4248
|
+
/**
|
|
4249
|
+
* The timestamp granularities to populate for this transcription.
|
|
4250
|
+
* @default ['segment']
|
|
4251
|
+
*/
|
|
4252
|
+
timestampGranularities: z20.array(z20.enum(["word", "segment"])).default(["segment"]).optional()
|
|
4253
|
+
})
|
|
4254
|
+
)
|
|
4255
|
+
);
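For orientation, `openAITranscriptionProviderOptions` validates what callers place under `providerOptions.openai` when transcribing. A minimal sketch of that shape in plain zod v4 follows; the parsed options object is hypothetical and only demonstrates the accepted keys:

```ts
// Mirror of the provider-options shape above in plain zod v4; the example
// object passed to parse() is a placeholder, not an SDK default.
import { z } from "zod/v4";

const openaiTranscriptionOptions = z.object({
  include: z.array(z.string()).optional(),
  language: z.string().optional(), // ISO-639-1, e.g. "en"
  prompt: z.string().optional(),
  temperature: z.number().min(0).max(1).default(0).optional(),
  timestampGranularities: z
    .array(z.enum(["word", "segment"]))
    .default(["segment"])
    .optional(),
});

const parsed = openaiTranscriptionOptions.parse({
  language: "en",
  temperature: 0,
  timestampGranularities: ["word"],
});

console.log(parsed.timestampGranularities); // ["word"]
```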
|
|
3924
4256
|
|
|
3925
4257
|
// src/transcription/openai-transcription-model.ts
|
|
3926
4258
|
var languageMap = {
|
|
@@ -3997,15 +4329,15 @@ var OpenAITranscriptionModel = class {
|
|
|
3997
4329
|
providerOptions
|
|
3998
4330
|
}) {
|
|
3999
4331
|
const warnings = [];
|
|
4000
|
-
const openAIOptions = await (0,
|
|
4332
|
+
const openAIOptions = await (0, import_provider_utils29.parseProviderOptions)({
|
|
4001
4333
|
provider: "openai",
|
|
4002
4334
|
providerOptions,
|
|
4003
4335
|
schema: openAITranscriptionProviderOptions
|
|
4004
4336
|
});
|
|
4005
4337
|
const formData = new FormData();
|
|
4006
|
-
const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([(0,
|
|
4338
|
+
const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([(0, import_provider_utils29.convertBase64ToUint8Array)(audio)]);
|
|
4007
4339
|
formData.append("model", this.modelId);
|
|
4008
|
-
const fileExtension = (0,
|
|
4340
|
+
const fileExtension = (0, import_provider_utils29.mediaTypeToExtension)(mediaType);
|
|
4009
4341
|
formData.append(
|
|
4010
4342
|
"file",
|
|
4011
4343
|
new File([blob], "audio", { type: mediaType }),
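The transcription request body assembled here is multipart form data rather than JSON. A small illustration of the same construction, assuming a runtime with Web `FormData`/`Blob`/`File` globals (Node 20+ or browsers); the audio bytes and model id below are placeholders:

```ts
// Placeholder audio bytes and model id, only to show the multipart layout.
const audio = new Uint8Array([0x52, 0x49, 0x46, 0x46]); // not real audio data
const mediaType = "audio/wav";

const formData = new FormData();
formData.append("model", "whisper-1");
formData.append(
  "file",
  new File([new Blob([audio])], "audio", { type: mediaType }),
);
// The form is then POSTed to {baseURL}/audio/transcriptions with the
// provider headers, as shown in the postFormDataToApi call below.
```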
|
|
@@ -4050,15 +4382,15 @@ var OpenAITranscriptionModel = class {
|
|
|
4050
4382
|
value: response,
|
|
4051
4383
|
responseHeaders,
|
|
4052
4384
|
rawValue: rawResponse
|
|
4053
|
-
} = await (0,
|
|
4385
|
+
} = await (0, import_provider_utils29.postFormDataToApi)({
|
|
4054
4386
|
url: this.config.url({
|
|
4055
4387
|
path: "/audio/transcriptions",
|
|
4056
4388
|
modelId: this.modelId
|
|
4057
4389
|
}),
|
|
4058
|
-
headers: (0,
|
|
4390
|
+
headers: (0, import_provider_utils29.combineHeaders)(this.config.headers(), options.headers),
|
|
4059
4391
|
formData,
|
|
4060
4392
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
4061
|
-
successfulResponseHandler: (0,
|
|
4393
|
+
successfulResponseHandler: (0, import_provider_utils29.createJsonResponseHandler)(
|
|
4062
4394
|
openaiTranscriptionResponseSchema
|
|
4063
4395
|
),
|
|
4064
4396
|
abortSignal: options.abortSignal,
|
|
@@ -4088,49 +4420,23 @@ var OpenAITranscriptionModel = class {
|
|
|
4088
4420
|
};
|
|
4089
4421
|
}
|
|
4090
4422
|
};
|
|
4091
|
-
var openaiTranscriptionResponseSchema = import_v419.z.object({
|
|
4092
|
-
text: import_v419.z.string(),
|
|
4093
|
-
language: import_v419.z.string().nullish(),
|
|
4094
|
-
duration: import_v419.z.number().nullish(),
|
|
4095
|
-
words: import_v419.z.array(
|
|
4096
|
-
import_v419.z.object({
|
|
4097
|
-
word: import_v419.z.string(),
|
|
4098
|
-
start: import_v419.z.number(),
|
|
4099
|
-
end: import_v419.z.number()
|
|
4100
|
-
})
|
|
4101
|
-
).nullish(),
|
|
4102
|
-
segments: import_v419.z.array(
|
|
4103
|
-
import_v419.z.object({
|
|
4104
|
-
id: import_v419.z.number(),
|
|
4105
|
-
seek: import_v419.z.number(),
|
|
4106
|
-
start: import_v419.z.number(),
|
|
4107
|
-
end: import_v419.z.number(),
|
|
4108
|
-
text: import_v419.z.string(),
|
|
4109
|
-
tokens: import_v419.z.array(import_v419.z.number()),
|
|
4110
|
-
temperature: import_v419.z.number(),
|
|
4111
|
-
avg_logprob: import_v419.z.number(),
|
|
4112
|
-
compression_ratio: import_v419.z.number(),
|
|
4113
|
-
no_speech_prob: import_v419.z.number()
|
|
4114
|
-
})
|
|
4115
|
-
).nullish()
|
|
4116
|
-
});
|
|
4117
4423
|
|
|
4118
4424
|
// src/version.ts
|
|
4119
|
-
var VERSION = true ? "3.0.0-beta.
|
|
4425
|
+
var VERSION = true ? "3.0.0-beta.18" : "0.0.0-test";
|
|
4120
4426
|
|
|
4121
4427
|
// src/openai-provider.ts
|
|
4122
4428
|
function createOpenAI(options = {}) {
|
|
4123
4429
|
var _a, _b;
|
|
4124
|
-
const baseURL = (_a = (0,
|
|
4125
|
-
(0,
|
|
4430
|
+
const baseURL = (_a = (0, import_provider_utils30.withoutTrailingSlash)(
|
|
4431
|
+
(0, import_provider_utils30.loadOptionalSetting)({
|
|
4126
4432
|
settingValue: options.baseURL,
|
|
4127
4433
|
environmentVariableName: "OPENAI_BASE_URL"
|
|
4128
4434
|
})
|
|
4129
4435
|
)) != null ? _a : "https://api.openai.com/v1";
|
|
4130
4436
|
const providerName = (_b = options.name) != null ? _b : "openai";
|
|
4131
|
-
const getHeaders = () => (0,
|
|
4437
|
+
const getHeaders = () => (0, import_provider_utils30.withUserAgentSuffix)(
|
|
4132
4438
|
{
|
|
4133
|
-
Authorization: `Bearer ${(0,
|
|
4439
|
+
Authorization: `Bearer ${(0, import_provider_utils30.loadApiKey)({
|
|
4134
4440
|
apiKey: options.apiKey,
|
|
4135
4441
|
environmentVariableName: "OPENAI_API_KEY",
|
|
4136
4442
|
description: "OpenAI"
|