@ai-sdk/openai 2.0.44 → 2.0.45
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +10 -0
- package/dist/index.d.mts +38 -65
- package/dist/index.d.ts +38 -65
- package/dist/index.js +1339 -1025
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +1293 -934
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +101 -182
- package/dist/internal/index.d.ts +101 -182
- package/dist/internal/index.js +1336 -1020
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +1305 -945
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +2 -2
package/dist/index.js
CHANGED
|
@@ -1,7 +1,9 @@
|
|
|
1
1
|
"use strict";
|
|
2
|
+
var __create = Object.create;
|
|
2
3
|
var __defProp = Object.defineProperty;
|
|
3
4
|
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
5
|
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
6
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
5
7
|
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
6
8
|
var __export = (target, all) => {
|
|
7
9
|
for (var name in all)
|
|
@@ -15,6 +17,14 @@ var __copyProps = (to, from, except, desc) => {
|
|
|
15
17
|
}
|
|
16
18
|
return to;
|
|
17
19
|
};
|
|
20
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
|
21
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
|
22
|
+
// file that has been converted to a CommonJS file using a Babel-
|
|
23
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
|
24
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
|
25
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
|
26
|
+
mod
|
|
27
|
+
));
|
|
18
28
|
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
29
|
|
|
20
30
|
// src/index.ts
|
|
@@ -27,25 +37,24 @@ __export(src_exports, {
|
|
|
27
37
|
module.exports = __toCommonJS(src_exports);
|
|
28
38
|
|
|
29
39
|
// src/openai-provider.ts
|
|
30
|
-
var
|
|
40
|
+
var import_provider_utils30 = require("@ai-sdk/provider-utils");
|
|
31
41
|
|
|
32
42
|
// src/chat/openai-chat-language-model.ts
|
|
33
43
|
var import_provider3 = require("@ai-sdk/provider");
|
|
34
|
-
var
|
|
35
|
-
var import_v43 = require("zod/v4");
|
|
44
|
+
var import_provider_utils5 = require("@ai-sdk/provider-utils");
|
|
36
45
|
|
|
37
46
|
// src/openai-error.ts
|
|
38
|
-
var
|
|
47
|
+
var z = __toESM(require("zod/v4"));
|
|
39
48
|
var import_provider_utils = require("@ai-sdk/provider-utils");
|
|
40
|
-
var openaiErrorDataSchema =
|
|
41
|
-
error:
|
|
42
|
-
message:
|
|
49
|
+
var openaiErrorDataSchema = z.object({
|
|
50
|
+
error: z.object({
|
|
51
|
+
message: z.string(),
|
|
43
52
|
// The additional information below is handled loosely to support
|
|
44
53
|
// OpenAI-compatible providers that have slightly different error
|
|
45
54
|
// responses:
|
|
46
|
-
type:
|
|
47
|
-
param:
|
|
48
|
-
code:
|
|
55
|
+
type: z.string().nullish(),
|
|
56
|
+
param: z.any().nullish(),
|
|
57
|
+
code: z.union([z.string(), z.number()]).nullish()
|
|
49
58
|
})
|
|
50
59
|
});
|
|
51
60
|
var openaiFailedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)({
|
|
@@ -261,95 +270,238 @@ function mapOpenAIFinishReason(finishReason) {
|
|
|
261
270
|
}
|
|
262
271
|
}
|
|
263
272
|
|
|
273
|
+
// src/chat/openai-chat-api.ts
|
|
274
|
+
var import_provider_utils3 = require("@ai-sdk/provider-utils");
|
|
275
|
+
var z2 = __toESM(require("zod/v4"));
|
|
276
|
+
var openaiChatResponseSchema = (0, import_provider_utils3.lazyValidator)(
|
|
277
|
+
() => (0, import_provider_utils3.zodSchema)(
|
|
278
|
+
z2.object({
|
|
279
|
+
id: z2.string().nullish(),
|
|
280
|
+
created: z2.number().nullish(),
|
|
281
|
+
model: z2.string().nullish(),
|
|
282
|
+
choices: z2.array(
|
|
283
|
+
z2.object({
|
|
284
|
+
message: z2.object({
|
|
285
|
+
role: z2.literal("assistant").nullish(),
|
|
286
|
+
content: z2.string().nullish(),
|
|
287
|
+
tool_calls: z2.array(
|
|
288
|
+
z2.object({
|
|
289
|
+
id: z2.string().nullish(),
|
|
290
|
+
type: z2.literal("function"),
|
|
291
|
+
function: z2.object({
|
|
292
|
+
name: z2.string(),
|
|
293
|
+
arguments: z2.string()
|
|
294
|
+
})
|
|
295
|
+
})
|
|
296
|
+
).nullish(),
|
|
297
|
+
annotations: z2.array(
|
|
298
|
+
z2.object({
|
|
299
|
+
type: z2.literal("url_citation"),
|
|
300
|
+
start_index: z2.number(),
|
|
301
|
+
end_index: z2.number(),
|
|
302
|
+
url: z2.string(),
|
|
303
|
+
title: z2.string()
|
|
304
|
+
})
|
|
305
|
+
).nullish()
|
|
306
|
+
}),
|
|
307
|
+
index: z2.number(),
|
|
308
|
+
logprobs: z2.object({
|
|
309
|
+
content: z2.array(
|
|
310
|
+
z2.object({
|
|
311
|
+
token: z2.string(),
|
|
312
|
+
logprob: z2.number(),
|
|
313
|
+
top_logprobs: z2.array(
|
|
314
|
+
z2.object({
|
|
315
|
+
token: z2.string(),
|
|
316
|
+
logprob: z2.number()
|
|
317
|
+
})
|
|
318
|
+
)
|
|
319
|
+
})
|
|
320
|
+
).nullish()
|
|
321
|
+
}).nullish(),
|
|
322
|
+
finish_reason: z2.string().nullish()
|
|
323
|
+
})
|
|
324
|
+
),
|
|
325
|
+
usage: z2.object({
|
|
326
|
+
prompt_tokens: z2.number().nullish(),
|
|
327
|
+
completion_tokens: z2.number().nullish(),
|
|
328
|
+
total_tokens: z2.number().nullish(),
|
|
329
|
+
prompt_tokens_details: z2.object({
|
|
330
|
+
cached_tokens: z2.number().nullish()
|
|
331
|
+
}).nullish(),
|
|
332
|
+
completion_tokens_details: z2.object({
|
|
333
|
+
reasoning_tokens: z2.number().nullish(),
|
|
334
|
+
accepted_prediction_tokens: z2.number().nullish(),
|
|
335
|
+
rejected_prediction_tokens: z2.number().nullish()
|
|
336
|
+
}).nullish()
|
|
337
|
+
}).nullish()
|
|
338
|
+
})
|
|
339
|
+
)
|
|
340
|
+
);
|
|
341
|
+
var openaiChatChunkSchema = (0, import_provider_utils3.lazyValidator)(
|
|
342
|
+
() => (0, import_provider_utils3.zodSchema)(
|
|
343
|
+
z2.union([
|
|
344
|
+
z2.object({
|
|
345
|
+
id: z2.string().nullish(),
|
|
346
|
+
created: z2.number().nullish(),
|
|
347
|
+
model: z2.string().nullish(),
|
|
348
|
+
choices: z2.array(
|
|
349
|
+
z2.object({
|
|
350
|
+
delta: z2.object({
|
|
351
|
+
role: z2.enum(["assistant"]).nullish(),
|
|
352
|
+
content: z2.string().nullish(),
|
|
353
|
+
tool_calls: z2.array(
|
|
354
|
+
z2.object({
|
|
355
|
+
index: z2.number(),
|
|
356
|
+
id: z2.string().nullish(),
|
|
357
|
+
type: z2.literal("function").nullish(),
|
|
358
|
+
function: z2.object({
|
|
359
|
+
name: z2.string().nullish(),
|
|
360
|
+
arguments: z2.string().nullish()
|
|
361
|
+
})
|
|
362
|
+
})
|
|
363
|
+
).nullish(),
|
|
364
|
+
annotations: z2.array(
|
|
365
|
+
z2.object({
|
|
366
|
+
type: z2.literal("url_citation"),
|
|
367
|
+
start_index: z2.number(),
|
|
368
|
+
end_index: z2.number(),
|
|
369
|
+
url: z2.string(),
|
|
370
|
+
title: z2.string()
|
|
371
|
+
})
|
|
372
|
+
).nullish()
|
|
373
|
+
}).nullish(),
|
|
374
|
+
logprobs: z2.object({
|
|
375
|
+
content: z2.array(
|
|
376
|
+
z2.object({
|
|
377
|
+
token: z2.string(),
|
|
378
|
+
logprob: z2.number(),
|
|
379
|
+
top_logprobs: z2.array(
|
|
380
|
+
z2.object({
|
|
381
|
+
token: z2.string(),
|
|
382
|
+
logprob: z2.number()
|
|
383
|
+
})
|
|
384
|
+
)
|
|
385
|
+
})
|
|
386
|
+
).nullish()
|
|
387
|
+
}).nullish(),
|
|
388
|
+
finish_reason: z2.string().nullish(),
|
|
389
|
+
index: z2.number()
|
|
390
|
+
})
|
|
391
|
+
),
|
|
392
|
+
usage: z2.object({
|
|
393
|
+
prompt_tokens: z2.number().nullish(),
|
|
394
|
+
completion_tokens: z2.number().nullish(),
|
|
395
|
+
total_tokens: z2.number().nullish(),
|
|
396
|
+
prompt_tokens_details: z2.object({
|
|
397
|
+
cached_tokens: z2.number().nullish()
|
|
398
|
+
}).nullish(),
|
|
399
|
+
completion_tokens_details: z2.object({
|
|
400
|
+
reasoning_tokens: z2.number().nullish(),
|
|
401
|
+
accepted_prediction_tokens: z2.number().nullish(),
|
|
402
|
+
rejected_prediction_tokens: z2.number().nullish()
|
|
403
|
+
}).nullish()
|
|
404
|
+
}).nullish()
|
|
405
|
+
}),
|
|
406
|
+
openaiErrorDataSchema
|
|
407
|
+
])
|
|
408
|
+
)
|
|
409
|
+
);
|
|
410
|
+
|
|
264
411
|
// src/chat/openai-chat-options.ts
|
|
265
|
-
var
|
|
266
|
-
var
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
|
|
281
|
-
|
|
282
|
-
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
|
|
290
|
-
|
|
291
|
-
|
|
292
|
-
|
|
293
|
-
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
|
|
297
|
-
|
|
298
|
-
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
|
|
324
|
-
|
|
325
|
-
|
|
326
|
-
|
|
327
|
-
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
|
|
335
|
-
|
|
336
|
-
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
|
|
347
|
-
|
|
348
|
-
|
|
349
|
-
|
|
350
|
-
|
|
351
|
-
|
|
352
|
-
|
|
412
|
+
var import_provider_utils4 = require("@ai-sdk/provider-utils");
|
|
413
|
+
var z3 = __toESM(require("zod/v4"));
|
|
414
|
+
var openaiChatLanguageModelOptions = (0, import_provider_utils4.lazyValidator)(
|
|
415
|
+
() => (0, import_provider_utils4.zodSchema)(
|
|
416
|
+
z3.object({
|
|
417
|
+
/**
|
|
418
|
+
* Modify the likelihood of specified tokens appearing in the completion.
|
|
419
|
+
*
|
|
420
|
+
* Accepts a JSON object that maps tokens (specified by their token ID in
|
|
421
|
+
* the GPT tokenizer) to an associated bias value from -100 to 100.
|
|
422
|
+
*/
|
|
423
|
+
logitBias: z3.record(z3.coerce.number(), z3.number()).optional(),
|
|
424
|
+
/**
|
|
425
|
+
* Return the log probabilities of the tokens.
|
|
426
|
+
*
|
|
427
|
+
* Setting to true will return the log probabilities of the tokens that
|
|
428
|
+
* were generated.
|
|
429
|
+
*
|
|
430
|
+
* Setting to a number will return the log probabilities of the top n
|
|
431
|
+
* tokens that were generated.
|
|
432
|
+
*/
|
|
433
|
+
logprobs: z3.union([z3.boolean(), z3.number()]).optional(),
|
|
434
|
+
/**
|
|
435
|
+
* Whether to enable parallel function calling during tool use. Default to true.
|
|
436
|
+
*/
|
|
437
|
+
parallelToolCalls: z3.boolean().optional(),
|
|
438
|
+
/**
|
|
439
|
+
* A unique identifier representing your end-user, which can help OpenAI to
|
|
440
|
+
* monitor and detect abuse.
|
|
441
|
+
*/
|
|
442
|
+
user: z3.string().optional(),
|
|
443
|
+
/**
|
|
444
|
+
* Reasoning effort for reasoning models. Defaults to `medium`.
|
|
445
|
+
*/
|
|
446
|
+
reasoningEffort: z3.enum(["minimal", "low", "medium", "high"]).optional(),
|
|
447
|
+
/**
|
|
448
|
+
* Maximum number of completion tokens to generate. Useful for reasoning models.
|
|
449
|
+
*/
|
|
450
|
+
maxCompletionTokens: z3.number().optional(),
|
|
451
|
+
/**
|
|
452
|
+
* Whether to enable persistence in responses API.
|
|
453
|
+
*/
|
|
454
|
+
store: z3.boolean().optional(),
|
|
455
|
+
/**
|
|
456
|
+
* Metadata to associate with the request.
|
|
457
|
+
*/
|
|
458
|
+
metadata: z3.record(z3.string().max(64), z3.string().max(512)).optional(),
|
|
459
|
+
/**
|
|
460
|
+
* Parameters for prediction mode.
|
|
461
|
+
*/
|
|
462
|
+
prediction: z3.record(z3.string(), z3.any()).optional(),
|
|
463
|
+
/**
|
|
464
|
+
* Whether to use structured outputs.
|
|
465
|
+
*
|
|
466
|
+
* @default true
|
|
467
|
+
*/
|
|
468
|
+
structuredOutputs: z3.boolean().optional(),
|
|
469
|
+
/**
|
|
470
|
+
* Service tier for the request.
|
|
471
|
+
* - 'auto': Default service tier
|
|
472
|
+
* - 'flex': 50% cheaper processing at the cost of increased latency. Only available for o3 and o4-mini models.
|
|
473
|
+
* - 'priority': Higher-speed processing with predictably low latency at premium cost. Available for Enterprise customers.
|
|
474
|
+
*
|
|
475
|
+
* @default 'auto'
|
|
476
|
+
*/
|
|
477
|
+
serviceTier: z3.enum(["auto", "flex", "priority"]).optional(),
|
|
478
|
+
/**
|
|
479
|
+
* Whether to use strict JSON schema validation.
|
|
480
|
+
*
|
|
481
|
+
* @default false
|
|
482
|
+
*/
|
|
483
|
+
strictJsonSchema: z3.boolean().optional(),
|
|
484
|
+
/**
|
|
485
|
+
* Controls the verbosity of the model's responses.
|
|
486
|
+
* Lower values will result in more concise responses, while higher values will result in more verbose responses.
|
|
487
|
+
*/
|
|
488
|
+
textVerbosity: z3.enum(["low", "medium", "high"]).optional(),
|
|
489
|
+
/**
|
|
490
|
+
* A cache key for prompt caching. Allows manual control over prompt caching behavior.
|
|
491
|
+
* Useful for improving cache hit rates and working around automatic caching issues.
|
|
492
|
+
*/
|
|
493
|
+
promptCacheKey: z3.string().optional(),
|
|
494
|
+
/**
|
|
495
|
+
* A stable identifier used to help detect users of your application
|
|
496
|
+
* that may be violating OpenAI's usage policies. The IDs should be a
|
|
497
|
+
* string that uniquely identifies each user. We recommend hashing their
|
|
498
|
+
* username or email address, in order to avoid sending us any identifying
|
|
499
|
+
* information.
|
|
500
|
+
*/
|
|
501
|
+
safetyIdentifier: z3.string().optional()
|
|
502
|
+
})
|
|
503
|
+
)
|
|
504
|
+
);
|
|
353
505
|
|
|
354
506
|
// src/chat/openai-chat-prepare-tools.ts
|
|
355
507
|
var import_provider2 = require("@ai-sdk/provider");
|
|
@@ -442,7 +594,7 @@ var OpenAIChatLanguageModel = class {
|
|
|
442
594
|
}) {
|
|
443
595
|
var _a, _b, _c, _d;
|
|
444
596
|
const warnings = [];
|
|
445
|
-
const openaiOptions = (_a = await (0,
|
|
597
|
+
const openaiOptions = (_a = await (0, import_provider_utils5.parseProviderOptions)({
|
|
446
598
|
provider: "openai",
|
|
447
599
|
providerOptions,
|
|
448
600
|
schema: openaiChatLanguageModelOptions
|
|
@@ -621,15 +773,15 @@ var OpenAIChatLanguageModel = class {
|
|
|
621
773
|
responseHeaders,
|
|
622
774
|
value: response,
|
|
623
775
|
rawValue: rawResponse
|
|
624
|
-
} = await (0,
|
|
776
|
+
} = await (0, import_provider_utils5.postJsonToApi)({
|
|
625
777
|
url: this.config.url({
|
|
626
778
|
path: "/chat/completions",
|
|
627
779
|
modelId: this.modelId
|
|
628
780
|
}),
|
|
629
|
-
headers: (0,
|
|
781
|
+
headers: (0, import_provider_utils5.combineHeaders)(this.config.headers(), options.headers),
|
|
630
782
|
body,
|
|
631
783
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
632
|
-
successfulResponseHandler: (0,
|
|
784
|
+
successfulResponseHandler: (0, import_provider_utils5.createJsonResponseHandler)(
|
|
633
785
|
openaiChatResponseSchema
|
|
634
786
|
),
|
|
635
787
|
abortSignal: options.abortSignal,
|
|
@@ -644,7 +796,7 @@ var OpenAIChatLanguageModel = class {
|
|
|
644
796
|
for (const toolCall of (_a = choice.message.tool_calls) != null ? _a : []) {
|
|
645
797
|
content.push({
|
|
646
798
|
type: "tool-call",
|
|
647
|
-
toolCallId: (_b = toolCall.id) != null ? _b : (0,
|
|
799
|
+
toolCallId: (_b = toolCall.id) != null ? _b : (0, import_provider_utils5.generateId)(),
|
|
648
800
|
toolName: toolCall.function.name,
|
|
649
801
|
input: toolCall.function.arguments
|
|
650
802
|
});
|
|
@@ -653,7 +805,7 @@ var OpenAIChatLanguageModel = class {
|
|
|
653
805
|
content.push({
|
|
654
806
|
type: "source",
|
|
655
807
|
sourceType: "url",
|
|
656
|
-
id: (0,
|
|
808
|
+
id: (0, import_provider_utils5.generateId)(),
|
|
657
809
|
url: annotation.url,
|
|
658
810
|
title: annotation.title
|
|
659
811
|
});
|
|
@@ -699,15 +851,15 @@ var OpenAIChatLanguageModel = class {
|
|
|
699
851
|
include_usage: true
|
|
700
852
|
}
|
|
701
853
|
};
|
|
702
|
-
const { responseHeaders, value: response } = await (0,
|
|
854
|
+
const { responseHeaders, value: response } = await (0, import_provider_utils5.postJsonToApi)({
|
|
703
855
|
url: this.config.url({
|
|
704
856
|
path: "/chat/completions",
|
|
705
857
|
modelId: this.modelId
|
|
706
858
|
}),
|
|
707
|
-
headers: (0,
|
|
859
|
+
headers: (0, import_provider_utils5.combineHeaders)(this.config.headers(), options.headers),
|
|
708
860
|
body,
|
|
709
861
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
710
|
-
successfulResponseHandler: (0,
|
|
862
|
+
successfulResponseHandler: (0, import_provider_utils5.createEventSourceResponseHandler)(
|
|
711
863
|
openaiChatChunkSchema
|
|
712
864
|
),
|
|
713
865
|
abortSignal: options.abortSignal,
|
|
@@ -832,14 +984,14 @@ var OpenAIChatLanguageModel = class {
|
|
|
832
984
|
delta: toolCall2.function.arguments
|
|
833
985
|
});
|
|
834
986
|
}
|
|
835
|
-
if ((0,
|
|
987
|
+
if ((0, import_provider_utils5.isParsableJson)(toolCall2.function.arguments)) {
|
|
836
988
|
controller.enqueue({
|
|
837
989
|
type: "tool-input-end",
|
|
838
990
|
id: toolCall2.id
|
|
839
991
|
});
|
|
840
992
|
controller.enqueue({
|
|
841
993
|
type: "tool-call",
|
|
842
|
-
toolCallId: (_q = toolCall2.id) != null ? _q : (0,
|
|
994
|
+
toolCallId: (_q = toolCall2.id) != null ? _q : (0, import_provider_utils5.generateId)(),
|
|
843
995
|
toolName: toolCall2.function.name,
|
|
844
996
|
input: toolCall2.function.arguments
|
|
845
997
|
});
|
|
@@ -860,14 +1012,14 @@ var OpenAIChatLanguageModel = class {
|
|
|
860
1012
|
id: toolCall.id,
|
|
861
1013
|
delta: (_u = toolCallDelta.function.arguments) != null ? _u : ""
|
|
862
1014
|
});
|
|
863
|
-
if (((_v = toolCall.function) == null ? void 0 : _v.name) != null && ((_w = toolCall.function) == null ? void 0 : _w.arguments) != null && (0,
|
|
1015
|
+
if (((_v = toolCall.function) == null ? void 0 : _v.name) != null && ((_w = toolCall.function) == null ? void 0 : _w.arguments) != null && (0, import_provider_utils5.isParsableJson)(toolCall.function.arguments)) {
|
|
864
1016
|
controller.enqueue({
|
|
865
1017
|
type: "tool-input-end",
|
|
866
1018
|
id: toolCall.id
|
|
867
1019
|
});
|
|
868
1020
|
controller.enqueue({
|
|
869
1021
|
type: "tool-call",
|
|
870
|
-
toolCallId: (_x = toolCall.id) != null ? _x : (0,
|
|
1022
|
+
toolCallId: (_x = toolCall.id) != null ? _x : (0, import_provider_utils5.generateId)(),
|
|
871
1023
|
toolName: toolCall.function.name,
|
|
872
1024
|
input: toolCall.function.arguments
|
|
873
1025
|
});
|
|
@@ -880,7 +1032,7 @@ var OpenAIChatLanguageModel = class {
|
|
|
880
1032
|
controller.enqueue({
|
|
881
1033
|
type: "source",
|
|
882
1034
|
sourceType: "url",
|
|
883
|
-
id: (0,
|
|
1035
|
+
id: (0, import_provider_utils5.generateId)(),
|
|
884
1036
|
url: annotation.url,
|
|
885
1037
|
title: annotation.title
|
|
886
1038
|
});
|
|
@@ -905,121 +1057,6 @@ var OpenAIChatLanguageModel = class {
|
|
|
905
1057
|
};
|
|
906
1058
|
}
|
|
907
1059
|
};
|
|
908
|
-
var openaiTokenUsageSchema = import_v43.z.object({
|
|
909
|
-
prompt_tokens: import_v43.z.number().nullish(),
|
|
910
|
-
completion_tokens: import_v43.z.number().nullish(),
|
|
911
|
-
total_tokens: import_v43.z.number().nullish(),
|
|
912
|
-
prompt_tokens_details: import_v43.z.object({
|
|
913
|
-
cached_tokens: import_v43.z.number().nullish()
|
|
914
|
-
}).nullish(),
|
|
915
|
-
completion_tokens_details: import_v43.z.object({
|
|
916
|
-
reasoning_tokens: import_v43.z.number().nullish(),
|
|
917
|
-
accepted_prediction_tokens: import_v43.z.number().nullish(),
|
|
918
|
-
rejected_prediction_tokens: import_v43.z.number().nullish()
|
|
919
|
-
}).nullish()
|
|
920
|
-
}).nullish();
|
|
921
|
-
var openaiChatResponseSchema = import_v43.z.object({
|
|
922
|
-
id: import_v43.z.string().nullish(),
|
|
923
|
-
created: import_v43.z.number().nullish(),
|
|
924
|
-
model: import_v43.z.string().nullish(),
|
|
925
|
-
choices: import_v43.z.array(
|
|
926
|
-
import_v43.z.object({
|
|
927
|
-
message: import_v43.z.object({
|
|
928
|
-
role: import_v43.z.literal("assistant").nullish(),
|
|
929
|
-
content: import_v43.z.string().nullish(),
|
|
930
|
-
tool_calls: import_v43.z.array(
|
|
931
|
-
import_v43.z.object({
|
|
932
|
-
id: import_v43.z.string().nullish(),
|
|
933
|
-
type: import_v43.z.literal("function"),
|
|
934
|
-
function: import_v43.z.object({
|
|
935
|
-
name: import_v43.z.string(),
|
|
936
|
-
arguments: import_v43.z.string()
|
|
937
|
-
})
|
|
938
|
-
})
|
|
939
|
-
).nullish(),
|
|
940
|
-
annotations: import_v43.z.array(
|
|
941
|
-
import_v43.z.object({
|
|
942
|
-
type: import_v43.z.literal("url_citation"),
|
|
943
|
-
start_index: import_v43.z.number(),
|
|
944
|
-
end_index: import_v43.z.number(),
|
|
945
|
-
url: import_v43.z.string(),
|
|
946
|
-
title: import_v43.z.string()
|
|
947
|
-
})
|
|
948
|
-
).nullish()
|
|
949
|
-
}),
|
|
950
|
-
index: import_v43.z.number(),
|
|
951
|
-
logprobs: import_v43.z.object({
|
|
952
|
-
content: import_v43.z.array(
|
|
953
|
-
import_v43.z.object({
|
|
954
|
-
token: import_v43.z.string(),
|
|
955
|
-
logprob: import_v43.z.number(),
|
|
956
|
-
top_logprobs: import_v43.z.array(
|
|
957
|
-
import_v43.z.object({
|
|
958
|
-
token: import_v43.z.string(),
|
|
959
|
-
logprob: import_v43.z.number()
|
|
960
|
-
})
|
|
961
|
-
)
|
|
962
|
-
})
|
|
963
|
-
).nullish()
|
|
964
|
-
}).nullish(),
|
|
965
|
-
finish_reason: import_v43.z.string().nullish()
|
|
966
|
-
})
|
|
967
|
-
),
|
|
968
|
-
usage: openaiTokenUsageSchema
|
|
969
|
-
});
|
|
970
|
-
var openaiChatChunkSchema = import_v43.z.union([
|
|
971
|
-
import_v43.z.object({
|
|
972
|
-
id: import_v43.z.string().nullish(),
|
|
973
|
-
created: import_v43.z.number().nullish(),
|
|
974
|
-
model: import_v43.z.string().nullish(),
|
|
975
|
-
choices: import_v43.z.array(
|
|
976
|
-
import_v43.z.object({
|
|
977
|
-
delta: import_v43.z.object({
|
|
978
|
-
role: import_v43.z.enum(["assistant"]).nullish(),
|
|
979
|
-
content: import_v43.z.string().nullish(),
|
|
980
|
-
tool_calls: import_v43.z.array(
|
|
981
|
-
import_v43.z.object({
|
|
982
|
-
index: import_v43.z.number(),
|
|
983
|
-
id: import_v43.z.string().nullish(),
|
|
984
|
-
type: import_v43.z.literal("function").nullish(),
|
|
985
|
-
function: import_v43.z.object({
|
|
986
|
-
name: import_v43.z.string().nullish(),
|
|
987
|
-
arguments: import_v43.z.string().nullish()
|
|
988
|
-
})
|
|
989
|
-
})
|
|
990
|
-
).nullish(),
|
|
991
|
-
annotations: import_v43.z.array(
|
|
992
|
-
import_v43.z.object({
|
|
993
|
-
type: import_v43.z.literal("url_citation"),
|
|
994
|
-
start_index: import_v43.z.number(),
|
|
995
|
-
end_index: import_v43.z.number(),
|
|
996
|
-
url: import_v43.z.string(),
|
|
997
|
-
title: import_v43.z.string()
|
|
998
|
-
})
|
|
999
|
-
).nullish()
|
|
1000
|
-
}).nullish(),
|
|
1001
|
-
logprobs: import_v43.z.object({
|
|
1002
|
-
content: import_v43.z.array(
|
|
1003
|
-
import_v43.z.object({
|
|
1004
|
-
token: import_v43.z.string(),
|
|
1005
|
-
logprob: import_v43.z.number(),
|
|
1006
|
-
top_logprobs: import_v43.z.array(
|
|
1007
|
-
import_v43.z.object({
|
|
1008
|
-
token: import_v43.z.string(),
|
|
1009
|
-
logprob: import_v43.z.number()
|
|
1010
|
-
})
|
|
1011
|
-
)
|
|
1012
|
-
})
|
|
1013
|
-
).nullish()
|
|
1014
|
-
}).nullish(),
|
|
1015
|
-
finish_reason: import_v43.z.string().nullish(),
|
|
1016
|
-
index: import_v43.z.number()
|
|
1017
|
-
})
|
|
1018
|
-
),
|
|
1019
|
-
usage: openaiTokenUsageSchema
|
|
1020
|
-
}),
|
|
1021
|
-
openaiErrorDataSchema
|
|
1022
|
-
]);
|
|
1023
1060
|
function isReasoningModel(modelId) {
|
|
1024
1061
|
return (modelId.startsWith("o") || modelId.startsWith("gpt-5")) && !modelId.startsWith("gpt-5-chat");
|
|
1025
1062
|
}
|
|
@@ -1070,8 +1107,7 @@ var reasoningModels = {
|
|
|
1070
1107
|
};
|
|
1071
1108
|
|
|
1072
1109
|
// src/completion/openai-completion-language-model.ts
|
|
1073
|
-
var
|
|
1074
|
-
var import_v45 = require("zod/v4");
|
|
1110
|
+
var import_provider_utils8 = require("@ai-sdk/provider-utils");
|
|
1075
1111
|
|
|
1076
1112
|
// src/completion/convert-to-openai-completion-prompt.ts
|
|
1077
1113
|
var import_provider4 = require("@ai-sdk/provider");
|
|
@@ -1178,48 +1214,111 @@ function mapOpenAIFinishReason2(finishReason) {
|
|
|
1178
1214
|
}
|
|
1179
1215
|
}
|
|
1180
1216
|
|
|
1217
|
+
// src/completion/openai-completion-api.ts
|
|
1218
|
+
var z4 = __toESM(require("zod/v4"));
|
|
1219
|
+
var import_provider_utils6 = require("@ai-sdk/provider-utils");
|
|
1220
|
+
var openaiCompletionResponseSchema = (0, import_provider_utils6.lazyValidator)(
|
|
1221
|
+
() => (0, import_provider_utils6.zodSchema)(
|
|
1222
|
+
z4.object({
|
|
1223
|
+
id: z4.string().nullish(),
|
|
1224
|
+
created: z4.number().nullish(),
|
|
1225
|
+
model: z4.string().nullish(),
|
|
1226
|
+
choices: z4.array(
|
|
1227
|
+
z4.object({
|
|
1228
|
+
text: z4.string(),
|
|
1229
|
+
finish_reason: z4.string(),
|
|
1230
|
+
logprobs: z4.object({
|
|
1231
|
+
tokens: z4.array(z4.string()),
|
|
1232
|
+
token_logprobs: z4.array(z4.number()),
|
|
1233
|
+
top_logprobs: z4.array(z4.record(z4.string(), z4.number())).nullish()
|
|
1234
|
+
}).nullish()
|
|
1235
|
+
})
|
|
1236
|
+
),
|
|
1237
|
+
usage: z4.object({
|
|
1238
|
+
prompt_tokens: z4.number(),
|
|
1239
|
+
completion_tokens: z4.number(),
|
|
1240
|
+
total_tokens: z4.number()
|
|
1241
|
+
}).nullish()
|
|
1242
|
+
})
|
|
1243
|
+
)
|
|
1244
|
+
);
|
|
1245
|
+
var openaiCompletionChunkSchema = (0, import_provider_utils6.lazyValidator)(
|
|
1246
|
+
() => (0, import_provider_utils6.zodSchema)(
|
|
1247
|
+
z4.union([
|
|
1248
|
+
z4.object({
|
|
1249
|
+
id: z4.string().nullish(),
|
|
1250
|
+
created: z4.number().nullish(),
|
|
1251
|
+
model: z4.string().nullish(),
|
|
1252
|
+
choices: z4.array(
|
|
1253
|
+
z4.object({
|
|
1254
|
+
text: z4.string(),
|
|
1255
|
+
finish_reason: z4.string().nullish(),
|
|
1256
|
+
index: z4.number(),
|
|
1257
|
+
logprobs: z4.object({
|
|
1258
|
+
tokens: z4.array(z4.string()),
|
|
1259
|
+
token_logprobs: z4.array(z4.number()),
|
|
1260
|
+
top_logprobs: z4.array(z4.record(z4.string(), z4.number())).nullish()
|
|
1261
|
+
}).nullish()
|
|
1262
|
+
})
|
|
1263
|
+
),
|
|
1264
|
+
usage: z4.object({
|
|
1265
|
+
prompt_tokens: z4.number(),
|
|
1266
|
+
completion_tokens: z4.number(),
|
|
1267
|
+
total_tokens: z4.number()
|
|
1268
|
+
}).nullish()
|
|
1269
|
+
}),
|
|
1270
|
+
openaiErrorDataSchema
|
|
1271
|
+
])
|
|
1272
|
+
)
|
|
1273
|
+
);
|
|
1274
|
+
|
|
1181
1275
|
// src/completion/openai-completion-options.ts
|
|
1182
|
-
var
|
|
1183
|
-
var
|
|
1184
|
-
|
|
1185
|
-
|
|
1186
|
-
|
|
1187
|
-
|
|
1188
|
-
|
|
1189
|
-
|
|
1190
|
-
|
|
1191
|
-
|
|
1192
|
-
|
|
1193
|
-
|
|
1194
|
-
|
|
1195
|
-
|
|
1196
|
-
|
|
1197
|
-
|
|
1198
|
-
|
|
1199
|
-
|
|
1200
|
-
|
|
1201
|
-
|
|
1202
|
-
|
|
1203
|
-
|
|
1204
|
-
|
|
1205
|
-
|
|
1206
|
-
|
|
1207
|
-
|
|
1208
|
-
|
|
1209
|
-
|
|
1210
|
-
|
|
1211
|
-
|
|
1212
|
-
|
|
1213
|
-
|
|
1214
|
-
|
|
1215
|
-
|
|
1216
|
-
|
|
1217
|
-
|
|
1218
|
-
|
|
1219
|
-
|
|
1220
|
-
|
|
1221
|
-
|
|
1222
|
-
|
|
1276
|
+
var import_provider_utils7 = require("@ai-sdk/provider-utils");
|
|
1277
|
+
var z5 = __toESM(require("zod/v4"));
|
|
1278
|
+
var openaiCompletionProviderOptions = (0, import_provider_utils7.lazyValidator)(
|
|
1279
|
+
() => (0, import_provider_utils7.zodSchema)(
|
|
1280
|
+
z5.object({
|
|
1281
|
+
/**
|
|
1282
|
+
Echo back the prompt in addition to the completion.
|
|
1283
|
+
*/
|
|
1284
|
+
echo: z5.boolean().optional(),
|
|
1285
|
+
/**
|
|
1286
|
+
Modify the likelihood of specified tokens appearing in the completion.
|
|
1287
|
+
|
|
1288
|
+
Accepts a JSON object that maps tokens (specified by their token ID in
|
|
1289
|
+
the GPT tokenizer) to an associated bias value from -100 to 100. You
|
|
1290
|
+
can use this tokenizer tool to convert text to token IDs. Mathematically,
|
|
1291
|
+
the bias is added to the logits generated by the model prior to sampling.
|
|
1292
|
+
The exact effect will vary per model, but values between -1 and 1 should
|
|
1293
|
+
decrease or increase likelihood of selection; values like -100 or 100
|
|
1294
|
+
should result in a ban or exclusive selection of the relevant token.
|
|
1295
|
+
|
|
1296
|
+
As an example, you can pass {"50256": -100} to prevent the <|endoftext|>
|
|
1297
|
+
token from being generated.
|
|
1298
|
+
*/
|
|
1299
|
+
logitBias: z5.record(z5.string(), z5.number()).optional(),
|
|
1300
|
+
/**
|
|
1301
|
+
The suffix that comes after a completion of inserted text.
|
|
1302
|
+
*/
|
|
1303
|
+
suffix: z5.string().optional(),
|
|
1304
|
+
/**
|
|
1305
|
+
A unique identifier representing your end-user, which can help OpenAI to
|
|
1306
|
+
monitor and detect abuse. Learn more.
|
|
1307
|
+
*/
|
|
1308
|
+
user: z5.string().optional(),
|
|
1309
|
+
/**
|
|
1310
|
+
Return the log probabilities of the tokens. Including logprobs will increase
|
|
1311
|
+
the response size and can slow down response times. However, it can
|
|
1312
|
+
be useful to better understand how the model is behaving.
|
|
1313
|
+
Setting to true will return the log probabilities of the tokens that
|
|
1314
|
+
were generated.
|
|
1315
|
+
Setting to a number will return the log probabilities of the top n
|
|
1316
|
+
tokens that were generated.
|
|
1317
|
+
*/
|
|
1318
|
+
logprobs: z5.union([z5.boolean(), z5.number()]).optional()
|
|
1319
|
+
})
|
|
1320
|
+
)
|
|
1321
|
+
);
|
|
1223
1322
|
|
|
1224
1323
|
// src/completion/openai-completion-language-model.ts
|
|
1225
1324
|
var OpenAICompletionLanguageModel = class {
|
|
@@ -1254,12 +1353,12 @@ var OpenAICompletionLanguageModel = class {
|
|
|
1254
1353
|
}) {
|
|
1255
1354
|
const warnings = [];
|
|
1256
1355
|
const openaiOptions = {
|
|
1257
|
-
...await (0,
|
|
1356
|
+
...await (0, import_provider_utils8.parseProviderOptions)({
|
|
1258
1357
|
provider: "openai",
|
|
1259
1358
|
providerOptions,
|
|
1260
1359
|
schema: openaiCompletionProviderOptions
|
|
1261
1360
|
}),
|
|
1262
|
-
...await (0,
|
|
1361
|
+
...await (0, import_provider_utils8.parseProviderOptions)({
|
|
1263
1362
|
provider: this.providerOptionsName,
|
|
1264
1363
|
providerOptions,
|
|
1265
1364
|
schema: openaiCompletionProviderOptions
|
|
@@ -1315,15 +1414,15 @@ var OpenAICompletionLanguageModel = class {
|
|
|
1315
1414
|
responseHeaders,
|
|
1316
1415
|
value: response,
|
|
1317
1416
|
rawValue: rawResponse
|
|
1318
|
-
} = await (0,
|
|
1417
|
+
} = await (0, import_provider_utils8.postJsonToApi)({
|
|
1319
1418
|
url: this.config.url({
|
|
1320
1419
|
path: "/completions",
|
|
1321
1420
|
modelId: this.modelId
|
|
1322
1421
|
}),
|
|
1323
|
-
headers: (0,
|
|
1422
|
+
headers: (0, import_provider_utils8.combineHeaders)(this.config.headers(), options.headers),
|
|
1324
1423
|
body: args,
|
|
1325
1424
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
1326
|
-
successfulResponseHandler: (0,
|
|
1425
|
+
successfulResponseHandler: (0, import_provider_utils8.createJsonResponseHandler)(
|
|
1327
1426
|
openaiCompletionResponseSchema
|
|
1328
1427
|
),
|
|
1329
1428
|
abortSignal: options.abortSignal,
|
|
@@ -1361,15 +1460,15 @@ var OpenAICompletionLanguageModel = class {
|
|
|
1361
1460
|
include_usage: true
|
|
1362
1461
|
}
|
|
1363
1462
|
};
|
|
1364
|
-
const { responseHeaders, value: response } = await (0,
|
|
1463
|
+
const { responseHeaders, value: response } = await (0, import_provider_utils8.postJsonToApi)({
|
|
1365
1464
|
url: this.config.url({
|
|
1366
1465
|
path: "/completions",
|
|
1367
1466
|
modelId: this.modelId
|
|
1368
1467
|
}),
|
|
1369
|
-
headers: (0,
|
|
1468
|
+
headers: (0, import_provider_utils8.combineHeaders)(this.config.headers(), options.headers),
|
|
1370
1469
|
body,
|
|
1371
1470
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
1372
|
-
successfulResponseHandler: (0,
|
|
1471
|
+
successfulResponseHandler: (0, import_provider_utils8.createEventSourceResponseHandler)(
|
|
1373
1472
|
openaiCompletionChunkSchema
|
|
1374
1473
|
),
|
|
1375
1474
|
abortSignal: options.abortSignal,
|
|
@@ -1450,69 +1549,42 @@ var OpenAICompletionLanguageModel = class {
|
|
|
1450
1549
|
};
|
|
1451
1550
|
}
|
|
1452
1551
|
};
|
|
1453
|
-
var usageSchema = import_v45.z.object({
|
|
1454
|
-
prompt_tokens: import_v45.z.number(),
|
|
1455
|
-
completion_tokens: import_v45.z.number(),
|
|
1456
|
-
total_tokens: import_v45.z.number()
|
|
1457
|
-
});
|
|
1458
|
-
var openaiCompletionResponseSchema = import_v45.z.object({
|
|
1459
|
-
id: import_v45.z.string().nullish(),
|
|
1460
|
-
created: import_v45.z.number().nullish(),
|
|
1461
|
-
model: import_v45.z.string().nullish(),
|
|
1462
|
-
choices: import_v45.z.array(
|
|
1463
|
-
import_v45.z.object({
|
|
1464
|
-
text: import_v45.z.string(),
|
|
1465
|
-
finish_reason: import_v45.z.string(),
|
|
1466
|
-
logprobs: import_v45.z.object({
|
|
1467
|
-
tokens: import_v45.z.array(import_v45.z.string()),
|
|
1468
|
-
token_logprobs: import_v45.z.array(import_v45.z.number()),
|
|
1469
|
-
top_logprobs: import_v45.z.array(import_v45.z.record(import_v45.z.string(), import_v45.z.number())).nullish()
|
|
1470
|
-
}).nullish()
|
|
1471
|
-
})
|
|
1472
|
-
),
|
|
1473
|
-
usage: usageSchema.nullish()
|
|
1474
|
-
});
|
|
1475
|
-
var openaiCompletionChunkSchema = import_v45.z.union([
|
|
1476
|
-
import_v45.z.object({
|
|
1477
|
-
id: import_v45.z.string().nullish(),
|
|
1478
|
-
created: import_v45.z.number().nullish(),
|
|
1479
|
-
model: import_v45.z.string().nullish(),
|
|
1480
|
-
choices: import_v45.z.array(
|
|
1481
|
-
import_v45.z.object({
|
|
1482
|
-
text: import_v45.z.string(),
|
|
1483
|
-
finish_reason: import_v45.z.string().nullish(),
|
|
1484
|
-
index: import_v45.z.number(),
|
|
1485
|
-
logprobs: import_v45.z.object({
|
|
1486
|
-
tokens: import_v45.z.array(import_v45.z.string()),
|
|
1487
|
-
token_logprobs: import_v45.z.array(import_v45.z.number()),
|
|
1488
|
-
top_logprobs: import_v45.z.array(import_v45.z.record(import_v45.z.string(), import_v45.z.number())).nullish()
|
|
1489
|
-
}).nullish()
|
|
1490
|
-
})
|
|
1491
|
-
),
|
|
1492
|
-
usage: usageSchema.nullish()
|
|
1493
|
-
}),
|
|
1494
|
-
openaiErrorDataSchema
|
|
1495
|
-
]);
|
|
1496
1552
|
|
|
1497
1553
|
// src/embedding/openai-embedding-model.ts
|
|
1498
1554
|
var import_provider5 = require("@ai-sdk/provider");
|
|
1499
|
-
var
|
|
1500
|
-
var import_v47 = require("zod/v4");
|
|
1555
|
+
var import_provider_utils11 = require("@ai-sdk/provider-utils");
|
|
1501
1556
|
|
|
1502
1557
|
// src/embedding/openai-embedding-options.ts
|
|
1503
|
-
var
|
|
1504
|
-
var
|
|
1505
|
-
|
|
1506
|
-
|
|
1507
|
-
|
|
1508
|
-
|
|
1509
|
-
|
|
1510
|
-
|
|
1511
|
-
|
|
1512
|
-
|
|
1513
|
-
|
|
1514
|
-
|
|
1515
|
-
|
|
1558
|
+
var import_provider_utils9 = require("@ai-sdk/provider-utils");
|
|
1559
|
+
var z6 = __toESM(require("zod/v4"));
|
|
1560
|
+
var openaiEmbeddingProviderOptions = (0, import_provider_utils9.lazyValidator)(
|
|
1561
|
+
() => (0, import_provider_utils9.zodSchema)(
|
|
1562
|
+
z6.object({
|
|
1563
|
+
/**
|
|
1564
|
+
The number of dimensions the resulting output embeddings should have.
|
|
1565
|
+
Only supported in text-embedding-3 and later models.
|
|
1566
|
+
*/
|
|
1567
|
+
dimensions: z6.number().optional(),
|
|
1568
|
+
/**
|
|
1569
|
+
A unique identifier representing your end-user, which can help OpenAI to
|
|
1570
|
+
monitor and detect abuse. Learn more.
|
|
1571
|
+
*/
|
|
1572
|
+
user: z6.string().optional()
|
|
1573
|
+
})
|
|
1574
|
+
)
|
|
1575
|
+
);
|
|
1576
|
+
|
|
1577
|
+
// src/embedding/openai-embedding-api.ts
|
|
1578
|
+
var import_provider_utils10 = require("@ai-sdk/provider-utils");
|
|
1579
|
+
var z7 = __toESM(require("zod/v4"));
|
|
1580
|
+
var openaiTextEmbeddingResponseSchema = (0, import_provider_utils10.lazyValidator)(
|
|
1581
|
+
() => (0, import_provider_utils10.zodSchema)(
|
|
1582
|
+
z7.object({
|
|
1583
|
+
data: z7.array(z7.object({ embedding: z7.array(z7.number()) })),
|
|
1584
|
+
usage: z7.object({ prompt_tokens: z7.number() }).nullish()
|
|
1585
|
+
})
|
|
1586
|
+
)
|
|
1587
|
+
);
|
|
1516
1588
|
|
|
1517
1589
|
// src/embedding/openai-embedding-model.ts
|
|
1518
1590
|
var OpenAIEmbeddingModel = class {
|
|
@@ -1541,7 +1613,7 @@ var OpenAIEmbeddingModel = class {
|
|
|
1541
1613
|
values
|
|
1542
1614
|
});
|
|
1543
1615
|
}
|
|
1544
|
-
const openaiOptions = (_a = await (0,
|
|
1616
|
+
const openaiOptions = (_a = await (0, import_provider_utils11.parseProviderOptions)({
|
|
1545
1617
|
provider: "openai",
|
|
1546
1618
|
providerOptions,
|
|
1547
1619
|
schema: openaiEmbeddingProviderOptions
|
|
@@ -1550,12 +1622,12 @@ var OpenAIEmbeddingModel = class {
|
|
|
1550
1622
|
responseHeaders,
|
|
1551
1623
|
value: response,
|
|
1552
1624
|
rawValue
|
|
1553
|
-
} = await (0,
|
|
1625
|
+
} = await (0, import_provider_utils11.postJsonToApi)({
|
|
1554
1626
|
url: this.config.url({
|
|
1555
1627
|
path: "/embeddings",
|
|
1556
1628
|
modelId: this.modelId
|
|
1557
1629
|
}),
|
|
1558
|
-
headers: (0,
|
|
1630
|
+
headers: (0, import_provider_utils11.combineHeaders)(this.config.headers(), headers),
|
|
1559
1631
|
body: {
|
|
1560
1632
|
model: this.modelId,
|
|
1561
1633
|
input: values,
|
|
@@ -1564,7 +1636,7 @@ var OpenAIEmbeddingModel = class {
|
|
|
1564
1636
|
user: openaiOptions.user
|
|
1565
1637
|
},
|
|
1566
1638
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
1567
|
-
successfulResponseHandler: (0,
|
|
1639
|
+
successfulResponseHandler: (0, import_provider_utils11.createJsonResponseHandler)(
|
|
1568
1640
|
openaiTextEmbeddingResponseSchema
|
|
1569
1641
|
),
|
|
1570
1642
|
abortSignal,
|
|
@@ -1577,14 +1649,25 @@ var OpenAIEmbeddingModel = class {
|
|
|
1577
1649
|
};
|
|
1578
1650
|
}
|
|
1579
1651
|
};
|
|
1580
|
-
var openaiTextEmbeddingResponseSchema = import_v47.z.object({
|
|
1581
|
-
data: import_v47.z.array(import_v47.z.object({ embedding: import_v47.z.array(import_v47.z.number()) })),
|
|
1582
|
-
usage: import_v47.z.object({ prompt_tokens: import_v47.z.number() }).nullish()
|
|
1583
|
-
});
|
|
1584
1652
|
|
|
1585
1653
|
// src/image/openai-image-model.ts
|
|
1586
|
-
var
|
|
1587
|
-
|
|
1654
|
+
var import_provider_utils13 = require("@ai-sdk/provider-utils");
|
|
1655
|
+
|
|
1656
|
+
// src/image/openai-image-api.ts
|
|
1657
|
+
var import_provider_utils12 = require("@ai-sdk/provider-utils");
|
|
1658
|
+
var z8 = __toESM(require("zod/v4"));
|
|
1659
|
+
var openaiImageResponseSchema = (0, import_provider_utils12.lazyValidator)(
|
|
1660
|
+
() => (0, import_provider_utils12.zodSchema)(
|
|
1661
|
+
z8.object({
|
|
1662
|
+
data: z8.array(
|
|
1663
|
+
z8.object({
|
|
1664
|
+
b64_json: z8.string(),
|
|
1665
|
+
revised_prompt: z8.string().optional()
|
|
1666
|
+
})
|
|
1667
|
+
)
|
|
1668
|
+
})
|
|
1669
|
+
)
|
|
1670
|
+
);
|
|
1588
1671
|
|
|
1589
1672
|
// src/image/openai-image-options.ts
|
|
1590
1673
|
var modelMaxImagesPerCall = {
|
|
@@ -1635,12 +1718,12 @@ var OpenAIImageModel = class {
|
|
|
1635
1718
|
warnings.push({ type: "unsupported-setting", setting: "seed" });
|
|
1636
1719
|
}
|
|
1637
1720
|
const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
|
|
1638
|
-
const { value: response, responseHeaders } = await (0,
|
|
1721
|
+
const { value: response, responseHeaders } = await (0, import_provider_utils13.postJsonToApi)({
|
|
1639
1722
|
url: this.config.url({
|
|
1640
1723
|
path: "/images/generations",
|
|
1641
1724
|
modelId: this.modelId
|
|
1642
1725
|
}),
|
|
1643
|
-
headers: (0,
|
|
1726
|
+
headers: (0, import_provider_utils13.combineHeaders)(this.config.headers(), headers),
|
|
1644
1727
|
body: {
|
|
1645
1728
|
model: this.modelId,
|
|
1646
1729
|
prompt,
|
|
@@ -1650,7 +1733,7 @@ var OpenAIImageModel = class {
|
|
|
1650
1733
|
...!hasDefaultResponseFormat.has(this.modelId) ? { response_format: "b64_json" } : {}
|
|
1651
1734
|
},
|
|
1652
1735
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
1653
|
-
successfulResponseHandler: (0,
|
|
1736
|
+
successfulResponseHandler: (0, import_provider_utils13.createJsonResponseHandler)(
|
|
1654
1737
|
openaiImageResponseSchema
|
|
1655
1738
|
),
|
|
1656
1739
|
abortSignal,
|
|
@@ -1676,36 +1759,43 @@ var OpenAIImageModel = class {
|
|
|
1676
1759
|
};
|
|
1677
1760
|
}
|
|
1678
1761
|
};
|
|
1679
|
-
var openaiImageResponseSchema = import_v48.z.object({
|
|
1680
|
-
data: import_v48.z.array(
|
|
1681
|
-
import_v48.z.object({ b64_json: import_v48.z.string(), revised_prompt: import_v48.z.string().optional() })
|
|
1682
|
-
)
|
|
1683
|
-
});
|
|
1684
1762
|
|
|
1685
1763
|
// src/tool/code-interpreter.ts
|
|
1686
|
-
var
|
|
1687
|
-
var
|
|
1688
|
-
var codeInterpreterInputSchema =
|
|
1689
|
-
|
|
1690
|
-
|
|
1691
|
-
|
|
1692
|
-
|
|
1693
|
-
outputs: import_v49.z.array(
|
|
1694
|
-
import_v49.z.discriminatedUnion("type", [
|
|
1695
|
-
import_v49.z.object({ type: import_v49.z.literal("logs"), logs: import_v49.z.string() }),
|
|
1696
|
-
import_v49.z.object({ type: import_v49.z.literal("image"), url: import_v49.z.string() })
|
|
1697
|
-
])
|
|
1698
|
-
).nullish()
|
|
1699
|
-
});
|
|
1700
|
-
var codeInterpreterArgsSchema = import_v49.z.object({
|
|
1701
|
-
container: import_v49.z.union([
|
|
1702
|
-
import_v49.z.string(),
|
|
1703
|
-
import_v49.z.object({
|
|
1704
|
-
fileIds: import_v49.z.array(import_v49.z.string()).optional()
|
|
1764
|
+
var import_provider_utils14 = require("@ai-sdk/provider-utils");
|
|
1765
|
+
var z9 = __toESM(require("zod/v4"));
|
|
1766
|
+
var codeInterpreterInputSchema = (0, import_provider_utils14.lazySchema)(
|
|
1767
|
+
() => (0, import_provider_utils14.zodSchema)(
|
|
1768
|
+
z9.object({
|
|
1769
|
+
code: z9.string().nullish(),
|
|
1770
|
+
containerId: z9.string()
|
|
1705
1771
|
})
|
|
1706
|
-
|
|
1707
|
-
|
|
1708
|
-
var
|
|
1772
|
+
)
|
|
1773
|
+
);
|
|
1774
|
+
var codeInterpreterOutputSchema = (0, import_provider_utils14.lazySchema)(
|
|
1775
|
+
() => (0, import_provider_utils14.zodSchema)(
|
|
1776
|
+
z9.object({
|
|
1777
|
+
outputs: z9.array(
|
|
1778
|
+
z9.discriminatedUnion("type", [
|
|
1779
|
+
z9.object({ type: z9.literal("logs"), logs: z9.string() }),
|
|
1780
|
+
z9.object({ type: z9.literal("image"), url: z9.string() })
|
|
1781
|
+
])
|
|
1782
|
+
).nullish()
|
|
1783
|
+
})
|
|
1784
|
+
)
|
|
1785
|
+
);
|
|
1786
|
+
var codeInterpreterArgsSchema = (0, import_provider_utils14.lazySchema)(
|
|
1787
|
+
() => (0, import_provider_utils14.zodSchema)(
|
|
1788
|
+
z9.object({
|
|
1789
|
+
container: z9.union([
|
|
1790
|
+
z9.string(),
|
|
1791
|
+
z9.object({
|
|
1792
|
+
fileIds: z9.array(z9.string()).optional()
|
|
1793
|
+
})
|
|
1794
|
+
]).optional()
|
|
1795
|
+
})
|
|
1796
|
+
)
|
|
1797
|
+
);
|
|
1798
|
+
var codeInterpreterToolFactory = (0, import_provider_utils14.createProviderDefinedToolFactoryWithOutputSchema)({
|
|
1709
1799
|
id: "openai.code_interpreter",
|
|
1710
1800
|
name: "code_interpreter",
|
|
1711
1801
|
inputSchema: codeInterpreterInputSchema,
|
|
@@ -1716,71 +1806,85 @@ var codeInterpreter = (args = {}) => {
|
|
|
1716
1806
|
};
|
|
1717
1807
|
|
|
1718
1808
|
// src/tool/file-search.ts
|
|
1719
|
-
var
|
|
1720
|
-
var
|
|
1721
|
-
var comparisonFilterSchema =
|
|
1722
|
-
key:
|
|
1723
|
-
type:
|
|
1724
|
-
value:
|
|
1809
|
+
var import_provider_utils15 = require("@ai-sdk/provider-utils");
|
|
1810
|
+
var z10 = __toESM(require("zod/v4"));
|
|
1811
|
+
var comparisonFilterSchema = z10.object({
|
|
1812
|
+
key: z10.string(),
|
|
1813
|
+
type: z10.enum(["eq", "ne", "gt", "gte", "lt", "lte"]),
|
|
1814
|
+
value: z10.union([z10.string(), z10.number(), z10.boolean()])
|
|
1725
1815
|
});
|
|
1726
|
-
var compoundFilterSchema =
|
|
1727
|
-
type:
|
|
1728
|
-
filters:
|
|
1729
|
-
|
|
1816
|
+
var compoundFilterSchema = z10.object({
|
|
1817
|
+
type: z10.enum(["and", "or"]),
|
|
1818
|
+
filters: z10.array(
|
|
1819
|
+
z10.union([comparisonFilterSchema, z10.lazy(() => compoundFilterSchema)])
|
|
1730
1820
|
)
|
|
1731
1821
|
});
|
|
1732
|
-
var fileSearchArgsSchema =
|
|
1733
|
-
|
|
1734
|
-
|
|
1735
|
-
|
|
1736
|
-
|
|
1737
|
-
|
|
1738
|
-
|
|
1739
|
-
|
|
1740
|
-
})
|
|
1741
|
-
|
|
1742
|
-
queries: import_v410.z.array(import_v410.z.string()),
|
|
1743
|
-
results: import_v410.z.array(
|
|
1744
|
-
import_v410.z.object({
|
|
1745
|
-
attributes: import_v410.z.record(import_v410.z.string(), import_v410.z.unknown()),
|
|
1746
|
-
fileId: import_v410.z.string(),
|
|
1747
|
-
filename: import_v410.z.string(),
|
|
1748
|
-
score: import_v410.z.number(),
|
|
1749
|
-
text: import_v410.z.string()
|
|
1822
|
+
var fileSearchArgsSchema = (0, import_provider_utils15.lazySchema)(
|
|
1823
|
+
() => (0, import_provider_utils15.zodSchema)(
|
|
1824
|
+
z10.object({
|
|
1825
|
+
vectorStoreIds: z10.array(z10.string()),
|
|
1826
|
+
maxNumResults: z10.number().optional(),
|
|
1827
|
+
ranking: z10.object({
|
|
1828
|
+
ranker: z10.string().optional(),
|
|
1829
|
+
scoreThreshold: z10.number().optional()
|
|
1830
|
+
}).optional(),
|
|
1831
|
+
filters: z10.union([comparisonFilterSchema, compoundFilterSchema]).optional()
|
|
1750
1832
|
})
|
|
1751
|
-
)
|
|
1752
|
-
|
|
1753
|
-
var
|
|
1833
|
+
)
|
|
1834
|
+
);
|
|
1835
|
+
var fileSearchOutputSchema = (0, import_provider_utils15.lazySchema)(
|
|
1836
|
+
() => (0, import_provider_utils15.zodSchema)(
|
|
1837
|
+
z10.object({
|
|
1838
|
+
queries: z10.array(z10.string()),
|
|
1839
|
+
results: z10.array(
|
|
1840
|
+
z10.object({
|
|
1841
|
+
attributes: z10.record(z10.string(), z10.unknown()),
|
|
1842
|
+
fileId: z10.string(),
|
|
1843
|
+
filename: z10.string(),
|
|
1844
|
+
score: z10.number(),
|
|
1845
|
+
text: z10.string()
|
|
1846
|
+
})
|
|
1847
|
+
).nullable()
|
|
1848
|
+
})
|
|
1849
|
+
)
|
|
1850
|
+
);
|
|
1851
|
+
var fileSearch = (0, import_provider_utils15.createProviderDefinedToolFactoryWithOutputSchema)({
|
|
1754
1852
|
id: "openai.file_search",
|
|
1755
1853
|
name: "file_search",
|
|
1756
|
-
inputSchema:
|
|
1854
|
+
inputSchema: z10.object({}),
|
|
1757
1855
|
outputSchema: fileSearchOutputSchema
|
|
1758
1856
|
});
|
|
1759
1857
|
|
|
1760
1858
|
// src/tool/image-generation.ts
|
|
1761
|
-
var
|
|
1762
|
-
var
|
|
1763
|
-
var imageGenerationArgsSchema =
|
|
1764
|
-
|
|
1765
|
-
|
|
1766
|
-
|
|
1767
|
-
|
|
1768
|
-
|
|
1769
|
-
|
|
1770
|
-
|
|
1771
|
-
|
|
1772
|
-
|
|
1773
|
-
|
|
1774
|
-
|
|
1775
|
-
|
|
1776
|
-
|
|
1777
|
-
|
|
1778
|
-
|
|
1779
|
-
})
|
|
1780
|
-
|
|
1859
|
+
var import_provider_utils16 = require("@ai-sdk/provider-utils");
|
|
1860
|
+
var z11 = __toESM(require("zod/v4"));
|
|
1861
|
+
var imageGenerationArgsSchema = (0, import_provider_utils16.lazySchema)(
|
|
1862
|
+
() => (0, import_provider_utils16.zodSchema)(
|
|
1863
|
+
z11.object({
|
|
1864
|
+
background: z11.enum(["auto", "opaque", "transparent"]).optional(),
|
|
1865
|
+
inputFidelity: z11.enum(["low", "high"]).optional(),
|
|
1866
|
+
inputImageMask: z11.object({
|
|
1867
|
+
fileId: z11.string().optional(),
|
|
1868
|
+
imageUrl: z11.string().optional()
|
|
1869
|
+
}).optional(),
|
|
1870
|
+
model: z11.string().optional(),
|
|
1871
|
+
moderation: z11.enum(["auto"]).optional(),
|
|
1872
|
+
outputCompression: z11.number().int().min(0).max(100).optional(),
|
|
1873
|
+
outputFormat: z11.enum(["png", "jpeg", "webp"]).optional(),
|
|
1874
|
+
partialImages: z11.number().int().min(0).max(3).optional(),
|
|
1875
|
+
quality: z11.enum(["auto", "low", "medium", "high"]).optional(),
|
|
1876
|
+
size: z11.enum(["1024x1024", "1024x1536", "1536x1024", "auto"]).optional()
|
|
1877
|
+
}).strict()
|
|
1878
|
+
)
|
|
1879
|
+
);
|
|
1880
|
+
var imageGenerationInputSchema = (0, import_provider_utils16.lazySchema)(() => (0, import_provider_utils16.zodSchema)(z11.object({})));
|
|
1881
|
+
var imageGenerationOutputSchema = (0, import_provider_utils16.lazySchema)(
|
|
1882
|
+
() => (0, import_provider_utils16.zodSchema)(z11.object({ result: z11.string() }))
|
|
1883
|
+
);
|
|
1884
|
+
var imageGenerationToolFactory = (0, import_provider_utils16.createProviderDefinedToolFactoryWithOutputSchema)({
|
|
1781
1885
|
id: "openai.image_generation",
|
|
1782
1886
|
name: "image_generation",
|
|
1783
|
-
inputSchema:
|
|
1887
|
+
inputSchema: imageGenerationInputSchema,
|
|
1784
1888
|
outputSchema: imageGenerationOutputSchema
|
|
1785
1889
|
});
|
|
1786
1890
|
var imageGeneration = (args = {}) => {
|
|
@@ -1788,22 +1892,26 @@ var imageGeneration = (args = {}) => {
|
|
|
1788
1892
|
};
|
|
1789
1893
|
|
|
1790
1894
|
// src/tool/local-shell.ts
|
|
1791
|
-
var
|
|
1792
|
-
var
|
|
1793
|
-
var localShellInputSchema =
|
|
1794
|
-
|
|
1795
|
-
|
|
1796
|
-
|
|
1797
|
-
|
|
1798
|
-
|
|
1799
|
-
|
|
1800
|
-
|
|
1801
|
-
|
|
1802
|
-
|
|
1803
|
-
|
|
1804
|
-
|
|
1805
|
-
|
|
1806
|
-
|
|
1895
|
+
var import_provider_utils17 = require("@ai-sdk/provider-utils");
|
|
1896
|
+
var z12 = __toESM(require("zod/v4"));
|
|
1897
|
+
var localShellInputSchema = (0, import_provider_utils17.lazySchema)(
|
|
1898
|
+
() => (0, import_provider_utils17.zodSchema)(
|
|
1899
|
+
z12.object({
|
|
1900
|
+
action: z12.object({
|
|
1901
|
+
type: z12.literal("exec"),
|
|
1902
|
+
command: z12.array(z12.string()),
|
|
1903
|
+
timeoutMs: z12.number().optional(),
|
|
1904
|
+
user: z12.string().optional(),
|
|
1905
|
+
workingDirectory: z12.string().optional(),
|
|
1906
|
+
env: z12.record(z12.string(), z12.string()).optional()
|
|
1907
|
+
})
|
|
1908
|
+
})
|
|
1909
|
+
)
|
|
1910
|
+
);
|
|
1911
|
+
var localShellOutputSchema = (0, import_provider_utils17.lazySchema)(
|
|
1912
|
+
() => (0, import_provider_utils17.zodSchema)(z12.object({ output: z12.string() }))
|
|
1913
|
+
);
|
|
1914
|
+
var localShell = (0, import_provider_utils17.createProviderDefinedToolFactoryWithOutputSchema)({
|
|
1807
1915
|
id: "openai.local_shell",
|
|
1808
1916
|
name: "local_shell",
|
|
1809
1917
|
inputSchema: localShellInputSchema,
|
|
@@ -1811,103 +1919,121 @@ var localShell = (0, import_provider_utils10.createProviderDefinedToolFactoryWit
|
|
|
1811
1919
|
});
|
|
1812
1920
|
|
|
1813
1921
|
// src/tool/web-search.ts
|
|
1814
|
-
var
|
|
1815
|
-
var
|
|
1816
|
-
var webSearchArgsSchema =
|
|
1817
|
-
|
|
1818
|
-
|
|
1819
|
-
|
|
1820
|
-
|
|
1821
|
-
|
|
1822
|
-
|
|
1823
|
-
|
|
1824
|
-
|
|
1825
|
-
|
|
1826
|
-
|
|
1827
|
-
|
|
1828
|
-
|
|
1829
|
-
|
|
1922
|
+
var import_provider_utils18 = require("@ai-sdk/provider-utils");
|
|
1923
|
+
var z13 = __toESM(require("zod/v4"));
|
|
1924
|
+
var webSearchArgsSchema = (0, import_provider_utils18.lazySchema)(
|
|
1925
|
+
() => (0, import_provider_utils18.zodSchema)(
|
|
1926
|
+
z13.object({
|
|
1927
|
+
filters: z13.object({
|
|
1928
|
+
allowedDomains: z13.array(z13.string()).optional()
|
|
1929
|
+
}).optional(),
|
|
1930
|
+
searchContextSize: z13.enum(["low", "medium", "high"]).optional(),
|
|
1931
|
+
userLocation: z13.object({
|
|
1932
|
+
type: z13.literal("approximate"),
|
|
1933
|
+
country: z13.string().optional(),
|
|
1934
|
+
city: z13.string().optional(),
|
|
1935
|
+
region: z13.string().optional(),
|
|
1936
|
+
timezone: z13.string().optional()
|
|
1937
|
+
}).optional()
|
|
1938
|
+
})
|
|
1939
|
+
)
|
|
1940
|
+
);
|
|
1941
|
+
var webSearchInputSchema = (0, import_provider_utils18.lazySchema)(
|
|
1942
|
+
() => (0, import_provider_utils18.zodSchema)(
|
|
1943
|
+
z13.object({
|
|
1944
|
+
action: z13.discriminatedUnion("type", [
|
|
1945
|
+
z13.object({
|
|
1946
|
+
type: z13.literal("search"),
|
|
1947
|
+
query: z13.string().nullish()
|
|
1948
|
+
}),
|
|
1949
|
+
z13.object({
|
|
1950
|
+
type: z13.literal("open_page"),
|
|
1951
|
+
url: z13.string()
|
|
1952
|
+
}),
|
|
1953
|
+
z13.object({
|
|
1954
|
+
type: z13.literal("find"),
|
|
1955
|
+
url: z13.string(),
|
|
1956
|
+
pattern: z13.string()
|
|
1957
|
+
})
|
|
1958
|
+
]).nullish()
|
|
1959
|
+
})
|
|
1960
|
+
)
|
|
1961
|
+
);
|
|
1962
|
+
var webSearchToolFactory = (0, import_provider_utils18.createProviderDefinedToolFactory)({
|
|
1830
1963
|
id: "openai.web_search",
|
|
1831
1964
|
name: "web_search",
|
|
1832
|
-
inputSchema:
|
|
1833
|
-
action: import_v413.z.discriminatedUnion("type", [
|
|
1834
|
-
import_v413.z.object({
|
|
1835
|
-
type: import_v413.z.literal("search"),
|
|
1836
|
-
query: import_v413.z.string().nullish()
|
|
1837
|
-
}),
|
|
1838
|
-
import_v413.z.object({
|
|
1839
|
-
type: import_v413.z.literal("open_page"),
|
|
1840
|
-
url: import_v413.z.string()
|
|
1841
|
-
}),
|
|
1842
|
-
import_v413.z.object({
|
|
1843
|
-
type: import_v413.z.literal("find"),
|
|
1844
|
-
url: import_v413.z.string(),
|
|
1845
|
-
pattern: import_v413.z.string()
|
|
1846
|
-
})
|
|
1847
|
-
]).nullish()
|
|
1848
|
-
})
|
|
1965
|
+
inputSchema: webSearchInputSchema
|
|
1849
1966
|
});
|
|
1850
1967
|
var webSearch = (args = {}) => {
|
|
1851
1968
|
return webSearchToolFactory(args);
|
|
1852
1969
|
};
|
|
1853
1970
|
|
|
1854
1971
|
// src/tool/web-search-preview.ts
|
|
1855
|
-
var
|
|
1856
|
-
var
|
|
1857
|
-
var webSearchPreviewArgsSchema =
|
|
1858
|
-
|
|
1859
|
-
|
|
1860
|
-
|
|
1861
|
-
|
|
1862
|
-
|
|
1863
|
-
|
|
1864
|
-
|
|
1865
|
-
|
|
1866
|
-
|
|
1867
|
-
|
|
1868
|
-
|
|
1869
|
-
|
|
1870
|
-
|
|
1871
|
-
|
|
1872
|
-
|
|
1873
|
-
|
|
1874
|
-
|
|
1875
|
-
|
|
1876
|
-
|
|
1877
|
-
|
|
1878
|
-
|
|
1879
|
-
|
|
1880
|
-
|
|
1881
|
-
|
|
1882
|
-
|
|
1883
|
-
|
|
1884
|
-
|
|
1885
|
-
|
|
1886
|
-
|
|
1887
|
-
|
|
1888
|
-
|
|
1889
|
-
|
|
1890
|
-
|
|
1891
|
-
|
|
1972
|
+
var import_provider_utils19 = require("@ai-sdk/provider-utils");
|
|
1973
|
+
var z14 = __toESM(require("zod/v4"));
|
|
1974
|
+
var webSearchPreviewArgsSchema = (0, import_provider_utils19.lazySchema)(
|
|
1975
|
+
() => (0, import_provider_utils19.zodSchema)(
|
|
1976
|
+
z14.object({
|
|
1977
|
+
/**
|
|
1978
|
+
* Search context size to use for the web search.
|
|
1979
|
+
* - high: Most comprehensive context, highest cost, slower response
|
|
1980
|
+
* - medium: Balanced context, cost, and latency (default)
|
|
1981
|
+
* - low: Least context, lowest cost, fastest response
|
|
1982
|
+
*/
|
|
1983
|
+
searchContextSize: z14.enum(["low", "medium", "high"]).optional(),
|
|
1984
|
+
/**
|
|
1985
|
+
* User location information to provide geographically relevant search results.
|
|
1986
|
+
*/
|
|
1987
|
+
userLocation: z14.object({
|
|
1988
|
+
/**
|
|
1989
|
+
* Type of location (always 'approximate')
|
|
1990
|
+
*/
|
|
1991
|
+
type: z14.literal("approximate"),
|
|
1992
|
+
/**
|
|
1993
|
+
* Two-letter ISO country code (e.g., 'US', 'GB')
|
|
1994
|
+
*/
|
|
1995
|
+
country: z14.string().optional(),
|
|
1996
|
+
/**
|
|
1997
|
+
* City name (free text, e.g., 'Minneapolis')
|
|
1998
|
+
*/
|
|
1999
|
+
city: z14.string().optional(),
|
|
2000
|
+
/**
|
|
2001
|
+
* Region name (free text, e.g., 'Minnesota')
|
|
2002
|
+
*/
|
|
2003
|
+
region: z14.string().optional(),
|
|
2004
|
+
/**
|
|
2005
|
+
* IANA timezone (e.g., 'America/Chicago')
|
|
2006
|
+
*/
|
|
2007
|
+
timezone: z14.string().optional()
|
|
2008
|
+
}).optional()
|
|
2009
|
+
})
|
|
2010
|
+
)
|
|
2011
|
+
);
|
|
2012
|
+
var webSearchPreviewInputSchema = (0, import_provider_utils19.lazySchema)(
|
|
2013
|
+
() => (0, import_provider_utils19.zodSchema)(
|
|
2014
|
+
z14.object({
|
|
2015
|
+
action: z14.discriminatedUnion("type", [
|
|
2016
|
+
z14.object({
|
|
2017
|
+
type: z14.literal("search"),
|
|
2018
|
+
query: z14.string().nullish()
|
|
2019
|
+
}),
|
|
2020
|
+
z14.object({
|
|
2021
|
+
type: z14.literal("open_page"),
|
|
2022
|
+
url: z14.string()
|
|
2023
|
+
}),
|
|
2024
|
+
z14.object({
|
|
2025
|
+
type: z14.literal("find"),
|
|
2026
|
+
url: z14.string(),
|
|
2027
|
+
pattern: z14.string()
|
|
2028
|
+
})
|
|
2029
|
+
]).nullish()
|
|
2030
|
+
})
|
|
2031
|
+
)
|
|
2032
|
+
);
|
|
2033
|
+
var webSearchPreview = (0, import_provider_utils19.createProviderDefinedToolFactory)({
|
|
1892
2034
|
id: "openai.web_search_preview",
|
|
1893
2035
|
name: "web_search_preview",
|
|
1894
|
-
inputSchema:
|
|
1895
|
-
action: import_v414.z.discriminatedUnion("type", [
|
|
1896
|
-
import_v414.z.object({
|
|
1897
|
-
type: import_v414.z.literal("search"),
|
|
1898
|
-
query: import_v414.z.string().nullish()
|
|
1899
|
-
}),
|
|
1900
|
-
import_v414.z.object({
|
|
1901
|
-
type: import_v414.z.literal("open_page"),
|
|
1902
|
-
url: import_v414.z.string()
|
|
1903
|
-
}),
|
|
1904
|
-
import_v414.z.object({
|
|
1905
|
-
type: import_v414.z.literal("find"),
|
|
1906
|
-
url: import_v414.z.string(),
|
|
1907
|
-
pattern: import_v414.z.string()
|
|
1908
|
-
})
|
|
1909
|
-
]).nullish()
|
|
1910
|
-
})
|
|
2036
|
+
inputSchema: webSearchPreviewInputSchema
|
|
1911
2037
|
});
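
The factory above registers the provider-defined web_search_preview tool. A hedged usage sketch follows: the openai.tools.webSearchPreview entry point and the generateText call are assumed from the AI SDK, while the option names come from the args schema in this diff:

// Hedged usage sketch; field names taken from webSearchPreviewArgsSchema above.
import { openai } from '@ai-sdk/openai';
import { generateText } from 'ai';

const { text, sources } = await generateText({
  model: openai.responses('gpt-4o-mini'),
  prompt: 'What are the latest AI SDK releases?',
  tools: {
    web_search_preview: openai.tools.webSearchPreview({
      searchContextSize: 'medium',
      userLocation: {
        type: 'approximate',
        city: 'Minneapolis',
        region: 'Minnesota',
        country: 'US',
      },
    }),
  },
});
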
|
|
1912
2038
|
|
|
1913
2039
|
// src/openai-tools.ts
|
|
@@ -1985,13 +2111,12 @@ var openaiTools = {
|
|
|
1985
2111
|
|
|
1986
2112
|
// src/responses/openai-responses-language-model.ts
|
|
1987
2113
|
var import_provider8 = require("@ai-sdk/provider");
|
|
1988
|
-
var
|
|
1989
|
-
var import_v416 = require("zod/v4");
|
|
2114
|
+
var import_provider_utils24 = require("@ai-sdk/provider-utils");
|
|
1990
2115
|
|
|
1991
2116
|
// src/responses/convert-to-openai-responses-input.ts
|
|
1992
2117
|
var import_provider6 = require("@ai-sdk/provider");
|
|
1993
|
-
var
|
|
1994
|
-
var
|
|
2118
|
+
var import_provider_utils20 = require("@ai-sdk/provider-utils");
|
|
2119
|
+
var z15 = __toESM(require("zod/v4"));
|
|
1995
2120
|
function isFileId(data, prefixes) {
|
|
1996
2121
|
if (!prefixes) return false;
|
|
1997
2122
|
return prefixes.some((prefix) => data.startsWith(prefix));
|
|
@@ -2049,7 +2174,7 @@ async function convertToOpenAIResponsesInput({
|
|
|
2049
2174
|
return {
|
|
2050
2175
|
type: "input_image",
|
|
2051
2176
|
...part.data instanceof URL ? { image_url: part.data.toString() } : typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
|
|
2052
|
-
image_url: `data:${mediaType};base64,${(0,
|
|
2177
|
+
image_url: `data:${mediaType};base64,${(0, import_provider_utils20.convertToBase64)(part.data)}`
|
|
2053
2178
|
},
|
|
2054
2179
|
detail: (_b2 = (_a2 = part.providerOptions) == null ? void 0 : _a2.openai) == null ? void 0 : _b2.imageDetail
|
|
2055
2180
|
};
|
|
@@ -2064,7 +2189,7 @@ async function convertToOpenAIResponsesInput({
|
|
|
2064
2189
|
type: "input_file",
|
|
2065
2190
|
...typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
|
|
2066
2191
|
filename: (_c2 = part.filename) != null ? _c2 : `part-${index}.pdf`,
|
|
2067
|
-
file_data: `data:application/pdf;base64,${(0,
|
|
2192
|
+
file_data: `data:application/pdf;base64,${(0, import_provider_utils20.convertToBase64)(part.data)}`
|
|
2068
2193
|
}
|
|
2069
2194
|
};
|
|
2070
2195
|
} else {
|
|
@@ -2097,7 +2222,10 @@ async function convertToOpenAIResponsesInput({
|
|
|
2097
2222
|
break;
|
|
2098
2223
|
}
|
|
2099
2224
|
if (hasLocalShellTool && part.toolName === "local_shell") {
|
|
2100
|
-
const parsedInput =
|
|
2225
|
+
const parsedInput = await (0, import_provider_utils20.validateTypes)({
|
|
2226
|
+
value: part.input,
|
|
2227
|
+
schema: localShellInputSchema
|
|
2228
|
+
});
|
|
2101
2229
|
input.push({
|
|
2102
2230
|
type: "local_shell_call",
|
|
2103
2231
|
call_id: part.toolCallId,
|
|
@@ -2135,7 +2263,7 @@ async function convertToOpenAIResponsesInput({
|
|
|
2135
2263
|
break;
|
|
2136
2264
|
}
|
|
2137
2265
|
case "reasoning": {
|
|
2138
|
-
const providerOptions = await (0,
|
|
2266
|
+
const providerOptions = await (0, import_provider_utils20.parseProviderOptions)({
|
|
2139
2267
|
provider: "openai",
|
|
2140
2268
|
providerOptions: part.providerOptions,
|
|
2141
2269
|
schema: openaiResponsesReasoningProviderOptionsSchema
|
|
@@ -2193,10 +2321,14 @@ async function convertToOpenAIResponsesInput({
|
|
|
2193
2321
|
for (const part of content) {
|
|
2194
2322
|
const output = part.output;
|
|
2195
2323
|
if (hasLocalShellTool && part.toolName === "local_shell" && output.type === "json") {
|
|
2324
|
+
const parsedOutput = await (0, import_provider_utils20.validateTypes)({
|
|
2325
|
+
value: output.value,
|
|
2326
|
+
schema: localShellOutputSchema
|
|
2327
|
+
});
|
|
2196
2328
|
input.push({
|
|
2197
2329
|
type: "local_shell_call_output",
|
|
2198
2330
|
call_id: part.toolCallId,
|
|
2199
|
-
output:
|
|
2331
|
+
output: parsedOutput.output
|
|
2200
2332
|
});
|
|
2201
2333
|
break;
|
|
2202
2334
|
}
|
|
@@ -2228,9 +2360,9 @@ async function convertToOpenAIResponsesInput({
|
|
|
2228
2360
|
}
|
|
2229
2361
|
return { input, warnings };
|
|
2230
2362
|
}
|
|
2231
|
-
var openaiResponsesReasoningProviderOptionsSchema =
|
|
2232
|
-
itemId:
|
|
2233
|
-
reasoningEncryptedContent:
|
|
2363
|
+
var openaiResponsesReasoningProviderOptionsSchema = z15.object({
|
|
2364
|
+
itemId: z15.string().nullish(),
|
|
2365
|
+
reasoningEncryptedContent: z15.string().nullish()
|
|
2234
2366
|
});
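
The schema above is what parseProviderOptions validates reasoning parts against (see the hunk above). A sketch under the assumption that parseProviderOptions resolves with the typed options, or undefined when none are supplied; the schema name refers to the bundled definition above:

// Hedged sketch of the validation step used in the conversion code above.
import { parseProviderOptions } from '@ai-sdk/provider-utils';

const providerOptions = await parseProviderOptions({
  provider: 'openai',
  providerOptions: {
    openai: {
      itemId: 'rs_abc123',                      // hypothetical reasoning item id
      reasoningEncryptedContent: '<encrypted>', // opaque payload, may be null
    },
  },
  schema: openaiResponsesReasoningProviderOptionsSchema, // bundled schema defined above
});
// providerOptions?.itemId and providerOptions?.reasoningEncryptedContent are both nullish.
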
|
|
2235
2367
|
|
|
2236
2368
|
// src/responses/map-openai-responses-finish-reason.ts
|
|
@@ -2251,9 +2383,539 @@ function mapOpenAIResponseFinishReason({
|
|
|
2251
2383
|
}
|
|
2252
2384
|
}
|
|
2253
2385
|
|
|
2386
|
+
// src/responses/openai-responses-api.ts
|
|
2387
|
+
var import_provider_utils21 = require("@ai-sdk/provider-utils");
|
|
2388
|
+
var z16 = __toESM(require("zod/v4"));
|
|
2389
|
+
var openaiResponsesChunkSchema = (0, import_provider_utils21.lazyValidator)(
|
|
2390
|
+
() => (0, import_provider_utils21.zodSchema)(
|
|
2391
|
+
z16.union([
|
|
2392
|
+
z16.object({
|
|
2393
|
+
type: z16.literal("response.output_text.delta"),
|
|
2394
|
+
item_id: z16.string(),
|
|
2395
|
+
delta: z16.string(),
|
|
2396
|
+
logprobs: z16.array(
|
|
2397
|
+
z16.object({
|
|
2398
|
+
token: z16.string(),
|
|
2399
|
+
logprob: z16.number(),
|
|
2400
|
+
top_logprobs: z16.array(
|
|
2401
|
+
z16.object({
|
|
2402
|
+
token: z16.string(),
|
|
2403
|
+
logprob: z16.number()
|
|
2404
|
+
})
|
|
2405
|
+
)
|
|
2406
|
+
})
|
|
2407
|
+
).nullish()
|
|
2408
|
+
}),
|
|
2409
|
+
z16.object({
|
|
2410
|
+
type: z16.enum(["response.completed", "response.incomplete"]),
|
|
2411
|
+
response: z16.object({
|
|
2412
|
+
incomplete_details: z16.object({ reason: z16.string() }).nullish(),
|
|
2413
|
+
usage: z16.object({
|
|
2414
|
+
input_tokens: z16.number(),
|
|
2415
|
+
input_tokens_details: z16.object({ cached_tokens: z16.number().nullish() }).nullish(),
|
|
2416
|
+
output_tokens: z16.number(),
|
|
2417
|
+
output_tokens_details: z16.object({ reasoning_tokens: z16.number().nullish() }).nullish()
|
|
2418
|
+
}),
|
|
2419
|
+
service_tier: z16.string().nullish()
|
|
2420
|
+
})
|
|
2421
|
+
}),
|
|
2422
|
+
z16.object({
|
|
2423
|
+
type: z16.literal("response.created"),
|
|
2424
|
+
response: z16.object({
|
|
2425
|
+
id: z16.string(),
|
|
2426
|
+
created_at: z16.number(),
|
|
2427
|
+
model: z16.string(),
|
|
2428
|
+
service_tier: z16.string().nullish()
|
|
2429
|
+
})
|
|
2430
|
+
}),
|
|
2431
|
+
z16.object({
|
|
2432
|
+
type: z16.literal("response.output_item.added"),
|
|
2433
|
+
output_index: z16.number(),
|
|
2434
|
+
item: z16.discriminatedUnion("type", [
|
|
2435
|
+
z16.object({
|
|
2436
|
+
type: z16.literal("message"),
|
|
2437
|
+
id: z16.string()
|
|
2438
|
+
}),
|
|
2439
|
+
z16.object({
|
|
2440
|
+
type: z16.literal("reasoning"),
|
|
2441
|
+
id: z16.string(),
|
|
2442
|
+
encrypted_content: z16.string().nullish()
|
|
2443
|
+
}),
|
|
2444
|
+
z16.object({
|
|
2445
|
+
type: z16.literal("function_call"),
|
|
2446
|
+
id: z16.string(),
|
|
2447
|
+
call_id: z16.string(),
|
|
2448
|
+
name: z16.string(),
|
|
2449
|
+
arguments: z16.string()
|
|
2450
|
+
}),
|
|
2451
|
+
z16.object({
|
|
2452
|
+
type: z16.literal("web_search_call"),
|
|
2453
|
+
id: z16.string(),
|
|
2454
|
+
status: z16.string(),
|
|
2455
|
+
action: z16.object({
|
|
2456
|
+
type: z16.literal("search"),
|
|
2457
|
+
query: z16.string().optional()
|
|
2458
|
+
}).nullish()
|
|
2459
|
+
}),
|
|
2460
|
+
z16.object({
|
|
2461
|
+
type: z16.literal("computer_call"),
|
|
2462
|
+
id: z16.string(),
|
|
2463
|
+
status: z16.string()
|
|
2464
|
+
}),
|
|
2465
|
+
z16.object({
|
|
2466
|
+
type: z16.literal("file_search_call"),
|
|
2467
|
+
id: z16.string()
|
|
2468
|
+
}),
|
|
2469
|
+
z16.object({
|
|
2470
|
+
type: z16.literal("image_generation_call"),
|
|
2471
|
+
id: z16.string()
|
|
2472
|
+
}),
|
|
2473
|
+
z16.object({
|
|
2474
|
+
type: z16.literal("code_interpreter_call"),
|
|
2475
|
+
id: z16.string(),
|
|
2476
|
+
container_id: z16.string(),
|
|
2477
|
+
code: z16.string().nullable(),
|
|
2478
|
+
outputs: z16.array(
|
|
2479
|
+
z16.discriminatedUnion("type", [
|
|
2480
|
+
z16.object({ type: z16.literal("logs"), logs: z16.string() }),
|
|
2481
|
+
z16.object({ type: z16.literal("image"), url: z16.string() })
|
|
2482
|
+
])
|
|
2483
|
+
).nullable(),
|
|
2484
|
+
status: z16.string()
|
|
2485
|
+
})
|
|
2486
|
+
])
|
|
2487
|
+
}),
|
|
2488
|
+
z16.object({
|
|
2489
|
+
type: z16.literal("response.output_item.done"),
|
|
2490
|
+
output_index: z16.number(),
|
|
2491
|
+
item: z16.discriminatedUnion("type", [
|
|
2492
|
+
z16.object({
|
|
2493
|
+
type: z16.literal("message"),
|
|
2494
|
+
id: z16.string()
|
|
2495
|
+
}),
|
|
2496
|
+
z16.object({
|
|
2497
|
+
type: z16.literal("reasoning"),
|
|
2498
|
+
id: z16.string(),
|
|
2499
|
+
encrypted_content: z16.string().nullish()
|
|
2500
|
+
}),
|
|
2501
|
+
z16.object({
|
|
2502
|
+
type: z16.literal("function_call"),
|
|
2503
|
+
id: z16.string(),
|
|
2504
|
+
call_id: z16.string(),
|
|
2505
|
+
name: z16.string(),
|
|
2506
|
+
arguments: z16.string(),
|
|
2507
|
+
status: z16.literal("completed")
|
|
2508
|
+
}),
|
|
2509
|
+
z16.object({
|
|
2510
|
+
type: z16.literal("code_interpreter_call"),
|
|
2511
|
+
id: z16.string(),
|
|
2512
|
+
code: z16.string().nullable(),
|
|
2513
|
+
container_id: z16.string(),
|
|
2514
|
+
outputs: z16.array(
|
|
2515
|
+
z16.discriminatedUnion("type", [
|
|
2516
|
+
z16.object({ type: z16.literal("logs"), logs: z16.string() }),
|
|
2517
|
+
z16.object({ type: z16.literal("image"), url: z16.string() })
|
|
2518
|
+
])
|
|
2519
|
+
).nullable()
|
|
2520
|
+
}),
|
|
2521
|
+
z16.object({
|
|
2522
|
+
type: z16.literal("image_generation_call"),
|
|
2523
|
+
id: z16.string(),
|
|
2524
|
+
result: z16.string()
|
|
2525
|
+
}),
|
|
2526
|
+
z16.object({
|
|
2527
|
+
type: z16.literal("web_search_call"),
|
|
2528
|
+
id: z16.string(),
|
|
2529
|
+
status: z16.string(),
|
|
2530
|
+
action: z16.discriminatedUnion("type", [
|
|
2531
|
+
z16.object({
|
|
2532
|
+
type: z16.literal("search"),
|
|
2533
|
+
query: z16.string().nullish()
|
|
2534
|
+
}),
|
|
2535
|
+
z16.object({
|
|
2536
|
+
type: z16.literal("open_page"),
|
|
2537
|
+
url: z16.string()
|
|
2538
|
+
}),
|
|
2539
|
+
z16.object({
|
|
2540
|
+
type: z16.literal("find"),
|
|
2541
|
+
url: z16.string(),
|
|
2542
|
+
pattern: z16.string()
|
|
2543
|
+
})
|
|
2544
|
+
]).nullish()
|
|
2545
|
+
}),
|
|
2546
|
+
z16.object({
|
|
2547
|
+
type: z16.literal("file_search_call"),
|
|
2548
|
+
id: z16.string(),
|
|
2549
|
+
queries: z16.array(z16.string()),
|
|
2550
|
+
results: z16.array(
|
|
2551
|
+
z16.object({
|
|
2552
|
+
attributes: z16.record(z16.string(), z16.unknown()),
|
|
2553
|
+
file_id: z16.string(),
|
|
2554
|
+
filename: z16.string(),
|
|
2555
|
+
score: z16.number(),
|
|
2556
|
+
text: z16.string()
|
|
2557
|
+
})
|
|
2558
|
+
).nullish()
|
|
2559
|
+
}),
|
|
2560
|
+
z16.object({
|
|
2561
|
+
type: z16.literal("local_shell_call"),
|
|
2562
|
+
id: z16.string(),
|
|
2563
|
+
call_id: z16.string(),
|
|
2564
|
+
action: z16.object({
|
|
2565
|
+
type: z16.literal("exec"),
|
|
2566
|
+
command: z16.array(z16.string()),
|
|
2567
|
+
timeout_ms: z16.number().optional(),
|
|
2568
|
+
user: z16.string().optional(),
|
|
2569
|
+
working_directory: z16.string().optional(),
|
|
2570
|
+
env: z16.record(z16.string(), z16.string()).optional()
|
|
2571
|
+
})
|
|
2572
|
+
}),
|
|
2573
|
+
z16.object({
|
|
2574
|
+
type: z16.literal("computer_call"),
|
|
2575
|
+
id: z16.string(),
|
|
2576
|
+
status: z16.literal("completed")
|
|
2577
|
+
})
|
|
2578
|
+
])
|
|
2579
|
+
}),
|
|
2580
|
+
z16.object({
|
|
2581
|
+
type: z16.literal("response.function_call_arguments.delta"),
|
|
2582
|
+
item_id: z16.string(),
|
|
2583
|
+
output_index: z16.number(),
|
|
2584
|
+
delta: z16.string()
|
|
2585
|
+
}),
|
|
2586
|
+
z16.object({
|
|
2587
|
+
type: z16.literal("response.image_generation_call.partial_image"),
|
|
2588
|
+
item_id: z16.string(),
|
|
2589
|
+
output_index: z16.number(),
|
|
2590
|
+
partial_image_b64: z16.string()
|
|
2591
|
+
}),
|
|
2592
|
+
z16.object({
|
|
2593
|
+
type: z16.literal("response.code_interpreter_call_code.delta"),
|
|
2594
|
+
item_id: z16.string(),
|
|
2595
|
+
output_index: z16.number(),
|
|
2596
|
+
delta: z16.string()
|
|
2597
|
+
}),
|
|
2598
|
+
z16.object({
|
|
2599
|
+
type: z16.literal("response.code_interpreter_call_code.done"),
|
|
2600
|
+
item_id: z16.string(),
|
|
2601
|
+
output_index: z16.number(),
|
|
2602
|
+
code: z16.string()
|
|
2603
|
+
}),
|
|
2604
|
+
z16.object({
|
|
2605
|
+
type: z16.literal("response.output_text.annotation.added"),
|
|
2606
|
+
annotation: z16.discriminatedUnion("type", [
|
|
2607
|
+
z16.object({
|
|
2608
|
+
type: z16.literal("url_citation"),
|
|
2609
|
+
url: z16.string(),
|
|
2610
|
+
title: z16.string()
|
|
2611
|
+
}),
|
|
2612
|
+
z16.object({
|
|
2613
|
+
type: z16.literal("file_citation"),
|
|
2614
|
+
file_id: z16.string(),
|
|
2615
|
+
filename: z16.string().nullish(),
|
|
2616
|
+
index: z16.number().nullish(),
|
|
2617
|
+
start_index: z16.number().nullish(),
|
|
2618
|
+
end_index: z16.number().nullish(),
|
|
2619
|
+
quote: z16.string().nullish()
|
|
2620
|
+
})
|
|
2621
|
+
])
|
|
2622
|
+
}),
|
|
2623
|
+
z16.object({
|
|
2624
|
+
type: z16.literal("response.reasoning_summary_part.added"),
|
|
2625
|
+
item_id: z16.string(),
|
|
2626
|
+
summary_index: z16.number()
|
|
2627
|
+
}),
|
|
2628
|
+
z16.object({
|
|
2629
|
+
type: z16.literal("response.reasoning_summary_text.delta"),
|
|
2630
|
+
item_id: z16.string(),
|
|
2631
|
+
summary_index: z16.number(),
|
|
2632
|
+
delta: z16.string()
|
|
2633
|
+
}),
|
|
2634
|
+
z16.object({
|
|
2635
|
+
type: z16.literal("error"),
|
|
2636
|
+
code: z16.string(),
|
|
2637
|
+
message: z16.string(),
|
|
2638
|
+
param: z16.string().nullish(),
|
|
2639
|
+
sequence_number: z16.number()
|
|
2640
|
+
}),
|
|
2641
|
+
z16.object({ type: z16.string() }).loose().transform((value) => ({
|
|
2642
|
+
type: "unknown_chunk",
|
|
2643
|
+
message: value.type
|
|
2644
|
+
}))
|
|
2645
|
+
// fallback for unknown chunks
|
|
2646
|
+
])
|
|
2647
|
+
)
|
|
2648
|
+
);
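
openaiResponsesChunkSchema is a union over the Responses streaming events, ending in a loose catch-all that rewrites unrecognized event types into unknown_chunk markers. Two example payloads, with field names taken from the schema itself (the second event type is hypothetical):

// A chunk the first union member accepts; logprobs is optional/nullish.
const textDelta = {
  type: 'response.output_text.delta',
  item_id: 'msg_001',
  delta: 'Hello',
  logprobs: [{ token: 'Hello', logprob: -0.02, top_logprobs: [{ token: 'Hello', logprob: -0.02 }] }],
};

// A hypothetical event type no union member matches falls through to the
// loose fallback and is rewritten by its transform:
const unknownEvent = { type: 'response.queued', sequence_number: 7 };
// ...after parsing: { type: 'unknown_chunk', message: 'response.queued' }
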
|
|
2649
|
+
var openaiResponsesResponseSchema = (0, import_provider_utils21.lazyValidator)(
|
|
2650
|
+
() => (0, import_provider_utils21.zodSchema)(
|
|
2651
|
+
z16.object({
|
|
2652
|
+
id: z16.string(),
|
|
2653
|
+
created_at: z16.number(),
|
|
2654
|
+
error: z16.object({
|
|
2655
|
+
code: z16.string(),
|
|
2656
|
+
message: z16.string()
|
|
2657
|
+
}).nullish(),
|
|
2658
|
+
model: z16.string(),
|
|
2659
|
+
output: z16.array(
|
|
2660
|
+
z16.discriminatedUnion("type", [
|
|
2661
|
+
z16.object({
|
|
2662
|
+
type: z16.literal("message"),
|
|
2663
|
+
role: z16.literal("assistant"),
|
|
2664
|
+
id: z16.string(),
|
|
2665
|
+
content: z16.array(
|
|
2666
|
+
z16.object({
|
|
2667
|
+
type: z16.literal("output_text"),
|
|
2668
|
+
text: z16.string(),
|
|
2669
|
+
logprobs: z16.array(
|
|
2670
|
+
z16.object({
|
|
2671
|
+
token: z16.string(),
|
|
2672
|
+
logprob: z16.number(),
|
|
2673
|
+
top_logprobs: z16.array(
|
|
2674
|
+
z16.object({
|
|
2675
|
+
token: z16.string(),
|
|
2676
|
+
logprob: z16.number()
|
|
2677
|
+
})
|
|
2678
|
+
)
|
|
2679
|
+
})
|
|
2680
|
+
).nullish(),
|
|
2681
|
+
annotations: z16.array(
|
|
2682
|
+
z16.discriminatedUnion("type", [
|
|
2683
|
+
z16.object({
|
|
2684
|
+
type: z16.literal("url_citation"),
|
|
2685
|
+
start_index: z16.number(),
|
|
2686
|
+
end_index: z16.number(),
|
|
2687
|
+
url: z16.string(),
|
|
2688
|
+
title: z16.string()
|
|
2689
|
+
}),
|
|
2690
|
+
z16.object({
|
|
2691
|
+
type: z16.literal("file_citation"),
|
|
2692
|
+
file_id: z16.string(),
|
|
2693
|
+
filename: z16.string().nullish(),
|
|
2694
|
+
index: z16.number().nullish(),
|
|
2695
|
+
start_index: z16.number().nullish(),
|
|
2696
|
+
end_index: z16.number().nullish(),
|
|
2697
|
+
quote: z16.string().nullish()
|
|
2698
|
+
}),
|
|
2699
|
+
z16.object({
|
|
2700
|
+
type: z16.literal("container_file_citation")
|
|
2701
|
+
})
|
|
2702
|
+
])
|
|
2703
|
+
)
|
|
2704
|
+
})
|
|
2705
|
+
)
|
|
2706
|
+
}),
|
|
2707
|
+
z16.object({
|
|
2708
|
+
type: z16.literal("web_search_call"),
|
|
2709
|
+
id: z16.string(),
|
|
2710
|
+
status: z16.string(),
|
|
2711
|
+
action: z16.discriminatedUnion("type", [
|
|
2712
|
+
z16.object({
|
|
2713
|
+
type: z16.literal("search"),
|
|
2714
|
+
query: z16.string().nullish()
|
|
2715
|
+
}),
|
|
2716
|
+
z16.object({
|
|
2717
|
+
type: z16.literal("open_page"),
|
|
2718
|
+
url: z16.string()
|
|
2719
|
+
}),
|
|
2720
|
+
z16.object({
|
|
2721
|
+
type: z16.literal("find"),
|
|
2722
|
+
url: z16.string(),
|
|
2723
|
+
pattern: z16.string()
|
|
2724
|
+
})
|
|
2725
|
+
]).nullish()
|
|
2726
|
+
}),
|
|
2727
|
+
z16.object({
|
|
2728
|
+
type: z16.literal("file_search_call"),
|
|
2729
|
+
id: z16.string(),
|
|
2730
|
+
queries: z16.array(z16.string()),
|
|
2731
|
+
results: z16.array(
|
|
2732
|
+
z16.object({
|
|
2733
|
+
attributes: z16.record(z16.string(), z16.unknown()),
|
|
2734
|
+
file_id: z16.string(),
|
|
2735
|
+
filename: z16.string(),
|
|
2736
|
+
score: z16.number(),
|
|
2737
|
+
text: z16.string()
|
|
2738
|
+
})
|
|
2739
|
+
).nullish()
|
|
2740
|
+
}),
|
|
2741
|
+
z16.object({
|
|
2742
|
+
type: z16.literal("code_interpreter_call"),
|
|
2743
|
+
id: z16.string(),
|
|
2744
|
+
code: z16.string().nullable(),
|
|
2745
|
+
container_id: z16.string(),
|
|
2746
|
+
outputs: z16.array(
|
|
2747
|
+
z16.discriminatedUnion("type", [
|
|
2748
|
+
z16.object({ type: z16.literal("logs"), logs: z16.string() }),
|
|
2749
|
+
z16.object({ type: z16.literal("image"), url: z16.string() })
|
|
2750
|
+
])
|
|
2751
|
+
).nullable()
|
|
2752
|
+
}),
|
|
2753
|
+
z16.object({
|
|
2754
|
+
type: z16.literal("image_generation_call"),
|
|
2755
|
+
id: z16.string(),
|
|
2756
|
+
result: z16.string()
|
|
2757
|
+
}),
|
|
2758
|
+
z16.object({
|
|
2759
|
+
type: z16.literal("local_shell_call"),
|
|
2760
|
+
id: z16.string(),
|
|
2761
|
+
call_id: z16.string(),
|
|
2762
|
+
action: z16.object({
|
|
2763
|
+
type: z16.literal("exec"),
|
|
2764
|
+
command: z16.array(z16.string()),
|
|
2765
|
+
timeout_ms: z16.number().optional(),
|
|
2766
|
+
user: z16.string().optional(),
|
|
2767
|
+
working_directory: z16.string().optional(),
|
|
2768
|
+
env: z16.record(z16.string(), z16.string()).optional()
|
|
2769
|
+
})
|
|
2770
|
+
}),
|
|
2771
|
+
z16.object({
|
|
2772
|
+
type: z16.literal("function_call"),
|
|
2773
|
+
call_id: z16.string(),
|
|
2774
|
+
name: z16.string(),
|
|
2775
|
+
arguments: z16.string(),
|
|
2776
|
+
id: z16.string()
|
|
2777
|
+
}),
|
|
2778
|
+
z16.object({
|
|
2779
|
+
type: z16.literal("computer_call"),
|
|
2780
|
+
id: z16.string(),
|
|
2781
|
+
status: z16.string().optional()
|
|
2782
|
+
}),
|
|
2783
|
+
z16.object({
|
|
2784
|
+
type: z16.literal("reasoning"),
|
|
2785
|
+
id: z16.string(),
|
|
2786
|
+
encrypted_content: z16.string().nullish(),
|
|
2787
|
+
summary: z16.array(
|
|
2788
|
+
z16.object({
|
|
2789
|
+
type: z16.literal("summary_text"),
|
|
2790
|
+
text: z16.string()
|
|
2791
|
+
})
|
|
2792
|
+
)
|
|
2793
|
+
})
|
|
2794
|
+
])
|
|
2795
|
+
),
|
|
2796
|
+
service_tier: z16.string().nullish(),
|
|
2797
|
+
incomplete_details: z16.object({ reason: z16.string() }).nullish(),
|
|
2798
|
+
usage: z16.object({
|
|
2799
|
+
input_tokens: z16.number(),
|
|
2800
|
+
input_tokens_details: z16.object({ cached_tokens: z16.number().nullish() }).nullish(),
|
|
2801
|
+
output_tokens: z16.number(),
|
|
2802
|
+
output_tokens_details: z16.object({ reasoning_tokens: z16.number().nullish() }).nullish()
|
|
2803
|
+
})
|
|
2804
|
+
})
|
|
2805
|
+
)
|
|
2806
|
+
);
|
|
2807
|
+
|
|
2808
|
+
// src/responses/openai-responses-options.ts
|
|
2809
|
+
var import_provider_utils22 = require("@ai-sdk/provider-utils");
|
|
2810
|
+
var z17 = __toESM(require("zod/v4"));
|
|
2811
|
+
var TOP_LOGPROBS_MAX = 20;
|
|
2812
|
+
var openaiResponsesReasoningModelIds = [
|
|
2813
|
+
"o1",
|
|
2814
|
+
"o1-2024-12-17",
|
|
2815
|
+
"o3-mini",
|
|
2816
|
+
"o3-mini-2025-01-31",
|
|
2817
|
+
"o3",
|
|
2818
|
+
"o3-2025-04-16",
|
|
2819
|
+
"o4-mini",
|
|
2820
|
+
"o4-mini-2025-04-16",
|
|
2821
|
+
"codex-mini-latest",
|
|
2822
|
+
"computer-use-preview",
|
|
2823
|
+
"gpt-5",
|
|
2824
|
+
"gpt-5-2025-08-07",
|
|
2825
|
+
"gpt-5-codex",
|
|
2826
|
+
"gpt-5-mini",
|
|
2827
|
+
"gpt-5-mini-2025-08-07",
|
|
2828
|
+
"gpt-5-nano",
|
|
2829
|
+
"gpt-5-nano-2025-08-07",
|
|
2830
|
+
"gpt-5-pro",
|
|
2831
|
+
"gpt-5-pro-2025-10-06"
|
|
2832
|
+
];
|
|
2833
|
+
var openaiResponsesModelIds = [
|
|
2834
|
+
"gpt-4.1",
|
|
2835
|
+
"gpt-4.1-2025-04-14",
|
|
2836
|
+
"gpt-4.1-mini",
|
|
2837
|
+
"gpt-4.1-mini-2025-04-14",
|
|
2838
|
+
"gpt-4.1-nano",
|
|
2839
|
+
"gpt-4.1-nano-2025-04-14",
|
|
2840
|
+
"gpt-4o",
|
|
2841
|
+
"gpt-4o-2024-05-13",
|
|
2842
|
+
"gpt-4o-2024-08-06",
|
|
2843
|
+
"gpt-4o-2024-11-20",
|
|
2844
|
+
"gpt-4o-audio-preview",
|
|
2845
|
+
"gpt-4o-audio-preview-2024-10-01",
|
|
2846
|
+
"gpt-4o-audio-preview-2024-12-17",
|
|
2847
|
+
"gpt-4o-search-preview",
|
|
2848
|
+
"gpt-4o-search-preview-2025-03-11",
|
|
2849
|
+
"gpt-4o-mini-search-preview",
|
|
2850
|
+
"gpt-4o-mini-search-preview-2025-03-11",
|
|
2851
|
+
"gpt-4o-mini",
|
|
2852
|
+
"gpt-4o-mini-2024-07-18",
|
|
2853
|
+
"gpt-4-turbo",
|
|
2854
|
+
"gpt-4-turbo-2024-04-09",
|
|
2855
|
+
"gpt-4-turbo-preview",
|
|
2856
|
+
"gpt-4-0125-preview",
|
|
2857
|
+
"gpt-4-1106-preview",
|
|
2858
|
+
"gpt-4",
|
|
2859
|
+
"gpt-4-0613",
|
|
2860
|
+
"gpt-4.5-preview",
|
|
2861
|
+
"gpt-4.5-preview-2025-02-27",
|
|
2862
|
+
"gpt-3.5-turbo-0125",
|
|
2863
|
+
"gpt-3.5-turbo",
|
|
2864
|
+
"gpt-3.5-turbo-1106",
|
|
2865
|
+
"chatgpt-4o-latest",
|
|
2866
|
+
"gpt-5-chat-latest",
|
|
2867
|
+
...openaiResponsesReasoningModelIds
|
|
2868
|
+
];
|
|
2869
|
+
var openaiResponsesProviderOptionsSchema = (0, import_provider_utils22.lazyValidator)(
|
|
2870
|
+
() => (0, import_provider_utils22.zodSchema)(
|
|
2871
|
+
z17.object({
|
|
2872
|
+
include: z17.array(
|
|
2873
|
+
z17.enum([
|
|
2874
|
+
"reasoning.encrypted_content",
|
|
2875
|
+
"file_search_call.results",
|
|
2876
|
+
"message.output_text.logprobs"
|
|
2877
|
+
])
|
|
2878
|
+
).nullish(),
|
|
2879
|
+
instructions: z17.string().nullish(),
|
|
2880
|
+
/**
|
|
2881
|
+
* Return the log probabilities of the tokens.
|
|
2882
|
+
*
|
|
2883
|
+
* Setting to true will return the log probabilities of the tokens that
|
|
2884
|
+
* were generated.
|
|
2885
|
+
*
|
|
2886
|
+
* Setting to a number will return the log probabilities of the top n
|
|
2887
|
+
* tokens that were generated.
|
|
2888
|
+
*
|
|
2889
|
+
* @see https://platform.openai.com/docs/api-reference/responses/create
|
|
2890
|
+
* @see https://cookbook.openai.com/examples/using_logprobs
|
|
2891
|
+
*/
|
|
2892
|
+
logprobs: z17.union([z17.boolean(), z17.number().min(1).max(TOP_LOGPROBS_MAX)]).optional(),
|
|
2893
|
+
/**
|
|
2894
|
+
* The maximum number of total calls to built-in tools that can be processed in a response.
|
|
2895
|
+
* This maximum number applies across all built-in tool calls, not per individual tool.
|
|
2896
|
+
* Any further attempts to call a tool by the model will be ignored.
|
|
2897
|
+
*/
|
|
2898
|
+
maxToolCalls: z17.number().nullish(),
|
|
2899
|
+
metadata: z17.any().nullish(),
|
|
2900
|
+
parallelToolCalls: z17.boolean().nullish(),
|
|
2901
|
+
previousResponseId: z17.string().nullish(),
|
|
2902
|
+
promptCacheKey: z17.string().nullish(),
|
|
2903
|
+
reasoningEffort: z17.string().nullish(),
|
|
2904
|
+
reasoningSummary: z17.string().nullish(),
|
|
2905
|
+
safetyIdentifier: z17.string().nullish(),
|
|
2906
|
+
serviceTier: z17.enum(["auto", "flex", "priority"]).nullish(),
|
|
2907
|
+
store: z17.boolean().nullish(),
|
|
2908
|
+
strictJsonSchema: z17.boolean().nullish(),
|
|
2909
|
+
textVerbosity: z17.enum(["low", "medium", "high"]).nullish(),
|
|
2910
|
+
user: z17.string().nullish()
|
|
2911
|
+
})
|
|
2912
|
+
)
|
|
2913
|
+
);
|
|
2914
|
+
|
|
2254
2915
|
// src/responses/openai-responses-prepare-tools.ts
|
|
2255
2916
|
var import_provider7 = require("@ai-sdk/provider");
|
|
2256
|
-
function prepareResponsesTools({
|
|
2917
|
+
var import_provider_utils23 = require("@ai-sdk/provider-utils");
|
|
2918
|
+
async function prepareResponsesTools({
|
|
2257
2919
|
tools,
|
|
2258
2920
|
toolChoice,
|
|
2259
2921
|
strictJsonSchema
|
|
@@ -2278,7 +2940,10 @@ function prepareResponsesTools({
|
|
|
2278
2940
|
case "provider-defined": {
|
|
2279
2941
|
switch (tool.id) {
|
|
2280
2942
|
case "openai.file_search": {
|
|
2281
|
-
const args =
|
|
2943
|
+
const args = await (0, import_provider_utils23.validateTypes)({
|
|
2944
|
+
value: tool.args,
|
|
2945
|
+
schema: fileSearchArgsSchema
|
|
2946
|
+
});
|
|
2282
2947
|
openaiTools2.push({
|
|
2283
2948
|
type: "file_search",
|
|
2284
2949
|
vector_store_ids: args.vectorStoreIds,
|
|
@@ -2298,7 +2963,10 @@ function prepareResponsesTools({
|
|
|
2298
2963
|
break;
|
|
2299
2964
|
}
|
|
2300
2965
|
case "openai.web_search_preview": {
|
|
2301
|
-
const args =
|
|
2966
|
+
const args = await (0, import_provider_utils23.validateTypes)({
|
|
2967
|
+
value: tool.args,
|
|
2968
|
+
schema: webSearchPreviewArgsSchema
|
|
2969
|
+
});
|
|
2302
2970
|
openaiTools2.push({
|
|
2303
2971
|
type: "web_search_preview",
|
|
2304
2972
|
search_context_size: args.searchContextSize,
|
|
@@ -2307,7 +2975,10 @@ function prepareResponsesTools({
|
|
|
2307
2975
|
break;
|
|
2308
2976
|
}
|
|
2309
2977
|
case "openai.web_search": {
|
|
2310
|
-
const args =
|
|
2978
|
+
const args = await (0, import_provider_utils23.validateTypes)({
|
|
2979
|
+
value: tool.args,
|
|
2980
|
+
schema: webSearchArgsSchema
|
|
2981
|
+
});
|
|
2311
2982
|
openaiTools2.push({
|
|
2312
2983
|
type: "web_search",
|
|
2313
2984
|
filters: args.filters != null ? { allowed_domains: args.filters.allowedDomains } : void 0,
|
|
@@ -2317,7 +2988,10 @@ function prepareResponsesTools({
|
|
|
2317
2988
|
break;
|
|
2318
2989
|
}
|
|
2319
2990
|
case "openai.code_interpreter": {
|
|
2320
|
-
const args =
|
|
2991
|
+
const args = await (0, import_provider_utils23.validateTypes)({
|
|
2992
|
+
value: tool.args,
|
|
2993
|
+
schema: codeInterpreterArgsSchema
|
|
2994
|
+
});
|
|
2321
2995
|
openaiTools2.push({
|
|
2322
2996
|
type: "code_interpreter",
|
|
2323
2997
|
container: args.container == null ? { type: "auto", file_ids: void 0 } : typeof args.container === "string" ? args.container : { type: "auto", file_ids: args.container.fileIds }
|
|
@@ -2325,7 +2999,10 @@ function prepareResponsesTools({
|
|
|
2325
2999
|
break;
|
|
2326
3000
|
}
|
|
2327
3001
|
case "openai.image_generation": {
|
|
2328
|
-
const args =
|
|
3002
|
+
const args = await (0, import_provider_utils23.validateTypes)({
|
|
3003
|
+
value: tool.args,
|
|
3004
|
+
schema: imageGenerationArgsSchema
|
|
3005
|
+
});
|
|
2329
3006
|
openaiTools2.push({
|
|
2330
3007
|
type: "image_generation",
|
|
2331
3008
|
background: args.background,
|
|
@@ -2376,83 +3053,6 @@ function prepareResponsesTools({
|
|
|
2376
3053
|
}
|
|
2377
3054
|
|
|
2378
3055
|
// src/responses/openai-responses-language-model.ts
|
|
2379
|
-
var webSearchCallItem = import_v416.z.object({
|
|
2380
|
-
type: import_v416.z.literal("web_search_call"),
|
|
2381
|
-
id: import_v416.z.string(),
|
|
2382
|
-
status: import_v416.z.string(),
|
|
2383
|
-
action: import_v416.z.discriminatedUnion("type", [
|
|
2384
|
-
import_v416.z.object({
|
|
2385
|
-
type: import_v416.z.literal("search"),
|
|
2386
|
-
query: import_v416.z.string().nullish()
|
|
2387
|
-
}),
|
|
2388
|
-
import_v416.z.object({
|
|
2389
|
-
type: import_v416.z.literal("open_page"),
|
|
2390
|
-
url: import_v416.z.string()
|
|
2391
|
-
}),
|
|
2392
|
-
import_v416.z.object({
|
|
2393
|
-
type: import_v416.z.literal("find"),
|
|
2394
|
-
url: import_v416.z.string(),
|
|
2395
|
-
pattern: import_v416.z.string()
|
|
2396
|
-
})
|
|
2397
|
-
]).nullish()
|
|
2398
|
-
});
|
|
2399
|
-
var fileSearchCallItem = import_v416.z.object({
|
|
2400
|
-
type: import_v416.z.literal("file_search_call"),
|
|
2401
|
-
id: import_v416.z.string(),
|
|
2402
|
-
queries: import_v416.z.array(import_v416.z.string()),
|
|
2403
|
-
results: import_v416.z.array(
|
|
2404
|
-
import_v416.z.object({
|
|
2405
|
-
attributes: import_v416.z.record(import_v416.z.string(), import_v416.z.unknown()),
|
|
2406
|
-
file_id: import_v416.z.string(),
|
|
2407
|
-
filename: import_v416.z.string(),
|
|
2408
|
-
score: import_v416.z.number(),
|
|
2409
|
-
text: import_v416.z.string()
|
|
2410
|
-
})
|
|
2411
|
-
).nullish()
|
|
2412
|
-
});
|
|
2413
|
-
var codeInterpreterCallItem = import_v416.z.object({
|
|
2414
|
-
type: import_v416.z.literal("code_interpreter_call"),
|
|
2415
|
-
id: import_v416.z.string(),
|
|
2416
|
-
code: import_v416.z.string().nullable(),
|
|
2417
|
-
container_id: import_v416.z.string(),
|
|
2418
|
-
outputs: import_v416.z.array(
|
|
2419
|
-
import_v416.z.discriminatedUnion("type", [
|
|
2420
|
-
import_v416.z.object({ type: import_v416.z.literal("logs"), logs: import_v416.z.string() }),
|
|
2421
|
-
import_v416.z.object({ type: import_v416.z.literal("image"), url: import_v416.z.string() })
|
|
2422
|
-
])
|
|
2423
|
-
).nullable()
|
|
2424
|
-
});
|
|
2425
|
-
var localShellCallItem = import_v416.z.object({
|
|
2426
|
-
type: import_v416.z.literal("local_shell_call"),
|
|
2427
|
-
id: import_v416.z.string(),
|
|
2428
|
-
call_id: import_v416.z.string(),
|
|
2429
|
-
action: import_v416.z.object({
|
|
2430
|
-
type: import_v416.z.literal("exec"),
|
|
2431
|
-
command: import_v416.z.array(import_v416.z.string()),
|
|
2432
|
-
timeout_ms: import_v416.z.number().optional(),
|
|
2433
|
-
user: import_v416.z.string().optional(),
|
|
2434
|
-
working_directory: import_v416.z.string().optional(),
|
|
2435
|
-
env: import_v416.z.record(import_v416.z.string(), import_v416.z.string()).optional()
|
|
2436
|
-
})
|
|
2437
|
-
});
|
|
2438
|
-
var imageGenerationCallItem = import_v416.z.object({
|
|
2439
|
-
type: import_v416.z.literal("image_generation_call"),
|
|
2440
|
-
id: import_v416.z.string(),
|
|
2441
|
-
result: import_v416.z.string()
|
|
2442
|
-
});
|
|
2443
|
-
var TOP_LOGPROBS_MAX = 20;
|
|
2444
|
-
var LOGPROBS_SCHEMA = import_v416.z.array(
|
|
2445
|
-
import_v416.z.object({
|
|
2446
|
-
token: import_v416.z.string(),
|
|
2447
|
-
logprob: import_v416.z.number(),
|
|
2448
|
-
top_logprobs: import_v416.z.array(
|
|
2449
|
-
import_v416.z.object({
|
|
2450
|
-
token: import_v416.z.string(),
|
|
2451
|
-
logprob: import_v416.z.number()
|
|
2452
|
-
})
|
|
2453
|
-
)
|
|
2454
|
-
})
|
|
2455
|
-
);
|
|
2456
3056
|
var OpenAIResponsesLanguageModel = class {
|
|
2457
3057
|
constructor(modelId, config) {
|
|
2458
3058
|
this.specificationVersion = "v2";
|
|
@@ -2505,7 +3105,7 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
2505
3105
|
if (stopSequences != null) {
|
|
2506
3106
|
warnings.push({ type: "unsupported-setting", setting: "stopSequences" });
|
|
2507
3107
|
}
|
|
2508
|
-
const openaiOptions = await (0,
|
|
3108
|
+
const openaiOptions = await (0, import_provider_utils24.parseProviderOptions)({
|
|
2509
3109
|
provider: "openai",
|
|
2510
3110
|
providerOptions,
|
|
2511
3111
|
schema: openaiResponsesProviderOptionsSchema
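
These provider options are parsed per request against the lazyValidator schema added earlier in this diff. A hedged usage sketch; the generateText/providerOptions plumbing is assumed from the AI SDK, while the option names and the 1-20 logprobs bound (TOP_LOGPROBS_MAX) come from this diff:

// Hedged usage sketch; option names from openaiResponsesProviderOptionsSchema above.
import { openai } from '@ai-sdk/openai';
import { generateText } from 'ai';

const { text } = await generateText({
  model: openai.responses('gpt-5-mini'),
  prompt: 'Summarize the release notes.',
  providerOptions: {
    openai: {
      logprobs: 5,            // boolean, or a number between 1 and 20
      serviceTier: 'flex',
      textVerbosity: 'low',
      maxToolCalls: 3,
    },
  },
});
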
|
|
@@ -2644,7 +3244,7 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
2644
3244
|
tools: openaiTools2,
|
|
2645
3245
|
toolChoice: openaiToolChoice,
|
|
2646
3246
|
toolWarnings
|
|
2647
|
-
} = prepareResponsesTools({
|
|
3247
|
+
} = await prepareResponsesTools({
|
|
2648
3248
|
tools,
|
|
2649
3249
|
toolChoice,
|
|
2650
3250
|
strictJsonSchema
|
|
@@ -2674,91 +3274,13 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
2674
3274
|
responseHeaders,
|
|
2675
3275
|
value: response,
|
|
2676
3276
|
rawValue: rawResponse
|
|
2677
|
-
} = await (0,
|
|
3277
|
+
} = await (0, import_provider_utils24.postJsonToApi)({
|
|
2678
3278
|
url,
|
|
2679
|
-
headers: (0,
|
|
3279
|
+
headers: (0, import_provider_utils24.combineHeaders)(this.config.headers(), options.headers),
|
|
2680
3280
|
body,
|
|
2681
3281
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
2682
|
-
successfulResponseHandler: (0,
|
|
2683
|
-
import_v416.z.object({
|
|
2684
|
-
id: import_v416.z.string(),
|
|
2685
|
-
created_at: import_v416.z.number(),
|
|
2686
|
-
error: import_v416.z.object({
|
|
2687
|
-
code: import_v416.z.string(),
|
|
2688
|
-
message: import_v416.z.string()
|
|
2689
|
-
}).nullish(),
|
|
2690
|
-
model: import_v416.z.string(),
|
|
2691
|
-
output: import_v416.z.array(
|
|
2692
|
-
import_v416.z.discriminatedUnion("type", [
|
|
2693
|
-
import_v416.z.object({
|
|
2694
|
-
type: import_v416.z.literal("message"),
|
|
2695
|
-
role: import_v416.z.literal("assistant"),
|
|
2696
|
-
id: import_v416.z.string(),
|
|
2697
|
-
content: import_v416.z.array(
|
|
2698
|
-
import_v416.z.object({
|
|
2699
|
-
type: import_v416.z.literal("output_text"),
|
|
2700
|
-
text: import_v416.z.string(),
|
|
2701
|
-
logprobs: LOGPROBS_SCHEMA.nullish(),
|
|
2702
|
-
annotations: import_v416.z.array(
|
|
2703
|
-
import_v416.z.discriminatedUnion("type", [
|
|
2704
|
-
import_v416.z.object({
|
|
2705
|
-
type: import_v416.z.literal("url_citation"),
|
|
2706
|
-
start_index: import_v416.z.number(),
|
|
2707
|
-
end_index: import_v416.z.number(),
|
|
2708
|
-
url: import_v416.z.string(),
|
|
2709
|
-
title: import_v416.z.string()
|
|
2710
|
-
}),
|
|
2711
|
-
import_v416.z.object({
|
|
2712
|
-
type: import_v416.z.literal("file_citation"),
|
|
2713
|
-
file_id: import_v416.z.string(),
|
|
2714
|
-
filename: import_v416.z.string().nullish(),
|
|
2715
|
-
index: import_v416.z.number().nullish(),
|
|
2716
|
-
start_index: import_v416.z.number().nullish(),
|
|
2717
|
-
end_index: import_v416.z.number().nullish(),
|
|
2718
|
-
quote: import_v416.z.string().nullish()
|
|
2719
|
-
}),
|
|
2720
|
-
import_v416.z.object({
|
|
2721
|
-
type: import_v416.z.literal("container_file_citation")
|
|
2722
|
-
})
|
|
2723
|
-
])
|
|
2724
|
-
)
|
|
2725
|
-
})
|
|
2726
|
-
)
|
|
2727
|
-
}),
|
|
2728
|
-
webSearchCallItem,
|
|
2729
|
-
fileSearchCallItem,
|
|
2730
|
-
codeInterpreterCallItem,
|
|
2731
|
-
imageGenerationCallItem,
|
|
2732
|
-
localShellCallItem,
|
|
2733
|
-
import_v416.z.object({
|
|
2734
|
-
type: import_v416.z.literal("function_call"),
|
|
2735
|
-
call_id: import_v416.z.string(),
|
|
2736
|
-
name: import_v416.z.string(),
|
|
2737
|
-
arguments: import_v416.z.string(),
|
|
2738
|
-
id: import_v416.z.string()
|
|
2739
|
-
}),
|
|
2740
|
-
import_v416.z.object({
|
|
2741
|
-
type: import_v416.z.literal("computer_call"),
|
|
2742
|
-
id: import_v416.z.string(),
|
|
2743
|
-
status: import_v416.z.string().optional()
|
|
2744
|
-
}),
|
|
2745
|
-
import_v416.z.object({
|
|
2746
|
-
type: import_v416.z.literal("reasoning"),
|
|
2747
|
-
id: import_v416.z.string(),
|
|
2748
|
-
encrypted_content: import_v416.z.string().nullish(),
|
|
2749
|
-
summary: import_v416.z.array(
|
|
2750
|
-
import_v416.z.object({
|
|
2751
|
-
type: import_v416.z.literal("summary_text"),
|
|
2752
|
-
text: import_v416.z.string()
|
|
2753
|
-
})
|
|
2754
|
-
)
|
|
2755
|
-
})
|
|
2756
|
-
])
|
|
2757
|
-
),
|
|
2758
|
-
service_tier: import_v416.z.string().nullish(),
|
|
2759
|
-
incomplete_details: import_v416.z.object({ reason: import_v416.z.string() }).nullish(),
|
|
2760
|
-
usage: usageSchema2
|
|
2761
|
-
})
|
|
3282
|
+
successfulResponseHandler: (0, import_provider_utils24.createJsonResponseHandler)(
|
|
3283
|
+
openaiResponsesResponseSchema
|
|
2762
3284
|
),
|
|
2763
3285
|
abortSignal: options.abortSignal,
|
|
2764
3286
|
fetch: this.config.fetch
|
|
@@ -2821,7 +3343,9 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
2821
3343
|
type: "tool-call",
|
|
2822
3344
|
toolCallId: part.call_id,
|
|
2823
3345
|
toolName: "local_shell",
|
|
2824
|
-
input: JSON.stringify({
|
|
3346
|
+
input: JSON.stringify({
|
|
3347
|
+
action: part.action
|
|
3348
|
+
}),
|
|
2825
3349
|
providerMetadata: {
|
|
2826
3350
|
openai: {
|
|
2827
3351
|
itemId: part.id
|
|
@@ -2849,7 +3373,7 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
2849
3373
|
content.push({
|
|
2850
3374
|
type: "source",
|
|
2851
3375
|
sourceType: "url",
|
|
2852
|
-
id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : (0,
|
|
3376
|
+
id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : (0, import_provider_utils24.generateId)(),
|
|
2853
3377
|
url: annotation.url,
|
|
2854
3378
|
title: annotation.title
|
|
2855
3379
|
});
|
|
@@ -2857,7 +3381,7 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
2857
3381
|
content.push({
|
|
2858
3382
|
type: "source",
|
|
2859
3383
|
sourceType: "document",
|
|
2860
|
-
id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0,
|
|
3384
|
+
id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0, import_provider_utils24.generateId)(),
|
|
2861
3385
|
mediaType: "text/plain",
|
|
2862
3386
|
title: (_k = (_j = annotation.quote) != null ? _j : annotation.filename) != null ? _k : "Document",
|
|
2863
3387
|
filename: (_l = annotation.filename) != null ? _l : annotation.file_id
|
|
@@ -3009,18 +3533,18 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
3009
3533
|
warnings,
|
|
3010
3534
|
webSearchToolName
|
|
3011
3535
|
} = await this.getArgs(options);
|
|
3012
|
-
const { responseHeaders, value: response } = await (0,
|
|
3536
|
+
const { responseHeaders, value: response } = await (0, import_provider_utils24.postJsonToApi)({
|
|
3013
3537
|
url: this.config.url({
|
|
3014
3538
|
path: "/responses",
|
|
3015
3539
|
modelId: this.modelId
|
|
3016
3540
|
}),
|
|
3017
|
-
headers: (0,
|
|
3541
|
+
headers: (0, import_provider_utils24.combineHeaders)(this.config.headers(), options.headers),
|
|
3018
3542
|
body: {
|
|
3019
3543
|
...body,
|
|
3020
3544
|
stream: true
|
|
3021
3545
|
},
|
|
3022
3546
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
3023
|
-
successfulResponseHandler: (0,
|
|
3547
|
+
successfulResponseHandler: (0, import_provider_utils24.createEventSourceResponseHandler)(
|
|
3024
3548
|
openaiResponsesChunkSchema
|
|
3025
3549
|
),
|
|
3026
3550
|
abortSignal: options.abortSignal,
|
|
@@ -3397,7 +3921,7 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
3397
3921
|
controller.enqueue({
|
|
3398
3922
|
type: "source",
|
|
3399
3923
|
sourceType: "url",
|
|
3400
|
-
id: (_q = (_p = (_o = self.config).generateId) == null ? void 0 : _p.call(_o)) != null ? _q : (0,
|
|
3924
|
+
id: (_q = (_p = (_o = self.config).generateId) == null ? void 0 : _p.call(_o)) != null ? _q : (0, import_provider_utils24.generateId)(),
|
|
3401
3925
|
url: value.annotation.url,
|
|
3402
3926
|
title: value.annotation.title
|
|
3403
3927
|
});
|
|
@@ -3405,7 +3929,7 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
3405
3929
|
controller.enqueue({
|
|
3406
3930
|
type: "source",
|
|
3407
3931
|
sourceType: "document",
|
|
3408
|
-
id: (_t = (_s = (_r = self.config).generateId) == null ? void 0 : _s.call(_r)) != null ? _t : (0,
|
|
3932
|
+
id: (_t = (_s = (_r = self.config).generateId) == null ? void 0 : _s.call(_r)) != null ? _t : (0, import_provider_utils24.generateId)(),
|
|
3409
3933
|
mediaType: "text/plain",
|
|
3410
3934
|
title: (_v = (_u = value.annotation.quote) != null ? _u : value.annotation.filename) != null ? _v : "Document",
|
|
3411
3935
|
filename: (_w = value.annotation.filename) != null ? _w : value.annotation.file_id
|
|
@@ -3441,196 +3965,6 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
3441
3965
|
};
|
|
3442
3966
|
}
|
|
3443
3967
|
};
|
|
3444
|
-
var usageSchema2 = import_v416.z.object({
|
|
3445
|
-
input_tokens: import_v416.z.number(),
|
|
3446
|
-
input_tokens_details: import_v416.z.object({ cached_tokens: import_v416.z.number().nullish() }).nullish(),
|
|
3447
|
-
output_tokens: import_v416.z.number(),
|
|
3448
|
-
output_tokens_details: import_v416.z.object({ reasoning_tokens: import_v416.z.number().nullish() }).nullish()
|
|
3449
|
-
});
|
|
3450
|
-
var textDeltaChunkSchema = import_v416.z.object({
|
|
3451
|
-
type: import_v416.z.literal("response.output_text.delta"),
|
|
3452
|
-
item_id: import_v416.z.string(),
|
|
3453
|
-
delta: import_v416.z.string(),
|
|
3454
|
-
logprobs: LOGPROBS_SCHEMA.nullish()
|
|
3455
|
-
});
|
|
3456
|
-
var errorChunkSchema = import_v416.z.object({
|
|
3457
|
-
type: import_v416.z.literal("error"),
|
|
3458
|
-
code: import_v416.z.string(),
|
|
3459
|
-
message: import_v416.z.string(),
|
|
3460
|
-
param: import_v416.z.string().nullish(),
|
|
3461
|
-
sequence_number: import_v416.z.number()
|
|
3462
|
-
});
|
|
3463
|
-
var responseFinishedChunkSchema = import_v416.z.object({
|
|
3464
|
-
type: import_v416.z.enum(["response.completed", "response.incomplete"]),
|
|
3465
|
-
response: import_v416.z.object({
|
|
3466
|
-
incomplete_details: import_v416.z.object({ reason: import_v416.z.string() }).nullish(),
|
|
3467
|
-
usage: usageSchema2,
|
|
3468
|
-
service_tier: import_v416.z.string().nullish()
|
|
3469
|
-
})
|
|
3470
|
-
});
|
|
3471
|
-
var responseCreatedChunkSchema = import_v416.z.object({
|
|
3472
|
-
type: import_v416.z.literal("response.created"),
|
|
3473
|
-
response: import_v416.z.object({
|
|
3474
|
-
id: import_v416.z.string(),
|
|
3475
|
-
created_at: import_v416.z.number(),
|
|
3476
|
-
model: import_v416.z.string(),
|
|
3477
|
-
service_tier: import_v416.z.string().nullish()
|
|
3478
|
-
})
|
|
3479
|
-
});
|
|
3480
|
-
var responseOutputItemAddedSchema = import_v416.z.object({
|
|
3481
|
-
type: import_v416.z.literal("response.output_item.added"),
|
|
3482
|
-
output_index: import_v416.z.number(),
|
|
3483
|
-
item: import_v416.z.discriminatedUnion("type", [
|
|
3484
|
-
import_v416.z.object({
|
|
3485
|
-
type: import_v416.z.literal("message"),
|
|
3486
|
-
id: import_v416.z.string()
|
|
3487
|
-
}),
|
|
3488
|
-
import_v416.z.object({
|
|
3489
|
-
type: import_v416.z.literal("reasoning"),
|
|
3490
|
-
id: import_v416.z.string(),
|
|
3491
|
-
encrypted_content: import_v416.z.string().nullish()
|
|
3492
|
-
}),
|
|
3493
|
-
import_v416.z.object({
|
|
3494
|
-
type: import_v416.z.literal("function_call"),
|
|
3495
|
-
id: import_v416.z.string(),
|
|
3496
|
-
call_id: import_v416.z.string(),
|
|
3497
|
-
name: import_v416.z.string(),
|
|
3498
|
-
arguments: import_v416.z.string()
|
|
3499
|
-
}),
|
|
3500
|
-
import_v416.z.object({
|
|
3501
|
-
type: import_v416.z.literal("web_search_call"),
|
|
3502
|
-
id: import_v416.z.string(),
|
|
3503
|
-
status: import_v416.z.string(),
|
|
3504
|
-
action: import_v416.z.object({
|
|
3505
|
-
type: import_v416.z.literal("search"),
|
|
3506
|
-
query: import_v416.z.string().optional()
|
|
3507
|
-
}).nullish()
|
|
3508
|
-
}),
|
|
3509
|
-
import_v416.z.object({
|
|
3510
|
-
type: import_v416.z.literal("computer_call"),
|
|
3511
|
-
id: import_v416.z.string(),
|
|
3512
|
-
status: import_v416.z.string()
|
|
3513
|
-
}),
|
|
3514
|
-
import_v416.z.object({
|
|
3515
|
-
type: import_v416.z.literal("file_search_call"),
|
|
3516
|
-
id: import_v416.z.string()
|
|
3517
|
-
}),
|
|
3518
|
-
import_v416.z.object({
|
|
3519
|
-
type: import_v416.z.literal("image_generation_call"),
|
|
3520
|
-
id: import_v416.z.string()
|
|
3521
|
-
}),
|
|
3522
|
-
import_v416.z.object({
|
|
3523
|
-
type: import_v416.z.literal("code_interpreter_call"),
|
|
3524
|
-
id: import_v416.z.string(),
|
|
3525
|
-
container_id: import_v416.z.string(),
|
|
3526
|
-
code: import_v416.z.string().nullable(),
|
|
3527
|
-
outputs: import_v416.z.array(
|
|
3528
|
-
import_v416.z.discriminatedUnion("type", [
|
|
3529
|
-
import_v416.z.object({ type: import_v416.z.literal("logs"), logs: import_v416.z.string() }),
|
|
3530
|
-
import_v416.z.object({ type: import_v416.z.literal("image"), url: import_v416.z.string() })
|
|
3531
|
-
])
|
|
3532
|
-
).nullable(),
|
|
3533
|
-
status: import_v416.z.string()
|
|
3534
|
-
})
|
|
3535
|
-
])
|
|
3536
|
-
});
|
|
3537
|
-
var responseOutputItemDoneSchema = import_v416.z.object({
|
|
3538
|
-
type: import_v416.z.literal("response.output_item.done"),
|
|
3539
|
-
output_index: import_v416.z.number(),
|
|
3540
|
-
item: import_v416.z.discriminatedUnion("type", [
|
|
3541
|
-
import_v416.z.object({
|
|
3542
|
-
type: import_v416.z.literal("message"),
|
|
3543
|
-
id: import_v416.z.string()
|
|
3544
|
-
}),
|
|
3545
|
-
import_v416.z.object({
|
|
3546
|
-
type: import_v416.z.literal("reasoning"),
|
|
3547
|
-
id: import_v416.z.string(),
|
|
3548
|
-
encrypted_content: import_v416.z.string().nullish()
|
|
3549
|
-
}),
|
|
3550
|
-
import_v416.z.object({
|
|
3551
|
-
type: import_v416.z.literal("function_call"),
|
|
3552
|
-
id: import_v416.z.string(),
|
|
3553
|
-
call_id: import_v416.z.string(),
|
|
3554
|
-
name: import_v416.z.string(),
|
|
3555
|
-
arguments: import_v416.z.string(),
|
|
3556
|
-
status: import_v416.z.literal("completed")
|
|
3557
|
-
}),
|
|
3558
|
-
codeInterpreterCallItem,
|
|
3559
|
-
imageGenerationCallItem,
|
|
3560
|
-
webSearchCallItem,
|
|
3561
|
-
fileSearchCallItem,
|
|
3562
|
-
localShellCallItem,
|
|
3563
|
-
import_v416.z.object({
|
|
3564
|
-
type: import_v416.z.literal("computer_call"),
|
|
3565
|
-
id: import_v416.z.string(),
|
|
3566
|
-
status: import_v416.z.literal("completed")
|
|
3567
|
-
})
|
|
3568
|
-
])
|
|
3569
|
-
});
|
|
3570
|
-
var responseFunctionCallArgumentsDeltaSchema = import_v416.z.object({
|
|
3571
|
-
type: import_v416.z.literal("response.function_call_arguments.delta"),
|
|
3572
|
-
item_id: import_v416.z.string(),
|
|
3573
|
-
output_index: import_v416.z.number(),
|
|
3574
|
-
delta: import_v416.z.string()
|
|
3575
|
-
});
|
|
3576
|
-
var responseCodeInterpreterCallCodeDeltaSchema = import_v416.z.object({
|
|
3577
|
-
type: import_v416.z.literal("response.code_interpreter_call_code.delta"),
|
|
3578
|
-
item_id: import_v416.z.string(),
|
|
3579
|
-
output_index: import_v416.z.number(),
|
|
3580
|
-
delta: import_v416.z.string()
|
|
3581
|
-
});
|
|
3582
|
-
var responseCodeInterpreterCallCodeDoneSchema = import_v416.z.object({
|
|
3583
|
-
type: import_v416.z.literal("response.code_interpreter_call_code.done"),
|
|
3584
|
-
item_id: import_v416.z.string(),
|
|
3585
|
-
output_index: import_v416.z.number(),
|
|
3586
|
-
code: import_v416.z.string()
|
|
3587
|
-
});
|
|
3588
|
-
var responseAnnotationAddedSchema = import_v416.z.object({
|
|
3589
|
-
type: import_v416.z.literal("response.output_text.annotation.added"),
|
|
3590
|
-
annotation: import_v416.z.discriminatedUnion("type", [
|
|
3591
|
-
import_v416.z.object({
|
|
3592
|
-
type: import_v416.z.literal("url_citation"),
|
|
3593
|
-
url: import_v416.z.string(),
|
|
3594
|
-
title: import_v416.z.string()
|
|
3595
|
-
}),
|
|
3596
|
-
import_v416.z.object({
|
|
3597
|
-
type: import_v416.z.literal("file_citation"),
|
|
3598
|
-
file_id: import_v416.z.string(),
|
|
3599
|
-
filename: import_v416.z.string().nullish(),
|
|
3600
|
-
index: import_v416.z.number().nullish(),
|
|
3601
|
-
start_index: import_v416.z.number().nullish(),
|
|
3602
|
-
end_index: import_v416.z.number().nullish(),
|
|
3603
|
-
quote: import_v416.z.string().nullish()
|
|
3604
|
-
})
|
|
3605
|
-
])
|
|
3606
|
-
});
|
|
3607
|
-
var responseReasoningSummaryPartAddedSchema = import_v416.z.object({
|
|
3608
|
-
type: import_v416.z.literal("response.reasoning_summary_part.added"),
|
|
3609
|
-
item_id: import_v416.z.string(),
|
|
3610
|
-
summary_index: import_v416.z.number()
|
|
3611
|
-
});
|
|
3612
|
-
var responseReasoningSummaryTextDeltaSchema = import_v416.z.object({
|
|
3613
|
-
type: import_v416.z.literal("response.reasoning_summary_text.delta"),
|
|
3614
|
-
item_id: import_v416.z.string(),
|
|
3615
|
-
summary_index: import_v416.z.number(),
|
|
3616
|
-
delta: import_v416.z.string()
|
|
3617
|
-
});
|
|
3618
|
-
var openaiResponsesChunkSchema = import_v416.z.union([
|
|
3619
|
-
textDeltaChunkSchema,
|
|
3620
|
-
responseFinishedChunkSchema,
|
|
3621
|
-
responseCreatedChunkSchema,
|
|
3622
|
-
responseOutputItemAddedSchema,
|
|
3623
|
-
responseOutputItemDoneSchema,
|
|
3624
|
-
responseFunctionCallArgumentsDeltaSchema,
|
|
3625
|
-
responseCodeInterpreterCallCodeDeltaSchema,
|
|
3626
|
-
responseCodeInterpreterCallCodeDoneSchema,
|
|
3627
|
-
responseAnnotationAddedSchema,
|
|
3628
|
-
responseReasoningSummaryPartAddedSchema,
|
|
3629
|
-
responseReasoningSummaryTextDeltaSchema,
|
|
3630
|
-
errorChunkSchema,
|
|
3631
|
-
import_v416.z.object({ type: import_v416.z.string() }).loose()
|
|
3632
|
-
// fallback for unknown chunks
|
|
3633
|
-
]);
|
|
3634
3968
|
function isTextDeltaChunk(chunk) {
|
|
3635
3969
|
return chunk.type === "response.output_text.delta";
|
|
3636
3970
|
}
|
|
@@ -3707,55 +4041,23 @@ function getResponsesModelConfig(modelId) {
|
|
|
3707
4041
|
isReasoningModel: false
|
|
3708
4042
|
};
|
|
3709
4043
|
}
|
|
3710
|
-
var openaiResponsesProviderOptionsSchema = import_v416.z.object({
|
|
3711
|
-
include: import_v416.z.array(
|
|
3712
|
-
import_v416.z.enum([
|
|
3713
|
-
"reasoning.encrypted_content",
|
|
3714
|
-
"file_search_call.results",
|
|
3715
|
-
"message.output_text.logprobs"
|
|
3716
|
-
])
|
|
3717
|
-
).nullish(),
|
|
3718
|
-
instructions: import_v416.z.string().nullish(),
|
|
3719
|
-
/**
|
|
3720
|
-
* Return the log probabilities of the tokens.
|
|
3721
|
-
*
|
|
3722
|
-
* Setting to true will return the log probabilities of the tokens that
|
|
3723
|
-
* were generated.
|
|
3724
|
-
*
|
|
3725
|
-
* Setting to a number will return the log probabilities of the top n
|
|
3726
|
-
* tokens that were generated.
|
|
3727
|
-
*
|
|
3728
|
-
* @see https://platform.openai.com/docs/api-reference/responses/create
|
|
3729
|
-
* @see https://cookbook.openai.com/examples/using_logprobs
|
|
3730
|
-
*/
|
|
3731
|
-
logprobs: import_v416.z.union([import_v416.z.boolean(), import_v416.z.number().min(1).max(TOP_LOGPROBS_MAX)]).optional(),
|
|
3732
|
-
/**
|
|
3733
|
-
* The maximum number of total calls to built-in tools that can be processed in a response.
|
|
3734
|
-
* This maximum number applies across all built-in tool calls, not per individual tool.
|
|
3735
|
-
* Any further attempts to call a tool by the model will be ignored.
|
|
3736
|
-
*/
|
|
3737
|
-
maxToolCalls: import_v416.z.number().nullish(),
|
|
3738
|
-
metadata: import_v416.z.any().nullish(),
|
|
3739
|
-
parallelToolCalls: import_v416.z.boolean().nullish(),
|
|
3740
|
-
previousResponseId: import_v416.z.string().nullish(),
|
|
3741
|
-
promptCacheKey: import_v416.z.string().nullish(),
|
|
3742
|
-
reasoningEffort: import_v416.z.string().nullish(),
|
|
3743
|
-
reasoningSummary: import_v416.z.string().nullish(),
|
|
3744
|
-
safetyIdentifier: import_v416.z.string().nullish(),
|
|
3745
|
-
serviceTier: import_v416.z.enum(["auto", "flex", "priority"]).nullish(),
|
|
3746
|
-
store: import_v416.z.boolean().nullish(),
|
|
3747
|
-
strictJsonSchema: import_v416.z.boolean().nullish(),
|
|
3748
|
-
textVerbosity: import_v416.z.enum(["low", "medium", "high"]).nullish(),
|
|
3749
|
-
user: import_v416.z.string().nullish()
|
|
3750
|
-
});
|
|
3751
4044
|
|
|
3752
4045
|
// src/speech/openai-speech-model.ts
|
|
3753
|
-
var
|
|
3754-3758
|
-
… (removed: the remaining old speech-model imports and the previous inline speech provider options schema)
|
|
4046
|
+
var import_provider_utils26 = require("@ai-sdk/provider-utils");
|
|
4047
|
+
|
|
4048
|
+
// src/speech/openai-speech-options.ts
|
|
4049
|
+
var import_provider_utils25 = require("@ai-sdk/provider-utils");
|
|
4050
|
+
var z18 = __toESM(require("zod/v4"));
|
|
4051
|
+
var openaiSpeechProviderOptionsSchema = (0, import_provider_utils25.lazyValidator)(
|
|
4052
|
+
() => (0, import_provider_utils25.zodSchema)(
|
|
4053
|
+
z18.object({
|
|
4054
|
+
instructions: z18.string().nullish(),
|
|
4055
|
+
speed: z18.number().min(0.25).max(4).default(1).nullish()
|
|
4056
|
+
})
|
|
4057
|
+
)
|
|
4058
|
+
);
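
A hedged sketch of supplying these speech options: the experimental_generateSpeech API and the openai.speech entry point are assumptions from the AI SDK, while instructions and the 0.25-4 speed range come from the schema above:

// Hedged usage sketch; option names from openaiSpeechProviderOptionsSchema above.
import { openai } from '@ai-sdk/openai';
import { experimental_generateSpeech as generateSpeech } from 'ai';

const { audio } = await generateSpeech({
  model: openai.speech('gpt-4o-mini-tts'),
  text: 'The 2.0.45 release defers schema construction.',
  providerOptions: {
    openai: {
      instructions: 'Speak calmly and clearly.',
      speed: 1.25, // accepted range per the schema: 0.25 to 4
    },
  },
});
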
|
|
4059
|
+
|
|
4060
|
+
// src/speech/openai-speech-model.ts
|
|
3759
4061
|
var OpenAISpeechModel = class {
|
|
3760
4062
|
constructor(modelId, config) {
|
|
3761
4063
|
this.modelId = modelId;
|
|
@@ -3775,10 +4077,10 @@ var OpenAISpeechModel = class {
|
|
|
3775
4077
|
providerOptions
|
|
3776
4078
|
}) {
|
|
3777
4079
|
const warnings = [];
|
|
3778
|
-
const openAIOptions = await (0,
|
|
4080
|
+
const openAIOptions = await (0, import_provider_utils26.parseProviderOptions)({
|
|
3779
4081
|
provider: "openai",
|
|
3780
4082
|
providerOptions,
|
|
3781
|
-
schema:
|
|
4083
|
+
schema: openaiSpeechProviderOptionsSchema
|
|
3782
4084
|
});
|
|
3783
4085
|
const requestBody = {
|
|
3784
4086
|
model: this.modelId,
|
|
@@ -3828,15 +4130,15 @@ var OpenAISpeechModel = class {
|
|
|
3828
4130
|
value: audio,
|
|
3829
4131
|
responseHeaders,
|
|
3830
4132
|
rawValue: rawResponse
|
|
3831
|
-
} = await (0,
|
|
4133
|
+
} = await (0, import_provider_utils26.postJsonToApi)({
|
|
3832
4134
|
url: this.config.url({
|
|
3833
4135
|
path: "/audio/speech",
|
|
3834
4136
|
modelId: this.modelId
|
|
3835
4137
|
}),
|
|
3836
|
-
headers: (0,
|
|
4138
|
+
headers: (0, import_provider_utils26.combineHeaders)(this.config.headers(), options.headers),
|
|
3837
4139
|
body: requestBody,
|
|
3838
4140
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
3839
|
-
successfulResponseHandler: (0,
|
|
4141
|
+
successfulResponseHandler: (0, import_provider_utils26.createBinaryResponseHandler)(),
|
|
3840
4142
|
abortSignal: options.abortSignal,
|
|
3841
4143
|
fetch: this.config.fetch
|
|
3842
4144
|
});
|
|
@@ -3857,35 +4159,73 @@ var OpenAISpeechModel = class {
|
|
|
3857
4159
|
};
|
|
3858
4160
|
|
|
3859
4161
|
// src/transcription/openai-transcription-model.ts
|
|
3860
|
-
var
|
|
3861
|
-
var import_v419 = require("zod/v4");
|
|
4162
|
+
var import_provider_utils29 = require("@ai-sdk/provider-utils");
|
|
4163
|
+
|
|
4164
|
+
// src/transcription/openai-transcription-api.ts
|
|
4165
|
+
var import_provider_utils27 = require("@ai-sdk/provider-utils");
|
|
4166
|
+
var z19 = __toESM(require("zod/v4"));
|
|
4167
|
+
var openaiTranscriptionResponseSchema = (0, import_provider_utils27.lazyValidator)(
|
|
4168
|
+
() => (0, import_provider_utils27.zodSchema)(
|
|
4169
|
+
z19.object({
|
|
4170
|
+
text: z19.string(),
|
|
4171
|
+
language: z19.string().nullish(),
|
|
4172
|
+
duration: z19.number().nullish(),
|
|
4173
|
+
words: z19.array(
|
|
4174
|
+
z19.object({
|
|
4175
|
+
word: z19.string(),
|
|
4176
|
+
start: z19.number(),
|
|
4177
|
+
end: z19.number()
|
|
4178
|
+
})
|
|
4179
|
+
).nullish(),
|
|
4180
|
+
segments: z19.array(
|
|
4181
|
+
z19.object({
|
|
4182
|
+
id: z19.number(),
|
|
4183
|
+
seek: z19.number(),
|
|
4184
|
+
start: z19.number(),
|
|
4185
|
+
end: z19.number(),
|
|
4186
|
+
text: z19.string(),
|
|
4187
|
+
tokens: z19.array(z19.number()),
|
|
4188
|
+
temperature: z19.number(),
|
|
4189
|
+
avg_logprob: z19.number(),
|
|
4190
|
+
compression_ratio: z19.number(),
|
|
4191
|
+
no_speech_prob: z19.number()
|
|
4192
|
+
})
|
|
4193
|
+
).nullish()
|
|
4194
|
+
})
|
|
4195
|
+
)
|
|
4196
|
+
);
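
openaiTranscriptionResponseSchema accepts the verbose transcription payload; only text is required, everything else is nullish. Two example values it validates, with field names taken from the schema above:

// Minimal and fuller payloads accepted by openaiTranscriptionResponseSchema.
const minimal = { text: 'Hello world.' };

const verbose = {
  text: 'Hello world.',
  language: 'english',
  duration: 1.4,
  words: [{ word: 'Hello', start: 0.0, end: 0.5 }],
  segments: [{
    id: 0, seek: 0, start: 0.0, end: 1.4, text: 'Hello world.',
    tokens: [50364], temperature: 0, avg_logprob: -0.21,
    compression_ratio: 0.78, no_speech_prob: 0.01,
  }],
};
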
|
|
3862
4197
|
|
|
3863
4198
|
// src/transcription/openai-transcription-options.ts
|
|
3864
|
-
var
|
|
3865
|
-
var
|
|
3866-3888
|
-
… (removed: the remainder of the previous inline openAITranscriptionProviderOptions zod object)
|
|
4199
|
+
var import_provider_utils28 = require("@ai-sdk/provider-utils");
|
|
4200
|
+
var z20 = __toESM(require("zod/v4"));
|
|
4201
|
+
var openAITranscriptionProviderOptions = (0, import_provider_utils28.lazyValidator)(
|
|
4202
|
+
() => (0, import_provider_utils28.zodSchema)(
|
|
4203
|
+
z20.object({
|
|
4204
|
+
/**
|
|
4205
|
+
* Additional information to include in the transcription response.
|
|
4206
|
+
*/
|
|
4207
|
+
include: z20.array(z20.string()).optional(),
|
|
4208
|
+
/**
|
|
4209
|
+
* The language of the input audio in ISO-639-1 format.
|
|
4210
|
+
*/
|
|
4211
|
+
language: z20.string().optional(),
|
|
4212
|
+
/**
|
|
4213
|
+
* An optional text to guide the model's style or continue a previous audio segment.
|
|
4214
|
+
*/
|
|
4215
|
+
prompt: z20.string().optional(),
|
|
4216
|
+
/**
|
|
4217
|
+
* The sampling temperature, between 0 and 1.
|
|
4218
|
+
* @default 0
|
|
4219
|
+
*/
|
|
4220
|
+
temperature: z20.number().min(0).max(1).default(0).optional(),
|
|
4221
|
+
/**
|
|
4222
|
+
* The timestamp granularities to populate for this transcription.
|
|
4223
|
+
* @default ['segment']
|
|
4224
|
+
*/
|
|
4225
|
+
timestampGranularities: z20.array(z20.enum(["word", "segment"])).default(["segment"]).optional()
|
|
4226
|
+
})
|
|
4227
|
+
)
|
|
4228
|
+
);
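
A hedged sketch of supplying these transcription options: the experimental_transcribe API and the openai.transcription entry point are assumptions from the AI SDK, while the option names come from the schema above:

// Hedged usage sketch; option names from openAITranscriptionProviderOptions above.
import { openai } from '@ai-sdk/openai';
import { experimental_transcribe as transcribe } from 'ai';
import { readFile } from 'node:fs/promises';

const { text, segments } = await transcribe({
  model: openai.transcription('whisper-1'),
  audio: await readFile('meeting.wav'),
  providerOptions: {
    openai: {
      language: 'en',                      // ISO-639-1
      temperature: 0,
      timestampGranularities: ['segment'], // or ['word']
    },
  },
});
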
|
|
3889
4229
|
|
|
3890
4230
|
// src/transcription/openai-transcription-model.ts
|
|
3891
4231
|
var languageMap = {
|
|
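
The reworked openAITranscriptionProviderOptions validator above covers the OpenAI-specific transcription settings (include, language, prompt, temperature, timestampGranularities). A minimal usage sketch, assuming the experimental_transcribe helper from the ai package and the whisper-1 model id (neither appears in this diff, so treat the call shape as illustrative only):

  // Illustrative sketch only; not part of this package's diff.
  import { experimental_transcribe as transcribe } from "ai";
  import { openai } from "@ai-sdk/openai";
  import { readFile } from "node:fs/promises";

  const result = await transcribe({
    model: openai.transcription("whisper-1"), // assumed model id
    audio: await readFile("recording.mp3"),
    providerOptions: {
      // Parsed against openAITranscriptionProviderOptions shown above.
      openai: {
        language: "en",
        temperature: 0,
        timestampGranularities: ["word"],
      },
    },
  });

  console.log(result.text);
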
@@ -3962,15 +4302,15 @@ var OpenAITranscriptionModel = class {
     providerOptions
   }) {
     const warnings = [];
-    const openAIOptions = await (0,
+    const openAIOptions = await (0, import_provider_utils29.parseProviderOptions)({
       provider: "openai",
       providerOptions,
       schema: openAITranscriptionProviderOptions
     });
     const formData = new FormData();
-    const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([(0,
+    const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([(0, import_provider_utils29.convertBase64ToUint8Array)(audio)]);
     formData.append("model", this.modelId);
-    const fileExtension = (0,
+    const fileExtension = (0, import_provider_utils29.mediaTypeToExtension)(mediaType);
     formData.append(
       "file",
       new File([blob], "audio", { type: mediaType }),
@@ -4015,15 +4355,15 @@ var OpenAITranscriptionModel = class {
       value: response,
       responseHeaders,
       rawValue: rawResponse
-    } = await (0,
+    } = await (0, import_provider_utils29.postFormDataToApi)({
       url: this.config.url({
         path: "/audio/transcriptions",
         modelId: this.modelId
       }),
-      headers: (0,
+      headers: (0, import_provider_utils29.combineHeaders)(this.config.headers(), options.headers),
       formData,
       failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils29.createJsonResponseHandler)(
         openaiTranscriptionResponseSchema
       ),
       abortSignal: options.abortSignal,
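
For reference, the JSON payload accepted by the createJsonResponseHandler(openaiTranscriptionResponseSchema) call above corresponds roughly to the following hand-written TypeScript shape. This is a sketch derived from the zod fields in the hunk further up; the package infers the type from the schema, and the interface name here is invented for illustration:

  // Approximate response shape (sketch); `.nullish()` maps to `T | null | undefined`.
  interface OpenAITranscriptionResponse {
    text: string;
    language?: string | null;
    duration?: number | null;
    words?: Array<{ word: string; start: number; end: number }> | null;
    segments?: Array<{
      id: number;
      seek: number;
      start: number;
      end: number;
      text: string;
      tokens: number[];
      temperature: number;
      avg_logprob: number;
      compression_ratio: number;
      no_speech_prob: number;
    }> | null;
  }
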
@@ -4053,49 +4393,23 @@ var OpenAITranscriptionModel = class {
     };
   }
 };
-var openaiTranscriptionResponseSchema = import_v419.z.object({
-  text: import_v419.z.string(),
-  language: import_v419.z.string().nullish(),
-  duration: import_v419.z.number().nullish(),
-  words: import_v419.z.array(
-    import_v419.z.object({
-      word: import_v419.z.string(),
-      start: import_v419.z.number(),
-      end: import_v419.z.number()
-    })
-  ).nullish(),
-  segments: import_v419.z.array(
-    import_v419.z.object({
-      id: import_v419.z.number(),
-      seek: import_v419.z.number(),
-      start: import_v419.z.number(),
-      end: import_v419.z.number(),
-      text: import_v419.z.string(),
-      tokens: import_v419.z.array(import_v419.z.number()),
-      temperature: import_v419.z.number(),
-      avg_logprob: import_v419.z.number(),
-      compression_ratio: import_v419.z.number(),
-      no_speech_prob: import_v419.z.number()
-    })
-  ).nullish()
-});

 // src/version.ts
-var VERSION = true ? "2.0.
+var VERSION = true ? "2.0.45" : "0.0.0-test";

 // src/openai-provider.ts
 function createOpenAI(options = {}) {
   var _a, _b;
-  const baseURL = (_a = (0,
-    (0,
+  const baseURL = (_a = (0, import_provider_utils30.withoutTrailingSlash)(
+    (0, import_provider_utils30.loadOptionalSetting)({
       settingValue: options.baseURL,
       environmentVariableName: "OPENAI_BASE_URL"
     })
   )) != null ? _a : "https://api.openai.com/v1";
   const providerName = (_b = options.name) != null ? _b : "openai";
-  const getHeaders = () => (0,
+  const getHeaders = () => (0, import_provider_utils30.withUserAgentSuffix)(
     {
-      Authorization: `Bearer ${(0,
+      Authorization: `Bearer ${(0, import_provider_utils30.loadApiKey)({
        apiKey: options.apiKey,
        environmentVariableName: "OPENAI_API_KEY",
        description: "OpenAI"
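
As the updated createOpenAI body shows, the base URL falls back to the OPENAI_BASE_URL environment variable and then to https://api.openai.com/v1, and the API key is resolved from options.apiKey or OPENAI_API_KEY. A small configuration sketch (the proxy URL and the MY_OPENAI_KEY variable below are placeholders, not values from this diff):

  import { createOpenAI } from "@ai-sdk/openai";

  // Default: resolves OPENAI_API_KEY and, if set, OPENAI_BASE_URL from the environment.
  const openai = createOpenAI();

  // Explicit settings take precedence over the environment variables.
  const proxied = createOpenAI({
    baseURL: "https://my-openai-proxy.example.com/v1", // placeholder URL
    apiKey: process.env.MY_OPENAI_KEY, // placeholder variable name
  });
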