@ai-sdk/openai 2.0.0-alpha.9 → 2.0.0-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +83 -0
- package/dist/index.d.mts +77 -75
- package/dist/index.d.ts +77 -75
- package/dist/index.js +745 -389
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +690 -334
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +20 -8
- package/dist/internal/index.d.ts +20 -8
- package/dist/internal/index.js +739 -365
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +687 -313
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/internal/index.js (CHANGED)

@@ -38,8 +38,8 @@ module.exports = __toCommonJS(internal_exports);
 
 // src/openai-chat-language-model.ts
 var import_provider3 = require("@ai-sdk/provider");
-var
-var
+var import_provider_utils5 = require("@ai-sdk/provider-utils");
+var import_zod5 = require("zod");
 
 // src/convert-to-openai-chat-messages.ts
 var import_provider = require("@ai-sdk/provider");
@@ -173,7 +173,7 @@ function convertToOpenAIChatMessages({
 type: "function",
 function: {
 name: part.toolName,
-arguments: JSON.stringify(part.
+arguments: JSON.stringify(part.input)
 }
 });
 break;
@@ -189,10 +189,23 @@ function convertToOpenAIChatMessages({
 }
 case "tool": {
 for (const toolResponse of content) {
+const output = toolResponse.output;
+let contentValue;
+switch (output.type) {
+case "text":
+case "error-text":
+contentValue = output.value;
+break;
+case "content":
+case "json":
+case "error-json":
+contentValue = JSON.stringify(output.value);
+break;
+}
 messages.push({
 role: "tool",
 tool_call_id: toolResponse.toolCallId,
-content:
+content: contentValue
 });
 }
 break;
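The hunk above replaces the old plain-string tool content with a switch over the structured tool output union. A minimal standalone sketch of the same mapping, assuming the output variants shown in the diff ('text', 'error-text', 'content', 'json', 'error-json'); the type alias below is illustrative, not the SDK's exported type:

  // Sketch only: mirrors the switch added in the diff above.
  type ToolOutput =
    | { type: 'text' | 'error-text'; value: string }
    | { type: 'content' | 'json' | 'error-json'; value: unknown };

  function toChatToolContent(output: ToolOutput): string {
    switch (output.type) {
      case 'text':
      case 'error-text':
        return output.value; // already a string
      case 'content':
      case 'json':
      case 'error-json':
        return JSON.stringify(output.value); // serialize structured results
    }
  }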
@@ -290,7 +303,14 @@ var openaiProviderOptions = import_zod.z.object({
 *
 * @default true
 */
-structuredOutputs: import_zod.z.boolean().optional()
+structuredOutputs: import_zod.z.boolean().optional(),
+/**
+ * Service tier for the request. Set to 'flex' for 50% cheaper processing
+ * at the cost of increased latency. Only available for o3 and o4-mini models.
+ *
+ * @default 'auto'
+ */
+serviceTier: import_zod.z.enum(["auto", "flex"]).optional()
 });
 
 // src/openai-error.ts
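A usage sketch for the new serviceTier option. It assumes the option is passed through providerOptions under the "openai" key, which is how the parseProviderOptions call in this file reads it; the call-site helpers (generateText, openai) are from the surrounding AI SDK and are not part of this diff:

  // Sketch: request flex processing on a model that supports it.
  import { openai } from '@ai-sdk/openai';
  import { generateText } from 'ai';

  const result = await generateText({
    model: openai('o4-mini'),
    prompt: 'Summarize the release notes.',
    providerOptions: {
      openai: { serviceTier: 'flex' }, // 'auto' (default) or 'flex'
    },
  });

On models other than o3 / o4-mini the diff shows the provider emitting an "unsupported-setting" warning and dropping service_tier from the request body.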
@@ -314,6 +334,76 @@ var openaiFailedResponseHandler = (0, import_provider_utils2.createJsonErrorResp
 
 // src/openai-prepare-tools.ts
 var import_provider2 = require("@ai-sdk/provider");
+
+// src/tool/file-search.ts
+var import_provider_utils3 = require("@ai-sdk/provider-utils");
+var import_zod3 = require("zod");
+var fileSearchArgsSchema = import_zod3.z.object({
+/**
+ * List of vector store IDs to search through. If not provided, searches all available vector stores.
+ */
+vectorStoreIds: import_zod3.z.array(import_zod3.z.string()).optional(),
+/**
+ * Maximum number of search results to return. Defaults to 10.
+ */
+maxResults: import_zod3.z.number().optional(),
+/**
+ * Type of search to perform. Defaults to 'auto'.
+ */
+searchType: import_zod3.z.enum(["auto", "keyword", "semantic"]).optional()
+});
+var fileSearch = (0, import_provider_utils3.createProviderDefinedToolFactory)({
+id: "openai.file_search",
+name: "file_search",
+inputSchema: import_zod3.z.object({
+query: import_zod3.z.string()
+})
+});
+
+// src/tool/web-search-preview.ts
+var import_provider_utils4 = require("@ai-sdk/provider-utils");
+var import_zod4 = require("zod");
+var webSearchPreviewArgsSchema = import_zod4.z.object({
+/**
+ * Search context size to use for the web search.
+ * - high: Most comprehensive context, highest cost, slower response
+ * - medium: Balanced context, cost, and latency (default)
+ * - low: Least context, lowest cost, fastest response
+ */
+searchContextSize: import_zod4.z.enum(["low", "medium", "high"]).optional(),
+/**
+ * User location information to provide geographically relevant search results.
+ */
+userLocation: import_zod4.z.object({
+/**
+ * Type of location (always 'approximate')
+ */
+type: import_zod4.z.literal("approximate"),
+/**
+ * Two-letter ISO country code (e.g., 'US', 'GB')
+ */
+country: import_zod4.z.string().optional(),
+/**
+ * City name (free text, e.g., 'Minneapolis')
+ */
+city: import_zod4.z.string().optional(),
+/**
+ * Region name (free text, e.g., 'Minnesota')
+ */
+region: import_zod4.z.string().optional(),
+/**
+ * IANA timezone (e.g., 'America/Chicago')
+ */
+timezone: import_zod4.z.string().optional()
+}).optional()
+});
+var webSearchPreview = (0, import_provider_utils4.createProviderDefinedToolFactory)({
+id: "openai.web_search_preview",
+name: "web_search_preview",
+inputSchema: import_zod4.z.object({})
+});
+
+// src/openai-prepare-tools.ts
 function prepareTools({
 tools,
 toolChoice,
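The file_search and web_search_preview tools above are built with createProviderDefinedToolFactory and matched by id in prepareTools below. A hedged usage sketch, assuming the factories are re-exported on the provider as openai.tools.fileSearch / openai.tools.webSearchPreview (the export surface is outside this file) and using placeholder IDs:

  // Sketch: attach the provider-defined tools to a call.
  import { openai } from '@ai-sdk/openai';
  import { generateText } from 'ai';

  const { text } = await generateText({
    model: openai('gpt-4o-mini'),
    prompt: 'What changed in the latest release?',
    tools: {
      file_search: openai.tools.fileSearch({
        vectorStoreIds: ['vs_123'], // hypothetical vector store ID
        maxResults: 5,
        searchType: 'semantic',
      }),
      web_search_preview: openai.tools.webSearchPreview({
        searchContextSize: 'low',
      }),
    },
  });

The args objects are validated against fileSearchArgsSchema / webSearchPreviewArgsSchema and translated into the file_search and web_search_preview tool entries in the request body.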
@@ -326,18 +416,47 @@ function prepareTools({
 }
 const openaiTools = [];
 for (const tool of tools) {
-
-
-
-
-
-
-
-
-
-
+switch (tool.type) {
+case "function":
+openaiTools.push({
+type: "function",
+function: {
+name: tool.name,
+description: tool.description,
+parameters: tool.inputSchema,
+strict: structuredOutputs ? true : void 0
+}
+});
+break;
+case "provider-defined":
+switch (tool.id) {
+case "openai.file_search": {
+const args = fileSearchArgsSchema.parse(tool.args);
+openaiTools.push({
+type: "file_search",
+vector_store_ids: args.vectorStoreIds,
+max_results: args.maxResults,
+search_type: args.searchType
+});
+break;
+}
+case "openai.web_search_preview": {
+const args = webSearchPreviewArgsSchema.parse(tool.args);
+openaiTools.push({
+type: "web_search_preview",
+search_context_size: args.searchContextSize,
+user_location: args.userLocation
+});
+break;
+}
+default:
+toolWarnings.push({ type: "unsupported-tool", tool });
+break;
 }
-
+break;
+default:
+toolWarnings.push({ type: "unsupported-tool", tool });
+break;
 }
 }
 if (toolChoice == null) {
@@ -399,7 +518,7 @@ var OpenAIChatLanguageModel = class {
 }) {
 var _a, _b, _c;
 const warnings = [];
-const openaiOptions = (_a = await (0,
+const openaiOptions = (_a = await (0, import_provider_utils5.parseProviderOptions)({
 provider: "openai",
 providerOptions,
 schema: openaiProviderOptions
@@ -461,6 +580,7 @@ var OpenAIChatLanguageModel = class {
 metadata: openaiOptions.metadata,
 prediction: openaiOptions.prediction,
 reasoning_effort: openaiOptions.reasoningEffort,
+service_tier: openaiOptions.serviceTier,
 // messages:
 messages
 };
@@ -534,6 +654,14 @@ var OpenAIChatLanguageModel = class {
 });
 }
 }
+if (openaiOptions.serviceTier === "flex" && !supportsFlexProcessing(this.modelId)) {
+warnings.push({
+type: "unsupported-setting",
+setting: "serviceTier",
+details: "flex processing is only available for o3 and o4-mini models"
+});
+baseArgs.service_tier = void 0;
+}
 const {
 tools: openaiTools,
 toolChoice: openaiToolChoice,
@@ -559,15 +687,15 @@ var OpenAIChatLanguageModel = class {
 responseHeaders,
 value: response,
 rawValue: rawResponse
-} = await (0,
+} = await (0, import_provider_utils5.postJsonToApi)({
 url: this.config.url({
 path: "/chat/completions",
 modelId: this.modelId
 }),
-headers: (0,
+headers: (0, import_provider_utils5.combineHeaders)(this.config.headers(), options.headers),
 body,
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
+successfulResponseHandler: (0, import_provider_utils5.createJsonResponseHandler)(
 openaiChatResponseSchema
 ),
 abortSignal: options.abortSignal,
@@ -582,10 +710,9 @@ var OpenAIChatLanguageModel = class {
 for (const toolCall of (_a = choice.message.tool_calls) != null ? _a : []) {
 content.push({
 type: "tool-call",
-
-toolCallId: (_b = toolCall.id) != null ? _b : (0, import_provider_utils3.generateId)(),
+toolCallId: (_b = toolCall.id) != null ? _b : (0, import_provider_utils5.generateId)(),
 toolName: toolCall.function.name,
-
+input: toolCall.function.arguments
 });
 }
 const completionTokenDetails = (_c = response.usage) == null ? void 0 : _c.completion_tokens_details;
@@ -629,15 +756,15 @@ var OpenAIChatLanguageModel = class {
 include_usage: true
 }
 };
-const { responseHeaders, value: response } = await (0,
+const { responseHeaders, value: response } = await (0, import_provider_utils5.postJsonToApi)({
 url: this.config.url({
 path: "/chat/completions",
 modelId: this.modelId
 }),
-headers: (0,
+headers: (0, import_provider_utils5.combineHeaders)(this.config.headers(), options.headers),
 body,
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
+successfulResponseHandler: (0, import_provider_utils5.createEventSourceResponseHandler)(
 openaiChatChunkSchema
 ),
 abortSignal: options.abortSignal,
@@ -651,6 +778,7 @@ var OpenAIChatLanguageModel = class {
 totalTokens: void 0
 };
 let isFirstChunk = true;
+let isActiveText = false;
 const providerMetadata = { openai: {} };
 return {
 stream: response.pipeThrough(
@@ -660,6 +788,9 @@ var OpenAIChatLanguageModel = class {
 },
 transform(chunk, controller) {
 var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x;
+if (options.includeRawChunks) {
+controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
+}
 if (!chunk.success) {
 finishReason = "error";
 controller.enqueue({ type: "error", error: chunk.error });
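The stream transform now forwards the unparsed provider chunk whenever options.includeRawChunks is set, as a part with type "raw". A consumer-side sketch, assuming streamText exposes the same flag and surfaces these parts on its full stream (the call-site API is outside this file):

  // Sketch: observe raw OpenAI chunks next to the parsed stream parts.
  import { openai } from '@ai-sdk/openai';
  import { streamText } from 'ai';

  const result = streamText({
    model: openai('gpt-4o-mini'),
    prompt: 'Hello!',
    includeRawChunks: true, // assumption: forwarded to the model as options.includeRawChunks
  });

  for await (const part of result.fullStream) {
    if (part.type === 'raw') {
      console.log('raw chunk', part.rawValue); // the untouched SSE payload
    }
  }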
@@ -703,9 +834,14 @@ var OpenAIChatLanguageModel = class {
 }
 const delta = choice.delta;
 if (delta.content != null) {
+if (!isActiveText) {
+controller.enqueue({ type: "text-start", id: "0" });
+isActiveText = true;
+}
 controller.enqueue({
-type: "text",
-
+type: "text-delta",
+id: "0",
+delta: delta.content
 });
 }
 if (delta.tool_calls != null) {
@@ -730,6 +866,11 @@ var OpenAIChatLanguageModel = class {
 message: `Expected 'function.name' to be a string.`
 });
 }
+controller.enqueue({
+type: "tool-input-start",
+id: toolCallDelta.id,
+toolName: toolCallDelta.function.name
+});
 toolCalls[index] = {
 id: toolCallDelta.id,
 type: "function",
@@ -743,20 +884,21 @@ var OpenAIChatLanguageModel = class {
 if (((_o = toolCall2.function) == null ? void 0 : _o.name) != null && ((_p = toolCall2.function) == null ? void 0 : _p.arguments) != null) {
 if (toolCall2.function.arguments.length > 0) {
 controller.enqueue({
-type: "tool-
-
-
-toolName: toolCall2.function.name,
-argsTextDelta: toolCall2.function.arguments
+type: "tool-input-delta",
+id: toolCall2.id,
+delta: toolCall2.function.arguments
 });
 }
-if ((0,
+if ((0, import_provider_utils5.isParsableJson)(toolCall2.function.arguments)) {
+controller.enqueue({
+type: "tool-input-end",
+id: toolCall2.id
+});
 controller.enqueue({
 type: "tool-call",
-
-toolCallId: (_q = toolCall2.id) != null ? _q : (0, import_provider_utils3.generateId)(),
+toolCallId: (_q = toolCall2.id) != null ? _q : (0, import_provider_utils5.generateId)(),
 toolName: toolCall2.function.name,
-
+input: toolCall2.function.arguments
 });
 toolCall2.hasFinished = true;
 }
@@ -771,19 +913,20 @@ var OpenAIChatLanguageModel = class {
 toolCall.function.arguments += (_t = (_s = toolCallDelta.function) == null ? void 0 : _s.arguments) != null ? _t : "";
 }
 controller.enqueue({
-type: "tool-
-
-
-toolName: toolCall.function.name,
-argsTextDelta: (_u = toolCallDelta.function.arguments) != null ? _u : ""
+type: "tool-input-delta",
+id: toolCall.id,
+delta: (_u = toolCallDelta.function.arguments) != null ? _u : ""
 });
-if (((_v = toolCall.function) == null ? void 0 : _v.name) != null && ((_w = toolCall.function) == null ? void 0 : _w.arguments) != null && (0,
+if (((_v = toolCall.function) == null ? void 0 : _v.name) != null && ((_w = toolCall.function) == null ? void 0 : _w.arguments) != null && (0, import_provider_utils5.isParsableJson)(toolCall.function.arguments)) {
+controller.enqueue({
+type: "tool-input-end",
+id: toolCall.id
+});
 controller.enqueue({
 type: "tool-call",
-
-toolCallId: (_x = toolCall.id) != null ? _x : (0, import_provider_utils3.generateId)(),
+toolCallId: (_x = toolCall.id) != null ? _x : (0, import_provider_utils5.generateId)(),
 toolName: toolCall.function.name,
-
+input: toolCall.function.arguments
 });
 toolCall.hasFinished = true;
 }
@@ -791,6 +934,9 @@ var OpenAIChatLanguageModel = class {
 }
 },
 flush(controller) {
+if (isActiveText) {
+controller.enqueue({ type: "text-end", id: "0" });
+}
 controller.enqueue({
 type: "finish",
 finishReason,
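The hunks above change the streamed part shapes: text is now bracketed by text-start / text-delta / text-end parts (all with id "0"), and tool arguments stream as tool-input-start / tool-input-delta / tool-input-end before the final tool-call carries the full input string. A small sketch of a consumer that reassembles both, assuming these part types reach the caller unchanged; the StreamPart type below is a simplified stand-in, not the SDK's exported type:

  // Sketch: reassemble streamed text and tool inputs from the new part types.
  type StreamPart =
    | { type: 'text-start' | 'text-end' | 'tool-input-end'; id: string }
    | { type: 'text-delta' | 'tool-input-delta'; id: string; delta: string }
    | { type: 'tool-input-start'; id: string; toolName: string }
    | { type: 'tool-call'; toolCallId: string; toolName: string; input: string };

  async function collect(stream: AsyncIterable<StreamPart>) {
    const text: Record<string, string> = {};
    const toolInputs: Record<string, string> = {};
    for await (const part of stream) {
      switch (part.type) {
        case 'text-start': text[part.id] = ''; break;
        case 'text-delta': text[part.id] += part.delta; break;
        case 'tool-input-start': toolInputs[part.id] = ''; break;
        case 'tool-input-delta': toolInputs[part.id] += part.delta; break;
        case 'tool-call': console.log(part.toolName, JSON.parse(part.input)); break;
      }
    }
    return { text, toolInputs };
  }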
@@ -805,97 +951,97 @@ var OpenAIChatLanguageModel = class {
 };
 }
 };
-var openaiTokenUsageSchema =
-prompt_tokens:
-completion_tokens:
-total_tokens:
-prompt_tokens_details:
-cached_tokens:
+var openaiTokenUsageSchema = import_zod5.z.object({
+prompt_tokens: import_zod5.z.number().nullish(),
+completion_tokens: import_zod5.z.number().nullish(),
+total_tokens: import_zod5.z.number().nullish(),
+prompt_tokens_details: import_zod5.z.object({
+cached_tokens: import_zod5.z.number().nullish()
 }).nullish(),
-completion_tokens_details:
-reasoning_tokens:
-accepted_prediction_tokens:
-rejected_prediction_tokens:
+completion_tokens_details: import_zod5.z.object({
+reasoning_tokens: import_zod5.z.number().nullish(),
+accepted_prediction_tokens: import_zod5.z.number().nullish(),
+rejected_prediction_tokens: import_zod5.z.number().nullish()
 }).nullish()
 }).nullish();
-var openaiChatResponseSchema =
-id:
-created:
-model:
-choices:
-
-message:
-role:
-content:
-tool_calls:
-
-id:
-type:
-function:
-name:
-arguments:
+var openaiChatResponseSchema = import_zod5.z.object({
+id: import_zod5.z.string().nullish(),
+created: import_zod5.z.number().nullish(),
+model: import_zod5.z.string().nullish(),
+choices: import_zod5.z.array(
+import_zod5.z.object({
+message: import_zod5.z.object({
+role: import_zod5.z.literal("assistant").nullish(),
+content: import_zod5.z.string().nullish(),
+tool_calls: import_zod5.z.array(
+import_zod5.z.object({
+id: import_zod5.z.string().nullish(),
+type: import_zod5.z.literal("function"),
+function: import_zod5.z.object({
+name: import_zod5.z.string(),
+arguments: import_zod5.z.string()
 })
 })
 ).nullish()
 }),
-index:
-logprobs:
-content:
-
-token:
-logprob:
-top_logprobs:
-
-token:
-logprob:
+index: import_zod5.z.number(),
+logprobs: import_zod5.z.object({
+content: import_zod5.z.array(
+import_zod5.z.object({
+token: import_zod5.z.string(),
+logprob: import_zod5.z.number(),
+top_logprobs: import_zod5.z.array(
+import_zod5.z.object({
+token: import_zod5.z.string(),
+logprob: import_zod5.z.number()
 })
 )
 })
 ).nullish()
 }).nullish(),
-finish_reason:
+finish_reason: import_zod5.z.string().nullish()
 })
 ),
 usage: openaiTokenUsageSchema
 });
-var openaiChatChunkSchema =
-
-id:
-created:
-model:
-choices:
-
-delta:
-role:
-content:
-tool_calls:
-
-index:
-id:
-type:
-function:
-name:
-arguments:
+var openaiChatChunkSchema = import_zod5.z.union([
+import_zod5.z.object({
+id: import_zod5.z.string().nullish(),
+created: import_zod5.z.number().nullish(),
+model: import_zod5.z.string().nullish(),
+choices: import_zod5.z.array(
+import_zod5.z.object({
+delta: import_zod5.z.object({
+role: import_zod5.z.enum(["assistant"]).nullish(),
+content: import_zod5.z.string().nullish(),
+tool_calls: import_zod5.z.array(
+import_zod5.z.object({
+index: import_zod5.z.number(),
+id: import_zod5.z.string().nullish(),
+type: import_zod5.z.literal("function").nullish(),
+function: import_zod5.z.object({
+name: import_zod5.z.string().nullish(),
+arguments: import_zod5.z.string().nullish()
 })
 })
 ).nullish()
 }).nullish(),
-logprobs:
-content:
-
-token:
-logprob:
-top_logprobs:
-
-token:
-logprob:
+logprobs: import_zod5.z.object({
+content: import_zod5.z.array(
+import_zod5.z.object({
+token: import_zod5.z.string(),
+logprob: import_zod5.z.number(),
+top_logprobs: import_zod5.z.array(
+import_zod5.z.object({
+token: import_zod5.z.string(),
+logprob: import_zod5.z.number()
 })
 )
 })
 ).nullish()
 }).nullish(),
-finish_reason:
-index:
+finish_reason: import_zod5.z.string().nullish(),
+index: import_zod5.z.number()
 })
 ),
 usage: openaiTokenUsageSchema
@@ -905,6 +1051,9 @@ var openaiChatChunkSchema = import_zod3.z.union([
 function isReasoningModel(modelId) {
 return modelId.startsWith("o");
 }
+function supportsFlexProcessing(modelId) {
+return modelId.startsWith("o3") || modelId.startsWith("o4-mini");
+}
 function getSystemMessageMode(modelId) {
 var _a, _b;
 if (!isReasoningModel(modelId)) {
@@ -946,8 +1095,8 @@ var reasoningModels = {
 };
 
 // src/openai-completion-language-model.ts
-var
-var
+var import_provider_utils6 = require("@ai-sdk/provider-utils");
+var import_zod7 = require("zod");
 
 // src/convert-to-openai-completion-prompt.ts
 var import_provider4 = require("@ai-sdk/provider");
@@ -1025,12 +1174,12 @@ ${user}:`]
 }
 
 // src/openai-completion-options.ts
-var
-var openaiCompletionProviderOptions =
+var import_zod6 = require("zod");
+var openaiCompletionProviderOptions = import_zod6.z.object({
 /**
 Echo back the prompt in addition to the completion.
 */
-echo:
+echo: import_zod6.z.boolean().optional(),
 /**
 Modify the likelihood of specified tokens appearing in the completion.
 
@@ -1045,16 +1194,16 @@ var openaiCompletionProviderOptions = import_zod4.z.object({
 As an example, you can pass {"50256": -100} to prevent the <|endoftext|>
 token from being generated.
 */
-logitBias:
+logitBias: import_zod6.z.record(import_zod6.z.string(), import_zod6.z.number()).optional(),
 /**
 The suffix that comes after a completion of inserted text.
 */
-suffix:
+suffix: import_zod6.z.string().optional(),
 /**
 A unique identifier representing your end-user, which can help OpenAI to
 monitor and detect abuse. Learn more.
 */
-user:
+user: import_zod6.z.string().optional(),
 /**
 Return the log probabilities of the tokens. Including logprobs will increase
 the response size and can slow down response times. However, it can
@@ -1064,7 +1213,7 @@ var openaiCompletionProviderOptions = import_zod4.z.object({
 Setting to a number will return the log probabilities of the top n
 tokens that were generated.
 */
-logprobs:
+logprobs: import_zod6.z.union([import_zod6.z.boolean(), import_zod6.z.number()]).optional()
 });
 
 // src/openai-completion-language-model.ts
@@ -1100,12 +1249,12 @@ var OpenAICompletionLanguageModel = class {
 }) {
 const warnings = [];
 const openaiOptions = {
-...await (0,
+...await (0, import_provider_utils6.parseProviderOptions)({
 provider: "openai",
 providerOptions,
 schema: openaiCompletionProviderOptions
 }),
-...await (0,
+...await (0, import_provider_utils6.parseProviderOptions)({
 provider: this.providerOptionsName,
 providerOptions,
 schema: openaiCompletionProviderOptions
@@ -1161,15 +1310,15 @@ var OpenAICompletionLanguageModel = class {
 responseHeaders,
 value: response,
 rawValue: rawResponse
-} = await (0,
+} = await (0, import_provider_utils6.postJsonToApi)({
 url: this.config.url({
 path: "/completions",
 modelId: this.modelId
 }),
-headers: (0,
+headers: (0, import_provider_utils6.combineHeaders)(this.config.headers(), options.headers),
 body: args,
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
+successfulResponseHandler: (0, import_provider_utils6.createJsonResponseHandler)(
 openaiCompletionResponseSchema
 ),
 abortSignal: options.abortSignal,
@@ -1207,15 +1356,15 @@ var OpenAICompletionLanguageModel = class {
 include_usage: true
 }
 };
-const { responseHeaders, value: response } = await (0,
+const { responseHeaders, value: response } = await (0, import_provider_utils6.postJsonToApi)({
 url: this.config.url({
 path: "/completions",
 modelId: this.modelId
 }),
-headers: (0,
+headers: (0, import_provider_utils6.combineHeaders)(this.config.headers(), options.headers),
 body,
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
+successfulResponseHandler: (0, import_provider_utils6.createEventSourceResponseHandler)(
 openaiCompletionChunkSchema
 ),
 abortSignal: options.abortSignal,
@@ -1236,6 +1385,9 @@ var OpenAICompletionLanguageModel = class {
 controller.enqueue({ type: "stream-start", warnings });
 },
 transform(chunk, controller) {
+if (options.includeRawChunks) {
+controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
+}
 if (!chunk.success) {
 finishReason = "error";
 controller.enqueue({ type: "error", error: chunk.error });
@@ -1253,6 +1405,7 @@ var OpenAICompletionLanguageModel = class {
 type: "response-metadata",
 ...getResponseMetadata(value)
 });
+controller.enqueue({ type: "text-start", id: "0" });
 }
 if (value.usage != null) {
 usage.inputTokens = value.usage.prompt_tokens;
@@ -1266,14 +1419,18 @@ var OpenAICompletionLanguageModel = class {
 if ((choice == null ? void 0 : choice.logprobs) != null) {
 providerMetadata.openai.logprobs = choice.logprobs;
 }
-if ((choice == null ? void 0 : choice.text) != null) {
+if ((choice == null ? void 0 : choice.text) != null && choice.text.length > 0) {
 controller.enqueue({
-type: "text",
-
+type: "text-delta",
+id: "0",
+delta: choice.text
 });
 }
 },
 flush(controller) {
+if (!isFirstChunk) {
+controller.enqueue({ type: "text-end", id: "0" });
+}
 controller.enqueue({
 type: "finish",
 finishReason,
@@ -1288,42 +1445,42 @@ var OpenAICompletionLanguageModel = class {
 };
 }
 };
-var usageSchema =
-prompt_tokens:
-completion_tokens:
-total_tokens:
+var usageSchema = import_zod7.z.object({
+prompt_tokens: import_zod7.z.number(),
+completion_tokens: import_zod7.z.number(),
+total_tokens: import_zod7.z.number()
 });
-var openaiCompletionResponseSchema =
-id:
-created:
-model:
-choices:
-
-text:
-finish_reason:
-logprobs:
-tokens:
-token_logprobs:
-top_logprobs:
+var openaiCompletionResponseSchema = import_zod7.z.object({
+id: import_zod7.z.string().nullish(),
+created: import_zod7.z.number().nullish(),
+model: import_zod7.z.string().nullish(),
+choices: import_zod7.z.array(
+import_zod7.z.object({
+text: import_zod7.z.string(),
+finish_reason: import_zod7.z.string(),
+logprobs: import_zod7.z.object({
+tokens: import_zod7.z.array(import_zod7.z.string()),
+token_logprobs: import_zod7.z.array(import_zod7.z.number()),
+top_logprobs: import_zod7.z.array(import_zod7.z.record(import_zod7.z.string(), import_zod7.z.number())).nullish()
 }).nullish()
 })
 ),
 usage: usageSchema.nullish()
 });
-var openaiCompletionChunkSchema =
-
-id:
-created:
-model:
-choices:
-
-text:
-finish_reason:
-index:
-logprobs:
-tokens:
-token_logprobs:
-top_logprobs:
+var openaiCompletionChunkSchema = import_zod7.z.union([
+import_zod7.z.object({
+id: import_zod7.z.string().nullish(),
+created: import_zod7.z.number().nullish(),
+model: import_zod7.z.string().nullish(),
+choices: import_zod7.z.array(
+import_zod7.z.object({
+text: import_zod7.z.string(),
+finish_reason: import_zod7.z.string().nullish(),
+index: import_zod7.z.number(),
+logprobs: import_zod7.z.object({
+tokens: import_zod7.z.array(import_zod7.z.string()),
+token_logprobs: import_zod7.z.array(import_zod7.z.number()),
+top_logprobs: import_zod7.z.array(import_zod7.z.record(import_zod7.z.string(), import_zod7.z.number())).nullish()
 }).nullish()
 })
 ),
@@ -1334,22 +1491,22 @@ var openaiCompletionChunkSchema = import_zod5.z.union([
 
 // src/openai-embedding-model.ts
 var import_provider5 = require("@ai-sdk/provider");
-var
-var
+var import_provider_utils7 = require("@ai-sdk/provider-utils");
+var import_zod9 = require("zod");
 
 // src/openai-embedding-options.ts
-var
-var openaiEmbeddingProviderOptions =
+var import_zod8 = require("zod");
+var openaiEmbeddingProviderOptions = import_zod8.z.object({
 /**
 The number of dimensions the resulting output embeddings should have.
 Only supported in text-embedding-3 and later models.
 */
-dimensions:
+dimensions: import_zod8.z.number().optional(),
 /**
 A unique identifier representing your end-user, which can help OpenAI to
 monitor and detect abuse. Learn more.
 */
-user:
+user: import_zod8.z.string().optional()
 });
 
 // src/openai-embedding-model.ts
@@ -1379,7 +1536,7 @@ var OpenAIEmbeddingModel = class {
 values
 });
 }
-const openaiOptions = (_a = await (0,
+const openaiOptions = (_a = await (0, import_provider_utils7.parseProviderOptions)({
 provider: "openai",
 providerOptions,
 schema: openaiEmbeddingProviderOptions
@@ -1388,12 +1545,12 @@ var OpenAIEmbeddingModel = class {
 responseHeaders,
 value: response,
 rawValue
-} = await (0,
+} = await (0, import_provider_utils7.postJsonToApi)({
 url: this.config.url({
 path: "/embeddings",
 modelId: this.modelId
 }),
-headers: (0,
+headers: (0, import_provider_utils7.combineHeaders)(this.config.headers(), headers),
 body: {
 model: this.modelId,
 input: values,
@@ -1402,7 +1559,7 @@ var OpenAIEmbeddingModel = class {
 user: openaiOptions.user
 },
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
+successfulResponseHandler: (0, import_provider_utils7.createJsonResponseHandler)(
 openaiTextEmbeddingResponseSchema
 ),
 abortSignal,
@@ -1415,14 +1572,14 @@ var OpenAIEmbeddingModel = class {
 };
 }
 };
-var openaiTextEmbeddingResponseSchema =
-data:
-usage:
+var openaiTextEmbeddingResponseSchema = import_zod9.z.object({
+data: import_zod9.z.array(import_zod9.z.object({ embedding: import_zod9.z.array(import_zod9.z.number()) })),
+usage: import_zod9.z.object({ prompt_tokens: import_zod9.z.number() }).nullish()
 });
 
 // src/openai-image-model.ts
-var
-var
+var import_provider_utils8 = require("@ai-sdk/provider-utils");
+var import_zod10 = require("zod");
 
 // src/openai-image-settings.ts
 var modelMaxImagesPerCall = {
@@ -1469,12 +1626,12 @@ var OpenAIImageModel = class {
 warnings.push({ type: "unsupported-setting", setting: "seed" });
 }
 const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
-const { value: response, responseHeaders } = await (0,
+const { value: response, responseHeaders } = await (0, import_provider_utils8.postJsonToApi)({
 url: this.config.url({
 path: "/images/generations",
 modelId: this.modelId
 }),
-headers: (0,
+headers: (0, import_provider_utils8.combineHeaders)(this.config.headers(), headers),
 body: {
 model: this.modelId,
 prompt,
@@ -1484,7 +1641,7 @@ var OpenAIImageModel = class {
 ...!hasDefaultResponseFormat.has(this.modelId) ? { response_format: "b64_json" } : {}
 },
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
+successfulResponseHandler: (0, import_provider_utils8.createJsonResponseHandler)(
 openaiImageResponseSchema
 ),
 abortSignal,
@@ -1510,41 +1667,41 @@ var OpenAIImageModel = class {
 };
 }
 };
-var openaiImageResponseSchema =
-data:
-
+var openaiImageResponseSchema = import_zod10.z.object({
+data: import_zod10.z.array(
+import_zod10.z.object({ b64_json: import_zod10.z.string(), revised_prompt: import_zod10.z.string().optional() })
 )
 });
 
 // src/openai-transcription-model.ts
-var
-var
+var import_provider_utils9 = require("@ai-sdk/provider-utils");
+var import_zod12 = require("zod");
 
 // src/openai-transcription-options.ts
-var
-var openAITranscriptionProviderOptions =
+var import_zod11 = require("zod");
+var openAITranscriptionProviderOptions = import_zod11.z.object({
 /**
 * Additional information to include in the transcription response.
 */
-include:
+include: import_zod11.z.array(import_zod11.z.string()).optional(),
 /**
 * The language of the input audio in ISO-639-1 format.
 */
-language:
+language: import_zod11.z.string().optional(),
 /**
 * An optional text to guide the model's style or continue a previous audio segment.
 */
-prompt:
+prompt: import_zod11.z.string().optional(),
 /**
 * The sampling temperature, between 0 and 1.
 * @default 0
 */
-temperature:
+temperature: import_zod11.z.number().min(0).max(1).default(0).optional(),
 /**
 * The timestamp granularities to populate for this transcription.
 * @default ['segment']
 */
-timestampGranularities:
+timestampGranularities: import_zod11.z.array(import_zod11.z.enum(["word", "segment"])).default(["segment"]).optional()
 });
 
 // src/openai-transcription-model.ts
@@ -1611,7 +1768,7 @@ var OpenAITranscriptionModel = class {
 constructor(modelId, config) {
 this.modelId = modelId;
 this.config = config;
-this.specificationVersion = "
+this.specificationVersion = "v2";
 }
 get provider() {
 return this.config.provider;
@@ -1622,13 +1779,13 @@ var OpenAITranscriptionModel = class {
 providerOptions
 }) {
 const warnings = [];
-const openAIOptions = await (0,
+const openAIOptions = await (0, import_provider_utils9.parseProviderOptions)({
 provider: "openai",
 providerOptions,
 schema: openAITranscriptionProviderOptions
 });
 const formData = new FormData();
-const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([(0,
+const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([(0, import_provider_utils9.convertBase64ToUint8Array)(audio)]);
 formData.append("model", this.modelId);
 formData.append("file", new File([blob], "audio", { type: mediaType }));
 if (openAIOptions) {
@@ -1658,15 +1815,15 @@ var OpenAITranscriptionModel = class {
 value: response,
 responseHeaders,
 rawValue: rawResponse
-} = await (0,
+} = await (0, import_provider_utils9.postFormDataToApi)({
 url: this.config.url({
 path: "/audio/transcriptions",
 modelId: this.modelId
 }),
-headers: (0,
+headers: (0, import_provider_utils9.combineHeaders)(this.config.headers(), options.headers),
 formData,
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
+successfulResponseHandler: (0, import_provider_utils9.createJsonResponseHandler)(
 openaiTranscriptionResponseSchema
 ),
 abortSignal: options.abortSignal,
@@ -1692,31 +1849,31 @@ var OpenAITranscriptionModel = class {
 };
 }
 };
-var openaiTranscriptionResponseSchema =
-text:
-language:
-duration:
-words:
-
-word:
-start:
-end:
+var openaiTranscriptionResponseSchema = import_zod12.z.object({
+text: import_zod12.z.string(),
+language: import_zod12.z.string().nullish(),
+duration: import_zod12.z.number().nullish(),
+words: import_zod12.z.array(
+import_zod12.z.object({
+word: import_zod12.z.string(),
+start: import_zod12.z.number(),
+end: import_zod12.z.number()
 })
 ).nullish()
 });
 
 // src/openai-speech-model.ts
-var
-var
-var OpenAIProviderOptionsSchema =
-instructions:
-speed:
+var import_provider_utils10 = require("@ai-sdk/provider-utils");
+var import_zod13 = require("zod");
+var OpenAIProviderOptionsSchema = import_zod13.z.object({
+instructions: import_zod13.z.string().nullish(),
+speed: import_zod13.z.number().min(0.25).max(4).default(1).nullish()
 });
 var OpenAISpeechModel = class {
 constructor(modelId, config) {
 this.modelId = modelId;
 this.config = config;
-this.specificationVersion = "
+this.specificationVersion = "v2";
 }
 get provider() {
 return this.config.provider;
@@ -1727,10 +1884,11 @@ var OpenAISpeechModel = class {
 outputFormat = "mp3",
 speed,
 instructions,
+language,
 providerOptions
 }) {
 const warnings = [];
-const openAIOptions = await (0,
+const openAIOptions = await (0, import_provider_utils10.parseProviderOptions)({
 provider: "openai",
 providerOptions,
 schema: OpenAIProviderOptionsSchema
@@ -1763,6 +1921,13 @@ var OpenAISpeechModel = class {
 }
 }
 }
+if (language) {
+warnings.push({
+type: "unsupported-setting",
+setting: "language",
+details: `OpenAI speech models do not support language selection. Language parameter "${language}" was ignored.`
+});
+}
 return {
 requestBody,
 warnings
@@ -1776,15 +1941,15 @@ var OpenAISpeechModel = class {
 value: audio,
 responseHeaders,
 rawValue: rawResponse
-} = await (0,
+} = await (0, import_provider_utils10.postJsonToApi)({
 url: this.config.url({
 path: "/audio/speech",
 modelId: this.modelId
 }),
-headers: (0,
+headers: (0, import_provider_utils10.combineHeaders)(this.config.headers(), options.headers),
 body: requestBody,
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
+successfulResponseHandler: (0, import_provider_utils10.createBinaryResponseHandler)(),
 abortSignal: options.abortSignal,
 fetch: this.config.fetch
 });
@@ -1805,8 +1970,8 @@ var OpenAISpeechModel = class {
 };
 
 // src/responses/openai-responses-language-model.ts
-var
-var
+var import_provider_utils11 = require("@ai-sdk/provider-utils");
+var import_zod14 = require("zod");
 
 // src/responses/convert-to-openai-responses-messages.ts
 var import_provider6 = require("@ai-sdk/provider");
@@ -1895,11 +2060,21 @@ function convertToOpenAIResponsesMessages({
 break;
 }
 case "tool-call": {
+if (part.providerExecuted) {
+break;
+}
 messages.push({
 type: "function_call",
 call_id: part.toolCallId,
 name: part.toolName,
-arguments: JSON.stringify(part.
+arguments: JSON.stringify(part.input)
+});
+break;
+}
+case "tool-result": {
+warnings.push({
+type: "other",
+message: `tool result parts in assistant messages are not supported for OpenAI responses`
 });
 break;
 }
@@ -1909,10 +2084,23 @@ function convertToOpenAIResponsesMessages({
 }
 case "tool": {
 for (const part of content) {
+const output = part.output;
+let contentValue;
+switch (output.type) {
+case "text":
+case "error-text":
+contentValue = output.value;
+break;
+case "content":
+case "json":
+case "error-json":
+contentValue = JSON.stringify(output.value);
+break;
+}
 messages.push({
 type: "function_call_output",
 call_id: part.toolCallId,
-output:
+output: contentValue
 });
 }
 break;
@@ -1964,7 +2152,7 @@ function prepareResponsesTools({
 type: "function",
 name: tool.name,
 description: tool.description,
-parameters: tool.
+parameters: tool.inputSchema,
 strict: strict ? true : void 0
 });
 break;
@@ -2068,7 +2256,7 @@ var OpenAIResponsesLanguageModel = class {
 systemMessageMode: modelConfig.systemMessageMode
 });
 warnings.push(...messageWarnings);
-const openaiOptions = await (0,
+const openaiOptions = await (0, import_provider_utils11.parseProviderOptions)({
 provider: "openai",
 providerOptions,
 schema: openaiResponsesProviderOptionsSchema
@@ -2098,6 +2286,7 @@ var OpenAIResponsesLanguageModel = class {
 store: openaiOptions == null ? void 0 : openaiOptions.store,
 user: openaiOptions == null ? void 0 : openaiOptions.user,
 instructions: openaiOptions == null ? void 0 : openaiOptions.instructions,
+service_tier: openaiOptions == null ? void 0 : openaiOptions.serviceTier,
 // model-specific settings:
 ...modelConfig.isReasoningModel && ((openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null || (openaiOptions == null ? void 0 : openaiOptions.reasoningSummary) != null) && {
 reasoning: {
@@ -2131,6 +2320,14 @@ var OpenAIResponsesLanguageModel = class {
 });
 }
 }
+if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "flex" && !supportsFlexProcessing2(this.modelId)) {
+warnings.push({
+type: "unsupported-setting",
+setting: "serviceTier",
+details: "flex processing is only available for o3 and o4-mini models"
+});
+delete baseArgs.service_tier;
+}
 const {
 tools: openaiTools,
 toolChoice: openaiToolChoice,
@@ -2156,64 +2353,68 @@ var OpenAIResponsesLanguageModel = class {
 responseHeaders,
 value: response,
 rawValue: rawResponse
-} = await (0,
+} = await (0, import_provider_utils11.postJsonToApi)({
 url: this.config.url({
 path: "/responses",
 modelId: this.modelId
 }),
-headers: (0,
+headers: (0, import_provider_utils11.combineHeaders)(this.config.headers(), options.headers),
 body,
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
-
-id:
-created_at:
-model:
-output:
-
-
-type:
-role:
-content:
-
-type:
-text:
-annotations:
-
-type:
-start_index:
-end_index:
-url:
-title:
+successfulResponseHandler: (0, import_provider_utils11.createJsonResponseHandler)(
+import_zod14.z.object({
+id: import_zod14.z.string(),
+created_at: import_zod14.z.number(),
+model: import_zod14.z.string(),
+output: import_zod14.z.array(
+import_zod14.z.discriminatedUnion("type", [
+import_zod14.z.object({
+type: import_zod14.z.literal("message"),
+role: import_zod14.z.literal("assistant"),
+content: import_zod14.z.array(
+import_zod14.z.object({
+type: import_zod14.z.literal("output_text"),
+text: import_zod14.z.string(),
+annotations: import_zod14.z.array(
+import_zod14.z.object({
+type: import_zod14.z.literal("url_citation"),
+start_index: import_zod14.z.number(),
+end_index: import_zod14.z.number(),
+url: import_zod14.z.string(),
+title: import_zod14.z.string()
 })
 )
 })
 )
 }),
-
-type:
-call_id:
-name:
-arguments:
+import_zod14.z.object({
+type: import_zod14.z.literal("function_call"),
+call_id: import_zod14.z.string(),
+name: import_zod14.z.string(),
+arguments: import_zod14.z.string()
 }),
-
-type:
+import_zod14.z.object({
+type: import_zod14.z.literal("web_search_call"),
+id: import_zod14.z.string(),
+status: import_zod14.z.string().optional()
 }),
-
-type:
+import_zod14.z.object({
+type: import_zod14.z.literal("computer_call"),
+id: import_zod14.z.string(),
+status: import_zod14.z.string().optional()
 }),
-
-type:
-summary:
-
-type:
-text:
+import_zod14.z.object({
+type: import_zod14.z.literal("reasoning"),
+summary: import_zod14.z.array(
+import_zod14.z.object({
+type: import_zod14.z.literal("summary_text"),
+text: import_zod14.z.string()
 })
 )
 })
 ])
 ),
-incomplete_details:
+incomplete_details: import_zod14.z.object({ reason: import_zod14.z.string() }).nullable(),
 usage: usageSchema2
 })
 ),
@@ -2240,7 +2441,7 @@ var OpenAIResponsesLanguageModel = class {
 content.push({
 type: "source",
 sourceType: "url",
-id: (_c = (_b = (_a = this.config).generateId) == null ? void 0 : _b.call(_a)) != null ? _c : (0,
+id: (_c = (_b = (_a = this.config).generateId) == null ? void 0 : _b.call(_a)) != null ? _c : (0, import_provider_utils11.generateId)(),
 url: annotation.url,
 title: annotation.title
 });
@@ -2251,10 +2452,46 @@ var OpenAIResponsesLanguageModel = class {
 case "function_call": {
 content.push({
 type: "tool-call",
-toolCallType: "function",
 toolCallId: part.call_id,
 toolName: part.name,
-
+input: part.arguments
+});
+break;
+}
+case "web_search_call": {
+content.push({
+type: "tool-call",
+toolCallId: part.id,
+toolName: "web_search_preview",
+input: "",
+providerExecuted: true
+});
+content.push({
+type: "tool-result",
+toolCallId: part.id,
+toolName: "web_search_preview",
+result: { status: part.status || "completed" },
+providerExecuted: true
+});
+break;
+}
+case "computer_call": {
+content.push({
+type: "tool-call",
+toolCallId: part.id,
+toolName: "computer_use",
+input: "",
+providerExecuted: true
+});
+content.push({
+type: "tool-result",
+toolCallId: part.id,
+toolName: "computer_use",
+result: {
+type: "computer_use_tool_result",
+status: part.status || "completed"
+},
+providerExecuted: true
 });
 break;
 }
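The hunk above maps web_search_call and computer_call output items to provider-executed tool calls plus synthetic tool results (providerExecuted: true, empty input), so they need no local execute handler. A small sketch of how a caller might separate them from locally executed calls; the ToolCallPart type is a simplified stand-in for illustration only:

  // Sketch: skip provider-executed tool calls when dispatching local tool handlers.
  type ToolCallPart = {
    type: 'tool-call';
    toolCallId: string;
    toolName: string;
    input: string;
    providerExecuted?: boolean;
  };

  function localToolCalls(content: ToolCallPart[]): ToolCallPart[] {
    // web_search_call / computer_call arrive with providerExecuted: true and an empty input.
    return content.filter(part => !part.providerExecuted);
  }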
@@ -2291,18 +2528,18 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
2291
2528
|
}
|
|
2292
2529
|
async doStream(options) {
|
|
2293
2530
|
const { args: body, warnings } = await this.getArgs(options);
|
|
2294
|
-
const { responseHeaders, value: response } = await (0,
|
|
2531
|
+
const { responseHeaders, value: response } = await (0, import_provider_utils11.postJsonToApi)({
|
|
2295
2532
|
url: this.config.url({
|
|
2296
2533
|
path: "/responses",
|
|
2297
2534
|
modelId: this.modelId
|
|
2298
2535
|
}),
|
|
2299
|
-
headers: (0,
|
|
2536
|
+
headers: (0, import_provider_utils11.combineHeaders)(this.config.headers(), options.headers),
|
|
2300
2537
|
body: {
|
|
2301
2538
|
...body,
|
|
2302
2539
|
stream: true
|
|
2303
2540
|
},
|
|
2304
2541
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
2305
|
-
successfulResponseHandler: (0,
|
|
2542
|
+
successfulResponseHandler: (0, import_provider_utils11.createEventSourceResponseHandler)(
|
|
2306
2543
|
openaiResponsesChunkSchema
|
|
2307
2544
|
),
|
|
2308
2545
|
abortSignal: options.abortSignal,
|
|
@@ -2326,6 +2563,9 @@
       },
       transform(chunk, controller) {
         var _a, _b, _c, _d, _e, _f, _g, _h;
+        if (options.includeRawChunks) {
+          controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
+        }
         if (!chunk.success) {
           finishReason = "error";
           controller.enqueue({ type: "error", error: chunk.error });
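The three added lines are the provider half of raw-chunk forwarding: when the caller opts in, every Responses API event is re-emitted untouched before normal processing. A usage sketch, assuming the corresponding streamText option in the v5 beta is includeRawChunks and that raw parts appear on fullStream; prompt and model id are placeholders:

import { streamText } from 'ai';
import { openai } from '@ai-sdk/openai';

const result = streamText({
  model: openai.responses('gpt-4o-mini'),
  prompt: 'Stream a short haiku.',
  includeRawChunks: true, // ends up as options.includeRawChunks in doStream
});

for await (const part of result.fullStream) {
  if (part.type === 'raw') {
    // rawValue is the unparsed provider event, handy for debugging.
    console.log(part.rawValue);
  }
}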
@@ -2339,22 +2579,121 @@
             toolCallId: value.item.call_id
           };
           controller.enqueue({
-            type: "tool-
-
+            type: "tool-input-start",
+            id: value.item.call_id,
+            toolName: value.item.name
+          });
+        } else if (value.item.type === "web_search_call") {
+          ongoingToolCalls[value.output_index] = {
+            toolName: "web_search_preview",
+            toolCallId: value.item.id
+          };
+          controller.enqueue({
+            type: "tool-input-start",
+            id: value.item.id,
+            toolName: "web_search_preview"
+          });
+        } else if (value.item.type === "computer_call") {
+          ongoingToolCalls[value.output_index] = {
+            toolName: "computer_use",
+            toolCallId: value.item.id
+          };
+          controller.enqueue({
+            type: "tool-input-start",
+            id: value.item.id,
+            toolName: "computer_use"
+          });
+        } else if (value.item.type === "message") {
+          controller.enqueue({
+            type: "text-start",
+            id: value.item.id
+          });
+        } else if (value.item.type === "reasoning") {
+          controller.enqueue({
+            type: "reasoning-start",
+            id: value.item.id
+          });
+        }
+      } else if (isResponseOutputItemDoneChunk(value)) {
+        if (value.item.type === "function_call") {
+          ongoingToolCalls[value.output_index] = void 0;
+          hasToolCalls = true;
+          controller.enqueue({
+            type: "tool-input-end",
+            id: value.item.call_id
+          });
+          controller.enqueue({
+            type: "tool-call",
             toolCallId: value.item.call_id,
             toolName: value.item.name,
-
+            input: value.item.arguments
+          });
+        } else if (value.item.type === "web_search_call") {
+          ongoingToolCalls[value.output_index] = void 0;
+          hasToolCalls = true;
+          controller.enqueue({
+            type: "tool-input-end",
+            id: value.item.id
+          });
+          controller.enqueue({
+            type: "tool-call",
+            toolCallId: value.item.id,
+            toolName: "web_search_preview",
+            input: "",
+            providerExecuted: true
+          });
+          controller.enqueue({
+            type: "tool-result",
+            toolCallId: value.item.id,
+            toolName: "web_search_preview",
+            result: {
+              type: "web_search_tool_result",
+              status: value.item.status || "completed"
+            },
+            providerExecuted: true
+          });
+        } else if (value.item.type === "computer_call") {
+          ongoingToolCalls[value.output_index] = void 0;
+          hasToolCalls = true;
+          controller.enqueue({
+            type: "tool-input-end",
+            id: value.item.id
+          });
+          controller.enqueue({
+            type: "tool-call",
+            toolCallId: value.item.id,
+            toolName: "computer_use",
+            input: "",
+            providerExecuted: true
+          });
+          controller.enqueue({
+            type: "tool-result",
+            toolCallId: value.item.id,
+            toolName: "computer_use",
+            result: {
+              type: "computer_use_tool_result",
+              status: value.item.status || "completed"
+            },
+            providerExecuted: true
+          });
+        } else if (value.item.type === "message") {
+          controller.enqueue({
+            type: "text-end",
+            id: value.item.id
+          });
+        } else if (value.item.type === "reasoning") {
+          controller.enqueue({
+            type: "reasoning-end",
+            id: value.item.id
           });
         }
       } else if (isResponseFunctionCallArgumentsDeltaChunk(value)) {
         const toolCall = ongoingToolCalls[value.output_index];
         if (toolCall != null) {
           controller.enqueue({
-            type: "tool-
-
-
-            toolName: toolCall.toolName,
-            argsTextDelta: value.delta
+            type: "tool-input-delta",
+            id: toolCall.toolCallId,
+            delta: value.delta
           });
         }
       } else if (isResponseCreatedChunk(value)) {
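Stream-part naming changes in the hunk above: tool arguments now stream as a tool-input-start / tool-input-delta / tool-input-end lifecycle keyed by id, and the final tool-call part carries input rather than args. A consumer-side sketch of reading these parts, assuming the part type names and properties pass through streamText's fullStream unchanged; the tool set is omitted:

import { streamText } from 'ai';
import { openai } from '@ai-sdk/openai';

const result = streamText({
  model: openai.responses('gpt-4o-mini'),
  // tools: { ... } // hypothetical tool set, omitted here
  prompt: 'Look up the weather and answer.',
});

for await (const part of result.fullStream) {
  switch (part.type) {
    case 'tool-input-start':
      console.log('tool input started:', part.id, part.toolName);
      break;
    case 'tool-input-delta':
      process.stdout.write(part.delta); // partial JSON arguments for that id
      break;
    case 'tool-call':
      console.log('tool call complete:', part.toolCallId, part.input);
      break;
  }
}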
@@ -2367,23 +2706,15 @@
           });
         } else if (isTextDeltaChunk(value)) {
           controller.enqueue({
-            type: "text",
-
+            type: "text-delta",
+            id: value.item_id,
+            delta: value.delta
           });
         } else if (isResponseReasoningSummaryTextDeltaChunk(value)) {
           controller.enqueue({
-            type: "reasoning",
-
-
-        } else if (isResponseOutputItemDoneChunk(value) && value.item.type === "function_call") {
-          ongoingToolCalls[value.output_index] = void 0;
-          hasToolCalls = true;
-          controller.enqueue({
-            type: "tool-call",
-            toolCallType: "function",
-            toolCallId: value.item.call_id,
-            toolName: value.item.name,
-            args: value.item.arguments
+            type: "reasoning-delta",
+            delta: value.delta,
+            id: value.item_id
           });
         } else if (isResponseFinishedChunk(value)) {
           finishReason = mapOpenAIResponseFinishReason({
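Together with the previous hunk, every delta the stream emits is now scoped to the id of the output item it belongs to: text, reasoning summaries, and tool inputs alike. An illustrative type and helper built only from the shapes visible above; the type name and function are hypothetical:

// Hypothetical, for illustration: the id-scoped delta parts emitted above.
type ResponsesDeltaPart =
  | { type: 'text-delta'; id: string; delta: string }
  | { type: 'reasoning-delta'; id: string; delta: string }
  | { type: 'tool-input-delta'; id: string; delta: string };

// Accumulate streamed deltas per output item id.
function appendDelta(buffers: Map<string, string>, part: ResponsesDeltaPart): void {
  buffers.set(part.id, (buffers.get(part.id) ?? '') + part.delta);
}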
@@ -2399,7 +2730,7 @@
             controller.enqueue({
               type: "source",
               sourceType: "url",
-              id: (_h = (_g = (_f = self.config).generateId) == null ? void 0 : _g.call(_f)) != null ? _h : (0,
+              id: (_h = (_g = (_f = self.config).generateId) == null ? void 0 : _g.call(_f)) != null ? _h : (0, import_provider_utils11.generateId)(),
              url: value.annotation.url,
              title: value.annotation.title
            });
@@ -2424,95 +2755,134 @@
     };
   }
 };
-var usageSchema2 =
-  input_tokens:
-  input_tokens_details:
-  output_tokens:
-  output_tokens_details:
+var usageSchema2 = import_zod14.z.object({
+  input_tokens: import_zod14.z.number(),
+  input_tokens_details: import_zod14.z.object({ cached_tokens: import_zod14.z.number().nullish() }).nullish(),
+  output_tokens: import_zod14.z.number(),
+  output_tokens_details: import_zod14.z.object({ reasoning_tokens: import_zod14.z.number().nullish() }).nullish()
 });
-var textDeltaChunkSchema =
-  type:
-
+var textDeltaChunkSchema = import_zod14.z.object({
+  type: import_zod14.z.literal("response.output_text.delta"),
+  item_id: import_zod14.z.string(),
+  delta: import_zod14.z.string()
 });
-var responseFinishedChunkSchema =
-  type:
-  response:
-    incomplete_details:
+var responseFinishedChunkSchema = import_zod14.z.object({
+  type: import_zod14.z.enum(["response.completed", "response.incomplete"]),
+  response: import_zod14.z.object({
+    incomplete_details: import_zod14.z.object({ reason: import_zod14.z.string() }).nullish(),
     usage: usageSchema2
   })
 });
-var responseCreatedChunkSchema =
-  type:
-  response:
-    id:
-    created_at:
-    model:
+var responseCreatedChunkSchema = import_zod14.z.object({
+  type: import_zod14.z.literal("response.created"),
+  response: import_zod14.z.object({
+    id: import_zod14.z.string(),
+    created_at: import_zod14.z.number(),
+    model: import_zod14.z.string()
   })
 });
-var
-  type:
-  output_index:
-  item:
-
-    type:
+var responseOutputItemAddedSchema = import_zod14.z.object({
+  type: import_zod14.z.literal("response.output_item.added"),
+  output_index: import_zod14.z.number(),
+  item: import_zod14.z.discriminatedUnion("type", [
+    import_zod14.z.object({
+      type: import_zod14.z.literal("message"),
+      id: import_zod14.z.string()
     }),
-
-    type:
-    id:
-
-
-
-
+    import_zod14.z.object({
+      type: import_zod14.z.literal("reasoning"),
+      id: import_zod14.z.string()
+    }),
+    import_zod14.z.object({
+      type: import_zod14.z.literal("function_call"),
+      id: import_zod14.z.string(),
+      call_id: import_zod14.z.string(),
+      name: import_zod14.z.string(),
+      arguments: import_zod14.z.string()
+    }),
+    import_zod14.z.object({
+      type: import_zod14.z.literal("web_search_call"),
+      id: import_zod14.z.string(),
+      status: import_zod14.z.string()
+    }),
+    import_zod14.z.object({
+      type: import_zod14.z.literal("computer_call"),
+      id: import_zod14.z.string(),
+      status: import_zod14.z.string()
     })
   ])
 });
-var
-  type:
-
-
-
-
-
-  type: import_zod12.z.literal("response.output_item.added"),
-  output_index: import_zod12.z.number(),
-  item: import_zod12.z.discriminatedUnion("type", [
-    import_zod12.z.object({
-      type: import_zod12.z.literal("message")
+var responseOutputItemDoneSchema = import_zod14.z.object({
+  type: import_zod14.z.literal("response.output_item.done"),
+  output_index: import_zod14.z.number(),
+  item: import_zod14.z.discriminatedUnion("type", [
+    import_zod14.z.object({
+      type: import_zod14.z.literal("message"),
+      id: import_zod14.z.string()
     }),
-
-    type:
-    id:
-
-
-
+    import_zod14.z.object({
+      type: import_zod14.z.literal("reasoning"),
+      id: import_zod14.z.string()
+    }),
+    import_zod14.z.object({
+      type: import_zod14.z.literal("function_call"),
+      id: import_zod14.z.string(),
+      call_id: import_zod14.z.string(),
+      name: import_zod14.z.string(),
+      arguments: import_zod14.z.string(),
+      status: import_zod14.z.literal("completed")
+    }),
+    import_zod14.z.object({
+      type: import_zod14.z.literal("web_search_call"),
+      id: import_zod14.z.string(),
+      status: import_zod14.z.literal("completed")
+    }),
+    import_zod14.z.object({
+      type: import_zod14.z.literal("computer_call"),
+      id: import_zod14.z.string(),
+      status: import_zod14.z.literal("completed")
    })
  ])
 });
-var
-  type:
-
-
-
-
+var responseFunctionCallArgumentsDeltaSchema = import_zod14.z.object({
+  type: import_zod14.z.literal("response.function_call_arguments.delta"),
+  item_id: import_zod14.z.string(),
+  output_index: import_zod14.z.number(),
+  delta: import_zod14.z.string()
+});
+var responseAnnotationAddedSchema = import_zod14.z.object({
+  type: import_zod14.z.literal("response.output_text.annotation.added"),
+  annotation: import_zod14.z.object({
+    type: import_zod14.z.literal("url_citation"),
+    url: import_zod14.z.string(),
+    title: import_zod14.z.string()
   })
 });
-var responseReasoningSummaryTextDeltaSchema =
-  type:
-  item_id:
-  output_index:
-  summary_index:
-  delta:
+var responseReasoningSummaryTextDeltaSchema = import_zod14.z.object({
+  type: import_zod14.z.literal("response.reasoning_summary_text.delta"),
+  item_id: import_zod14.z.string(),
+  output_index: import_zod14.z.number(),
+  summary_index: import_zod14.z.number(),
+  delta: import_zod14.z.string()
+});
+var responseReasoningSummaryPartDoneSchema = import_zod14.z.object({
+  type: import_zod14.z.literal("response.reasoning_summary_part.done"),
+  item_id: import_zod14.z.string(),
+  output_index: import_zod14.z.number(),
+  summary_index: import_zod14.z.number(),
+  part: import_zod14.z.unknown().nullish()
 });
-var openaiResponsesChunkSchema =
+var openaiResponsesChunkSchema = import_zod14.z.union([
   textDeltaChunkSchema,
   responseFinishedChunkSchema,
   responseCreatedChunkSchema,
+  responseOutputItemAddedSchema,
   responseOutputItemDoneSchema,
   responseFunctionCallArgumentsDeltaSchema,
-  responseOutputItemAddedSchema,
   responseAnnotationAddedSchema,
   responseReasoningSummaryTextDeltaSchema,
-
+  responseReasoningSummaryPartDoneSchema,
+  import_zod14.z.object({ type: import_zod14.z.string() }).passthrough()
   // fallback for unknown chunks
 ]);
 function isTextDeltaChunk(chunk) {
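One pattern worth noting in the rebuilt chunk schema: the union lists the known event schemas first and ends with a passthrough object, so event types this version does not model are still accepted and can be ignored downstream. A standalone sketch of the same pattern with plain zod (event names invented for illustration):

import { z } from 'zod';

const knownEventSchema = z.object({
  type: z.literal('example.output_text.delta'),
  delta: z.string(),
});

// The catch-all keeps unknown event types from failing validation;
// consumers can switch on `type` and skip anything they don't handle.
const eventSchema = z.union([
  knownEventSchema,
  z.object({ type: z.string() }).passthrough(),
]);

// Parses even though this event type is not modeled explicitly.
eventSchema.parse({ type: 'example.some_future_event', payload: 42 });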
@@ -2560,16 +2930,20 @@ function getResponsesModelConfig(modelId) {
     requiredAutoTruncation: false
   };
 }
-
-
-
-
-
-
-
-
-
-
+function supportsFlexProcessing2(modelId) {
+  return modelId.startsWith("o3") || modelId.startsWith("o4-mini");
+}
+var openaiResponsesProviderOptionsSchema = import_zod14.z.object({
+  metadata: import_zod14.z.any().nullish(),
+  parallelToolCalls: import_zod14.z.boolean().nullish(),
+  previousResponseId: import_zod14.z.string().nullish(),
+  store: import_zod14.z.boolean().nullish(),
+  user: import_zod14.z.string().nullish(),
+  reasoningEffort: import_zod14.z.string().nullish(),
+  strictSchemas: import_zod14.z.boolean().nullish(),
+  instructions: import_zod14.z.string().nullish(),
+  reasoningSummary: import_zod14.z.string().nullish(),
+  serviceTier: import_zod14.z.enum(["auto", "flex"]).nullish()
 });
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
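The new supportsFlexProcessing2 helper and the serviceTier entry in openaiResponsesProviderOptionsSchema expose flex processing as a per-call provider option for the Responses API. A usage sketch, assuming the v5 beta providerOptions plumbing forwards the value unchanged; model id and prompt are placeholders:

import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';

const { text } = await generateText({
  // Only honored for models that pass supportsFlexProcessing2 (o3 / o4-mini).
  model: openai.responses('o4-mini'),
  prompt: 'Classify this support ticket: ...',
  providerOptions: {
    openai: { serviceTier: 'flex' }, // 'auto' | 'flex' per the schema above
  },
});
console.log(text);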