@ai-sdk/openai 2.0.31 → 2.0.32
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +7 -0
- package/dist/index.d.mts +44 -12
- package/dist/index.d.ts +44 -12
- package/dist/index.js +388 -425
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +354 -391
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +420 -457
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +372 -409
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/internal/index.js
CHANGED
@@ -38,8 +38,8 @@ module.exports = __toCommonJS(internal_exports);
 
 // src/chat/openai-chat-language-model.ts
 var import_provider3 = require("@ai-sdk/provider");
-var
-var
+var import_provider_utils3 = require("@ai-sdk/provider-utils");
+var import_v43 = require("zod/v4");
 
 // src/openai-error.ts
 var import_v4 = require("zod/v4");
@@ -360,98 +360,6 @@ var openaiProviderOptions = import_v42.z.object({
 
 // src/chat/openai-chat-prepare-tools.ts
 var import_provider2 = require("@ai-sdk/provider");
-
-// src/tool/file-search.ts
-var import_provider_utils3 = require("@ai-sdk/provider-utils");
-var import_v43 = require("zod/v4");
-var comparisonFilterSchema = import_v43.z.object({
-key: import_v43.z.string(),
-type: import_v43.z.enum(["eq", "ne", "gt", "gte", "lt", "lte"]),
-value: import_v43.z.union([import_v43.z.string(), import_v43.z.number(), import_v43.z.boolean()])
-});
-var compoundFilterSchema = import_v43.z.object({
-type: import_v43.z.enum(["and", "or"]),
-filters: import_v43.z.array(
-import_v43.z.union([comparisonFilterSchema, import_v43.z.lazy(() => compoundFilterSchema)])
-)
-});
-var filtersSchema = import_v43.z.union([comparisonFilterSchema, compoundFilterSchema]);
-var fileSearchArgsSchema = import_v43.z.object({
-vectorStoreIds: import_v43.z.array(import_v43.z.string()).optional(),
-maxNumResults: import_v43.z.number().optional(),
-ranking: import_v43.z.object({
-ranker: import_v43.z.enum(["auto", "default-2024-08-21"]).optional()
-}).optional(),
-filters: filtersSchema.optional()
-});
-var fileSearch = (0, import_provider_utils3.createProviderDefinedToolFactory)({
-id: "openai.file_search",
-name: "file_search",
-inputSchema: import_v43.z.object({
-query: import_v43.z.string()
-})
-});
-
-// src/tool/web-search-preview.ts
-var import_provider_utils4 = require("@ai-sdk/provider-utils");
-var import_v44 = require("zod/v4");
-var webSearchPreviewArgsSchema = import_v44.z.object({
-/**
-* Search context size to use for the web search.
-* - high: Most comprehensive context, highest cost, slower response
-* - medium: Balanced context, cost, and latency (default)
-* - low: Least context, lowest cost, fastest response
-*/
-searchContextSize: import_v44.z.enum(["low", "medium", "high"]).optional(),
-/**
-* User location information to provide geographically relevant search results.
-*/
-userLocation: import_v44.z.object({
-/**
-* Type of location (always 'approximate')
-*/
-type: import_v44.z.literal("approximate"),
-/**
-* Two-letter ISO country code (e.g., 'US', 'GB')
-*/
-country: import_v44.z.string().optional(),
-/**
-* City name (free text, e.g., 'Minneapolis')
-*/
-city: import_v44.z.string().optional(),
-/**
-* Region name (free text, e.g., 'Minnesota')
-*/
-region: import_v44.z.string().optional(),
-/**
-* IANA timezone (e.g., 'America/Chicago')
-*/
-timezone: import_v44.z.string().optional()
-}).optional()
-});
-var webSearchPreview = (0, import_provider_utils4.createProviderDefinedToolFactory)({
-id: "openai.web_search_preview",
-name: "web_search_preview",
-inputSchema: import_v44.z.object({
-action: import_v44.z.discriminatedUnion("type", [
-import_v44.z.object({
-type: import_v44.z.literal("search"),
-query: import_v44.z.string().nullish()
-}),
-import_v44.z.object({
-type: import_v44.z.literal("open_page"),
-url: import_v44.z.string()
-}),
-import_v44.z.object({
-type: import_v44.z.literal("find"),
-url: import_v44.z.string(),
-pattern: import_v44.z.string()
-})
-]).nullish()
-})
-});
-
-// src/chat/openai-chat-prepare-tools.ts
 function prepareChatTools({
 tools,
 toolChoice,
@@ -477,33 +385,6 @@ function prepareChatTools({
 }
 });
 break;
-case "provider-defined":
-switch (tool.id) {
-case "openai.file_search": {
-const args = fileSearchArgsSchema.parse(tool.args);
-openaiTools.push({
-type: "file_search",
-vector_store_ids: args.vectorStoreIds,
-max_num_results: args.maxNumResults,
-ranking_options: args.ranking ? { ranker: args.ranking.ranker } : void 0,
-filters: args.filters
-});
-break;
-}
-case "openai.web_search_preview": {
-const args = webSearchPreviewArgsSchema.parse(tool.args);
-openaiTools.push({
-type: "web_search_preview",
-search_context_size: args.searchContextSize,
-user_location: args.userLocation
-});
-break;
-}
-default:
-toolWarnings.push({ type: "unsupported-tool", tool });
-break;
-}
-break;
 default:
 toolWarnings.push({ type: "unsupported-tool", tool });
 break;
@@ -568,7 +449,7 @@ var OpenAIChatLanguageModel = class {
 }) {
 var _a, _b, _c, _d;
 const warnings = [];
-const openaiOptions = (_a = await (0,
+const openaiOptions = (_a = await (0, import_provider_utils3.parseProviderOptions)({
 provider: "openai",
 providerOptions,
 schema: openaiProviderOptions
@@ -747,15 +628,15 @@ var OpenAIChatLanguageModel = class {
 responseHeaders,
 value: response,
 rawValue: rawResponse
-} = await (0,
+} = await (0, import_provider_utils3.postJsonToApi)({
 url: this.config.url({
 path: "/chat/completions",
 modelId: this.modelId
 }),
-headers: (0,
+headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
 body,
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
+successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
 openaiChatResponseSchema
 ),
 abortSignal: options.abortSignal,
@@ -770,7 +651,7 @@ var OpenAIChatLanguageModel = class {
 for (const toolCall of (_a = choice.message.tool_calls) != null ? _a : []) {
 content.push({
 type: "tool-call",
-toolCallId: (_b = toolCall.id) != null ? _b : (0,
+toolCallId: (_b = toolCall.id) != null ? _b : (0, import_provider_utils3.generateId)(),
 toolName: toolCall.function.name,
 input: toolCall.function.arguments
 });
@@ -779,7 +660,7 @@ var OpenAIChatLanguageModel = class {
 content.push({
 type: "source",
 sourceType: "url",
-id: (0,
+id: (0, import_provider_utils3.generateId)(),
 url: annotation.url,
 title: annotation.title
 });
@@ -825,15 +706,15 @@ var OpenAIChatLanguageModel = class {
 include_usage: true
 }
 };
-const { responseHeaders, value: response } = await (0,
+const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
 url: this.config.url({
 path: "/chat/completions",
 modelId: this.modelId
 }),
-headers: (0,
+headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
 body,
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
+successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(
 openaiChatChunkSchema
 ),
 abortSignal: options.abortSignal,
@@ -958,14 +839,14 @@ var OpenAIChatLanguageModel = class {
 delta: toolCall2.function.arguments
 });
 }
-if ((0,
+if ((0, import_provider_utils3.isParsableJson)(toolCall2.function.arguments)) {
 controller.enqueue({
 type: "tool-input-end",
 id: toolCall2.id
 });
 controller.enqueue({
 type: "tool-call",
-toolCallId: (_q = toolCall2.id) != null ? _q : (0,
+toolCallId: (_q = toolCall2.id) != null ? _q : (0, import_provider_utils3.generateId)(),
 toolName: toolCall2.function.name,
 input: toolCall2.function.arguments
 });
@@ -986,14 +867,14 @@ var OpenAIChatLanguageModel = class {
 id: toolCall.id,
 delta: (_u = toolCallDelta.function.arguments) != null ? _u : ""
 });
-if (((_v = toolCall.function) == null ? void 0 : _v.name) != null && ((_w = toolCall.function) == null ? void 0 : _w.arguments) != null && (0,
+if (((_v = toolCall.function) == null ? void 0 : _v.name) != null && ((_w = toolCall.function) == null ? void 0 : _w.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
 controller.enqueue({
 type: "tool-input-end",
 id: toolCall.id
 });
 controller.enqueue({
 type: "tool-call",
-toolCallId: (_x = toolCall.id) != null ? _x : (0,
+toolCallId: (_x = toolCall.id) != null ? _x : (0, import_provider_utils3.generateId)(),
 toolName: toolCall.function.name,
 input: toolCall.function.arguments
 });
@@ -1006,7 +887,7 @@ var OpenAIChatLanguageModel = class {
 controller.enqueue({
 type: "source",
 sourceType: "url",
-id: (0,
+id: (0, import_provider_utils3.generateId)(),
 url: annotation.url,
 title: annotation.title
 });
@@ -1031,115 +912,115 @@ var OpenAIChatLanguageModel = class {
 };
 }
 };
-var openaiTokenUsageSchema =
-prompt_tokens:
-completion_tokens:
-total_tokens:
-prompt_tokens_details:
-cached_tokens:
+var openaiTokenUsageSchema = import_v43.z.object({
+prompt_tokens: import_v43.z.number().nullish(),
+completion_tokens: import_v43.z.number().nullish(),
+total_tokens: import_v43.z.number().nullish(),
+prompt_tokens_details: import_v43.z.object({
+cached_tokens: import_v43.z.number().nullish()
 }).nullish(),
-completion_tokens_details:
-reasoning_tokens:
-accepted_prediction_tokens:
-rejected_prediction_tokens:
+completion_tokens_details: import_v43.z.object({
+reasoning_tokens: import_v43.z.number().nullish(),
+accepted_prediction_tokens: import_v43.z.number().nullish(),
+rejected_prediction_tokens: import_v43.z.number().nullish()
 }).nullish()
 }).nullish();
-var openaiChatResponseSchema =
-id:
-created:
-model:
-choices:
-
-message:
-role:
-content:
-tool_calls:
-
-id:
-type:
-function:
-name:
-arguments:
+var openaiChatResponseSchema = import_v43.z.object({
+id: import_v43.z.string().nullish(),
+created: import_v43.z.number().nullish(),
+model: import_v43.z.string().nullish(),
+choices: import_v43.z.array(
+import_v43.z.object({
+message: import_v43.z.object({
+role: import_v43.z.literal("assistant").nullish(),
+content: import_v43.z.string().nullish(),
+tool_calls: import_v43.z.array(
+import_v43.z.object({
+id: import_v43.z.string().nullish(),
+type: import_v43.z.literal("function"),
+function: import_v43.z.object({
+name: import_v43.z.string(),
+arguments: import_v43.z.string()
 })
 })
 ).nullish(),
-annotations:
-
-type:
-start_index:
-end_index:
-url:
-title:
+annotations: import_v43.z.array(
+import_v43.z.object({
+type: import_v43.z.literal("url_citation"),
+start_index: import_v43.z.number(),
+end_index: import_v43.z.number(),
+url: import_v43.z.string(),
+title: import_v43.z.string()
 })
 ).nullish()
 }),
-index:
-logprobs:
-content:
-
-token:
-logprob:
-top_logprobs:
-
-token:
-logprob:
+index: import_v43.z.number(),
+logprobs: import_v43.z.object({
+content: import_v43.z.array(
+import_v43.z.object({
+token: import_v43.z.string(),
+logprob: import_v43.z.number(),
+top_logprobs: import_v43.z.array(
+import_v43.z.object({
+token: import_v43.z.string(),
+logprob: import_v43.z.number()
 })
 )
 })
 ).nullish()
 }).nullish(),
-finish_reason:
+finish_reason: import_v43.z.string().nullish()
 })
 ),
 usage: openaiTokenUsageSchema
 });
-var openaiChatChunkSchema =
-
-id:
-created:
-model:
-choices:
-
-delta:
-role:
-content:
-tool_calls:
-
-index:
-id:
-type:
-function:
-name:
-arguments:
+var openaiChatChunkSchema = import_v43.z.union([
+import_v43.z.object({
+id: import_v43.z.string().nullish(),
+created: import_v43.z.number().nullish(),
+model: import_v43.z.string().nullish(),
+choices: import_v43.z.array(
+import_v43.z.object({
+delta: import_v43.z.object({
+role: import_v43.z.enum(["assistant"]).nullish(),
+content: import_v43.z.string().nullish(),
+tool_calls: import_v43.z.array(
+import_v43.z.object({
+index: import_v43.z.number(),
+id: import_v43.z.string().nullish(),
+type: import_v43.z.literal("function").nullish(),
+function: import_v43.z.object({
+name: import_v43.z.string().nullish(),
+arguments: import_v43.z.string().nullish()
 })
 })
 ).nullish(),
-annotations:
-
-type:
-start_index:
-end_index:
-url:
-title:
+annotations: import_v43.z.array(
+import_v43.z.object({
+type: import_v43.z.literal("url_citation"),
+start_index: import_v43.z.number(),
+end_index: import_v43.z.number(),
+url: import_v43.z.string(),
+title: import_v43.z.string()
 })
 ).nullish()
 }).nullish(),
-logprobs:
-content:
-
-token:
-logprob:
-top_logprobs:
-
-token:
-logprob:
+logprobs: import_v43.z.object({
+content: import_v43.z.array(
+import_v43.z.object({
+token: import_v43.z.string(),
+logprob: import_v43.z.number(),
+top_logprobs: import_v43.z.array(
+import_v43.z.object({
+token: import_v43.z.string(),
+logprob: import_v43.z.number()
 })
 )
 })
 ).nullish()
 }).nullish(),
-finish_reason:
-index:
+finish_reason: import_v43.z.string().nullish(),
+index: import_v43.z.number()
 })
 ),
 usage: openaiTokenUsageSchema
@@ -1196,8 +1077,8 @@ var reasoningModels = {
 };
 
 // src/completion/openai-completion-language-model.ts
-var
-var
+var import_provider_utils4 = require("@ai-sdk/provider-utils");
+var import_v45 = require("zod/v4");
 
 // src/completion/convert-to-openai-completion-prompt.ts
 var import_provider4 = require("@ai-sdk/provider");
@@ -1305,12 +1186,12 @@ function mapOpenAIFinishReason2(finishReason) {
 }
 
 // src/completion/openai-completion-options.ts
-var
-var openaiCompletionProviderOptions =
+var import_v44 = require("zod/v4");
+var openaiCompletionProviderOptions = import_v44.z.object({
 /**
 Echo back the prompt in addition to the completion.
 */
-echo:
+echo: import_v44.z.boolean().optional(),
 /**
 Modify the likelihood of specified tokens appearing in the completion.
 
@@ -1325,16 +1206,16 @@ var openaiCompletionProviderOptions = import_v46.z.object({
 As an example, you can pass {"50256": -100} to prevent the <|endoftext|>
 token from being generated.
 */
-logitBias:
+logitBias: import_v44.z.record(import_v44.z.string(), import_v44.z.number()).optional(),
 /**
 The suffix that comes after a completion of inserted text.
 */
-suffix:
+suffix: import_v44.z.string().optional(),
 /**
 A unique identifier representing your end-user, which can help OpenAI to
 monitor and detect abuse. Learn more.
 */
-user:
+user: import_v44.z.string().optional(),
 /**
 Return the log probabilities of the tokens. Including logprobs will increase
 the response size and can slow down response times. However, it can
@@ -1344,7 +1225,7 @@ var openaiCompletionProviderOptions = import_v46.z.object({
 Setting to a number will return the log probabilities of the top n
 tokens that were generated.
 */
-logprobs:
+logprobs: import_v44.z.union([import_v44.z.boolean(), import_v44.z.number()]).optional()
 });
 
 // src/completion/openai-completion-language-model.ts
@@ -1380,12 +1261,12 @@ var OpenAICompletionLanguageModel = class {
 }) {
 const warnings = [];
 const openaiOptions = {
-...await (0,
+...await (0, import_provider_utils4.parseProviderOptions)({
 provider: "openai",
 providerOptions,
 schema: openaiCompletionProviderOptions
 }),
-...await (0,
+...await (0, import_provider_utils4.parseProviderOptions)({
 provider: this.providerOptionsName,
 providerOptions,
 schema: openaiCompletionProviderOptions
@@ -1441,15 +1322,15 @@ var OpenAICompletionLanguageModel = class {
 responseHeaders,
 value: response,
 rawValue: rawResponse
-} = await (0,
+} = await (0, import_provider_utils4.postJsonToApi)({
 url: this.config.url({
 path: "/completions",
 modelId: this.modelId
 }),
-headers: (0,
+headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), options.headers),
 body: args,
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
+successfulResponseHandler: (0, import_provider_utils4.createJsonResponseHandler)(
 openaiCompletionResponseSchema
 ),
 abortSignal: options.abortSignal,
@@ -1487,15 +1368,15 @@ var OpenAICompletionLanguageModel = class {
 include_usage: true
 }
 };
-const { responseHeaders, value: response } = await (0,
+const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
 url: this.config.url({
 path: "/completions",
 modelId: this.modelId
 }),
-headers: (0,
+headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), options.headers),
 body,
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
+successfulResponseHandler: (0, import_provider_utils4.createEventSourceResponseHandler)(
 openaiCompletionChunkSchema
 ),
 abortSignal: options.abortSignal,
@@ -1576,42 +1457,42 @@ var OpenAICompletionLanguageModel = class {
 };
 }
 };
-var usageSchema =
-prompt_tokens:
-completion_tokens:
-total_tokens:
+var usageSchema = import_v45.z.object({
+prompt_tokens: import_v45.z.number(),
+completion_tokens: import_v45.z.number(),
+total_tokens: import_v45.z.number()
 });
-var openaiCompletionResponseSchema =
-id:
-created:
-model:
-choices:
-
-text:
-finish_reason:
-logprobs:
-tokens:
-token_logprobs:
-top_logprobs:
+var openaiCompletionResponseSchema = import_v45.z.object({
+id: import_v45.z.string().nullish(),
+created: import_v45.z.number().nullish(),
+model: import_v45.z.string().nullish(),
+choices: import_v45.z.array(
+import_v45.z.object({
+text: import_v45.z.string(),
+finish_reason: import_v45.z.string(),
+logprobs: import_v45.z.object({
+tokens: import_v45.z.array(import_v45.z.string()),
+token_logprobs: import_v45.z.array(import_v45.z.number()),
+top_logprobs: import_v45.z.array(import_v45.z.record(import_v45.z.string(), import_v45.z.number())).nullish()
 }).nullish()
 })
 ),
 usage: usageSchema.nullish()
 });
-var openaiCompletionChunkSchema =
-
-id:
-created:
-model:
-choices:
-
-text:
-finish_reason:
-index:
-logprobs:
-tokens:
-token_logprobs:
-top_logprobs:
+var openaiCompletionChunkSchema = import_v45.z.union([
+import_v45.z.object({
+id: import_v45.z.string().nullish(),
+created: import_v45.z.number().nullish(),
+model: import_v45.z.string().nullish(),
+choices: import_v45.z.array(
+import_v45.z.object({
+text: import_v45.z.string(),
+finish_reason: import_v45.z.string().nullish(),
+index: import_v45.z.number(),
+logprobs: import_v45.z.object({
+tokens: import_v45.z.array(import_v45.z.string()),
+token_logprobs: import_v45.z.array(import_v45.z.number()),
+top_logprobs: import_v45.z.array(import_v45.z.record(import_v45.z.string(), import_v45.z.number())).nullish()
 }).nullish()
 })
 ),
@@ -1622,22 +1503,22 @@ var openaiCompletionChunkSchema = import_v47.z.union([
 
 // src/embedding/openai-embedding-model.ts
 var import_provider5 = require("@ai-sdk/provider");
-var
-var
+var import_provider_utils5 = require("@ai-sdk/provider-utils");
+var import_v47 = require("zod/v4");
 
 // src/embedding/openai-embedding-options.ts
-var
-var openaiEmbeddingProviderOptions =
+var import_v46 = require("zod/v4");
+var openaiEmbeddingProviderOptions = import_v46.z.object({
 /**
 The number of dimensions the resulting output embeddings should have.
 Only supported in text-embedding-3 and later models.
 */
-dimensions:
+dimensions: import_v46.z.number().optional(),
 /**
 A unique identifier representing your end-user, which can help OpenAI to
 monitor and detect abuse. Learn more.
 */
-user:
+user: import_v46.z.string().optional()
 });
 
 // src/embedding/openai-embedding-model.ts
@@ -1667,7 +1548,7 @@ var OpenAIEmbeddingModel = class {
 values
 });
 }
-const openaiOptions = (_a = await (0,
+const openaiOptions = (_a = await (0, import_provider_utils5.parseProviderOptions)({
 provider: "openai",
 providerOptions,
 schema: openaiEmbeddingProviderOptions
@@ -1676,12 +1557,12 @@ var OpenAIEmbeddingModel = class {
 responseHeaders,
 value: response,
 rawValue
-} = await (0,
+} = await (0, import_provider_utils5.postJsonToApi)({
 url: this.config.url({
 path: "/embeddings",
 modelId: this.modelId
 }),
-headers: (0,
+headers: (0, import_provider_utils5.combineHeaders)(this.config.headers(), headers),
 body: {
 model: this.modelId,
 input: values,
@@ -1690,7 +1571,7 @@ var OpenAIEmbeddingModel = class {
 user: openaiOptions.user
 },
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
+successfulResponseHandler: (0, import_provider_utils5.createJsonResponseHandler)(
 openaiTextEmbeddingResponseSchema
 ),
 abortSignal,
@@ -1703,14 +1584,14 @@ var OpenAIEmbeddingModel = class {
 };
 }
 };
-var openaiTextEmbeddingResponseSchema =
-data:
-usage:
+var openaiTextEmbeddingResponseSchema = import_v47.z.object({
+data: import_v47.z.array(import_v47.z.object({ embedding: import_v47.z.array(import_v47.z.number()) })),
+usage: import_v47.z.object({ prompt_tokens: import_v47.z.number() }).nullish()
 });
 
 // src/image/openai-image-model.ts
-var
-var
+var import_provider_utils6 = require("@ai-sdk/provider-utils");
+var import_v48 = require("zod/v4");
 
 // src/image/openai-image-options.ts
 var modelMaxImagesPerCall = {
@@ -1757,12 +1638,12 @@ var OpenAIImageModel = class {
 warnings.push({ type: "unsupported-setting", setting: "seed" });
 }
 const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
-const { value: response, responseHeaders } = await (0,
+const { value: response, responseHeaders } = await (0, import_provider_utils6.postJsonToApi)({
 url: this.config.url({
 path: "/images/generations",
 modelId: this.modelId
 }),
-headers: (0,
+headers: (0, import_provider_utils6.combineHeaders)(this.config.headers(), headers),
 body: {
 model: this.modelId,
 prompt,
@@ -1772,7 +1653,7 @@ var OpenAIImageModel = class {
 ...!hasDefaultResponseFormat.has(this.modelId) ? { response_format: "b64_json" } : {}
 },
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
+successfulResponseHandler: (0, import_provider_utils6.createJsonResponseHandler)(
 openaiImageResponseSchema
 ),
 abortSignal,
@@ -1798,41 +1679,41 @@ var OpenAIImageModel = class {
 };
 }
 };
-var openaiImageResponseSchema =
-data:
-
+var openaiImageResponseSchema = import_v48.z.object({
+data: import_v48.z.array(
+import_v48.z.object({ b64_json: import_v48.z.string(), revised_prompt: import_v48.z.string().optional() })
 )
 });
 
 // src/transcription/openai-transcription-model.ts
-var
-var
+var import_provider_utils7 = require("@ai-sdk/provider-utils");
+var import_v410 = require("zod/v4");
 
 // src/transcription/openai-transcription-options.ts
-var
-var openAITranscriptionProviderOptions =
+var import_v49 = require("zod/v4");
+var openAITranscriptionProviderOptions = import_v49.z.object({
 /**
 * Additional information to include in the transcription response.
 */
-include:
+include: import_v49.z.array(import_v49.z.string()).optional(),
 /**
 * The language of the input audio in ISO-639-1 format.
 */
-language:
+language: import_v49.z.string().optional(),
 /**
 * An optional text to guide the model's style or continue a previous audio segment.
 */
-prompt:
+prompt: import_v49.z.string().optional(),
 /**
 * The sampling temperature, between 0 and 1.
 * @default 0
 */
-temperature:
+temperature: import_v49.z.number().min(0).max(1).default(0).optional(),
 /**
 * The timestamp granularities to populate for this transcription.
 * @default ['segment']
 */
-timestampGranularities:
+timestampGranularities: import_v49.z.array(import_v49.z.enum(["word", "segment"])).default(["segment"]).optional()
 });
 
 // src/transcription/openai-transcription-model.ts
@@ -1910,15 +1791,15 @@ var OpenAITranscriptionModel = class {
 providerOptions
 }) {
 const warnings = [];
-const openAIOptions = await (0,
+const openAIOptions = await (0, import_provider_utils7.parseProviderOptions)({
 provider: "openai",
 providerOptions,
 schema: openAITranscriptionProviderOptions
 });
 const formData = new FormData();
-const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([(0,
+const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([(0, import_provider_utils7.convertBase64ToUint8Array)(audio)]);
 formData.append("model", this.modelId);
-const fileExtension = (0,
+const fileExtension = (0, import_provider_utils7.mediaTypeToExtension)(mediaType);
 formData.append(
 "file",
 new File([blob], "audio", { type: mediaType }),
@@ -1963,15 +1844,15 @@ var OpenAITranscriptionModel = class {
 value: response,
 responseHeaders,
 rawValue: rawResponse
-} = await (0,
+} = await (0, import_provider_utils7.postFormDataToApi)({
 url: this.config.url({
 path: "/audio/transcriptions",
 modelId: this.modelId
 }),
-headers: (0,
+headers: (0, import_provider_utils7.combineHeaders)(this.config.headers(), options.headers),
 formData,
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
+successfulResponseHandler: (0, import_provider_utils7.createJsonResponseHandler)(
 openaiTranscriptionResponseSchema
 ),
 abortSignal: options.abortSignal,
@@ -2001,39 +1882,39 @@ var OpenAITranscriptionModel = class {
 };
 }
 };
-var openaiTranscriptionResponseSchema =
-text:
-language:
-duration:
-words:
-
-word:
-start:
-end:
+var openaiTranscriptionResponseSchema = import_v410.z.object({
+text: import_v410.z.string(),
+language: import_v410.z.string().nullish(),
+duration: import_v410.z.number().nullish(),
+words: import_v410.z.array(
+import_v410.z.object({
+word: import_v410.z.string(),
+start: import_v410.z.number(),
+end: import_v410.z.number()
 })
 ).nullish(),
-segments:
-
-id:
-seek:
-start:
-end:
-text:
-tokens:
-temperature:
-avg_logprob:
-compression_ratio:
-no_speech_prob:
+segments: import_v410.z.array(
+import_v410.z.object({
+id: import_v410.z.number(),
+seek: import_v410.z.number(),
+start: import_v410.z.number(),
+end: import_v410.z.number(),
+text: import_v410.z.string(),
+tokens: import_v410.z.array(import_v410.z.number()),
+temperature: import_v410.z.number(),
+avg_logprob: import_v410.z.number(),
+compression_ratio: import_v410.z.number(),
+no_speech_prob: import_v410.z.number()
 })
 ).nullish()
 });
 
 // src/speech/openai-speech-model.ts
-var
-var
-var OpenAIProviderOptionsSchema =
-instructions:
-speed:
+var import_provider_utils8 = require("@ai-sdk/provider-utils");
+var import_v411 = require("zod/v4");
+var OpenAIProviderOptionsSchema = import_v411.z.object({
+instructions: import_v411.z.string().nullish(),
+speed: import_v411.z.number().min(0.25).max(4).default(1).nullish()
 });
 var OpenAISpeechModel = class {
 constructor(modelId, config) {
@@ -2054,7 +1935,7 @@ var OpenAISpeechModel = class {
 providerOptions
 }) {
 const warnings = [];
-const openAIOptions = await (0,
+const openAIOptions = await (0, import_provider_utils8.parseProviderOptions)({
 provider: "openai",
 providerOptions,
 schema: OpenAIProviderOptionsSchema
@@ -2107,15 +1988,15 @@ var OpenAISpeechModel = class {
 value: audio,
 responseHeaders,
 rawValue: rawResponse
-} = await (0,
+} = await (0, import_provider_utils8.postJsonToApi)({
 url: this.config.url({
 path: "/audio/speech",
 modelId: this.modelId
 }),
-headers: (0,
+headers: (0, import_provider_utils8.combineHeaders)(this.config.headers(), options.headers),
 body: requestBody,
 failedResponseHandler: openaiFailedResponseHandler,
-successfulResponseHandler: (0,
+successfulResponseHandler: (0, import_provider_utils8.createBinaryResponseHandler)(),
 abortSignal: options.abortSignal,
 fetch: this.config.fetch
 });
@@ -2142,8 +2023,8 @@ var import_v418 = require("zod/v4");
 
 // src/responses/convert-to-openai-responses-input.ts
 var import_provider6 = require("@ai-sdk/provider");
-var
-var
+var import_provider_utils9 = require("@ai-sdk/provider-utils");
+var import_v412 = require("zod/v4");
 function isFileId(data, prefixes) {
 if (!prefixes) return false;
 return prefixes.some((prefix) => data.startsWith(prefix));
@@ -2200,7 +2081,7 @@ async function convertToOpenAIResponsesInput({
 return {
 type: "input_image",
 ...part.data instanceof URL ? { image_url: part.data.toString() } : typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
-image_url: `data:${mediaType};base64,${(0,
+image_url: `data:${mediaType};base64,${(0, import_provider_utils9.convertToBase64)(part.data)}`
 },
 detail: (_b2 = (_a2 = part.providerOptions) == null ? void 0 : _a2.openai) == null ? void 0 : _b2.imageDetail
 };
@@ -2215,7 +2096,7 @@ async function convertToOpenAIResponsesInput({
 type: "input_file",
 ...typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
 filename: (_c2 = part.filename) != null ? _c2 : `part-${index}.pdf`,
-file_data: `data:application/pdf;base64,${(0,
+file_data: `data:application/pdf;base64,${(0, import_provider_utils9.convertToBase64)(part.data)}`
 }
 };
 } else {
@@ -2268,7 +2149,7 @@ async function convertToOpenAIResponsesInput({
 break;
 }
 case "reasoning": {
-const providerOptions = await (0,
+const providerOptions = await (0, import_provider_utils9.parseProviderOptions)({
 provider: "openai",
 providerOptions: part.providerOptions,
 schema: openaiResponsesReasoningProviderOptionsSchema
@@ -2339,9 +2220,9 @@ async function convertToOpenAIResponsesInput({
 }
 return { input, warnings };
 }
-var openaiResponsesReasoningProviderOptionsSchema =
-itemId:
-reasoningEncryptedContent:
+var openaiResponsesReasoningProviderOptionsSchema = import_v412.z.object({
+itemId: import_v412.z.string().nullish(),
+reasoningEncryptedContent: import_v412.z.string().nullish()
 });
 
 // src/responses/map-openai-responses-finish-reason.ts
@@ -2366,54 +2247,155 @@ function mapOpenAIResponseFinishReason({
 var import_provider7 = require("@ai-sdk/provider");
 
 // src/tool/code-interpreter.ts
-var
-var
-var codeInterpreterInputSchema =
-code:
-containerId:
+var import_provider_utils10 = require("@ai-sdk/provider-utils");
+var import_v413 = require("zod/v4");
+var codeInterpreterInputSchema = import_v413.z.object({
+code: import_v413.z.string().nullish(),
+containerId: import_v413.z.string()
 });
-var codeInterpreterOutputSchema =
-outputs:
-
-
-
+var codeInterpreterOutputSchema = import_v413.z.object({
+outputs: import_v413.z.array(
+import_v413.z.discriminatedUnion("type", [
+import_v413.z.object({ type: import_v413.z.literal("logs"), logs: import_v413.z.string() }),
+import_v413.z.object({ type: import_v413.z.literal("image"), url: import_v413.z.string() })
 ])
 ).nullish()
 });
-var codeInterpreterArgsSchema =
-container:
-
-
-fileIds:
+var codeInterpreterArgsSchema = import_v413.z.object({
+container: import_v413.z.union([
+import_v413.z.string(),
+import_v413.z.object({
+fileIds: import_v413.z.array(import_v413.z.string()).optional()
 })
 ]).optional()
 });
-var codeInterpreterToolFactory = (0,
+var codeInterpreterToolFactory = (0, import_provider_utils10.createProviderDefinedToolFactoryWithOutputSchema)({
 id: "openai.code_interpreter",
 name: "code_interpreter",
 inputSchema: codeInterpreterInputSchema,
 outputSchema: codeInterpreterOutputSchema
 });
 
+// src/tool/file-search.ts
+var import_provider_utils11 = require("@ai-sdk/provider-utils");
+var import_v414 = require("zod/v4");
+var comparisonFilterSchema = import_v414.z.object({
+key: import_v414.z.string(),
+type: import_v414.z.enum(["eq", "ne", "gt", "gte", "lt", "lte"]),
+value: import_v414.z.union([import_v414.z.string(), import_v414.z.number(), import_v414.z.boolean()])
+});
+var compoundFilterSchema = import_v414.z.object({
+type: import_v414.z.enum(["and", "or"]),
+filters: import_v414.z.array(
+import_v414.z.union([comparisonFilterSchema, import_v414.z.lazy(() => compoundFilterSchema)])
+)
+});
+var fileSearchArgsSchema = import_v414.z.object({
+vectorStoreIds: import_v414.z.array(import_v414.z.string()),
+maxNumResults: import_v414.z.number().optional(),
+ranking: import_v414.z.object({
+ranker: import_v414.z.string().optional(),
+scoreThreshold: import_v414.z.number().optional()
+}).optional(),
+filters: import_v414.z.union([comparisonFilterSchema, compoundFilterSchema]).optional()
+});
+var fileSearchOutputSchema = import_v414.z.object({
+queries: import_v414.z.array(import_v414.z.string()),
+results: import_v414.z.array(
+import_v414.z.object({
+attributes: import_v414.z.record(import_v414.z.string(), import_v414.z.unknown()),
+fileId: import_v414.z.string(),
+filename: import_v414.z.string(),
+score: import_v414.z.number(),
+text: import_v414.z.string()
+})
+).nullable()
+});
+var fileSearch = (0, import_provider_utils11.createProviderDefinedToolFactoryWithOutputSchema)({
+id: "openai.file_search",
+name: "file_search",
+inputSchema: import_v414.z.object({}),
+outputSchema: fileSearchOutputSchema
+});
+
 // src/tool/web-search.ts
+var import_provider_utils12 = require("@ai-sdk/provider-utils");
+var import_v415 = require("zod/v4");
+var webSearchArgsSchema = import_v415.z.object({
+filters: import_v415.z.object({
+allowedDomains: import_v415.z.array(import_v415.z.string()).optional()
+}).optional(),
+searchContextSize: import_v415.z.enum(["low", "medium", "high"]).optional(),
+userLocation: import_v415.z.object({
+type: import_v415.z.literal("approximate"),
+country: import_v415.z.string().optional(),
+city: import_v415.z.string().optional(),
+region: import_v415.z.string().optional(),
+timezone: import_v415.z.string().optional()
+}).optional()
+});
+var webSearchToolFactory = (0, import_provider_utils12.createProviderDefinedToolFactory)({
+id: "openai.web_search",
+name: "web_search",
+inputSchema: import_v415.z.object({
+action: import_v415.z.discriminatedUnion("type", [
+import_v415.z.object({
+type: import_v415.z.literal("search"),
+query: import_v415.z.string().nullish()
+}),
+import_v415.z.object({
+type: import_v415.z.literal("open_page"),
+url: import_v415.z.string()
+}),
+import_v415.z.object({
+type: import_v415.z.literal("find"),
+url: import_v415.z.string(),
+pattern: import_v415.z.string()
+})
+]).nullish()
+})
+});
+
+// src/tool/web-search-preview.ts
 var import_provider_utils13 = require("@ai-sdk/provider-utils");
 var import_v416 = require("zod/v4");
-var
-
-
-
+var webSearchPreviewArgsSchema = import_v416.z.object({
+/**
+* Search context size to use for the web search.
+* - high: Most comprehensive context, highest cost, slower response
+* - medium: Balanced context, cost, and latency (default)
+* - low: Least context, lowest cost, fastest response
+*/
 searchContextSize: import_v416.z.enum(["low", "medium", "high"]).optional(),
+/**
+* User location information to provide geographically relevant search results.
+*/
 userLocation: import_v416.z.object({
+/**
+* Type of location (always 'approximate')
+*/
 type: import_v416.z.literal("approximate"),
+/**
+* Two-letter ISO country code (e.g., 'US', 'GB')
+*/
 country: import_v416.z.string().optional(),
+/**
+* City name (free text, e.g., 'Minneapolis')
+*/
 city: import_v416.z.string().optional(),
+/**
+* Region name (free text, e.g., 'Minnesota')
+*/
 region: import_v416.z.string().optional(),
+/**
+* IANA timezone (e.g., 'America/Chicago')
+*/
 timezone: import_v416.z.string().optional()
 }).optional()
 });
-var
-id: "openai.
-name: "
+var webSearchPreview = (0, import_provider_utils13.createProviderDefinedToolFactory)({
+id: "openai.web_search_preview",
+name: "web_search_preview",
 inputSchema: import_v416.z.object({
 action: import_v416.z.discriminatedUnion("type", [
 import_v416.z.object({
@@ -2491,7 +2473,10 @@ function prepareResponsesTools({
 type: "file_search",
 vector_store_ids: args.vectorStoreIds,
 max_num_results: args.maxNumResults,
-ranking_options: args.ranking ? {
+ranking_options: args.ranking ? {
+ranker: args.ranking.ranker,
+score_threshold: args.ranking.scoreThreshold
+} : void 0,
 filters: args.filters
 });
 break;
@@ -2595,6 +2580,20 @@ var webSearchCallItem = import_v418.z.object({
 })
 ]).nullish()
 });
+var fileSearchCallItem = import_v418.z.object({
+type: import_v418.z.literal("file_search_call"),
+id: import_v418.z.string(),
+queries: import_v418.z.array(import_v418.z.string()),
+results: import_v418.z.array(
+import_v418.z.object({
+attributes: import_v418.z.record(import_v418.z.string(), import_v418.z.unknown()),
+file_id: import_v418.z.string(),
+filename: import_v418.z.string(),
+score: import_v418.z.number(),
+text: import_v418.z.string()
+})
+).nullish()
+});
 var codeInterpreterCallItem = import_v418.z.object({
 type: import_v418.z.literal("code_interpreter_call"),
 id: import_v418.z.string(),
@@ -2653,7 +2652,7 @@ var OpenAIResponsesLanguageModel = class {
 toolChoice,
 responseFormat
 }) {
-var _a, _b, _c, _d
+var _a, _b, _c, _d;
 const warnings = [];
 const modelConfig = getResponsesModelConfig(this.modelId);
 if (topK != null) {
@@ -2691,16 +2690,27 @@ var OpenAIResponsesLanguageModel = class {
 warnings.push(...inputWarnings);
 const strictJsonSchema = (_b = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _b : false;
 let include = openaiOptions == null ? void 0 : openaiOptions.include;
+function addInclude(key) {
+include = include != null ? [...include, key] : [key];
+}
+function hasOpenAITool(id) {
+return (tools == null ? void 0 : tools.find(
+(tool) => tool.type === "provider-defined" && tool.id === id
+)) != null;
+}
 const topLogprobs = typeof (openaiOptions == null ? void 0 : openaiOptions.logprobs) === "number" ? openaiOptions == null ? void 0 : openaiOptions.logprobs : (openaiOptions == null ? void 0 : openaiOptions.logprobs) === true ? TOP_LOGPROBS_MAX : void 0;
-
+if (topLogprobs) {
+addInclude("message.output_text.logprobs");
+}
 const webSearchToolName = (_c = tools == null ? void 0 : tools.find(
 (tool) => tool.type === "provider-defined" && (tool.id === "openai.web_search" || tool.id === "openai.web_search_preview")
 )) == null ? void 0 : _c.name;
-
-
-
-))
-
+if (webSearchToolName) {
+addInclude("web_search_call.action.sources");
+}
+if (hasOpenAITool("openai.code_interpreter")) {
+addInclude("code_interpreter_call.outputs");
+}
 const baseArgs = {
 model: this.modelId,
 input,
@@ -2713,7 +2723,7 @@ var OpenAIResponsesLanguageModel = class {
 format: responseFormat.schema != null ? {
 type: "json_schema",
 strict: strictJsonSchema,
-name: (
+name: (_d = responseFormat.name) != null ? _d : "response",
 description: responseFormat.description,
 schema: responseFormat.schema
 } : { type: "json_object" }
@@ -2820,7 +2830,7 @@ var OpenAIResponsesLanguageModel = class {
 };
 }
 async doGenerate(options) {
-var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
+var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s;
 const {
 args: body,
 warnings,
@@ -2885,6 +2895,8 @@ var OpenAIResponsesLanguageModel = class {
 })
 )
 }),
+webSearchCallItem,
+fileSearchCallItem,
 codeInterpreterCallItem,
 imageGenerationCallItem,
 import_v418.z.object({
@@ -2894,28 +2906,11 @@ var OpenAIResponsesLanguageModel = class {
 arguments: import_v418.z.string(),
 id: import_v418.z.string()
 }),
-webSearchCallItem,
 import_v418.z.object({
 type: import_v418.z.literal("computer_call"),
 id: import_v418.z.string(),
 status: import_v418.z.string().optional()
 }),
-import_v418.z.object({
-type: import_v418.z.literal("file_search_call"),
-id: import_v418.z.string(),
-status: import_v418.z.string().optional(),
-queries: import_v418.z.array(import_v418.z.string()).nullish(),
-results: import_v418.z.array(
-import_v418.z.object({
-attributes: import_v418.z.object({
-file_id: import_v418.z.string(),
-filename: import_v418.z.string(),
-score: import_v418.z.number(),
-text: import_v418.z.string()
-})
-})
-).nullish()
-}),
 import_v418.z.object({
 type: import_v418.z.literal("reasoning"),
 id: import_v418.z.string(),
@@ -3084,7 +3079,7 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
3084
3079
|
type: "tool-call",
|
|
3085
3080
|
toolCallId: part.id,
|
|
3086
3081
|
toolName: "file_search",
|
|
3087
|
-
input: "",
|
|
3082
|
+
input: "{}",
|
|
3088
3083
|
providerExecuted: true
|
|
3089
3084
|
});
|
|
3090
3085
|
content.push({
|
|
@@ -3092,10 +3087,14 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
3092
3087
|
toolCallId: part.id,
|
|
3093
3088
|
toolName: "file_search",
|
|
3094
3089
|
result: {
|
|
3095
|
-
|
|
3096
|
-
|
|
3097
|
-
|
|
3098
|
-
|
|
3090
|
+
queries: part.queries,
|
|
3091
|
+
results: (_n = (_m = part.results) == null ? void 0 : _m.map((result) => ({
|
|
3092
|
+
attributes: result.attributes,
|
|
3093
|
+
fileId: result.file_id,
|
|
3094
|
+
filename: result.filename,
|
|
3095
|
+
score: result.score,
|
|
3096
|
+
text: result.text
|
|
3097
|
+
}))) != null ? _n : null
|
|
3099
3098
|
},
|
|
3100
3099
|
providerExecuted: true
|
|
3101
3100
|
});
|
|
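
A minimal TypeScript sketch of the file_search tool result shape produced by the mapping above (snake_case API fields converted to camelCase); the type names are assumptions of this sketch, not exports of @ai-sdk/openai:

// Sketch only: approximate shape of the `result` object built above.
// Type names are hypothetical; the package does not export them.
type FileSearchResultSketch = {
  attributes: unknown;   // passed through from the API unchanged
  fileId: string;        // mapped from the API's `file_id`
  filename: string;
  score: number;
  text: string;
};

type FileSearchToolResultSketch = {
  queries: string[] | null | undefined;      // `part.queries` as returned by the API
  results: FileSearchResultSketch[] | null;  // null when the API omits `results`
};
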
@@ -3137,15 +3136,15 @@ var OpenAIResponsesLanguageModel = class {
     return {
       content,
       finishReason: mapOpenAIResponseFinishReason({
-        finishReason: (
+        finishReason: (_o = response.incomplete_details) == null ? void 0 : _o.reason,
         hasFunctionCall
       }),
       usage: {
         inputTokens: response.usage.input_tokens,
         outputTokens: response.usage.output_tokens,
         totalTokens: response.usage.input_tokens + response.usage.output_tokens,
-        reasoningTokens: (
-        cachedInputTokens: (
+        reasoningTokens: (_q = (_p = response.usage.output_tokens_details) == null ? void 0 : _p.reasoning_tokens) != null ? _q : void 0,
+        cachedInputTokens: (_s = (_r = response.usage.input_tokens_details) == null ? void 0 : _r.cached_tokens) != null ? _s : void 0
       },
       request: { body },
       response: {
@@ -3202,7 +3201,7 @@ var OpenAIResponsesLanguageModel = class {
         controller.enqueue({ type: "stream-start", warnings });
       },
       transform(chunk, controller) {
-        var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u;
+        var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w;
         if (options.includeRawChunks) {
           controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
         }
@@ -3244,14 +3243,12 @@ var OpenAIResponsesLanguageModel = class {
             toolName: "computer_use"
           });
         } else if (value.item.type === "file_search_call") {
-          ongoingToolCalls[value.output_index] = {
-            toolName: "file_search",
-            toolCallId: value.item.id
-          };
           controller.enqueue({
-            type: "tool-
-
-            toolName: "file_search"
+            type: "tool-call",
+            toolCallId: value.item.id,
+            toolName: "file_search",
+            input: "{}",
+            providerExecuted: true
           });
         } else if (value.item.type === "image_generation_call") {
           controller.enqueue({
@@ -3351,26 +3348,19 @@ var OpenAIResponsesLanguageModel = class {
           });
         } else if (value.item.type === "file_search_call") {
           ongoingToolCalls[value.output_index] = void 0;
-          controller.enqueue({
-            type: "tool-input-end",
-            id: value.item.id
-          });
-          controller.enqueue({
-            type: "tool-call",
-            toolCallId: value.item.id,
-            toolName: "file_search",
-            input: "",
-            providerExecuted: true
-          });
           controller.enqueue({
             type: "tool-result",
             toolCallId: value.item.id,
             toolName: "file_search",
             result: {
-
-
-
-
+              queries: value.item.queries,
+              results: (_c = (_b = value.item.results) == null ? void 0 : _b.map((result) => ({
+                attributes: result.attributes,
+                fileId: result.file_id,
+                filename: result.filename,
+                score: result.score,
+                text: result.text
+              }))) != null ? _c : null
             },
             providerExecuted: true
           });
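
A minimal sketch of the two stream parts a provider-executed file_search call now produces, based on the two streaming hunks above (a complete tool-call when the output item is added, a tool-result with the mapped results when it is done); ids and values here are illustrative examples, not data from the package:

// Sketch only: example stream parts; ids and result values are made up.
const fileSearchToolCallPart = {
  type: "tool-call",
  toolCallId: "fs_example_id",   // value.item.id in the code above
  toolName: "file_search",
  input: "{}",                   // an empty JSON object string rather than ""
  providerExecuted: true,
};

const fileSearchToolResultPart = {
  type: "tool-result",
  toolCallId: "fs_example_id",
  toolName: "file_search",
  result: {
    queries: ["example query"],  // value.item.queries
    results: null,               // or an array of { attributes, fileId, filename, score, text }
  },
  providerExecuted: true,
};
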
@@ -3418,7 +3408,7 @@ var OpenAIResponsesLanguageModel = class {
             providerMetadata: {
               openai: {
                 itemId: value.item.id,
-                reasoningEncryptedContent: (
+                reasoningEncryptedContent: (_d = value.item.encrypted_content) != null ? _d : null
               }
             }
           });
@@ -3448,12 +3438,12 @@ var OpenAIResponsesLanguageModel = class {
             id: value.item_id,
             delta: value.delta
           });
-          if (((
+          if (((_f = (_e = options.providerOptions) == null ? void 0 : _e.openai) == null ? void 0 : _f.logprobs) && value.logprobs) {
             logprobs.push(value.logprobs);
           }
         } else if (isResponseReasoningSummaryPartAddedChunk(value)) {
           if (value.summary_index > 0) {
-            (
+            (_g = activeReasoning[value.item_id]) == null ? void 0 : _g.summaryParts.push(
               value.summary_index
             );
             controller.enqueue({
@@ -3462,7 +3452,7 @@ var OpenAIResponsesLanguageModel = class {
               providerMetadata: {
                 openai: {
                   itemId: value.item_id,
-                  reasoningEncryptedContent: (
+                  reasoningEncryptedContent: (_i = (_h = activeReasoning[value.item_id]) == null ? void 0 : _h.encryptedContent) != null ? _i : null
                 }
               }
             });
@@ -3480,14 +3470,14 @@ var OpenAIResponsesLanguageModel = class {
           });
         } else if (isResponseFinishedChunk(value)) {
           finishReason = mapOpenAIResponseFinishReason({
-            finishReason: (
+            finishReason: (_j = value.response.incomplete_details) == null ? void 0 : _j.reason,
             hasFunctionCall
           });
           usage.inputTokens = value.response.usage.input_tokens;
           usage.outputTokens = value.response.usage.output_tokens;
           usage.totalTokens = value.response.usage.input_tokens + value.response.usage.output_tokens;
-          usage.reasoningTokens = (
-          usage.cachedInputTokens = (
+          usage.reasoningTokens = (_l = (_k = value.response.usage.output_tokens_details) == null ? void 0 : _k.reasoning_tokens) != null ? _l : void 0;
+          usage.cachedInputTokens = (_n = (_m = value.response.usage.input_tokens_details) == null ? void 0 : _m.cached_tokens) != null ? _n : void 0;
           if (typeof value.response.service_tier === "string") {
             serviceTier = value.response.service_tier;
           }
@@ -3496,7 +3486,7 @@ var OpenAIResponsesLanguageModel = class {
           controller.enqueue({
             type: "source",
             sourceType: "url",
-            id: (
+            id: (_q = (_p = (_o = self.config).generateId) == null ? void 0 : _p.call(_o)) != null ? _q : (0, import_provider_utils15.generateId)(),
             url: value.annotation.url,
             title: value.annotation.title
           });
@@ -3504,10 +3494,10 @@ var OpenAIResponsesLanguageModel = class {
           controller.enqueue({
             type: "source",
             sourceType: "document",
-            id: (
+            id: (_t = (_s = (_r = self.config).generateId) == null ? void 0 : _s.call(_r)) != null ? _t : (0, import_provider_utils15.generateId)(),
             mediaType: "text/plain",
-            title: (
-            filename: (
+            title: (_v = (_u = value.annotation.quote) != null ? _u : value.annotation.filename) != null ? _v : "Document",
+            filename: (_w = value.annotation.filename) != null ? _w : value.annotation.file_id
           });
         }
       } else if (isErrorChunk(value)) {
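
The compiled fallback chains for document sources in the hunk above reduce to plain nullish coalescing; a short sketch, under the assumption that `annotation` stands in for `value.annotation` with the same optional fields:

// Sketch only: equivalent of the compiled fallbacks above, written with ??.
declare const annotation: {
  quote?: string | null;
  filename?: string | null;
  file_id?: string | null;
};

const title = annotation.quote ?? annotation.filename ?? "Document";
const filename = annotation.filename ?? annotation.file_id;
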
@@ -3612,19 +3602,7 @@ var responseOutputItemAddedSchema = import_v418.z.object({
   }),
   import_v418.z.object({
     type: import_v418.z.literal("file_search_call"),
-    id: import_v418.z.string()
-    status: import_v418.z.string(),
-    queries: import_v418.z.array(import_v418.z.string()).nullish(),
-    results: import_v418.z.array(
-      import_v418.z.object({
-        attributes: import_v418.z.object({
-          file_id: import_v418.z.string(),
-          filename: import_v418.z.string(),
-          score: import_v418.z.number(),
-          text: import_v418.z.string()
-        })
-      })
-    ).optional()
+    id: import_v418.z.string()
   }),
   import_v418.z.object({
     type: import_v418.z.literal("image_generation_call"),
@@ -3656,26 +3634,11 @@ var responseOutputItemDoneSchema = import_v418.z.object({
   codeInterpreterCallItem,
   imageGenerationCallItem,
   webSearchCallItem,
+  fileSearchCallItem,
   import_v418.z.object({
     type: import_v418.z.literal("computer_call"),
     id: import_v418.z.string(),
     status: import_v418.z.literal("completed")
-  }),
-  import_v418.z.object({
-    type: import_v418.z.literal("file_search_call"),
-    id: import_v418.z.string(),
-    status: import_v418.z.literal("completed"),
-    queries: import_v418.z.array(import_v418.z.string()).nullish(),
-    results: import_v418.z.array(
-      import_v418.z.object({
-        attributes: import_v418.z.object({
-          file_id: import_v418.z.string(),
-          filename: import_v418.z.string(),
-          score: import_v418.z.number(),
-          text: import_v418.z.string()
-        })
-      })
-    ).nullish()
   })
 ])
 });