@ai-sdk/openai 2.0.30 → 2.0.32
This diff reflects the contents of publicly released package versions as they appear in their public registries and is provided for informational purposes only.
- package/CHANGELOG.md +15 -0
- package/dist/index.d.mts +85 -23
- package/dist/index.d.ts +85 -23
- package/dist/index.js +773 -690
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +712 -629
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +13 -12
- package/dist/internal/index.d.ts +13 -12
- package/dist/internal/index.js +760 -694
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +701 -635
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
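
Most of the `dist` churn below is a re-bundle (esbuild import aliases such as `import_provider_utils3` and `import_v43` are renumbered), but the diff also adds provider-defined tools for the Responses path: `openai.web_search`, `openai.image_generation`, and a reworked `openai.file_search` whose `vectorStoreIds` argument is now required. A minimal usage sketch follows, assuming these factories are exposed on the provider's `tools` namespace as in earlier releases; the model id, vector store id, and domain below are placeholders, not values taken from this diff.

```ts
// Sketch only: assumes openai.tools.webSearch and openai.tools.fileSearch wrap the
// provider-defined tool factories ("openai.web_search", "openai.file_search") shown in this diff.
import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';

const result = await generateText({
  model: openai.responses('gpt-4o-mini'), // placeholder model id
  prompt: 'Summarize the latest release notes.',
  tools: {
    // webSearchArgsSchema: searchContextSize, filters.allowedDomains, userLocation
    web_search: openai.tools.webSearch({
      searchContextSize: 'medium',
      filters: { allowedDomains: ['example.com'] },
    }),
    // fileSearchArgsSchema: vectorStoreIds is required in 2.0.32 (it was optional in 2.0.30)
    file_search: openai.tools.fileSearch({
      vectorStoreIds: ['vs_123'], // placeholder vector store id
      maxNumResults: 5,
    }),
  },
});
console.log(result.text);
```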
package/dist/internal/index.js
CHANGED
@@ -38,8 +38,8 @@ module.exports = __toCommonJS(internal_exports);
 
 // src/chat/openai-chat-language-model.ts
 var import_provider3 = require("@ai-sdk/provider");
-var
-var
+var import_provider_utils3 = require("@ai-sdk/provider-utils");
+var import_v43 = require("zod/v4");
 
 // src/openai-error.ts
 var import_v4 = require("zod/v4");
@@ -360,98 +360,6 @@ var openaiProviderOptions = import_v42.z.object({
 
 // src/chat/openai-chat-prepare-tools.ts
 var import_provider2 = require("@ai-sdk/provider");
-
-// src/tool/file-search.ts
-var import_provider_utils3 = require("@ai-sdk/provider-utils");
-var import_v43 = require("zod/v4");
-var comparisonFilterSchema = import_v43.z.object({
-  key: import_v43.z.string(),
-  type: import_v43.z.enum(["eq", "ne", "gt", "gte", "lt", "lte"]),
-  value: import_v43.z.union([import_v43.z.string(), import_v43.z.number(), import_v43.z.boolean()])
-});
-var compoundFilterSchema = import_v43.z.object({
-  type: import_v43.z.enum(["and", "or"]),
-  filters: import_v43.z.array(
-    import_v43.z.union([comparisonFilterSchema, import_v43.z.lazy(() => compoundFilterSchema)])
-  )
-});
-var filtersSchema = import_v43.z.union([comparisonFilterSchema, compoundFilterSchema]);
-var fileSearchArgsSchema = import_v43.z.object({
-  vectorStoreIds: import_v43.z.array(import_v43.z.string()).optional(),
-  maxNumResults: import_v43.z.number().optional(),
-  ranking: import_v43.z.object({
-    ranker: import_v43.z.enum(["auto", "default-2024-08-21"]).optional()
-  }).optional(),
-  filters: filtersSchema.optional()
-});
-var fileSearch = (0, import_provider_utils3.createProviderDefinedToolFactory)({
-  id: "openai.file_search",
-  name: "file_search",
-  inputSchema: import_v43.z.object({
-    query: import_v43.z.string()
-  })
-});
-
-// src/tool/web-search-preview.ts
-var import_provider_utils4 = require("@ai-sdk/provider-utils");
-var import_v44 = require("zod/v4");
-var webSearchPreviewArgsSchema = import_v44.z.object({
-  /**
-   * Search context size to use for the web search.
-   * - high: Most comprehensive context, highest cost, slower response
-   * - medium: Balanced context, cost, and latency (default)
-   * - low: Least context, lowest cost, fastest response
-   */
-  searchContextSize: import_v44.z.enum(["low", "medium", "high"]).optional(),
-  /**
-   * User location information to provide geographically relevant search results.
-   */
-  userLocation: import_v44.z.object({
-    /**
-     * Type of location (always 'approximate')
-     */
-    type: import_v44.z.literal("approximate"),
-    /**
-     * Two-letter ISO country code (e.g., 'US', 'GB')
-     */
-    country: import_v44.z.string().optional(),
-    /**
-     * City name (free text, e.g., 'Minneapolis')
-     */
-    city: import_v44.z.string().optional(),
-    /**
-     * Region name (free text, e.g., 'Minnesota')
-     */
-    region: import_v44.z.string().optional(),
-    /**
-     * IANA timezone (e.g., 'America/Chicago')
-     */
-    timezone: import_v44.z.string().optional()
-  }).optional()
-});
-var webSearchPreview = (0, import_provider_utils4.createProviderDefinedToolFactory)({
-  id: "openai.web_search_preview",
-  name: "web_search_preview",
-  inputSchema: import_v44.z.object({
-    action: import_v44.z.discriminatedUnion("type", [
-      import_v44.z.object({
-        type: import_v44.z.literal("search"),
-        query: import_v44.z.string().nullish()
-      }),
-      import_v44.z.object({
-        type: import_v44.z.literal("open_page"),
-        url: import_v44.z.string()
-      }),
-      import_v44.z.object({
-        type: import_v44.z.literal("find"),
-        url: import_v44.z.string(),
-        pattern: import_v44.z.string()
-      })
-    ]).nullish()
-  })
-});
-
-// src/chat/openai-chat-prepare-tools.ts
 function prepareChatTools({
   tools,
   toolChoice,
@@ -477,33 +385,6 @@ function prepareChatTools({
         }
       });
       break;
-    case "provider-defined":
-      switch (tool.id) {
-        case "openai.file_search": {
-          const args = fileSearchArgsSchema.parse(tool.args);
-          openaiTools.push({
-            type: "file_search",
-            vector_store_ids: args.vectorStoreIds,
-            max_num_results: args.maxNumResults,
-            ranking_options: args.ranking ? { ranker: args.ranking.ranker } : void 0,
-            filters: args.filters
-          });
-          break;
-        }
-        case "openai.web_search_preview": {
-          const args = webSearchPreviewArgsSchema.parse(tool.args);
-          openaiTools.push({
-            type: "web_search_preview",
-            search_context_size: args.searchContextSize,
-            user_location: args.userLocation
-          });
-          break;
-        }
-        default:
-          toolWarnings.push({ type: "unsupported-tool", tool });
-          break;
-      }
-      break;
     default:
       toolWarnings.push({ type: "unsupported-tool", tool });
       break;
@@ -568,7 +449,7 @@ var OpenAIChatLanguageModel = class {
   }) {
     var _a, _b, _c, _d;
     const warnings = [];
-    const openaiOptions = (_a = await (0,
+    const openaiOptions = (_a = await (0, import_provider_utils3.parseProviderOptions)({
       provider: "openai",
       providerOptions,
       schema: openaiProviderOptions
@@ -747,15 +628,15 @@ var OpenAIChatLanguageModel = class {
       responseHeaders,
       value: response,
       rawValue: rawResponse
-    } = await (0,
+    } = await (0, import_provider_utils3.postJsonToApi)({
      url: this.config.url({
        path: "/chat/completions",
        modelId: this.modelId
      }),
-      headers: (0,
+      headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
      body,
      failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
        openaiChatResponseSchema
      ),
      abortSignal: options.abortSignal,
@@ -770,7 +651,7 @@ var OpenAIChatLanguageModel = class {
     for (const toolCall of (_a = choice.message.tool_calls) != null ? _a : []) {
       content.push({
         type: "tool-call",
-        toolCallId: (_b = toolCall.id) != null ? _b : (0,
+        toolCallId: (_b = toolCall.id) != null ? _b : (0, import_provider_utils3.generateId)(),
         toolName: toolCall.function.name,
         input: toolCall.function.arguments
       });
@@ -779,7 +660,7 @@ var OpenAIChatLanguageModel = class {
       content.push({
         type: "source",
         sourceType: "url",
-        id: (0,
+        id: (0, import_provider_utils3.generateId)(),
         url: annotation.url,
         title: annotation.title
       });
@@ -825,15 +706,15 @@ var OpenAIChatLanguageModel = class {
         include_usage: true
       }
     };
-    const { responseHeaders, value: response } = await (0,
+    const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
      url: this.config.url({
        path: "/chat/completions",
        modelId: this.modelId
      }),
-      headers: (0,
+      headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
      body,
      failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(
        openaiChatChunkSchema
      ),
      abortSignal: options.abortSignal,
@@ -958,14 +839,14 @@ var OpenAIChatLanguageModel = class {
                 delta: toolCall2.function.arguments
               });
             }
-            if ((0,
+            if ((0, import_provider_utils3.isParsableJson)(toolCall2.function.arguments)) {
              controller.enqueue({
                type: "tool-input-end",
                id: toolCall2.id
              });
              controller.enqueue({
                type: "tool-call",
-                toolCallId: (_q = toolCall2.id) != null ? _q : (0,
+                toolCallId: (_q = toolCall2.id) != null ? _q : (0, import_provider_utils3.generateId)(),
                toolName: toolCall2.function.name,
                input: toolCall2.function.arguments
              });
@@ -986,14 +867,14 @@ var OpenAIChatLanguageModel = class {
                 id: toolCall.id,
                 delta: (_u = toolCallDelta.function.arguments) != null ? _u : ""
               });
-              if (((_v = toolCall.function) == null ? void 0 : _v.name) != null && ((_w = toolCall.function) == null ? void 0 : _w.arguments) != null && (0,
+              if (((_v = toolCall.function) == null ? void 0 : _v.name) != null && ((_w = toolCall.function) == null ? void 0 : _w.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
                controller.enqueue({
                  type: "tool-input-end",
                  id: toolCall.id
                });
                controller.enqueue({
                  type: "tool-call",
-                  toolCallId: (_x = toolCall.id) != null ? _x : (0,
+                  toolCallId: (_x = toolCall.id) != null ? _x : (0, import_provider_utils3.generateId)(),
                  toolName: toolCall.function.name,
                  input: toolCall.function.arguments
                });
@@ -1006,7 +887,7 @@ var OpenAIChatLanguageModel = class {
             controller.enqueue({
               type: "source",
               sourceType: "url",
-              id: (0,
+              id: (0, import_provider_utils3.generateId)(),
               url: annotation.url,
               title: annotation.title
             });
@@ -1031,115 +912,115 @@ var OpenAIChatLanguageModel = class {
     };
   }
 };
-var openaiTokenUsageSchema =
-  prompt_tokens:
-  completion_tokens:
-  total_tokens:
-  prompt_tokens_details:
-    cached_tokens:
+var openaiTokenUsageSchema = import_v43.z.object({
+  prompt_tokens: import_v43.z.number().nullish(),
+  completion_tokens: import_v43.z.number().nullish(),
+  total_tokens: import_v43.z.number().nullish(),
+  prompt_tokens_details: import_v43.z.object({
+    cached_tokens: import_v43.z.number().nullish()
   }).nullish(),
-  completion_tokens_details:
-    reasoning_tokens:
-    accepted_prediction_tokens:
-    rejected_prediction_tokens:
+  completion_tokens_details: import_v43.z.object({
+    reasoning_tokens: import_v43.z.number().nullish(),
+    accepted_prediction_tokens: import_v43.z.number().nullish(),
+    rejected_prediction_tokens: import_v43.z.number().nullish()
   }).nullish()
 }).nullish();
-var openaiChatResponseSchema =
-  id:
-  created:
-  model:
-  choices:
-
-      message:
-        role:
-        content:
-        tool_calls:
-
-            id:
-            type:
-            function:
-              name:
-              arguments:
+var openaiChatResponseSchema = import_v43.z.object({
+  id: import_v43.z.string().nullish(),
+  created: import_v43.z.number().nullish(),
+  model: import_v43.z.string().nullish(),
+  choices: import_v43.z.array(
+    import_v43.z.object({
+      message: import_v43.z.object({
+        role: import_v43.z.literal("assistant").nullish(),
+        content: import_v43.z.string().nullish(),
+        tool_calls: import_v43.z.array(
+          import_v43.z.object({
+            id: import_v43.z.string().nullish(),
+            type: import_v43.z.literal("function"),
+            function: import_v43.z.object({
+              name: import_v43.z.string(),
+              arguments: import_v43.z.string()
            })
          })
        ).nullish(),
-        annotations:
-
-            type:
-            start_index:
-            end_index:
-            url:
-            title:
+        annotations: import_v43.z.array(
+          import_v43.z.object({
+            type: import_v43.z.literal("url_citation"),
+            start_index: import_v43.z.number(),
+            end_index: import_v43.z.number(),
+            url: import_v43.z.string(),
+            title: import_v43.z.string()
          })
        ).nullish()
      }),
-      index:
-      logprobs:
-        content:
-
-            token:
-            logprob:
-            top_logprobs:
-
-                token:
-                logprob:
+      index: import_v43.z.number(),
+      logprobs: import_v43.z.object({
+        content: import_v43.z.array(
+          import_v43.z.object({
+            token: import_v43.z.string(),
+            logprob: import_v43.z.number(),
+            top_logprobs: import_v43.z.array(
+              import_v43.z.object({
+                token: import_v43.z.string(),
+                logprob: import_v43.z.number()
              })
            )
          })
        ).nullish()
      }).nullish(),
-      finish_reason:
+      finish_reason: import_v43.z.string().nullish()
    })
  ),
  usage: openaiTokenUsageSchema
 });
-var openaiChatChunkSchema =
-
-  id:
-  created:
-  model:
-  choices:
-
-      delta:
-        role:
-        content:
-        tool_calls:
-
-            index:
-            id:
-            type:
-            function:
-              name:
-              arguments:
+var openaiChatChunkSchema = import_v43.z.union([
+  import_v43.z.object({
+    id: import_v43.z.string().nullish(),
+    created: import_v43.z.number().nullish(),
+    model: import_v43.z.string().nullish(),
+    choices: import_v43.z.array(
+      import_v43.z.object({
+        delta: import_v43.z.object({
+          role: import_v43.z.enum(["assistant"]).nullish(),
+          content: import_v43.z.string().nullish(),
+          tool_calls: import_v43.z.array(
+            import_v43.z.object({
+              index: import_v43.z.number(),
+              id: import_v43.z.string().nullish(),
+              type: import_v43.z.literal("function").nullish(),
+              function: import_v43.z.object({
+                name: import_v43.z.string().nullish(),
+                arguments: import_v43.z.string().nullish()
              })
            })
          ).nullish(),
-          annotations:
-
-              type:
-              start_index:
-              end_index:
-              url:
-              title:
+          annotations: import_v43.z.array(
+            import_v43.z.object({
+              type: import_v43.z.literal("url_citation"),
+              start_index: import_v43.z.number(),
+              end_index: import_v43.z.number(),
+              url: import_v43.z.string(),
+              title: import_v43.z.string()
            })
          ).nullish()
        }).nullish(),
-        logprobs:
-          content:
-
-              token:
-              logprob:
-              top_logprobs:
-
-                  token:
-                  logprob:
+        logprobs: import_v43.z.object({
+          content: import_v43.z.array(
+            import_v43.z.object({
+              token: import_v43.z.string(),
+              logprob: import_v43.z.number(),
+              top_logprobs: import_v43.z.array(
+                import_v43.z.object({
+                  token: import_v43.z.string(),
+                  logprob: import_v43.z.number()
                })
              )
            })
          ).nullish()
        }).nullish(),
-        finish_reason:
-        index:
+        finish_reason: import_v43.z.string().nullish(),
+        index: import_v43.z.number()
      })
    ),
    usage: openaiTokenUsageSchema
@@ -1196,8 +1077,8 @@ var reasoningModels = {
 };
 
 // src/completion/openai-completion-language-model.ts
-var
-var
+var import_provider_utils4 = require("@ai-sdk/provider-utils");
+var import_v45 = require("zod/v4");
 
 // src/completion/convert-to-openai-completion-prompt.ts
 var import_provider4 = require("@ai-sdk/provider");
@@ -1305,12 +1186,12 @@ function mapOpenAIFinishReason2(finishReason) {
 }
 
 // src/completion/openai-completion-options.ts
-var
-var openaiCompletionProviderOptions =
+var import_v44 = require("zod/v4");
+var openaiCompletionProviderOptions = import_v44.z.object({
   /**
   Echo back the prompt in addition to the completion.
    */
-  echo:
+  echo: import_v44.z.boolean().optional(),
   /**
   Modify the likelihood of specified tokens appearing in the completion.
 
@@ -1325,16 +1206,16 @@ var openaiCompletionProviderOptions = import_v46.z.object({
   As an example, you can pass {"50256": -100} to prevent the <|endoftext|>
   token from being generated.
    */
-  logitBias:
+  logitBias: import_v44.z.record(import_v44.z.string(), import_v44.z.number()).optional(),
   /**
   The suffix that comes after a completion of inserted text.
    */
-  suffix:
+  suffix: import_v44.z.string().optional(),
   /**
   A unique identifier representing your end-user, which can help OpenAI to
   monitor and detect abuse. Learn more.
    */
-  user:
+  user: import_v44.z.string().optional(),
   /**
   Return the log probabilities of the tokens. Including logprobs will increase
   the response size and can slow down response times. However, it can
@@ -1344,7 +1225,7 @@ var openaiCompletionProviderOptions = import_v46.z.object({
   Setting to a number will return the log probabilities of the top n
   tokens that were generated.
    */
-  logprobs:
+  logprobs: import_v44.z.union([import_v44.z.boolean(), import_v44.z.number()]).optional()
 });
 
 // src/completion/openai-completion-language-model.ts
@@ -1380,12 +1261,12 @@ var OpenAICompletionLanguageModel = class {
   }) {
     const warnings = [];
     const openaiOptions = {
-      ...await (0,
+      ...await (0, import_provider_utils4.parseProviderOptions)({
        provider: "openai",
        providerOptions,
        schema: openaiCompletionProviderOptions
      }),
-      ...await (0,
+      ...await (0, import_provider_utils4.parseProviderOptions)({
        provider: this.providerOptionsName,
        providerOptions,
        schema: openaiCompletionProviderOptions
@@ -1441,15 +1322,15 @@ var OpenAICompletionLanguageModel = class {
       responseHeaders,
       value: response,
       rawValue: rawResponse
-    } = await (0,
+    } = await (0, import_provider_utils4.postJsonToApi)({
      url: this.config.url({
        path: "/completions",
        modelId: this.modelId
      }),
-      headers: (0,
+      headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), options.headers),
      body: args,
      failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils4.createJsonResponseHandler)(
        openaiCompletionResponseSchema
      ),
      abortSignal: options.abortSignal,
@@ -1487,15 +1368,15 @@ var OpenAICompletionLanguageModel = class {
         include_usage: true
       }
     };
-    const { responseHeaders, value: response } = await (0,
+    const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
      url: this.config.url({
        path: "/completions",
        modelId: this.modelId
      }),
-      headers: (0,
+      headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), options.headers),
      body,
      failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils4.createEventSourceResponseHandler)(
        openaiCompletionChunkSchema
      ),
      abortSignal: options.abortSignal,
@@ -1576,42 +1457,42 @@ var OpenAICompletionLanguageModel = class {
     };
   }
 };
-var usageSchema =
-  prompt_tokens:
-  completion_tokens:
-  total_tokens:
+var usageSchema = import_v45.z.object({
+  prompt_tokens: import_v45.z.number(),
+  completion_tokens: import_v45.z.number(),
+  total_tokens: import_v45.z.number()
 });
-var openaiCompletionResponseSchema =
-  id:
-  created:
-  model:
-  choices:
-
-      text:
-      finish_reason:
-      logprobs:
-        tokens:
-        token_logprobs:
-        top_logprobs:
+var openaiCompletionResponseSchema = import_v45.z.object({
+  id: import_v45.z.string().nullish(),
+  created: import_v45.z.number().nullish(),
+  model: import_v45.z.string().nullish(),
+  choices: import_v45.z.array(
+    import_v45.z.object({
+      text: import_v45.z.string(),
+      finish_reason: import_v45.z.string(),
+      logprobs: import_v45.z.object({
+        tokens: import_v45.z.array(import_v45.z.string()),
+        token_logprobs: import_v45.z.array(import_v45.z.number()),
+        top_logprobs: import_v45.z.array(import_v45.z.record(import_v45.z.string(), import_v45.z.number())).nullish()
      }).nullish()
    })
  ),
  usage: usageSchema.nullish()
 });
-var openaiCompletionChunkSchema =
-
-  id:
-  created:
-  model:
-  choices:
-
-      text:
-      finish_reason:
-      index:
-      logprobs:
-        tokens:
-        token_logprobs:
-        top_logprobs:
+var openaiCompletionChunkSchema = import_v45.z.union([
+  import_v45.z.object({
+    id: import_v45.z.string().nullish(),
+    created: import_v45.z.number().nullish(),
+    model: import_v45.z.string().nullish(),
+    choices: import_v45.z.array(
+      import_v45.z.object({
+        text: import_v45.z.string(),
+        finish_reason: import_v45.z.string().nullish(),
+        index: import_v45.z.number(),
+        logprobs: import_v45.z.object({
+          tokens: import_v45.z.array(import_v45.z.string()),
+          token_logprobs: import_v45.z.array(import_v45.z.number()),
+          top_logprobs: import_v45.z.array(import_v45.z.record(import_v45.z.string(), import_v45.z.number())).nullish()
        }).nullish()
      })
    ),
@@ -1622,22 +1503,22 @@ var openaiCompletionChunkSchema = import_v47.z.union([
 
 // src/embedding/openai-embedding-model.ts
 var import_provider5 = require("@ai-sdk/provider");
-var
-var
+var import_provider_utils5 = require("@ai-sdk/provider-utils");
+var import_v47 = require("zod/v4");
 
 // src/embedding/openai-embedding-options.ts
-var
-var openaiEmbeddingProviderOptions =
+var import_v46 = require("zod/v4");
+var openaiEmbeddingProviderOptions = import_v46.z.object({
   /**
   The number of dimensions the resulting output embeddings should have.
   Only supported in text-embedding-3 and later models.
    */
-  dimensions:
+  dimensions: import_v46.z.number().optional(),
   /**
   A unique identifier representing your end-user, which can help OpenAI to
   monitor and detect abuse. Learn more.
    */
-  user:
+  user: import_v46.z.string().optional()
 });
 
 // src/embedding/openai-embedding-model.ts
@@ -1667,7 +1548,7 @@ var OpenAIEmbeddingModel = class {
         values
       });
     }
-    const openaiOptions = (_a = await (0,
+    const openaiOptions = (_a = await (0, import_provider_utils5.parseProviderOptions)({
       provider: "openai",
       providerOptions,
       schema: openaiEmbeddingProviderOptions
@@ -1676,12 +1557,12 @@ var OpenAIEmbeddingModel = class {
       responseHeaders,
       value: response,
       rawValue
-    } = await (0,
+    } = await (0, import_provider_utils5.postJsonToApi)({
      url: this.config.url({
        path: "/embeddings",
        modelId: this.modelId
      }),
-      headers: (0,
+      headers: (0, import_provider_utils5.combineHeaders)(this.config.headers(), headers),
      body: {
        model: this.modelId,
        input: values,
@@ -1690,7 +1571,7 @@ var OpenAIEmbeddingModel = class {
         user: openaiOptions.user
       },
       failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils5.createJsonResponseHandler)(
        openaiTextEmbeddingResponseSchema
      ),
      abortSignal,
@@ -1703,14 +1584,14 @@ var OpenAIEmbeddingModel = class {
     };
   }
 };
-var openaiTextEmbeddingResponseSchema =
-  data:
-  usage:
+var openaiTextEmbeddingResponseSchema = import_v47.z.object({
+  data: import_v47.z.array(import_v47.z.object({ embedding: import_v47.z.array(import_v47.z.number()) })),
+  usage: import_v47.z.object({ prompt_tokens: import_v47.z.number() }).nullish()
 });
 
 // src/image/openai-image-model.ts
-var
-var
+var import_provider_utils6 = require("@ai-sdk/provider-utils");
+var import_v48 = require("zod/v4");
 
 // src/image/openai-image-options.ts
 var modelMaxImagesPerCall = {
@@ -1757,12 +1638,12 @@ var OpenAIImageModel = class {
       warnings.push({ type: "unsupported-setting", setting: "seed" });
     }
     const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
-    const { value: response, responseHeaders } = await (0,
+    const { value: response, responseHeaders } = await (0, import_provider_utils6.postJsonToApi)({
      url: this.config.url({
        path: "/images/generations",
        modelId: this.modelId
      }),
-      headers: (0,
+      headers: (0, import_provider_utils6.combineHeaders)(this.config.headers(), headers),
      body: {
        model: this.modelId,
        prompt,
@@ -1772,7 +1653,7 @@ var OpenAIImageModel = class {
         ...!hasDefaultResponseFormat.has(this.modelId) ? { response_format: "b64_json" } : {}
       },
       failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils6.createJsonResponseHandler)(
        openaiImageResponseSchema
      ),
      abortSignal,
@@ -1798,41 +1679,41 @@ var OpenAIImageModel = class {
     };
   }
 };
-var openaiImageResponseSchema =
-  data:
-
+var openaiImageResponseSchema = import_v48.z.object({
+  data: import_v48.z.array(
+    import_v48.z.object({ b64_json: import_v48.z.string(), revised_prompt: import_v48.z.string().optional() })
  )
 });
 
 // src/transcription/openai-transcription-model.ts
-var
-var
+var import_provider_utils7 = require("@ai-sdk/provider-utils");
+var import_v410 = require("zod/v4");
 
 // src/transcription/openai-transcription-options.ts
-var
-var openAITranscriptionProviderOptions =
+var import_v49 = require("zod/v4");
+var openAITranscriptionProviderOptions = import_v49.z.object({
   /**
    * Additional information to include in the transcription response.
    */
-  include:
+  include: import_v49.z.array(import_v49.z.string()).optional(),
   /**
   * The language of the input audio in ISO-639-1 format.
   */
-  language:
+  language: import_v49.z.string().optional(),
   /**
   * An optional text to guide the model's style or continue a previous audio segment.
   */
-  prompt:
+  prompt: import_v49.z.string().optional(),
   /**
   * The sampling temperature, between 0 and 1.
   * @default 0
   */
-  temperature:
+  temperature: import_v49.z.number().min(0).max(1).default(0).optional(),
   /**
   * The timestamp granularities to populate for this transcription.
   * @default ['segment']
   */
-  timestampGranularities:
+  timestampGranularities: import_v49.z.array(import_v49.z.enum(["word", "segment"])).default(["segment"]).optional()
 });
 
 // src/transcription/openai-transcription-model.ts
@@ -1910,15 +1791,15 @@ var OpenAITranscriptionModel = class {
     providerOptions
   }) {
     const warnings = [];
-    const openAIOptions = await (0,
+    const openAIOptions = await (0, import_provider_utils7.parseProviderOptions)({
      provider: "openai",
      providerOptions,
      schema: openAITranscriptionProviderOptions
    });
    const formData = new FormData();
-    const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([(0,
+    const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([(0, import_provider_utils7.convertBase64ToUint8Array)(audio)]);
    formData.append("model", this.modelId);
-    const fileExtension = (0,
+    const fileExtension = (0, import_provider_utils7.mediaTypeToExtension)(mediaType);
    formData.append(
      "file",
      new File([blob], "audio", { type: mediaType }),
@@ -1963,15 +1844,15 @@ var OpenAITranscriptionModel = class {
       value: response,
       responseHeaders,
       rawValue: rawResponse
-    } = await (0,
+    } = await (0, import_provider_utils7.postFormDataToApi)({
      url: this.config.url({
        path: "/audio/transcriptions",
        modelId: this.modelId
      }),
-      headers: (0,
+      headers: (0, import_provider_utils7.combineHeaders)(this.config.headers(), options.headers),
      formData,
      failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils7.createJsonResponseHandler)(
        openaiTranscriptionResponseSchema
      ),
      abortSignal: options.abortSignal,
@@ -2001,39 +1882,39 @@ var OpenAITranscriptionModel = class {
     };
   }
 };
-var openaiTranscriptionResponseSchema =
-  text:
-  language:
-  duration:
-  words:
-
-      word:
-      start:
-      end:
+var openaiTranscriptionResponseSchema = import_v410.z.object({
+  text: import_v410.z.string(),
+  language: import_v410.z.string().nullish(),
+  duration: import_v410.z.number().nullish(),
+  words: import_v410.z.array(
+    import_v410.z.object({
+      word: import_v410.z.string(),
+      start: import_v410.z.number(),
+      end: import_v410.z.number()
    })
  ).nullish(),
-  segments:
-
-      id:
-      seek:
-      start:
-      end:
-      text:
-      tokens:
-      temperature:
-      avg_logprob:
-      compression_ratio:
-      no_speech_prob:
+  segments: import_v410.z.array(
+    import_v410.z.object({
+      id: import_v410.z.number(),
+      seek: import_v410.z.number(),
+      start: import_v410.z.number(),
+      end: import_v410.z.number(),
+      text: import_v410.z.string(),
+      tokens: import_v410.z.array(import_v410.z.number()),
+      temperature: import_v410.z.number(),
+      avg_logprob: import_v410.z.number(),
+      compression_ratio: import_v410.z.number(),
+      no_speech_prob: import_v410.z.number()
    })
  ).nullish()
 });
 
 // src/speech/openai-speech-model.ts
-var
-var
-var OpenAIProviderOptionsSchema =
-  instructions:
-  speed:
+var import_provider_utils8 = require("@ai-sdk/provider-utils");
+var import_v411 = require("zod/v4");
+var OpenAIProviderOptionsSchema = import_v411.z.object({
+  instructions: import_v411.z.string().nullish(),
+  speed: import_v411.z.number().min(0.25).max(4).default(1).nullish()
 });
 var OpenAISpeechModel = class {
   constructor(modelId, config) {
@@ -2054,7 +1935,7 @@ var OpenAISpeechModel = class {
     providerOptions
   }) {
     const warnings = [];
-    const openAIOptions = await (0,
+    const openAIOptions = await (0, import_provider_utils8.parseProviderOptions)({
       provider: "openai",
       providerOptions,
       schema: OpenAIProviderOptionsSchema
@@ -2107,15 +1988,15 @@ var OpenAISpeechModel = class {
       value: audio,
       responseHeaders,
       rawValue: rawResponse
-    } = await (0,
+    } = await (0, import_provider_utils8.postJsonToApi)({
      url: this.config.url({
        path: "/audio/speech",
        modelId: this.modelId
      }),
-      headers: (0,
+      headers: (0, import_provider_utils8.combineHeaders)(this.config.headers(), options.headers),
      body: requestBody,
      failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils8.createBinaryResponseHandler)(),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
@@ -2137,13 +2018,13 @@ var OpenAISpeechModel = class {
 
 // src/responses/openai-responses-language-model.ts
 var import_provider8 = require("@ai-sdk/provider");
-var
-var
+var import_provider_utils15 = require("@ai-sdk/provider-utils");
+var import_v418 = require("zod/v4");
 
 // src/responses/convert-to-openai-responses-input.ts
 var import_provider6 = require("@ai-sdk/provider");
-var
-var
+var import_provider_utils9 = require("@ai-sdk/provider-utils");
+var import_v412 = require("zod/v4");
 function isFileId(data, prefixes) {
   if (!prefixes) return false;
   return prefixes.some((prefix) => data.startsWith(prefix));
@@ -2151,7 +2032,8 @@ function isFileId(data, prefixes) {
 async function convertToOpenAIResponsesInput({
   prompt,
   systemMessageMode,
-  fileIdPrefixes
+  fileIdPrefixes,
+  store
 }) {
   var _a, _b, _c, _d, _e, _f;
   const input = [];
@@ -2199,7 +2081,7 @@ async function convertToOpenAIResponsesInput({
             return {
               type: "input_image",
               ...part.data instanceof URL ? { image_url: part.data.toString() } : typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
-                image_url: `data:${mediaType};base64,${(0,
+                image_url: `data:${mediaType};base64,${(0, import_provider_utils9.convertToBase64)(part.data)}`
               },
               detail: (_b2 = (_a2 = part.providerOptions) == null ? void 0 : _a2.openai) == null ? void 0 : _b2.imageDetail
             };
@@ -2214,7 +2096,7 @@ async function convertToOpenAIResponsesInput({
               type: "input_file",
               ...typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
                 filename: (_c2 = part.filename) != null ? _c2 : `part-${index}.pdf`,
-                file_data: `data:application/pdf;base64,${(0,
+                file_data: `data:application/pdf;base64,${(0, import_provider_utils9.convertToBase64)(part.data)}`
               }
             };
           } else {
@@ -2256,14 +2138,18 @@ async function convertToOpenAIResponsesInput({
           break;
         }
         case "tool-result": {
-
-          type: "
-
-
+          if (store) {
+            input.push({ type: "item_reference", id: part.toolCallId });
+          } else {
+            warnings.push({
+              type: "other",
+              message: `Results for OpenAI tool ${part.toolName} are not sent to the API when store is false`
+            });
+          }
           break;
         }
         case "reasoning": {
-          const providerOptions = await (0,
+          const providerOptions = await (0, import_provider_utils9.parseProviderOptions)({
            provider: "openai",
            providerOptions: part.providerOptions,
            schema: openaiResponsesReasoningProviderOptionsSchema
@@ -2334,9 +2220,9 @@ async function convertToOpenAIResponsesInput({
   }
   return { input, warnings };
 }
-var openaiResponsesReasoningProviderOptionsSchema =
-  itemId:
-  reasoningEncryptedContent:
+var openaiResponsesReasoningProviderOptionsSchema = import_v412.z.object({
+  itemId: import_v412.z.string().nullish(),
+  reasoningEncryptedContent: import_v412.z.string().nullish()
 });
 
 // src/responses/map-openai-responses-finish-reason.ts
@@ -2361,54 +2247,155 @@ function mapOpenAIResponseFinishReason({
 var import_provider7 = require("@ai-sdk/provider");
 
 // src/tool/code-interpreter.ts
-var
-var
-var codeInterpreterInputSchema =
-  code:
-  containerId:
+var import_provider_utils10 = require("@ai-sdk/provider-utils");
+var import_v413 = require("zod/v4");
+var codeInterpreterInputSchema = import_v413.z.object({
+  code: import_v413.z.string().nullish(),
+  containerId: import_v413.z.string()
 });
-var codeInterpreterOutputSchema =
-  outputs:
-
-
-
+var codeInterpreterOutputSchema = import_v413.z.object({
+  outputs: import_v413.z.array(
+    import_v413.z.discriminatedUnion("type", [
+      import_v413.z.object({ type: import_v413.z.literal("logs"), logs: import_v413.z.string() }),
+      import_v413.z.object({ type: import_v413.z.literal("image"), url: import_v413.z.string() })
    ])
  ).nullish()
 });
-var codeInterpreterArgsSchema =
-  container:
-
-
-    fileIds:
+var codeInterpreterArgsSchema = import_v413.z.object({
+  container: import_v413.z.union([
+    import_v413.z.string(),
+    import_v413.z.object({
+      fileIds: import_v413.z.array(import_v413.z.string()).optional()
    })
  ]).optional()
 });
-var codeInterpreterToolFactory = (0,
+var codeInterpreterToolFactory = (0, import_provider_utils10.createProviderDefinedToolFactoryWithOutputSchema)({
  id: "openai.code_interpreter",
  name: "code_interpreter",
  inputSchema: codeInterpreterInputSchema,
  outputSchema: codeInterpreterOutputSchema
 });
 
+// src/tool/file-search.ts
+var import_provider_utils11 = require("@ai-sdk/provider-utils");
+var import_v414 = require("zod/v4");
+var comparisonFilterSchema = import_v414.z.object({
+  key: import_v414.z.string(),
+  type: import_v414.z.enum(["eq", "ne", "gt", "gte", "lt", "lte"]),
+  value: import_v414.z.union([import_v414.z.string(), import_v414.z.number(), import_v414.z.boolean()])
+});
+var compoundFilterSchema = import_v414.z.object({
+  type: import_v414.z.enum(["and", "or"]),
+  filters: import_v414.z.array(
+    import_v414.z.union([comparisonFilterSchema, import_v414.z.lazy(() => compoundFilterSchema)])
+  )
+});
+var fileSearchArgsSchema = import_v414.z.object({
+  vectorStoreIds: import_v414.z.array(import_v414.z.string()),
+  maxNumResults: import_v414.z.number().optional(),
+  ranking: import_v414.z.object({
+    ranker: import_v414.z.string().optional(),
+    scoreThreshold: import_v414.z.number().optional()
+  }).optional(),
+  filters: import_v414.z.union([comparisonFilterSchema, compoundFilterSchema]).optional()
+});
+var fileSearchOutputSchema = import_v414.z.object({
+  queries: import_v414.z.array(import_v414.z.string()),
+  results: import_v414.z.array(
+    import_v414.z.object({
+      attributes: import_v414.z.record(import_v414.z.string(), import_v414.z.unknown()),
+      fileId: import_v414.z.string(),
+      filename: import_v414.z.string(),
+      score: import_v414.z.number(),
+      text: import_v414.z.string()
+    })
+  ).nullable()
+});
+var fileSearch = (0, import_provider_utils11.createProviderDefinedToolFactoryWithOutputSchema)({
+  id: "openai.file_search",
+  name: "file_search",
+  inputSchema: import_v414.z.object({}),
+  outputSchema: fileSearchOutputSchema
+});
+
 // src/tool/web-search.ts
+var import_provider_utils12 = require("@ai-sdk/provider-utils");
+var import_v415 = require("zod/v4");
+var webSearchArgsSchema = import_v415.z.object({
+  filters: import_v415.z.object({
+    allowedDomains: import_v415.z.array(import_v415.z.string()).optional()
+  }).optional(),
+  searchContextSize: import_v415.z.enum(["low", "medium", "high"]).optional(),
+  userLocation: import_v415.z.object({
+    type: import_v415.z.literal("approximate"),
+    country: import_v415.z.string().optional(),
+    city: import_v415.z.string().optional(),
+    region: import_v415.z.string().optional(),
+    timezone: import_v415.z.string().optional()
+  }).optional()
+});
+var webSearchToolFactory = (0, import_provider_utils12.createProviderDefinedToolFactory)({
+  id: "openai.web_search",
+  name: "web_search",
+  inputSchema: import_v415.z.object({
+    action: import_v415.z.discriminatedUnion("type", [
+      import_v415.z.object({
+        type: import_v415.z.literal("search"),
+        query: import_v415.z.string().nullish()
+      }),
+      import_v415.z.object({
+        type: import_v415.z.literal("open_page"),
+        url: import_v415.z.string()
+      }),
+      import_v415.z.object({
+        type: import_v415.z.literal("find"),
+        url: import_v415.z.string(),
+        pattern: import_v415.z.string()
+      })
+    ]).nullish()
+  })
+});
+
+// src/tool/web-search-preview.ts
 var import_provider_utils13 = require("@ai-sdk/provider-utils");
 var import_v416 = require("zod/v4");
-var
-
-
-
+var webSearchPreviewArgsSchema = import_v416.z.object({
+  /**
+   * Search context size to use for the web search.
+   * - high: Most comprehensive context, highest cost, slower response
+   * - medium: Balanced context, cost, and latency (default)
+   * - low: Least context, lowest cost, fastest response
+   */
   searchContextSize: import_v416.z.enum(["low", "medium", "high"]).optional(),
+  /**
+   * User location information to provide geographically relevant search results.
+   */
   userLocation: import_v416.z.object({
+    /**
+     * Type of location (always 'approximate')
+     */
    type: import_v416.z.literal("approximate"),
+    /**
+     * Two-letter ISO country code (e.g., 'US', 'GB')
+     */
    country: import_v416.z.string().optional(),
+    /**
+     * City name (free text, e.g., 'Minneapolis')
+     */
    city: import_v416.z.string().optional(),
+    /**
+     * Region name (free text, e.g., 'Minnesota')
+     */
    region: import_v416.z.string().optional(),
+    /**
+     * IANA timezone (e.g., 'America/Chicago')
+     */
    timezone: import_v416.z.string().optional()
  }).optional()
 });
-var
-  id: "openai.
-  name: "
+var webSearchPreview = (0, import_provider_utils13.createProviderDefinedToolFactory)({
+  id: "openai.web_search_preview",
+  name: "web_search_preview",
  inputSchema: import_v416.z.object({
    action: import_v416.z.discriminatedUnion("type", [
      import_v416.z.object({
@@ -2428,6 +2415,33 @@ var webSearchToolFactory = (0, import_provider_utils13.createProviderDefinedTool
   })
 });
 
+// src/tool/image-generation.ts
+var import_provider_utils14 = require("@ai-sdk/provider-utils");
+var import_v417 = require("zod/v4");
+var imageGenerationArgsSchema = import_v417.z.object({
+  background: import_v417.z.enum(["auto", "opaque", "transparent"]).optional(),
+  inputFidelity: import_v417.z.enum(["low", "high"]).optional(),
+  inputImageMask: import_v417.z.object({
+    fileId: import_v417.z.string().optional(),
+    imageUrl: import_v417.z.string().optional()
+  }).optional(),
+  model: import_v417.z.string().optional(),
+  moderation: import_v417.z.enum(["auto"]).optional(),
+  outputCompression: import_v417.z.number().int().min(0).max(100).optional(),
+  outputFormat: import_v417.z.enum(["png", "jpeg", "webp"]).optional(),
+  quality: import_v417.z.enum(["auto", "low", "medium", "high"]).optional(),
+  size: import_v417.z.enum(["1024x1024", "1024x1536", "1536x1024", "auto"]).optional()
+}).strict();
+var imageGenerationOutputSchema = import_v417.z.object({
+  result: import_v417.z.string()
+});
+var imageGenerationToolFactory = (0, import_provider_utils14.createProviderDefinedToolFactoryWithOutputSchema)({
+  id: "openai.image_generation",
+  name: "image_generation",
+  inputSchema: import_v417.z.object({}),
+  outputSchema: imageGenerationOutputSchema
+});
+
 // src/responses/openai-responses-prepare-tools.ts
 function prepareResponsesTools({
   tools,
@@ -2459,7 +2473,10 @@ function prepareResponsesTools({
           type: "file_search",
           vector_store_ids: args.vectorStoreIds,
           max_num_results: args.maxNumResults,
-          ranking_options: args.ranking ? {
+          ranking_options: args.ranking ? {
+            ranker: args.ranking.ranker,
+            score_threshold: args.ranking.scoreThreshold
+          } : void 0,
           filters: args.filters
         });
         break;
@@ -2491,8 +2508,23 @@ function prepareResponsesTools({
         });
         break;
       }
-
-
+      case "openai.image_generation": {
+        const args = imageGenerationArgsSchema.parse(tool.args);
+        openaiTools.push({
+          type: "image_generation",
+          background: args.background,
+          input_fidelity: args.inputFidelity,
+          input_image_mask: args.inputImageMask ? {
+            file_id: args.inputImageMask.fileId,
+            image_url: args.inputImageMask.imageUrl
+          } : void 0,
+          model: args.model,
+          size: args.size,
+          quality: args.quality,
+          moderation: args.moderation,
+          output_format: args.outputFormat,
+          output_compression: args.outputCompression
+        });
         break;
       }
     }
@@ -2515,7 +2547,7 @@ function prepareResponsesTools({
     case "tool":
       return {
         tools: openaiTools,
-        toolChoice: toolChoice.toolName === "code_interpreter" || toolChoice.toolName === "file_search" || toolChoice.toolName === "web_search_preview" || toolChoice.toolName === "web_search" ? { type: toolChoice.toolName } : { type: "function", name: toolChoice.toolName },
+        toolChoice: toolChoice.toolName === "code_interpreter" || toolChoice.toolName === "file_search" || toolChoice.toolName === "image_generation" || toolChoice.toolName === "web_search_preview" || toolChoice.toolName === "web_search" ? { type: toolChoice.toolName } : { type: "function", name: toolChoice.toolName },
         toolWarnings
       };
     default: {
@@ -2528,47 +2560,66 @@ function prepareResponsesTools({
 }
 
 // src/responses/openai-responses-language-model.ts
-var webSearchCallItem =
-  type:
-  id:
-  status:
-  action:
-
-      type:
-      query:
+var webSearchCallItem = import_v418.z.object({
+  type: import_v418.z.literal("web_search_call"),
+  id: import_v418.z.string(),
+  status: import_v418.z.string(),
+  action: import_v418.z.discriminatedUnion("type", [
+    import_v418.z.object({
+      type: import_v418.z.literal("search"),
+      query: import_v418.z.string().nullish()
    }),
-
-      type:
-      url:
+    import_v418.z.object({
+      type: import_v418.z.literal("open_page"),
+      url: import_v418.z.string()
    }),
-
-      type:
-      url:
-      pattern:
+    import_v418.z.object({
+      type: import_v418.z.literal("find"),
+      url: import_v418.z.string(),
+      pattern: import_v418.z.string()
    })
  ]).nullish()
 });
-var
-  type:
-  id:
-
-
-
-
-
-
+var fileSearchCallItem = import_v418.z.object({
+  type: import_v418.z.literal("file_search_call"),
+  id: import_v418.z.string(),
+  queries: import_v418.z.array(import_v418.z.string()),
+  results: import_v418.z.array(
+    import_v418.z.object({
+      attributes: import_v418.z.record(import_v418.z.string(), import_v418.z.unknown()),
+      file_id: import_v418.z.string(),
+      filename: import_v418.z.string(),
+      score: import_v418.z.number(),
+      text: import_v418.z.string()
+    })
+  ).nullish()
+});
+var codeInterpreterCallItem = import_v418.z.object({
+  type: import_v418.z.literal("code_interpreter_call"),
+  id: import_v418.z.string(),
+  code: import_v418.z.string().nullable(),
+  container_id: import_v418.z.string(),
+  outputs: import_v418.z.array(
+    import_v418.z.discriminatedUnion("type", [
+      import_v418.z.object({ type: import_v418.z.literal("logs"), logs: import_v418.z.string() }),
+      import_v418.z.object({ type: import_v418.z.literal("image"), url: import_v418.z.string() })
    ])
  ).nullable()
 });
+var imageGenerationCallItem = import_v418.z.object({
+  type: import_v418.z.literal("image_generation_call"),
+  id: import_v418.z.string(),
+  result: import_v418.z.string()
+});
 var TOP_LOGPROBS_MAX = 20;
-var LOGPROBS_SCHEMA =
-
-    token:
-    logprob:
-    top_logprobs:
-
-        token:
-        logprob:
+var LOGPROBS_SCHEMA = import_v418.z.array(
+  import_v418.z.object({
+    token: import_v418.z.string(),
+    logprob: import_v418.z.number(),
+    top_logprobs: import_v418.z.array(
+      import_v418.z.object({
+        token: import_v418.z.string(),
+        logprob: import_v418.z.number()
      })
    )
  })
@@ -2625,29 +2676,41 @@ var OpenAIResponsesLanguageModel = class {
     if (stopSequences != null) {
       warnings.push({ type: "unsupported-setting", setting: "stopSequences" });
     }
+    const openaiOptions = await (0, import_provider_utils15.parseProviderOptions)({
+      provider: "openai",
+      providerOptions,
+      schema: openaiResponsesProviderOptionsSchema
+    });
     const { input, warnings: inputWarnings } = await convertToOpenAIResponsesInput({
       prompt,
       systemMessageMode: modelConfig.systemMessageMode,
-      fileIdPrefixes: this.config.fileIdPrefixes
+      fileIdPrefixes: this.config.fileIdPrefixes,
+      store: (_a = openaiOptions == null ? void 0 : openaiOptions.store) != null ? _a : true
     });
     warnings.push(...inputWarnings);
-    const
-      provider: "openai",
-      providerOptions,
-      schema: openaiResponsesProviderOptionsSchema
-    });
-    const strictJsonSchema = (_a = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _a : false;
+    const strictJsonSchema = (_b = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _b : false;
     let include = openaiOptions == null ? void 0 : openaiOptions.include;
+    function addInclude(key) {
+      include = include != null ? [...include, key] : [key];
+    }
+    function hasOpenAITool(id) {
+      return (tools == null ? void 0 : tools.find(
+        (tool) => tool.type === "provider-defined" && tool.id === id
+      )) != null;
+    }
     const topLogprobs = typeof (openaiOptions == null ? void 0 : openaiOptions.logprobs) === "number" ? openaiOptions == null ? void 0 : openaiOptions.logprobs : (openaiOptions == null ? void 0 : openaiOptions.logprobs) === true ? TOP_LOGPROBS_MAX : void 0;
-
-
+    if (topLogprobs) {
+      addInclude("message.output_text.logprobs");
+    }
+    const webSearchToolName = (_c = tools == null ? void 0 : tools.find(
       (tool) => tool.type === "provider-defined" && (tool.id === "openai.web_search" || tool.id === "openai.web_search_preview")
-    )) == null ? void 0 : _b.name;
-    include = webSearchToolName ? Array.isArray(include) ? [...include, "web_search_call.action.sources"] : ["web_search_call.action.sources"] : include;
-    const codeInterpreterToolName = (_c = tools == null ? void 0 : tools.find(
-      (tool) => tool.type === "provider-defined" && tool.id === "openai.code_interpreter"
     )) == null ? void 0 : _c.name;
-
+    if (webSearchToolName) {
+      addInclude("web_search_call.action.sources");
+    }
+    if (hasOpenAITool("openai.code_interpreter")) {
+      addInclude("code_interpreter_call.outputs");
+    }
     const baseArgs = {
       model: this.modelId,
       input,
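The refactor above replaces the previous ad-hoc include handling with two small helpers: addInclude appends a key to the Responses API "include" list, and hasOpenAITool checks whether a given provider-defined tool id was passed. A minimal standalone sketch of the resulting behavior (the boolean flags are invented stand-ins for the real option and tool checks):

// Sketch of the include-accumulation introduced above; not code from the package.
type IncludeKey =
  | "message.output_text.logprobs"
  | "web_search_call.action.sources"
  | "code_interpreter_call.outputs";

let include: IncludeKey[] | undefined;
const addInclude = (key: IncludeKey) => {
  include = include != null ? [...include, key] : [key];
};

const logprobsRequested = true;   // stands in for providerOptions.openai.logprobs
const hasWebSearchTool = true;    // stands in for openai.web_search / web_search_preview
const hasCodeInterpreter = true;  // stands in for openai.code_interpreter

if (logprobsRequested) addInclude("message.output_text.logprobs");
if (hasWebSearchTool) addInclude("web_search_call.action.sources");
if (hasCodeInterpreter) addInclude("code_interpreter_call.outputs");

console.log(include);
// ["message.output_text.logprobs", "web_search_call.action.sources", "code_interpreter_call.outputs"]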
@@ -2671,6 +2734,7 @@ var OpenAIResponsesLanguageModel = class {
         }
       },
       // provider options:
+      max_tool_calls: openaiOptions == null ? void 0 : openaiOptions.maxToolCalls,
       metadata: openaiOptions == null ? void 0 : openaiOptions.metadata,
       parallel_tool_calls: openaiOptions == null ? void 0 : openaiOptions.parallelToolCalls,
       previous_response_id: openaiOptions == null ? void 0 : openaiOptions.previousResponseId,
@@ -2766,7 +2830,7 @@ var OpenAIResponsesLanguageModel = class {
     };
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s;
     const {
       args: body,
       warnings,
@@ -2780,102 +2844,88 @@ var OpenAIResponsesLanguageModel = class {
       responseHeaders,
       value: response,
       rawValue: rawResponse
-    } = await (0,
+    } = await (0, import_provider_utils15.postJsonToApi)({
       url,
-      headers: (0,
+      headers: (0, import_provider_utils15.combineHeaders)(this.config.headers(), options.headers),
       body,
       failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0,
-
-        id:
-        created_at:
-        error:
-          code:
-          message:
+      successfulResponseHandler: (0, import_provider_utils15.createJsonResponseHandler)(
+        import_v418.z.object({
+          id: import_v418.z.string(),
+          created_at: import_v418.z.number(),
+          error: import_v418.z.object({
+            code: import_v418.z.string(),
+            message: import_v418.z.string()
          }).nullish(),
-          model:
-          output:
-
-
-              type:
-              role:
-              id:
-              content:
-
-                  type:
-                  text:
+          model: import_v418.z.string(),
+          output: import_v418.z.array(
+            import_v418.z.discriminatedUnion("type", [
+              import_v418.z.object({
+                type: import_v418.z.literal("message"),
+                role: import_v418.z.literal("assistant"),
+                id: import_v418.z.string(),
+                content: import_v418.z.array(
+                  import_v418.z.object({
+                    type: import_v418.z.literal("output_text"),
+                    text: import_v418.z.string(),
                     logprobs: LOGPROBS_SCHEMA.nullish(),
-                    annotations:
-
-
-                        type:
-                        start_index:
-                        end_index:
-                        url:
-                        title:
+                    annotations: import_v418.z.array(
+                      import_v418.z.discriminatedUnion("type", [
+                        import_v418.z.object({
+                          type: import_v418.z.literal("url_citation"),
+                          start_index: import_v418.z.number(),
+                          end_index: import_v418.z.number(),
+                          url: import_v418.z.string(),
+                          title: import_v418.z.string()
                        }),
-
-                        type:
-                        file_id:
-                        filename:
-                        index:
-                        start_index:
-                        end_index:
-                        quote:
+                        import_v418.z.object({
+                          type: import_v418.z.literal("file_citation"),
+                          file_id: import_v418.z.string(),
+                          filename: import_v418.z.string().nullish(),
+                          index: import_v418.z.number().nullish(),
+                          start_index: import_v418.z.number().nullish(),
+                          end_index: import_v418.z.number().nullish(),
+                          quote: import_v418.z.string().nullish()
                        }),
-
-                        type:
+                        import_v418.z.object({
+                          type: import_v418.z.literal("container_file_citation")
                        })
                      ])
                    )
                  })
                )
              }),
-              codeInterpreterCallItem,
-              import_v417.z.object({
-                type: import_v417.z.literal("function_call"),
-                call_id: import_v417.z.string(),
-                name: import_v417.z.string(),
-                arguments: import_v417.z.string(),
-                id: import_v417.z.string()
-              }),
              webSearchCallItem,
-
-
-
-
+              fileSearchCallItem,
+              codeInterpreterCallItem,
+              imageGenerationCallItem,
+              import_v418.z.object({
+                type: import_v418.z.literal("function_call"),
+                call_id: import_v418.z.string(),
+                name: import_v418.z.string(),
+                arguments: import_v418.z.string(),
+                id: import_v418.z.string()
              }),
-
-                type:
-                id:
-                status:
-                queries: import_v417.z.array(import_v417.z.string()).nullish(),
-                results: import_v417.z.array(
-                  import_v417.z.object({
-                    attributes: import_v417.z.object({
-                      file_id: import_v417.z.string(),
-                      filename: import_v417.z.string(),
-                      score: import_v417.z.number(),
-                      text: import_v417.z.string()
-                    })
-                  })
-                ).nullish()
+              import_v418.z.object({
+                type: import_v418.z.literal("computer_call"),
+                id: import_v418.z.string(),
+                status: import_v418.z.string().optional()
              }),
-
-                type:
-                id:
-                encrypted_content:
-                summary:
-
-                    type:
-                    text:
+              import_v418.z.object({
+                type: import_v418.z.literal("reasoning"),
+                id: import_v418.z.string(),
+                encrypted_content: import_v418.z.string().nullish(),
+                summary: import_v418.z.array(
+                  import_v418.z.object({
+                    type: import_v418.z.literal("summary_text"),
+                    text: import_v418.z.string()
                  })
                )
              })
            ])
          ),
-          service_tier:
-          incomplete_details:
+          service_tier: import_v418.z.string().nullish(),
+          incomplete_details: import_v418.z.object({ reason: import_v418.z.string() }).nullable(),
          usage: usageSchema2
        })
      ),
@@ -2916,6 +2966,25 @@ var OpenAIResponsesLanguageModel = class {
        }
        break;
      }
+      case "image_generation_call": {
+        content.push({
+          type: "tool-call",
+          toolCallId: part.id,
+          toolName: "image_generation",
+          input: "{}",
+          providerExecuted: true
+        });
+        content.push({
+          type: "tool-result",
+          toolCallId: part.id,
+          toolName: "image_generation",
+          result: {
+            result: part.result
+          },
+          providerExecuted: true
+        });
+        break;
+      }
      case "message": {
        for (const contentPart of part.content) {
          if (((_c = (_b = options.providerOptions) == null ? void 0 : _b.openai) == null ? void 0 : _c.logprobs) && contentPart.logprobs) {
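For a completed image_generation_call output item, the new branch above emits a synthetic tool call (with empty input, since the call carries no arguments) followed by a tool result whose nested result field holds the base64-encoded image string. A sketch of the two emitted content parts (ids and payload below are placeholders, not real output):

// Approximate shape of the parts pushed for an image_generation_call; values are invented.
const imageToolCall = {
  type: "tool-call" as const,
  toolCallId: "ig_123",            // part.id from the response output item
  toolName: "image_generation",
  input: "{}",
  providerExecuted: true           // executed by OpenAI, not by the caller
};

const imageToolResult = {
  type: "tool-result" as const,
  toolCallId: "ig_123",
  toolName: "image_generation",
  result: { result: "iVBORw0KGgoAAAANSUhEUg..." },  // base64 image data from part.result
  providerExecuted: true
};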
@@ -2935,7 +3004,7 @@ var OpenAIResponsesLanguageModel = class {
            content.push({
              type: "source",
              sourceType: "url",
-              id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : (0,
+              id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : (0, import_provider_utils15.generateId)(),
              url: annotation.url,
              title: annotation.title
            });
@@ -2943,7 +3012,7 @@ var OpenAIResponsesLanguageModel = class {
            content.push({
              type: "source",
              sourceType: "document",
-              id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0,
+              id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0, import_provider_utils15.generateId)(),
              mediaType: "text/plain",
              title: (_k = (_j = annotation.quote) != null ? _j : annotation.filename) != null ? _k : "Document",
              filename: (_l = annotation.filename) != null ? _l : annotation.file_id
@@ -3010,7 +3079,7 @@ var OpenAIResponsesLanguageModel = class {
          type: "tool-call",
          toolCallId: part.id,
          toolName: "file_search",
-          input: "",
+          input: "{}",
          providerExecuted: true
        });
        content.push({
@@ -3018,10 +3087,14 @@ var OpenAIResponsesLanguageModel = class {
          toolCallId: part.id,
          toolName: "file_search",
          result: {
-
-
-
-
+            queries: part.queries,
+            results: (_n = (_m = part.results) == null ? void 0 : _m.map((result) => ({
+              attributes: result.attributes,
+              fileId: result.file_id,
+              filename: result.filename,
+              score: result.score,
+              text: result.text
+            }))) != null ? _n : null
          },
          providerExecuted: true
        });
@@ -3063,15 +3136,15 @@ var OpenAIResponsesLanguageModel = class {
    return {
      content,
      finishReason: mapOpenAIResponseFinishReason({
-        finishReason: (
+        finishReason: (_o = response.incomplete_details) == null ? void 0 : _o.reason,
        hasFunctionCall
      }),
      usage: {
        inputTokens: response.usage.input_tokens,
        outputTokens: response.usage.output_tokens,
        totalTokens: response.usage.input_tokens + response.usage.output_tokens,
-        reasoningTokens: (
-        cachedInputTokens: (
+        reasoningTokens: (_q = (_p = response.usage.output_tokens_details) == null ? void 0 : _p.reasoning_tokens) != null ? _q : void 0,
+        cachedInputTokens: (_s = (_r = response.usage.input_tokens_details) == null ? void 0 : _r.cached_tokens) != null ? _s : void 0
      },
      request: { body },
      response: {
@@ -3091,18 +3164,18 @@ var OpenAIResponsesLanguageModel = class {
      warnings,
      webSearchToolName
    } = await this.getArgs(options);
-    const { responseHeaders, value: response } = await (0,
+    const { responseHeaders, value: response } = await (0, import_provider_utils15.postJsonToApi)({
      url: this.config.url({
        path: "/responses",
        modelId: this.modelId
      }),
-      headers: (0,
+      headers: (0, import_provider_utils15.combineHeaders)(this.config.headers(), options.headers),
      body: {
        ...body,
        stream: true
      },
      failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils15.createEventSourceResponseHandler)(
        openaiResponsesChunkSchema
      ),
      abortSignal: options.abortSignal,
@@ -3128,7 +3201,7 @@ var OpenAIResponsesLanguageModel = class {
        controller.enqueue({ type: "stream-start", warnings });
      },
      transform(chunk, controller) {
-        var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u;
+        var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w;
        if (options.includeRawChunks) {
          controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
        }
@@ -3170,14 +3243,20 @@ var OpenAIResponsesLanguageModel = class {
              toolName: "computer_use"
            });
          } else if (value.item.type === "file_search_call") {
-
+            controller.enqueue({
+              type: "tool-call",
+              toolCallId: value.item.id,
              toolName: "file_search",
-
-
+              input: "{}",
+              providerExecuted: true
+            });
+          } else if (value.item.type === "image_generation_call") {
            controller.enqueue({
-              type: "tool-
-
-              toolName: "
+              type: "tool-call",
+              toolCallId: value.item.id,
+              toolName: "image_generation",
+              input: "{}",
+              providerExecuted: true
            });
          } else if (value.item.type === "message") {
            controller.enqueue({
@@ -3269,26 +3348,19 @@ var OpenAIResponsesLanguageModel = class {
            });
          } else if (value.item.type === "file_search_call") {
            ongoingToolCalls[value.output_index] = void 0;
-            controller.enqueue({
-              type: "tool-input-end",
-              id: value.item.id
-            });
-            controller.enqueue({
-              type: "tool-call",
-              toolCallId: value.item.id,
-              toolName: "file_search",
-              input: "",
-              providerExecuted: true
-            });
            controller.enqueue({
              type: "tool-result",
              toolCallId: value.item.id,
              toolName: "file_search",
              result: {
-
-
-
-
+                queries: value.item.queries,
+                results: (_c = (_b = value.item.results) == null ? void 0 : _b.map((result) => ({
+                  attributes: result.attributes,
+                  fileId: result.file_id,
+                  filename: result.filename,
+                  score: result.score,
+                  text: result.text
+                }))) != null ? _c : null
              },
              providerExecuted: true
            });
@@ -3312,6 +3384,16 @@ var OpenAIResponsesLanguageModel = class {
              },
              providerExecuted: true
            });
+          } else if (value.item.type === "image_generation_call") {
+            controller.enqueue({
+              type: "tool-result",
+              toolCallId: value.item.id,
+              toolName: "image_generation",
+              result: {
+                result: value.item.result
+              },
+              providerExecuted: true
+            });
          } else if (value.item.type === "message") {
            controller.enqueue({
              type: "text-end",
@@ -3326,7 +3408,7 @@ var OpenAIResponsesLanguageModel = class {
              providerMetadata: {
                openai: {
                  itemId: value.item.id,
-                  reasoningEncryptedContent: (
+                  reasoningEncryptedContent: (_d = value.item.encrypted_content) != null ? _d : null
                }
              }
            });
@@ -3356,12 +3438,12 @@ var OpenAIResponsesLanguageModel = class {
              id: value.item_id,
              delta: value.delta
            });
-            if (((
+            if (((_f = (_e = options.providerOptions) == null ? void 0 : _e.openai) == null ? void 0 : _f.logprobs) && value.logprobs) {
              logprobs.push(value.logprobs);
            }
          } else if (isResponseReasoningSummaryPartAddedChunk(value)) {
            if (value.summary_index > 0) {
-              (
+              (_g = activeReasoning[value.item_id]) == null ? void 0 : _g.summaryParts.push(
                value.summary_index
              );
              controller.enqueue({
@@ -3370,7 +3452,7 @@ var OpenAIResponsesLanguageModel = class {
                providerMetadata: {
                  openai: {
                    itemId: value.item_id,
-                    reasoningEncryptedContent: (
+                    reasoningEncryptedContent: (_i = (_h = activeReasoning[value.item_id]) == null ? void 0 : _h.encryptedContent) != null ? _i : null
                  }
                }
              });
@@ -3388,14 +3470,14 @@ var OpenAIResponsesLanguageModel = class {
            });
          } else if (isResponseFinishedChunk(value)) {
            finishReason = mapOpenAIResponseFinishReason({
-              finishReason: (
+              finishReason: (_j = value.response.incomplete_details) == null ? void 0 : _j.reason,
              hasFunctionCall
            });
            usage.inputTokens = value.response.usage.input_tokens;
            usage.outputTokens = value.response.usage.output_tokens;
            usage.totalTokens = value.response.usage.input_tokens + value.response.usage.output_tokens;
-            usage.reasoningTokens = (
-            usage.cachedInputTokens = (
+            usage.reasoningTokens = (_l = (_k = value.response.usage.output_tokens_details) == null ? void 0 : _k.reasoning_tokens) != null ? _l : void 0;
+            usage.cachedInputTokens = (_n = (_m = value.response.usage.input_tokens_details) == null ? void 0 : _m.cached_tokens) != null ? _n : void 0;
            if (typeof value.response.service_tier === "string") {
              serviceTier = value.response.service_tier;
            }
@@ -3404,7 +3486,7 @@ var OpenAIResponsesLanguageModel = class {
              controller.enqueue({
                type: "source",
                sourceType: "url",
-                id: (
+                id: (_q = (_p = (_o = self.config).generateId) == null ? void 0 : _p.call(_o)) != null ? _q : (0, import_provider_utils15.generateId)(),
                url: value.annotation.url,
                title: value.annotation.title
              });
@@ -3412,10 +3494,10 @@ var OpenAIResponsesLanguageModel = class {
              controller.enqueue({
                type: "source",
                sourceType: "document",
-                id: (
+                id: (_t = (_s = (_r = self.config).generateId) == null ? void 0 : _s.call(_r)) != null ? _t : (0, import_provider_utils15.generateId)(),
                mediaType: "text/plain",
-                title: (
-                filename: (
+                title: (_v = (_u = value.annotation.quote) != null ? _u : value.annotation.filename) != null ? _v : "Document",
+                filename: (_w = value.annotation.filename) != null ? _w : value.annotation.file_id
              });
            }
          } else if (isErrorChunk(value)) {
@@ -3448,177 +3530,155 @@ var OpenAIResponsesLanguageModel = class {
    };
  }
};
-var usageSchema2 =
-  input_tokens:
-  input_tokens_details:
-  output_tokens:
-  output_tokens_details:
+var usageSchema2 = import_v418.z.object({
+  input_tokens: import_v418.z.number(),
+  input_tokens_details: import_v418.z.object({ cached_tokens: import_v418.z.number().nullish() }).nullish(),
+  output_tokens: import_v418.z.number(),
+  output_tokens_details: import_v418.z.object({ reasoning_tokens: import_v418.z.number().nullish() }).nullish()
});
-var textDeltaChunkSchema =
-  type:
-  item_id:
-  delta:
+var textDeltaChunkSchema = import_v418.z.object({
+  type: import_v418.z.literal("response.output_text.delta"),
+  item_id: import_v418.z.string(),
+  delta: import_v418.z.string(),
  logprobs: LOGPROBS_SCHEMA.nullish()
});
-var errorChunkSchema =
-  type:
-  code:
-  message:
-  param:
-  sequence_number:
+var errorChunkSchema = import_v418.z.object({
+  type: import_v418.z.literal("error"),
+  code: import_v418.z.string(),
+  message: import_v418.z.string(),
+  param: import_v418.z.string().nullish(),
+  sequence_number: import_v418.z.number()
});
-var responseFinishedChunkSchema =
-  type:
-  response:
-    incomplete_details:
+var responseFinishedChunkSchema = import_v418.z.object({
+  type: import_v418.z.enum(["response.completed", "response.incomplete"]),
+  response: import_v418.z.object({
+    incomplete_details: import_v418.z.object({ reason: import_v418.z.string() }).nullish(),
    usage: usageSchema2,
-    service_tier:
+    service_tier: import_v418.z.string().nullish()
  })
});
-var responseCreatedChunkSchema =
-  type:
-  response:
-    id:
-    created_at:
-    model:
-    service_tier:
+var responseCreatedChunkSchema = import_v418.z.object({
+  type: import_v418.z.literal("response.created"),
+  response: import_v418.z.object({
+    id: import_v418.z.string(),
+    created_at: import_v418.z.number(),
+    model: import_v418.z.string(),
+    service_tier: import_v418.z.string().nullish()
  })
});
-var responseOutputItemAddedSchema =
-  type:
-  output_index:
-  item:
-
-      type:
-      id:
+var responseOutputItemAddedSchema = import_v418.z.object({
+  type: import_v418.z.literal("response.output_item.added"),
+  output_index: import_v418.z.number(),
+  item: import_v418.z.discriminatedUnion("type", [
+    import_v418.z.object({
+      type: import_v418.z.literal("message"),
+      id: import_v418.z.string()
    }),
-
-      type:
-      id:
-      encrypted_content:
+    import_v418.z.object({
+      type: import_v418.z.literal("reasoning"),
+      id: import_v418.z.string(),
+      encrypted_content: import_v418.z.string().nullish()
    }),
-
-      type:
-      id:
-      call_id:
-      name:
-      arguments:
+    import_v418.z.object({
+      type: import_v418.z.literal("function_call"),
+      id: import_v418.z.string(),
+      call_id: import_v418.z.string(),
+      name: import_v418.z.string(),
+      arguments: import_v418.z.string()
    }),
-
-      type:
-      id:
-      status:
-      action:
-        type:
-        query:
+    import_v418.z.object({
+      type: import_v418.z.literal("web_search_call"),
+      id: import_v418.z.string(),
+      status: import_v418.z.string(),
+      action: import_v418.z.object({
+        type: import_v418.z.literal("search"),
+        query: import_v418.z.string().optional()
      }).nullish()
    }),
-
-      type:
-      id:
-      status:
+    import_v418.z.object({
+      type: import_v418.z.literal("computer_call"),
+      id: import_v418.z.string(),
+      status: import_v418.z.string()
    }),
-
-      type:
-      id:
-
-
-
-
-      attributes: import_v417.z.object({
-        file_id: import_v417.z.string(),
-        filename: import_v417.z.string(),
-        score: import_v417.z.number(),
-        text: import_v417.z.string()
-      })
-      })
-      ).optional()
+    import_v418.z.object({
+      type: import_v418.z.literal("file_search_call"),
+      id: import_v418.z.string()
+    }),
+    import_v418.z.object({
+      type: import_v418.z.literal("image_generation_call"),
+      id: import_v418.z.string()
    })
  ])
});
-var responseOutputItemDoneSchema =
-  type:
-  output_index:
-  item:
-
-      type:
-      id:
+var responseOutputItemDoneSchema = import_v418.z.object({
+  type: import_v418.z.literal("response.output_item.done"),
+  output_index: import_v418.z.number(),
+  item: import_v418.z.discriminatedUnion("type", [
+    import_v418.z.object({
+      type: import_v418.z.literal("message"),
+      id: import_v418.z.string()
    }),
-
-      type:
-      id:
-      encrypted_content:
+    import_v418.z.object({
+      type: import_v418.z.literal("reasoning"),
+      id: import_v418.z.string(),
+      encrypted_content: import_v418.z.string().nullish()
    }),
-
-      type:
-      id:
-      call_id:
-      name:
-      arguments:
-      status:
+    import_v418.z.object({
+      type: import_v418.z.literal("function_call"),
+      id: import_v418.z.string(),
+      call_id: import_v418.z.string(),
+      name: import_v418.z.string(),
+      arguments: import_v418.z.string(),
+      status: import_v418.z.literal("completed")
    }),
    codeInterpreterCallItem,
+    imageGenerationCallItem,
    webSearchCallItem,
-
-
-
-
-
-    import_v417.z.object({
-      type: import_v417.z.literal("file_search_call"),
-      id: import_v417.z.string(),
-      status: import_v417.z.literal("completed"),
-      queries: import_v417.z.array(import_v417.z.string()).nullish(),
-      results: import_v417.z.array(
-        import_v417.z.object({
-          attributes: import_v417.z.object({
-            file_id: import_v417.z.string(),
-            filename: import_v417.z.string(),
-            score: import_v417.z.number(),
-            text: import_v417.z.string()
-          })
-        })
-      ).nullish()
+    fileSearchCallItem,
+    import_v418.z.object({
+      type: import_v418.z.literal("computer_call"),
+      id: import_v418.z.string(),
+      status: import_v418.z.literal("completed")
    })
  ])
});
-var responseFunctionCallArgumentsDeltaSchema =
-  type:
-  item_id:
-  output_index:
-  delta:
+var responseFunctionCallArgumentsDeltaSchema = import_v418.z.object({
+  type: import_v418.z.literal("response.function_call_arguments.delta"),
+  item_id: import_v418.z.string(),
+  output_index: import_v418.z.number(),
+  delta: import_v418.z.string()
});
-var responseAnnotationAddedSchema =
-  type:
-  annotation:
-
-      type:
-      url:
-      title:
+var responseAnnotationAddedSchema = import_v418.z.object({
+  type: import_v418.z.literal("response.output_text.annotation.added"),
+  annotation: import_v418.z.discriminatedUnion("type", [
+    import_v418.z.object({
+      type: import_v418.z.literal("url_citation"),
+      url: import_v418.z.string(),
+      title: import_v418.z.string()
    }),
-
-      type:
-      file_id:
-      filename:
-      index:
-      start_index:
-      end_index:
-      quote:
+    import_v418.z.object({
+      type: import_v418.z.literal("file_citation"),
+      file_id: import_v418.z.string(),
+      filename: import_v418.z.string().nullish(),
+      index: import_v418.z.number().nullish(),
+      start_index: import_v418.z.number().nullish(),
+      end_index: import_v418.z.number().nullish(),
+      quote: import_v418.z.string().nullish()
    })
  ])
});
-var responseReasoningSummaryPartAddedSchema =
-  type:
-  item_id:
-  summary_index:
+var responseReasoningSummaryPartAddedSchema = import_v418.z.object({
+  type: import_v418.z.literal("response.reasoning_summary_part.added"),
+  item_id: import_v418.z.string(),
+  summary_index: import_v418.z.number()
});
-var responseReasoningSummaryTextDeltaSchema =
-  type:
-  item_id:
-  summary_index:
-  delta:
+var responseReasoningSummaryTextDeltaSchema = import_v418.z.object({
+  type: import_v418.z.literal("response.reasoning_summary_text.delta"),
+  item_id: import_v418.z.string(),
+  summary_index: import_v418.z.number(),
+  delta: import_v418.z.string()
});
-var openaiResponsesChunkSchema =
+var openaiResponsesChunkSchema = import_v418.z.union([
  textDeltaChunkSchema,
  responseFinishedChunkSchema,
  responseCreatedChunkSchema,
@@ -3629,7 +3689,7 @@ var openaiResponsesChunkSchema = import_v417.z.union([
  responseReasoningSummaryPartAddedSchema,
  responseReasoningSummaryTextDeltaSchema,
  errorChunkSchema,
-
+  import_v418.z.object({ type: import_v418.z.string() }).loose()
  // fallback for unknown chunks
]);
function isTextDeltaChunk(chunk) {
@@ -3702,27 +3762,15 @@ function getResponsesModelConfig(modelId) {
    isReasoningModel: false
  };
}
-var openaiResponsesProviderOptionsSchema =
-
-
-  previousResponseId: import_v417.z.string().nullish(),
-  store: import_v417.z.boolean().nullish(),
-  user: import_v417.z.string().nullish(),
-  reasoningEffort: import_v417.z.string().nullish(),
-  strictJsonSchema: import_v417.z.boolean().nullish(),
-  instructions: import_v417.z.string().nullish(),
-  reasoningSummary: import_v417.z.string().nullish(),
-  serviceTier: import_v417.z.enum(["auto", "flex", "priority"]).nullish(),
-  include: import_v417.z.array(
-    import_v417.z.enum([
+var openaiResponsesProviderOptionsSchema = import_v418.z.object({
+  include: import_v418.z.array(
+    import_v418.z.enum([
      "reasoning.encrypted_content",
      "file_search_call.results",
      "message.output_text.logprobs"
    ])
  ).nullish(),
-
-  promptCacheKey: import_v417.z.string().nullish(),
-  safetyIdentifier: import_v417.z.string().nullish(),
+  instructions: import_v418.z.string().nullish(),
  /**
   * Return the log probabilities of the tokens.
   *
@@ -3735,7 +3783,25 @@ var openaiResponsesProviderOptionsSchema = import_v417.z.object({
   * @see https://platform.openai.com/docs/api-reference/responses/create
   * @see https://cookbook.openai.com/examples/using_logprobs
   */
-  logprobs:
+  logprobs: import_v418.z.union([import_v418.z.boolean(), import_v418.z.number().min(1).max(TOP_LOGPROBS_MAX)]).optional(),
+  /**
+   * The maximum number of total calls to built-in tools that can be processed in a response.
+   * This maximum number applies across all built-in tool calls, not per individual tool.
+   * Any further attempts to call a tool by the model will be ignored.
+   */
+  maxToolCalls: import_v418.z.number().nullish(),
+  metadata: import_v418.z.any().nullish(),
+  parallelToolCalls: import_v418.z.boolean().nullish(),
+  previousResponseId: import_v418.z.string().nullish(),
+  promptCacheKey: import_v418.z.string().nullish(),
+  reasoningEffort: import_v418.z.string().nullish(),
+  reasoningSummary: import_v418.z.string().nullish(),
+  safetyIdentifier: import_v418.z.string().nullish(),
+  serviceTier: import_v418.z.enum(["auto", "flex", "priority"]).nullish(),
+  store: import_v418.z.boolean().nullish(),
+  strictJsonSchema: import_v418.z.boolean().nullish(),
+  textVerbosity: import_v418.z.enum(["low", "medium", "high"]).nullish(),
+  user: import_v418.z.string().nullish()
});
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {