@ai-sdk/openai 2.0.31 → 2.0.33
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +13 -0
- package/dist/index.d.mts +44 -12
- package/dist/index.d.ts +44 -12
- package/dist/index.js +389 -426
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +355 -392
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +421 -458
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +373 -410
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
@@ -30,8 +30,8 @@ var import_provider_utils16 = require("@ai-sdk/provider-utils");
 
 // src/chat/openai-chat-language-model.ts
 var import_provider3 = require("@ai-sdk/provider");
-var
-var
+var import_provider_utils3 = require("@ai-sdk/provider-utils");
+var import_v43 = require("zod/v4");
 
 // src/openai-error.ts
 var import_v4 = require("zod/v4");
@@ -352,98 +352,6 @@ var openaiProviderOptions = import_v42.z.object({
 
 // src/chat/openai-chat-prepare-tools.ts
 var import_provider2 = require("@ai-sdk/provider");
-
-// src/tool/file-search.ts
-var import_provider_utils3 = require("@ai-sdk/provider-utils");
-var import_v43 = require("zod/v4");
-var comparisonFilterSchema = import_v43.z.object({
-  key: import_v43.z.string(),
-  type: import_v43.z.enum(["eq", "ne", "gt", "gte", "lt", "lte"]),
-  value: import_v43.z.union([import_v43.z.string(), import_v43.z.number(), import_v43.z.boolean()])
-});
-var compoundFilterSchema = import_v43.z.object({
-  type: import_v43.z.enum(["and", "or"]),
-  filters: import_v43.z.array(
-    import_v43.z.union([comparisonFilterSchema, import_v43.z.lazy(() => compoundFilterSchema)])
-  )
-});
-var filtersSchema = import_v43.z.union([comparisonFilterSchema, compoundFilterSchema]);
-var fileSearchArgsSchema = import_v43.z.object({
-  vectorStoreIds: import_v43.z.array(import_v43.z.string()).optional(),
-  maxNumResults: import_v43.z.number().optional(),
-  ranking: import_v43.z.object({
-    ranker: import_v43.z.enum(["auto", "default-2024-08-21"]).optional()
-  }).optional(),
-  filters: filtersSchema.optional()
-});
-var fileSearch = (0, import_provider_utils3.createProviderDefinedToolFactory)({
-  id: "openai.file_search",
-  name: "file_search",
-  inputSchema: import_v43.z.object({
-    query: import_v43.z.string()
-  })
-});
-
-// src/tool/web-search-preview.ts
-var import_provider_utils4 = require("@ai-sdk/provider-utils");
-var import_v44 = require("zod/v4");
-var webSearchPreviewArgsSchema = import_v44.z.object({
-  /**
-   * Search context size to use for the web search.
-   * - high: Most comprehensive context, highest cost, slower response
-   * - medium: Balanced context, cost, and latency (default)
-   * - low: Least context, lowest cost, fastest response
-   */
-  searchContextSize: import_v44.z.enum(["low", "medium", "high"]).optional(),
-  /**
-   * User location information to provide geographically relevant search results.
-   */
-  userLocation: import_v44.z.object({
-    /**
-     * Type of location (always 'approximate')
-     */
-    type: import_v44.z.literal("approximate"),
-    /**
-     * Two-letter ISO country code (e.g., 'US', 'GB')
-     */
-    country: import_v44.z.string().optional(),
-    /**
-     * City name (free text, e.g., 'Minneapolis')
-     */
-    city: import_v44.z.string().optional(),
-    /**
-     * Region name (free text, e.g., 'Minnesota')
-     */
-    region: import_v44.z.string().optional(),
-    /**
-     * IANA timezone (e.g., 'America/Chicago')
-     */
-    timezone: import_v44.z.string().optional()
-  }).optional()
-});
-var webSearchPreview = (0, import_provider_utils4.createProviderDefinedToolFactory)({
-  id: "openai.web_search_preview",
-  name: "web_search_preview",
-  inputSchema: import_v44.z.object({
-    action: import_v44.z.discriminatedUnion("type", [
-      import_v44.z.object({
-        type: import_v44.z.literal("search"),
-        query: import_v44.z.string().nullish()
-      }),
-      import_v44.z.object({
-        type: import_v44.z.literal("open_page"),
-        url: import_v44.z.string()
-      }),
-      import_v44.z.object({
-        type: import_v44.z.literal("find"),
-        url: import_v44.z.string(),
-        pattern: import_v44.z.string()
-      })
-    ]).nullish()
-  })
-});
-
-// src/chat/openai-chat-prepare-tools.ts
 function prepareChatTools({
   tools,
   toolChoice,
@@ -469,33 +377,6 @@ function prepareChatTools({
           }
         });
         break;
-      case "provider-defined":
-        switch (tool.id) {
-          case "openai.file_search": {
-            const args = fileSearchArgsSchema.parse(tool.args);
-            openaiTools2.push({
-              type: "file_search",
-              vector_store_ids: args.vectorStoreIds,
-              max_num_results: args.maxNumResults,
-              ranking_options: args.ranking ? { ranker: args.ranking.ranker } : void 0,
-              filters: args.filters
-            });
-            break;
-          }
-          case "openai.web_search_preview": {
-            const args = webSearchPreviewArgsSchema.parse(tool.args);
-            openaiTools2.push({
-              type: "web_search_preview",
-              search_context_size: args.searchContextSize,
-              user_location: args.userLocation
-            });
-            break;
-          }
-          default:
-            toolWarnings.push({ type: "unsupported-tool", tool });
-            break;
-        }
-        break;
       default:
         toolWarnings.push({ type: "unsupported-tool", tool });
         break;
@@ -560,7 +441,7 @@ var OpenAIChatLanguageModel = class {
   }) {
     var _a, _b, _c, _d;
     const warnings = [];
-    const openaiOptions = (_a = await (0,
+    const openaiOptions = (_a = await (0, import_provider_utils3.parseProviderOptions)({
       provider: "openai",
       providerOptions,
       schema: openaiProviderOptions
@@ -739,15 +620,15 @@ var OpenAIChatLanguageModel = class {
       responseHeaders,
       value: response,
       rawValue: rawResponse
-    } = await (0,
+    } = await (0, import_provider_utils3.postJsonToApi)({
       url: this.config.url({
         path: "/chat/completions",
         modelId: this.modelId
       }),
-      headers: (0,
+      headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
       body,
       failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
         openaiChatResponseSchema
       ),
       abortSignal: options.abortSignal,
@@ -762,7 +643,7 @@ var OpenAIChatLanguageModel = class {
     for (const toolCall of (_a = choice.message.tool_calls) != null ? _a : []) {
       content.push({
         type: "tool-call",
-        toolCallId: (_b = toolCall.id) != null ? _b : (0,
+        toolCallId: (_b = toolCall.id) != null ? _b : (0, import_provider_utils3.generateId)(),
         toolName: toolCall.function.name,
         input: toolCall.function.arguments
       });
@@ -771,7 +652,7 @@ var OpenAIChatLanguageModel = class {
       content.push({
         type: "source",
         sourceType: "url",
-        id: (0,
+        id: (0, import_provider_utils3.generateId)(),
         url: annotation.url,
         title: annotation.title
       });
@@ -817,15 +698,15 @@ var OpenAIChatLanguageModel = class {
         include_usage: true
       }
     };
-    const { responseHeaders, value: response } = await (0,
+    const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
       url: this.config.url({
         path: "/chat/completions",
         modelId: this.modelId
       }),
-      headers: (0,
+      headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
       body,
       failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(
         openaiChatChunkSchema
       ),
       abortSignal: options.abortSignal,
@@ -950,14 +831,14 @@ var OpenAIChatLanguageModel = class {
               delta: toolCall2.function.arguments
             });
           }
-          if ((0,
+          if ((0, import_provider_utils3.isParsableJson)(toolCall2.function.arguments)) {
             controller.enqueue({
               type: "tool-input-end",
               id: toolCall2.id
             });
             controller.enqueue({
               type: "tool-call",
-              toolCallId: (_q = toolCall2.id) != null ? _q : (0,
+              toolCallId: (_q = toolCall2.id) != null ? _q : (0, import_provider_utils3.generateId)(),
               toolName: toolCall2.function.name,
               input: toolCall2.function.arguments
             });
@@ -978,14 +859,14 @@ var OpenAIChatLanguageModel = class {
             id: toolCall.id,
             delta: (_u = toolCallDelta.function.arguments) != null ? _u : ""
           });
-          if (((_v = toolCall.function) == null ? void 0 : _v.name) != null && ((_w = toolCall.function) == null ? void 0 : _w.arguments) != null && (0,
+          if (((_v = toolCall.function) == null ? void 0 : _v.name) != null && ((_w = toolCall.function) == null ? void 0 : _w.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
             controller.enqueue({
               type: "tool-input-end",
               id: toolCall.id
             });
             controller.enqueue({
               type: "tool-call",
-              toolCallId: (_x = toolCall.id) != null ? _x : (0,
+              toolCallId: (_x = toolCall.id) != null ? _x : (0, import_provider_utils3.generateId)(),
               toolName: toolCall.function.name,
               input: toolCall.function.arguments
             });
@@ -998,7 +879,7 @@ var OpenAIChatLanguageModel = class {
           controller.enqueue({
             type: "source",
             sourceType: "url",
-            id: (0,
+            id: (0, import_provider_utils3.generateId)(),
             url: annotation.url,
             title: annotation.title
           });
@@ -1023,115 +904,115 @@ var OpenAIChatLanguageModel = class {
     };
   }
 };
-var openaiTokenUsageSchema =
-  prompt_tokens:
-  completion_tokens:
-  total_tokens:
-  prompt_tokens_details:
-    cached_tokens:
+var openaiTokenUsageSchema = import_v43.z.object({
+  prompt_tokens: import_v43.z.number().nullish(),
+  completion_tokens: import_v43.z.number().nullish(),
+  total_tokens: import_v43.z.number().nullish(),
+  prompt_tokens_details: import_v43.z.object({
+    cached_tokens: import_v43.z.number().nullish()
   }).nullish(),
-  completion_tokens_details:
-    reasoning_tokens:
-    accepted_prediction_tokens:
-    rejected_prediction_tokens:
+  completion_tokens_details: import_v43.z.object({
+    reasoning_tokens: import_v43.z.number().nullish(),
+    accepted_prediction_tokens: import_v43.z.number().nullish(),
+    rejected_prediction_tokens: import_v43.z.number().nullish()
   }).nullish()
 }).nullish();
-var openaiChatResponseSchema =
-  id:
-  created:
-  model:
-  choices:
-
-    message:
-      role:
-      content:
-      tool_calls:
-
-        id:
-        type:
-        function:
-          name:
-          arguments:
+var openaiChatResponseSchema = import_v43.z.object({
+  id: import_v43.z.string().nullish(),
+  created: import_v43.z.number().nullish(),
+  model: import_v43.z.string().nullish(),
+  choices: import_v43.z.array(
+    import_v43.z.object({
+      message: import_v43.z.object({
+        role: import_v43.z.literal("assistant").nullish(),
+        content: import_v43.z.string().nullish(),
+        tool_calls: import_v43.z.array(
+          import_v43.z.object({
+            id: import_v43.z.string().nullish(),
+            type: import_v43.z.literal("function"),
+            function: import_v43.z.object({
+              name: import_v43.z.string(),
+              arguments: import_v43.z.string()
             })
           })
         ).nullish(),
-        annotations:
-
-          type:
-          start_index:
-          end_index:
-          url:
-          title:
+        annotations: import_v43.z.array(
+          import_v43.z.object({
+            type: import_v43.z.literal("url_citation"),
+            start_index: import_v43.z.number(),
+            end_index: import_v43.z.number(),
+            url: import_v43.z.string(),
+            title: import_v43.z.string()
           })
         ).nullish()
       }),
-      index:
-      logprobs:
-        content:
-
-          token:
-          logprob:
-          top_logprobs:
-
-            token:
-            logprob:
+      index: import_v43.z.number(),
+      logprobs: import_v43.z.object({
+        content: import_v43.z.array(
+          import_v43.z.object({
+            token: import_v43.z.string(),
+            logprob: import_v43.z.number(),
+            top_logprobs: import_v43.z.array(
+              import_v43.z.object({
+                token: import_v43.z.string(),
+                logprob: import_v43.z.number()
               })
             )
           })
        ).nullish()
      }).nullish(),
-      finish_reason:
+      finish_reason: import_v43.z.string().nullish()
     })
   ),
   usage: openaiTokenUsageSchema
 });
-var openaiChatChunkSchema =
-
-  id:
-  created:
-  model:
-  choices:
-
-    delta:
-      role:
-      content:
-      tool_calls:
-
-        index:
-        id:
-        type:
-        function:
-          name:
-          arguments:
+var openaiChatChunkSchema = import_v43.z.union([
+  import_v43.z.object({
+    id: import_v43.z.string().nullish(),
+    created: import_v43.z.number().nullish(),
+    model: import_v43.z.string().nullish(),
+    choices: import_v43.z.array(
+      import_v43.z.object({
+        delta: import_v43.z.object({
+          role: import_v43.z.enum(["assistant"]).nullish(),
+          content: import_v43.z.string().nullish(),
+          tool_calls: import_v43.z.array(
+            import_v43.z.object({
+              index: import_v43.z.number(),
+              id: import_v43.z.string().nullish(),
+              type: import_v43.z.literal("function").nullish(),
+              function: import_v43.z.object({
+                name: import_v43.z.string().nullish(),
+                arguments: import_v43.z.string().nullish()
              })
            })
          ).nullish(),
-          annotations:
-
-            type:
-            start_index:
-            end_index:
-            url:
-            title:
+          annotations: import_v43.z.array(
+            import_v43.z.object({
+              type: import_v43.z.literal("url_citation"),
+              start_index: import_v43.z.number(),
+              end_index: import_v43.z.number(),
+              url: import_v43.z.string(),
+              title: import_v43.z.string()
            })
          ).nullish()
        }).nullish(),
-        logprobs:
-          content:
-
-            token:
-            logprob:
-            top_logprobs:
-
-              token:
-              logprob:
+        logprobs: import_v43.z.object({
+          content: import_v43.z.array(
+            import_v43.z.object({
+              token: import_v43.z.string(),
+              logprob: import_v43.z.number(),
+              top_logprobs: import_v43.z.array(
+                import_v43.z.object({
+                  token: import_v43.z.string(),
+                  logprob: import_v43.z.number()
                })
              )
            })
          ).nullish()
        }).nullish(),
-        finish_reason:
-        index:
+        finish_reason: import_v43.z.string().nullish(),
+        index: import_v43.z.number()
      })
    ),
    usage: openaiTokenUsageSchema
@@ -1188,8 +1069,8 @@ var reasoningModels = {
 };
 
 // src/completion/openai-completion-language-model.ts
-var
-var
+var import_provider_utils4 = require("@ai-sdk/provider-utils");
+var import_v45 = require("zod/v4");
 
 // src/completion/convert-to-openai-completion-prompt.ts
 var import_provider4 = require("@ai-sdk/provider");
@@ -1297,12 +1178,12 @@ function mapOpenAIFinishReason2(finishReason) {
 }
 
 // src/completion/openai-completion-options.ts
-var
-var openaiCompletionProviderOptions =
+var import_v44 = require("zod/v4");
+var openaiCompletionProviderOptions = import_v44.z.object({
   /**
   Echo back the prompt in addition to the completion.
   */
-  echo:
+  echo: import_v44.z.boolean().optional(),
   /**
   Modify the likelihood of specified tokens appearing in the completion.
 
@@ -1317,16 +1198,16 @@ var openaiCompletionProviderOptions = import_v46.z.object({
   As an example, you can pass {"50256": -100} to prevent the <|endoftext|>
   token from being generated.
   */
-  logitBias:
+  logitBias: import_v44.z.record(import_v44.z.string(), import_v44.z.number()).optional(),
   /**
   The suffix that comes after a completion of inserted text.
   */
-  suffix:
+  suffix: import_v44.z.string().optional(),
   /**
   A unique identifier representing your end-user, which can help OpenAI to
   monitor and detect abuse. Learn more.
   */
-  user:
+  user: import_v44.z.string().optional(),
   /**
   Return the log probabilities of the tokens. Including logprobs will increase
   the response size and can slow down response times. However, it can
@@ -1336,7 +1217,7 @@ var openaiCompletionProviderOptions = import_v46.z.object({
   Setting to a number will return the log probabilities of the top n
   tokens that were generated.
   */
-  logprobs:
+  logprobs: import_v44.z.union([import_v44.z.boolean(), import_v44.z.number()]).optional()
 });
 
 // src/completion/openai-completion-language-model.ts
@@ -1372,12 +1253,12 @@ var OpenAICompletionLanguageModel = class {
   }) {
     const warnings = [];
     const openaiOptions = {
-      ...await (0,
+      ...await (0, import_provider_utils4.parseProviderOptions)({
        provider: "openai",
        providerOptions,
        schema: openaiCompletionProviderOptions
      }),
-      ...await (0,
+      ...await (0, import_provider_utils4.parseProviderOptions)({
        provider: this.providerOptionsName,
        providerOptions,
        schema: openaiCompletionProviderOptions
@@ -1433,15 +1314,15 @@ var OpenAICompletionLanguageModel = class {
       responseHeaders,
       value: response,
       rawValue: rawResponse
-    } = await (0,
+    } = await (0, import_provider_utils4.postJsonToApi)({
       url: this.config.url({
         path: "/completions",
         modelId: this.modelId
       }),
-      headers: (0,
+      headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), options.headers),
       body: args,
       failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils4.createJsonResponseHandler)(
         openaiCompletionResponseSchema
       ),
       abortSignal: options.abortSignal,
@@ -1479,15 +1360,15 @@ var OpenAICompletionLanguageModel = class {
         include_usage: true
       }
     };
-    const { responseHeaders, value: response } = await (0,
+    const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
       url: this.config.url({
         path: "/completions",
         modelId: this.modelId
       }),
-      headers: (0,
+      headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), options.headers),
       body,
       failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils4.createEventSourceResponseHandler)(
         openaiCompletionChunkSchema
       ),
       abortSignal: options.abortSignal,
@@ -1568,42 +1449,42 @@ var OpenAICompletionLanguageModel = class {
     };
   }
 };
-var usageSchema =
-  prompt_tokens:
-  completion_tokens:
-  total_tokens:
+var usageSchema = import_v45.z.object({
+  prompt_tokens: import_v45.z.number(),
+  completion_tokens: import_v45.z.number(),
+  total_tokens: import_v45.z.number()
 });
-var openaiCompletionResponseSchema =
-  id:
-  created:
-  model:
-  choices:
-
-    text:
-    finish_reason:
-    logprobs:
-      tokens:
-      token_logprobs:
-      top_logprobs:
+var openaiCompletionResponseSchema = import_v45.z.object({
+  id: import_v45.z.string().nullish(),
+  created: import_v45.z.number().nullish(),
+  model: import_v45.z.string().nullish(),
+  choices: import_v45.z.array(
+    import_v45.z.object({
+      text: import_v45.z.string(),
+      finish_reason: import_v45.z.string(),
+      logprobs: import_v45.z.object({
+        tokens: import_v45.z.array(import_v45.z.string()),
+        token_logprobs: import_v45.z.array(import_v45.z.number()),
+        top_logprobs: import_v45.z.array(import_v45.z.record(import_v45.z.string(), import_v45.z.number())).nullish()
      }).nullish()
    })
  ),
  usage: usageSchema.nullish()
 });
-var openaiCompletionChunkSchema =
-
-  id:
-  created:
-  model:
-  choices:
-
-    text:
-    finish_reason:
-    index:
-    logprobs:
-      tokens:
-      token_logprobs:
-      top_logprobs:
+var openaiCompletionChunkSchema = import_v45.z.union([
+  import_v45.z.object({
+    id: import_v45.z.string().nullish(),
+    created: import_v45.z.number().nullish(),
+    model: import_v45.z.string().nullish(),
+    choices: import_v45.z.array(
+      import_v45.z.object({
+        text: import_v45.z.string(),
+        finish_reason: import_v45.z.string().nullish(),
+        index: import_v45.z.number(),
+        logprobs: import_v45.z.object({
+          tokens: import_v45.z.array(import_v45.z.string()),
+          token_logprobs: import_v45.z.array(import_v45.z.number()),
+          top_logprobs: import_v45.z.array(import_v45.z.record(import_v45.z.string(), import_v45.z.number())).nullish()
        }).nullish()
      })
    ),
@@ -1614,22 +1495,22 @@ var openaiCompletionChunkSchema = import_v47.z.union([
 
 // src/embedding/openai-embedding-model.ts
 var import_provider5 = require("@ai-sdk/provider");
-var
-var
+var import_provider_utils5 = require("@ai-sdk/provider-utils");
+var import_v47 = require("zod/v4");
 
 // src/embedding/openai-embedding-options.ts
-var
-var openaiEmbeddingProviderOptions =
+var import_v46 = require("zod/v4");
+var openaiEmbeddingProviderOptions = import_v46.z.object({
   /**
   The number of dimensions the resulting output embeddings should have.
   Only supported in text-embedding-3 and later models.
   */
-  dimensions:
+  dimensions: import_v46.z.number().optional(),
   /**
   A unique identifier representing your end-user, which can help OpenAI to
   monitor and detect abuse. Learn more.
   */
-  user:
+  user: import_v46.z.string().optional()
 });
 
 // src/embedding/openai-embedding-model.ts
@@ -1659,7 +1540,7 @@ var OpenAIEmbeddingModel = class {
       values
     });
   }
-    const openaiOptions = (_a = await (0,
+    const openaiOptions = (_a = await (0, import_provider_utils5.parseProviderOptions)({
       provider: "openai",
       providerOptions,
       schema: openaiEmbeddingProviderOptions
@@ -1668,12 +1549,12 @@ var OpenAIEmbeddingModel = class {
       responseHeaders,
       value: response,
       rawValue
-    } = await (0,
+    } = await (0, import_provider_utils5.postJsonToApi)({
       url: this.config.url({
         path: "/embeddings",
         modelId: this.modelId
       }),
-      headers: (0,
+      headers: (0, import_provider_utils5.combineHeaders)(this.config.headers(), headers),
       body: {
         model: this.modelId,
         input: values,
@@ -1682,7 +1563,7 @@ var OpenAIEmbeddingModel = class {
         user: openaiOptions.user
       },
       failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils5.createJsonResponseHandler)(
         openaiTextEmbeddingResponseSchema
       ),
       abortSignal,
@@ -1695,14 +1576,14 @@ var OpenAIEmbeddingModel = class {
     };
   }
 };
-var openaiTextEmbeddingResponseSchema =
-  data:
-  usage:
+var openaiTextEmbeddingResponseSchema = import_v47.z.object({
+  data: import_v47.z.array(import_v47.z.object({ embedding: import_v47.z.array(import_v47.z.number()) })),
+  usage: import_v47.z.object({ prompt_tokens: import_v47.z.number() }).nullish()
 });
 
 // src/image/openai-image-model.ts
-var
-var
+var import_provider_utils6 = require("@ai-sdk/provider-utils");
+var import_v48 = require("zod/v4");
 
 // src/image/openai-image-options.ts
 var modelMaxImagesPerCall = {
@@ -1749,12 +1630,12 @@ var OpenAIImageModel = class {
       warnings.push({ type: "unsupported-setting", setting: "seed" });
     }
     const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
-    const { value: response, responseHeaders } = await (0,
+    const { value: response, responseHeaders } = await (0, import_provider_utils6.postJsonToApi)({
       url: this.config.url({
         path: "/images/generations",
         modelId: this.modelId
       }),
-      headers: (0,
+      headers: (0, import_provider_utils6.combineHeaders)(this.config.headers(), headers),
       body: {
         model: this.modelId,
         prompt,
@@ -1764,7 +1645,7 @@ var OpenAIImageModel = class {
         ...!hasDefaultResponseFormat.has(this.modelId) ? { response_format: "b64_json" } : {}
       },
       failedResponseHandler: openaiFailedResponseHandler,
-      successfulResponseHandler: (0,
+      successfulResponseHandler: (0, import_provider_utils6.createJsonResponseHandler)(
         openaiImageResponseSchema
       ),
       abortSignal,
@@ -1790,36 +1671,36 @@ var OpenAIImageModel = class {
     };
   }
 };
-var openaiImageResponseSchema =
-  data:
-
+var openaiImageResponseSchema = import_v48.z.object({
+  data: import_v48.z.array(
+    import_v48.z.object({ b64_json: import_v48.z.string(), revised_prompt: import_v48.z.string().optional() })
   )
 });
 
 // src/tool/code-interpreter.ts
-var
-var
-var codeInterpreterInputSchema =
-  code:
-  containerId:
+var import_provider_utils7 = require("@ai-sdk/provider-utils");
+var import_v49 = require("zod/v4");
+var codeInterpreterInputSchema = import_v49.z.object({
+  code: import_v49.z.string().nullish(),
+  containerId: import_v49.z.string()
 });
-var codeInterpreterOutputSchema =
-  outputs:
-
-
-
+var codeInterpreterOutputSchema = import_v49.z.object({
+  outputs: import_v49.z.array(
+    import_v49.z.discriminatedUnion("type", [
+      import_v49.z.object({ type: import_v49.z.literal("logs"), logs: import_v49.z.string() }),
+      import_v49.z.object({ type: import_v49.z.literal("image"), url: import_v49.z.string() })
    ])
  ).nullish()
 });
-var codeInterpreterArgsSchema =
-  container:
-
-
-    fileIds:
+var codeInterpreterArgsSchema = import_v49.z.object({
+  container: import_v49.z.union([
+    import_v49.z.string(),
+    import_v49.z.object({
+      fileIds: import_v49.z.array(import_v49.z.string()).optional()
    })
  ]).optional()
 });
-var codeInterpreterToolFactory = (0,
+var codeInterpreterToolFactory = (0, import_provider_utils7.createProviderDefinedToolFactoryWithOutputSchema)({
   id: "openai.code_interpreter",
   name: "code_interpreter",
   inputSchema: codeInterpreterInputSchema,
@@ -1829,30 +1710,72 @@ var codeInterpreter = (args = {}) => {
   return codeInterpreterToolFactory(args);
 };
 
+// src/tool/file-search.ts
+var import_provider_utils8 = require("@ai-sdk/provider-utils");
+var import_v410 = require("zod/v4");
+var comparisonFilterSchema = import_v410.z.object({
+  key: import_v410.z.string(),
+  type: import_v410.z.enum(["eq", "ne", "gt", "gte", "lt", "lte"]),
+  value: import_v410.z.union([import_v410.z.string(), import_v410.z.number(), import_v410.z.boolean()])
+});
+var compoundFilterSchema = import_v410.z.object({
+  type: import_v410.z.enum(["and", "or"]),
+  filters: import_v410.z.array(
+    import_v410.z.union([comparisonFilterSchema, import_v410.z.lazy(() => compoundFilterSchema)])
+  )
+});
+var fileSearchArgsSchema = import_v410.z.object({
+  vectorStoreIds: import_v410.z.array(import_v410.z.string()),
+  maxNumResults: import_v410.z.number().optional(),
+  ranking: import_v410.z.object({
+    ranker: import_v410.z.string().optional(),
+    scoreThreshold: import_v410.z.number().optional()
+  }).optional(),
+  filters: import_v410.z.union([comparisonFilterSchema, compoundFilterSchema]).optional()
+});
+var fileSearchOutputSchema = import_v410.z.object({
+  queries: import_v410.z.array(import_v410.z.string()),
+  results: import_v410.z.array(
+    import_v410.z.object({
+      attributes: import_v410.z.record(import_v410.z.string(), import_v410.z.unknown()),
+      fileId: import_v410.z.string(),
+      filename: import_v410.z.string(),
+      score: import_v410.z.number(),
+      text: import_v410.z.string()
+    })
+  ).nullable()
+});
+var fileSearch = (0, import_provider_utils8.createProviderDefinedToolFactoryWithOutputSchema)({
+  id: "openai.file_search",
+  name: "file_search",
+  inputSchema: import_v410.z.object({}),
+  outputSchema: fileSearchOutputSchema
+});
+
 // src/tool/image-generation.ts
-var
-var
-var imageGenerationArgsSchema =
-  background:
-  inputFidelity:
-  inputImageMask:
-    fileId:
-    imageUrl:
+var import_provider_utils9 = require("@ai-sdk/provider-utils");
+var import_v411 = require("zod/v4");
+var imageGenerationArgsSchema = import_v411.z.object({
+  background: import_v411.z.enum(["auto", "opaque", "transparent"]).optional(),
+  inputFidelity: import_v411.z.enum(["low", "high"]).optional(),
+  inputImageMask: import_v411.z.object({
+    fileId: import_v411.z.string().optional(),
+    imageUrl: import_v411.z.string().optional()
  }).optional(),
-  model:
-  moderation:
-  outputCompression:
-  outputFormat:
-  quality:
-  size:
+  model: import_v411.z.string().optional(),
+  moderation: import_v411.z.enum(["auto"]).optional(),
+  outputCompression: import_v411.z.number().int().min(0).max(100).optional(),
+  outputFormat: import_v411.z.enum(["png", "jpeg", "webp"]).optional(),
+  quality: import_v411.z.enum(["auto", "low", "medium", "high"]).optional(),
+  size: import_v411.z.enum(["1024x1024", "1024x1536", "1536x1024", "auto"]).optional()
 }).strict();
-var imageGenerationOutputSchema =
-  result:
+var imageGenerationOutputSchema = import_v411.z.object({
+  result: import_v411.z.string()
 });
-var imageGenerationToolFactory = (0,
+var imageGenerationToolFactory = (0, import_provider_utils9.createProviderDefinedToolFactoryWithOutputSchema)({
   id: "openai.image_generation",
   name: "image_generation",
-  inputSchema:
+  inputSchema: import_v411.z.object({}),
   outputSchema: imageGenerationOutputSchema
 });
 var imageGeneration = (args = {}) => {
@@ -1860,24 +1783,86 @@ var imageGeneration = (args = {}) => {
 };
 
 // src/tool/web-search.ts
+var import_provider_utils10 = require("@ai-sdk/provider-utils");
+var import_v412 = require("zod/v4");
+var webSearchArgsSchema = import_v412.z.object({
+  filters: import_v412.z.object({
+    allowedDomains: import_v412.z.array(import_v412.z.string()).optional()
+  }).optional(),
+  searchContextSize: import_v412.z.enum(["low", "medium", "high"]).optional(),
+  userLocation: import_v412.z.object({
+    type: import_v412.z.literal("approximate"),
+    country: import_v412.z.string().optional(),
+    city: import_v412.z.string().optional(),
+    region: import_v412.z.string().optional(),
+    timezone: import_v412.z.string().optional()
+  }).optional()
+});
+var webSearchToolFactory = (0, import_provider_utils10.createProviderDefinedToolFactory)({
+  id: "openai.web_search",
+  name: "web_search",
+  inputSchema: import_v412.z.object({
+    action: import_v412.z.discriminatedUnion("type", [
+      import_v412.z.object({
+        type: import_v412.z.literal("search"),
+        query: import_v412.z.string().nullish()
+      }),
+      import_v412.z.object({
+        type: import_v412.z.literal("open_page"),
+        url: import_v412.z.string()
+      }),
+      import_v412.z.object({
+        type: import_v412.z.literal("find"),
+        url: import_v412.z.string(),
+        pattern: import_v412.z.string()
+      })
+    ]).nullish()
+  })
+});
+var webSearch = (args = {}) => {
+  return webSearchToolFactory(args);
+};
+
+// src/tool/web-search-preview.ts
 var import_provider_utils11 = require("@ai-sdk/provider-utils");
 var import_v413 = require("zod/v4");
-var
-
-
-
+var webSearchPreviewArgsSchema = import_v413.z.object({
+  /**
+   * Search context size to use for the web search.
+   * - high: Most comprehensive context, highest cost, slower response
+   * - medium: Balanced context, cost, and latency (default)
+   * - low: Least context, lowest cost, fastest response
+   */
   searchContextSize: import_v413.z.enum(["low", "medium", "high"]).optional(),
+  /**
+   * User location information to provide geographically relevant search results.
+   */
   userLocation: import_v413.z.object({
+    /**
+     * Type of location (always 'approximate')
+     */
     type: import_v413.z.literal("approximate"),
+    /**
+     * Two-letter ISO country code (e.g., 'US', 'GB')
+     */
     country: import_v413.z.string().optional(),
+    /**
+     * City name (free text, e.g., 'Minneapolis')
+     */
     city: import_v413.z.string().optional(),
+    /**
+     * Region name (free text, e.g., 'Minnesota')
+     */
     region: import_v413.z.string().optional(),
+    /**
+     * IANA timezone (e.g., 'America/Chicago')
+     */
     timezone: import_v413.z.string().optional()
   }).optional()
 });
-var
-  id: "openai.
-  name: "
+var webSearchPreview = (0, import_provider_utils11.createProviderDefinedToolFactory)({
+  id: "openai.web_search_preview",
+  name: "web_search_preview",
   inputSchema: import_v413.z.object({
     action: import_v413.z.discriminatedUnion("type", [
       import_v413.z.object({
@@ -1896,9 +1881,6 @@ var webSearchToolFactory = (0, import_provider_utils11.createProviderDefinedTool
     ]).nullish()
   })
 });
-var webSearch = (args = {}) => {
-  return webSearchToolFactory(args);
-};
 
 // src/openai-tools.ts
 var openaiTools = {
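For orientation, the hunks above move file_search and web_search into standalone provider-defined tools: file_search is rebuilt with createProviderDefinedToolFactoryWithOutputSchema (configuration as factory args, queries/results as typed output), and a separate web_search tool now sits alongside web_search_preview. The following is only a usage sketch against the package's public tool factories; it assumes the AI SDK generateText entry point, and the model id, vector store id, and prompt are placeholders rather than values taken from this diff.

    // Sketch only: "vs_123", the model id, and the prompt are placeholders.
    import { generateText } from "ai";
    import { openai } from "@ai-sdk/openai";

    const result = await generateText({
      model: openai.responses("gpt-4o-mini"),
      tools: {
        // file_search takes vector store ids, ranking, and filters as factory
        // args; its queries/results come back via the new output schema.
        file_search: openai.tools.fileSearch({
          vectorStoreIds: ["vs_123"],
          maxNumResults: 5,
          ranking: { ranker: "auto", scoreThreshold: 0.5 },
        }),
        // web_search is the provider-executed search tool registered in this release.
        web_search: openai.tools.webSearch({ searchContextSize: "medium" }),
      },
      prompt: "Summarize what the uploaded documents say about pricing.",
    });
    console.log(result.text);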
@@ -2223,7 +2205,10 @@ function prepareResponsesTools({
         type: "file_search",
         vector_store_ids: args.vectorStoreIds,
         max_num_results: args.maxNumResults,
-        ranking_options: args.ranking ? {
+        ranking_options: args.ranking ? {
+          ranker: args.ranking.ranker,
+          score_threshold: args.ranking.scoreThreshold
+        } : void 0,
         filters: args.filters
       });
       break;
@@ -2327,6 +2312,20 @@ var webSearchCallItem = import_v415.z.object({
     })
   ]).nullish()
 });
+var fileSearchCallItem = import_v415.z.object({
+  type: import_v415.z.literal("file_search_call"),
+  id: import_v415.z.string(),
+  queries: import_v415.z.array(import_v415.z.string()),
+  results: import_v415.z.array(
+    import_v415.z.object({
+      attributes: import_v415.z.record(import_v415.z.string(), import_v415.z.unknown()),
+      file_id: import_v415.z.string(),
+      filename: import_v415.z.string(),
+      score: import_v415.z.number(),
+      text: import_v415.z.string()
+    })
+  ).nullish()
+});
 var codeInterpreterCallItem = import_v415.z.object({
   type: import_v415.z.literal("code_interpreter_call"),
   id: import_v415.z.string(),
@@ -2385,7 +2384,7 @@ var OpenAIResponsesLanguageModel = class {
     toolChoice,
     responseFormat
   }) {
-    var _a, _b, _c, _d
+    var _a, _b, _c, _d;
     const warnings = [];
     const modelConfig = getResponsesModelConfig(this.modelId);
     if (topK != null) {
@@ -2423,16 +2422,27 @@ var OpenAIResponsesLanguageModel = class {
     warnings.push(...inputWarnings);
     const strictJsonSchema = (_b = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _b : false;
     let include = openaiOptions == null ? void 0 : openaiOptions.include;
+    function addInclude(key) {
+      include = include != null ? [...include, key] : [key];
+    }
+    function hasOpenAITool(id) {
+      return (tools == null ? void 0 : tools.find(
+        (tool) => tool.type === "provider-defined" && tool.id === id
+      )) != null;
+    }
     const topLogprobs = typeof (openaiOptions == null ? void 0 : openaiOptions.logprobs) === "number" ? openaiOptions == null ? void 0 : openaiOptions.logprobs : (openaiOptions == null ? void 0 : openaiOptions.logprobs) === true ? TOP_LOGPROBS_MAX : void 0;
-
+    if (topLogprobs) {
+      addInclude("message.output_text.logprobs");
+    }
     const webSearchToolName = (_c = tools == null ? void 0 : tools.find(
       (tool) => tool.type === "provider-defined" && (tool.id === "openai.web_search" || tool.id === "openai.web_search_preview")
     )) == null ? void 0 : _c.name;
-
-
-
-    ))
-
+    if (webSearchToolName) {
+      addInclude("web_search_call.action.sources");
+    }
+    if (hasOpenAITool("openai.code_interpreter")) {
+      addInclude("code_interpreter_call.outputs");
+    }
     const baseArgs = {
       model: this.modelId,
       input,
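The hunk above introduces two small helpers, addInclude and hasOpenAITool, and uses them to grow the Responses API include array whenever log probabilities, a web search tool, or the code interpreter tool are requested. Below is a standalone sketch of the same accumulation pattern; the tool shape is simplified and is not the SDK's internal type.

    type ProviderTool = { type: "provider-defined"; id: string; name: string };

    function buildInclude(
      base: string[] | undefined,
      tools: ProviderTool[] | undefined,
      topLogprobs: number | undefined
    ): string[] | undefined {
      let include = base;
      // Mirrors addInclude: create the array on first use, then append.
      const addInclude = (key: string) => {
        include = include != null ? [...include, key] : [key];
      };
      // Mirrors hasOpenAITool: look a tool up by its provider-defined id.
      const hasTool = (id: string) =>
        tools?.some((tool) => tool.type === "provider-defined" && tool.id === id) ?? false;

      if (topLogprobs) addInclude("message.output_text.logprobs");
      if (hasTool("openai.web_search") || hasTool("openai.web_search_preview")) {
        addInclude("web_search_call.action.sources");
      }
      if (hasTool("openai.code_interpreter")) {
        addInclude("code_interpreter_call.outputs");
      }
      return include;
    }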
@@ -2445,7 +2455,7 @@ var OpenAIResponsesLanguageModel = class {
         format: responseFormat.schema != null ? {
           type: "json_schema",
           strict: strictJsonSchema,
-          name: (
+          name: (_d = responseFormat.name) != null ? _d : "response",
           description: responseFormat.description,
           schema: responseFormat.schema
         } : { type: "json_object" }
@@ -2552,7 +2562,7 @@ var OpenAIResponsesLanguageModel = class {
     };
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s;
     const {
       args: body,
       warnings,
@@ -2617,6 +2627,8 @@ var OpenAIResponsesLanguageModel = class {
           })
         )
       }),
+        webSearchCallItem,
+        fileSearchCallItem,
       codeInterpreterCallItem,
       imageGenerationCallItem,
       import_v415.z.object({
@@ -2626,28 +2638,11 @@ var OpenAIResponsesLanguageModel = class {
         arguments: import_v415.z.string(),
         id: import_v415.z.string()
       }),
-        webSearchCallItem,
       import_v415.z.object({
         type: import_v415.z.literal("computer_call"),
         id: import_v415.z.string(),
         status: import_v415.z.string().optional()
       }),
-      import_v415.z.object({
-        type: import_v415.z.literal("file_search_call"),
-        id: import_v415.z.string(),
-        status: import_v415.z.string().optional(),
-        queries: import_v415.z.array(import_v415.z.string()).nullish(),
-        results: import_v415.z.array(
-          import_v415.z.object({
-            attributes: import_v415.z.object({
-              file_id: import_v415.z.string(),
-              filename: import_v415.z.string(),
-              score: import_v415.z.number(),
-              text: import_v415.z.string()
-            })
-          })
-        ).nullish()
-      }),
       import_v415.z.object({
         type: import_v415.z.literal("reasoning"),
         id: import_v415.z.string(),
@@ -2662,7 +2657,7 @@ var OpenAIResponsesLanguageModel = class {
         ])
       ),
       service_tier: import_v415.z.string().nullish(),
-      incomplete_details: import_v415.z.object({ reason: import_v415.z.string() }).
+      incomplete_details: import_v415.z.object({ reason: import_v415.z.string() }).nullish(),
       usage: usageSchema2
     })
   ),
@@ -2816,7 +2811,7 @@ var OpenAIResponsesLanguageModel = class {
         type: "tool-call",
         toolCallId: part.id,
         toolName: "file_search",
-          input: "",
+          input: "{}",
         providerExecuted: true
       });
       content.push({
@@ -2824,10 +2819,14 @@ var OpenAIResponsesLanguageModel = class {
         toolCallId: part.id,
         toolName: "file_search",
         result: {
-
-
-
-
+            queries: part.queries,
+            results: (_n = (_m = part.results) == null ? void 0 : _m.map((result) => ({
+              attributes: result.attributes,
+              fileId: result.file_id,
+              filename: result.filename,
+              score: result.score,
+              text: result.text
+            }))) != null ? _n : null
         },
         providerExecuted: true
       });
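The hunk above now fills the file_search tool result from the response item, mapping the API's snake_case result fields onto the camelCase shape declared by fileSearchOutputSchema and falling back to null when no results are present. A small sketch of that mapping as a standalone helper; the wire type below mirrors the fileSearchCallItem schema and is not an exported SDK type.

    interface FileSearchResultWire {
      attributes: Record<string, unknown>;
      file_id: string;
      filename: string;
      score: number;
      text: string;
    }

    function mapFileSearchResults(results: FileSearchResultWire[] | null | undefined) {
      // snake_case wire fields -> camelCase tool output; null when absent.
      return (
        results?.map((result) => ({
          attributes: result.attributes,
          fileId: result.file_id,
          filename: result.filename,
          score: result.score,
          text: result.text,
        })) ?? null
      );
    }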
@@ -2869,15 +2868,15 @@ var OpenAIResponsesLanguageModel = class {
     return {
       content,
       finishReason: mapOpenAIResponseFinishReason({
-        finishReason: (
+        finishReason: (_o = response.incomplete_details) == null ? void 0 : _o.reason,
         hasFunctionCall
       }),
       usage: {
         inputTokens: response.usage.input_tokens,
         outputTokens: response.usage.output_tokens,
         totalTokens: response.usage.input_tokens + response.usage.output_tokens,
-        reasoningTokens: (
-        cachedInputTokens: (
+        reasoningTokens: (_q = (_p = response.usage.output_tokens_details) == null ? void 0 : _p.reasoning_tokens) != null ? _q : void 0,
+        cachedInputTokens: (_s = (_r = response.usage.input_tokens_details) == null ? void 0 : _r.cached_tokens) != null ? _s : void 0
       },
       request: { body },
       response: {
@@ -2934,7 +2933,7 @@ var OpenAIResponsesLanguageModel = class {
         controller.enqueue({ type: "stream-start", warnings });
       },
       transform(chunk, controller) {
-        var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u;
+        var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w;
         if (options.includeRawChunks) {
           controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
         }
@@ -2976,14 +2975,12 @@ var OpenAIResponsesLanguageModel = class {
             toolName: "computer_use"
           });
         } else if (value.item.type === "file_search_call") {
-          ongoingToolCalls[value.output_index] = {
-            toolName: "file_search",
-            toolCallId: value.item.id
-          };
           controller.enqueue({
-            type: "tool-
-
-            toolName: "file_search"
+            type: "tool-call",
+            toolCallId: value.item.id,
+            toolName: "file_search",
+            input: "{}",
+            providerExecuted: true
           });
         } else if (value.item.type === "image_generation_call") {
           controller.enqueue({
@@ -3083,26 +3080,19 @@ var OpenAIResponsesLanguageModel = class {
           });
         } else if (value.item.type === "file_search_call") {
           ongoingToolCalls[value.output_index] = void 0;
-          controller.enqueue({
-            type: "tool-input-end",
-            id: value.item.id
-          });
-          controller.enqueue({
-            type: "tool-call",
-            toolCallId: value.item.id,
-            toolName: "file_search",
-            input: "",
-            providerExecuted: true
-          });
           controller.enqueue({
             type: "tool-result",
             toolCallId: value.item.id,
             toolName: "file_search",
             result: {
-
-
-
-
+              queries: value.item.queries,
+              results: (_c = (_b = value.item.results) == null ? void 0 : _b.map((result) => ({
+                attributes: result.attributes,
+                fileId: result.file_id,
+                filename: result.filename,
+                score: result.score,
+                text: result.text
+              }))) != null ? _c : null
             },
             providerExecuted: true
           });
@@ -3150,7 +3140,7 @@ var OpenAIResponsesLanguageModel = class {
             providerMetadata: {
               openai: {
                 itemId: value.item.id,
-                  reasoningEncryptedContent: (
+                  reasoningEncryptedContent: (_d = value.item.encrypted_content) != null ? _d : null
               }
             }
           });
@@ -3180,12 +3170,12 @@ var OpenAIResponsesLanguageModel = class {
             id: value.item_id,
             delta: value.delta
           });
-          if (((
+          if (((_f = (_e = options.providerOptions) == null ? void 0 : _e.openai) == null ? void 0 : _f.logprobs) && value.logprobs) {
             logprobs.push(value.logprobs);
           }
         } else if (isResponseReasoningSummaryPartAddedChunk(value)) {
           if (value.summary_index > 0) {
-            (
+            (_g = activeReasoning[value.item_id]) == null ? void 0 : _g.summaryParts.push(
               value.summary_index
             );
             controller.enqueue({
@@ -3194,7 +3184,7 @@ var OpenAIResponsesLanguageModel = class {
             providerMetadata: {
               openai: {
                 itemId: value.item_id,
-                  reasoningEncryptedContent: (
+                  reasoningEncryptedContent: (_i = (_h = activeReasoning[value.item_id]) == null ? void 0 : _h.encryptedContent) != null ? _i : null
               }
             }
           });
@@ -3212,14 +3202,14 @@ var OpenAIResponsesLanguageModel = class {
           });
         } else if (isResponseFinishedChunk(value)) {
           finishReason = mapOpenAIResponseFinishReason({
-            finishReason: (
+            finishReason: (_j = value.response.incomplete_details) == null ? void 0 : _j.reason,
             hasFunctionCall
           });
           usage.inputTokens = value.response.usage.input_tokens;
           usage.outputTokens = value.response.usage.output_tokens;
           usage.totalTokens = value.response.usage.input_tokens + value.response.usage.output_tokens;
-          usage.reasoningTokens = (
-          usage.cachedInputTokens = (
+          usage.reasoningTokens = (_l = (_k = value.response.usage.output_tokens_details) == null ? void 0 : _k.reasoning_tokens) != null ? _l : void 0;
+          usage.cachedInputTokens = (_n = (_m = value.response.usage.input_tokens_details) == null ? void 0 : _m.cached_tokens) != null ? _n : void 0;
           if (typeof value.response.service_tier === "string") {
             serviceTier = value.response.service_tier;
           }
@@ -3228,7 +3218,7 @@ var OpenAIResponsesLanguageModel = class {
           controller.enqueue({
             type: "source",
             sourceType: "url",
-            id: (
+            id: (_q = (_p = (_o = self.config).generateId) == null ? void 0 : _p.call(_o)) != null ? _q : (0, import_provider_utils13.generateId)(),
             url: value.annotation.url,
             title: value.annotation.title
           });
@@ -3236,10 +3226,10 @@ var OpenAIResponsesLanguageModel = class {
           controller.enqueue({
             type: "source",
             sourceType: "document",
-            id: (
+            id: (_t = (_s = (_r = self.config).generateId) == null ? void 0 : _s.call(_r)) != null ? _t : (0, import_provider_utils13.generateId)(),
             mediaType: "text/plain",
-            title: (
-            filename: (
+            title: (_v = (_u = value.annotation.quote) != null ? _u : value.annotation.filename) != null ? _v : "Document",
+            filename: (_w = value.annotation.filename) != null ? _w : value.annotation.file_id
           });
         }
       } else if (isErrorChunk(value)) {
@@ -3344,19 +3334,7 @@ var responseOutputItemAddedSchema = import_v415.z.object({
     }),
     import_v415.z.object({
       type: import_v415.z.literal("file_search_call"),
-      id: import_v415.z.string()
-      status: import_v415.z.string(),
-      queries: import_v415.z.array(import_v415.z.string()).nullish(),
-      results: import_v415.z.array(
-        import_v415.z.object({
-          attributes: import_v415.z.object({
-            file_id: import_v415.z.string(),
-            filename: import_v415.z.string(),
-            score: import_v415.z.number(),
-            text: import_v415.z.string()
-          })
-        })
-      ).optional()
+      id: import_v415.z.string()
     }),
     import_v415.z.object({
       type: import_v415.z.literal("image_generation_call"),
@@ -3388,26 +3366,11 @@ var responseOutputItemDoneSchema = import_v415.z.object({
     codeInterpreterCallItem,
     imageGenerationCallItem,
     webSearchCallItem,
+    fileSearchCallItem,
     import_v415.z.object({
       type: import_v415.z.literal("computer_call"),
       id: import_v415.z.string(),
       status: import_v415.z.literal("completed")
-    }),
-    import_v415.z.object({
-      type: import_v415.z.literal("file_search_call"),
-      id: import_v415.z.string(),
-      status: import_v415.z.literal("completed"),
-      queries: import_v415.z.array(import_v415.z.string()).nullish(),
-      results: import_v415.z.array(
-        import_v415.z.object({
-          attributes: import_v415.z.object({
-            file_id: import_v415.z.string(),
-            filename: import_v415.z.string(),
-            score: import_v415.z.number(),
-            text: import_v415.z.string()
-          })
-        })
-      ).nullish()
     })
   ])
 });