@ai-sdk/openai 2.0.30 → 2.0.32
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +15 -0
- package/dist/index.d.mts +85 -23
- package/dist/index.d.ts +85 -23
- package/dist/index.js +773 -690
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +712 -629
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +13 -12
- package/dist/internal/index.d.ts +13 -12
- package/dist/internal/index.js +760 -694
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +701 -635
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
|
@@ -26,12 +26,12 @@ __export(src_exports, {
|
|
|
26
26
|
module.exports = __toCommonJS(src_exports);
|
|
27
27
|
|
|
28
28
|
// src/openai-provider.ts
|
|
29
|
-
var
|
|
29
|
+
var import_provider_utils16 = require("@ai-sdk/provider-utils");
|
|
30
30
|
|
|
31
31
|
// src/chat/openai-chat-language-model.ts
|
|
32
32
|
var import_provider3 = require("@ai-sdk/provider");
|
|
33
|
-
var
|
|
34
|
-
var
|
|
33
|
+
var import_provider_utils3 = require("@ai-sdk/provider-utils");
|
|
34
|
+
var import_v43 = require("zod/v4");
|
|
35
35
|
|
|
36
36
|
// src/openai-error.ts
|
|
37
37
|
var import_v4 = require("zod/v4");
|
|
@@ -352,98 +352,6 @@ var openaiProviderOptions = import_v42.z.object({
|
|
|
352
352
|
|
|
353
353
|
// src/chat/openai-chat-prepare-tools.ts
|
|
354
354
|
var import_provider2 = require("@ai-sdk/provider");
|
|
355
|
-
|
|
356
|
-
// src/tool/file-search.ts
|
|
357
|
-
var import_provider_utils3 = require("@ai-sdk/provider-utils");
|
|
358
|
-
var import_v43 = require("zod/v4");
|
|
359
|
-
var comparisonFilterSchema = import_v43.z.object({
|
|
360
|
-
key: import_v43.z.string(),
|
|
361
|
-
type: import_v43.z.enum(["eq", "ne", "gt", "gte", "lt", "lte"]),
|
|
362
|
-
value: import_v43.z.union([import_v43.z.string(), import_v43.z.number(), import_v43.z.boolean()])
|
|
363
|
-
});
|
|
364
|
-
var compoundFilterSchema = import_v43.z.object({
|
|
365
|
-
type: import_v43.z.enum(["and", "or"]),
|
|
366
|
-
filters: import_v43.z.array(
|
|
367
|
-
import_v43.z.union([comparisonFilterSchema, import_v43.z.lazy(() => compoundFilterSchema)])
|
|
368
|
-
)
|
|
369
|
-
});
|
|
370
|
-
var filtersSchema = import_v43.z.union([comparisonFilterSchema, compoundFilterSchema]);
|
|
371
|
-
var fileSearchArgsSchema = import_v43.z.object({
|
|
372
|
-
vectorStoreIds: import_v43.z.array(import_v43.z.string()).optional(),
|
|
373
|
-
maxNumResults: import_v43.z.number().optional(),
|
|
374
|
-
ranking: import_v43.z.object({
|
|
375
|
-
ranker: import_v43.z.enum(["auto", "default-2024-08-21"]).optional()
|
|
376
|
-
}).optional(),
|
|
377
|
-
filters: filtersSchema.optional()
|
|
378
|
-
});
|
|
379
|
-
var fileSearch = (0, import_provider_utils3.createProviderDefinedToolFactory)({
|
|
380
|
-
id: "openai.file_search",
|
|
381
|
-
name: "file_search",
|
|
382
|
-
inputSchema: import_v43.z.object({
|
|
383
|
-
query: import_v43.z.string()
|
|
384
|
-
})
|
|
385
|
-
});
|
|
386
|
-
|
|
387
|
-
// src/tool/web-search-preview.ts
|
|
388
|
-
var import_provider_utils4 = require("@ai-sdk/provider-utils");
|
|
389
|
-
var import_v44 = require("zod/v4");
|
|
390
|
-
var webSearchPreviewArgsSchema = import_v44.z.object({
|
|
391
|
-
/**
|
|
392
|
-
* Search context size to use for the web search.
|
|
393
|
-
* - high: Most comprehensive context, highest cost, slower response
|
|
394
|
-
* - medium: Balanced context, cost, and latency (default)
|
|
395
|
-
* - low: Least context, lowest cost, fastest response
|
|
396
|
-
*/
|
|
397
|
-
searchContextSize: import_v44.z.enum(["low", "medium", "high"]).optional(),
|
|
398
|
-
/**
|
|
399
|
-
* User location information to provide geographically relevant search results.
|
|
400
|
-
*/
|
|
401
|
-
userLocation: import_v44.z.object({
|
|
402
|
-
/**
|
|
403
|
-
* Type of location (always 'approximate')
|
|
404
|
-
*/
|
|
405
|
-
type: import_v44.z.literal("approximate"),
|
|
406
|
-
/**
|
|
407
|
-
* Two-letter ISO country code (e.g., 'US', 'GB')
|
|
408
|
-
*/
|
|
409
|
-
country: import_v44.z.string().optional(),
|
|
410
|
-
/**
|
|
411
|
-
* City name (free text, e.g., 'Minneapolis')
|
|
412
|
-
*/
|
|
413
|
-
city: import_v44.z.string().optional(),
|
|
414
|
-
/**
|
|
415
|
-
* Region name (free text, e.g., 'Minnesota')
|
|
416
|
-
*/
|
|
417
|
-
region: import_v44.z.string().optional(),
|
|
418
|
-
/**
|
|
419
|
-
* IANA timezone (e.g., 'America/Chicago')
|
|
420
|
-
*/
|
|
421
|
-
timezone: import_v44.z.string().optional()
|
|
422
|
-
}).optional()
|
|
423
|
-
});
|
|
424
|
-
var webSearchPreview = (0, import_provider_utils4.createProviderDefinedToolFactory)({
|
|
425
|
-
id: "openai.web_search_preview",
|
|
426
|
-
name: "web_search_preview",
|
|
427
|
-
inputSchema: import_v44.z.object({
|
|
428
|
-
action: import_v44.z.discriminatedUnion("type", [
|
|
429
|
-
import_v44.z.object({
|
|
430
|
-
type: import_v44.z.literal("search"),
|
|
431
|
-
query: import_v44.z.string().nullish()
|
|
432
|
-
}),
|
|
433
|
-
import_v44.z.object({
|
|
434
|
-
type: import_v44.z.literal("open_page"),
|
|
435
|
-
url: import_v44.z.string()
|
|
436
|
-
}),
|
|
437
|
-
import_v44.z.object({
|
|
438
|
-
type: import_v44.z.literal("find"),
|
|
439
|
-
url: import_v44.z.string(),
|
|
440
|
-
pattern: import_v44.z.string()
|
|
441
|
-
})
|
|
442
|
-
]).nullish()
|
|
443
|
-
})
|
|
444
|
-
});
|
|
445
|
-
|
|
446
|
-
// src/chat/openai-chat-prepare-tools.ts
|
|
447
355
|
function prepareChatTools({
|
|
448
356
|
tools,
|
|
449
357
|
toolChoice,
|
|
@@ -469,33 +377,6 @@ function prepareChatTools({
|
|
|
469
377
|
}
|
|
470
378
|
});
|
|
471
379
|
break;
|
|
472
|
-
case "provider-defined":
|
|
473
|
-
switch (tool.id) {
|
|
474
|
-
case "openai.file_search": {
|
|
475
|
-
const args = fileSearchArgsSchema.parse(tool.args);
|
|
476
|
-
openaiTools2.push({
|
|
477
|
-
type: "file_search",
|
|
478
|
-
vector_store_ids: args.vectorStoreIds,
|
|
479
|
-
max_num_results: args.maxNumResults,
|
|
480
|
-
ranking_options: args.ranking ? { ranker: args.ranking.ranker } : void 0,
|
|
481
|
-
filters: args.filters
|
|
482
|
-
});
|
|
483
|
-
break;
|
|
484
|
-
}
|
|
485
|
-
case "openai.web_search_preview": {
|
|
486
|
-
const args = webSearchPreviewArgsSchema.parse(tool.args);
|
|
487
|
-
openaiTools2.push({
|
|
488
|
-
type: "web_search_preview",
|
|
489
|
-
search_context_size: args.searchContextSize,
|
|
490
|
-
user_location: args.userLocation
|
|
491
|
-
});
|
|
492
|
-
break;
|
|
493
|
-
}
|
|
494
|
-
default:
|
|
495
|
-
toolWarnings.push({ type: "unsupported-tool", tool });
|
|
496
|
-
break;
|
|
497
|
-
}
|
|
498
|
-
break;
|
|
499
380
|
default:
|
|
500
381
|
toolWarnings.push({ type: "unsupported-tool", tool });
|
|
501
382
|
break;
|
|
@@ -560,7 +441,7 @@ var OpenAIChatLanguageModel = class {
|
|
|
560
441
|
}) {
|
|
561
442
|
var _a, _b, _c, _d;
|
|
562
443
|
const warnings = [];
|
|
563
|
-
const openaiOptions = (_a = await (0,
|
|
444
|
+
const openaiOptions = (_a = await (0, import_provider_utils3.parseProviderOptions)({
|
|
564
445
|
provider: "openai",
|
|
565
446
|
providerOptions,
|
|
566
447
|
schema: openaiProviderOptions
|
|
@@ -739,15 +620,15 @@ var OpenAIChatLanguageModel = class {
|
|
|
739
620
|
responseHeaders,
|
|
740
621
|
value: response,
|
|
741
622
|
rawValue: rawResponse
|
|
742
|
-
} = await (0,
|
|
623
|
+
} = await (0, import_provider_utils3.postJsonToApi)({
|
|
743
624
|
url: this.config.url({
|
|
744
625
|
path: "/chat/completions",
|
|
745
626
|
modelId: this.modelId
|
|
746
627
|
}),
|
|
747
|
-
headers: (0,
|
|
628
|
+
headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
|
|
748
629
|
body,
|
|
749
630
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
750
|
-
successfulResponseHandler: (0,
|
|
631
|
+
successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
|
|
751
632
|
openaiChatResponseSchema
|
|
752
633
|
),
|
|
753
634
|
abortSignal: options.abortSignal,
|
|
@@ -762,7 +643,7 @@ var OpenAIChatLanguageModel = class {
|
|
|
762
643
|
for (const toolCall of (_a = choice.message.tool_calls) != null ? _a : []) {
|
|
763
644
|
content.push({
|
|
764
645
|
type: "tool-call",
|
|
765
|
-
toolCallId: (_b = toolCall.id) != null ? _b : (0,
|
|
646
|
+
toolCallId: (_b = toolCall.id) != null ? _b : (0, import_provider_utils3.generateId)(),
|
|
766
647
|
toolName: toolCall.function.name,
|
|
767
648
|
input: toolCall.function.arguments
|
|
768
649
|
});
|
|
@@ -771,7 +652,7 @@ var OpenAIChatLanguageModel = class {
|
|
|
771
652
|
content.push({
|
|
772
653
|
type: "source",
|
|
773
654
|
sourceType: "url",
|
|
774
|
-
id: (0,
|
|
655
|
+
id: (0, import_provider_utils3.generateId)(),
|
|
775
656
|
url: annotation.url,
|
|
776
657
|
title: annotation.title
|
|
777
658
|
});
|
|
@@ -817,15 +698,15 @@ var OpenAIChatLanguageModel = class {
|
|
|
817
698
|
include_usage: true
|
|
818
699
|
}
|
|
819
700
|
};
|
|
820
|
-
const { responseHeaders, value: response } = await (0,
|
|
701
|
+
const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
|
|
821
702
|
url: this.config.url({
|
|
822
703
|
path: "/chat/completions",
|
|
823
704
|
modelId: this.modelId
|
|
824
705
|
}),
|
|
825
|
-
headers: (0,
|
|
706
|
+
headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
|
|
826
707
|
body,
|
|
827
708
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
828
|
-
successfulResponseHandler: (0,
|
|
709
|
+
successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(
|
|
829
710
|
openaiChatChunkSchema
|
|
830
711
|
),
|
|
831
712
|
abortSignal: options.abortSignal,
|
|
@@ -950,14 +831,14 @@ var OpenAIChatLanguageModel = class {
|
|
|
950
831
|
delta: toolCall2.function.arguments
|
|
951
832
|
});
|
|
952
833
|
}
|
|
953
|
-
if ((0,
|
|
834
|
+
if ((0, import_provider_utils3.isParsableJson)(toolCall2.function.arguments)) {
|
|
954
835
|
controller.enqueue({
|
|
955
836
|
type: "tool-input-end",
|
|
956
837
|
id: toolCall2.id
|
|
957
838
|
});
|
|
958
839
|
controller.enqueue({
|
|
959
840
|
type: "tool-call",
|
|
960
|
-
toolCallId: (_q = toolCall2.id) != null ? _q : (0,
|
|
841
|
+
toolCallId: (_q = toolCall2.id) != null ? _q : (0, import_provider_utils3.generateId)(),
|
|
961
842
|
toolName: toolCall2.function.name,
|
|
962
843
|
input: toolCall2.function.arguments
|
|
963
844
|
});
|
|
@@ -978,14 +859,14 @@ var OpenAIChatLanguageModel = class {
|
|
|
978
859
|
id: toolCall.id,
|
|
979
860
|
delta: (_u = toolCallDelta.function.arguments) != null ? _u : ""
|
|
980
861
|
});
|
|
981
|
-
if (((_v = toolCall.function) == null ? void 0 : _v.name) != null && ((_w = toolCall.function) == null ? void 0 : _w.arguments) != null && (0,
|
|
862
|
+
if (((_v = toolCall.function) == null ? void 0 : _v.name) != null && ((_w = toolCall.function) == null ? void 0 : _w.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
|
|
982
863
|
controller.enqueue({
|
|
983
864
|
type: "tool-input-end",
|
|
984
865
|
id: toolCall.id
|
|
985
866
|
});
|
|
986
867
|
controller.enqueue({
|
|
987
868
|
type: "tool-call",
|
|
988
|
-
toolCallId: (_x = toolCall.id) != null ? _x : (0,
|
|
869
|
+
toolCallId: (_x = toolCall.id) != null ? _x : (0, import_provider_utils3.generateId)(),
|
|
989
870
|
toolName: toolCall.function.name,
|
|
990
871
|
input: toolCall.function.arguments
|
|
991
872
|
});
|
|
@@ -998,7 +879,7 @@ var OpenAIChatLanguageModel = class {
|
|
|
998
879
|
controller.enqueue({
|
|
999
880
|
type: "source",
|
|
1000
881
|
sourceType: "url",
|
|
1001
|
-
id: (0,
|
|
882
|
+
id: (0, import_provider_utils3.generateId)(),
|
|
1002
883
|
url: annotation.url,
|
|
1003
884
|
title: annotation.title
|
|
1004
885
|
});
|
|
@@ -1023,115 +904,115 @@ var OpenAIChatLanguageModel = class {
|
|
|
1023
904
|
};
|
|
1024
905
|
}
|
|
1025
906
|
};
|
|
1026
|
-
var openaiTokenUsageSchema =
|
|
1027
|
-
prompt_tokens:
|
|
1028
|
-
completion_tokens:
|
|
1029
|
-
total_tokens:
|
|
1030
|
-
prompt_tokens_details:
|
|
1031
|
-
cached_tokens:
|
|
907
|
+
var openaiTokenUsageSchema = import_v43.z.object({
|
|
908
|
+
prompt_tokens: import_v43.z.number().nullish(),
|
|
909
|
+
completion_tokens: import_v43.z.number().nullish(),
|
|
910
|
+
total_tokens: import_v43.z.number().nullish(),
|
|
911
|
+
prompt_tokens_details: import_v43.z.object({
|
|
912
|
+
cached_tokens: import_v43.z.number().nullish()
|
|
1032
913
|
}).nullish(),
|
|
1033
|
-
completion_tokens_details:
|
|
1034
|
-
reasoning_tokens:
|
|
1035
|
-
accepted_prediction_tokens:
|
|
1036
|
-
rejected_prediction_tokens:
|
|
914
|
+
completion_tokens_details: import_v43.z.object({
|
|
915
|
+
reasoning_tokens: import_v43.z.number().nullish(),
|
|
916
|
+
accepted_prediction_tokens: import_v43.z.number().nullish(),
|
|
917
|
+
rejected_prediction_tokens: import_v43.z.number().nullish()
|
|
1037
918
|
}).nullish()
|
|
1038
919
|
}).nullish();
|
|
1039
|
-
var openaiChatResponseSchema =
|
|
1040
|
-
id:
|
|
1041
|
-
created:
|
|
1042
|
-
model:
|
|
1043
|
-
choices:
|
|
1044
|
-
|
|
1045
|
-
message:
|
|
1046
|
-
role:
|
|
1047
|
-
content:
|
|
1048
|
-
tool_calls:
|
|
1049
|
-
|
|
1050
|
-
id:
|
|
1051
|
-
type:
|
|
1052
|
-
function:
|
|
1053
|
-
name:
|
|
1054
|
-
arguments:
|
|
920
|
+
var openaiChatResponseSchema = import_v43.z.object({
|
|
921
|
+
id: import_v43.z.string().nullish(),
|
|
922
|
+
created: import_v43.z.number().nullish(),
|
|
923
|
+
model: import_v43.z.string().nullish(),
|
|
924
|
+
choices: import_v43.z.array(
|
|
925
|
+
import_v43.z.object({
|
|
926
|
+
message: import_v43.z.object({
|
|
927
|
+
role: import_v43.z.literal("assistant").nullish(),
|
|
928
|
+
content: import_v43.z.string().nullish(),
|
|
929
|
+
tool_calls: import_v43.z.array(
|
|
930
|
+
import_v43.z.object({
|
|
931
|
+
id: import_v43.z.string().nullish(),
|
|
932
|
+
type: import_v43.z.literal("function"),
|
|
933
|
+
function: import_v43.z.object({
|
|
934
|
+
name: import_v43.z.string(),
|
|
935
|
+
arguments: import_v43.z.string()
|
|
1055
936
|
})
|
|
1056
937
|
})
|
|
1057
938
|
).nullish(),
|
|
1058
|
-
annotations:
|
|
1059
|
-
|
|
1060
|
-
type:
|
|
1061
|
-
start_index:
|
|
1062
|
-
end_index:
|
|
1063
|
-
url:
|
|
1064
|
-
title:
|
|
939
|
+
annotations: import_v43.z.array(
|
|
940
|
+
import_v43.z.object({
|
|
941
|
+
type: import_v43.z.literal("url_citation"),
|
|
942
|
+
start_index: import_v43.z.number(),
|
|
943
|
+
end_index: import_v43.z.number(),
|
|
944
|
+
url: import_v43.z.string(),
|
|
945
|
+
title: import_v43.z.string()
|
|
1065
946
|
})
|
|
1066
947
|
).nullish()
|
|
1067
948
|
}),
|
|
1068
|
-
index:
|
|
1069
|
-
logprobs:
|
|
1070
|
-
content:
|
|
1071
|
-
|
|
1072
|
-
token:
|
|
1073
|
-
logprob:
|
|
1074
|
-
top_logprobs:
|
|
1075
|
-
|
|
1076
|
-
token:
|
|
1077
|
-
logprob:
|
|
949
|
+
index: import_v43.z.number(),
|
|
950
|
+
logprobs: import_v43.z.object({
|
|
951
|
+
content: import_v43.z.array(
|
|
952
|
+
import_v43.z.object({
|
|
953
|
+
token: import_v43.z.string(),
|
|
954
|
+
logprob: import_v43.z.number(),
|
|
955
|
+
top_logprobs: import_v43.z.array(
|
|
956
|
+
import_v43.z.object({
|
|
957
|
+
token: import_v43.z.string(),
|
|
958
|
+
logprob: import_v43.z.number()
|
|
1078
959
|
})
|
|
1079
960
|
)
|
|
1080
961
|
})
|
|
1081
962
|
).nullish()
|
|
1082
963
|
}).nullish(),
|
|
1083
|
-
finish_reason:
|
|
964
|
+
finish_reason: import_v43.z.string().nullish()
|
|
1084
965
|
})
|
|
1085
966
|
),
|
|
1086
967
|
usage: openaiTokenUsageSchema
|
|
1087
968
|
});
|
|
1088
|
-
var openaiChatChunkSchema =
|
|
1089
|
-
|
|
1090
|
-
id:
|
|
1091
|
-
created:
|
|
1092
|
-
model:
|
|
1093
|
-
choices:
|
|
1094
|
-
|
|
1095
|
-
delta:
|
|
1096
|
-
role:
|
|
1097
|
-
content:
|
|
1098
|
-
tool_calls:
|
|
1099
|
-
|
|
1100
|
-
index:
|
|
1101
|
-
id:
|
|
1102
|
-
type:
|
|
1103
|
-
function:
|
|
1104
|
-
name:
|
|
1105
|
-
arguments:
|
|
969
|
+
var openaiChatChunkSchema = import_v43.z.union([
|
|
970
|
+
import_v43.z.object({
|
|
971
|
+
id: import_v43.z.string().nullish(),
|
|
972
|
+
created: import_v43.z.number().nullish(),
|
|
973
|
+
model: import_v43.z.string().nullish(),
|
|
974
|
+
choices: import_v43.z.array(
|
|
975
|
+
import_v43.z.object({
|
|
976
|
+
delta: import_v43.z.object({
|
|
977
|
+
role: import_v43.z.enum(["assistant"]).nullish(),
|
|
978
|
+
content: import_v43.z.string().nullish(),
|
|
979
|
+
tool_calls: import_v43.z.array(
|
|
980
|
+
import_v43.z.object({
|
|
981
|
+
index: import_v43.z.number(),
|
|
982
|
+
id: import_v43.z.string().nullish(),
|
|
983
|
+
type: import_v43.z.literal("function").nullish(),
|
|
984
|
+
function: import_v43.z.object({
|
|
985
|
+
name: import_v43.z.string().nullish(),
|
|
986
|
+
arguments: import_v43.z.string().nullish()
|
|
1106
987
|
})
|
|
1107
988
|
})
|
|
1108
989
|
).nullish(),
|
|
1109
|
-
annotations:
|
|
1110
|
-
|
|
1111
|
-
type:
|
|
1112
|
-
start_index:
|
|
1113
|
-
end_index:
|
|
1114
|
-
url:
|
|
1115
|
-
title:
|
|
990
|
+
annotations: import_v43.z.array(
|
|
991
|
+
import_v43.z.object({
|
|
992
|
+
type: import_v43.z.literal("url_citation"),
|
|
993
|
+
start_index: import_v43.z.number(),
|
|
994
|
+
end_index: import_v43.z.number(),
|
|
995
|
+
url: import_v43.z.string(),
|
|
996
|
+
title: import_v43.z.string()
|
|
1116
997
|
})
|
|
1117
998
|
).nullish()
|
|
1118
999
|
}).nullish(),
|
|
1119
|
-
logprobs:
|
|
1120
|
-
content:
|
|
1121
|
-
|
|
1122
|
-
token:
|
|
1123
|
-
logprob:
|
|
1124
|
-
top_logprobs:
|
|
1125
|
-
|
|
1126
|
-
token:
|
|
1127
|
-
logprob:
|
|
1000
|
+
logprobs: import_v43.z.object({
|
|
1001
|
+
content: import_v43.z.array(
|
|
1002
|
+
import_v43.z.object({
|
|
1003
|
+
token: import_v43.z.string(),
|
|
1004
|
+
logprob: import_v43.z.number(),
|
|
1005
|
+
top_logprobs: import_v43.z.array(
|
|
1006
|
+
import_v43.z.object({
|
|
1007
|
+
token: import_v43.z.string(),
|
|
1008
|
+
logprob: import_v43.z.number()
|
|
1128
1009
|
})
|
|
1129
1010
|
)
|
|
1130
1011
|
})
|
|
1131
1012
|
).nullish()
|
|
1132
1013
|
}).nullish(),
|
|
1133
|
-
finish_reason:
|
|
1134
|
-
index:
|
|
1014
|
+
finish_reason: import_v43.z.string().nullish(),
|
|
1015
|
+
index: import_v43.z.number()
|
|
1135
1016
|
})
|
|
1136
1017
|
),
|
|
1137
1018
|
usage: openaiTokenUsageSchema
|
|
@@ -1188,8 +1069,8 @@ var reasoningModels = {
|
|
|
1188
1069
|
};
|
|
1189
1070
|
|
|
1190
1071
|
// src/completion/openai-completion-language-model.ts
|
|
1191
|
-
var
|
|
1192
|
-
var
|
|
1072
|
+
var import_provider_utils4 = require("@ai-sdk/provider-utils");
|
|
1073
|
+
var import_v45 = require("zod/v4");
|
|
1193
1074
|
|
|
1194
1075
|
// src/completion/convert-to-openai-completion-prompt.ts
|
|
1195
1076
|
var import_provider4 = require("@ai-sdk/provider");
|
|
@@ -1297,12 +1178,12 @@ function mapOpenAIFinishReason2(finishReason) {
|
|
|
1297
1178
|
}
|
|
1298
1179
|
|
|
1299
1180
|
// src/completion/openai-completion-options.ts
|
|
1300
|
-
var
|
|
1301
|
-
var openaiCompletionProviderOptions =
|
|
1181
|
+
var import_v44 = require("zod/v4");
|
|
1182
|
+
var openaiCompletionProviderOptions = import_v44.z.object({
|
|
1302
1183
|
/**
|
|
1303
1184
|
Echo back the prompt in addition to the completion.
|
|
1304
1185
|
*/
|
|
1305
|
-
echo:
|
|
1186
|
+
echo: import_v44.z.boolean().optional(),
|
|
1306
1187
|
/**
|
|
1307
1188
|
Modify the likelihood of specified tokens appearing in the completion.
|
|
1308
1189
|
|
|
@@ -1317,16 +1198,16 @@ var openaiCompletionProviderOptions = import_v46.z.object({
|
|
|
1317
1198
|
As an example, you can pass {"50256": -100} to prevent the <|endoftext|>
|
|
1318
1199
|
token from being generated.
|
|
1319
1200
|
*/
|
|
1320
|
-
logitBias:
|
|
1201
|
+
logitBias: import_v44.z.record(import_v44.z.string(), import_v44.z.number()).optional(),
|
|
1321
1202
|
/**
|
|
1322
1203
|
The suffix that comes after a completion of inserted text.
|
|
1323
1204
|
*/
|
|
1324
|
-
suffix:
|
|
1205
|
+
suffix: import_v44.z.string().optional(),
|
|
1325
1206
|
/**
|
|
1326
1207
|
A unique identifier representing your end-user, which can help OpenAI to
|
|
1327
1208
|
monitor and detect abuse. Learn more.
|
|
1328
1209
|
*/
|
|
1329
|
-
user:
|
|
1210
|
+
user: import_v44.z.string().optional(),
|
|
1330
1211
|
/**
|
|
1331
1212
|
Return the log probabilities of the tokens. Including logprobs will increase
|
|
1332
1213
|
the response size and can slow down response times. However, it can
|
|
@@ -1336,7 +1217,7 @@ var openaiCompletionProviderOptions = import_v46.z.object({
|
|
|
1336
1217
|
Setting to a number will return the log probabilities of the top n
|
|
1337
1218
|
tokens that were generated.
|
|
1338
1219
|
*/
|
|
1339
|
-
logprobs:
|
|
1220
|
+
logprobs: import_v44.z.union([import_v44.z.boolean(), import_v44.z.number()]).optional()
|
|
1340
1221
|
});
|
|
1341
1222
|
|
|
1342
1223
|
// src/completion/openai-completion-language-model.ts
|
|
@@ -1372,12 +1253,12 @@ var OpenAICompletionLanguageModel = class {
|
|
|
1372
1253
|
}) {
|
|
1373
1254
|
const warnings = [];
|
|
1374
1255
|
const openaiOptions = {
|
|
1375
|
-
...await (0,
|
|
1256
|
+
...await (0, import_provider_utils4.parseProviderOptions)({
|
|
1376
1257
|
provider: "openai",
|
|
1377
1258
|
providerOptions,
|
|
1378
1259
|
schema: openaiCompletionProviderOptions
|
|
1379
1260
|
}),
|
|
1380
|
-
...await (0,
|
|
1261
|
+
...await (0, import_provider_utils4.parseProviderOptions)({
|
|
1381
1262
|
provider: this.providerOptionsName,
|
|
1382
1263
|
providerOptions,
|
|
1383
1264
|
schema: openaiCompletionProviderOptions
|
|
@@ -1433,15 +1314,15 @@ var OpenAICompletionLanguageModel = class {
|
|
|
1433
1314
|
responseHeaders,
|
|
1434
1315
|
value: response,
|
|
1435
1316
|
rawValue: rawResponse
|
|
1436
|
-
} = await (0,
|
|
1317
|
+
} = await (0, import_provider_utils4.postJsonToApi)({
|
|
1437
1318
|
url: this.config.url({
|
|
1438
1319
|
path: "/completions",
|
|
1439
1320
|
modelId: this.modelId
|
|
1440
1321
|
}),
|
|
1441
|
-
headers: (0,
|
|
1322
|
+
headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), options.headers),
|
|
1442
1323
|
body: args,
|
|
1443
1324
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
1444
|
-
successfulResponseHandler: (0,
|
|
1325
|
+
successfulResponseHandler: (0, import_provider_utils4.createJsonResponseHandler)(
|
|
1445
1326
|
openaiCompletionResponseSchema
|
|
1446
1327
|
),
|
|
1447
1328
|
abortSignal: options.abortSignal,
|
|
@@ -1479,15 +1360,15 @@ var OpenAICompletionLanguageModel = class {
|
|
|
1479
1360
|
include_usage: true
|
|
1480
1361
|
}
|
|
1481
1362
|
};
|
|
1482
|
-
const { responseHeaders, value: response } = await (0,
|
|
1363
|
+
const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
|
|
1483
1364
|
url: this.config.url({
|
|
1484
1365
|
path: "/completions",
|
|
1485
1366
|
modelId: this.modelId
|
|
1486
1367
|
}),
|
|
1487
|
-
headers: (0,
|
|
1368
|
+
headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), options.headers),
|
|
1488
1369
|
body,
|
|
1489
1370
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
1490
|
-
successfulResponseHandler: (0,
|
|
1371
|
+
successfulResponseHandler: (0, import_provider_utils4.createEventSourceResponseHandler)(
|
|
1491
1372
|
openaiCompletionChunkSchema
|
|
1492
1373
|
),
|
|
1493
1374
|
abortSignal: options.abortSignal,
|
|
@@ -1568,42 +1449,42 @@ var OpenAICompletionLanguageModel = class {
|
|
|
1568
1449
|
};
|
|
1569
1450
|
}
|
|
1570
1451
|
};
|
|
1571
|
-
var usageSchema =
|
|
1572
|
-
prompt_tokens:
|
|
1573
|
-
completion_tokens:
|
|
1574
|
-
total_tokens:
|
|
1452
|
+
var usageSchema = import_v45.z.object({
|
|
1453
|
+
prompt_tokens: import_v45.z.number(),
|
|
1454
|
+
completion_tokens: import_v45.z.number(),
|
|
1455
|
+
total_tokens: import_v45.z.number()
|
|
1575
1456
|
});
|
|
1576
|
-
var openaiCompletionResponseSchema =
|
|
1577
|
-
id:
|
|
1578
|
-
created:
|
|
1579
|
-
model:
|
|
1580
|
-
choices:
|
|
1581
|
-
|
|
1582
|
-
text:
|
|
1583
|
-
finish_reason:
|
|
1584
|
-
logprobs:
|
|
1585
|
-
tokens:
|
|
1586
|
-
token_logprobs:
|
|
1587
|
-
top_logprobs:
|
|
1457
|
+
var openaiCompletionResponseSchema = import_v45.z.object({
|
|
1458
|
+
id: import_v45.z.string().nullish(),
|
|
1459
|
+
created: import_v45.z.number().nullish(),
|
|
1460
|
+
model: import_v45.z.string().nullish(),
|
|
1461
|
+
choices: import_v45.z.array(
|
|
1462
|
+
import_v45.z.object({
|
|
1463
|
+
text: import_v45.z.string(),
|
|
1464
|
+
finish_reason: import_v45.z.string(),
|
|
1465
|
+
logprobs: import_v45.z.object({
|
|
1466
|
+
tokens: import_v45.z.array(import_v45.z.string()),
|
|
1467
|
+
token_logprobs: import_v45.z.array(import_v45.z.number()),
|
|
1468
|
+
top_logprobs: import_v45.z.array(import_v45.z.record(import_v45.z.string(), import_v45.z.number())).nullish()
|
|
1588
1469
|
}).nullish()
|
|
1589
1470
|
})
|
|
1590
1471
|
),
|
|
1591
1472
|
usage: usageSchema.nullish()
|
|
1592
1473
|
});
|
|
1593
|
-
var openaiCompletionChunkSchema =
|
|
1594
|
-
|
|
1595
|
-
id:
|
|
1596
|
-
created:
|
|
1597
|
-
model:
|
|
1598
|
-
choices:
|
|
1599
|
-
|
|
1600
|
-
text:
|
|
1601
|
-
finish_reason:
|
|
1602
|
-
index:
|
|
1603
|
-
logprobs:
|
|
1604
|
-
tokens:
|
|
1605
|
-
token_logprobs:
|
|
1606
|
-
top_logprobs:
|
|
1474
|
+
var openaiCompletionChunkSchema = import_v45.z.union([
|
|
1475
|
+
import_v45.z.object({
|
|
1476
|
+
id: import_v45.z.string().nullish(),
|
|
1477
|
+
created: import_v45.z.number().nullish(),
|
|
1478
|
+
model: import_v45.z.string().nullish(),
|
|
1479
|
+
choices: import_v45.z.array(
|
|
1480
|
+
import_v45.z.object({
|
|
1481
|
+
text: import_v45.z.string(),
|
|
1482
|
+
finish_reason: import_v45.z.string().nullish(),
|
|
1483
|
+
index: import_v45.z.number(),
|
|
1484
|
+
logprobs: import_v45.z.object({
|
|
1485
|
+
tokens: import_v45.z.array(import_v45.z.string()),
|
|
1486
|
+
token_logprobs: import_v45.z.array(import_v45.z.number()),
|
|
1487
|
+
top_logprobs: import_v45.z.array(import_v45.z.record(import_v45.z.string(), import_v45.z.number())).nullish()
|
|
1607
1488
|
}).nullish()
|
|
1608
1489
|
})
|
|
1609
1490
|
),
|
|
@@ -1614,22 +1495,22 @@ var openaiCompletionChunkSchema = import_v47.z.union([
|
|
|
1614
1495
|
|
|
1615
1496
|
// src/embedding/openai-embedding-model.ts
|
|
1616
1497
|
var import_provider5 = require("@ai-sdk/provider");
|
|
1617
|
-
var
|
|
1618
|
-
var
|
|
1498
|
+
var import_provider_utils5 = require("@ai-sdk/provider-utils");
|
|
1499
|
+
var import_v47 = require("zod/v4");
|
|
1619
1500
|
|
|
1620
1501
|
// src/embedding/openai-embedding-options.ts
|
|
1621
|
-
var
|
|
1622
|
-
var openaiEmbeddingProviderOptions =
|
|
1502
|
+
var import_v46 = require("zod/v4");
|
|
1503
|
+
var openaiEmbeddingProviderOptions = import_v46.z.object({
|
|
1623
1504
|
/**
|
|
1624
1505
|
The number of dimensions the resulting output embeddings should have.
|
|
1625
1506
|
Only supported in text-embedding-3 and later models.
|
|
1626
1507
|
*/
|
|
1627
|
-
dimensions:
|
|
1508
|
+
dimensions: import_v46.z.number().optional(),
|
|
1628
1509
|
/**
|
|
1629
1510
|
A unique identifier representing your end-user, which can help OpenAI to
|
|
1630
1511
|
monitor and detect abuse. Learn more.
|
|
1631
1512
|
*/
|
|
1632
|
-
user:
|
|
1513
|
+
user: import_v46.z.string().optional()
|
|
1633
1514
|
});
|
|
1634
1515
|
|
|
1635
1516
|
// src/embedding/openai-embedding-model.ts
|
|
@@ -1659,7 +1540,7 @@ var OpenAIEmbeddingModel = class {
|
|
|
1659
1540
|
values
|
|
1660
1541
|
});
|
|
1661
1542
|
}
|
|
1662
|
-
const openaiOptions = (_a = await (0,
|
|
1543
|
+
const openaiOptions = (_a = await (0, import_provider_utils5.parseProviderOptions)({
|
|
1663
1544
|
provider: "openai",
|
|
1664
1545
|
providerOptions,
|
|
1665
1546
|
schema: openaiEmbeddingProviderOptions
|
|
@@ -1668,12 +1549,12 @@ var OpenAIEmbeddingModel = class {
|
|
|
1668
1549
|
responseHeaders,
|
|
1669
1550
|
value: response,
|
|
1670
1551
|
rawValue
|
|
1671
|
-
} = await (0,
|
|
1552
|
+
} = await (0, import_provider_utils5.postJsonToApi)({
|
|
1672
1553
|
url: this.config.url({
|
|
1673
1554
|
path: "/embeddings",
|
|
1674
1555
|
modelId: this.modelId
|
|
1675
1556
|
}),
|
|
1676
|
-
headers: (0,
|
|
1557
|
+
headers: (0, import_provider_utils5.combineHeaders)(this.config.headers(), headers),
|
|
1677
1558
|
body: {
|
|
1678
1559
|
model: this.modelId,
|
|
1679
1560
|
input: values,
|
|
@@ -1682,7 +1563,7 @@ var OpenAIEmbeddingModel = class {
|
|
|
1682
1563
|
user: openaiOptions.user
|
|
1683
1564
|
},
|
|
1684
1565
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
1685
|
-
successfulResponseHandler: (0,
|
|
1566
|
+
successfulResponseHandler: (0, import_provider_utils5.createJsonResponseHandler)(
|
|
1686
1567
|
openaiTextEmbeddingResponseSchema
|
|
1687
1568
|
),
|
|
1688
1569
|
abortSignal,
|
|
@@ -1695,14 +1576,14 @@ var OpenAIEmbeddingModel = class {
|
|
|
1695
1576
|
};
|
|
1696
1577
|
}
|
|
1697
1578
|
};
|
|
1698
|
-
var openaiTextEmbeddingResponseSchema =
|
|
1699
|
-
data:
|
|
1700
|
-
usage:
|
|
1579
|
+
var openaiTextEmbeddingResponseSchema = import_v47.z.object({
|
|
1580
|
+
data: import_v47.z.array(import_v47.z.object({ embedding: import_v47.z.array(import_v47.z.number()) })),
|
|
1581
|
+
usage: import_v47.z.object({ prompt_tokens: import_v47.z.number() }).nullish()
|
|
1701
1582
|
});
|
|
1702
1583
|
|
|
1703
1584
|
// src/image/openai-image-model.ts
|
|
1704
|
-
var
|
|
1705
|
-
var
|
|
1585
|
+
var import_provider_utils6 = require("@ai-sdk/provider-utils");
|
|
1586
|
+
var import_v48 = require("zod/v4");
|
|
1706
1587
|
|
|
1707
1588
|
// src/image/openai-image-options.ts
|
|
1708
1589
|
var modelMaxImagesPerCall = {
|
|
@@ -1749,12 +1630,12 @@ var OpenAIImageModel = class {
|
|
|
1749
1630
|
warnings.push({ type: "unsupported-setting", setting: "seed" });
|
|
1750
1631
|
}
|
|
1751
1632
|
const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
|
|
1752
|
-
const { value: response, responseHeaders } = await (0,
|
|
1633
|
+
const { value: response, responseHeaders } = await (0, import_provider_utils6.postJsonToApi)({
|
|
1753
1634
|
url: this.config.url({
|
|
1754
1635
|
path: "/images/generations",
|
|
1755
1636
|
modelId: this.modelId
|
|
1756
1637
|
}),
|
|
1757
|
-
headers: (0,
|
|
1638
|
+
headers: (0, import_provider_utils6.combineHeaders)(this.config.headers(), headers),
|
|
1758
1639
|
body: {
|
|
1759
1640
|
model: this.modelId,
|
|
1760
1641
|
prompt,
|
|
@@ -1764,7 +1645,7 @@ var OpenAIImageModel = class {
|
|
|
1764
1645
|
...!hasDefaultResponseFormat.has(this.modelId) ? { response_format: "b64_json" } : {}
|
|
1765
1646
|
},
|
|
1766
1647
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
1767
|
-
successfulResponseHandler: (0,
|
|
1648
|
+
successfulResponseHandler: (0, import_provider_utils6.createJsonResponseHandler)(
|
|
1768
1649
|
openaiImageResponseSchema
|
|
1769
1650
|
),
|
|
1770
1651
|
abortSignal,
|
|
@@ -1790,36 +1671,36 @@ var OpenAIImageModel = class {
|
|
|
1790
1671
|
};
|
|
1791
1672
|
}
|
|
1792
1673
|
};
|
|
1793
|
-
var openaiImageResponseSchema =
|
|
1794
|
-
data:
|
|
1795
|
-
|
|
1674
|
+
var openaiImageResponseSchema = import_v48.z.object({
|
|
1675
|
+
data: import_v48.z.array(
|
|
1676
|
+
import_v48.z.object({ b64_json: import_v48.z.string(), revised_prompt: import_v48.z.string().optional() })
|
|
1796
1677
|
)
|
|
1797
1678
|
});
|
|
1798
1679
|
|
|
1799
1680
|
// src/tool/code-interpreter.ts
|
|
1800
|
-
var
|
|
1801
|
-
var
|
|
1802
|
-
var codeInterpreterInputSchema =
|
|
1803
|
-
code:
|
|
1804
|
-
containerId:
|
|
1681
|
+
var import_provider_utils7 = require("@ai-sdk/provider-utils");
|
|
1682
|
+
var import_v49 = require("zod/v4");
|
|
1683
|
+
var codeInterpreterInputSchema = import_v49.z.object({
|
|
1684
|
+
code: import_v49.z.string().nullish(),
|
|
1685
|
+
containerId: import_v49.z.string()
|
|
1805
1686
|
});
|
|
1806
|
-
var codeInterpreterOutputSchema =
|
|
1807
|
-
outputs:
|
|
1808
|
-
|
|
1809
|
-
|
|
1810
|
-
|
|
1687
|
+
var codeInterpreterOutputSchema = import_v49.z.object({
|
|
1688
|
+
outputs: import_v49.z.array(
|
|
1689
|
+
import_v49.z.discriminatedUnion("type", [
|
|
1690
|
+
import_v49.z.object({ type: import_v49.z.literal("logs"), logs: import_v49.z.string() }),
|
|
1691
|
+
import_v49.z.object({ type: import_v49.z.literal("image"), url: import_v49.z.string() })
|
|
1811
1692
|
])
|
|
1812
1693
|
).nullish()
|
|
1813
1694
|
});
|
|
1814
|
-
var codeInterpreterArgsSchema =
|
|
1815
|
-
container:
|
|
1816
|
-
|
|
1817
|
-
|
|
1818
|
-
fileIds:
|
|
1695
|
+
var codeInterpreterArgsSchema = import_v49.z.object({
|
|
1696
|
+
container: import_v49.z.union([
|
|
1697
|
+
import_v49.z.string(),
|
|
1698
|
+
import_v49.z.object({
|
|
1699
|
+
fileIds: import_v49.z.array(import_v49.z.string()).optional()
|
|
1819
1700
|
})
|
|
1820
1701
|
]).optional()
|
|
1821
1702
|
});
|
|
1822
|
-
var codeInterpreterToolFactory = (0,
|
|
1703
|
+
var codeInterpreterToolFactory = (0, import_provider_utils7.createProviderDefinedToolFactoryWithOutputSchema)({
|
|
1823
1704
|
id: "openai.code_interpreter",
|
|
1824
1705
|
name: "code_interpreter",
|
|
1825
1706
|
inputSchema: codeInterpreterInputSchema,
|
|
@@ -1829,6 +1710,78 @@ var codeInterpreter = (args = {}) => {
|
|
|
1829
1710
|
return codeInterpreterToolFactory(args);
|
|
1830
1711
|
};
|
|
1831
1712
|
|
|
1713
|
+
// src/tool/file-search.ts
|
|
1714
|
+
var import_provider_utils8 = require("@ai-sdk/provider-utils");
|
|
1715
|
+
var import_v410 = require("zod/v4");
|
|
1716
|
+
var comparisonFilterSchema = import_v410.z.object({
|
|
1717
|
+
key: import_v410.z.string(),
|
|
1718
|
+
type: import_v410.z.enum(["eq", "ne", "gt", "gte", "lt", "lte"]),
|
|
1719
|
+
value: import_v410.z.union([import_v410.z.string(), import_v410.z.number(), import_v410.z.boolean()])
|
|
1720
|
+
});
|
|
1721
|
+
var compoundFilterSchema = import_v410.z.object({
|
|
1722
|
+
type: import_v410.z.enum(["and", "or"]),
|
|
1723
|
+
filters: import_v410.z.array(
|
|
1724
|
+
import_v410.z.union([comparisonFilterSchema, import_v410.z.lazy(() => compoundFilterSchema)])
|
|
1725
|
+
)
|
|
1726
|
+
});
|
|
1727
|
+
var fileSearchArgsSchema = import_v410.z.object({
|
|
1728
|
+
vectorStoreIds: import_v410.z.array(import_v410.z.string()),
|
|
1729
|
+
maxNumResults: import_v410.z.number().optional(),
|
|
1730
|
+
ranking: import_v410.z.object({
|
|
1731
|
+
ranker: import_v410.z.string().optional(),
|
|
1732
|
+
scoreThreshold: import_v410.z.number().optional()
|
|
1733
|
+
}).optional(),
|
|
1734
|
+
filters: import_v410.z.union([comparisonFilterSchema, compoundFilterSchema]).optional()
|
|
1735
|
+
});
|
|
1736
|
+
var fileSearchOutputSchema = import_v410.z.object({
|
|
1737
|
+
queries: import_v410.z.array(import_v410.z.string()),
|
|
1738
|
+
results: import_v410.z.array(
|
|
1739
|
+
import_v410.z.object({
|
|
1740
|
+
attributes: import_v410.z.record(import_v410.z.string(), import_v410.z.unknown()),
|
|
1741
|
+
fileId: import_v410.z.string(),
|
|
1742
|
+
filename: import_v410.z.string(),
|
|
1743
|
+
score: import_v410.z.number(),
|
|
1744
|
+
text: import_v410.z.string()
|
|
1745
|
+
})
|
|
1746
|
+
).nullable()
|
|
1747
|
+
});
|
|
1748
|
+
var fileSearch = (0, import_provider_utils8.createProviderDefinedToolFactoryWithOutputSchema)({
|
|
1749
|
+
id: "openai.file_search",
|
|
1750
|
+
name: "file_search",
|
|
1751
|
+
inputSchema: import_v410.z.object({}),
|
|
1752
|
+
outputSchema: fileSearchOutputSchema
|
|
1753
|
+
});
|
|
1754
|
+
|
|
1755
|
+
// src/tool/image-generation.ts
|
|
1756
|
+
var import_provider_utils9 = require("@ai-sdk/provider-utils");
|
|
1757
|
+
var import_v411 = require("zod/v4");
|
|
1758
|
+
var imageGenerationArgsSchema = import_v411.z.object({
|
|
1759
|
+
background: import_v411.z.enum(["auto", "opaque", "transparent"]).optional(),
|
|
1760
|
+
inputFidelity: import_v411.z.enum(["low", "high"]).optional(),
|
|
1761
|
+
inputImageMask: import_v411.z.object({
|
|
1762
|
+
fileId: import_v411.z.string().optional(),
|
|
1763
|
+
imageUrl: import_v411.z.string().optional()
|
|
1764
|
+
}).optional(),
|
|
1765
|
+
model: import_v411.z.string().optional(),
|
|
1766
|
+
moderation: import_v411.z.enum(["auto"]).optional(),
|
|
1767
|
+
outputCompression: import_v411.z.number().int().min(0).max(100).optional(),
|
|
1768
|
+
outputFormat: import_v411.z.enum(["png", "jpeg", "webp"]).optional(),
|
|
1769
|
+
quality: import_v411.z.enum(["auto", "low", "medium", "high"]).optional(),
|
|
1770
|
+
size: import_v411.z.enum(["1024x1024", "1024x1536", "1536x1024", "auto"]).optional()
|
|
1771
|
+
}).strict();
|
|
1772
|
+
var imageGenerationOutputSchema = import_v411.z.object({
|
|
1773
|
+
result: import_v411.z.string()
|
|
1774
|
+
});
|
|
1775
|
+
var imageGenerationToolFactory = (0, import_provider_utils9.createProviderDefinedToolFactoryWithOutputSchema)({
|
|
1776
|
+
id: "openai.image_generation",
|
|
1777
|
+
name: "image_generation",
|
|
1778
|
+
inputSchema: import_v411.z.object({}),
|
|
1779
|
+
outputSchema: imageGenerationOutputSchema
|
|
1780
|
+
});
|
|
1781
|
+
var imageGeneration = (args = {}) => {
|
|
1782
|
+
return imageGenerationToolFactory(args);
|
|
1783
|
+
};
|
|
1784
|
+
|
|
1832
1785
|
// src/tool/web-search.ts
|
|
1833
1786
|
var import_provider_utils10 = require("@ai-sdk/provider-utils");
|
|
1834
1787
|
var import_v412 = require("zod/v4");
|
|
@@ -1870,6 +1823,65 @@ var webSearch = (args = {}) => {
|
|
|
1870
1823
|
return webSearchToolFactory(args);
|
|
1871
1824
|
};
|
|
1872
1825
|
|
|
1826
|
+
// src/tool/web-search-preview.ts
|
|
1827
|
+
var import_provider_utils11 = require("@ai-sdk/provider-utils");
|
|
1828
|
+
var import_v413 = require("zod/v4");
|
|
1829
|
+
var webSearchPreviewArgsSchema = import_v413.z.object({
|
|
1830
|
+
/**
|
|
1831
|
+
* Search context size to use for the web search.
|
|
1832
|
+
* - high: Most comprehensive context, highest cost, slower response
|
|
1833
|
+
* - medium: Balanced context, cost, and latency (default)
|
|
1834
|
+
* - low: Least context, lowest cost, fastest response
|
|
1835
|
+
*/
|
|
1836
|
+
searchContextSize: import_v413.z.enum(["low", "medium", "high"]).optional(),
|
|
1837
|
+
/**
|
|
1838
|
+
* User location information to provide geographically relevant search results.
|
|
1839
|
+
*/
|
|
1840
|
+
userLocation: import_v413.z.object({
|
|
1841
|
+
/**
|
|
1842
|
+
* Type of location (always 'approximate')
|
|
1843
|
+
*/
|
|
1844
|
+
type: import_v413.z.literal("approximate"),
|
|
1845
|
+
/**
|
|
1846
|
+
* Two-letter ISO country code (e.g., 'US', 'GB')
|
|
1847
|
+
*/
|
|
1848
|
+
country: import_v413.z.string().optional(),
|
|
1849
|
+
/**
|
|
1850
|
+
* City name (free text, e.g., 'Minneapolis')
|
|
1851
|
+
*/
|
|
1852
|
+
city: import_v413.z.string().optional(),
|
|
1853
|
+
/**
|
|
1854
|
+
* Region name (free text, e.g., 'Minnesota')
|
|
1855
|
+
*/
|
|
1856
|
+
region: import_v413.z.string().optional(),
|
|
1857
|
+
/**
|
|
1858
|
+
* IANA timezone (e.g., 'America/Chicago')
|
|
1859
|
+
*/
|
|
1860
|
+
timezone: import_v413.z.string().optional()
|
|
1861
|
+
}).optional()
|
|
1862
|
+
});
|
|
1863
|
+
var webSearchPreview = (0, import_provider_utils11.createProviderDefinedToolFactory)({
|
|
1864
|
+
id: "openai.web_search_preview",
|
|
1865
|
+
name: "web_search_preview",
|
|
1866
|
+
inputSchema: import_v413.z.object({
|
|
1867
|
+
action: import_v413.z.discriminatedUnion("type", [
|
|
1868
|
+
import_v413.z.object({
|
|
1869
|
+
type: import_v413.z.literal("search"),
|
|
1870
|
+
query: import_v413.z.string().nullish()
|
|
1871
|
+
}),
|
|
1872
|
+
import_v413.z.object({
|
|
1873
|
+
type: import_v413.z.literal("open_page"),
|
|
1874
|
+
url: import_v413.z.string()
|
|
1875
|
+
}),
|
|
1876
|
+
import_v413.z.object({
|
|
1877
|
+
type: import_v413.z.literal("find"),
|
|
1878
|
+
url: import_v413.z.string(),
|
|
1879
|
+
pattern: import_v413.z.string()
|
|
1880
|
+
})
|
|
1881
|
+
]).nullish()
|
|
1882
|
+
})
|
|
1883
|
+
});
|
|
1884
|
+
|
|
1873
1885
|
// src/openai-tools.ts
|
|
1874
1886
|
var openaiTools = {
|
|
1875
1887
|
/**
|
|
@@ -1895,6 +1907,20 @@ var openaiTools = {
|
|
|
1895
1907
|
* @param filters - The filters to use for the file search.
|
|
1896
1908
|
*/
|
|
1897
1909
|
fileSearch,
|
|
1910
|
+
/**
|
|
1911
|
+
* The image generation tool allows you to generate images using a text prompt,
|
|
1912
|
+
* and optionally image inputs. It leverages the GPT Image model,
|
|
1913
|
+
* and automatically optimizes text inputs for improved performance.
|
|
1914
|
+
*
|
|
1915
|
+
* Must have name `image_generation`.
|
|
1916
|
+
*
|
|
1917
|
+
* @param size - Image dimensions (e.g., 1024x1024, 1024x1536)
|
|
1918
|
+
* @param quality - Rendering quality (e.g. low, medium, high)
|
|
1919
|
+
* @param format - File output format
|
|
1920
|
+
* @param compression - Compression level (0-100%) for JPEG and WebP formats
|
|
1921
|
+
* @param background - Transparent or opaque
|
|
1922
|
+
*/
|
|
1923
|
+
imageGeneration,
|
|
1898
1924
|
/**
|
|
1899
1925
|
* Web search allows models to access up-to-date information from the internet
|
|
1900
1926
|
* and provide answers with sourced citations.
|
|
@@ -1922,13 +1948,13 @@ var openaiTools = {
|
|
|
1922
1948
|
|
|
1923
1949
|
// src/responses/openai-responses-language-model.ts
|
|
1924
1950
|
var import_provider8 = require("@ai-sdk/provider");
|
|
1925
|
-
var
|
|
1926
|
-
var
|
|
1951
|
+
var import_provider_utils13 = require("@ai-sdk/provider-utils");
|
|
1952
|
+
var import_v415 = require("zod/v4");
|
|
1927
1953
|
|
|
1928
1954
|
// src/responses/convert-to-openai-responses-input.ts
|
|
1929
1955
|
var import_provider6 = require("@ai-sdk/provider");
|
|
1930
|
-
var
|
|
1931
|
-
var
|
|
1956
|
+
var import_provider_utils12 = require("@ai-sdk/provider-utils");
|
|
1957
|
+
var import_v414 = require("zod/v4");
|
|
1932
1958
|
function isFileId(data, prefixes) {
|
|
1933
1959
|
if (!prefixes) return false;
|
|
1934
1960
|
return prefixes.some((prefix) => data.startsWith(prefix));
|
|
@@ -1936,7 +1962,8 @@ function isFileId(data, prefixes) {
|
|
|
1936
1962
|
async function convertToOpenAIResponsesInput({
|
|
1937
1963
|
prompt,
|
|
1938
1964
|
systemMessageMode,
|
|
1939
|
-
fileIdPrefixes
|
|
1965
|
+
fileIdPrefixes,
|
|
1966
|
+
store
|
|
1940
1967
|
}) {
|
|
1941
1968
|
var _a, _b, _c, _d, _e, _f;
|
|
1942
1969
|
const input = [];
|
|
@@ -1984,7 +2011,7 @@ async function convertToOpenAIResponsesInput({
|
|
|
1984
2011
|
return {
|
|
1985
2012
|
type: "input_image",
|
|
1986
2013
|
...part.data instanceof URL ? { image_url: part.data.toString() } : typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
|
|
1987
|
-
image_url: `data:${mediaType};base64,${(0,
|
|
2014
|
+
image_url: `data:${mediaType};base64,${(0, import_provider_utils12.convertToBase64)(part.data)}`
|
|
1988
2015
|
},
|
|
1989
2016
|
detail: (_b2 = (_a2 = part.providerOptions) == null ? void 0 : _a2.openai) == null ? void 0 : _b2.imageDetail
|
|
1990
2017
|
};
|
|
@@ -1999,7 +2026,7 @@ async function convertToOpenAIResponsesInput({
|
|
|
1999
2026
|
type: "input_file",
|
|
2000
2027
|
...typeof part.data === "string" && isFileId(part.data, fileIdPrefixes) ? { file_id: part.data } : {
|
|
2001
2028
|
filename: (_c2 = part.filename) != null ? _c2 : `part-${index}.pdf`,
|
|
2002
|
-
file_data: `data:application/pdf;base64,${(0,
|
|
2029
|
+
file_data: `data:application/pdf;base64,${(0, import_provider_utils12.convertToBase64)(part.data)}`
|
|
2003
2030
|
}
|
|
2004
2031
|
};
|
|
2005
2032
|
} else {
|
|
@@ -2041,14 +2068,18 @@ async function convertToOpenAIResponsesInput({
|
|
|
2041
2068
|
break;
|
|
2042
2069
|
}
|
|
2043
2070
|
case "tool-result": {
|
|
2044
|
-
|
|
2045
|
-
type: "
|
|
2046
|
-
|
|
2047
|
-
|
|
2071
|
+
if (store) {
|
|
2072
|
+
input.push({ type: "item_reference", id: part.toolCallId });
|
|
2073
|
+
} else {
|
|
2074
|
+
warnings.push({
|
|
2075
|
+
type: "other",
|
|
2076
|
+
message: `Results for OpenAI tool ${part.toolName} are not sent to the API when store is false`
|
|
2077
|
+
});
|
|
2078
|
+
}
|
|
2048
2079
|
break;
|
|
2049
2080
|
}
|
|
2050
2081
|
case "reasoning": {
|
|
2051
|
-
const providerOptions = await (0,
|
|
2082
|
+
const providerOptions = await (0, import_provider_utils12.parseProviderOptions)({
|
|
2052
2083
|
provider: "openai",
|
|
2053
2084
|
providerOptions: part.providerOptions,
|
|
2054
2085
|
schema: openaiResponsesReasoningProviderOptionsSchema
|
|
@@ -2119,9 +2150,9 @@ async function convertToOpenAIResponsesInput({
|
|
|
2119
2150
|
}
|
|
2120
2151
|
return { input, warnings };
|
|
2121
2152
|
}
|
|
2122
|
-
var openaiResponsesReasoningProviderOptionsSchema =
|
|
2123
|
-
itemId:
|
|
2124
|
-
reasoningEncryptedContent:
|
|
2153
|
+
var openaiResponsesReasoningProviderOptionsSchema = import_v414.z.object({
|
|
2154
|
+
itemId: import_v414.z.string().nullish(),
|
|
2155
|
+
reasoningEncryptedContent: import_v414.z.string().nullish()
|
|
2125
2156
|
});
|
|
2126
2157
|
|
|
2127
2158
|
// src/responses/map-openai-responses-finish-reason.ts
|
|
@@ -2174,7 +2205,10 @@ function prepareResponsesTools({
|
|
|
2174
2205
|
type: "file_search",
|
|
2175
2206
|
vector_store_ids: args.vectorStoreIds,
|
|
2176
2207
|
max_num_results: args.maxNumResults,
|
|
2177
|
-
ranking_options: args.ranking ? {
|
|
2208
|
+
ranking_options: args.ranking ? {
|
|
2209
|
+
ranker: args.ranking.ranker,
|
|
2210
|
+
score_threshold: args.ranking.scoreThreshold
|
|
2211
|
+
} : void 0,
|
|
2178
2212
|
filters: args.filters
|
|
2179
2213
|
});
|
|
2180
2214
|
break;
|
|
@@ -2206,8 +2240,23 @@ function prepareResponsesTools({
|
|
|
2206
2240
|
});
|
|
2207
2241
|
break;
|
|
2208
2242
|
}
|
|
2209
|
-
|
|
2210
|
-
|
|
2243
|
+
case "openai.image_generation": {
|
|
2244
|
+
const args = imageGenerationArgsSchema.parse(tool.args);
|
|
2245
|
+
openaiTools2.push({
|
|
2246
|
+
type: "image_generation",
|
|
2247
|
+
background: args.background,
|
|
2248
|
+
input_fidelity: args.inputFidelity,
|
|
2249
|
+
input_image_mask: args.inputImageMask ? {
|
|
2250
|
+
file_id: args.inputImageMask.fileId,
|
|
2251
|
+
image_url: args.inputImageMask.imageUrl
|
|
2252
|
+
} : void 0,
|
|
2253
|
+
model: args.model,
|
|
2254
|
+
size: args.size,
|
|
2255
|
+
quality: args.quality,
|
|
2256
|
+
moderation: args.moderation,
|
|
2257
|
+
output_format: args.outputFormat,
|
|
2258
|
+
output_compression: args.outputCompression
|
|
2259
|
+
});
|
|
2211
2260
|
break;
|
|
2212
2261
|
}
|
|
2213
2262
|
}
|
|
@@ -2230,7 +2279,7 @@ function prepareResponsesTools({
|
|
|
2230
2279
|
case "tool":
|
|
2231
2280
|
return {
|
|
2232
2281
|
tools: openaiTools2,
|
|
2233
|
-
toolChoice: toolChoice.toolName === "code_interpreter" || toolChoice.toolName === "file_search" || toolChoice.toolName === "web_search_preview" || toolChoice.toolName === "web_search" ? { type: toolChoice.toolName } : { type: "function", name: toolChoice.toolName },
|
|
2282
|
+
toolChoice: toolChoice.toolName === "code_interpreter" || toolChoice.toolName === "file_search" || toolChoice.toolName === "image_generation" || toolChoice.toolName === "web_search_preview" || toolChoice.toolName === "web_search" ? { type: toolChoice.toolName } : { type: "function", name: toolChoice.toolName },
|
|
2234
2283
|
toolWarnings
|
|
2235
2284
|
};
|
|
2236
2285
|
default: {
|
|
@@ -2243,47 +2292,66 @@ function prepareResponsesTools({
|
|
|
2243
2292
|
}
|
|
2244
2293
|
|
|
2245
2294
|
// src/responses/openai-responses-language-model.ts
|
|
2246
|
-
var webSearchCallItem =
|
|
2247
|
-
type:
|
|
2248
|
-
id:
|
|
2249
|
-
status:
|
|
2250
|
-
action:
|
|
2251
|
-
|
|
2252
|
-
type:
|
|
2253
|
-
query:
|
|
2295
|
+
var webSearchCallItem = import_v415.z.object({
|
|
2296
|
+
type: import_v415.z.literal("web_search_call"),
|
|
2297
|
+
id: import_v415.z.string(),
|
|
2298
|
+
status: import_v415.z.string(),
|
|
2299
|
+
action: import_v415.z.discriminatedUnion("type", [
|
|
2300
|
+
import_v415.z.object({
|
|
2301
|
+
type: import_v415.z.literal("search"),
|
|
2302
|
+
query: import_v415.z.string().nullish()
|
|
2254
2303
|
}),
|
|
2255
|
-
|
|
2256
|
-
type:
|
|
2257
|
-
url:
|
|
2304
|
+
import_v415.z.object({
|
|
2305
|
+
type: import_v415.z.literal("open_page"),
|
|
2306
|
+
url: import_v415.z.string()
|
|
2258
2307
|
}),
|
|
2259
|
-
|
|
2260
|
-
type:
|
|
2261
|
-
url:
|
|
2262
|
-
pattern:
|
|
2308
|
+
import_v415.z.object({
|
|
2309
|
+
type: import_v415.z.literal("find"),
|
|
2310
|
+
url: import_v415.z.string(),
|
|
2311
|
+
pattern: import_v415.z.string()
|
|
2263
2312
|
})
|
|
2264
2313
|
]).nullish()
|
|
2265
2314
|
});
|
|
2266
|
-
var
|
|
2267
|
-
type:
|
|
2268
|
-
id:
|
|
2269
|
-
|
|
2270
|
-
|
|
2271
|
-
|
|
2272
|
-
|
|
2273
|
-
|
|
2274
|
-
|
|
2315
|
+
var fileSearchCallItem = import_v415.z.object({
|
|
2316
|
+
type: import_v415.z.literal("file_search_call"),
|
|
2317
|
+
id: import_v415.z.string(),
|
|
2318
|
+
queries: import_v415.z.array(import_v415.z.string()),
|
|
2319
|
+
results: import_v415.z.array(
|
|
2320
|
+
import_v415.z.object({
|
|
2321
|
+
attributes: import_v415.z.record(import_v415.z.string(), import_v415.z.unknown()),
|
|
2322
|
+
file_id: import_v415.z.string(),
|
|
2323
|
+
filename: import_v415.z.string(),
|
|
2324
|
+
score: import_v415.z.number(),
|
|
2325
|
+
text: import_v415.z.string()
|
|
2326
|
+
})
|
|
2327
|
+
).nullish()
|
|
2328
|
+
});
|
|
2329
|
+
var codeInterpreterCallItem = import_v415.z.object({
|
|
2330
|
+
type: import_v415.z.literal("code_interpreter_call"),
|
|
2331
|
+
id: import_v415.z.string(),
|
|
2332
|
+
code: import_v415.z.string().nullable(),
|
|
2333
|
+
container_id: import_v415.z.string(),
|
|
2334
|
+
outputs: import_v415.z.array(
|
|
2335
|
+
import_v415.z.discriminatedUnion("type", [
|
|
2336
|
+
import_v415.z.object({ type: import_v415.z.literal("logs"), logs: import_v415.z.string() }),
|
|
2337
|
+
import_v415.z.object({ type: import_v415.z.literal("image"), url: import_v415.z.string() })
|
|
2275
2338
|
])
|
|
2276
2339
|
).nullable()
|
|
2277
2340
|
});
|
|
2341
|
+
var imageGenerationCallItem = import_v415.z.object({
|
|
2342
|
+
type: import_v415.z.literal("image_generation_call"),
|
|
2343
|
+
id: import_v415.z.string(),
|
|
2344
|
+
result: import_v415.z.string()
|
|
2345
|
+
});
|
|
2278
2346
|
var TOP_LOGPROBS_MAX = 20;
|
|
2279
|
-
var LOGPROBS_SCHEMA =
|
|
2280
|
-
|
|
2281
|
-
token:
|
|
2282
|
-
logprob:
|
|
2283
|
-
top_logprobs:
|
|
2284
|
-
|
|
2285
|
-
token:
|
|
2286
|
-
logprob:
|
|
2347
|
+
var LOGPROBS_SCHEMA = import_v415.z.array(
|
|
2348
|
+
import_v415.z.object({
|
|
2349
|
+
token: import_v415.z.string(),
|
|
2350
|
+
logprob: import_v415.z.number(),
|
|
2351
|
+
top_logprobs: import_v415.z.array(
|
|
2352
|
+
import_v415.z.object({
|
|
2353
|
+
token: import_v415.z.string(),
|
|
2354
|
+
logprob: import_v415.z.number()
|
|
2287
2355
|
})
|
|
2288
2356
|
)
|
|
2289
2357
|
})
|
|
@@ -2340,29 +2408,41 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
2340
2408
|
if (stopSequences != null) {
|
|
2341
2409
|
warnings.push({ type: "unsupported-setting", setting: "stopSequences" });
|
|
2342
2410
|
}
|
|
2411
|
+
const openaiOptions = await (0, import_provider_utils13.parseProviderOptions)({
|
|
2412
|
+
provider: "openai",
|
|
2413
|
+
providerOptions,
|
|
2414
|
+
schema: openaiResponsesProviderOptionsSchema
|
|
2415
|
+
});
|
|
2343
2416
|
const { input, warnings: inputWarnings } = await convertToOpenAIResponsesInput({
|
|
2344
2417
|
prompt,
|
|
2345
2418
|
systemMessageMode: modelConfig.systemMessageMode,
|
|
2346
|
-
fileIdPrefixes: this.config.fileIdPrefixes
|
|
2419
|
+
fileIdPrefixes: this.config.fileIdPrefixes,
|
|
2420
|
+
store: (_a = openaiOptions == null ? void 0 : openaiOptions.store) != null ? _a : true
|
|
2347
2421
|
});
|
|
2348
2422
|
warnings.push(...inputWarnings);
|
|
2349
|
-
const
|
|
2350
|
-
provider: "openai",
|
|
2351
|
-
providerOptions,
|
|
2352
|
-
schema: openaiResponsesProviderOptionsSchema
|
|
2353
|
-
});
|
|
2354
|
-
const strictJsonSchema = (_a = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _a : false;
|
|
2423
|
+
const strictJsonSchema = (_b = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _b : false;
|
|
2355
2424
|
let include = openaiOptions == null ? void 0 : openaiOptions.include;
|
|
2425
|
+
function addInclude(key) {
|
|
2426
|
+
include = include != null ? [...include, key] : [key];
|
|
2427
|
+
}
|
|
2428
|
+
function hasOpenAITool(id) {
|
|
2429
|
+
return (tools == null ? void 0 : tools.find(
|
|
2430
|
+
(tool) => tool.type === "provider-defined" && tool.id === id
|
|
2431
|
+
)) != null;
|
|
2432
|
+
}
|
|
2356
2433
|
const topLogprobs = typeof (openaiOptions == null ? void 0 : openaiOptions.logprobs) === "number" ? openaiOptions == null ? void 0 : openaiOptions.logprobs : (openaiOptions == null ? void 0 : openaiOptions.logprobs) === true ? TOP_LOGPROBS_MAX : void 0;
|
|
2357
|
-
|
|
2358
|
-
|
|
2434
|
+
if (topLogprobs) {
|
|
2435
|
+
addInclude("message.output_text.logprobs");
|
|
2436
|
+
}
|
|
2437
|
+
const webSearchToolName = (_c = tools == null ? void 0 : tools.find(
|
|
2359
2438
|
(tool) => tool.type === "provider-defined" && (tool.id === "openai.web_search" || tool.id === "openai.web_search_preview")
|
|
2360
|
-
)) == null ? void 0 : _b.name;
|
|
2361
|
-
include = webSearchToolName ? Array.isArray(include) ? [...include, "web_search_call.action.sources"] : ["web_search_call.action.sources"] : include;
|
|
2362
|
-
const codeInterpreterToolName = (_c = tools == null ? void 0 : tools.find(
|
|
2363
|
-
(tool) => tool.type === "provider-defined" && tool.id === "openai.code_interpreter"
|
|
2364
2439
|
)) == null ? void 0 : _c.name;
|
|
2365
|
-
|
|
2440
|
+
if (webSearchToolName) {
|
|
2441
|
+
addInclude("web_search_call.action.sources");
|
|
2442
|
+
}
|
|
2443
|
+
if (hasOpenAITool("openai.code_interpreter")) {
|
|
2444
|
+
addInclude("code_interpreter_call.outputs");
|
|
2445
|
+
}
|
|
2366
2446
|
const baseArgs = {
|
|
2367
2447
|
model: this.modelId,
|
|
2368
2448
|
input,
|
|
@@ -2386,6 +2466,7 @@ var OpenAIResponsesLanguageModel = class {
  }
  },
  // provider options:
+ max_tool_calls: openaiOptions == null ? void 0 : openaiOptions.maxToolCalls,
  metadata: openaiOptions == null ? void 0 : openaiOptions.metadata,
  parallel_tool_calls: openaiOptions == null ? void 0 : openaiOptions.parallelToolCalls,
  previous_response_id: openaiOptions == null ? void 0 : openaiOptions.previousResponseId,
@@ -2481,7 +2562,7 @@ var OpenAIResponsesLanguageModel = class {
  };
  }
  async doGenerate(options) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s;
  const {
  args: body,
  warnings,
@@ -2495,102 +2576,88 @@ var OpenAIResponsesLanguageModel = class {
  responseHeaders,
  value: response,
  rawValue: rawResponse
- } = await (0,
+ } = await (0, import_provider_utils13.postJsonToApi)({
  url,
- headers: (0,
+ headers: (0, import_provider_utils13.combineHeaders)(this.config.headers(), options.headers),
  body,
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0,
-
- id:
- created_at:
- error:
- code:
- message:
+ successfulResponseHandler: (0, import_provider_utils13.createJsonResponseHandler)(
+ import_v415.z.object({
+ id: import_v415.z.string(),
+ created_at: import_v415.z.number(),
+ error: import_v415.z.object({
+ code: import_v415.z.string(),
+ message: import_v415.z.string()
  }).nullish(),
- model:
- output:
-
-
- type:
- role:
- id:
- content:
-
- type:
- text:
+ model: import_v415.z.string(),
+ output: import_v415.z.array(
+ import_v415.z.discriminatedUnion("type", [
+ import_v415.z.object({
+ type: import_v415.z.literal("message"),
+ role: import_v415.z.literal("assistant"),
+ id: import_v415.z.string(),
+ content: import_v415.z.array(
+ import_v415.z.object({
+ type: import_v415.z.literal("output_text"),
+ text: import_v415.z.string(),
  logprobs: LOGPROBS_SCHEMA.nullish(),
- annotations:
-
-
- type:
- start_index:
- end_index:
- url:
- title:
+ annotations: import_v415.z.array(
+ import_v415.z.discriminatedUnion("type", [
+ import_v415.z.object({
+ type: import_v415.z.literal("url_citation"),
+ start_index: import_v415.z.number(),
+ end_index: import_v415.z.number(),
+ url: import_v415.z.string(),
+ title: import_v415.z.string()
  }),
-
- type:
- file_id:
- filename:
- index:
- start_index:
- end_index:
- quote:
+ import_v415.z.object({
+ type: import_v415.z.literal("file_citation"),
+ file_id: import_v415.z.string(),
+ filename: import_v415.z.string().nullish(),
+ index: import_v415.z.number().nullish(),
+ start_index: import_v415.z.number().nullish(),
+ end_index: import_v415.z.number().nullish(),
+ quote: import_v415.z.string().nullish()
  }),
-
- type:
+ import_v415.z.object({
+ type: import_v415.z.literal("container_file_citation")
  })
  ])
  )
  })
  )
  }),
- codeInterpreterCallItem,
- import_v414.z.object({
- type: import_v414.z.literal("function_call"),
- call_id: import_v414.z.string(),
- name: import_v414.z.string(),
- arguments: import_v414.z.string(),
- id: import_v414.z.string()
- }),
  webSearchCallItem,
-
-
-
-
+ fileSearchCallItem,
+ codeInterpreterCallItem,
+ imageGenerationCallItem,
+ import_v415.z.object({
+ type: import_v415.z.literal("function_call"),
+ call_id: import_v415.z.string(),
+ name: import_v415.z.string(),
+ arguments: import_v415.z.string(),
+ id: import_v415.z.string()
  }),
-
- type:
- id:
- status:
- queries: import_v414.z.array(import_v414.z.string()).nullish(),
- results: import_v414.z.array(
- import_v414.z.object({
- attributes: import_v414.z.object({
- file_id: import_v414.z.string(),
- filename: import_v414.z.string(),
- score: import_v414.z.number(),
- text: import_v414.z.string()
- })
- })
- ).nullish()
+ import_v415.z.object({
+ type: import_v415.z.literal("computer_call"),
+ id: import_v415.z.string(),
+ status: import_v415.z.string().optional()
  }),
-
- type:
- id:
- encrypted_content:
- summary:
-
- type:
- text:
+ import_v415.z.object({
+ type: import_v415.z.literal("reasoning"),
+ id: import_v415.z.string(),
+ encrypted_content: import_v415.z.string().nullish(),
+ summary: import_v415.z.array(
+ import_v415.z.object({
+ type: import_v415.z.literal("summary_text"),
+ text: import_v415.z.string()
  })
  )
  })
  ])
  ),
- service_tier:
- incomplete_details:
+ service_tier: import_v415.z.string().nullish(),
+ incomplete_details: import_v415.z.object({ reason: import_v415.z.string() }).nullable(),
  usage: usageSchema2
  })
  ),
@@ -2631,6 +2698,25 @@ var OpenAIResponsesLanguageModel = class {
  }
  break;
  }
+ case "image_generation_call": {
+ content.push({
+ type: "tool-call",
+ toolCallId: part.id,
+ toolName: "image_generation",
+ input: "{}",
+ providerExecuted: true
+ });
+ content.push({
+ type: "tool-result",
+ toolCallId: part.id,
+ toolName: "image_generation",
+ result: {
+ result: part.result
+ },
+ providerExecuted: true
+ });
+ break;
+ }
  case "message": {
  for (const contentPart of part.content) {
  if (((_c = (_b = options.providerOptions) == null ? void 0 : _b.openai) == null ? void 0 : _c.logprobs) && contentPart.logprobs) {
@@ -2650,7 +2736,7 @@ var OpenAIResponsesLanguageModel = class {
  content.push({
  type: "source",
  sourceType: "url",
- id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : (0,
+ id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : (0, import_provider_utils13.generateId)(),
  url: annotation.url,
  title: annotation.title
  });
@@ -2658,7 +2744,7 @@ var OpenAIResponsesLanguageModel = class {
  content.push({
  type: "source",
  sourceType: "document",
- id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0,
+ id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0, import_provider_utils13.generateId)(),
  mediaType: "text/plain",
  title: (_k = (_j = annotation.quote) != null ? _j : annotation.filename) != null ? _k : "Document",
  filename: (_l = annotation.filename) != null ? _l : annotation.file_id
@@ -2725,7 +2811,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "tool-call",
  toolCallId: part.id,
  toolName: "file_search",
- input: "",
+ input: "{}",
  providerExecuted: true
  });
  content.push({
@@ -2733,10 +2819,14 @@ var OpenAIResponsesLanguageModel = class {
  toolCallId: part.id,
  toolName: "file_search",
  result: {
-
-
-
-
+ queries: part.queries,
+ results: (_n = (_m = part.results) == null ? void 0 : _m.map((result) => ({
+ attributes: result.attributes,
+ fileId: result.file_id,
+ filename: result.filename,
+ score: result.score,
+ text: result.text
+ }))) != null ? _n : null
  },
  providerExecuted: true
  });
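The file_search hunk above replaces the empty tool input with "{}" and reshapes the API's snake_case `results` payload into camelCase fields before emitting the tool result. An illustrative sketch of just that mapping, mirroring the added lines rather than quoting the package verbatim:

// Sketch only: reshape a file_search_call item into the emitted tool result.
function toFileSearchToolResult(part) {
  return {
    queries: part.queries,
    results: part.results?.map((result) => ({
      attributes: result.attributes,
      fileId: result.file_id, // snake_case API field -> camelCase result field
      filename: result.filename,
      score: result.score,
      text: result.text
    })) ?? null // null when the API returned no results array
  };
}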
@@ -2778,15 +2868,15 @@ var OpenAIResponsesLanguageModel = class {
  return {
  content,
  finishReason: mapOpenAIResponseFinishReason({
- finishReason: (
+ finishReason: (_o = response.incomplete_details) == null ? void 0 : _o.reason,
  hasFunctionCall
  }),
  usage: {
  inputTokens: response.usage.input_tokens,
  outputTokens: response.usage.output_tokens,
  totalTokens: response.usage.input_tokens + response.usage.output_tokens,
- reasoningTokens: (
- cachedInputTokens: (
+ reasoningTokens: (_q = (_p = response.usage.output_tokens_details) == null ? void 0 : _p.reasoning_tokens) != null ? _q : void 0,
+ cachedInputTokens: (_s = (_r = response.usage.input_tokens_details) == null ? void 0 : _r.cached_tokens) != null ? _s : void 0
  },
  request: { body },
  response: {
@@ -2806,18 +2896,18 @@ var OpenAIResponsesLanguageModel = class {
  warnings,
  webSearchToolName
  } = await this.getArgs(options);
- const { responseHeaders, value: response } = await (0,
+ const { responseHeaders, value: response } = await (0, import_provider_utils13.postJsonToApi)({
  url: this.config.url({
  path: "/responses",
  modelId: this.modelId
  }),
- headers: (0,
+ headers: (0, import_provider_utils13.combineHeaders)(this.config.headers(), options.headers),
  body: {
  ...body,
  stream: true
  },
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0,
+ successfulResponseHandler: (0, import_provider_utils13.createEventSourceResponseHandler)(
  openaiResponsesChunkSchema
  ),
  abortSignal: options.abortSignal,
@@ -2843,7 +2933,7 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({ type: "stream-start", warnings });
  },
  transform(chunk, controller) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w;
  if (options.includeRawChunks) {
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
  }
@@ -2885,14 +2975,20 @@ var OpenAIResponsesLanguageModel = class {
  toolName: "computer_use"
  });
  } else if (value.item.type === "file_search_call") {
-
+ controller.enqueue({
+ type: "tool-call",
+ toolCallId: value.item.id,
  toolName: "file_search",
-
-
+ input: "{}",
+ providerExecuted: true
+ });
+ } else if (value.item.type === "image_generation_call") {
  controller.enqueue({
- type: "tool-
-
- toolName: "
+ type: "tool-call",
+ toolCallId: value.item.id,
+ toolName: "image_generation",
+ input: "{}",
+ providerExecuted: true
  });
  } else if (value.item.type === "message") {
  controller.enqueue({
@@ -2984,26 +3080,19 @@ var OpenAIResponsesLanguageModel = class {
  });
  } else if (value.item.type === "file_search_call") {
  ongoingToolCalls[value.output_index] = void 0;
- controller.enqueue({
- type: "tool-input-end",
- id: value.item.id
- });
- controller.enqueue({
- type: "tool-call",
- toolCallId: value.item.id,
- toolName: "file_search",
- input: "",
- providerExecuted: true
- });
  controller.enqueue({
  type: "tool-result",
  toolCallId: value.item.id,
  toolName: "file_search",
  result: {
-
-
-
-
+ queries: value.item.queries,
+ results: (_c = (_b = value.item.results) == null ? void 0 : _b.map((result) => ({
+ attributes: result.attributes,
+ fileId: result.file_id,
+ filename: result.filename,
+ score: result.score,
+ text: result.text
+ }))) != null ? _c : null
  },
  providerExecuted: true
  });
@@ -3027,6 +3116,16 @@ var OpenAIResponsesLanguageModel = class {
  },
  providerExecuted: true
  });
+ } else if (value.item.type === "image_generation_call") {
+ controller.enqueue({
+ type: "tool-result",
+ toolCallId: value.item.id,
+ toolName: "image_generation",
+ result: {
+ result: value.item.result
+ },
+ providerExecuted: true
+ });
  } else if (value.item.type === "message") {
  controller.enqueue({
  type: "text-end",
@@ -3041,7 +3140,7 @@ var OpenAIResponsesLanguageModel = class {
  providerMetadata: {
  openai: {
  itemId: value.item.id,
- reasoningEncryptedContent: (
+ reasoningEncryptedContent: (_d = value.item.encrypted_content) != null ? _d : null
  }
  }
  });
@@ -3071,12 +3170,12 @@ var OpenAIResponsesLanguageModel = class {
  id: value.item_id,
  delta: value.delta
  });
- if (((
+ if (((_f = (_e = options.providerOptions) == null ? void 0 : _e.openai) == null ? void 0 : _f.logprobs) && value.logprobs) {
  logprobs.push(value.logprobs);
  }
  } else if (isResponseReasoningSummaryPartAddedChunk(value)) {
  if (value.summary_index > 0) {
- (
+ (_g = activeReasoning[value.item_id]) == null ? void 0 : _g.summaryParts.push(
  value.summary_index
  );
  controller.enqueue({
@@ -3085,7 +3184,7 @@ var OpenAIResponsesLanguageModel = class {
  providerMetadata: {
  openai: {
  itemId: value.item_id,
- reasoningEncryptedContent: (
+ reasoningEncryptedContent: (_i = (_h = activeReasoning[value.item_id]) == null ? void 0 : _h.encryptedContent) != null ? _i : null
  }
  }
  });
@@ -3103,14 +3202,14 @@ var OpenAIResponsesLanguageModel = class {
  });
  } else if (isResponseFinishedChunk(value)) {
  finishReason = mapOpenAIResponseFinishReason({
- finishReason: (
+ finishReason: (_j = value.response.incomplete_details) == null ? void 0 : _j.reason,
  hasFunctionCall
  });
  usage.inputTokens = value.response.usage.input_tokens;
  usage.outputTokens = value.response.usage.output_tokens;
  usage.totalTokens = value.response.usage.input_tokens + value.response.usage.output_tokens;
- usage.reasoningTokens = (
- usage.cachedInputTokens = (
+ usage.reasoningTokens = (_l = (_k = value.response.usage.output_tokens_details) == null ? void 0 : _k.reasoning_tokens) != null ? _l : void 0;
+ usage.cachedInputTokens = (_n = (_m = value.response.usage.input_tokens_details) == null ? void 0 : _m.cached_tokens) != null ? _n : void 0;
  if (typeof value.response.service_tier === "string") {
  serviceTier = value.response.service_tier;
  }
@@ -3119,7 +3218,7 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({
  type: "source",
  sourceType: "url",
- id: (
+ id: (_q = (_p = (_o = self.config).generateId) == null ? void 0 : _p.call(_o)) != null ? _q : (0, import_provider_utils13.generateId)(),
  url: value.annotation.url,
  title: value.annotation.title
  });
@@ -3127,10 +3226,10 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({
  type: "source",
  sourceType: "document",
- id: (
+ id: (_t = (_s = (_r = self.config).generateId) == null ? void 0 : _s.call(_r)) != null ? _t : (0, import_provider_utils13.generateId)(),
  mediaType: "text/plain",
- title: (
- filename: (
+ title: (_v = (_u = value.annotation.quote) != null ? _u : value.annotation.filename) != null ? _v : "Document",
+ filename: (_w = value.annotation.filename) != null ? _w : value.annotation.file_id
  });
  }
  } else if (isErrorChunk(value)) {
@@ -3163,177 +3262,155 @@ var OpenAIResponsesLanguageModel = class {
  };
  }
  };
- var usageSchema2 =
- input_tokens:
- input_tokens_details:
- output_tokens:
- output_tokens_details:
+ var usageSchema2 = import_v415.z.object({
+ input_tokens: import_v415.z.number(),
+ input_tokens_details: import_v415.z.object({ cached_tokens: import_v415.z.number().nullish() }).nullish(),
+ output_tokens: import_v415.z.number(),
+ output_tokens_details: import_v415.z.object({ reasoning_tokens: import_v415.z.number().nullish() }).nullish()
  });
- var textDeltaChunkSchema =
- type:
- item_id:
- delta:
+ var textDeltaChunkSchema = import_v415.z.object({
+ type: import_v415.z.literal("response.output_text.delta"),
+ item_id: import_v415.z.string(),
+ delta: import_v415.z.string(),
  logprobs: LOGPROBS_SCHEMA.nullish()
  });
- var errorChunkSchema =
- type:
- code:
- message:
- param:
- sequence_number:
+ var errorChunkSchema = import_v415.z.object({
+ type: import_v415.z.literal("error"),
+ code: import_v415.z.string(),
+ message: import_v415.z.string(),
+ param: import_v415.z.string().nullish(),
+ sequence_number: import_v415.z.number()
  });
- var responseFinishedChunkSchema =
- type:
- response:
- incomplete_details:
+ var responseFinishedChunkSchema = import_v415.z.object({
+ type: import_v415.z.enum(["response.completed", "response.incomplete"]),
+ response: import_v415.z.object({
+ incomplete_details: import_v415.z.object({ reason: import_v415.z.string() }).nullish(),
  usage: usageSchema2,
- service_tier:
+ service_tier: import_v415.z.string().nullish()
  })
  });
- var responseCreatedChunkSchema =
- type:
- response:
- id:
- created_at:
- model:
- service_tier:
+ var responseCreatedChunkSchema = import_v415.z.object({
+ type: import_v415.z.literal("response.created"),
+ response: import_v415.z.object({
+ id: import_v415.z.string(),
+ created_at: import_v415.z.number(),
+ model: import_v415.z.string(),
+ service_tier: import_v415.z.string().nullish()
  })
  });
- var responseOutputItemAddedSchema =
- type:
- output_index:
- item:
-
- type:
- id:
+ var responseOutputItemAddedSchema = import_v415.z.object({
+ type: import_v415.z.literal("response.output_item.added"),
+ output_index: import_v415.z.number(),
+ item: import_v415.z.discriminatedUnion("type", [
+ import_v415.z.object({
+ type: import_v415.z.literal("message"),
+ id: import_v415.z.string()
  }),
-
- type:
- id:
- encrypted_content:
+ import_v415.z.object({
+ type: import_v415.z.literal("reasoning"),
+ id: import_v415.z.string(),
+ encrypted_content: import_v415.z.string().nullish()
  }),
-
- type:
- id:
- call_id:
- name:
- arguments:
+ import_v415.z.object({
+ type: import_v415.z.literal("function_call"),
+ id: import_v415.z.string(),
+ call_id: import_v415.z.string(),
+ name: import_v415.z.string(),
+ arguments: import_v415.z.string()
  }),
-
- type:
- id:
- status:
- action:
- type:
- query:
+ import_v415.z.object({
+ type: import_v415.z.literal("web_search_call"),
+ id: import_v415.z.string(),
+ status: import_v415.z.string(),
+ action: import_v415.z.object({
+ type: import_v415.z.literal("search"),
+ query: import_v415.z.string().optional()
  }).nullish()
  }),
-
- type:
- id:
- status:
+ import_v415.z.object({
+ type: import_v415.z.literal("computer_call"),
+ id: import_v415.z.string(),
+ status: import_v415.z.string()
  }),
-
- type:
- id:
-
-
-
-
- attributes: import_v414.z.object({
- file_id: import_v414.z.string(),
- filename: import_v414.z.string(),
- score: import_v414.z.number(),
- text: import_v414.z.string()
- })
- })
- ).optional()
+ import_v415.z.object({
+ type: import_v415.z.literal("file_search_call"),
+ id: import_v415.z.string()
+ }),
+ import_v415.z.object({
+ type: import_v415.z.literal("image_generation_call"),
+ id: import_v415.z.string()
  })
  ])
  });
- var responseOutputItemDoneSchema =
- type:
- output_index:
- item:
-
- type:
- id:
+ var responseOutputItemDoneSchema = import_v415.z.object({
+ type: import_v415.z.literal("response.output_item.done"),
+ output_index: import_v415.z.number(),
+ item: import_v415.z.discriminatedUnion("type", [
+ import_v415.z.object({
+ type: import_v415.z.literal("message"),
+ id: import_v415.z.string()
  }),
-
- type:
- id:
- encrypted_content:
+ import_v415.z.object({
+ type: import_v415.z.literal("reasoning"),
+ id: import_v415.z.string(),
+ encrypted_content: import_v415.z.string().nullish()
  }),
-
- type:
- id:
- call_id:
- name:
- arguments:
- status:
+ import_v415.z.object({
+ type: import_v415.z.literal("function_call"),
+ id: import_v415.z.string(),
+ call_id: import_v415.z.string(),
+ name: import_v415.z.string(),
+ arguments: import_v415.z.string(),
+ status: import_v415.z.literal("completed")
  }),
  codeInterpreterCallItem,
+ imageGenerationCallItem,
  webSearchCallItem,
-
-
-
-
-
- import_v414.z.object({
- type: import_v414.z.literal("file_search_call"),
- id: import_v414.z.string(),
- status: import_v414.z.literal("completed"),
- queries: import_v414.z.array(import_v414.z.string()).nullish(),
- results: import_v414.z.array(
- import_v414.z.object({
- attributes: import_v414.z.object({
- file_id: import_v414.z.string(),
- filename: import_v414.z.string(),
- score: import_v414.z.number(),
- text: import_v414.z.string()
- })
- })
- ).nullish()
+ fileSearchCallItem,
+ import_v415.z.object({
+ type: import_v415.z.literal("computer_call"),
+ id: import_v415.z.string(),
+ status: import_v415.z.literal("completed")
  })
  ])
  });
- var responseFunctionCallArgumentsDeltaSchema =
- type:
- item_id:
- output_index:
- delta:
+ var responseFunctionCallArgumentsDeltaSchema = import_v415.z.object({
+ type: import_v415.z.literal("response.function_call_arguments.delta"),
+ item_id: import_v415.z.string(),
+ output_index: import_v415.z.number(),
+ delta: import_v415.z.string()
  });
- var responseAnnotationAddedSchema =
- type:
- annotation:
-
- type:
- url:
- title:
+ var responseAnnotationAddedSchema = import_v415.z.object({
+ type: import_v415.z.literal("response.output_text.annotation.added"),
+ annotation: import_v415.z.discriminatedUnion("type", [
+ import_v415.z.object({
+ type: import_v415.z.literal("url_citation"),
+ url: import_v415.z.string(),
+ title: import_v415.z.string()
  }),
-
- type:
- file_id:
- filename:
- index:
- start_index:
- end_index:
- quote:
+ import_v415.z.object({
+ type: import_v415.z.literal("file_citation"),
+ file_id: import_v415.z.string(),
+ filename: import_v415.z.string().nullish(),
+ index: import_v415.z.number().nullish(),
+ start_index: import_v415.z.number().nullish(),
+ end_index: import_v415.z.number().nullish(),
+ quote: import_v415.z.string().nullish()
  })
  ])
  });
- var responseReasoningSummaryPartAddedSchema =
- type:
- item_id:
- summary_index:
+ var responseReasoningSummaryPartAddedSchema = import_v415.z.object({
+ type: import_v415.z.literal("response.reasoning_summary_part.added"),
+ item_id: import_v415.z.string(),
+ summary_index: import_v415.z.number()
  });
- var responseReasoningSummaryTextDeltaSchema =
- type:
- item_id:
- summary_index:
- delta:
+ var responseReasoningSummaryTextDeltaSchema = import_v415.z.object({
+ type: import_v415.z.literal("response.reasoning_summary_text.delta"),
+ item_id: import_v415.z.string(),
+ summary_index: import_v415.z.number(),
+ delta: import_v415.z.string()
  });
- var openaiResponsesChunkSchema =
+ var openaiResponsesChunkSchema = import_v415.z.union([
  textDeltaChunkSchema,
  responseFinishedChunkSchema,
  responseCreatedChunkSchema,
@@ -3344,7 +3421,7 @@ var openaiResponsesChunkSchema = import_v414.z.union([
  responseReasoningSummaryPartAddedSchema,
  responseReasoningSummaryTextDeltaSchema,
  errorChunkSchema,
-
+ import_v415.z.object({ type: import_v415.z.string() }).loose()
  // fallback for unknown chunks
  ]);
  function isTextDeltaChunk(chunk) {
@@ -3417,27 +3494,15 @@ function getResponsesModelConfig(modelId) {
  isReasoningModel: false
  };
  }
- var openaiResponsesProviderOptionsSchema =
-
-
- previousResponseId: import_v414.z.string().nullish(),
- store: import_v414.z.boolean().nullish(),
- user: import_v414.z.string().nullish(),
- reasoningEffort: import_v414.z.string().nullish(),
- strictJsonSchema: import_v414.z.boolean().nullish(),
- instructions: import_v414.z.string().nullish(),
- reasoningSummary: import_v414.z.string().nullish(),
- serviceTier: import_v414.z.enum(["auto", "flex", "priority"]).nullish(),
- include: import_v414.z.array(
- import_v414.z.enum([
+ var openaiResponsesProviderOptionsSchema = import_v415.z.object({
+ include: import_v415.z.array(
+ import_v415.z.enum([
  "reasoning.encrypted_content",
  "file_search_call.results",
  "message.output_text.logprobs"
  ])
  ).nullish(),
-
- promptCacheKey: import_v414.z.string().nullish(),
- safetyIdentifier: import_v414.z.string().nullish(),
+ instructions: import_v415.z.string().nullish(),
  /**
  * Return the log probabilities of the tokens.
  *
@@ -3450,15 +3515,33 @@ var openaiResponsesProviderOptionsSchema = import_v414.z.object({
  * @see https://platform.openai.com/docs/api-reference/responses/create
  * @see https://cookbook.openai.com/examples/using_logprobs
  */
- logprobs:
+ logprobs: import_v415.z.union([import_v415.z.boolean(), import_v415.z.number().min(1).max(TOP_LOGPROBS_MAX)]).optional(),
+ /**
+ * The maximum number of total calls to built-in tools that can be processed in a response.
+ * This maximum number applies across all built-in tool calls, not per individual tool.
+ * Any further attempts to call a tool by the model will be ignored.
+ */
+ maxToolCalls: import_v415.z.number().nullish(),
+ metadata: import_v415.z.any().nullish(),
+ parallelToolCalls: import_v415.z.boolean().nullish(),
+ previousResponseId: import_v415.z.string().nullish(),
+ promptCacheKey: import_v415.z.string().nullish(),
+ reasoningEffort: import_v415.z.string().nullish(),
+ reasoningSummary: import_v415.z.string().nullish(),
+ safetyIdentifier: import_v415.z.string().nullish(),
+ serviceTier: import_v415.z.enum(["auto", "flex", "priority"]).nullish(),
+ store: import_v415.z.boolean().nullish(),
+ strictJsonSchema: import_v415.z.boolean().nullish(),
+ textVerbosity: import_v415.z.enum(["low", "medium", "high"]).nullish(),
+ user: import_v415.z.string().nullish()
  });

  // src/speech/openai-speech-model.ts
- var
- var
- var OpenAIProviderOptionsSchema =
- instructions:
- speed:
+ var import_provider_utils14 = require("@ai-sdk/provider-utils");
+ var import_v416 = require("zod/v4");
+ var OpenAIProviderOptionsSchema = import_v416.z.object({
+ instructions: import_v416.z.string().nullish(),
+ speed: import_v416.z.number().min(0.25).max(4).default(1).nullish()
  });
  var OpenAISpeechModel = class {
  constructor(modelId, config) {
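The provider-options hunk above adds `maxToolCalls` (forwarded to the Responses API as `max_tool_calls`) and a `textVerbosity` enum alongside the existing `logprobs` option. A hedged usage sketch, assuming the usual AI SDK `generateText`/`providerOptions` call shape; the call site itself is illustrative and not part of this diff:

// Sketch only: how application code might opt into the new responses provider options.
import { openai } from "@ai-sdk/openai";
import { generateText } from "ai";

const { text } = await generateText({
  model: openai.responses("gpt-4o"), // assumed model id for illustration
  prompt: "Summarize the latest release notes.",
  providerOptions: {
    openai: {
      maxToolCalls: 3, // cap on total built-in tool calls -> max_tool_calls
      textVerbosity: "low", // one of "low" | "medium" | "high"
      logprobs: 5 // top-5 logprobs; `true` falls back to the provider cap
    }
  }
});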
@@ -3479,7 +3562,7 @@ var OpenAISpeechModel = class {
  providerOptions
  }) {
  const warnings = [];
- const openAIOptions = await (0,
+ const openAIOptions = await (0, import_provider_utils14.parseProviderOptions)({
  provider: "openai",
  providerOptions,
  schema: OpenAIProviderOptionsSchema
@@ -3532,15 +3615,15 @@ var OpenAISpeechModel = class {
  value: audio,
  responseHeaders,
  rawValue: rawResponse
- } = await (0,
+ } = await (0, import_provider_utils14.postJsonToApi)({
  url: this.config.url({
  path: "/audio/speech",
  modelId: this.modelId
  }),
- headers: (0,
+ headers: (0, import_provider_utils14.combineHeaders)(this.config.headers(), options.headers),
  body: requestBody,
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0,
+ successfulResponseHandler: (0, import_provider_utils14.createBinaryResponseHandler)(),
  abortSignal: options.abortSignal,
  fetch: this.config.fetch
  });
@@ -3561,34 +3644,34 @@ var OpenAISpeechModel = class {
  };

  // src/transcription/openai-transcription-model.ts
- var
- var
+ var import_provider_utils15 = require("@ai-sdk/provider-utils");
+ var import_v418 = require("zod/v4");

  // src/transcription/openai-transcription-options.ts
- var
- var openAITranscriptionProviderOptions =
+ var import_v417 = require("zod/v4");
+ var openAITranscriptionProviderOptions = import_v417.z.object({
  /**
  * Additional information to include in the transcription response.
  */
- include:
+ include: import_v417.z.array(import_v417.z.string()).optional(),
  /**
  * The language of the input audio in ISO-639-1 format.
  */
- language:
+ language: import_v417.z.string().optional(),
  /**
  * An optional text to guide the model's style or continue a previous audio segment.
  */
- prompt:
+ prompt: import_v417.z.string().optional(),
  /**
  * The sampling temperature, between 0 and 1.
  * @default 0
  */
- temperature:
+ temperature: import_v417.z.number().min(0).max(1).default(0).optional(),
  /**
  * The timestamp granularities to populate for this transcription.
  * @default ['segment']
  */
- timestampGranularities:
+ timestampGranularities: import_v417.z.array(import_v417.z.enum(["word", "segment"])).default(["segment"]).optional()
  });

  // src/transcription/openai-transcription-model.ts
@@ -3666,15 +3749,15 @@ var OpenAITranscriptionModel = class {
  providerOptions
  }) {
  const warnings = [];
- const openAIOptions = await (0,
+ const openAIOptions = await (0, import_provider_utils15.parseProviderOptions)({
  provider: "openai",
  providerOptions,
  schema: openAITranscriptionProviderOptions
  });
  const formData = new FormData();
- const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([(0,
+ const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([(0, import_provider_utils15.convertBase64ToUint8Array)(audio)]);
  formData.append("model", this.modelId);
- const fileExtension = (0,
+ const fileExtension = (0, import_provider_utils15.mediaTypeToExtension)(mediaType);
  formData.append(
  "file",
  new File([blob], "audio", { type: mediaType }),
@@ -3719,15 +3802,15 @@ var OpenAITranscriptionModel = class {
  value: response,
  responseHeaders,
  rawValue: rawResponse
- } = await (0,
+ } = await (0, import_provider_utils15.postFormDataToApi)({
  url: this.config.url({
  path: "/audio/transcriptions",
  modelId: this.modelId
  }),
- headers: (0,
+ headers: (0, import_provider_utils15.combineHeaders)(this.config.headers(), options.headers),
  formData,
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0,
+ successfulResponseHandler: (0, import_provider_utils15.createJsonResponseHandler)(
  openaiTranscriptionResponseSchema
  ),
  abortSignal: options.abortSignal,
@@ -3757,29 +3840,29 @@ var OpenAITranscriptionModel = class {
  };
  }
  };
- var openaiTranscriptionResponseSchema =
- text:
- language:
- duration:
- words:
-
- word:
- start:
- end:
+ var openaiTranscriptionResponseSchema = import_v418.z.object({
+ text: import_v418.z.string(),
+ language: import_v418.z.string().nullish(),
+ duration: import_v418.z.number().nullish(),
+ words: import_v418.z.array(
+ import_v418.z.object({
+ word: import_v418.z.string(),
+ start: import_v418.z.number(),
+ end: import_v418.z.number()
  })
  ).nullish(),
- segments:
-
- id:
- seek:
- start:
- end:
- text:
- tokens:
- temperature:
- avg_logprob:
- compression_ratio:
- no_speech_prob:
+ segments: import_v418.z.array(
+ import_v418.z.object({
+ id: import_v418.z.number(),
+ seek: import_v418.z.number(),
+ start: import_v418.z.number(),
+ end: import_v418.z.number(),
+ text: import_v418.z.string(),
+ tokens: import_v418.z.array(import_v418.z.number()),
+ temperature: import_v418.z.number(),
+ avg_logprob: import_v418.z.number(),
+ compression_ratio: import_v418.z.number(),
+ no_speech_prob: import_v418.z.number()
  })
  ).nullish()
  });
@@ -3787,10 +3870,10 @@ var openaiTranscriptionResponseSchema = import_v417.z.object({
  // src/openai-provider.ts
  function createOpenAI(options = {}) {
  var _a, _b;
- const baseURL = (_a = (0,
+ const baseURL = (_a = (0, import_provider_utils16.withoutTrailingSlash)(options.baseURL)) != null ? _a : "https://api.openai.com/v1";
  const providerName = (_b = options.name) != null ? _b : "openai";
  const getHeaders = () => ({
- Authorization: `Bearer ${(0,
+ Authorization: `Bearer ${(0, import_provider_utils16.loadApiKey)({
  apiKey: options.apiKey,
  environmentVariableName: "OPENAI_API_KEY",
  description: "OpenAI"