@ai-sdk/openai 2.0.20 → 2.0.22
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +21 -0
- package/dist/index.js +27 -17
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +27 -17
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +27 -17
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +27 -17
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +2 -2
package/dist/internal/index.js
CHANGED
@@ -448,7 +448,7 @@ var webSearchPreview = (0, import_provider_utils4.createProviderDefinedToolFacto
     action: import_v44.z.discriminatedUnion("type", [
       import_v44.z.object({
         type: import_v44.z.literal("search"),
-        query: import_v44.z.string()
+        query: import_v44.z.string().nullish()
       }),
       import_v44.z.object({
         type: import_v44.z.literal("open_page"),
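In practice this means the `query` field on a web-search `search` action can be `null` or absent. A minimal zod sketch of the updated shape (names copied from the diff above; the plain `z` import stands in for the bundled `import_v44` alias):

```ts
import { z } from "zod";

// Updated "search" action shape: query is now optional/nullable.
const searchAction = z.object({
  type: z.literal("search"),
  query: z.string().nullish(), // was z.string(); now accepts string | null | undefined
});

searchAction.parse({ type: "search" });              // passes: query omitted
searchAction.parse({ type: "search", query: null }); // passes: query null
searchAction.parse({ type: "search", query: "ai" }); // passes: query present
```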
@@ -1936,8 +1936,12 @@ var OpenAITranscriptionModel = class {
         include: openAIOptions.include,
         language: openAIOptions.language,
         prompt: openAIOptions.prompt,
-        response_format
-        //
+        // https://platform.openai.com/docs/api-reference/audio/createTranscription#audio_createtranscription-response_format
+        // prefer verbose_json to get segments for models that support it
+        response_format: [
+          "gpt-4o-transcribe",
+          "gpt-4o-mini-transcribe"
+        ].includes(this.modelId) ? "json" : "verbose_json",
         temperature: openAIOptions.temperature,
         timestamp_granularities: openAIOptions.timestampGranularities
       };
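The transcription request now asks for `verbose_json` (which carries timestamped segments) except for the `gpt-4o-transcribe` models, which per the linked OpenAI docs do not accept it. A de-minified sketch mirroring the ternary above (the helper name is illustrative, not part of the package):

```ts
// Models that only accept the plain "json" response format.
const JSON_ONLY_TRANSCRIPTION_MODELS = ["gpt-4o-transcribe", "gpt-4o-mini-transcribe"];

function transcriptionResponseFormat(modelId: string): "json" | "verbose_json" {
  // verbose_json includes segment timestamps; fall back to json where unsupported.
  return JSON_ONLY_TRANSCRIPTION_MODELS.includes(modelId) ? "json" : "verbose_json";
}

transcriptionResponseFormat("whisper-1");          // "verbose_json"
transcriptionResponseFormat("gpt-4o-transcribe");  // "json"
```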
@@ -2470,7 +2474,7 @@ var webSearchCallItem = import_v416.z.object({
     action: import_v416.z.discriminatedUnion("type", [
       import_v416.z.object({
         type: import_v416.z.literal("search"),
-        query: import_v416.z.string()
+        query: import_v416.z.string().nullish()
       }),
       import_v416.z.object({
         type: import_v416.z.literal("open_page"),
@@ -2679,7 +2683,7 @@ var OpenAIResponsesLanguageModel = class {
     };
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
     const { args: body, warnings } = await this.getArgs(options);
     const url = this.config.url({
       path: "/responses",
@@ -2725,10 +2729,12 @@ var OpenAIResponsesLanguageModel = class {
           }),
           import_v416.z.object({
             type: import_v416.z.literal("file_citation"),
-            start_index: import_v416.z.number(),
-            end_index: import_v416.z.number(),
             file_id: import_v416.z.string(),
-            quote: import_v416.z.string()
+            filename: import_v416.z.string().nullish(),
+            index: import_v416.z.number().nullish(),
+            start_index: import_v416.z.number().nullish(),
+            end_index: import_v416.z.number().nullish(),
+            quote: import_v416.z.string().nullish()
           })
         ])
       )
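Only `file_id` remains required on `file_citation` annotations; the positional and quote fields are now `nullish`, and `filename`/`index` are accepted when present. A standalone zod sketch of the same schema (plain `z` import in place of the bundled alias):

```ts
import { z } from "zod";

// file_citation annotation: file_id required, everything else optional/nullable.
const fileCitation = z.object({
  type: z.literal("file_citation"),
  file_id: z.string(),
  filename: z.string().nullish(),
  index: z.number().nullish(),
  start_index: z.number().nullish(),
  end_index: z.number().nullish(),
  quote: z.string().nullish(),
});

// An annotation with only a file_id now parses instead of throwing.
fileCitation.parse({ type: "file_citation", file_id: "file_abc123" });
```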
@@ -2846,8 +2852,8 @@ var OpenAIResponsesLanguageModel = class {
             sourceType: "document",
             id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0, import_provider_utils14.generateId)(),
             mediaType: "text/plain",
-            title: annotation.quote,
-            filename: annotation.file_id
+            title: (_k = (_j = annotation.quote) != null ? _j : annotation.filename) != null ? _k : "Document",
+            filename: (_l = annotation.filename) != null ? _l : annotation.file_id
           });
         }
       }
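Reading past the downleveled `_j`/`_k`/`_l` temporaries, the mapping now falls back from `quote` to `filename` to a generic `"Document"` title, and prefers `filename` over `file_id` for the source's filename. A simplified sketch of that logic (the `FileCitation` type here is an assumption for illustration):

```ts
// Assumed minimal shape of a parsed file_citation annotation.
type FileCitation = {
  file_id: string;
  filename?: string | null;
  quote?: string | null;
};

// Equivalent of the nullish-coalescing chains in the compiled output above.
function toDocumentSource(annotation: FileCitation) {
  return {
    sourceType: "document" as const,
    mediaType: "text/plain",
    title: annotation.quote ?? annotation.filename ?? "Document",
    filename: annotation.filename ?? annotation.file_id,
  };
}
```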
@@ -2938,15 +2944,15 @@ var OpenAIResponsesLanguageModel = class {
     return {
       content,
       finishReason: mapOpenAIResponseFinishReason({
-        finishReason: (
+        finishReason: (_m = response.incomplete_details) == null ? void 0 : _m.reason,
         hasToolCalls: content.some((part) => part.type === "tool-call")
       }),
       usage: {
         inputTokens: response.usage.input_tokens,
         outputTokens: response.usage.output_tokens,
         totalTokens: response.usage.input_tokens + response.usage.output_tokens,
-        reasoningTokens: (
-        cachedInputTokens: (
+        reasoningTokens: (_o = (_n = response.usage.output_tokens_details) == null ? void 0 : _n.reasoning_tokens) != null ? _o : void 0,
+        cachedInputTokens: (_q = (_p = response.usage.input_tokens_details) == null ? void 0 : _p.cached_tokens) != null ? _q : void 0
       },
       request: { body },
       response: {
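In the new code, `finishReason` is read from `response.incomplete_details?.reason`, and the reasoning/cached token counts are read through optional chains so a missing `*_tokens_details` object does not throw. A simplified sketch of the usage mapping (the `ResponsesUsage` type is an assumption standing in for the parsed Responses API payload):

```ts
// Assumed minimal shape of the usage block on a Responses API result.
type ResponsesUsage = {
  input_tokens: number;
  output_tokens: number;
  input_tokens_details?: { cached_tokens?: number } | null;
  output_tokens_details?: { reasoning_tokens?: number } | null;
};

// Equivalent of the compiled _n/_o/_p/_q chains above.
function mapUsage(usage: ResponsesUsage) {
  return {
    inputTokens: usage.input_tokens,
    outputTokens: usage.output_tokens,
    totalTokens: usage.input_tokens + usage.output_tokens,
    reasoningTokens: usage.output_tokens_details?.reasoning_tokens ?? undefined,
    cachedInputTokens: usage.input_tokens_details?.cached_tokens ?? undefined,
  };
}
```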
@@ -2998,7 +3004,7 @@ var OpenAIResponsesLanguageModel = class {
         controller.enqueue({ type: "stream-start", warnings });
       },
       transform(chunk, controller) {
-        var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
+        var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u;
         if (options.includeRawChunks) {
           controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
         }
@@ -3264,8 +3270,8 @@ var OpenAIResponsesLanguageModel = class {
               sourceType: "document",
               id: (_r = (_q = (_p = self.config).generateId) == null ? void 0 : _q.call(_p)) != null ? _r : (0, import_provider_utils14.generateId)(),
               mediaType: "text/plain",
-              title: value.annotation.quote,
-              filename: value.annotation.file_id
+              title: (_t = (_s = value.annotation.quote) != null ? _s : value.annotation.filename) != null ? _t : "Document",
+              filename: (_u = value.annotation.filename) != null ? _u : value.annotation.file_id
             });
           }
         } else if (isErrorChunk(value)) {
@@ -3443,7 +3449,11 @@ var responseAnnotationAddedSchema = import_v416.z.object({
     import_v416.z.object({
       type: import_v416.z.literal("file_citation"),
       file_id: import_v416.z.string(),
-      quote: import_v416.z.string()
+      filename: import_v416.z.string().nullish(),
+      index: import_v416.z.number().nullish(),
+      start_index: import_v416.z.number().nullish(),
+      end_index: import_v416.z.number().nullish(),
+      quote: import_v416.z.string().nullish()
     })
   ])
 });