@ai-sdk/openai 2.0.21 → 2.0.22
This diff compares the contents of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- package/CHANGELOG.md +8 -0
- package/dist/index.js +19 -13
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +19 -13
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +19 -13
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +19 -13
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +2 -2
package/dist/internal/index.mjs
CHANGED
@@ -2703,7 +2703,7 @@ var OpenAIResponsesLanguageModel = class {
     };
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
     const { args: body, warnings } = await this.getArgs(options);
     const url = this.config.url({
       path: "/responses",
@@ -2749,10 +2749,12 @@ var OpenAIResponsesLanguageModel = class {
         }),
         z16.object({
           type: z16.literal("file_citation"),
-          start_index: z16.number(),
-          end_index: z16.number(),
           file_id: z16.string(),
-          quote: z16.string()
+          filename: z16.string().nullish(),
+          index: z16.number().nullish(),
+          start_index: z16.number().nullish(),
+          end_index: z16.number().nullish(),
+          quote: z16.string().nullish()
         })
       ])
     )
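In readable form, the `file_citation` branch of the annotation union now treats everything except `type` and `file_id` as optional. A minimal sketch of the equivalent schema, using a plain `zod` import in place of the bundled `z16` and illustrative values in the parse call:

```ts
import { z } from "zod";

// Equivalent of the bundled schema above: start_index/end_index are no longer
// required, and filename, index, and quote are accepted when present.
const fileCitationAnnotation = z.object({
  type: z.literal("file_citation"),
  file_id: z.string(),
  filename: z.string().nullish(),
  index: z.number().nullish(),
  start_index: z.number().nullish(),
  end_index: z.number().nullish(),
  quote: z.string().nullish(),
});

// An annotation without character offsets (illustrative values) now parses
// instead of failing validation.
fileCitationAnnotation.parse({
  type: "file_citation",
  file_id: "file-abc123",
  filename: "report.pdf",
});
```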
@@ -2870,8 +2872,8 @@ var OpenAIResponsesLanguageModel = class {
             sourceType: "document",
             id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : generateId2(),
             mediaType: "text/plain",
-            title: annotation.quote,
-            filename: annotation.file_id
+            title: (_k = (_j = annotation.quote) != null ? _j : annotation.filename) != null ? _k : "Document",
+            filename: (_l = annotation.filename) != null ? _l : annotation.file_id
           });
         }
       }
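The `_j`/`_k`/`_l` temporaries above are the bundler's down-leveling of nullish coalescing. A minimal sketch of the new fallback order, using a hypothetical `toDocumentSource` helper and illustrative annotation values:

```ts
type FileCitationAnnotation = {
  type: "file_citation";
  file_id: string;
  filename?: string | null;
  index?: number | null;
  start_index?: number | null;
  end_index?: number | null;
  quote?: string | null;
};

// Hypothetical helper: the title now falls back from quote to filename to a
// generic "Document" label, and the filename falls back to file_id.
function toDocumentSource(annotation: FileCitationAnnotation) {
  return {
    sourceType: "document" as const,
    mediaType: "text/plain" as const,
    title: annotation.quote ?? annotation.filename ?? "Document",
    filename: annotation.filename ?? annotation.file_id,
  };
}

// With only file_id and filename present (illustrative values):
toDocumentSource({ type: "file_citation", file_id: "file-abc123", filename: "report.pdf" });
// -> { sourceType: "document", mediaType: "text/plain",
//      title: "report.pdf", filename: "report.pdf" }
```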
@@ -2962,15 +2964,15 @@ var OpenAIResponsesLanguageModel = class {
     return {
       content,
       finishReason: mapOpenAIResponseFinishReason({
-        finishReason: (
+        finishReason: (_m = response.incomplete_details) == null ? void 0 : _m.reason,
         hasToolCalls: content.some((part) => part.type === "tool-call")
       }),
       usage: {
         inputTokens: response.usage.input_tokens,
         outputTokens: response.usage.output_tokens,
         totalTokens: response.usage.input_tokens + response.usage.output_tokens,
-        reasoningTokens: (
-        cachedInputTokens: (
+        reasoningTokens: (_o = (_n = response.usage.output_tokens_details) == null ? void 0 : _n.reasoning_tokens) != null ? _o : void 0,
+        cachedInputTokens: (_q = (_p = response.usage.input_tokens_details) == null ? void 0 : _p.cached_tokens) != null ? _q : void 0
       },
       request: { body },
       response: {
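Un-transpiled, the new `_m`…`_q` temporaries correspond to optional chaining over fields that may be absent on the Responses API payload. A sketch under that assumption, with a hypothetical `mapUsage` helper:

```ts
type ResponsesUsage = {
  input_tokens: number;
  output_tokens: number;
  output_tokens_details?: { reasoning_tokens?: number } | null;
  input_tokens_details?: { cached_tokens?: number } | null;
};

// finishReason now reads response.incomplete_details?.reason, and the token
// detail fields are only surfaced when the API returns them.
function mapUsage(usage: ResponsesUsage) {
  return {
    inputTokens: usage.input_tokens,
    outputTokens: usage.output_tokens,
    totalTokens: usage.input_tokens + usage.output_tokens,
    reasoningTokens: usage.output_tokens_details?.reasoning_tokens ?? undefined,
    cachedInputTokens: usage.input_tokens_details?.cached_tokens ?? undefined,
  };
}

// With no detail objects present (illustrative values), both optional fields
// resolve to undefined rather than throwing:
mapUsage({ input_tokens: 12, output_tokens: 34 });
// -> { inputTokens: 12, outputTokens: 34, totalTokens: 46,
//      reasoningTokens: undefined, cachedInputTokens: undefined }
```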
@@ -3022,7 +3024,7 @@ var OpenAIResponsesLanguageModel = class {
         controller.enqueue({ type: "stream-start", warnings });
       },
       transform(chunk, controller) {
-        var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
+        var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u;
         if (options.includeRawChunks) {
           controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
         }
@@ -3288,8 +3290,8 @@ var OpenAIResponsesLanguageModel = class {
               sourceType: "document",
               id: (_r = (_q = (_p = self.config).generateId) == null ? void 0 : _q.call(_p)) != null ? _r : generateId2(),
               mediaType: "text/plain",
-              title: value.annotation.quote,
-              filename: value.annotation.file_id
+              title: (_t = (_s = value.annotation.quote) != null ? _s : value.annotation.filename) != null ? _t : "Document",
+              filename: (_u = value.annotation.filename) != null ? _u : value.annotation.file_id
             });
           }
         } else if (isErrorChunk(value)) {
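The streaming path applies the same fallback to the parsed annotation event (`value.annotation`) as `doGenerate` does above. A short sketch with an illustrative annotation that carries only `file_id`:

```ts
type StreamedFileCitation = {
  type: "file_citation";
  file_id: string;
  filename?: string | null;
  quote?: string | null;
};

// Illustrative stand-in for value.annotation on a streamed annotation chunk.
const annotation: StreamedFileCitation = { type: "file_citation", file_id: "file-abc123" };

// Previously title/filename came straight from quote/file_id; now they fall back:
const title = annotation.quote ?? annotation.filename ?? "Document"; // "Document"
const filename = annotation.filename ?? annotation.file_id; // "file-abc123"
```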
@@ -3467,7 +3469,11 @@ var responseAnnotationAddedSchema = z16.object({
     z16.object({
       type: z16.literal("file_citation"),
       file_id: z16.string(),
-      quote: z16.string()
+      filename: z16.string().nullish(),
+      index: z16.number().nullish(),
+      start_index: z16.number().nullish(),
+      end_index: z16.number().nullish(),
+      quote: z16.string().nullish()
     })
   ])
 });