@ai-sdk/openai 2.0.21 → 2.0.22
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +8 -0
- package/dist/index.js +19 -13
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +19 -13
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +19 -13
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +19 -13
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +2 -2
package/CHANGELOG.md
CHANGED

@@ -1,5 +1,13 @@
 # @ai-sdk/openai
 
+## 2.0.22
+
+### Patch Changes
+
+- 0e272ae: fix(provider/openai): make file_citation annotation fields optional for responses api compatibility
+- Updated dependencies [886e7cd]
+  - @ai-sdk/provider-utils@3.0.7
+
 ## 2.0.21
 
 ### Patch Changes
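The fix targets `file_citation` annotations that the Responses API can return without character offsets. An illustrative example of such a payload (field values are placeholders, not taken from the diff):

```ts
// Hypothetical file_citation annotation as the Responses API may return it:
// there are no start_index/end_index fields, which the 2.0.21 schema required.
const annotation = {
  type: "file_citation" as const,
  file_id: "file-abc123", // placeholder id
  filename: "report.pdf", // placeholder filename
  index: 0,
};
// 2.0.21 rejected this shape during response validation; 2.0.22 accepts it
// because the offset and quote fields are now optional (nullish) in the schema.
```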
package/dist/index.js
CHANGED

@@ -2360,7 +2360,7 @@ var OpenAIResponsesLanguageModel = class {
     };
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
     const { args: body, warnings } = await this.getArgs(options);
     const url = this.config.url({
       path: "/responses",
@@ -2406,10 +2406,12 @@ var OpenAIResponsesLanguageModel = class {
         }),
         import_v413.z.object({
           type: import_v413.z.literal("file_citation"),
-          start_index: import_v413.z.number(),
-          end_index: import_v413.z.number(),
           file_id: import_v413.z.string(),
-          quote: import_v413.z.string()
+          filename: import_v413.z.string().nullish(),
+          index: import_v413.z.number().nullish(),
+          start_index: import_v413.z.number().nullish(),
+          end_index: import_v413.z.number().nullish(),
+          quote: import_v413.z.string().nullish()
         })
       ])
     )
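De-minified, the relaxed annotation schema in the hunk above amounts to the following sketch (plain `zod` stands in for the bundled `import_v413`; this is a reconstruction, not the provider's actual source):

```ts
import { z } from "zod";

// Only `type` and `file_id` remain required; every positional/quote field
// becomes optional and nullable via .nullish().
const fileCitationSchema = z.object({
  type: z.literal("file_citation"),
  file_id: z.string(),
  filename: z.string().nullish(),
  index: z.number().nullish(),
  start_index: z.number().nullish(),
  end_index: z.number().nullish(),
  quote: z.string().nullish(),
});
```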
@@ -2527,8 +2529,8 @@ var OpenAIResponsesLanguageModel = class {
           sourceType: "document",
           id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : (0, import_provider_utils12.generateId)(),
           mediaType: "text/plain",
-          title: annotation.quote,
-          filename: annotation.file_id
+          title: (_k = (_j = annotation.quote) != null ? _j : annotation.filename) != null ? _k : "Document",
+          filename: (_l = annotation.filename) != null ? _l : annotation.file_id
         });
       }
     }
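The nullish-coalescing chains introduced in that hunk read more naturally as `??` fallbacks; a source-level sketch (the type and helper names are illustrative, not the provider's own):

```ts
// Simplified stand-in for the parsed file_citation annotation.
type FileCitationAnnotation = {
  file_id: string;
  filename?: string | null;
  quote?: string | null;
};

function toDocumentSourceFields(annotation: FileCitationAnnotation) {
  return {
    // Prefer the quote as a human-readable title, then the filename, then a generic label.
    title: annotation.quote ?? annotation.filename ?? "Document",
    // Prefer the filename, falling back to the OpenAI file id.
    filename: annotation.filename ?? annotation.file_id,
  };
}
```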
@@ -2619,15 +2621,15 @@ var OpenAIResponsesLanguageModel = class {
     return {
       content,
       finishReason: mapOpenAIResponseFinishReason({
-        finishReason: (_j = response.incomplete_details) == null ? void 0 : _j.reason,
+        finishReason: (_m = response.incomplete_details) == null ? void 0 : _m.reason,
         hasToolCalls: content.some((part) => part.type === "tool-call")
       }),
       usage: {
         inputTokens: response.usage.input_tokens,
         outputTokens: response.usage.output_tokens,
         totalTokens: response.usage.input_tokens + response.usage.output_tokens,
-        reasoningTokens: (_l = (_k = response.usage.output_tokens_details) == null ? void 0 : _k.reasoning_tokens) != null ? _l : void 0,
-        cachedInputTokens: (_n = (_m = response.usage.input_tokens_details) == null ? void 0 : _m.cached_tokens) != null ? _n : void 0
+        reasoningTokens: (_o = (_n = response.usage.output_tokens_details) == null ? void 0 : _n.reasoning_tokens) != null ? _o : void 0,
+        cachedInputTokens: (_q = (_p = response.usage.input_tokens_details) == null ? void 0 : _p.cached_tokens) != null ? _q : void 0
       },
       request: { body },
       response: {
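The finish-reason and usage lines in the hunk above appear to change only because esbuild renumbered its temporaries after the new fallbacks were added; the mapping itself corresponds to plain optional chaining. A sketch with simplified, illustrative types:

```ts
// Simplified shape of the relevant part of a Responses API response body.
type ResponsesUsage = {
  input_tokens: number;
  output_tokens: number;
  output_tokens_details?: { reasoning_tokens?: number } | null;
  input_tokens_details?: { cached_tokens?: number } | null;
};

function mapUsage(usage: ResponsesUsage) {
  return {
    inputTokens: usage.input_tokens,
    outputTokens: usage.output_tokens,
    totalTokens: usage.input_tokens + usage.output_tokens,
    // The detail objects may be absent; fall back to undefined instead of failing.
    reasoningTokens: usage.output_tokens_details?.reasoning_tokens ?? undefined,
    cachedInputTokens: usage.input_tokens_details?.cached_tokens ?? undefined,
  };
}
```

The finish reason is resolved the same way: `response.incomplete_details?.reason` is passed to `mapOpenAIResponseFinishReason`.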
@@ -2679,7 +2681,7 @@ var OpenAIResponsesLanguageModel = class {
         controller.enqueue({ type: "stream-start", warnings });
       },
       transform(chunk, controller) {
-        var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
+        var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u;
         if (options.includeRawChunks) {
           controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
         }
@@ -2945,8 +2947,8 @@ var OpenAIResponsesLanguageModel = class {
               sourceType: "document",
               id: (_r = (_q = (_p = self.config).generateId) == null ? void 0 : _q.call(_p)) != null ? _r : (0, import_provider_utils12.generateId)(),
               mediaType: "text/plain",
-              title: value.annotation.quote,
-              filename: value.annotation.file_id
+              title: (_t = (_s = value.annotation.quote) != null ? _s : value.annotation.filename) != null ? _t : "Document",
+              filename: (_u = value.annotation.filename) != null ? _u : value.annotation.file_id
             });
           }
         } else if (isErrorChunk(value)) {
@@ -3124,7 +3126,11 @@ var responseAnnotationAddedSchema = import_v413.z.object({
     import_v413.z.object({
       type: import_v413.z.literal("file_citation"),
       file_id: import_v413.z.string(),
-      quote: import_v413.z.string()
+      filename: import_v413.z.string().nullish(),
+      index: import_v413.z.number().nullish(),
+      start_index: import_v413.z.number().nullish(),
+      end_index: import_v413.z.number().nullish(),
+      quote: import_v413.z.string().nullish()
     })
   ])
});
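For consumers, document sources produced by Responses API file search should now validate and still carry a usable `title`/`filename`. A rough usage sketch, assuming an AI SDK v5 setup with the provider's `file_search` tool; the model name and vector store id are placeholders:

```ts
import { openai } from "@ai-sdk/openai";
import { generateText } from "ai";

async function main() {
  const result = await generateText({
    model: openai.responses("gpt-4o-mini"), // placeholder model
    prompt: "Summarize the attached report.",
    tools: {
      // Provider-defined file search tool; the vector store id is a placeholder.
      file_search: openai.tools.fileSearch({ vectorStoreIds: ["vs_placeholder"] }),
    },
  });

  for (const source of result.sources) {
    if (source.sourceType === "document") {
      // With 2.0.22, title falls back to the filename (or "Document") and
      // filename falls back to the file id when the API omits quote/filename.
      console.log(source.title, source.filename);
    }
  }
}

main().catch(console.error);
```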