@ai-sdk/openai 2.0.22 → 2.0.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.js +15 -2
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +15 -2
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +15 -2
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +15 -2
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/internal/index.js
CHANGED
@@ -2783,6 +2783,7 @@ var OpenAIResponsesLanguageModel = class {
       })
     ])
   ),
+  service_tier: import_v416.z.string().nullish(),
   incomplete_details: import_v416.z.object({ reason: import_v416.z.string() }).nullable(),
   usage: usageSchema2
 })
@@ -2941,6 +2942,9 @@ var OpenAIResponsesLanguageModel = class {
     if (logprobs.length > 0) {
       providerMetadata.openai.logprobs = logprobs;
     }
+    if (typeof response.service_tier === "string") {
+      providerMetadata.openai.serviceTier = response.service_tier;
+    }
     return {
       content,
       finishReason: mapOpenAIResponseFinishReason({
@@ -2997,6 +3001,7 @@ var OpenAIResponsesLanguageModel = class {
     const ongoingToolCalls = {};
     let hasToolCalls = false;
     const activeReasoning = {};
+    let serviceTier;
     return {
       stream: response.pipeThrough(
         new TransformStream({
@@ -3255,6 +3260,9 @@ var OpenAIResponsesLanguageModel = class {
             usage.totalTokens = value.response.usage.input_tokens + value.response.usage.output_tokens;
             usage.reasoningTokens = (_j = (_i = value.response.usage.output_tokens_details) == null ? void 0 : _i.reasoning_tokens) != null ? _j : void 0;
             usage.cachedInputTokens = (_l = (_k = value.response.usage.input_tokens_details) == null ? void 0 : _k.cached_tokens) != null ? _l : void 0;
+            if (typeof value.response.service_tier === "string") {
+              serviceTier = value.response.service_tier;
+            }
           } else if (isResponseAnnotationAddedChunk(value)) {
             if (value.annotation.type === "url_citation") {
               controller.enqueue({
@@ -3287,6 +3295,9 @@ var OpenAIResponsesLanguageModel = class {
           if (logprobs.length > 0) {
             providerMetadata.openai.logprobs = logprobs;
           }
+          if (serviceTier !== void 0) {
+            providerMetadata.openai.serviceTier = serviceTier;
+          }
           controller.enqueue({
             type: "finish",
             finishReason,
@@ -3324,7 +3335,8 @@ var responseFinishedChunkSchema = import_v416.z.object({
   type: import_v416.z.enum(["response.completed", "response.incomplete"]),
   response: import_v416.z.object({
     incomplete_details: import_v416.z.object({ reason: import_v416.z.string() }).nullish(),
-    usage: usageSchema2
+    usage: usageSchema2,
+    service_tier: import_v416.z.string().nullish()
   })
 });
 var responseCreatedChunkSchema = import_v416.z.object({
@@ -3332,7 +3344,8 @@ var responseCreatedChunkSchema = import_v416.z.object({
   response: import_v416.z.object({
     id: import_v416.z.string(),
     created_at: import_v416.z.number(),
-    model: import_v416.z.string()
+    model: import_v416.z.string(),
+    service_tier: import_v416.z.string().nullish()
   })
 });
 var responseOutputItemAddedSchema = import_v416.z.object({