@ai-sdk/openai 2.0.14 → 2.0.15
This diff reflects the content of publicly available package versions released to one of the supported registries, and is provided for informational purposes only.
- package/CHANGELOG.md +9 -0
- package/dist/index.js +56 -38
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +56 -38
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +56 -38
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +56 -38
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/internal/index.mjs
CHANGED
```diff
@@ -2626,7 +2626,7 @@ var OpenAIResponsesLanguageModel = class {
        });
      }
    }
-    if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "flex" && !
+    if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "flex" && !modelConfig.supportsFlexProcessing) {
      warnings.push({
        type: "unsupported-setting",
        setting: "serviceTier",
@@ -2634,7 +2634,7 @@ var OpenAIResponsesLanguageModel = class {
      });
      delete baseArgs.service_tier;
    }
-    if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "priority" && !
+    if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "priority" && !modelConfig.supportsPriorityProcessing) {
      warnings.push({
        type: "unsupported-setting",
        setting: "serviceTier",
```
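In 2.0.15 the flex/priority checks read capability flags from the model config returned by `getResponsesModelConfig` instead of calling the standalone helper functions that are removed at the bottom of this diff. The observable behavior stays the same: requesting a service tier on a model that does not support it emits an `unsupported-setting` warning and drops `service_tier` from the request body. A minimal consumer-side sketch, assuming the AI SDK's `generateText`, its `warnings` result field, and an illustrative model ID:

```ts
import { openai } from '@ai-sdk/openai';
import { generateText } from 'ai';

// Per this diff, flex processing is accepted for o3*, o4-mini*, and
// gpt-5* (except gpt-5-chat*) models; the option is forwarded as service_tier.
const { text, warnings } = await generateText({
  model: openai.responses('o4-mini'),
  prompt: 'Summarize the release notes.',
  providerOptions: { openai: { serviceTier: 'flex' } },
});

// On an unsupported model the call still succeeds, but the provider pushes
// { type: 'unsupported-setting', setting: 'serviceTier', ... } into `warnings`
// and strips service_tier from the request body.
console.log(warnings);
```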
```diff
@@ -2661,7 +2661,7 @@ var OpenAIResponsesLanguageModel = class {
    };
  }
  async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
    const { args: body, warnings } = await this.getArgs(options);
    const url = this.config.url({
      path: "/responses",
@@ -2727,7 +2727,11 @@ var OpenAIResponsesLanguageModel = class {
      z16.object({
        type: z16.literal("web_search_call"),
        id: z16.string(),
-        status: z16.string().optional()
+        status: z16.string().optional(),
+        action: z16.object({
+          type: z16.literal("search"),
+          query: z16.string().optional()
+        }).nullish()
      }),
      z16.object({
        type: z16.literal("computer_call"),
```
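The response schema for `web_search_call` output items now accepts an optional `action` object carrying the executed search query. In readable form (the bundled `z16` is just the locally aliased zod import; the other union members are omitted), the new member is roughly:

```ts
import { z } from 'zod';

// Sketch of the web_search_call member of the output-item union as of 2.0.15.
const webSearchCallSchema = z.object({
  type: z.literal('web_search_call'),
  id: z.string(),
  status: z.string().optional(),
  // New in 2.0.15: the search action, including the query that was run.
  action: z
    .object({
      type: z.literal('search'),
      query: z.string().optional(),
    })
    .nullish(),
});
```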
```diff
@@ -2859,14 +2863,17 @@ var OpenAIResponsesLanguageModel = class {
          type: "tool-call",
          toolCallId: part.id,
          toolName: "web_search_preview",
-          input: "",
+          input: (_k = (_j = part.action) == null ? void 0 : _j.query) != null ? _k : "",
          providerExecuted: true
        });
        content.push({
          type: "tool-result",
          toolCallId: part.id,
          toolName: "web_search_preview",
-          result: {
+          result: {
+            status: part.status || "completed",
+            ...((_l = part.action) == null ? void 0 : _l.query) && { query: part.action.query }
+          },
          providerExecuted: true
        });
        break;
```
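The `_j`/`_k`/`_l` temporaries are what the bundler emits for optional chaining and nullish coalescing. In source-level terms, `doGenerate` now surfaces the executed query on both the synthesized tool call and its result. A sketch of the equivalent un-compiled code (illustrative, not the literal source):

```ts
content.push({
  type: 'tool-call',
  toolCallId: part.id,
  toolName: 'web_search_preview',
  // 2.0.14 always passed '' here; 2.0.15 passes the query when present.
  input: part.action?.query ?? '',
  providerExecuted: true,
});

content.push({
  type: 'tool-result',
  toolCallId: part.id,
  toolName: 'web_search_preview',
  result: {
    status: part.status || 'completed',
    // Only include `query` when the provider reported one.
    ...(part.action?.query && { query: part.action.query }),
  },
  providerExecuted: true,
});
```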
```diff
@@ -2924,15 +2931,15 @@ var OpenAIResponsesLanguageModel = class {
    return {
      content,
      finishReason: mapOpenAIResponseFinishReason({
-        finishReason: (
+        finishReason: (_m = response.incomplete_details) == null ? void 0 : _m.reason,
        hasToolCalls: content.some((part) => part.type === "tool-call")
      }),
      usage: {
        inputTokens: response.usage.input_tokens,
        outputTokens: response.usage.output_tokens,
        totalTokens: response.usage.input_tokens + response.usage.output_tokens,
-        reasoningTokens: (
-        cachedInputTokens: (
+        reasoningTokens: (_o = (_n = response.usage.output_tokens_details) == null ? void 0 : _n.reasoning_tokens) != null ? _o : void 0,
+        cachedInputTokens: (_q = (_p = response.usage.input_tokens_details) == null ? void 0 : _p.cached_tokens) != null ? _q : void 0
      },
      request: { body },
      response: {
```
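Only the compiler temporaries changed here (the extra `_o`..`_q` slots exist because `doGenerate` now dereferences more optional values); the mapping itself is unchanged and, written with optional chaining, amounts to:

```ts
// Sketch of the usage/finish-reason mapping in source form.
const usage = {
  inputTokens: response.usage.input_tokens,
  outputTokens: response.usage.output_tokens,
  totalTokens: response.usage.input_tokens + response.usage.output_tokens,
  reasoningTokens: response.usage.output_tokens_details?.reasoning_tokens ?? undefined,
  cachedInputTokens: response.usage.input_tokens_details?.cached_tokens ?? undefined,
};

const finishReason = mapOpenAIResponseFinishReason({
  finishReason: response.incomplete_details?.reason,
  hasToolCalls: content.some((part) => part.type === 'tool-call'),
});
```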
```diff
@@ -2984,7 +2991,7 @@ var OpenAIResponsesLanguageModel = class {
        controller.enqueue({ type: "stream-start", warnings });
      },
      transform(chunk, controller) {
-        var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p;
+        var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s;
        if (options.includeRawChunks) {
          controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
        }
@@ -3091,7 +3098,7 @@ var OpenAIResponsesLanguageModel = class {
              type: "tool-call",
              toolCallId: value.item.id,
              toolName: "web_search_preview",
-              input: "",
+              input: (_c = (_b = value.item.action) == null ? void 0 : _b.query) != null ? _c : "",
              providerExecuted: true
            });
            controller.enqueue({
@@ -3100,7 +3107,10 @@ var OpenAIResponsesLanguageModel = class {
              toolName: "web_search_preview",
              result: {
                type: "web_search_tool_result",
-                status: value.item.status || "completed"
+                status: value.item.status || "completed",
+                ...((_d = value.item.action) == null ? void 0 : _d.query) && {
+                  query: value.item.action.query
+                }
              },
              providerExecuted: true
            });
```
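The streaming path mirrors the non-streaming change, so the query now also reaches stream consumers through the `tool-call` and `tool-result` parts. A hypothetical consumer-side sketch, assuming the AI SDK's `streamText`, its `fullStream` iterator, and the provider's `webSearchPreview` tool helper (model ID and prompt are illustrative):

```ts
import { openai } from '@ai-sdk/openai';
import { streamText } from 'ai';

const result = streamText({
  model: openai.responses('gpt-4o-mini'),
  tools: { web_search_preview: openai.tools.webSearchPreview({}) },
  prompt: 'What is new in the latest AI SDK release?',
});

for await (const part of result.fullStream) {
  // Before 2.0.15 this input was always ''; now it carries the executed query.
  if (part.type === 'tool-call' && part.toolName === 'web_search_preview') {
    console.log('search query:', part.input);
  }
}
```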
```diff
@@ -3168,7 +3178,7 @@ var OpenAIResponsesLanguageModel = class {
              providerMetadata: {
                openai: {
                  itemId: value.item.id,
-                  reasoningEncryptedContent: (
+                  reasoningEncryptedContent: (_e = value.item.encrypted_content) != null ? _e : null
                }
              }
            });
@@ -3203,7 +3213,7 @@ var OpenAIResponsesLanguageModel = class {
            }
          } else if (isResponseReasoningSummaryPartAddedChunk(value)) {
            if (value.summary_index > 0) {
-              (
+              (_f = activeReasoning[value.item_id]) == null ? void 0 : _f.summaryParts.push(
                value.summary_index
              );
              controller.enqueue({
@@ -3212,7 +3222,7 @@ var OpenAIResponsesLanguageModel = class {
              providerMetadata: {
                openai: {
                  itemId: value.item_id,
-                  reasoningEncryptedContent: (
+                  reasoningEncryptedContent: (_h = (_g = activeReasoning[value.item_id]) == null ? void 0 : _g.encryptedContent) != null ? _h : null
                }
              }
            });
```
```diff
@@ -3230,20 +3240,20 @@ var OpenAIResponsesLanguageModel = class {
            });
          } else if (isResponseFinishedChunk(value)) {
            finishReason = mapOpenAIResponseFinishReason({
-              finishReason: (
+              finishReason: (_i = value.response.incomplete_details) == null ? void 0 : _i.reason,
              hasToolCalls
            });
            usage.inputTokens = value.response.usage.input_tokens;
            usage.outputTokens = value.response.usage.output_tokens;
            usage.totalTokens = value.response.usage.input_tokens + value.response.usage.output_tokens;
-            usage.reasoningTokens = (
-            usage.cachedInputTokens = (
+            usage.reasoningTokens = (_k = (_j = value.response.usage.output_tokens_details) == null ? void 0 : _j.reasoning_tokens) != null ? _k : void 0;
+            usage.cachedInputTokens = (_m = (_l = value.response.usage.input_tokens_details) == null ? void 0 : _l.cached_tokens) != null ? _m : void 0;
          } else if (isResponseAnnotationAddedChunk(value)) {
            if (value.annotation.type === "url_citation") {
              controller.enqueue({
                type: "source",
                sourceType: "url",
-                id: (
+                id: (_p = (_o = (_n = self.config).generateId) == null ? void 0 : _o.call(_n)) != null ? _p : generateId2(),
                url: value.annotation.url,
                title: value.annotation.title
              });
@@ -3251,7 +3261,7 @@ var OpenAIResponsesLanguageModel = class {
              controller.enqueue({
                type: "source",
                sourceType: "document",
-                id: (
+                id: (_s = (_r = (_q = self.config).generateId) == null ? void 0 : _r.call(_q)) != null ? _s : generateId2(),
                mediaType: "text/plain",
                title: value.annotation.quote,
                filename: value.annotation.file_id
```
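As in `doGenerate`, these stream-side hunks only renumber the compiler temporaries; the logic they encode is the same optional-chaining pattern throughout. A source-level sketch of the expressions above (`generateId2` is the bundler's alias for the imported id generator):

```ts
// Equivalents of the compiled _i.._s expressions in the stream transform.
usage.reasoningTokens =
  value.response.usage.output_tokens_details?.reasoning_tokens ?? undefined;
usage.cachedInputTokens =
  value.response.usage.input_tokens_details?.cached_tokens ?? undefined;

// Source ids fall back to the module-level id generator when the
// provider config does not supply its own generateId.
const id = self.config.generateId?.() ?? generateId();
```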
```diff
@@ -3341,7 +3351,11 @@ var responseOutputItemAddedSchema = z16.object({
    z16.object({
      type: z16.literal("web_search_call"),
      id: z16.string(),
-      status: z16.string()
+      status: z16.string(),
+      action: z16.object({
+        type: z16.literal("search"),
+        query: z16.string().optional()
+      }).nullish()
    }),
    z16.object({
      type: z16.literal("computer_call"),
```
```diff
@@ -3390,7 +3404,11 @@ var responseOutputItemDoneSchema = z16.object({
    z16.object({
      type: z16.literal("web_search_call"),
      id: z16.string(),
-      status: z16.literal("completed")
+      status: z16.literal("completed"),
+      action: z16.object({
+        type: z16.literal("search"),
+        query: z16.string().optional()
+      }).nullish()
    }),
    z16.object({
      type: z16.literal("computer_call"),
```
```diff
@@ -3498,39 +3516,39 @@ function isErrorChunk(chunk) {
  return chunk.type === "error";
}
function getResponsesModelConfig(modelId) {
+  const supportsFlexProcessing2 = modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
+  const supportsPriorityProcessing2 = modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
+  const defaults = {
+    requiredAutoTruncation: false,
+    systemMessageMode: "system",
+    supportsFlexProcessing: supportsFlexProcessing2,
+    supportsPriorityProcessing: supportsPriorityProcessing2
+  };
  if (modelId.startsWith("gpt-5-chat")) {
    return {
-
-
-      requiredAutoTruncation: false
+      ...defaults,
+      isReasoningModel: false
    };
  }
  if (modelId.startsWith("o") || modelId.startsWith("gpt-5") || modelId.startsWith("codex-") || modelId.startsWith("computer-use")) {
    if (modelId.startsWith("o1-mini") || modelId.startsWith("o1-preview")) {
      return {
+        ...defaults,
        isReasoningModel: true,
-        systemMessageMode: "remove"
-        requiredAutoTruncation: false
+        systemMessageMode: "remove"
      };
    }
    return {
+      ...defaults,
      isReasoningModel: true,
-      systemMessageMode: "developer"
-      requiredAutoTruncation: false
+      systemMessageMode: "developer"
    };
  }
  return {
-
-
-    requiredAutoTruncation: false
+    ...defaults,
+    isReasoningModel: false
  };
}
-function supportsFlexProcessing2(modelId) {
-  return modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
-}
-function supportsPriorityProcessing2(modelId) {
-  return modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
-}
var openaiResponsesProviderOptionsSchema = z16.object({
  metadata: z16.any().nullish(),
  parallelToolCalls: z16.boolean().nullish(),
```