@ai-sdk/openai 2.0.13 → 2.0.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +15 -0
- package/dist/index.js +118 -62
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +118 -62
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +118 -62
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +118 -62
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/internal/index.mjs
CHANGED
@@ -2626,7 +2626,7 @@ var OpenAIResponsesLanguageModel = class {
         });
       }
     }
-    if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "flex" && !
+    if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "flex" && !modelConfig.supportsFlexProcessing) {
       warnings.push({
         type: "unsupported-setting",
         setting: "serviceTier",
@@ -2634,7 +2634,7 @@ var OpenAIResponsesLanguageModel = class {
       });
       delete baseArgs.service_tier;
     }
-    if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "priority" && !
+    if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "priority" && !modelConfig.supportsPriorityProcessing) {
       warnings.push({
         type: "unsupported-setting",
         setting: "serviceTier",
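For context on what the two checks above change for callers: requesting flex or priority processing on a model that does not support it no longer sends `service_tier` upstream; the field is stripped from the request body and reported as an `unsupported-setting` warning. A minimal sketch, assuming the AI SDK v5 `generateText`/`providerOptions` API (model ID and key handling are placeholders):

```ts
import { createOpenAI } from '@ai-sdk/openai';
import { generateText } from 'ai';

const openai = createOpenAI({ apiKey: process.env.OPENAI_API_KEY });

const result = await generateText({
  // hypothetical model choice; per the diff, flex is only kept for o3 / o4-mini / gpt-5 (non-chat) prefixes
  model: openai.responses('gpt-4o-mini'),
  prompt: 'Hello',
  providerOptions: { openai: { serviceTier: 'flex' } },
});

// Expect an "unsupported-setting" warning for serviceTier here rather than a failed call.
console.log(result.warnings);
```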
@@ -2661,7 +2661,7 @@ var OpenAIResponsesLanguageModel = class {
     };
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
     const { args: body, warnings } = await this.getArgs(options);
     const url = this.config.url({
       path: "/responses",
@@ -2697,13 +2697,22 @@ var OpenAIResponsesLanguageModel = class {
             text: z16.string(),
             logprobs: LOGPROBS_SCHEMA.nullish(),
             annotations: z16.array(
-              z16.
-
-
-
-
-
-
+              z16.discriminatedUnion("type", [
+                z16.object({
+                  type: z16.literal("url_citation"),
+                  start_index: z16.number(),
+                  end_index: z16.number(),
+                  url: z16.string(),
+                  title: z16.string()
+                }),
+                z16.object({
+                  type: z16.literal("file_citation"),
+                  start_index: z16.number(),
+                  end_index: z16.number(),
+                  file_id: z16.string(),
+                  quote: z16.string()
+                })
+              ])
             )
           })
         )
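Restated outside the bundle, the annotation schema now accepts two citation shapes, discriminated on `type`. A sketch with plain `zod` (identifier values are placeholders):

```ts
import { z } from 'zod';

// Mirrors the union above: URL citations (e.g. from web search) and file citations (e.g. from file search).
const annotationSchema = z.discriminatedUnion('type', [
  z.object({
    type: z.literal('url_citation'),
    start_index: z.number(),
    end_index: z.number(),
    url: z.string(),
    title: z.string(),
  }),
  z.object({
    type: z.literal('file_citation'),
    start_index: z.number(),
    end_index: z.number(),
    file_id: z.string(),
    quote: z.string(),
  }),
]);

// Example payload of the newly supported variant.
annotationSchema.parse({
  type: 'file_citation',
  start_index: 0,
  end_index: 12,
  file_id: 'file-abc123', // placeholder ID
  quote: 'cited passage',
});
```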
@@ -2718,7 +2727,11 @@ var OpenAIResponsesLanguageModel = class {
         z16.object({
           type: z16.literal("web_search_call"),
           id: z16.string(),
-          status: z16.string().optional()
+          status: z16.string().optional(),
+          action: z16.object({
+            type: z16.literal("search"),
+            query: z16.string().optional()
+          }).nullish()
         }),
         z16.object({
           type: z16.literal("computer_call"),
@@ -2809,13 +2822,24 @@ var OpenAIResponsesLanguageModel = class {
             }
           });
           for (const annotation of contentPart.annotations) {
-
-
-
-
-
-
-
+            if (annotation.type === "url_citation") {
+              content.push({
+                type: "source",
+                sourceType: "url",
+                id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : generateId2(),
+                url: annotation.url,
+                title: annotation.title
+              });
+            } else if (annotation.type === "file_citation") {
+              content.push({
+                type: "source",
+                sourceType: "document",
+                id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : generateId2(),
+                mediaType: "text/plain",
+                title: annotation.quote,
+                filename: annotation.file_id
+              });
+            }
           }
         }
         break;
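The branch above maps both annotation variants onto AI SDK source content parts. Roughly, the output shapes are (illustrative literals; IDs are normally produced by the configured `generateId`):

```ts
// url_citation → a URL source part.
const urlSource = {
  type: 'source' as const,
  sourceType: 'url' as const,
  id: 'src_1',                  // placeholder; config.generateId() / generateId() in practice
  url: 'https://example.com',
  title: 'Example page',
};

// file_citation → a document source part; the quote becomes the title
// and the file ID is surfaced as the filename, with a text/plain media type.
const documentSource = {
  type: 'source' as const,
  sourceType: 'document' as const,
  id: 'src_2',                  // placeholder
  mediaType: 'text/plain',
  title: 'cited passage',
  filename: 'file-abc123',      // placeholder file ID
};
```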
@@ -2839,14 +2863,17 @@ var OpenAIResponsesLanguageModel = class {
           type: "tool-call",
           toolCallId: part.id,
           toolName: "web_search_preview",
-          input: "",
+          input: (_k = (_j = part.action) == null ? void 0 : _j.query) != null ? _k : "",
           providerExecuted: true
         });
         content.push({
           type: "tool-result",
           toolCallId: part.id,
           toolName: "web_search_preview",
-          result: {
+          result: {
+            status: part.status || "completed",
+            ...((_l = part.action) == null ? void 0 : _l.query) && { query: part.action.query }
+          },
           providerExecuted: true
         });
         break;
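With the `action` field now parsed, the synthesized `web_search_preview` tool call carries the actual search query instead of an empty string, and the result echoes it when present. An illustration of the emitted parts (IDs and query are placeholders):

```ts
const webSearchParts = [
  {
    type: 'tool-call' as const,
    toolCallId: 'ws_123',               // placeholder item ID
    toolName: 'web_search_preview',
    input: 'latest ai sdk release',     // from part.action?.query; '' when absent
    providerExecuted: true,
  },
  {
    type: 'tool-result' as const,
    toolCallId: 'ws_123',
    toolName: 'web_search_preview',
    result: {
      status: 'completed',
      query: 'latest ai sdk release',   // only spread in when the action has a query
    },
    providerExecuted: true,
  },
];
```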
@@ -2904,15 +2931,15 @@ var OpenAIResponsesLanguageModel = class {
     return {
       content,
       finishReason: mapOpenAIResponseFinishReason({
-        finishReason: (
+        finishReason: (_m = response.incomplete_details) == null ? void 0 : _m.reason,
         hasToolCalls: content.some((part) => part.type === "tool-call")
       }),
       usage: {
         inputTokens: response.usage.input_tokens,
         outputTokens: response.usage.output_tokens,
         totalTokens: response.usage.input_tokens + response.usage.output_tokens,
-        reasoningTokens: (
-        cachedInputTokens: (
+        reasoningTokens: (_o = (_n = response.usage.output_tokens_details) == null ? void 0 : _n.reasoning_tokens) != null ? _o : void 0,
+        cachedInputTokens: (_q = (_p = response.usage.input_tokens_details) == null ? void 0 : _p.cached_tokens) != null ? _q : void 0
       },
       request: { body },
       response: {
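The usage mapping now reads the optional `*_tokens_details` objects instead of leaving the extended fields blank. With a Responses API usage payload like the one below, the provider reports (values illustrative):

```ts
// Hypothetical usage block from the Responses API.
const apiUsage = {
  input_tokens: 1200,
  output_tokens: 350,
  input_tokens_details: { cached_tokens: 1024 },
  output_tokens_details: { reasoning_tokens: 200 },
};

// Mapping performed by the code above (undefined when a details object is missing).
const usage = {
  inputTokens: apiUsage.input_tokens,
  outputTokens: apiUsage.output_tokens,
  totalTokens: apiUsage.input_tokens + apiUsage.output_tokens,
  reasoningTokens: apiUsage.output_tokens_details?.reasoning_tokens,
  cachedInputTokens: apiUsage.input_tokens_details?.cached_tokens,
};
```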
@@ -2964,7 +2991,7 @@ var OpenAIResponsesLanguageModel = class {
           controller.enqueue({ type: "stream-start", warnings });
         },
         transform(chunk, controller) {
-          var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
+          var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s;
           if (options.includeRawChunks) {
             controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
           }
@@ -3071,7 +3098,7 @@ var OpenAIResponsesLanguageModel = class {
               type: "tool-call",
               toolCallId: value.item.id,
               toolName: "web_search_preview",
-              input: "",
+              input: (_c = (_b = value.item.action) == null ? void 0 : _b.query) != null ? _c : "",
               providerExecuted: true
             });
             controller.enqueue({
@@ -3080,7 +3107,10 @@ var OpenAIResponsesLanguageModel = class {
               toolName: "web_search_preview",
               result: {
                 type: "web_search_tool_result",
-                status: value.item.status || "completed"
+                status: value.item.status || "completed",
+                ...((_d = value.item.action) == null ? void 0 : _d.query) && {
+                  query: value.item.action.query
+                }
               },
               providerExecuted: true
             });
@@ -3148,7 +3178,7 @@ var OpenAIResponsesLanguageModel = class {
               providerMetadata: {
                 openai: {
                   itemId: value.item.id,
-                  reasoningEncryptedContent: (
+                  reasoningEncryptedContent: (_e = value.item.encrypted_content) != null ? _e : null
                 }
               }
             });
@@ -3183,7 +3213,7 @@ var OpenAIResponsesLanguageModel = class {
             }
           } else if (isResponseReasoningSummaryPartAddedChunk(value)) {
             if (value.summary_index > 0) {
-              (
+              (_f = activeReasoning[value.item_id]) == null ? void 0 : _f.summaryParts.push(
                 value.summary_index
               );
               controller.enqueue({
@@ -3192,7 +3222,7 @@ var OpenAIResponsesLanguageModel = class {
                 providerMetadata: {
                   openai: {
                     itemId: value.item_id,
-                    reasoningEncryptedContent: (
+                    reasoningEncryptedContent: (_h = (_g = activeReasoning[value.item_id]) == null ? void 0 : _g.encryptedContent) != null ? _h : null
                   }
                 }
               });
@@ -3210,22 +3240,33 @@ var OpenAIResponsesLanguageModel = class {
             });
           } else if (isResponseFinishedChunk(value)) {
             finishReason = mapOpenAIResponseFinishReason({
-              finishReason: (
+              finishReason: (_i = value.response.incomplete_details) == null ? void 0 : _i.reason,
               hasToolCalls
             });
             usage.inputTokens = value.response.usage.input_tokens;
             usage.outputTokens = value.response.usage.output_tokens;
             usage.totalTokens = value.response.usage.input_tokens + value.response.usage.output_tokens;
-            usage.reasoningTokens = (
-            usage.cachedInputTokens = (
+            usage.reasoningTokens = (_k = (_j = value.response.usage.output_tokens_details) == null ? void 0 : _j.reasoning_tokens) != null ? _k : void 0;
+            usage.cachedInputTokens = (_m = (_l = value.response.usage.input_tokens_details) == null ? void 0 : _l.cached_tokens) != null ? _m : void 0;
           } else if (isResponseAnnotationAddedChunk(value)) {
-
-
-
-
-
-
-
+            if (value.annotation.type === "url_citation") {
+              controller.enqueue({
+                type: "source",
+                sourceType: "url",
+                id: (_p = (_o = (_n = self.config).generateId) == null ? void 0 : _o.call(_n)) != null ? _p : generateId2(),
+                url: value.annotation.url,
+                title: value.annotation.title
+              });
+            } else if (value.annotation.type === "file_citation") {
+              controller.enqueue({
+                type: "source",
+                sourceType: "document",
+                id: (_s = (_r = (_q = self.config).generateId) == null ? void 0 : _r.call(_q)) != null ? _s : generateId2(),
+                mediaType: "text/plain",
+                title: value.annotation.quote,
+                filename: value.annotation.file_id
+              });
+            }
           } else if (isErrorChunk(value)) {
             controller.enqueue({ type: "error", error: value });
           }
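In streaming mode the same citations are enqueued as `source` stream parts as they arrive. A minimal consumption sketch, assuming the AI SDK v5 `streamText` API and the provider-executed web search tool (model ID is a placeholder):

```ts
import { createOpenAI } from '@ai-sdk/openai';
import { streamText } from 'ai';

const openai = createOpenAI({ apiKey: process.env.OPENAI_API_KEY });

const result = streamText({
  model: openai.responses('gpt-4o-mini'),   // placeholder model
  prompt: 'Summarize recent AI SDK releases with citations.',
  tools: { web_search_preview: openai.tools.webSearchPreview({}) },
});

for await (const part of result.fullStream) {
  if (part.type === 'source') {
    // url_citation chunks stream as sourceType 'url', file_citation chunks as 'document'.
    console.log(part.sourceType, 'url' in part ? part.url : part.title);
  }
}
```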
@@ -3310,7 +3351,11 @@ var responseOutputItemAddedSchema = z16.object({
     z16.object({
       type: z16.literal("web_search_call"),
       id: z16.string(),
-      status: z16.string()
+      status: z16.string(),
+      action: z16.object({
+        type: z16.literal("search"),
+        query: z16.string().optional()
+      }).nullish()
     }),
     z16.object({
       type: z16.literal("computer_call"),
@@ -3359,7 +3404,11 @@ var responseOutputItemDoneSchema = z16.object({
     z16.object({
       type: z16.literal("web_search_call"),
       id: z16.string(),
-      status: z16.literal("completed")
+      status: z16.literal("completed"),
+      action: z16.object({
+        type: z16.literal("search"),
+        query: z16.string().optional()
+      }).nullish()
     }),
     z16.object({
       type: z16.literal("computer_call"),
@@ -3392,11 +3441,18 @@ var responseFunctionCallArgumentsDeltaSchema = z16.object({
 });
 var responseAnnotationAddedSchema = z16.object({
   type: z16.literal("response.output_text.annotation.added"),
-  annotation: z16.
-
-
-
-
+  annotation: z16.discriminatedUnion("type", [
+    z16.object({
+      type: z16.literal("url_citation"),
+      url: z16.string(),
+      title: z16.string()
+    }),
+    z16.object({
+      type: z16.literal("file_citation"),
+      file_id: z16.string(),
+      quote: z16.string()
+    })
+  ])
 });
 var responseReasoningSummaryPartAddedSchema = z16.object({
   type: z16.literal("response.reasoning_summary_part.added"),
@@ -3460,39 +3516,39 @@ function isErrorChunk(chunk) {
   return chunk.type === "error";
 }
 function getResponsesModelConfig(modelId) {
+  const supportsFlexProcessing2 = modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
+  const supportsPriorityProcessing2 = modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
+  const defaults = {
+    requiredAutoTruncation: false,
+    systemMessageMode: "system",
+    supportsFlexProcessing: supportsFlexProcessing2,
+    supportsPriorityProcessing: supportsPriorityProcessing2
+  };
   if (modelId.startsWith("gpt-5-chat")) {
     return {
-
-
-      requiredAutoTruncation: false
+      ...defaults,
+      isReasoningModel: false
     };
   }
   if (modelId.startsWith("o") || modelId.startsWith("gpt-5") || modelId.startsWith("codex-") || modelId.startsWith("computer-use")) {
     if (modelId.startsWith("o1-mini") || modelId.startsWith("o1-preview")) {
       return {
+        ...defaults,
         isReasoningModel: true,
-        systemMessageMode: "remove"
-        requiredAutoTruncation: false
+        systemMessageMode: "remove"
       };
     }
     return {
+      ...defaults,
       isReasoningModel: true,
-      systemMessageMode: "developer"
-      requiredAutoTruncation: false
+      systemMessageMode: "developer"
     };
   }
   return {
-
-
-    requiredAutoTruncation: false
+    ...defaults,
+    isReasoningModel: false
   };
 }
-function supportsFlexProcessing2(modelId) {
-  return modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
-}
-function supportsPriorityProcessing2(modelId) {
-  return modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
-}
 var openaiResponsesProviderOptionsSchema = z16.object({
   metadata: z16.any().nullish(),
   parallelToolCalls: z16.boolean().nullish(),