@ai-sdk/openai 2.0.13 → 2.0.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +15 -0
- package/dist/index.js +118 -62
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +118 -62
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +118 -62
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +118 -62
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -2310,7 +2310,7 @@ var OpenAIResponsesLanguageModel = class {
 });
 }
 }
-if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "flex" && !
+if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "flex" && !modelConfig.supportsFlexProcessing) {
 warnings.push({
 type: "unsupported-setting",
 setting: "serviceTier",

@@ -2318,7 +2318,7 @@ var OpenAIResponsesLanguageModel = class {
 });
 delete baseArgs.service_tier;
 }
-if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "priority" && !
+if ((openaiOptions == null ? void 0 : openaiOptions.serviceTier) === "priority" && !modelConfig.supportsPriorityProcessing) {
 warnings.push({
 type: "unsupported-setting",
 setting: "serviceTier",

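These two hunks switch the `serviceTier` checks from standalone helper functions to flags precomputed on `modelConfig` (see the `getResponsesModelConfig` hunk near the end of this diff). When the requested tier is unsupported for the model, the provider emits an `unsupported-setting` warning and drops `service_tier` from the request body rather than failing. A minimal caller-side sketch, assuming the AI SDK `generateText` API; the model id and prompt are illustrative, not taken from this diff:

```ts
import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';

// Request flex processing; if the chosen model does not support it,
// the call still succeeds and a serviceTier warning is returned instead.
const { text, warnings } = await generateText({
  model: openai.responses('o3'),
  prompt: 'Summarize the latest changes.',
  providerOptions: {
    openai: { serviceTier: 'flex' },
  },
});

// When unsupported: [{ type: 'unsupported-setting', setting: 'serviceTier', ... }]
console.log(warnings);
```
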
@@ -2345,7 +2345,7 @@ var OpenAIResponsesLanguageModel = class {
 };
 }
 async doGenerate(options) {
-var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
+var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q;
 const { args: body, warnings } = await this.getArgs(options);
 const url = this.config.url({
 path: "/responses",

@@ -2381,13 +2381,22 @@ var OpenAIResponsesLanguageModel = class {
 text: z13.string(),
 logprobs: LOGPROBS_SCHEMA.nullish(),
 annotations: z13.array(
-z13.
-
-
-
-
-
-
+z13.discriminatedUnion("type", [
+z13.object({
+type: z13.literal("url_citation"),
+start_index: z13.number(),
+end_index: z13.number(),
+url: z13.string(),
+title: z13.string()
+}),
+z13.object({
+type: z13.literal("file_citation"),
+start_index: z13.number(),
+end_index: z13.number(),
+file_id: z13.string(),
+quote: z13.string()
+})
+])
 )
 })
 )

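The annotation schema now validates two concrete citation shapes instead of a single loose object. A small standalone sketch of the same zod pattern, using `zod` directly rather than the bundled `z13` alias and hypothetical sample data:

```ts
import { z } from 'zod';

const annotationSchema = z.discriminatedUnion('type', [
  z.object({
    type: z.literal('url_citation'),
    start_index: z.number(),
    end_index: z.number(),
    url: z.string(),
    title: z.string(),
  }),
  z.object({
    type: z.literal('file_citation'),
    start_index: z.number(),
    end_index: z.number(),
    file_id: z.string(),
    quote: z.string(),
  }),
]);

// The `type` field selects which branch is applied during parsing.
const parsed = annotationSchema.parse({
  type: 'file_citation',
  start_index: 0,
  end_index: 10,
  file_id: 'file_123',
  quote: 'cited passage',
});
```
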
@@ -2402,7 +2411,11 @@ var OpenAIResponsesLanguageModel = class {
 z13.object({
 type: z13.literal("web_search_call"),
 id: z13.string(),
-status: z13.string().optional()
+status: z13.string().optional(),
+action: z13.object({
+type: z13.literal("search"),
+query: z13.string().optional()
+}).nullish()
 }),
 z13.object({
 type: z13.literal("computer_call"),

@@ -2493,13 +2506,24 @@ var OpenAIResponsesLanguageModel = class {
 }
 });
 for (const annotation of contentPart.annotations) {
-
-
-
-
-
-
-
+if (annotation.type === "url_citation") {
+content.push({
+type: "source",
+sourceType: "url",
+id: (_f = (_e = (_d = this.config).generateId) == null ? void 0 : _e.call(_d)) != null ? _f : generateId2(),
+url: annotation.url,
+title: annotation.title
+});
+} else if (annotation.type === "file_citation") {
+content.push({
+type: "source",
+sourceType: "document",
+id: (_i = (_h = (_g = this.config).generateId) == null ? void 0 : _h.call(_g)) != null ? _i : generateId2(),
+mediaType: "text/plain",
+title: annotation.quote,
+filename: annotation.file_id
+});
+}
 }
 }
 break;

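With the new schema in place, `doGenerate` maps each annotation to an AI SDK source content part: `url_citation` becomes a `sourceType: "url"` source, and `file_citation` becomes a `sourceType: "document"` source with the quote as title and the file id as filename. De-minified, the added branch is roughly equivalent to the following sketch (types simplified; `generateId` stands in for the configured id generator):

```ts
// Simplified restatement of the mapping added in this hunk, not the exact bundled code.
type Annotation =
  | { type: 'url_citation'; start_index: number; end_index: number; url: string; title: string }
  | { type: 'file_citation'; start_index: number; end_index: number; file_id: string; quote: string };

function annotationToSource(annotation: Annotation, generateId: () => string) {
  if (annotation.type === 'url_citation') {
    return {
      type: 'source' as const,
      sourceType: 'url' as const,
      id: generateId(),
      url: annotation.url,
      title: annotation.title,
    };
  }
  return {
    type: 'source' as const,
    sourceType: 'document' as const,
    id: generateId(),
    mediaType: 'text/plain',
    title: annotation.quote,
    filename: annotation.file_id,
  };
}
```
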
@@ -2523,14 +2547,17 @@ var OpenAIResponsesLanguageModel = class {
 type: "tool-call",
 toolCallId: part.id,
 toolName: "web_search_preview",
-input: "",
+input: (_k = (_j = part.action) == null ? void 0 : _j.query) != null ? _k : "",
 providerExecuted: true
 });
 content.push({
 type: "tool-result",
 toolCallId: part.id,
 toolName: "web_search_preview",
-result: {
+result: {
+status: part.status || "completed",
+...((_l = part.action) == null ? void 0 : _l.query) && { query: part.action.query }
+},
 providerExecuted: true
 });
 break;

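Previously the provider-executed `web_search_preview` tool call was reported with an empty input; when the new `action` field carries a search query, that query is now surfaced as the tool-call input and echoed in the tool result alongside the call status. A de-minified sketch of the same logic (field names follow the diff; the helper function is illustrative):

```ts
interface WebSearchCallPart {
  type: 'web_search_call';
  id: string;
  status?: string;
  action?: { type: 'search'; query?: string } | null;
}

function toToolParts(part: WebSearchCallPart) {
  const query = part.action?.query;
  return [
    {
      type: 'tool-call' as const,
      toolCallId: part.id,
      toolName: 'web_search_preview',
      input: query ?? '', // was always '' before this change
      providerExecuted: true,
    },
    {
      type: 'tool-result' as const,
      toolCallId: part.id,
      toolName: 'web_search_preview',
      result: {
        status: part.status || 'completed',
        ...(query && { query }), // only included when a query was reported
      },
      providerExecuted: true,
    },
  ];
}
```
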
@@ -2588,15 +2615,15 @@ var OpenAIResponsesLanguageModel = class {
 return {
 content,
 finishReason: mapOpenAIResponseFinishReason({
-finishReason: (
+finishReason: (_m = response.incomplete_details) == null ? void 0 : _m.reason,
 hasToolCalls: content.some((part) => part.type === "tool-call")
 }),
 usage: {
 inputTokens: response.usage.input_tokens,
 outputTokens: response.usage.output_tokens,
 totalTokens: response.usage.input_tokens + response.usage.output_tokens,
-reasoningTokens: (
-cachedInputTokens: (
+reasoningTokens: (_o = (_n = response.usage.output_tokens_details) == null ? void 0 : _n.reasoning_tokens) != null ? _o : void 0,
+cachedInputTokens: (_q = (_p = response.usage.input_tokens_details) == null ? void 0 : _p.cached_tokens) != null ? _q : void 0
 },
 request: { body },
 response: {

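The extra `_l`…`_q` temporaries introduced in `doGenerate` exist because the finish-reason and token-detail fields are optional in the Responses API payload; un-minified, the intent is plain optional chaining with nullish fallbacks. A short sketch of the same usage mapping, with the response shape abbreviated:

```ts
interface ResponsesUsage {
  input_tokens: number;
  output_tokens: number;
  output_tokens_details?: { reasoning_tokens?: number };
  input_tokens_details?: { cached_tokens?: number };
}

function mapUsage(usage: ResponsesUsage) {
  return {
    inputTokens: usage.input_tokens,
    outputTokens: usage.output_tokens,
    totalTokens: usage.input_tokens + usage.output_tokens,
    // Detail objects may be absent; the mapped fields stay undefined in that case.
    reasoningTokens: usage.output_tokens_details?.reasoning_tokens ?? undefined,
    cachedInputTokens: usage.input_tokens_details?.cached_tokens ?? undefined,
  };
}
```
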
@@ -2648,7 +2675,7 @@ var OpenAIResponsesLanguageModel = class {
 controller.enqueue({ type: "stream-start", warnings });
 },
 transform(chunk, controller) {
-var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
+var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s;
 if (options.includeRawChunks) {
 controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
 }

@@ -2755,7 +2782,7 @@ var OpenAIResponsesLanguageModel = class {
 type: "tool-call",
 toolCallId: value.item.id,
 toolName: "web_search_preview",
-input: "",
+input: (_c = (_b = value.item.action) == null ? void 0 : _b.query) != null ? _c : "",
 providerExecuted: true
 });
 controller.enqueue({

@@ -2764,7 +2791,10 @@ var OpenAIResponsesLanguageModel = class {
 toolName: "web_search_preview",
 result: {
 type: "web_search_tool_result",
-status: value.item.status || "completed"
+status: value.item.status || "completed",
+...((_d = value.item.action) == null ? void 0 : _d.query) && {
+query: value.item.action.query
+}
 },
 providerExecuted: true
 });

@@ -2832,7 +2862,7 @@ var OpenAIResponsesLanguageModel = class {
 providerMetadata: {
 openai: {
 itemId: value.item.id,
-reasoningEncryptedContent: (
+reasoningEncryptedContent: (_e = value.item.encrypted_content) != null ? _e : null
 }
 }
 });

@@ -2867,7 +2897,7 @@ var OpenAIResponsesLanguageModel = class {
 }
 } else if (isResponseReasoningSummaryPartAddedChunk(value)) {
 if (value.summary_index > 0) {
-(
+(_f = activeReasoning[value.item_id]) == null ? void 0 : _f.summaryParts.push(
 value.summary_index
 );
 controller.enqueue({

@@ -2876,7 +2906,7 @@ var OpenAIResponsesLanguageModel = class {
 providerMetadata: {
 openai: {
 itemId: value.item_id,
-reasoningEncryptedContent: (
+reasoningEncryptedContent: (_h = (_g = activeReasoning[value.item_id]) == null ? void 0 : _g.encryptedContent) != null ? _h : null
 }
 }
 });

@@ -2894,22 +2924,33 @@ var OpenAIResponsesLanguageModel = class {
 });
 } else if (isResponseFinishedChunk(value)) {
 finishReason = mapOpenAIResponseFinishReason({
-finishReason: (
+finishReason: (_i = value.response.incomplete_details) == null ? void 0 : _i.reason,
 hasToolCalls
 });
 usage.inputTokens = value.response.usage.input_tokens;
 usage.outputTokens = value.response.usage.output_tokens;
 usage.totalTokens = value.response.usage.input_tokens + value.response.usage.output_tokens;
-usage.reasoningTokens = (
-usage.cachedInputTokens = (
+usage.reasoningTokens = (_k = (_j = value.response.usage.output_tokens_details) == null ? void 0 : _j.reasoning_tokens) != null ? _k : void 0;
+usage.cachedInputTokens = (_m = (_l = value.response.usage.input_tokens_details) == null ? void 0 : _l.cached_tokens) != null ? _m : void 0;
 } else if (isResponseAnnotationAddedChunk(value)) {
-
-
-
-
-
-
-
+if (value.annotation.type === "url_citation") {
+controller.enqueue({
+type: "source",
+sourceType: "url",
+id: (_p = (_o = (_n = self.config).generateId) == null ? void 0 : _o.call(_n)) != null ? _p : generateId2(),
+url: value.annotation.url,
+title: value.annotation.title
+});
+} else if (value.annotation.type === "file_citation") {
+controller.enqueue({
+type: "source",
+sourceType: "document",
+id: (_s = (_r = (_q = self.config).generateId) == null ? void 0 : _r.call(_q)) != null ? _s : generateId2(),
+mediaType: "text/plain",
+title: value.annotation.quote,
+filename: value.annotation.file_id
+});
+}
 } else if (isErrorChunk(value)) {
 controller.enqueue({ type: "error", error: value });
 }

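The streaming path now gains the same mapping as `doGenerate`: when a `response.output_text.annotation.added` chunk arrives, a `source` stream part is enqueued, with `url_citation` and `file_citation` handled symmetrically to the non-streaming branch above. A hedged consumer-side sketch of how such parts could be observed; the model id, prompt, and tool setup are illustrative and the exact tool helper should be checked against the provider's docs:

```ts
import { streamText } from 'ai';
import { openai } from '@ai-sdk/openai';

const result = streamText({
  model: openai.responses('gpt-4o-mini'),
  prompt: 'Find recent coverage of the topic and cite your sources.',
  tools: { web_search_preview: openai.tools.webSearchPreview({}) },
});

for await (const part of result.fullStream) {
  if (part.type === 'source') {
    // URL sources carry url/title; document sources carry title/filename.
    console.log(part.sourceType, part);
  }
}
```
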
@@ -2994,7 +3035,11 @@ var responseOutputItemAddedSchema = z13.object({
 z13.object({
 type: z13.literal("web_search_call"),
 id: z13.string(),
-status: z13.string()
+status: z13.string(),
+action: z13.object({
+type: z13.literal("search"),
+query: z13.string().optional()
+}).nullish()
 }),
 z13.object({
 type: z13.literal("computer_call"),

@@ -3043,7 +3088,11 @@ var responseOutputItemDoneSchema = z13.object({
 z13.object({
 type: z13.literal("web_search_call"),
 id: z13.string(),
-status: z13.literal("completed")
+status: z13.literal("completed"),
+action: z13.object({
+type: z13.literal("search"),
+query: z13.string().optional()
+}).nullish()
 }),
 z13.object({
 type: z13.literal("computer_call"),

@@ -3076,11 +3125,18 @@ var responseFunctionCallArgumentsDeltaSchema = z13.object({
 });
 var responseAnnotationAddedSchema = z13.object({
 type: z13.literal("response.output_text.annotation.added"),
-annotation: z13.
-
-
-
-
+annotation: z13.discriminatedUnion("type", [
+z13.object({
+type: z13.literal("url_citation"),
+url: z13.string(),
+title: z13.string()
+}),
+z13.object({
+type: z13.literal("file_citation"),
+file_id: z13.string(),
+quote: z13.string()
+})
+])
 });
 var responseReasoningSummaryPartAddedSchema = z13.object({
 type: z13.literal("response.reasoning_summary_part.added"),

@@ -3144,39 +3200,39 @@ function isErrorChunk(chunk) {
 return chunk.type === "error";
 }
 function getResponsesModelConfig(modelId) {
+const supportsFlexProcessing2 = modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
+const supportsPriorityProcessing2 = modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
+const defaults = {
+requiredAutoTruncation: false,
+systemMessageMode: "system",
+supportsFlexProcessing: supportsFlexProcessing2,
+supportsPriorityProcessing: supportsPriorityProcessing2
+};
 if (modelId.startsWith("gpt-5-chat")) {
 return {
-
-
-requiredAutoTruncation: false
+...defaults,
+isReasoningModel: false
 };
 }
 if (modelId.startsWith("o") || modelId.startsWith("gpt-5") || modelId.startsWith("codex-") || modelId.startsWith("computer-use")) {
 if (modelId.startsWith("o1-mini") || modelId.startsWith("o1-preview")) {
 return {
+...defaults,
 isReasoningModel: true,
-systemMessageMode: "remove"
-requiredAutoTruncation: false
+systemMessageMode: "remove"
 };
 }
 return {
+...defaults,
 isReasoningModel: true,
-systemMessageMode: "developer"
-requiredAutoTruncation: false
+systemMessageMode: "developer"
 };
 }
 return {
-
-
-requiredAutoTruncation: false
+...defaults,
+isReasoningModel: false
 };
 }
-function supportsFlexProcessing2(modelId) {
-return modelId.startsWith("o3") || modelId.startsWith("o4-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-chat");
-}
-function supportsPriorityProcessing2(modelId) {
-return modelId.startsWith("gpt-4") || modelId.startsWith("gpt-5-mini") || modelId.startsWith("gpt-5") && !modelId.startsWith("gpt-5-nano") && !modelId.startsWith("gpt-5-chat") || modelId.startsWith("o3") || modelId.startsWith("o4-mini");
-}
 var openaiResponsesProviderOptionsSchema = z13.object({
 metadata: z13.any().nullish(),
 parallelToolCalls: z13.boolean().nullish(),
