@ai-sdk/openai 2.0.16 → 2.0.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +18 -0
- package/dist/index.js +62 -49
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +62 -49
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +62 -49
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +62 -49
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,23 @@
 # @ai-sdk/openai
 
+## 2.0.18
+
+### Patch Changes
+
+- 5e47d00: Support Responses API input_file file_url passthrough for PDFs.
+
+  This adds:
+
+  - file_url variant to OpenAIResponses user content
+  - PDF URL mapping to input_file with file_url in Responses converter
+  - PDF URL support in supportedUrls to avoid auto-download
+
+## 2.0.17
+
+### Patch Changes
+
+- 70bb696: fix(provider/openai): correct web search tool input
+
 ## 2.0.16
 
 ### Patch Changes
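For orientation, here is a minimal sketch of how the 2.0.18 PDF passthrough could be exercised from application code. It assumes the AI SDK `generateText` call and the `openai.responses` model factory; the model id and URL are placeholders, not values taken from this diff.

```ts
import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';

// Sketch only: with 2.0.18, a PDF referenced by URL should be forwarded to the
// Responses API as an input_file with file_url instead of being downloaded by
// the SDK first. The model id and URL below are placeholders.
const { text } = await generateText({
  model: openai.responses('gpt-4o'),
  messages: [
    {
      role: 'user',
      content: [
        { type: 'text', text: 'Summarize this document.' },
        {
          type: 'file',
          data: new URL('https://example.com/report.pdf'),
          mediaType: 'application/pdf',
        },
      ],
    },
  ],
});
console.log(text);
```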
package/dist/index.js
CHANGED
@@ -436,7 +436,23 @@ var webSearchPreviewArgsSchema = import_v44.z.object({
 var webSearchPreview = (0, import_provider_utils4.createProviderDefinedToolFactory)({
   id: "openai.web_search_preview",
   name: "web_search_preview",
-  inputSchema: import_v44.z.object({
+  inputSchema: import_v44.z.object({
+    action: import_v44.z.discriminatedUnion("type", [
+      import_v44.z.object({
+        type: import_v44.z.literal("search"),
+        query: import_v44.z.string()
+      }),
+      import_v44.z.object({
+        type: import_v44.z.literal("open_page"),
+        url: import_v44.z.string()
+      }),
+      import_v44.z.object({
+        type: import_v44.z.literal("find"),
+        url: import_v44.z.string(),
+        pattern: import_v44.z.string()
+      })
+    ]).nullish()
+  })
 });
 
 // src/chat/openai-chat-prepare-tools.ts
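The hunk above widens the `web_search_preview` input schema with an `action` discriminated union. As a rough illustration (example values, not captured provider output), inputs that validate against the new schema look like this; the later `doGenerate`/stream hunks serialize them via `JSON.stringify({ action })`:

```ts
// Example inputs accepted by the widened web_search_preview inputSchema
// (illustrative values only):
const searchInput = { action: { type: 'search', query: 'ai sdk changelog' } };
const openPageInput = { action: { type: 'open_page', url: 'https://example.com' } };
const findInput = { action: { type: 'find', url: 'https://example.com', pattern: 'release notes' } };
```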
@@ -1887,9 +1903,10 @@ async function convertToOpenAIResponsesMessages({
           };
         } else if (part.mediaType === "application/pdf") {
           if (part.data instanceof URL) {
-
-
-
+            return {
+              type: "input_file",
+              file_url: part.data.toString()
+            };
           }
           return {
             type: "input_file",
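For reference, a sketch of the Responses API content part that the new URL branch above emits when a PDF is supplied by URL (placeholder URL, illustrative only):

```ts
// Produced by the new URL branch instead of an inlined base64 file:
const inputFilePart = {
  type: 'input_file',
  file_url: 'https://example.com/report.pdf',
};
```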
@@ -2127,6 +2144,26 @@ function prepareResponsesTools({
 }
 
 // src/responses/openai-responses-language-model.ts
+var webSearchCallItem = import_v413.z.object({
+  type: import_v413.z.literal("web_search_call"),
+  id: import_v413.z.string(),
+  status: import_v413.z.string(),
+  action: import_v413.z.discriminatedUnion("type", [
+    import_v413.z.object({
+      type: import_v413.z.literal("search"),
+      query: import_v413.z.string()
+    }),
+    import_v413.z.object({
+      type: import_v413.z.literal("open_page"),
+      url: import_v413.z.string()
+    }),
+    import_v413.z.object({
+      type: import_v413.z.literal("find"),
+      url: import_v413.z.string(),
+      pattern: import_v413.z.string()
+    })
+  ]).nullish()
+});
 var TOP_LOGPROBS_MAX = 20;
 var LOGPROBS_SCHEMA = import_v413.z.array(
   import_v413.z.object({
@@ -2144,7 +2181,8 @@ var OpenAIResponsesLanguageModel = class {
   constructor(modelId, config) {
     this.specificationVersion = "v2";
     this.supportedUrls = {
-      "image/*": [/^https?:\/\/.*$/]
+      "image/*": [/^https?:\/\/.*$/],
+      "application/pdf": [/^https?:\/\/.*$/]
     };
     this.modelId = modelId;
     this.config = config;
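A brief note on this hunk: entries in `supportedUrls` tell the SDK which URLs may be passed straight through to the provider instead of being auto-downloaded. A minimal sketch of the new PDF rule, using a placeholder URL:

```ts
// http(s) PDF URLs now match supportedUrls['application/pdf'], so they are
// forwarded (ending up as input_file/file_url) rather than fetched by the SDK.
const pdfUrlPattern = /^https?:\/\/.*$/;
console.log(pdfUrlPattern.test('https://example.com/report.pdf')); // true
```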
@@ -2322,7 +2360,7 @@ var OpenAIResponsesLanguageModel = class {
     };
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
     const { args: body, warnings } = await this.getArgs(options);
     const url = this.config.url({
       path: "/responses",
@@ -2385,15 +2423,7 @@ var OpenAIResponsesLanguageModel = class {
       arguments: import_v413.z.string(),
       id: import_v413.z.string()
     }),
-    import_v413.z.object({
-      type: import_v413.z.literal("web_search_call"),
-      id: import_v413.z.string(),
-      status: import_v413.z.string().optional(),
-      action: import_v413.z.object({
-        type: import_v413.z.literal("search"),
-        query: import_v413.z.string().optional()
-      }).nullish()
-    }),
+    webSearchCallItem,
     import_v413.z.object({
       type: import_v413.z.literal("computer_call"),
       id: import_v413.z.string(),
@@ -2524,17 +2554,14 @@ var OpenAIResponsesLanguageModel = class {
             type: "tool-call",
             toolCallId: part.id,
             toolName: "web_search_preview",
-            input: (
+            input: JSON.stringify({ action: part.action }),
             providerExecuted: true
           });
           content.push({
             type: "tool-result",
             toolCallId: part.id,
             toolName: "web_search_preview",
-            result: {
-              status: part.status || "completed",
-              ...((_l = part.action) == null ? void 0 : _l.query) && { query: part.action.query }
-            },
+            result: { status: part.status },
             providerExecuted: true
           });
           break;
@@ -2592,15 +2619,15 @@ var OpenAIResponsesLanguageModel = class {
     return {
       content,
       finishReason: mapOpenAIResponseFinishReason({
-        finishReason: (
+        finishReason: (_j = response.incomplete_details) == null ? void 0 : _j.reason,
         hasToolCalls: content.some((part) => part.type === "tool-call")
       }),
       usage: {
         inputTokens: response.usage.input_tokens,
         outputTokens: response.usage.output_tokens,
         totalTokens: response.usage.input_tokens + response.usage.output_tokens,
-        reasoningTokens: (
-        cachedInputTokens: (
+        reasoningTokens: (_l = (_k = response.usage.output_tokens_details) == null ? void 0 : _k.reasoning_tokens) != null ? _l : void 0,
+        cachedInputTokens: (_n = (_m = response.usage.input_tokens_details) == null ? void 0 : _m.cached_tokens) != null ? _n : void 0
       },
       request: { body },
       response: {
@@ -2652,7 +2679,7 @@ var OpenAIResponsesLanguageModel = class {
         controller.enqueue({ type: "stream-start", warnings });
       },
       transform(chunk, controller) {
-        var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p
+        var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p;
         if (options.includeRawChunks) {
           controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
         }
@@ -2759,20 +2786,14 @@ var OpenAIResponsesLanguageModel = class {
               type: "tool-call",
               toolCallId: value.item.id,
               toolName: "web_search_preview",
-              input: (
+              input: JSON.stringify({ action: value.item.action }),
               providerExecuted: true
             });
             controller.enqueue({
               type: "tool-result",
               toolCallId: value.item.id,
               toolName: "web_search_preview",
-              result: {
-                type: "web_search_tool_result",
-                status: value.item.status || "completed",
-                ...((_d = value.item.action) == null ? void 0 : _d.query) && {
-                  query: value.item.action.query
-                }
-              },
+              result: { status: value.item.status },
               providerExecuted: true
             });
           } else if (value.item.type === "computer_call") {
@@ -2839,7 +2860,7 @@ var OpenAIResponsesLanguageModel = class {
               providerMetadata: {
                 openai: {
                   itemId: value.item.id,
-                  reasoningEncryptedContent: (
+                  reasoningEncryptedContent: (_b = value.item.encrypted_content) != null ? _b : null
                 }
               }
             });
@@ -2874,7 +2895,7 @@ var OpenAIResponsesLanguageModel = class {
           }
         } else if (isResponseReasoningSummaryPartAddedChunk(value)) {
           if (value.summary_index > 0) {
-            (
+            (_c = activeReasoning[value.item_id]) == null ? void 0 : _c.summaryParts.push(
               value.summary_index
             );
             controller.enqueue({
@@ -2883,7 +2904,7 @@ var OpenAIResponsesLanguageModel = class {
               providerMetadata: {
                 openai: {
                   itemId: value.item_id,
-                  reasoningEncryptedContent: (
+                  reasoningEncryptedContent: (_e = (_d = activeReasoning[value.item_id]) == null ? void 0 : _d.encryptedContent) != null ? _e : null
                 }
               }
             });
@@ -2901,20 +2922,20 @@ var OpenAIResponsesLanguageModel = class {
           });
         } else if (isResponseFinishedChunk(value)) {
           finishReason = mapOpenAIResponseFinishReason({
-            finishReason: (
+            finishReason: (_f = value.response.incomplete_details) == null ? void 0 : _f.reason,
             hasToolCalls
           });
           usage.inputTokens = value.response.usage.input_tokens;
           usage.outputTokens = value.response.usage.output_tokens;
           usage.totalTokens = value.response.usage.input_tokens + value.response.usage.output_tokens;
-          usage.reasoningTokens = (
-          usage.cachedInputTokens = (
+          usage.reasoningTokens = (_h = (_g = value.response.usage.output_tokens_details) == null ? void 0 : _g.reasoning_tokens) != null ? _h : void 0;
+          usage.cachedInputTokens = (_j = (_i = value.response.usage.input_tokens_details) == null ? void 0 : _i.cached_tokens) != null ? _j : void 0;
         } else if (isResponseAnnotationAddedChunk(value)) {
           if (value.annotation.type === "url_citation") {
             controller.enqueue({
               type: "source",
               sourceType: "url",
-              id: (
+              id: (_m = (_l = (_k = self.config).generateId) == null ? void 0 : _l.call(_k)) != null ? _m : (0, import_provider_utils12.generateId)(),
               url: value.annotation.url,
               title: value.annotation.title
             });
@@ -2922,7 +2943,7 @@ var OpenAIResponsesLanguageModel = class {
             controller.enqueue({
               type: "source",
               sourceType: "document",
-              id: (
+              id: (_p = (_o = (_n = self.config).generateId) == null ? void 0 : _o.call(_n)) != null ? _p : (0, import_provider_utils12.generateId)(),
               mediaType: "text/plain",
               title: value.annotation.quote,
               filename: value.annotation.file_id
@@ -3062,15 +3083,7 @@ var responseOutputItemDoneSchema = import_v413.z.object({
       arguments: import_v413.z.string(),
       status: import_v413.z.literal("completed")
     }),
-    import_v413.z.object({
-      type: import_v413.z.literal("web_search_call"),
-      id: import_v413.z.string(),
-      status: import_v413.z.literal("completed"),
-      action: import_v413.z.object({
-        type: import_v413.z.literal("search"),
-        query: import_v413.z.string().optional()
-      }).nullish()
-    }),
+    webSearchCallItem,
     import_v413.z.object({
       type: import_v413.z.literal("computer_call"),
       id: import_v413.z.string(),