@ai-sdk/openai 2.0.50 → 2.0.52

This diff shows the content of publicly available package versions that have been released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,17 @@
  # @ai-sdk/openai

+ ## 2.0.52
+
+ ### Patch Changes
+
+ - 8de8de5: fix(provider/openai): end reasoning parts earlier
+
+ ## 2.0.51
+
+ ### Patch Changes
+
+ - cad5c1d: fix(provider/openai): fix web search tool input types
+
  ## 2.0.50

  ### Patch Changes
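Both patch releases touch the provider-executed web search tool and the reasoning-summary streaming of the Responses API path. For context, a minimal usage sketch follows; it assumes `generateText` from the `ai` package and that the provider exposes the tool factory as `openai.tools.webSearch`, which is not shown in this diff.

```ts
// Minimal usage sketch (assumptions noted above; not taken from this diff).
import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';

const result = await generateText({
  model: openai('gpt-5'),
  tools: {
    // Provider-executed web search; its typed output is what 2.0.51 changes.
    web_search: openai.tools.webSearch({ searchContextSize: 'medium' }),
  },
  prompt: 'Summarize the latest AI SDK release notes.',
});
```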
package/dist/index.d.mts CHANGED
@@ -28,7 +28,44 @@ type OpenAIEmbeddingModelId = 'text-embedding-3-small' | 'text-embedding-3-large

  type OpenAIImageModelId = 'dall-e-3' | 'dall-e-2' | 'gpt-image-1' | 'gpt-image-1-mini' | (string & {});

- declare const webSearchToolFactory: _ai_sdk_provider_utils.ProviderDefinedToolFactory<{}, {
+ declare const webSearchToolFactory: _ai_sdk_provider_utils.ProviderDefinedToolFactoryWithOutputSchema<{}, {
+ /**
+ * An object describing the specific action taken in this web search call.
+ * Includes details on how the model used the web (search, open_page, find).
+ */
+ action: {
+ /**
+ * Action type "search" - Performs a web search query.
+ */
+ type: "search";
+ /**
+ * The search query.
+ */
+ query?: string;
+ } | {
+ /**
+ * Action type "openPage" - Opens a specific URL from search results.
+ */
+ type: "openPage";
+ /**
+ * The URL opened by the model.
+ */
+ url: string;
+ } | {
+ /**
+ * Action type "find": Searches for a pattern within a loaded page.
+ */
+ type: "find";
+ /**
+ * The URL of the page searched for the pattern.
+ */
+ url: string;
+ /**
+ * The pattern or text to search for within the page.
+ */
+ pattern: string;
+ };
+ }, {
  /**
  * Filters for the search.
  */
@@ -221,7 +258,19 @@ declare const openaiTools: {
  *
  * @deprecated Use `webSearch` instead.
  */
- webSearchPreview: _ai_sdk_provider_utils.ProviderDefinedToolFactory<{}, {
+ webSearchPreview: _ai_sdk_provider_utils.ProviderDefinedToolFactoryWithOutputSchema<{}, {
+ action: {
+ type: "search";
+ query?: string;
+ } | {
+ type: "openPage";
+ url: string;
+ } | {
+ type: "find";
+ url: string;
+ pattern: string;
+ };
+ }, {
  searchContextSize?: "low" | "medium" | "high";
  userLocation?: {
  type: "approximate";
@@ -241,7 +290,19 @@ declare const openaiTools: {
  * @param searchContextSize - The search context size to use for the web search.
  * @param userLocation - The user location to use for the web search.
  */
- webSearch: (args?: Parameters<typeof webSearchToolFactory>[0]) => _ai_sdk_provider_utils.Tool<{}, unknown>;
+ webSearch: (args?: Parameters<typeof webSearchToolFactory>[0]) => _ai_sdk_provider_utils.Tool<{}, {
+ action: {
+ type: "search";
+ query?: string;
+ } | {
+ type: "openPage";
+ url: string;
+ } | {
+ type: "find";
+ url: string;
+ pattern: string;
+ };
+ }>;
  };

  type OpenAIResponsesModelId = 'chatgpt-4o-latest' | 'gpt-3.5-turbo-0125' | 'gpt-3.5-turbo-1106' | 'gpt-3.5-turbo' | 'gpt-4-0613' | 'gpt-4-turbo-2024-04-09' | 'gpt-4-turbo' | 'gpt-4.1-2025-04-14' | 'gpt-4.1-mini-2025-04-14' | 'gpt-4.1-mini' | 'gpt-4.1-nano-2025-04-14' | 'gpt-4.1-nano' | 'gpt-4.1' | 'gpt-4' | 'gpt-4o-2024-05-13' | 'gpt-4o-2024-08-06' | 'gpt-4o-2024-11-20' | 'gpt-4o-mini-2024-07-18' | 'gpt-4o-mini' | 'gpt-4o' | 'gpt-5-2025-08-07' | 'gpt-5-chat-latest' | 'gpt-5-codex' | 'gpt-5-mini-2025-08-07' | 'gpt-5-mini' | 'gpt-5-nano-2025-08-07' | 'gpt-5-nano' | 'gpt-5-pro-2025-10-06' | 'gpt-5-pro' | 'gpt-5' | 'o1-2024-12-17' | 'o1' | 'o3-2025-04-16' | 'o3-mini-2025-01-31' | 'o3-mini' | 'o3' | (string & {});
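The practical effect of switching `webSearchToolFactory` to `ProviderDefinedToolFactoryWithOutputSchema` is that web search tool results are no longer typed as `unknown`; they carry the `action` discriminated union declared above. A small consumer-side sketch follows; the union is copied from the declarations, while the function name and the assumption that the result output is handled as a plain object of that shape are illustrative.

```ts
// Sketch of narrowing the typed web search output introduced in 2.0.51.
type WebSearchOutput = {
  action:
    | { type: 'search'; query?: string }
    | { type: 'openPage'; url: string }
    | { type: 'find'; url: string; pattern: string };
};

function describeWebSearchAction({ action }: WebSearchOutput): string {
  switch (action.type) {
    case 'search':
      return `searched the web${action.query ? ` for "${action.query}"` : ''}`;
    case 'openPage':
      return `opened ${action.url}`;
    case 'find':
      return `looked for "${action.pattern}" on ${action.url}`;
  }
}
```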
package/dist/index.d.ts CHANGED
@@ -28,7 +28,44 @@ type OpenAIEmbeddingModelId = 'text-embedding-3-small' | 'text-embedding-3-large

  type OpenAIImageModelId = 'dall-e-3' | 'dall-e-2' | 'gpt-image-1' | 'gpt-image-1-mini' | (string & {});

- declare const webSearchToolFactory: _ai_sdk_provider_utils.ProviderDefinedToolFactory<{}, {
+ declare const webSearchToolFactory: _ai_sdk_provider_utils.ProviderDefinedToolFactoryWithOutputSchema<{}, {
+ /**
+ * An object describing the specific action taken in this web search call.
+ * Includes details on how the model used the web (search, open_page, find).
+ */
+ action: {
+ /**
+ * Action type "search" - Performs a web search query.
+ */
+ type: "search";
+ /**
+ * The search query.
+ */
+ query?: string;
+ } | {
+ /**
+ * Action type "openPage" - Opens a specific URL from search results.
+ */
+ type: "openPage";
+ /**
+ * The URL opened by the model.
+ */
+ url: string;
+ } | {
+ /**
+ * Action type "find": Searches for a pattern within a loaded page.
+ */
+ type: "find";
+ /**
+ * The URL of the page searched for the pattern.
+ */
+ url: string;
+ /**
+ * The pattern or text to search for within the page.
+ */
+ pattern: string;
+ };
+ }, {
  /**
  * Filters for the search.
  */
@@ -221,7 +258,19 @@ declare const openaiTools: {
  *
  * @deprecated Use `webSearch` instead.
  */
- webSearchPreview: _ai_sdk_provider_utils.ProviderDefinedToolFactory<{}, {
+ webSearchPreview: _ai_sdk_provider_utils.ProviderDefinedToolFactoryWithOutputSchema<{}, {
+ action: {
+ type: "search";
+ query?: string;
+ } | {
+ type: "openPage";
+ url: string;
+ } | {
+ type: "find";
+ url: string;
+ pattern: string;
+ };
+ }, {
  searchContextSize?: "low" | "medium" | "high";
  userLocation?: {
  type: "approximate";
@@ -241,7 +290,19 @@ declare const openaiTools: {
  * @param searchContextSize - The search context size to use for the web search.
  * @param userLocation - The user location to use for the web search.
  */
- webSearch: (args?: Parameters<typeof webSearchToolFactory>[0]) => _ai_sdk_provider_utils.Tool<{}, unknown>;
+ webSearch: (args?: Parameters<typeof webSearchToolFactory>[0]) => _ai_sdk_provider_utils.Tool<{}, {
+ action: {
+ type: "search";
+ query?: string;
+ } | {
+ type: "openPage";
+ url: string;
+ } | {
+ type: "find";
+ url: string;
+ pattern: string;
+ };
+ }>;
  };

  type OpenAIResponsesModelId = 'chatgpt-4o-latest' | 'gpt-3.5-turbo-0125' | 'gpt-3.5-turbo-1106' | 'gpt-3.5-turbo' | 'gpt-4-0613' | 'gpt-4-turbo-2024-04-09' | 'gpt-4-turbo' | 'gpt-4.1-2025-04-14' | 'gpt-4.1-mini-2025-04-14' | 'gpt-4.1-mini' | 'gpt-4.1-nano-2025-04-14' | 'gpt-4.1-nano' | 'gpt-4.1' | 'gpt-4' | 'gpt-4o-2024-05-13' | 'gpt-4o-2024-08-06' | 'gpt-4o-2024-11-20' | 'gpt-4o-mini-2024-07-18' | 'gpt-4o-mini' | 'gpt-4o' | 'gpt-5-2025-08-07' | 'gpt-5-chat-latest' | 'gpt-5-codex' | 'gpt-5-mini-2025-08-07' | 'gpt-5-mini' | 'gpt-5-nano-2025-08-07' | 'gpt-5-nano' | 'gpt-5-pro-2025-10-06' | 'gpt-5-pro' | 'gpt-5' | 'o1-2024-12-17' | 'o1' | 'o3-2025-04-16' | 'o3-mini-2025-01-31' | 'o3-mini' | 'o3' | (string & {});
package/dist/index.js CHANGED
@@ -1916,9 +1916,7 @@ var import_v413 = require("zod/v4");
  var webSearchArgsSchema = (0, import_provider_utils18.lazySchema)(
  () => (0, import_provider_utils18.zodSchema)(
  import_v413.z.object({
- filters: import_v413.z.object({
- allowedDomains: import_v413.z.array(import_v413.z.string()).optional()
- }).optional(),
+ filters: import_v413.z.object({ allowedDomains: import_v413.z.array(import_v413.z.string()).optional() }).optional(),
  searchContextSize: import_v413.z.enum(["low", "medium", "high"]).optional(),
  userLocation: import_v413.z.object({
  type: import_v413.z.literal("approximate"),
@@ -1930,16 +1928,17 @@ var webSearchArgsSchema = (0, import_provider_utils18.lazySchema)(
  })
  )
  );
- var webSearchInputSchema = (0, import_provider_utils18.lazySchema)(
+ var webSearchInputSchema = (0, import_provider_utils18.lazySchema)(() => (0, import_provider_utils18.zodSchema)(import_v413.z.object({})));
+ var webSearchOutputSchema = (0, import_provider_utils18.lazySchema)(
  () => (0, import_provider_utils18.zodSchema)(
  import_v413.z.object({
  action: import_v413.z.discriminatedUnion("type", [
  import_v413.z.object({
  type: import_v413.z.literal("search"),
- query: import_v413.z.string().nullish()
+ query: import_v413.z.string().optional()
  }),
  import_v413.z.object({
- type: import_v413.z.literal("open_page"),
+ type: import_v413.z.literal("openPage"),
  url: import_v413.z.string()
  }),
  import_v413.z.object({
@@ -1947,18 +1946,17 @@ var webSearchInputSchema = (0, import_provider_utils18.lazySchema)(
  url: import_v413.z.string(),
  pattern: import_v413.z.string()
  })
- ]).nullish()
+ ])
  })
  )
  );
- var webSearchToolFactory = (0, import_provider_utils18.createProviderDefinedToolFactory)({
+ var webSearchToolFactory = (0, import_provider_utils18.createProviderDefinedToolFactoryWithOutputSchema)({
  id: "openai.web_search",
  name: "web_search",
- inputSchema: webSearchInputSchema
+ inputSchema: webSearchInputSchema,
+ outputSchema: webSearchOutputSchema
  });
- var webSearch = (args = {}) => {
- return webSearchToolFactory(args);
- };
+ var webSearch = (args = {}) => webSearchToolFactory(args);

  // src/tool/web-search-preview.ts
  var import_provider_utils19 = require("@ai-sdk/provider-utils");
@@ -1966,51 +1964,30 @@ var import_v414 = require("zod/v4");
  var webSearchPreviewArgsSchema = (0, import_provider_utils19.lazySchema)(
  () => (0, import_provider_utils19.zodSchema)(
  import_v414.z.object({
- /**
- * Search context size to use for the web search.
- * - high: Most comprehensive context, highest cost, slower response
- * - medium: Balanced context, cost, and latency (default)
- * - low: Least context, lowest cost, fastest response
- */
  searchContextSize: import_v414.z.enum(["low", "medium", "high"]).optional(),
- /**
- * User location information to provide geographically relevant search results.
- */
  userLocation: import_v414.z.object({
- /**
- * Type of location (always 'approximate')
- */
  type: import_v414.z.literal("approximate"),
- /**
- * Two-letter ISO country code (e.g., 'US', 'GB')
- */
  country: import_v414.z.string().optional(),
- /**
- * City name (free text, e.g., 'Minneapolis')
- */
  city: import_v414.z.string().optional(),
- /**
- * Region name (free text, e.g., 'Minnesota')
- */
  region: import_v414.z.string().optional(),
- /**
- * IANA timezone (e.g., 'America/Chicago')
- */
  timezone: import_v414.z.string().optional()
  }).optional()
  })
  )
  );
  var webSearchPreviewInputSchema = (0, import_provider_utils19.lazySchema)(
+ () => (0, import_provider_utils19.zodSchema)(import_v414.z.object({}))
+ );
+ var webSearchPreviewOutputSchema = (0, import_provider_utils19.lazySchema)(
  () => (0, import_provider_utils19.zodSchema)(
  import_v414.z.object({
  action: import_v414.z.discriminatedUnion("type", [
  import_v414.z.object({
  type: import_v414.z.literal("search"),
- query: import_v414.z.string().nullish()
+ query: import_v414.z.string().optional()
  }),
  import_v414.z.object({
- type: import_v414.z.literal("open_page"),
+ type: import_v414.z.literal("openPage"),
  url: import_v414.z.string()
  }),
  import_v414.z.object({
@@ -2018,14 +1995,15 @@ var webSearchPreviewInputSchema = (0, import_provider_utils19.lazySchema)(
  url: import_v414.z.string(),
  pattern: import_v414.z.string()
  })
- ]).nullish()
+ ])
  })
  )
  );
- var webSearchPreview = (0, import_provider_utils19.createProviderDefinedToolFactory)({
+ var webSearchPreview = (0, import_provider_utils19.createProviderDefinedToolFactoryWithOutputSchema)({
  id: "openai.web_search_preview",
  name: "web_search_preview",
- inputSchema: webSearchPreviewInputSchema
+ inputSchema: webSearchPreviewInputSchema,
+ outputSchema: webSearchPreviewOutputSchema
  });

  // src/openai-tools.ts
@@ -2305,6 +2283,9 @@ async function convertToOpenAIResponsesInput({
  input.push(reasoningMessages[reasoningId]);
  } else {
  reasoningMessage.summary.push(...summaryParts);
+ if ((providerOptions == null ? void 0 : providerOptions.reasoningEncryptedContent) != null) {
+ reasoningMessage.encrypted_content = providerOptions.reasoningEncryptedContent;
+ }
  }
  }
  } else {
@@ -2471,11 +2452,7 @@ var openaiResponsesChunkSchema = (0, import_provider_utils21.lazyValidator)(
  import_v416.z.object({
  type: import_v416.z.literal("web_search_call"),
  id: import_v416.z.string(),
- status: import_v416.z.string(),
- action: import_v416.z.object({
- type: import_v416.z.literal("search"),
- query: import_v416.z.string().optional()
- }).nullish()
+ status: import_v416.z.string()
  }),
  import_v416.z.object({
  type: import_v416.z.literal("computer_call"),
@@ -2561,7 +2538,7 @@ var openaiResponsesChunkSchema = (0, import_provider_utils21.lazyValidator)(
  url: import_v416.z.string(),
  pattern: import_v416.z.string()
  })
- ]).nullish()
+ ])
  }),
  import_v416.z.object({
  type: import_v416.z.literal("file_search_call"),
@@ -2651,6 +2628,11 @@ var openaiResponsesChunkSchema = (0, import_provider_utils21.lazyValidator)(
  summary_index: import_v416.z.number(),
  delta: import_v416.z.string()
  }),
+ import_v416.z.object({
+ type: import_v416.z.literal("response.reasoning_summary_part.done"),
+ item_id: import_v416.z.string(),
+ summary_index: import_v416.z.number()
+ }),
  import_v416.z.object({
  type: import_v416.z.literal("error"),
  code: import_v416.z.string(),
@@ -2742,7 +2724,7 @@ var openaiResponsesResponseSchema = (0, import_provider_utils21.lazyValidator)(
  url: import_v416.z.string(),
  pattern: import_v416.z.string()
  })
- ]).nullish()
+ ])
  }),
  import_v416.z.object({
  type: import_v416.z.literal("file_search_call"),
@@ -3285,7 +3267,8 @@ var OpenAIResponsesLanguageModel = class {
  tools: openaiTools2,
  tool_choice: openaiToolChoice
  },
- warnings: [...warnings, ...toolWarnings]
+ warnings: [...warnings, ...toolWarnings],
+ store
  };
  }
  async doGenerate(options) {
@@ -3440,14 +3423,14 @@ var OpenAIResponsesLanguageModel = class {
  type: "tool-call",
  toolCallId: part.id,
  toolName: webSearchToolName != null ? webSearchToolName : "web_search",
- input: JSON.stringify({ action: part.action }),
+ input: JSON.stringify({}),
  providerExecuted: true
  });
  content.push({
  type: "tool-result",
  toolCallId: part.id,
  toolName: webSearchToolName != null ? webSearchToolName : "web_search",
- result: { status: part.status },
+ result: mapWebSearchOutput(part.action),
  providerExecuted: true
  });
  break;
@@ -3560,7 +3543,8 @@ var OpenAIResponsesLanguageModel = class {
  const {
  args: body,
  warnings,
- webSearchToolName
+ webSearchToolName,
+ store
  } = await this.getArgs(options);
  const { responseHeaders, value: response } = await (0, import_provider_utils24.postJsonToApi)({
  url: this.config.url({
@@ -3599,7 +3583,7 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({ type: "stream-start", warnings });
  },
  transform(chunk, controller) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w;
+ var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v;
  if (options.includeRawChunks) {
  controller.enqueue({ type: "raw", rawValue: chunk.rawValue });
  }
@@ -3631,6 +3615,17 @@ var OpenAIResponsesLanguageModel = class {
  toolName: webSearchToolName != null ? webSearchToolName : "web_search",
  providerExecuted: true
  });
+ controller.enqueue({
+ type: "tool-input-end",
+ id: value.item.id
+ });
+ controller.enqueue({
+ type: "tool-call",
+ toolCallId: value.item.id,
+ toolName: "web_search",
+ input: JSON.stringify({}),
+ providerExecuted: true
+ });
  } else if (value.item.type === "computer_call") {
  ongoingToolCalls[value.output_index] = {
  toolName: "computer_use",
@@ -3687,10 +3682,10 @@ var OpenAIResponsesLanguageModel = class {
  }
  }
  });
- } else if (isResponseOutputItemAddedReasoningChunk(value)) {
+ } else if (isResponseOutputItemAddedChunk(value) && value.item.type === "reasoning") {
  activeReasoning[value.item.id] = {
  encryptedContent: value.item.encrypted_content,
- summaryParts: [0]
+ summaryParts: { 0: "active" }
  };
  controller.enqueue({
  type: "reasoning-start",
@@ -3724,22 +3719,11 @@ var OpenAIResponsesLanguageModel = class {
  });
  } else if (value.item.type === "web_search_call") {
  ongoingToolCalls[value.output_index] = void 0;
- controller.enqueue({
- type: "tool-input-end",
- id: value.item.id
- });
- controller.enqueue({
- type: "tool-call",
- toolCallId: value.item.id,
- toolName: "web_search",
- input: JSON.stringify({ action: value.item.action }),
- providerExecuted: true
- });
  controller.enqueue({
  type: "tool-result",
  toolCallId: value.item.id,
  toolName: "web_search",
- result: { status: value.item.status },
+ result: mapWebSearchOutput(value.item.action),
  providerExecuted: true
  });
  } else if (value.item.type === "computer_call") {
@@ -3829,9 +3813,14 @@ var OpenAIResponsesLanguageModel = class {
  type: "text-end",
  id: value.item.id
  });
- } else if (isResponseOutputItemDoneReasoningChunk(value)) {
+ } else if (value.item.type === "reasoning") {
  const activeReasoningPart = activeReasoning[value.item.id];
- for (const summaryIndex of activeReasoningPart.summaryParts) {
+ const summaryPartIndices = Object.entries(
+ activeReasoningPart.summaryParts
+ ).filter(
+ ([_, status]) => status === "active" || status === "can-conclude"
+ ).map(([summaryIndex]) => summaryIndex);
+ for (const summaryIndex of summaryPartIndices) {
  controller.enqueue({
  type: "reasoning-end",
  id: `${value.item.id}:${summaryIndex}`,
@@ -3905,23 +3894,34 @@ var OpenAIResponsesLanguageModel = class {
  if (((_f = (_e = options.providerOptions) == null ? void 0 : _e.openai) == null ? void 0 : _f.logprobs) && value.logprobs) {
  logprobs.push(value.logprobs);
  }
- } else if (isResponseReasoningSummaryPartAddedChunk(value)) {
+ } else if (value.type === "response.reasoning_summary_part.added") {
  if (value.summary_index > 0) {
- (_g = activeReasoning[value.item_id]) == null ? void 0 : _g.summaryParts.push(
- value.summary_index
- );
+ const activeReasoningPart = activeReasoning[value.item_id];
+ activeReasoningPart.summaryParts[value.summary_index] = "active";
+ for (const summaryIndex of Object.keys(
+ activeReasoningPart.summaryParts
+ )) {
+ if (activeReasoningPart.summaryParts[summaryIndex] === "can-conclude") {
+ controller.enqueue({
+ type: "reasoning-end",
+ id: `${value.item_id}:${summaryIndex}`,
+ providerMetadata: { openai: { itemId: value.item_id } }
+ });
+ activeReasoningPart.summaryParts[summaryIndex] = "concluded";
+ }
+ }
  controller.enqueue({
  type: "reasoning-start",
  id: `${value.item_id}:${value.summary_index}`,
  providerMetadata: {
  openai: {
  itemId: value.item_id,
- reasoningEncryptedContent: (_i = (_h = activeReasoning[value.item_id]) == null ? void 0 : _h.encryptedContent) != null ? _i : null
+ reasoningEncryptedContent: (_h = (_g = activeReasoning[value.item_id]) == null ? void 0 : _g.encryptedContent) != null ? _h : null
  }
  }
  });
  }
- } else if (isResponseReasoningSummaryTextDeltaChunk(value)) {
+ } else if (value.type === "response.reasoning_summary_text.delta") {
  controller.enqueue({
  type: "reasoning-delta",
  id: `${value.item_id}:${value.summary_index}`,
@@ -3932,16 +3932,29 @@ var OpenAIResponsesLanguageModel = class {
  }
  }
  });
+ } else if (value.type === "response.reasoning_summary_part.done") {
+ if (store) {
+ controller.enqueue({
+ type: "reasoning-end",
+ id: `${value.item_id}:${value.summary_index}`,
+ providerMetadata: {
+ openai: { itemId: value.item_id }
+ }
+ });
+ activeReasoning[value.item_id].summaryParts[value.summary_index] = "concluded";
+ } else {
+ activeReasoning[value.item_id].summaryParts[value.summary_index] = "can-conclude";
+ }
  } else if (isResponseFinishedChunk(value)) {
  finishReason = mapOpenAIResponseFinishReason({
- finishReason: (_j = value.response.incomplete_details) == null ? void 0 : _j.reason,
+ finishReason: (_i = value.response.incomplete_details) == null ? void 0 : _i.reason,
  hasFunctionCall
  });
  usage.inputTokens = value.response.usage.input_tokens;
  usage.outputTokens = value.response.usage.output_tokens;
  usage.totalTokens = value.response.usage.input_tokens + value.response.usage.output_tokens;
- usage.reasoningTokens = (_l = (_k = value.response.usage.output_tokens_details) == null ? void 0 : _k.reasoning_tokens) != null ? _l : void 0;
- usage.cachedInputTokens = (_n = (_m = value.response.usage.input_tokens_details) == null ? void 0 : _m.cached_tokens) != null ? _n : void 0;
+ usage.reasoningTokens = (_k = (_j = value.response.usage.output_tokens_details) == null ? void 0 : _j.reasoning_tokens) != null ? _k : void 0;
+ usage.cachedInputTokens = (_m = (_l = value.response.usage.input_tokens_details) == null ? void 0 : _l.cached_tokens) != null ? _m : void 0;
  if (typeof value.response.service_tier === "string") {
  serviceTier = value.response.service_tier;
  }
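The streaming changes above replace the `summaryParts` index array with a per-index status map, so a reasoning summary part can be ended as soon as `response.reasoning_summary_part.done` arrives (when `store` is enabled) or deferred until the next summary part starts or the reasoning item completes. A simplified, standalone restatement of that bookkeeping follows; the names and function boundaries are illustrative, not the package's internal structure.

```ts
// Simplified restatement of the summary-part state handling visible in the
// diff above; illustrative only.
type SummaryPartStatus = 'active' | 'can-conclude' | 'concluded';

interface ReasoningState {
  summaryParts: Record<number, SummaryPartStatus>;
}

// A part that finishes while `store` is enabled ends immediately; otherwise it
// is only marked as ready so it can be closed later.
function onSummaryPartDone(
  state: ReasoningState,
  summaryIndex: number,
  store: boolean,
  emitReasoningEnd: (summaryIndex: number) => void,
): void {
  if (store) {
    emitReasoningEnd(summaryIndex);
    state.summaryParts[summaryIndex] = 'concluded';
  } else {
    state.summaryParts[summaryIndex] = 'can-conclude';
  }
}

// When a new part starts, any earlier parts that were waiting are closed first.
function onSummaryPartAdded(
  state: ReasoningState,
  summaryIndex: number,
  emitReasoningEnd: (summaryIndex: number) => void,
): void {
  state.summaryParts[summaryIndex] = 'active';
  for (const [index, status] of Object.entries(state.summaryParts)) {
    if (status === 'can-conclude') {
      emitReasoningEnd(Number(index));
      state.summaryParts[Number(index)] = 'concluded';
    }
  }
}
```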
@@ -3950,7 +3963,7 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({
  type: "source",
  sourceType: "url",
- id: (_q = (_p = (_o = self.config).generateId) == null ? void 0 : _p.call(_o)) != null ? _q : (0, import_provider_utils24.generateId)(),
+ id: (_p = (_o = (_n = self.config).generateId) == null ? void 0 : _o.call(_n)) != null ? _p : (0, import_provider_utils24.generateId)(),
  url: value.annotation.url,
  title: value.annotation.title
  });
@@ -3958,10 +3971,10 @@ var OpenAIResponsesLanguageModel = class {
  controller.enqueue({
  type: "source",
  sourceType: "document",
- id: (_t = (_s = (_r = self.config).generateId) == null ? void 0 : _s.call(_r)) != null ? _t : (0, import_provider_utils24.generateId)(),
+ id: (_s = (_r = (_q = self.config).generateId) == null ? void 0 : _r.call(_q)) != null ? _s : (0, import_provider_utils24.generateId)(),
  mediaType: "text/plain",
- title: (_v = (_u = value.annotation.quote) != null ? _u : value.annotation.filename) != null ? _v : "Document",
- filename: (_w = value.annotation.filename) != null ? _w : value.annotation.file_id
+ title: (_u = (_t = value.annotation.quote) != null ? _t : value.annotation.filename) != null ? _u : "Document",
+ filename: (_v = value.annotation.filename) != null ? _v : value.annotation.file_id
  });
  }
  } else if (isErrorChunk(value)) {
@@ -4000,9 +4013,6 @@ function isTextDeltaChunk(chunk) {
  function isResponseOutputItemDoneChunk(chunk) {
  return chunk.type === "response.output_item.done";
  }
- function isResponseOutputItemDoneReasoningChunk(chunk) {
- return isResponseOutputItemDoneChunk(chunk) && chunk.item.type === "reasoning";
- }
  function isResponseFinishedChunk(chunk) {
  return chunk.type === "response.completed" || chunk.type === "response.incomplete";
  }
@@ -4021,18 +4031,9 @@ function isResponseCodeInterpreterCallCodeDoneChunk(chunk) {
  function isResponseOutputItemAddedChunk(chunk) {
  return chunk.type === "response.output_item.added";
  }
- function isResponseOutputItemAddedReasoningChunk(chunk) {
- return isResponseOutputItemAddedChunk(chunk) && chunk.item.type === "reasoning";
- }
  function isResponseAnnotationAddedChunk(chunk) {
  return chunk.type === "response.output_text.annotation.added";
  }
- function isResponseReasoningSummaryPartAddedChunk(chunk) {
- return chunk.type === "response.reasoning_summary_part.added";
- }
- function isResponseReasoningSummaryTextDeltaChunk(chunk) {
- return chunk.type === "response.reasoning_summary_text.delta";
- }
  function isErrorChunk(chunk) {
  return chunk.type === "error";
  }
@@ -4070,6 +4071,19 @@ function getResponsesModelConfig(modelId) {
  isReasoningModel: false
  };
  }
+ function mapWebSearchOutput(action) {
+ var _a;
+ switch (action.type) {
+ case "search":
+ return { action: { type: "search", query: (_a = action.query) != null ? _a : void 0 } };
+ case "open_page":
+ return { action: { type: "openPage", url: action.url } };
+ case "find":
+ return {
+ action: { type: "find", url: action.url, pattern: action.pattern }
+ };
+ }
+ }

  // src/speech/openai-speech-model.ts
  var import_provider_utils26 = require("@ai-sdk/provider-utils");
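The new `mapWebSearchOutput` above is what now feeds the web search `tool-result` content: it converts the raw Responses API action (snake_case `open_page`, nullable `query`) into the camelCase union exposed by the tool's output schema. A typed, standalone restatement of that conversion, with illustrative type names:

```ts
// RawWebSearchAction mirrors the OpenAI Responses API `web_search_call.action`
// payload; the return shape is the tool output introduced in this release.
type RawWebSearchAction =
  | { type: 'search'; query?: string | null }
  | { type: 'open_page'; url: string }
  | { type: 'find'; url: string; pattern: string };

type WebSearchToolOutput =
  | { action: { type: 'search'; query?: string } }
  | { action: { type: 'openPage'; url: string } }
  | { action: { type: 'find'; url: string; pattern: string } };

function mapWebSearchAction(action: RawWebSearchAction): WebSearchToolOutput {
  switch (action.type) {
    case 'search':
      // nullish query from the API becomes an optional field in the tool output
      return { action: { type: 'search', query: action.query ?? undefined } };
    case 'open_page':
      return { action: { type: 'openPage', url: action.url } };
    case 'find':
      return { action: { type: 'find', url: action.url, pattern: action.pattern } };
  }
}
```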
@@ -4424,7 +4438,7 @@ var OpenAITranscriptionModel = class {
  };

  // src/version.ts
- var VERSION = true ? "2.0.50" : "0.0.0-test";
+ var VERSION = true ? "2.0.52" : "0.0.0-test";

  // src/openai-provider.ts
  function createOpenAI(options = {})