@ai-sdk/openai 3.0.53 → 3.0.55

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48):
  1. package/CHANGELOG.md +20 -0
  2. package/dist/index.d.mts +1 -1
  3. package/dist/index.d.ts +1 -1
  4. package/dist/index.js +5 -3
  5. package/dist/index.js.map +1 -1
  6. package/dist/index.mjs +41 -12
  7. package/dist/index.mjs.map +1 -1
  8. package/dist/internal/index.d.mts +1 -1
  9. package/dist/internal/index.d.ts +1 -1
  10. package/dist/internal/index.js +4 -2
  11. package/dist/internal/index.js.map +1 -1
  12. package/dist/internal/index.mjs +40 -11
  13. package/dist/internal/index.mjs.map +1 -1
  14. package/package.json +6 -5
  15. package/src/chat/convert-openai-chat-usage.ts +1 -1
  16. package/src/chat/convert-to-openai-chat-messages.ts +4 -4
  17. package/src/chat/map-openai-finish-reason.ts +1 -1
  18. package/src/chat/openai-chat-api.ts +6 -2
  19. package/src/chat/openai-chat-language-model.ts +14 -14
  20. package/src/chat/openai-chat-options.ts +5 -1
  21. package/src/chat/openai-chat-prepare-tools.ts +3 -3
  22. package/src/completion/convert-openai-completion-usage.ts +1 -1
  23. package/src/completion/convert-to-openai-completion-prompt.ts +1 -1
  24. package/src/completion/map-openai-finish-reason.ts +1 -1
  25. package/src/completion/openai-completion-api.ts +5 -1
  26. package/src/completion/openai-completion-language-model.ts +6 -6
  27. package/src/completion/openai-completion-options.ts +5 -1
  28. package/src/embedding/openai-embedding-model.ts +3 -3
  29. package/src/embedding/openai-embedding-options.ts +5 -1
  30. package/src/image/openai-image-model.ts +3 -3
  31. package/src/image/openai-image-options.ts +3 -0
  32. package/src/openai-config.ts +1 -1
  33. package/src/openai-provider.ts +9 -9
  34. package/src/responses/convert-openai-responses-usage.ts +1 -1
  35. package/src/responses/convert-to-openai-responses-input.ts +5 -5
  36. package/src/responses/map-openai-responses-finish-reason.ts +1 -1
  37. package/src/responses/openai-responses-api.ts +6 -2
  38. package/src/responses/openai-responses-language-model.ts +35 -35
  39. package/src/responses/openai-responses-options.ts +5 -1
  40. package/src/responses/openai-responses-prepare-tools.ts +4 -4
  41. package/src/responses/openai-responses-provider-metadata.ts +2 -2
  42. package/src/speech/openai-speech-model.ts +4 -4
  43. package/src/speech/openai-speech-options.ts +5 -1
  44. package/src/tool/file-search.ts +1 -1
  45. package/src/tool/mcp.ts +1 -1
  46. package/src/tool/tool-search.ts +2 -2
  47. package/src/transcription/openai-transcription-model.ts +4 -4
  48. package/src/transcription/openai-transcription-options.ts +5 -1
@@ -233,7 +233,7 @@ function convertToOpenAIChatMessages({
   }
   messages.push({
     role: "assistant",
-    content: text,
+    content: text || null,
     tool_calls: toolCalls.length > 0 ? toolCalls : void 0
   });
   break;
@@ -307,7 +307,10 @@ function mapOpenAIFinishReason(finishReason) {
 }

 // src/chat/openai-chat-api.ts
-import { lazySchema, zodSchema } from "@ai-sdk/provider-utils";
+import {
+  lazySchema,
+  zodSchema
+} from "@ai-sdk/provider-utils";
 import { z as z2 } from "zod/v4";
 var openaiChatResponseSchema = lazySchema(
   () => zodSchema(
@@ -449,7 +452,10 @@ var openaiChatChunkSchema = lazySchema(
 );

 // src/chat/openai-chat-options.ts
-import { lazySchema as lazySchema2, zodSchema as zodSchema2 } from "@ai-sdk/provider-utils";
+import {
+  lazySchema as lazySchema2,
+  zodSchema as zodSchema2
+} from "@ai-sdk/provider-utils";
 import { z as z3 } from "zod/v4";
 var openaiLanguageModelChatOptions = lazySchema2(
   () => zodSchema2(
@@ -1271,7 +1277,10 @@ function mapOpenAIFinishReason2(finishReason) {

 // src/completion/openai-completion-api.ts
 import { z as z4 } from "zod/v4";
-import { lazySchema as lazySchema3, zodSchema as zodSchema3 } from "@ai-sdk/provider-utils";
+import {
+  lazySchema as lazySchema3,
+  zodSchema as zodSchema3
+} from "@ai-sdk/provider-utils";
 var openaiCompletionResponseSchema = lazySchema3(
   () => zodSchema3(
     z4.object({
@@ -1328,7 +1337,10 @@ var openaiCompletionChunkSchema = lazySchema3(
 );

 // src/completion/openai-completion-options.ts
-import { lazySchema as lazySchema4, zodSchema as zodSchema4 } from "@ai-sdk/provider-utils";
+import {
+  lazySchema as lazySchema4,
+  zodSchema as zodSchema4
+} from "@ai-sdk/provider-utils";
 import { z as z5 } from "zod/v4";
 var openaiLanguageModelCompletionOptions = lazySchema4(
   () => zodSchema4(
@@ -1616,7 +1628,10 @@ import {
 } from "@ai-sdk/provider-utils";

 // src/embedding/openai-embedding-options.ts
-import { lazySchema as lazySchema5, zodSchema as zodSchema5 } from "@ai-sdk/provider-utils";
+import {
+  lazySchema as lazySchema5,
+  zodSchema as zodSchema5
+} from "@ai-sdk/provider-utils";
 import { z as z6 } from "zod/v4";
 var openaiEmbeddingModelOptions = lazySchema5(
   () => zodSchema5(
@@ -1760,13 +1775,15 @@ var modelMaxImagesPerCall = {
1760
1775
  "gpt-image-1": 10,
1761
1776
  "gpt-image-1-mini": 10,
1762
1777
  "gpt-image-1.5": 10,
1778
+ "gpt-image-2": 10,
1763
1779
  "chatgpt-image-latest": 10
1764
1780
  };
1765
1781
  var defaultResponseFormatPrefixes = [
1766
1782
  "chatgpt-image-",
1767
1783
  "gpt-image-1-mini",
1768
1784
  "gpt-image-1.5",
1769
- "gpt-image-1"
1785
+ "gpt-image-1",
1786
+ "gpt-image-2"
1770
1787
  ];
1771
1788
  function hasDefaultResponseFormat(modelId) {
1772
1789
  return defaultResponseFormatPrefixes.some(
@@ -2012,7 +2029,10 @@ var openaiTranscriptionResponseSchema = lazySchema8(
 );

 // src/transcription/openai-transcription-options.ts
-import { lazySchema as lazySchema9, zodSchema as zodSchema9 } from "@ai-sdk/provider-utils";
+import {
+  lazySchema as lazySchema9,
+  zodSchema as zodSchema9
+} from "@ai-sdk/provider-utils";
 import { z as z10 } from "zod/v4";
 var openAITranscriptionModelOptions = lazySchema9(
   () => zodSchema9(
@@ -2219,7 +2239,10 @@ import {
 } from "@ai-sdk/provider-utils";

 // src/speech/openai-speech-options.ts
-import { lazySchema as lazySchema10, zodSchema as zodSchema10 } from "@ai-sdk/provider-utils";
+import {
+  lazySchema as lazySchema10,
+  zodSchema as zodSchema10
+} from "@ai-sdk/provider-utils";
 import { z as z11 } from "zod/v4";
 var openaiSpeechModelOptionsSchema = lazySchema10(
   () => zodSchema10(
@@ -3239,7 +3262,10 @@ function mapOpenAIResponseFinishReason({
 }

 // src/responses/openai-responses-api.ts
-import { lazySchema as lazySchema15, zodSchema as zodSchema15 } from "@ai-sdk/provider-utils";
+import {
+  lazySchema as lazySchema15,
+  zodSchema as zodSchema15
+} from "@ai-sdk/provider-utils";
 import { z as z17 } from "zod/v4";
 var jsonValueSchema = z17.lazy(
   () => z17.union([
@@ -4084,7 +4110,10 @@ var openaiResponsesResponseSchema = lazySchema15(
 );

 // src/responses/openai-responses-options.ts
-import { lazySchema as lazySchema16, zodSchema as zodSchema16 } from "@ai-sdk/provider-utils";
+import {
+  lazySchema as lazySchema16,
+  zodSchema as zodSchema16
+} from "@ai-sdk/provider-utils";
 import { z as z18 } from "zod/v4";
 var TOP_LOGPROBS_MAX = 20;
 var openaiResponsesReasoningModelIds = [