@ai-sdk/openai 3.0.17 → 3.0.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. package/CHANGELOG.md +21 -0
  2. package/dist/index.d.mts +20 -1
  3. package/dist/index.d.ts +20 -1
  4. package/dist/index.js +15 -17
  5. package/dist/index.js.map +1 -1
  6. package/dist/index.mjs +15 -17
  7. package/dist/index.mjs.map +1 -1
  8. package/dist/internal/index.d.mts +20 -1
  9. package/dist/internal/index.d.ts +20 -1
  10. package/dist/internal/index.js +14 -16
  11. package/dist/internal/index.js.map +1 -1
  12. package/dist/internal/index.mjs +14 -16
  13. package/dist/internal/index.mjs.map +1 -1
  14. package/docs/03-openai.mdx +67 -11
  15. package/package.json +8 -4
  16. package/src/index.ts +2 -0
  17. package/src/responses/openai-responses-language-model.ts +24 -26
  18. package/src/responses/openai-responses-provider-metadata.ts +23 -1
  19. package/src/chat/__fixtures__/azure-model-router.1.chunks.txt +0 -8
  20. package/src/chat/__snapshots__/openai-chat-language-model.test.ts.snap +0 -88
  21. package/src/chat/convert-to-openai-chat-messages.test.ts +0 -516
  22. package/src/chat/openai-chat-language-model.test.ts +0 -3496
  23. package/src/chat/openai-chat-prepare-tools.test.ts +0 -322
  24. package/src/completion/openai-completion-language-model.test.ts +0 -752
  25. package/src/embedding/__snapshots__/openai-embedding-model.test.ts.snap +0 -43
  26. package/src/embedding/openai-embedding-model.test.ts +0 -146
  27. package/src/image/openai-image-model.test.ts +0 -722
  28. package/src/openai-error.test.ts +0 -34
  29. package/src/openai-language-model-capabilities.test.ts +0 -93
  30. package/src/openai-provider.test.ts +0 -98
  31. package/src/responses/__fixtures__/openai-apply-patch-tool-delete.1.chunks.txt +0 -5
  32. package/src/responses/__fixtures__/openai-apply-patch-tool.1.chunks.txt +0 -38
  33. package/src/responses/__fixtures__/openai-apply-patch-tool.1.json +0 -69
  34. package/src/responses/__fixtures__/openai-code-interpreter-tool.1.chunks.txt +0 -393
  35. package/src/responses/__fixtures__/openai-code-interpreter-tool.1.json +0 -137
  36. package/src/responses/__fixtures__/openai-error.1.chunks.txt +0 -4
  37. package/src/responses/__fixtures__/openai-error.1.json +0 -8
  38. package/src/responses/__fixtures__/openai-file-search-tool.1.chunks.txt +0 -94
  39. package/src/responses/__fixtures__/openai-file-search-tool.1.json +0 -89
  40. package/src/responses/__fixtures__/openai-file-search-tool.2.chunks.txt +0 -93
  41. package/src/responses/__fixtures__/openai-file-search-tool.2.json +0 -112
  42. package/src/responses/__fixtures__/openai-image-generation-tool.1.chunks.txt +0 -16
  43. package/src/responses/__fixtures__/openai-image-generation-tool.1.json +0 -96
  44. package/src/responses/__fixtures__/openai-local-shell-tool.1.chunks.txt +0 -7
  45. package/src/responses/__fixtures__/openai-local-shell-tool.1.json +0 -70
  46. package/src/responses/__fixtures__/openai-mcp-tool-approval.1.chunks.txt +0 -11
  47. package/src/responses/__fixtures__/openai-mcp-tool-approval.1.json +0 -169
  48. package/src/responses/__fixtures__/openai-mcp-tool-approval.2.chunks.txt +0 -123
  49. package/src/responses/__fixtures__/openai-mcp-tool-approval.2.json +0 -176
  50. package/src/responses/__fixtures__/openai-mcp-tool-approval.3.chunks.txt +0 -11
  51. package/src/responses/__fixtures__/openai-mcp-tool-approval.3.json +0 -169
  52. package/src/responses/__fixtures__/openai-mcp-tool-approval.4.chunks.txt +0 -84
  53. package/src/responses/__fixtures__/openai-mcp-tool-approval.4.json +0 -182
  54. package/src/responses/__fixtures__/openai-mcp-tool.1.chunks.txt +0 -373
  55. package/src/responses/__fixtures__/openai-mcp-tool.1.json +0 -159
  56. package/src/responses/__fixtures__/openai-reasoning-encrypted-content.1.chunks.txt +0 -110
  57. package/src/responses/__fixtures__/openai-reasoning-encrypted-content.1.json +0 -117
  58. package/src/responses/__fixtures__/openai-shell-tool.1.chunks.txt +0 -182
  59. package/src/responses/__fixtures__/openai-shell-tool.1.json +0 -73
  60. package/src/responses/__fixtures__/openai-web-search-tool.1.chunks.txt +0 -185
  61. package/src/responses/__fixtures__/openai-web-search-tool.1.json +0 -266
  62. package/src/responses/__snapshots__/openai-responses-language-model.test.ts.snap +0 -10955
  63. package/src/responses/convert-to-openai-responses-input.test.ts +0 -3223
  64. package/src/responses/openai-responses-api.test.ts +0 -89
  65. package/src/responses/openai-responses-language-model.test.ts +0 -6927
  66. package/src/responses/openai-responses-prepare-tools.test.ts +0 -924
  67. package/src/speech/openai-speech-model.test.ts +0 -202
  68. package/src/tool/local-shell.test-d.ts +0 -20
  69. package/src/tool/web-search.test-d.ts +0 -13
  70. package/src/transcription/openai-transcription-model.test.ts +0 -507
package/CHANGELOG.md CHANGED
@@ -1,5 +1,26 @@
  # @ai-sdk/openai
 
+ ## 3.0.19
+
+ ### Patch Changes
+
+ - 04c89b1: Provide Responses API providerMetadata types at the message / reasoning level.
+
+   - Export the following types for use in client code:
+     - `OpenaiResponsesProviderMetadata`
+     - `OpenaiResponsesReasoningProviderMetadata`
+     - `AzureResponsesProviderMetadata`
+     - `AzureResponsesReasoningProviderMetadata`
+
+ ## 3.0.18
+
+ ### Patch Changes
+
+ - 4de5a1d: chore: excluded tests from src folder in npm package
+ - Updated dependencies [4de5a1d]
+   - @ai-sdk/provider@3.0.5
+   - @ai-sdk/provider-utils@4.0.9
+
  ## 3.0.17
 
  ### Patch Changes
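The 3.0.19 entry above exposes the Responses API providerMetadata shapes to consumers. As a rough illustration (not taken from the package docs), client code could import one of the newly exported types to narrow the loosely typed `providerMetadata` on a generation result; the `generateText` call and model id below are assumptions from the AI SDK call site, not part of this diff:

// Sketch only: `generateText` and the model id are assumptions, not part of this diff.
import { generateText } from 'ai';
import { openai, type OpenaiResponsesProviderMetadata } from '@ai-sdk/openai';

const result = await generateText({
  model: openai.responses('gpt-4o-mini'),
  prompt: 'Say hello.',
});

// providerMetadata is loosely typed; narrow it to the exported Responses shape.
const metadata = result.providerMetadata as
  | OpenaiResponsesProviderMetadata
  | undefined;

console.log(metadata?.openai.responseId);  // string | null | undefined
console.log(metadata?.openai.serviceTier); // string | undefined
console.log(metadata?.openai.logprobs);    // only present when logprobs were requested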
package/dist/index.d.mts CHANGED
@@ -506,6 +506,10 @@ declare const openaiResponsesChunkSchema: _ai_sdk_provider_utils.LazySchema<{
      param?: string | null | undefined;
    };
  }>;
+ type OpenAIResponsesChunk = InferSchema<typeof openaiResponsesChunkSchema>;
+ type OpenAIResponsesLogprobs = NonNullable<(OpenAIResponsesChunk & {
+     type: 'response.output_text.delta';
+ })['logprobs']> | null;
 
  /**
   * Type definitions for the apply_patch operations.
@@ -909,6 +913,21 @@ type OpenaiResponsesChunk = InferSchema<typeof openaiResponsesChunkSchema>;
  type ResponsesOutputTextAnnotationProviderMetadata = Extract<OpenaiResponsesChunk, {
      type: 'response.output_text.annotation.added';
  }>['annotation'];
+ type ResponsesProviderMetadata = {
+     responseId: string | null | undefined;
+     logprobs?: Array<OpenAIResponsesLogprobs>;
+     serviceTier?: string;
+ };
+ type ResponsesReasoningProviderMetadata = {
+     itemId: string;
+     reasoningEncryptedContent?: string | null;
+ };
+ type OpenaiResponsesReasoningProviderMetadata = {
+     openai: ResponsesReasoningProviderMetadata;
+ };
+ type OpenaiResponsesProviderMetadata = {
+     openai: ResponsesProviderMetadata;
+ };
  type ResponsesTextProviderMetadata = {
      itemId: string;
      annotations?: Array<ResponsesOutputTextAnnotationProviderMetadata>;
@@ -935,4 +954,4 @@ type OpenaiResponsesSourceDocumentProviderMetadata = {
 
  declare const VERSION: string;
 
- export { type OpenAIChatLanguageModelOptions, type OpenAIProvider, type OpenAIProviderSettings, type OpenAIResponsesProviderOptions, type OpenaiResponsesSourceDocumentProviderMetadata, type OpenaiResponsesTextProviderMetadata, VERSION, createOpenAI, openai };
+ export { type OpenAIChatLanguageModelOptions, type OpenAIProvider, type OpenAIProviderSettings, type OpenAIResponsesProviderOptions, type OpenaiResponsesProviderMetadata, type OpenaiResponsesReasoningProviderMetadata, type OpenaiResponsesSourceDocumentProviderMetadata, type OpenaiResponsesTextProviderMetadata, VERSION, createOpenAI, openai };
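Per the declarations above, the reasoning-level counterpart `OpenaiResponsesReasoningProviderMetadata` wraps `{ itemId, reasoningEncryptedContent? }` under the `openai` key. A type-level sketch of a hypothetical narrowing helper for a per-part metadata record follows; the helper name and guard are illustrative and not part of the package:

// Hypothetical helper (not part of the package): narrows an unknown per-part
// providerMetadata record to the reasoning shape exported in this release.
import type { OpenaiResponsesReasoningProviderMetadata } from '@ai-sdk/openai';

function asOpenAIReasoningMetadata(
  metadata: unknown,
): OpenaiResponsesReasoningProviderMetadata | undefined {
  if (
    typeof metadata === 'object' &&
    metadata !== null &&
    'openai' in metadata &&
    typeof (metadata as { openai?: { itemId?: unknown } }).openai?.itemId === 'string'
  ) {
    // Shape matches { openai: { itemId: string; reasoningEncryptedContent?: string | null } }.
    return metadata as OpenaiResponsesReasoningProviderMetadata;
  }
  return undefined;
}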
package/dist/index.d.ts CHANGED
@@ -506,6 +506,10 @@ declare const openaiResponsesChunkSchema: _ai_sdk_provider_utils.LazySchema<{
      param?: string | null | undefined;
    };
  }>;
+ type OpenAIResponsesChunk = InferSchema<typeof openaiResponsesChunkSchema>;
+ type OpenAIResponsesLogprobs = NonNullable<(OpenAIResponsesChunk & {
+     type: 'response.output_text.delta';
+ })['logprobs']> | null;
 
  /**
   * Type definitions for the apply_patch operations.
@@ -909,6 +913,21 @@ type OpenaiResponsesChunk = InferSchema<typeof openaiResponsesChunkSchema>;
  type ResponsesOutputTextAnnotationProviderMetadata = Extract<OpenaiResponsesChunk, {
      type: 'response.output_text.annotation.added';
  }>['annotation'];
+ type ResponsesProviderMetadata = {
+     responseId: string | null | undefined;
+     logprobs?: Array<OpenAIResponsesLogprobs>;
+     serviceTier?: string;
+ };
+ type ResponsesReasoningProviderMetadata = {
+     itemId: string;
+     reasoningEncryptedContent?: string | null;
+ };
+ type OpenaiResponsesReasoningProviderMetadata = {
+     openai: ResponsesReasoningProviderMetadata;
+ };
+ type OpenaiResponsesProviderMetadata = {
+     openai: ResponsesProviderMetadata;
+ };
  type ResponsesTextProviderMetadata = {
      itemId: string;
      annotations?: Array<ResponsesOutputTextAnnotationProviderMetadata>;
@@ -935,4 +954,4 @@ type OpenaiResponsesSourceDocumentProviderMetadata = {
 
  declare const VERSION: string;
 
- export { type OpenAIChatLanguageModelOptions, type OpenAIProvider, type OpenAIProviderSettings, type OpenAIResponsesProviderOptions, type OpenaiResponsesSourceDocumentProviderMetadata, type OpenaiResponsesTextProviderMetadata, VERSION, createOpenAI, openai };
+ export { type OpenAIChatLanguageModelOptions, type OpenAIProvider, type OpenAIProviderSettings, type OpenAIResponsesProviderOptions, type OpenaiResponsesProviderMetadata, type OpenaiResponsesReasoningProviderMetadata, type OpenaiResponsesSourceDocumentProviderMetadata, type OpenaiResponsesTextProviderMetadata, VERSION, createOpenAI, openai };
package/dist/index.js CHANGED
@@ -4622,14 +4622,12 @@ var OpenAIResponsesLanguageModel = class {
      }
    }
    const providerMetadata = {
-     [providerOptionsName]: { responseId: response.id }
+     [providerOptionsName]: {
+       responseId: response.id,
+       ...logprobs.length > 0 ? { logprobs } : {},
+       ...typeof response.service_tier === "string" ? { serviceTier: response.service_tier } : {}
+     }
    };
-   if (logprobs.length > 0) {
-     providerMetadata[providerOptionsName].logprobs = logprobs;
-   }
-   if (typeof response.service_tier === "string") {
-     providerMetadata[providerOptionsName].serviceTier = response.service_tier;
-   }
    const usage = response.usage;
    return {
      content,
@@ -5229,7 +5227,9 @@ var OpenAIResponsesLanguageModel = class {
    type: "reasoning-end",
    id: `${value.item_id}:${summaryIndex}`,
    providerMetadata: {
-     [providerOptionsName]: { itemId: value.item_id }
+     [providerOptionsName]: {
+       itemId: value.item_id
+     }
    }
  });
  activeReasoningPart.summaryParts[summaryIndex] = "concluded";
@@ -5263,7 +5263,9 @@ var OpenAIResponsesLanguageModel = class {
    type: "reasoning-end",
    id: `${value.item_id}:${value.summary_index}`,
    providerMetadata: {
-     [providerOptionsName]: { itemId: value.item_id }
+     [providerOptionsName]: {
+       itemId: value.item_id
+     }
    }
  });
  activeReasoning[value.item_id].summaryParts[value.summary_index] = "concluded";
@@ -5348,15 +5350,11 @@ var OpenAIResponsesLanguageModel = class {
  flush(controller) {
    const providerMetadata = {
      [providerOptionsName]: {
-       responseId
+       responseId,
+       ...logprobs.length > 0 ? { logprobs } : {},
+       ...serviceTier !== void 0 ? { serviceTier } : {}
      }
    };
-   if (logprobs.length > 0) {
-     providerMetadata[providerOptionsName].logprobs = logprobs;
-   }
-   if (serviceTier !== void 0) {
-     providerMetadata[providerOptionsName].serviceTier = serviceTier;
-   }
    controller.enqueue({
      type: "finish",
      finishReason,
@@ -5788,7 +5786,7 @@ var OpenAITranscriptionModel = class {
  };
 
  // src/version.ts
- var VERSION = true ? "3.0.17" : "0.0.0-test";
+ var VERSION = true ? "3.0.19" : "0.0.0-test";
 
  // src/openai-provider.ts
  function createOpenAI(options = {}) {
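The runtime changes above replace after-the-fact `if` assignments with conditional spreads when assembling `providerMetadata`, so optional keys are omitted entirely instead of being added by later mutation. A standalone sketch of that pattern with illustrative names (not the package's internals):

// Illustrative only: builds an object whose optional keys are included
// conditionally via spreads, mirroring the refactor in dist/index.js.
type ResponsesMetadata = {
  responseId: string | null;
  logprobs?: unknown[];
  serviceTier?: string;
};

function buildMetadata(
  responseId: string | null,
  logprobs: unknown[],
  serviceTier: string | undefined,
): ResponsesMetadata {
  return {
    responseId,
    // Spreading `{}` adds nothing, so the key is simply absent when the
    // condition is false rather than being set to `undefined`.
    ...(logprobs.length > 0 ? { logprobs } : {}),
    ...(serviceTier !== undefined ? { serviceTier } : {}),
  };
}

console.log(buildMetadata('resp_123', [], 'flex')); // { responseId: 'resp_123', serviceTier: 'flex' }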