@ai-sdk/openai 3.0.14 → 3.0.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (110)
  1. package/CHANGELOG.md +6 -0
  2. package/dist/index.js +1 -1
  3. package/dist/index.mjs +1 -1
  4. package/package.json +6 -5
  5. package/src/chat/__fixtures__/azure-model-router.1.chunks.txt +8 -0
  6. package/src/chat/__snapshots__/openai-chat-language-model.test.ts.snap +88 -0
  7. package/src/chat/convert-openai-chat-usage.ts +57 -0
  8. package/src/chat/convert-to-openai-chat-messages.test.ts +516 -0
  9. package/src/chat/convert-to-openai-chat-messages.ts +225 -0
  10. package/src/chat/get-response-metadata.ts +15 -0
  11. package/src/chat/map-openai-finish-reason.ts +19 -0
  12. package/src/chat/openai-chat-api.ts +198 -0
  13. package/src/chat/openai-chat-language-model.test.ts +3496 -0
  14. package/src/chat/openai-chat-language-model.ts +700 -0
  15. package/src/chat/openai-chat-options.ts +186 -0
  16. package/src/chat/openai-chat-prepare-tools.test.ts +322 -0
  17. package/src/chat/openai-chat-prepare-tools.ts +84 -0
  18. package/src/chat/openai-chat-prompt.ts +70 -0
  19. package/src/completion/convert-openai-completion-usage.ts +46 -0
  20. package/src/completion/convert-to-openai-completion-prompt.ts +93 -0
  21. package/src/completion/get-response-metadata.ts +15 -0
  22. package/src/completion/map-openai-finish-reason.ts +19 -0
  23. package/src/completion/openai-completion-api.ts +81 -0
  24. package/src/completion/openai-completion-language-model.test.ts +752 -0
  25. package/src/completion/openai-completion-language-model.ts +336 -0
  26. package/src/completion/openai-completion-options.ts +58 -0
  27. package/src/embedding/__snapshots__/openai-embedding-model.test.ts.snap +43 -0
  28. package/src/embedding/openai-embedding-api.ts +13 -0
  29. package/src/embedding/openai-embedding-model.test.ts +146 -0
  30. package/src/embedding/openai-embedding-model.ts +95 -0
  31. package/src/embedding/openai-embedding-options.ts +30 -0
  32. package/src/image/openai-image-api.ts +35 -0
  33. package/src/image/openai-image-model.test.ts +722 -0
  34. package/src/image/openai-image-model.ts +305 -0
  35. package/src/image/openai-image-options.ts +28 -0
  36. package/src/index.ts +9 -0
  37. package/src/internal/index.ts +19 -0
  38. package/src/openai-config.ts +18 -0
  39. package/src/openai-error.test.ts +34 -0
  40. package/src/openai-error.ts +22 -0
  41. package/src/openai-language-model-capabilities.test.ts +93 -0
  42. package/src/openai-language-model-capabilities.ts +54 -0
  43. package/src/openai-provider.test.ts +98 -0
  44. package/src/openai-provider.ts +270 -0
  45. package/src/openai-tools.ts +114 -0
  46. package/src/responses/__fixtures__/openai-apply-patch-tool-delete.1.chunks.txt +5 -0
  47. package/src/responses/__fixtures__/openai-apply-patch-tool.1.chunks.txt +38 -0
  48. package/src/responses/__fixtures__/openai-apply-patch-tool.1.json +69 -0
  49. package/src/responses/__fixtures__/openai-code-interpreter-tool.1.chunks.txt +393 -0
  50. package/src/responses/__fixtures__/openai-code-interpreter-tool.1.json +137 -0
  51. package/src/responses/__fixtures__/openai-error.1.chunks.txt +4 -0
  52. package/src/responses/__fixtures__/openai-error.1.json +8 -0
  53. package/src/responses/__fixtures__/openai-file-search-tool.1.chunks.txt +94 -0
  54. package/src/responses/__fixtures__/openai-file-search-tool.1.json +89 -0
  55. package/src/responses/__fixtures__/openai-file-search-tool.2.chunks.txt +93 -0
  56. package/src/responses/__fixtures__/openai-file-search-tool.2.json +112 -0
  57. package/src/responses/__fixtures__/openai-image-generation-tool.1.chunks.txt +16 -0
  58. package/src/responses/__fixtures__/openai-image-generation-tool.1.json +96 -0
  59. package/src/responses/__fixtures__/openai-local-shell-tool.1.chunks.txt +7 -0
  60. package/src/responses/__fixtures__/openai-local-shell-tool.1.json +70 -0
  61. package/src/responses/__fixtures__/openai-mcp-tool-approval.1.chunks.txt +11 -0
  62. package/src/responses/__fixtures__/openai-mcp-tool-approval.1.json +169 -0
  63. package/src/responses/__fixtures__/openai-mcp-tool-approval.2.chunks.txt +123 -0
  64. package/src/responses/__fixtures__/openai-mcp-tool-approval.2.json +176 -0
  65. package/src/responses/__fixtures__/openai-mcp-tool-approval.3.chunks.txt +11 -0
  66. package/src/responses/__fixtures__/openai-mcp-tool-approval.3.json +169 -0
  67. package/src/responses/__fixtures__/openai-mcp-tool-approval.4.chunks.txt +84 -0
  68. package/src/responses/__fixtures__/openai-mcp-tool-approval.4.json +182 -0
  69. package/src/responses/__fixtures__/openai-mcp-tool.1.chunks.txt +373 -0
  70. package/src/responses/__fixtures__/openai-mcp-tool.1.json +159 -0
  71. package/src/responses/__fixtures__/openai-reasoning-encrypted-content.1.chunks.txt +110 -0
  72. package/src/responses/__fixtures__/openai-reasoning-encrypted-content.1.json +117 -0
  73. package/src/responses/__fixtures__/openai-shell-tool.1.chunks.txt +182 -0
  74. package/src/responses/__fixtures__/openai-shell-tool.1.json +73 -0
  75. package/src/responses/__fixtures__/openai-web-search-tool.1.chunks.txt +185 -0
  76. package/src/responses/__fixtures__/openai-web-search-tool.1.json +266 -0
  77. package/src/responses/__snapshots__/openai-responses-language-model.test.ts.snap +10955 -0
  78. package/src/responses/convert-openai-responses-usage.ts +53 -0
  79. package/src/responses/convert-to-openai-responses-input.test.ts +2976 -0
  80. package/src/responses/convert-to-openai-responses-input.ts +578 -0
  81. package/src/responses/map-openai-responses-finish-reason.ts +22 -0
  82. package/src/responses/openai-responses-api.test.ts +89 -0
  83. package/src/responses/openai-responses-api.ts +1086 -0
  84. package/src/responses/openai-responses-language-model.test.ts +6927 -0
  85. package/src/responses/openai-responses-language-model.ts +1932 -0
  86. package/src/responses/openai-responses-options.ts +312 -0
  87. package/src/responses/openai-responses-prepare-tools.test.ts +924 -0
  88. package/src/responses/openai-responses-prepare-tools.ts +264 -0
  89. package/src/responses/openai-responses-provider-metadata.ts +39 -0
  90. package/src/speech/openai-speech-api.ts +38 -0
  91. package/src/speech/openai-speech-model.test.ts +202 -0
  92. package/src/speech/openai-speech-model.ts +137 -0
  93. package/src/speech/openai-speech-options.ts +22 -0
  94. package/src/tool/apply-patch.ts +141 -0
  95. package/src/tool/code-interpreter.ts +104 -0
  96. package/src/tool/file-search.ts +145 -0
  97. package/src/tool/image-generation.ts +126 -0
  98. package/src/tool/local-shell.test-d.ts +20 -0
  99. package/src/tool/local-shell.ts +72 -0
  100. package/src/tool/mcp.ts +125 -0
  101. package/src/tool/shell.ts +85 -0
  102. package/src/tool/web-search-preview.ts +139 -0
  103. package/src/tool/web-search.test-d.ts +13 -0
  104. package/src/tool/web-search.ts +179 -0
  105. package/src/transcription/openai-transcription-api.ts +37 -0
  106. package/src/transcription/openai-transcription-model.test.ts +507 -0
  107. package/src/transcription/openai-transcription-model.ts +232 -0
  108. package/src/transcription/openai-transcription-options.ts +50 -0
  109. package/src/transcription/transcription-test.mp3 +0 -0
  110. package/src/version.ts +6 -0
package/CHANGELOG.md CHANGED
@@ -1,5 +1,11 @@
1
1
  # @ai-sdk/openai
2
2
 
3
+ ## 3.0.15
4
+
5
+ ### Patch Changes
6
+
7
+ - 8dc54db: chore: add src folders to package bundle
8
+
3
9
  ## 3.0.14
4
10
 
5
11
  ### Patch Changes
package/dist/index.js CHANGED
@@ -5774,7 +5774,7 @@ var OpenAITranscriptionModel = class {
5774
5774
  };
5775
5775
 
5776
5776
  // src/version.ts
5777
- var VERSION = true ? "3.0.14" : "0.0.0-test";
5777
+ var VERSION = true ? "3.0.15" : "0.0.0-test";
5778
5778
 
5779
5779
  // src/openai-provider.ts
5780
5780
  function createOpenAI(options = {}) {
package/dist/index.mjs CHANGED
@@ -5856,7 +5856,7 @@ var OpenAITranscriptionModel = class {
5856
5856
  };
5857
5857
 
5858
5858
  // src/version.ts
5859
- var VERSION = true ? "3.0.14" : "0.0.0-test";
5859
+ var VERSION = true ? "3.0.15" : "0.0.0-test";
5860
5860
 
5861
5861
  // src/openai-provider.ts
5862
5862
  function createOpenAI(options = {}) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@ai-sdk/openai",
3
- "version": "3.0.14",
3
+ "version": "3.0.15",
4
4
  "license": "Apache-2.0",
5
5
  "sideEffects": false,
6
6
  "main": "./dist/index.js",
@@ -8,6 +8,7 @@
8
8
  "types": "./dist/index.d.ts",
9
9
  "files": [
10
10
  "dist/**/*",
11
+ "src",
11
12
  "CHANGELOG.md",
12
13
  "README.md",
13
14
  "internal.d.ts"
@@ -27,16 +28,16 @@
27
28
  }
28
29
  },
29
30
  "dependencies": {
30
- "@ai-sdk/provider": "3.0.4",
31
- "@ai-sdk/provider-utils": "4.0.8"
31
+ "@ai-sdk/provider-utils": "4.0.8",
32
+ "@ai-sdk/provider": "3.0.4"
32
33
  },
33
34
  "devDependencies": {
34
35
  "@types/node": "20.17.24",
35
36
  "tsup": "^8",
36
37
  "typescript": "5.8.3",
37
38
  "zod": "3.25.76",
38
- "@ai-sdk/test-server": "1.0.1",
39
- "@vercel/ai-tsconfig": "0.0.0"
39
+ "@vercel/ai-tsconfig": "0.0.0",
40
+ "@ai-sdk/test-server": "1.0.2"
40
41
  },
41
42
  "peerDependencies": {
42
43
  "zod": "^3.25.76 || ^4.1.8"
@@ -0,0 +1,8 @@
1
+ {"choices":[],"created":0,"id":"","model":"","object":"","prompt_filter_results":[{"prompt_index":0,"content_filter_results":{"hate":{"filtered":false,"severity":"safe"},"jailbreak":{"filtered":false,"detected":false},"self_harm":{"filtered":false,"severity":"safe"},"sexual":{"filtered":false,"severity":"safe"},"violence":{"filtered":false,"severity":"safe"}}}]}
2
+ {"choices":[{"content_filter_results":{},"delta":{"content":"","refusal":null,"role":"assistant"},"finish_reason":null,"index":0,"logprobs":null}],"created":1762317021,"id":"chatcmpl-CYPS1lijGoK8gd9lYzY3r9Sx50nbt","model":"gpt-5-nano-2025-08-07","obfuscation":"D3WbtIxo1Q2j1Q","object":"chat.completion.chunk","system_fingerprint":null,"usage":null}
3
+ {"choices":[{"content_filter_results":{"hate":{"filtered":false,"severity":"safe"},"self_harm":{"filtered":false,"severity":"safe"},"sexual":{"filtered":false,"severity":"safe"},"violence":{"filtered":false,"severity":"safe"}},"delta":{"content":"Capital"},"finish_reason":null,"index":0,"logprobs":null}],"created":1762317021,"id":"chatcmpl-CYPS1lijGoK8gd9lYzY3r9Sx50nbt","model":"gpt-5-nano-2025-08-07","obfuscation":"NNpA6Dj2U","object":"chat.completion.chunk","system_fingerprint":null,"usage":null}
4
+ {"choices":[{"content_filter_results":{"hate":{"filtered":false,"severity":"safe"},"self_harm":{"filtered":false,"severity":"safe"},"sexual":{"filtered":false,"severity":"safe"},"violence":{"filtered":false,"severity":"safe"}},"delta":{"content":" of"},"finish_reason":null,"index":0,"logprobs":null}],"created":1762317021,"id":"chatcmpl-CYPS1lijGoK8gd9lYzY3r9Sx50nbt","model":"gpt-5-nano-2025-08-07","obfuscation":"etvV32yk5dbxb","object":"chat.completion.chunk","system_fingerprint":null,"usage":null}
5
+ {"choices":[{"content_filter_results":{"hate":{"filtered":false,"severity":"safe"},"self_harm":{"filtered":false,"severity":"safe"},"sexual":{"filtered":false,"severity":"safe"},"violence":{"filtered":false,"severity":"safe"}},"delta":{"content":" Denmark"},"finish_reason":null,"index":0,"logprobs":null}],"created":1762317021,"id":"chatcmpl-CYPS1lijGoK8gd9lYzY3r9Sx50nbt","model":"gpt-5-nano-2025-08-07","obfuscation":"iDOuV7Jz","object":"chat.completion.chunk","system_fingerprint":null,"usage":null}
6
+ {"choices":[{"content_filter_results":{"hate":{"filtered":false,"severity":"safe"},"self_harm":{"filtered":false,"severity":"safe"},"sexual":{"filtered":false,"severity":"safe"},"violence":{"filtered":false,"severity":"safe"}},"delta":{"content":"."},"finish_reason":null,"index":0,"logprobs":null}],"created":1762317021,"id":"chatcmpl-CYPS1lijGoK8gd9lYzY3r9Sx50nbt","model":"gpt-5-nano-2025-08-07","obfuscation":"ywLH2r1kcaeOOkq","object":"chat.completion.chunk","system_fingerprint":null,"usage":null}
7
+ {"choices":[{"content_filter_results":{},"delta":{},"finish_reason":"stop","index":0,"logprobs":null}],"created":1762317021,"id":"chatcmpl-CYPS1lijGoK8gd9lYzY3r9Sx50nbt","model":"gpt-5-nano-2025-08-07","obfuscation":"Zarov0xJhP","object":"chat.completion.chunk","system_fingerprint":null,"usage":null}
8
+ {"choices":[],"created":1762317021,"id":"chatcmpl-CYPS1lijGoK8gd9lYzY3r9Sx50nbt","model":"gpt-5-nano-2025-08-07","obfuscation":"DjqQ9RbEQMJ3PX","object":"chat.completion.chunk","system_fingerprint":null,"usage":{"completion_tokens":78,"completion_tokens_details":{"accepted_prediction_tokens":0,"audio_tokens":0,"reasoning_tokens":64,"rejected_prediction_tokens":0},"prompt_tokens":15,"prompt_tokens_details":{"audio_tokens":0,"cached_tokens":0},"total_tokens":93}}
@@ -0,0 +1,88 @@
1
+ // Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
2
+
3
+ exports[`doStream > should set .modelId for model-router request 1`] = `
4
+ [
5
+ {
6
+ "type": "stream-start",
7
+ "warnings": [],
8
+ },
9
+ {
10
+ "id": "chatcmpl-CYPS1lijGoK8gd9lYzY3r9Sx50nbt",
11
+ "modelId": "gpt-5-nano-2025-08-07",
12
+ "timestamp": 2025-11-05T04:30:21.000Z,
13
+ "type": "response-metadata",
14
+ },
15
+ {
16
+ "id": "0",
17
+ "type": "text-start",
18
+ },
19
+ {
20
+ "delta": "",
21
+ "id": "0",
22
+ "type": "text-delta",
23
+ },
24
+ {
25
+ "delta": "Capital",
26
+ "id": "0",
27
+ "type": "text-delta",
28
+ },
29
+ {
30
+ "delta": " of",
31
+ "id": "0",
32
+ "type": "text-delta",
33
+ },
34
+ {
35
+ "delta": " Denmark",
36
+ "id": "0",
37
+ "type": "text-delta",
38
+ },
39
+ {
40
+ "delta": ".",
41
+ "id": "0",
42
+ "type": "text-delta",
43
+ },
44
+ {
45
+ "id": "0",
46
+ "type": "text-end",
47
+ },
48
+ {
49
+ "finishReason": {
50
+ "raw": "stop",
51
+ "unified": "stop",
52
+ },
53
+ "providerMetadata": {
54
+ "openai": {
55
+ "acceptedPredictionTokens": 0,
56
+ "rejectedPredictionTokens": 0,
57
+ },
58
+ },
59
+ "type": "finish",
60
+ "usage": {
61
+ "inputTokens": {
62
+ "cacheRead": 0,
63
+ "cacheWrite": undefined,
64
+ "noCache": 15,
65
+ "total": 15,
66
+ },
67
+ "outputTokens": {
68
+ "reasoning": 64,
69
+ "text": 14,
70
+ "total": 78,
71
+ },
72
+ "raw": {
73
+ "completion_tokens": 78,
74
+ "completion_tokens_details": {
75
+ "accepted_prediction_tokens": 0,
76
+ "reasoning_tokens": 64,
77
+ "rejected_prediction_tokens": 0,
78
+ },
79
+ "prompt_tokens": 15,
80
+ "prompt_tokens_details": {
81
+ "cached_tokens": 0,
82
+ },
83
+ "total_tokens": 93,
84
+ },
85
+ },
86
+ },
87
+ ]
88
+ `;
@@ -0,0 +1,57 @@
1
+ import { LanguageModelV3Usage } from '@ai-sdk/provider';
2
+
3
+ export type OpenAIChatUsage = {
4
+ prompt_tokens?: number | null;
5
+ completion_tokens?: number | null;
6
+ total_tokens?: number | null;
7
+ prompt_tokens_details?: {
8
+ cached_tokens?: number | null;
9
+ } | null;
10
+ completion_tokens_details?: {
11
+ reasoning_tokens?: number | null;
12
+ accepted_prediction_tokens?: number | null;
13
+ rejected_prediction_tokens?: number | null;
14
+ } | null;
15
+ };
16
+
17
+ export function convertOpenAIChatUsage(
18
+ usage: OpenAIChatUsage | undefined | null,
19
+ ): LanguageModelV3Usage {
20
+ if (usage == null) {
21
+ return {
22
+ inputTokens: {
23
+ total: undefined,
24
+ noCache: undefined,
25
+ cacheRead: undefined,
26
+ cacheWrite: undefined,
27
+ },
28
+ outputTokens: {
29
+ total: undefined,
30
+ text: undefined,
31
+ reasoning: undefined,
32
+ },
33
+ raw: undefined,
34
+ };
35
+ }
36
+
37
+ const promptTokens = usage.prompt_tokens ?? 0;
38
+ const completionTokens = usage.completion_tokens ?? 0;
39
+ const cachedTokens = usage.prompt_tokens_details?.cached_tokens ?? 0;
40
+ const reasoningTokens =
41
+ usage.completion_tokens_details?.reasoning_tokens ?? 0;
42
+
43
+ return {
44
+ inputTokens: {
45
+ total: promptTokens,
46
+ noCache: promptTokens - cachedTokens,
47
+ cacheRead: cachedTokens,
48
+ cacheWrite: undefined,
49
+ },
50
+ outputTokens: {
51
+ total: completionTokens,
52
+ text: completionTokens - reasoningTokens,
53
+ reasoning: reasoningTokens,
54
+ },
55
+ raw: usage,
56
+ };
57
+ }