@ai-sdk/openai 2.1.0-beta.1 → 2.1.0-beta.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +32 -0
- package/dist/index.d.mts +38 -5
- package/dist/index.d.ts +38 -5
- package/dist/index.js +22 -14
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +23 -15
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +7 -7
- package/dist/internal/index.d.ts +7 -7
- package/dist/internal/index.js +8 -8
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +6 -6
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,37 @@
 # @ai-sdk/openai
 
+## 2.1.0-beta.3
+
+### Patch Changes
+
+- 2e86082: feat(provider/openai): `OpenAIChatLanguageModelOptions` type
+
+  ```ts
+  import { openai, type OpenAIChatLanguageModelOptions } from '@ai-sdk/openai';
+  import { generateText } from 'ai';
+
+  await generateText({
+    model: openai.chat('gpt-4o'),
+    prompt: 'Invent a new holiday and describe its traditions.',
+    providerOptions: {
+      openai: {
+        user: 'user-123',
+      } satisfies OpenAIChatLanguageModelOptions,
+    },
+  });
+  ```
+
+## 2.1.0-beta.2
+
+### Patch Changes
+
+- 4920119: fix the "incomplete_details" key from nullable to nullish for openai compatibility
+- 0c4822d: feat: `EmbeddingModelV3`
+- 1cad0ab: feat: add provider version to user-agent header
+- Updated dependencies [0c4822d]
+  - @ai-sdk/provider@2.1.0-beta.1
+  - @ai-sdk/provider-utils@3.1.0-beta.2
+
 ## 2.1.0-beta.1
 
 ### Patch Changes
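The `EmbeddingModelV3` change is transparent when the embedding models are used through the `ai` package. A minimal sketch, assuming a compatible `ai` beta and the `text-embedding-3-small` model id; the input value is illustrative:

```ts
import { openai } from '@ai-sdk/openai';
import { embed } from 'ai';

// openai.embedding(...) now returns an EmbeddingModelV3<string>,
// but the call site is unchanged.
const { embedding } = await embed({
  model: openai.embedding('text-embedding-3-small'),
  value: 'sunny day at the beach',
});

console.log(embedding.length); // embedding dimension
```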
package/dist/index.d.mts
CHANGED
@@ -1,9 +1,40 @@
-import { ProviderV2, LanguageModelV2,
+import { ProviderV2, LanguageModelV2, EmbeddingModelV3, ImageModelV2, TranscriptionModelV2, SpeechModelV2 } from '@ai-sdk/provider';
 import * as _ai_sdk_provider_utils from '@ai-sdk/provider-utils';
 import { FetchFunction } from '@ai-sdk/provider-utils';
 import { z } from 'zod/v4';
 
 type OpenAIChatModelId = 'o1' | 'o1-2024-12-17' | 'o3-mini' | 'o3-mini-2025-01-31' | 'o3' | 'o3-2025-04-16' | 'gpt-4.1' | 'gpt-4.1-2025-04-14' | 'gpt-4.1-mini' | 'gpt-4.1-mini-2025-04-14' | 'gpt-4.1-nano' | 'gpt-4.1-nano-2025-04-14' | 'gpt-4o' | 'gpt-4o-2024-05-13' | 'gpt-4o-2024-08-06' | 'gpt-4o-2024-11-20' | 'gpt-4o-mini' | 'gpt-4o-mini-2024-07-18' | 'gpt-4-turbo' | 'gpt-4-turbo-2024-04-09' | 'gpt-4' | 'gpt-4-0613' | 'gpt-4.5-preview' | 'gpt-4.5-preview-2025-02-27' | 'gpt-3.5-turbo-0125' | 'gpt-3.5-turbo' | 'gpt-3.5-turbo-1106' | 'chatgpt-4o-latest' | 'gpt-5' | 'gpt-5-2025-08-07' | 'gpt-5-mini' | 'gpt-5-mini-2025-08-07' | 'gpt-5-nano' | 'gpt-5-nano-2025-08-07' | 'gpt-5-chat-latest' | (string & {});
+declare const openaiChatLanguageModelOptions: z.ZodObject<{
+    logitBias: z.ZodOptional<z.ZodRecord<z.ZodCoercedNumber<string>, z.ZodNumber>>;
+    logprobs: z.ZodOptional<z.ZodUnion<readonly [z.ZodBoolean, z.ZodNumber]>>;
+    parallelToolCalls: z.ZodOptional<z.ZodBoolean>;
+    user: z.ZodOptional<z.ZodString>;
+    reasoningEffort: z.ZodOptional<z.ZodEnum<{
+        minimal: "minimal";
+        low: "low";
+        medium: "medium";
+        high: "high";
+    }>>;
+    maxCompletionTokens: z.ZodOptional<z.ZodNumber>;
+    store: z.ZodOptional<z.ZodBoolean>;
+    metadata: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodString>>;
+    prediction: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodAny>>;
+    structuredOutputs: z.ZodOptional<z.ZodBoolean>;
+    serviceTier: z.ZodOptional<z.ZodEnum<{
+        auto: "auto";
+        flex: "flex";
+        priority: "priority";
+    }>>;
+    strictJsonSchema: z.ZodOptional<z.ZodBoolean>;
+    textVerbosity: z.ZodOptional<z.ZodEnum<{
+        low: "low";
+        medium: "medium";
+        high: "high";
+    }>>;
+    promptCacheKey: z.ZodOptional<z.ZodString>;
+    safetyIdentifier: z.ZodOptional<z.ZodString>;
+}, z.core.$strip>;
+type OpenAIChatLanguageModelOptions = z.infer<typeof openaiChatLanguageModelOptions>;
 
 type OpenAICompletionModelId = 'gpt-3.5-turbo-instruct' | (string & {});
 
@@ -234,15 +265,15 @@ interface OpenAIProvider extends ProviderV2 {
     /**
     Creates a model for text embeddings.
     */
-    embedding(modelId: OpenAIEmbeddingModelId):
+    embedding(modelId: OpenAIEmbeddingModelId): EmbeddingModelV3<string>;
     /**
     Creates a model for text embeddings.
     */
-    textEmbedding(modelId: OpenAIEmbeddingModelId):
+    textEmbedding(modelId: OpenAIEmbeddingModelId): EmbeddingModelV3<string>;
     /**
     Creates a model for text embeddings.
    */
-    textEmbeddingModel(modelId: OpenAIEmbeddingModelId):
+    textEmbeddingModel(modelId: OpenAIEmbeddingModelId): EmbeddingModelV3<string>;
     /**
     Creates a model for image generation.
     */
@@ -336,4 +367,6 @@ declare const openaiResponsesProviderOptionsSchema: z.ZodObject<{
 }, z.core.$strip>;
 type OpenAIResponsesProviderOptions = z.infer<typeof openaiResponsesProviderOptionsSchema>;
 
-
+declare const VERSION: string;
+
+export { type OpenAIChatLanguageModelOptions, type OpenAIProvider, type OpenAIProviderSettings, type OpenAIResponsesProviderOptions, VERSION, createOpenAI, openai };
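Beyond the inline `satisfies` usage shown in the changelog, the newly exported type can also describe a reusable options object. A minimal sketch, assuming a compatible `ai` beta; the field names come from the `openaiChatLanguageModelOptions` schema above, and the concrete values are illustrative:

```ts
import { openai, type OpenAIChatLanguageModelOptions } from '@ai-sdk/openai';
import { generateText } from 'ai';

// Reusable, type-checked provider options (values are hypothetical).
const chatOptions = {
  reasoningEffort: 'low',
  serviceTier: 'auto',
  textVerbosity: 'medium',
} satisfies OpenAIChatLanguageModelOptions;

await generateText({
  model: openai.chat('gpt-5'),
  prompt: 'Summarize the release notes.',
  providerOptions: { openai: chatOptions },
});
```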
package/dist/index.d.ts
CHANGED
@@ -1,9 +1,40 @@
-import { ProviderV2, LanguageModelV2,
+import { ProviderV2, LanguageModelV2, EmbeddingModelV3, ImageModelV2, TranscriptionModelV2, SpeechModelV2 } from '@ai-sdk/provider';
 import * as _ai_sdk_provider_utils from '@ai-sdk/provider-utils';
 import { FetchFunction } from '@ai-sdk/provider-utils';
 import { z } from 'zod/v4';
 
 type OpenAIChatModelId = 'o1' | 'o1-2024-12-17' | 'o3-mini' | 'o3-mini-2025-01-31' | 'o3' | 'o3-2025-04-16' | 'gpt-4.1' | 'gpt-4.1-2025-04-14' | 'gpt-4.1-mini' | 'gpt-4.1-mini-2025-04-14' | 'gpt-4.1-nano' | 'gpt-4.1-nano-2025-04-14' | 'gpt-4o' | 'gpt-4o-2024-05-13' | 'gpt-4o-2024-08-06' | 'gpt-4o-2024-11-20' | 'gpt-4o-mini' | 'gpt-4o-mini-2024-07-18' | 'gpt-4-turbo' | 'gpt-4-turbo-2024-04-09' | 'gpt-4' | 'gpt-4-0613' | 'gpt-4.5-preview' | 'gpt-4.5-preview-2025-02-27' | 'gpt-3.5-turbo-0125' | 'gpt-3.5-turbo' | 'gpt-3.5-turbo-1106' | 'chatgpt-4o-latest' | 'gpt-5' | 'gpt-5-2025-08-07' | 'gpt-5-mini' | 'gpt-5-mini-2025-08-07' | 'gpt-5-nano' | 'gpt-5-nano-2025-08-07' | 'gpt-5-chat-latest' | (string & {});
+declare const openaiChatLanguageModelOptions: z.ZodObject<{
+    logitBias: z.ZodOptional<z.ZodRecord<z.ZodCoercedNumber<string>, z.ZodNumber>>;
+    logprobs: z.ZodOptional<z.ZodUnion<readonly [z.ZodBoolean, z.ZodNumber]>>;
+    parallelToolCalls: z.ZodOptional<z.ZodBoolean>;
+    user: z.ZodOptional<z.ZodString>;
+    reasoningEffort: z.ZodOptional<z.ZodEnum<{
+        minimal: "minimal";
+        low: "low";
+        medium: "medium";
+        high: "high";
+    }>>;
+    maxCompletionTokens: z.ZodOptional<z.ZodNumber>;
+    store: z.ZodOptional<z.ZodBoolean>;
+    metadata: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodString>>;
+    prediction: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodAny>>;
+    structuredOutputs: z.ZodOptional<z.ZodBoolean>;
+    serviceTier: z.ZodOptional<z.ZodEnum<{
+        auto: "auto";
+        flex: "flex";
+        priority: "priority";
+    }>>;
+    strictJsonSchema: z.ZodOptional<z.ZodBoolean>;
+    textVerbosity: z.ZodOptional<z.ZodEnum<{
+        low: "low";
+        medium: "medium";
+        high: "high";
+    }>>;
+    promptCacheKey: z.ZodOptional<z.ZodString>;
+    safetyIdentifier: z.ZodOptional<z.ZodString>;
+}, z.core.$strip>;
+type OpenAIChatLanguageModelOptions = z.infer<typeof openaiChatLanguageModelOptions>;
 
 type OpenAICompletionModelId = 'gpt-3.5-turbo-instruct' | (string & {});
 
@@ -234,15 +265,15 @@ interface OpenAIProvider extends ProviderV2 {
     /**
     Creates a model for text embeddings.
     */
-    embedding(modelId: OpenAIEmbeddingModelId):
+    embedding(modelId: OpenAIEmbeddingModelId): EmbeddingModelV3<string>;
     /**
     Creates a model for text embeddings.
     */
-    textEmbedding(modelId: OpenAIEmbeddingModelId):
+    textEmbedding(modelId: OpenAIEmbeddingModelId): EmbeddingModelV3<string>;
     /**
     Creates a model for text embeddings.
    */
-    textEmbeddingModel(modelId: OpenAIEmbeddingModelId):
+    textEmbeddingModel(modelId: OpenAIEmbeddingModelId): EmbeddingModelV3<string>;
     /**
     Creates a model for image generation.
     */
@@ -336,4 +367,6 @@ declare const openaiResponsesProviderOptionsSchema: z.ZodObject<{
 }, z.core.$strip>;
 type OpenAIResponsesProviderOptions = z.infer<typeof openaiResponsesProviderOptionsSchema>;
 
-
+declare const VERSION: string;
+
+export { type OpenAIChatLanguageModelOptions, type OpenAIProvider, type OpenAIProviderSettings, type OpenAIResponsesProviderOptions, VERSION, createOpenAI, openai };
package/dist/index.js
CHANGED
@@ -20,6 +20,7 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
 // src/index.ts
 var src_exports = {};
 __export(src_exports, {
+  VERSION: () => VERSION,
   createOpenAI: () => createOpenAI,
   openai: () => openai
 });
@@ -262,7 +263,7 @@ function mapOpenAIFinishReason(finishReason) {
 
 // src/chat/openai-chat-options.ts
 var import_v42 = require("zod/v4");
-var 
+var openaiChatLanguageModelOptions = import_v42.z.object({
   /**
    * Modify the likelihood of specified tokens appearing in the completion.
    *
@@ -444,7 +445,7 @@ var OpenAIChatLanguageModel = class {
     const openaiOptions = (_a = await (0, import_provider_utils3.parseProviderOptions)({
       provider: "openai",
       providerOptions,
-      schema: 
+      schema: openaiChatLanguageModelOptions
     })) != null ? _a : {};
     const structuredOutputs = (_b = openaiOptions.structuredOutputs) != null ? _b : true;
     if (topK != null) {
@@ -1516,7 +1517,7 @@ var openaiEmbeddingProviderOptions = import_v46.z.object({
 // src/embedding/openai-embedding-model.ts
 var OpenAIEmbeddingModel = class {
   constructor(modelId, config) {
-    this.specificationVersion = "
+    this.specificationVersion = "v3";
     this.maxEmbeddingsPerCall = 2048;
     this.supportsParallelCalls = true;
     this.modelId = modelId;
@@ -2658,7 +2659,7 @@ var OpenAIResponsesLanguageModel = class {
         ])
       ),
       service_tier: import_v415.z.string().nullish(),
-      incomplete_details: import_v415.z.object({ reason: import_v415.z.string() }).
+      incomplete_details: import_v415.z.object({ reason: import_v415.z.string() }).nullish(),
       usage: usageSchema2
     })
   ),
@@ -3868,21 +3869,27 @@ var openaiTranscriptionResponseSchema = import_v418.z.object({
   ).nullish()
 });
 
+// src/version.ts
+var VERSION = true ? "2.1.0-beta.3" : "0.0.0-test";
+
 // src/openai-provider.ts
 function createOpenAI(options = {}) {
   var _a, _b;
   const baseURL = (_a = (0, import_provider_utils16.withoutTrailingSlash)(options.baseURL)) != null ? _a : "https://api.openai.com/v1";
   const providerName = (_b = options.name) != null ? _b : "openai";
-  const getHeaders = () => (
-
-
-
-
-
-
-
-
-
+  const getHeaders = () => (0, import_provider_utils16.withUserAgentSuffix)(
+    {
+      Authorization: `Bearer ${(0, import_provider_utils16.loadApiKey)({
+        apiKey: options.apiKey,
+        environmentVariableName: "OPENAI_API_KEY",
+        description: "OpenAI"
+      })}`,
+      "OpenAI-Organization": options.organization,
+      "OpenAI-Project": options.project,
+      ...options.headers
+    },
+    `ai-sdk/openai/${VERSION}`
+  );
   const createChatModel = (modelId) => new OpenAIChatLanguageModel(modelId, {
     provider: `${providerName}.chat`,
     url: ({ path }) => `${baseURL}${path}`,
@@ -3958,6 +3965,7 @@ function createOpenAI(options = {}) {
 var openai = createOpenAI();
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
+  VERSION,
   createOpenAI,
   openai
 });
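The new `VERSION` export and the `withUserAgentSuffix` wrapper in `getHeaders` mean requests from this provider now carry an `ai-sdk/openai/<version>` user-agent suffix. A minimal sketch of how a consumer might see this, assuming a compatible `ai` beta; the settings values are placeholders:

```ts
import { VERSION, createOpenAI } from '@ai-sdk/openai';

// VERSION is the published package version, e.g. "2.1.0-beta.3".
console.log(`user-agent suffix: ai-sdk/openai/${VERSION}`);

// Custom settings and headers are merged first; the version suffix is then
// appended by withUserAgentSuffix (see getHeaders above).
const openai = createOpenAI({
  organization: 'org-placeholder', // hypothetical organization id
  headers: { 'x-request-source': 'docs-example' }, // hypothetical custom header
});
```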