@ai-sdk/openai 3.0.0-beta.74 → 3.0.0-beta.76
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.d.mts +0 -1
- package/dist/index.d.ts +0 -1
- package/dist/index.js +24 -29
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +24 -29
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +0 -1
- package/dist/internal/index.d.ts +0 -1
- package/dist/internal/index.js +23 -28
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +23 -28
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,17 @@
 # @ai-sdk/openai
 
+## 3.0.0-beta.76
+
+### Patch Changes
+
+- 88edc28: feat (provider/openai): include more image generation response metadata
+
+## 3.0.0-beta.75
+
+### Patch Changes
+
+- 73d9883: chore(openai): enable strict json by default
+
 ## 3.0.0-beta.74
 
 ### Patch Changes
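The beta.76 entry surfaces additional fields from the image generation response through provider metadata (see the OpenAIImageModel hunks further down). A minimal consumption sketch, assuming the experimental_generateImage helper from the "ai" package and the "gpt-image-1" model id; the exact helper name may differ between AI SDK releases:

import { openai } from "@ai-sdk/openai";
import { experimental_generateImage as generateImage } from "ai";

const { providerMetadata } = await generateImage({
  model: openai.image("gpt-image-1"),
  prompt: "A watercolor fox at dusk",
});

// Each entry now carries the extra response metadata in addition to revisedPrompt.
for (const image of providerMetadata.openai?.images ?? []) {
  console.log(image); // { revisedPrompt?, created?, size?, quality?, background?, outputFormat? }
}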
package/dist/index.d.mts
CHANGED
@@ -14,7 +14,6 @@ declare const openaiChatLanguageModelOptions: _ai_sdk_provider_utils.LazySchema<
     store?: boolean | undefined;
     metadata?: Record<string, string> | undefined;
     prediction?: Record<string, any> | undefined;
-    structuredOutputs?: boolean | undefined;
     serviceTier?: "default" | "auto" | "flex" | "priority" | undefined;
     strictJsonSchema?: boolean | undefined;
     textVerbosity?: "low" | "medium" | "high" | undefined;
package/dist/index.d.ts
CHANGED
@@ -14,7 +14,6 @@ declare const openaiChatLanguageModelOptions: _ai_sdk_provider_utils.LazySchema<
     store?: boolean | undefined;
     metadata?: Record<string, string> | undefined;
     prediction?: Record<string, any> | undefined;
-    structuredOutputs?: boolean | undefined;
     serviceTier?: "default" | "auto" | "flex" | "priority" | undefined;
     strictJsonSchema?: boolean | undefined;
     textVerbosity?: "low" | "medium" | "high" | undefined;
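Both declaration files drop structuredOutputs from the chat provider options; strict JSON schema handling is now controlled only by strictJsonSchema, which defaults to true (matching the beta.75 changelog entry). A hedged sketch of opting out per call, assuming the generateText helper from the "ai" package:

import { openai } from "@ai-sdk/openai";
import { generateText } from "ai";

const { text } = await generateText({
  model: openai("gpt-4o"),
  prompt: "List three prime numbers as JSON.",
  providerOptions: {
    openai: {
      // structuredOutputs is no longer an accepted option in these builds;
      // strict JSON schema validation is on by default and is disabled here per call.
      strictJsonSchema: false,
    },
  },
});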
package/dist/index.js
CHANGED
@@ -463,12 +463,6 @@ var openaiChatLanguageModelOptions = (0, import_provider_utils4.lazySchema)(
        * Parameters for prediction mode.
        */
       prediction: import_v43.z.record(import_v43.z.string(), import_v43.z.any()).optional(),
-      /**
-       * Whether to use structured outputs.
-       *
-       * @default true
-       */
-      structuredOutputs: import_v43.z.boolean().optional(),
       /**
        * Service tier for the request.
        * - 'auto': Default service tier. The request will be processed with the service tier configured in the
@@ -483,7 +477,7 @@ var openaiChatLanguageModelOptions = (0, import_provider_utils4.lazySchema)(
       /**
        * Whether to use strict JSON schema validation.
        *
-       * @default
+       * @default true
        */
       strictJsonSchema: import_v43.z.boolean().optional(),
       /**
@@ -522,7 +516,6 @@ var import_provider2 = require("@ai-sdk/provider");
 function prepareChatTools({
   tools,
   toolChoice,
-  structuredOutputs,
   strictJsonSchema
 }) {
   tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
@@ -540,7 +533,7 @@ function prepareChatTools({
           name: tool.name,
           description: tool.description,
           parameters: tool.inputSchema,
-          strict:
+          strict: strictJsonSchema
         }
       });
       break;
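With the structuredOutputs flag removed, prepareChatTools now marks every function tool with strict: strictJsonSchema unconditionally. A sketch of the tool entry this produces in the request body, assuming the default of true and an illustrative tool; the type: "function" wrapper is assumed from the OpenAI chat tools format and is not shown in this hunk:

// Illustrative resolved request fragment (not an exported API of the package):
const toolsPayload = [
  {
    type: "function",
    function: {
      name: "getWeather",                    // example tool name
      description: "Look up current weather",
      parameters: {                          // JSON Schema taken from tool.inputSchema
        type: "object",
        properties: { city: { type: "string" } },
        required: ["city"],
      },
      strict: true,                          // strictJsonSchema, defaulting to true
    },
  },
];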
@@ -609,24 +602,16 @@ var OpenAIChatLanguageModel = class {
     toolChoice,
     providerOptions
   }) {
-    var _a, _b, _c
+    var _a, _b, _c;
     const warnings = [];
     const openaiOptions = (_a = await (0, import_provider_utils5.parseProviderOptions)({
       provider: "openai",
       providerOptions,
       schema: openaiChatLanguageModelOptions
     })) != null ? _a : {};
-    const structuredOutputs = (_b = openaiOptions.structuredOutputs) != null ? _b : true;
     if (topK != null) {
       warnings.push({ type: "unsupported", feature: "topK" });
     }
-    if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !structuredOutputs) {
-      warnings.push({
-        type: "unsupported",
-        feature: "responseFormat",
-        details: "JSON response format schema is only supported with structuredOutputs"
-      });
-    }
     const { messages, warnings: messageWarnings } = convertToOpenAIChatMessages(
       {
         prompt,
@@ -634,7 +619,7 @@ var OpenAIChatLanguageModel = class {
       }
     );
     warnings.push(...messageWarnings);
-    const strictJsonSchema = (
+    const strictJsonSchema = (_b = openaiOptions.strictJsonSchema) != null ? _b : true;
     const baseArgs = {
       // model id:
       model: this.modelId,
@@ -650,12 +635,12 @@
       top_p: topP,
       frequency_penalty: frequencyPenalty,
       presence_penalty: presencePenalty,
-      response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ?
+      response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? responseFormat.schema != null ? {
         type: "json_schema",
         json_schema: {
           schema: responseFormat.schema,
           strict: strictJsonSchema,
-          name: (
+          name: (_c = responseFormat.name) != null ? _c : "response",
           description: responseFormat.description
         }
       } : { type: "json_object" } : void 0,
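The response_format branch no longer depends on structuredOutputs: a JSON response format with a schema always becomes a json_schema response format whose strict flag comes from strictJsonSchema, and the name falls back to "response". A sketch of the resulting request field under the new defaults (the schema value is illustrative):

const schema = {
  type: "object",
  properties: { answer: { type: "string" } },
  required: ["answer"],
};

// Sent when responseFormat = { type: "json", schema, name?, description? }:
const response_format = {
  type: "json_schema",
  json_schema: {
    schema,
    strict: true,       // strictJsonSchema, now defaulting to true
    name: "response",   // responseFormat.name ?? "response"
    description: undefined,
  },
};
// Without a schema the model still receives { type: "json_object" }.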
@@ -769,7 +754,6 @@ var OpenAIChatLanguageModel = class {
     } = prepareChatTools({
       tools,
       toolChoice,
-      structuredOutputs,
       strictJsonSchema
     });
     return {
@@ -1638,12 +1622,17 @@ var import_v48 = require("zod/v4");
 var openaiImageResponseSchema = (0, import_provider_utils12.lazySchema)(
   () => (0, import_provider_utils12.zodSchema)(
     import_v48.z.object({
+      created: import_v48.z.number().nullish(),
       data: import_v48.z.array(
         import_v48.z.object({
           b64_json: import_v48.z.string(),
           revised_prompt: import_v48.z.string().nullish()
         })
       ),
+      background: import_v48.z.string().nullish(),
+      output_format: import_v48.z.string().nullish(),
+      size: import_v48.z.string().nullish(),
+      quality: import_v48.z.string().nullish(),
       usage: import_v48.z.object({
         input_tokens: import_v48.z.number().nullish(),
         output_tokens: import_v48.z.number().nullish(),
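The widened openaiImageResponseSchema now also parses the response-level created, background, output_format, size, and quality fields. A hedged example of a raw Images API response body that would validate against the new schema (all values are illustrative):

const exampleImageResponse = {
  created: 1733430000,
  data: [
    { b64_json: "<base64-encoded image bytes>", revised_prompt: "A watercolor fox at dusk" },
  ],
  background: "opaque",
  output_format: "png",
  size: "1024x1024",
  quality: "high",
  usage: { input_tokens: 12, output_tokens: 0 },
};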
@@ -1742,11 +1731,17 @@ var OpenAIImageModel = class {
       },
       providerMetadata: {
         openai: {
-          images: response.data.map(
-
-
-
-
+          images: response.data.map((item) => {
+            var _a2, _b2, _c2, _d2, _e2;
+            return {
+              ...item.revised_prompt ? { revisedPrompt: item.revised_prompt } : {},
+              created: (_a2 = response.created) != null ? _a2 : void 0,
+              size: (_b2 = response.size) != null ? _b2 : void 0,
+              quality: (_c2 = response.quality) != null ? _c2 : void 0,
+              background: (_d2 = response.background) != null ? _d2 : void 0,
+              outputFormat: (_e2 = response.output_format) != null ? _e2 : void 0
+            };
+          })
         }
       }
     };
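After this rewrite each entry in providerMetadata.openai.images merges the per-image revisedPrompt (when the API returned one) with the response-level metadata, all optional. A type-level sketch of the resulting entry; the interface name is illustrative and not exported by the package:

interface OpenAIImageMetadataEntry {
  revisedPrompt?: string; // from data[i].revised_prompt, omitted when absent
  created?: number;       // response.created
  size?: string;          // e.g. "1024x1024"
  quality?: string;       // e.g. "high"
  background?: string;    // e.g. "opaque"
  outputFormat?: string;  // response.output_format, e.g. "png"
}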
@@ -3586,7 +3581,7 @@ var OpenAIResponsesLanguageModel = class {
       hasApplyPatchTool: hasOpenAITool("openai.apply_patch")
     });
     warnings.push(...inputWarnings);
-    const strictJsonSchema = (_b = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _b :
+    const strictJsonSchema = (_b = openaiOptions == null ? void 0 : openaiOptions.strictJsonSchema) != null ? _b : true;
     let include = openaiOptions == null ? void 0 : openaiOptions.include;
     function addInclude(key) {
       if (include == null) {
@@ -5185,7 +5180,7 @@ var OpenAITranscriptionModel = class {
 };
 
 // src/version.ts
-var VERSION = true ? "3.0.0-beta.
+var VERSION = true ? "3.0.0-beta.76" : "0.0.0-test";
 
 // src/openai-provider.ts
 function createOpenAI(options = {}) {