@ai-sdk/openai 2.1.0-beta.11 → 2.1.0-beta.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +18 -0
- package/dist/index.d.mts +5 -5
- package/dist/index.d.ts +5 -5
- package/dist/index.js +19 -8
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +19 -8
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +10 -10
- package/dist/internal/index.d.ts +10 -10
- package/dist/internal/index.js +18 -7
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +18 -7
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/internal/index.mjs
CHANGED
@@ -41,6 +41,7 @@ function convertToOpenAIChatMessages({
   prompt,
   systemMessageMode = "system"
 }) {
+  var _a;
   const messages = [];
   const warnings = [];
   for (const { role, content } of prompt) {
@@ -79,7 +80,7 @@ function convertToOpenAIChatMessages({
       messages.push({
         role: "user",
         content: content.map((part, index) => {
-          var _a, _b, _c;
+          var _a2, _b, _c;
           switch (part.type) {
             case "text": {
               return { type: "text", text: part.text };
@@ -92,7 +93,7 @@ function convertToOpenAIChatMessages({
                 image_url: {
                   url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${convertToBase64(part.data)}`,
                   // OpenAI specific extension: image detail
-                  detail: (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.imageDetail
+                  detail: (_b = (_a2 = part.providerOptions) == null ? void 0 : _a2.openai) == null ? void 0 : _b.imageDetail
                 }
               };
             } else if (part.mediaType.startsWith("audio/")) {
@@ -189,6 +190,9 @@ function convertToOpenAIChatMessages({
             case "error-text":
               contentValue = output.value;
               break;
+            case "execution-denied":
+              contentValue = (_a = output.reason) != null ? _a : "Tool execution denied.";
+              break;
             case "content":
             case "json":
             case "error-json":
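The only behavioral change in `convertToOpenAIChatMessages` is the new `execution-denied` branch; the `_a`/`_a2` edits above are just esbuild renaming its hoisted temporaries after `var _a;` moved to function scope. A minimal sketch of what the new branch does with a tool output, assuming an output shape of `{ type: "execution-denied"; reason?: string }` as implied by the compiled code (the `ToolOutput` name and the surrounding function are illustrative, not the package's source):

```ts
// Illustrative sketch only; union member names mirror the case labels in the diff.
type ToolOutput =
  | { type: "text" | "error-text"; value: string }
  | { type: "execution-denied"; reason?: string };

function toToolResultText(output: ToolOutput): string {
  switch (output.type) {
    case "text":
    case "error-text":
      return output.value;
    case "execution-denied":
      // New in this release: a denied tool execution is sent back to the
      // model as plain text, with a default message when no reason is given.
      return output.reason ?? "Tool execution denied.";
  }
}
```

The same branch is added to `convertToOpenAIResponsesInput` later in the file (see the last hunk), using the newly hoisted `_j` temporary.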
@@ -1593,9 +1597,13 @@ import { z as z8 } from "zod/v4";
|
|
|
1593
1597
|
var modelMaxImagesPerCall = {
|
|
1594
1598
|
"dall-e-3": 1,
|
|
1595
1599
|
"dall-e-2": 10,
|
|
1596
|
-
"gpt-image-1": 10
|
|
1600
|
+
"gpt-image-1": 10,
|
|
1601
|
+
"gpt-image-1-mini": 10
|
|
1597
1602
|
};
|
|
1598
|
-
var hasDefaultResponseFormat = /* @__PURE__ */ new Set([
|
|
1603
|
+
var hasDefaultResponseFormat = /* @__PURE__ */ new Set([
|
|
1604
|
+
"gpt-image-1",
|
|
1605
|
+
"gpt-image-1-mini"
|
|
1606
|
+
]);
|
|
1599
1607
|
|
|
1600
1608
|
// src/image/openai-image-model.ts
|
|
1601
1609
|
var OpenAIImageModel = class {
|
|
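`gpt-image-1-mini` is registered alongside `gpt-image-1`: up to 10 images per call, and it is listed in `hasDefaultResponseFormat`, so no explicit response format needs to be requested. A rough sketch of how tables like these are typically consumed; the `imageCallDefaults` helper and the fallback of 1 image per call are assumptions for illustration, not code from the package:

```ts
// Tables as they appear after this change; the helper below is hypothetical.
const modelMaxImagesPerCall: Record<string, number> = {
  "dall-e-3": 1,
  "dall-e-2": 10,
  "gpt-image-1": 10,
  "gpt-image-1-mini": 10,
};

const hasDefaultResponseFormat = new Set(["gpt-image-1", "gpt-image-1-mini"]);

// Hypothetical consumer: decide batching and whether to pass response_format.
function imageCallDefaults(modelId: string) {
  return {
    maxImagesPerCall: modelMaxImagesPerCall[modelId] ?? 1, // assumed fallback
    responseFormat: hasDefaultResponseFormat.has(modelId)
      ? undefined // gpt-image-1 family returns base64 by default
      : ("b64_json" as const),
  };
}

// e.g. imageCallDefaults("gpt-image-1-mini")
// -> { maxImagesPerCall: 10, responseFormat: undefined }
```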
@@ -1783,7 +1791,7 @@ var OpenAITranscriptionModel = class {
   constructor(modelId, config) {
     this.modelId = modelId;
     this.config = config;
-    this.specificationVersion = "v2";
+    this.specificationVersion = "v3";
   }
   get provider() {
     return this.config.provider;
@@ -1928,7 +1936,7 @@ var OpenAISpeechModel = class {
   constructor(modelId, config) {
     this.modelId = modelId;
     this.config = config;
-    this.specificationVersion = "v2";
+    this.specificationVersion = "v3";
   }
   get provider() {
     return this.config.provider;
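Both `OpenAITranscriptionModel` and `OpenAISpeechModel` now declare specification version `v3`. If calling code still distinguishes provider spec versions, a simple guard is enough; the type below is a simplified placeholder, not the SDK's own interface:

```ts
// Simplified placeholder type; only specificationVersion matters here.
interface VersionedModel {
  specificationVersion: string;
}

function assertV3(model: VersionedModel): void {
  if (model.specificationVersion !== "v3") {
    throw new Error(
      `Expected a v3 model, got specification version "${model.specificationVersion}"`,
    );
  }
}
```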
@@ -2080,7 +2088,7 @@ async function convertToOpenAIResponsesInput({
   store,
   hasLocalShellTool = false
 }) {
-  var _a, _b, _c, _d, _e, _f, _g, _h, _i;
+  var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
   const input = [];
   const warnings = [];
   for (const { role, content } of prompt) {
@@ -2283,6 +2291,9 @@ async function convertToOpenAIResponsesInput({
             case "error-text":
               contentValue = output.value;
               break;
+            case "execution-denied":
+              contentValue = (_j = output.reason) != null ? _j : "Tool execution denied.";
+              break;
             case "content":
             case "json":
             case "error-json":
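The Responses API converter gets the identical `execution-denied` handling; the longer `var _a, ..., _j` line only hoists the extra temporary it needs. For orientation, a denied execution would end up as ordinary tool-result text in the request payloads, roughly like the items below; the shapes follow the public OpenAI Chat Completions and Responses APIs, the IDs are made up, and the exact wrapping is not shown in these hunks:

```ts
const deniedText = "Tool execution denied."; // default string from the diff

// Chat Completions API: tool results travel as role-"tool" messages.
const chatToolMessage = {
  role: "tool" as const,
  tool_call_id: "call_123", // made-up ID
  content: deniedText,
};

// Responses API: tool results travel as function_call_output items.
const responsesItem = {
  type: "function_call_output" as const,
  call_id: "call_123", // made-up ID
  output: deniedText,
};
```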