@ai-sdk/openai 2.0.0-canary.6 → 2.0.0-canary.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +22 -0
- package/dist/index.d.mts +4 -4
- package/dist/index.d.ts +4 -4
- package/dist/index.js +32 -24
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +32 -24
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +6 -6
- package/dist/internal/index.d.ts +6 -6
- package/dist/internal/index.js +32 -24
- package/dist/internal/index.js.map +1 -1
- package/dist/internal/index.mjs +32 -24
- package/dist/internal/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/internal/index.d.mts
CHANGED
@@ -1,4 +1,4 @@
-import { LanguageModelV2,
+import { LanguageModelV2, EmbeddingModelV2, ImageModelV1, TranscriptionModelV1CallOptions, TranscriptionModelV1 } from '@ai-sdk/provider';
 import { FetchFunction } from '@ai-sdk/provider-utils';
 import { z } from 'zod';
 
@@ -214,8 +214,8 @@ interface OpenAIEmbeddingSettings {
     user?: string;
 }
 
-declare class OpenAIEmbeddingModel implements
-    readonly specificationVersion = "
+declare class OpenAIEmbeddingModel implements EmbeddingModelV2<string> {
+    readonly specificationVersion = "v2";
     readonly modelId: OpenAIEmbeddingModelId;
     private readonly config;
     private readonly settings;
@@ -223,7 +223,7 @@ declare class OpenAIEmbeddingModel implements EmbeddingModelV1<string> {
     get maxEmbeddingsPerCall(): number;
     get supportsParallelCalls(): boolean;
     constructor(modelId: OpenAIEmbeddingModelId, settings: OpenAIEmbeddingSettings, config: OpenAIConfig);
-    doEmbed({ values, headers, abortSignal, }: Parameters<
+    doEmbed({ values, headers, abortSignal, }: Parameters<EmbeddingModelV2<string>['doEmbed']>[0]): Promise<Awaited<ReturnType<EmbeddingModelV2<string>['doEmbed']>>>;
 }
 
 type OpenAIImageModelId = 'dall-e-3' | 'dall-e-2' | (string & {});
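Note: the `doEmbed` change above is the heart of the V1→V2 embedding migration: both the call options and the result are now derived from `EmbeddingModelV2<string>`. A minimal sketch of driving the model interface directly under the new types, assuming the package's public `createOpenAI`/`textEmbeddingModel` entry points (not shown in this diff) and an illustrative model id:

```ts
import { createOpenAI } from '@ai-sdk/openai';

async function main() {
  const openai = createOpenAI({ apiKey: process.env.OPENAI_API_KEY });

  // Returns the OpenAIEmbeddingModel declared above.
  const model = openai.textEmbeddingModel('text-embedding-3-small');

  // Options and result types are derived from EmbeddingModelV2<string>;
  // headers/abortSignal appear optional per the destructuring above.
  const { embeddings, usage } = await model.doEmbed({
    values: ['sunny day at the beach'],
  });

  console.log(embeddings[0].length); // embedding vector dimension
  console.log(usage?.tokens); // prompt tokens, when the API reports usage
}

main().catch(console.error);
```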
@@ -278,7 +278,7 @@ type OpenAITranscriptionModelOptions = {
     timestamp_granularities?: Array<'word' | 'segment'>;
 };
 
-declare const
+declare const openAIProviderOptionsSchema: z.ZodObject<{
     include: z.ZodOptional<z.ZodNullable<z.ZodArray<z.ZodString, "many">>>;
     language: z.ZodOptional<z.ZodNullable<z.ZodString>>;
     prompt: z.ZodOptional<z.ZodNullable<z.ZodString>>;
@@ -299,7 +299,7 @@ declare const OpenAIProviderOptionsSchema: z.ZodObject<{
 }>;
 type OpenAITranscriptionCallOptions = Omit<TranscriptionModelV1CallOptions, 'providerOptions'> & {
     providerOptions?: {
-        openai?: z.infer<typeof
+        openai?: z.infer<typeof openAIProviderOptionsSchema>;
     };
 };
 interface OpenAITranscriptionModelConfig extends OpenAIConfig {
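Note: the renamed (now lower-camel-cased) `openAIProviderOptionsSchema` is what the `providerOptions.openai` bag for transcription calls is inferred from. A sketch of the inferred shape, rebuilt locally from the three schema fields this hunk shows (the package's real schema declares more fields than are visible here):

```ts
import { z } from 'zod';

// Local stand-in mirroring only the openAIProviderOptionsSchema fields
// visible in this diff; the real schema declares additional fields.
const openAIProviderOptionsSchema = z.object({
  include: z.array(z.string()).nullish(),
  language: z.string().nullish(),
  prompt: z.string().nullish(),
});

type OpenAITranscriptionProviderOptions = z.infer<
  typeof openAIProviderOptionsSchema
>;

// The shape accepted under providerOptions.openai in OpenAITranscriptionCallOptions:
const options: OpenAITranscriptionProviderOptions = {
  language: 'en',
  prompt: 'Spelling hints: AI SDK, Zod',
};

console.log(options);
```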
package/dist/internal/index.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import { LanguageModelV2,
+import { LanguageModelV2, EmbeddingModelV2, ImageModelV1, TranscriptionModelV1CallOptions, TranscriptionModelV1 } from '@ai-sdk/provider';
 import { FetchFunction } from '@ai-sdk/provider-utils';
 import { z } from 'zod';
 
@@ -214,8 +214,8 @@ interface OpenAIEmbeddingSettings {
     user?: string;
 }
 
-declare class OpenAIEmbeddingModel implements
-    readonly specificationVersion = "
+declare class OpenAIEmbeddingModel implements EmbeddingModelV2<string> {
+    readonly specificationVersion = "v2";
     readonly modelId: OpenAIEmbeddingModelId;
     private readonly config;
     private readonly settings;
@@ -223,7 +223,7 @@ declare class OpenAIEmbeddingModel implements EmbeddingModelV1<string> {
     get maxEmbeddingsPerCall(): number;
     get supportsParallelCalls(): boolean;
     constructor(modelId: OpenAIEmbeddingModelId, settings: OpenAIEmbeddingSettings, config: OpenAIConfig);
-    doEmbed({ values, headers, abortSignal, }: Parameters<
+    doEmbed({ values, headers, abortSignal, }: Parameters<EmbeddingModelV2<string>['doEmbed']>[0]): Promise<Awaited<ReturnType<EmbeddingModelV2<string>['doEmbed']>>>;
 }
 
 type OpenAIImageModelId = 'dall-e-3' | 'dall-e-2' | (string & {});
@@ -278,7 +278,7 @@ type OpenAITranscriptionModelOptions = {
     timestamp_granularities?: Array<'word' | 'segment'>;
 };
 
-declare const
+declare const openAIProviderOptionsSchema: z.ZodObject<{
     include: z.ZodOptional<z.ZodNullable<z.ZodArray<z.ZodString, "many">>>;
     language: z.ZodOptional<z.ZodNullable<z.ZodString>>;
     prompt: z.ZodOptional<z.ZodNullable<z.ZodString>>;
@@ -299,7 +299,7 @@ declare const OpenAIProviderOptionsSchema: z.ZodObject<{
 }>;
 type OpenAITranscriptionCallOptions = Omit<TranscriptionModelV1CallOptions, 'providerOptions'> & {
     providerOptions?: {
-        openai?: z.infer<typeof
+        openai?: z.infer<typeof openAIProviderOptionsSchema>;
     };
 };
 interface OpenAITranscriptionModelConfig extends OpenAIConfig {
package/dist/internal/index.js
CHANGED
@@ -563,7 +563,7 @@ var OpenAIChatLanguageModel = class {
     };
   }
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g
+    var _a, _b, _c, _d, _e, _f, _g;
     const { args: body, warnings } = this.getArgs(options);
     const {
       responseHeaders,
@@ -601,10 +601,11 @@ var OpenAIChatLanguageModel = class {
       providerMetadata.openai.cachedPromptTokens = promptTokenDetails == null ? void 0 : promptTokenDetails.cached_tokens;
     }
     return {
-      text:
-      toolCalls: (
+      text: choice.message.content != null ? { type: "text", text: choice.message.content } : void 0,
+      toolCalls: (_c = choice.message.tool_calls) == null ? void 0 : _c.map((toolCall) => {
        var _a2;
        return {
+          type: "tool-call",
          toolCallType: "function",
          toolCallId: (_a2 = toolCall.id) != null ? _a2 : (0, import_provider_utils3.generateId)(),
          toolName: toolCall.function.name,
@@ -613,8 +614,8 @@ var OpenAIChatLanguageModel = class {
      }),
      finishReason: mapOpenAIFinishReason(choice.finish_reason),
      usage: {
-        inputTokens: (
-        outputTokens: (
+        inputTokens: (_e = (_d = response.usage) == null ? void 0 : _d.prompt_tokens) != null ? _e : void 0,
+        outputTokens: (_g = (_f = response.usage) == null ? void 0 : _f.completion_tokens) != null ? _g : void 0
      },
      request: { body },
      response: {
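Note: taken together, these `doGenerate` hunks reshape the result: `text` becomes an optional typed part instead of a plain string, tool calls carry an explicit `type: "tool-call"` discriminator, and `usage` reports `inputTokens`/`outputTokens` that may be `undefined` when the API omits usage. A sketch of consuming the new shape, with the result type written out locally from exactly the fields visible above:

```ts
// Result shape transcribed from the doGenerate hunks above (partial; the
// full result also carries finishReason, request, response, etc.).
interface GenerateResultSketch {
  text?: { type: 'text'; text: string };
  toolCalls?: Array<{
    type: 'tool-call';
    toolCallType: 'function';
    toolCallId: string;
    toolName: string;
    args: string;
  }>;
  usage: { inputTokens?: number; outputTokens?: number };
}

function report(result: GenerateResultSketch) {
  if (result.text != null) {
    console.log(result.text.text); // previously `result.text` was the string itself
  }
  for (const call of result.toolCalls ?? []) {
    console.log(`${call.type}: ${call.toolName}(${call.args})`);
  }
  const { inputTokens, outputTokens } = result.usage;
  console.log(`tokens in/out: ${inputTokens ?? '?'} / ${outputTokens ?? '?'}`);
}
```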
@@ -714,8 +715,8 @@ var OpenAIChatLanguageModel = class {
        const delta = choice.delta;
        if (delta.content != null) {
          controller.enqueue({
-            type: "text
-
+            type: "text",
+            text: delta.content
          });
        }
        const mappedLogprobs = mapOpenAIChatLogProbsOutput(
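Note: the streaming side mirrors the generate-path change: content deltas are now enqueued as `{ type: "text", text: ... }` parts. A sketch of accumulating them from a stream of such parts (the part type here is narrowed to just what this hunk emits):

```ts
// Accumulate text from stream parts shaped like the enqueue call above.
async function collectText(
  stream: ReadableStream<{ type: string; text?: string }>,
): Promise<string> {
  const reader = stream.getReader();
  let text = '';
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    if (value.type === 'text' && value.text != null) {
      text += value.text;
    }
  }
  return text;
}
```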
@@ -1144,7 +1145,7 @@ var OpenAICompletionLanguageModel = class {
    });
    const choice = response.choices[0];
    return {
-      text: choice.text,
+      text: { type: "text", text: choice.text },
      usage: {
        inputTokens: response.usage.prompt_tokens,
        outputTokens: response.usage.completion_tokens
@@ -1221,8 +1222,8 @@ var OpenAICompletionLanguageModel = class {
        }
        if ((choice == null ? void 0 : choice.text) != null) {
          controller.enqueue({
-            type: "text
-
+            type: "text",
+            text: choice.text
          });
        }
        const mappedLogprobs = mapOpenAICompletionLogProbs(
@@ -1300,7 +1301,7 @@ var import_provider_utils5 = require("@ai-sdk/provider-utils");
 var import_zod5 = require("zod");
 var OpenAIEmbeddingModel = class {
   constructor(modelId, settings, config) {
-    this.specificationVersion = "
+    this.specificationVersion = "v2";
     this.modelId = modelId;
     this.settings = settings;
     this.config = config;
@@ -1329,7 +1330,11 @@ var OpenAIEmbeddingModel = class {
        values
      });
    }
-    const {
+    const {
+      responseHeaders,
+      value: response,
+      rawValue
+    } = await (0, import_provider_utils5.postJsonToApi)({
      url: this.config.url({
        path: "/embeddings",
        modelId: this.modelId
@@ -1352,7 +1357,7 @@ var OpenAIEmbeddingModel = class {
    return {
      embeddings: response.data.map((item) => item.embedding),
      usage: response.usage ? { tokens: response.usage.prompt_tokens } : void 0,
-
+      response: { headers: responseHeaders, body: rawValue }
    };
  }
 };
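Note: a functional addition hides in this hunk: `doEmbed` now returns the HTTP `responseHeaders` and the raw parsed body (`rawValue`) alongside the embeddings. A sketch of reading them through the `EmbeddingModelV2` interface (assuming, per the destructuring shown earlier, that `headers` and `abortSignal` are optional call options):

```ts
import type { EmbeddingModelV2 } from '@ai-sdk/provider';

async function debugEmbed(model: EmbeddingModelV2<string>) {
  const { embeddings, response } = await model.doEmbed({
    values: ['hello world'],
  });

  console.log(embeddings.length);
  // New in this version: raw response surfaced for debugging/telemetry.
  console.log(response?.headers); // HTTP response headers
  console.log(response?.body); // raw provider response body (rawValue)
}
```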
@@ -1448,7 +1453,7 @@ var openaiImageResponseSchema = import_zod6.z.object({
 // src/openai-transcription-model.ts
 var import_provider_utils7 = require("@ai-sdk/provider-utils");
 var import_zod7 = require("zod");
-var
+var openAIProviderOptionsSchema = import_zod7.z.object({
   include: import_zod7.z.array(import_zod7.z.string()).nullish(),
   language: import_zod7.z.string().nullish(),
   prompt: import_zod7.z.string().nullish(),
@@ -1533,7 +1538,7 @@ var OpenAITranscriptionModel = class {
    const openAIOptions = (0, import_provider_utils7.parseProviderOptions)({
      provider: "openai",
      providerOptions,
-      schema:
+      schema: openAIProviderOptionsSchema
    });
    const formData = new FormData();
    const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([(0, import_provider_utils7.convertBase64ToUint8Array)(audio)]);
@@ -2017,17 +2022,22 @@ var OpenAIResponsesLanguageModel = class {
    });
    const outputTextElements = response.output.filter((output) => output.type === "message").flatMap((output) => output.content).filter((content) => content.type === "output_text");
    const toolCalls = response.output.filter((output) => output.type === "function_call").map((output) => ({
+      type: "tool-call",
      toolCallType: "function",
      toolCallId: output.call_id,
      toolName: output.name,
      args: output.arguments
    }));
    return {
-      text:
+      text: {
+        type: "text",
+        text: outputTextElements.map((content) => content.text).join("\n")
+      },
      sources: outputTextElements.flatMap(
        (content) => content.annotations.map((annotation) => {
          var _a2, _b2, _c2;
          return {
+            type: "source",
            sourceType: "url",
            id: (_c2 = (_b2 = (_a2 = this.config).generateId) == null ? void 0 : _b2.call(_a2)) != null ? _c2 : (0, import_provider_utils8.generateId)(),
            url: annotation.url,
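Note: annotations from the Responses API are likewise normalized into discriminated `source` parts (`type: "source"`, `sourceType: "url"`, plus `id`, `url`, `title`). A sketch of listing URL citations from a result's `sources` array, typed locally from the fields shown in these hunks:

```ts
// Source part shape transcribed from the hunks above.
interface UrlSourceSketch {
  type: 'source';
  sourceType: 'url';
  id: string;
  url: string;
  title?: string;
}

function listCitations(sources: UrlSourceSketch[]) {
  for (const source of sources) {
    console.log(`${source.title ?? source.url} -> ${source.url}`);
  }
}
```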
@@ -2138,8 +2148,8 @@ var OpenAIResponsesLanguageModel = class {
        });
      } else if (isTextDeltaChunk(value)) {
        controller.enqueue({
-          type: "text
-
+          type: "text",
+          text: value.delta
        });
      } else if (isResponseOutputItemDoneChunk(value) && value.item.type === "function_call") {
        ongoingToolCalls[value.output_index] = void 0;
@@ -2163,12 +2173,10 @@ var OpenAIResponsesLanguageModel = class {
      } else if (isResponseAnnotationAddedChunk(value)) {
        controller.enqueue({
          type: "source",
-
-
-
-
-          title: value.annotation.title
-        }
+          sourceType: "url",
+          id: (_h = (_g = (_f = self.config).generateId) == null ? void 0 : _g.call(_f)) != null ? _h : (0, import_provider_utils8.generateId)(),
+          url: value.annotation.url,
+          title: value.annotation.title
        });
      }
    },