@ai-sdk/openai 2.0.0-canary.1 → 2.0.0-canary.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +24 -0
- package/dist/index.d.mts +7 -1
- package/dist/index.d.ts +7 -1
- package/dist/index.js +507 -465
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +474 -428
- package/dist/index.mjs.map +1 -1
- package/internal/dist/index.d.mts +67 -2
- package/internal/dist/index.d.ts +67 -2
- package/internal/dist/index.js +498 -462
- package/internal/dist/index.js.map +1 -1
- package/internal/dist/index.mjs +467 -428
- package/internal/dist/index.mjs.map +1 -1
- package/package.json +4 -4
package/internal/dist/index.js
CHANGED
@@ -25,18 +25,18 @@ __export(internal_exports, {
   OpenAIEmbeddingModel: () => OpenAIEmbeddingModel,
   OpenAIImageModel: () => OpenAIImageModel,
   OpenAIResponsesLanguageModel: () => OpenAIResponsesLanguageModel,
+  OpenAITranscriptionModel: () => OpenAITranscriptionModel,
   modelMaxImagesPerCall: () => modelMaxImagesPerCall
 });
 module.exports = __toCommonJS(internal_exports);

 // src/openai-chat-language-model.ts
 var import_provider3 = require("@ai-sdk/provider");
-var
+var import_provider_utils2 = require("@ai-sdk/provider-utils");
 var import_zod2 = require("zod");

 // src/convert-to-openai-chat-messages.ts
 var import_provider = require("@ai-sdk/provider");
-var import_provider_utils = require("@ai-sdk/provider-utils");
 function convertToOpenAIChatMessages({
   prompt,
   useLegacyFunctionCalling = false,
@@ -80,55 +80,65 @@ function convertToOpenAIChatMessages({
       messages.push({
         role: "user",
         content: content.map((part, index) => {
-          var _a, _b, _c
+          var _a, _b, _c;
           switch (part.type) {
             case "text": {
               return { type: "text", text: part.text };
             }
-            case "image": {
-              return {
-                type: "image_url",
-                image_url: {
-                  url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${(0, import_provider_utils.convertUint8ArrayToBase64)(part.image)}`,
-                  // OpenAI specific extension: image detail
-                  detail: (_c = (_b = part.providerMetadata) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
-                }
-              };
-            }
             case "file": {
-              if (part.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                type: "input_audio",
-                input_audio: { data: part.data, format: "mp3" }
-              };
+              if (part.mediaType.startsWith("image/")) {
+                const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
+                return {
+                  type: "image_url",
+                  image_url: {
+                    url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${part.data}`,
+                    // OpenAI specific extension: image detail
+                    detail: (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.imageDetail
+                  }
+                };
+              } else if (part.mediaType.startsWith("audio/")) {
+                if (part.data instanceof URL) {
+                  throw new import_provider.UnsupportedFunctionalityError({
+                    functionality: "audio file parts with URLs"
+                  });
                 }
-
-
-
-
-
-
-
-
+                switch (part.mediaType) {
+                  case "audio/wav": {
+                    return {
+                      type: "input_audio",
+                      input_audio: { data: part.data, format: "wav" }
+                    };
+                  }
+                  case "audio/mp3":
+                  case "audio/mpeg": {
+                    return {
+                      type: "input_audio",
+                      input_audio: { data: part.data, format: "mp3" }
+                    };
+                  }
+                  default: {
+                    throw new import_provider.UnsupportedFunctionalityError({
+                      functionality: `audio content parts with media type ${part.mediaType}`
+                    });
+                  }
                 }
-
+              } else if (part.mediaType === "application/pdf") {
+                if (part.data instanceof URL) {
                   throw new import_provider.UnsupportedFunctionalityError({
-                    functionality:
+                    functionality: "PDF file parts with URLs"
                   });
                 }
+                return {
+                  type: "file",
+                  file: {
+                    filename: (_c = part.filename) != null ? _c : `part-${index}.pdf`,
+                    file_data: `data:application/pdf;base64,${part.data}`
+                  }
+                };
+              } else {
+                throw new import_provider.UnsupportedFunctionalityError({
+                  functionality: `file part media type ${part.mediaType}`
+                });
               }
             }
           }
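The hunk above replaces the old dedicated `image` content part with a single `file` part keyed by `mediaType`. As a rough illustration of the prompt shape the converter now reads (a sketch of my own, not taken from the package; the placeholder base64 variables and text are illustrative), in TypeScript:

// Hypothetical sketch: user-content parts carrying `mediaType`, `data`,
// an optional `filename`, and the OpenAI-specific `imageDetail` option.
const imageBase64 = "...";
const wavBase64 = "...";
const pdfBase64 = "...";
const prompt = [
  {
    role: "user" as const,
    content: [
      { type: "text" as const, text: "Describe the attachments." },
      {
        type: "file" as const,
        mediaType: "image/png",
        data: imageBase64, // base64 string, or a URL passed through unchanged
        providerOptions: { openai: { imageDetail: "low" } },
      },
      { type: "file" as const, mediaType: "audio/wav", data: wavBase64 },
      { type: "file" as const, mediaType: "application/pdf", data: pdfBase64, filename: "paper.pdf" },
    ],
  },
];
// Per the hunk, these map to image_url, input_audio, and file chat-completion
// parts respectively; other media types throw UnsupportedFunctionalityError.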
@@ -237,7 +247,7 @@ function mapOpenAIFinishReason(finishReason) {

 // src/openai-error.ts
 var import_zod = require("zod");
-var
+var import_provider_utils = require("@ai-sdk/provider-utils");
 var openaiErrorDataSchema = import_zod.z.object({
   error: import_zod.z.object({
     message: import_zod.z.string(),
@@ -249,7 +259,7 @@ var openaiErrorDataSchema = import_zod.z.object({
     code: import_zod.z.union([import_zod.z.string(), import_zod.z.number()]).nullish()
   })
 });
-var openaiFailedResponseHandler = (0,
+var openaiFailedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)({
   errorSchema: openaiErrorDataSchema,
   errorToMessage: (data) => data.error.message
 });
@@ -270,17 +280,16 @@ function getResponseMetadata({
 // src/openai-prepare-tools.ts
 var import_provider2 = require("@ai-sdk/provider");
 function prepareTools({
-
+  tools,
+  toolChoice,
   useLegacyFunctionCalling = false,
   structuredOutputs
 }) {
-
-  const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
+  tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
   const toolWarnings = [];
   if (tools == null) {
-    return { tools: void 0,
+    return { tools: void 0, toolChoice: void 0, toolWarnings };
   }
-  const toolChoice = mode.toolChoice;
   if (useLegacyFunctionCalling) {
     const openaiFunctions = [];
     for (const tool of tools) {
@@ -340,18 +349,18 @@ function prepareTools({
     }
   }
   if (toolChoice == null) {
-    return { tools: openaiTools,
+    return { tools: openaiTools, toolChoice: void 0, toolWarnings };
   }
   const type = toolChoice.type;
   switch (type) {
     case "auto":
     case "none":
     case "required":
-      return { tools: openaiTools,
+      return { tools: openaiTools, toolChoice: type, toolWarnings };
     case "tool":
       return {
         tools: openaiTools,
-
+        toolChoice: {
           type: "function",
           function: {
             name: toolChoice.toolName
@@ -362,7 +371,7 @@ function prepareTools({
     default: {
       const _exhaustiveCheck = type;
       throw new import_provider2.UnsupportedFunctionalityError({
-        functionality: `
+        functionality: `tool choice type: ${_exhaustiveCheck}`
       });
     }
   }
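The reworked `prepareTools` takes `tools` and `toolChoice` as plain arguments instead of reading them from `mode`, and returns `{ tools, toolChoice, toolWarnings }`. A minimal sketch of the input shapes, with a placeholder weather tool of my own (the function is internal to this bundle, so this is shape illustration rather than public API):

// Hypothetical inputs, mirroring the destructured parameters in the hunks above.
const tools = [
  {
    type: "function" as const,
    name: "get_weather", // placeholder tool name
    description: "Look up the weather for a city",
    parameters: { type: "object", properties: { city: { type: "string" } } },
  },
];
const toolChoice = { type: "auto" as const };
// Inside the chat model (see the getArgs hunk further down) these are forwarded as:
//   prepareTools({ tools, toolChoice, useLegacyFunctionCalling: false, structuredOutputs: true })
// and the returned `tools` / `toolChoice` values are spread into the request body
// as `tools` / `tool_choice`, with `toolWarnings` appended to the call warnings.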
@@ -393,7 +402,6 @@ var OpenAIChatLanguageModel = class {
     return !this.settings.downloadImages;
   }
   getArgs({
-    mode,
     prompt,
     maxTokens,
     temperature,
@@ -404,10 +412,11 @@ var OpenAIChatLanguageModel = class {
     stopSequences,
     responseFormat,
     seed,
-
+    tools,
+    toolChoice,
+    providerOptions
   }) {
-    var _a, _b, _c, _d, _e, _f, _g
-    const type = mode.type;
+    var _a, _b, _c, _d, _e, _f, _g;
     const warnings = [];
     if (topK != null) {
       warnings.push({
@@ -456,6 +465,7 @@ var OpenAIChatLanguageModel = class {
       top_p: topP,
       frequency_penalty: frequencyPenalty,
       presence_penalty: presencePenalty,
+      // TODO improve below:
       response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? this.supportsStructuredOutputs && responseFormat.schema != null ? {
         type: "json_schema",
         json_schema: {
@@ -469,11 +479,11 @@ var OpenAIChatLanguageModel = class {
       seed,
       // openai specific settings:
       // TODO remove in next major version; we auto-map maxTokens now
-      max_completion_tokens: (_b =
-      store: (_c =
-      metadata: (_d =
-      prediction: (_e =
-      reasoning_effort: (_g = (_f =
+      max_completion_tokens: (_b = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _b.maxCompletionTokens,
+      store: (_c = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _c.store,
+      metadata: (_d = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _d.metadata,
+      prediction: (_e = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _e.prediction,
+      reasoning_effort: (_g = (_f = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _f.reasoningEffort) != null ? _g : this.settings.reasoningEffort,
       // messages:
       messages
     };
@@ -538,81 +548,28 @@ var OpenAIChatLanguageModel = class {
       baseArgs.max_tokens = void 0;
     }
   }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-          response_format: this.supportsStructuredOutputs && mode.schema != null ? {
-            type: "json_schema",
-            json_schema: {
-              schema: mode.schema,
-              strict: true,
-              name: (_h = mode.name) != null ? _h : "response",
-              description: mode.description
-            }
-          } : { type: "json_object" }
-        },
-        warnings
-      };
-    }
-    case "object-tool": {
-      return {
-        args: useLegacyFunctionCalling ? {
-          ...baseArgs,
-          function_call: {
-            name: mode.tool.name
-          },
-          functions: [
-            {
-              name: mode.tool.name,
-              description: mode.tool.description,
-              parameters: mode.tool.parameters
-            }
-          ]
-        } : {
-          ...baseArgs,
-          tool_choice: {
-            type: "function",
-            function: { name: mode.tool.name }
-          },
-          tools: [
-            {
-              type: "function",
-              function: {
-                name: mode.tool.name,
-                description: mode.tool.description,
-                parameters: mode.tool.parameters,
-                strict: this.supportsStructuredOutputs ? true : void 0
-              }
-            }
-          ]
-        },
-        warnings
-      };
-    }
-    default: {
-      const _exhaustiveCheck = type;
-      throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
-    }
-    }
+    const {
+      tools: openaiTools,
+      toolChoice: openaiToolChoice,
+      functions,
+      function_call,
+      toolWarnings
+    } = prepareTools({
+      tools,
+      toolChoice,
+      useLegacyFunctionCalling,
+      structuredOutputs: this.supportsStructuredOutputs
+    });
+    return {
+      args: {
+        ...baseArgs,
+        tools: openaiTools,
+        tool_choice: openaiToolChoice,
+        functions,
+        function_call
+      },
+      warnings: [...warnings, ...toolWarnings]
+    };
   }
   async doGenerate(options) {
     var _a, _b, _c, _d, _e, _f, _g, _h;
@@ -621,15 +578,15 @@ var OpenAIChatLanguageModel = class {
|
|
|
621
578
|
responseHeaders,
|
|
622
579
|
value: response,
|
|
623
580
|
rawValue: rawResponse
|
|
624
|
-
} = await (0,
|
|
581
|
+
} = await (0, import_provider_utils2.postJsonToApi)({
|
|
625
582
|
url: this.config.url({
|
|
626
583
|
path: "/chat/completions",
|
|
627
584
|
modelId: this.modelId
|
|
628
585
|
}),
|
|
629
|
-
headers: (0,
|
|
586
|
+
headers: (0, import_provider_utils2.combineHeaders)(this.config.headers(), options.headers),
|
|
630
587
|
body,
|
|
631
588
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
632
|
-
successfulResponseHandler: (0,
|
|
589
|
+
successfulResponseHandler: (0, import_provider_utils2.createJsonResponseHandler)(
|
|
633
590
|
openaiChatResponseSchema
|
|
634
591
|
),
|
|
635
592
|
abortSignal: options.abortSignal,
|
|
@@ -657,7 +614,7 @@ var OpenAIChatLanguageModel = class {
|
|
|
657
614
|
toolCalls: this.settings.useLegacyFunctionCalling && choice.message.function_call ? [
|
|
658
615
|
{
|
|
659
616
|
toolCallType: "function",
|
|
660
|
-
toolCallId: (0,
|
|
617
|
+
toolCallId: (0, import_provider_utils2.generateId)(),
|
|
661
618
|
toolName: choice.message.function_call.name,
|
|
662
619
|
args: choice.message.function_call.arguments
|
|
663
620
|
}
|
|
@@ -665,7 +622,7 @@ var OpenAIChatLanguageModel = class {
|
|
|
665
622
|
var _a2;
|
|
666
623
|
return {
|
|
667
624
|
toolCallType: "function",
|
|
668
|
-
toolCallId: (_a2 = toolCall.id) != null ? _a2 : (0,
|
|
625
|
+
toolCallId: (_a2 = toolCall.id) != null ? _a2 : (0, import_provider_utils2.generateId)(),
|
|
669
626
|
toolName: toolCall.function.name,
|
|
670
627
|
args: toolCall.function.arguments
|
|
671
628
|
};
|
|
@@ -735,15 +692,15 @@ var OpenAIChatLanguageModel = class {
|
|
|
735
692
|
// only include stream_options when in strict compatibility mode:
|
|
736
693
|
stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
|
|
737
694
|
};
|
|
738
|
-
const { responseHeaders, value: response } = await (0,
|
|
695
|
+
const { responseHeaders, value: response } = await (0, import_provider_utils2.postJsonToApi)({
|
|
739
696
|
url: this.config.url({
|
|
740
697
|
path: "/chat/completions",
|
|
741
698
|
modelId: this.modelId
|
|
742
699
|
}),
|
|
743
|
-
headers: (0,
|
|
700
|
+
headers: (0, import_provider_utils2.combineHeaders)(this.config.headers(), options.headers),
|
|
744
701
|
body,
|
|
745
702
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
746
|
-
successfulResponseHandler: (0,
|
|
703
|
+
successfulResponseHandler: (0, import_provider_utils2.createEventSourceResponseHandler)(
|
|
747
704
|
openaiChatChunkSchema
|
|
748
705
|
),
|
|
749
706
|
abortSignal: options.abortSignal,
|
|
@@ -831,7 +788,7 @@ var OpenAIChatLanguageModel = class {
|
|
|
831
788
|
const mappedToolCalls = useLegacyFunctionCalling && delta.function_call != null ? [
|
|
832
789
|
{
|
|
833
790
|
type: "function",
|
|
834
|
-
id: (0,
|
|
791
|
+
id: (0, import_provider_utils2.generateId)(),
|
|
835
792
|
function: delta.function_call,
|
|
836
793
|
index: 0
|
|
837
794
|
}
|
|
@@ -878,11 +835,11 @@ var OpenAIChatLanguageModel = class {
|
|
|
878
835
|
argsTextDelta: toolCall2.function.arguments
|
|
879
836
|
});
|
|
880
837
|
}
|
|
881
|
-
if ((0,
|
|
838
|
+
if ((0, import_provider_utils2.isParsableJson)(toolCall2.function.arguments)) {
|
|
882
839
|
controller.enqueue({
|
|
883
840
|
type: "tool-call",
|
|
884
841
|
toolCallType: "function",
|
|
885
|
-
toolCallId: (_e = toolCall2.id) != null ? _e : (0,
|
|
842
|
+
toolCallId: (_e = toolCall2.id) != null ? _e : (0, import_provider_utils2.generateId)(),
|
|
886
843
|
toolName: toolCall2.function.name,
|
|
887
844
|
args: toolCall2.function.arguments
|
|
888
845
|
});
|
|
@@ -905,11 +862,11 @@ var OpenAIChatLanguageModel = class {
|
|
|
905
862
|
toolName: toolCall.function.name,
|
|
906
863
|
argsTextDelta: (_i = toolCallDelta.function.arguments) != null ? _i : ""
|
|
907
864
|
});
|
|
908
|
-
if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && (0,
|
|
865
|
+
if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && (0, import_provider_utils2.isParsableJson)(toolCall.function.arguments)) {
|
|
909
866
|
controller.enqueue({
|
|
910
867
|
type: "tool-call",
|
|
911
868
|
toolCallType: "function",
|
|
912
|
-
toolCallId: (_l = toolCall.id) != null ? _l : (0,
|
|
869
|
+
toolCallId: (_l = toolCall.id) != null ? _l : (0, import_provider_utils2.generateId)(),
|
|
913
870
|
toolName: toolCall.function.name,
|
|
914
871
|
args: toolCall.function.arguments
|
|
915
872
|
});
|
|
@@ -1079,8 +1036,7 @@ var reasoningModels = {
|
|
|
1079
1036
|
};
|
|
1080
1037
|
|
|
1081
1038
|
// src/openai-completion-language-model.ts
|
|
1082
|
-
var
|
|
1083
|
-
var import_provider_utils4 = require("@ai-sdk/provider-utils");
|
|
1039
|
+
var import_provider_utils3 = require("@ai-sdk/provider-utils");
|
|
1084
1040
|
var import_zod3 = require("zod");
|
|
1085
1041
|
|
|
1086
1042
|
// src/convert-to-openai-completion-prompt.ts
|
|
@@ -1115,13 +1071,8 @@ function convertToOpenAICompletionPrompt({
|
|
|
1115
1071
|
case "text": {
|
|
1116
1072
|
return part.text;
|
|
1117
1073
|
}
|
|
1118
|
-
case "image": {
|
|
1119
|
-
throw new import_provider4.UnsupportedFunctionalityError({
|
|
1120
|
-
functionality: "images"
|
|
1121
|
-
});
|
|
1122
|
-
}
|
|
1123
1074
|
}
|
|
1124
|
-
}).join("");
|
|
1075
|
+
}).filter(Boolean).join("");
|
|
1125
1076
|
text += `${user}:
|
|
1126
1077
|
${userMessage}
|
|
1127
1078
|
|
|
@@ -1194,7 +1145,6 @@ var OpenAICompletionLanguageModel = class {
|
|
|
1194
1145
|
return this.config.provider;
|
|
1195
1146
|
}
|
|
1196
1147
|
getArgs({
|
|
1197
|
-
mode,
|
|
1198
1148
|
inputFormat,
|
|
1199
1149
|
prompt,
|
|
1200
1150
|
maxTokens,
|
|
@@ -1205,16 +1155,19 @@ var OpenAICompletionLanguageModel = class {
|
|
|
1205
1155
|
presencePenalty,
|
|
1206
1156
|
stopSequences: userStopSequences,
|
|
1207
1157
|
responseFormat,
|
|
1158
|
+
tools,
|
|
1159
|
+
toolChoice,
|
|
1208
1160
|
seed
|
|
1209
1161
|
}) {
|
|
1210
|
-
var _a;
|
|
1211
|
-
const type = mode.type;
|
|
1212
1162
|
const warnings = [];
|
|
1213
1163
|
if (topK != null) {
|
|
1214
|
-
warnings.push({
|
|
1215
|
-
|
|
1216
|
-
|
|
1217
|
-
});
|
|
1164
|
+
warnings.push({ type: "unsupported-setting", setting: "topK" });
|
|
1165
|
+
}
|
|
1166
|
+
if (tools == null ? void 0 : tools.length) {
|
|
1167
|
+
warnings.push({ type: "unsupported-setting", setting: "tools" });
|
|
1168
|
+
}
|
|
1169
|
+
if (toolChoice != null) {
|
|
1170
|
+
warnings.push({ type: "unsupported-setting", setting: "toolChoice" });
|
|
1218
1171
|
}
|
|
1219
1172
|
if (responseFormat != null && responseFormat.type !== "text") {
|
|
1220
1173
|
warnings.push({
|
|
@@ -1225,56 +1178,30 @@ var OpenAICompletionLanguageModel = class {
|
|
|
1225
1178
|
}
|
|
1226
1179
|
const { prompt: completionPrompt, stopSequences } = convertToOpenAICompletionPrompt({ prompt, inputFormat });
|
|
1227
1180
|
const stop = [...stopSequences != null ? stopSequences : [], ...userStopSequences != null ? userStopSequences : []];
|
|
1228
|
-
|
|
1229
|
-
|
|
1230
|
-
|
|
1231
|
-
|
|
1232
|
-
|
|
1233
|
-
|
|
1234
|
-
|
|
1235
|
-
|
|
1236
|
-
|
|
1237
|
-
|
|
1238
|
-
|
|
1239
|
-
|
|
1240
|
-
|
|
1241
|
-
|
|
1242
|
-
|
|
1243
|
-
|
|
1244
|
-
|
|
1245
|
-
|
|
1246
|
-
|
|
1247
|
-
|
|
1181
|
+
return {
|
|
1182
|
+
args: {
|
|
1183
|
+
// model id:
|
|
1184
|
+
model: this.modelId,
|
|
1185
|
+
// model specific settings:
|
|
1186
|
+
echo: this.settings.echo,
|
|
1187
|
+
logit_bias: this.settings.logitBias,
|
|
1188
|
+
logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
|
|
1189
|
+
suffix: this.settings.suffix,
|
|
1190
|
+
user: this.settings.user,
|
|
1191
|
+
// standardized settings:
|
|
1192
|
+
max_tokens: maxTokens,
|
|
1193
|
+
temperature,
|
|
1194
|
+
top_p: topP,
|
|
1195
|
+
frequency_penalty: frequencyPenalty,
|
|
1196
|
+
presence_penalty: presencePenalty,
|
|
1197
|
+
seed,
|
|
1198
|
+
// prompt:
|
|
1199
|
+
prompt: completionPrompt,
|
|
1200
|
+
// stop sequences:
|
|
1201
|
+
stop: stop.length > 0 ? stop : void 0
|
|
1202
|
+
},
|
|
1203
|
+
warnings
|
|
1248
1204
|
};
|
|
1249
|
-
switch (type) {
|
|
1250
|
-
case "regular": {
|
|
1251
|
-
if ((_a = mode.tools) == null ? void 0 : _a.length) {
|
|
1252
|
-
throw new import_provider5.UnsupportedFunctionalityError({
|
|
1253
|
-
functionality: "tools"
|
|
1254
|
-
});
|
|
1255
|
-
}
|
|
1256
|
-
if (mode.toolChoice) {
|
|
1257
|
-
throw new import_provider5.UnsupportedFunctionalityError({
|
|
1258
|
-
functionality: "toolChoice"
|
|
1259
|
-
});
|
|
1260
|
-
}
|
|
1261
|
-
return { args: baseArgs, warnings };
|
|
1262
|
-
}
|
|
1263
|
-
case "object-json": {
|
|
1264
|
-
throw new import_provider5.UnsupportedFunctionalityError({
|
|
1265
|
-
functionality: "object-json mode"
|
|
1266
|
-
});
|
|
1267
|
-
}
|
|
1268
|
-
case "object-tool": {
|
|
1269
|
-
throw new import_provider5.UnsupportedFunctionalityError({
|
|
1270
|
-
functionality: "object-tool mode"
|
|
1271
|
-
});
|
|
1272
|
-
}
|
|
1273
|
-
default: {
|
|
1274
|
-
const _exhaustiveCheck = type;
|
|
1275
|
-
throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
|
|
1276
|
-
}
|
|
1277
|
-
}
|
|
1278
1205
|
}
|
|
1279
1206
|
async doGenerate(options) {
|
|
1280
1207
|
const { args, warnings } = this.getArgs(options);
|
|
@@ -1282,15 +1209,15 @@ var OpenAICompletionLanguageModel = class {
|
|
|
1282
1209
|
responseHeaders,
|
|
1283
1210
|
value: response,
|
|
1284
1211
|
rawValue: rawResponse
|
|
1285
|
-
} = await (0,
|
|
1212
|
+
} = await (0, import_provider_utils3.postJsonToApi)({
|
|
1286
1213
|
url: this.config.url({
|
|
1287
1214
|
path: "/completions",
|
|
1288
1215
|
modelId: this.modelId
|
|
1289
1216
|
}),
|
|
1290
|
-
headers: (0,
|
|
1217
|
+
headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
|
|
1291
1218
|
body: args,
|
|
1292
1219
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
1293
|
-
successfulResponseHandler: (0,
|
|
1220
|
+
successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
|
|
1294
1221
|
openaiCompletionResponseSchema
|
|
1295
1222
|
),
|
|
1296
1223
|
abortSignal: options.abortSignal,
|
|
@@ -1321,15 +1248,15 @@ var OpenAICompletionLanguageModel = class {
|
|
|
1321
1248
|
// only include stream_options when in strict compatibility mode:
|
|
1322
1249
|
stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
|
|
1323
1250
|
};
|
|
1324
|
-
const { responseHeaders, value: response } = await (0,
|
|
1251
|
+
const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
|
|
1325
1252
|
url: this.config.url({
|
|
1326
1253
|
path: "/completions",
|
|
1327
1254
|
modelId: this.modelId
|
|
1328
1255
|
}),
|
|
1329
|
-
headers: (0,
|
|
1256
|
+
headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
|
|
1330
1257
|
body,
|
|
1331
1258
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
1332
|
-
successfulResponseHandler: (0,
|
|
1259
|
+
successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(
|
|
1333
1260
|
openaiCompletionChunkSchema
|
|
1334
1261
|
),
|
|
1335
1262
|
abortSignal: options.abortSignal,
|
|
@@ -1452,8 +1379,8 @@ var openaiCompletionChunkSchema = import_zod3.z.union([
|
|
|
1452
1379
|
]);
|
|
1453
1380
|
|
|
1454
1381
|
// src/openai-embedding-model.ts
|
|
1455
|
-
var
|
|
1456
|
-
var
|
|
1382
|
+
var import_provider5 = require("@ai-sdk/provider");
|
|
1383
|
+
var import_provider_utils4 = require("@ai-sdk/provider-utils");
|
|
1457
1384
|
var import_zod4 = require("zod");
|
|
1458
1385
|
var OpenAIEmbeddingModel = class {
|
|
1459
1386
|
constructor(modelId, settings, config) {
|
|
@@ -1479,19 +1406,19 @@ var OpenAIEmbeddingModel = class {
|
|
|
1479
1406
|
abortSignal
|
|
1480
1407
|
}) {
|
|
1481
1408
|
if (values.length > this.maxEmbeddingsPerCall) {
|
|
1482
|
-
throw new
|
|
1409
|
+
throw new import_provider5.TooManyEmbeddingValuesForCallError({
|
|
1483
1410
|
provider: this.provider,
|
|
1484
1411
|
modelId: this.modelId,
|
|
1485
1412
|
maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,
|
|
1486
1413
|
values
|
|
1487
1414
|
});
|
|
1488
1415
|
}
|
|
1489
|
-
const { responseHeaders, value: response } = await (0,
|
|
1416
|
+
const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
|
|
1490
1417
|
url: this.config.url({
|
|
1491
1418
|
path: "/embeddings",
|
|
1492
1419
|
modelId: this.modelId
|
|
1493
1420
|
}),
|
|
1494
|
-
headers: (0,
|
|
1421
|
+
headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), headers),
|
|
1495
1422
|
body: {
|
|
1496
1423
|
model: this.modelId,
|
|
1497
1424
|
input: values,
|
|
@@ -1500,7 +1427,7 @@ var OpenAIEmbeddingModel = class {
|
|
|
1500
1427
|
user: this.settings.user
|
|
1501
1428
|
},
|
|
1502
1429
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
1503
|
-
successfulResponseHandler: (0,
|
|
1430
|
+
successfulResponseHandler: (0, import_provider_utils4.createJsonResponseHandler)(
|
|
1504
1431
|
openaiTextEmbeddingResponseSchema
|
|
1505
1432
|
),
|
|
1506
1433
|
abortSignal,
|
|
@@ -1519,7 +1446,7 @@ var openaiTextEmbeddingResponseSchema = import_zod4.z.object({
|
|
|
1519
1446
|
});
|
|
1520
1447
|
|
|
1521
1448
|
// src/openai-image-model.ts
|
|
1522
|
-
var
|
|
1449
|
+
var import_provider_utils5 = require("@ai-sdk/provider-utils");
|
|
1523
1450
|
var import_zod5 = require("zod");
|
|
1524
1451
|
|
|
1525
1452
|
// src/openai-image-settings.ts
|
|
@@ -1566,12 +1493,12 @@ var OpenAIImageModel = class {
|
|
|
1566
1493
|
warnings.push({ type: "unsupported-setting", setting: "seed" });
|
|
1567
1494
|
}
|
|
1568
1495
|
const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
|
|
1569
|
-
const { value: response, responseHeaders } = await (0,
|
|
1496
|
+
const { value: response, responseHeaders } = await (0, import_provider_utils5.postJsonToApi)({
|
|
1570
1497
|
url: this.config.url({
|
|
1571
1498
|
path: "/images/generations",
|
|
1572
1499
|
modelId: this.modelId
|
|
1573
1500
|
}),
|
|
1574
|
-
headers: (0,
|
|
1501
|
+
headers: (0, import_provider_utils5.combineHeaders)(this.config.headers(), headers),
|
|
1575
1502
|
body: {
|
|
1576
1503
|
model: this.modelId,
|
|
1577
1504
|
prompt,
|
|
@@ -1581,7 +1508,7 @@ var OpenAIImageModel = class {
|
|
|
1581
1508
|
response_format: "b64_json"
|
|
1582
1509
|
},
|
|
1583
1510
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
1584
|
-
successfulResponseHandler: (0,
|
|
1511
|
+
successfulResponseHandler: (0, import_provider_utils5.createJsonResponseHandler)(
|
|
1585
1512
|
openaiImageResponseSchema
|
|
1586
1513
|
),
|
|
1587
1514
|
abortSignal,
|
|
@@ -1602,13 +1529,186 @@ var openaiImageResponseSchema = import_zod5.z.object({
   data: import_zod5.z.array(import_zod5.z.object({ b64_json: import_zod5.z.string() }))
 });

-// src/
-var
+// src/openai-transcription-model.ts
+var import_provider_utils6 = require("@ai-sdk/provider-utils");
 var import_zod6 = require("zod");
+var OpenAIProviderOptionsSchema = import_zod6.z.object({
+  include: import_zod6.z.array(import_zod6.z.string()).optional().describe(
+    "Additional information to include in the transcription response."
+  ),
+  language: import_zod6.z.string().optional().describe("The language of the input audio in ISO-639-1 format."),
+  prompt: import_zod6.z.string().optional().describe(
+    "An optional text to guide the model's style or continue a previous audio segment."
+  ),
+  temperature: import_zod6.z.number().min(0).max(1).optional().default(0).describe("The sampling temperature, between 0 and 1."),
+  timestampGranularities: import_zod6.z.array(import_zod6.z.enum(["word", "segment"])).optional().default(["segment"]).describe(
+    "The timestamp granularities to populate for this transcription."
+  )
+});
+var languageMap = {
+  afrikaans: "af",
+  arabic: "ar",
+  armenian: "hy",
+  azerbaijani: "az",
+  belarusian: "be",
+  bosnian: "bs",
+  bulgarian: "bg",
+  catalan: "ca",
+  chinese: "zh",
+  croatian: "hr",
+  czech: "cs",
+  danish: "da",
+  dutch: "nl",
+  english: "en",
+  estonian: "et",
+  finnish: "fi",
+  french: "fr",
+  galician: "gl",
+  german: "de",
+  greek: "el",
+  hebrew: "he",
+  hindi: "hi",
+  hungarian: "hu",
+  icelandic: "is",
+  indonesian: "id",
+  italian: "it",
+  japanese: "ja",
+  kannada: "kn",
+  kazakh: "kk",
+  korean: "ko",
+  latvian: "lv",
+  lithuanian: "lt",
+  macedonian: "mk",
+  malay: "ms",
+  marathi: "mr",
+  maori: "mi",
+  nepali: "ne",
+  norwegian: "no",
+  persian: "fa",
+  polish: "pl",
+  portuguese: "pt",
+  romanian: "ro",
+  russian: "ru",
+  serbian: "sr",
+  slovak: "sk",
+  slovenian: "sl",
+  spanish: "es",
+  swahili: "sw",
+  swedish: "sv",
+  tagalog: "tl",
+  tamil: "ta",
+  thai: "th",
+  turkish: "tr",
+  ukrainian: "uk",
+  urdu: "ur",
+  vietnamese: "vi",
+  welsh: "cy"
+};
+var OpenAITranscriptionModel = class {
+  constructor(modelId, config) {
+    this.modelId = modelId;
+    this.config = config;
+    this.specificationVersion = "v1";
+  }
+  get provider() {
+    return this.config.provider;
+  }
+  getArgs({
+    audio,
+    mediaType,
+    providerOptions
+  }) {
+    const warnings = [];
+    const openAIOptions = (0, import_provider_utils6.parseProviderOptions)({
+      provider: "openai",
+      providerOptions,
+      schema: OpenAIProviderOptionsSchema
+    });
+    const formData = new FormData();
+    const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([(0, import_provider_utils6.convertBase64ToUint8Array)(audio)]);
+    formData.append("model", this.modelId);
+    formData.append("file", new File([blob], "audio", { type: mediaType }));
+    if (openAIOptions) {
+      const transcriptionModelOptions = {
+        include: openAIOptions.include,
+        language: openAIOptions.language,
+        prompt: openAIOptions.prompt,
+        temperature: openAIOptions.temperature,
+        timestamp_granularities: openAIOptions.timestampGranularities
+      };
+      for (const key in transcriptionModelOptions) {
+        const value = transcriptionModelOptions[key];
+        if (value !== void 0) {
+          formData.append(key, value);
+        }
+      }
+    }
+    return {
+      formData,
+      warnings
+    };
+  }
+  async doGenerate(options) {
+    var _a, _b, _c, _d, _e, _f;
+    const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
+    const { formData, warnings } = this.getArgs(options);
+    const {
+      value: response,
+      responseHeaders,
+      rawValue: rawResponse
+    } = await (0, import_provider_utils6.postFormDataToApi)({
+      url: this.config.url({
+        path: "/audio/transcriptions",
+        modelId: this.modelId
+      }),
+      headers: (0, import_provider_utils6.combineHeaders)(this.config.headers(), options.headers),
+      formData,
+      failedResponseHandler: openaiFailedResponseHandler,
+      successfulResponseHandler: (0, import_provider_utils6.createJsonResponseHandler)(
+        openaiTranscriptionResponseSchema
+      ),
+      abortSignal: options.abortSignal,
+      fetch: this.config.fetch
+    });
+    const language = response.language != null && response.language in languageMap ? languageMap[response.language] : void 0;
+    return {
+      text: response.text,
+      segments: (_e = (_d = response.words) == null ? void 0 : _d.map((word) => ({
+        text: word.word,
+        startSecond: word.start,
+        endSecond: word.end
+      }))) != null ? _e : [],
+      language,
+      durationInSeconds: (_f = response.duration) != null ? _f : void 0,
+      warnings,
+      response: {
+        timestamp: currentDate,
+        modelId: this.modelId,
+        headers: responseHeaders,
+        body: rawResponse
+      }
+    };
+  }
+};
+var openaiTranscriptionResponseSchema = import_zod6.z.object({
+  text: import_zod6.z.string(),
+  language: import_zod6.z.string().nullish(),
+  duration: import_zod6.z.number().nullish(),
+  words: import_zod6.z.array(
+    import_zod6.z.object({
+      word: import_zod6.z.string(),
+      start: import_zod6.z.number(),
+      end: import_zod6.z.number()
+    })
+  ).nullish()
+});

-// src/responses/
-var import_provider7 = require("@ai-sdk/provider");
+// src/responses/openai-responses-language-model.ts
 var import_provider_utils7 = require("@ai-sdk/provider-utils");
+var import_zod7 = require("zod");
+
+// src/responses/convert-to-openai-responses-messages.ts
+var import_provider6 = require("@ai-sdk/provider");
 function convertToOpenAIResponsesMessages({
   prompt,
   systemMessageMode
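The new transcription model added above posts multipart form data to `/audio/transcriptions`. A rough usage sketch against the internal class, in TypeScript (the config object, model id, and audio variable are illustrative, not taken from the package; the import path assumes the internal entry point re-exports the class, as the export list at the top of this file suggests):

// Hypothetical sketch: constructing the internal transcription model directly.
import { OpenAITranscriptionModel } from "@ai-sdk/openai/internal"; // assumed entry point

const model = new OpenAITranscriptionModel("whisper-1" /* illustrative model id */, {
  provider: "openai.transcription",
  url: ({ path }) => `https://api.openai.com/v1${path}`,
  headers: () => ({ Authorization: `Bearer ${process.env.OPENAI_API_KEY}` }),
});

const audioUint8Array = new Uint8Array(); // placeholder audio bytes
const result = await model.doGenerate({
  audio: audioUint8Array, // Uint8Array or base64 string, per getArgs above
  mediaType: "audio/wav",
  providerOptions: { openai: { timestampGranularities: ["word"] } },
});
// Per the doGenerate hunk, the result carries text, segments, language,
// durationInSeconds, warnings, and response metadata.
console.log(result.text, result.segments, result.durationInSeconds);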
@@ -1647,38 +1747,35 @@ function convertToOpenAIResponsesMessages({
|
|
|
1647
1747
|
messages.push({
|
|
1648
1748
|
role: "user",
|
|
1649
1749
|
content: content.map((part, index) => {
|
|
1650
|
-
var _a, _b, _c
|
|
1750
|
+
var _a, _b, _c;
|
|
1651
1751
|
switch (part.type) {
|
|
1652
1752
|
case "text": {
|
|
1653
1753
|
return { type: "input_text", text: part.text };
|
|
1654
1754
|
}
|
|
1655
|
-
case "image": {
|
|
1656
|
-
return {
|
|
1657
|
-
type: "input_image",
|
|
1658
|
-
image_url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${(0, import_provider_utils7.convertUint8ArrayToBase64)(part.image)}`,
|
|
1659
|
-
// OpenAI specific extension: image detail
|
|
1660
|
-
detail: (_c = (_b = part.providerMetadata) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
|
|
1661
|
-
};
|
|
1662
|
-
}
|
|
1663
1755
|
case "file": {
|
|
1664
|
-
if (part.
|
|
1665
|
-
|
|
1666
|
-
|
|
1667
|
-
|
|
1668
|
-
|
|
1669
|
-
|
|
1670
|
-
|
|
1671
|
-
|
|
1672
|
-
|
|
1673
|
-
|
|
1674
|
-
|
|
1675
|
-
|
|
1676
|
-
}
|
|
1677
|
-
default: {
|
|
1678
|
-
throw new import_provider7.UnsupportedFunctionalityError({
|
|
1679
|
-
functionality: "Only PDF files are supported in user messages"
|
|
1756
|
+
if (part.mediaType.startsWith("image/")) {
|
|
1757
|
+
const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
|
|
1758
|
+
return {
|
|
1759
|
+
type: "input_image",
|
|
1760
|
+
image_url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${part.data}`,
|
|
1761
|
+
// OpenAI specific extension: image detail
|
|
1762
|
+
detail: (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.imageDetail
|
|
1763
|
+
};
|
|
1764
|
+
} else if (part.mediaType === "application/pdf") {
|
|
1765
|
+
if (part.data instanceof URL) {
|
|
1766
|
+
throw new import_provider6.UnsupportedFunctionalityError({
|
|
1767
|
+
functionality: "PDF file parts with URLs"
|
|
1680
1768
|
});
|
|
1681
1769
|
}
|
|
1770
|
+
return {
|
|
1771
|
+
type: "input_file",
|
|
1772
|
+
filename: (_c = part.filename) != null ? _c : `part-${index}.pdf`,
|
|
1773
|
+
file_data: `data:application/pdf;base64,${part.data}`
|
|
1774
|
+
};
|
|
1775
|
+
} else {
|
|
1776
|
+
throw new import_provider6.UnsupportedFunctionalityError({
|
|
1777
|
+
functionality: `file part media type ${part.mediaType}`
|
|
1778
|
+
});
|
|
1682
1779
|
}
|
|
1683
1780
|
}
|
|
1684
1781
|
}
|
|
@@ -1747,18 +1844,17 @@ function mapOpenAIResponseFinishReason({
|
|
|
1747
1844
|
}
|
|
1748
1845
|
|
|
1749
1846
|
// src/responses/openai-responses-prepare-tools.ts
|
|
1750
|
-
var
|
|
1847
|
+
var import_provider7 = require("@ai-sdk/provider");
|
|
1751
1848
|
function prepareResponsesTools({
|
|
1752
|
-
|
|
1849
|
+
tools,
|
|
1850
|
+
toolChoice,
|
|
1753
1851
|
strict
|
|
1754
1852
|
}) {
|
|
1755
|
-
|
|
1756
|
-
const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
|
|
1853
|
+
tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
|
|
1757
1854
|
const toolWarnings = [];
|
|
1758
1855
|
if (tools == null) {
|
|
1759
|
-
return { tools: void 0,
|
|
1856
|
+
return { tools: void 0, toolChoice: void 0, toolWarnings };
|
|
1760
1857
|
}
|
|
1761
|
-
const toolChoice = mode.toolChoice;
|
|
1762
1858
|
const openaiTools = [];
|
|
1763
1859
|
for (const tool of tools) {
|
|
1764
1860
|
switch (tool.type) {
|
|
@@ -1791,37 +1887,24 @@ function prepareResponsesTools({
|
|
|
1791
1887
|
}
|
|
1792
1888
|
}
|
|
1793
1889
|
if (toolChoice == null) {
|
|
1794
|
-
return { tools: openaiTools,
|
|
1890
|
+
return { tools: openaiTools, toolChoice: void 0, toolWarnings };
|
|
1795
1891
|
}
|
|
1796
1892
|
const type = toolChoice.type;
|
|
1797
1893
|
switch (type) {
|
|
1798
1894
|
case "auto":
|
|
1799
1895
|
case "none":
|
|
1800
1896
|
case "required":
|
|
1801
|
-
return { tools: openaiTools,
|
|
1802
|
-
case "tool":
|
|
1803
|
-
if (toolChoice.toolName === "web_search_preview") {
|
|
1804
|
-
return {
|
|
1805
|
-
tools: openaiTools,
|
|
1806
|
-
tool_choice: {
|
|
1807
|
-
type: "web_search_preview"
|
|
1808
|
-
},
|
|
1809
|
-
toolWarnings
|
|
1810
|
-
};
|
|
1811
|
-
}
|
|
1897
|
+
return { tools: openaiTools, toolChoice: type, toolWarnings };
|
|
1898
|
+
case "tool":
|
|
1812
1899
|
return {
|
|
1813
1900
|
tools: openaiTools,
|
|
1814
|
-
|
|
1815
|
-
type: "function",
|
|
1816
|
-
name: toolChoice.toolName
|
|
1817
|
-
},
|
|
1901
|
+
toolChoice: toolChoice.toolName === "web_search_preview" ? { type: "web_search_preview" } : { type: "function", name: toolChoice.toolName },
|
|
1818
1902
|
toolWarnings
|
|
1819
1903
|
};
|
|
1820
|
-
}
|
|
1821
1904
|
default: {
|
|
1822
1905
|
const _exhaustiveCheck = type;
|
|
1823
|
-
throw new
|
|
1824
|
-
functionality: `
|
|
1906
|
+
throw new import_provider7.UnsupportedFunctionalityError({
|
|
1907
|
+
functionality: `tool choice type: ${_exhaustiveCheck}`
|
|
1825
1908
|
});
|
|
1826
1909
|
}
|
|
1827
1910
|
}
|
|
@@ -1839,7 +1922,6 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
1839
1922
|
return this.config.provider;
|
|
1840
1923
|
}
|
|
1841
1924
|
getArgs({
|
|
1842
|
-
mode,
|
|
1843
1925
|
maxTokens,
|
|
1844
1926
|
temperature,
|
|
1845
1927
|
stopSequences,
|
|
@@ -1849,24 +1931,19 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
1849
1931
|
frequencyPenalty,
|
|
1850
1932
|
seed,
|
|
1851
1933
|
prompt,
|
|
1852
|
-
|
|
1934
|
+
providerOptions,
|
|
1935
|
+
tools,
|
|
1936
|
+
toolChoice,
|
|
1853
1937
|
responseFormat
|
|
1854
1938
|
}) {
|
|
1855
|
-
var _a, _b
|
|
1939
|
+
var _a, _b;
|
|
1856
1940
|
const warnings = [];
|
|
1857
1941
|
const modelConfig = getResponsesModelConfig(this.modelId);
|
|
1858
|
-
const type = mode.type;
|
|
1859
1942
|
if (topK != null) {
|
|
1860
|
-
warnings.push({
|
|
1861
|
-
type: "unsupported-setting",
|
|
1862
|
-
setting: "topK"
|
|
1863
|
-
});
|
|
1943
|
+
warnings.push({ type: "unsupported-setting", setting: "topK" });
|
|
1864
1944
|
}
|
|
1865
1945
|
if (seed != null) {
|
|
1866
|
-
warnings.push({
|
|
1867
|
-
type: "unsupported-setting",
|
|
1868
|
-
setting: "seed"
|
|
1869
|
-
});
|
|
1946
|
+
warnings.push({ type: "unsupported-setting", setting: "seed" });
|
|
1870
1947
|
}
|
|
1871
1948
|
if (presencePenalty != null) {
|
|
1872
1949
|
warnings.push({
|
|
@@ -1881,19 +1958,16 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
1881
1958
|
});
|
|
1882
1959
|
}
|
|
1883
1960
|
if (stopSequences != null) {
|
|
1884
|
-
warnings.push({
|
|
1885
|
-
type: "unsupported-setting",
|
|
1886
|
-
setting: "stopSequences"
|
|
1887
|
-
});
|
|
1961
|
+
warnings.push({ type: "unsupported-setting", setting: "stopSequences" });
|
|
1888
1962
|
}
|
|
1889
1963
|
const { messages, warnings: messageWarnings } = convertToOpenAIResponsesMessages({
|
|
1890
1964
|
prompt,
|
|
1891
1965
|
systemMessageMode: modelConfig.systemMessageMode
|
|
1892
1966
|
});
|
|
1893
1967
|
warnings.push(...messageWarnings);
|
|
1894
|
-
const openaiOptions = (0,
|
|
1968
|
+
const openaiOptions = (0, import_provider_utils7.parseProviderOptions)({
|
|
1895
1969
|
provider: "openai",
|
|
1896
|
-
providerOptions
|
|
1970
|
+
providerOptions,
|
|
1897
1971
|
schema: openaiResponsesProviderOptionsSchema
|
|
1898
1972
|
});
|
|
1899
1973
|
const isStrict = (_a = openaiOptions == null ? void 0 : openaiOptions.strictSchemas) != null ? _a : true;
|
|
@@ -1947,62 +2021,23 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
1947
2021
|
});
|
|
1948
2022
|
}
|
|
1949
2023
|
}
|
|
1950
|
-
|
|
1951
|
-
|
|
1952
|
-
|
|
1953
|
-
|
|
1954
|
-
|
|
1955
|
-
|
|
1956
|
-
|
|
1957
|
-
|
|
1958
|
-
|
|
1959
|
-
|
|
1960
|
-
|
|
1961
|
-
|
|
1962
|
-
|
|
1963
|
-
|
|
1964
|
-
|
|
1965
|
-
|
|
1966
|
-
|
|
1967
|
-
return {
|
|
1968
|
-
args: {
|
|
1969
|
-
...baseArgs,
|
|
1970
|
-
text: {
|
|
1971
|
-
format: mode.schema != null ? {
|
|
1972
|
-
type: "json_schema",
|
|
1973
|
-
strict: isStrict,
|
|
1974
|
-
name: (_c = mode.name) != null ? _c : "response",
|
|
1975
|
-
description: mode.description,
|
|
1976
|
-
schema: mode.schema
|
|
1977
|
-
} : { type: "json_object" }
|
|
1978
|
-
}
|
|
1979
|
-
},
|
|
1980
|
-
warnings
|
|
1981
|
-
};
|
|
1982
|
-
}
|
|
1983
|
-
case "object-tool": {
|
|
1984
|
-
return {
|
|
1985
|
-
args: {
|
|
1986
|
-
...baseArgs,
|
|
1987
|
-
tool_choice: { type: "function", name: mode.tool.name },
|
|
1988
|
-
tools: [
|
|
1989
|
-
{
|
|
1990
|
-
type: "function",
|
|
1991
|
-
name: mode.tool.name,
|
|
1992
|
-
description: mode.tool.description,
|
|
1993
|
-
parameters: mode.tool.parameters,
|
|
1994
|
-
strict: isStrict
|
|
1995
|
-
}
|
|
1996
|
-
]
|
|
1997
|
-
},
|
|
1998
|
-
warnings
|
|
1999
|
-
};
|
|
2000
|
-
}
|
|
2001
|
-
default: {
|
|
2002
|
-
const _exhaustiveCheck = type;
|
|
2003
|
-
throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
|
|
2004
|
-
}
|
|
2005
|
-
}
|
|
2024
|
+
const {
|
|
2025
|
+
tools: openaiTools,
|
|
2026
|
+
toolChoice: openaiToolChoice,
|
|
2027
|
+
toolWarnings
|
|
2028
|
+
} = prepareResponsesTools({
|
|
2029
|
+
tools,
|
|
2030
|
+
toolChoice,
|
|
2031
|
+
strict: isStrict
|
|
2032
|
+
});
|
|
2033
|
+
return {
|
|
2034
|
+
args: {
|
|
2035
|
+
...baseArgs,
|
|
2036
|
+
tools: openaiTools,
|
|
2037
|
+
tool_choice: openaiToolChoice
|
|
2038
|
+
},
|
|
2039
|
+
warnings: [...warnings, ...toolWarnings]
|
|
2040
|
+
};
|
|
2006
2041
|
}
|
|
2007
2042
|
async doGenerate(options) {
|
|
2008
2043
|
var _a, _b, _c, _d, _e;
|
|
@@ -2011,58 +2046,58 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
2011
2046
|
responseHeaders,
|
|
2012
2047
|
value: response,
|
|
2013
2048
|
rawValue: rawResponse
|
|
2014
|
-
} = await (0,
|
|
2049
|
+
} = await (0, import_provider_utils7.postJsonToApi)({
|
|
2015
2050
|
url: this.config.url({
|
|
2016
2051
|
path: "/responses",
|
|
2017
2052
|
modelId: this.modelId
|
|
2018
2053
|
}),
|
|
2019
|
-
headers: (0,
|
|
2054
|
+
headers: (0, import_provider_utils7.combineHeaders)(this.config.headers(), options.headers),
|
|
2020
2055
|
body,
|
|
2021
2056
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
2022
|
-
successfulResponseHandler: (0,
|
|
2023
|
-
|
|
2024
|
-
id:
|
|
2025
|
-
created_at:
|
|
2026
|
-
model:
|
|
2027
|
-
output:
|
|
2028
|
-
|
|
2029
|
-
|
|
2030
|
-
type:
|
|
2031
|
-
role:
|
|
2032
|
-
content:
|
|
2033
|
-
|
|
2034
|
-
type:
|
|
2035
|
-
text:
|
|
2036
|
-
annotations:
|
|
2037
|
-
|
|
2038
|
-
type:
|
|
2039
|
-
start_index:
|
|
2040
|
-
end_index:
|
|
2041
|
-
url:
|
|
2042
|
-
title:
|
|
2057
|
+
successfulResponseHandler: (0, import_provider_utils7.createJsonResponseHandler)(
|
|
2058
|
+
import_zod7.z.object({
|
|
2059
|
+
id: import_zod7.z.string(),
|
|
2060
|
+
created_at: import_zod7.z.number(),
|
|
2061
|
+
model: import_zod7.z.string(),
|
|
2062
|
+
output: import_zod7.z.array(
|
|
2063
|
+
import_zod7.z.discriminatedUnion("type", [
|
|
2064
|
+
import_zod7.z.object({
|
|
2065
|
+
type: import_zod7.z.literal("message"),
|
|
2066
|
+
role: import_zod7.z.literal("assistant"),
|
|
2067
|
+
content: import_zod7.z.array(
|
|
2068
|
+
import_zod7.z.object({
|
|
2069
|
+
type: import_zod7.z.literal("output_text"),
|
|
2070
|
+
text: import_zod7.z.string(),
|
|
2071
|
+
annotations: import_zod7.z.array(
|
|
2072
|
+
import_zod7.z.object({
|
|
2073
|
+
type: import_zod7.z.literal("url_citation"),
|
|
2074
|
+
start_index: import_zod7.z.number(),
|
|
2075
|
+
end_index: import_zod7.z.number(),
|
|
2076
|
+
url: import_zod7.z.string(),
|
|
2077
|
+
title: import_zod7.z.string()
|
|
2043
2078
|
})
|
|
2044
2079
|
)
|
|
2045
2080
|
})
|
|
2046
2081
|
)
|
|
2047
2082
|
}),
|
|
2048
|
-
|
|
2049
|
-
type:
|
|
2050
|
-
call_id:
|
|
2051
|
-
name:
|
|
2052
|
-
arguments:
|
|
2083
|
+
import_zod7.z.object({
|
|
2084
|
+
type: import_zod7.z.literal("function_call"),
|
|
2085
|
+
call_id: import_zod7.z.string(),
|
|
2086
|
+
name: import_zod7.z.string(),
|
|
2087
|
+
arguments: import_zod7.z.string()
|
|
2053
2088
|
}),
|
|
2054
|
-
|
|
2055
|
-
type:
|
|
2089
|
+
import_zod7.z.object({
|
|
2090
|
+
type: import_zod7.z.literal("web_search_call")
|
|
2056
2091
|
}),
|
|
2057
|
-
|
|
2058
|
-
type:
|
|
2092
|
+
import_zod7.z.object({
|
|
2093
|
+
type: import_zod7.z.literal("computer_call")
|
|
2059
2094
|
}),
|
|
2060
|
-
|
|
2061
|
-
type:
|
|
2095
|
+
import_zod7.z.object({
|
|
2096
|
+
type: import_zod7.z.literal("reasoning")
|
|
2062
2097
|
})
|
|
2063
2098
|
])
|
|
2064
2099
|
),
|
|
2065
|
-
incomplete_details:
|
|
2100
|
+
incomplete_details: import_zod7.z.object({ reason: import_zod7.z.string() }).nullable(),
|
|
2066
2101
|
usage: usageSchema
|
|
2067
2102
|
})
|
|
2068
2103
|
),
|
|
@@ -2083,7 +2118,7 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
2083
2118
|
var _a2, _b2, _c2;
|
|
2084
2119
|
return {
|
|
2085
2120
|
sourceType: "url",
|
|
2086
|
-
id: (_c2 = (_b2 = (_a2 = this.config).generateId) == null ? void 0 : _b2.call(_a2)) != null ? _c2 : (0,
|
|
2121
|
+
id: (_c2 = (_b2 = (_a2 = this.config).generateId) == null ? void 0 : _b2.call(_a2)) != null ? _c2 : (0, import_provider_utils7.generateId)(),
|
|
2087
2122
|
url: annotation.url,
|
|
2088
2123
|
title: annotation.title
|
|
2089
2124
|
};
|
|
@@ -2126,18 +2161,18 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
2126
2161
|
}
|
|
2127
2162
|
async doStream(options) {
|
|
2128
2163
|
const { args: body, warnings } = this.getArgs(options);
|
|
2129
|
-
const { responseHeaders, value: response } = await (0,
|
|
2164
|
+
const { responseHeaders, value: response } = await (0, import_provider_utils7.postJsonToApi)({
|
|
2130
2165
|
url: this.config.url({
|
|
2131
2166
|
path: "/responses",
|
|
2132
2167
|
modelId: this.modelId
|
|
2133
2168
|
}),
|
|
2134
|
-
headers: (0,
|
|
2169
|
+
headers: (0, import_provider_utils7.combineHeaders)(this.config.headers(), options.headers),
|
|
2135
2170
|
body: {
|
|
2136
2171
|
...body,
|
|
2137
2172
|
stream: true
|
|
2138
2173
|
},
|
|
2139
2174
|
failedResponseHandler: openaiFailedResponseHandler,
|
|
2140
|
-
successfulResponseHandler: (0,
|
|
2175
|
+
successfulResponseHandler: (0, import_provider_utils7.createEventSourceResponseHandler)(
|
|
2141
2176
|
openaiResponsesChunkSchema
|
|
2142
2177
|
),
|
|
2143
2178
|
abortSignal: options.abortSignal,
|
|
@@ -2225,7 +2260,7 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
2225
2260
|
type: "source",
|
|
2226
2261
|
source: {
|
|
2227
2262
|
sourceType: "url",
|
|
2228
|
-
id: (_h = (_g = (_f = self.config).generateId) == null ? void 0 : _g.call(_f)) != null ? _h : (0,
|
|
2263
|
+
id: (_h = (_g = (_f = self.config).generateId) == null ? void 0 : _g.call(_f)) != null ? _h : (0, import_provider_utils7.generateId)(),
|
|
2229
2264
|
url: value.annotation.url,
|
|
2230
2265
|
title: value.annotation.title
|
|
2231
2266
|
}
|
|
@@ -2260,79 +2295,79 @@ var OpenAIResponsesLanguageModel = class {
|
|
|
2260
2295
|
};
|
|
2261
2296
|
}
|
|
2262
2297
|
};
|
|
2263
|
-
var usageSchema =
|
|
2264
|
-
input_tokens:
|
|
2265
|
-
input_tokens_details:
|
|
2266
|
-
output_tokens:
|
|
2267
|
-
output_tokens_details:
|
|
2298
|
+
var usageSchema = import_zod7.z.object({
|
|
2299
|
+
input_tokens: import_zod7.z.number(),
|
|
2300
|
+
input_tokens_details: import_zod7.z.object({ cached_tokens: import_zod7.z.number().nullish() }).nullish(),
|
|
2301
|
+
output_tokens: import_zod7.z.number(),
|
|
2302
|
+
output_tokens_details: import_zod7.z.object({ reasoning_tokens: import_zod7.z.number().nullish() }).nullish()
|
|
2268
2303
|
});
|
|
2269
|
-
var textDeltaChunkSchema =
|
|
2270
|
-
type:
|
|
2271
|
-
delta:
|
|
2304
|
+
var textDeltaChunkSchema = import_zod7.z.object({
|
|
2305
|
+
type: import_zod7.z.literal("response.output_text.delta"),
|
|
2306
|
+
delta: import_zod7.z.string()
|
|
2272
2307
|
});
|
|
2273
|
-
var responseFinishedChunkSchema =
|
|
2274
|
-
type:
|
|
2275
|
-
response:
|
|
2276
|
-
incomplete_details:
|
|
2308
|
+
var responseFinishedChunkSchema = import_zod7.z.object({
|
|
2309
|
+
type: import_zod7.z.enum(["response.completed", "response.incomplete"]),
|
|
2310
|
+
response: import_zod7.z.object({
|
|
2311
|
+
incomplete_details: import_zod7.z.object({ reason: import_zod7.z.string() }).nullish(),
|
|
2277
2312
|
usage: usageSchema
|
|
2278
2313
|
})
|
|
2279
2314
|
});
|
|
2280
|
-
var responseCreatedChunkSchema =
|
|
2281
|
-
type:
|
|
2282
|
-
response:
|
|
2283
|
-
id:
|
|
2284
|
-
created_at:
|
|
2285
|
-
model:
|
|
2315
|
+
var responseCreatedChunkSchema = import_zod7.z.object({
|
|
2316
|
+
type: import_zod7.z.literal("response.created"),
|
|
2317
|
+
response: import_zod7.z.object({
|
|
2318
|
+
id: import_zod7.z.string(),
|
|
2319
|
+
created_at: import_zod7.z.number(),
|
|
2320
|
+
model: import_zod7.z.string()
|
|
2286
2321
|
})
|
|
2287
2322
|
});
|
|
2288
|
-
var responseOutputItemDoneSchema =
|
|
2289
|
-
type:
|
|
2290
|
-
output_index:
|
|
2291
|
-
item:
|
|
2292
|
-
|
|
2293
|
-
type:
|
|
2323
|
+
var responseOutputItemDoneSchema = import_zod7.z.object({
|
|
2324
|
+
type: import_zod7.z.literal("response.output_item.done"),
|
|
2325
|
+
output_index: import_zod7.z.number(),
|
|
2326
|
+
item: import_zod7.z.discriminatedUnion("type", [
|
|
2327
|
+
import_zod7.z.object({
|
|
2328
|
+
type: import_zod7.z.literal("message")
|
|
2294
2329
|
}),
|
|
2295
|
-
|
|
2296
|
-
type:
|
|
2297
|
-
id:
|
|
2298
|
-
call_id:
|
|
2299
|
-
name:
|
|
2300
|
-
arguments:
|
|
2301
|
-
status:
|
|
2330
|
+
import_zod7.z.object({
|
|
2331
|
+
type: import_zod7.z.literal("function_call"),
|
|
2332
|
+
id: import_zod7.z.string(),
|
|
2333
|
+
call_id: import_zod7.z.string(),
|
|
2334
|
+
name: import_zod7.z.string(),
|
|
2335
|
+
arguments: import_zod7.z.string(),
|
|
2336
|
+
status: import_zod7.z.literal("completed")
|
|
2302
2337
|
})
|
|
2303
2338
|
])
|
|
2304
2339
|
});
|
|
2305
|
-
var responseFunctionCallArgumentsDeltaSchema =
|
|
2306
|
-
type:
|
|
2307
|
-
item_id:
|
|
2308
|
-
output_index:
|
|
2309
|
-
delta:
|
|
2340
|
+
var responseFunctionCallArgumentsDeltaSchema = import_zod7.z.object({
|
|
2341
|
+
type: import_zod7.z.literal("response.function_call_arguments.delta"),
|
|
2342
|
+
item_id: import_zod7.z.string(),
|
|
2343
|
+
output_index: import_zod7.z.number(),
|
|
2344
|
+
delta: import_zod7.z.string()
|
|
2310
2345
|
});
|
|
2311
|
-
var responseOutputItemAddedSchema =
|
|
2312
|
-
type:
|
|
2313
|
-
output_index:
|
|
2314
|
-
item:
|
|
2315
|
-
|
|
2316
|
-
type:
|
|
2346
|
+
var responseOutputItemAddedSchema = import_zod7.z.object({
|
|
2347
|
+
type: import_zod7.z.literal("response.output_item.added"),
|
|
2348
|
+
output_index: import_zod7.z.number(),
|
|
2349
|
+
item: import_zod7.z.discriminatedUnion("type", [
|
|
2350
|
+
import_zod7.z.object({
|
|
2351
|
+
type: import_zod7.z.literal("message")
|
|
2317
2352
|
}),
|
|
2318
|
-
|
|
2319
|
-
type:
|
|
2320
|
-
id:
|
|
2321
|
-
call_id:
|
|
2322
|
-
name:
|
|
2323
|
-
arguments:
|
|
2353
|
+
import_zod7.z.object({
|
|
2354
|
+
type: import_zod7.z.literal("function_call"),
|
|
2355
|
+
id: import_zod7.z.string(),
|
|
2356
|
+
call_id: import_zod7.z.string(),
|
|
2357
|
+
name: import_zod7.z.string(),
|
|
2358
|
+
arguments: import_zod7.z.string()
|
|
2324
2359
|
})
|
|
2325
2360
|
])
|
|
2326
2361
|
});
|
|
2327
|
-
var responseAnnotationAddedSchema =
|
|
2328
|
-
type:
|
|
2329
|
-
annotation:
|
|
2330
|
-
type:
|
|
2331
|
-
url:
|
|
2332
|
-
title:
|
|
2362
|
+
var responseAnnotationAddedSchema = import_zod7.z.object({
|
|
2363
|
+
type: import_zod7.z.literal("response.output_text.annotation.added"),
|
|
2364
|
+
annotation: import_zod7.z.object({
|
|
2365
|
+
type: import_zod7.z.literal("url_citation"),
|
|
2366
|
+
url: import_zod7.z.string(),
|
|
2367
|
+
title: import_zod7.z.string()
|
|
2333
2368
|
})
|
|
2334
2369
|
});
|
|
2335
|
-
var openaiResponsesChunkSchema =
|
|
2370
|
+
var openaiResponsesChunkSchema = import_zod7.z.union([
|
|
2336
2371
|
textDeltaChunkSchema,
|
|
2337
2372
|
responseFinishedChunkSchema,
|
|
2338
2373
|
responseCreatedChunkSchema,
|
|
@@ -2340,7 +2375,7 @@ var openaiResponsesChunkSchema = import_zod6.z.union([
|
|
|
2340
2375
|
responseFunctionCallArgumentsDeltaSchema,
|
|
2341
2376
|
responseOutputItemAddedSchema,
|
|
2342
2377
|
responseAnnotationAddedSchema,
|
|
2343
|
-
|
|
2378
|
+
import_zod7.z.object({ type: import_zod7.z.string() }).passthrough()
|
|
2344
2379
|
// fallback for unknown chunks
|
|
2345
2380
|
]);
|
|
2346
2381
|
function isTextDeltaChunk(chunk) {
|
|
@@ -2385,15 +2420,15 @@ function getResponsesModelConfig(modelId) {
|
|
|
2385
2420
|
requiredAutoTruncation: false
|
|
2386
2421
|
};
|
|
2387
2422
|
}
|
|
2388
|
-
var openaiResponsesProviderOptionsSchema =
|
|
2389
|
-
metadata:
|
|
2390
|
-
parallelToolCalls:
|
|
2391
|
-
previousResponseId:
|
|
2392
|
-
store:
|
|
2393
|
-
user:
|
|
2394
|
-
reasoningEffort:
|
|
2395
|
-
strictSchemas:
|
|
2396
|
-
instructions:
|
|
2423
|
+
var openaiResponsesProviderOptionsSchema = import_zod7.z.object({
|
|
2424
|
+
metadata: import_zod7.z.any().nullish(),
|
|
2425
|
+
parallelToolCalls: import_zod7.z.boolean().nullish(),
|
|
2426
|
+
previousResponseId: import_zod7.z.string().nullish(),
|
|
2427
|
+
store: import_zod7.z.boolean().nullish(),
|
|
2428
|
+
user: import_zod7.z.string().nullish(),
|
|
2429
|
+
reasoningEffort: import_zod7.z.string().nullish(),
|
|
2430
|
+
strictSchemas: import_zod7.z.boolean().nullish(),
|
|
2431
|
+
instructions: import_zod7.z.string().nullish()
|
|
2397
2432
|
});
|
|
2398
2433
|
// Annotate the CommonJS export names for ESM import in node:
|
|
2399
2434
|
0 && (module.exports = {
|
|
@@ -2402,6 +2437,7 @@ var openaiResponsesProviderOptionsSchema = import_zod6.z.object({
   OpenAIEmbeddingModel,
   OpenAIImageModel,
   OpenAIResponsesLanguageModel,
+  OpenAITranscriptionModel,
   modelMaxImagesPerCall
 });
 //# sourceMappingURL=index.js.map