@ai-sdk/openai 2.0.0-canary.1 → 2.0.0-canary.3
This diff compares two publicly released versions of the package as published to their public registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the registry.
- package/CHANGELOG.md +24 -0
- package/dist/index.d.mts +7 -1
- package/dist/index.d.ts +7 -1
- package/dist/index.js +507 -465
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +474 -428
- package/dist/index.mjs.map +1 -1
- package/internal/dist/index.d.mts +67 -2
- package/internal/dist/index.d.ts +67 -2
- package/internal/dist/index.js +498 -462
- package/internal/dist/index.js.map +1 -1
- package/internal/dist/index.mjs +467 -428
- package/internal/dist/index.mjs.map +1 -1
- package/package.json +4 -4
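The most visible change in the dist diff below is the prompt-part shape the converters consume: image parts are folded into file parts selected by mediaType, data replaces image/mimeType, providerOptions replaces providerMetadata, and tools/toolChoice move from mode onto the call options. The following TypeScript sketch shows a user message in that shape as inferred from the convertToOpenAIChatMessages hunk; the variable names and base64 placeholders are illustrative assumptions, not exports of this package.

// Sketch only: the part shape the updated converter reads
// (part.mediaType / part.data / part.filename / part.providerOptions).
declare const base64Png: string; // illustrative placeholder
declare const base64Pdf: string; // illustrative placeholder

const userMessage = {
  role: 'user' as const,
  content: [
    { type: 'text' as const, text: 'What is in this image and this document?' },
    {
      type: 'file' as const,        // images, audio and PDFs are all file parts now
      mediaType: 'image/png',       // selects the image_url branch in the converter
      data: base64Png,              // base64 string or URL
      providerOptions: { openai: { imageDetail: 'low' } }, // OpenAI-specific image detail
    },
    {
      type: 'file' as const,
      mediaType: 'application/pdf', // mapped to a file part with a data: URL
      data: base64Pdf,
      filename: 'report.pdf',
    },
  ],
};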
package/dist/index.js
CHANGED
@@ -26,16 +26,15 @@ __export(src_exports, {
  module.exports = __toCommonJS(src_exports);

  // src/openai-provider.ts
- var
+ var import_provider_utils8 = require("@ai-sdk/provider-utils");

  // src/openai-chat-language-model.ts
  var import_provider3 = require("@ai-sdk/provider");
- var
+ var import_provider_utils2 = require("@ai-sdk/provider-utils");
  var import_zod2 = require("zod");

  // src/convert-to-openai-chat-messages.ts
  var import_provider = require("@ai-sdk/provider");
- var import_provider_utils = require("@ai-sdk/provider-utils");
  function convertToOpenAIChatMessages({
  prompt,
  useLegacyFunctionCalling = false,
@@ -79,55 +78,65 @@ function convertToOpenAIChatMessages({
  messages.push({
  role: "user",
  content: content.map((part, index) => {
- var _a, _b, _c
+ var _a, _b, _c;
  switch (part.type) {
  case "text": {
  return { type: "text", text: part.text };
  }
- case "image": {
- return {
- type: "image_url",
- image_url: {
- url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${(0, import_provider_utils.convertUint8ArrayToBase64)(part.image)}`,
- // OpenAI specific extension: image detail
- detail: (_c = (_b = part.providerMetadata) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
- }
- };
- }
  case "file": {
- if (part.
-
-
-
-
-
-
-
-
-
-
-
-
-
- type: "input_audio",
- input_audio: { data: part.data, format: "mp3" }
- };
+ if (part.mediaType.startsWith("image/")) {
+ const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
+ return {
+ type: "image_url",
+ image_url: {
+ url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${part.data}`,
+ // OpenAI specific extension: image detail
+ detail: (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.imageDetail
+ }
+ };
+ } else if (part.mediaType.startsWith("audio/")) {
+ if (part.data instanceof URL) {
+ throw new import_provider.UnsupportedFunctionalityError({
+ functionality: "audio file parts with URLs"
+ });
  }
-
-
-
-
-
-
-
-
+ switch (part.mediaType) {
+ case "audio/wav": {
+ return {
+ type: "input_audio",
+ input_audio: { data: part.data, format: "wav" }
+ };
+ }
+ case "audio/mp3":
+ case "audio/mpeg": {
+ return {
+ type: "input_audio",
+ input_audio: { data: part.data, format: "mp3" }
+ };
+ }
+ default: {
+ throw new import_provider.UnsupportedFunctionalityError({
+ functionality: `audio content parts with media type ${part.mediaType}`
+ });
+ }
  }
-
+ } else if (part.mediaType === "application/pdf") {
+ if (part.data instanceof URL) {
  throw new import_provider.UnsupportedFunctionalityError({
- functionality:
+ functionality: "PDF file parts with URLs"
  });
  }
+ return {
+ type: "file",
+ file: {
+ filename: (_c = part.filename) != null ? _c : `part-${index}.pdf`,
+ file_data: `data:application/pdf;base64,${part.data}`
+ }
+ };
+ } else {
+ throw new import_provider.UnsupportedFunctionalityError({
+ functionality: `file part media type ${part.mediaType}`
+ });
  }
  }
  }
@@ -236,7 +245,7 @@ function mapOpenAIFinishReason(finishReason) {

  // src/openai-error.ts
  var import_zod = require("zod");
- var
+ var import_provider_utils = require("@ai-sdk/provider-utils");
  var openaiErrorDataSchema = import_zod.z.object({
  error: import_zod.z.object({
  message: import_zod.z.string(),
@@ -248,7 +257,7 @@ var openaiErrorDataSchema = import_zod.z.object({
  code: import_zod.z.union([import_zod.z.string(), import_zod.z.number()]).nullish()
  })
  });
- var openaiFailedResponseHandler = (0,
+ var openaiFailedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)({
  errorSchema: openaiErrorDataSchema,
  errorToMessage: (data) => data.error.message
  });
@@ -269,17 +278,16 @@ function getResponseMetadata({
  // src/openai-prepare-tools.ts
  var import_provider2 = require("@ai-sdk/provider");
  function prepareTools({
-
+ tools,
+ toolChoice,
  useLegacyFunctionCalling = false,
  structuredOutputs
  }) {
-
- const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
+ tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
  const toolWarnings = [];
  if (tools == null) {
- return { tools: void 0,
+ return { tools: void 0, toolChoice: void 0, toolWarnings };
  }
- const toolChoice = mode.toolChoice;
  if (useLegacyFunctionCalling) {
  const openaiFunctions = [];
  for (const tool of tools) {
@@ -339,18 +347,18 @@ function prepareTools({
  }
  }
  if (toolChoice == null) {
- return { tools: openaiTools2,
+ return { tools: openaiTools2, toolChoice: void 0, toolWarnings };
  }
  const type = toolChoice.type;
  switch (type) {
  case "auto":
  case "none":
  case "required":
- return { tools: openaiTools2,
+ return { tools: openaiTools2, toolChoice: type, toolWarnings };
  case "tool":
  return {
  tools: openaiTools2,
-
+ toolChoice: {
  type: "function",
  function: {
  name: toolChoice.toolName
@@ -361,7 +369,7 @@ function prepareTools({
  default: {
  const _exhaustiveCheck = type;
  throw new import_provider2.UnsupportedFunctionalityError({
- functionality: `
+ functionality: `tool choice type: ${_exhaustiveCheck}`
  });
  }
  }
@@ -392,7 +400,6 @@ var OpenAIChatLanguageModel = class {
  return !this.settings.downloadImages;
  }
  getArgs({
- mode,
  prompt,
  maxTokens,
  temperature,
@@ -403,10 +410,11 @@ var OpenAIChatLanguageModel = class {
  stopSequences,
  responseFormat,
  seed,
-
+ tools,
+ toolChoice,
+ providerOptions
  }) {
- var _a, _b, _c, _d, _e, _f, _g
- const type = mode.type;
+ var _a, _b, _c, _d, _e, _f, _g;
  const warnings = [];
  if (topK != null) {
  warnings.push({
@@ -455,6 +463,7 @@ var OpenAIChatLanguageModel = class {
  top_p: topP,
  frequency_penalty: frequencyPenalty,
  presence_penalty: presencePenalty,
+ // TODO improve below:
  response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? this.supportsStructuredOutputs && responseFormat.schema != null ? {
  type: "json_schema",
  json_schema: {
@@ -468,11 +477,11 @@ var OpenAIChatLanguageModel = class {
  seed,
  // openai specific settings:
  // TODO remove in next major version; we auto-map maxTokens now
- max_completion_tokens: (_b =
- store: (_c =
- metadata: (_d =
- prediction: (_e =
- reasoning_effort: (_g = (_f =
+ max_completion_tokens: (_b = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _b.maxCompletionTokens,
+ store: (_c = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _c.store,
+ metadata: (_d = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _d.metadata,
+ prediction: (_e = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _e.prediction,
+ reasoning_effort: (_g = (_f = providerOptions == null ? void 0 : providerOptions.openai) == null ? void 0 : _f.reasoningEffort) != null ? _g : this.settings.reasoningEffort,
  // messages:
  messages
  };
@@ -537,81 +546,28 @@ var OpenAIChatLanguageModel = class {
  baseArgs.max_tokens = void 0;
  }
  }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- response_format: this.supportsStructuredOutputs && mode.schema != null ? {
- type: "json_schema",
- json_schema: {
- schema: mode.schema,
- strict: true,
- name: (_h = mode.name) != null ? _h : "response",
- description: mode.description
- }
- } : { type: "json_object" }
- },
- warnings
- };
- }
- case "object-tool": {
- return {
- args: useLegacyFunctionCalling ? {
- ...baseArgs,
- function_call: {
- name: mode.tool.name
- },
- functions: [
- {
- name: mode.tool.name,
- description: mode.tool.description,
- parameters: mode.tool.parameters
- }
- ]
- } : {
- ...baseArgs,
- tool_choice: {
- type: "function",
- function: { name: mode.tool.name }
- },
- tools: [
- {
- type: "function",
- function: {
- name: mode.tool.name,
- description: mode.tool.description,
- parameters: mode.tool.parameters,
- strict: this.supportsStructuredOutputs ? true : void 0
- }
- }
- ]
- },
- warnings
- };
- }
- default: {
- const _exhaustiveCheck = type;
- throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
- }
- }
+ const {
+ tools: openaiTools2,
+ toolChoice: openaiToolChoice,
+ functions,
+ function_call,
+ toolWarnings
+ } = prepareTools({
+ tools,
+ toolChoice,
+ useLegacyFunctionCalling,
+ structuredOutputs: this.supportsStructuredOutputs
+ });
+ return {
+ args: {
+ ...baseArgs,
+ tools: openaiTools2,
+ tool_choice: openaiToolChoice,
+ functions,
+ function_call
+ },
+ warnings: [...warnings, ...toolWarnings]
+ };
  }
  async doGenerate(options) {
  var _a, _b, _c, _d, _e, _f, _g, _h;
@@ -620,15 +576,15 @@ var OpenAIChatLanguageModel = class {
  responseHeaders,
  value: response,
  rawValue: rawResponse
- } = await (0,
+ } = await (0, import_provider_utils2.postJsonToApi)({
  url: this.config.url({
  path: "/chat/completions",
  modelId: this.modelId
  }),
- headers: (0,
+ headers: (0, import_provider_utils2.combineHeaders)(this.config.headers(), options.headers),
  body,
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0,
+ successfulResponseHandler: (0, import_provider_utils2.createJsonResponseHandler)(
  openaiChatResponseSchema
  ),
  abortSignal: options.abortSignal,
@@ -656,7 +612,7 @@ var OpenAIChatLanguageModel = class {
  toolCalls: this.settings.useLegacyFunctionCalling && choice.message.function_call ? [
  {
  toolCallType: "function",
- toolCallId: (0,
+ toolCallId: (0, import_provider_utils2.generateId)(),
  toolName: choice.message.function_call.name,
  args: choice.message.function_call.arguments
  }
@@ -664,7 +620,7 @@ var OpenAIChatLanguageModel = class {
  var _a2;
  return {
  toolCallType: "function",
- toolCallId: (_a2 = toolCall.id) != null ? _a2 : (0,
+ toolCallId: (_a2 = toolCall.id) != null ? _a2 : (0, import_provider_utils2.generateId)(),
  toolName: toolCall.function.name,
  args: toolCall.function.arguments
  };
@@ -734,15 +690,15 @@ var OpenAIChatLanguageModel = class {
  // only include stream_options when in strict compatibility mode:
  stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
  };
- const { responseHeaders, value: response } = await (0,
+ const { responseHeaders, value: response } = await (0, import_provider_utils2.postJsonToApi)({
  url: this.config.url({
  path: "/chat/completions",
  modelId: this.modelId
  }),
- headers: (0,
+ headers: (0, import_provider_utils2.combineHeaders)(this.config.headers(), options.headers),
  body,
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0,
+ successfulResponseHandler: (0, import_provider_utils2.createEventSourceResponseHandler)(
  openaiChatChunkSchema
  ),
  abortSignal: options.abortSignal,
@@ -830,7 +786,7 @@ var OpenAIChatLanguageModel = class {
  const mappedToolCalls = useLegacyFunctionCalling && delta.function_call != null ? [
  {
  type: "function",
- id: (0,
+ id: (0, import_provider_utils2.generateId)(),
  function: delta.function_call,
  index: 0
  }
@@ -877,11 +833,11 @@ var OpenAIChatLanguageModel = class {
  argsTextDelta: toolCall2.function.arguments
  });
  }
- if ((0,
+ if ((0, import_provider_utils2.isParsableJson)(toolCall2.function.arguments)) {
  controller.enqueue({
  type: "tool-call",
  toolCallType: "function",
- toolCallId: (_e = toolCall2.id) != null ? _e : (0,
+ toolCallId: (_e = toolCall2.id) != null ? _e : (0, import_provider_utils2.generateId)(),
  toolName: toolCall2.function.name,
  args: toolCall2.function.arguments
  });
@@ -904,11 +860,11 @@ var OpenAIChatLanguageModel = class {
  toolName: toolCall.function.name,
  argsTextDelta: (_i = toolCallDelta.function.arguments) != null ? _i : ""
  });
- if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && (0,
+ if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && (0, import_provider_utils2.isParsableJson)(toolCall.function.arguments)) {
  controller.enqueue({
  type: "tool-call",
  toolCallType: "function",
- toolCallId: (_l = toolCall.id) != null ? _l : (0,
+ toolCallId: (_l = toolCall.id) != null ? _l : (0, import_provider_utils2.generateId)(),
  toolName: toolCall.function.name,
  args: toolCall.function.arguments
  });
@@ -1078,8 +1034,7 @@ var reasoningModels = {
  };

  // src/openai-completion-language-model.ts
- var
- var import_provider_utils4 = require("@ai-sdk/provider-utils");
+ var import_provider_utils3 = require("@ai-sdk/provider-utils");
  var import_zod3 = require("zod");

  // src/convert-to-openai-completion-prompt.ts
@@ -1114,13 +1069,8 @@ function convertToOpenAICompletionPrompt({
  case "text": {
  return part.text;
  }
- case "image": {
- throw new import_provider4.UnsupportedFunctionalityError({
- functionality: "images"
- });
- }
  }
- }).join("");
+ }).filter(Boolean).join("");
  text += `${user}:
  ${userMessage}

@@ -1193,7 +1143,6 @@ var OpenAICompletionLanguageModel = class {
  return this.config.provider;
  }
  getArgs({
- mode,
  inputFormat,
  prompt,
  maxTokens,
@@ -1204,16 +1153,19 @@ var OpenAICompletionLanguageModel = class {
  presencePenalty,
  stopSequences: userStopSequences,
  responseFormat,
+ tools,
+ toolChoice,
  seed
  }) {
- var _a;
- const type = mode.type;
  const warnings = [];
  if (topK != null) {
- warnings.push({
-
-
- });
+ warnings.push({ type: "unsupported-setting", setting: "topK" });
+ }
+ if (tools == null ? void 0 : tools.length) {
+ warnings.push({ type: "unsupported-setting", setting: "tools" });
+ }
+ if (toolChoice != null) {
+ warnings.push({ type: "unsupported-setting", setting: "toolChoice" });
  }
  if (responseFormat != null && responseFormat.type !== "text") {
  warnings.push({
@@ -1224,56 +1176,30 @@ var OpenAICompletionLanguageModel = class {
  }
  const { prompt: completionPrompt, stopSequences } = convertToOpenAICompletionPrompt({ prompt, inputFormat });
  const stop = [...stopSequences != null ? stopSequences : [], ...userStopSequences != null ? userStopSequences : []];
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+ return {
+ args: {
+ // model id:
+ model: this.modelId,
+ // model specific settings:
+ echo: this.settings.echo,
+ logit_bias: this.settings.logitBias,
+ logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
+ suffix: this.settings.suffix,
+ user: this.settings.user,
+ // standardized settings:
+ max_tokens: maxTokens,
+ temperature,
+ top_p: topP,
+ frequency_penalty: frequencyPenalty,
+ presence_penalty: presencePenalty,
+ seed,
+ // prompt:
+ prompt: completionPrompt,
+ // stop sequences:
+ stop: stop.length > 0 ? stop : void 0
+ },
+ warnings
  };
- switch (type) {
- case "regular": {
- if ((_a = mode.tools) == null ? void 0 : _a.length) {
- throw new import_provider5.UnsupportedFunctionalityError({
- functionality: "tools"
- });
- }
- if (mode.toolChoice) {
- throw new import_provider5.UnsupportedFunctionalityError({
- functionality: "toolChoice"
- });
- }
- return { args: baseArgs, warnings };
- }
- case "object-json": {
- throw new import_provider5.UnsupportedFunctionalityError({
- functionality: "object-json mode"
- });
- }
- case "object-tool": {
- throw new import_provider5.UnsupportedFunctionalityError({
- functionality: "object-tool mode"
- });
- }
- default: {
- const _exhaustiveCheck = type;
- throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
- }
- }
  }
  async doGenerate(options) {
  const { args, warnings } = this.getArgs(options);
@@ -1281,15 +1207,15 @@ var OpenAICompletionLanguageModel = class {
  responseHeaders,
  value: response,
  rawValue: rawResponse
- } = await (0,
+ } = await (0, import_provider_utils3.postJsonToApi)({
  url: this.config.url({
  path: "/completions",
  modelId: this.modelId
  }),
- headers: (0,
+ headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
  body: args,
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0,
+ successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
  openaiCompletionResponseSchema
  ),
  abortSignal: options.abortSignal,
@@ -1320,15 +1246,15 @@ var OpenAICompletionLanguageModel = class {
  // only include stream_options when in strict compatibility mode:
  stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
  };
- const { responseHeaders, value: response } = await (0,
+ const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
  url: this.config.url({
  path: "/completions",
  modelId: this.modelId
  }),
- headers: (0,
+ headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
  body,
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0,
+ successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(
  openaiCompletionChunkSchema
  ),
  abortSignal: options.abortSignal,
@@ -1451,8 +1377,8 @@ var openaiCompletionChunkSchema = import_zod3.z.union([
  ]);

  // src/openai-embedding-model.ts
- var
- var
+ var import_provider5 = require("@ai-sdk/provider");
+ var import_provider_utils4 = require("@ai-sdk/provider-utils");
  var import_zod4 = require("zod");
  var OpenAIEmbeddingModel = class {
  constructor(modelId, settings, config) {
@@ -1478,19 +1404,19 @@ var OpenAIEmbeddingModel = class {
  abortSignal
  }) {
  if (values.length > this.maxEmbeddingsPerCall) {
- throw new
+ throw new import_provider5.TooManyEmbeddingValuesForCallError({
  provider: this.provider,
  modelId: this.modelId,
  maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,
  values
  });
  }
- const { responseHeaders, value: response } = await (0,
+ const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
  url: this.config.url({
  path: "/embeddings",
  modelId: this.modelId
  }),
- headers: (0,
+ headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), headers),
  body: {
  model: this.modelId,
  input: values,
@@ -1499,7 +1425,7 @@ var OpenAIEmbeddingModel = class {
  user: this.settings.user
  },
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0,
+ successfulResponseHandler: (0, import_provider_utils4.createJsonResponseHandler)(
  openaiTextEmbeddingResponseSchema
  ),
  abortSignal,
@@ -1518,7 +1444,7 @@ var openaiTextEmbeddingResponseSchema = import_zod4.z.object({
  });

  // src/openai-image-model.ts
- var
+ var import_provider_utils5 = require("@ai-sdk/provider-utils");
  var import_zod5 = require("zod");

  // src/openai-image-settings.ts
@@ -1565,12 +1491,12 @@ var OpenAIImageModel = class {
  warnings.push({ type: "unsupported-setting", setting: "seed" });
  }
  const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
- const { value: response, responseHeaders } = await (0,
+ const { value: response, responseHeaders } = await (0, import_provider_utils5.postJsonToApi)({
  url: this.config.url({
  path: "/images/generations",
  modelId: this.modelId
  }),
- headers: (0,
+ headers: (0, import_provider_utils5.combineHeaders)(this.config.headers(), headers),
  body: {
  model: this.modelId,
  prompt,
@@ -1580,7 +1506,7 @@ var OpenAIImageModel = class {
  response_format: "b64_json"
  },
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0,
+ successfulResponseHandler: (0, import_provider_utils5.createJsonResponseHandler)(
  openaiImageResponseSchema
  ),
  abortSignal,
@@ -1622,13 +1548,186 @@ var openaiTools = {
  webSearchPreview: webSearchPreviewTool
  };

- // src/
- var
+ // src/openai-transcription-model.ts
+ var import_provider_utils6 = require("@ai-sdk/provider-utils");
  var import_zod7 = require("zod");
+ var OpenAIProviderOptionsSchema = import_zod7.z.object({
+ include: import_zod7.z.array(import_zod7.z.string()).optional().describe(
+ "Additional information to include in the transcription response."
+ ),
+ language: import_zod7.z.string().optional().describe("The language of the input audio in ISO-639-1 format."),
+ prompt: import_zod7.z.string().optional().describe(
+ "An optional text to guide the model's style or continue a previous audio segment."
+ ),
+ temperature: import_zod7.z.number().min(0).max(1).optional().default(0).describe("The sampling temperature, between 0 and 1."),
+ timestampGranularities: import_zod7.z.array(import_zod7.z.enum(["word", "segment"])).optional().default(["segment"]).describe(
+ "The timestamp granularities to populate for this transcription."
+ )
+ });
+ var languageMap = {
+ afrikaans: "af",
+ arabic: "ar",
+ armenian: "hy",
+ azerbaijani: "az",
+ belarusian: "be",
+ bosnian: "bs",
+ bulgarian: "bg",
+ catalan: "ca",
+ chinese: "zh",
+ croatian: "hr",
+ czech: "cs",
+ danish: "da",
+ dutch: "nl",
+ english: "en",
+ estonian: "et",
+ finnish: "fi",
+ french: "fr",
+ galician: "gl",
+ german: "de",
+ greek: "el",
+ hebrew: "he",
+ hindi: "hi",
+ hungarian: "hu",
+ icelandic: "is",
+ indonesian: "id",
+ italian: "it",
+ japanese: "ja",
+ kannada: "kn",
+ kazakh: "kk",
+ korean: "ko",
+ latvian: "lv",
+ lithuanian: "lt",
+ macedonian: "mk",
+ malay: "ms",
+ marathi: "mr",
+ maori: "mi",
+ nepali: "ne",
+ norwegian: "no",
+ persian: "fa",
+ polish: "pl",
+ portuguese: "pt",
+ romanian: "ro",
+ russian: "ru",
+ serbian: "sr",
+ slovak: "sk",
+ slovenian: "sl",
+ spanish: "es",
+ swahili: "sw",
+ swedish: "sv",
+ tagalog: "tl",
+ tamil: "ta",
+ thai: "th",
+ turkish: "tr",
+ ukrainian: "uk",
+ urdu: "ur",
+ vietnamese: "vi",
+ welsh: "cy"
+ };
+ var OpenAITranscriptionModel = class {
+ constructor(modelId, config) {
+ this.modelId = modelId;
+ this.config = config;
+ this.specificationVersion = "v1";
+ }
+ get provider() {
+ return this.config.provider;
+ }
+ getArgs({
+ audio,
+ mediaType,
+ providerOptions
+ }) {
+ const warnings = [];
+ const openAIOptions = (0, import_provider_utils6.parseProviderOptions)({
+ provider: "openai",
+ providerOptions,
+ schema: OpenAIProviderOptionsSchema
+ });
+ const formData = new FormData();
+ const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([(0, import_provider_utils6.convertBase64ToUint8Array)(audio)]);
+ formData.append("model", this.modelId);
+ formData.append("file", new File([blob], "audio", { type: mediaType }));
+ if (openAIOptions) {
+ const transcriptionModelOptions = {
+ include: openAIOptions.include,
+ language: openAIOptions.language,
+ prompt: openAIOptions.prompt,
+ temperature: openAIOptions.temperature,
+ timestamp_granularities: openAIOptions.timestampGranularities
+ };
+ for (const key in transcriptionModelOptions) {
+ const value = transcriptionModelOptions[key];
+ if (value !== void 0) {
+ formData.append(key, value);
+ }
+ }
+ }
+ return {
+ formData,
+ warnings
+ };
+ }
+ async doGenerate(options) {
+ var _a, _b, _c, _d, _e, _f;
+ const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
+ const { formData, warnings } = this.getArgs(options);
+ const {
+ value: response,
+ responseHeaders,
+ rawValue: rawResponse
+ } = await (0, import_provider_utils6.postFormDataToApi)({
+ url: this.config.url({
+ path: "/audio/transcriptions",
+ modelId: this.modelId
+ }),
+ headers: (0, import_provider_utils6.combineHeaders)(this.config.headers(), options.headers),
+ formData,
+ failedResponseHandler: openaiFailedResponseHandler,
+ successfulResponseHandler: (0, import_provider_utils6.createJsonResponseHandler)(
+ openaiTranscriptionResponseSchema
+ ),
+ abortSignal: options.abortSignal,
+ fetch: this.config.fetch
+ });
+ const language = response.language != null && response.language in languageMap ? languageMap[response.language] : void 0;
+ return {
+ text: response.text,
+ segments: (_e = (_d = response.words) == null ? void 0 : _d.map((word) => ({
+ text: word.word,
+ startSecond: word.start,
+ endSecond: word.end
+ }))) != null ? _e : [],
+ language,
+ durationInSeconds: (_f = response.duration) != null ? _f : void 0,
+ warnings,
+ response: {
+ timestamp: currentDate,
+ modelId: this.modelId,
+ headers: responseHeaders,
+ body: rawResponse
+ }
+ };
+ }
+ };
+ var openaiTranscriptionResponseSchema = import_zod7.z.object({
+ text: import_zod7.z.string(),
+ language: import_zod7.z.string().nullish(),
+ duration: import_zod7.z.number().nullish(),
+ words: import_zod7.z.array(
+ import_zod7.z.object({
+ word: import_zod7.z.string(),
+ start: import_zod7.z.number(),
+ end: import_zod7.z.number()
+ })
+ ).nullish()
+ });

- // src/responses/
- var import_provider7 = require("@ai-sdk/provider");
+ // src/responses/openai-responses-language-model.ts
  var import_provider_utils7 = require("@ai-sdk/provider-utils");
+ var import_zod8 = require("zod");
+
+ // src/responses/convert-to-openai-responses-messages.ts
+ var import_provider6 = require("@ai-sdk/provider");
  function convertToOpenAIResponsesMessages({
  prompt,
  systemMessageMode
@@ -1667,38 +1766,35 @@ function convertToOpenAIResponsesMessages({
  messages.push({
  role: "user",
  content: content.map((part, index) => {
- var _a, _b, _c
+ var _a, _b, _c;
  switch (part.type) {
  case "text": {
  return { type: "input_text", text: part.text };
  }
- case "image": {
- return {
- type: "input_image",
- image_url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${(0, import_provider_utils7.convertUint8ArrayToBase64)(part.image)}`,
- // OpenAI specific extension: image detail
- detail: (_c = (_b = part.providerMetadata) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
- };
- }
  case "file": {
- if (part.
-
-
-
-
-
-
-
-
-
-
-
- }
- default: {
- throw new import_provider7.UnsupportedFunctionalityError({
- functionality: "Only PDF files are supported in user messages"
+ if (part.mediaType.startsWith("image/")) {
+ const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
+ return {
+ type: "input_image",
+ image_url: part.data instanceof URL ? part.data.toString() : `data:${mediaType};base64,${part.data}`,
+ // OpenAI specific extension: image detail
+ detail: (_b = (_a = part.providerOptions) == null ? void 0 : _a.openai) == null ? void 0 : _b.imageDetail
+ };
+ } else if (part.mediaType === "application/pdf") {
+ if (part.data instanceof URL) {
+ throw new import_provider6.UnsupportedFunctionalityError({
+ functionality: "PDF file parts with URLs"
  });
  }
+ return {
+ type: "input_file",
+ filename: (_c = part.filename) != null ? _c : `part-${index}.pdf`,
+ file_data: `data:application/pdf;base64,${part.data}`
+ };
+ } else {
+ throw new import_provider6.UnsupportedFunctionalityError({
+ functionality: `file part media type ${part.mediaType}`
+ });
  }
  }
  }
@@ -1767,18 +1863,17 @@ function mapOpenAIResponseFinishReason({
  }

  // src/responses/openai-responses-prepare-tools.ts
- var
+ var import_provider7 = require("@ai-sdk/provider");
  function prepareResponsesTools({
-
+ tools,
+ toolChoice,
  strict
  }) {
-
- const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
+ tools = (tools == null ? void 0 : tools.length) ? tools : void 0;
  const toolWarnings = [];
  if (tools == null) {
- return { tools: void 0,
+ return { tools: void 0, toolChoice: void 0, toolWarnings };
  }
- const toolChoice = mode.toolChoice;
  const openaiTools2 = [];
  for (const tool of tools) {
  switch (tool.type) {
@@ -1811,37 +1906,24 @@ function prepareResponsesTools({
  }
  }
  if (toolChoice == null) {
- return { tools: openaiTools2,
+ return { tools: openaiTools2, toolChoice: void 0, toolWarnings };
  }
  const type = toolChoice.type;
  switch (type) {
  case "auto":
  case "none":
  case "required":
- return { tools: openaiTools2,
- case "tool":
- if (toolChoice.toolName === "web_search_preview") {
- return {
- tools: openaiTools2,
- tool_choice: {
- type: "web_search_preview"
- },
- toolWarnings
- };
- }
+ return { tools: openaiTools2, toolChoice: type, toolWarnings };
+ case "tool":
  return {
  tools: openaiTools2,
-
- type: "function",
- name: toolChoice.toolName
- },
+ toolChoice: toolChoice.toolName === "web_search_preview" ? { type: "web_search_preview" } : { type: "function", name: toolChoice.toolName },
  toolWarnings
  };
- }
  default: {
  const _exhaustiveCheck = type;
- throw new
- functionality: `
+ throw new import_provider7.UnsupportedFunctionalityError({
+ functionality: `tool choice type: ${_exhaustiveCheck}`
  });
  }
  }
@@ -1859,7 +1941,6 @@ var OpenAIResponsesLanguageModel = class {
  return this.config.provider;
  }
  getArgs({
- mode,
  maxTokens,
  temperature,
  stopSequences,
@@ -1869,24 +1950,19 @@ var OpenAIResponsesLanguageModel = class {
  frequencyPenalty,
  seed,
  prompt,
-
+ providerOptions,
+ tools,
+ toolChoice,
  responseFormat
  }) {
- var _a, _b
+ var _a, _b;
  const warnings = [];
  const modelConfig = getResponsesModelConfig(this.modelId);
- const type = mode.type;
  if (topK != null) {
- warnings.push({
- type: "unsupported-setting",
- setting: "topK"
- });
+ warnings.push({ type: "unsupported-setting", setting: "topK" });
  }
  if (seed != null) {
- warnings.push({
- type: "unsupported-setting",
- setting: "seed"
- });
+ warnings.push({ type: "unsupported-setting", setting: "seed" });
  }
  if (presencePenalty != null) {
  warnings.push({
@@ -1901,19 +1977,16 @@ var OpenAIResponsesLanguageModel = class {
  });
  }
  if (stopSequences != null) {
- warnings.push({
- type: "unsupported-setting",
- setting: "stopSequences"
- });
+ warnings.push({ type: "unsupported-setting", setting: "stopSequences" });
  }
  const { messages, warnings: messageWarnings } = convertToOpenAIResponsesMessages({
  prompt,
  systemMessageMode: modelConfig.systemMessageMode
  });
  warnings.push(...messageWarnings);
- const openaiOptions = (0,
+ const openaiOptions = (0, import_provider_utils7.parseProviderOptions)({
  provider: "openai",
- providerOptions
+ providerOptions,
  schema: openaiResponsesProviderOptionsSchema
  });
  const isStrict = (_a = openaiOptions == null ? void 0 : openaiOptions.strictSchemas) != null ? _a : true;
@@ -1967,62 +2040,23 @@ var OpenAIResponsesLanguageModel = class {
  });
  }
  }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- return {
- args: {
- ...baseArgs,
- text: {
- format: mode.schema != null ? {
- type: "json_schema",
- strict: isStrict,
- name: (_c = mode.name) != null ? _c : "response",
- description: mode.description,
- schema: mode.schema
- } : { type: "json_object" }
- }
- },
- warnings
- };
- }
- case "object-tool": {
- return {
- args: {
- ...baseArgs,
- tool_choice: { type: "function", name: mode.tool.name },
- tools: [
- {
- type: "function",
- name: mode.tool.name,
- description: mode.tool.description,
- parameters: mode.tool.parameters,
- strict: isStrict
- }
- ]
- },
- warnings
- };
- }
- default: {
- const _exhaustiveCheck = type;
- throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
- }
- }
+ const {
+ tools: openaiTools2,
+ toolChoice: openaiToolChoice,
+ toolWarnings
+ } = prepareResponsesTools({
+ tools,
+ toolChoice,
+ strict: isStrict
+ });
+ return {
+ args: {
+ ...baseArgs,
+ tools: openaiTools2,
+ tool_choice: openaiToolChoice
+ },
+ warnings: [...warnings, ...toolWarnings]
+ };
  }
  async doGenerate(options) {
  var _a, _b, _c, _d, _e;
@@ -2031,58 +2065,58 @@ var OpenAIResponsesLanguageModel = class {
  responseHeaders,
  value: response,
  rawValue: rawResponse
- } = await (0,
+ } = await (0, import_provider_utils7.postJsonToApi)({
  url: this.config.url({
  path: "/responses",
  modelId: this.modelId
  }),
- headers: (0,
+ headers: (0, import_provider_utils7.combineHeaders)(this.config.headers(), options.headers),
  body,
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0,
-
- id:
- created_at:
- model:
- output:
-
-
- type:
- role:
- content:
-
- type:
- text:
- annotations:
-
- type:
- start_index:
- end_index:
- url:
- title:
+ successfulResponseHandler: (0, import_provider_utils7.createJsonResponseHandler)(
+ import_zod8.z.object({
+ id: import_zod8.z.string(),
+ created_at: import_zod8.z.number(),
+ model: import_zod8.z.string(),
+ output: import_zod8.z.array(
+ import_zod8.z.discriminatedUnion("type", [
+ import_zod8.z.object({
+ type: import_zod8.z.literal("message"),
+ role: import_zod8.z.literal("assistant"),
+ content: import_zod8.z.array(
+ import_zod8.z.object({
+ type: import_zod8.z.literal("output_text"),
+ text: import_zod8.z.string(),
+ annotations: import_zod8.z.array(
+ import_zod8.z.object({
+ type: import_zod8.z.literal("url_citation"),
+ start_index: import_zod8.z.number(),
+ end_index: import_zod8.z.number(),
+ url: import_zod8.z.string(),
+ title: import_zod8.z.string()
  })
  )
  })
  )
  }),
-
- type:
- call_id:
- name:
- arguments:
+ import_zod8.z.object({
+ type: import_zod8.z.literal("function_call"),
+ call_id: import_zod8.z.string(),
+ name: import_zod8.z.string(),
+ arguments: import_zod8.z.string()
  }),
-
- type:
+ import_zod8.z.object({
+ type: import_zod8.z.literal("web_search_call")
  }),
-
- type:
+ import_zod8.z.object({
+ type: import_zod8.z.literal("computer_call")
  }),
-
- type:
+ import_zod8.z.object({
+ type: import_zod8.z.literal("reasoning")
  })
  ])
  ),
- incomplete_details:
+ incomplete_details: import_zod8.z.object({ reason: import_zod8.z.string() }).nullable(),
  usage: usageSchema
  })
  ),
@@ -2103,7 +2137,7 @@ var OpenAIResponsesLanguageModel = class {
  var _a2, _b2, _c2;
  return {
  sourceType: "url",
- id: (_c2 = (_b2 = (_a2 = this.config).generateId) == null ? void 0 : _b2.call(_a2)) != null ? _c2 : (0,
+ id: (_c2 = (_b2 = (_a2 = this.config).generateId) == null ? void 0 : _b2.call(_a2)) != null ? _c2 : (0, import_provider_utils7.generateId)(),
  url: annotation.url,
  title: annotation.title
  };
@@ -2146,18 +2180,18 @@ var OpenAIResponsesLanguageModel = class {
  }
  async doStream(options) {
  const { args: body, warnings } = this.getArgs(options);
- const { responseHeaders, value: response } = await (0,
+ const { responseHeaders, value: response } = await (0, import_provider_utils7.postJsonToApi)({
  url: this.config.url({
  path: "/responses",
  modelId: this.modelId
  }),
- headers: (0,
+ headers: (0, import_provider_utils7.combineHeaders)(this.config.headers(), options.headers),
  body: {
  ...body,
  stream: true
  },
  failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: (0,
+ successfulResponseHandler: (0, import_provider_utils7.createEventSourceResponseHandler)(
  openaiResponsesChunkSchema
  ),
  abortSignal: options.abortSignal,
@@ -2245,7 +2279,7 @@ var OpenAIResponsesLanguageModel = class {
  type: "source",
  source: {
  sourceType: "url",
- id: (_h = (_g = (_f = self.config).generateId) == null ? void 0 : _g.call(_f)) != null ? _h : (0,
+ id: (_h = (_g = (_f = self.config).generateId) == null ? void 0 : _g.call(_f)) != null ? _h : (0, import_provider_utils7.generateId)(),
  url: value.annotation.url,
  title: value.annotation.title
  }
@@ -2280,79 +2314,79 @@ var OpenAIResponsesLanguageModel = class {
  };
  }
  };
- var usageSchema =
- input_tokens:
- input_tokens_details:
- output_tokens:
- output_tokens_details:
+ var usageSchema = import_zod8.z.object({
+ input_tokens: import_zod8.z.number(),
+ input_tokens_details: import_zod8.z.object({ cached_tokens: import_zod8.z.number().nullish() }).nullish(),
+ output_tokens: import_zod8.z.number(),
+ output_tokens_details: import_zod8.z.object({ reasoning_tokens: import_zod8.z.number().nullish() }).nullish()
  });
- var textDeltaChunkSchema =
- type:
- delta:
+ var textDeltaChunkSchema = import_zod8.z.object({
+ type: import_zod8.z.literal("response.output_text.delta"),
+ delta: import_zod8.z.string()
  });
- var responseFinishedChunkSchema =
- type:
- response:
- incomplete_details:
+ var responseFinishedChunkSchema = import_zod8.z.object({
+ type: import_zod8.z.enum(["response.completed", "response.incomplete"]),
+ response: import_zod8.z.object({
+ incomplete_details: import_zod8.z.object({ reason: import_zod8.z.string() }).nullish(),
  usage: usageSchema
  })
  });
- var responseCreatedChunkSchema =
- type:
- response:
- id:
- created_at:
- model:
+ var responseCreatedChunkSchema = import_zod8.z.object({
+ type: import_zod8.z.literal("response.created"),
+ response: import_zod8.z.object({
+ id: import_zod8.z.string(),
+ created_at: import_zod8.z.number(),
+ model: import_zod8.z.string()
  })
  });
- var responseOutputItemDoneSchema =
- type:
- output_index:
- item:
-
- type:
+ var responseOutputItemDoneSchema = import_zod8.z.object({
+ type: import_zod8.z.literal("response.output_item.done"),
+ output_index: import_zod8.z.number(),
+ item: import_zod8.z.discriminatedUnion("type", [
+ import_zod8.z.object({
+ type: import_zod8.z.literal("message")
  }),
-
- type:
- id:
- call_id:
- name:
- arguments:
- status:
+ import_zod8.z.object({
+ type: import_zod8.z.literal("function_call"),
+ id: import_zod8.z.string(),
+ call_id: import_zod8.z.string(),
+ name: import_zod8.z.string(),
+ arguments: import_zod8.z.string(),
+ status: import_zod8.z.literal("completed")
  })
  ])
  });
- var responseFunctionCallArgumentsDeltaSchema =
- type:
- item_id:
- output_index:
- delta:
+ var responseFunctionCallArgumentsDeltaSchema = import_zod8.z.object({
+ type: import_zod8.z.literal("response.function_call_arguments.delta"),
+ item_id: import_zod8.z.string(),
+ output_index: import_zod8.z.number(),
+ delta: import_zod8.z.string()
  });
- var responseOutputItemAddedSchema =
- type:
- output_index:
- item:
-
- type:
+ var responseOutputItemAddedSchema = import_zod8.z.object({
+ type: import_zod8.z.literal("response.output_item.added"),
+ output_index: import_zod8.z.number(),
+ item: import_zod8.z.discriminatedUnion("type", [
+ import_zod8.z.object({
+ type: import_zod8.z.literal("message")
  }),
-
- type:
- id:
- call_id:
- name:
- arguments:
+ import_zod8.z.object({
+ type: import_zod8.z.literal("function_call"),
+ id: import_zod8.z.string(),
+ call_id: import_zod8.z.string(),
+ name: import_zod8.z.string(),
+ arguments: import_zod8.z.string()
  })
  ])
  });
- var responseAnnotationAddedSchema =
- type:
- annotation:
- type:
- url:
- title:
+ var responseAnnotationAddedSchema = import_zod8.z.object({
+ type: import_zod8.z.literal("response.output_text.annotation.added"),
+ annotation: import_zod8.z.object({
+ type: import_zod8.z.literal("url_citation"),
+ url: import_zod8.z.string(),
+ title: import_zod8.z.string()
  })
  });
- var openaiResponsesChunkSchema =
+ var openaiResponsesChunkSchema = import_zod8.z.union([
  textDeltaChunkSchema,
  responseFinishedChunkSchema,
  responseCreatedChunkSchema,
@@ -2360,7 +2394,7 @@ var openaiResponsesChunkSchema = import_zod7.z.union([
  responseFunctionCallArgumentsDeltaSchema,
  responseOutputItemAddedSchema,
  responseAnnotationAddedSchema,
-
+ import_zod8.z.object({ type: import_zod8.z.string() }).passthrough()
  // fallback for unknown chunks
  ]);
  function isTextDeltaChunk(chunk) {
@@ -2405,25 +2439,25 @@ function getResponsesModelConfig(modelId) {
  requiredAutoTruncation: false
  };
  }
- var openaiResponsesProviderOptionsSchema =
- metadata:
- parallelToolCalls:
- previousResponseId:
- store:
- user:
- reasoningEffort:
- strictSchemas:
- instructions:
+ var openaiResponsesProviderOptionsSchema = import_zod8.z.object({
+ metadata: import_zod8.z.any().nullish(),
+ parallelToolCalls: import_zod8.z.boolean().nullish(),
+ previousResponseId: import_zod8.z.string().nullish(),
+ store: import_zod8.z.boolean().nullish(),
+ user: import_zod8.z.string().nullish(),
+ reasoningEffort: import_zod8.z.string().nullish(),
+ strictSchemas: import_zod8.z.boolean().nullish(),
+ instructions: import_zod8.z.string().nullish()
  });

  // src/openai-provider.ts
  function createOpenAI(options = {}) {
  var _a, _b, _c;
- const baseURL = (_a = (0,
+ const baseURL = (_a = (0, import_provider_utils8.withoutTrailingSlash)(options.baseURL)) != null ? _a : "https://api.openai.com/v1";
  const compatibility = (_b = options.compatibility) != null ? _b : "compatible";
  const providerName = (_c = options.name) != null ? _c : "openai";
  const getHeaders = () => ({
- Authorization: `Bearer ${(0,
+ Authorization: `Bearer ${(0, import_provider_utils8.loadApiKey)({
  apiKey: options.apiKey,
  environmentVariableName: "OPENAI_API_KEY",
  description: "OpenAI"
@@ -2458,6 +2492,12 @@ function createOpenAI(options = {}) {
  headers: getHeaders,
  fetch: options.fetch
  });
+ const createTranscriptionModel = (modelId) => new OpenAITranscriptionModel(modelId, {
+ provider: `${providerName}.transcription`,
+ url: ({ path }) => `${baseURL}${path}`,
+ headers: getHeaders,
+ fetch: options.fetch
+ });
  const createLanguageModel = (modelId, settings) => {
  if (new.target) {
  throw new Error(
@@ -2492,6 +2532,8 @@ function createOpenAI(options = {}) {
  provider.textEmbeddingModel = createEmbeddingModel;
  provider.image = createImageModel;
  provider.imageModel = createImageModel;
+ provider.transcription = createTranscriptionModel;
+ provider.transcriptionModel = createTranscriptionModel;
  provider.tools = openaiTools;
  return provider;
  }