@zenning/openai 1.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +1099 -0
- package/README.md +36 -0
- package/dist/index.d.mts +468 -0
- package/dist/index.d.ts +468 -0
- package/dist/index.js +2952 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +2983 -0
- package/dist/index.mjs.map +1 -0
- package/internal/dist/index.d.mts +391 -0
- package/internal/dist/index.d.ts +391 -0
- package/internal/dist/index.js +2768 -0
- package/internal/dist/index.js.map +1 -0
- package/internal/dist/index.mjs +2789 -0
- package/internal/dist/index.mjs.map +1 -0
- package/package.json +71 -0
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,2983 @@
|
|
|
1
|
+
// src/openai-provider.ts
|
|
2
|
+
import {
|
|
3
|
+
loadApiKey,
|
|
4
|
+
withoutTrailingSlash
|
|
5
|
+
} from "@ai-sdk/provider-utils";
|
|
6
|
+
|
|
7
|
+
// src/openai-chat-language-model.ts
|
|
8
|
+
import {
|
|
9
|
+
InvalidResponseDataError,
|
|
10
|
+
UnsupportedFunctionalityError as UnsupportedFunctionalityError3
|
|
11
|
+
} from "@ai-sdk/provider";
|
|
12
|
+
import {
|
|
13
|
+
combineHeaders,
|
|
14
|
+
createEventSourceResponseHandler,
|
|
15
|
+
createJsonResponseHandler,
|
|
16
|
+
generateId,
|
|
17
|
+
isParsableJson,
|
|
18
|
+
postJsonToApi
|
|
19
|
+
} from "@ai-sdk/provider-utils";
|
|
20
|
+
import { z as z2 } from "zod";
|
|
21
|
+
|
|
22
|
+
// src/convert-to-openai-chat-messages.ts
|
|
23
|
+
import {
|
|
24
|
+
UnsupportedFunctionalityError
|
|
25
|
+
} from "@ai-sdk/provider";
|
|
26
|
+
import { convertUint8ArrayToBase64 } from "@ai-sdk/provider-utils";
|
|
27
|
+
// Converts an AI SDK prompt (array of { role, content } messages) into the
// OpenAI Chat Completions message format.
//
// Parameters:
//   prompt                  - AI SDK prompt messages to convert.
//   useLegacyFunctionCalling - when true, emit the deprecated
//                              `function_call` / role:"function" shape instead
//                              of `tool_calls` / role:"tool".
//   systemMessageMode        - "system" | "developer" | "remove"; controls how
//                              system messages are forwarded (some models use a
//                              "developer" role, others accept none at all).
//
// Returns: { messages, warnings } — the converted messages plus any
// non-fatal warnings (e.g. dropped system messages).
// Throws: UnsupportedFunctionalityError for content the API cannot express.
function convertToOpenAIChatMessages({
  prompt,
  useLegacyFunctionCalling = false,
  systemMessageMode = "system"
}) {
  const messages = [];
  const warnings = [];
  for (const { role, content } of prompt) {
    switch (role) {
      case "system": {
        switch (systemMessageMode) {
          case "system": {
            messages.push({ role: "system", content });
            break;
          }
          case "developer": {
            // Reasoning models replace the "system" role with "developer".
            messages.push({ role: "developer", content });
            break;
          }
          case "remove": {
            // Message is dropped entirely; surface that as a warning.
            warnings.push({
              type: "other",
              message: "system messages are removed for this model"
            });
            break;
          }
          default: {
            const _exhaustiveCheck = systemMessageMode;
            throw new Error(
              `Unsupported system message mode: ${_exhaustiveCheck}`
            );
          }
        }
        break;
      }
      case "user": {
        // Fast path: a single text part collapses to a plain string content.
        if (content.length === 1 && content[0].type === "text") {
          messages.push({ role: "user", content: content[0].text });
          break;
        }
        // Otherwise emit the structured multi-part content array.
        messages.push({
          role: "user",
          content: content.map((part, index) => {
            var _a, _b, _c, _d;
            switch (part.type) {
              case "text": {
                return { type: "text", text: part.text };
              }
              case "image": {
                return {
                  type: "image_url",
                  image_url: {
                    // URLs are passed through; raw bytes become a data URL
                    // (mime type defaults to image/jpeg when not provided).
                    url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${convertUint8ArrayToBase64(part.image)}`,
                    // OpenAI specific extension: image detail
                    detail: (_c = (_b = part.providerMetadata) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
                  }
                };
              }
              case "file": {
                // File parts must carry inline data; URLs are not supported.
                if (part.data instanceof URL) {
                  throw new UnsupportedFunctionalityError({
                    functionality: "'File content parts with URL data' functionality not supported."
                  });
                }
                switch (part.mimeType) {
                  case "audio/wav": {
                    return {
                      type: "input_audio",
                      input_audio: { data: part.data, format: "wav" }
                    };
                  }
                  case "audio/mp3":
                  case "audio/mpeg": {
                    return {
                      type: "input_audio",
                      input_audio: { data: part.data, format: "mp3" }
                    };
                  }
                  case "application/pdf": {
                    return {
                      type: "file",
                      file: {
                        // Fallback filename is derived from the part index.
                        filename: (_d = part.filename) != null ? _d : `part-${index}.pdf`,
                        file_data: `data:application/pdf;base64,${part.data}`
                      }
                    };
                  }
                  default: {
                    throw new UnsupportedFunctionalityError({
                      functionality: `File content part type ${part.mimeType} in user messages`
                    });
                  }
                }
              }
            }
          })
        });
        break;
      }
      case "assistant": {
        // Concatenate all text parts and collect tool calls separately.
        let text = "";
        const toolCalls = [];
        for (const part of content) {
          switch (part.type) {
            case "text": {
              text += part.text;
              break;
            }
            case "tool-call": {
              toolCalls.push({
                id: part.toolCallId,
                type: "function",
                function: {
                  name: part.toolName,
                  arguments: JSON.stringify(part.args)
                }
              });
              break;
            }
          }
        }
        if (useLegacyFunctionCalling) {
          // The legacy `function_call` field supports at most one call.
          if (toolCalls.length > 1) {
            throw new UnsupportedFunctionalityError({
              functionality: "useLegacyFunctionCalling with multiple tool calls in one message"
            });
          }
          messages.push({
            role: "assistant",
            content: text,
            function_call: toolCalls.length > 0 ? toolCalls[0].function : void 0
          });
        } else {
          messages.push({
            role: "assistant",
            content: text,
            // Omit tool_calls entirely (undefined) when there are none.
            tool_calls: toolCalls.length > 0 ? toolCalls : void 0
          });
        }
        break;
      }
      case "tool": {
        // Each tool result becomes its own message.
        for (const toolResponse of content) {
          if (useLegacyFunctionCalling) {
            messages.push({
              role: "function",
              name: toolResponse.toolName,
              content: JSON.stringify(toolResponse.result)
            });
          } else {
            messages.push({
              role: "tool",
              tool_call_id: toolResponse.toolCallId,
              content: JSON.stringify(toolResponse.result)
            });
          }
        }
        break;
      }
      default: {
        const _exhaustiveCheck = role;
        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
      }
    }
  }
  return { messages, warnings };
}
|
|
194
|
+
|
|
195
|
+
// src/map-openai-chat-logprobs.ts
|
|
196
|
+
// Maps OpenAI's chat logprobs payload to the AI SDK logprobs shape.
// Returns undefined when no logprob content is present; each entry keeps
// its token and logprob, and `top_logprobs` becomes a (possibly empty)
// `topLogprobs` array.
function mapOpenAIChatLogProbsOutput(logprobs) {
  const entries = logprobs?.content;
  if (entries == null) {
    return undefined;
  }
  return entries.map((entry) => {
    const topLogprobs = entry.top_logprobs
      ? entry.top_logprobs.map((candidate) => ({
          token: candidate.token,
          logprob: candidate.logprob
        }))
      : [];
    return {
      token: entry.token,
      logprob: entry.logprob,
      topLogprobs
    };
  });
}
|
|
207
|
+
|
|
208
|
+
// src/map-openai-finish-reason.ts
|
|
209
|
+
// Translates an OpenAI `finish_reason` string into the AI SDK finish
// reason vocabulary. Both legacy `function_call` and current `tool_calls`
// map to "tool-calls"; anything unrecognized (including null/undefined)
// maps to "unknown".
function mapOpenAIFinishReason(finishReason) {
  // A Map (rather than a plain object) so that prototype property names
  // like "toString" cannot accidentally match.
  const reasonTable = new Map([
    ["stop", "stop"],
    ["length", "length"],
    ["content_filter", "content-filter"],
    ["function_call", "tool-calls"],
    ["tool_calls", "tool-calls"]
  ]);
  return reasonTable.get(finishReason) ?? "unknown";
}
|
|
224
|
+
|
|
225
|
+
// src/openai-error.ts
|
|
226
|
+
import { z } from "zod";
|
|
227
|
+
import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
|
|
228
|
+
// Zod schema for OpenAI API error payloads. Only `error.message` is
// strictly required; everything else is optional/loose so that
// OpenAI-compatible providers with slightly different error bodies
// still parse successfully.
var openaiErrorDataSchema = z.object({
  error: z.object({
    message: z.string(),
    // The additional information below is handled loosely to support
    // OpenAI-compatible providers that have slightly different error
    // responses:
    type: z.string().nullish(),
    param: z.any().nullish(),
    code: z.union([z.string(), z.number()]).nullish()
  })
});
// Shared failed-response handler: parses error responses with the schema
// above and uses `error.message` as the message of the thrown API error.
var openaiFailedResponseHandler = createJsonErrorResponseHandler({
  errorSchema: openaiErrorDataSchema,
  errorToMessage: (data) => data.error.message
});
|
|
243
|
+
|
|
244
|
+
// src/get-response-metadata.ts
|
|
245
|
+
// Extracts standardized response metadata from an OpenAI API response.
// Null values are normalized to undefined; the `created` field (Unix
// seconds) is converted to a JavaScript Date (milliseconds).
function getResponseMetadata({ id, model, created }) {
  const timestamp = created != null ? new Date(created * 1000) : undefined;
  return {
    id: id ?? undefined,
    modelId: model ?? undefined,
    timestamp
  };
}
|
|
256
|
+
|
|
257
|
+
// src/openai-prepare-tools.ts
|
|
258
|
+
import {
|
|
259
|
+
UnsupportedFunctionalityError as UnsupportedFunctionalityError2
|
|
260
|
+
} from "@ai-sdk/provider";
|
|
261
|
+
// Prepares the tool-related request parameters for the OpenAI chat API.
//
// Parameters:
//   mode                     - the call mode; `mode.tools` and
//                              `mode.toolChoice` are read here.
//   useLegacyFunctionCalling - when true, produce the deprecated
//                              `functions` / `function_call` parameters
//                              instead of `tools` / `tool_choice`.
//   structuredOutputs        - when true, mark each function tool as
//                              `strict` (JSON-schema-validated arguments).
//
// Returns one of two shapes depending on the calling convention:
//   { tools, tool_choice, toolWarnings }      (modern tools API), or
//   { functions, function_call, toolWarnings } (legacy functions API).
// Provider-defined tools are not supported and are reported via
// `toolWarnings` rather than forwarded.
function prepareTools({
  mode,
  useLegacyFunctionCalling = false,
  structuredOutputs
}) {
  var _a;
  // Treat an empty tools array the same as no tools at all.
  const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
  const toolWarnings = [];
  if (tools == null) {
    return { tools: void 0, tool_choice: void 0, toolWarnings };
  }
  const toolChoice = mode.toolChoice;
  if (useLegacyFunctionCalling) {
    // Legacy path: map tools to the flat `functions` array.
    const openaiFunctions = [];
    for (const tool of tools) {
      if (tool.type === "provider-defined") {
        toolWarnings.push({ type: "unsupported-tool", tool });
      } else {
        openaiFunctions.push({
          name: tool.name,
          description: tool.description,
          parameters: tool.parameters
        });
      }
    }
    if (toolChoice == null) {
      return {
        functions: openaiFunctions,
        function_call: void 0,
        toolWarnings
      };
    }
    const type2 = toolChoice.type;
    switch (type2) {
      case "auto":
      case "none":
      case void 0:
        // The legacy API has no explicit auto/none selector; omit it.
        return {
          functions: openaiFunctions,
          function_call: void 0,
          toolWarnings
        };
      case "required":
        // "required" cannot be expressed with legacy function calling.
        throw new UnsupportedFunctionalityError2({
          functionality: "useLegacyFunctionCalling and toolChoice: required"
        });
      default:
        // Specific tool selection: force the named function.
        return {
          functions: openaiFunctions,
          function_call: { name: toolChoice.toolName },
          toolWarnings
        };
    }
  }
  // Modern path: map tools to the `tools` array of function definitions.
  const openaiTools2 = [];
  for (const tool of tools) {
    if (tool.type === "provider-defined") {
      toolWarnings.push({ type: "unsupported-tool", tool });
    } else {
      openaiTools2.push({
        type: "function",
        function: {
          name: tool.name,
          description: tool.description,
          parameters: tool.parameters,
          // `strict` is only sent when structured outputs are enabled.
          strict: structuredOutputs ? true : void 0
        }
      });
    }
  }
  if (toolChoice == null) {
    return { tools: openaiTools2, tool_choice: void 0, toolWarnings };
  }
  const type = toolChoice.type;
  switch (type) {
    case "auto":
    case "none":
    case "required":
      // These choice modes are passed through verbatim.
      return { tools: openaiTools2, tool_choice: type, toolWarnings };
    case "tool":
      // Force a specific named function.
      return {
        tools: openaiTools2,
        tool_choice: {
          type: "function",
          function: {
            name: toolChoice.toolName
          }
        },
        toolWarnings
      };
    default: {
      const _exhaustiveCheck = type;
      throw new UnsupportedFunctionalityError2({
        functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
      });
    }
  }
}
|
|
359
|
+
|
|
360
|
+
// src/openai-chat-language-model.ts
|
|
361
|
+
var OpenAIChatLanguageModel = class {
|
|
362
|
+
constructor(modelId, settings, config) {
|
|
363
|
+
this.specificationVersion = "v1";
|
|
364
|
+
this.modelId = modelId;
|
|
365
|
+
this.settings = settings;
|
|
366
|
+
this.config = config;
|
|
367
|
+
}
|
|
368
|
+
get supportsStructuredOutputs() {
|
|
369
|
+
var _a;
|
|
370
|
+
return (_a = this.settings.structuredOutputs) != null ? _a : isReasoningModel(this.modelId);
|
|
371
|
+
}
|
|
372
|
+
get defaultObjectGenerationMode() {
|
|
373
|
+
if (isAudioModel(this.modelId)) {
|
|
374
|
+
return "tool";
|
|
375
|
+
}
|
|
376
|
+
return this.supportsStructuredOutputs ? "json" : "tool";
|
|
377
|
+
}
|
|
378
|
+
get provider() {
|
|
379
|
+
return this.config.provider;
|
|
380
|
+
}
|
|
381
|
+
get supportsImageUrls() {
|
|
382
|
+
return !this.settings.downloadImages;
|
|
383
|
+
}
|
|
384
|
+
getArgs({
|
|
385
|
+
mode,
|
|
386
|
+
prompt,
|
|
387
|
+
maxTokens,
|
|
388
|
+
temperature,
|
|
389
|
+
topP,
|
|
390
|
+
topK,
|
|
391
|
+
frequencyPenalty,
|
|
392
|
+
presencePenalty,
|
|
393
|
+
stopSequences,
|
|
394
|
+
responseFormat,
|
|
395
|
+
seed,
|
|
396
|
+
providerMetadata
|
|
397
|
+
}) {
|
|
398
|
+
var _a, _b, _c, _d, _e, _f, _g, _h;
|
|
399
|
+
const type = mode.type;
|
|
400
|
+
const warnings = [];
|
|
401
|
+
if (topK != null) {
|
|
402
|
+
warnings.push({
|
|
403
|
+
type: "unsupported-setting",
|
|
404
|
+
setting: "topK"
|
|
405
|
+
});
|
|
406
|
+
}
|
|
407
|
+
if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !this.supportsStructuredOutputs) {
|
|
408
|
+
warnings.push({
|
|
409
|
+
type: "unsupported-setting",
|
|
410
|
+
setting: "responseFormat",
|
|
411
|
+
details: "JSON response format schema is only supported with structuredOutputs"
|
|
412
|
+
});
|
|
413
|
+
}
|
|
414
|
+
const useLegacyFunctionCalling = this.settings.useLegacyFunctionCalling;
|
|
415
|
+
if (useLegacyFunctionCalling && this.settings.parallelToolCalls === true) {
|
|
416
|
+
throw new UnsupportedFunctionalityError3({
|
|
417
|
+
functionality: "useLegacyFunctionCalling with parallelToolCalls"
|
|
418
|
+
});
|
|
419
|
+
}
|
|
420
|
+
if (useLegacyFunctionCalling && this.supportsStructuredOutputs) {
|
|
421
|
+
throw new UnsupportedFunctionalityError3({
|
|
422
|
+
functionality: "structuredOutputs with useLegacyFunctionCalling"
|
|
423
|
+
});
|
|
424
|
+
}
|
|
425
|
+
const { messages, warnings: messageWarnings } = convertToOpenAIChatMessages(
|
|
426
|
+
{
|
|
427
|
+
prompt,
|
|
428
|
+
useLegacyFunctionCalling,
|
|
429
|
+
systemMessageMode: getSystemMessageMode(this.modelId)
|
|
430
|
+
}
|
|
431
|
+
);
|
|
432
|
+
warnings.push(...messageWarnings);
|
|
433
|
+
const baseArgs = {
|
|
434
|
+
// model id:
|
|
435
|
+
model: this.modelId,
|
|
436
|
+
// model specific settings:
|
|
437
|
+
logit_bias: this.settings.logitBias,
|
|
438
|
+
logprobs: this.settings.logprobs === true || typeof this.settings.logprobs === "number" ? true : void 0,
|
|
439
|
+
top_logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
|
|
440
|
+
user: this.settings.user,
|
|
441
|
+
parallel_tool_calls: this.settings.parallelToolCalls,
|
|
442
|
+
// standardized settings:
|
|
443
|
+
max_tokens: maxTokens,
|
|
444
|
+
temperature,
|
|
445
|
+
top_p: topP,
|
|
446
|
+
frequency_penalty: frequencyPenalty,
|
|
447
|
+
presence_penalty: presencePenalty,
|
|
448
|
+
response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? this.supportsStructuredOutputs && responseFormat.schema != null ? {
|
|
449
|
+
type: "json_schema",
|
|
450
|
+
json_schema: {
|
|
451
|
+
schema: responseFormat.schema,
|
|
452
|
+
strict: true,
|
|
453
|
+
name: (_a = responseFormat.name) != null ? _a : "response",
|
|
454
|
+
description: responseFormat.description
|
|
455
|
+
}
|
|
456
|
+
} : { type: "json_object" } : void 0,
|
|
457
|
+
stop: stopSequences,
|
|
458
|
+
seed,
|
|
459
|
+
// openai specific settings:
|
|
460
|
+
// TODO remove in next major version; we auto-map maxTokens now
|
|
461
|
+
max_completion_tokens: (_b = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _b.maxCompletionTokens,
|
|
462
|
+
store: (_c = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _c.store,
|
|
463
|
+
metadata: (_d = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _d.metadata,
|
|
464
|
+
prediction: (_e = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _e.prediction,
|
|
465
|
+
reasoning_effort: (_g = (_f = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _f.reasoningEffort) != null ? _g : this.settings.reasoningEffort,
|
|
466
|
+
// messages:
|
|
467
|
+
messages
|
|
468
|
+
};
|
|
469
|
+
if (isReasoningModel(this.modelId)) {
|
|
470
|
+
if (baseArgs.temperature != null) {
|
|
471
|
+
baseArgs.temperature = void 0;
|
|
472
|
+
warnings.push({
|
|
473
|
+
type: "unsupported-setting",
|
|
474
|
+
setting: "temperature",
|
|
475
|
+
details: "temperature is not supported for reasoning models"
|
|
476
|
+
});
|
|
477
|
+
}
|
|
478
|
+
if (baseArgs.top_p != null) {
|
|
479
|
+
baseArgs.top_p = void 0;
|
|
480
|
+
warnings.push({
|
|
481
|
+
type: "unsupported-setting",
|
|
482
|
+
setting: "topP",
|
|
483
|
+
details: "topP is not supported for reasoning models"
|
|
484
|
+
});
|
|
485
|
+
}
|
|
486
|
+
if (baseArgs.frequency_penalty != null) {
|
|
487
|
+
baseArgs.frequency_penalty = void 0;
|
|
488
|
+
warnings.push({
|
|
489
|
+
type: "unsupported-setting",
|
|
490
|
+
setting: "frequencyPenalty",
|
|
491
|
+
details: "frequencyPenalty is not supported for reasoning models"
|
|
492
|
+
});
|
|
493
|
+
}
|
|
494
|
+
if (baseArgs.presence_penalty != null) {
|
|
495
|
+
baseArgs.presence_penalty = void 0;
|
|
496
|
+
warnings.push({
|
|
497
|
+
type: "unsupported-setting",
|
|
498
|
+
setting: "presencePenalty",
|
|
499
|
+
details: "presencePenalty is not supported for reasoning models"
|
|
500
|
+
});
|
|
501
|
+
}
|
|
502
|
+
if (baseArgs.logit_bias != null) {
|
|
503
|
+
baseArgs.logit_bias = void 0;
|
|
504
|
+
warnings.push({
|
|
505
|
+
type: "other",
|
|
506
|
+
message: "logitBias is not supported for reasoning models"
|
|
507
|
+
});
|
|
508
|
+
}
|
|
509
|
+
if (baseArgs.logprobs != null) {
|
|
510
|
+
baseArgs.logprobs = void 0;
|
|
511
|
+
warnings.push({
|
|
512
|
+
type: "other",
|
|
513
|
+
message: "logprobs is not supported for reasoning models"
|
|
514
|
+
});
|
|
515
|
+
}
|
|
516
|
+
if (baseArgs.top_logprobs != null) {
|
|
517
|
+
baseArgs.top_logprobs = void 0;
|
|
518
|
+
warnings.push({
|
|
519
|
+
type: "other",
|
|
520
|
+
message: "topLogprobs is not supported for reasoning models"
|
|
521
|
+
});
|
|
522
|
+
}
|
|
523
|
+
if (baseArgs.max_tokens != null) {
|
|
524
|
+
if (baseArgs.max_completion_tokens == null) {
|
|
525
|
+
baseArgs.max_completion_tokens = baseArgs.max_tokens;
|
|
526
|
+
}
|
|
527
|
+
baseArgs.max_tokens = void 0;
|
|
528
|
+
}
|
|
529
|
+
} else if (this.modelId.startsWith("gpt-4o-search-preview") || this.modelId.startsWith("gpt-4o-mini-search-preview")) {
|
|
530
|
+
if (baseArgs.temperature != null) {
|
|
531
|
+
baseArgs.temperature = void 0;
|
|
532
|
+
warnings.push({
|
|
533
|
+
type: "unsupported-setting",
|
|
534
|
+
setting: "temperature",
|
|
535
|
+
details: "temperature is not supported for the search preview models and has been removed."
|
|
536
|
+
});
|
|
537
|
+
}
|
|
538
|
+
}
|
|
539
|
+
switch (type) {
|
|
540
|
+
case "regular": {
|
|
541
|
+
const { tools, tool_choice, functions, function_call, toolWarnings } = prepareTools({
|
|
542
|
+
mode,
|
|
543
|
+
useLegacyFunctionCalling,
|
|
544
|
+
structuredOutputs: this.supportsStructuredOutputs
|
|
545
|
+
});
|
|
546
|
+
return {
|
|
547
|
+
args: {
|
|
548
|
+
...baseArgs,
|
|
549
|
+
tools,
|
|
550
|
+
tool_choice,
|
|
551
|
+
functions,
|
|
552
|
+
function_call
|
|
553
|
+
},
|
|
554
|
+
warnings: [...warnings, ...toolWarnings]
|
|
555
|
+
};
|
|
556
|
+
}
|
|
557
|
+
case "object-json": {
|
|
558
|
+
return {
|
|
559
|
+
args: {
|
|
560
|
+
...baseArgs,
|
|
561
|
+
response_format: this.supportsStructuredOutputs && mode.schema != null ? {
|
|
562
|
+
type: "json_schema",
|
|
563
|
+
json_schema: {
|
|
564
|
+
schema: mode.schema,
|
|
565
|
+
strict: true,
|
|
566
|
+
name: (_h = mode.name) != null ? _h : "response",
|
|
567
|
+
description: mode.description
|
|
568
|
+
}
|
|
569
|
+
} : { type: "json_object" }
|
|
570
|
+
},
|
|
571
|
+
warnings
|
|
572
|
+
};
|
|
573
|
+
}
|
|
574
|
+
case "object-tool": {
|
|
575
|
+
return {
|
|
576
|
+
args: useLegacyFunctionCalling ? {
|
|
577
|
+
...baseArgs,
|
|
578
|
+
function_call: {
|
|
579
|
+
name: mode.tool.name
|
|
580
|
+
},
|
|
581
|
+
functions: [
|
|
582
|
+
{
|
|
583
|
+
name: mode.tool.name,
|
|
584
|
+
description: mode.tool.description,
|
|
585
|
+
parameters: mode.tool.parameters
|
|
586
|
+
}
|
|
587
|
+
]
|
|
588
|
+
} : {
|
|
589
|
+
...baseArgs,
|
|
590
|
+
tool_choice: {
|
|
591
|
+
type: "function",
|
|
592
|
+
function: { name: mode.tool.name }
|
|
593
|
+
},
|
|
594
|
+
tools: [
|
|
595
|
+
{
|
|
596
|
+
type: "function",
|
|
597
|
+
function: {
|
|
598
|
+
name: mode.tool.name,
|
|
599
|
+
description: mode.tool.description,
|
|
600
|
+
parameters: mode.tool.parameters,
|
|
601
|
+
strict: this.supportsStructuredOutputs ? true : void 0
|
|
602
|
+
}
|
|
603
|
+
}
|
|
604
|
+
]
|
|
605
|
+
},
|
|
606
|
+
warnings
|
|
607
|
+
};
|
|
608
|
+
}
|
|
609
|
+
default: {
|
|
610
|
+
const _exhaustiveCheck = type;
|
|
611
|
+
throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
|
|
612
|
+
}
|
|
613
|
+
}
|
|
614
|
+
}
|
|
615
|
+
async doGenerate(options) {
|
|
616
|
+
var _a, _b, _c, _d, _e, _f, _g, _h;
|
|
617
|
+
const { args: body, warnings } = this.getArgs(options);
|
|
618
|
+
const {
|
|
619
|
+
responseHeaders,
|
|
620
|
+
value: response,
|
|
621
|
+
rawValue: rawResponse
|
|
622
|
+
} = await postJsonToApi({
|
|
623
|
+
url: this.config.url({
|
|
624
|
+
path: "/chat/completions",
|
|
625
|
+
modelId: this.modelId
|
|
626
|
+
}),
|
|
627
|
+
headers: combineHeaders(this.config.headers(), options.headers),
|
|
628
|
+
body,
|
|
629
|
+
failedResponseHandler: openaiFailedResponseHandler,
|
|
630
|
+
successfulResponseHandler: createJsonResponseHandler(
|
|
631
|
+
openaiChatResponseSchema
|
|
632
|
+
),
|
|
633
|
+
abortSignal: options.abortSignal,
|
|
634
|
+
fetch: this.config.fetch
|
|
635
|
+
});
|
|
636
|
+
const { messages: rawPrompt, ...rawSettings } = body;
|
|
637
|
+
const choice = response.choices[0];
|
|
638
|
+
const completionTokenDetails = (_a = response.usage) == null ? void 0 : _a.completion_tokens_details;
|
|
639
|
+
const promptTokenDetails = (_b = response.usage) == null ? void 0 : _b.prompt_tokens_details;
|
|
640
|
+
const providerMetadata = { openai: {} };
|
|
641
|
+
if ((completionTokenDetails == null ? void 0 : completionTokenDetails.reasoning_tokens) != null) {
|
|
642
|
+
providerMetadata.openai.reasoningTokens = completionTokenDetails == null ? void 0 : completionTokenDetails.reasoning_tokens;
|
|
643
|
+
}
|
|
644
|
+
if ((completionTokenDetails == null ? void 0 : completionTokenDetails.accepted_prediction_tokens) != null) {
|
|
645
|
+
providerMetadata.openai.acceptedPredictionTokens = completionTokenDetails == null ? void 0 : completionTokenDetails.accepted_prediction_tokens;
|
|
646
|
+
}
|
|
647
|
+
if ((completionTokenDetails == null ? void 0 : completionTokenDetails.rejected_prediction_tokens) != null) {
|
|
648
|
+
providerMetadata.openai.rejectedPredictionTokens = completionTokenDetails == null ? void 0 : completionTokenDetails.rejected_prediction_tokens;
|
|
649
|
+
}
|
|
650
|
+
if ((promptTokenDetails == null ? void 0 : promptTokenDetails.cached_tokens) != null) {
|
|
651
|
+
providerMetadata.openai.cachedPromptTokens = promptTokenDetails == null ? void 0 : promptTokenDetails.cached_tokens;
|
|
652
|
+
}
|
|
653
|
+
return {
|
|
654
|
+
text: (_c = choice.message.content) != null ? _c : void 0,
|
|
655
|
+
toolCalls: this.settings.useLegacyFunctionCalling && choice.message.function_call ? [
|
|
656
|
+
{
|
|
657
|
+
toolCallType: "function",
|
|
658
|
+
toolCallId: generateId(),
|
|
659
|
+
toolName: choice.message.function_call.name,
|
|
660
|
+
args: choice.message.function_call.arguments
|
|
661
|
+
}
|
|
662
|
+
] : (_d = choice.message.tool_calls) == null ? void 0 : _d.map((toolCall) => {
|
|
663
|
+
var _a2;
|
|
664
|
+
return {
|
|
665
|
+
toolCallType: "function",
|
|
666
|
+
toolCallId: (_a2 = toolCall.id) != null ? _a2 : generateId(),
|
|
667
|
+
toolName: toolCall.function.name,
|
|
668
|
+
args: toolCall.function.arguments
|
|
669
|
+
};
|
|
670
|
+
}),
|
|
671
|
+
finishReason: mapOpenAIFinishReason(choice.finish_reason),
|
|
672
|
+
usage: {
|
|
673
|
+
promptTokens: (_f = (_e = response.usage) == null ? void 0 : _e.prompt_tokens) != null ? _f : NaN,
|
|
674
|
+
completionTokens: (_h = (_g = response.usage) == null ? void 0 : _g.completion_tokens) != null ? _h : NaN
|
|
675
|
+
},
|
|
676
|
+
rawCall: { rawPrompt, rawSettings },
|
|
677
|
+
rawResponse: { headers: responseHeaders, body: rawResponse },
|
|
678
|
+
request: { body: JSON.stringify(body) },
|
|
679
|
+
response: getResponseMetadata(response),
|
|
680
|
+
warnings,
|
|
681
|
+
logprobs: mapOpenAIChatLogProbsOutput(choice.logprobs),
|
|
682
|
+
providerMetadata
|
|
683
|
+
};
|
|
684
|
+
}
|
|
685
|
+
async doStream(options) {
|
|
686
|
+
if (this.settings.simulateStreaming) {
|
|
687
|
+
const result = await this.doGenerate(options);
|
|
688
|
+
const simulatedStream = new ReadableStream({
|
|
689
|
+
start(controller) {
|
|
690
|
+
controller.enqueue({ type: "response-metadata", ...result.response });
|
|
691
|
+
if (result.text) {
|
|
692
|
+
controller.enqueue({
|
|
693
|
+
type: "text-delta",
|
|
694
|
+
textDelta: result.text
|
|
695
|
+
});
|
|
696
|
+
}
|
|
697
|
+
if (result.toolCalls) {
|
|
698
|
+
for (const toolCall of result.toolCalls) {
|
|
699
|
+
controller.enqueue({
|
|
700
|
+
type: "tool-call-delta",
|
|
701
|
+
toolCallType: "function",
|
|
702
|
+
toolCallId: toolCall.toolCallId,
|
|
703
|
+
toolName: toolCall.toolName,
|
|
704
|
+
argsTextDelta: toolCall.args
|
|
705
|
+
});
|
|
706
|
+
controller.enqueue({
|
|
707
|
+
type: "tool-call",
|
|
708
|
+
...toolCall
|
|
709
|
+
});
|
|
710
|
+
}
|
|
711
|
+
}
|
|
712
|
+
controller.enqueue({
|
|
713
|
+
type: "finish",
|
|
714
|
+
finishReason: result.finishReason,
|
|
715
|
+
usage: result.usage,
|
|
716
|
+
logprobs: result.logprobs,
|
|
717
|
+
providerMetadata: result.providerMetadata
|
|
718
|
+
});
|
|
719
|
+
controller.close();
|
|
720
|
+
}
|
|
721
|
+
});
|
|
722
|
+
return {
|
|
723
|
+
stream: simulatedStream,
|
|
724
|
+
rawCall: result.rawCall,
|
|
725
|
+
rawResponse: result.rawResponse,
|
|
726
|
+
warnings: result.warnings
|
|
727
|
+
};
|
|
728
|
+
}
|
|
729
|
+
const { args, warnings } = this.getArgs(options);
|
|
730
|
+
const body = {
|
|
731
|
+
...args,
|
|
732
|
+
stream: true,
|
|
733
|
+
// only include stream_options when in strict compatibility mode:
|
|
734
|
+
stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
|
|
735
|
+
};
|
|
736
|
+
const { responseHeaders, value: response } = await postJsonToApi({
|
|
737
|
+
url: this.config.url({
|
|
738
|
+
path: "/chat/completions",
|
|
739
|
+
modelId: this.modelId
|
|
740
|
+
}),
|
|
741
|
+
headers: combineHeaders(this.config.headers(), options.headers),
|
|
742
|
+
body,
|
|
743
|
+
failedResponseHandler: openaiFailedResponseHandler,
|
|
744
|
+
successfulResponseHandler: createEventSourceResponseHandler(
|
|
745
|
+
openaiChatChunkSchema
|
|
746
|
+
),
|
|
747
|
+
abortSignal: options.abortSignal,
|
|
748
|
+
fetch: this.config.fetch
|
|
749
|
+
});
|
|
750
|
+
const { messages: rawPrompt, ...rawSettings } = args;
|
|
751
|
+
const toolCalls = [];
|
|
752
|
+
let finishReason = "unknown";
|
|
753
|
+
let usage = {
|
|
754
|
+
promptTokens: void 0,
|
|
755
|
+
completionTokens: void 0
|
|
756
|
+
};
|
|
757
|
+
let logprobs;
|
|
758
|
+
let isFirstChunk = true;
|
|
759
|
+
const { useLegacyFunctionCalling } = this.settings;
|
|
760
|
+
const providerMetadata = { openai: {} };
|
|
761
|
+
return {
|
|
762
|
+
stream: response.pipeThrough(
|
|
763
|
+
new TransformStream({
|
|
764
|
+
transform(chunk, controller) {
|
|
765
|
+
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
|
|
766
|
+
if (!chunk.success) {
|
|
767
|
+
finishReason = "error";
|
|
768
|
+
controller.enqueue({ type: "error", error: chunk.error });
|
|
769
|
+
return;
|
|
770
|
+
}
|
|
771
|
+
const value = chunk.value;
|
|
772
|
+
if ("error" in value) {
|
|
773
|
+
finishReason = "error";
|
|
774
|
+
controller.enqueue({ type: "error", error: value.error });
|
|
775
|
+
return;
|
|
776
|
+
}
|
|
777
|
+
if (isFirstChunk) {
|
|
778
|
+
isFirstChunk = false;
|
|
779
|
+
controller.enqueue({
|
|
780
|
+
type: "response-metadata",
|
|
781
|
+
...getResponseMetadata(value)
|
|
782
|
+
});
|
|
783
|
+
}
|
|
784
|
+
if (value.usage != null) {
|
|
785
|
+
const {
|
|
786
|
+
prompt_tokens,
|
|
787
|
+
completion_tokens,
|
|
788
|
+
prompt_tokens_details,
|
|
789
|
+
completion_tokens_details
|
|
790
|
+
} = value.usage;
|
|
791
|
+
usage = {
|
|
792
|
+
promptTokens: prompt_tokens != null ? prompt_tokens : void 0,
|
|
793
|
+
completionTokens: completion_tokens != null ? completion_tokens : void 0
|
|
794
|
+
};
|
|
795
|
+
if ((completion_tokens_details == null ? void 0 : completion_tokens_details.reasoning_tokens) != null) {
|
|
796
|
+
providerMetadata.openai.reasoningTokens = completion_tokens_details == null ? void 0 : completion_tokens_details.reasoning_tokens;
|
|
797
|
+
}
|
|
798
|
+
if ((completion_tokens_details == null ? void 0 : completion_tokens_details.accepted_prediction_tokens) != null) {
|
|
799
|
+
providerMetadata.openai.acceptedPredictionTokens = completion_tokens_details == null ? void 0 : completion_tokens_details.accepted_prediction_tokens;
|
|
800
|
+
}
|
|
801
|
+
if ((completion_tokens_details == null ? void 0 : completion_tokens_details.rejected_prediction_tokens) != null) {
|
|
802
|
+
providerMetadata.openai.rejectedPredictionTokens = completion_tokens_details == null ? void 0 : completion_tokens_details.rejected_prediction_tokens;
|
|
803
|
+
}
|
|
804
|
+
if ((prompt_tokens_details == null ? void 0 : prompt_tokens_details.cached_tokens) != null) {
|
|
805
|
+
providerMetadata.openai.cachedPromptTokens = prompt_tokens_details == null ? void 0 : prompt_tokens_details.cached_tokens;
|
|
806
|
+
}
|
|
807
|
+
}
|
|
808
|
+
const choice = value.choices[0];
|
|
809
|
+
if ((choice == null ? void 0 : choice.finish_reason) != null) {
|
|
810
|
+
finishReason = mapOpenAIFinishReason(choice.finish_reason);
|
|
811
|
+
}
|
|
812
|
+
if ((choice == null ? void 0 : choice.delta) == null) {
|
|
813
|
+
return;
|
|
814
|
+
}
|
|
815
|
+
const delta = choice.delta;
|
|
816
|
+
if (delta.content != null) {
|
|
817
|
+
controller.enqueue({
|
|
818
|
+
type: "text-delta",
|
|
819
|
+
textDelta: delta.content
|
|
820
|
+
});
|
|
821
|
+
}
|
|
822
|
+
const mappedLogprobs = mapOpenAIChatLogProbsOutput(
|
|
823
|
+
choice == null ? void 0 : choice.logprobs
|
|
824
|
+
);
|
|
825
|
+
if (mappedLogprobs == null ? void 0 : mappedLogprobs.length) {
|
|
826
|
+
if (logprobs === void 0) logprobs = [];
|
|
827
|
+
logprobs.push(...mappedLogprobs);
|
|
828
|
+
}
|
|
829
|
+
const mappedToolCalls = useLegacyFunctionCalling && delta.function_call != null ? [
|
|
830
|
+
{
|
|
831
|
+
type: "function",
|
|
832
|
+
id: generateId(),
|
|
833
|
+
function: delta.function_call,
|
|
834
|
+
index: 0
|
|
835
|
+
}
|
|
836
|
+
] : delta.tool_calls;
|
|
837
|
+
if (mappedToolCalls != null) {
|
|
838
|
+
for (const toolCallDelta of mappedToolCalls) {
|
|
839
|
+
const index = toolCallDelta.index;
|
|
840
|
+
if (toolCalls[index] == null) {
|
|
841
|
+
if (toolCallDelta.type !== "function") {
|
|
842
|
+
throw new InvalidResponseDataError({
|
|
843
|
+
data: toolCallDelta,
|
|
844
|
+
message: `Expected 'function' type.`
|
|
845
|
+
});
|
|
846
|
+
}
|
|
847
|
+
if (toolCallDelta.id == null) {
|
|
848
|
+
throw new InvalidResponseDataError({
|
|
849
|
+
data: toolCallDelta,
|
|
850
|
+
message: `Expected 'id' to be a string.`
|
|
851
|
+
});
|
|
852
|
+
}
|
|
853
|
+
if (((_a = toolCallDelta.function) == null ? void 0 : _a.name) == null) {
|
|
854
|
+
throw new InvalidResponseDataError({
|
|
855
|
+
data: toolCallDelta,
|
|
856
|
+
message: `Expected 'function.name' to be a string.`
|
|
857
|
+
});
|
|
858
|
+
}
|
|
859
|
+
toolCalls[index] = {
|
|
860
|
+
id: toolCallDelta.id,
|
|
861
|
+
type: "function",
|
|
862
|
+
function: {
|
|
863
|
+
name: toolCallDelta.function.name,
|
|
864
|
+
arguments: (_b = toolCallDelta.function.arguments) != null ? _b : ""
|
|
865
|
+
},
|
|
866
|
+
hasFinished: false
|
|
867
|
+
};
|
|
868
|
+
const toolCall2 = toolCalls[index];
|
|
869
|
+
if (((_c = toolCall2.function) == null ? void 0 : _c.name) != null && ((_d = toolCall2.function) == null ? void 0 : _d.arguments) != null) {
|
|
870
|
+
if (toolCall2.function.arguments.length > 0) {
|
|
871
|
+
controller.enqueue({
|
|
872
|
+
type: "tool-call-delta",
|
|
873
|
+
toolCallType: "function",
|
|
874
|
+
toolCallId: toolCall2.id,
|
|
875
|
+
toolName: toolCall2.function.name,
|
|
876
|
+
argsTextDelta: toolCall2.function.arguments
|
|
877
|
+
});
|
|
878
|
+
}
|
|
879
|
+
if (isParsableJson(toolCall2.function.arguments)) {
|
|
880
|
+
controller.enqueue({
|
|
881
|
+
type: "tool-call",
|
|
882
|
+
toolCallType: "function",
|
|
883
|
+
toolCallId: (_e = toolCall2.id) != null ? _e : generateId(),
|
|
884
|
+
toolName: toolCall2.function.name,
|
|
885
|
+
args: toolCall2.function.arguments
|
|
886
|
+
});
|
|
887
|
+
toolCall2.hasFinished = true;
|
|
888
|
+
}
|
|
889
|
+
}
|
|
890
|
+
continue;
|
|
891
|
+
}
|
|
892
|
+
const toolCall = toolCalls[index];
|
|
893
|
+
if (toolCall.hasFinished) {
|
|
894
|
+
continue;
|
|
895
|
+
}
|
|
896
|
+
if (((_f = toolCallDelta.function) == null ? void 0 : _f.arguments) != null) {
|
|
897
|
+
toolCall.function.arguments += (_h = (_g = toolCallDelta.function) == null ? void 0 : _g.arguments) != null ? _h : "";
|
|
898
|
+
}
|
|
899
|
+
controller.enqueue({
|
|
900
|
+
type: "tool-call-delta",
|
|
901
|
+
toolCallType: "function",
|
|
902
|
+
toolCallId: toolCall.id,
|
|
903
|
+
toolName: toolCall.function.name,
|
|
904
|
+
argsTextDelta: (_i = toolCallDelta.function.arguments) != null ? _i : ""
|
|
905
|
+
});
|
|
906
|
+
if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && isParsableJson(toolCall.function.arguments)) {
|
|
907
|
+
controller.enqueue({
|
|
908
|
+
type: "tool-call",
|
|
909
|
+
toolCallType: "function",
|
|
910
|
+
toolCallId: (_l = toolCall.id) != null ? _l : generateId(),
|
|
911
|
+
toolName: toolCall.function.name,
|
|
912
|
+
args: toolCall.function.arguments
|
|
913
|
+
});
|
|
914
|
+
toolCall.hasFinished = true;
|
|
915
|
+
}
|
|
916
|
+
}
|
|
917
|
+
}
|
|
918
|
+
},
|
|
919
|
+
flush(controller) {
|
|
920
|
+
var _a, _b;
|
|
921
|
+
controller.enqueue({
|
|
922
|
+
type: "finish",
|
|
923
|
+
finishReason,
|
|
924
|
+
logprobs,
|
|
925
|
+
usage: {
|
|
926
|
+
promptTokens: (_a = usage.promptTokens) != null ? _a : NaN,
|
|
927
|
+
completionTokens: (_b = usage.completionTokens) != null ? _b : NaN
|
|
928
|
+
},
|
|
929
|
+
...providerMetadata != null ? { providerMetadata } : {}
|
|
930
|
+
});
|
|
931
|
+
}
|
|
932
|
+
})
|
|
933
|
+
),
|
|
934
|
+
rawCall: { rawPrompt, rawSettings },
|
|
935
|
+
rawResponse: { headers: responseHeaders },
|
|
936
|
+
request: { body: JSON.stringify(body) },
|
|
937
|
+
warnings
|
|
938
|
+
};
|
|
939
|
+
}
|
|
940
|
+
};
|
|
941
|
+
// Zod schema for the `usage` object returned by the OpenAI chat endpoints.
// Every field (and the object itself) is `.nullish()` so that responses from
// OpenAI-compatible providers that omit usage data still parse successfully.
var openaiTokenUsageSchema = z2.object({
  prompt_tokens: z2.number().nullish(),
  completion_tokens: z2.number().nullish(),
  // cached prompt tokens (prompt caching); surfaced as provider metadata
  prompt_tokens_details: z2.object({
    cached_tokens: z2.number().nullish()
  }).nullish(),
  // breakdown of completion tokens (reasoning / predicted-output accounting)
  completion_tokens_details: z2.object({
    reasoning_tokens: z2.number().nullish(),
    accepted_prediction_tokens: z2.number().nullish(),
    rejected_prediction_tokens: z2.number().nullish()
  }).nullish()
}).nullish();
|
|
953
|
+
// Zod schema for a non-streaming chat completion response.
// Limited to the fields this provider actually reads; identifying fields are
// `.nullish()` to tolerate OpenAI-compatible servers that omit them.
var openaiChatResponseSchema = z2.object({
  id: z2.string().nullish(),
  created: z2.number().nullish(),
  model: z2.string().nullish(),
  choices: z2.array(
    z2.object({
      message: z2.object({
        role: z2.literal("assistant").nullish(),
        content: z2.string().nullish(),
        // legacy function-calling response shape
        function_call: z2.object({
          arguments: z2.string(),
          name: z2.string()
        }).nullish(),
        // current tool-calling response shape; arguments arrive as a JSON string
        tool_calls: z2.array(
          z2.object({
            id: z2.string().nullish(),
            type: z2.literal("function"),
            function: z2.object({
              name: z2.string(),
              arguments: z2.string()
            })
          })
        ).nullish()
      }),
      index: z2.number(),
      // per-token log probabilities (only present when requested)
      logprobs: z2.object({
        content: z2.array(
          z2.object({
            token: z2.string(),
            logprob: z2.number(),
            top_logprobs: z2.array(
              z2.object({
                token: z2.string(),
                logprob: z2.number()
              })
            )
          })
        ).nullable()
      }).nullish(),
      finish_reason: z2.string().nullish()
    })
  ),
  usage: openaiTokenUsageSchema
});
|
|
997
|
+
// Zod schema for a single SSE chunk of a streaming chat completion.
// A chunk is either a delta payload or an OpenAI error object (hence the
// union with `openaiErrorDataSchema`); the stream transform checks for the
// "error" key to distinguish the two cases.
var openaiChatChunkSchema = z2.union([
  z2.object({
    id: z2.string().nullish(),
    created: z2.number().nullish(),
    model: z2.string().nullish(),
    choices: z2.array(
      z2.object({
        // incremental message delta; all fields optional because any subset
        // may arrive in a given chunk
        delta: z2.object({
          role: z2.enum(["assistant"]).nullish(),
          content: z2.string().nullish(),
          // legacy function-calling delta (name/arguments stream separately)
          function_call: z2.object({
            name: z2.string().optional(),
            arguments: z2.string().optional()
          }).nullish(),
          // tool-call deltas; `index` correlates fragments of the same call
          tool_calls: z2.array(
            z2.object({
              index: z2.number(),
              id: z2.string().nullish(),
              type: z2.literal("function").nullish(),
              function: z2.object({
                name: z2.string().nullish(),
                arguments: z2.string().nullish()
              })
            })
          ).nullish()
        }).nullish(),
        logprobs: z2.object({
          content: z2.array(
            z2.object({
              token: z2.string(),
              logprob: z2.number(),
              top_logprobs: z2.array(
                z2.object({
                  token: z2.string(),
                  logprob: z2.number()
                })
              )
            })
          ).nullable()
        }).nullish(),
        finish_reason: z2.string().nullish(),
        index: z2.number()
      })
    ),
    usage: openaiTokenUsageSchema
  }),
  openaiErrorDataSchema
]);
|
|
1045
|
+
// Returns true for reasoning-model ids. NOTE(review): this matches ANY model
// id whose first character is "o" (o1/o3/o4 families) — confirm no
// non-reasoning model id starts with "o".
function isReasoningModel(modelId) {
  return modelId.charAt(0) === "o";
}
// Returns true for the gpt-4o audio-preview model family.
function isAudioModel(modelId) {
  return modelId.indexOf("gpt-4o-audio-preview") === 0;
}
// Determines how system messages must be sent for the given model:
// "system" for non-reasoning models, a per-model override from
// `reasoningModels` when present, otherwise "developer".
function getSystemMessageMode(modelId) {
  if (!isReasoningModel(modelId)) {
    return "system";
  }
  const modelConfig = reasoningModels[modelId];
  const mode = modelConfig == null ? void 0 : modelConfig.systemMessageMode;
  return mode != null ? mode : "developer";
}
// Per-model overrides for reasoning models. o1-era models reject system
// messages entirely ("remove"); later models take them via the "developer"
// role. Models absent from this table fall back to "developer".
var reasoningModels = {
  "o1-mini": {
    systemMessageMode: "remove"
  },
  "o1-mini-2024-09-12": {
    systemMessageMode: "remove"
  },
  "o1-preview": {
    systemMessageMode: "remove"
  },
  "o1-preview-2024-09-12": {
    systemMessageMode: "remove"
  },
  o3: {
    systemMessageMode: "developer"
  },
  "o3-2025-04-16": {
    systemMessageMode: "developer"
  },
  "o3-mini": {
    systemMessageMode: "developer"
  },
  "o3-mini-2025-01-31": {
    systemMessageMode: "developer"
  },
  "o4-mini": {
    systemMessageMode: "developer"
  },
  "o4-mini-2025-04-16": {
    systemMessageMode: "developer"
  }
};
|
|
1090
|
+
|
|
1091
|
+
// src/openai-completion-language-model.ts
|
|
1092
|
+
import {
|
|
1093
|
+
UnsupportedFunctionalityError as UnsupportedFunctionalityError5
|
|
1094
|
+
} from "@ai-sdk/provider";
|
|
1095
|
+
import {
|
|
1096
|
+
combineHeaders as combineHeaders2,
|
|
1097
|
+
createEventSourceResponseHandler as createEventSourceResponseHandler2,
|
|
1098
|
+
createJsonResponseHandler as createJsonResponseHandler2,
|
|
1099
|
+
postJsonToApi as postJsonToApi2
|
|
1100
|
+
} from "@ai-sdk/provider-utils";
|
|
1101
|
+
import { z as z3 } from "zod";
|
|
1102
|
+
|
|
1103
|
+
// src/convert-to-openai-completion-prompt.ts
|
|
1104
|
+
import {
|
|
1105
|
+
InvalidPromptError,
|
|
1106
|
+
UnsupportedFunctionalityError as UnsupportedFunctionalityError4
|
|
1107
|
+
} from "@ai-sdk/provider";
|
|
1108
|
+
// Converts a structured chat prompt into a single text prompt for the legacy
// /completions endpoint, using "<role>:" headers per turn.
// Returns { prompt } (fast path) or { prompt, stopSequences } where the stop
// sequence prevents the model from generating the next user turn.
// Throws InvalidPromptError for a system message after the first position and
// UnsupportedFunctionalityError for images / tool content.
function convertToOpenAICompletionPrompt({
  prompt,
  inputFormat,
  user = "user",
  assistant = "assistant"
}) {
  // fast path: a raw single-text user prompt is passed through verbatim,
  // with no role headers and no stop sequences
  if (inputFormat === "prompt" && prompt.length === 1 && prompt[0].role === "user" && prompt[0].content.length === 1 && prompt[0].content[0].type === "text") {
    return { prompt: prompt[0].content[0].text };
  }
  let text = "";
  // a leading system message becomes an un-headed preamble
  if (prompt[0].role === "system") {
    text += `${prompt[0].content}

`;
    prompt = prompt.slice(1);
  }
  for (const { role, content } of prompt) {
    switch (role) {
      case "system": {
        throw new InvalidPromptError({
          // FIX: was a plain double-quoted string, so the literal text
          // "${content}" appeared in the error instead of the offending
          // message content; template literal interpolates it correctly.
          message: `Unexpected system message in prompt: ${content}`,
          prompt
        });
      }
      case "user": {
        const userMessage = content.map((part) => {
          switch (part.type) {
            case "text": {
              return part.text;
            }
            case "image": {
              throw new UnsupportedFunctionalityError4({
                functionality: "images"
              });
            }
          }
        }).join("");
        text += `${user}:
${userMessage}

`;
        break;
      }
      case "assistant": {
        const assistantMessage = content.map((part) => {
          switch (part.type) {
            case "text": {
              return part.text;
            }
            case "tool-call": {
              throw new UnsupportedFunctionalityError4({
                functionality: "tool-call messages"
              });
            }
          }
        }).join("");
        text += `${assistant}:
${assistantMessage}

`;
        break;
      }
      case "tool": {
        throw new UnsupportedFunctionalityError4({
          functionality: "tool messages"
        });
      }
      default: {
        const _exhaustiveCheck = role;
        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
      }
    }
  }
  // open the assistant turn so the model continues from here
  text += `${assistant}:
`;
  return {
    prompt: text,
    stopSequences: [`
${user}:`]
  };
}
|
|
1189
|
+
|
|
1190
|
+
// src/map-openai-completion-logprobs.ts
|
|
1191
|
+
// Maps OpenAI /completions logprobs (parallel arrays) to the SDK shape:
// [{ token, logprob, topLogprobs: [{ token, logprob }] }].
// Returns undefined when no logprobs were provided; `topLogprobs` is an
// empty array when the response carried no top_logprobs.
function mapOpenAICompletionLogProbs(logprobs) {
  if (logprobs == null) {
    return void 0;
  }
  return logprobs.tokens.map((token, position) => {
    const mapped = {
      token,
      logprob: logprobs.token_logprobs[position],
      topLogprobs: []
    };
    if (logprobs.top_logprobs) {
      // top_logprobs[position] is a { token: logprob } record
      for (const [candidate, candidateLogprob] of Object.entries(
        logprobs.top_logprobs[position]
      )) {
        mapped.topLogprobs.push({ token: candidate, logprob: candidateLogprob });
      }
    }
    return mapped;
  });
}
|
|
1203
|
+
|
|
1204
|
+
// src/openai-completion-language-model.ts
|
|
1205
|
+
// LanguageModelV1 implementation for the legacy OpenAI /completions endpoint.
// Supports plain text generation only: tools, tool choice, and object
// generation modes all throw UnsupportedFunctionalityError.
var OpenAICompletionLanguageModel = class {
  constructor(modelId, settings, config) {
    this.specificationVersion = "v1";
    // no object generation support on the completions endpoint
    this.defaultObjectGenerationMode = void 0;
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
  // Builds the request body for /completions from standardized call options.
  // Returns { args, warnings }; unsupported settings (topK, non-text
  // responseFormat) produce warnings rather than errors.
  getArgs({
    mode,
    inputFormat,
    prompt,
    maxTokens,
    temperature,
    topP,
    topK,
    frequencyPenalty,
    presencePenalty,
    stopSequences: userStopSequences,
    responseFormat,
    seed
  }) {
    var _a;
    const type = mode.type;
    const warnings = [];
    if (topK != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "topK"
      });
    }
    if (responseFormat != null && responseFormat.type !== "text") {
      warnings.push({
        type: "unsupported-setting",
        setting: "responseFormat",
        details: "JSON response format is not supported."
      });
    }
    const { prompt: completionPrompt, stopSequences } = convertToOpenAICompletionPrompt({ prompt, inputFormat });
    // merge prompt-derived stop sequences (e.g. "\nuser:") with user-supplied ones
    const stop = [...stopSequences != null ? stopSequences : [], ...userStopSequences != null ? userStopSequences : []];
    const baseArgs = {
      // model id:
      model: this.modelId,
      // model specific settings:
      echo: this.settings.echo,
      logit_bias: this.settings.logitBias,
      // logprobs: a number is passed through; `true` maps to 0 (API minimum),
      // `false`/undefined omit the field
      logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
      suffix: this.settings.suffix,
      user: this.settings.user,
      // standardized settings:
      max_tokens: maxTokens,
      temperature,
      top_p: topP,
      frequency_penalty: frequencyPenalty,
      presence_penalty: presencePenalty,
      seed,
      // prompt:
      prompt: completionPrompt,
      // stop sequences (omitted when empty):
      stop: stop.length > 0 ? stop : void 0
    };
    switch (type) {
      case "regular": {
        // tools are not available on the completions endpoint
        if ((_a = mode.tools) == null ? void 0 : _a.length) {
          throw new UnsupportedFunctionalityError5({
            functionality: "tools"
          });
        }
        if (mode.toolChoice) {
          throw new UnsupportedFunctionalityError5({
            functionality: "toolChoice"
          });
        }
        return { args: baseArgs, warnings };
      }
      case "object-json": {
        throw new UnsupportedFunctionalityError5({
          functionality: "object-json mode"
        });
      }
      case "object-tool": {
        throw new UnsupportedFunctionalityError5({
          functionality: "object-tool mode"
        });
      }
      default: {
        const _exhaustiveCheck = type;
        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
      }
    }
  }
  // Non-streaming generation: single POST, returns text plus usage,
  // finish reason, logprobs, and raw request/response metadata.
  async doGenerate(options) {
    const { args, warnings } = this.getArgs(options);
    const {
      responseHeaders,
      value: response,
      rawValue: rawResponse
    } = await postJsonToApi2({
      url: this.config.url({
        path: "/completions",
        modelId: this.modelId
      }),
      headers: combineHeaders2(this.config.headers(), options.headers),
      body: args,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createJsonResponseHandler2(
        openaiCompletionResponseSchema
      ),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
    // separate the (potentially large) prompt from the remaining settings
    // for rawCall reporting
    const { prompt: rawPrompt, ...rawSettings } = args;
    // only the first choice is surfaced
    const choice = response.choices[0];
    return {
      text: choice.text,
      usage: {
        promptTokens: response.usage.prompt_tokens,
        completionTokens: response.usage.completion_tokens
      },
      finishReason: mapOpenAIFinishReason(choice.finish_reason),
      logprobs: mapOpenAICompletionLogProbs(choice.logprobs),
      rawCall: { rawPrompt, rawSettings },
      rawResponse: { headers: responseHeaders, body: rawResponse },
      response: getResponseMetadata(response),
      warnings,
      request: { body: JSON.stringify(args) }
    };
  }
  // Streaming generation: pipes the SSE parse stream through a TransformStream
  // that emits response-metadata, text-delta, and a final finish event.
  // finishReason/usage/logprobs are accumulated in closure state across chunks.
  async doStream(options) {
    const { args, warnings } = this.getArgs(options);
    const body = {
      ...args,
      stream: true,
      // only include stream_options when in strict compatibility mode:
      stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
    };
    const { responseHeaders, value: response } = await postJsonToApi2({
      url: this.config.url({
        path: "/completions",
        modelId: this.modelId
      }),
      headers: combineHeaders2(this.config.headers(), options.headers),
      body,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createEventSourceResponseHandler2(
        openaiCompletionChunkSchema
      ),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
    const { prompt: rawPrompt, ...rawSettings } = args;
    // accumulated across chunks; emitted in flush()
    let finishReason = "unknown";
    let usage = {
      promptTokens: Number.NaN,
      completionTokens: Number.NaN
    };
    let logprobs;
    let isFirstChunk = true;
    return {
      stream: response.pipeThrough(
        new TransformStream({
          transform(chunk, controller) {
            // schema-parse failures become error events and mark the stream failed
            if (!chunk.success) {
              finishReason = "error";
              controller.enqueue({ type: "error", error: chunk.error });
              return;
            }
            const value = chunk.value;
            // API-level error payloads (the union's error branch)
            if ("error" in value) {
              finishReason = "error";
              controller.enqueue({ type: "error", error: value.error });
              return;
            }
            // emit id/model/timestamp metadata exactly once
            if (isFirstChunk) {
              isFirstChunk = false;
              controller.enqueue({
                type: "response-metadata",
                ...getResponseMetadata(value)
              });
            }
            // usage typically arrives on the final chunk
            if (value.usage != null) {
              usage = {
                promptTokens: value.usage.prompt_tokens,
                completionTokens: value.usage.completion_tokens
              };
            }
            const choice = value.choices[0];
            if ((choice == null ? void 0 : choice.finish_reason) != null) {
              finishReason = mapOpenAIFinishReason(choice.finish_reason);
            }
            if ((choice == null ? void 0 : choice.text) != null) {
              controller.enqueue({
                type: "text-delta",
                textDelta: choice.text
              });
            }
            const mappedLogprobs = mapOpenAICompletionLogProbs(
              choice == null ? void 0 : choice.logprobs
            );
            if (mappedLogprobs == null ? void 0 : mappedLogprobs.length) {
              // lazily create the accumulator so logprobs stays undefined
              // when the stream never produced any
              if (logprobs === void 0) logprobs = [];
              logprobs.push(...mappedLogprobs);
            }
          },
          flush(controller) {
            controller.enqueue({
              type: "finish",
              finishReason,
              logprobs,
              usage
            });
          }
        })
      ),
      rawCall: { rawPrompt, rawSettings },
      rawResponse: { headers: responseHeaders },
      warnings,
      request: { body: JSON.stringify(body) }
    };
  }
};
|
|
1429
|
+
// Zod schema for a non-streaming /completions response. Note the completion
// endpoint's logprobs shape (parallel arrays) differs from the chat endpoint.
var openaiCompletionResponseSchema = z3.object({
  id: z3.string().nullish(),
  created: z3.number().nullish(),
  model: z3.string().nullish(),
  choices: z3.array(
    z3.object({
      text: z3.string(),
      finish_reason: z3.string(),
      logprobs: z3.object({
        tokens: z3.array(z3.string()),
        token_logprobs: z3.array(z3.number()),
        // one { token: logprob } record per position, or null
        top_logprobs: z3.array(z3.record(z3.string(), z3.number())).nullable()
      }).nullish()
    })
  ),
  // usage is required here (unlike the streaming chunk schema below)
  usage: z3.object({
    prompt_tokens: z3.number(),
    completion_tokens: z3.number()
  })
});
|
|
1449
|
+
// Zod schema for a streaming /completions SSE chunk: either a delta payload
// or an OpenAI error object (union with `openaiErrorDataSchema`).
var openaiCompletionChunkSchema = z3.union([
  z3.object({
    id: z3.string().nullish(),
    created: z3.number().nullish(),
    model: z3.string().nullish(),
    choices: z3.array(
      z3.object({
        text: z3.string(),
        finish_reason: z3.string().nullish(),
        index: z3.number(),
        logprobs: z3.object({
          tokens: z3.array(z3.string()),
          token_logprobs: z3.array(z3.number()),
          top_logprobs: z3.array(z3.record(z3.string(), z3.number())).nullable()
        }).nullish()
      })
    ),
    // usage only appears on the final chunk (and only with include_usage)
    usage: z3.object({
      prompt_tokens: z3.number(),
      completion_tokens: z3.number()
    }).nullish()
  }),
  openaiErrorDataSchema
]);
|
|
1473
|
+
|
|
1474
|
+
// src/openai-embedding-model.ts
|
|
1475
|
+
import {
|
|
1476
|
+
TooManyEmbeddingValuesForCallError
|
|
1477
|
+
} from "@ai-sdk/provider";
|
|
1478
|
+
import {
|
|
1479
|
+
combineHeaders as combineHeaders3,
|
|
1480
|
+
createJsonResponseHandler as createJsonResponseHandler3,
|
|
1481
|
+
postJsonToApi as postJsonToApi3
|
|
1482
|
+
} from "@ai-sdk/provider-utils";
|
|
1483
|
+
import { z as z4 } from "zod";
|
|
1484
|
+
// EmbeddingModelV1 implementation for the OpenAI /embeddings endpoint.
// Batches up to `maxEmbeddingsPerCall` values per request and always asks
// for float-encoded vectors.
var OpenAIEmbeddingModel = class {
  constructor(modelId, settings, config) {
    this.specificationVersion = "v1";
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
  // Settings override, falling back to the OpenAI batch limit of 2048.
  get maxEmbeddingsPerCall() {
    const configured = this.settings.maxEmbeddingsPerCall;
    return configured != null ? configured : 2048;
  }
  // Parallel requests are allowed unless explicitly disabled in settings.
  get supportsParallelCalls() {
    const configured = this.settings.supportsParallelCalls;
    return configured != null ? configured : true;
  }
  // Embeds `values` in a single POST. Throws TooManyEmbeddingValuesForCallError
  // when the batch exceeds maxEmbeddingsPerCall. Returns embeddings in input
  // order plus token usage (when reported) and raw response headers.
  async doEmbed({
    values,
    headers,
    abortSignal
  }) {
    const limit = this.maxEmbeddingsPerCall;
    if (values.length > limit) {
      throw new TooManyEmbeddingValuesForCallError({
        provider: this.provider,
        modelId: this.modelId,
        maxEmbeddingsPerCall: limit,
        values
      });
    }
    const requestBody = {
      model: this.modelId,
      input: values,
      encoding_format: "float",
      dimensions: this.settings.dimensions,
      user: this.settings.user
    };
    const { responseHeaders, value: response } = await postJsonToApi3({
      url: this.config.url({
        path: "/embeddings",
        modelId: this.modelId
      }),
      headers: combineHeaders3(this.config.headers(), headers),
      body: requestBody,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createJsonResponseHandler3(
        openaiTextEmbeddingResponseSchema
      ),
      abortSignal,
      fetch: this.config.fetch
    });
    const embeddings = [];
    for (const item of response.data) {
      embeddings.push(item.embedding);
    }
    return {
      embeddings,
      usage: response.usage ? { tokens: response.usage.prompt_tokens } : void 0,
      rawResponse: { headers: responseHeaders }
    };
  }
};
|
|
1542
|
+
// Zod schema for the /embeddings response; only the fields this provider
// reads (float vectors and prompt-token usage) are validated.
var openaiTextEmbeddingResponseSchema = z4.object({
  data: z4.array(z4.object({ embedding: z4.array(z4.number()) })),
  usage: z4.object({ prompt_tokens: z4.number() }).nullish()
});
|
|
1546
|
+
|
|
1547
|
+
// src/openai-image-model.ts
|
|
1548
|
+
import {
|
|
1549
|
+
combineHeaders as combineHeaders4,
|
|
1550
|
+
createJsonResponseHandler as createJsonResponseHandler4,
|
|
1551
|
+
postJsonToApi as postJsonToApi4
|
|
1552
|
+
} from "@ai-sdk/provider-utils";
|
|
1553
|
+
import { z as z5 } from "zod";
|
|
1554
|
+
|
|
1555
|
+
// src/openai-image-settings.ts
|
|
1556
|
+
// Per-model batch limits for image generation; models absent from this
// table default to 1 (see OpenAIImageModel#maxImagesPerCall).
var modelMaxImagesPerCall = {
  "dall-e-3": 1,
  "dall-e-2": 10,
  "gpt-image-1": 10
};
// Models that always return base64 and therefore must NOT receive an
// explicit `response_format` parameter in the request.
var hasDefaultResponseFormat = /* @__PURE__ */ new Set(["gpt-image-1"]);
|
|
1562
|
+
|
|
1563
|
+
// src/openai-image-model.ts
|
|
1564
|
+
// ImageModelV1 implementation for the OpenAI /images/generations endpoint.
// Requests base64 output (except for models that only return base64) and
// reports unsupported standardized settings (aspectRatio, seed) as warnings.
var OpenAIImageModel = class {
  constructor(modelId, settings, config) {
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
    this.specificationVersion = "v1";
  }
  // Resolution order: explicit setting, then the per-model table, then 1.
  get maxImagesPerCall() {
    const fromSettings = this.settings.maxImagesPerCall;
    if (fromSettings != null) {
      return fromSettings;
    }
    const fromModelTable = modelMaxImagesPerCall[this.modelId];
    return fromModelTable != null ? fromModelTable : 1;
  }
  get provider() {
    return this.config.provider;
  }
  // Generates `n` images for `prompt`. aspectRatio and seed are not supported
  // by this API and are reported as warnings. Returns base64 image strings
  // plus warnings and response metadata (timestamp, model id, headers).
  async doGenerate({
    prompt,
    n,
    size,
    aspectRatio,
    seed,
    providerOptions,
    headers,
    abortSignal
  }) {
    const warnings = [];
    if (aspectRatio != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "aspectRatio",
        details: "This model does not support aspect ratio. Use `size` instead."
      });
    }
    if (seed != null) {
      warnings.push({ type: "unsupported-setting", setting: "seed" });
    }
    // test hook: config._internal.currentDate overrides the response timestamp
    let currentDate;
    const internal = this.config._internal;
    if (internal != null && internal.currentDate != null) {
      currentDate = internal.currentDate();
    }
    if (currentDate == null) {
      currentDate = /* @__PURE__ */ new Date();
    }
    const openaiProviderOptions = providerOptions.openai != null ? providerOptions.openai : {};
    const requestBody = {
      model: this.modelId,
      prompt,
      n,
      size,
      ...openaiProviderOptions
    };
    // request base64 output unless the model always returns base64
    if (!hasDefaultResponseFormat.has(this.modelId)) {
      requestBody.response_format = "b64_json";
    }
    const { value: response, responseHeaders } = await postJsonToApi4({
      url: this.config.url({
        path: "/images/generations",
        modelId: this.modelId
      }),
      headers: combineHeaders4(this.config.headers(), headers),
      body: requestBody,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createJsonResponseHandler4(
        openaiImageResponseSchema
      ),
      abortSignal,
      fetch: this.config.fetch
    });
    return {
      images: response.data.map((item) => item.b64_json),
      warnings,
      response: {
        timestamp: currentDate,
        modelId: this.modelId,
        headers: responseHeaders
      }
    };
  }
};
|
|
1633
|
+
// Zod schema for /images/generations responses; only the base64 payloads
// are read by this provider.
var openaiImageResponseSchema = z5.object({
  data: z5.array(z5.object({ b64_json: z5.string() }))
});
|
|
1636
|
+
|
|
1637
|
+
// src/openai-transcription-model.ts
|
|
1638
|
+
import {
|
|
1639
|
+
combineHeaders as combineHeaders5,
|
|
1640
|
+
convertBase64ToUint8Array,
|
|
1641
|
+
createJsonResponseHandler as createJsonResponseHandler5,
|
|
1642
|
+
parseProviderOptions,
|
|
1643
|
+
postFormDataToApi
|
|
1644
|
+
} from "@ai-sdk/provider-utils";
|
|
1645
|
+
import { z as z6 } from "zod";
|
|
1646
|
+
// Zod schema for `providerOptions.openai` on transcription calls.
// Defaults: temperature 0, timestampGranularities ["segment"].
var openAIProviderOptionsSchema = z6.object({
  include: z6.array(z6.string()).nullish(),
  // ISO-639-1 code expected by the API; see languageMap below for name lookup
  language: z6.string().nullish(),
  prompt: z6.string().nullish(),
  temperature: z6.number().min(0).max(1).nullish().default(0),
  timestampGranularities: z6.array(z6.enum(["word", "segment"])).nullish().default(["segment"])
});
|
|
1653
|
+
// Lowercase English language name -> ISO-639-1 code lookup table
// (used by the transcription model to normalize language identifiers).
var languageMap = {
  afrikaans: "af",
  arabic: "ar",
  armenian: "hy",
  azerbaijani: "az",
  belarusian: "be",
  bosnian: "bs",
  bulgarian: "bg",
  catalan: "ca",
  chinese: "zh",
  croatian: "hr",
  czech: "cs",
  danish: "da",
  dutch: "nl",
  english: "en",
  estonian: "et",
  finnish: "fi",
  french: "fr",
  galician: "gl",
  german: "de",
  greek: "el",
  hebrew: "he",
  hindi: "hi",
  hungarian: "hu",
  icelandic: "is",
  indonesian: "id",
  italian: "it",
  japanese: "ja",
  kannada: "kn",
  kazakh: "kk",
  korean: "ko",
  latvian: "lv",
  lithuanian: "lt",
  macedonian: "mk",
  malay: "ms",
  marathi: "mr",
  maori: "mi",
  nepali: "ne",
  norwegian: "no",
  persian: "fa",
  polish: "pl",
  portuguese: "pt",
  romanian: "ro",
  russian: "ru",
  serbian: "sr",
  slovak: "sk",
  slovenian: "sl",
  spanish: "es",
  swahili: "sw",
  swedish: "sv",
  tagalog: "tl",
  tamil: "ta",
  thai: "th",
  turkish: "tr",
  ukrainian: "uk",
  urdu: "ur",
  vietnamese: "vi",
  welsh: "cy"
};
|
|
1712
|
+
/**
 * Transcription model backed by OpenAI's `POST /audio/transcriptions`
 * endpoint. Builds a multipart form request from the audio payload and
 * provider options, and normalizes the JSON response into the SDK's
 * transcription result shape.
 */
var OpenAITranscriptionModel = class {
  /**
   * @param modelId - OpenAI transcription model identifier.
   * @param config - Provider config: `provider` name, `url()` builder,
   *   `headers()` factory, optional `fetch` and `_internal` test hooks.
   */
  constructor(modelId, config) {
    this.modelId = modelId;
    this.config = config;
    this.specificationVersion = "v1";
  }
  get provider() {
    return this.config.provider;
  }
  /**
   * Builds the multipart form body for the transcription request.
   * Returns `{ formData, warnings }`; `warnings` is currently always empty.
   */
  getArgs({ audio, mediaType, providerOptions }) {
    const warnings = [];
    const openAIOptions = parseProviderOptions({
      provider: "openai",
      providerOptions,
      schema: openAIProviderOptionsSchema
    });
    const formData = new FormData();
    // Accept either raw bytes or a base64-encoded string for the audio payload.
    const audioBlob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([convertBase64ToUint8Array(audio)]);
    formData.append("model", this.modelId);
    formData.append("file", new File([audioBlob], "audio", { type: mediaType }));
    if (openAIOptions) {
      // Map camelCase provider options onto the snake_case form fields the
      // API expects; unset options are omitted from the form entirely.
      const transcriptionModelOptions = {
        include: openAIOptions.include ?? void 0,
        language: openAIOptions.language ?? void 0,
        prompt: openAIOptions.prompt ?? void 0,
        temperature: openAIOptions.temperature ?? void 0,
        timestamp_granularities: openAIOptions.timestampGranularities ?? void 0
      };
      for (const key in transcriptionModelOptions) {
        const value = transcriptionModelOptions[key];
        if (value !== void 0) {
          formData.append(key, String(value));
        }
      }
    }
    return { formData, warnings };
  }
  /**
   * Runs one transcription request and normalizes the response
   * (text, per-word segments, ISO language code, duration, metadata).
   */
  async doGenerate(options) {
    // _internal.currentDate is a test hook; fall back to the real clock.
    const currentDate = this.config._internal?.currentDate?.() ?? /* @__PURE__ */ new Date();
    const { formData, warnings } = this.getArgs(options);
    const {
      value: response,
      responseHeaders,
      rawValue: rawResponse
    } = await postFormDataToApi({
      url: this.config.url({
        path: "/audio/transcriptions",
        modelId: this.modelId
      }),
      headers: combineHeaders5(this.config.headers(), options.headers),
      formData,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createJsonResponseHandler5(
        openaiTranscriptionResponseSchema
      ),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
    // Translate the spelled-out language name to an ISO code when known.
    const language = response.language != null && response.language in languageMap ? languageMap[response.language] : void 0;
    const segments = response.words?.map((word) => ({
      text: word.word,
      startSecond: word.start,
      endSecond: word.end
    })) ?? [];
    return {
      text: response.text,
      segments,
      language,
      durationInSeconds: response.duration ?? void 0,
      warnings,
      response: {
        timestamp: currentDate,
        modelId: this.modelId,
        headers: responseHeaders,
        body: rawResponse
      }
    };
  }
};
|
|
1799
|
+
// Zod schema for the /audio/transcriptions JSON response. Only `text` is
// guaranteed; `language`, `duration`, and per-word timestamps are optional
// (nullish) and depend on the requested options/model.
var openaiTranscriptionResponseSchema = z6.object({
  text: z6.string(),
  language: z6.string().nullish(),
  duration: z6.number().nullish(),
  words: z6.array(
    z6.object({
      word: z6.string(),
      start: z6.number(),
      end: z6.number()
    })
  ).nullish()
});
|
|
1811
|
+
|
|
1812
|
+
// src/responses/openai-responses-language-model.ts
|
|
1813
|
+
import {
|
|
1814
|
+
combineHeaders as combineHeaders6,
|
|
1815
|
+
createEventSourceResponseHandler as createEventSourceResponseHandler3,
|
|
1816
|
+
createJsonResponseHandler as createJsonResponseHandler6,
|
|
1817
|
+
generateId as generateId2,
|
|
1818
|
+
parseProviderOptions as parseProviderOptions2,
|
|
1819
|
+
postJsonToApi as postJsonToApi5
|
|
1820
|
+
} from "@ai-sdk/provider-utils";
|
|
1821
|
+
import { z as z7 } from "zod";
|
|
1822
|
+
|
|
1823
|
+
// src/responses/convert-to-openai-responses-messages.ts
|
|
1824
|
+
import {
|
|
1825
|
+
UnsupportedFunctionalityError as UnsupportedFunctionalityError6
|
|
1826
|
+
} from "@ai-sdk/provider";
|
|
1827
|
+
import { convertUint8ArrayToBase64 as convertUint8ArrayToBase642 } from "@ai-sdk/provider-utils";
|
|
1828
|
+
/**
 * Converts an AI-SDK prompt into the input item list expected by OpenAI's
 * Responses API.
 *
 * @param prompt - Array of `{ role, content }` prompt messages.
 * @param systemMessageMode - How to handle system messages:
 *   "system" keeps them as-is, "developer" re-labels them, "remove" drops
 *   them with a warning.
 * @returns `{ messages, warnings }` — the converted input items plus any
 *   warnings produced during conversion.
 * @throws UnsupportedFunctionalityError6 for file URLs or non-PDF files in
 *   user messages; Error for unknown roles / system message modes.
 */
function convertToOpenAIResponsesMessages({ prompt, systemMessageMode }) {
  const messages = [];
  const warnings = [];

  for (const { role, content } of prompt) {
    if (role === "system") {
      if (systemMessageMode === "system") {
        messages.push({ role: "system", content });
      } else if (systemMessageMode === "developer") {
        messages.push({ role: "developer", content });
      } else if (systemMessageMode === "remove") {
        warnings.push({
          type: "other",
          message: "system messages are removed for this model"
        });
      } else {
        const _exhaustiveCheck = systemMessageMode;
        throw new Error(
          `Unsupported system message mode: ${_exhaustiveCheck}`
        );
      }
    } else if (role === "user") {
      const mappedContent = content.map((part, index) => {
        if (part.type === "text") {
          return { type: "input_text", text: part.text };
        }
        if (part.type === "image") {
          const imageUrl = part.image instanceof URL ? part.image.toString() : `data:${part.mimeType ?? "image/jpeg"};base64,${convertUint8ArrayToBase642(part.image)}`;
          return {
            type: "input_image",
            image_url: imageUrl,
            // OpenAI specific extension: image detail
            detail: part.providerMetadata?.openai?.imageDetail
          };
        }
        if (part.type === "file") {
          if (part.data instanceof URL) {
            throw new UnsupportedFunctionalityError6({
              functionality: "File URLs in user messages"
            });
          }
          if (part.mimeType === "application/pdf") {
            return {
              type: "input_file",
              filename: part.filename ?? `part-${index}.pdf`,
              file_data: `data:application/pdf;base64,${part.data}`
            };
          }
          throw new UnsupportedFunctionalityError6({
            functionality: "Only PDF files are supported in user messages"
          });
        }
        // Unknown part types fall through to undefined (as before).
      });
      messages.push({ role: "user", content: mappedContent });
    } else if (role === "assistant") {
      // Assistant text becomes an output message; tool calls become
      // standalone function_call items. Other part types are ignored.
      for (const part of content) {
        if (part.type === "text") {
          messages.push({
            role: "assistant",
            content: [{ type: "output_text", text: part.text }]
          });
        } else if (part.type === "tool-call") {
          messages.push({
            type: "function_call",
            call_id: part.toolCallId,
            name: part.toolName,
            arguments: JSON.stringify(part.args)
          });
        }
      }
    } else if (role === "tool") {
      for (const part of content) {
        messages.push({
          type: "function_call_output",
          call_id: part.toolCallId,
          output: JSON.stringify(part.result)
        });
      }
    } else {
      const _exhaustiveCheck = role;
      throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
    }
  }

  return { messages, warnings };
}
|
|
1946
|
+
|
|
1947
|
+
// src/responses/map-openai-responses-finish-reason.ts
|
|
1948
|
+
/**
 * Maps the Responses API `incomplete_details.reason` value to the SDK's
 * finish-reason vocabulary.
 *
 * A missing reason means the response completed normally: "tool-calls"
 * when tool calls were emitted, otherwise "stop". Unrecognized reasons
 * map to "unknown" (or "tool-calls" when tool calls were emitted).
 */
function mapOpenAIResponseFinishReason({ finishReason, hasToolCalls }) {
  if (finishReason == null) {
    return hasToolCalls ? "tool-calls" : "stop";
  }
  if (finishReason === "max_output_tokens") {
    return "length";
  }
  if (finishReason === "content_filter") {
    return "content-filter";
  }
  return hasToolCalls ? "tool-calls" : "unknown";
}
|
|
1964
|
+
|
|
1965
|
+
// src/responses/openai-responses-prepare-tools.ts
|
|
1966
|
+
import {
|
|
1967
|
+
UnsupportedFunctionalityError as UnsupportedFunctionalityError7
|
|
1968
|
+
} from "@ai-sdk/provider";
|
|
1969
|
+
/**
 * Converts the SDK tool definitions / tool choice in `mode` into the
 * `tools` and `tool_choice` request fields for the Responses API.
 *
 * @param mode - Call mode carrying optional `tools` and `toolChoice`.
 * @param strict - Whether function tools should request strict schemas.
 * @returns `{ tools, tool_choice, toolWarnings }`; unsupported tools are
 *   reported via `toolWarnings` rather than thrown.
 * @throws UnsupportedFunctionalityError7 for unknown tool-choice types.
 */
function prepareResponsesTools({ mode, strict }) {
  const toolWarnings = [];
  // Treat an empty tool list the same as no tools at all.
  const sourceTools = mode.tools?.length ? mode.tools : void 0;
  if (sourceTools == null) {
    return { tools: void 0, tool_choice: void 0, toolWarnings };
  }
  const openaiTools2 = [];
  for (const tool of sourceTools) {
    if (tool.type === "function") {
      openaiTools2.push({
        type: "function",
        name: tool.name,
        description: tool.description,
        parameters: tool.parameters,
        strict: strict ? true : void 0
      });
    } else if (tool.type === "provider-defined") {
      // Built-in OpenAI tools exposed through provider-defined ids.
      if (tool.id === "openai.file_search") {
        openaiTools2.push({
          type: "file_search",
          vector_store_ids: tool.args.vectorStoreIds,
          max_num_results: tool.args.maxNumResults,
          ranking: tool.args.ranking,
          filters: tool.args.filters
        });
      } else if (tool.id === "openai.web_search_preview") {
        openaiTools2.push({
          type: "web_search_preview",
          search_context_size: tool.args.searchContextSize,
          user_location: tool.args.userLocation
        });
      } else if (tool.id === "openai.code_interpreter") {
        openaiTools2.push({
          type: "code_interpreter",
          container: tool.args.container
        });
      } else {
        toolWarnings.push({ type: "unsupported-tool", tool });
      }
    } else {
      toolWarnings.push({ type: "unsupported-tool", tool });
    }
  }
  const toolChoice = mode.toolChoice;
  if (toolChoice == null) {
    return { tools: openaiTools2, tool_choice: void 0, toolWarnings };
  }
  const choiceType = toolChoice.type;
  if (choiceType === "auto" || choiceType === "none" || choiceType === "required") {
    // These pass through to the API as plain strings.
    return { tools: openaiTools2, tool_choice: choiceType, toolWarnings };
  }
  if (choiceType === "tool") {
    // Built-in tools are selected by type; everything else is a function.
    if (toolChoice.toolName === "web_search_preview" || toolChoice.toolName === "code_interpreter") {
      return {
        tools: openaiTools2,
        tool_choice: { type: toolChoice.toolName },
        toolWarnings
      };
    }
    return {
      tools: openaiTools2,
      tool_choice: { type: "function", name: toolChoice.toolName },
      toolWarnings
    };
  }
  throw new UnsupportedFunctionalityError7({
    functionality: `Unsupported tool choice type: ${choiceType}`
  });
}
|
|
2071
|
+
|
|
2072
|
+
// src/responses/openai-responses-language-model.ts
|
|
2073
|
+
/**
 * Language model backed by OpenAI's Responses API (`POST /responses`),
 * supporting non-streaming (`doGenerate`) and streaming (`doStream`) calls,
 * built-in tools, JSON/structured outputs, and reasoning models.
 *
 * Fix: removed three leftover `console.log` debug statements from
 * `getArgs` that dumped provider metadata, parsed options, and the full
 * request body to stdout on every call.
 */
var OpenAIResponsesLanguageModel = class {
  /**
   * @param modelId - OpenAI model identifier.
   * @param config - Provider config: `provider` name, `url()` builder,
   *   `headers()` factory, optional `fetch` and `generateId` overrides.
   */
  constructor(modelId, config) {
    this.specificationVersion = "v1";
    this.defaultObjectGenerationMode = "json";
    this.supportsStructuredOutputs = true;
    this.modelId = modelId;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
  /**
   * Translates AI-SDK call options into a Responses API request body.
   * Returns `{ args, warnings }` where `warnings` collects unsupported
   * settings instead of failing the call.
   */
  getArgs({
    mode,
    maxTokens,
    temperature,
    stopSequences,
    topP,
    topK,
    presencePenalty,
    frequencyPenalty,
    seed,
    prompt,
    providerMetadata,
    responseFormat
  }) {
    const warnings = [];
    const modelConfig = getResponsesModelConfig(this.modelId);
    const type = mode.type;
    // The Responses API has no equivalent for these sampling settings;
    // surface a warning rather than erroring.
    if (topK != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "topK"
      });
    }
    if (seed != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "seed"
      });
    }
    if (presencePenalty != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "presencePenalty"
      });
    }
    if (frequencyPenalty != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "frequencyPenalty"
      });
    }
    if (stopSequences != null) {
      warnings.push({
        type: "unsupported-setting",
        setting: "stopSequences"
      });
    }
    const { messages, warnings: messageWarnings } = convertToOpenAIResponsesMessages({
      prompt,
      systemMessageMode: modelConfig.systemMessageMode
    });
    warnings.push(...messageWarnings);
    const openaiOptions = parseProviderOptions2({
      provider: "openai",
      providerOptions: providerMetadata,
      schema: openaiResponsesProviderOptionsSchema
    });
    // Strict JSON-schema validation is on by default; providers can opt
    // out via `strictSchemas: false`.
    const isStrict = openaiOptions?.strictSchemas ?? true;
    const baseArgs = {
      model: this.modelId,
      input: messages,
      temperature: openaiOptions?.forceNoTemperature ? void 0 : temperature,
      top_p: topP,
      max_output_tokens: maxTokens,
      ...responseFormat?.type === "json" && {
        text: {
          format: responseFormat.schema != null ? {
            type: "json_schema",
            strict: isStrict,
            name: responseFormat.name ?? "response",
            description: responseFormat.description,
            schema: responseFormat.schema
          } : { type: "json_object" }
        }
      },
      // provider options:
      metadata: openaiOptions?.metadata,
      parallel_tool_calls: openaiOptions?.parallelToolCalls,
      previous_response_id: openaiOptions?.previousResponseId,
      store: openaiOptions?.store,
      user: openaiOptions?.user,
      instructions: openaiOptions?.instructions,
      // model-specific settings:
      ...modelConfig.isReasoningModel && (openaiOptions?.reasoningEffort != null || openaiOptions?.reasoningSummary != null) && {
        reasoning: {
          ...openaiOptions?.reasoningEffort != null && {
            effort: openaiOptions.reasoningEffort
          },
          ...openaiOptions?.reasoningSummary != null && {
            summary: openaiOptions.reasoningSummary
          }
        }
      },
      ...modelConfig.requiredAutoTruncation && {
        truncation: "auto"
      }
    };
    if (modelConfig.isReasoningModel) {
      // Reasoning models reject sampling parameters; strip them and warn.
      if (baseArgs.temperature != null) {
        baseArgs.temperature = void 0;
        warnings.push({
          type: "unsupported-setting",
          setting: "temperature",
          details: "temperature is not supported for reasoning models"
        });
      }
      if (baseArgs.top_p != null) {
        baseArgs.top_p = void 0;
        warnings.push({
          type: "unsupported-setting",
          setting: "topP",
          details: "topP is not supported for reasoning models"
        });
      }
    }
    switch (type) {
      case "regular": {
        const { tools, tool_choice, toolWarnings } = prepareResponsesTools({
          mode,
          strict: isStrict
          // TODO support provider options on tools
        });
        return {
          args: {
            ...baseArgs,
            tools,
            tool_choice
          },
          warnings: [...warnings, ...toolWarnings]
        };
      }
      case "object-json": {
        return {
          args: {
            ...baseArgs,
            text: {
              format: mode.schema != null ? {
                type: "json_schema",
                strict: isStrict,
                name: mode.name ?? "response",
                description: mode.description,
                schema: mode.schema
              } : { type: "json_object" }
            }
          },
          warnings
        };
      }
      case "object-tool": {
        return {
          args: {
            ...baseArgs,
            tool_choice: { type: "function", name: mode.tool.name },
            tools: [
              {
                type: "function",
                name: mode.tool.name,
                description: mode.tool.description,
                parameters: mode.tool.parameters,
                strict: isStrict
              }
            ]
          },
          warnings
        };
      }
      default: {
        const _exhaustiveCheck = type;
        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
      }
    }
  }
  /**
   * Non-streaming generation. Posts to `/responses`, validates the JSON
   * body, and maps it to the SDK result (text, sources, tool calls,
   * reasoning summary, usage, provider metadata).
   */
  async doGenerate(options) {
    const { args: body, warnings } = this.getArgs(options);
    const {
      responseHeaders,
      value: response,
      rawValue: rawResponse
    } = await postJsonToApi5({
      url: this.config.url({
        path: "/responses",
        modelId: this.modelId
      }),
      headers: combineHeaders6(this.config.headers(), options.headers),
      body,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createJsonResponseHandler6(
        z7.object({
          id: z7.string(),
          created_at: z7.number(),
          model: z7.string(),
          output: z7.array(
            z7.discriminatedUnion("type", [
              z7.object({
                type: z7.literal("message"),
                role: z7.literal("assistant"),
                content: z7.array(
                  z7.object({
                    type: z7.literal("output_text"),
                    text: z7.string(),
                    annotations: z7.array(
                      z7.object({
                        type: z7.literal("url_citation"),
                        start_index: z7.number(),
                        end_index: z7.number(),
                        url: z7.string(),
                        title: z7.string()
                      })
                    )
                  })
                )
              }),
              z7.object({
                type: z7.literal("function_call"),
                call_id: z7.string(),
                name: z7.string(),
                arguments: z7.string()
              }),
              z7.object({
                type: z7.literal("web_search_call")
              }),
              z7.object({
                type: z7.literal("computer_call")
              }),
              z7.object({
                type: z7.literal("reasoning"),
                summary: z7.array(
                  z7.object({
                    type: z7.literal("summary_text"),
                    text: z7.string()
                  })
                )
              })
            ])
          ),
          incomplete_details: z7.object({ reason: z7.string() }).nullable(),
          usage: usageSchema
        })
      ),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
    // All output_text parts across message items, in order.
    const outputTextElements = response.output.filter((output) => output.type === "message").flatMap((output) => output.content).filter((content) => content.type === "output_text");
    const toolCalls = response.output.filter((output) => output.type === "function_call").map((output) => ({
      toolCallType: "function",
      toolCallId: output.call_id,
      toolName: output.name,
      args: output.arguments
    }));
    const reasoningSummary = response.output.find((item) => item.type === "reasoning")?.summary ?? null;
    return {
      text: outputTextElements.map((content) => content.text).join("\n"),
      // URL citations become SDK "source" entries with generated ids.
      sources: outputTextElements.flatMap(
        (content) => content.annotations.map((annotation) => ({
          sourceType: "url",
          id: this.config.generateId?.() ?? generateId2(),
          url: annotation.url,
          title: annotation.title
        }))
      ),
      finishReason: mapOpenAIResponseFinishReason({
        finishReason: response.incomplete_details?.reason,
        hasToolCalls: toolCalls.length > 0
      }),
      toolCalls: toolCalls.length > 0 ? toolCalls : void 0,
      reasoning: reasoningSummary ? reasoningSummary.map((summary) => ({
        type: "text",
        text: summary.text
      })) : void 0,
      usage: {
        promptTokens: response.usage.input_tokens,
        completionTokens: response.usage.output_tokens
      },
      rawCall: {
        rawPrompt: void 0,
        rawSettings: {}
      },
      rawResponse: {
        headers: responseHeaders,
        body: rawResponse
      },
      request: {
        body: JSON.stringify(body)
      },
      response: {
        id: response.id,
        timestamp: new Date(response.created_at * 1e3),
        modelId: response.model
      },
      providerMetadata: {
        openai: {
          responseId: response.id,
          cachedPromptTokens: response.usage.input_tokens_details?.cached_tokens ?? null,
          reasoningTokens: response.usage.output_tokens_details?.reasoning_tokens ?? null
        }
      },
      warnings
    };
  }
  /**
   * Streaming generation. Posts to `/responses` with `stream: true` and
   * transforms the event-source chunk stream into SDK stream parts
   * (text deltas, tool-call deltas, sources, finish event with usage).
   */
  async doStream(options) {
    const { args: body, warnings } = this.getArgs(options);
    const { responseHeaders, value: response } = await postJsonToApi5({
      url: this.config.url({
        path: "/responses",
        modelId: this.modelId
      }),
      headers: combineHeaders6(this.config.headers(), options.headers),
      body: {
        ...body,
        stream: true
      },
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createEventSourceResponseHandler3(
        openaiResponsesChunkSchema
      ),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
    // Captured so the TransformStream callbacks (which have their own
    // `this`) can reach the model config.
    const self = this;
    let finishReason = "unknown";
    let promptTokens = NaN;
    let completionTokens = NaN;
    let cachedPromptTokens = null;
    let reasoningTokens = null;
    let responseId = null;
    // In-flight function calls keyed by output_index, so argument deltas
    // can be attributed to the right tool call.
    const ongoingToolCalls = {};
    let hasToolCalls = false;
    return {
      stream: response.pipeThrough(
        new TransformStream({
          transform(chunk, controller) {
            if (!chunk.success) {
              finishReason = "error";
              controller.enqueue({ type: "error", error: chunk.error });
              return;
            }
            const value = chunk.value;
            if (isResponseOutputItemAddedChunk(value)) {
              if (value.item.type === "function_call") {
                ongoingToolCalls[value.output_index] = {
                  toolName: value.item.name,
                  toolCallId: value.item.call_id
                };
                controller.enqueue({
                  type: "tool-call-delta",
                  toolCallType: "function",
                  toolCallId: value.item.call_id,
                  toolName: value.item.name,
                  argsTextDelta: value.item.arguments
                });
              }
            } else if (isResponseFunctionCallArgumentsDeltaChunk(value)) {
              const toolCall = ongoingToolCalls[value.output_index];
              if (toolCall != null) {
                controller.enqueue({
                  type: "tool-call-delta",
                  toolCallType: "function",
                  toolCallId: toolCall.toolCallId,
                  toolName: toolCall.toolName,
                  argsTextDelta: value.delta
                });
              }
            } else if (isResponseCreatedChunk(value)) {
              responseId = value.response.id;
              controller.enqueue({
                type: "response-metadata",
                id: value.response.id,
                timestamp: new Date(value.response.created_at * 1e3),
                modelId: value.response.model
              });
            } else if (isTextDeltaChunk(value)) {
              controller.enqueue({
                type: "text-delta",
                textDelta: value.delta
              });
            } else if (isResponseReasoningSummaryTextDeltaChunk(value)) {
              controller.enqueue({
                type: "reasoning",
                textDelta: value.delta
              });
            } else if (isResponseOutputItemDoneChunk(value) && value.item.type === "function_call") {
              // Completed tool call: emit the final tool-call part.
              ongoingToolCalls[value.output_index] = void 0;
              hasToolCalls = true;
              controller.enqueue({
                type: "tool-call",
                toolCallType: "function",
                toolCallId: value.item.call_id,
                toolName: value.item.name,
                args: value.item.arguments
              });
            } else if (isResponseFinishedChunk(value)) {
              finishReason = mapOpenAIResponseFinishReason({
                finishReason: value.response.incomplete_details?.reason,
                hasToolCalls
              });
              promptTokens = value.response.usage.input_tokens;
              completionTokens = value.response.usage.output_tokens;
              cachedPromptTokens = value.response.usage.input_tokens_details?.cached_tokens ?? cachedPromptTokens;
              reasoningTokens = value.response.usage.output_tokens_details?.reasoning_tokens ?? reasoningTokens;
            } else if (isResponseAnnotationAddedChunk(value)) {
              controller.enqueue({
                type: "source",
                source: {
                  sourceType: "url",
                  id: self.config.generateId?.() ?? generateId2(),
                  url: value.annotation.url,
                  title: value.annotation.title
                }
              });
            }
          },
          flush(controller) {
            controller.enqueue({
              type: "finish",
              finishReason,
              usage: { promptTokens, completionTokens },
              ...(cachedPromptTokens != null || reasoningTokens != null) && {
                providerMetadata: {
                  openai: {
                    responseId,
                    cachedPromptTokens,
                    reasoningTokens
                  }
                }
              }
            });
          }
        })
      ),
      rawCall: {
        rawPrompt: void 0,
        rawSettings: {}
      },
      rawResponse: { headers: responseHeaders },
      request: { body: JSON.stringify(body) },
      warnings
    };
  }
};
|
|
2532
|
+
// Token-usage block shared by the non-streaming response schema and the
// `response.completed` / `response.incomplete` stream events. The nested
// detail objects are optional in the payload, hence nullish().
var usageSchema = z7.object({
  input_tokens: z7.number(),
  input_tokens_details: z7.object({ cached_tokens: z7.number().nullish() }).nullish(),
  output_tokens: z7.number(),
  output_tokens_details: z7.object({ reasoning_tokens: z7.number().nullish() }).nullish()
});
// `response.output_text.delta`: incremental generated-text chunk.
var textDeltaChunkSchema = z7.object({
  type: z7.literal("response.output_text.delta"),
  delta: z7.string()
});
// Terminal stream events carrying the completion state and usage totals.
var responseFinishedChunkSchema = z7.object({
  type: z7.enum(["response.completed", "response.incomplete"]),
  response: z7.object({
    incomplete_details: z7.object({ reason: z7.string() }).nullish(),
    usage: usageSchema
  })
});
// `response.created`: carries the response id, creation timestamp, and model.
var responseCreatedChunkSchema = z7.object({
  type: z7.literal("response.created"),
  response: z7.object({
    id: z7.string(),
    created_at: z7.number(),
    model: z7.string()
  })
});
// `response.output_item.done`: an output item finished. Function-call
// items arrive here with their complete argument string.
var responseOutputItemDoneSchema = z7.object({
  type: z7.literal("response.output_item.done"),
  output_index: z7.number(),
  item: z7.discriminatedUnion("type", [
    z7.object({
      type: z7.literal("message")
    }),
    z7.object({
      type: z7.literal("function_call"),
      id: z7.string(),
      call_id: z7.string(),
      name: z7.string(),
      arguments: z7.string(),
      status: z7.literal("completed")
    })
  ])
});
// `response.function_call_arguments.delta`: streamed tool-call argument text.
var responseFunctionCallArgumentsDeltaSchema = z7.object({
  type: z7.literal("response.function_call_arguments.delta"),
  item_id: z7.string(),
  output_index: z7.number(),
  delta: z7.string()
});
// `response.output_item.added`: a new output item (message or function
// call) has started streaming.
var responseOutputItemAddedSchema = z7.object({
  type: z7.literal("response.output_item.added"),
  output_index: z7.number(),
  item: z7.discriminatedUnion("type", [
    z7.object({
      type: z7.literal("message")
    }),
    z7.object({
      type: z7.literal("function_call"),
      id: z7.string(),
      call_id: z7.string(),
      name: z7.string(),
      arguments: z7.string()
    })
  ])
});
// `response.output_text.annotation.added`: a URL citation attached to
// streamed output text.
var responseAnnotationAddedSchema = z7.object({
  type: z7.literal("response.output_text.annotation.added"),
  annotation: z7.object({
    type: z7.literal("url_citation"),
    url: z7.string(),
    title: z7.string()
  })
});
|
|
2604
|
+
var responseReasoningSummaryTextDeltaSchema = z7.object({
|
|
2605
|
+
type: z7.literal("response.reasoning_summary_text.delta"),
|
|
2606
|
+
item_id: z7.string(),
|
|
2607
|
+
output_index: z7.number(),
|
|
2608
|
+
summary_index: z7.number(),
|
|
2609
|
+
delta: z7.string()
|
|
2610
|
+
});
|
|
2611
|
+
var openaiResponsesChunkSchema = z7.union([
|
|
2612
|
+
textDeltaChunkSchema,
|
|
2613
|
+
responseFinishedChunkSchema,
|
|
2614
|
+
responseCreatedChunkSchema,
|
|
2615
|
+
responseOutputItemDoneSchema,
|
|
2616
|
+
responseFunctionCallArgumentsDeltaSchema,
|
|
2617
|
+
responseOutputItemAddedSchema,
|
|
2618
|
+
responseAnnotationAddedSchema,
|
|
2619
|
+
responseReasoningSummaryTextDeltaSchema,
|
|
2620
|
+
z7.object({ type: z7.string() }).passthrough()
|
|
2621
|
+
// fallback for unknown chunks
|
|
2622
|
+
]);
|
|
2623
|
+
// Narrowing predicates for parsed Responses API stream chunks. Each inspects
// the discriminant `type` tag and returns a boolean.

// Shared discriminant check: true when the chunk's `type` is one of `types`.
function chunkTypeIs(chunk, ...types) {
  return types.includes(chunk.type);
}
// Incremental output-text delta event.
function isTextDeltaChunk(chunk) {
  return chunkTypeIs(chunk, "response.output_text.delta");
}
// An output item (message / function call) finished.
function isResponseOutputItemDoneChunk(chunk) {
  return chunkTypeIs(chunk, "response.output_item.done");
}
// Terminal event — covers both completed and incomplete responses.
function isResponseFinishedChunk(chunk) {
  return chunkTypeIs(chunk, "response.completed", "response.incomplete");
}
// Response-created metadata event.
function isResponseCreatedChunk(chunk) {
  return chunkTypeIs(chunk, "response.created");
}
// Incremental function-call argument text.
function isResponseFunctionCallArgumentsDeltaChunk(chunk) {
  return chunkTypeIs(chunk, "response.function_call_arguments.delta");
}
// A new output item was added to the response.
function isResponseOutputItemAddedChunk(chunk) {
  return chunkTypeIs(chunk, "response.output_item.added");
}
// A URL-citation annotation was attached to output text.
function isResponseAnnotationAddedChunk(chunk) {
  return chunkTypeIs(chunk, "response.output_text.annotation.added");
}
// Incremental reasoning-summary text.
function isResponseReasoningSummaryTextDeltaChunk(chunk) {
  return chunkTypeIs(chunk, "response.reasoning_summary_text.delta");
}
|
|
2647
|
+
// Derive per-model request behavior for the Responses API.
//
// Model ids starting with "o" are treated as reasoning models. Early o1
// variants (o1-mini / o1-preview) get system messages removed entirely;
// other reasoning models use the "developer" role. Everything else keeps
// the standard "system" role.
function getResponsesModelConfig(modelId) {
  // Non-reasoning (GPT-style) models: standard system messages.
  if (!modelId.startsWith("o")) {
    return {
      isReasoningModel: false,
      systemMessageMode: "system",
      requiredAutoTruncation: false
    };
  }
  const stripsSystemMessages = modelId.startsWith("o1-mini") || modelId.startsWith("o1-preview");
  return {
    isReasoningModel: true,
    systemMessageMode: stripsSystemMessages ? "remove" : "developer",
    requiredAutoTruncation: false
  };
}
|
|
2668
|
+
// Provider-specific options accepted under `providerOptions.openai` for
// Responses API calls. Every field is optional (nullish).
var openaiResponsesProviderOptionsSchema = z7.object({
  metadata: z7.any().nullish(),
  parallelToolCalls: z7.boolean().nullish(),
  previousResponseId: z7.string().nullish(),
  forceNoTemperature: z7.boolean().nullish(),
  store: z7.boolean().nullish(),
  user: z7.string().nullish(),
  reasoningEffort: z7.string().nullish(),
  strictSchemas: z7.boolean().nullish(),
  instructions: z7.string().nullish(),
  reasoningSummary: z7.string().nullish()
});
|
|
2680
|
+
|
|
2681
|
+
// src/openai-tools.ts
import { z as z8 } from "zod";
// The web-search-preview tool exposes no model-callable parameters.
var WebSearchPreviewParameters = z8.object({});
|
|
2684
|
+
/**
 * Factory for the provider-defined web-search-preview tool.
 *
 * @param options - optional `searchContextSize` and `userLocation`, forwarded
 *   verbatim as tool args.
 * @returns a provider-defined tool descriptor with id
 *   "openai.web_search_preview".
 */
function webSearchPreviewTool(options = {}) {
  const { searchContextSize, userLocation } = options;
  return {
    type: "provider-defined",
    id: "openai.web_search_preview",
    args: { searchContextSize, userLocation },
    parameters: WebSearchPreviewParameters
  };
}
|
|
2698
|
+
// Parameters schema for the code-interpreter tool: the execution container is
// either a container-id string or an object describing a managed container.
var CodeInterpreterParameters = z8.object({
  container: z8.union([
    z8.string(),
    z8.object({
      // An explicit null container id is normalized to undefined.
      containerId: z8.union([z8.string(), z8.null()]).transform((val) => val != null ? val : void 0),
      type: z8.enum(["auto", "file", "code_interpreter"]),
      files: z8.array(z8.string())
    })
  ])
});
|
|
2708
|
+
/**
 * Factory for the provider-defined code-interpreter tool.
 *
 * @param options - object with the `container` to run code in, forwarded
 *   verbatim as a tool arg.
 * @returns a provider-defined tool descriptor with id
 *   "openai.code_interpreter".
 */
function codeInterpreterTool(options) {
  const descriptor = {
    type: "provider-defined",
    id: "openai.code_interpreter",
    args: { container: options.container },
    parameters: CodeInterpreterParameters
  };
  return descriptor;
}
|
|
2720
|
+
// Attribute-filter schemas for the file-search tool: a filter is either a
// single comparison or an and/or compound of nested filters.
var comparisonFilterSchema = z8.object({
  key: z8.string(),
  // Comparison operator applied to the attribute named by `key`.
  type: z8.enum(["eq", "ne", "gt", "gte", "lt", "lte"]),
  value: z8.union([z8.string(), z8.number(), z8.boolean()])
});
var compoundFilterSchema = z8.object({
  type: z8.enum(["and", "or"]),
  // z8.lazy breaks the self-reference so compound filters can nest.
  filters: z8.array(
    z8.union([comparisonFilterSchema, z8.lazy(() => compoundFilterSchema)])
  )
});
var filtersSchema = z8.union([comparisonFilterSchema, compoundFilterSchema]);
// Arguments accepted by the file-search tool factory.
var fileSearchArgsSchema = z8.object({
  /**
   * List of vector store IDs to search through. If not provided, searches all available vector stores.
   */
  vectorStoreIds: z8.array(z8.string()).optional(),
  /**
   * Maximum number of search results to return. Defaults to 10.
   */
  maxNumResults: z8.number().optional(),
  /**
   * Ranking options for the search.
   */
  ranking: z8.object({
    ranker: z8.enum(["auto", "default-2024-08-21"]).optional()
  }).optional(),
  /**
   * A filter to apply based on file attributes.
   */
  filters: filtersSchema.optional()
});
|
|
2752
|
+
/**
 * Factory for the provider-defined file-search tool.
 *
 * @param options - optional `vectorStoreIds`, `maxNumResults`, `ranking`,
 *   and `filters`, forwarded verbatim as tool args.
 * @returns a provider-defined tool descriptor with id "openai.file_search".
 */
function fileSearchTool(options = {}) {
  const { vectorStoreIds, maxNumResults, ranking, filters } = options;
  const args = { vectorStoreIds, maxNumResults, ranking, filters };
  return {
    type: "provider-defined",
    id: "openai.file_search",
    args,
    // The model-visible parameter schema is empty; configuration happens
    // through the factory args above.
    parameters: z8.object({})
  };
}
|
|
2770
|
+
// Registry of provider-defined tool factories, exposed to consumers as
// `openai.tools.*` (see `provider.tools` in createOpenAI).
var openaiTools = {
  webSearchPreview: webSearchPreviewTool,
  codeInterpreter: codeInterpreterTool,
  fileSearch: fileSearchTool
};
|
|
2775
|
+
|
|
2776
|
+
// src/openai-speech-model.ts
|
|
2777
|
+
import {
|
|
2778
|
+
combineHeaders as combineHeaders7,
|
|
2779
|
+
createBinaryResponseHandler,
|
|
2780
|
+
parseProviderOptions as parseProviderOptions3,
|
|
2781
|
+
postJsonToApi as postJsonToApi6
|
|
2782
|
+
} from "@ai-sdk/provider-utils";
|
|
2783
|
+
import { z as z9 } from "zod";
|
|
2784
|
+
// Options accepted under `providerOptions.openai` for speech requests.
// The schema constrains `speed` to the 0.25–4 range with a default of 1.
var OpenAIProviderOptionsSchema = z9.object({
  instructions: z9.string().nullish(),
  speed: z9.number().min(0.25).max(4).default(1).nullish()
});
|
|
2788
|
+
// Text-to-speech model backed by OpenAI's /audio/speech endpoint.
var OpenAISpeechModel = class {
  /**
   * @param modelId - speech model id, sent as `model` in the request body.
   * @param config - provider wiring: `provider` name, `url` builder,
   *   `headers` factory, optional `fetch` override, and optional
   *   `_internal.currentDate` clock hook used for response timestamps.
   */
  constructor(modelId, config) {
    this.modelId = modelId;
    this.config = config;
    // Speech-model specification version implemented by this class.
    this.specificationVersion = "v1";
  }
  get provider() {
    return this.config.provider;
  }
  /**
   * Build the JSON request body for /audio/speech.
   * Returns `{ requestBody, warnings }`; an unsupported `outputFormat`
   * produces an "unsupported-setting" warning and falls back to mp3.
   */
  getArgs({
    text,
    voice = "alloy",
    outputFormat = "mp3",
    speed,
    instructions,
    providerOptions
  }) {
    const warnings = [];
    // Validate `providerOptions` against the openai speech options schema.
    const openAIOptions = parseProviderOptions3({
      provider: "openai",
      providerOptions,
      schema: OpenAIProviderOptionsSchema
    });
    const requestBody = {
      model: this.modelId,
      input: text,
      voice,
      // mp3 is the default; overwritten below when a supported format is given.
      response_format: "mp3",
      speed,
      instructions
    };
    if (outputFormat) {
      if (["mp3", "opus", "aac", "flac", "wav", "pcm"].includes(outputFormat)) {
        requestBody.response_format = outputFormat;
      } else {
        warnings.push({
          type: "unsupported-setting",
          setting: "outputFormat",
          details: `Unsupported output format: ${outputFormat}. Using mp3 instead.`
        });
      }
    }
    if (openAIOptions) {
      // NOTE(review): `speechModelOptions` starts empty and is never filled
      // from `openAIOptions`, so this loop copies nothing into `requestBody`
      // — it looks like it was meant to merge the parsed provider options.
      // Confirm the intended behavior before changing it.
      const speechModelOptions = {};
      for (const key in speechModelOptions) {
        const value = speechModelOptions[key];
        if (value !== void 0) {
          requestBody[key] = value;
        }
      }
    }
    return {
      requestBody,
      warnings
    };
  }
  /**
   * POST the speech request and return the binary audio payload.
   * Resolves to `{ audio, warnings, request, response }`.
   */
  async doGenerate(options) {
    var _a, _b, _c;
    // Prefer the injected test clock when provided, else the wall clock.
    const currentDate = (_c = (_b = (_a = this.config._internal) == null ? void 0 : _a.currentDate) == null ? void 0 : _b.call(_a)) != null ? _c : /* @__PURE__ */ new Date();
    const { requestBody, warnings } = this.getArgs(options);
    const {
      value: audio,
      responseHeaders,
      rawValue: rawResponse
    } = await postJsonToApi6({
      url: this.config.url({
        path: "/audio/speech",
        modelId: this.modelId
      }),
      // Per-request headers override the provider-level defaults.
      headers: combineHeaders7(this.config.headers(), options.headers),
      body: requestBody,
      failedResponseHandler: openaiFailedResponseHandler,
      // The endpoint returns raw audio bytes, not JSON.
      successfulResponseHandler: createBinaryResponseHandler(),
      abortSignal: options.abortSignal,
      fetch: this.config.fetch
    });
    return {
      audio,
      warnings,
      request: {
        body: JSON.stringify(requestBody)
      },
      response: {
        timestamp: currentDate,
        modelId: this.modelId,
        headers: responseHeaders,
        body: rawResponse
      }
    };
  }
};
|
|
2879
|
+
|
|
2880
|
+
// src/openai-provider.ts
|
|
2881
|
+
/**
 * Create an OpenAI provider instance.
 *
 * The returned value is callable — `provider(modelId, settings)` yields a
 * language model — and also carries factory methods for each model family
 * (chat, completion, responses, embedding, image, transcription, speech)
 * plus the provider-defined `tools` registry.
 *
 * @param options - `baseURL`, `apiKey`, `organization`, `project`, `name`,
 *   `headers`, `compatibility` ("strict" | "compatible"), `fetch`.
 */
function createOpenAI(options = {}) {
  var _a, _b, _c;
  // Default to the public OpenAI endpoint; a trailing "/" is stripped.
  const baseURL = (_a = withoutTrailingSlash(options.baseURL)) != null ? _a : "https://api.openai.com/v1";
  // Defaults to "compatible" unless the caller opts into "strict";
  // forwarded to the chat and completion model configs.
  const compatibility = (_b = options.compatibility) != null ? _b : "compatible";
  // Prefix for each model's `provider` identifier (e.g. "openai.chat").
  const providerName = (_c = options.name) != null ? _c : "openai";
  // Headers are built per call so the API key is resolved lazily from
  // `options.apiKey` or the OPENAI_API_KEY environment variable.
  const getHeaders = () => ({
    Authorization: `Bearer ${loadApiKey({
      apiKey: options.apiKey,
      environmentVariableName: "OPENAI_API_KEY",
      description: "OpenAI"
    })}`,
    "OpenAI-Organization": options.organization,
    "OpenAI-Project": options.project,
    ...options.headers
  });
  // Chat-completions model factory.
  const createChatModel = (modelId, settings = {}) => new OpenAIChatLanguageModel(modelId, settings, {
    provider: `${providerName}.chat`,
    url: ({ path }) => `${baseURL}${path}`,
    headers: getHeaders,
    compatibility,
    fetch: options.fetch
  });
  // Legacy completions model factory.
  const createCompletionModel = (modelId, settings = {}) => new OpenAICompletionLanguageModel(modelId, settings, {
    provider: `${providerName}.completion`,
    url: ({ path }) => `${baseURL}${path}`,
    headers: getHeaders,
    compatibility,
    fetch: options.fetch
  });
  // Embedding model factory.
  const createEmbeddingModel = (modelId, settings = {}) => new OpenAIEmbeddingModel(modelId, settings, {
    provider: `${providerName}.embedding`,
    url: ({ path }) => `${baseURL}${path}`,
    headers: getHeaders,
    fetch: options.fetch
  });
  // Image-generation model factory.
  const createImageModel = (modelId, settings = {}) => new OpenAIImageModel(modelId, settings, {
    provider: `${providerName}.image`,
    url: ({ path }) => `${baseURL}${path}`,
    headers: getHeaders,
    fetch: options.fetch
  });
  // Audio-transcription model factory.
  const createTranscriptionModel = (modelId) => new OpenAITranscriptionModel(modelId, {
    provider: `${providerName}.transcription`,
    url: ({ path }) => `${baseURL}${path}`,
    headers: getHeaders,
    fetch: options.fetch
  });
  // Text-to-speech model factory.
  const createSpeechModel = (modelId) => new OpenAISpeechModel(modelId, {
    provider: `${providerName}.speech`,
    url: ({ path }) => `${baseURL}${path}`,
    headers: getHeaders,
    fetch: options.fetch
  });
  // Dispatch: gpt-3.5-turbo-instruct is the one completion-only model;
  // everything else goes to the chat model.
  const createLanguageModel = (modelId, settings) => {
    // NOTE(review): inside an arrow function `new.target` refers to the
    // nearest enclosing regular function (`createOpenAI`), not to this
    // factory — verify this guard fires as intended.
    if (new.target) {
      throw new Error(
        "The OpenAI model function cannot be called with the new keyword."
      );
    }
    if (modelId === "gpt-3.5-turbo-instruct") {
      return createCompletionModel(
        modelId,
        settings
      );
    }
    return createChatModel(modelId, settings);
  };
  // Responses API model factory.
  const createResponsesModel = (modelId) => {
    return new OpenAIResponsesLanguageModel(modelId, {
      provider: `${providerName}.responses`,
      url: ({ path }) => `${baseURL}${path}`,
      headers: getHeaders,
      fetch: options.fetch
    });
  };
  // The provider itself is callable and delegates to createLanguageModel.
  const provider = function(modelId, settings) {
    return createLanguageModel(modelId, settings);
  };
  provider.languageModel = createLanguageModel;
  provider.chat = createChatModel;
  provider.completion = createCompletionModel;
  provider.responses = createResponsesModel;
  provider.embedding = createEmbeddingModel;
  provider.textEmbedding = createEmbeddingModel;
  provider.textEmbeddingModel = createEmbeddingModel;
  provider.image = createImageModel;
  provider.imageModel = createImageModel;
  provider.transcription = createTranscriptionModel;
  provider.transcriptionModel = createTranscriptionModel;
  provider.speech = createSpeechModel;
  provider.speechModel = createSpeechModel;
  provider.tools = openaiTools;
  return provider;
}
|
|
2975
|
+
// Default provider instance targeting the official OpenAI API.
var openai = createOpenAI({
  compatibility: "strict"
  // strict for OpenAI API
});
export {
  createOpenAI,
  openai
};
//# sourceMappingURL=index.mjs.map