@zenning/openai 1.6.0 → 2.0.29
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +930 -45
- package/dist/index.d.mts +121 -327
- package/dist/index.d.ts +121 -327
- package/dist/index.js +2406 -2088
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +2402 -2063
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.d.mts +249 -0
- package/dist/internal/index.d.ts +249 -0
- package/dist/internal/index.js +3755 -0
- package/dist/internal/index.js.map +1 -0
- package/dist/internal/index.mjs +3775 -0
- package/dist/internal/index.mjs.map +1 -0
- package/internal.d.ts +1 -0
- package/package.json +19 -18
- package/internal/dist/index.d.mts +0 -394
- package/internal/dist/index.d.ts +0 -394
- package/internal/dist/index.js +0 -3390
- package/internal/dist/index.js.map +0 -1
- package/internal/dist/index.mjs +0 -3393
- package/internal/dist/index.mjs.map +0 -1
package/internal/dist/index.mjs
DELETED
|
@@ -1,3393 +0,0 @@
|
|
|
1
|
-
// ../provider/dist/index.mjs
|
|
2
|
-
var marker = "vercel.ai.error";
|
|
3
|
-
var symbol = Symbol.for(marker);
|
|
4
|
-
var _a;
|
|
5
|
-
var _AISDKError = class _AISDKError2 extends Error {
|
|
6
|
-
/**
|
|
7
|
-
* Creates an AI SDK Error.
|
|
8
|
-
*
|
|
9
|
-
* @param {Object} params - The parameters for creating the error.
|
|
10
|
-
* @param {string} params.name - The name of the error.
|
|
11
|
-
* @param {string} params.message - The error message.
|
|
12
|
-
* @param {unknown} [params.cause] - The underlying cause of the error.
|
|
13
|
-
*/
|
|
14
|
-
constructor({
|
|
15
|
-
name: name14,
|
|
16
|
-
message,
|
|
17
|
-
cause
|
|
18
|
-
}) {
|
|
19
|
-
super(message);
|
|
20
|
-
this[_a] = true;
|
|
21
|
-
this.name = name14;
|
|
22
|
-
this.cause = cause;
|
|
23
|
-
}
|
|
24
|
-
/**
|
|
25
|
-
* Checks if the given error is an AI SDK Error.
|
|
26
|
-
* @param {unknown} error - The error to check.
|
|
27
|
-
* @returns {boolean} True if the error is an AI SDK Error, false otherwise.
|
|
28
|
-
*/
|
|
29
|
-
static isInstance(error) {
|
|
30
|
-
return _AISDKError2.hasMarker(error, marker);
|
|
31
|
-
}
|
|
32
|
-
static hasMarker(error, marker15) {
|
|
33
|
-
const markerSymbol = Symbol.for(marker15);
|
|
34
|
-
return error != null && typeof error === "object" && markerSymbol in error && typeof error[markerSymbol] === "boolean" && error[markerSymbol] === true;
|
|
35
|
-
}
|
|
36
|
-
};
|
|
37
|
-
_a = symbol;
|
|
38
|
-
var AISDKError = _AISDKError;
|
|
39
|
-
var name = "AI_APICallError";
|
|
40
|
-
var marker2 = `vercel.ai.error.${name}`;
|
|
41
|
-
var symbol2 = Symbol.for(marker2);
|
|
42
|
-
var _a2;
|
|
43
|
-
_a2 = symbol2;
|
|
44
|
-
var name2 = "AI_EmptyResponseBodyError";
|
|
45
|
-
var marker3 = `vercel.ai.error.${name2}`;
|
|
46
|
-
var symbol3 = Symbol.for(marker3);
|
|
47
|
-
var _a3;
|
|
48
|
-
_a3 = symbol3;
|
|
49
|
-
var name3 = "AI_InvalidArgumentError";
|
|
50
|
-
var marker4 = `vercel.ai.error.${name3}`;
|
|
51
|
-
var symbol4 = Symbol.for(marker4);
|
|
52
|
-
var _a4;
|
|
53
|
-
_a4 = symbol4;
|
|
54
|
-
var name4 = "AI_InvalidPromptError";
|
|
55
|
-
var marker5 = `vercel.ai.error.${name4}`;
|
|
56
|
-
var symbol5 = Symbol.for(marker5);
|
|
57
|
-
var _a5;
|
|
58
|
-
var InvalidPromptError = class extends AISDKError {
|
|
59
|
-
constructor({
|
|
60
|
-
prompt,
|
|
61
|
-
message,
|
|
62
|
-
cause
|
|
63
|
-
}) {
|
|
64
|
-
super({ name: name4, message: `Invalid prompt: ${message}`, cause });
|
|
65
|
-
this[_a5] = true;
|
|
66
|
-
this.prompt = prompt;
|
|
67
|
-
}
|
|
68
|
-
static isInstance(error) {
|
|
69
|
-
return AISDKError.hasMarker(error, marker5);
|
|
70
|
-
}
|
|
71
|
-
};
|
|
72
|
-
_a5 = symbol5;
|
|
73
|
-
var name5 = "AI_InvalidResponseDataError";
|
|
74
|
-
var marker6 = `vercel.ai.error.${name5}`;
|
|
75
|
-
var symbol6 = Symbol.for(marker6);
|
|
76
|
-
var _a6;
|
|
77
|
-
var InvalidResponseDataError = class extends AISDKError {
|
|
78
|
-
constructor({
|
|
79
|
-
data,
|
|
80
|
-
message = `Invalid response data: ${JSON.stringify(data)}.`
|
|
81
|
-
}) {
|
|
82
|
-
super({ name: name5, message });
|
|
83
|
-
this[_a6] = true;
|
|
84
|
-
this.data = data;
|
|
85
|
-
}
|
|
86
|
-
static isInstance(error) {
|
|
87
|
-
return AISDKError.hasMarker(error, marker6);
|
|
88
|
-
}
|
|
89
|
-
};
|
|
90
|
-
_a6 = symbol6;
|
|
91
|
-
var name6 = "AI_JSONParseError";
|
|
92
|
-
var marker7 = `vercel.ai.error.${name6}`;
|
|
93
|
-
var symbol7 = Symbol.for(marker7);
|
|
94
|
-
var _a7;
|
|
95
|
-
_a7 = symbol7;
|
|
96
|
-
var name7 = "AI_LoadAPIKeyError";
|
|
97
|
-
var marker8 = `vercel.ai.error.${name7}`;
|
|
98
|
-
var symbol8 = Symbol.for(marker8);
|
|
99
|
-
var _a8;
|
|
100
|
-
_a8 = symbol8;
|
|
101
|
-
var name8 = "AI_LoadSettingError";
|
|
102
|
-
var marker9 = `vercel.ai.error.${name8}`;
|
|
103
|
-
var symbol9 = Symbol.for(marker9);
|
|
104
|
-
var _a9;
|
|
105
|
-
_a9 = symbol9;
|
|
106
|
-
var name9 = "AI_NoContentGeneratedError";
|
|
107
|
-
var marker10 = `vercel.ai.error.${name9}`;
|
|
108
|
-
var symbol10 = Symbol.for(marker10);
|
|
109
|
-
var _a10;
|
|
110
|
-
_a10 = symbol10;
|
|
111
|
-
var name10 = "AI_NoSuchModelError";
|
|
112
|
-
var marker11 = `vercel.ai.error.${name10}`;
|
|
113
|
-
var symbol11 = Symbol.for(marker11);
|
|
114
|
-
var _a11;
|
|
115
|
-
_a11 = symbol11;
|
|
116
|
-
var name11 = "AI_TooManyEmbeddingValuesForCallError";
|
|
117
|
-
var marker12 = `vercel.ai.error.${name11}`;
|
|
118
|
-
var symbol12 = Symbol.for(marker12);
|
|
119
|
-
var _a12;
|
|
120
|
-
var TooManyEmbeddingValuesForCallError = class extends AISDKError {
|
|
121
|
-
constructor(options) {
|
|
122
|
-
super({
|
|
123
|
-
name: name11,
|
|
124
|
-
message: `Too many values for a single embedding call. The ${options.provider} model "${options.modelId}" can only embed up to ${options.maxEmbeddingsPerCall} values per call, but ${options.values.length} values were provided.`
|
|
125
|
-
});
|
|
126
|
-
this[_a12] = true;
|
|
127
|
-
this.provider = options.provider;
|
|
128
|
-
this.modelId = options.modelId;
|
|
129
|
-
this.maxEmbeddingsPerCall = options.maxEmbeddingsPerCall;
|
|
130
|
-
this.values = options.values;
|
|
131
|
-
}
|
|
132
|
-
static isInstance(error) {
|
|
133
|
-
return AISDKError.hasMarker(error, marker12);
|
|
134
|
-
}
|
|
135
|
-
};
|
|
136
|
-
_a12 = symbol12;
|
|
137
|
-
var name12 = "AI_TypeValidationError";
|
|
138
|
-
var marker13 = `vercel.ai.error.${name12}`;
|
|
139
|
-
var symbol13 = Symbol.for(marker13);
|
|
140
|
-
var _a13;
|
|
141
|
-
_a13 = symbol13;
|
|
142
|
-
var name13 = "AI_UnsupportedFunctionalityError";
|
|
143
|
-
var marker14 = `vercel.ai.error.${name13}`;
|
|
144
|
-
var symbol14 = Symbol.for(marker14);
|
|
145
|
-
var _a14;
|
|
146
|
-
var UnsupportedFunctionalityError = class extends AISDKError {
|
|
147
|
-
constructor({
|
|
148
|
-
functionality,
|
|
149
|
-
message = `'${functionality}' functionality not supported.`
|
|
150
|
-
}) {
|
|
151
|
-
super({ name: name13, message });
|
|
152
|
-
this[_a14] = true;
|
|
153
|
-
this.functionality = functionality;
|
|
154
|
-
}
|
|
155
|
-
static isInstance(error) {
|
|
156
|
-
return AISDKError.hasMarker(error, marker14);
|
|
157
|
-
}
|
|
158
|
-
};
|
|
159
|
-
_a14 = symbol14;
|
|
160
|
-
|
|
161
|
-
// src/openai-chat-language-model.ts
|
|
162
|
-
import {
|
|
163
|
-
combineHeaders,
|
|
164
|
-
createEventSourceResponseHandler,
|
|
165
|
-
createJsonResponseHandler,
|
|
166
|
-
generateId,
|
|
167
|
-
isParsableJson,
|
|
168
|
-
postJsonToApi
|
|
169
|
-
} from "@ai-sdk/provider-utils";
|
|
170
|
-
import { z as z2 } from "zod";
|
|
171
|
-
|
|
172
|
-
// src/convert-to-openai-chat-messages.ts
|
|
173
|
-
import { convertUint8ArrayToBase64 } from "@ai-sdk/provider-utils";
|
|
174
|
-
function convertToOpenAIChatMessages({
|
|
175
|
-
prompt,
|
|
176
|
-
useLegacyFunctionCalling = false,
|
|
177
|
-
systemMessageMode = "system"
|
|
178
|
-
}) {
|
|
179
|
-
const messages = [];
|
|
180
|
-
const warnings = [];
|
|
181
|
-
for (const { role, content } of prompt) {
|
|
182
|
-
switch (role) {
|
|
183
|
-
case "system": {
|
|
184
|
-
switch (systemMessageMode) {
|
|
185
|
-
case "system": {
|
|
186
|
-
messages.push({ role: "system", content });
|
|
187
|
-
break;
|
|
188
|
-
}
|
|
189
|
-
case "developer": {
|
|
190
|
-
messages.push({ role: "developer", content });
|
|
191
|
-
break;
|
|
192
|
-
}
|
|
193
|
-
case "remove": {
|
|
194
|
-
warnings.push({
|
|
195
|
-
type: "other",
|
|
196
|
-
message: "system messages are removed for this model"
|
|
197
|
-
});
|
|
198
|
-
break;
|
|
199
|
-
}
|
|
200
|
-
default: {
|
|
201
|
-
const _exhaustiveCheck = systemMessageMode;
|
|
202
|
-
throw new Error(
|
|
203
|
-
`Unsupported system message mode: ${_exhaustiveCheck}`
|
|
204
|
-
);
|
|
205
|
-
}
|
|
206
|
-
}
|
|
207
|
-
break;
|
|
208
|
-
}
|
|
209
|
-
case "user": {
|
|
210
|
-
if (content.length === 1 && content[0].type === "text") {
|
|
211
|
-
messages.push({ role: "user", content: content[0].text });
|
|
212
|
-
break;
|
|
213
|
-
}
|
|
214
|
-
messages.push({
|
|
215
|
-
role: "user",
|
|
216
|
-
content: content.map((part, index) => {
|
|
217
|
-
var _a15, _b, _c, _d;
|
|
218
|
-
switch (part.type) {
|
|
219
|
-
case "text": {
|
|
220
|
-
return { type: "text", text: part.text };
|
|
221
|
-
}
|
|
222
|
-
case "image": {
|
|
223
|
-
return {
|
|
224
|
-
type: "image_url",
|
|
225
|
-
image_url: {
|
|
226
|
-
url: part.image instanceof URL ? part.image.toString() : `data:${(_a15 = part.mimeType) != null ? _a15 : "image/jpeg"};base64,${convertUint8ArrayToBase64(part.image)}`,
|
|
227
|
-
// OpenAI specific extension: image detail
|
|
228
|
-
detail: (_c = (_b = part.providerMetadata) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
|
|
229
|
-
}
|
|
230
|
-
};
|
|
231
|
-
}
|
|
232
|
-
case "file": {
|
|
233
|
-
if (part.data instanceof URL) {
|
|
234
|
-
throw new UnsupportedFunctionalityError({
|
|
235
|
-
functionality: "'File content parts with URL data' functionality not supported."
|
|
236
|
-
});
|
|
237
|
-
}
|
|
238
|
-
switch (part.mimeType) {
|
|
239
|
-
case "audio/wav": {
|
|
240
|
-
return {
|
|
241
|
-
type: "input_audio",
|
|
242
|
-
input_audio: { data: part.data, format: "wav" }
|
|
243
|
-
};
|
|
244
|
-
}
|
|
245
|
-
case "audio/mp3":
|
|
246
|
-
case "audio/mpeg": {
|
|
247
|
-
return {
|
|
248
|
-
type: "input_audio",
|
|
249
|
-
input_audio: { data: part.data, format: "mp3" }
|
|
250
|
-
};
|
|
251
|
-
}
|
|
252
|
-
case "application/pdf": {
|
|
253
|
-
return {
|
|
254
|
-
type: "file",
|
|
255
|
-
file: {
|
|
256
|
-
filename: (_d = part.filename) != null ? _d : `part-${index}.pdf`,
|
|
257
|
-
file_data: `data:application/pdf;base64,${part.data}`
|
|
258
|
-
}
|
|
259
|
-
};
|
|
260
|
-
}
|
|
261
|
-
default: {
|
|
262
|
-
throw new UnsupportedFunctionalityError({
|
|
263
|
-
functionality: `File content part type ${part.mimeType} in user messages`
|
|
264
|
-
});
|
|
265
|
-
}
|
|
266
|
-
}
|
|
267
|
-
}
|
|
268
|
-
}
|
|
269
|
-
})
|
|
270
|
-
});
|
|
271
|
-
break;
|
|
272
|
-
}
|
|
273
|
-
case "assistant": {
|
|
274
|
-
let text = "";
|
|
275
|
-
const toolCalls = [];
|
|
276
|
-
for (const part of content) {
|
|
277
|
-
switch (part.type) {
|
|
278
|
-
case "text": {
|
|
279
|
-
text += part.text;
|
|
280
|
-
break;
|
|
281
|
-
}
|
|
282
|
-
case "tool-call": {
|
|
283
|
-
toolCalls.push({
|
|
284
|
-
id: part.toolCallId,
|
|
285
|
-
type: "function",
|
|
286
|
-
function: {
|
|
287
|
-
name: part.toolName,
|
|
288
|
-
arguments: JSON.stringify(part.args)
|
|
289
|
-
}
|
|
290
|
-
});
|
|
291
|
-
break;
|
|
292
|
-
}
|
|
293
|
-
}
|
|
294
|
-
}
|
|
295
|
-
if (useLegacyFunctionCalling) {
|
|
296
|
-
if (toolCalls.length > 1) {
|
|
297
|
-
throw new UnsupportedFunctionalityError({
|
|
298
|
-
functionality: "useLegacyFunctionCalling with multiple tool calls in one message"
|
|
299
|
-
});
|
|
300
|
-
}
|
|
301
|
-
messages.push({
|
|
302
|
-
role: "assistant",
|
|
303
|
-
content: text,
|
|
304
|
-
function_call: toolCalls.length > 0 ? toolCalls[0].function : void 0
|
|
305
|
-
});
|
|
306
|
-
} else {
|
|
307
|
-
messages.push({
|
|
308
|
-
role: "assistant",
|
|
309
|
-
content: text,
|
|
310
|
-
tool_calls: toolCalls.length > 0 ? toolCalls : void 0
|
|
311
|
-
});
|
|
312
|
-
}
|
|
313
|
-
break;
|
|
314
|
-
}
|
|
315
|
-
case "tool": {
|
|
316
|
-
for (const toolResponse of content) {
|
|
317
|
-
if (useLegacyFunctionCalling) {
|
|
318
|
-
messages.push({
|
|
319
|
-
role: "function",
|
|
320
|
-
name: toolResponse.toolName,
|
|
321
|
-
content: JSON.stringify(toolResponse.result)
|
|
322
|
-
});
|
|
323
|
-
} else {
|
|
324
|
-
messages.push({
|
|
325
|
-
role: "tool",
|
|
326
|
-
tool_call_id: toolResponse.toolCallId,
|
|
327
|
-
content: JSON.stringify(toolResponse.result)
|
|
328
|
-
});
|
|
329
|
-
}
|
|
330
|
-
}
|
|
331
|
-
break;
|
|
332
|
-
}
|
|
333
|
-
default: {
|
|
334
|
-
const _exhaustiveCheck = role;
|
|
335
|
-
throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
|
|
336
|
-
}
|
|
337
|
-
}
|
|
338
|
-
}
|
|
339
|
-
return { messages, warnings };
|
|
340
|
-
}
|
|
341
|
-
|
|
342
|
-
// src/map-openai-chat-logprobs.ts
|
|
343
|
-
function mapOpenAIChatLogProbsOutput(logprobs) {
|
|
344
|
-
var _a15, _b;
|
|
345
|
-
return (_b = (_a15 = logprobs == null ? void 0 : logprobs.content) == null ? void 0 : _a15.map(({ token, logprob, top_logprobs }) => ({
|
|
346
|
-
token,
|
|
347
|
-
logprob,
|
|
348
|
-
topLogprobs: top_logprobs ? top_logprobs.map(({ token: token2, logprob: logprob2 }) => ({
|
|
349
|
-
token: token2,
|
|
350
|
-
logprob: logprob2
|
|
351
|
-
})) : []
|
|
352
|
-
}))) != null ? _b : void 0;
|
|
353
|
-
}
|
|
354
|
-
|
|
355
|
-
// src/map-openai-finish-reason.ts
|
|
356
|
-
function mapOpenAIFinishReason(finishReason) {
|
|
357
|
-
switch (finishReason) {
|
|
358
|
-
case "stop":
|
|
359
|
-
return "stop";
|
|
360
|
-
case "length":
|
|
361
|
-
return "length";
|
|
362
|
-
case "content_filter":
|
|
363
|
-
return "content-filter";
|
|
364
|
-
case "function_call":
|
|
365
|
-
case "tool_calls":
|
|
366
|
-
return "tool-calls";
|
|
367
|
-
default:
|
|
368
|
-
return "unknown";
|
|
369
|
-
}
|
|
370
|
-
}
|
|
371
|
-
|
|
372
|
-
// src/openai-error.ts
|
|
373
|
-
import { z } from "zod";
|
|
374
|
-
import { createJsonErrorResponseHandler } from "@ai-sdk/provider-utils";
|
|
375
|
-
var openaiErrorDataSchema = z.object({
|
|
376
|
-
error: z.object({
|
|
377
|
-
message: z.string(),
|
|
378
|
-
// The additional information below is handled loosely to support
|
|
379
|
-
// OpenAI-compatible providers that have slightly different error
|
|
380
|
-
// responses:
|
|
381
|
-
type: z.string().nullish(),
|
|
382
|
-
param: z.any().nullish(),
|
|
383
|
-
code: z.union([z.string(), z.number()]).nullish()
|
|
384
|
-
})
|
|
385
|
-
});
|
|
386
|
-
var openaiFailedResponseHandler = createJsonErrorResponseHandler({
|
|
387
|
-
errorSchema: openaiErrorDataSchema,
|
|
388
|
-
errorToMessage: (data) => data.error.message
|
|
389
|
-
});
|
|
390
|
-
|
|
391
|
-
// src/get-response-metadata.ts
|
|
392
|
-
function getResponseMetadata({
|
|
393
|
-
id,
|
|
394
|
-
model,
|
|
395
|
-
created
|
|
396
|
-
}) {
|
|
397
|
-
return {
|
|
398
|
-
id: id != null ? id : void 0,
|
|
399
|
-
modelId: model != null ? model : void 0,
|
|
400
|
-
timestamp: created != null ? new Date(created * 1e3) : void 0
|
|
401
|
-
};
|
|
402
|
-
}
|
|
403
|
-
|
|
404
|
-
// src/openai-prepare-tools.ts
|
|
405
|
-
function prepareTools({
|
|
406
|
-
mode,
|
|
407
|
-
useLegacyFunctionCalling = false,
|
|
408
|
-
structuredOutputs
|
|
409
|
-
}) {
|
|
410
|
-
var _a15;
|
|
411
|
-
const tools = ((_a15 = mode.tools) == null ? void 0 : _a15.length) ? mode.tools : void 0;
|
|
412
|
-
const toolWarnings = [];
|
|
413
|
-
if (tools == null) {
|
|
414
|
-
return { tools: void 0, tool_choice: void 0, toolWarnings };
|
|
415
|
-
}
|
|
416
|
-
const toolChoice = mode.toolChoice;
|
|
417
|
-
if (useLegacyFunctionCalling) {
|
|
418
|
-
const openaiFunctions = [];
|
|
419
|
-
for (const tool of tools) {
|
|
420
|
-
if (tool.type === "provider-defined") {
|
|
421
|
-
toolWarnings.push({ type: "unsupported-tool", tool });
|
|
422
|
-
} else {
|
|
423
|
-
openaiFunctions.push({
|
|
424
|
-
name: tool.name,
|
|
425
|
-
description: tool.description,
|
|
426
|
-
parameters: tool.parameters
|
|
427
|
-
});
|
|
428
|
-
}
|
|
429
|
-
}
|
|
430
|
-
if (toolChoice == null) {
|
|
431
|
-
return {
|
|
432
|
-
functions: openaiFunctions,
|
|
433
|
-
function_call: void 0,
|
|
434
|
-
toolWarnings
|
|
435
|
-
};
|
|
436
|
-
}
|
|
437
|
-
const type2 = toolChoice.type;
|
|
438
|
-
switch (type2) {
|
|
439
|
-
case "auto":
|
|
440
|
-
case "none":
|
|
441
|
-
case void 0:
|
|
442
|
-
return {
|
|
443
|
-
functions: openaiFunctions,
|
|
444
|
-
function_call: void 0,
|
|
445
|
-
toolWarnings
|
|
446
|
-
};
|
|
447
|
-
case "required":
|
|
448
|
-
throw new UnsupportedFunctionalityError({
|
|
449
|
-
functionality: "useLegacyFunctionCalling and toolChoice: required"
|
|
450
|
-
});
|
|
451
|
-
default:
|
|
452
|
-
return {
|
|
453
|
-
functions: openaiFunctions,
|
|
454
|
-
function_call: { name: toolChoice.toolName },
|
|
455
|
-
toolWarnings
|
|
456
|
-
};
|
|
457
|
-
}
|
|
458
|
-
}
|
|
459
|
-
const openaiTools = [];
|
|
460
|
-
for (const tool of tools) {
|
|
461
|
-
if (tool.type === "provider-defined") {
|
|
462
|
-
toolWarnings.push({ type: "unsupported-tool", tool });
|
|
463
|
-
} else {
|
|
464
|
-
openaiTools.push({
|
|
465
|
-
type: "function",
|
|
466
|
-
function: {
|
|
467
|
-
name: tool.name,
|
|
468
|
-
description: tool.description,
|
|
469
|
-
parameters: tool.parameters,
|
|
470
|
-
strict: structuredOutputs ? true : void 0
|
|
471
|
-
}
|
|
472
|
-
});
|
|
473
|
-
}
|
|
474
|
-
}
|
|
475
|
-
if (toolChoice == null) {
|
|
476
|
-
return { tools: openaiTools, tool_choice: void 0, toolWarnings };
|
|
477
|
-
}
|
|
478
|
-
const type = toolChoice.type;
|
|
479
|
-
switch (type) {
|
|
480
|
-
case "auto":
|
|
481
|
-
case "none":
|
|
482
|
-
case "required":
|
|
483
|
-
return { tools: openaiTools, tool_choice: type, toolWarnings };
|
|
484
|
-
case "tool":
|
|
485
|
-
return {
|
|
486
|
-
tools: openaiTools,
|
|
487
|
-
tool_choice: {
|
|
488
|
-
type: "function",
|
|
489
|
-
function: {
|
|
490
|
-
name: toolChoice.toolName
|
|
491
|
-
}
|
|
492
|
-
},
|
|
493
|
-
toolWarnings
|
|
494
|
-
};
|
|
495
|
-
default: {
|
|
496
|
-
const _exhaustiveCheck = type;
|
|
497
|
-
throw new UnsupportedFunctionalityError({
|
|
498
|
-
functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
|
|
499
|
-
});
|
|
500
|
-
}
|
|
501
|
-
}
|
|
502
|
-
}
|
|
503
|
-
|
|
504
|
-
// src/openai-chat-language-model.ts
|
|
505
|
-
var OpenAIChatLanguageModel = class {
|
|
506
|
-
constructor(modelId, settings, config) {
|
|
507
|
-
this.specificationVersion = "v1";
|
|
508
|
-
this.modelId = modelId;
|
|
509
|
-
this.settings = settings;
|
|
510
|
-
this.config = config;
|
|
511
|
-
}
|
|
512
|
-
get supportsStructuredOutputs() {
|
|
513
|
-
var _a15;
|
|
514
|
-
return (_a15 = this.settings.structuredOutputs) != null ? _a15 : isReasoningModel(this.modelId);
|
|
515
|
-
}
|
|
516
|
-
get defaultObjectGenerationMode() {
|
|
517
|
-
if (isAudioModel(this.modelId)) {
|
|
518
|
-
return "tool";
|
|
519
|
-
}
|
|
520
|
-
return this.supportsStructuredOutputs ? "json" : "tool";
|
|
521
|
-
}
|
|
522
|
-
get provider() {
|
|
523
|
-
return this.config.provider;
|
|
524
|
-
}
|
|
525
|
-
get supportsImageUrls() {
|
|
526
|
-
return !this.settings.downloadImages;
|
|
527
|
-
}
|
|
528
|
-
getArgs({
|
|
529
|
-
mode,
|
|
530
|
-
prompt,
|
|
531
|
-
maxTokens,
|
|
532
|
-
temperature,
|
|
533
|
-
topP,
|
|
534
|
-
topK,
|
|
535
|
-
frequencyPenalty,
|
|
536
|
-
presencePenalty,
|
|
537
|
-
stopSequences,
|
|
538
|
-
responseFormat,
|
|
539
|
-
seed,
|
|
540
|
-
providerMetadata
|
|
541
|
-
}) {
|
|
542
|
-
var _a15, _b, _c, _d, _e, _f, _g, _h;
|
|
543
|
-
const type = mode.type;
|
|
544
|
-
const warnings = [];
|
|
545
|
-
if (topK != null) {
|
|
546
|
-
warnings.push({
|
|
547
|
-
type: "unsupported-setting",
|
|
548
|
-
setting: "topK"
|
|
549
|
-
});
|
|
550
|
-
}
|
|
551
|
-
if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !this.supportsStructuredOutputs) {
|
|
552
|
-
warnings.push({
|
|
553
|
-
type: "unsupported-setting",
|
|
554
|
-
setting: "responseFormat",
|
|
555
|
-
details: "JSON response format schema is only supported with structuredOutputs"
|
|
556
|
-
});
|
|
557
|
-
}
|
|
558
|
-
const useLegacyFunctionCalling = this.settings.useLegacyFunctionCalling;
|
|
559
|
-
if (useLegacyFunctionCalling && this.settings.parallelToolCalls === true) {
|
|
560
|
-
throw new UnsupportedFunctionalityError({
|
|
561
|
-
functionality: "useLegacyFunctionCalling with parallelToolCalls"
|
|
562
|
-
});
|
|
563
|
-
}
|
|
564
|
-
if (useLegacyFunctionCalling && this.supportsStructuredOutputs) {
|
|
565
|
-
throw new UnsupportedFunctionalityError({
|
|
566
|
-
functionality: "structuredOutputs with useLegacyFunctionCalling"
|
|
567
|
-
});
|
|
568
|
-
}
|
|
569
|
-
const { messages, warnings: messageWarnings } = convertToOpenAIChatMessages(
|
|
570
|
-
{
|
|
571
|
-
prompt,
|
|
572
|
-
useLegacyFunctionCalling,
|
|
573
|
-
systemMessageMode: getSystemMessageMode(this.modelId)
|
|
574
|
-
}
|
|
575
|
-
);
|
|
576
|
-
warnings.push(...messageWarnings);
|
|
577
|
-
const baseArgs = {
|
|
578
|
-
// model id:
|
|
579
|
-
model: this.modelId,
|
|
580
|
-
// model specific settings:
|
|
581
|
-
logit_bias: this.settings.logitBias,
|
|
582
|
-
logprobs: this.settings.logprobs === true || typeof this.settings.logprobs === "number" ? true : void 0,
|
|
583
|
-
top_logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
|
|
584
|
-
user: this.settings.user,
|
|
585
|
-
parallel_tool_calls: this.settings.parallelToolCalls,
|
|
586
|
-
// standardized settings:
|
|
587
|
-
max_tokens: maxTokens,
|
|
588
|
-
temperature,
|
|
589
|
-
top_p: topP,
|
|
590
|
-
frequency_penalty: frequencyPenalty,
|
|
591
|
-
presence_penalty: presencePenalty,
|
|
592
|
-
response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? this.supportsStructuredOutputs && responseFormat.schema != null ? {
|
|
593
|
-
type: "json_schema",
|
|
594
|
-
json_schema: {
|
|
595
|
-
schema: responseFormat.schema,
|
|
596
|
-
strict: true,
|
|
597
|
-
name: (_a15 = responseFormat.name) != null ? _a15 : "response",
|
|
598
|
-
description: responseFormat.description
|
|
599
|
-
}
|
|
600
|
-
} : { type: "json_object" } : void 0,
|
|
601
|
-
stop: stopSequences,
|
|
602
|
-
seed,
|
|
603
|
-
// openai specific settings:
|
|
604
|
-
// TODO remove in next major version; we auto-map maxTokens now
|
|
605
|
-
max_completion_tokens: (_b = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _b.maxCompletionTokens,
|
|
606
|
-
store: (_c = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _c.store,
|
|
607
|
-
metadata: (_d = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _d.metadata,
|
|
608
|
-
prediction: (_e = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _e.prediction,
|
|
609
|
-
reasoning_effort: (_g = (_f = providerMetadata == null ? void 0 : providerMetadata.openai) == null ? void 0 : _f.reasoningEffort) != null ? _g : this.settings.reasoningEffort,
|
|
610
|
-
// messages:
|
|
611
|
-
messages
|
|
612
|
-
};
|
|
613
|
-
if (isReasoningModel(this.modelId)) {
|
|
614
|
-
if (baseArgs.temperature != null) {
|
|
615
|
-
baseArgs.temperature = void 0;
|
|
616
|
-
warnings.push({
|
|
617
|
-
type: "unsupported-setting",
|
|
618
|
-
setting: "temperature",
|
|
619
|
-
details: "temperature is not supported for reasoning models"
|
|
620
|
-
});
|
|
621
|
-
}
|
|
622
|
-
if (baseArgs.top_p != null) {
|
|
623
|
-
baseArgs.top_p = void 0;
|
|
624
|
-
warnings.push({
|
|
625
|
-
type: "unsupported-setting",
|
|
626
|
-
setting: "topP",
|
|
627
|
-
details: "topP is not supported for reasoning models"
|
|
628
|
-
});
|
|
629
|
-
}
|
|
630
|
-
if (baseArgs.frequency_penalty != null) {
|
|
631
|
-
baseArgs.frequency_penalty = void 0;
|
|
632
|
-
warnings.push({
|
|
633
|
-
type: "unsupported-setting",
|
|
634
|
-
setting: "frequencyPenalty",
|
|
635
|
-
details: "frequencyPenalty is not supported for reasoning models"
|
|
636
|
-
});
|
|
637
|
-
}
|
|
638
|
-
if (baseArgs.presence_penalty != null) {
|
|
639
|
-
baseArgs.presence_penalty = void 0;
|
|
640
|
-
warnings.push({
|
|
641
|
-
type: "unsupported-setting",
|
|
642
|
-
setting: "presencePenalty",
|
|
643
|
-
details: "presencePenalty is not supported for reasoning models"
|
|
644
|
-
});
|
|
645
|
-
}
|
|
646
|
-
if (baseArgs.logit_bias != null) {
|
|
647
|
-
baseArgs.logit_bias = void 0;
|
|
648
|
-
warnings.push({
|
|
649
|
-
type: "other",
|
|
650
|
-
message: "logitBias is not supported for reasoning models"
|
|
651
|
-
});
|
|
652
|
-
}
|
|
653
|
-
if (baseArgs.logprobs != null) {
|
|
654
|
-
baseArgs.logprobs = void 0;
|
|
655
|
-
warnings.push({
|
|
656
|
-
type: "other",
|
|
657
|
-
message: "logprobs is not supported for reasoning models"
|
|
658
|
-
});
|
|
659
|
-
}
|
|
660
|
-
if (baseArgs.top_logprobs != null) {
|
|
661
|
-
baseArgs.top_logprobs = void 0;
|
|
662
|
-
warnings.push({
|
|
663
|
-
type: "other",
|
|
664
|
-
message: "topLogprobs is not supported for reasoning models"
|
|
665
|
-
});
|
|
666
|
-
}
|
|
667
|
-
if (baseArgs.max_tokens != null) {
|
|
668
|
-
if (baseArgs.max_completion_tokens == null) {
|
|
669
|
-
baseArgs.max_completion_tokens = baseArgs.max_tokens;
|
|
670
|
-
}
|
|
671
|
-
baseArgs.max_tokens = void 0;
|
|
672
|
-
}
|
|
673
|
-
} else if (this.modelId.startsWith("gpt-4o-search-preview") || this.modelId.startsWith("gpt-4o-mini-search-preview")) {
|
|
674
|
-
if (baseArgs.temperature != null) {
|
|
675
|
-
baseArgs.temperature = void 0;
|
|
676
|
-
warnings.push({
|
|
677
|
-
type: "unsupported-setting",
|
|
678
|
-
setting: "temperature",
|
|
679
|
-
details: "temperature is not supported for the search preview models and has been removed."
|
|
680
|
-
});
|
|
681
|
-
}
|
|
682
|
-
}
|
|
683
|
-
switch (type) {
|
|
684
|
-
case "regular": {
|
|
685
|
-
const { tools, tool_choice, functions, function_call, toolWarnings } = prepareTools({
|
|
686
|
-
mode,
|
|
687
|
-
useLegacyFunctionCalling,
|
|
688
|
-
structuredOutputs: this.supportsStructuredOutputs
|
|
689
|
-
});
|
|
690
|
-
return {
|
|
691
|
-
args: {
|
|
692
|
-
...baseArgs,
|
|
693
|
-
tools,
|
|
694
|
-
tool_choice,
|
|
695
|
-
functions,
|
|
696
|
-
function_call
|
|
697
|
-
},
|
|
698
|
-
warnings: [...warnings, ...toolWarnings]
|
|
699
|
-
};
|
|
700
|
-
}
|
|
701
|
-
case "object-json": {
|
|
702
|
-
return {
|
|
703
|
-
args: {
|
|
704
|
-
...baseArgs,
|
|
705
|
-
response_format: this.supportsStructuredOutputs && mode.schema != null ? {
|
|
706
|
-
type: "json_schema",
|
|
707
|
-
json_schema: {
|
|
708
|
-
schema: mode.schema,
|
|
709
|
-
strict: true,
|
|
710
|
-
name: (_h = mode.name) != null ? _h : "response",
|
|
711
|
-
description: mode.description
|
|
712
|
-
}
|
|
713
|
-
} : { type: "json_object" }
|
|
714
|
-
},
|
|
715
|
-
warnings
|
|
716
|
-
};
|
|
717
|
-
}
|
|
718
|
-
case "object-tool": {
|
|
719
|
-
return {
|
|
720
|
-
args: useLegacyFunctionCalling ? {
|
|
721
|
-
...baseArgs,
|
|
722
|
-
function_call: {
|
|
723
|
-
name: mode.tool.name
|
|
724
|
-
},
|
|
725
|
-
functions: [
|
|
726
|
-
{
|
|
727
|
-
name: mode.tool.name,
|
|
728
|
-
description: mode.tool.description,
|
|
729
|
-
parameters: mode.tool.parameters
|
|
730
|
-
}
|
|
731
|
-
]
|
|
732
|
-
} : {
|
|
733
|
-
...baseArgs,
|
|
734
|
-
tool_choice: {
|
|
735
|
-
type: "function",
|
|
736
|
-
function: { name: mode.tool.name }
|
|
737
|
-
},
|
|
738
|
-
tools: [
|
|
739
|
-
{
|
|
740
|
-
type: "function",
|
|
741
|
-
function: {
|
|
742
|
-
name: mode.tool.name,
|
|
743
|
-
description: mode.tool.description,
|
|
744
|
-
parameters: mode.tool.parameters,
|
|
745
|
-
strict: this.supportsStructuredOutputs ? true : void 0
|
|
746
|
-
}
|
|
747
|
-
}
|
|
748
|
-
]
|
|
749
|
-
},
|
|
750
|
-
warnings
|
|
751
|
-
};
|
|
752
|
-
}
|
|
753
|
-
default: {
|
|
754
|
-
const _exhaustiveCheck = type;
|
|
755
|
-
throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
|
|
756
|
-
}
|
|
757
|
-
}
|
|
758
|
-
}
|
|
759
|
-
async doGenerate(options) {
|
|
760
|
-
var _a15, _b, _c, _d, _e, _f, _g, _h;
|
|
761
|
-
const { args: body, warnings } = this.getArgs(options);
|
|
762
|
-
const {
|
|
763
|
-
responseHeaders,
|
|
764
|
-
value: response,
|
|
765
|
-
rawValue: rawResponse
|
|
766
|
-
} = await postJsonToApi({
|
|
767
|
-
url: this.config.url({
|
|
768
|
-
path: "/chat/completions",
|
|
769
|
-
modelId: this.modelId
|
|
770
|
-
}),
|
|
771
|
-
headers: combineHeaders(this.config.headers(), options.headers),
|
|
772
|
-
body,
|
|
773
|
-
failedResponseHandler: openaiFailedResponseHandler,
|
|
774
|
-
successfulResponseHandler: createJsonResponseHandler(
|
|
775
|
-
openaiChatResponseSchema
|
|
776
|
-
),
|
|
777
|
-
abortSignal: options.abortSignal,
|
|
778
|
-
fetch: this.config.fetch
|
|
779
|
-
});
|
|
780
|
-
const { messages: rawPrompt, ...rawSettings } = body;
|
|
781
|
-
const choice = response.choices[0];
|
|
782
|
-
const completionTokenDetails = (_a15 = response.usage) == null ? void 0 : _a15.completion_tokens_details;
|
|
783
|
-
const promptTokenDetails = (_b = response.usage) == null ? void 0 : _b.prompt_tokens_details;
|
|
784
|
-
const providerMetadata = { openai: {} };
|
|
785
|
-
if ((completionTokenDetails == null ? void 0 : completionTokenDetails.reasoning_tokens) != null) {
|
|
786
|
-
providerMetadata.openai.reasoningTokens = completionTokenDetails == null ? void 0 : completionTokenDetails.reasoning_tokens;
|
|
787
|
-
}
|
|
788
|
-
if ((completionTokenDetails == null ? void 0 : completionTokenDetails.accepted_prediction_tokens) != null) {
|
|
789
|
-
providerMetadata.openai.acceptedPredictionTokens = completionTokenDetails == null ? void 0 : completionTokenDetails.accepted_prediction_tokens;
|
|
790
|
-
}
|
|
791
|
-
if ((completionTokenDetails == null ? void 0 : completionTokenDetails.rejected_prediction_tokens) != null) {
|
|
792
|
-
providerMetadata.openai.rejectedPredictionTokens = completionTokenDetails == null ? void 0 : completionTokenDetails.rejected_prediction_tokens;
|
|
793
|
-
}
|
|
794
|
-
if ((promptTokenDetails == null ? void 0 : promptTokenDetails.cached_tokens) != null) {
|
|
795
|
-
providerMetadata.openai.cachedPromptTokens = promptTokenDetails == null ? void 0 : promptTokenDetails.cached_tokens;
|
|
796
|
-
}
|
|
797
|
-
return {
|
|
798
|
-
text: (_c = choice.message.content) != null ? _c : void 0,
|
|
799
|
-
toolCalls: this.settings.useLegacyFunctionCalling && choice.message.function_call ? [
|
|
800
|
-
{
|
|
801
|
-
toolCallType: "function",
|
|
802
|
-
toolCallId: generateId(),
|
|
803
|
-
toolName: choice.message.function_call.name,
|
|
804
|
-
args: choice.message.function_call.arguments
|
|
805
|
-
}
|
|
806
|
-
] : (_d = choice.message.tool_calls) == null ? void 0 : _d.map((toolCall) => {
|
|
807
|
-
var _a16;
|
|
808
|
-
return {
|
|
809
|
-
toolCallType: "function",
|
|
810
|
-
toolCallId: (_a16 = toolCall.id) != null ? _a16 : generateId(),
|
|
811
|
-
toolName: toolCall.function.name,
|
|
812
|
-
args: toolCall.function.arguments
|
|
813
|
-
};
|
|
814
|
-
}),
|
|
815
|
-
finishReason: mapOpenAIFinishReason(choice.finish_reason),
|
|
816
|
-
usage: {
|
|
817
|
-
promptTokens: (_f = (_e = response.usage) == null ? void 0 : _e.prompt_tokens) != null ? _f : NaN,
|
|
818
|
-
completionTokens: (_h = (_g = response.usage) == null ? void 0 : _g.completion_tokens) != null ? _h : NaN
|
|
819
|
-
},
|
|
820
|
-
rawCall: { rawPrompt, rawSettings },
|
|
821
|
-
rawResponse: { headers: responseHeaders, body: rawResponse },
|
|
822
|
-
request: { body: JSON.stringify(body) },
|
|
823
|
-
response: getResponseMetadata(response),
|
|
824
|
-
warnings,
|
|
825
|
-
logprobs: mapOpenAIChatLogProbsOutput(choice.logprobs),
|
|
826
|
-
providerMetadata
|
|
827
|
-
};
|
|
828
|
-
}
|
|
829
|
-
async doStream(options) {
|
|
830
|
-
if (this.settings.simulateStreaming) {
|
|
831
|
-
const result = await this.doGenerate(options);
|
|
832
|
-
const simulatedStream = new ReadableStream({
|
|
833
|
-
start(controller) {
|
|
834
|
-
controller.enqueue({ type: "response-metadata", ...result.response });
|
|
835
|
-
if (result.text) {
|
|
836
|
-
controller.enqueue({
|
|
837
|
-
type: "text-delta",
|
|
838
|
-
textDelta: result.text
|
|
839
|
-
});
|
|
840
|
-
}
|
|
841
|
-
if (result.toolCalls) {
|
|
842
|
-
for (const toolCall of result.toolCalls) {
|
|
843
|
-
controller.enqueue({
|
|
844
|
-
type: "tool-call-delta",
|
|
845
|
-
toolCallType: "function",
|
|
846
|
-
toolCallId: toolCall.toolCallId,
|
|
847
|
-
toolName: toolCall.toolName,
|
|
848
|
-
argsTextDelta: toolCall.args
|
|
849
|
-
});
|
|
850
|
-
controller.enqueue({
|
|
851
|
-
type: "tool-call",
|
|
852
|
-
...toolCall
|
|
853
|
-
});
|
|
854
|
-
}
|
|
855
|
-
}
|
|
856
|
-
controller.enqueue({
|
|
857
|
-
type: "finish",
|
|
858
|
-
finishReason: result.finishReason,
|
|
859
|
-
usage: result.usage,
|
|
860
|
-
logprobs: result.logprobs,
|
|
861
|
-
providerMetadata: result.providerMetadata
|
|
862
|
-
});
|
|
863
|
-
controller.close();
|
|
864
|
-
}
|
|
865
|
-
});
|
|
866
|
-
return {
|
|
867
|
-
stream: simulatedStream,
|
|
868
|
-
rawCall: result.rawCall,
|
|
869
|
-
rawResponse: result.rawResponse,
|
|
870
|
-
warnings: result.warnings
|
|
871
|
-
};
|
|
872
|
-
}
|
|
873
|
-
const { args, warnings } = this.getArgs(options);
|
|
874
|
-
const body = {
|
|
875
|
-
...args,
|
|
876
|
-
stream: true,
|
|
877
|
-
// only include stream_options when in strict compatibility mode:
|
|
878
|
-
stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
|
|
879
|
-
};
|
|
880
|
-
const { responseHeaders, value: response } = await postJsonToApi({
|
|
881
|
-
url: this.config.url({
|
|
882
|
-
path: "/chat/completions",
|
|
883
|
-
modelId: this.modelId
|
|
884
|
-
}),
|
|
885
|
-
headers: combineHeaders(this.config.headers(), options.headers),
|
|
886
|
-
body,
|
|
887
|
-
failedResponseHandler: openaiFailedResponseHandler,
|
|
888
|
-
successfulResponseHandler: createEventSourceResponseHandler(
|
|
889
|
-
openaiChatChunkSchema
|
|
890
|
-
),
|
|
891
|
-
abortSignal: options.abortSignal,
|
|
892
|
-
fetch: this.config.fetch
|
|
893
|
-
});
|
|
894
|
-
const { messages: rawPrompt, ...rawSettings } = args;
|
|
895
|
-
const toolCalls = [];
|
|
896
|
-
let finishReason = "unknown";
|
|
897
|
-
let usage = {
|
|
898
|
-
promptTokens: void 0,
|
|
899
|
-
completionTokens: void 0
|
|
900
|
-
};
|
|
901
|
-
let logprobs;
|
|
902
|
-
let isFirstChunk = true;
|
|
903
|
-
const { useLegacyFunctionCalling } = this.settings;
|
|
904
|
-
const providerMetadata = { openai: {} };
|
|
905
|
-
return {
|
|
906
|
-
stream: response.pipeThrough(
|
|
907
|
-
new TransformStream({
|
|
908
|
-
transform(chunk, controller) {
|
|
909
|
-
var _a15, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
|
|
910
|
-
if (!chunk.success) {
|
|
911
|
-
finishReason = "error";
|
|
912
|
-
controller.enqueue({ type: "error", error: chunk.error });
|
|
913
|
-
return;
|
|
914
|
-
}
|
|
915
|
-
const value = chunk.value;
|
|
916
|
-
if ("error" in value) {
|
|
917
|
-
finishReason = "error";
|
|
918
|
-
controller.enqueue({ type: "error", error: value.error });
|
|
919
|
-
return;
|
|
920
|
-
}
|
|
921
|
-
if (isFirstChunk) {
|
|
922
|
-
isFirstChunk = false;
|
|
923
|
-
controller.enqueue({
|
|
924
|
-
type: "response-metadata",
|
|
925
|
-
...getResponseMetadata(value)
|
|
926
|
-
});
|
|
927
|
-
}
|
|
928
|
-
if (value.usage != null) {
|
|
929
|
-
const {
|
|
930
|
-
prompt_tokens,
|
|
931
|
-
completion_tokens,
|
|
932
|
-
prompt_tokens_details,
|
|
933
|
-
completion_tokens_details
|
|
934
|
-
} = value.usage;
|
|
935
|
-
usage = {
|
|
936
|
-
promptTokens: prompt_tokens != null ? prompt_tokens : void 0,
|
|
937
|
-
completionTokens: completion_tokens != null ? completion_tokens : void 0
|
|
938
|
-
};
|
|
939
|
-
if ((completion_tokens_details == null ? void 0 : completion_tokens_details.reasoning_tokens) != null) {
|
|
940
|
-
providerMetadata.openai.reasoningTokens = completion_tokens_details == null ? void 0 : completion_tokens_details.reasoning_tokens;
|
|
941
|
-
}
|
|
942
|
-
if ((completion_tokens_details == null ? void 0 : completion_tokens_details.accepted_prediction_tokens) != null) {
|
|
943
|
-
providerMetadata.openai.acceptedPredictionTokens = completion_tokens_details == null ? void 0 : completion_tokens_details.accepted_prediction_tokens;
|
|
944
|
-
}
|
|
945
|
-
if ((completion_tokens_details == null ? void 0 : completion_tokens_details.rejected_prediction_tokens) != null) {
|
|
946
|
-
providerMetadata.openai.rejectedPredictionTokens = completion_tokens_details == null ? void 0 : completion_tokens_details.rejected_prediction_tokens;
|
|
947
|
-
}
|
|
948
|
-
if ((prompt_tokens_details == null ? void 0 : prompt_tokens_details.cached_tokens) != null) {
|
|
949
|
-
providerMetadata.openai.cachedPromptTokens = prompt_tokens_details == null ? void 0 : prompt_tokens_details.cached_tokens;
|
|
950
|
-
}
|
|
951
|
-
}
|
|
952
|
-
const choice = value.choices[0];
|
|
953
|
-
if ((choice == null ? void 0 : choice.finish_reason) != null) {
|
|
954
|
-
finishReason = mapOpenAIFinishReason(choice.finish_reason);
|
|
955
|
-
}
|
|
956
|
-
if ((choice == null ? void 0 : choice.delta) == null) {
|
|
957
|
-
return;
|
|
958
|
-
}
|
|
959
|
-
const delta = choice.delta;
|
|
960
|
-
if (delta.content != null) {
|
|
961
|
-
controller.enqueue({
|
|
962
|
-
type: "text-delta",
|
|
963
|
-
textDelta: delta.content
|
|
964
|
-
});
|
|
965
|
-
}
|
|
966
|
-
const mappedLogprobs = mapOpenAIChatLogProbsOutput(
|
|
967
|
-
choice == null ? void 0 : choice.logprobs
|
|
968
|
-
);
|
|
969
|
-
if (mappedLogprobs == null ? void 0 : mappedLogprobs.length) {
|
|
970
|
-
if (logprobs === void 0) logprobs = [];
|
|
971
|
-
logprobs.push(...mappedLogprobs);
|
|
972
|
-
}
|
|
973
|
-
const mappedToolCalls = useLegacyFunctionCalling && delta.function_call != null ? [
|
|
974
|
-
{
|
|
975
|
-
type: "function",
|
|
976
|
-
id: generateId(),
|
|
977
|
-
function: delta.function_call,
|
|
978
|
-
index: 0
|
|
979
|
-
}
|
|
980
|
-
] : delta.tool_calls;
|
|
981
|
-
if (mappedToolCalls != null) {
|
|
982
|
-
for (const toolCallDelta of mappedToolCalls) {
|
|
983
|
-
const index = toolCallDelta.index;
|
|
984
|
-
if (toolCalls[index] == null) {
|
|
985
|
-
if (toolCallDelta.type !== "function") {
|
|
986
|
-
throw new InvalidResponseDataError({
|
|
987
|
-
data: toolCallDelta,
|
|
988
|
-
message: `Expected 'function' type.`
|
|
989
|
-
});
|
|
990
|
-
}
|
|
991
|
-
if (toolCallDelta.id == null) {
|
|
992
|
-
throw new InvalidResponseDataError({
|
|
993
|
-
data: toolCallDelta,
|
|
994
|
-
message: `Expected 'id' to be a string.`
|
|
995
|
-
});
|
|
996
|
-
}
|
|
997
|
-
if (((_a15 = toolCallDelta.function) == null ? void 0 : _a15.name) == null) {
|
|
998
|
-
throw new InvalidResponseDataError({
|
|
999
|
-
data: toolCallDelta,
|
|
1000
|
-
message: `Expected 'function.name' to be a string.`
|
|
1001
|
-
});
|
|
1002
|
-
}
|
|
1003
|
-
toolCalls[index] = {
|
|
1004
|
-
id: toolCallDelta.id,
|
|
1005
|
-
type: "function",
|
|
1006
|
-
function: {
|
|
1007
|
-
name: toolCallDelta.function.name,
|
|
1008
|
-
arguments: (_b = toolCallDelta.function.arguments) != null ? _b : ""
|
|
1009
|
-
},
|
|
1010
|
-
hasFinished: false
|
|
1011
|
-
};
|
|
1012
|
-
const toolCall2 = toolCalls[index];
|
|
1013
|
-
if (((_c = toolCall2.function) == null ? void 0 : _c.name) != null && ((_d = toolCall2.function) == null ? void 0 : _d.arguments) != null) {
|
|
1014
|
-
if (toolCall2.function.arguments.length > 0) {
|
|
1015
|
-
controller.enqueue({
|
|
1016
|
-
type: "tool-call-delta",
|
|
1017
|
-
toolCallType: "function",
|
|
1018
|
-
toolCallId: toolCall2.id,
|
|
1019
|
-
toolName: toolCall2.function.name,
|
|
1020
|
-
argsTextDelta: toolCall2.function.arguments
|
|
1021
|
-
});
|
|
1022
|
-
}
|
|
1023
|
-
if (isParsableJson(toolCall2.function.arguments)) {
|
|
1024
|
-
controller.enqueue({
|
|
1025
|
-
type: "tool-call",
|
|
1026
|
-
toolCallType: "function",
|
|
1027
|
-
toolCallId: (_e = toolCall2.id) != null ? _e : generateId(),
|
|
1028
|
-
toolName: toolCall2.function.name,
|
|
1029
|
-
args: toolCall2.function.arguments
|
|
1030
|
-
});
|
|
1031
|
-
toolCall2.hasFinished = true;
|
|
1032
|
-
}
|
|
1033
|
-
}
|
|
1034
|
-
continue;
|
|
1035
|
-
}
|
|
1036
|
-
const toolCall = toolCalls[index];
|
|
1037
|
-
if (toolCall.hasFinished) {
|
|
1038
|
-
continue;
|
|
1039
|
-
}
|
|
1040
|
-
if (((_f = toolCallDelta.function) == null ? void 0 : _f.arguments) != null) {
|
|
1041
|
-
toolCall.function.arguments += (_h = (_g = toolCallDelta.function) == null ? void 0 : _g.arguments) != null ? _h : "";
|
|
1042
|
-
}
|
|
1043
|
-
controller.enqueue({
|
|
1044
|
-
type: "tool-call-delta",
|
|
1045
|
-
toolCallType: "function",
|
|
1046
|
-
toolCallId: toolCall.id,
|
|
1047
|
-
toolName: toolCall.function.name,
|
|
1048
|
-
argsTextDelta: (_i = toolCallDelta.function.arguments) != null ? _i : ""
|
|
1049
|
-
});
|
|
1050
|
-
if (((_j = toolCall.function) == null ? void 0 : _j.name) != null && ((_k = toolCall.function) == null ? void 0 : _k.arguments) != null && isParsableJson(toolCall.function.arguments)) {
|
|
1051
|
-
controller.enqueue({
|
|
1052
|
-
type: "tool-call",
|
|
1053
|
-
toolCallType: "function",
|
|
1054
|
-
toolCallId: (_l = toolCall.id) != null ? _l : generateId(),
|
|
1055
|
-
toolName: toolCall.function.name,
|
|
1056
|
-
args: toolCall.function.arguments
|
|
1057
|
-
});
|
|
1058
|
-
toolCall.hasFinished = true;
|
|
1059
|
-
}
|
|
1060
|
-
}
|
|
1061
|
-
}
|
|
1062
|
-
},
|
|
1063
|
-
flush(controller) {
|
|
1064
|
-
var _a15, _b;
|
|
1065
|
-
controller.enqueue({
|
|
1066
|
-
type: "finish",
|
|
1067
|
-
finishReason,
|
|
1068
|
-
logprobs,
|
|
1069
|
-
usage: {
|
|
1070
|
-
promptTokens: (_a15 = usage.promptTokens) != null ? _a15 : NaN,
|
|
1071
|
-
completionTokens: (_b = usage.completionTokens) != null ? _b : NaN
|
|
1072
|
-
},
|
|
1073
|
-
...providerMetadata != null ? { providerMetadata } : {}
|
|
1074
|
-
});
|
|
1075
|
-
}
|
|
1076
|
-
})
|
|
1077
|
-
),
|
|
1078
|
-
rawCall: { rawPrompt, rawSettings },
|
|
1079
|
-
rawResponse: { headers: responseHeaders },
|
|
1080
|
-
request: { body: JSON.stringify(body) },
|
|
1081
|
-
warnings
|
|
1082
|
-
};
|
|
1083
|
-
}
|
|
1084
|
-
};
|
|
1085
|
-
var openaiTokenUsageSchema = z2.object({
|
|
1086
|
-
prompt_tokens: z2.number().nullish(),
|
|
1087
|
-
completion_tokens: z2.number().nullish(),
|
|
1088
|
-
prompt_tokens_details: z2.object({
|
|
1089
|
-
cached_tokens: z2.number().nullish()
|
|
1090
|
-
}).nullish(),
|
|
1091
|
-
completion_tokens_details: z2.object({
|
|
1092
|
-
reasoning_tokens: z2.number().nullish(),
|
|
1093
|
-
accepted_prediction_tokens: z2.number().nullish(),
|
|
1094
|
-
rejected_prediction_tokens: z2.number().nullish()
|
|
1095
|
-
}).nullish()
|
|
1096
|
-
}).nullish();
|
|
1097
|
-
var openaiChatResponseSchema = z2.object({
|
|
1098
|
-
id: z2.string().nullish(),
|
|
1099
|
-
created: z2.number().nullish(),
|
|
1100
|
-
model: z2.string().nullish(),
|
|
1101
|
-
choices: z2.array(
|
|
1102
|
-
z2.object({
|
|
1103
|
-
message: z2.object({
|
|
1104
|
-
role: z2.literal("assistant").nullish(),
|
|
1105
|
-
content: z2.string().nullish(),
|
|
1106
|
-
function_call: z2.object({
|
|
1107
|
-
arguments: z2.string(),
|
|
1108
|
-
name: z2.string()
|
|
1109
|
-
}).nullish(),
|
|
1110
|
-
tool_calls: z2.array(
|
|
1111
|
-
z2.object({
|
|
1112
|
-
id: z2.string().nullish(),
|
|
1113
|
-
type: z2.literal("function"),
|
|
1114
|
-
function: z2.object({
|
|
1115
|
-
name: z2.string(),
|
|
1116
|
-
arguments: z2.string()
|
|
1117
|
-
})
|
|
1118
|
-
})
|
|
1119
|
-
).nullish()
|
|
1120
|
-
}),
|
|
1121
|
-
index: z2.number(),
|
|
1122
|
-
logprobs: z2.object({
|
|
1123
|
-
content: z2.array(
|
|
1124
|
-
z2.object({
|
|
1125
|
-
token: z2.string(),
|
|
1126
|
-
logprob: z2.number(),
|
|
1127
|
-
top_logprobs: z2.array(
|
|
1128
|
-
z2.object({
|
|
1129
|
-
token: z2.string(),
|
|
1130
|
-
logprob: z2.number()
|
|
1131
|
-
})
|
|
1132
|
-
)
|
|
1133
|
-
})
|
|
1134
|
-
).nullable()
|
|
1135
|
-
}).nullish(),
|
|
1136
|
-
finish_reason: z2.string().nullish()
|
|
1137
|
-
})
|
|
1138
|
-
),
|
|
1139
|
-
usage: openaiTokenUsageSchema
|
|
1140
|
-
});
|
|
1141
|
-
var openaiChatChunkSchema = z2.union([
|
|
1142
|
-
z2.object({
|
|
1143
|
-
id: z2.string().nullish(),
|
|
1144
|
-
created: z2.number().nullish(),
|
|
1145
|
-
model: z2.string().nullish(),
|
|
1146
|
-
choices: z2.array(
|
|
1147
|
-
z2.object({
|
|
1148
|
-
delta: z2.object({
|
|
1149
|
-
role: z2.enum(["assistant"]).nullish(),
|
|
1150
|
-
content: z2.string().nullish(),
|
|
1151
|
-
function_call: z2.object({
|
|
1152
|
-
name: z2.string().optional(),
|
|
1153
|
-
arguments: z2.string().optional()
|
|
1154
|
-
}).nullish(),
|
|
1155
|
-
tool_calls: z2.array(
|
|
1156
|
-
z2.object({
|
|
1157
|
-
index: z2.number(),
|
|
1158
|
-
id: z2.string().nullish(),
|
|
1159
|
-
type: z2.literal("function").nullish(),
|
|
1160
|
-
function: z2.object({
|
|
1161
|
-
name: z2.string().nullish(),
|
|
1162
|
-
arguments: z2.string().nullish()
|
|
1163
|
-
})
|
|
1164
|
-
})
|
|
1165
|
-
).nullish()
|
|
1166
|
-
}).nullish(),
|
|
1167
|
-
logprobs: z2.object({
|
|
1168
|
-
content: z2.array(
|
|
1169
|
-
z2.object({
|
|
1170
|
-
token: z2.string(),
|
|
1171
|
-
logprob: z2.number(),
|
|
1172
|
-
top_logprobs: z2.array(
|
|
1173
|
-
z2.object({
|
|
1174
|
-
token: z2.string(),
|
|
1175
|
-
logprob: z2.number()
|
|
1176
|
-
})
|
|
1177
|
-
)
|
|
1178
|
-
})
|
|
1179
|
-
).nullable()
|
|
1180
|
-
}).nullish(),
|
|
1181
|
-
finish_reason: z2.string().nullish(),
|
|
1182
|
-
index: z2.number()
|
|
1183
|
-
})
|
|
1184
|
-
),
|
|
1185
|
-
usage: openaiTokenUsageSchema
|
|
1186
|
-
}),
|
|
1187
|
-
openaiErrorDataSchema
|
|
1188
|
-
]);
|
|
1189
|
-
function isReasoningModel(modelId) {
|
|
1190
|
-
return modelId.startsWith("o");
|
|
1191
|
-
}
|
|
1192
|
-
function isAudioModel(modelId) {
|
|
1193
|
-
return modelId.startsWith("gpt-4o-audio-preview");
|
|
1194
|
-
}
|
|
1195
|
-
function getSystemMessageMode(modelId) {
|
|
1196
|
-
var _a15, _b;
|
|
1197
|
-
if (!isReasoningModel(modelId)) {
|
|
1198
|
-
return "system";
|
|
1199
|
-
}
|
|
1200
|
-
return (_b = (_a15 = reasoningModels[modelId]) == null ? void 0 : _a15.systemMessageMode) != null ? _b : "developer";
|
|
1201
|
-
}
|
|
1202
|
-
var reasoningModels = {
|
|
1203
|
-
"o1-mini": {
|
|
1204
|
-
systemMessageMode: "remove"
|
|
1205
|
-
},
|
|
1206
|
-
"o1-mini-2024-09-12": {
|
|
1207
|
-
systemMessageMode: "remove"
|
|
1208
|
-
},
|
|
1209
|
-
"o1-preview": {
|
|
1210
|
-
systemMessageMode: "remove"
|
|
1211
|
-
},
|
|
1212
|
-
"o1-preview-2024-09-12": {
|
|
1213
|
-
systemMessageMode: "remove"
|
|
1214
|
-
},
|
|
1215
|
-
o3: {
|
|
1216
|
-
systemMessageMode: "developer"
|
|
1217
|
-
},
|
|
1218
|
-
"o3-2025-04-16": {
|
|
1219
|
-
systemMessageMode: "developer"
|
|
1220
|
-
},
|
|
1221
|
-
"o3-mini": {
|
|
1222
|
-
systemMessageMode: "developer"
|
|
1223
|
-
},
|
|
1224
|
-
"o3-mini-2025-01-31": {
|
|
1225
|
-
systemMessageMode: "developer"
|
|
1226
|
-
},
|
|
1227
|
-
"o4-mini": {
|
|
1228
|
-
systemMessageMode: "developer"
|
|
1229
|
-
},
|
|
1230
|
-
"o4-mini-2025-04-16": {
|
|
1231
|
-
systemMessageMode: "developer"
|
|
1232
|
-
}
|
|
1233
|
-
};
|
|
1234
|
-
|
|
1235
|
-
// src/openai-completion-language-model.ts
|
|
1236
|
-
import {
|
|
1237
|
-
combineHeaders as combineHeaders2,
|
|
1238
|
-
createEventSourceResponseHandler as createEventSourceResponseHandler2,
|
|
1239
|
-
createJsonResponseHandler as createJsonResponseHandler2,
|
|
1240
|
-
postJsonToApi as postJsonToApi2
|
|
1241
|
-
} from "@ai-sdk/provider-utils";
|
|
1242
|
-
import { z as z3 } from "zod";
|
|
1243
|
-
|
|
1244
|
-
// src/convert-to-openai-completion-prompt.ts
|
|
1245
|
-
function convertToOpenAICompletionPrompt({
|
|
1246
|
-
prompt,
|
|
1247
|
-
inputFormat,
|
|
1248
|
-
user = "user",
|
|
1249
|
-
assistant = "assistant"
|
|
1250
|
-
}) {
|
|
1251
|
-
if (inputFormat === "prompt" && prompt.length === 1 && prompt[0].role === "user" && prompt[0].content.length === 1 && prompt[0].content[0].type === "text") {
|
|
1252
|
-
return { prompt: prompt[0].content[0].text };
|
|
1253
|
-
}
|
|
1254
|
-
let text = "";
|
|
1255
|
-
if (prompt[0].role === "system") {
|
|
1256
|
-
text += `${prompt[0].content}
|
|
1257
|
-
|
|
1258
|
-
`;
|
|
1259
|
-
prompt = prompt.slice(1);
|
|
1260
|
-
}
|
|
1261
|
-
for (const { role, content } of prompt) {
|
|
1262
|
-
switch (role) {
|
|
1263
|
-
case "system": {
|
|
1264
|
-
throw new InvalidPromptError({
|
|
1265
|
-
message: "Unexpected system message in prompt: ${content}",
|
|
1266
|
-
prompt
|
|
1267
|
-
});
|
|
1268
|
-
}
|
|
1269
|
-
case "user": {
|
|
1270
|
-
const userMessage = content.map((part) => {
|
|
1271
|
-
switch (part.type) {
|
|
1272
|
-
case "text": {
|
|
1273
|
-
return part.text;
|
|
1274
|
-
}
|
|
1275
|
-
case "image": {
|
|
1276
|
-
throw new UnsupportedFunctionalityError({
|
|
1277
|
-
functionality: "images"
|
|
1278
|
-
});
|
|
1279
|
-
}
|
|
1280
|
-
}
|
|
1281
|
-
}).join("");
|
|
1282
|
-
text += `${user}:
|
|
1283
|
-
${userMessage}
|
|
1284
|
-
|
|
1285
|
-
`;
|
|
1286
|
-
break;
|
|
1287
|
-
}
|
|
1288
|
-
case "assistant": {
|
|
1289
|
-
const assistantMessage = content.map((part) => {
|
|
1290
|
-
switch (part.type) {
|
|
1291
|
-
case "text": {
|
|
1292
|
-
return part.text;
|
|
1293
|
-
}
|
|
1294
|
-
case "tool-call": {
|
|
1295
|
-
throw new UnsupportedFunctionalityError({
|
|
1296
|
-
functionality: "tool-call messages"
|
|
1297
|
-
});
|
|
1298
|
-
}
|
|
1299
|
-
}
|
|
1300
|
-
}).join("");
|
|
1301
|
-
text += `${assistant}:
|
|
1302
|
-
${assistantMessage}
|
|
1303
|
-
|
|
1304
|
-
`;
|
|
1305
|
-
break;
|
|
1306
|
-
}
|
|
1307
|
-
case "tool": {
|
|
1308
|
-
throw new UnsupportedFunctionalityError({
|
|
1309
|
-
functionality: "tool messages"
|
|
1310
|
-
});
|
|
1311
|
-
}
|
|
1312
|
-
default: {
|
|
1313
|
-
const _exhaustiveCheck = role;
|
|
1314
|
-
throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
|
|
1315
|
-
}
|
|
1316
|
-
}
|
|
1317
|
-
}
|
|
1318
|
-
text += `${assistant}:
|
|
1319
|
-
`;
|
|
1320
|
-
return {
|
|
1321
|
-
prompt: text,
|
|
1322
|
-
stopSequences: [`
|
|
1323
|
-
${user}:`]
|
|
1324
|
-
};
|
|
1325
|
-
}
|
|
1326
|
-
|
|
1327
|
-
// src/map-openai-completion-logprobs.ts
|
|
1328
|
-
function mapOpenAICompletionLogProbs(logprobs) {
|
|
1329
|
-
return logprobs == null ? void 0 : logprobs.tokens.map((token, index) => ({
|
|
1330
|
-
token,
|
|
1331
|
-
logprob: logprobs.token_logprobs[index],
|
|
1332
|
-
topLogprobs: logprobs.top_logprobs ? Object.entries(logprobs.top_logprobs[index]).map(
|
|
1333
|
-
([token2, logprob]) => ({
|
|
1334
|
-
token: token2,
|
|
1335
|
-
logprob
|
|
1336
|
-
})
|
|
1337
|
-
) : []
|
|
1338
|
-
}));
|
|
1339
|
-
}
|
|
1340
|
-
|
|
1341
|
-
// src/openai-completion-language-model.ts
|
|
1342
|
-
var OpenAICompletionLanguageModel = class {
|
|
1343
|
-
constructor(modelId, settings, config) {
|
|
1344
|
-
this.specificationVersion = "v1";
|
|
1345
|
-
this.defaultObjectGenerationMode = void 0;
|
|
1346
|
-
this.modelId = modelId;
|
|
1347
|
-
this.settings = settings;
|
|
1348
|
-
this.config = config;
|
|
1349
|
-
}
|
|
1350
|
-
get provider() {
|
|
1351
|
-
return this.config.provider;
|
|
1352
|
-
}
|
|
1353
|
-
getArgs({
|
|
1354
|
-
mode,
|
|
1355
|
-
inputFormat,
|
|
1356
|
-
prompt,
|
|
1357
|
-
maxTokens,
|
|
1358
|
-
temperature,
|
|
1359
|
-
topP,
|
|
1360
|
-
topK,
|
|
1361
|
-
frequencyPenalty,
|
|
1362
|
-
presencePenalty,
|
|
1363
|
-
stopSequences: userStopSequences,
|
|
1364
|
-
responseFormat,
|
|
1365
|
-
seed
|
|
1366
|
-
}) {
|
|
1367
|
-
var _a15;
|
|
1368
|
-
const type = mode.type;
|
|
1369
|
-
const warnings = [];
|
|
1370
|
-
if (topK != null) {
|
|
1371
|
-
warnings.push({
|
|
1372
|
-
type: "unsupported-setting",
|
|
1373
|
-
setting: "topK"
|
|
1374
|
-
});
|
|
1375
|
-
}
|
|
1376
|
-
if (responseFormat != null && responseFormat.type !== "text") {
|
|
1377
|
-
warnings.push({
|
|
1378
|
-
type: "unsupported-setting",
|
|
1379
|
-
setting: "responseFormat",
|
|
1380
|
-
details: "JSON response format is not supported."
|
|
1381
|
-
});
|
|
1382
|
-
}
|
|
1383
|
-
const { prompt: completionPrompt, stopSequences } = convertToOpenAICompletionPrompt({ prompt, inputFormat });
|
|
1384
|
-
const stop = [...stopSequences != null ? stopSequences : [], ...userStopSequences != null ? userStopSequences : []];
|
|
1385
|
-
const baseArgs = {
|
|
1386
|
-
// model id:
|
|
1387
|
-
model: this.modelId,
|
|
1388
|
-
// model specific settings:
|
|
1389
|
-
echo: this.settings.echo,
|
|
1390
|
-
logit_bias: this.settings.logitBias,
|
|
1391
|
-
logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
|
|
1392
|
-
suffix: this.settings.suffix,
|
|
1393
|
-
user: this.settings.user,
|
|
1394
|
-
// standardized settings:
|
|
1395
|
-
max_tokens: maxTokens,
|
|
1396
|
-
temperature,
|
|
1397
|
-
top_p: topP,
|
|
1398
|
-
frequency_penalty: frequencyPenalty,
|
|
1399
|
-
presence_penalty: presencePenalty,
|
|
1400
|
-
seed,
|
|
1401
|
-
// prompt:
|
|
1402
|
-
prompt: completionPrompt,
|
|
1403
|
-
// stop sequences:
|
|
1404
|
-
stop: stop.length > 0 ? stop : void 0
|
|
1405
|
-
};
|
|
1406
|
-
switch (type) {
|
|
1407
|
-
case "regular": {
|
|
1408
|
-
if ((_a15 = mode.tools) == null ? void 0 : _a15.length) {
|
|
1409
|
-
throw new UnsupportedFunctionalityError({
|
|
1410
|
-
functionality: "tools"
|
|
1411
|
-
});
|
|
1412
|
-
}
|
|
1413
|
-
if (mode.toolChoice) {
|
|
1414
|
-
throw new UnsupportedFunctionalityError({
|
|
1415
|
-
functionality: "toolChoice"
|
|
1416
|
-
});
|
|
1417
|
-
}
|
|
1418
|
-
return { args: baseArgs, warnings };
|
|
1419
|
-
}
|
|
1420
|
-
case "object-json": {
|
|
1421
|
-
throw new UnsupportedFunctionalityError({
|
|
1422
|
-
functionality: "object-json mode"
|
|
1423
|
-
});
|
|
1424
|
-
}
|
|
1425
|
-
case "object-tool": {
|
|
1426
|
-
throw new UnsupportedFunctionalityError({
|
|
1427
|
-
functionality: "object-tool mode"
|
|
1428
|
-
});
|
|
1429
|
-
}
|
|
1430
|
-
default: {
|
|
1431
|
-
const _exhaustiveCheck = type;
|
|
1432
|
-
throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
|
|
1433
|
-
}
|
|
1434
|
-
}
|
|
1435
|
-
}
|
|
1436
|
-
async doGenerate(options) {
|
|
1437
|
-
const { args, warnings } = this.getArgs(options);
|
|
1438
|
-
const {
|
|
1439
|
-
responseHeaders,
|
|
1440
|
-
value: response,
|
|
1441
|
-
rawValue: rawResponse
|
|
1442
|
-
} = await postJsonToApi2({
|
|
1443
|
-
url: this.config.url({
|
|
1444
|
-
path: "/completions",
|
|
1445
|
-
modelId: this.modelId
|
|
1446
|
-
}),
|
|
1447
|
-
headers: combineHeaders2(this.config.headers(), options.headers),
|
|
1448
|
-
body: args,
|
|
1449
|
-
failedResponseHandler: openaiFailedResponseHandler,
|
|
1450
|
-
successfulResponseHandler: createJsonResponseHandler2(
|
|
1451
|
-
openaiCompletionResponseSchema
|
|
1452
|
-
),
|
|
1453
|
-
abortSignal: options.abortSignal,
|
|
1454
|
-
fetch: this.config.fetch
|
|
1455
|
-
});
|
|
1456
|
-
const { prompt: rawPrompt, ...rawSettings } = args;
|
|
1457
|
-
const choice = response.choices[0];
|
|
1458
|
-
return {
|
|
1459
|
-
text: choice.text,
|
|
1460
|
-
usage: {
|
|
1461
|
-
promptTokens: response.usage.prompt_tokens,
|
|
1462
|
-
completionTokens: response.usage.completion_tokens
|
|
1463
|
-
},
|
|
1464
|
-
finishReason: mapOpenAIFinishReason(choice.finish_reason),
|
|
1465
|
-
logprobs: mapOpenAICompletionLogProbs(choice.logprobs),
|
|
1466
|
-
rawCall: { rawPrompt, rawSettings },
|
|
1467
|
-
rawResponse: { headers: responseHeaders, body: rawResponse },
|
|
1468
|
-
response: getResponseMetadata(response),
|
|
1469
|
-
warnings,
|
|
1470
|
-
request: { body: JSON.stringify(args) }
|
|
1471
|
-
};
|
|
1472
|
-
}
|
|
1473
|
-
async doStream(options) {
|
|
1474
|
-
const { args, warnings } = this.getArgs(options);
|
|
1475
|
-
const body = {
|
|
1476
|
-
...args,
|
|
1477
|
-
stream: true,
|
|
1478
|
-
// only include stream_options when in strict compatibility mode:
|
|
1479
|
-
stream_options: this.config.compatibility === "strict" ? { include_usage: true } : void 0
|
|
1480
|
-
};
|
|
1481
|
-
const { responseHeaders, value: response } = await postJsonToApi2({
|
|
1482
|
-
url: this.config.url({
|
|
1483
|
-
path: "/completions",
|
|
1484
|
-
modelId: this.modelId
|
|
1485
|
-
}),
|
|
1486
|
-
headers: combineHeaders2(this.config.headers(), options.headers),
|
|
1487
|
-
body,
|
|
1488
|
-
failedResponseHandler: openaiFailedResponseHandler,
|
|
1489
|
-
successfulResponseHandler: createEventSourceResponseHandler2(
|
|
1490
|
-
openaiCompletionChunkSchema
|
|
1491
|
-
),
|
|
1492
|
-
abortSignal: options.abortSignal,
|
|
1493
|
-
fetch: this.config.fetch
|
|
1494
|
-
});
|
|
1495
|
-
const { prompt: rawPrompt, ...rawSettings } = args;
|
|
1496
|
-
let finishReason = "unknown";
|
|
1497
|
-
let usage = {
|
|
1498
|
-
promptTokens: Number.NaN,
|
|
1499
|
-
completionTokens: Number.NaN
|
|
1500
|
-
};
|
|
1501
|
-
let logprobs;
|
|
1502
|
-
let isFirstChunk = true;
|
|
1503
|
-
return {
|
|
1504
|
-
stream: response.pipeThrough(
|
|
1505
|
-
new TransformStream({
|
|
1506
|
-
transform(chunk, controller) {
|
|
1507
|
-
if (!chunk.success) {
|
|
1508
|
-
finishReason = "error";
|
|
1509
|
-
controller.enqueue({ type: "error", error: chunk.error });
|
|
1510
|
-
return;
|
|
1511
|
-
}
|
|
1512
|
-
const value = chunk.value;
|
|
1513
|
-
if ("error" in value) {
|
|
1514
|
-
finishReason = "error";
|
|
1515
|
-
controller.enqueue({ type: "error", error: value.error });
|
|
1516
|
-
return;
|
|
1517
|
-
}
|
|
1518
|
-
if (isFirstChunk) {
|
|
1519
|
-
isFirstChunk = false;
|
|
1520
|
-
controller.enqueue({
|
|
1521
|
-
type: "response-metadata",
|
|
1522
|
-
...getResponseMetadata(value)
|
|
1523
|
-
});
|
|
1524
|
-
}
|
|
1525
|
-
if (value.usage != null) {
|
|
1526
|
-
usage = {
|
|
1527
|
-
promptTokens: value.usage.prompt_tokens,
|
|
1528
|
-
completionTokens: value.usage.completion_tokens
|
|
1529
|
-
};
|
|
1530
|
-
}
|
|
1531
|
-
const choice = value.choices[0];
|
|
1532
|
-
if ((choice == null ? void 0 : choice.finish_reason) != null) {
|
|
1533
|
-
finishReason = mapOpenAIFinishReason(choice.finish_reason);
|
|
1534
|
-
}
|
|
1535
|
-
if ((choice == null ? void 0 : choice.text) != null) {
|
|
1536
|
-
controller.enqueue({
|
|
1537
|
-
type: "text-delta",
|
|
1538
|
-
textDelta: choice.text
|
|
1539
|
-
});
|
|
1540
|
-
}
|
|
1541
|
-
const mappedLogprobs = mapOpenAICompletionLogProbs(
|
|
1542
|
-
choice == null ? void 0 : choice.logprobs
|
|
1543
|
-
);
|
|
1544
|
-
if (mappedLogprobs == null ? void 0 : mappedLogprobs.length) {
|
|
1545
|
-
if (logprobs === void 0) logprobs = [];
|
|
1546
|
-
logprobs.push(...mappedLogprobs);
|
|
1547
|
-
}
|
|
1548
|
-
},
|
|
1549
|
-
flush(controller) {
|
|
1550
|
-
controller.enqueue({
|
|
1551
|
-
type: "finish",
|
|
1552
|
-
finishReason,
|
|
1553
|
-
logprobs,
|
|
1554
|
-
usage
|
|
1555
|
-
});
|
|
1556
|
-
}
|
|
1557
|
-
})
|
|
1558
|
-
),
|
|
1559
|
-
rawCall: { rawPrompt, rawSettings },
|
|
1560
|
-
rawResponse: { headers: responseHeaders },
|
|
1561
|
-
warnings,
|
|
1562
|
-
request: { body: JSON.stringify(body) }
|
|
1563
|
-
};
|
|
1564
|
-
}
|
|
1565
|
-
};
|
|
1566
|
-
var openaiCompletionResponseSchema = z3.object({
|
|
1567
|
-
id: z3.string().nullish(),
|
|
1568
|
-
created: z3.number().nullish(),
|
|
1569
|
-
model: z3.string().nullish(),
|
|
1570
|
-
choices: z3.array(
|
|
1571
|
-
z3.object({
|
|
1572
|
-
text: z3.string(),
|
|
1573
|
-
finish_reason: z3.string(),
|
|
1574
|
-
logprobs: z3.object({
|
|
1575
|
-
tokens: z3.array(z3.string()),
|
|
1576
|
-
token_logprobs: z3.array(z3.number()),
|
|
1577
|
-
top_logprobs: z3.array(z3.record(z3.string(), z3.number())).nullable()
|
|
1578
|
-
}).nullish()
|
|
1579
|
-
})
|
|
1580
|
-
),
|
|
1581
|
-
usage: z3.object({
|
|
1582
|
-
prompt_tokens: z3.number(),
|
|
1583
|
-
completion_tokens: z3.number()
|
|
1584
|
-
})
|
|
1585
|
-
});
|
|
1586
|
-
var openaiCompletionChunkSchema = z3.union([
|
|
1587
|
-
z3.object({
|
|
1588
|
-
id: z3.string().nullish(),
|
|
1589
|
-
created: z3.number().nullish(),
|
|
1590
|
-
model: z3.string().nullish(),
|
|
1591
|
-
choices: z3.array(
|
|
1592
|
-
z3.object({
|
|
1593
|
-
text: z3.string(),
|
|
1594
|
-
finish_reason: z3.string().nullish(),
|
|
1595
|
-
index: z3.number(),
|
|
1596
|
-
logprobs: z3.object({
|
|
1597
|
-
tokens: z3.array(z3.string()),
|
|
1598
|
-
token_logprobs: z3.array(z3.number()),
|
|
1599
|
-
top_logprobs: z3.array(z3.record(z3.string(), z3.number())).nullable()
|
|
1600
|
-
}).nullish()
|
|
1601
|
-
})
|
|
1602
|
-
),
|
|
1603
|
-
usage: z3.object({
|
|
1604
|
-
prompt_tokens: z3.number(),
|
|
1605
|
-
completion_tokens: z3.number()
|
|
1606
|
-
}).nullish()
|
|
1607
|
-
}),
|
|
1608
|
-
openaiErrorDataSchema
|
|
1609
|
-
]);
|
|
1610
|
-
|
|
1611
|
-
// src/openai-embedding-model.ts
|
|
1612
|
-
import {
|
|
1613
|
-
combineHeaders as combineHeaders3,
|
|
1614
|
-
createJsonResponseHandler as createJsonResponseHandler3,
|
|
1615
|
-
postJsonToApi as postJsonToApi3
|
|
1616
|
-
} from "@ai-sdk/provider-utils";
|
|
1617
|
-
import { z as z4 } from "zod";
|
|
1618
|
-
var OpenAIEmbeddingModel = class {
|
|
1619
|
-
constructor(modelId, settings, config) {
|
|
1620
|
-
this.specificationVersion = "v1";
|
|
1621
|
-
this.modelId = modelId;
|
|
1622
|
-
this.settings = settings;
|
|
1623
|
-
this.config = config;
|
|
1624
|
-
}
|
|
1625
|
-
get provider() {
|
|
1626
|
-
return this.config.provider;
|
|
1627
|
-
}
|
|
1628
|
-
get maxEmbeddingsPerCall() {
|
|
1629
|
-
var _a15;
|
|
1630
|
-
return (_a15 = this.settings.maxEmbeddingsPerCall) != null ? _a15 : 2048;
|
|
1631
|
-
}
|
|
1632
|
-
get supportsParallelCalls() {
|
|
1633
|
-
var _a15;
|
|
1634
|
-
return (_a15 = this.settings.supportsParallelCalls) != null ? _a15 : true;
|
|
1635
|
-
}
|
|
1636
|
-
async doEmbed({
|
|
1637
|
-
values,
|
|
1638
|
-
headers,
|
|
1639
|
-
abortSignal
|
|
1640
|
-
}) {
|
|
1641
|
-
if (values.length > this.maxEmbeddingsPerCall) {
|
|
1642
|
-
throw new TooManyEmbeddingValuesForCallError({
|
|
1643
|
-
provider: this.provider,
|
|
1644
|
-
modelId: this.modelId,
|
|
1645
|
-
maxEmbeddingsPerCall: this.maxEmbeddingsPerCall,
|
|
1646
|
-
values
|
|
1647
|
-
});
|
|
1648
|
-
}
|
|
1649
|
-
const { responseHeaders, value: response } = await postJsonToApi3({
|
|
1650
|
-
url: this.config.url({
|
|
1651
|
-
path: "/embeddings",
|
|
1652
|
-
modelId: this.modelId
|
|
1653
|
-
}),
|
|
1654
|
-
headers: combineHeaders3(this.config.headers(), headers),
|
|
1655
|
-
body: {
|
|
1656
|
-
model: this.modelId,
|
|
1657
|
-
input: values,
|
|
1658
|
-
encoding_format: "float",
|
|
1659
|
-
dimensions: this.settings.dimensions,
|
|
1660
|
-
user: this.settings.user
|
|
1661
|
-
},
|
|
1662
|
-
failedResponseHandler: openaiFailedResponseHandler,
|
|
1663
|
-
successfulResponseHandler: createJsonResponseHandler3(
|
|
1664
|
-
openaiTextEmbeddingResponseSchema
|
|
1665
|
-
),
|
|
1666
|
-
abortSignal,
|
|
1667
|
-
fetch: this.config.fetch
|
|
1668
|
-
});
|
|
1669
|
-
return {
|
|
1670
|
-
embeddings: response.data.map((item) => item.embedding),
|
|
1671
|
-
usage: response.usage ? { tokens: response.usage.prompt_tokens } : void 0,
|
|
1672
|
-
rawResponse: { headers: responseHeaders }
|
|
1673
|
-
};
|
|
1674
|
-
}
|
|
1675
|
-
};
|
|
1676
|
-
var openaiTextEmbeddingResponseSchema = z4.object({
|
|
1677
|
-
data: z4.array(z4.object({ embedding: z4.array(z4.number()) })),
|
|
1678
|
-
usage: z4.object({ prompt_tokens: z4.number() }).nullish()
|
|
1679
|
-
});
|
|
1680
|
-
|
|
1681
|
-
// src/openai-image-model.ts
|
|
1682
|
-
import {
|
|
1683
|
-
combineHeaders as combineHeaders4,
|
|
1684
|
-
createJsonResponseHandler as createJsonResponseHandler4,
|
|
1685
|
-
postJsonToApi as postJsonToApi4
|
|
1686
|
-
} from "@ai-sdk/provider-utils";
|
|
1687
|
-
import { z as z5 } from "zod";
|
|
1688
|
-
|
|
1689
|
-
// src/openai-image-settings.ts
|
|
1690
|
-
var modelMaxImagesPerCall = {
|
|
1691
|
-
"dall-e-3": 1,
|
|
1692
|
-
"dall-e-2": 10,
|
|
1693
|
-
"gpt-image-1": 10
|
|
1694
|
-
};
|
|
1695
|
-
var hasDefaultResponseFormat = /* @__PURE__ */ new Set(["gpt-image-1"]);
|
|
1696
|
-
|
|
1697
|
-
// src/openai-image-model.ts
|
|
1698
|
-
var OpenAIImageModel = class {
|
|
1699
|
-
constructor(modelId, settings, config) {
|
|
1700
|
-
this.modelId = modelId;
|
|
1701
|
-
this.settings = settings;
|
|
1702
|
-
this.config = config;
|
|
1703
|
-
this.specificationVersion = "v1";
|
|
1704
|
-
}
|
|
1705
|
-
get maxImagesPerCall() {
|
|
1706
|
-
var _a15, _b;
|
|
1707
|
-
return (_b = (_a15 = this.settings.maxImagesPerCall) != null ? _a15 : modelMaxImagesPerCall[this.modelId]) != null ? _b : 1;
|
|
1708
|
-
}
|
|
1709
|
-
get provider() {
|
|
1710
|
-
return this.config.provider;
|
|
1711
|
-
}
|
|
1712
|
-
async doGenerate({
|
|
1713
|
-
prompt,
|
|
1714
|
-
n,
|
|
1715
|
-
size,
|
|
1716
|
-
aspectRatio,
|
|
1717
|
-
seed,
|
|
1718
|
-
providerOptions,
|
|
1719
|
-
headers,
|
|
1720
|
-
abortSignal
|
|
1721
|
-
}) {
|
|
1722
|
-
var _a15, _b, _c, _d;
|
|
1723
|
-
const warnings = [];
|
|
1724
|
-
if (aspectRatio != null) {
|
|
1725
|
-
warnings.push({
|
|
1726
|
-
type: "unsupported-setting",
|
|
1727
|
-
setting: "aspectRatio",
|
|
1728
|
-
details: "This model does not support aspect ratio. Use `size` instead."
|
|
1729
|
-
});
|
|
1730
|
-
}
|
|
1731
|
-
if (seed != null) {
|
|
1732
|
-
warnings.push({ type: "unsupported-setting", setting: "seed" });
|
|
1733
|
-
}
|
|
1734
|
-
const currentDate = (_c = (_b = (_a15 = this.config._internal) == null ? void 0 : _a15.currentDate) == null ? void 0 : _b.call(_a15)) != null ? _c : /* @__PURE__ */ new Date();
|
|
1735
|
-
const { value: response, responseHeaders } = await postJsonToApi4({
|
|
1736
|
-
url: this.config.url({
|
|
1737
|
-
path: "/images/generations",
|
|
1738
|
-
modelId: this.modelId
|
|
1739
|
-
}),
|
|
1740
|
-
headers: combineHeaders4(this.config.headers(), headers),
|
|
1741
|
-
body: {
|
|
1742
|
-
model: this.modelId,
|
|
1743
|
-
prompt,
|
|
1744
|
-
n,
|
|
1745
|
-
size,
|
|
1746
|
-
...(_d = providerOptions.openai) != null ? _d : {},
|
|
1747
|
-
...!hasDefaultResponseFormat.has(this.modelId) ? { response_format: "b64_json" } : {}
|
|
1748
|
-
},
|
|
1749
|
-
failedResponseHandler: openaiFailedResponseHandler,
|
|
1750
|
-
successfulResponseHandler: createJsonResponseHandler4(
|
|
1751
|
-
openaiImageResponseSchema
|
|
1752
|
-
),
|
|
1753
|
-
abortSignal,
|
|
1754
|
-
fetch: this.config.fetch
|
|
1755
|
-
});
|
|
1756
|
-
return {
|
|
1757
|
-
images: response.data.map((item) => item.b64_json),
|
|
1758
|
-
warnings,
|
|
1759
|
-
response: {
|
|
1760
|
-
timestamp: currentDate,
|
|
1761
|
-
modelId: this.modelId,
|
|
1762
|
-
headers: responseHeaders
|
|
1763
|
-
}
|
|
1764
|
-
};
|
|
1765
|
-
}
|
|
1766
|
-
};
|
|
1767
|
-
var openaiImageResponseSchema = z5.object({
|
|
1768
|
-
data: z5.array(z5.object({ b64_json: z5.string() }))
|
|
1769
|
-
});
|
|
1770
|
-
|
|
1771
|
-
// src/openai-transcription-model.ts
|
|
1772
|
-
import {
|
|
1773
|
-
combineHeaders as combineHeaders5,
|
|
1774
|
-
convertBase64ToUint8Array,
|
|
1775
|
-
createJsonResponseHandler as createJsonResponseHandler5,
|
|
1776
|
-
parseProviderOptions,
|
|
1777
|
-
postFormDataToApi
|
|
1778
|
-
} from "@ai-sdk/provider-utils";
|
|
1779
|
-
import { z as z6 } from "zod";
|
|
1780
|
-
var openAIProviderOptionsSchema = z6.object({
|
|
1781
|
-
include: z6.array(z6.string()).nullish(),
|
|
1782
|
-
language: z6.string().nullish(),
|
|
1783
|
-
prompt: z6.string().nullish(),
|
|
1784
|
-
temperature: z6.number().min(0).max(1).nullish().default(0),
|
|
1785
|
-
timestampGranularities: z6.array(z6.enum(["word", "segment"])).nullish().default(["segment"])
|
|
1786
|
-
});
|
|
1787
|
-
var languageMap = {
|
|
1788
|
-
afrikaans: "af",
|
|
1789
|
-
arabic: "ar",
|
|
1790
|
-
armenian: "hy",
|
|
1791
|
-
azerbaijani: "az",
|
|
1792
|
-
belarusian: "be",
|
|
1793
|
-
bosnian: "bs",
|
|
1794
|
-
bulgarian: "bg",
|
|
1795
|
-
catalan: "ca",
|
|
1796
|
-
chinese: "zh",
|
|
1797
|
-
croatian: "hr",
|
|
1798
|
-
czech: "cs",
|
|
1799
|
-
danish: "da",
|
|
1800
|
-
dutch: "nl",
|
|
1801
|
-
english: "en",
|
|
1802
|
-
estonian: "et",
|
|
1803
|
-
finnish: "fi",
|
|
1804
|
-
french: "fr",
|
|
1805
|
-
galician: "gl",
|
|
1806
|
-
german: "de",
|
|
1807
|
-
greek: "el",
|
|
1808
|
-
hebrew: "he",
|
|
1809
|
-
hindi: "hi",
|
|
1810
|
-
hungarian: "hu",
|
|
1811
|
-
icelandic: "is",
|
|
1812
|
-
indonesian: "id",
|
|
1813
|
-
italian: "it",
|
|
1814
|
-
japanese: "ja",
|
|
1815
|
-
kannada: "kn",
|
|
1816
|
-
kazakh: "kk",
|
|
1817
|
-
korean: "ko",
|
|
1818
|
-
latvian: "lv",
|
|
1819
|
-
lithuanian: "lt",
|
|
1820
|
-
macedonian: "mk",
|
|
1821
|
-
malay: "ms",
|
|
1822
|
-
marathi: "mr",
|
|
1823
|
-
maori: "mi",
|
|
1824
|
-
nepali: "ne",
|
|
1825
|
-
norwegian: "no",
|
|
1826
|
-
persian: "fa",
|
|
1827
|
-
polish: "pl",
|
|
1828
|
-
portuguese: "pt",
|
|
1829
|
-
romanian: "ro",
|
|
1830
|
-
russian: "ru",
|
|
1831
|
-
serbian: "sr",
|
|
1832
|
-
slovak: "sk",
|
|
1833
|
-
slovenian: "sl",
|
|
1834
|
-
spanish: "es",
|
|
1835
|
-
swahili: "sw",
|
|
1836
|
-
swedish: "sv",
|
|
1837
|
-
tagalog: "tl",
|
|
1838
|
-
tamil: "ta",
|
|
1839
|
-
thai: "th",
|
|
1840
|
-
turkish: "tr",
|
|
1841
|
-
ukrainian: "uk",
|
|
1842
|
-
urdu: "ur",
|
|
1843
|
-
vietnamese: "vi",
|
|
1844
|
-
welsh: "cy"
|
|
1845
|
-
};
|
|
1846
|
-
var OpenAITranscriptionModel = class {
|
|
1847
|
-
constructor(modelId, config) {
|
|
1848
|
-
this.modelId = modelId;
|
|
1849
|
-
this.config = config;
|
|
1850
|
-
this.specificationVersion = "v1";
|
|
1851
|
-
}
|
|
1852
|
-
get provider() {
|
|
1853
|
-
return this.config.provider;
|
|
1854
|
-
}
|
|
1855
|
-
getArgs({
|
|
1856
|
-
audio,
|
|
1857
|
-
mediaType,
|
|
1858
|
-
providerOptions
|
|
1859
|
-
}) {
|
|
1860
|
-
var _a15, _b, _c, _d, _e;
|
|
1861
|
-
const warnings = [];
|
|
1862
|
-
const openAIOptions = parseProviderOptions({
|
|
1863
|
-
provider: "openai",
|
|
1864
|
-
providerOptions,
|
|
1865
|
-
schema: openAIProviderOptionsSchema
|
|
1866
|
-
});
|
|
1867
|
-
const formData = new FormData();
|
|
1868
|
-
const blob = audio instanceof Uint8Array ? new Blob([audio]) : new Blob([convertBase64ToUint8Array(audio)]);
|
|
1869
|
-
formData.append("model", this.modelId);
|
|
1870
|
-
formData.append("file", new File([blob], "audio", { type: mediaType }));
|
|
1871
|
-
if (openAIOptions) {
|
|
1872
|
-
const transcriptionModelOptions = {
|
|
1873
|
-
include: (_a15 = openAIOptions.include) != null ? _a15 : void 0,
|
|
1874
|
-
language: (_b = openAIOptions.language) != null ? _b : void 0,
|
|
1875
|
-
prompt: (_c = openAIOptions.prompt) != null ? _c : void 0,
|
|
1876
|
-
temperature: (_d = openAIOptions.temperature) != null ? _d : void 0,
|
|
1877
|
-
timestamp_granularities: (_e = openAIOptions.timestampGranularities) != null ? _e : void 0
|
|
1878
|
-
};
|
|
1879
|
-
for (const key in transcriptionModelOptions) {
|
|
1880
|
-
const value = transcriptionModelOptions[key];
|
|
1881
|
-
if (value !== void 0) {
|
|
1882
|
-
formData.append(key, String(value));
|
|
1883
|
-
}
|
|
1884
|
-
}
|
|
1885
|
-
}
|
|
1886
|
-
return {
|
|
1887
|
-
formData,
|
|
1888
|
-
warnings
|
|
1889
|
-
};
|
|
1890
|
-
}
|
|
1891
|
-
async doGenerate(options) {
|
|
1892
|
-
var _a15, _b, _c, _d, _e, _f;
|
|
1893
|
-
const currentDate = (_c = (_b = (_a15 = this.config._internal) == null ? void 0 : _a15.currentDate) == null ? void 0 : _b.call(_a15)) != null ? _c : /* @__PURE__ */ new Date();
|
|
1894
|
-
const { formData, warnings } = this.getArgs(options);
|
|
1895
|
-
const {
|
|
1896
|
-
value: response,
|
|
1897
|
-
responseHeaders,
|
|
1898
|
-
rawValue: rawResponse
|
|
1899
|
-
} = await postFormDataToApi({
|
|
1900
|
-
url: this.config.url({
|
|
1901
|
-
path: "/audio/transcriptions",
|
|
1902
|
-
modelId: this.modelId
|
|
1903
|
-
}),
|
|
1904
|
-
headers: combineHeaders5(this.config.headers(), options.headers),
|
|
1905
|
-
formData,
|
|
1906
|
-
failedResponseHandler: openaiFailedResponseHandler,
|
|
1907
|
-
successfulResponseHandler: createJsonResponseHandler5(
|
|
1908
|
-
openaiTranscriptionResponseSchema
|
|
1909
|
-
),
|
|
1910
|
-
abortSignal: options.abortSignal,
|
|
1911
|
-
fetch: this.config.fetch
|
|
1912
|
-
});
|
|
1913
|
-
const language = response.language != null && response.language in languageMap ? languageMap[response.language] : void 0;
|
|
1914
|
-
return {
|
|
1915
|
-
text: response.text,
|
|
1916
|
-
segments: (_e = (_d = response.words) == null ? void 0 : _d.map((word) => ({
|
|
1917
|
-
text: word.word,
|
|
1918
|
-
startSecond: word.start,
|
|
1919
|
-
endSecond: word.end
|
|
1920
|
-
}))) != null ? _e : [],
|
|
1921
|
-
language,
|
|
1922
|
-
durationInSeconds: (_f = response.duration) != null ? _f : void 0,
|
|
1923
|
-
warnings,
|
|
1924
|
-
response: {
|
|
1925
|
-
timestamp: currentDate,
|
|
1926
|
-
modelId: this.modelId,
|
|
1927
|
-
headers: responseHeaders,
|
|
1928
|
-
body: rawResponse
|
|
1929
|
-
}
|
|
1930
|
-
};
|
|
1931
|
-
}
|
|
1932
|
-
};
|
|
1933
|
-
var openaiTranscriptionResponseSchema = z6.object({
|
|
1934
|
-
text: z6.string(),
|
|
1935
|
-
language: z6.string().nullish(),
|
|
1936
|
-
duration: z6.number().nullish(),
|
|
1937
|
-
words: z6.array(
|
|
1938
|
-
z6.object({
|
|
1939
|
-
word: z6.string(),
|
|
1940
|
-
start: z6.number(),
|
|
1941
|
-
end: z6.number()
|
|
1942
|
-
})
|
|
1943
|
-
).nullish()
|
|
1944
|
-
});
|
|
1945
|
-
|
|
1946
|
-
// src/openai-speech-model.ts
|
|
1947
|
-
import {
|
|
1948
|
-
combineHeaders as combineHeaders6,
|
|
1949
|
-
createBinaryResponseHandler,
|
|
1950
|
-
parseProviderOptions as parseProviderOptions2,
|
|
1951
|
-
postJsonToApi as postJsonToApi5
|
|
1952
|
-
} from "@ai-sdk/provider-utils";
|
|
1953
|
-
import { z as z7 } from "zod";
|
|
1954
|
-
var OpenAIProviderOptionsSchema = z7.object({
|
|
1955
|
-
instructions: z7.string().nullish(),
|
|
1956
|
-
speed: z7.number().min(0.25).max(4).default(1).nullish()
|
|
1957
|
-
});
|
|
1958
|
-
var OpenAISpeechModel = class {
|
|
1959
|
-
constructor(modelId, config) {
|
|
1960
|
-
this.modelId = modelId;
|
|
1961
|
-
this.config = config;
|
|
1962
|
-
this.specificationVersion = "v1";
|
|
1963
|
-
}
|
|
1964
|
-
get provider() {
|
|
1965
|
-
return this.config.provider;
|
|
1966
|
-
}
|
|
1967
|
-
getArgs({
|
|
1968
|
-
text,
|
|
1969
|
-
voice = "alloy",
|
|
1970
|
-
outputFormat = "mp3",
|
|
1971
|
-
speed,
|
|
1972
|
-
instructions,
|
|
1973
|
-
providerOptions
|
|
1974
|
-
}) {
|
|
1975
|
-
const warnings = [];
|
|
1976
|
-
const openAIOptions = parseProviderOptions2({
|
|
1977
|
-
provider: "openai",
|
|
1978
|
-
providerOptions,
|
|
1979
|
-
schema: OpenAIProviderOptionsSchema
|
|
1980
|
-
});
|
|
1981
|
-
const requestBody = {
|
|
1982
|
-
model: this.modelId,
|
|
1983
|
-
input: text,
|
|
1984
|
-
voice,
|
|
1985
|
-
response_format: "mp3",
|
|
1986
|
-
speed,
|
|
1987
|
-
instructions
|
|
1988
|
-
};
|
|
1989
|
-
if (outputFormat) {
|
|
1990
|
-
if (["mp3", "opus", "aac", "flac", "wav", "pcm"].includes(outputFormat)) {
|
|
1991
|
-
requestBody.response_format = outputFormat;
|
|
1992
|
-
} else {
|
|
1993
|
-
warnings.push({
|
|
1994
|
-
type: "unsupported-setting",
|
|
1995
|
-
setting: "outputFormat",
|
|
1996
|
-
details: `Unsupported output format: ${outputFormat}. Using mp3 instead.`
|
|
1997
|
-
});
|
|
1998
|
-
}
|
|
1999
|
-
}
|
|
2000
|
-
if (openAIOptions) {
|
|
2001
|
-
const speechModelOptions = {};
|
|
2002
|
-
for (const key in speechModelOptions) {
|
|
2003
|
-
const value = speechModelOptions[key];
|
|
2004
|
-
if (value !== void 0) {
|
|
2005
|
-
requestBody[key] = value;
|
|
2006
|
-
}
|
|
2007
|
-
}
|
|
2008
|
-
}
|
|
2009
|
-
return {
|
|
2010
|
-
requestBody,
|
|
2011
|
-
warnings
|
|
2012
|
-
};
|
|
2013
|
-
}
|
|
2014
|
-
async doGenerate(options) {
|
|
2015
|
-
var _a15, _b, _c;
|
|
2016
|
-
const currentDate = (_c = (_b = (_a15 = this.config._internal) == null ? void 0 : _a15.currentDate) == null ? void 0 : _b.call(_a15)) != null ? _c : /* @__PURE__ */ new Date();
|
|
2017
|
-
const { requestBody, warnings } = this.getArgs(options);
|
|
2018
|
-
const {
|
|
2019
|
-
value: audio,
|
|
2020
|
-
responseHeaders,
|
|
2021
|
-
rawValue: rawResponse
|
|
2022
|
-
} = await postJsonToApi5({
|
|
2023
|
-
url: this.config.url({
|
|
2024
|
-
path: "/audio/speech",
|
|
2025
|
-
modelId: this.modelId
|
|
2026
|
-
}),
|
|
2027
|
-
headers: combineHeaders6(this.config.headers(), options.headers),
|
|
2028
|
-
body: requestBody,
|
|
2029
|
-
failedResponseHandler: openaiFailedResponseHandler,
|
|
2030
|
-
successfulResponseHandler: createBinaryResponseHandler(),
|
|
2031
|
-
abortSignal: options.abortSignal,
|
|
2032
|
-
fetch: this.config.fetch
|
|
2033
|
-
});
|
|
2034
|
-
return {
|
|
2035
|
-
audio,
|
|
2036
|
-
warnings,
|
|
2037
|
-
request: {
|
|
2038
|
-
body: JSON.stringify(requestBody)
|
|
2039
|
-
},
|
|
2040
|
-
response: {
|
|
2041
|
-
timestamp: currentDate,
|
|
2042
|
-
modelId: this.modelId,
|
|
2043
|
-
headers: responseHeaders,
|
|
2044
|
-
body: rawResponse
|
|
2045
|
-
}
|
|
2046
|
-
};
|
|
2047
|
-
}
|
|
2048
|
-
};
|
|
2049
|
-
|
|
2050
|
-
// src/responses/openai-responses-language-model.ts
|
|
2051
|
-
import {
|
|
2052
|
-
combineHeaders as combineHeaders7,
|
|
2053
|
-
createEventSourceResponseHandler as createEventSourceResponseHandler3,
|
|
2054
|
-
createJsonResponseHandler as createJsonResponseHandler6,
|
|
2055
|
-
generateId as generateId2,
|
|
2056
|
-
parseProviderOptions as parseProviderOptions3,
|
|
2057
|
-
postJsonToApi as postJsonToApi6
|
|
2058
|
-
} from "@ai-sdk/provider-utils";
|
|
2059
|
-
import { z as z12 } from "zod";
|
|
2060
|
-
|
|
2061
|
-
// src/responses/convert-to-openai-responses-messages.ts
|
|
2062
|
-
import { convertUint8ArrayToBase64 as convertUint8ArrayToBase642 } from "@ai-sdk/provider-utils";
|
|
2063
|
-
function convertToOpenAIResponsesMessages({
|
|
2064
|
-
prompt,
|
|
2065
|
-
systemMessageMode
|
|
2066
|
-
}) {
|
|
2067
|
-
const messages = [];
|
|
2068
|
-
const warnings = [];
|
|
2069
|
-
for (const { role, content } of prompt) {
|
|
2070
|
-
switch (role) {
|
|
2071
|
-
case "system": {
|
|
2072
|
-
switch (systemMessageMode) {
|
|
2073
|
-
case "system": {
|
|
2074
|
-
messages.push({ role: "system", content });
|
|
2075
|
-
break;
|
|
2076
|
-
}
|
|
2077
|
-
case "developer": {
|
|
2078
|
-
messages.push({ role: "developer", content });
|
|
2079
|
-
break;
|
|
2080
|
-
}
|
|
2081
|
-
case "remove": {
|
|
2082
|
-
warnings.push({
|
|
2083
|
-
type: "other",
|
|
2084
|
-
message: "system messages are removed for this model"
|
|
2085
|
-
});
|
|
2086
|
-
break;
|
|
2087
|
-
}
|
|
2088
|
-
default: {
|
|
2089
|
-
const _exhaustiveCheck = systemMessageMode;
|
|
2090
|
-
throw new Error(
|
|
2091
|
-
`Unsupported system message mode: ${_exhaustiveCheck}`
|
|
2092
|
-
);
|
|
2093
|
-
}
|
|
2094
|
-
}
|
|
2095
|
-
break;
|
|
2096
|
-
}
|
|
2097
|
-
case "user": {
|
|
2098
|
-
messages.push({
|
|
2099
|
-
role: "user",
|
|
2100
|
-
content: content.map((part, index) => {
|
|
2101
|
-
var _a15, _b, _c, _d;
|
|
2102
|
-
switch (part.type) {
|
|
2103
|
-
case "text": {
|
|
2104
|
-
return { type: "input_text", text: part.text };
|
|
2105
|
-
}
|
|
2106
|
-
case "image": {
|
|
2107
|
-
return {
|
|
2108
|
-
type: "input_image",
|
|
2109
|
-
image_url: part.image instanceof URL ? part.image.toString() : `data:${(_a15 = part.mimeType) != null ? _a15 : "image/jpeg"};base64,${convertUint8ArrayToBase642(part.image)}`,
|
|
2110
|
-
// OpenAI specific extension: image detail
|
|
2111
|
-
detail: (_c = (_b = part.providerMetadata) == null ? void 0 : _b.openai) == null ? void 0 : _c.imageDetail
|
|
2112
|
-
};
|
|
2113
|
-
}
|
|
2114
|
-
case "file": {
|
|
2115
|
-
if (part.data instanceof URL) {
|
|
2116
|
-
throw new UnsupportedFunctionalityError({
|
|
2117
|
-
functionality: "File URLs in user messages"
|
|
2118
|
-
});
|
|
2119
|
-
}
|
|
2120
|
-
switch (part.mimeType) {
|
|
2121
|
-
case "application/pdf": {
|
|
2122
|
-
return {
|
|
2123
|
-
type: "input_file",
|
|
2124
|
-
filename: (_d = part.filename) != null ? _d : `part-${index}.pdf`,
|
|
2125
|
-
file_data: `data:application/pdf;base64,${part.data}`
|
|
2126
|
-
};
|
|
2127
|
-
}
|
|
2128
|
-
default: {
|
|
2129
|
-
throw new UnsupportedFunctionalityError({
|
|
2130
|
-
functionality: "Only PDF files are supported in user messages"
|
|
2131
|
-
});
|
|
2132
|
-
}
|
|
2133
|
-
}
|
|
2134
|
-
}
|
|
2135
|
-
}
|
|
2136
|
-
})
|
|
2137
|
-
});
|
|
2138
|
-
break;
|
|
2139
|
-
}
|
|
2140
|
-
case "assistant": {
|
|
2141
|
-
for (const part of content) {
|
|
2142
|
-
switch (part.type) {
|
|
2143
|
-
case "text": {
|
|
2144
|
-
messages.push({
|
|
2145
|
-
role: "assistant",
|
|
2146
|
-
content: [{ type: "output_text", text: part.text }]
|
|
2147
|
-
});
|
|
2148
|
-
break;
|
|
2149
|
-
}
|
|
2150
|
-
case "tool-call": {
|
|
2151
|
-
messages.push({
|
|
2152
|
-
type: "function_call",
|
|
2153
|
-
call_id: part.toolCallId,
|
|
2154
|
-
name: part.toolName,
|
|
2155
|
-
arguments: JSON.stringify(part.args)
|
|
2156
|
-
});
|
|
2157
|
-
break;
|
|
2158
|
-
}
|
|
2159
|
-
}
|
|
2160
|
-
}
|
|
2161
|
-
break;
|
|
2162
|
-
}
|
|
2163
|
-
case "tool": {
|
|
2164
|
-
for (const part of content) {
|
|
2165
|
-
messages.push({
|
|
2166
|
-
type: "function_call_output",
|
|
2167
|
-
call_id: part.toolCallId,
|
|
2168
|
-
output: JSON.stringify(part.result)
|
|
2169
|
-
});
|
|
2170
|
-
}
|
|
2171
|
-
break;
|
|
2172
|
-
}
|
|
2173
|
-
default: {
|
|
2174
|
-
const _exhaustiveCheck = role;
|
|
2175
|
-
throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
|
|
2176
|
-
}
|
|
2177
|
-
}
|
|
2178
|
-
}
|
|
2179
|
-
return { messages, warnings };
|
|
2180
|
-
}
|
|
2181
|
-
|
|
2182
|
-
// src/responses/map-openai-responses-finish-reason.ts
|
|
2183
|
-
function mapOpenAIResponseFinishReason({
|
|
2184
|
-
finishReason,
|
|
2185
|
-
hasToolCalls
|
|
2186
|
-
}) {
|
|
2187
|
-
switch (finishReason) {
|
|
2188
|
-
case void 0:
|
|
2189
|
-
case null:
|
|
2190
|
-
return hasToolCalls ? "tool-calls" : "stop";
|
|
2191
|
-
case "max_output_tokens":
|
|
2192
|
-
return "length";
|
|
2193
|
-
case "content_filter":
|
|
2194
|
-
return "content-filter";
|
|
2195
|
-
default:
|
|
2196
|
-
return hasToolCalls ? "tool-calls" : "unknown";
|
|
2197
|
-
}
|
|
2198
|
-
}
|
|
2199
|
-
|
|
2200
|
-
// src/tool/code-interpreter.ts
|
|
2201
|
-
import { z as z8 } from "zod";
|
|
2202
|
-
var codeInterpreterArgsSchema = z8.object({
|
|
2203
|
-
container: z8.union([
|
|
2204
|
-
z8.string(),
|
|
2205
|
-
z8.object({
|
|
2206
|
-
fileIds: z8.array(z8.string()).optional()
|
|
2207
|
-
})
|
|
2208
|
-
]).optional()
|
|
2209
|
-
});
|
|
2210
|
-
|
|
2211
|
-
// src/tool/file-search.ts
|
|
2212
|
-
import { z as z9 } from "zod";
|
|
2213
|
-
var comparisonFilterSchema = z9.object({
|
|
2214
|
-
key: z9.string(),
|
|
2215
|
-
type: z9.enum(["eq", "ne", "gt", "gte", "lt", "lte"]),
|
|
2216
|
-
value: z9.union([z9.string(), z9.number(), z9.boolean()])
|
|
2217
|
-
});
|
|
2218
|
-
var compoundFilterSchema = z9.object({
|
|
2219
|
-
type: z9.enum(["and", "or"]),
|
|
2220
|
-
filters: z9.array(
|
|
2221
|
-
z9.union([comparisonFilterSchema, z9.lazy(() => compoundFilterSchema)])
|
|
2222
|
-
)
|
|
2223
|
-
});
|
|
2224
|
-
var filtersSchema = z9.union([comparisonFilterSchema, compoundFilterSchema]);
|
|
2225
|
-
var fileSearchArgsSchema = z9.object({
|
|
2226
|
-
vectorStoreIds: z9.array(z9.string()).optional(),
|
|
2227
|
-
maxNumResults: z9.number().optional(),
|
|
2228
|
-
ranking: z9.object({
|
|
2229
|
-
ranker: z9.enum(["auto", "default-2024-08-21"]).optional()
|
|
2230
|
-
}).optional(),
|
|
2231
|
-
filters: filtersSchema.optional()
|
|
2232
|
-
});
|
|
2233
|
-
|
|
2234
|
-
// src/tool/web-search.ts
|
|
2235
|
-
import { z as z10 } from "zod";
|
|
2236
|
-
var webSearchArgsSchema = z10.object({
|
|
2237
|
-
filters: z10.object({
|
|
2238
|
-
allowedDomains: z10.array(z10.string()).optional()
|
|
2239
|
-
}).optional(),
|
|
2240
|
-
searchContextSize: z10.enum(["low", "medium", "high"]).optional(),
|
|
2241
|
-
userLocation: z10.object({
|
|
2242
|
-
type: z10.literal("approximate"),
|
|
2243
|
-
country: z10.string().optional(),
|
|
2244
|
-
city: z10.string().optional(),
|
|
2245
|
-
region: z10.string().optional(),
|
|
2246
|
-
timezone: z10.string().optional()
|
|
2247
|
-
}).optional()
|
|
2248
|
-
});
|
|
2249
|
-
|
|
2250
|
-
// src/tool/web-search-preview.ts
|
|
2251
|
-
import { z as z11 } from "zod";
|
|
2252
|
-
var webSearchPreviewArgsSchema = z11.object({
|
|
2253
|
-
searchContextSize: z11.enum(["low", "medium", "high"]).optional(),
|
|
2254
|
-
userLocation: z11.object({
|
|
2255
|
-
type: z11.literal("approximate"),
|
|
2256
|
-
country: z11.string().optional(),
|
|
2257
|
-
city: z11.string().optional(),
|
|
2258
|
-
region: z11.string().optional(),
|
|
2259
|
-
timezone: z11.string().optional()
|
|
2260
|
-
}).optional()
|
|
2261
|
-
});
|
|
2262
|
-
|
|
2263
|
-
// src/responses/openai-responses-prepare-tools.ts
|
|
2264
|
-
function prepareResponsesTools({
|
|
2265
|
-
mode,
|
|
2266
|
-
strict
|
|
2267
|
-
}) {
|
|
2268
|
-
var _a15;
|
|
2269
|
-
const tools = ((_a15 = mode.tools) == null ? void 0 : _a15.length) ? mode.tools : void 0;
|
|
2270
|
-
const toolWarnings = [];
|
|
2271
|
-
if (tools == null) {
|
|
2272
|
-
return { tools: void 0, tool_choice: void 0, toolWarnings };
|
|
2273
|
-
}
|
|
2274
|
-
const toolChoice = mode.toolChoice;
|
|
2275
|
-
const openaiTools = [];
|
|
2276
|
-
for (const tool of tools) {
|
|
2277
|
-
switch (tool.type) {
|
|
2278
|
-
case "function":
|
|
2279
|
-
openaiTools.push({
|
|
2280
|
-
type: "function",
|
|
2281
|
-
name: tool.name,
|
|
2282
|
-
description: tool.description,
|
|
2283
|
-
parameters: tool.parameters,
|
|
2284
|
-
strict: strict ? true : void 0
|
|
2285
|
-
});
|
|
2286
|
-
break;
|
|
2287
|
-
case "provider-defined":
|
|
2288
|
-
switch (tool.id) {
|
|
2289
|
-
case "openai.file_search": {
|
|
2290
|
-
const args = fileSearchArgsSchema.parse(tool.args);
|
|
2291
|
-
openaiTools.push({
|
|
2292
|
-
type: "file_search",
|
|
2293
|
-
vector_store_ids: args.vectorStoreIds,
|
|
2294
|
-
max_num_results: args.maxNumResults,
|
|
2295
|
-
ranking_options: args.ranking ? { ranker: args.ranking.ranker } : void 0,
|
|
2296
|
-
filters: args.filters
|
|
2297
|
-
});
|
|
2298
|
-
break;
|
|
2299
|
-
}
|
|
2300
|
-
case "openai.web_search_preview": {
|
|
2301
|
-
const args = webSearchPreviewArgsSchema.parse(tool.args);
|
|
2302
|
-
openaiTools.push({
|
|
2303
|
-
type: "web_search_preview",
|
|
2304
|
-
search_context_size: args.searchContextSize,
|
|
2305
|
-
user_location: args.userLocation
|
|
2306
|
-
});
|
|
2307
|
-
break;
|
|
2308
|
-
}
|
|
2309
|
-
case "openai.web_search": {
|
|
2310
|
-
const args = webSearchArgsSchema.parse(tool.args);
|
|
2311
|
-
openaiTools.push({
|
|
2312
|
-
type: "web_search",
|
|
2313
|
-
filters: args.filters != null ? { allowed_domains: args.filters.allowedDomains } : void 0,
|
|
2314
|
-
search_context_size: args.searchContextSize,
|
|
2315
|
-
user_location: args.userLocation
|
|
2316
|
-
});
|
|
2317
|
-
break;
|
|
2318
|
-
}
|
|
2319
|
-
case "openai.code_interpreter": {
|
|
2320
|
-
const args = codeInterpreterArgsSchema.parse(tool.args);
|
|
2321
|
-
openaiTools.push({
|
|
2322
|
-
type: "code_interpreter",
|
|
2323
|
-
container: args.container == null ? { type: "auto", file_ids: void 0 } : typeof args.container === "string" ? args.container : { type: "auto", file_ids: args.container.fileIds }
|
|
2324
|
-
});
|
|
2325
|
-
break;
|
|
2326
|
-
}
|
|
2327
|
-
default: {
|
|
2328
|
-
toolWarnings.push({ type: "unsupported-tool", tool });
|
|
2329
|
-
break;
|
|
2330
|
-
}
|
|
2331
|
-
}
|
|
2332
|
-
break;
|
|
2333
|
-
default:
|
|
2334
|
-
toolWarnings.push({ type: "unsupported-tool", tool });
|
|
2335
|
-
break;
|
|
2336
|
-
}
|
|
2337
|
-
}
|
|
2338
|
-
if (toolChoice == null) {
|
|
2339
|
-
return { tools: openaiTools, tool_choice: void 0, toolWarnings };
|
|
2340
|
-
}
|
|
2341
|
-
const type = toolChoice.type;
|
|
2342
|
-
switch (type) {
|
|
2343
|
-
case "auto":
|
|
2344
|
-
case "none":
|
|
2345
|
-
case "required":
|
|
2346
|
-
return { tools: openaiTools, tool_choice: type, toolWarnings };
|
|
2347
|
-
case "tool":
|
|
2348
|
-
return {
|
|
2349
|
-
tools: openaiTools,
|
|
2350
|
-
tool_choice: toolChoice.toolName === "code_interpreter" || toolChoice.toolName === "file_search" || toolChoice.toolName === "web_search_preview" || toolChoice.toolName === "web_search" ? { type: toolChoice.toolName } : { type: "function", name: toolChoice.toolName },
|
|
2351
|
-
toolWarnings
|
|
2352
|
-
};
|
|
2353
|
-
default: {
|
|
2354
|
-
const _exhaustiveCheck = type;
|
|
2355
|
-
throw new UnsupportedFunctionalityError({
|
|
2356
|
-
functionality: `Unsupported tool choice type: ${_exhaustiveCheck}`
|
|
2357
|
-
});
|
|
2358
|
-
}
|
|
2359
|
-
}
|
|
2360
|
-
}
|
|
2361
|
-
|
|
2362
|
-
// src/responses/openai-responses-language-model.ts
|
|
2363
|
-
var OpenAIResponsesLanguageModel = class {
|
|
2364
|
-
constructor(modelId, config) {
|
|
2365
|
-
this.specificationVersion = "v1";
|
|
2366
|
-
this.defaultObjectGenerationMode = "json";
|
|
2367
|
-
this.supportsStructuredOutputs = true;
|
|
2368
|
-
this.modelId = modelId;
|
|
2369
|
-
this.config = config;
|
|
2370
|
-
}
|
|
2371
|
-
get provider() {
|
|
2372
|
-
return this.config.provider;
|
|
2373
|
-
}
|
|
2374
|
-
getArgs({
|
|
2375
|
-
mode,
|
|
2376
|
-
maxTokens,
|
|
2377
|
-
temperature,
|
|
2378
|
-
stopSequences,
|
|
2379
|
-
topP,
|
|
2380
|
-
topK,
|
|
2381
|
-
presencePenalty,
|
|
2382
|
-
frequencyPenalty,
|
|
2383
|
-
seed,
|
|
2384
|
-
prompt,
|
|
2385
|
-
providerMetadata,
|
|
2386
|
-
responseFormat
|
|
2387
|
-
}) {
|
|
2388
|
-
var _a15, _b, _c;
|
|
2389
|
-
const warnings = [];
|
|
2390
|
-
const modelConfig = getResponsesModelConfig(this.modelId);
|
|
2391
|
-
const type = mode.type;
|
|
2392
|
-
if (topK != null) {
|
|
2393
|
-
warnings.push({
|
|
2394
|
-
type: "unsupported-setting",
|
|
2395
|
-
setting: "topK"
|
|
2396
|
-
});
|
|
2397
|
-
}
|
|
2398
|
-
if (seed != null) {
|
|
2399
|
-
warnings.push({
|
|
2400
|
-
type: "unsupported-setting",
|
|
2401
|
-
setting: "seed"
|
|
2402
|
-
});
|
|
2403
|
-
}
|
|
2404
|
-
if (presencePenalty != null) {
|
|
2405
|
-
warnings.push({
|
|
2406
|
-
type: "unsupported-setting",
|
|
2407
|
-
setting: "presencePenalty"
|
|
2408
|
-
});
|
|
2409
|
-
}
|
|
2410
|
-
if (frequencyPenalty != null) {
|
|
2411
|
-
warnings.push({
|
|
2412
|
-
type: "unsupported-setting",
|
|
2413
|
-
setting: "frequencyPenalty"
|
|
2414
|
-
});
|
|
2415
|
-
}
|
|
2416
|
-
if (stopSequences != null) {
|
|
2417
|
-
warnings.push({
|
|
2418
|
-
type: "unsupported-setting",
|
|
2419
|
-
setting: "stopSequences"
|
|
2420
|
-
});
|
|
2421
|
-
}
|
|
2422
|
-
const { messages, warnings: messageWarnings } = convertToOpenAIResponsesMessages({
|
|
2423
|
-
prompt,
|
|
2424
|
-
systemMessageMode: modelConfig.systemMessageMode
|
|
2425
|
-
});
|
|
2426
|
-
warnings.push(...messageWarnings);
|
|
2427
|
-
console.log("providerMetadata", JSON.stringify(providerMetadata));
|
|
2428
|
-
const openaiOptions = parseProviderOptions3({
|
|
2429
|
-
provider: "openai",
|
|
2430
|
-
providerOptions: providerMetadata,
|
|
2431
|
-
schema: openaiResponsesProviderOptionsSchema
|
|
2432
|
-
});
|
|
2433
|
-
const isStrict = (_a15 = openaiOptions == null ? void 0 : openaiOptions.strictSchemas) != null ? _a15 : true;
|
|
2434
|
-
console.log("openaiOptions", JSON.stringify(openaiOptions));
|
|
2435
|
-
const baseArgs = {
|
|
2436
|
-
model: this.modelId,
|
|
2437
|
-
input: messages,
|
|
2438
|
-
temperature: (openaiOptions == null ? void 0 : openaiOptions.forceNoTemperature) ? void 0 : temperature,
|
|
2439
|
-
top_p: topP,
|
|
2440
|
-
max_output_tokens: maxTokens,
|
|
2441
|
-
...(responseFormat == null ? void 0 : responseFormat.type) === "json" && {
|
|
2442
|
-
text: {
|
|
2443
|
-
format: responseFormat.schema != null ? {
|
|
2444
|
-
type: "json_schema",
|
|
2445
|
-
strict: isStrict,
|
|
2446
|
-
name: (_b = responseFormat.name) != null ? _b : "response",
|
|
2447
|
-
description: responseFormat.description,
|
|
2448
|
-
schema: responseFormat.schema
|
|
2449
|
-
} : { type: "json_object" }
|
|
2450
|
-
}
|
|
2451
|
-
},
|
|
2452
|
-
// provider options:
|
|
2453
|
-
metadata: openaiOptions == null ? void 0 : openaiOptions.metadata,
|
|
2454
|
-
parallel_tool_calls: openaiOptions == null ? void 0 : openaiOptions.parallelToolCalls,
|
|
2455
|
-
include: openaiOptions == null ? void 0 : openaiOptions.include,
|
|
2456
|
-
previous_response_id: openaiOptions == null ? void 0 : openaiOptions.previousResponseId,
|
|
2457
|
-
store: openaiOptions == null ? void 0 : openaiOptions.store,
|
|
2458
|
-
user: openaiOptions == null ? void 0 : openaiOptions.user,
|
|
2459
|
-
instructions: openaiOptions == null ? void 0 : openaiOptions.instructions,
|
|
2460
|
-
// model-specific settings:
|
|
2461
|
-
...modelConfig.isReasoningModel && ((openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null || (openaiOptions == null ? void 0 : openaiOptions.reasoningSummary) != null) && {
|
|
2462
|
-
reasoning: {
|
|
2463
|
-
...(openaiOptions == null ? void 0 : openaiOptions.reasoningEffort) != null && {
|
|
2464
|
-
effort: openaiOptions.reasoningEffort
|
|
2465
|
-
},
|
|
2466
|
-
...(openaiOptions == null ? void 0 : openaiOptions.reasoningSummary) != null && {
|
|
2467
|
-
summary: openaiOptions.reasoningSummary
|
|
2468
|
-
}
|
|
2469
|
-
}
|
|
2470
|
-
},
|
|
2471
|
-
...modelConfig.requiredAutoTruncation && {
|
|
2472
|
-
truncation: "auto"
|
|
2473
|
-
}
|
|
2474
|
-
};
|
|
2475
|
-
if (modelConfig.isReasoningModel) {
|
|
2476
|
-
if (baseArgs.temperature != null) {
|
|
2477
|
-
baseArgs.temperature = void 0;
|
|
2478
|
-
warnings.push({
|
|
2479
|
-
type: "unsupported-setting",
|
|
2480
|
-
setting: "temperature",
|
|
2481
|
-
details: "temperature is not supported for reasoning models"
|
|
2482
|
-
});
|
|
2483
|
-
}
|
|
2484
|
-
if (baseArgs.top_p != null) {
|
|
2485
|
-
baseArgs.top_p = void 0;
|
|
2486
|
-
warnings.push({
|
|
2487
|
-
type: "unsupported-setting",
|
|
2488
|
-
setting: "topP",
|
|
2489
|
-
details: "topP is not supported for reasoning models"
|
|
2490
|
-
});
|
|
2491
|
-
}
|
|
2492
|
-
}
|
|
2493
|
-
switch (type) {
|
|
2494
|
-
case "regular": {
|
|
2495
|
-
const { tools, tool_choice, toolWarnings } = prepareResponsesTools({
|
|
2496
|
-
mode,
|
|
2497
|
-
strict: isStrict
|
|
2498
|
-
// TODO support provider options on tools
|
|
2499
|
-
});
|
|
2500
|
-
return {
|
|
2501
|
-
args: {
|
|
2502
|
-
...baseArgs,
|
|
2503
|
-
tools,
|
|
2504
|
-
tool_choice
|
|
2505
|
-
},
|
|
2506
|
-
warnings: [...warnings, ...toolWarnings]
|
|
2507
|
-
};
|
|
2508
|
-
}
|
|
2509
|
-
case "object-json": {
|
|
2510
|
-
return {
|
|
2511
|
-
args: {
|
|
2512
|
-
...baseArgs,
|
|
2513
|
-
text: {
|
|
2514
|
-
format: mode.schema != null ? {
|
|
2515
|
-
type: "json_schema",
|
|
2516
|
-
strict: isStrict,
|
|
2517
|
-
name: (_c = mode.name) != null ? _c : "response",
|
|
2518
|
-
description: mode.description,
|
|
2519
|
-
schema: mode.schema
|
|
2520
|
-
} : { type: "json_object" }
|
|
2521
|
-
}
|
|
2522
|
-
},
|
|
2523
|
-
warnings
|
|
2524
|
-
};
|
|
2525
|
-
}
|
|
2526
|
-
case "object-tool": {
|
|
2527
|
-
return {
|
|
2528
|
-
args: {
|
|
2529
|
-
...baseArgs,
|
|
2530
|
-
tool_choice: { type: "function", name: mode.tool.name },
|
|
2531
|
-
tools: [
|
|
2532
|
-
{
|
|
2533
|
-
type: "function",
|
|
2534
|
-
name: mode.tool.name,
|
|
2535
|
-
description: mode.tool.description,
|
|
2536
|
-
parameters: mode.tool.parameters,
|
|
2537
|
-
strict: isStrict
|
|
2538
|
-
}
|
|
2539
|
-
]
|
|
2540
|
-
},
|
|
2541
|
-
warnings
|
|
2542
|
-
};
|
|
2543
|
-
}
|
|
2544
|
-
default: {
|
|
2545
|
-
const _exhaustiveCheck = type;
|
|
2546
|
-
throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
|
|
2547
|
-
}
|
|
2548
|
-
}
|
|
2549
|
-
}
|
|
2550
|
-
async doGenerate(options) {
|
|
2551
|
-
var _a15, _b, _c, _d, _e, _f, _g;
|
|
2552
|
-
const { args: body, warnings } = this.getArgs(options);
|
|
2553
|
-
const {
|
|
2554
|
-
responseHeaders,
|
|
2555
|
-
value: response,
|
|
2556
|
-
rawValue: rawResponse
|
|
2557
|
-
} = await postJsonToApi6({
|
|
2558
|
-
url: this.config.url({
|
|
2559
|
-
path: "/responses",
|
|
2560
|
-
modelId: this.modelId
|
|
2561
|
-
}),
|
|
2562
|
-
headers: combineHeaders7(this.config.headers(), options.headers),
|
|
2563
|
-
body,
|
|
2564
|
-
failedResponseHandler: openaiFailedResponseHandler,
|
|
2565
|
-
successfulResponseHandler: createJsonResponseHandler6(
|
|
2566
|
-
z12.object({
|
|
2567
|
-
id: z12.string(),
|
|
2568
|
-
created_at: z12.number(),
|
|
2569
|
-
model: z12.string(),
|
|
2570
|
-
output: z12.array(
|
|
2571
|
-
z12.discriminatedUnion("type", [
|
|
2572
|
-
z12.object({
|
|
2573
|
-
type: z12.literal("message"),
|
|
2574
|
-
role: z12.literal("assistant"),
|
|
2575
|
-
content: z12.array(
|
|
2576
|
-
z12.object({
|
|
2577
|
-
type: z12.literal("output_text"),
|
|
2578
|
-
text: z12.string(),
|
|
2579
|
-
annotations: z12.array(
|
|
2580
|
-
z12.discriminatedUnion("type", [
|
|
2581
|
-
z12.object({
|
|
2582
|
-
type: z12.literal("url_citation"),
|
|
2583
|
-
start_index: z12.number(),
|
|
2584
|
-
end_index: z12.number(),
|
|
2585
|
-
url: z12.string(),
|
|
2586
|
-
title: z12.string()
|
|
2587
|
-
}),
|
|
2588
|
-
z12.object({
|
|
2589
|
-
type: z12.literal("file_citation"),
|
|
2590
|
-
file_id: z12.string(),
|
|
2591
|
-
filename: z12.string().nullish(),
|
|
2592
|
-
index: z12.number().nullish(),
|
|
2593
|
-
start_index: z12.number().nullish(),
|
|
2594
|
-
end_index: z12.number().nullish(),
|
|
2595
|
-
quote: z12.string().nullish()
|
|
2596
|
-
}),
|
|
2597
|
-
z12.object({
|
|
2598
|
-
type: z12.literal("container_file_citation")
|
|
2599
|
-
})
|
|
2600
|
-
])
|
|
2601
|
-
)
|
|
2602
|
-
})
|
|
2603
|
-
)
|
|
2604
|
-
}),
|
|
2605
|
-
z12.object({
|
|
2606
|
-
type: z12.literal("code_interpreter_call")
|
|
2607
|
-
}),
|
|
2608
|
-
z12.object({
|
|
2609
|
-
type: z12.literal("function_call"),
|
|
2610
|
-
call_id: z12.string(),
|
|
2611
|
-
name: z12.string(),
|
|
2612
|
-
arguments: z12.string()
|
|
2613
|
-
}),
|
|
2614
|
-
z12.object({
|
|
2615
|
-
type: z12.literal("web_search_call"),
|
|
2616
|
-
id: z12.string(),
|
|
2617
|
-
status: z12.string().optional(),
|
|
2618
|
-
action: z12.discriminatedUnion("type", [
|
|
2619
|
-
z12.object({
|
|
2620
|
-
type: z12.literal("search"),
|
|
2621
|
-
query: z12.string().nullish()
|
|
2622
|
-
}),
|
|
2623
|
-
z12.object({
|
|
2624
|
-
type: z12.literal("open_page"),
|
|
2625
|
-
url: z12.string()
|
|
2626
|
-
}),
|
|
2627
|
-
z12.object({
|
|
2628
|
-
type: z12.literal("find"),
|
|
2629
|
-
url: z12.string(),
|
|
2630
|
-
pattern: z12.string()
|
|
2631
|
-
})
|
|
2632
|
-
]).nullish()
|
|
2633
|
-
}),
|
|
2634
|
-
z12.object({
|
|
2635
|
-
type: z12.literal("computer_call"),
|
|
2636
|
-
id: z12.string(),
|
|
2637
|
-
status: z12.string().optional()
|
|
2638
|
-
}),
|
|
2639
|
-
z12.object({
|
|
2640
|
-
type: z12.literal("file_search_call"),
|
|
2641
|
-
id: z12.string(),
|
|
2642
|
-
status: z12.string().optional(),
|
|
2643
|
-
queries: z12.array(z12.string()).nullish(),
|
|
2644
|
-
results: z12.array(
|
|
2645
|
-
z12.object({
|
|
2646
|
-
attributes: z12.object({
|
|
2647
|
-
file_id: z12.string(),
|
|
2648
|
-
filename: z12.string(),
|
|
2649
|
-
score: z12.number(),
|
|
2650
|
-
text: z12.string()
|
|
2651
|
-
})
|
|
2652
|
-
})
|
|
2653
|
-
).nullish()
|
|
2654
|
-
}),
|
|
2655
|
-
z12.object({
|
|
2656
|
-
type: z12.literal("reasoning"),
|
|
2657
|
-
summary: z12.array(
|
|
2658
|
-
z12.object({
|
|
2659
|
-
type: z12.literal("summary_text"),
|
|
2660
|
-
text: z12.string()
|
|
2661
|
-
})
|
|
2662
|
-
)
|
|
2663
|
-
})
|
|
2664
|
-
])
|
|
2665
|
-
),
|
|
2666
|
-
incomplete_details: z12.object({ reason: z12.string() }).nullable(),
|
|
2667
|
-
usage: usageSchema
|
|
2668
|
-
})
|
|
2669
|
-
),
|
|
2670
|
-
abortSignal: options.abortSignal,
|
|
2671
|
-
fetch: this.config.fetch
|
|
2672
|
-
});
|
|
2673
|
-
const outputTextElements = response.output.filter((output) => output.type === "message").flatMap((output) => output.content).filter((content) => content.type === "output_text");
|
|
2674
|
-
const toolCalls = [];
|
|
2675
|
-
for (const output of response.output) {
|
|
2676
|
-
if (output.type === "function_call") {
|
|
2677
|
-
toolCalls.push({
|
|
2678
|
-
toolCallType: "function",
|
|
2679
|
-
toolCallId: output.call_id,
|
|
2680
|
-
toolName: output.name,
|
|
2681
|
-
args: output.arguments
|
|
2682
|
-
});
|
|
2683
|
-
} else if (output.type === "web_search_call") {
|
|
2684
|
-
toolCalls.push({
|
|
2685
|
-
toolCallType: "function",
|
|
2686
|
-
toolCallId: output.id,
|
|
2687
|
-
toolName: "web_search_preview",
|
|
2688
|
-
args: JSON.stringify({ action: output.action })
|
|
2689
|
-
});
|
|
2690
|
-
} else if (output.type === "computer_call") {
|
|
2691
|
-
toolCalls.push({
|
|
2692
|
-
toolCallType: "function",
|
|
2693
|
-
toolCallId: output.id,
|
|
2694
|
-
toolName: "computer_use",
|
|
2695
|
-
args: ""
|
|
2696
|
-
});
|
|
2697
|
-
} else if (output.type === "file_search_call") {
|
|
2698
|
-
toolCalls.push({
|
|
2699
|
-
toolCallType: "function",
|
|
2700
|
-
toolCallId: output.id,
|
|
2701
|
-
toolName: "file_search",
|
|
2702
|
-
args: ""
|
|
2703
|
-
});
|
|
2704
|
-
}
|
|
2705
|
-
}
|
|
2706
|
-
const reasoningSummary = (_b = (_a15 = response.output.find((item) => item.type === "reasoning")) == null ? void 0 : _a15.summary) != null ? _b : null;
|
|
2707
|
-
const allAnnotations = outputTextElements.flatMap((content) => content.annotations);
|
|
2708
|
-
return {
|
|
2709
|
-
text: outputTextElements.map((content) => content.text).join("\n"),
|
|
2710
|
-
sources: outputTextElements.flatMap(
|
|
2711
|
-
(content) => content.annotations.map((annotation) => {
|
|
2712
|
-
var _a16, _b2, _c2, _d2, _e2, _f2, _g2, _h, _i;
|
|
2713
|
-
if (annotation.type === "url_citation") {
|
|
2714
|
-
return {
|
|
2715
|
-
sourceType: "url",
|
|
2716
|
-
id: (_c2 = (_b2 = (_a16 = this.config).generateId) == null ? void 0 : _b2.call(_a16)) != null ? _c2 : generateId2(),
|
|
2717
|
-
url: annotation.url,
|
|
2718
|
-
title: annotation.title
|
|
2719
|
-
};
|
|
2720
|
-
} else if (annotation.type === "file_citation") {
|
|
2721
|
-
return {
|
|
2722
|
-
sourceType: "url",
|
|
2723
|
-
id: (_f2 = (_e2 = (_d2 = this.config).generateId) == null ? void 0 : _e2.call(_d2)) != null ? _f2 : generateId2(),
|
|
2724
|
-
url: `file://${annotation.file_id}`,
|
|
2725
|
-
title: annotation.quote || annotation.filename || "Document"
|
|
2726
|
-
};
|
|
2727
|
-
} else {
|
|
2728
|
-
return {
|
|
2729
|
-
sourceType: "url",
|
|
2730
|
-
id: (_i = (_h = (_g2 = this.config).generateId) == null ? void 0 : _h.call(_g2)) != null ? _i : generateId2(),
|
|
2731
|
-
url: "",
|
|
2732
|
-
title: "Unknown Source"
|
|
2733
|
-
};
|
|
2734
|
-
}
|
|
2735
|
-
})
|
|
2736
|
-
),
|
|
2737
|
-
finishReason: mapOpenAIResponseFinishReason({
|
|
2738
|
-
finishReason: (_c = response.incomplete_details) == null ? void 0 : _c.reason,
|
|
2739
|
-
hasToolCalls: toolCalls.length > 0
|
|
2740
|
-
}),
|
|
2741
|
-
toolCalls: toolCalls.length > 0 ? toolCalls : void 0,
|
|
2742
|
-
reasoning: reasoningSummary ? reasoningSummary.map((summary) => ({
|
|
2743
|
-
type: "text",
|
|
2744
|
-
text: summary.text
|
|
2745
|
-
})) : void 0,
|
|
2746
|
-
usage: {
|
|
2747
|
-
promptTokens: response.usage.input_tokens,
|
|
2748
|
-
completionTokens: response.usage.output_tokens
|
|
2749
|
-
},
|
|
2750
|
-
rawCall: {
|
|
2751
|
-
rawPrompt: void 0,
|
|
2752
|
-
rawSettings: {}
|
|
2753
|
-
},
|
|
2754
|
-
rawResponse: {
|
|
2755
|
-
headers: responseHeaders,
|
|
2756
|
-
body: rawResponse
|
|
2757
|
-
},
|
|
2758
|
-
request: {
|
|
2759
|
-
body: JSON.stringify(body)
|
|
2760
|
-
},
|
|
2761
|
-
response: {
|
|
2762
|
-
id: response.id,
|
|
2763
|
-
timestamp: new Date(response.created_at * 1e3),
|
|
2764
|
-
modelId: response.model
|
|
2765
|
-
},
|
|
2766
|
-
providerMetadata: {
|
|
2767
|
-
openai: {
|
|
2768
|
-
responseId: response.id,
|
|
2769
|
-
cachedPromptTokens: (_e = (_d = response.usage.input_tokens_details) == null ? void 0 : _d.cached_tokens) != null ? _e : null,
|
|
2770
|
-
reasoningTokens: (_g = (_f = response.usage.output_tokens_details) == null ? void 0 : _f.reasoning_tokens) != null ? _g : null
|
|
2771
|
-
}
|
|
2772
|
-
},
|
|
2773
|
-
warnings
|
|
2774
|
-
};
|
|
2775
|
-
}
|
|
2776
|
-
async doStream(options) {
|
|
2777
|
-
const { args: body, warnings } = this.getArgs(options);
|
|
2778
|
-
let response;
|
|
2779
|
-
let responseHeaders;
|
|
2780
|
-
try {
|
|
2781
|
-
const result = await postJsonToApi6({
|
|
2782
|
-
url: this.config.url({
|
|
2783
|
-
path: "/responses",
|
|
2784
|
-
modelId: this.modelId
|
|
2785
|
-
}),
|
|
2786
|
-
headers: combineHeaders7(this.config.headers(), options.headers),
|
|
2787
|
-
body: {
|
|
2788
|
-
...body,
|
|
2789
|
-
stream: true
|
|
2790
|
-
},
|
|
2791
|
-
failedResponseHandler: openaiFailedResponseHandler,
|
|
2792
|
-
successfulResponseHandler: createEventSourceResponseHandler3(
|
|
2793
|
-
openaiResponsesChunkSchema
|
|
2794
|
-
),
|
|
2795
|
-
abortSignal: options.abortSignal,
|
|
2796
|
-
fetch: this.config.fetch
|
|
2797
|
-
});
|
|
2798
|
-
response = result.value;
|
|
2799
|
-
responseHeaders = result.responseHeaders;
|
|
2800
|
-
} catch (error) {
|
|
2801
|
-
console.error("\u274C API request failed:", error);
|
|
2802
|
-
throw error;
|
|
2803
|
-
}
|
|
2804
|
-
const self = this;
|
|
2805
|
-
let finishReason = "unknown";
|
|
2806
|
-
let promptTokens = NaN;
|
|
2807
|
-
let completionTokens = NaN;
|
|
2808
|
-
let cachedPromptTokens = null;
|
|
2809
|
-
let reasoningTokens = null;
|
|
2810
|
-
let responseId = null;
|
|
2811
|
-
const ongoingToolCalls = {};
|
|
2812
|
-
let hasToolCalls = false;
|
|
2813
|
-
return {
|
|
2814
|
-
stream: response.pipeThrough(
|
|
2815
|
-
new TransformStream({
|
|
2816
|
-
transform(chunk, controller) {
|
|
2817
|
-
var _a15, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
|
|
2818
|
-
try {
|
|
2819
|
-
if (!chunk.success) {
|
|
2820
|
-
console.error("\u274C Chunk parsing failed:", chunk.error);
|
|
2821
|
-
finishReason = "error";
|
|
2822
|
-
controller.enqueue({ type: "error", error: chunk.error });
|
|
2823
|
-
return;
|
|
2824
|
-
}
|
|
2825
|
-
const value = chunk.value;
|
|
2826
|
-
if (isResponseOutputItemAddedChunk(value)) {
|
|
2827
|
-
if (value.item.type === "function_call") {
|
|
2828
|
-
ongoingToolCalls[value.output_index] = {
|
|
2829
|
-
toolName: value.item.name,
|
|
2830
|
-
toolCallId: value.item.call_id
|
|
2831
|
-
};
|
|
2832
|
-
controller.enqueue({
|
|
2833
|
-
type: "tool-call-delta",
|
|
2834
|
-
toolCallType: "function",
|
|
2835
|
-
toolCallId: value.item.call_id,
|
|
2836
|
-
toolName: value.item.name,
|
|
2837
|
-
argsTextDelta: value.item.arguments
|
|
2838
|
-
});
|
|
2839
|
-
} else if (value.item.type === "web_search_call") {
|
|
2840
|
-
ongoingToolCalls[value.output_index] = {
|
|
2841
|
-
toolName: "web_search_preview",
|
|
2842
|
-
toolCallId: value.item.id
|
|
2843
|
-
};
|
|
2844
|
-
controller.enqueue({
|
|
2845
|
-
type: "tool-call-delta",
|
|
2846
|
-
toolCallType: "function",
|
|
2847
|
-
toolCallId: value.item.id,
|
|
2848
|
-
toolName: "web_search_preview",
|
|
2849
|
-
argsTextDelta: JSON.stringify({ action: value.item.action })
|
|
2850
|
-
});
|
|
2851
|
-
} else if (value.item.type === "computer_call") {
|
|
2852
|
-
ongoingToolCalls[value.output_index] = {
|
|
2853
|
-
toolName: "computer_use",
|
|
2854
|
-
toolCallId: value.item.id
|
|
2855
|
-
};
|
|
2856
|
-
controller.enqueue({
|
|
2857
|
-
type: "tool-call-delta",
|
|
2858
|
-
toolCallType: "function",
|
|
2859
|
-
toolCallId: value.item.id,
|
|
2860
|
-
toolName: "computer_use",
|
|
2861
|
-
argsTextDelta: ""
|
|
2862
|
-
});
|
|
2863
|
-
} else if (value.item.type === "file_search_call") {
|
|
2864
|
-
ongoingToolCalls[value.output_index] = {
|
|
2865
|
-
toolName: "file_search",
|
|
2866
|
-
toolCallId: value.item.id
|
|
2867
|
-
};
|
|
2868
|
-
controller.enqueue({
|
|
2869
|
-
type: "tool-call-delta",
|
|
2870
|
-
toolCallType: "function",
|
|
2871
|
-
toolCallId: value.item.id,
|
|
2872
|
-
toolName: "file_search",
|
|
2873
|
-
argsTextDelta: ""
|
|
2874
|
-
});
|
|
2875
|
-
}
|
|
2876
|
-
} else if (isResponseFunctionCallArgumentsDeltaChunk(value)) {
|
|
2877
|
-
console.log("\u{1F527} Function call arguments delta:", JSON.stringify(value, null, 2));
|
|
2878
|
-
const toolCall = ongoingToolCalls[value.output_index];
|
|
2879
|
-
if (toolCall != null) {
|
|
2880
|
-
controller.enqueue({
|
|
2881
|
-
type: "tool-call-delta",
|
|
2882
|
-
toolCallType: "function",
|
|
2883
|
-
toolCallId: toolCall.toolCallId,
|
|
2884
|
-
toolName: toolCall.toolName,
|
|
2885
|
-
argsTextDelta: value.delta
|
|
2886
|
-
});
|
|
2887
|
-
}
|
|
2888
|
-
} else if (isResponseCreatedChunk(value)) {
|
|
2889
|
-
responseId = value.response.id;
|
|
2890
|
-
controller.enqueue({
|
|
2891
|
-
type: "response-metadata",
|
|
2892
|
-
id: value.response.id,
|
|
2893
|
-
timestamp: new Date(value.response.created_at * 1e3),
|
|
2894
|
-
modelId: value.response.model
|
|
2895
|
-
});
|
|
2896
|
-
} else if (isTextDeltaChunk(value)) {
|
|
2897
|
-
controller.enqueue({
|
|
2898
|
-
type: "text-delta",
|
|
2899
|
-
textDelta: value.delta
|
|
2900
|
-
});
|
|
2901
|
-
} else if (isResponseReasoningSummaryTextDeltaChunk(value)) {
|
|
2902
|
-
controller.enqueue({
|
|
2903
|
-
type: "reasoning",
|
|
2904
|
-
textDelta: value.delta
|
|
2905
|
-
});
|
|
2906
|
-
} else if (isResponseOutputItemDoneChunk(value)) {
|
|
2907
|
-
if (value.item.type === "function_call") {
|
|
2908
|
-
ongoingToolCalls[value.output_index] = void 0;
|
|
2909
|
-
hasToolCalls = true;
|
|
2910
|
-
controller.enqueue({
|
|
2911
|
-
type: "tool-call",
|
|
2912
|
-
toolCallType: "function",
|
|
2913
|
-
toolCallId: value.item.call_id,
|
|
2914
|
-
toolName: value.item.name,
|
|
2915
|
-
args: value.item.arguments
|
|
2916
|
-
});
|
|
2917
|
-
} else if (value.item.type === "web_search_call") {
|
|
2918
|
-
ongoingToolCalls[value.output_index] = void 0;
|
|
2919
|
-
hasToolCalls = true;
|
|
2920
|
-
controller.enqueue({
|
|
2921
|
-
type: "tool-call",
|
|
2922
|
-
toolCallType: "function",
|
|
2923
|
-
toolCallId: value.item.id,
|
|
2924
|
-
toolName: "web_search_preview",
|
|
2925
|
-
args: JSON.stringify({ action: value.item.action })
|
|
2926
|
-
});
|
|
2927
|
-
} else if (value.item.type === "computer_call") {
|
|
2928
|
-
ongoingToolCalls[value.output_index] = void 0;
|
|
2929
|
-
hasToolCalls = true;
|
|
2930
|
-
controller.enqueue({
|
|
2931
|
-
type: "tool-call",
|
|
2932
|
-
toolCallType: "function",
|
|
2933
|
-
toolCallId: value.item.id,
|
|
2934
|
-
toolName: "computer_use",
|
|
2935
|
-
args: ""
|
|
2936
|
-
});
|
|
2937
|
-
} else if (value.item.type === "file_search_call") {
|
|
2938
|
-
ongoingToolCalls[value.output_index] = void 0;
|
|
2939
|
-
hasToolCalls = true;
|
|
2940
|
-
controller.enqueue({
|
|
2941
|
-
type: "tool-call",
|
|
2942
|
-
toolCallType: "function",
|
|
2943
|
-
toolCallId: value.item.id,
|
|
2944
|
-
toolName: "file_search",
|
|
2945
|
-
args: JSON.stringify({
|
|
2946
|
-
queries: value.item.queries,
|
|
2947
|
-
results: value.item.results
|
|
2948
|
-
})
|
|
2949
|
-
});
|
|
2950
|
-
}
|
|
2951
|
-
} else if (isResponseFinishedChunk(value)) {
|
|
2952
|
-
finishReason = mapOpenAIResponseFinishReason({
|
|
2953
|
-
finishReason: (_a15 = value.response.incomplete_details) == null ? void 0 : _a15.reason,
|
|
2954
|
-
hasToolCalls
|
|
2955
|
-
});
|
|
2956
|
-
promptTokens = value.response.usage.input_tokens;
|
|
2957
|
-
completionTokens = value.response.usage.output_tokens;
|
|
2958
|
-
cachedPromptTokens = (_c = (_b = value.response.usage.input_tokens_details) == null ? void 0 : _b.cached_tokens) != null ? _c : cachedPromptTokens;
|
|
2959
|
-
reasoningTokens = (_e = (_d = value.response.usage.output_tokens_details) == null ? void 0 : _d.reasoning_tokens) != null ? _e : reasoningTokens;
|
|
2960
|
-
} else if (isResponseAnnotationAddedChunk(value)) {
|
|
2961
|
-
try {
|
|
2962
|
-
if (value.annotation.type === "url_citation") {
|
|
2963
|
-
const urlSource = {
|
|
2964
|
-
sourceType: "url",
|
|
2965
|
-
id: (_h = (_g = (_f = self.config).generateId) == null ? void 0 : _g.call(_f)) != null ? _h : generateId2(),
|
|
2966
|
-
url: String(value.annotation.url || ""),
|
|
2967
|
-
title: String(value.annotation.title || "Web Source")
|
|
2968
|
-
};
|
|
2969
|
-
controller.enqueue({
|
|
2970
|
-
type: "source",
|
|
2971
|
-
source: urlSource
|
|
2972
|
-
});
|
|
2973
|
-
} else if (value.annotation.type === "file_citation") {
|
|
2974
|
-
const urlSource = {
|
|
2975
|
-
sourceType: "url",
|
|
2976
|
-
id: (_k = (_j = (_i = self.config).generateId) == null ? void 0 : _j.call(_i)) != null ? _k : generateId2(),
|
|
2977
|
-
url: `file://${value.annotation.file_id}`,
|
|
2978
|
-
title: String(value.annotation.quote || value.annotation.filename || "Document")
|
|
2979
|
-
};
|
|
2980
|
-
controller.enqueue({
|
|
2981
|
-
type: "source",
|
|
2982
|
-
source: urlSource
|
|
2983
|
-
});
|
|
2984
|
-
}
|
|
2985
|
-
} catch (error) {
|
|
2986
|
-
console.error("\u274C Error processing annotation:", error);
|
|
2987
|
-
}
|
|
2988
|
-
} else if (isResponseInProgressChunk(value)) {
|
|
2989
|
-
} else if (isResponseContentPartAddedChunk(value)) {
|
|
2990
|
-
} else if (isResponseOutputTextDoneChunk(value)) {
|
|
2991
|
-
} else if (isResponseContentPartDoneChunk(value)) {
|
|
2992
|
-
} else if (isResponseFileSearchCallInProgressChunk(value)) {
|
|
2993
|
-
} else if (isResponseFileSearchCallSearchingChunk(value)) {
|
|
2994
|
-
} else if (isResponseFileSearchCallCompletedChunk(value)) {
|
|
2995
|
-
} else if (isResponseFileSearchCallQueryAddedChunk(value)) {
|
|
2996
|
-
} else if (isResponseFileSearchCallResultAddedChunk(value)) {
|
|
2997
|
-
} else {
|
|
2998
|
-
console.log("\u2753 Unhandled chunk type:", value.type);
|
|
2999
|
-
}
|
|
3000
|
-
} catch (error) {
|
|
3001
|
-
console.error("\u{1F4A5} FATAL ERROR in chunk processing:", {
|
|
3002
|
-
error: error instanceof Error ? error.message : String(error),
|
|
3003
|
-
stack: error instanceof Error ? error.stack : void 0
|
|
3004
|
-
});
|
|
3005
|
-
finishReason = "error";
|
|
3006
|
-
controller.enqueue({
|
|
3007
|
-
type: "error",
|
|
3008
|
-
error: error instanceof Error ? error : new Error(String(error))
|
|
3009
|
-
});
|
|
3010
|
-
}
|
|
3011
|
-
},
|
|
3012
|
-
flush(controller) {
|
|
3013
|
-
controller.enqueue({
|
|
3014
|
-
type: "finish",
|
|
3015
|
-
finishReason,
|
|
3016
|
-
usage: { promptTokens, completionTokens },
|
|
3017
|
-
...(cachedPromptTokens != null || reasoningTokens != null) && {
|
|
3018
|
-
providerMetadata: {
|
|
3019
|
-
openai: {
|
|
3020
|
-
responseId,
|
|
3021
|
-
cachedPromptTokens,
|
|
3022
|
-
reasoningTokens
|
|
3023
|
-
}
|
|
3024
|
-
}
|
|
3025
|
-
}
|
|
3026
|
-
});
|
|
3027
|
-
}
|
|
3028
|
-
})
|
|
3029
|
-
),
|
|
3030
|
-
rawCall: {
|
|
3031
|
-
rawPrompt: void 0,
|
|
3032
|
-
rawSettings: {}
|
|
3033
|
-
},
|
|
3034
|
-
rawResponse: { headers: responseHeaders },
|
|
3035
|
-
request: { body: JSON.stringify(body) },
|
|
3036
|
-
warnings
|
|
3037
|
-
};
|
|
3038
|
-
}
|
|
3039
|
-
};
|
|
3040
|
-
var usageSchema = z12.object({
|
|
3041
|
-
input_tokens: z12.number(),
|
|
3042
|
-
input_tokens_details: z12.object({ cached_tokens: z12.number().nullish() }).nullish(),
|
|
3043
|
-
output_tokens: z12.number(),
|
|
3044
|
-
output_tokens_details: z12.object({ reasoning_tokens: z12.number().nullish() }).nullish()
|
|
3045
|
-
});
|
|
3046
|
-
var textDeltaChunkSchema = z12.object({
|
|
3047
|
-
type: z12.literal("response.output_text.delta"),
|
|
3048
|
-
delta: z12.string()
|
|
3049
|
-
});
|
|
3050
|
-
var responseFinishedChunkSchema = z12.object({
|
|
3051
|
-
type: z12.enum(["response.completed", "response.incomplete"]),
|
|
3052
|
-
response: z12.object({
|
|
3053
|
-
incomplete_details: z12.object({ reason: z12.string() }).nullish(),
|
|
3054
|
-
usage: usageSchema
|
|
3055
|
-
})
|
|
3056
|
-
});
|
|
3057
|
-
var responseCreatedChunkSchema = z12.object({
|
|
3058
|
-
type: z12.literal("response.created"),
|
|
3059
|
-
response: z12.object({
|
|
3060
|
-
id: z12.string(),
|
|
3061
|
-
created_at: z12.number(),
|
|
3062
|
-
model: z12.string()
|
|
3063
|
-
})
|
|
3064
|
-
});
|
|
3065
|
-
var responseOutputItemDoneSchema = z12.object({
|
|
3066
|
-
type: z12.literal("response.output_item.done"),
|
|
3067
|
-
output_index: z12.number(),
|
|
3068
|
-
item: z12.discriminatedUnion("type", [
|
|
3069
|
-
z12.object({
|
|
3070
|
-
type: z12.literal("message")
|
|
3071
|
-
}),
|
|
3072
|
-
z12.object({
|
|
3073
|
-
type: z12.literal("function_call"),
|
|
3074
|
-
id: z12.string(),
|
|
3075
|
-
call_id: z12.string(),
|
|
3076
|
-
name: z12.string(),
|
|
3077
|
-
arguments: z12.string(),
|
|
3078
|
-
status: z12.literal("completed")
|
|
3079
|
-
}),
|
|
3080
|
-
z12.object({
|
|
3081
|
-
type: z12.literal("web_search_call"),
|
|
3082
|
-
id: z12.string(),
|
|
3083
|
-
status: z12.string(),
|
|
3084
|
-
action: z12.discriminatedUnion("type", [
|
|
3085
|
-
z12.object({
|
|
3086
|
-
type: z12.literal("search"),
|
|
3087
|
-
query: z12.string().nullish()
|
|
3088
|
-
}),
|
|
3089
|
-
z12.object({
|
|
3090
|
-
type: z12.literal("open_page"),
|
|
3091
|
-
url: z12.string()
|
|
3092
|
-
}),
|
|
3093
|
-
z12.object({
|
|
3094
|
-
type: z12.literal("find"),
|
|
3095
|
-
url: z12.string(),
|
|
3096
|
-
pattern: z12.string()
|
|
3097
|
-
})
|
|
3098
|
-
]).nullish()
|
|
3099
|
-
}),
|
|
3100
|
-
z12.object({
|
|
3101
|
-
type: z12.literal("code_interpreter_call")
|
|
3102
|
-
}),
|
|
3103
|
-
z12.object({
|
|
3104
|
-
type: z12.literal("computer_call"),
|
|
3105
|
-
id: z12.string(),
|
|
3106
|
-
status: z12.literal("completed")
|
|
3107
|
-
}),
|
|
3108
|
-
z12.object({
|
|
3109
|
-
type: z12.literal("file_search_call"),
|
|
3110
|
-
id: z12.string(),
|
|
3111
|
-
status: z12.literal("completed"),
|
|
3112
|
-
queries: z12.array(z12.string()).nullish(),
|
|
3113
|
-
results: z12.array(
|
|
3114
|
-
z12.object({
|
|
3115
|
-
attributes: z12.object({
|
|
3116
|
-
file_id: z12.string(),
|
|
3117
|
-
filename: z12.string(),
|
|
3118
|
-
score: z12.number(),
|
|
3119
|
-
text: z12.string()
|
|
3120
|
-
})
|
|
3121
|
-
})
|
|
3122
|
-
).nullish()
|
|
3123
|
-
})
|
|
3124
|
-
])
|
|
3125
|
-
});
|
|
3126
|
-
var responseFunctionCallArgumentsDeltaSchema = z12.object({
|
|
3127
|
-
type: z12.literal("response.function_call_arguments.delta"),
|
|
3128
|
-
item_id: z12.string(),
|
|
3129
|
-
output_index: z12.number(),
|
|
3130
|
-
delta: z12.string()
|
|
3131
|
-
});
|
|
3132
|
-
var responseOutputItemAddedSchema = z12.object({
|
|
3133
|
-
type: z12.literal("response.output_item.added"),
|
|
3134
|
-
output_index: z12.number(),
|
|
3135
|
-
item: z12.discriminatedUnion("type", [
|
|
3136
|
-
z12.object({
|
|
3137
|
-
type: z12.literal("message")
|
|
3138
|
-
}),
|
|
3139
|
-
z12.object({
|
|
3140
|
-
type: z12.literal("function_call"),
|
|
3141
|
-
id: z12.string(),
|
|
3142
|
-
call_id: z12.string(),
|
|
3143
|
-
name: z12.string(),
|
|
3144
|
-
arguments: z12.string()
|
|
3145
|
-
}),
|
|
3146
|
-
z12.object({
|
|
3147
|
-
type: z12.literal("web_search_call"),
|
|
3148
|
-
id: z12.string(),
|
|
3149
|
-
status: z12.string(),
|
|
3150
|
-
action: z12.object({
|
|
3151
|
-
type: z12.literal("search"),
|
|
3152
|
-
query: z12.string().optional()
|
|
3153
|
-
}).nullish()
|
|
3154
|
-
}),
|
|
3155
|
-
z12.object({
|
|
3156
|
-
type: z12.literal("computer_call"),
|
|
3157
|
-
id: z12.string(),
|
|
3158
|
-
status: z12.string()
|
|
3159
|
-
}),
|
|
3160
|
-
z12.object({
|
|
3161
|
-
type: z12.literal("file_search_call"),
|
|
3162
|
-
id: z12.string(),
|
|
3163
|
-
status: z12.string(),
|
|
3164
|
-
queries: z12.array(z12.string()).nullish(),
|
|
3165
|
-
results: z12.array(
|
|
3166
|
-
z12.object({
|
|
3167
|
-
attributes: z12.object({
|
|
3168
|
-
file_id: z12.string(),
|
|
3169
|
-
filename: z12.string(),
|
|
3170
|
-
score: z12.number(),
|
|
3171
|
-
text: z12.string()
|
|
3172
|
-
})
|
|
3173
|
-
})
|
|
3174
|
-
).optional()
|
|
3175
|
-
})
|
|
3176
|
-
])
|
|
3177
|
-
});
|
|
3178
|
-
var responseAnnotationAddedSchema = z12.object({
|
|
3179
|
-
type: z12.literal("response.output_text.annotation.added"),
|
|
3180
|
-
annotation: z12.discriminatedUnion("type", [
|
|
3181
|
-
z12.object({
|
|
3182
|
-
type: z12.literal("url_citation"),
|
|
3183
|
-
url: z12.string(),
|
|
3184
|
-
title: z12.string()
|
|
3185
|
-
}),
|
|
3186
|
-
z12.object({
|
|
3187
|
-
type: z12.literal("file_citation"),
|
|
3188
|
-
file_id: z12.string(),
|
|
3189
|
-
filename: z12.string().nullish(),
|
|
3190
|
-
index: z12.number().nullish(),
|
|
3191
|
-
start_index: z12.number().nullish(),
|
|
3192
|
-
end_index: z12.number().nullish(),
|
|
3193
|
-
quote: z12.string().nullish()
|
|
3194
|
-
})
|
|
3195
|
-
])
|
|
3196
|
-
});
|
|
3197
|
-
var responseReasoningSummaryTextDeltaSchema = z12.object({
|
|
3198
|
-
type: z12.literal("response.reasoning_summary_text.delta"),
|
|
3199
|
-
item_id: z12.string(),
|
|
3200
|
-
output_index: z12.number(),
|
|
3201
|
-
summary_index: z12.number(),
|
|
3202
|
-
delta: z12.string()
|
|
3203
|
-
});
|
|
3204
|
-
var responseInProgressChunkSchema = z12.object({
|
|
3205
|
-
type: z12.literal("response.in_progress"),
|
|
3206
|
-
sequence_number: z12.number(),
|
|
3207
|
-
response: z12.any()
|
|
3208
|
-
});
|
|
3209
|
-
var responseContentPartAddedChunkSchema = z12.object({
|
|
3210
|
-
type: z12.literal("response.content_part.added"),
|
|
3211
|
-
sequence_number: z12.number(),
|
|
3212
|
-
item_id: z12.string(),
|
|
3213
|
-
output_index: z12.number(),
|
|
3214
|
-
content_index: z12.number(),
|
|
3215
|
-
part: z12.any()
|
|
3216
|
-
});
|
|
3217
|
-
var responseOutputTextDoneChunkSchema = z12.object({
|
|
3218
|
-
type: z12.literal("response.output_text.done"),
|
|
3219
|
-
sequence_number: z12.number(),
|
|
3220
|
-
item_id: z12.string(),
|
|
3221
|
-
output_index: z12.number(),
|
|
3222
|
-
content_index: z12.number(),
|
|
3223
|
-
text: z12.string(),
|
|
3224
|
-
logprobs: z12.array(z12.any()).optional()
|
|
3225
|
-
});
|
|
3226
|
-
var responseContentPartDoneChunkSchema = z12.object({
|
|
3227
|
-
type: z12.literal("response.content_part.done"),
|
|
3228
|
-
sequence_number: z12.number(),
|
|
3229
|
-
item_id: z12.string(),
|
|
3230
|
-
output_index: z12.number(),
|
|
3231
|
-
content_index: z12.number(),
|
|
3232
|
-
part: z12.any()
|
|
3233
|
-
});
|
|
3234
|
-
var responseFileSearchCallInProgressChunkSchema = z12.object({
|
|
3235
|
-
type: z12.literal("response.file_search_call.in_progress"),
|
|
3236
|
-
sequence_number: z12.number(),
|
|
3237
|
-
output_index: z12.number(),
|
|
3238
|
-
item_id: z12.string()
|
|
3239
|
-
});
|
|
3240
|
-
var responseFileSearchCallSearchingChunkSchema = z12.object({
|
|
3241
|
-
type: z12.literal("response.file_search_call.searching"),
|
|
3242
|
-
sequence_number: z12.number(),
|
|
3243
|
-
output_index: z12.number(),
|
|
3244
|
-
item_id: z12.string()
|
|
3245
|
-
});
|
|
3246
|
-
var responseFileSearchCallCompletedChunkSchema = z12.object({
|
|
3247
|
-
type: z12.literal("response.file_search_call.completed"),
|
|
3248
|
-
sequence_number: z12.number(),
|
|
3249
|
-
output_index: z12.number(),
|
|
3250
|
-
item_id: z12.string()
|
|
3251
|
-
});
|
|
3252
|
-
var responseFileSearchCallQueryAddedChunkSchema = z12.object({
|
|
3253
|
-
type: z12.literal("response.file_search_call.query.added"),
|
|
3254
|
-
sequence_number: z12.number(),
|
|
3255
|
-
output_index: z12.number(),
|
|
3256
|
-
item_id: z12.string(),
|
|
3257
|
-
query: z12.string()
|
|
3258
|
-
});
|
|
3259
|
-
var responseFileSearchCallResultAddedChunkSchema = z12.object({
|
|
3260
|
-
type: z12.literal("response.file_search_call.result.added"),
|
|
3261
|
-
sequence_number: z12.number(),
|
|
3262
|
-
output_index: z12.number(),
|
|
3263
|
-
item_id: z12.string(),
|
|
3264
|
-
result: z12.any()
|
|
3265
|
-
});
|
|
3266
|
-
var openaiResponsesChunkSchema = z12.union([
|
|
3267
|
-
textDeltaChunkSchema,
|
|
3268
|
-
responseFinishedChunkSchema,
|
|
3269
|
-
responseCreatedChunkSchema,
|
|
3270
|
-
responseOutputItemDoneSchema,
|
|
3271
|
-
responseFunctionCallArgumentsDeltaSchema,
|
|
3272
|
-
responseOutputItemAddedSchema,
|
|
3273
|
-
responseAnnotationAddedSchema,
|
|
3274
|
-
responseReasoningSummaryTextDeltaSchema,
|
|
3275
|
-
responseInProgressChunkSchema,
|
|
3276
|
-
responseContentPartAddedChunkSchema,
|
|
3277
|
-
responseOutputTextDoneChunkSchema,
|
|
3278
|
-
responseContentPartDoneChunkSchema,
|
|
3279
|
-
responseFileSearchCallInProgressChunkSchema,
|
|
3280
|
-
responseFileSearchCallSearchingChunkSchema,
|
|
3281
|
-
responseFileSearchCallCompletedChunkSchema,
|
|
3282
|
-
responseFileSearchCallQueryAddedChunkSchema,
|
|
3283
|
-
responseFileSearchCallResultAddedChunkSchema,
|
|
3284
|
-
z12.object({ type: z12.string() }).passthrough()
|
|
3285
|
-
// fallback for unknown chunks
|
|
3286
|
-
]);
|
|
3287
|
-
function isTextDeltaChunk(chunk) {
|
|
3288
|
-
return chunk.type === "response.output_text.delta";
|
|
3289
|
-
}
|
|
3290
|
-
function isResponseOutputItemDoneChunk(chunk) {
|
|
3291
|
-
return chunk.type === "response.output_item.done";
|
|
3292
|
-
}
|
|
3293
|
-
function isResponseFinishedChunk(chunk) {
|
|
3294
|
-
return chunk.type === "response.completed" || chunk.type === "response.incomplete";
|
|
3295
|
-
}
|
|
3296
|
-
function isResponseCreatedChunk(chunk) {
|
|
3297
|
-
return chunk.type === "response.created";
|
|
3298
|
-
}
|
|
3299
|
-
function isResponseFunctionCallArgumentsDeltaChunk(chunk) {
|
|
3300
|
-
return chunk.type === "response.function_call_arguments.delta";
|
|
3301
|
-
}
|
|
3302
|
-
function isResponseOutputItemAddedChunk(chunk) {
|
|
3303
|
-
return chunk.type === "response.output_item.added";
|
|
3304
|
-
}
|
|
3305
|
-
function isResponseAnnotationAddedChunk(chunk) {
|
|
3306
|
-
return chunk.type === "response.output_text.annotation.added";
|
|
3307
|
-
}
|
|
3308
|
-
/** Type guard: `chunk` is a delta of the reasoning summary text. */
function isResponseReasoningSummaryTextDeltaChunk(chunk) {
  const { type } = chunk;
  return type === "response.reasoning_summary_text.delta";
}
|
|
3311
|
-
/** Type guard: `chunk` reports the response as still in progress. */
function isResponseInProgressChunk(chunk) {
  const { type } = chunk;
  return type === "response.in_progress";
}
|
|
3314
|
-
/** Type guard: `chunk` signals that a content part was added. */
function isResponseContentPartAddedChunk(chunk) {
  const { type } = chunk;
  return type === "response.content_part.added";
}
|
|
3317
|
-
/** Type guard: `chunk` signals that a text output segment finished. */
function isResponseOutputTextDoneChunk(chunk) {
  const { type } = chunk;
  return type === "response.output_text.done";
}
|
|
3320
|
-
/** Type guard: `chunk` signals that a content part finished. */
function isResponseContentPartDoneChunk(chunk) {
  const { type } = chunk;
  return type === "response.content_part.done";
}
|
|
3323
|
-
/** Type guard: `chunk` reports a file-search tool call in progress. */
function isResponseFileSearchCallInProgressChunk(chunk) {
  const { type } = chunk;
  return type === "response.file_search_call.in_progress";
}
|
|
3326
|
-
/** Type guard: `chunk` reports a file-search tool call actively searching. */
function isResponseFileSearchCallSearchingChunk(chunk) {
  const { type } = chunk;
  return type === "response.file_search_call.searching";
}
|
|
3329
|
-
/** Type guard: `chunk` reports a file-search tool call completed. */
function isResponseFileSearchCallCompletedChunk(chunk) {
  const { type } = chunk;
  return type === "response.file_search_call.completed";
}
|
|
3332
|
-
/** Type guard: `chunk` adds a query to a file-search tool call. */
function isResponseFileSearchCallQueryAddedChunk(chunk) {
  const { type } = chunk;
  return type === "response.file_search_call.query.added";
}
|
|
3335
|
-
/** Type guard: `chunk` adds a result to a file-search tool call. */
function isResponseFileSearchCallResultAddedChunk(chunk) {
  const { type } = chunk;
  return type === "response.file_search_call.result.added";
}
|
|
3338
|
-
/**
 * Derives request-shaping configuration from a Responses API model id.
 *
 * - Ids starting with "o" (o1/o3 family) are treated as reasoning models.
 * - The legacy `o1-mini` / `o1-preview` variants do not support system or
 *   developer messages, so system messages are removed for them.
 * - Other "o"-prefixed models map system messages to the "developer" role.
 * - All remaining models are regular chat models using the "system" role.
 *
 * @param {string} modelId - The OpenAI model identifier.
 * @returns {{isReasoningModel: boolean, systemMessageMode: string, requiredAutoTruncation: boolean}}
 */
function getResponsesModelConfig(modelId) {
  // Guard clause: non-"o" ids (e.g. gpt-4o, gpt-4.1) are plain chat models.
  if (!modelId.startsWith("o")) {
    return {
      isReasoningModel: false,
      systemMessageMode: "system",
      requiredAutoTruncation: false
    };
  }
  const isLegacyO1 = modelId.startsWith("o1-mini") || modelId.startsWith("o1-preview");
  return {
    isReasoningModel: true,
    systemMessageMode: isLegacyO1 ? "remove" : "developer",
    requiredAutoTruncation: false
  };
}
|
|
3359
|
-
// Provider-specific options accepted under `providerOptions.openai` for the
// Responses API. Every field is optional: `.nullish()` allows both null and
// undefined, so callers may omit anything.
var openaiResponsesProviderOptionsSchema = z12.object({
  metadata: z12.any().nullish(),
  parallelToolCalls: z12.boolean().nullish(),
  // Extra payloads to include in the API response, mirroring the Responses
  // API `include` request parameter.
  include: z12.array(
    z12.enum([
      "file_search_call.results",
      "code_interpreter_call.output",
      "web_search_call.results",
      "message.input_image.image_url",
      "computer_call_output.output.image_url",
      "reasoning.encrypted_content",
      "message.output_text.logprobs"
    ])
  ).nullish(),
  // Chains this request onto an earlier response for multi-turn state.
  previousResponseId: z12.string().nullish(),
  // When true, the temperature setting is stripped from the request.
  forceNoTemperature: z12.boolean().nullish(),
  store: z12.boolean().nullish(),
  user: z12.string().nullish(),
  reasoningEffort: z12.string().nullish(),
  // Whether JSON schemas for tools/response format are sent in strict mode.
  strictSchemas: z12.boolean().nullish(),
  instructions: z12.string().nullish(),
  reasoningSummary: z12.string().nullish()
});
|
|
3382
|
-
// Public surface of this generated provider bundle: the OpenAI model classes
// plus two image-model helpers consumed by the main entry point.
export {
  OpenAIChatLanguageModel,
  OpenAICompletionLanguageModel,
  OpenAIEmbeddingModel,
  OpenAIImageModel,
  OpenAIResponsesLanguageModel,
  OpenAISpeechModel,
  OpenAITranscriptionModel,
  hasDefaultResponseFormat,
  modelMaxImagesPerCall
};
|
|
3393
|
-
//# sourceMappingURL=index.mjs.map
|