ai 3.0.21 → 3.0.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +42 -1
- package/dist/index.d.ts +42 -1
- package/dist/index.js +104 -177
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +65 -138
- package/dist/index.mjs.map +1 -1
- package/package.json +4 -33
- package/react/dist/index.d.mts +6 -2
- package/react/dist/index.d.ts +6 -2
- package/react/dist/index.js +107 -24
- package/react/dist/index.js.map +1 -1
- package/react/dist/index.mjs +107 -24
- package/react/dist/index.mjs.map +1 -1
- package/rsc/dist/rsc-server.mjs +3 -3
- package/rsc/dist/rsc-server.mjs.map +1 -1
- package/solid/dist/index.d.mts +6 -2
- package/solid/dist/index.d.ts +6 -2
- package/solid/dist/index.js +105 -23
- package/solid/dist/index.js.map +1 -1
- package/solid/dist/index.mjs +105 -23
- package/solid/dist/index.mjs.map +1 -1
- package/svelte/dist/index.d.mts +6 -2
- package/svelte/dist/index.d.ts +6 -2
- package/svelte/dist/index.js +107 -24
- package/svelte/dist/index.js.map +1 -1
- package/svelte/dist/index.mjs +107 -24
- package/svelte/dist/index.mjs.map +1 -1
- package/vue/dist/index.d.mts +6 -2
- package/vue/dist/index.d.ts +6 -2
- package/vue/dist/index.js +105 -23
- package/vue/dist/index.js.map +1 -1
- package/vue/dist/index.mjs +105 -23
- package/vue/dist/index.mjs.map +1 -1
- package/anthropic/dist/index.d.mts +0 -51
- package/anthropic/dist/index.d.ts +0 -51
- package/anthropic/dist/index.js +0 -792
- package/anthropic/dist/index.js.map +0 -1
- package/anthropic/dist/index.mjs +0 -760
- package/anthropic/dist/index.mjs.map +0 -1
- package/google/dist/index.d.mts +0 -47
- package/google/dist/index.d.ts +0 -47
- package/google/dist/index.js +0 -796
- package/google/dist/index.js.map +0 -1
- package/google/dist/index.mjs +0 -764
- package/google/dist/index.mjs.map +0 -1
- package/mistral/dist/index.d.mts +0 -52
- package/mistral/dist/index.d.ts +0 -52
- package/mistral/dist/index.js +0 -763
- package/mistral/dist/index.js.map +0 -1
- package/mistral/dist/index.mjs +0 -731
- package/mistral/dist/index.mjs.map +0 -1
- package/openai/dist/index.d.mts +0 -116
- package/openai/dist/index.d.ts +0 -116
- package/openai/dist/index.js +0 -1143
- package/openai/dist/index.js.map +0 -1
- package/openai/dist/index.mjs +0 -1115
- package/openai/dist/index.mjs.map +0 -1
package/openai/dist/index.js
DELETED
@@ -1,1143 +0,0 @@
|
|
1
|
-
"use strict";
|
2
|
-
var __create = Object.create;
|
3
|
-
var __defProp = Object.defineProperty;
|
4
|
-
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
5
|
-
var __getOwnPropNames = Object.getOwnPropertyNames;
|
6
|
-
var __getProtoOf = Object.getPrototypeOf;
|
7
|
-
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
8
|
-
var __export = (target, all) => {
|
9
|
-
for (var name in all)
|
10
|
-
__defProp(target, name, { get: all[name], enumerable: true });
|
11
|
-
};
|
12
|
-
var __copyProps = (to, from, except, desc) => {
|
13
|
-
if (from && typeof from === "object" || typeof from === "function") {
|
14
|
-
for (let key of __getOwnPropNames(from))
|
15
|
-
if (!__hasOwnProp.call(to, key) && key !== except)
|
16
|
-
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
17
|
-
}
|
18
|
-
return to;
|
19
|
-
};
|
20
|
-
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
21
|
-
// If the importer is in node compatibility mode or this is not an ESM
|
22
|
-
// file that has been converted to a CommonJS file using a Babel-
|
23
|
-
// compatible transform (i.e. "__esModule" has not been set), then set
|
24
|
-
// "default" to the CommonJS "module.exports" for node compatibility.
|
25
|
-
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
26
|
-
mod
|
27
|
-
));
|
28
|
-
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
29
|
-
|
30
|
-
// openai/index.ts
|
31
|
-
var openai_exports = {};
|
32
|
-
__export(openai_exports, {
|
33
|
-
OpenAI: () => OpenAI,
|
34
|
-
openai: () => openai
|
35
|
-
});
|
36
|
-
module.exports = __toCommonJS(openai_exports);
|
37
|
-
|
38
|
-
// spec/util/generate-id.ts
|
39
|
-
var import_non_secure = require("nanoid/non-secure");
|
40
|
-
var generateId = (0, import_non_secure.customAlphabet)(
|
41
|
-
"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
|
42
|
-
7
|
43
|
-
);
|
44
|
-
|
45
|
-
// spec/util/load-api-key.ts
|
46
|
-
var import_provider = require("@ai-sdk/provider");
|
47
|
-
function loadApiKey({
|
48
|
-
apiKey,
|
49
|
-
environmentVariableName,
|
50
|
-
apiKeyParameterName = "apiKey",
|
51
|
-
description
|
52
|
-
}) {
|
53
|
-
if (typeof apiKey === "string") {
|
54
|
-
return apiKey;
|
55
|
-
}
|
56
|
-
if (apiKey != null) {
|
57
|
-
throw new import_provider.LoadAPIKeyError({
|
58
|
-
message: `${description} API key must be a string.`
|
59
|
-
});
|
60
|
-
}
|
61
|
-
if (typeof process === "undefined") {
|
62
|
-
throw new import_provider.LoadAPIKeyError({
|
63
|
-
message: `${description} API key is missing. Pass it using the '${apiKeyParameterName}' parameter. Environment variables is not supported in this environment.`
|
64
|
-
});
|
65
|
-
}
|
66
|
-
apiKey = process.env[environmentVariableName];
|
67
|
-
if (apiKey == null) {
|
68
|
-
throw new import_provider.LoadAPIKeyError({
|
69
|
-
message: `${description} API key is missing. Pass it using the '${apiKeyParameterName}' parameter or the ${environmentVariableName} environment variable.`
|
70
|
-
});
|
71
|
-
}
|
72
|
-
if (typeof apiKey !== "string") {
|
73
|
-
throw new import_provider.LoadAPIKeyError({
|
74
|
-
message: `${description} API key must be a string. The value of the ${environmentVariableName} environment variable is not a string.`
|
75
|
-
});
|
76
|
-
}
|
77
|
-
return apiKey;
|
78
|
-
}
|
79
|
-
|
80
|
-
// spec/util/parse-json.ts
|
81
|
-
var import_provider3 = require("@ai-sdk/provider");
|
82
|
-
var import_secure_json_parse = __toESM(require("secure-json-parse"));
|
83
|
-
|
84
|
-
// spec/util/validate-types.ts
|
85
|
-
var import_provider2 = require("@ai-sdk/provider");
|
86
|
-
function validateTypes({
|
87
|
-
value,
|
88
|
-
schema
|
89
|
-
}) {
|
90
|
-
try {
|
91
|
-
return schema.parse(value);
|
92
|
-
} catch (error) {
|
93
|
-
throw new import_provider2.TypeValidationError({ value, cause: error });
|
94
|
-
}
|
95
|
-
}
|
96
|
-
function safeValidateTypes({
|
97
|
-
value,
|
98
|
-
schema
|
99
|
-
}) {
|
100
|
-
try {
|
101
|
-
const validationResult = schema.safeParse(value);
|
102
|
-
if (validationResult.success) {
|
103
|
-
return {
|
104
|
-
success: true,
|
105
|
-
value: validationResult.data
|
106
|
-
};
|
107
|
-
}
|
108
|
-
return {
|
109
|
-
success: false,
|
110
|
-
error: new import_provider2.TypeValidationError({
|
111
|
-
value,
|
112
|
-
cause: validationResult.error
|
113
|
-
})
|
114
|
-
};
|
115
|
-
} catch (error) {
|
116
|
-
return {
|
117
|
-
success: false,
|
118
|
-
error: import_provider2.TypeValidationError.isTypeValidationError(error) ? error : new import_provider2.TypeValidationError({ value, cause: error })
|
119
|
-
};
|
120
|
-
}
|
121
|
-
}
|
122
|
-
|
123
|
-
// spec/util/parse-json.ts
|
124
|
-
function parseJSON({
|
125
|
-
text,
|
126
|
-
schema
|
127
|
-
}) {
|
128
|
-
try {
|
129
|
-
const value = import_secure_json_parse.default.parse(text);
|
130
|
-
if (schema == null) {
|
131
|
-
return value;
|
132
|
-
}
|
133
|
-
return validateTypes({ value, schema });
|
134
|
-
} catch (error) {
|
135
|
-
if (import_provider3.JSONParseError.isJSONParseError(error) || import_provider3.TypeValidationError.isTypeValidationError(error)) {
|
136
|
-
throw error;
|
137
|
-
}
|
138
|
-
throw new import_provider3.JSONParseError({ text, cause: error });
|
139
|
-
}
|
140
|
-
}
|
141
|
-
function safeParseJSON({
|
142
|
-
text,
|
143
|
-
schema
|
144
|
-
}) {
|
145
|
-
try {
|
146
|
-
const value = import_secure_json_parse.default.parse(text);
|
147
|
-
if (schema == null) {
|
148
|
-
return {
|
149
|
-
success: true,
|
150
|
-
value
|
151
|
-
};
|
152
|
-
}
|
153
|
-
return safeValidateTypes({ value, schema });
|
154
|
-
} catch (error) {
|
155
|
-
return {
|
156
|
-
success: false,
|
157
|
-
error: import_provider3.JSONParseError.isJSONParseError(error) ? error : new import_provider3.JSONParseError({ text, cause: error })
|
158
|
-
};
|
159
|
-
}
|
160
|
-
}
|
161
|
-
function isParseableJson(input) {
|
162
|
-
try {
|
163
|
-
import_secure_json_parse.default.parse(input);
|
164
|
-
return true;
|
165
|
-
} catch (e) {
|
166
|
-
return false;
|
167
|
-
}
|
168
|
-
}
|
169
|
-
|
170
|
-
// spec/util/post-to-api.ts
|
171
|
-
var import_provider4 = require("@ai-sdk/provider");
|
172
|
-
var postJsonToApi = async ({
|
173
|
-
url,
|
174
|
-
headers,
|
175
|
-
body,
|
176
|
-
failedResponseHandler,
|
177
|
-
successfulResponseHandler,
|
178
|
-
abortSignal
|
179
|
-
}) => postToApi({
|
180
|
-
url,
|
181
|
-
headers: {
|
182
|
-
...headers,
|
183
|
-
"Content-Type": "application/json"
|
184
|
-
},
|
185
|
-
body: {
|
186
|
-
content: JSON.stringify(body),
|
187
|
-
values: body
|
188
|
-
},
|
189
|
-
failedResponseHandler,
|
190
|
-
successfulResponseHandler,
|
191
|
-
abortSignal
|
192
|
-
});
|
193
|
-
var postToApi = async ({
|
194
|
-
url,
|
195
|
-
headers = {},
|
196
|
-
body,
|
197
|
-
successfulResponseHandler,
|
198
|
-
failedResponseHandler,
|
199
|
-
abortSignal
|
200
|
-
}) => {
|
201
|
-
try {
|
202
|
-
const definedHeaders = Object.fromEntries(
|
203
|
-
Object.entries(headers).filter(([_key, value]) => value != null)
|
204
|
-
);
|
205
|
-
const response = await fetch(url, {
|
206
|
-
method: "POST",
|
207
|
-
headers: definedHeaders,
|
208
|
-
body: body.content,
|
209
|
-
signal: abortSignal
|
210
|
-
});
|
211
|
-
if (!response.ok) {
|
212
|
-
try {
|
213
|
-
throw await failedResponseHandler({
|
214
|
-
response,
|
215
|
-
url,
|
216
|
-
requestBodyValues: body.values
|
217
|
-
});
|
218
|
-
} catch (error) {
|
219
|
-
if (error instanceof Error) {
|
220
|
-
if (error.name === "AbortError" || import_provider4.APICallError.isAPICallError(error)) {
|
221
|
-
throw error;
|
222
|
-
}
|
223
|
-
}
|
224
|
-
throw new import_provider4.APICallError({
|
225
|
-
message: "Failed to process error response",
|
226
|
-
cause: error,
|
227
|
-
statusCode: response.status,
|
228
|
-
url,
|
229
|
-
requestBodyValues: body.values
|
230
|
-
});
|
231
|
-
}
|
232
|
-
}
|
233
|
-
try {
|
234
|
-
return await successfulResponseHandler({
|
235
|
-
response,
|
236
|
-
url,
|
237
|
-
requestBodyValues: body.values
|
238
|
-
});
|
239
|
-
} catch (error) {
|
240
|
-
if (error instanceof Error) {
|
241
|
-
if (error.name === "AbortError" || import_provider4.APICallError.isAPICallError(error)) {
|
242
|
-
throw error;
|
243
|
-
}
|
244
|
-
}
|
245
|
-
throw new import_provider4.APICallError({
|
246
|
-
message: "Failed to process successful response",
|
247
|
-
cause: error,
|
248
|
-
statusCode: response.status,
|
249
|
-
url,
|
250
|
-
requestBodyValues: body.values
|
251
|
-
});
|
252
|
-
}
|
253
|
-
} catch (error) {
|
254
|
-
if (error instanceof Error) {
|
255
|
-
if (error.name === "AbortError") {
|
256
|
-
throw error;
|
257
|
-
}
|
258
|
-
}
|
259
|
-
if (error instanceof TypeError && error.message === "fetch failed") {
|
260
|
-
const cause = error.cause;
|
261
|
-
if (cause != null) {
|
262
|
-
throw new import_provider4.APICallError({
|
263
|
-
message: `Cannot connect to API: ${cause.message}`,
|
264
|
-
cause,
|
265
|
-
url,
|
266
|
-
requestBodyValues: body.values,
|
267
|
-
isRetryable: true
|
268
|
-
// retry when network error
|
269
|
-
});
|
270
|
-
}
|
271
|
-
}
|
272
|
-
throw error;
|
273
|
-
}
|
274
|
-
};
|
275
|
-
|
276
|
-
// spec/util/response-handler.ts
|
277
|
-
var import_provider5 = require("@ai-sdk/provider");
|
278
|
-
var import_stream = require("eventsource-parser/stream");
|
279
|
-
var createJsonErrorResponseHandler = ({
|
280
|
-
errorSchema,
|
281
|
-
errorToMessage,
|
282
|
-
isRetryable
|
283
|
-
}) => async ({ response, url, requestBodyValues }) => {
|
284
|
-
const responseBody = await response.text();
|
285
|
-
if (responseBody.trim() === "") {
|
286
|
-
return new import_provider5.APICallError({
|
287
|
-
message: response.statusText,
|
288
|
-
url,
|
289
|
-
requestBodyValues,
|
290
|
-
statusCode: response.status,
|
291
|
-
responseBody,
|
292
|
-
isRetryable: isRetryable == null ? void 0 : isRetryable(response)
|
293
|
-
});
|
294
|
-
}
|
295
|
-
try {
|
296
|
-
const parsedError = parseJSON({
|
297
|
-
text: responseBody,
|
298
|
-
schema: errorSchema
|
299
|
-
});
|
300
|
-
return new import_provider5.APICallError({
|
301
|
-
message: errorToMessage(parsedError),
|
302
|
-
url,
|
303
|
-
requestBodyValues,
|
304
|
-
statusCode: response.status,
|
305
|
-
responseBody,
|
306
|
-
data: parsedError,
|
307
|
-
isRetryable: isRetryable == null ? void 0 : isRetryable(response, parsedError)
|
308
|
-
});
|
309
|
-
} catch (parseError) {
|
310
|
-
return new import_provider5.APICallError({
|
311
|
-
message: response.statusText,
|
312
|
-
url,
|
313
|
-
requestBodyValues,
|
314
|
-
statusCode: response.status,
|
315
|
-
responseBody,
|
316
|
-
isRetryable: isRetryable == null ? void 0 : isRetryable(response)
|
317
|
-
});
|
318
|
-
}
|
319
|
-
};
|
320
|
-
var createEventSourceResponseHandler = (chunkSchema) => async ({ response }) => {
|
321
|
-
if (response.body == null) {
|
322
|
-
throw new import_provider5.NoResponseBodyError();
|
323
|
-
}
|
324
|
-
return response.body.pipeThrough(new TextDecoderStream()).pipeThrough(new import_stream.EventSourceParserStream()).pipeThrough(
|
325
|
-
new TransformStream({
|
326
|
-
transform({ data }, controller) {
|
327
|
-
if (data === "[DONE]") {
|
328
|
-
return;
|
329
|
-
}
|
330
|
-
controller.enqueue(
|
331
|
-
safeParseJSON({
|
332
|
-
text: data,
|
333
|
-
schema: chunkSchema
|
334
|
-
})
|
335
|
-
);
|
336
|
-
}
|
337
|
-
})
|
338
|
-
);
|
339
|
-
};
|
340
|
-
var createJsonResponseHandler = (responseSchema) => async ({ response, url, requestBodyValues }) => {
|
341
|
-
const responseBody = await response.text();
|
342
|
-
const parsedResult = safeParseJSON({
|
343
|
-
text: responseBody,
|
344
|
-
schema: responseSchema
|
345
|
-
});
|
346
|
-
if (!parsedResult.success) {
|
347
|
-
throw new import_provider5.APICallError({
|
348
|
-
message: "Invalid JSON response",
|
349
|
-
cause: parsedResult.error,
|
350
|
-
statusCode: response.status,
|
351
|
-
responseBody,
|
352
|
-
url,
|
353
|
-
requestBodyValues
|
354
|
-
});
|
355
|
-
}
|
356
|
-
return parsedResult.value;
|
357
|
-
};
|
358
|
-
|
359
|
-
// spec/util/scale.ts
|
360
|
-
function scale({
|
361
|
-
inputMin = 0,
|
362
|
-
inputMax = 1,
|
363
|
-
outputMin,
|
364
|
-
outputMax,
|
365
|
-
value
|
366
|
-
}) {
|
367
|
-
if (value === void 0) {
|
368
|
-
return void 0;
|
369
|
-
}
|
370
|
-
const inputRange = inputMax - inputMin;
|
371
|
-
const outputRange = outputMax - outputMin;
|
372
|
-
return (value - inputMin) * outputRange / inputRange + outputMin;
|
373
|
-
}
|
374
|
-
|
375
|
-
// spec/util/uint8-utils.ts
|
376
|
-
function convertUint8ArrayToBase64(array) {
|
377
|
-
let latin1string = "";
|
378
|
-
for (let i = 0; i < array.length; i++) {
|
379
|
-
latin1string += String.fromCodePoint(array[i]);
|
380
|
-
}
|
381
|
-
return globalThis.btoa(latin1string);
|
382
|
-
}
|
383
|
-
|
384
|
-
// openai/openai-chat-language-model.ts
|
385
|
-
var import_provider6 = require("@ai-sdk/provider");
|
386
|
-
var import_zod2 = require("zod");
|
387
|
-
|
388
|
-
// openai/convert-to-openai-chat-messages.ts
|
389
|
-
function convertToOpenAIChatMessages(prompt) {
|
390
|
-
const messages = [];
|
391
|
-
for (const { role, content } of prompt) {
|
392
|
-
switch (role) {
|
393
|
-
case "system": {
|
394
|
-
messages.push({ role: "system", content });
|
395
|
-
break;
|
396
|
-
}
|
397
|
-
case "user": {
|
398
|
-
messages.push({
|
399
|
-
role: "user",
|
400
|
-
content: content.map((part) => {
|
401
|
-
var _a;
|
402
|
-
switch (part.type) {
|
403
|
-
case "text": {
|
404
|
-
return { type: "text", text: part.text };
|
405
|
-
}
|
406
|
-
case "image": {
|
407
|
-
return {
|
408
|
-
type: "image_url",
|
409
|
-
image_url: {
|
410
|
-
url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${convertUint8ArrayToBase64(part.image)}`
|
411
|
-
}
|
412
|
-
};
|
413
|
-
}
|
414
|
-
}
|
415
|
-
})
|
416
|
-
});
|
417
|
-
break;
|
418
|
-
}
|
419
|
-
case "assistant": {
|
420
|
-
let text = "";
|
421
|
-
const toolCalls = [];
|
422
|
-
for (const part of content) {
|
423
|
-
switch (part.type) {
|
424
|
-
case "text": {
|
425
|
-
text += part.text;
|
426
|
-
break;
|
427
|
-
}
|
428
|
-
case "tool-call": {
|
429
|
-
toolCalls.push({
|
430
|
-
id: part.toolCallId,
|
431
|
-
type: "function",
|
432
|
-
function: {
|
433
|
-
name: part.toolName,
|
434
|
-
arguments: JSON.stringify(part.args)
|
435
|
-
}
|
436
|
-
});
|
437
|
-
break;
|
438
|
-
}
|
439
|
-
default: {
|
440
|
-
const _exhaustiveCheck = part;
|
441
|
-
throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
|
442
|
-
}
|
443
|
-
}
|
444
|
-
}
|
445
|
-
messages.push({
|
446
|
-
role: "assistant",
|
447
|
-
content: text,
|
448
|
-
tool_calls: toolCalls.length > 0 ? toolCalls : void 0
|
449
|
-
});
|
450
|
-
break;
|
451
|
-
}
|
452
|
-
case "tool": {
|
453
|
-
for (const toolResponse of content) {
|
454
|
-
messages.push({
|
455
|
-
role: "tool",
|
456
|
-
tool_call_id: toolResponse.toolCallId,
|
457
|
-
content: JSON.stringify(toolResponse.result)
|
458
|
-
});
|
459
|
-
}
|
460
|
-
break;
|
461
|
-
}
|
462
|
-
default: {
|
463
|
-
const _exhaustiveCheck = role;
|
464
|
-
throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
|
465
|
-
}
|
466
|
-
}
|
467
|
-
}
|
468
|
-
return messages;
|
469
|
-
}
|
470
|
-
|
471
|
-
// openai/map-openai-finish-reason.ts
|
472
|
-
function mapOpenAIFinishReason(finishReason) {
|
473
|
-
switch (finishReason) {
|
474
|
-
case "stop":
|
475
|
-
return "stop";
|
476
|
-
case "length":
|
477
|
-
return "length";
|
478
|
-
case "content_filter":
|
479
|
-
return "content-filter";
|
480
|
-
case "function_call":
|
481
|
-
case "tool_calls":
|
482
|
-
return "tool-calls";
|
483
|
-
default:
|
484
|
-
return "other";
|
485
|
-
}
|
486
|
-
}
|
487
|
-
|
488
|
-
// openai/openai-error.ts
|
489
|
-
var import_zod = require("zod");
|
490
|
-
var openAIErrorDataSchema = import_zod.z.object({
|
491
|
-
error: import_zod.z.object({
|
492
|
-
message: import_zod.z.string(),
|
493
|
-
type: import_zod.z.string(),
|
494
|
-
param: import_zod.z.any().nullable(),
|
495
|
-
code: import_zod.z.string().nullable()
|
496
|
-
})
|
497
|
-
});
|
498
|
-
var openaiFailedResponseHandler = createJsonErrorResponseHandler({
|
499
|
-
errorSchema: openAIErrorDataSchema,
|
500
|
-
errorToMessage: (data) => data.error.message
|
501
|
-
});
|
502
|
-
|
503
|
-
// openai/openai-chat-language-model.ts
|
504
|
-
var OpenAIChatLanguageModel = class {
|
505
|
-
constructor(modelId, settings, config) {
|
506
|
-
this.specificationVersion = "v1";
|
507
|
-
this.defaultObjectGenerationMode = "tool";
|
508
|
-
this.modelId = modelId;
|
509
|
-
this.settings = settings;
|
510
|
-
this.config = config;
|
511
|
-
}
|
512
|
-
get provider() {
|
513
|
-
return this.config.provider;
|
514
|
-
}
|
515
|
-
getArgs({
|
516
|
-
mode,
|
517
|
-
prompt,
|
518
|
-
maxTokens,
|
519
|
-
temperature,
|
520
|
-
topP,
|
521
|
-
frequencyPenalty,
|
522
|
-
presencePenalty,
|
523
|
-
seed
|
524
|
-
}) {
|
525
|
-
var _a;
|
526
|
-
const type = mode.type;
|
527
|
-
const baseArgs = {
|
528
|
-
// model id:
|
529
|
-
model: this.modelId,
|
530
|
-
// model specific settings:
|
531
|
-
logit_bias: this.settings.logitBias,
|
532
|
-
user: this.settings.user,
|
533
|
-
// standardized settings:
|
534
|
-
max_tokens: maxTokens,
|
535
|
-
temperature: scale({
|
536
|
-
value: temperature,
|
537
|
-
outputMin: 0,
|
538
|
-
outputMax: 2
|
539
|
-
}),
|
540
|
-
top_p: topP,
|
541
|
-
frequency_penalty: scale({
|
542
|
-
value: frequencyPenalty,
|
543
|
-
inputMin: -1,
|
544
|
-
inputMax: 1,
|
545
|
-
outputMin: -2,
|
546
|
-
outputMax: 2
|
547
|
-
}),
|
548
|
-
presence_penalty: scale({
|
549
|
-
value: presencePenalty,
|
550
|
-
inputMin: -1,
|
551
|
-
inputMax: 1,
|
552
|
-
outputMin: -2,
|
553
|
-
outputMax: 2
|
554
|
-
}),
|
555
|
-
seed,
|
556
|
-
// messages:
|
557
|
-
messages: convertToOpenAIChatMessages(prompt)
|
558
|
-
};
|
559
|
-
switch (type) {
|
560
|
-
case "regular": {
|
561
|
-
const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
|
562
|
-
return {
|
563
|
-
...baseArgs,
|
564
|
-
tools: tools == null ? void 0 : tools.map((tool) => ({
|
565
|
-
type: "function",
|
566
|
-
function: {
|
567
|
-
name: tool.name,
|
568
|
-
description: tool.description,
|
569
|
-
parameters: tool.parameters
|
570
|
-
}
|
571
|
-
}))
|
572
|
-
};
|
573
|
-
}
|
574
|
-
case "object-json": {
|
575
|
-
return {
|
576
|
-
...baseArgs,
|
577
|
-
response_format: { type: "json_object" }
|
578
|
-
};
|
579
|
-
}
|
580
|
-
case "object-tool": {
|
581
|
-
return {
|
582
|
-
...baseArgs,
|
583
|
-
tool_choice: { type: "function", function: { name: mode.tool.name } },
|
584
|
-
tools: [
|
585
|
-
{
|
586
|
-
type: "function",
|
587
|
-
function: {
|
588
|
-
name: mode.tool.name,
|
589
|
-
description: mode.tool.description,
|
590
|
-
parameters: mode.tool.parameters
|
591
|
-
}
|
592
|
-
}
|
593
|
-
]
|
594
|
-
};
|
595
|
-
}
|
596
|
-
case "object-grammar": {
|
597
|
-
throw new import_provider6.UnsupportedFunctionalityError({
|
598
|
-
functionality: "object-grammar mode"
|
599
|
-
});
|
600
|
-
}
|
601
|
-
default: {
|
602
|
-
const _exhaustiveCheck = type;
|
603
|
-
throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
|
604
|
-
}
|
605
|
-
}
|
606
|
-
}
|
607
|
-
async doGenerate(options) {
|
608
|
-
var _a, _b;
|
609
|
-
const args = this.getArgs(options);
|
610
|
-
const response = await postJsonToApi({
|
611
|
-
url: `${this.config.baseUrl}/chat/completions`,
|
612
|
-
headers: this.config.headers(),
|
613
|
-
body: args,
|
614
|
-
failedResponseHandler: openaiFailedResponseHandler,
|
615
|
-
successfulResponseHandler: createJsonResponseHandler(
|
616
|
-
openAIChatResponseSchema
|
617
|
-
),
|
618
|
-
abortSignal: options.abortSignal
|
619
|
-
});
|
620
|
-
const { messages: rawPrompt, ...rawSettings } = args;
|
621
|
-
const choice = response.choices[0];
|
622
|
-
return {
|
623
|
-
text: (_a = choice.message.content) != null ? _a : void 0,
|
624
|
-
toolCalls: (_b = choice.message.tool_calls) == null ? void 0 : _b.map((toolCall) => ({
|
625
|
-
toolCallType: "function",
|
626
|
-
toolCallId: toolCall.id,
|
627
|
-
toolName: toolCall.function.name,
|
628
|
-
args: toolCall.function.arguments
|
629
|
-
})),
|
630
|
-
finishReason: mapOpenAIFinishReason(choice.finish_reason),
|
631
|
-
usage: {
|
632
|
-
promptTokens: response.usage.prompt_tokens,
|
633
|
-
completionTokens: response.usage.completion_tokens
|
634
|
-
},
|
635
|
-
rawCall: { rawPrompt, rawSettings },
|
636
|
-
warnings: []
|
637
|
-
};
|
638
|
-
}
|
639
|
-
async doStream(options) {
|
640
|
-
const args = this.getArgs(options);
|
641
|
-
const response = await postJsonToApi({
|
642
|
-
url: `${this.config.baseUrl}/chat/completions`,
|
643
|
-
headers: this.config.headers(),
|
644
|
-
body: {
|
645
|
-
...args,
|
646
|
-
stream: true
|
647
|
-
},
|
648
|
-
failedResponseHandler: openaiFailedResponseHandler,
|
649
|
-
successfulResponseHandler: createEventSourceResponseHandler(
|
650
|
-
openaiChatChunkSchema
|
651
|
-
),
|
652
|
-
abortSignal: options.abortSignal
|
653
|
-
});
|
654
|
-
const { messages: rawPrompt, ...rawSettings } = args;
|
655
|
-
const toolCalls = [];
|
656
|
-
let finishReason = "other";
|
657
|
-
let usage = {
|
658
|
-
promptTokens: Number.NaN,
|
659
|
-
completionTokens: Number.NaN
|
660
|
-
};
|
661
|
-
return {
|
662
|
-
stream: response.pipeThrough(
|
663
|
-
new TransformStream({
|
664
|
-
transform(chunk, controller) {
|
665
|
-
var _a, _b, _c, _d, _e, _f, _g, _h, _i;
|
666
|
-
if (!chunk.success) {
|
667
|
-
controller.enqueue({ type: "error", error: chunk.error });
|
668
|
-
return;
|
669
|
-
}
|
670
|
-
const value = chunk.value;
|
671
|
-
if (value.usage != null) {
|
672
|
-
usage = {
|
673
|
-
promptTokens: value.usage.prompt_tokens,
|
674
|
-
completionTokens: value.usage.completion_tokens
|
675
|
-
};
|
676
|
-
}
|
677
|
-
const choice = value.choices[0];
|
678
|
-
if ((choice == null ? void 0 : choice.finish_reason) != null) {
|
679
|
-
finishReason = mapOpenAIFinishReason(choice.finish_reason);
|
680
|
-
}
|
681
|
-
if ((choice == null ? void 0 : choice.delta) == null) {
|
682
|
-
return;
|
683
|
-
}
|
684
|
-
const delta = choice.delta;
|
685
|
-
if (delta.content != null) {
|
686
|
-
controller.enqueue({
|
687
|
-
type: "text-delta",
|
688
|
-
textDelta: delta.content
|
689
|
-
});
|
690
|
-
}
|
691
|
-
if (delta.tool_calls != null) {
|
692
|
-
for (const toolCallDelta of delta.tool_calls) {
|
693
|
-
const index = toolCallDelta.index;
|
694
|
-
if (toolCalls[index] == null) {
|
695
|
-
if (toolCallDelta.type !== "function") {
|
696
|
-
throw new import_provider6.InvalidResponseDataError({
|
697
|
-
data: toolCallDelta,
|
698
|
-
message: `Expected 'function' type.`
|
699
|
-
});
|
700
|
-
}
|
701
|
-
if (toolCallDelta.id == null) {
|
702
|
-
throw new import_provider6.InvalidResponseDataError({
|
703
|
-
data: toolCallDelta,
|
704
|
-
message: `Expected 'id' to be a string.`
|
705
|
-
});
|
706
|
-
}
|
707
|
-
if (((_a = toolCallDelta.function) == null ? void 0 : _a.name) == null) {
|
708
|
-
throw new import_provider6.InvalidResponseDataError({
|
709
|
-
data: toolCallDelta,
|
710
|
-
message: `Expected 'function.name' to be a string.`
|
711
|
-
});
|
712
|
-
}
|
713
|
-
toolCalls[index] = {
|
714
|
-
id: toolCallDelta.id,
|
715
|
-
type: "function",
|
716
|
-
function: {
|
717
|
-
name: toolCallDelta.function.name,
|
718
|
-
arguments: (_b = toolCallDelta.function.arguments) != null ? _b : ""
|
719
|
-
}
|
720
|
-
};
|
721
|
-
continue;
|
722
|
-
}
|
723
|
-
const toolCall = toolCalls[index];
|
724
|
-
if (((_c = toolCallDelta.function) == null ? void 0 : _c.arguments) != null) {
|
725
|
-
toolCall.function.arguments += (_e = (_d = toolCallDelta.function) == null ? void 0 : _d.arguments) != null ? _e : "";
|
726
|
-
}
|
727
|
-
controller.enqueue({
|
728
|
-
type: "tool-call-delta",
|
729
|
-
toolCallType: "function",
|
730
|
-
toolCallId: toolCall.id,
|
731
|
-
toolName: toolCall.function.name,
|
732
|
-
argsTextDelta: (_f = toolCallDelta.function.arguments) != null ? _f : ""
|
733
|
-
});
|
734
|
-
if (((_g = toolCall.function) == null ? void 0 : _g.name) == null || ((_h = toolCall.function) == null ? void 0 : _h.arguments) == null || !isParseableJson(toolCall.function.arguments)) {
|
735
|
-
continue;
|
736
|
-
}
|
737
|
-
controller.enqueue({
|
738
|
-
type: "tool-call",
|
739
|
-
toolCallType: "function",
|
740
|
-
toolCallId: (_i = toolCall.id) != null ? _i : generateId(),
|
741
|
-
toolName: toolCall.function.name,
|
742
|
-
args: toolCall.function.arguments
|
743
|
-
});
|
744
|
-
}
|
745
|
-
}
|
746
|
-
},
|
747
|
-
flush(controller) {
|
748
|
-
controller.enqueue({ type: "finish", finishReason, usage });
|
749
|
-
}
|
750
|
-
})
|
751
|
-
),
|
752
|
-
rawCall: { rawPrompt, rawSettings },
|
753
|
-
warnings: []
|
754
|
-
};
|
755
|
-
}
|
756
|
-
};
|
757
|
-
var openAIChatResponseSchema = import_zod2.z.object({
|
758
|
-
choices: import_zod2.z.array(
|
759
|
-
import_zod2.z.object({
|
760
|
-
message: import_zod2.z.object({
|
761
|
-
role: import_zod2.z.literal("assistant"),
|
762
|
-
content: import_zod2.z.string().nullable(),
|
763
|
-
tool_calls: import_zod2.z.array(
|
764
|
-
import_zod2.z.object({
|
765
|
-
id: import_zod2.z.string(),
|
766
|
-
type: import_zod2.z.literal("function"),
|
767
|
-
function: import_zod2.z.object({
|
768
|
-
name: import_zod2.z.string(),
|
769
|
-
arguments: import_zod2.z.string()
|
770
|
-
})
|
771
|
-
})
|
772
|
-
).optional()
|
773
|
-
}),
|
774
|
-
index: import_zod2.z.number(),
|
775
|
-
finish_reason: import_zod2.z.string().optional().nullable()
|
776
|
-
})
|
777
|
-
),
|
778
|
-
object: import_zod2.z.literal("chat.completion"),
|
779
|
-
usage: import_zod2.z.object({
|
780
|
-
prompt_tokens: import_zod2.z.number(),
|
781
|
-
completion_tokens: import_zod2.z.number()
|
782
|
-
})
|
783
|
-
});
|
784
|
-
var openaiChatChunkSchema = import_zod2.z.object({
|
785
|
-
object: import_zod2.z.literal("chat.completion.chunk"),
|
786
|
-
choices: import_zod2.z.array(
|
787
|
-
import_zod2.z.object({
|
788
|
-
delta: import_zod2.z.object({
|
789
|
-
role: import_zod2.z.enum(["assistant"]).optional(),
|
790
|
-
content: import_zod2.z.string().nullable().optional(),
|
791
|
-
tool_calls: import_zod2.z.array(
|
792
|
-
import_zod2.z.object({
|
793
|
-
index: import_zod2.z.number(),
|
794
|
-
id: import_zod2.z.string().optional(),
|
795
|
-
type: import_zod2.z.literal("function").optional(),
|
796
|
-
function: import_zod2.z.object({
|
797
|
-
name: import_zod2.z.string().optional(),
|
798
|
-
arguments: import_zod2.z.string().optional()
|
799
|
-
})
|
800
|
-
})
|
801
|
-
).optional()
|
802
|
-
}),
|
803
|
-
finish_reason: import_zod2.z.string().nullable().optional(),
|
804
|
-
index: import_zod2.z.number()
|
805
|
-
})
|
806
|
-
),
|
807
|
-
usage: import_zod2.z.object({
|
808
|
-
prompt_tokens: import_zod2.z.number(),
|
809
|
-
completion_tokens: import_zod2.z.number()
|
810
|
-
}).optional().nullable()
|
811
|
-
});
|
812
|
-
|
813
|
-
// openai/openai-completion-language-model.ts
|
814
|
-
var import_provider8 = require("@ai-sdk/provider");
|
815
|
-
var import_zod3 = require("zod");
|
816
|
-
|
817
|
-
// openai/convert-to-openai-completion-prompt.ts
|
818
|
-
var import_provider7 = require("@ai-sdk/provider");
|
819
|
-
/**
 * Converts a standardized message list into a single text prompt plus stop
 * sequences for the OpenAI completions (non-chat) API.
 *
 * @param {Array} prompt - Standardized messages. A system message is only
 *   allowed in the first position, where it becomes a plain-text preamble.
 * @param {string} inputFormat - "prompt" means the caller supplied a raw
 *   prompt; if it is exactly one user text part it is passed through verbatim.
 * @param {string} [user="user"] - Label used to render user turns.
 * @param {string} [assistant="assistant"] - Label used to render assistant turns.
 * @returns {{prompt: string, stopSequences?: string[]}}
 * @throws {InvalidPromptError} for a system message after the first position.
 * @throws {UnsupportedFunctionalityError} for images, tool calls, and tool
 *   messages — none of these can be expressed as completion text.
 */
function convertToOpenAICompletionPrompt({
  prompt,
  inputFormat,
  user = "user",
  assistant = "assistant"
}) {
  // Fast path: a raw prompt consisting of exactly one user text part is
  // forwarded unchanged (no role labels, no stop sequences).
  if (inputFormat === "prompt" && prompt.length === 1 && prompt[0].role === "user" && prompt[0].content.length === 1 && prompt[0].content[0].type === "text") {
    return { prompt: prompt[0].content[0].text };
  }
  let text = "";
  // Leading system message becomes a preamble separated by a blank line.
  if (prompt[0].role === "system") {
    text += `${prompt[0].content}

`;
    prompt = prompt.slice(1);
  }
  for (const { role, content } of prompt) {
    switch (role) {
      case "system": {
        throw new import_provider7.InvalidPromptError({
          // BUGFIX: was a plain double-quoted string, so "${content}"
          // appeared literally in the error message instead of the content.
          message: `Unexpected system message in prompt: ${content}`,
          prompt
        });
      }
      case "user": {
        const userMessage = content.map((part) => {
          switch (part.type) {
            case "text": {
              return part.text;
            }
            case "image": {
              throw new import_provider7.UnsupportedFunctionalityError({
                functionality: "images"
              });
            }
          }
        }).join("");
        text += `${user}:
${userMessage}

`;
        break;
      }
      case "assistant": {
        const assistantMessage = content.map((part) => {
          switch (part.type) {
            case "text": {
              return part.text;
            }
            case "tool-call": {
              throw new import_provider7.UnsupportedFunctionalityError({
                functionality: "tool-call messages"
              });
            }
          }
        }).join("");
        text += `${assistant}:
${assistantMessage}

`;
        break;
      }
      case "tool": {
        throw new import_provider7.UnsupportedFunctionalityError({
          functionality: "tool messages"
        });
      }
      default: {
        // Exhaustiveness guard (TypeScript transpilation artifact):
        const _exhaustiveCheck = role;
        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
      }
    }
  }
  // Prime the model to answer as the assistant, and stop generation when it
  // begins to invent the next user turn.
  text += `${assistant}:
`;
  return {
    prompt: text,
    stopSequences: [`
${user}:`]
  };
}
|
900
|
-
|
901
|
-
// openai/openai-completion-language-model.ts
|
902
|
-
var OpenAICompletionLanguageModel = class {
  /**
   * LanguageModelV1 implementation backed by the OpenAI completions API.
   *
   * @param modelId - completion model id, e.g. "gpt-3.5-turbo-instruct".
   * @param settings - model-specific settings (echo, logitBias, suffix, user).
   * @param config - provider configuration ({ provider, baseUrl, headers() }).
   */
  constructor(modelId, settings, config) {
    this.specificationVersion = "v1";
    // Completion models do not support structured object generation modes.
    this.defaultObjectGenerationMode = void 0;
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
  // Translates standardized call options into the JSON body expected by the
  // OpenAI completions endpoint. Throws UnsupportedFunctionalityError for
  // generation modes this API cannot express.
  getArgs({
    mode,
    inputFormat,
    prompt,
    maxTokens,
    temperature,
    topP,
    frequencyPenalty,
    presencePenalty,
    seed
  }) {
    var _a;
    const type = mode.type;
    const { prompt: completionPrompt, stopSequences } = convertToOpenAICompletionPrompt({ prompt, inputFormat });
    const baseArgs = {
      // model id:
      model: this.modelId,
      // model specific settings:
      echo: this.settings.echo,
      logit_bias: this.settings.logitBias,
      suffix: this.settings.suffix,
      user: this.settings.user,
      // standardized settings:
      max_tokens: maxTokens,
      // temperature is standardized to [0, 1]; OpenAI expects [0, 2]:
      temperature: scale({
        value: temperature,
        outputMin: 0,
        outputMax: 2
      }),
      top_p: topP,
      // penalties are standardized to [-1, 1]; OpenAI expects [-2, 2]:
      frequency_penalty: scale({
        value: frequencyPenalty,
        inputMin: -1,
        inputMax: 1,
        outputMin: -2,
        outputMax: 2
      }),
      presence_penalty: scale({
        value: presencePenalty,
        inputMin: -1,
        inputMax: 1,
        outputMin: -2,
        outputMax: 2
      }),
      seed,
      // prompt:
      prompt: completionPrompt,
      // stop sequences:
      stop: stopSequences
    };
    switch (type) {
      case "regular": {
        if ((_a = mode.tools) == null ? void 0 : _a.length) {
          throw new import_provider8.UnsupportedFunctionalityError({
            functionality: "tools"
          });
        }
        return baseArgs;
      }
      case "object-json": {
        throw new import_provider8.UnsupportedFunctionalityError({
          functionality: "object-json mode"
        });
      }
      case "object-tool": {
        throw new import_provider8.UnsupportedFunctionalityError({
          functionality: "object-tool mode"
        });
      }
      case "object-grammar": {
        throw new import_provider8.UnsupportedFunctionalityError({
          functionality: "object-grammar mode"
        });
      }
      default: {
        // Exhaustiveness guard (TypeScript transpilation artifact):
        const _exhaustiveCheck = type;
        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
      }
    }
  }
  // Non-streaming generation: one POST, returns text, usage, finish reason.
  async doGenerate(options) {
    const args = this.getArgs(options);
    const response = await postJsonToApi({
      url: `${this.config.baseUrl}/completions`,
      headers: this.config.headers(),
      body: args,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(
        openAICompletionResponseSchema
      ),
      abortSignal: options.abortSignal
    });
    const { prompt: rawPrompt, ...rawSettings } = args;
    const choice = response.choices[0];
    return {
      text: choice.text,
      usage: {
        promptTokens: response.usage.prompt_tokens,
        completionTokens: response.usage.completion_tokens
      },
      finishReason: mapOpenAIFinishReason(choice.finish_reason),
      rawCall: { rawPrompt, rawSettings },
      warnings: []
    };
  }
  // Streaming generation: POST with stream: true, emits text-delta parts
  // followed by one finish part when the SSE stream ends.
  async doStream(options) {
    const args = this.getArgs(options);
    const response = await postJsonToApi({
      url: `${this.config.baseUrl}/completions`,
      headers: this.config.headers(),
      // BUGFIX: reuse the args computed above instead of redundantly calling
      // this.getArgs(options) a second time (it rebuilds the whole prompt).
      body: {
        ...args,
        stream: true
      },
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createEventSourceResponseHandler(
        openaiCompletionChunkSchema
      ),
      abortSignal: options.abortSignal
    });
    const { prompt: rawPrompt, ...rawSettings } = args;
    let finishReason = "other";
    // NaN signals "usage never reported"; usage arrives on the final chunk.
    let usage = {
      promptTokens: Number.NaN,
      completionTokens: Number.NaN
    };
    return {
      stream: response.pipeThrough(
        new TransformStream({
          transform(chunk, controller) {
            // Forward schema-validation failures as error stream parts:
            if (!chunk.success) {
              controller.enqueue({ type: "error", error: chunk.error });
              return;
            }
            const value = chunk.value;
            if (value.usage != null) {
              usage = {
                promptTokens: value.usage.prompt_tokens,
                completionTokens: value.usage.completion_tokens
              };
            }
            const choice = value.choices[0];
            if ((choice == null ? void 0 : choice.finish_reason) != null) {
              finishReason = mapOpenAIFinishReason(choice.finish_reason);
            }
            if ((choice == null ? void 0 : choice.text) != null) {
              controller.enqueue({
                type: "text-delta",
                textDelta: choice.text
              });
            }
          },
          // Emit the accumulated finish reason and usage exactly once:
          flush(controller) {
            controller.enqueue({ type: "finish", finishReason, usage });
          }
        })
      ),
      rawCall: { rawPrompt, rawSettings },
      warnings: []
    };
  }
};
|
1075
|
-
// Zod schema for a non-streaming completions response. Only the fields that
// doGenerate actually consumes are validated; extra fields are ignored.
var openAICompletionResponseSchema = import_zod3.z.object({
  usage: import_zod3.z.object({
    prompt_tokens: import_zod3.z.number(),
    completion_tokens: import_zod3.z.number()
  }),
  choices: import_zod3.z.array(
    import_zod3.z.object({
      finish_reason: import_zod3.z.string(),
      text: import_zod3.z.string()
    })
  )
});
|
1087
|
-
// Zod schema for one server-sent-events chunk of a streaming completions
// response. `usage` is optional/nullable because it only appears on the
// final chunk (if at all).
var openaiCompletionChunkSchema = import_zod3.z.object({
  object: import_zod3.z.literal("text_completion"),
  usage: import_zod3.z.object({
    prompt_tokens: import_zod3.z.number(),
    completion_tokens: import_zod3.z.number()
  }).optional().nullable(),
  choices: import_zod3.z.array(
    import_zod3.z.object({
      index: import_zod3.z.number(),
      text: import_zod3.z.string(),
      finish_reason: import_zod3.z.enum(["stop", "length", "content_filter"]).optional().nullable()
    })
  )
});
|
1101
|
-
|
1102
|
-
// openai/openai-facade.ts
|
1103
|
-
/**
 * Facade for creating OpenAI chat and completion language models that share
 * one set of credentials and base configuration.
 */
var OpenAI = class {
  constructor(options = {}) {
    const { baseUrl, apiKey, organization } = options;
    this.baseUrl = baseUrl;
    this.apiKey = apiKey;
    this.organization = organization;
  }
  // Shared configuration handed to every model created by this facade. The
  // API key is resolved lazily inside headers(), so constructing the facade
  // never throws even when no key is configured yet.
  get baseConfig() {
    const resolvedBaseUrl = this.baseUrl != null ? this.baseUrl : "https://api.openai.com/v1";
    return {
      organization: this.organization,
      baseUrl: resolvedBaseUrl,
      headers: () => {
        const apiKey = loadApiKey({
          apiKey: this.apiKey,
          environmentVariableName: "OPENAI_API_KEY",
          description: "OpenAI"
        });
        return {
          Authorization: `Bearer ${apiKey}`,
          "OpenAI-Organization": this.organization
        };
      }
    };
  }
  // Creates a chat model (chat/completions endpoint) for the given model id.
  chat(modelId, settings = {}) {
    const config = {
      provider: "openai.chat",
      ...this.baseConfig
    };
    return new OpenAIChatLanguageModel(modelId, settings, config);
  }
  // Creates a text-completion model (completions endpoint) for the given id.
  completion(modelId, settings = {}) {
    const config = {
      provider: "openai.completion",
      ...this.baseConfig
    };
    return new OpenAICompletionLanguageModel(modelId, settings, config);
  }
};
|
1137
|
-
// Default provider instance with no explicit options; the API key is read
// lazily from the OPENAI_API_KEY environment variable on first request.
var openai = new OpenAI();
// Annotate the CommonJS export names for ESM import in node:
// (dead code by design: the `0 &&` guard means it never executes, but
// Node's CJS named-exports detection and bundlers parse it statically.)
0 && (module.exports = {
  OpenAI,
  openai
});
//# sourceMappingURL=index.js.map
|