ai 3.0.20 → 3.0.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. package/dist/index.d.mts +45 -354
  2. package/dist/index.d.ts +45 -354
  3. package/dist/index.js +161 -460
  4. package/dist/index.js.map +1 -1
  5. package/dist/index.mjs +136 -430
  6. package/dist/index.mjs.map +1 -1
  7. package/package.json +6 -41
  8. package/react/dist/index.d.mts +1 -1
  9. package/react/dist/index.d.ts +1 -1
  10. package/react/dist/index.js +3 -3
  11. package/react/dist/index.js.map +1 -1
  12. package/react/dist/index.mjs +3 -3
  13. package/react/dist/index.mjs.map +1 -1
  14. package/rsc/dist/rsc-server.mjs +3 -3
  15. package/rsc/dist/rsc-server.mjs.map +1 -1
  16. package/solid/dist/index.d.mts +1 -1
  17. package/solid/dist/index.d.ts +1 -1
  18. package/solid/dist/index.js +3 -3
  19. package/solid/dist/index.js.map +1 -1
  20. package/solid/dist/index.mjs +3 -3
  21. package/solid/dist/index.mjs.map +1 -1
  22. package/svelte/dist/index.d.mts +1 -1
  23. package/svelte/dist/index.d.ts +1 -1
  24. package/svelte/dist/index.js +3 -3
  25. package/svelte/dist/index.js.map +1 -1
  26. package/svelte/dist/index.mjs +3 -3
  27. package/svelte/dist/index.mjs.map +1 -1
  28. package/vue/dist/index.d.mts +1 -1
  29. package/vue/dist/index.d.ts +1 -1
  30. package/vue/dist/index.js +3 -3
  31. package/vue/dist/index.js.map +1 -1
  32. package/vue/dist/index.mjs +3 -3
  33. package/vue/dist/index.mjs.map +1 -1
  34. package/anthropic/dist/index.d.mts +0 -403
  35. package/anthropic/dist/index.d.ts +0 -403
  36. package/anthropic/dist/index.js +0 -950
  37. package/anthropic/dist/index.js.map +0 -1
  38. package/anthropic/dist/index.mjs +0 -914
  39. package/anthropic/dist/index.mjs.map +0 -1
  40. package/google/dist/index.d.mts +0 -399
  41. package/google/dist/index.d.ts +0 -399
  42. package/google/dist/index.js +0 -954
  43. package/google/dist/index.js.map +0 -1
  44. package/google/dist/index.mjs +0 -918
  45. package/google/dist/index.mjs.map +0 -1
  46. package/mistral/dist/index.d.mts +0 -404
  47. package/mistral/dist/index.d.ts +0 -404
  48. package/mistral/dist/index.js +0 -921
  49. package/mistral/dist/index.js.map +0 -1
  50. package/mistral/dist/index.mjs +0 -885
  51. package/mistral/dist/index.mjs.map +0 -1
  52. package/openai/dist/index.d.mts +0 -468
  53. package/openai/dist/index.d.ts +0 -468
  54. package/openai/dist/index.js +0 -1334
  55. package/openai/dist/index.js.map +0 -1
  56. package/openai/dist/index.mjs +0 -1298
  57. package/openai/dist/index.mjs.map +0 -1
  58. package/spec/dist/index.d.mts +0 -780
  59. package/spec/dist/index.d.ts +0 -780
  60. package/spec/dist/index.js +0 -863
  61. package/spec/dist/index.js.map +0 -1
  62. package/spec/dist/index.mjs +0 -797
  63. package/spec/dist/index.mjs.map +0 -1
--- package/openai/dist/index.js
+++ /dev/null
@@ -1,1334 +0,0 @@
- "use strict";
- var __create = Object.create;
- var __defProp = Object.defineProperty;
- var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
- var __getOwnPropNames = Object.getOwnPropertyNames;
- var __getProtoOf = Object.getPrototypeOf;
- var __hasOwnProp = Object.prototype.hasOwnProperty;
- var __export = (target, all) => {
- for (var name in all)
- __defProp(target, name, { get: all[name], enumerable: true });
- };
- var __copyProps = (to, from, except, desc) => {
- if (from && typeof from === "object" || typeof from === "function") {
- for (let key of __getOwnPropNames(from))
- if (!__hasOwnProp.call(to, key) && key !== except)
- __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
- }
- return to;
- };
- var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
- // If the importer is in node compatibility mode or this is not an ESM
- // file that has been converted to a CommonJS file using a Babel-
- // compatible transform (i.e. "__esModule" has not been set), then set
- // "default" to the CommonJS "module.exports" for node compatibility.
- isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
- mod
- ));
- var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
-
- // openai/index.ts
- var openai_exports = {};
- __export(openai_exports, {
- OpenAI: () => OpenAI,
- openai: () => openai
- });
- module.exports = __toCommonJS(openai_exports);
-
- // spec/errors/api-call-error.ts
- var APICallError = class extends Error {
- constructor({
- message,
- url,
- requestBodyValues,
- statusCode,
- responseBody,
- cause,
- isRetryable = statusCode != null && (statusCode === 408 || // request timeout
- statusCode === 409 || // conflict
- statusCode === 429 || // too many requests
- statusCode >= 500),
- // server error
- data
- }) {
- super(message);
- this.name = "AI_APICallError";
- this.url = url;
- this.requestBodyValues = requestBodyValues;
- this.statusCode = statusCode;
- this.responseBody = responseBody;
- this.cause = cause;
- this.isRetryable = isRetryable;
- this.data = data;
- }
- static isAPICallError(error) {
- return error instanceof Error && error.name === "AI_APICallError" && typeof error.url === "string" && typeof error.requestBodyValues === "object" && (error.statusCode == null || typeof error.statusCode === "number") && (error.responseBody == null || typeof error.responseBody === "string") && (error.cause == null || typeof error.cause === "object") && typeof error.isRetryable === "boolean" && (error.data == null || typeof error.data === "object");
- }
- toJSON() {
- return {
- name: this.name,
- message: this.message,
- url: this.url,
- requestBodyValues: this.requestBodyValues,
- statusCode: this.statusCode,
- responseBody: this.responseBody,
- cause: this.cause,
- isRetryable: this.isRetryable,
- data: this.data
- };
- }
- };
-
- // spec/errors/invalid-prompt-error.ts
- var InvalidPromptError = class extends Error {
- constructor({ prompt: prompt2, message }) {
- super(`Invalid prompt: ${message}`);
- this.name = "AI_InvalidPromptError";
- this.prompt = prompt2;
- }
- static isInvalidPromptError(error) {
- return error instanceof Error && error.name === "AI_InvalidPromptError" && prompt != null;
- }
- toJSON() {
- return {
- name: this.name,
- message: this.message,
- stack: this.stack,
- prompt: this.prompt
- };
- }
- };
-
- // spec/errors/invalid-response-data-error.ts
- var InvalidResponseDataError = class extends Error {
- constructor({
- data,
- message = `Invalid response data: ${JSON.stringify(data)}.`
- }) {
- super(message);
- this.name = "AI_InvalidResponseDataError";
- this.data = data;
- }
- static isInvalidResponseDataError(error) {
- return error instanceof Error && error.name === "AI_InvalidResponseDataError" && error.data != null;
- }
- toJSON() {
- return {
- name: this.name,
- message: this.message,
- stack: this.stack,
- data: this.data
- };
- }
- };
-
- // spec/util/generate-id.ts
- var import_non_secure = require("nanoid/non-secure");
- var generateId = (0, import_non_secure.customAlphabet)(
- "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
- 7
- );
-
- // spec/util/get-error-message.ts
- function getErrorMessage(error) {
- if (error == null) {
- return "unknown error";
- }
- if (typeof error === "string") {
- return error;
- }
- if (error instanceof Error) {
- return error.message;
- }
- return JSON.stringify(error);
- }
-
- // spec/errors/load-api-key-error.ts
- var LoadAPIKeyError = class extends Error {
- constructor({ message }) {
- super(message);
- this.name = "AI_LoadAPIKeyError";
- }
- static isLoadAPIKeyError(error) {
- return error instanceof Error && error.name === "AI_LoadAPIKeyError";
- }
- toJSON() {
- return {
- name: this.name,
- message: this.message
- };
- }
- };
-
- // spec/util/load-api-key.ts
- function loadApiKey({
- apiKey,
- environmentVariableName,
- apiKeyParameterName = "apiKey",
- description
- }) {
- if (typeof apiKey === "string") {
- return apiKey;
- }
- if (apiKey != null) {
- throw new LoadAPIKeyError({
- message: `${description} API key must be a string.`
- });
- }
- if (typeof process === "undefined") {
- throw new LoadAPIKeyError({
- message: `${description} API key is missing. Pass it using the '${apiKeyParameterName}' parameter. Environment variables is not supported in this environment.`
- });
- }
- apiKey = process.env[environmentVariableName];
- if (apiKey == null) {
- throw new LoadAPIKeyError({
- message: `${description} API key is missing. Pass it using the '${apiKeyParameterName}' parameter or the ${environmentVariableName} environment variable.`
- });
- }
- if (typeof apiKey !== "string") {
- throw new LoadAPIKeyError({
- message: `${description} API key must be a string. The value of the ${environmentVariableName} environment variable is not a string.`
- });
- }
- return apiKey;
- }
-
- // spec/util/parse-json.ts
- var import_secure_json_parse = __toESM(require("secure-json-parse"));
-
- // spec/errors/json-parse-error.ts
- var JSONParseError = class extends Error {
- constructor({ text, cause }) {
- super(
- `JSON parsing failed: Text: ${text}.
- Error message: ${getErrorMessage(cause)}`
- );
- this.name = "AI_JSONParseError";
- this.cause = cause;
- this.text = text;
- }
- static isJSONParseError(error) {
- return error instanceof Error && error.name === "AI_JSONParseError" && typeof error.text === "string" && typeof error.cause === "string";
- }
- toJSON() {
- return {
- name: this.name,
- message: this.message,
- cause: this.cause,
- stack: this.stack,
- valueText: this.text
- };
- }
- };
-
- // spec/errors/type-validation-error.ts
- var TypeValidationError = class extends Error {
- constructor({ value, cause }) {
- super(
- `Type validation failed: Value: ${JSON.stringify(value)}.
- Error message: ${getErrorMessage(cause)}`
- );
- this.name = "AI_TypeValidationError";
- this.cause = cause;
- this.value = value;
- }
- static isTypeValidationError(error) {
- return error instanceof Error && error.name === "AI_TypeValidationError" && typeof error.value === "string" && typeof error.cause === "string";
- }
- toJSON() {
- return {
- name: this.name,
- message: this.message,
- cause: this.cause,
- stack: this.stack,
- value: this.value
- };
- }
- };
-
- // spec/util/validate-types.ts
- function validateTypes({
- value,
- schema
- }) {
- try {
- return schema.parse(value);
- } catch (error) {
- throw new TypeValidationError({ value, cause: error });
- }
- }
- function safeValidateTypes({
- value,
- schema
- }) {
- try {
- const validationResult = schema.safeParse(value);
- if (validationResult.success) {
- return {
- success: true,
- value: validationResult.data
- };
- }
- return {
- success: false,
- error: new TypeValidationError({
- value,
- cause: validationResult.error
- })
- };
- } catch (error) {
- return {
- success: false,
- error: TypeValidationError.isTypeValidationError(error) ? error : new TypeValidationError({ value, cause: error })
- };
- }
- }
-
- // spec/util/parse-json.ts
- function parseJSON({
- text,
- schema
- }) {
- try {
- const value = import_secure_json_parse.default.parse(text);
- if (schema == null) {
- return value;
- }
- return validateTypes({ value, schema });
- } catch (error) {
- if (JSONParseError.isJSONParseError(error) || TypeValidationError.isTypeValidationError(error)) {
- throw error;
- }
- throw new JSONParseError({ text, cause: error });
- }
- }
- function safeParseJSON({
- text,
- schema
- }) {
- try {
- const value = import_secure_json_parse.default.parse(text);
- if (schema == null) {
- return {
- success: true,
- value
- };
- }
- return safeValidateTypes({ value, schema });
- } catch (error) {
- return {
- success: false,
- error: JSONParseError.isJSONParseError(error) ? error : new JSONParseError({ text, cause: error })
- };
- }
- }
- function isParseableJson(input) {
- try {
- import_secure_json_parse.default.parse(input);
- return true;
- } catch (e) {
- return false;
- }
- }
-
- // spec/util/post-to-api.ts
- var postJsonToApi = async ({
- url,
- headers,
- body,
- failedResponseHandler,
- successfulResponseHandler,
- abortSignal
- }) => postToApi({
- url,
- headers: {
- ...headers,
- "Content-Type": "application/json"
- },
- body: {
- content: JSON.stringify(body),
- values: body
- },
- failedResponseHandler,
- successfulResponseHandler,
- abortSignal
- });
- var postToApi = async ({
- url,
- headers = {},
- body,
- successfulResponseHandler,
- failedResponseHandler,
- abortSignal
- }) => {
- try {
- const definedHeaders = Object.fromEntries(
- Object.entries(headers).filter(([_key, value]) => value != null)
- );
- const response = await fetch(url, {
- method: "POST",
- headers: definedHeaders,
- body: body.content,
- signal: abortSignal
- });
- if (!response.ok) {
- try {
- throw await failedResponseHandler({
- response,
- url,
- requestBodyValues: body.values
- });
- } catch (error) {
- if (error instanceof Error) {
- if (error.name === "AbortError" || APICallError.isAPICallError(error)) {
- throw error;
- }
- }
- throw new APICallError({
- message: "Failed to process error response",
- cause: error,
- statusCode: response.status,
- url,
- requestBodyValues: body.values
- });
- }
- }
- try {
- return await successfulResponseHandler({
- response,
- url,
- requestBodyValues: body.values
- });
- } catch (error) {
- if (error instanceof Error) {
- if (error.name === "AbortError" || APICallError.isAPICallError(error)) {
- throw error;
- }
- }
- throw new APICallError({
- message: "Failed to process successful response",
- cause: error,
- statusCode: response.status,
- url,
- requestBodyValues: body.values
- });
- }
- } catch (error) {
- if (error instanceof Error) {
- if (error.name === "AbortError") {
- throw error;
- }
- }
- if (error instanceof TypeError && error.message === "fetch failed") {
- const cause = error.cause;
- if (cause != null) {
- throw new APICallError({
- message: `Cannot connect to API: ${cause.message}`,
- cause,
- url,
- requestBodyValues: body.values,
- isRetryable: true
- // retry when network error
- });
- }
- }
- throw error;
- }
- };
-
- // spec/util/response-handler.ts
- var import_stream = require("eventsource-parser/stream");
-
- // spec/errors/no-response-body-error.ts
- var NoResponseBodyError = class extends Error {
- constructor({ message = "No response body" } = {}) {
- super(message);
- this.name = "AI_NoResponseBodyError";
- }
- static isNoResponseBodyError(error) {
- return error instanceof Error && error.name === "AI_NoResponseBodyError";
- }
- toJSON() {
- return {
- name: this.name,
- message: this.message,
- stack: this.stack
- };
- }
- };
-
- // spec/util/response-handler.ts
- var createJsonErrorResponseHandler = ({
- errorSchema,
- errorToMessage,
- isRetryable
- }) => async ({ response, url, requestBodyValues }) => {
- const responseBody = await response.text();
- if (responseBody.trim() === "") {
- return new APICallError({
- message: response.statusText,
- url,
- requestBodyValues,
- statusCode: response.status,
- responseBody,
- isRetryable: isRetryable == null ? void 0 : isRetryable(response)
- });
- }
- try {
- const parsedError = parseJSON({
- text: responseBody,
- schema: errorSchema
- });
- return new APICallError({
- message: errorToMessage(parsedError),
- url,
- requestBodyValues,
- statusCode: response.status,
- responseBody,
- data: parsedError,
- isRetryable: isRetryable == null ? void 0 : isRetryable(response, parsedError)
- });
- } catch (parseError) {
- return new APICallError({
- message: response.statusText,
- url,
- requestBodyValues,
- statusCode: response.status,
- responseBody,
- isRetryable: isRetryable == null ? void 0 : isRetryable(response)
- });
- }
- };
- var createEventSourceResponseHandler = (chunkSchema) => async ({ response }) => {
- if (response.body == null) {
- throw new NoResponseBodyError();
- }
- return response.body.pipeThrough(new TextDecoderStream()).pipeThrough(new import_stream.EventSourceParserStream()).pipeThrough(
- new TransformStream({
- transform({ data }, controller) {
- if (data === "[DONE]") {
- return;
- }
- controller.enqueue(
- safeParseJSON({
- text: data,
- schema: chunkSchema
- })
- );
- }
- })
- );
- };
- var createJsonResponseHandler = (responseSchema) => async ({ response, url, requestBodyValues }) => {
- const responseBody = await response.text();
- const parsedResult = safeParseJSON({
- text: responseBody,
- schema: responseSchema
- });
- if (!parsedResult.success) {
- throw new APICallError({
- message: "Invalid JSON response",
- cause: parsedResult.error,
- statusCode: response.status,
- responseBody,
- url,
- requestBodyValues
- });
- }
- return parsedResult.value;
- };
-
- // spec/util/scale.ts
- function scale({
- inputMin = 0,
- inputMax = 1,
- outputMin,
- outputMax,
- value
- }) {
- if (value === void 0) {
- return void 0;
- }
- const inputRange = inputMax - inputMin;
- const outputRange = outputMax - outputMin;
- return (value - inputMin) * outputRange / inputRange + outputMin;
- }
-
- // spec/util/uint8-utils.ts
- function convertUint8ArrayToBase64(array) {
- let latin1string = "";
- for (let i = 0; i < array.length; i++) {
- latin1string += String.fromCodePoint(array[i]);
- }
- return globalThis.btoa(latin1string);
- }
-
- // spec/errors/unsupported-functionality-error.ts
- var UnsupportedFunctionalityError = class extends Error {
- constructor({ functionality }) {
- super(`'${functionality}' functionality not supported.`);
- this.name = "AI_UnsupportedFunctionalityError";
- this.functionality = functionality;
- }
- static isUnsupportedFunctionalityError(error) {
- return error instanceof Error && error.name === "AI_UnsupportedFunctionalityError" && typeof error.functionality === "string";
- }
- toJSON() {
- return {
- name: this.name,
- message: this.message,
- stack: this.stack,
- functionality: this.functionality
- };
- }
- };
-
- // openai/openai-chat-language-model.ts
- var import_zod2 = require("zod");
-
- // openai/convert-to-openai-chat-messages.ts
- function convertToOpenAIChatMessages(prompt2) {
- const messages = [];
- for (const { role, content } of prompt2) {
- switch (role) {
- case "system": {
- messages.push({ role: "system", content });
- break;
- }
- case "user": {
- messages.push({
- role: "user",
- content: content.map((part) => {
- var _a;
- switch (part.type) {
- case "text": {
- return { type: "text", text: part.text };
- }
- case "image": {
- return {
- type: "image_url",
- image_url: {
- url: part.image instanceof URL ? part.image.toString() : `data:${(_a = part.mimeType) != null ? _a : "image/jpeg"};base64,${convertUint8ArrayToBase64(part.image)}`
- }
- };
- }
- }
- })
- });
- break;
- }
- case "assistant": {
- let text = "";
- const toolCalls = [];
- for (const part of content) {
- switch (part.type) {
- case "text": {
- text += part.text;
- break;
- }
- case "tool-call": {
- toolCalls.push({
- id: part.toolCallId,
- type: "function",
- function: {
- name: part.toolName,
- arguments: JSON.stringify(part.args)
- }
- });
- break;
- }
- default: {
- const _exhaustiveCheck = part;
- throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
- }
- }
- }
- messages.push({
- role: "assistant",
- content: text,
- tool_calls: toolCalls.length > 0 ? toolCalls : void 0
- });
- break;
- }
- case "tool": {
- for (const toolResponse of content) {
- messages.push({
- role: "tool",
- tool_call_id: toolResponse.toolCallId,
- content: JSON.stringify(toolResponse.result)
- });
- }
- break;
- }
- default: {
- const _exhaustiveCheck = role;
- throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
- }
- }
- }
- return messages;
- }
-
- // openai/map-openai-finish-reason.ts
- function mapOpenAIFinishReason(finishReason) {
- switch (finishReason) {
- case "stop":
- return "stop";
- case "length":
- return "length";
- case "content_filter":
- return "content-filter";
- case "function_call":
- case "tool_calls":
- return "tool-calls";
- default:
- return "other";
- }
- }
-
- // openai/openai-error.ts
- var import_zod = require("zod");
- var openAIErrorDataSchema = import_zod.z.object({
- error: import_zod.z.object({
- message: import_zod.z.string(),
- type: import_zod.z.string(),
- param: import_zod.z.any().nullable(),
- code: import_zod.z.string().nullable()
- })
- });
- var openaiFailedResponseHandler = createJsonErrorResponseHandler({
- errorSchema: openAIErrorDataSchema,
- errorToMessage: (data) => data.error.message
- });
-
- // openai/openai-chat-language-model.ts
- var OpenAIChatLanguageModel = class {
- constructor(modelId, settings, config) {
- this.specificationVersion = "v1";
- this.defaultObjectGenerationMode = "tool";
- this.modelId = modelId;
- this.settings = settings;
- this.config = config;
- }
- get provider() {
- return this.config.provider;
- }
- getArgs({
- mode,
- prompt: prompt2,
- maxTokens,
- temperature,
- topP,
- frequencyPenalty,
- presencePenalty,
- seed
- }) {
- var _a;
- const type = mode.type;
- const baseArgs = {
- // model id:
- model: this.modelId,
- // model specific settings:
- logit_bias: this.settings.logitBias,
- user: this.settings.user,
- // standardized settings:
- max_tokens: maxTokens,
- temperature: scale({
- value: temperature,
- outputMin: 0,
- outputMax: 2
- }),
- top_p: topP,
- frequency_penalty: scale({
- value: frequencyPenalty,
- inputMin: -1,
- inputMax: 1,
- outputMin: -2,
- outputMax: 2
- }),
- presence_penalty: scale({
- value: presencePenalty,
- inputMin: -1,
- inputMax: 1,
- outputMin: -2,
- outputMax: 2
- }),
- seed,
- // messages:
- messages: convertToOpenAIChatMessages(prompt2)
- };
- switch (type) {
- case "regular": {
- const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
- return {
- ...baseArgs,
- tools: tools == null ? void 0 : tools.map((tool) => ({
- type: "function",
- function: {
- name: tool.name,
- description: tool.description,
- parameters: tool.parameters
- }
- }))
- };
- }
- case "object-json": {
- return {
- ...baseArgs,
- response_format: { type: "json_object" }
- };
- }
- case "object-tool": {
- return {
- ...baseArgs,
- tool_choice: { type: "function", function: { name: mode.tool.name } },
- tools: [{ type: "function", function: mode.tool }]
- };
- }
- case "object-grammar": {
- throw new UnsupportedFunctionalityError({
- functionality: "object-grammar mode"
- });
- }
- default: {
- const _exhaustiveCheck = type;
- throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
- }
- }
- }
- async doGenerate(options) {
- var _a, _b;
- const args = this.getArgs(options);
- const response = await postJsonToApi({
- url: `${this.config.baseUrl}/chat/completions`,
- headers: this.config.headers(),
- body: args,
- failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: createJsonResponseHandler(
- openAIChatResponseSchema
- ),
- abortSignal: options.abortSignal
- });
- const { messages: rawPrompt, ...rawSettings } = args;
- const choice = response.choices[0];
- return {
- text: (_a = choice.message.content) != null ? _a : void 0,
- toolCalls: (_b = choice.message.tool_calls) == null ? void 0 : _b.map((toolCall) => ({
- toolCallType: "function",
- toolCallId: toolCall.id,
- toolName: toolCall.function.name,
- args: toolCall.function.arguments
- })),
- finishReason: mapOpenAIFinishReason(choice.finish_reason),
- usage: {
- promptTokens: response.usage.prompt_tokens,
- completionTokens: response.usage.completion_tokens
- },
- rawCall: { rawPrompt, rawSettings },
- warnings: []
- };
- }
- async doStream(options) {
- const args = this.getArgs(options);
- const response = await postJsonToApi({
- url: `${this.config.baseUrl}/chat/completions`,
- headers: this.config.headers(),
- body: {
- ...args,
- stream: true
- },
- failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: createEventSourceResponseHandler(
- openaiChatChunkSchema
- ),
- abortSignal: options.abortSignal
- });
- const { messages: rawPrompt, ...rawSettings } = args;
- const toolCalls = [];
- let finishReason = "other";
- let usage = {
- promptTokens: Number.NaN,
- completionTokens: Number.NaN
- };
- return {
- stream: response.pipeThrough(
- new TransformStream({
- transform(chunk, controller) {
- var _a, _b, _c, _d, _e, _f, _g, _h, _i;
- if (!chunk.success) {
- controller.enqueue({ type: "error", error: chunk.error });
- return;
- }
- const value = chunk.value;
- if (value.usage != null) {
- usage = {
- promptTokens: value.usage.prompt_tokens,
- completionTokens: value.usage.completion_tokens
- };
- }
- const choice = value.choices[0];
- if ((choice == null ? void 0 : choice.finish_reason) != null) {
- finishReason = mapOpenAIFinishReason(choice.finish_reason);
- }
- if ((choice == null ? void 0 : choice.delta) == null) {
- return;
- }
- const delta = choice.delta;
- if (delta.content != null) {
- controller.enqueue({
- type: "text-delta",
- textDelta: delta.content
- });
- }
- if (delta.tool_calls != null) {
- for (const toolCallDelta of delta.tool_calls) {
- const index = toolCallDelta.index;
- if (toolCalls[index] == null) {
- if (toolCallDelta.type !== "function") {
- throw new InvalidResponseDataError({
- data: toolCallDelta,
- message: `Expected 'function' type.`
- });
- }
- if (toolCallDelta.id == null) {
- throw new InvalidResponseDataError({
- data: toolCallDelta,
- message: `Expected 'id' to be a string.`
- });
- }
- if (((_a = toolCallDelta.function) == null ? void 0 : _a.name) == null) {
- throw new InvalidResponseDataError({
- data: toolCallDelta,
- message: `Expected 'function.name' to be a string.`
- });
- }
- toolCalls[index] = {
- id: toolCallDelta.id,
- type: "function",
- function: {
- name: toolCallDelta.function.name,
- arguments: (_b = toolCallDelta.function.arguments) != null ? _b : ""
- }
- };
- continue;
- }
- const toolCall = toolCalls[index];
- if (((_c = toolCallDelta.function) == null ? void 0 : _c.arguments) != null) {
- toolCall.function.arguments += (_e = (_d = toolCallDelta.function) == null ? void 0 : _d.arguments) != null ? _e : "";
- }
- controller.enqueue({
- type: "tool-call-delta",
- toolCallType: "function",
- toolCallId: toolCall.id,
- toolName: toolCall.function.name,
- argsTextDelta: (_f = toolCallDelta.function.arguments) != null ? _f : ""
- });
- if (((_g = toolCall.function) == null ? void 0 : _g.name) == null || ((_h = toolCall.function) == null ? void 0 : _h.arguments) == null || !isParseableJson(toolCall.function.arguments)) {
- continue;
- }
- controller.enqueue({
- type: "tool-call",
- toolCallType: "function",
- toolCallId: (_i = toolCall.id) != null ? _i : generateId(),
- toolName: toolCall.function.name,
- args: toolCall.function.arguments
- });
- }
- }
- },
- flush(controller) {
- controller.enqueue({ type: "finish", finishReason, usage });
- }
- })
- ),
- rawCall: { rawPrompt, rawSettings },
- warnings: []
- };
- }
- };
- var openAIChatResponseSchema = import_zod2.z.object({
- choices: import_zod2.z.array(
- import_zod2.z.object({
- message: import_zod2.z.object({
- role: import_zod2.z.literal("assistant"),
- content: import_zod2.z.string().nullable(),
- tool_calls: import_zod2.z.array(
- import_zod2.z.object({
- id: import_zod2.z.string(),
- type: import_zod2.z.literal("function"),
- function: import_zod2.z.object({
- name: import_zod2.z.string(),
- arguments: import_zod2.z.string()
- })
- })
- ).optional()
- }),
- index: import_zod2.z.number(),
- finish_reason: import_zod2.z.string().optional().nullable()
- })
- ),
- object: import_zod2.z.literal("chat.completion"),
- usage: import_zod2.z.object({
- prompt_tokens: import_zod2.z.number(),
- completion_tokens: import_zod2.z.number()
- })
- });
- var openaiChatChunkSchema = import_zod2.z.object({
- object: import_zod2.z.literal("chat.completion.chunk"),
- choices: import_zod2.z.array(
- import_zod2.z.object({
- delta: import_zod2.z.object({
- role: import_zod2.z.enum(["assistant"]).optional(),
- content: import_zod2.z.string().nullable().optional(),
- tool_calls: import_zod2.z.array(
- import_zod2.z.object({
- index: import_zod2.z.number(),
- id: import_zod2.z.string().optional(),
- type: import_zod2.z.literal("function").optional(),
- function: import_zod2.z.object({
- name: import_zod2.z.string().optional(),
- arguments: import_zod2.z.string().optional()
- })
- })
- ).optional()
- }),
- finish_reason: import_zod2.z.string().nullable().optional(),
- index: import_zod2.z.number()
- })
- ),
- usage: import_zod2.z.object({
- prompt_tokens: import_zod2.z.number(),
- completion_tokens: import_zod2.z.number()
- }).optional().nullable()
- });
-
- // openai/openai-completion-language-model.ts
- var import_zod3 = require("zod");
-
- // openai/convert-to-openai-completion-prompt.ts
- function convertToOpenAICompletionPrompt({
- prompt: prompt2,
- inputFormat,
- user = "user",
- assistant = "assistant"
- }) {
- if (inputFormat === "prompt" && prompt2.length === 1 && prompt2[0].role === "user" && prompt2[0].content.length === 1 && prompt2[0].content[0].type === "text") {
- return { prompt: prompt2[0].content[0].text };
- }
- let text = "";
- if (prompt2[0].role === "system") {
- text += `${prompt2[0].content}
-
- `;
- prompt2 = prompt2.slice(1);
- }
- for (const { role, content } of prompt2) {
- switch (role) {
- case "system": {
- throw new InvalidPromptError({
- message: "Unexpected system message in prompt: ${content}",
- prompt: prompt2
- });
- }
- case "user": {
- const userMessage = content.map((part) => {
- switch (part.type) {
- case "text": {
- return part.text;
- }
- case "image": {
- throw new UnsupportedFunctionalityError({
- functionality: "images"
- });
- }
- }
- }).join("");
- text += `${user}:
- ${userMessage}
-
- `;
- break;
- }
- case "assistant": {
- const assistantMessage = content.map((part) => {
- switch (part.type) {
- case "text": {
- return part.text;
- }
- case "tool-call": {
- throw new UnsupportedFunctionalityError({
- functionality: "tool-call messages"
- });
- }
- }
- }).join("");
- text += `${assistant}:
- ${assistantMessage}
-
- `;
- break;
- }
- case "tool": {
- throw new UnsupportedFunctionalityError({
- functionality: "tool messages"
- });
- }
- default: {
- const _exhaustiveCheck = role;
- throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
- }
- }
- }
- text += `${assistant}:
- `;
- return {
- prompt: text,
- stopSequences: [`
- ${user}:`]
- };
- }
-
- // openai/openai-completion-language-model.ts
- var OpenAICompletionLanguageModel = class {
- constructor(modelId, settings, config) {
- this.specificationVersion = "v1";
- this.defaultObjectGenerationMode = void 0;
- this.modelId = modelId;
- this.settings = settings;
- this.config = config;
- }
- get provider() {
- return this.config.provider;
- }
- getArgs({
- mode,
- inputFormat,
- prompt: prompt2,
- maxTokens,
- temperature,
- topP,
- frequencyPenalty,
- presencePenalty,
- seed
- }) {
- var _a;
- const type = mode.type;
- const { prompt: completionPrompt, stopSequences } = convertToOpenAICompletionPrompt({ prompt: prompt2, inputFormat });
- const baseArgs = {
- // model id:
- model: this.modelId,
- // model specific settings:
- echo: this.settings.echo,
- logit_bias: this.settings.logitBias,
- suffix: this.settings.suffix,
- user: this.settings.user,
- // standardized settings:
- max_tokens: maxTokens,
- temperature: scale({
- value: temperature,
- outputMin: 0,
- outputMax: 2
- }),
- top_p: topP,
- frequency_penalty: scale({
- value: frequencyPenalty,
- inputMin: -1,
- inputMax: 1,
- outputMin: -2,
- outputMax: 2
- }),
- presence_penalty: scale({
- value: presencePenalty,
- inputMin: -1,
- inputMax: 1,
- outputMin: -2,
- outputMax: 2
- }),
- seed,
- // prompt:
- prompt: completionPrompt,
- // stop sequences:
- stop: stopSequences
- };
- switch (type) {
- case "regular": {
- if ((_a = mode.tools) == null ? void 0 : _a.length) {
- throw new UnsupportedFunctionalityError({
- functionality: "tools"
- });
- }
- return baseArgs;
- }
- case "object-json": {
- throw new UnsupportedFunctionalityError({
- functionality: "object-json mode"
- });
- }
- case "object-tool": {
- throw new UnsupportedFunctionalityError({
- functionality: "object-tool mode"
- });
- }
- case "object-grammar": {
- throw new UnsupportedFunctionalityError({
- functionality: "object-grammar mode"
- });
- }
- default: {
- const _exhaustiveCheck = type;
- throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
- }
- }
- }
- async doGenerate(options) {
- const args = this.getArgs(options);
- const response = await postJsonToApi({
- url: `${this.config.baseUrl}/completions`,
- headers: this.config.headers(),
- body: args,
- failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: createJsonResponseHandler(
- openAICompletionResponseSchema
- ),
- abortSignal: options.abortSignal
- });
- const { prompt: rawPrompt, ...rawSettings } = args;
- const choice = response.choices[0];
- return {
- text: choice.text,
- usage: {
- promptTokens: response.usage.prompt_tokens,
- completionTokens: response.usage.completion_tokens
- },
- finishReason: mapOpenAIFinishReason(choice.finish_reason),
- rawCall: { rawPrompt, rawSettings },
- warnings: []
- };
- }
- async doStream(options) {
- const args = this.getArgs(options);
- const response = await postJsonToApi({
- url: `${this.config.baseUrl}/completions`,
- headers: this.config.headers(),
- body: {
- ...this.getArgs(options),
- stream: true
- },
- failedResponseHandler: openaiFailedResponseHandler,
- successfulResponseHandler: createEventSourceResponseHandler(
- openaiCompletionChunkSchema
- ),
- abortSignal: options.abortSignal
- });
- const { prompt: rawPrompt, ...rawSettings } = args;
- let finishReason = "other";
- let usage = {
- promptTokens: Number.NaN,
- completionTokens: Number.NaN
- };
- return {
- stream: response.pipeThrough(
- new TransformStream({
- transform(chunk, controller) {
- if (!chunk.success) {
- controller.enqueue({ type: "error", error: chunk.error });
- return;
- }
- const value = chunk.value;
- if (value.usage != null) {
- usage = {
- promptTokens: value.usage.prompt_tokens,
- completionTokens: value.usage.completion_tokens
- };
- }
- const choice = value.choices[0];
- if ((choice == null ? void 0 : choice.finish_reason) != null) {
- finishReason = mapOpenAIFinishReason(choice.finish_reason);
- }
- if ((choice == null ? void 0 : choice.text) != null) {
- controller.enqueue({
- type: "text-delta",
- textDelta: choice.text
- });
- }
- },
- flush(controller) {
- controller.enqueue({ type: "finish", finishReason, usage });
- }
- })
- ),
- rawCall: { rawPrompt, rawSettings },
- warnings: []
- };
- }
- };
- var openAICompletionResponseSchema = import_zod3.z.object({
- choices: import_zod3.z.array(
- import_zod3.z.object({
- text: import_zod3.z.string(),
- finish_reason: import_zod3.z.string()
- })
- ),
- usage: import_zod3.z.object({
- prompt_tokens: import_zod3.z.number(),
- completion_tokens: import_zod3.z.number()
- })
- });
- var openaiCompletionChunkSchema = import_zod3.z.object({
- object: import_zod3.z.literal("text_completion"),
- choices: import_zod3.z.array(
- import_zod3.z.object({
- text: import_zod3.z.string(),
- finish_reason: import_zod3.z.enum(["stop", "length", "content_filter"]).optional().nullable(),
- index: import_zod3.z.number()
- })
- ),
- usage: import_zod3.z.object({
- prompt_tokens: import_zod3.z.number(),
- completion_tokens: import_zod3.z.number()
- }).optional().nullable()
- });
-
- // openai/openai-facade.ts
- var OpenAI = class {
- constructor(options = {}) {
- this.baseUrl = options.baseUrl;
- this.apiKey = options.apiKey;
- this.organization = options.organization;
- }
- get baseConfig() {
- var _a;
- return {
- organization: this.organization,
- baseUrl: (_a = this.baseUrl) != null ? _a : "https://api.openai.com/v1",
- headers: () => ({
- Authorization: `Bearer ${loadApiKey({
- apiKey: this.apiKey,
- environmentVariableName: "OPENAI_API_KEY",
- description: "OpenAI"
- })}`,
- "OpenAI-Organization": this.organization
- })
- };
- }
- chat(modelId, settings = {}) {
- return new OpenAIChatLanguageModel(modelId, settings, {
- provider: "openai.chat",
- ...this.baseConfig
- });
- }
- completion(modelId, settings = {}) {
- return new OpenAICompletionLanguageModel(modelId, settings, {
- provider: "openai.completion",
- ...this.baseConfig
- });
- }
- };
- var openai = new OpenAI();
- // Annotate the CommonJS export names for ESM import in node:
- 0 && (module.exports = {
- OpenAI,
- openai
- });
- //# sourceMappingURL=index.js.map