ai 3.0.12 → 3.0.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/README.md +1 -1
  2. package/ai-model-specification/dist/index.d.mts +704 -0
  3. package/ai-model-specification/dist/index.d.ts +704 -0
  4. package/ai-model-specification/dist/index.js +806 -0
  5. package/ai-model-specification/dist/index.js.map +1 -0
  6. package/ai-model-specification/dist/index.mjs +742 -0
  7. package/ai-model-specification/dist/index.mjs.map +1 -0
  8. package/dist/index.d.mts +686 -4
  9. package/dist/index.d.ts +686 -4
  10. package/dist/index.js +1723 -15
  11. package/dist/index.js.map +1 -1
  12. package/dist/index.mjs +1700 -15
  13. package/dist/index.mjs.map +1 -1
  14. package/mistral/dist/index.d.mts +367 -0
  15. package/mistral/dist/index.d.ts +367 -0
  16. package/mistral/dist/index.js +936 -0
  17. package/mistral/dist/index.js.map +1 -0
  18. package/mistral/dist/index.mjs +900 -0
  19. package/mistral/dist/index.mjs.map +1 -0
  20. package/openai/dist/index.d.mts +430 -0
  21. package/openai/dist/index.d.ts +430 -0
  22. package/openai/dist/index.js +1355 -0
  23. package/openai/dist/index.js.map +1 -0
  24. package/openai/dist/index.mjs +1319 -0
  25. package/openai/dist/index.mjs.map +1 -0
  26. package/package.json +33 -7
  27. package/prompts/dist/index.d.mts +13 -1
  28. package/prompts/dist/index.d.ts +13 -1
  29. package/prompts/dist/index.js +13 -0
  30. package/prompts/dist/index.js.map +1 -1
  31. package/prompts/dist/index.mjs +12 -0
  32. package/prompts/dist/index.mjs.map +1 -1
  33. package/react/dist/index.d.mts +8 -4
  34. package/react/dist/index.d.ts +8 -4
  35. package/react/dist/index.js +36 -34
  36. package/react/dist/index.js.map +1 -1
  37. package/react/dist/index.mjs +36 -34
  38. package/react/dist/index.mjs.map +1 -1
  39. package/rsc/dist/index.d.ts +45 -8
  40. package/rsc/dist/rsc-server.d.mts +45 -8
  41. package/rsc/dist/rsc-server.mjs +67 -13
  42. package/rsc/dist/rsc-server.mjs.map +1 -1
  43. package/rsc/dist/rsc-shared.d.mts +5 -8
  44. package/rsc/dist/rsc-shared.mjs +23 -2
  45. package/rsc/dist/rsc-shared.mjs.map +1 -1
  46. package/solid/dist/index.js +29 -27
  47. package/solid/dist/index.js.map +1 -1
  48. package/solid/dist/index.mjs +29 -27
  49. package/solid/dist/index.mjs.map +1 -1
  50. package/svelte/dist/index.js +31 -29
  51. package/svelte/dist/index.js.map +1 -1
  52. package/svelte/dist/index.mjs +31 -29
  53. package/svelte/dist/index.mjs.map +1 -1
  54. package/vue/dist/index.js +29 -27
  55. package/vue/dist/index.js.map +1 -1
  56. package/vue/dist/index.mjs +29 -27
  57. package/vue/dist/index.mjs.map +1 -1
@@ -0,0 +1,1355 @@
1
+ "use strict";
2
+ var __create = Object.create;
3
+ var __defProp = Object.defineProperty;
4
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
+ var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __getProtoOf = Object.getPrototypeOf;
7
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
8
+ var __export = (target, all) => {
9
+ for (var name in all)
10
+ __defProp(target, name, { get: all[name], enumerable: true });
11
+ };
12
+ var __copyProps = (to, from, except, desc) => {
13
+ if (from && typeof from === "object" || typeof from === "function") {
14
+ for (let key of __getOwnPropNames(from))
15
+ if (!__hasOwnProp.call(to, key) && key !== except)
16
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
17
+ }
18
+ return to;
19
+ };
20
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
21
+ // If the importer is in node compatibility mode or this is not an ESM
22
+ // file that has been converted to a CommonJS file using a Babel-
23
+ // compatible transform (i.e. "__esModule" has not been set), then set
24
+ // "default" to the CommonJS "module.exports" for node compatibility.
25
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
26
+ mod
27
+ ));
28
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
29
+
30
// openai/index.ts
// Public entry point of the bundled OpenAI provider package: exposes the
// `OpenAI` facade class and the default `openai` provider instance (both
// defined later in this bundle) via lazy getters on module.exports.
var openai_exports = {};
__export(openai_exports, {
  OpenAI: () => OpenAI,
  openai: () => openai
});
module.exports = __toCommonJS(openai_exports);
37
+
38
// ai-model-specification/errors/api-call-error.ts
// Error describing a failed HTTP call to a provider API. Carries the full
// request/response context plus a retryability hint used by retry logic.
var APICallError = class extends Error {
  constructor({
    message,
    url,
    requestBodyValues,
    statusCode,
    responseBody,
    cause,
    isRetryable,
    data
  }) {
    super(message);
    this.name = "AI_APICallError";
    this.url = url;
    this.requestBodyValues = requestBodyValues;
    this.statusCode = statusCode;
    this.responseBody = responseBody;
    this.cause = cause;
    // Default retryability: request timeout (408), conflict (409),
    // rate limit (429), and all server errors (>= 500).
    if (isRetryable === void 0) {
      isRetryable = statusCode != null && (statusCode === 408 || statusCode === 409 || statusCode === 429 || statusCode >= 500);
    }
    this.isRetryable = isRetryable;
    this.data = data;
  }
  // Duck-typed guard so instances survive bundling/duplication of this class.
  static isAPICallError(error) {
    if (!(error instanceof Error) || error.name !== "AI_APICallError") {
      return false;
    }
    const candidate = error;
    return typeof candidate.url === "string" && typeof candidate.requestBodyValues === "object" && (candidate.statusCode == null || typeof candidate.statusCode === "number") && (candidate.responseBody == null || typeof candidate.responseBody === "string") && (candidate.cause == null || typeof candidate.cause === "object") && typeof candidate.isRetryable === "boolean" && (candidate.data == null || typeof candidate.data === "object");
  }
  // Serializable snapshot (Error fields are not enumerable by default).
  toJSON() {
    const { name, message, url, requestBodyValues, statusCode, responseBody, cause, isRetryable, data } = this;
    return { name, message, url, requestBodyValues, statusCode, responseBody, cause, isRetryable, data };
  }
};
81
+
82
// ai-model-specification/errors/invalid-prompt-error.ts
// Thrown when a caller-supplied prompt does not match the expected shape.
var InvalidPromptError = class extends Error {
  constructor({ prompt, message }) {
    super(`Invalid prompt: ${message}`);
    this.name = "AI_InvalidPromptError";
    this.prompt = prompt;
  }
  static isInvalidPromptError(error) {
    // BUG FIX: previously tested the bare identifier `prompt` (a
    // ReferenceError under "use strict" in Node, window.prompt in browsers)
    // instead of the error instance's own `prompt` property.
    return error instanceof Error && error.name === "AI_InvalidPromptError" && error.prompt != null;
  }
  toJSON() {
    return {
      name: this.name,
      message: this.message,
      stack: this.stack,
      prompt: this.prompt
    };
  }
};
101
+
102
// ai-model-specification/errors/invalid-response-data-error.ts
// Raised when the provider returns structurally invalid response data
// (e.g. a malformed streaming tool-call chunk).
var InvalidResponseDataError = class extends Error {
  constructor({
    data,
    message = `Invalid response data: ${JSON.stringify(data)}.`
  }) {
    super(message);
    this.name = "AI_InvalidResponseDataError";
    this.data = data;
  }
  static isInvalidResponseDataError(error) {
    if (!(error instanceof Error)) {
      return false;
    }
    return error.name === "AI_InvalidResponseDataError" && error.data != null;
  }
  toJSON() {
    const { name, message, stack, data } = this;
    return { name, message, stack, data };
  }
};
124
+
125
// ai-model-specification/util/generate-id.ts
var import_non_secure = require("nanoid/non-secure");
// 7-character alphanumeric ID generator. Non-cryptographic by design —
// used for synthesized identifiers (e.g. fallback tool-call IDs), not for
// anything security-sensitive.
var generateId = (0, import_non_secure.customAlphabet)(
  "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
  7
);
131
+
132
// ai-model-specification/util/get-error-message.ts
// Best-effort conversion of an unknown thrown value into a readable
// message: null/undefined -> "unknown error", strings pass through,
// Error instances yield their .message, anything else is JSON-encoded.
function getErrorMessage(error) {
  if (error == null) {
    return "unknown error";
  }
  if (typeof error === "string") {
    return error;
  }
  return error instanceof Error ? error.message : JSON.stringify(error);
}
145
+
146
// ai-model-specification/errors/load-api-key-error.ts
// Raised when an API key cannot be resolved from the call arguments or the
// process environment.
var LoadAPIKeyError = class extends Error {
  constructor({ message }) {
    super(message);
    this.name = "AI_LoadAPIKeyError";
  }
  static isLoadAPIKeyError(error) {
    if (!(error instanceof Error)) {
      return false;
    }
    return error.name === "AI_LoadAPIKeyError";
  }
  toJSON() {
    const { name, message } = this;
    return { name, message };
  }
};
162
+
163
// ai-model-specification/util/load-api-key.ts
/**
 * Resolve an API key: prefer the explicit `apiKey` argument, otherwise fall
 * back to the environment variable `environmentVariableName`.
 *
 * @param {string|undefined} apiKey - explicit key; wins when provided
 * @param {string} environmentVariableName - env var consulted as fallback
 * @param {string} apiKeyParameterName - parameter name used in error text
 * @param {string} description - provider name used in error text
 * @returns {string} the resolved API key
 * @throws {LoadAPIKeyError} when no key can be resolved or it is not a string
 */
function loadApiKey({
  apiKey,
  environmentVariableName,
  apiKeyParameterName = "apiKey",
  description
}) {
  if (typeof apiKey === "string") {
    return apiKey;
  }
  if (apiKey != null) {
    throw new LoadAPIKeyError({
      message: `${description} API key must be a string.`
    });
  }
  // Edge runtimes without `process` cannot read environment variables.
  if (typeof process === "undefined") {
    // FIX: "Environment variables is" -> "are" (grammar in user-facing message).
    throw new LoadAPIKeyError({
      message: `${description} API key is missing. Pass it using the '${apiKeyParameterName}' parameter. Environment variables are not supported in this environment.`
    });
  }
  apiKey = process.env[environmentVariableName];
  if (apiKey == null) {
    throw new LoadAPIKeyError({
      message: `${description} API key is missing. Pass it using the '${apiKeyParameterName}' parameter or the ${environmentVariableName} environment variable.`
    });
  }
  // Defensive: env values are strings in Node, but guard for exotic hosts.
  if (typeof apiKey !== "string") {
    throw new LoadAPIKeyError({
      message: `${description} API key must be a string. The value of the ${environmentVariableName} environment variable is not a string.`
    });
  }
  return apiKey;
}
196
+
197
+ // ai-model-specification/util/parse-json.ts
198
+ var import_secure_json_parse = __toESM(require("secure-json-parse"));
199
+
200
// ai-model-specification/errors/json-parse-error.ts
// Wraps any failure to parse a JSON string; keeps the offending text and
// the underlying thrown value as `cause`.
var JSONParseError = class extends Error {
  constructor({ text, cause }) {
    super(
      `JSON parsing failed: Text: ${text}.
Error message: ${getErrorMessage(cause)}`
    );
    this.name = "AI_JSONParseError";
    this.cause = cause;
    this.text = text;
  }
  static isJSONParseError(error) {
    // FIX: `cause` holds the originally thrown value (usually a SyntaxError,
    // not a string), so the old `typeof error.cause === "string"` condition
    // rejected genuine JSONParseError instances — which made parseJSON()
    // re-wrap errors it had already wrapped. Only check the properties this
    // class actually guarantees.
    return error instanceof Error && error.name === "AI_JSONParseError" && typeof error.text === "string";
  }
  toJSON() {
    return {
      name: this.name,
      message: this.message,
      cause: this.cause,
      stack: this.stack,
      valueText: this.text
    };
  }
};
224
+
225
// ai-model-specification/errors/type-validation-error.ts
// Wraps a schema-validation failure; keeps the offending value and the
// validator's own error as `cause`.
var TypeValidationError = class extends Error {
  constructor({ value, cause }) {
    super(
      `Type validation failed: Value: ${JSON.stringify(value)}.
Error message: ${getErrorMessage(cause)}`
    );
    this.name = "AI_TypeValidationError";
    this.cause = cause;
    this.value = value;
  }
  static isTypeValidationError(error) {
    // FIX: `value` can be any JSON value and `cause` is typically an Error
    // (e.g. a ZodError), so the old guard requiring both to be strings
    // almost never matched — which made safeValidateTypes() double-wrap
    // errors it had already wrapped. Check only the identifying name.
    return error instanceof Error && error.name === "AI_TypeValidationError";
  }
  toJSON() {
    return {
      name: this.name,
      message: this.message,
      cause: this.cause,
      stack: this.stack,
      value: this.value
    };
  }
};
249
+
250
// ai-model-specification/util/validate-types.ts
// Validate `value` against a zod-like schema and return the parsed result;
// wraps any schema failure in a TypeValidationError.
function validateTypes({ value, schema }) {
  try {
    return schema.parse(value);
  } catch (cause) {
    throw new TypeValidationError({ value, cause });
  }
}
261
// Non-throwing variant of validateTypes: returns a result object of shape
// { success: true, value } or { success: false, error: TypeValidationError }.
function safeValidateTypes({ value, schema }) {
  try {
    const result = schema.safeParse(value);
    if (result.success) {
      return { success: true, value: result.data };
    }
    return {
      success: false,
      error: new TypeValidationError({ value, cause: result.error })
    };
  } catch (error) {
    // Avoid double-wrapping errors that are already TypeValidationErrors.
    const wrapped = TypeValidationError.isTypeValidationError(error) ? error : new TypeValidationError({ value, cause: error });
    return { success: false, error: wrapped };
  }
}
287
+
288
// ai-model-specification/util/parse-json.ts
// Parse `text` with secure-json-parse (prototype-pollution safe) and, when
// a schema is given, validate the parsed value against it.
// Throws JSONParseError for parse failures; re-throws already-wrapped
// parse/validation errors unchanged.
function parseJSON({ text, schema }) {
  try {
    const value = import_secure_json_parse.default.parse(text);
    return schema == null ? value : validateTypes({ value, schema });
  } catch (error) {
    const alreadyWrapped = JSONParseError.isJSONParseError(error) || TypeValidationError.isTypeValidationError(error);
    if (alreadyWrapped) {
      throw error;
    }
    throw new JSONParseError({ text, cause: error });
  }
}
306
// Non-throwing variant of parseJSON: returns { success: true, value } or
// { success: false, error: JSONParseError | TypeValidationError }.
function safeParseJSON({ text, schema }) {
  try {
    const value = import_secure_json_parse.default.parse(text);
    if (schema == null) {
      return { success: true, value };
    }
    return safeValidateTypes({ value, schema });
  } catch (error) {
    // Avoid double-wrapping errors that are already JSONParseErrors.
    const wrapped = JSONParseError.isJSONParseError(error) ? error : new JSONParseError({ text, cause: error });
    return { success: false, error: wrapped };
  }
}
326
// Returns true when `input` parses as JSON without throwing.
function isParseableJson(input) {
  try {
    import_secure_json_parse.default.parse(input);
    return true;
  } catch (error) {
    return false;
  }
}
334
+
335
// ai-model-specification/util/post-to-api.ts
// POST a JSON payload: serializes `body`, forces the JSON content type, and
// delegates to postToApi while keeping the raw values for error reporting.
var postJsonToApi = async ({
  url,
  headers,
  body,
  failedResponseHandler,
  successfulResponseHandler,
  abortSignal
}) => {
  const jsonHeaders = {
    ...headers,
    "Content-Type": "application/json"
  };
  return postToApi({
    url,
    headers: jsonHeaders,
    body: {
      content: JSON.stringify(body),
      values: body
    },
    failedResponseHandler,
    successfulResponseHandler,
    abortSignal
  });
};
357
// Low-level POST helper: sends `body.content`, routes the response to the
// success or failure handler, and normalizes transport-level failures to
// APICallError. Abort errors and existing APICallErrors always pass
// through unchanged.
var postToApi = async ({
  url,
  headers = {},
  body,
  successfulResponseHandler,
  failedResponseHandler,
  abortSignal
}) => {
  try {
    // Drop headers whose value is null/undefined so fetch never sends them.
    const definedHeaders = Object.fromEntries(
      Object.entries(headers).filter(([_key, value]) => value != null)
    );
    const response = await fetch(url, {
      method: "POST",
      headers: definedHeaders,
      body: body.content,
      signal: abortSignal
    });
    if (!response.ok) {
      // The failure handler *returns* the error to throw; if the handler
      // itself fails, wrap that secondary failure instead.
      try {
        throw await failedResponseHandler({
          response,
          url,
          requestBodyValues: body.values
        });
      } catch (error) {
        if (error instanceof Error) {
          if (error.name === "AbortError" || APICallError.isAPICallError(error)) {
            throw error;
          }
        }
        throw new APICallError({
          message: "Failed to process error response",
          cause: error,
          statusCode: response.status,
          url,
          requestBodyValues: body.values
        });
      }
    }
    try {
      return await successfulResponseHandler({
        response,
        url,
        requestBodyValues: body.values
      });
    } catch (error) {
      // Same pass-through rules as above for the success handler.
      if (error instanceof Error) {
        if (error.name === "AbortError" || APICallError.isAPICallError(error)) {
          throw error;
        }
      }
      throw new APICallError({
        message: "Failed to process successful response",
        cause: error,
        statusCode: response.status,
        url,
        requestBodyValues: body.values
      });
    }
  } catch (error) {
    if (error instanceof Error) {
      if (error.name === "AbortError") {
        throw error;
      }
    }
    // Node's fetch (undici) reports network-level failures as
    // TypeError("fetch failed") with the real error in `cause`; surface it
    // as a retryable APICallError.
    if (error instanceof TypeError && error.message === "fetch failed") {
      const cause = error.cause;
      if (cause != null) {
        throw new APICallError({
          message: `Cannot connect to API: ${cause.message}`,
          cause,
          url,
          requestBodyValues: body.values,
          isRetryable: true
          // retry when network error
        });
      }
    }
    throw error;
  }
};
439
+
440
+ // ai-model-specification/util/response-handler.ts
441
+ var import_stream = require("eventsource-parser/stream");
442
+
443
// ai-model-specification/errors/no-response-body-error.ts
// Raised when a streaming endpoint responds without a readable body.
var NoResponseBodyError = class extends Error {
  constructor({ message = "No response body" } = {}) {
    super(message);
    this.name = "AI_NoResponseBodyError";
  }
  static isNoResponseBodyError(error) {
    if (!(error instanceof Error)) {
      return false;
    }
    return error.name === "AI_NoResponseBodyError";
  }
  toJSON() {
    const { name, message, stack } = this;
    return { name, message, stack };
  }
};
460
+
461
// ai-model-specification/util/response-handler.ts
// Build a failure handler that converts an error response into an
// APICallError (returned, not thrown — postToApi throws it). The body is
// parsed with `errorSchema` when possible; otherwise the HTTP status text
// becomes the message.
var createJsonErrorResponseHandler = ({
  errorSchema,
  errorToMessage,
  isRetryable
}) => async ({ response, url, requestBodyValues }) => {
  const responseBody = await response.text();
  const common = {
    url,
    requestBodyValues,
    statusCode: response.status,
    responseBody
  };
  // Some providers send empty error bodies; fall back to the status text.
  if (responseBody.trim() === "") {
    return new APICallError({
      message: response.statusText,
      ...common,
      isRetryable: isRetryable == null ? void 0 : isRetryable(response)
    });
  }
  try {
    const parsedError = parseJSON({
      text: responseBody,
      schema: errorSchema
    });
    return new APICallError({
      message: errorToMessage(parsedError),
      ...common,
      data: parsedError,
      isRetryable: isRetryable == null ? void 0 : isRetryable(response, parsedError)
    });
  } catch (parseError) {
    // Unparseable error body: keep the raw text, use the status text.
    return new APICallError({
      message: response.statusText,
      ...common,
      isRetryable: isRetryable == null ? void 0 : isRetryable(response)
    });
  }
};
503
// Build a success handler for SSE streams: decodes bytes to text, splits
// into server-sent events, drops the "[DONE]" sentinel, and JSON-parses
// each event's data against `chunkSchema` (as safeParseJSON results).
var createEventSourceResponseHandler = (chunkSchema) => async ({ response }) => {
  if (response.body == null) {
    throw new NoResponseBodyError();
  }
  const parseEvent = ({ data }, controller) => {
    if (data === "[DONE]") {
      return;
    }
    controller.enqueue(
      safeParseJSON({ text: data, schema: chunkSchema })
    );
  };
  return response.body.pipeThrough(new TextDecoderStream()).pipeThrough(new import_stream.EventSourceParserStream()).pipeThrough(new TransformStream({ transform: parseEvent }));
};
523
// Build a success handler that reads the body as text and validates it
// against `responseSchema`; invalid JSON becomes an APICallError carrying
// the validation error as its cause.
var createJsonResponseHandler = (responseSchema) => async ({ response, url, requestBodyValues }) => {
  const responseBody = await response.text();
  const parsedResult = safeParseJSON({
    text: responseBody,
    schema: responseSchema
  });
  if (parsedResult.success) {
    return parsedResult.value;
  }
  throw new APICallError({
    message: "Invalid JSON response",
    cause: parsedResult.error,
    statusCode: response.status,
    responseBody,
    url,
    requestBodyValues
  });
};
541
+
542
// ai-model-specification/util/scale.ts
// Linearly map `value` from [inputMin, inputMax] (default [0, 1]) onto
// [outputMin, outputMax]. An undefined value passes through untouched so
// optional settings stay optional.
function scale({
  inputMin = 0,
  inputMax = 1,
  outputMin,
  outputMax,
  value
}) {
  if (value === void 0) {
    return void 0;
  }
  const inputSpan = inputMax - inputMin;
  const outputSpan = outputMax - outputMin;
  return (value - inputMin) * outputSpan / inputSpan + outputMin;
}
557
+
558
// ai-model-specification/util/uint8-utils.ts
// Encode a byte array as base64 by building the latin-1 string that
// globalThis.btoa expects (each byte becomes one code point <= 255).
function convertUint8ArrayToBase64(array) {
  const chars = [];
  for (const byte of array) {
    chars.push(String.fromCodePoint(byte));
  }
  return globalThis.btoa(chars.join(""));
}
566
+
567
// ai-model-specification/errors/unsupported-functionality-error.ts
// Raised when a provider cannot satisfy a requested capability
// (e.g. grammar-constrained generation on OpenAI).
var UnsupportedFunctionalityError = class extends Error {
  constructor({ provider, functionality }) {
    super(
      `Functionality not supported by the provider. Provider: ${provider}.
Functionality: ${functionality}`
    );
    this.name = "AI_UnsupportedFunctionalityError";
    this.provider = provider;
    this.functionality = functionality;
  }
  static isUnsupportedFunctionalityError(error) {
    if (!(error instanceof Error) || error.name !== "AI_UnsupportedFunctionalityError") {
      return false;
    }
    return typeof error.provider === "string" && typeof error.functionality === "string";
  }
  toJSON() {
    const { name, message, stack, provider, functionality } = this;
    return { name, message, stack, provider, functionality };
  }
};
594
+
595
+ // openai/openai-chat-language-model.ts
596
+ var import_zod2 = require("zod");
597
+
598
// openai/convert-to-openai-chat-messages.ts
// Translate the provider-agnostic prompt into OpenAI's chat message shape:
// system/user/assistant/tool roles, tool_calls on assistant messages, and
// image parts as image_url entries (data URLs for raw bytes).
function convertToOpenAIChatMessages(prompt2) {
  const messages = [];
  for (const { role, content } of prompt2) {
    if (role === "system") {
      messages.push({ role: "system", content });
    } else if (role === "user") {
      const parts = content.map((part) => {
        if (part.type === "text") {
          return { type: "text", text: part.text };
        }
        if (part.type === "image") {
          // URLs pass through; raw bytes become a base64 data URL
          // (mime type defaults to image/jpeg).
          const url = part.image instanceof URL ? part.image.toString() : `data:${part.mimeType != null ? part.mimeType : "image/jpeg"};base64,${convertUint8ArrayToBase64(part.image)}`;
          return { type: "image_url", image_url: { url } };
        }
        // Unknown part types map to undefined (mirrors the original switch).
      });
      messages.push({ role: "user", content: parts });
    } else if (role === "assistant") {
      let text = "";
      const toolCalls = [];
      for (const part of content) {
        if (part.type === "text") {
          text += part.text;
        } else if (part.type === "tool-call") {
          toolCalls.push({
            id: part.toolCallId,
            type: "function",
            function: {
              name: part.toolName,
              arguments: JSON.stringify(part.args)
            }
          });
        } else {
          throw new Error(`Unsupported part: ${part}`);
        }
      }
      messages.push({
        role: "assistant",
        content: text,
        tool_calls: toolCalls.length > 0 ? toolCalls : void 0
      });
    } else if (role === "tool") {
      // Each tool result becomes its own "tool" message.
      for (const toolResponse of content) {
        messages.push({
          role: "tool",
          tool_call_id: toolResponse.toolCallId,
          content: JSON.stringify(toolResponse.result)
        });
      }
    } else {
      throw new Error(`Unsupported role: ${role}`);
    }
  }
  return messages;
}
680
+
681
// openai/map-openai-finish-reason.ts
// Map OpenAI's finish_reason strings onto the standardized finish reasons;
// anything unrecognized (including null/undefined) becomes "other".
function mapOpenAIFinishReason(finishReason) {
  if (finishReason === "stop") {
    return "stop";
  }
  if (finishReason === "length") {
    return "length";
  }
  if (finishReason === "content_filter") {
    return "content-filter";
  }
  // Both legacy function calling and modern tool calling map to tool-calls.
  if (finishReason === "function_call" || finishReason === "tool_calls") {
    return "tool-calls";
  }
  return "other";
}
697
+
698
// openai/openai-error.ts
var import_zod = require("zod");
// Shape of OpenAI's error payload: { error: { message, type, param, code } }.
var openAIErrorDataSchema = import_zod.z.object({
  error: import_zod.z.object({
    message: import_zod.z.string(),
    type: import_zod.z.string(),
    param: import_zod.z.any().nullable(),
    code: import_zod.z.string().nullable()
  })
});
// Failure handler shared by all OpenAI endpoints in this bundle: surfaces
// the server-side error message on the resulting APICallError.
var openaiFailedResponseHandler = createJsonErrorResponseHandler({
  errorSchema: openAIErrorDataSchema,
  errorToMessage: (data) => data.error.message
});
712
+
713
// openai/openai-chat-language-model.ts
// Language-model implementation for OpenAI's /chat/completions endpoint,
// conforming to model specification "v1" (doGenerate + doStream).
var OpenAIChatLanguageModel = class {
  constructor(modelId, settings, config) {
    this.specificationVersion = "v1";
    // Object generation defaults to tool-based forcing (function calling).
    this.defaultObjectGenerationMode = "tool";
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
  // Build the JSON request body for /chat/completions from the standardized
  // call options: scales temperature/penalties from normalized ranges into
  // OpenAI's ranges and converts the prompt into OpenAI chat messages.
  getArgs({
    mode,
    prompt: prompt2,
    maxTokens,
    temperature,
    topP,
    frequencyPenalty,
    presencePenalty,
    seed
  }) {
    var _a;
    const type = mode.type;
    const baseArgs = {
      // model id:
      model: this.modelId,
      // model specific settings:
      logit_bias: this.settings.logitBias,
      user: this.settings.user,
      // standardized settings:
      max_tokens: maxTokens,
      // normalized [0, 1] -> OpenAI [0, 2]
      temperature: scale({
        value: temperature,
        outputMin: 0,
        outputMax: 2
      }),
      top_p: topP,
      // normalized [-1, 1] -> OpenAI [-2, 2]
      frequency_penalty: scale({
        value: frequencyPenalty,
        inputMin: -1,
        inputMax: 1,
        outputMin: -2,
        outputMax: 2
      }),
      presence_penalty: scale({
        value: presencePenalty,
        inputMin: -1,
        inputMax: 1,
        outputMin: -2,
        outputMax: 2
      }),
      seed,
      // messages:
      messages: convertToOpenAIChatMessages(prompt2)
    };
    switch (type) {
      // Plain generation; tools are passed through when present.
      case "regular": {
        const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
        return {
          ...baseArgs,
          tools: tools == null ? void 0 : tools.map((tool) => ({
            type: "function",
            function: {
              name: tool.name,
              description: tool.description,
              parameters: tool.parameters
            }
          }))
        };
      }
      // JSON-object generation via response_format.
      case "object-json": {
        return {
          ...baseArgs,
          response_format: { type: "json_object" }
        };
      }
      // Object generation by forcing a single tool call.
      case "object-tool": {
        return {
          ...baseArgs,
          tool_choice: { type: "function", function: { name: mode.tool.name } },
          tools: [{ type: "function", function: mode.tool }]
        };
      }
      // Grammar-constrained generation is not available on OpenAI.
      case "object-grammar": {
        throw new UnsupportedFunctionalityError({
          functionality: "object-grammar mode",
          provider: this.provider
        });
      }
      default: {
        const _exhaustiveCheck = type;
        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
      }
    }
  }
  // Single (non-streaming) completion call. Returns the standardized
  // generation result (text, tool calls, finish reason, token usage).
  async doGenerate(options) {
    var _a, _b;
    const args = this.getArgs(options);
    const response = await postJsonToApi({
      url: `${this.config.baseUrl}/chat/completions`,
      headers: this.config.headers(),
      body: args,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(
        openAIChatResponseSchema
      ),
      abortSignal: options.abortSignal
    });
    // Split the request body for raw-call reporting: messages vs settings.
    const { messages: rawPrompt, ...rawSettings } = args;
    const choice = response.choices[0];
    return {
      text: (_a = choice.message.content) != null ? _a : void 0,
      toolCalls: (_b = choice.message.tool_calls) == null ? void 0 : _b.map((toolCall) => ({
        toolCallType: "function",
        toolCallId: toolCall.id,
        toolName: toolCall.function.name,
        args: toolCall.function.arguments
      })),
      finishReason: mapOpenAIFinishReason(choice.finish_reason),
      usage: {
        promptTokens: response.usage.prompt_tokens,
        completionTokens: response.usage.completion_tokens
      },
      rawCall: { rawPrompt, rawSettings },
      warnings: []
    };
  }
  // Streaming completion call: returns a stream of standardized events
  // (text deltas, tool-call deltas, completed tool calls, finish).
  async doStream(options) {
    const args = this.getArgs(options);
    const response = await postJsonToApi({
      url: `${this.config.baseUrl}/chat/completions`,
      headers: this.config.headers(),
      body: {
        ...args,
        stream: true
      },
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createEventSourceResponseHandler(
        openaiChatChunkSchema
      ),
      abortSignal: options.abortSignal
    });
    const { messages: rawPrompt, ...rawSettings } = args;
    // Accumulates partial tool calls across chunks, keyed by delta index.
    const toolCalls = [];
    let finishReason = "other";
    let usage = {
      promptTokens: Number.NaN,
      completionTokens: Number.NaN
    };
    return {
      stream: response.pipeThrough(
        new TransformStream({
          transform(chunk, controller) {
            var _a, _b, _c, _d, _e, _f, _g, _h, _i;
            // chunk is a safeParseJSON result; forward parse failures.
            if (!chunk.success) {
              controller.enqueue({ type: "error", error: chunk.error });
              return;
            }
            const value = chunk.value;
            if (value.usage != null) {
              usage = {
                promptTokens: value.usage.prompt_tokens,
                completionTokens: value.usage.completion_tokens
              };
            }
            const choice = value.choices[0];
            if ((choice == null ? void 0 : choice.finish_reason) != null) {
              finishReason = mapOpenAIFinishReason(choice.finish_reason);
            }
            if ((choice == null ? void 0 : choice.delta) == null) {
              return;
            }
            const delta = choice.delta;
            if (delta.content != null) {
              controller.enqueue({
                type: "text-delta",
                textDelta: delta.content
              });
            }
            if (delta.tool_calls != null) {
              for (const toolCallDelta of delta.tool_calls) {
                const index = toolCallDelta.index;
                // First chunk for this tool call must carry type/id/name;
                // later chunks only append argument text.
                if (toolCalls[index] == null) {
                  if (toolCallDelta.type !== "function") {
                    throw new InvalidResponseDataError({
                      data: toolCallDelta,
                      message: `Expected 'function' type.`
                    });
                  }
                  if (toolCallDelta.id == null) {
                    throw new InvalidResponseDataError({
                      data: toolCallDelta,
                      message: `Expected 'id' to be a string.`
                    });
                  }
                  if (((_a = toolCallDelta.function) == null ? void 0 : _a.name) == null) {
                    throw new InvalidResponseDataError({
                      data: toolCallDelta,
                      message: `Expected 'function.name' to be a string.`
                    });
                  }
                  toolCalls[index] = {
                    id: toolCallDelta.id,
                    type: "function",
                    function: {
                      name: toolCallDelta.function.name,
                      arguments: (_b = toolCallDelta.function.arguments) != null ? _b : ""
                    }
                  };
                  continue;
                }
                const toolCall = toolCalls[index];
                if (((_c = toolCallDelta.function) == null ? void 0 : _c.arguments) != null) {
                  toolCall.function.arguments += (_e = (_d = toolCallDelta.function) == null ? void 0 : _d.arguments) != null ? _e : "";
                }
                controller.enqueue({
                  type: "tool-call-delta",
                  toolCallType: "function",
                  toolCallId: toolCall.id,
                  toolName: toolCall.function.name,
                  argsTextDelta: (_f = toolCallDelta.function.arguments) != null ? _f : ""
                });
                // Emit the completed tool call once the accumulated
                // arguments form parseable JSON.
                if (((_g = toolCall.function) == null ? void 0 : _g.name) == null || ((_h = toolCall.function) == null ? void 0 : _h.arguments) == null || !isParseableJson(toolCall.function.arguments)) {
                  continue;
                }
                controller.enqueue({
                  type: "tool-call",
                  toolCallType: "function",
                  toolCallId: (_i = toolCall.id) != null ? _i : generateId(),
                  toolName: toolCall.function.name,
                  args: toolCall.function.arguments
                });
              }
            }
          },
          // Emit the final finish event with the last-seen reason and usage.
          flush(controller) {
            controller.enqueue({ type: "finish", finishReason, usage });
          }
        })
      ),
      rawCall: { rawPrompt, rawSettings },
      warnings: []
    };
  }
};
959
// Zod schema for a complete (non-streaming) OpenAI chat completion response.
// Deliberately limited to the fields this module reads, so additional
// response properties from the API are tolerated.
var openAIChatResponseSchema = import_zod2.z.object({
  object: import_zod2.z.literal("chat.completion"),
  choices: import_zod2.z.array(
    import_zod2.z.object({
      index: import_zod2.z.number(),
      // finish_reason may be absent or null depending on the response.
      finish_reason: import_zod2.z.string().optional().nullable(),
      message: import_zod2.z.object({
        role: import_zod2.z.literal("assistant"),
        // content is null when the model responds with tool calls only.
        content: import_zod2.z.string().nullable(),
        tool_calls: import_zod2.z.array(
          import_zod2.z.object({
            id: import_zod2.z.string(),
            type: import_zod2.z.literal("function"),
            function: import_zod2.z.object({
              name: import_zod2.z.string(),
              // arguments arrive as a JSON-encoded string, not an object.
              arguments: import_zod2.z.string()
            })
          })
        ).optional()
      })
    })
  ),
  usage: import_zod2.z.object({
    prompt_tokens: import_zod2.z.number(),
    completion_tokens: import_zod2.z.number()
  })
});
986
// Zod schema for a single streamed OpenAI chat completion chunk
// ("chat.completion.chunk" SSE events). Nearly every field is optional
// because deltas carry only the pieces that changed.
var openaiChatChunkSchema = import_zod2.z.object({
  object: import_zod2.z.literal("chat.completion.chunk"),
  choices: import_zod2.z.array(
    import_zod2.z.object({
      index: import_zod2.z.number(),
      finish_reason: import_zod2.z.string().nullable().optional(),
      delta: import_zod2.z.object({
        role: import_zod2.z.enum(["assistant"]).optional(),
        content: import_zod2.z.string().nullable().optional(),
        tool_calls: import_zod2.z.array(
          import_zod2.z.object({
            // index identifies which tool call this fragment belongs to.
            index: import_zod2.z.number(),
            // id/type/name appear on the first fragment of a tool call only.
            id: import_zod2.z.string().optional(),
            type: import_zod2.z.literal("function").optional(),
            function: import_zod2.z.object({
              name: import_zod2.z.string().optional(),
              // partial JSON text, accumulated across chunks by the caller.
              arguments: import_zod2.z.string().optional()
            })
          })
        ).optional()
      })
    })
  ),
  // usage is only present on the final chunk (and may be null).
  usage: import_zod2.z.object({
    prompt_tokens: import_zod2.z.number(),
    completion_tokens: import_zod2.z.number()
  }).optional().nullable()
});
1014
+
1015
+ // openai/openai-completion-language-model.ts
1016
+ var import_zod3 = require("zod");
1017
+
1018
+ // openai/convert-to-openai-completion-prompt.ts
1019
/**
 * Converts a provider-neutral message prompt into a single plain-text
 * prompt for the OpenAI completions (non-chat) API, rendered as a
 * "user:/assistant:" transcript.
 *
 * @param prompt2 - array of messages ({ role, content }); only text content
 *   is supported here (images and tool messages throw).
 * @param inputFormat - "prompt" passes a lone single-text user message
 *   through verbatim; otherwise a transcript is rendered.
 * @param provider - provider id, used in UnsupportedFunctionalityError.
 * @param user - label for user turns (default "user").
 * @param assistant - label for assistant turns (default "assistant").
 * @returns {{ prompt: string, stopSequences?: string[] }} the rendered
 *   prompt plus a stop sequence that halts generation before the model
 *   starts fabricating the next user turn.
 * @throws InvalidPromptError on a non-leading system message.
 * @throws UnsupportedFunctionalityError on images / tool content.
 */
function convertToOpenAICompletionPrompt({
  prompt: prompt2,
  inputFormat,
  provider,
  user = "user",
  assistant = "assistant"
}) {
  // Fast path: a raw single-text user prompt is forwarded unchanged.
  if (inputFormat === "prompt" && prompt2.length === 1 && prompt2[0].role === "user" && prompt2[0].content.length === 1 && prompt2[0].content[0].type === "text") {
    return { prompt: prompt2[0].content[0].text };
  }
  let text = "";
  // A leading system message becomes a preamble before the transcript.
  if (prompt2[0].role === "system") {
    text += `${prompt2[0].content}

`;
    prompt2 = prompt2.slice(1);
  }
  for (const { role, content } of prompt2) {
    switch (role) {
      case "system": {
        // FIX: message was a double-quoted string, so "${content}" was
        // emitted literally instead of being interpolated.
        throw new InvalidPromptError({
          message: `Unexpected system message in prompt: ${content}`,
          prompt: prompt2
        });
      }
      case "user": {
        const userMessage = content.map((part) => {
          switch (part.type) {
            case "text": {
              return part.text;
            }
            case "image": {
              throw new UnsupportedFunctionalityError({
                provider,
                functionality: "images"
              });
            }
          }
        }).join("");
        text += `${user}:
${userMessage}

`;
        break;
      }
      case "assistant": {
        const assistantMessage = content.map((part) => {
          switch (part.type) {
            case "text": {
              return part.text;
            }
            case "tool-call": {
              throw new UnsupportedFunctionalityError({
                provider,
                functionality: "tool-call messages"
              });
            }
          }
        }).join("");
        text += `${assistant}:
${assistantMessage}

`;
        break;
      }
      case "tool": {
        throw new UnsupportedFunctionalityError({
          provider,
          functionality: "tool messages"
        });
      }
      default: {
        // exhaustiveness guard for unexpected roles
        const _exhaustiveCheck = role;
        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
      }
    }
  }
  // Leave the transcript open on an assistant turn for the model to complete.
  text += `${assistant}:
`;
  return {
    prompt: text,
    // Stop before the model invents the next user turn.
    stopSequences: [`
${user}:`]
  };
}
1104
+
1105
+ // openai/openai-completion-language-model.ts
1106
// Language-model wrapper for the OpenAI completions (non-chat) API.
var OpenAICompletionLanguageModel = class {
  /**
   * @param modelId - OpenAI completion model id.
   * @param settings - model-specific settings (echo, logitBias, suffix, user).
   * @param config - provider config: { provider, baseUrl, headers() }.
   */
  constructor(modelId, settings, config) {
    this.specificationVersion = "v1";
    // Completion models have no structured object generation mode.
    this.defaultObjectGenerationMode = void 0;
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
  /**
   * Builds the JSON request body for the completions endpoint from
   * standardized call options. Throws UnsupportedFunctionalityError for
   * tools and all object-generation modes, which this API lacks.
   */
  getArgs({
    mode,
    inputFormat,
    prompt: prompt2,
    maxTokens,
    temperature,
    topP,
    frequencyPenalty,
    presencePenalty,
    seed
  }) {
    var _a;
    const type = mode.type;
    const { prompt: completionPrompt, stopSequences } = convertToOpenAICompletionPrompt({
      prompt: prompt2,
      inputFormat,
      provider: this.provider
    });
    const baseArgs = {
      // model id:
      model: this.modelId,
      // model specific settings:
      echo: this.settings.echo,
      logit_bias: this.settings.logitBias,
      suffix: this.settings.suffix,
      user: this.settings.user,
      // standardized settings:
      max_tokens: maxTokens,
      // temperature is scaled into OpenAI's 0..2 range:
      temperature: scale({
        value: temperature,
        outputMin: 0,
        outputMax: 2
      }),
      top_p: topP,
      // penalties are scaled from the standardized -1..1 into OpenAI's -2..2:
      frequency_penalty: scale({
        value: frequencyPenalty,
        inputMin: -1,
        inputMax: 1,
        outputMin: -2,
        outputMax: 2
      }),
      presence_penalty: scale({
        value: presencePenalty,
        inputMin: -1,
        inputMax: 1,
        outputMin: -2,
        outputMax: 2
      }),
      seed,
      // prompt:
      prompt: completionPrompt,
      // stop sequences:
      stop: stopSequences
    };
    switch (type) {
      case "regular": {
        if ((_a = mode.tools) == null ? void 0 : _a.length) {
          throw new UnsupportedFunctionalityError({
            functionality: "tools",
            provider: this.provider
          });
        }
        return baseArgs;
      }
      case "object-json": {
        throw new UnsupportedFunctionalityError({
          functionality: "object-json mode",
          provider: this.provider
        });
      }
      case "object-tool": {
        throw new UnsupportedFunctionalityError({
          functionality: "object-tool mode",
          provider: this.provider
        });
      }
      case "object-grammar": {
        throw new UnsupportedFunctionalityError({
          functionality: "object-grammar mode",
          provider: this.provider
        });
      }
      default: {
        // exhaustiveness guard for unexpected mode types
        const _exhaustiveCheck = type;
        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
      }
    }
  }
  /** Non-streaming generation: one POST, returns text + usage + finish reason. */
  async doGenerate(options) {
    const args = this.getArgs(options);
    const response = await postJsonToApi({
      url: `${this.config.baseUrl}/completions`,
      headers: this.config.headers(),
      body: args,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(
        openAICompletionResponseSchema
      ),
      abortSignal: options.abortSignal
    });
    // Split the prompt out of the args for raw-call reporting.
    const { prompt: rawPrompt, ...rawSettings } = args;
    const choice = response.choices[0];
    return {
      text: choice.text,
      usage: {
        promptTokens: response.usage.prompt_tokens,
        completionTokens: response.usage.completion_tokens
      },
      finishReason: mapOpenAIFinishReason(choice.finish_reason),
      rawCall: { rawPrompt, rawSettings },
      warnings: []
    };
  }
  /** Streaming generation: SSE stream mapped to text-delta/finish parts. */
  async doStream(options) {
    const args = this.getArgs(options);
    const response = await postJsonToApi({
      url: `${this.config.baseUrl}/completions`,
      headers: this.config.headers(),
      // FIX: reuse the already-computed args instead of calling
      // this.getArgs(options) a second time (redundant work; also keeps
      // doStream consistent with doGenerate).
      body: {
        ...args,
        stream: true
      },
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createEventSourceResponseHandler(
        openaiCompletionChunkSchema
      ),
      abortSignal: options.abortSignal
    });
    const { prompt: rawPrompt, ...rawSettings } = args;
    let finishReason = "other";
    // Usage stays NaN unless a chunk carrying usage arrives.
    let usage = {
      promptTokens: Number.NaN,
      completionTokens: Number.NaN
    };
    return {
      stream: response.pipeThrough(
        new TransformStream({
          transform(chunk, controller) {
            // Schema-validation failures are surfaced as error parts.
            if (!chunk.success) {
              controller.enqueue({ type: "error", error: chunk.error });
              return;
            }
            const value = chunk.value;
            if (value.usage != null) {
              usage = {
                promptTokens: value.usage.prompt_tokens,
                completionTokens: value.usage.completion_tokens
              };
            }
            const choice = value.choices[0];
            if ((choice == null ? void 0 : choice.finish_reason) != null) {
              finishReason = mapOpenAIFinishReason(choice.finish_reason);
            }
            if ((choice == null ? void 0 : choice.text) != null) {
              controller.enqueue({
                type: "text-delta",
                textDelta: choice.text
              });
            }
          },
          flush(controller) {
            // Emit the accumulated finish reason and usage once the SSE ends.
            controller.enqueue({ type: "finish", finishReason, usage });
          }
        })
      ),
      rawCall: { rawPrompt, rawSettings },
      warnings: []
    };
  }
};
1287
// Zod schema for a complete (non-streaming) OpenAI completions response.
// Only the fields this module consumes are declared.
var openAICompletionResponseSchema = import_zod3.z.object({
  choices: import_zod3.z.array(
    import_zod3.z.object({
      // the generated completion text for this choice
      text: import_zod3.z.string(),
      finish_reason: import_zod3.z.string()
    })
  ),
  usage: import_zod3.z.object({
    prompt_tokens: import_zod3.z.number(),
    completion_tokens: import_zod3.z.number()
  })
});
1299
// Zod schema for a streamed OpenAI completions chunk ("text_completion"
// SSE events).
var openaiCompletionChunkSchema = import_zod3.z.object({
  object: import_zod3.z.literal("text_completion"),
  choices: import_zod3.z.array(
    import_zod3.z.object({
      index: import_zod3.z.number(),
      text: import_zod3.z.string(),
      // only present (non-null) on the terminating chunk of a choice
      finish_reason: import_zod3.z.enum(["stop", "length", "content_filter"]).optional().nullable(),
    })
  ),
  // usage appears only on the final chunk, when at all
  usage: import_zod3.z.object({
    prompt_tokens: import_zod3.z.number(),
    completion_tokens: import_zod3.z.number()
  }).optional().nullable()
});
1313
+
1314
+ // openai/openai-facade.ts
1315
// Facade for constructing OpenAI chat / completion language models that
// share one base configuration.
var OpenAI = class {
  /**
   * @param options.baseUrl - API base URL; defaults to the public OpenAI API.
   * @param options.apiKey - API key; resolved from OPENAI_API_KEY when omitted.
   * @param options.organization - optional OpenAI organization id.
   */
  constructor({ baseUrl, apiKey, organization } = {}) {
    this.baseUrl = baseUrl;
    this.apiKey = apiKey;
    this.organization = organization;
  }
  // Shared configuration (url + lazy auth headers) for all model instances.
  get baseConfig() {
    const resolvedBaseUrl = this.baseUrl != null ? this.baseUrl : "https://api.openai.com/v1";
    // headers() is a closure so the API key is only resolved per request,
    // and a missing key fails at call time rather than at construction.
    const headers = () => {
      const authToken = loadApiKey({
        apiKey: this.apiKey,
        environmentVariableName: "OPENAI_API_KEY",
        description: "OpenAI"
      });
      return {
        Authorization: `Bearer ${authToken}`,
        "OpenAI-Organization": this.organization
      };
    };
    return {
      organization: this.organization,
      baseUrl: resolvedBaseUrl,
      headers
    };
  }
  /** Creates a chat language model for the given model id. */
  chat(modelId, settings = {}) {
    return new OpenAIChatLanguageModel(modelId, settings, {
      provider: "openai.chat",
      ...this.baseConfig
    });
  }
  /** Creates a (legacy) completion language model for the given model id. */
  completion(modelId, settings = {}) {
    return new OpenAICompletionLanguageModel(modelId, settings, {
      provider: "openai.completion",
      ...this.baseConfig
    });
  }
};
1349
// Default provider instance; configuration (API key) is resolved from the
// environment at request time.
var openai = new OpenAI();
// Annotate the CommonJS export names for ESM import in node:
// (intentionally dead code: the `0 &&` guard prevents execution, but
// Node's cjs-module-lexer parses it statically to expose named exports)
0 && (module.exports = {
  OpenAI,
  openai
});
//# sourceMappingURL=index.js.map