ai 3.0.12 → 3.0.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/README.md +1 -1
  2. package/ai-model-specification/dist/index.d.mts +704 -0
  3. package/ai-model-specification/dist/index.d.ts +704 -0
  4. package/ai-model-specification/dist/index.js +806 -0
  5. package/ai-model-specification/dist/index.js.map +1 -0
  6. package/ai-model-specification/dist/index.mjs +742 -0
  7. package/ai-model-specification/dist/index.mjs.map +1 -0
  8. package/dist/index.d.mts +686 -4
  9. package/dist/index.d.ts +686 -4
  10. package/dist/index.js +1723 -15
  11. package/dist/index.js.map +1 -1
  12. package/dist/index.mjs +1700 -15
  13. package/dist/index.mjs.map +1 -1
  14. package/mistral/dist/index.d.mts +367 -0
  15. package/mistral/dist/index.d.ts +367 -0
  16. package/mistral/dist/index.js +936 -0
  17. package/mistral/dist/index.js.map +1 -0
  18. package/mistral/dist/index.mjs +900 -0
  19. package/mistral/dist/index.mjs.map +1 -0
  20. package/openai/dist/index.d.mts +430 -0
  21. package/openai/dist/index.d.ts +430 -0
  22. package/openai/dist/index.js +1355 -0
  23. package/openai/dist/index.js.map +1 -0
  24. package/openai/dist/index.mjs +1319 -0
  25. package/openai/dist/index.mjs.map +1 -0
  26. package/package.json +33 -7
  27. package/prompts/dist/index.d.mts +13 -1
  28. package/prompts/dist/index.d.ts +13 -1
  29. package/prompts/dist/index.js +13 -0
  30. package/prompts/dist/index.js.map +1 -1
  31. package/prompts/dist/index.mjs +12 -0
  32. package/prompts/dist/index.mjs.map +1 -1
  33. package/react/dist/index.d.mts +8 -4
  34. package/react/dist/index.d.ts +8 -4
  35. package/react/dist/index.js +36 -34
  36. package/react/dist/index.js.map +1 -1
  37. package/react/dist/index.mjs +36 -34
  38. package/react/dist/index.mjs.map +1 -1
  39. package/rsc/dist/index.d.ts +45 -8
  40. package/rsc/dist/rsc-server.d.mts +45 -8
  41. package/rsc/dist/rsc-server.mjs +67 -13
  42. package/rsc/dist/rsc-server.mjs.map +1 -1
  43. package/rsc/dist/rsc-shared.d.mts +5 -8
  44. package/rsc/dist/rsc-shared.mjs +23 -2
  45. package/rsc/dist/rsc-shared.mjs.map +1 -1
  46. package/solid/dist/index.js +29 -27
  47. package/solid/dist/index.js.map +1 -1
  48. package/solid/dist/index.mjs +29 -27
  49. package/solid/dist/index.mjs.map +1 -1
  50. package/svelte/dist/index.js +31 -29
  51. package/svelte/dist/index.js.map +1 -1
  52. package/svelte/dist/index.mjs +31 -29
  53. package/svelte/dist/index.mjs.map +1 -1
  54. package/vue/dist/index.js +29 -27
  55. package/vue/dist/index.js.map +1 -1
  56. package/vue/dist/index.mjs +29 -27
  57. package/vue/dist/index.mjs.map +1 -1
@@ -0,0 +1,1319 @@
1
// ai-model-specification/errors/api-call-error.ts
var APICallError = class extends Error {
  /**
   * Error raised for a failed HTTP call against a provider API.
   * Carries the request/response context for debugging, plus a retry hint:
   * `isRetryable` defaults to true for 408 (timeout), 409 (conflict),
   * 429 (rate limit) and every 5xx status code.
   */
  constructor({
    message,
    url,
    requestBodyValues,
    statusCode,
    responseBody,
    cause,
    isRetryable = statusCode != null && (statusCode === 408 || statusCode === 409 || statusCode === 429 || statusCode >= 500),
    data
  }) {
    super(message);
    this.name = "AI_APICallError";
    // request context
    this.url = url;
    this.requestBodyValues = requestBodyValues;
    // response context
    this.statusCode = statusCode;
    this.responseBody = responseBody;
    this.cause = cause;
    this.isRetryable = isRetryable;
    // parsed error payload, if any
    this.data = data;
  }
  // Duck-typed guard: checks name + field shapes so it also recognizes
  // instances coming from another bundled copy of this class.
  static isAPICallError(error) {
    if (!(error instanceof Error) || error.name !== "AI_APICallError") {
      return false;
    }
    const candidate = error;
    return typeof candidate.url === "string" && typeof candidate.requestBodyValues === "object" && (candidate.statusCode == null || typeof candidate.statusCode === "number") && (candidate.responseBody == null || typeof candidate.responseBody === "string") && (candidate.cause == null || typeof candidate.cause === "object") && typeof candidate.isRetryable === "boolean" && (candidate.data == null || typeof candidate.data === "object");
  }
  // Serializable view (Error fields are not JSON-enumerable by default).
  toJSON() {
    const { name, message, url, requestBodyValues, statusCode, responseBody, cause, isRetryable, data } = this;
    return { name, message, url, requestBodyValues, statusCode, responseBody, cause, isRetryable, data };
  }
};
44
+
45
// ai-model-specification/errors/invalid-prompt-error.ts
var InvalidPromptError = class extends Error {
  /**
   * Error raised when a caller-supplied prompt is structurally invalid.
   * The offending prompt value is kept on the error for diagnostics.
   */
  constructor({ prompt: prompt2, message }) {
    super(`Invalid prompt: ${message}`);
    this.name = "AI_InvalidPromptError";
    this.prompt = prompt2;
  }
  static isInvalidPromptError(error) {
    // BUG FIX: previously checked the bare identifier `prompt`, which is a
    // ReferenceError in Node and `window.prompt` (always non-null) in
    // browsers. The guard must inspect the error's own `prompt` field.
    return error instanceof Error && error.name === "AI_InvalidPromptError" && error.prompt != null;
  }
  // Serializable view (Error fields are not JSON-enumerable by default).
  toJSON() {
    return {
      name: this.name,
      message: this.message,
      stack: this.stack,
      prompt: this.prompt
    };
  }
};
64
+
65
// ai-model-specification/errors/invalid-response-data-error.ts
var InvalidResponseDataError = class extends Error {
  /**
   * Error raised when the server returned structurally unexpected data
   * (e.g. a malformed streaming tool-call delta). The raw data is attached.
   */
  constructor({
    data,
    message = `Invalid response data: ${JSON.stringify(data)}.`
  }) {
    super(message);
    this.name = "AI_InvalidResponseDataError";
    this.data = data;
  }
  static isInvalidResponseDataError(error) {
    if (!(error instanceof Error)) {
      return false;
    }
    return error.name === "AI_InvalidResponseDataError" && error.data != null;
  }
  // Serializable view (Error fields are not JSON-enumerable by default).
  toJSON() {
    const { name, message, stack, data } = this;
    return { name, message, stack, data };
  }
};
87
+
88
// ai-model-specification/util/generate-id.ts
import { customAlphabet } from "nanoid/non-secure";
// 7-character alphanumeric ID generator. Uses the non-secure nanoid RNG:
// these IDs are identifiers (e.g. fallback tool-call IDs in streaming
// responses), not secrets.
var generateId = customAlphabet(
  "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
  7
);
94
+
95
// ai-model-specification/util/get-error-message.ts
/**
 * Best-effort extraction of a human-readable message from an unknown
 * thrown value: null/undefined -> "unknown error", strings pass through,
 * Error instances yield their message, anything else is JSON-stringified.
 */
function getErrorMessage(error) {
  if (error == null) return "unknown error";
  if (typeof error === "string") return error;
  if (error instanceof Error) return error.message;
  return JSON.stringify(error);
}
108
+
109
// ai-model-specification/errors/load-api-key-error.ts
var LoadAPIKeyError = class extends Error {
  // Error raised when an API key cannot be resolved from the call site
  // or the environment (see loadApiKey).
  constructor({ message }) {
    super(message);
    this.name = "AI_LoadAPIKeyError";
  }
  static isLoadAPIKeyError(error) {
    if (!(error instanceof Error)) {
      return false;
    }
    return error.name === "AI_LoadAPIKeyError";
  }
  // Serializable view (Error fields are not JSON-enumerable by default).
  toJSON() {
    const { name, message } = this;
    return { name, message };
  }
};
125
+
126
// ai-model-specification/util/load-api-key.ts
/**
 * Resolves an API key: an explicitly passed string wins; otherwise the key
 * is read from `process.env[environmentVariableName]`. Every failure mode
 * throws a LoadAPIKeyError with a message that tells the caller how to fix it.
 */
function loadApiKey({
  apiKey,
  environmentVariableName,
  apiKeyParameterName = "apiKey",
  description
}) {
  // Explicit string key: use as-is.
  if (typeof apiKey === "string") {
    return apiKey;
  }
  // Explicit but non-string key: reject rather than silently coerce.
  if (apiKey != null) {
    throw new LoadAPIKeyError({
      message: `${description} API key must be a string.`
    });
  }
  // No `process` global (e.g. some edge/browser runtimes): env lookup impossible.
  if (typeof process === "undefined") {
    throw new LoadAPIKeyError({
      message: `${description} API key is missing. Pass it using the '${apiKeyParameterName}' parameter. Environment variables is not supported in this environment.`
    });
  }
  const envApiKey = process.env[environmentVariableName];
  if (envApiKey == null) {
    throw new LoadAPIKeyError({
      message: `${description} API key is missing. Pass it using the '${apiKeyParameterName}' parameter or the ${environmentVariableName} environment variable.`
    });
  }
  // Defensive: env values should always be strings, but verify.
  if (typeof envApiKey !== "string") {
    throw new LoadAPIKeyError({
      message: `${description} API key must be a string. The value of the ${environmentVariableName} environment variable is not a string.`
    });
  }
  return envApiKey;
}
159
+
160
+ // ai-model-specification/util/parse-json.ts
161
+ import SecureJSON from "secure-json-parse";
162
+
163
// ai-model-specification/errors/json-parse-error.ts
var JSONParseError = class extends Error {
  // Wraps a failure to parse `text` as JSON; `cause` is the underlying error.
  constructor({ text, cause }) {
    super(
      `JSON parsing failed: Text: ${text}.
Error message: ${getErrorMessage(cause)}`
    );
    this.name = "AI_JSONParseError";
    this.cause = cause;
    this.text = text;
  }
  static isJSONParseError(error) {
    // NOTE(review): this guard requires `cause` to be a string, but the
    // constructor is routinely called with an Error object as `cause`
    // (see parseJSON/safeParseJSON below), so the guard rejects most real
    // JSONParseError instances — confirm whether the check is intentional.
    return error instanceof Error && error.name === "AI_JSONParseError" && typeof error.text === "string" && typeof error.cause === "string";
  }
  // Serializable view; the original text is exposed as `valueText`.
  toJSON() {
    return {
      name: this.name,
      message: this.message,
      cause: this.cause,
      stack: this.stack,
      valueText: this.text
    };
  }
};
187
+
188
// ai-model-specification/errors/type-validation-error.ts
var TypeValidationError = class extends Error {
  // Wraps a schema-validation failure for `value`; `cause` is the
  // underlying validation error (e.g. a ZodError).
  constructor({ value, cause }) {
    super(
      `Type validation failed: Value: ${JSON.stringify(value)}.
Error message: ${getErrorMessage(cause)}`
    );
    this.name = "AI_TypeValidationError";
    this.cause = cause;
    this.value = value;
  }
  static isTypeValidationError(error) {
    // NOTE(review): requires `value` and `cause` to be strings, yet the
    // constructor accepts arbitrary values and is called with Error objects
    // as `cause` (see validateTypes/safeValidateTypes), so this guard will
    // reject most real instances — confirm whether that is intended.
    return error instanceof Error && error.name === "AI_TypeValidationError" && typeof error.value === "string" && typeof error.cause === "string";
  }
  // Serializable view (Error fields are not JSON-enumerable by default).
  toJSON() {
    return {
      name: this.name,
      message: this.message,
      cause: this.cause,
      stack: this.stack,
      value: this.value
    };
  }
};
212
+
213
// ai-model-specification/util/validate-types.ts
/**
 * Validates `value` against a zod-style `schema` and returns the parsed
 * result. Any validation failure is rethrown as a TypeValidationError.
 */
function validateTypes({
  value,
  schema
}) {
  try {
    const parsed = schema.parse(value);
    return parsed;
  } catch (error) {
    throw new TypeValidationError({ value, cause: error });
  }
}
224
/**
 * Non-throwing variant of validateTypes: returns a discriminated result,
 * `{ success: true, value }` on success or `{ success: false, error }` with
 * a TypeValidationError on failure. Exceptions thrown by `safeParse` itself
 * are also converted into a failure result.
 */
function safeValidateTypes({
  value,
  schema
}) {
  try {
    const result = schema.safeParse(value);
    return result.success
      ? { success: true, value: result.data }
      : {
          success: false,
          error: new TypeValidationError({ value, cause: result.error })
        };
  } catch (error) {
    const validationError = TypeValidationError.isTypeValidationError(error)
      ? error
      : new TypeValidationError({ value, cause: error });
    return { success: false, error: validationError };
  }
}
250
+
251
// ai-model-specification/util/parse-json.ts
/**
 * Parses `text` as JSON (prototype-pollution-safe via secure-json-parse)
 * and, when a `schema` is given, validates the parsed value against it.
 * Throws JSONParseError for parse failures and TypeValidationError for
 * validation failures; already-wrapped errors are rethrown unchanged.
 */
function parseJSON({
  text,
  schema
}) {
  try {
    const value = SecureJSON.parse(text);
    return schema == null ? value : validateTypes({ value, schema });
  } catch (error) {
    const alreadyWrapped = JSONParseError.isJSONParseError(error) || TypeValidationError.isTypeValidationError(error);
    if (alreadyWrapped) {
      throw error;
    }
    throw new JSONParseError({ text, cause: error });
  }
}
269
/**
 * Non-throwing variant of parseJSON: returns `{ success: true, value }` or
 * `{ success: false, error }` where the error is a JSONParseError (parse
 * failure) or a TypeValidationError (schema failure, via safeValidateTypes).
 */
function safeParseJSON({
  text,
  schema
}) {
  try {
    const value = SecureJSON.parse(text);
    return schema == null
      ? { success: true, value }
      : safeValidateTypes({ value, schema });
  } catch (error) {
    const parseError = JSONParseError.isJSONParseError(error)
      ? error
      : new JSONParseError({ text, cause: error });
    return { success: false, error: parseError };
  }
}
289
// Returns true when `input` is syntactically valid JSON (per secure-json-parse),
// false otherwise. Never throws.
function isParseableJson(input) {
  try {
    SecureJSON.parse(input);
  } catch (e) {
    return false;
  }
  return true;
}
297
+
298
// ai-model-specification/util/post-to-api.ts
/**
 * Convenience wrapper around postToApi for JSON payloads: serializes `body`,
 * sets the Content-Type header, and forwards everything else unchanged. The
 * original (unserialized) body is kept as `values` for error reporting.
 */
var postJsonToApi = async ({
  url,
  headers,
  body,
  failedResponseHandler,
  successfulResponseHandler,
  abortSignal
}) => {
  const jsonHeaders = {
    ...headers,
    "Content-Type": "application/json"
  };
  return postToApi({
    url,
    headers: jsonHeaders,
    body: {
      content: JSON.stringify(body),
      values: body
    },
    failedResponseHandler,
    successfulResponseHandler,
    abortSignal
  });
};
320
/**
 * POSTs `body.content` to `url` with null-valued headers stripped, then
 * dispatches the response to `failedResponseHandler` (non-2xx) or
 * `successfulResponseHandler` (2xx). Handler failures are wrapped in
 * APICallError; AbortError and existing APICallErrors pass through
 * unchanged; "fetch failed" network errors become retryable APICallErrors.
 */
var postToApi = async ({
  url,
  headers = {},
  body,
  successfulResponseHandler,
  failedResponseHandler,
  abortSignal
}) => {
  try {
    // Drop headers whose value is null/undefined so fetch never sees them.
    const definedHeaders = Object.fromEntries(
      Object.entries(headers).filter(([_key, value]) => value != null)
    );
    const response = await fetch(url, {
      method: "POST",
      headers: definedHeaders,
      body: body.content,
      signal: abortSignal
    });
    if (!response.ok) {
      // The failure handler *returns* an error; throwing it here routes it
      // through the catch below so handler bugs are wrapped consistently.
      try {
        throw await failedResponseHandler({
          response,
          url,
          requestBodyValues: body.values
        });
      } catch (error) {
        if (error instanceof Error) {
          // Aborts and already-wrapped API errors propagate as-is.
          if (error.name === "AbortError" || APICallError.isAPICallError(error)) {
            throw error;
          }
        }
        throw new APICallError({
          message: "Failed to process error response",
          cause: error,
          statusCode: response.status,
          url,
          requestBodyValues: body.values
        });
      }
    }
    try {
      return await successfulResponseHandler({
        response,
        url,
        requestBodyValues: body.values
      });
    } catch (error) {
      if (error instanceof Error) {
        if (error.name === "AbortError" || APICallError.isAPICallError(error)) {
          throw error;
        }
      }
      throw new APICallError({
        message: "Failed to process successful response",
        cause: error,
        statusCode: response.status,
        url,
        requestBodyValues: body.values
      });
    }
  } catch (error) {
    if (error instanceof Error) {
      if (error.name === "AbortError") {
        throw error;
      }
    }
    // undici signals network-level failures as TypeError("fetch failed")
    // with the real cause attached.
    if (error instanceof TypeError && error.message === "fetch failed") {
      const cause = error.cause;
      if (cause != null) {
        throw new APICallError({
          message: `Cannot connect to API: ${cause.message}`,
          cause,
          url,
          requestBodyValues: body.values,
          isRetryable: true
          // retry when network error
        });
      }
    }
    throw error;
  }
};
402
+
403
+ // ai-model-specification/util/response-handler.ts
404
+ import {
405
+ EventSourceParserStream
406
+ } from "eventsource-parser/stream";
407
+
408
// ai-model-specification/errors/no-response-body-error.ts
var NoResponseBodyError = class extends Error {
  // Error raised when a streaming response arrives without a body.
  constructor({ message = "No response body" } = {}) {
    super(message);
    this.name = "AI_NoResponseBodyError";
  }
  static isNoResponseBodyError(error) {
    if (!(error instanceof Error)) {
      return false;
    }
    return error.name === "AI_NoResponseBodyError";
  }
  // Serializable view (Error fields are not JSON-enumerable by default).
  toJSON() {
    const { name, message, stack } = this;
    return { name, message, stack };
  }
};
425
+
426
// ai-model-specification/util/response-handler.ts
/**
 * Builds a failed-response handler that parses the error body with
 * `errorSchema` and turns it into an APICallError whose message comes from
 * `errorToMessage`. Empty or unparseable bodies fall back to the HTTP
 * statusText. The optional `isRetryable` callback decides retryability
 * (with the parsed error when available).
 */
var createJsonErrorResponseHandler = ({
  errorSchema,
  errorToMessage,
  isRetryable
}) => async ({ response, url, requestBodyValues }) => {
  const responseBody = await response.text();
  // Fallback used when there is nothing parseable to work with.
  const buildFallbackError = () => new APICallError({
    message: response.statusText,
    url,
    requestBodyValues,
    statusCode: response.status,
    responseBody,
    isRetryable: isRetryable == null ? void 0 : isRetryable(response)
  });
  // Some providers return empty bodies (e.g. on gateway errors).
  if (responseBody.trim() === "") {
    return buildFallbackError();
  }
  try {
    const parsedError = parseJSON({
      text: responseBody,
      schema: errorSchema
    });
    return new APICallError({
      message: errorToMessage(parsedError),
      url,
      requestBodyValues,
      statusCode: response.status,
      responseBody,
      data: parsedError,
      isRetryable: isRetryable == null ? void 0 : isRetryable(response, parsedError)
    });
  } catch (parseError) {
    return buildFallbackError();
  }
};
468
/**
 * Builds a successful-response handler for SSE streams: decodes the body,
 * splits it into server-sent events, drops the terminal "[DONE]" marker,
 * and emits each data payload as a safeParseJSON result against
 * `chunkSchema`. Throws NoResponseBodyError when the response has no body.
 */
var createEventSourceResponseHandler = (chunkSchema) => async ({ response }) => {
  if (response.body == null) {
    throw new NoResponseBodyError();
  }
  const parseChunks = new TransformStream({
    transform({ data }, controller) {
      // "[DONE]" is the SSE end-of-stream sentinel, not a JSON chunk.
      if (data === "[DONE]") {
        return;
      }
      controller.enqueue(
        safeParseJSON({
          text: data,
          schema: chunkSchema
        })
      );
    }
  });
  return response.body
    .pipeThrough(new TextDecoderStream())
    .pipeThrough(new EventSourceParserStream())
    .pipeThrough(parseChunks);
};
488
/**
 * Builds a successful-response handler that reads the body as text,
 * validates it against `responseSchema`, and returns the parsed value.
 * Validation failures are raised as APICallError with the raw body attached.
 */
var createJsonResponseHandler = (responseSchema) => async ({ response, url, requestBodyValues }) => {
  const responseBody = await response.text();
  const parsed = safeParseJSON({
    text: responseBody,
    schema: responseSchema
  });
  if (parsed.success) {
    return parsed.value;
  }
  throw new APICallError({
    message: "Invalid JSON response",
    cause: parsed.error,
    statusCode: response.status,
    responseBody,
    url,
    requestBodyValues
  });
};
506
+
507
// ai-model-specification/util/scale.ts
/**
 * Linearly maps `value` from [inputMin, inputMax] (default [0, 1]) onto
 * [outputMin, outputMax]. Returns undefined when `value` is undefined so
 * optional settings can be forwarded untouched.
 */
function scale({
  inputMin = 0,
  inputMax = 1,
  outputMin,
  outputMax,
  value
}) {
  if (value === void 0) {
    return void 0;
  }
  const spanIn = inputMax - inputMin;
  const spanOut = outputMax - outputMin;
  return (value - inputMin) * spanOut / spanIn + outputMin;
}
522
+
523
// ai-model-specification/util/uint8-utils.ts
// Encodes a byte array as base64 by building the equivalent latin-1 string
// (one char per byte) and passing it to btoa.
function convertUint8ArrayToBase64(array) {
  const chars = [];
  for (const byte of array) {
    chars.push(String.fromCodePoint(byte));
  }
  return globalThis.btoa(chars.join(""));
}
531
+
532
// ai-model-specification/errors/unsupported-functionality-error.ts
var UnsupportedFunctionalityError = class extends Error {
  // Error raised when a provider does not implement a requested capability
  // (e.g. image inputs, tool messages, a generation mode).
  constructor({
    provider,
    functionality
  }) {
    super(`Functionality not supported by the provider. Provider: ${provider}.\nFunctionality: ${functionality}`);
    this.name = "AI_UnsupportedFunctionalityError";
    this.provider = provider;
    this.functionality = functionality;
  }
  static isUnsupportedFunctionalityError(error) {
    if (!(error instanceof Error) || error.name !== "AI_UnsupportedFunctionalityError") {
      return false;
    }
    return typeof error.provider === "string" && typeof error.functionality === "string";
  }
  // Serializable view (Error fields are not JSON-enumerable by default).
  toJSON() {
    const { name, message, stack, provider, functionality } = this;
    return { name, message, stack, provider, functionality };
  }
};
559
+
560
+ // openai/openai-chat-language-model.ts
561
+ import { z as z2 } from "zod";
562
+
563
// openai/convert-to-openai-chat-messages.ts
/**
 * Converts a standardized prompt (list of {role, content} messages) into the
 * OpenAI chat-completions message format. Images are passed through as URLs
 * or inlined as base64 data URLs; assistant tool calls and tool results are
 * mapped to `tool_calls` / role "tool" messages.
 */
function convertToOpenAIChatMessages(prompt2) {
  const messages = [];
  for (const { role, content } of prompt2) {
    switch (role) {
      case "system": {
        messages.push({ role: "system", content });
        break;
      }
      case "user": {
        const userParts = content.map((part) => {
          switch (part.type) {
            case "text": {
              return { type: "text", text: part.text };
            }
            case "image": {
              // URLs are forwarded as-is; raw bytes become a data URL.
              if (part.image instanceof URL) {
                return {
                  type: "image_url",
                  image_url: { url: part.image.toString() }
                };
              }
              const mimeType = part.mimeType != null ? part.mimeType : "image/jpeg";
              return {
                type: "image_url",
                image_url: {
                  url: `data:${mimeType};base64,${convertUint8ArrayToBase64(part.image)}`
                }
              };
            }
          }
        });
        messages.push({ role: "user", content: userParts });
        break;
      }
      case "assistant": {
        // Concatenate text parts; collect tool calls separately.
        let assistantText = "";
        const assistantToolCalls = [];
        for (const part of content) {
          if (part.type === "text") {
            assistantText += part.text;
          } else if (part.type === "tool-call") {
            assistantToolCalls.push({
              id: part.toolCallId,
              type: "function",
              function: {
                name: part.toolName,
                arguments: JSON.stringify(part.args)
              }
            });
          } else {
            const _exhaustiveCheck = part;
            throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
          }
        }
        messages.push({
          role: "assistant",
          content: assistantText,
          tool_calls: assistantToolCalls.length > 0 ? assistantToolCalls : void 0
        });
        break;
      }
      case "tool": {
        // One "tool" message per tool result, keyed by the originating call id.
        for (const toolResponse of content) {
          messages.push({
            role: "tool",
            tool_call_id: toolResponse.toolCallId,
            content: JSON.stringify(toolResponse.result)
          });
        }
        break;
      }
      default: {
        const _exhaustiveCheck = role;
        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
      }
    }
  }
  return messages;
}
645
+
646
// openai/map-openai-finish-reason.ts
// Maps OpenAI's finish_reason strings onto the SDK's standardized values;
// anything unrecognized (including null/undefined) becomes "other".
function mapOpenAIFinishReason(finishReason) {
  if (finishReason === "stop") {
    return "stop";
  }
  if (finishReason === "length") {
    return "length";
  }
  if (finishReason === "content_filter") {
    return "content-filter";
  }
  if (finishReason === "function_call" || finishReason === "tool_calls") {
    return "tool-calls";
  }
  return "other";
}
662
+
663
// openai/openai-error.ts
import { z } from "zod";
// Shape of the OpenAI error envelope: { error: { message, type, param, code } }.
var openAIErrorDataSchema = z.object({
  error: z.object({
    message: z.string(),
    type: z.string(),
    param: z.any().nullable(),
    code: z.string().nullable()
  })
});
// Shared failed-response handler for OpenAI endpoints; surfaces the
// provider's error.message on the resulting APICallError.
var openaiFailedResponseHandler = createJsonErrorResponseHandler({
  errorSchema: openAIErrorDataSchema,
  errorToMessage: (data) => data.error.message
});
677
+
678
// openai/openai-chat-language-model.ts
// Language-model implementation backed by the OpenAI chat-completions API.
// Implements the v1 model specification: getArgs builds the request payload,
// doGenerate performs a blocking call, doStream consumes the SSE stream.
var OpenAIChatLanguageModel = class {
  constructor(modelId, settings, config) {
    this.specificationVersion = "v1";
    // Object generation defaults to tool mode (forced function call).
    this.defaultObjectGenerationMode = "tool";
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
  // Translates the standardized call options into an OpenAI request body.
  // Temperature is rescaled from [0,1] to [0,2]; the penalties from [-1,1]
  // to [-2,2]; undefined settings stay undefined (see scale()).
  getArgs({
    mode,
    prompt: prompt2,
    maxTokens,
    temperature,
    topP,
    frequencyPenalty,
    presencePenalty,
    seed
  }) {
    var _a;
    const type = mode.type;
    const baseArgs = {
      // model id:
      model: this.modelId,
      // model specific settings:
      logit_bias: this.settings.logitBias,
      user: this.settings.user,
      // standardized settings:
      max_tokens: maxTokens,
      temperature: scale({
        value: temperature,
        outputMin: 0,
        outputMax: 2
      }),
      top_p: topP,
      frequency_penalty: scale({
        value: frequencyPenalty,
        inputMin: -1,
        inputMax: 1,
        outputMin: -2,
        outputMax: 2
      }),
      presence_penalty: scale({
        value: presencePenalty,
        inputMin: -1,
        inputMax: 1,
        outputMin: -2,
        outputMax: 2
      }),
      seed,
      // messages:
      messages: convertToOpenAIChatMessages(prompt2)
    };
    switch (type) {
      case "regular": {
        // Empty tool arrays are normalized to undefined.
        const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
        return {
          ...baseArgs,
          tools: tools == null ? void 0 : tools.map((tool) => ({
            type: "function",
            function: {
              name: tool.name,
              description: tool.description,
              parameters: tool.parameters
            }
          }))
        };
      }
      case "object-json": {
        return {
          ...baseArgs,
          response_format: { type: "json_object" }
        };
      }
      case "object-tool": {
        // Force the model to call exactly this tool.
        return {
          ...baseArgs,
          tool_choice: { type: "function", function: { name: mode.tool.name } },
          tools: [{ type: "function", function: mode.tool }]
        };
      }
      case "object-grammar": {
        throw new UnsupportedFunctionalityError({
          functionality: "object-grammar mode",
          provider: this.provider
        });
      }
      default: {
        const _exhaustiveCheck = type;
        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
      }
    }
  }
  // Single (non-streaming) generation call.
  async doGenerate(options) {
    var _a, _b;
    const args = this.getArgs(options);
    const response = await postJsonToApi({
      url: `${this.config.baseUrl}/chat/completions`,
      headers: this.config.headers(),
      body: args,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(
        openAIChatResponseSchema
      ),
      abortSignal: options.abortSignal
    });
    // Split the request into the prompt (messages) and the remaining
    // settings for rawCall reporting.
    const { messages: rawPrompt, ...rawSettings } = args;
    const choice = response.choices[0];
    return {
      text: (_a = choice.message.content) != null ? _a : void 0,
      toolCalls: (_b = choice.message.tool_calls) == null ? void 0 : _b.map((toolCall) => ({
        toolCallType: "function",
        toolCallId: toolCall.id,
        toolName: toolCall.function.name,
        args: toolCall.function.arguments
      })),
      finishReason: mapOpenAIFinishReason(choice.finish_reason),
      usage: {
        promptTokens: response.usage.prompt_tokens,
        completionTokens: response.usage.completion_tokens
      },
      rawCall: { rawPrompt, rawSettings },
      warnings: []
    };
  }
  // Streaming generation call: converts OpenAI SSE chunks into the SDK's
  // stream-part protocol (text-delta / tool-call-delta / tool-call /
  // error / finish). Tool calls are accumulated across chunks by index and
  // emitted as a complete "tool-call" part once their arguments parse as JSON.
  async doStream(options) {
    const args = this.getArgs(options);
    const response = await postJsonToApi({
      url: `${this.config.baseUrl}/chat/completions`,
      headers: this.config.headers(),
      body: {
        ...args,
        stream: true
      },
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createEventSourceResponseHandler(
        openaiChatChunkSchema
      ),
      abortSignal: options.abortSignal
    });
    const { messages: rawPrompt, ...rawSettings } = args;
    // Accumulated tool calls, indexed by the provider's tool_calls[].index.
    const toolCalls = [];
    let finishReason = "other";
    let usage = {
      promptTokens: Number.NaN,
      completionTokens: Number.NaN
    };
    return {
      stream: response.pipeThrough(
        new TransformStream({
          transform(chunk, controller) {
            var _a, _b, _c, _d, _e, _f, _g, _h, _i;
            // Schema-validation failures surface as error stream parts.
            if (!chunk.success) {
              controller.enqueue({ type: "error", error: chunk.error });
              return;
            }
            const value = chunk.value;
            if (value.usage != null) {
              usage = {
                promptTokens: value.usage.prompt_tokens,
                completionTokens: value.usage.completion_tokens
              };
            }
            const choice = value.choices[0];
            if ((choice == null ? void 0 : choice.finish_reason) != null) {
              finishReason = mapOpenAIFinishReason(choice.finish_reason);
            }
            if ((choice == null ? void 0 : choice.delta) == null) {
              return;
            }
            const delta = choice.delta;
            if (delta.content != null) {
              controller.enqueue({
                type: "text-delta",
                textDelta: delta.content
              });
            }
            if (delta.tool_calls != null) {
              for (const toolCallDelta of delta.tool_calls) {
                const index = toolCallDelta.index;
                // First chunk for this index: must carry type, id and name.
                if (toolCalls[index] == null) {
                  if (toolCallDelta.type !== "function") {
                    throw new InvalidResponseDataError({
                      data: toolCallDelta,
                      message: `Expected 'function' type.`
                    });
                  }
                  if (toolCallDelta.id == null) {
                    throw new InvalidResponseDataError({
                      data: toolCallDelta,
                      message: `Expected 'id' to be a string.`
                    });
                  }
                  if (((_a = toolCallDelta.function) == null ? void 0 : _a.name) == null) {
                    throw new InvalidResponseDataError({
                      data: toolCallDelta,
                      message: `Expected 'function.name' to be a string.`
                    });
                  }
                  toolCalls[index] = {
                    id: toolCallDelta.id,
                    type: "function",
                    function: {
                      name: toolCallDelta.function.name,
                      arguments: (_b = toolCallDelta.function.arguments) != null ? _b : ""
                    }
                  };
                  continue;
                }
                // Subsequent chunks append to the argument text.
                const toolCall = toolCalls[index];
                if (((_c = toolCallDelta.function) == null ? void 0 : _c.arguments) != null) {
                  toolCall.function.arguments += (_e = (_d = toolCallDelta.function) == null ? void 0 : _d.arguments) != null ? _e : "";
                }
                controller.enqueue({
                  type: "tool-call-delta",
                  toolCallType: "function",
                  toolCallId: toolCall.id,
                  toolName: toolCall.function.name,
                  argsTextDelta: (_f = toolCallDelta.function.arguments) != null ? _f : ""
                });
                // Emit the full tool call once the accumulated arguments
                // form valid JSON.
                if (((_g = toolCall.function) == null ? void 0 : _g.name) == null || ((_h = toolCall.function) == null ? void 0 : _h.arguments) == null || !isParseableJson(toolCall.function.arguments)) {
                  continue;
                }
                controller.enqueue({
                  type: "tool-call",
                  toolCallType: "function",
                  toolCallId: (_i = toolCall.id) != null ? _i : generateId(),
                  toolName: toolCall.function.name,
                  args: toolCall.function.arguments
                });
              }
            }
          },
          flush(controller) {
            // Terminal part with the final finish reason and token usage.
            controller.enqueue({ type: "finish", finishReason, usage });
          }
        })
      ),
      rawCall: { rawPrompt, rawSettings },
      warnings: []
    };
  }
};
924
// Schema for a non-streaming chat-completion response — only the subset of
// the OpenAI payload that doGenerate actually consumes.
var openAIChatResponseSchema = z2.object({
  choices: z2.array(
    z2.object({
      message: z2.object({
        role: z2.literal("assistant"),
        content: z2.string().nullable(),
        tool_calls: z2.array(
          z2.object({
            id: z2.string(),
            type: z2.literal("function"),
            function: z2.object({
              name: z2.string(),
              arguments: z2.string()
            })
          })
        ).optional()
      }),
      index: z2.number(),
      finish_reason: z2.string().optional().nullable()
    })
  ),
  object: z2.literal("chat.completion"),
  usage: z2.object({
    prompt_tokens: z2.number(),
    completion_tokens: z2.number()
  })
});
951
// Schema for a single streaming SSE chunk. Most fields are optional because
// deltas arrive incrementally (e.g. tool-call id/type only in the first
// chunk for an index, usage only at the end, if at all).
var openaiChatChunkSchema = z2.object({
  object: z2.literal("chat.completion.chunk"),
  choices: z2.array(
    z2.object({
      delta: z2.object({
        role: z2.enum(["assistant"]).optional(),
        content: z2.string().nullable().optional(),
        tool_calls: z2.array(
          z2.object({
            index: z2.number(),
            id: z2.string().optional(),
            type: z2.literal("function").optional(),
            function: z2.object({
              name: z2.string().optional(),
              arguments: z2.string().optional()
            })
          })
        ).optional()
      }),
      finish_reason: z2.string().nullable().optional(),
      index: z2.number()
    })
  ),
  usage: z2.object({
    prompt_tokens: z2.number(),
    completion_tokens: z2.number()
  }).optional().nullable()
});
979
+
980
+ // openai/openai-completion-language-model.ts
981
+ import { z as z3 } from "zod";
982
+
983
// openai/convert-to-openai-completion-prompt.ts
/**
 * Converts a standardized prompt into a single completion-style prompt string
 * of the form "user:\n...\n\nassistant:\n...". A single leading system
 * message becomes a plain preamble. Returns the prompt text plus stop
 * sequences that cut the model off when it starts writing the next user turn.
 *
 * Throws InvalidPromptError for system messages after the first position and
 * UnsupportedFunctionalityError for images, tool calls and tool messages,
 * which cannot be represented in the completion format.
 */
function convertToOpenAICompletionPrompt({
  prompt: prompt2,
  inputFormat,
  provider,
  user = "user",
  assistant = "assistant"
}) {
  // Fast path: a raw single user text message is passed through unchanged.
  if (inputFormat === "prompt" && prompt2.length === 1 && prompt2[0].role === "user" && prompt2[0].content.length === 1 && prompt2[0].content[0].type === "text") {
    return { prompt: prompt2[0].content[0].text };
  }
  let text = "";
  // A system message is only allowed at the very start of the prompt.
  if (prompt2[0].role === "system") {
    text += `${prompt2[0].content}\n\n`;
    prompt2 = prompt2.slice(1);
  }
  for (const { role, content } of prompt2) {
    switch (role) {
      case "system": {
        throw new InvalidPromptError({
          // BUG FIX: was a double-quoted string, so "${content}" appeared
          // literally in the message instead of interpolating the content.
          message: `Unexpected system message in prompt: ${content}`,
          prompt: prompt2
        });
      }
      case "user": {
        const userMessage = content.map((part) => {
          switch (part.type) {
            case "text": {
              return part.text;
            }
            case "image": {
              throw new UnsupportedFunctionalityError({
                provider,
                functionality: "images"
              });
            }
          }
        }).join("");
        text += `${user}:\n${userMessage}\n\n`;
        break;
      }
      case "assistant": {
        const assistantMessage = content.map((part) => {
          switch (part.type) {
            case "text": {
              return part.text;
            }
            case "tool-call": {
              throw new UnsupportedFunctionalityError({
                provider,
                functionality: "tool-call messages"
              });
            }
          }
        }).join("");
        text += `${assistant}:\n${assistantMessage}\n\n`;
        break;
      }
      case "tool": {
        throw new UnsupportedFunctionalityError({
          provider,
          functionality: "tool messages"
        });
      }
      default: {
        const _exhaustiveCheck = role;
        throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
      }
    }
  }
  // Leave the prompt ready for the assistant's next turn.
  text += `${assistant}:\n`;
  return {
    prompt: text,
    // Stop once the model starts to write the next user turn itself.
    stopSequences: [`\n${user}:`]
  };
}
1069
+
1070
+ // openai/openai-completion-language-model.ts
1071
// Language model wrapper for the (legacy) OpenAI completions API,
// implementing specification version "v1".
var OpenAICompletionLanguageModel = class {
  /**
   * @param modelId  OpenAI completion model id.
   * @param settings Model-specific settings (echo, logitBias, suffix, user).
   * @param config   Provider config: provider name, baseUrl, headers().
   */
  constructor(modelId, settings, config) {
    this.specificationVersion = "v1";
    // Completion models do not support structured object generation.
    this.defaultObjectGenerationMode = void 0;
    this.modelId = modelId;
    this.settings = settings;
    this.config = config;
  }
  get provider() {
    return this.config.provider;
  }
  // Builds the JSON request body for the completions endpoint from the
  // standardized call options. Throws for modes this API cannot support.
  getArgs({
    mode,
    inputFormat,
    prompt: prompt2,
    maxTokens,
    temperature,
    topP,
    frequencyPenalty,
    presencePenalty,
    seed
  }) {
    var _a;
    const type = mode.type;
    const { prompt: completionPrompt, stopSequences } = convertToOpenAICompletionPrompt({
      prompt: prompt2,
      inputFormat,
      provider: this.provider
    });
    const baseArgs = {
      // model id:
      model: this.modelId,
      // model specific settings:
      echo: this.settings.echo,
      logit_bias: this.settings.logitBias,
      suffix: this.settings.suffix,
      user: this.settings.user,
      // standardized settings:
      max_tokens: maxTokens,
      // scale temperature to OpenAI's 0..2 range:
      temperature: scale({
        value: temperature,
        outputMin: 0,
        outputMax: 2
      }),
      top_p: topP,
      // scale penalties from -1..1 to OpenAI's -2..2 range:
      frequency_penalty: scale({
        value: frequencyPenalty,
        inputMin: -1,
        inputMax: 1,
        outputMin: -2,
        outputMax: 2
      }),
      presence_penalty: scale({
        value: presencePenalty,
        inputMin: -1,
        inputMax: 1,
        outputMin: -2,
        outputMax: 2
      }),
      seed,
      // prompt:
      prompt: completionPrompt,
      // stop sequences:
      stop: stopSequences
    };
    switch (type) {
      case "regular": {
        // Tools are not supported by the completions API.
        if ((_a = mode.tools) == null ? void 0 : _a.length) {
          throw new UnsupportedFunctionalityError({
            functionality: "tools",
            provider: this.provider
          });
        }
        return baseArgs;
      }
      case "object-json": {
        throw new UnsupportedFunctionalityError({
          functionality: "object-json mode",
          provider: this.provider
        });
      }
      case "object-tool": {
        throw new UnsupportedFunctionalityError({
          functionality: "object-tool mode",
          provider: this.provider
        });
      }
      case "object-grammar": {
        throw new UnsupportedFunctionalityError({
          functionality: "object-grammar mode",
          provider: this.provider
        });
      }
      default: {
        const _exhaustiveCheck = type;
        throw new Error(`Unsupported type: ${_exhaustiveCheck}`);
      }
    }
  }
  // Non-streaming call: maps the API response to the standardized
  // { text, usage, finishReason, rawCall, warnings } result.
  async doGenerate(options) {
    const args = this.getArgs(options);
    const response = await postJsonToApi({
      url: `${this.config.baseUrl}/completions`,
      headers: this.config.headers(),
      body: args,
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createJsonResponseHandler(
        openAICompletionResponseSchema
      ),
      abortSignal: options.abortSignal
    });
    const { prompt: rawPrompt, ...rawSettings } = args;
    const choice = response.choices[0];
    return {
      text: choice.text,
      usage: {
        promptTokens: response.usage.prompt_tokens,
        completionTokens: response.usage.completion_tokens
      },
      finishReason: mapOpenAIFinishReason(choice.finish_reason),
      rawCall: { rawPrompt, rawSettings },
      warnings: []
    };
  }
  // Streaming call: translates SSE chunks into standardized stream parts
  // (text-delta / error / finish).
  async doStream(options) {
    const args = this.getArgs(options);
    const response = await postJsonToApi({
      url: `${this.config.baseUrl}/completions`,
      headers: this.config.headers(),
      body: {
        // FIX: reuse the args computed above instead of redundantly calling
        // this.getArgs(options) a second time.
        ...args,
        stream: true
      },
      failedResponseHandler: openaiFailedResponseHandler,
      successfulResponseHandler: createEventSourceResponseHandler(
        openaiCompletionChunkSchema
      ),
      abortSignal: options.abortSignal
    });
    const { prompt: rawPrompt, ...rawSettings } = args;
    let finishReason = "other";
    // Token counts stay NaN unless the stream reports usage.
    let usage = {
      promptTokens: Number.NaN,
      completionTokens: Number.NaN
    };
    return {
      stream: response.pipeThrough(
        new TransformStream({
          transform(chunk, controller) {
            if (!chunk.success) {
              controller.enqueue({ type: "error", error: chunk.error });
              return;
            }
            const value = chunk.value;
            if (value.usage != null) {
              usage = {
                promptTokens: value.usage.prompt_tokens,
                completionTokens: value.usage.completion_tokens
              };
            }
            const choice = value.choices[0];
            if ((choice == null ? void 0 : choice.finish_reason) != null) {
              finishReason = mapOpenAIFinishReason(choice.finish_reason);
            }
            if ((choice == null ? void 0 : choice.text) != null) {
              controller.enqueue({
                type: "text-delta",
                textDelta: choice.text
              });
            }
          },
          flush(controller) {
            // Emit the final finish part once the upstream stream closes.
            controller.enqueue({ type: "finish", finishReason, usage });
          }
        })
      ),
      rawCall: { rawPrompt, rawSettings },
      warnings: []
    };
  }
};
1252
// Zod schema for a non-streaming OpenAI completions response.
var openAICompletionResponseSchema = z3.object({
  choices: z3.array(z3.object({ text: z3.string(), finish_reason: z3.string() })),
  usage: z3.object({ prompt_tokens: z3.number(), completion_tokens: z3.number() })
});
1264
// Zod schema for one SSE chunk of a streaming OpenAI completions response.
var openaiCompletionChunkSchema = z3.object({
  object: z3.literal("text_completion"),
  choices: z3.array(
    z3.object({
      text: z3.string(),
      finish_reason: z3.enum(["stop", "length", "content_filter"]).optional().nullable(),
      index: z3.number()
    })
  ),
  // usage may be absent or null on individual chunks.
  usage: z3.object({ prompt_tokens: z3.number(), completion_tokens: z3.number() }).optional().nullable()
});
1278
+
1279
+ // openai/openai-facade.ts
1280
// Facade for creating OpenAI language models (chat and completion).
var OpenAI = class {
  /**
   * @param options.baseUrl      Override for the API base URL (defaults to
   *                             https://api.openai.com/v1).
   * @param options.apiKey       API key; falls back to the OPENAI_API_KEY
   *                             environment variable via loadApiKey.
   * @param options.organization Optional OpenAI organization id.
   */
  constructor(options = {}) {
    this.baseUrl = options.baseUrl;
    this.apiKey = options.apiKey;
    this.organization = options.organization;
  }
  // Shared configuration for all models created by this facade.
  get baseConfig() {
    const resolvedBaseUrl = this.baseUrl != null ? this.baseUrl : "https://api.openai.com/v1";
    const buildHeaders = () => ({
      Authorization: `Bearer ${loadApiKey({
        apiKey: this.apiKey,
        environmentVariableName: "OPENAI_API_KEY",
        description: "OpenAI"
      })}`,
      "OpenAI-Organization": this.organization
    });
    return {
      organization: this.organization,
      baseUrl: resolvedBaseUrl,
      headers: buildHeaders
    };
  }
  // Creates a chat language model for the given model id.
  chat(modelId, settings = {}) {
    return new OpenAIChatLanguageModel(modelId, settings, {
      provider: "openai.chat",
      ...this.baseConfig
    });
  }
  // Creates a (legacy) completion language model for the given model id.
  completion(modelId, settings = {}) {
    return new OpenAICompletionLanguageModel(modelId, settings, {
      provider: "openai.completion",
      ...this.baseConfig
    });
  }
};
1314
// Default provider instance; resolves the API key from the OPENAI_API_KEY
// environment variable at request time (via loadApiKey in baseConfig).
var openai = new OpenAI();
export {
  OpenAI,
  openai
};
//# sourceMappingURL=index.mjs.map