ai 3.0.13 → 3.0.14

This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. package/ai-model-specification/dist/index.d.mts +704 -0
  2. package/ai-model-specification/dist/index.d.ts +704 -0
  3. package/ai-model-specification/dist/index.js +806 -0
  4. package/ai-model-specification/dist/index.js.map +1 -0
  5. package/ai-model-specification/dist/index.mjs +742 -0
  6. package/ai-model-specification/dist/index.mjs.map +1 -0
  7. package/dist/index.d.mts +683 -2
  8. package/dist/index.d.ts +683 -2
  9. package/dist/index.js +1723 -15
  10. package/dist/index.js.map +1 -1
  11. package/dist/index.mjs +1700 -15
  12. package/dist/index.mjs.map +1 -1
  13. package/mistral/dist/index.d.mts +367 -0
  14. package/mistral/dist/index.d.ts +367 -0
  15. package/mistral/dist/index.js +936 -0
  16. package/mistral/dist/index.js.map +1 -0
  17. package/mistral/dist/index.mjs +900 -0
  18. package/mistral/dist/index.mjs.map +1 -0
  19. package/openai/dist/index.d.mts +430 -0
  20. package/openai/dist/index.d.ts +430 -0
  21. package/openai/dist/index.js +1355 -0
  22. package/openai/dist/index.js.map +1 -0
  23. package/openai/dist/index.mjs +1319 -0
  24. package/openai/dist/index.mjs.map +1 -0
  25. package/package.json +30 -4
  26. package/prompts/dist/index.d.mts +13 -1
  27. package/prompts/dist/index.d.ts +13 -1
  28. package/prompts/dist/index.js +13 -0
  29. package/prompts/dist/index.js.map +1 -1
  30. package/prompts/dist/index.mjs +12 -0
  31. package/prompts/dist/index.mjs.map +1 -1
  32. package/react/dist/index.js +35 -34
  33. package/react/dist/index.js.map +1 -1
  34. package/react/dist/index.mjs +35 -34
  35. package/react/dist/index.mjs.map +1 -1
  36. package/rsc/dist/index.d.ts +45 -8
  37. package/rsc/dist/rsc-server.d.mts +45 -8
  38. package/rsc/dist/rsc-server.mjs +67 -13
  39. package/rsc/dist/rsc-server.mjs.map +1 -1
  40. package/rsc/dist/rsc-shared.d.mts +5 -8
  41. package/rsc/dist/rsc-shared.mjs +23 -2
  42. package/rsc/dist/rsc-shared.mjs.map +1 -1
  43. package/solid/dist/index.js +29 -27
  44. package/solid/dist/index.js.map +1 -1
  45. package/solid/dist/index.mjs +29 -27
  46. package/solid/dist/index.mjs.map +1 -1
  47. package/svelte/dist/index.js +31 -29
  48. package/svelte/dist/index.js.map +1 -1
  49. package/svelte/dist/index.mjs +31 -29
  50. package/svelte/dist/index.mjs.map +1 -1
  51. package/vue/dist/index.js +29 -27
  52. package/vue/dist/index.js.map +1 -1
  53. package/vue/dist/index.mjs +29 -27
  54. package/vue/dist/index.mjs.map +1 -1
package/dist/index.js CHANGED
@@ -1,7 +1,9 @@
1
1
  "use strict";
2
+ var __create = Object.create;
2
3
  var __defProp = Object.defineProperty;
3
4
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
5
  var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __getProtoOf = Object.getPrototypeOf;
5
7
  var __hasOwnProp = Object.prototype.hasOwnProperty;
6
8
  var __export = (target, all) => {
7
9
  for (var name in all)
@@ -15,12 +17,21 @@ var __copyProps = (to, from, except, desc) => {
15
17
  }
16
18
  return to;
17
19
  };
20
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
21
+ // If the importer is in node compatibility mode or this is not an ESM
22
+ // file that has been converted to a CommonJS file using a Babel-
23
+ // compatible transform (i.e. "__esModule" has not been set), then set
24
+ // "default" to the CommonJS "module.exports" for node compatibility.
25
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
26
+ mod
27
+ ));
18
28
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
29
 
20
30
  // streams/index.ts
21
31
  var streams_exports = {};
22
32
  __export(streams_exports, {
23
33
  AIStream: () => AIStream,
34
+ AWSBedrockAnthropicMessagesStream: () => AWSBedrockAnthropicMessagesStream,
24
35
  AWSBedrockAnthropicStream: () => AWSBedrockAnthropicStream,
25
36
  AWSBedrockCohereStream: () => AWSBedrockCohereStream,
26
37
  AWSBedrockLlama2Stream: () => AWSBedrockLlama2Stream,
@@ -28,6 +39,8 @@ __export(streams_exports, {
28
39
  AnthropicStream: () => AnthropicStream,
29
40
  COMPLEX_HEADER: () => COMPLEX_HEADER,
30
41
  CohereStream: () => CohereStream,
42
+ GenerateObjectResult: () => GenerateObjectResult,
43
+ GenerateTextResult: () => GenerateTextResult,
31
44
  GoogleGenerativeAIStream: () => GoogleGenerativeAIStream,
32
45
  HuggingFaceStream: () => HuggingFaceStream,
33
46
  InkeepStream: () => InkeepStream,
@@ -35,7 +48,11 @@ __export(streams_exports, {
35
48
  MistralStream: () => MistralStream,
36
49
  OpenAIStream: () => OpenAIStream,
37
50
  ReplicateStream: () => ReplicateStream,
51
+ StreamObjectResult: () => StreamObjectResult,
52
+ StreamTextResult: () => StreamTextResult,
38
53
  StreamingTextResponse: () => StreamingTextResponse,
54
+ convertDataContentToBase64String: () => convertDataContentToBase64String,
55
+ convertDataContentToUint8Array: () => convertDataContentToUint8Array,
39
56
  createCallbacksTransformer: () => createCallbacksTransformer,
40
57
  createChunkDecoder: () => createChunkDecoder,
41
58
  createEventStreamTransformer: () => createEventStreamTransformer,
@@ -43,16 +60,1692 @@ __export(streams_exports, {
43
60
  experimental_AssistantResponse: () => experimental_AssistantResponse,
44
61
  experimental_StreamData: () => experimental_StreamData,
45
62
  experimental_StreamingReactResponse: () => experimental_StreamingReactResponse,
63
+ experimental_generateObject: () => experimental_generateObject,
64
+ experimental_generateText: () => experimental_generateText,
65
+ experimental_streamObject: () => experimental_streamObject,
66
+ experimental_streamText: () => experimental_streamText,
67
+ generateId: () => generateId,
46
68
  isStreamStringEqualToType: () => isStreamStringEqualToType,
47
- nanoid: () => nanoid,
69
+ nanoid: () => generateId,
48
70
  readableFromAsyncIterable: () => readableFromAsyncIterable,
49
71
  streamToResponse: () => streamToResponse,
72
+ tool: () => tool,
50
73
  trimStartOfStreamHelper: () => trimStartOfStreamHelper
51
74
  });
52
75
  module.exports = __toCommonJS(streams_exports);
53
76
 
54
- // shared/utils.ts
77
+ // core/generate-object/generate-object.ts
78
+ var import_zod_to_json_schema = __toESM(require("zod-to-json-schema"));
79
+
80
+ // ai-model-specification/errors/api-call-error.ts
81
+ var APICallError = class extends Error {
82
+ constructor({
83
+ message,
84
+ url,
85
+ requestBodyValues,
86
+ statusCode,
87
+ responseBody,
88
+ cause,
89
+ isRetryable = statusCode != null && (statusCode === 408 || // request timeout
90
+ statusCode === 409 || // conflict
91
+ statusCode === 429 || // too many requests
92
+ statusCode >= 500),
93
+ // server error
94
+ data
95
+ }) {
96
+ super(message);
97
+ this.name = "AI_APICallError";
98
+ this.url = url;
99
+ this.requestBodyValues = requestBodyValues;
100
+ this.statusCode = statusCode;
101
+ this.responseBody = responseBody;
102
+ this.cause = cause;
103
+ this.isRetryable = isRetryable;
104
+ this.data = data;
105
+ }
106
+ static isAPICallError(error) {
107
+ return error instanceof Error && error.name === "AI_APICallError" && typeof error.url === "string" && typeof error.requestBodyValues === "object" && (error.statusCode == null || typeof error.statusCode === "number") && (error.responseBody == null || typeof error.responseBody === "string") && (error.cause == null || typeof error.cause === "object") && typeof error.isRetryable === "boolean" && (error.data == null || typeof error.data === "object");
108
+ }
109
+ toJSON() {
110
+ return {
111
+ name: this.name,
112
+ message: this.message,
113
+ url: this.url,
114
+ requestBodyValues: this.requestBodyValues,
115
+ statusCode: this.statusCode,
116
+ responseBody: this.responseBody,
117
+ cause: this.cause,
118
+ isRetryable: this.isRetryable,
119
+ data: this.data
120
+ };
121
+ }
122
+ };
123
+
124
+ // ai-model-specification/errors/invalid-argument-error.ts
125
+ var InvalidArgumentError = class extends Error {
126
+ constructor({
127
+ parameter,
128
+ value,
129
+ message
130
+ }) {
131
+ super(`Invalid argument for parameter ${parameter}: ${message}`);
132
+ this.name = "AI_InvalidArgumentError";
133
+ this.parameter = parameter;
134
+ this.value = value;
135
+ }
136
+ static isInvalidArgumentError(error) {
137
+ return error instanceof Error && error.name === "AI_InvalidArgumentError" && typeof error.parameter === "string" && typeof error.value === "string";
138
+ }
139
+ toJSON() {
140
+ return {
141
+ name: this.name,
142
+ message: this.message,
143
+ stack: this.stack,
144
+ parameter: this.parameter,
145
+ value: this.value
146
+ };
147
+ }
148
+ };
149
+
150
+ // ai-model-specification/errors/invalid-data-content-error.ts
151
+ var InvalidDataContentError = class extends Error {
152
+ constructor({
153
+ content,
154
+ message = `Invalid data content. Expected a string, Uint8Array, ArrayBuffer, or Buffer, but got ${typeof content}.`
155
+ }) {
156
+ super(message);
157
+ this.name = "AI_InvalidDataContentError";
158
+ this.content = content;
159
+ }
160
+ static isInvalidDataContentError(error) {
161
+ return error instanceof Error && error.name === "AI_InvalidDataContentError" && error.content != null;
162
+ }
163
+ toJSON() {
164
+ return {
165
+ name: this.name,
166
+ message: this.message,
167
+ stack: this.stack,
168
+ content: this.content
169
+ };
170
+ }
171
+ };
172
+
173
+ // ai-model-specification/util/get-error-message.ts
174
+ function getErrorMessage(error) {
175
+ if (error == null) {
176
+ return "unknown error";
177
+ }
178
+ if (typeof error === "string") {
179
+ return error;
180
+ }
181
+ if (error instanceof Error) {
182
+ return error.message;
183
+ }
184
+ return JSON.stringify(error);
185
+ }
186
+
187
+ // ai-model-specification/util/parse-json.ts
188
+ var import_secure_json_parse = __toESM(require("secure-json-parse"));
189
+
190
+ // ai-model-specification/errors/json-parse-error.ts
191
+ var JSONParseError = class extends Error {
192
+ constructor({ text, cause }) {
193
+ super(
194
+ `JSON parsing failed: Text: ${text}.
195
+ Error message: ${getErrorMessage(cause)}`
196
+ );
197
+ this.name = "AI_JSONParseError";
198
+ this.cause = cause;
199
+ this.text = text;
200
+ }
201
+ static isJSONParseError(error) {
202
+ return error instanceof Error && error.name === "AI_JSONParseError" && typeof error.text === "string" && typeof error.cause === "string";
203
+ }
204
+ toJSON() {
205
+ return {
206
+ name: this.name,
207
+ message: this.message,
208
+ cause: this.cause,
209
+ stack: this.stack,
210
+ valueText: this.text
211
+ };
212
+ }
213
+ };
214
+
215
+ // ai-model-specification/errors/type-validation-error.ts
216
+ var TypeValidationError = class extends Error {
217
+ constructor({ value, cause }) {
218
+ super(
219
+ `Type validation failed: Value: ${JSON.stringify(value)}.
220
+ Error message: ${getErrorMessage(cause)}`
221
+ );
222
+ this.name = "AI_TypeValidationError";
223
+ this.cause = cause;
224
+ this.value = value;
225
+ }
226
+ static isTypeValidationError(error) {
227
+ return error instanceof Error && error.name === "AI_TypeValidationError" && typeof error.value === "string" && typeof error.cause === "string";
228
+ }
229
+ toJSON() {
230
+ return {
231
+ name: this.name,
232
+ message: this.message,
233
+ cause: this.cause,
234
+ stack: this.stack,
235
+ value: this.value
236
+ };
237
+ }
238
+ };
239
+
240
+ // ai-model-specification/util/validate-types.ts
241
+ function safeValidateTypes({
242
+ value,
243
+ schema
244
+ }) {
245
+ try {
246
+ const validationResult = schema.safeParse(value);
247
+ if (validationResult.success) {
248
+ return {
249
+ success: true,
250
+ value: validationResult.data
251
+ };
252
+ }
253
+ return {
254
+ success: false,
255
+ error: new TypeValidationError({
256
+ value,
257
+ cause: validationResult.error
258
+ })
259
+ };
260
+ } catch (error) {
261
+ return {
262
+ success: false,
263
+ error: TypeValidationError.isTypeValidationError(error) ? error : new TypeValidationError({ value, cause: error })
264
+ };
265
+ }
266
+ }
267
+
268
+ // ai-model-specification/util/parse-json.ts
269
+ function safeParseJSON({
270
+ text,
271
+ schema
272
+ }) {
273
+ try {
274
+ const value = import_secure_json_parse.default.parse(text);
275
+ if (schema == null) {
276
+ return {
277
+ success: true,
278
+ value
279
+ };
280
+ }
281
+ return safeValidateTypes({ value, schema });
282
+ } catch (error) {
283
+ return {
284
+ success: false,
285
+ error: JSONParseError.isJSONParseError(error) ? error : new JSONParseError({ text, cause: error })
286
+ };
287
+ }
288
+ }
289
+
290
+ // ai-model-specification/util/uint8-utils.ts
291
+ function convertBase64ToUint8Array(base64String) {
292
+ const base64Url = base64String.replace(/-/g, "+").replace(/_/g, "/");
293
+ const latin1string = globalThis.atob(base64Url);
294
+ return Uint8Array.from(latin1string, (byte) => byte.codePointAt(0));
295
+ }
296
+ function convertUint8ArrayToBase64(array) {
297
+ let latin1string = "";
298
+ for (let i = 0; i < array.length; i++) {
299
+ latin1string += String.fromCodePoint(array[i]);
300
+ }
301
+ return globalThis.btoa(latin1string);
302
+ }
303
+
304
+ // ai-model-specification/errors/invalid-tool-arguments-error.ts
305
+ var InvalidToolArgumentsError = class extends Error {
306
+ constructor({
307
+ toolArgs,
308
+ toolName,
309
+ cause,
310
+ message = `Invalid arguments for tool ${toolName}: ${getErrorMessage(
311
+ cause
312
+ )}`
313
+ }) {
314
+ super(message);
315
+ this.name = "AI_InvalidToolArgumentsError";
316
+ this.toolArgs = toolArgs;
317
+ this.toolName = toolName;
318
+ this.cause = cause;
319
+ }
320
+ static isInvalidToolArgumentsError(error) {
321
+ return error instanceof Error && error.name === "AI_InvalidToolArgumentsError" && typeof error.toolName === "string" && typeof error.toolArgs === "string";
322
+ }
323
+ toJSON() {
324
+ return {
325
+ name: this.name,
326
+ message: this.message,
327
+ cause: this.cause,
328
+ stack: this.stack,
329
+ toolName: this.toolName,
330
+ toolArgs: this.toolArgs
331
+ };
332
+ }
333
+ };
334
+
335
+ // ai-model-specification/errors/no-object-generated-error.ts
336
+ var NoTextGeneratedError = class extends Error {
337
+ constructor() {
338
+ super(`No text generated.`);
339
+ this.name = "AI_NoTextGeneratedError";
340
+ }
341
+ static isNoTextGeneratedError(error) {
342
+ return error instanceof Error && error.name === "AI_NoTextGeneratedError";
343
+ }
344
+ toJSON() {
345
+ return {
346
+ name: this.name,
347
+ cause: this.cause,
348
+ message: this.message,
349
+ stack: this.stack
350
+ };
351
+ }
352
+ };
353
+
354
+ // ai-model-specification/errors/no-such-tool-error.ts
355
+ var NoSuchToolError = class extends Error {
356
+ constructor({ message, toolName }) {
357
+ super(message);
358
+ this.name = "AI_NoSuchToolError";
359
+ this.toolName = toolName;
360
+ }
361
+ static isNoSuchToolError(error) {
362
+ return error instanceof Error && error.name === "AI_NoSuchToolError" && typeof error.toolName === "string";
363
+ }
364
+ toJSON() {
365
+ return {
366
+ name: this.name,
367
+ message: this.message,
368
+ stack: this.stack,
369
+ toolName: this.toolName
370
+ };
371
+ }
372
+ };
373
+
374
+ // ai-model-specification/errors/retry-error.ts
375
+ var RetryError = class extends Error {
376
+ constructor({
377
+ message,
378
+ reason,
379
+ errors
380
+ }) {
381
+ super(message);
382
+ this.name = "AI_RetryError";
383
+ this.reason = reason;
384
+ this.errors = errors;
385
+ this.lastError = errors[errors.length - 1];
386
+ }
387
+ static isRetryError(error) {
388
+ return error instanceof Error && error.name === "AI_RetryError" && typeof error.reason === "string" && Array.isArray(error.errors);
389
+ }
390
+ toJSON() {
391
+ return {
392
+ name: this.name,
393
+ message: this.message,
394
+ reason: this.reason,
395
+ lastError: this.lastError,
396
+ errors: this.errors
397
+ };
398
+ }
399
+ };
400
+
401
+ // core/generate-text/token-usage.ts
402
+ function calculateTokenUsage(usage) {
403
+ return {
404
+ promptTokens: usage.promptTokens,
405
+ completionTokens: usage.completionTokens,
406
+ totalTokens: usage.promptTokens + usage.completionTokens
407
+ };
408
+ }
409
+
410
+ // core/prompt/data-content.ts
411
+ function convertDataContentToBase64String(content) {
412
+ if (typeof content === "string") {
413
+ return content;
414
+ }
415
+ if (content instanceof ArrayBuffer) {
416
+ return convertUint8ArrayToBase64(new Uint8Array(content));
417
+ }
418
+ return convertUint8ArrayToBase64(content);
419
+ }
420
+ function convertDataContentToUint8Array(content) {
421
+ if (content instanceof Uint8Array) {
422
+ return content;
423
+ }
424
+ if (typeof content === "string") {
425
+ return convertBase64ToUint8Array(content);
426
+ }
427
+ if (content instanceof ArrayBuffer) {
428
+ return new Uint8Array(content);
429
+ }
430
+ throw new InvalidDataContentError({ content });
431
+ }
432
+
433
+ // core/prompt/convert-to-language-model-prompt.ts
434
+ function convertToLanguageModelPrompt({
435
+ system,
436
+ prompt,
437
+ messages
438
+ }) {
439
+ if (prompt == null && messages == null) {
440
+ throw new Error("prompt or messages must be defined");
441
+ }
442
+ if (prompt != null && messages != null) {
443
+ throw new Error("prompt and messages cannot be defined at the same time");
444
+ }
445
+ const languageModelMessages = [];
446
+ if (system != null) {
447
+ languageModelMessages.push({ role: "system", content: system });
448
+ }
449
+ if (typeof prompt === "string") {
450
+ languageModelMessages.push({
451
+ role: "user",
452
+ content: [{ type: "text", text: prompt }]
453
+ });
454
+ } else {
455
+ messages = messages;
456
+ languageModelMessages.push(
457
+ ...messages.map((message) => {
458
+ switch (message.role) {
459
+ case "user": {
460
+ if (typeof message.content === "string") {
461
+ return {
462
+ role: "user",
463
+ content: [{ type: "text", text: message.content }]
464
+ };
465
+ }
466
+ return {
467
+ role: "user",
468
+ content: message.content.map(
469
+ (part) => {
470
+ switch (part.type) {
471
+ case "text": {
472
+ return part;
473
+ }
474
+ case "image": {
475
+ return {
476
+ type: "image",
477
+ image: part.image instanceof URL ? part.image : convertDataContentToUint8Array(part.image),
478
+ mimeType: part.mimeType
479
+ };
480
+ }
481
+ }
482
+ }
483
+ )
484
+ };
485
+ }
486
+ case "assistant": {
487
+ if (typeof message.content === "string") {
488
+ return {
489
+ role: "assistant",
490
+ content: [{ type: "text", text: message.content }]
491
+ };
492
+ }
493
+ return { role: "assistant", content: message.content };
494
+ }
495
+ case "tool": {
496
+ return message;
497
+ }
498
+ }
499
+ })
500
+ );
501
+ }
502
+ return languageModelMessages;
503
+ }
504
+
505
+ // core/prompt/get-input-format.ts
506
+ function getInputFormat({
507
+ prompt,
508
+ messages
509
+ }) {
510
+ if (prompt == null && messages == null) {
511
+ throw new Error("prompt or messages must be defined");
512
+ }
513
+ if (prompt != null && messages != null) {
514
+ throw new Error("prompt and messages cannot be defined at the same time");
515
+ }
516
+ return prompt != null ? "prompt" : "messages";
517
+ }
518
+
519
+ // core/prompt/prepare-call-settings.ts
520
+ function prepareCallSettings({
521
+ maxTokens,
522
+ temperature,
523
+ topP,
524
+ presencePenalty,
525
+ frequencyPenalty,
526
+ seed,
527
+ maxRetries
528
+ }) {
529
+ if (maxTokens != null) {
530
+ if (!Number.isInteger(maxTokens)) {
531
+ throw new InvalidArgumentError({
532
+ parameter: "maxTokens",
533
+ value: maxTokens,
534
+ message: "maxTokens must be an integer"
535
+ });
536
+ }
537
+ if (maxTokens < 1) {
538
+ throw new InvalidArgumentError({
539
+ parameter: "maxTokens",
540
+ value: maxTokens,
541
+ message: "maxTokens must be >= 1"
542
+ });
543
+ }
544
+ }
545
+ if (temperature != null) {
546
+ if (typeof temperature !== "number") {
547
+ throw new InvalidArgumentError({
548
+ parameter: "temperature",
549
+ value: temperature,
550
+ message: "temperature must be a number"
551
+ });
552
+ }
553
+ if (temperature < 0 || temperature > 1) {
554
+ throw new InvalidArgumentError({
555
+ parameter: "temperature",
556
+ value: temperature,
557
+ message: "temperature must be between 0 and 1 (inclusive)"
558
+ });
559
+ }
560
+ }
561
+ if (topP != null) {
562
+ if (typeof topP !== "number") {
563
+ throw new InvalidArgumentError({
564
+ parameter: "topP",
565
+ value: topP,
566
+ message: "topP must be a number"
567
+ });
568
+ }
569
+ if (topP < 0 || topP > 1) {
570
+ throw new InvalidArgumentError({
571
+ parameter: "topP",
572
+ value: topP,
573
+ message: "topP must be between 0 and 1 (inclusive)"
574
+ });
575
+ }
576
+ }
577
+ if (presencePenalty != null) {
578
+ if (typeof presencePenalty !== "number") {
579
+ throw new InvalidArgumentError({
580
+ parameter: "presencePenalty",
581
+ value: presencePenalty,
582
+ message: "presencePenalty must be a number"
583
+ });
584
+ }
585
+ if (presencePenalty < -1 || presencePenalty > 1) {
586
+ throw new InvalidArgumentError({
587
+ parameter: "presencePenalty",
588
+ value: presencePenalty,
589
+ message: "presencePenalty must be between -1 and 1 (inclusive)"
590
+ });
591
+ }
592
+ }
593
+ if (frequencyPenalty != null) {
594
+ if (typeof frequencyPenalty !== "number") {
595
+ throw new InvalidArgumentError({
596
+ parameter: "frequencyPenalty",
597
+ value: frequencyPenalty,
598
+ message: "frequencyPenalty must be a number"
599
+ });
600
+ }
601
+ if (frequencyPenalty < -1 || frequencyPenalty > 1) {
602
+ throw new InvalidArgumentError({
603
+ parameter: "frequencyPenalty",
604
+ value: frequencyPenalty,
605
+ message: "frequencyPenalty must be between -1 and 1 (inclusive)"
606
+ });
607
+ }
608
+ }
609
+ if (seed != null) {
610
+ if (!Number.isInteger(seed)) {
611
+ throw new InvalidArgumentError({
612
+ parameter: "seed",
613
+ value: seed,
614
+ message: "seed must be an integer"
615
+ });
616
+ }
617
+ }
618
+ if (maxRetries != null) {
619
+ if (!Number.isInteger(maxRetries)) {
620
+ throw new InvalidArgumentError({
621
+ parameter: "maxRetries",
622
+ value: maxRetries,
623
+ message: "maxRetries must be an integer"
624
+ });
625
+ }
626
+ if (maxRetries < 0) {
627
+ throw new InvalidArgumentError({
628
+ parameter: "maxRetries",
629
+ value: maxRetries,
630
+ message: "maxRetries must be >= 0"
631
+ });
632
+ }
633
+ }
634
+ return {
635
+ maxTokens,
636
+ temperature: temperature != null ? temperature : 0,
637
+ topP,
638
+ presencePenalty: presencePenalty != null ? presencePenalty : 0,
639
+ frequencyPenalty: frequencyPenalty != null ? frequencyPenalty : 0,
640
+ seed,
641
+ maxRetries: maxRetries != null ? maxRetries : 2
642
+ };
643
+ }
644
+
645
+ // core/util/delay.ts
646
+ async function delay(delayInMs) {
647
+ return new Promise((resolve) => setTimeout(resolve, delayInMs));
648
+ }
649
+
650
+ // core/util/retry-with-exponential-backoff.ts
651
+ var retryWithExponentialBackoff = ({
652
+ maxRetries = 2,
653
+ initialDelayInMs = 2e3,
654
+ backoffFactor = 2
655
+ } = {}) => async (f) => _retryWithExponentialBackoff(f, {
656
+ maxRetries,
657
+ delayInMs: initialDelayInMs,
658
+ backoffFactor
659
+ });
660
+ async function _retryWithExponentialBackoff(f, {
661
+ maxRetries,
662
+ delayInMs,
663
+ backoffFactor
664
+ }, errors = []) {
665
+ try {
666
+ return await f();
667
+ } catch (error) {
668
+ if (error instanceof Error && error.name === "AbortError") {
669
+ throw error;
670
+ }
671
+ if (maxRetries === 0) {
672
+ throw error;
673
+ }
674
+ const errorMessage = getErrorMessage(error);
675
+ const newErrors = [...errors, error];
676
+ const tryNumber = newErrors.length;
677
+ if (tryNumber > maxRetries) {
678
+ throw new RetryError({
679
+ message: `Failed after ${tryNumber} attemps. Last error: ${errorMessage}`,
680
+ reason: "maxRetriesExceeded",
681
+ errors: newErrors
682
+ });
683
+ }
684
+ if (error instanceof Error && APICallError.isAPICallError(error) && error.isRetryable === true && tryNumber <= maxRetries) {
685
+ await delay(delayInMs);
686
+ return _retryWithExponentialBackoff(
687
+ f,
688
+ { maxRetries, delayInMs: backoffFactor * delayInMs, backoffFactor },
689
+ newErrors
690
+ );
691
+ }
692
+ if (tryNumber === 1) {
693
+ throw error;
694
+ }
695
+ throw new RetryError({
696
+ message: `Failed after ${tryNumber} attemps with non-retryable error: '${errorMessage}'`,
697
+ reason: "errorNotRetryable",
698
+ errors: newErrors
699
+ });
700
+ }
701
+ }
702
+
703
+ // core/generate-object/inject-json-schema-into-system.ts
704
+ var DEFAULT_SCHEMA_PREFIX = "JSON schema:";
705
+ var DEFAULT_SCHEMA_SUFFIX = "You MUST answer with a JSON object that matches the JSON schema above.";
706
+ function injectJsonSchemaIntoSystem({
707
+ system,
708
+ schema,
709
+ schemaPrefix = DEFAULT_SCHEMA_PREFIX,
710
+ schemaSuffix = DEFAULT_SCHEMA_SUFFIX
711
+ }) {
712
+ return [
713
+ system,
714
+ system != null ? "" : null,
715
+ // add a newline if system is not null
716
+ schemaPrefix,
717
+ JSON.stringify(schema),
718
+ schemaSuffix
719
+ ].filter((line) => line != null).join("\n");
720
+ }
721
+
722
+ // core/generate-object/generate-object.ts
723
+ async function experimental_generateObject({
724
+ model,
725
+ schema,
726
+ mode,
727
+ system,
728
+ prompt,
729
+ messages,
730
+ maxRetries,
731
+ abortSignal,
732
+ ...settings
733
+ }) {
734
+ var _a, _b;
735
+ const retry = retryWithExponentialBackoff({ maxRetries });
736
+ const jsonSchema = (0, import_zod_to_json_schema.default)(schema);
737
+ if (mode === "auto" || mode == null) {
738
+ mode = model.defaultObjectGenerationMode;
739
+ }
740
+ let result;
741
+ let finishReason;
742
+ let usage;
743
+ let warnings;
744
+ switch (mode) {
745
+ case "json": {
746
+ const generateResult = await retry(
747
+ () => model.doGenerate({
748
+ mode: { type: "object-json" },
749
+ ...prepareCallSettings(settings),
750
+ inputFormat: getInputFormat({ prompt, messages }),
751
+ prompt: convertToLanguageModelPrompt({
752
+ system: injectJsonSchemaIntoSystem({ system, schema: jsonSchema }),
753
+ prompt,
754
+ messages
755
+ }),
756
+ abortSignal
757
+ })
758
+ );
759
+ if (generateResult.text === void 0) {
760
+ throw new NoTextGeneratedError();
761
+ }
762
+ result = generateResult.text;
763
+ finishReason = generateResult.finishReason;
764
+ usage = generateResult.usage;
765
+ warnings = generateResult.warnings;
766
+ break;
767
+ }
768
+ case "grammar": {
769
+ const generateResult = await retry(
770
+ () => model.doGenerate({
771
+ mode: { type: "object-grammar", schema: jsonSchema },
772
+ ...settings,
773
+ inputFormat: getInputFormat({ prompt, messages }),
774
+ prompt: convertToLanguageModelPrompt({
775
+ system: injectJsonSchemaIntoSystem({ system, schema: jsonSchema }),
776
+ prompt,
777
+ messages
778
+ }),
779
+ abortSignal
780
+ })
781
+ );
782
+ if (generateResult.text === void 0) {
783
+ throw new NoTextGeneratedError();
784
+ }
785
+ result = generateResult.text;
786
+ finishReason = generateResult.finishReason;
787
+ usage = generateResult.usage;
788
+ warnings = generateResult.warnings;
789
+ break;
790
+ }
791
+ case "tool": {
792
+ const generateResult = await retry(
793
+ () => model.doGenerate({
794
+ mode: {
795
+ type: "object-tool",
796
+ tool: {
797
+ type: "function",
798
+ name: "json",
799
+ description: "Respond with a JSON object.",
800
+ parameters: jsonSchema
801
+ }
802
+ },
803
+ ...settings,
804
+ inputFormat: getInputFormat({ prompt, messages }),
805
+ prompt: convertToLanguageModelPrompt({ system, prompt, messages }),
806
+ abortSignal
807
+ })
808
+ );
809
+ const functionArgs = (_b = (_a = generateResult.toolCalls) == null ? void 0 : _a[0]) == null ? void 0 : _b.args;
810
+ if (functionArgs === void 0) {
811
+ throw new NoTextGeneratedError();
812
+ }
813
+ result = functionArgs;
814
+ finishReason = generateResult.finishReason;
815
+ usage = generateResult.usage;
816
+ warnings = generateResult.warnings;
817
+ break;
818
+ }
819
+ case void 0: {
820
+ throw new Error("Model does not have a default object generation mode.");
821
+ }
822
+ default: {
823
+ const _exhaustiveCheck = mode;
824
+ throw new Error(`Unsupported mode: ${_exhaustiveCheck}`);
825
+ }
826
+ }
827
+ const parseResult = safeParseJSON({ text: result, schema });
828
+ if (!parseResult.success) {
829
+ throw parseResult.error;
830
+ }
831
+ return new GenerateObjectResult({
832
+ object: parseResult.value,
833
+ finishReason,
834
+ usage: calculateTokenUsage(usage),
835
+ warnings
836
+ });
837
+ }
838
+ var GenerateObjectResult = class {
839
+ constructor(options) {
840
+ this.object = options.object;
841
+ this.finishReason = options.finishReason;
842
+ this.usage = options.usage;
843
+ this.warnings = options.warnings;
844
+ }
845
+ };
846
+
847
+ // core/generate-object/stream-object.ts
848
+ var import_zod_to_json_schema2 = __toESM(require("zod-to-json-schema"));
849
+
850
+ // core/util/async-iterable-stream.ts
851
+ function createAsyncIterableStream(source, transformer) {
852
+ const transformedStream = source.pipeThrough(
853
+ new TransformStream(transformer)
854
+ );
855
+ transformedStream[Symbol.asyncIterator] = () => {
856
+ const reader = transformedStream.getReader();
857
+ return {
858
+ async next() {
859
+ const { done, value } = await reader.read();
860
+ return done ? { done: true, value: void 0 } : { done: false, value };
861
+ }
862
+ };
863
+ };
864
+ return transformedStream;
865
+ }
866
+
867
+ // core/util/is-deep-equal-data.ts
868
+ function isDeepEqualData(obj1, obj2) {
869
+ if (obj1 === obj2)
870
+ return true;
871
+ if (obj1 == null || obj2 == null)
872
+ return false;
873
+ if (typeof obj1 !== "object" && typeof obj2 !== "object")
874
+ return obj1 === obj2;
875
+ if (obj1.constructor !== obj2.constructor)
876
+ return false;
877
+ if (obj1 instanceof Date && obj2 instanceof Date) {
878
+ return obj1.getTime() === obj2.getTime();
879
+ }
880
+ if (Array.isArray(obj1)) {
881
+ if (obj1.length !== obj2.length)
882
+ return false;
883
+ for (let i = 0; i < obj1.length; i++) {
884
+ if (!isDeepEqualData(obj1[i], obj2[i]))
885
+ return false;
886
+ }
887
+ return true;
888
+ }
889
+ const keys1 = Object.keys(obj1);
890
+ const keys2 = Object.keys(obj2);
891
+ if (keys1.length !== keys2.length)
892
+ return false;
893
+ for (const key of keys1) {
894
+ if (!keys2.includes(key))
895
+ return false;
896
+ if (!isDeepEqualData(obj1[key], obj2[key]))
897
+ return false;
898
+ }
899
+ return true;
900
+ }
901
+
902
+ // core/util/parse-partial-json.ts
903
+ var import_secure_json_parse2 = __toESM(require("secure-json-parse"));
904
+
905
+ // core/util/fix-json.ts
906
+ function fixJson(input) {
907
+ const stack = ["ROOT"];
908
+ let lastValidIndex = -1;
909
+ let literalStart = null;
910
+ function processValueStart(char, i, swapState) {
911
+ {
912
+ switch (char) {
913
+ case '"': {
914
+ lastValidIndex = i;
915
+ stack.pop();
916
+ stack.push(swapState);
917
+ stack.push("INSIDE_STRING");
918
+ break;
919
+ }
920
+ case "f":
921
+ case "t":
922
+ case "n": {
923
+ lastValidIndex = i;
924
+ literalStart = i;
925
+ stack.pop();
926
+ stack.push(swapState);
927
+ stack.push("INSIDE_LITERAL");
928
+ break;
929
+ }
930
+ case "-": {
931
+ stack.pop();
932
+ stack.push(swapState);
933
+ stack.push("INSIDE_NUMBER");
934
+ break;
935
+ }
936
+ case "0":
937
+ case "1":
938
+ case "2":
939
+ case "3":
940
+ case "4":
941
+ case "5":
942
+ case "6":
943
+ case "7":
944
+ case "8":
945
+ case "9": {
946
+ lastValidIndex = i;
947
+ stack.pop();
948
+ stack.push(swapState);
949
+ stack.push("INSIDE_NUMBER");
950
+ break;
951
+ }
952
+ case "{": {
953
+ lastValidIndex = i;
954
+ stack.pop();
955
+ stack.push(swapState);
956
+ stack.push("INSIDE_OBJECT_START");
957
+ break;
958
+ }
959
+ case "[": {
960
+ lastValidIndex = i;
961
+ stack.pop();
962
+ stack.push(swapState);
963
+ stack.push("INSIDE_ARRAY_START");
964
+ break;
965
+ }
966
+ }
967
+ }
968
+ }
969
+ function processAfterObjectValue(char, i) {
970
+ switch (char) {
971
+ case ",": {
972
+ stack.pop();
973
+ stack.push("INSIDE_OBJECT_AFTER_COMMA");
974
+ break;
975
+ }
976
+ case "}": {
977
+ lastValidIndex = i;
978
+ stack.pop();
979
+ break;
980
+ }
981
+ }
982
+ }
983
+ function processAfterArrayValue(char, i) {
984
+ switch (char) {
985
+ case ",": {
986
+ stack.pop();
987
+ stack.push("INSIDE_ARRAY_AFTER_COMMA");
988
+ break;
989
+ }
990
+ case "]": {
991
+ lastValidIndex = i;
992
+ stack.pop();
993
+ break;
994
+ }
995
+ }
996
+ }
997
+ for (let i = 0; i < input.length; i++) {
998
+ const char = input[i];
999
+ const currentState = stack[stack.length - 1];
1000
+ switch (currentState) {
1001
+ case "ROOT":
1002
+ processValueStart(char, i, "FINISH");
1003
+ break;
1004
+ case "INSIDE_OBJECT_START": {
1005
+ switch (char) {
1006
+ case '"': {
1007
+ stack.pop();
1008
+ stack.push("INSIDE_OBJECT_KEY");
1009
+ break;
1010
+ }
1011
+ case "}": {
1012
+ stack.pop();
1013
+ break;
1014
+ }
1015
+ }
1016
+ break;
1017
+ }
1018
+ case "INSIDE_OBJECT_AFTER_COMMA": {
1019
+ switch (char) {
1020
+ case '"': {
1021
+ stack.pop();
1022
+ stack.push("INSIDE_OBJECT_KEY");
1023
+ break;
1024
+ }
1025
+ }
1026
+ break;
1027
+ }
1028
+ case "INSIDE_OBJECT_KEY": {
1029
+ switch (char) {
1030
+ case '"': {
1031
+ stack.pop();
1032
+ stack.push("INSIDE_OBJECT_AFTER_KEY");
1033
+ break;
1034
+ }
1035
+ }
1036
+ break;
1037
+ }
1038
+ case "INSIDE_OBJECT_AFTER_KEY": {
1039
+ switch (char) {
1040
+ case ":": {
1041
+ stack.pop();
1042
+ stack.push("INSIDE_OBJECT_BEFORE_VALUE");
1043
+ break;
1044
+ }
1045
+ }
1046
+ break;
1047
+ }
1048
+ case "INSIDE_OBJECT_BEFORE_VALUE": {
1049
+ processValueStart(char, i, "INSIDE_OBJECT_AFTER_VALUE");
1050
+ break;
1051
+ }
1052
+ case "INSIDE_OBJECT_AFTER_VALUE": {
1053
+ processAfterObjectValue(char, i);
1054
+ break;
1055
+ }
1056
+ case "INSIDE_STRING": {
1057
+ switch (char) {
1058
+ case '"': {
1059
+ stack.pop();
1060
+ lastValidIndex = i;
1061
+ break;
1062
+ }
1063
+ case "\\": {
1064
+ stack.push("INSIDE_STRING_ESCAPE");
1065
+ break;
1066
+ }
1067
+ default: {
1068
+ lastValidIndex = i;
1069
+ }
1070
+ }
1071
+ break;
1072
+ }
1073
+ case "INSIDE_ARRAY_START": {
1074
+ switch (char) {
1075
+ case "]": {
1076
+ lastValidIndex = i;
1077
+ stack.pop();
1078
+ break;
1079
+ }
1080
+ default: {
1081
+ lastValidIndex = i;
1082
+ processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
1083
+ break;
1084
+ }
1085
+ }
1086
+ break;
1087
+ }
1088
+ case "INSIDE_ARRAY_AFTER_VALUE": {
1089
+ switch (char) {
1090
+ case ",": {
1091
+ stack.pop();
1092
+ stack.push("INSIDE_ARRAY_AFTER_COMMA");
1093
+ break;
1094
+ }
1095
+ case "]": {
1096
+ lastValidIndex = i;
1097
+ stack.pop();
1098
+ break;
1099
+ }
1100
+ default: {
1101
+ lastValidIndex = i;
1102
+ break;
1103
+ }
1104
+ }
1105
+ break;
1106
+ }
1107
+ case "INSIDE_ARRAY_AFTER_COMMA": {
1108
+ processValueStart(char, i, "INSIDE_ARRAY_AFTER_VALUE");
1109
+ break;
1110
+ }
1111
+ case "INSIDE_STRING_ESCAPE": {
1112
+ stack.pop();
1113
+ lastValidIndex = i;
1114
+ break;
1115
+ }
1116
+ case "INSIDE_NUMBER": {
1117
+ switch (char) {
1118
+ case "0":
1119
+ case "1":
1120
+ case "2":
1121
+ case "3":
1122
+ case "4":
1123
+ case "5":
1124
+ case "6":
1125
+ case "7":
1126
+ case "8":
1127
+ case "9": {
1128
+ lastValidIndex = i;
1129
+ break;
1130
+ }
1131
+ case "e":
1132
+ case "E":
1133
+ case "-":
1134
+ case ".": {
1135
+ break;
1136
+ }
1137
+ case ",": {
1138
+ stack.pop();
1139
+ if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
1140
+ processAfterArrayValue(char, i);
1141
+ }
1142
+ if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
1143
+ processAfterObjectValue(char, i);
1144
+ }
1145
+ break;
1146
+ }
1147
+ case "}": {
1148
+ stack.pop();
1149
+ if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
1150
+ processAfterObjectValue(char, i);
1151
+ }
1152
+ break;
1153
+ }
1154
+ case "]": {
1155
+ stack.pop();
1156
+ if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
1157
+ processAfterArrayValue(char, i);
1158
+ }
1159
+ break;
1160
+ }
1161
+ default: {
1162
+ stack.pop();
1163
+ break;
1164
+ }
1165
+ }
1166
+ break;
1167
+ }
1168
+ case "INSIDE_LITERAL": {
1169
+ const partialLiteral = input.substring(literalStart, i + 1);
1170
+ if (!"false".startsWith(partialLiteral) && !"true".startsWith(partialLiteral) && !"null".startsWith(partialLiteral)) {
1171
+ stack.pop();
1172
+ if (stack[stack.length - 1] === "INSIDE_OBJECT_AFTER_VALUE") {
1173
+ processAfterObjectValue(char, i);
1174
+ } else if (stack[stack.length - 1] === "INSIDE_ARRAY_AFTER_VALUE") {
1175
+ processAfterArrayValue(char, i);
1176
+ }
1177
+ } else {
1178
+ lastValidIndex = i;
1179
+ }
1180
+ break;
1181
+ }
1182
+ }
1183
+ }
1184
+ let result = input.slice(0, lastValidIndex + 1);
1185
+ for (let i = stack.length - 1; i >= 0; i--) {
1186
+ const state = stack[i];
1187
+ switch (state) {
1188
+ case "INSIDE_STRING": {
1189
+ result += '"';
1190
+ break;
1191
+ }
1192
+ case "INSIDE_OBJECT_KEY":
1193
+ case "INSIDE_OBJECT_AFTER_KEY":
1194
+ case "INSIDE_OBJECT_AFTER_COMMA":
1195
+ case "INSIDE_OBJECT_START":
1196
+ case "INSIDE_OBJECT_BEFORE_VALUE":
1197
+ case "INSIDE_OBJECT_AFTER_VALUE": {
1198
+ result += "}";
1199
+ break;
1200
+ }
1201
+ case "INSIDE_ARRAY_START":
1202
+ case "INSIDE_ARRAY_AFTER_COMMA":
1203
+ case "INSIDE_ARRAY_AFTER_VALUE": {
1204
+ result += "]";
1205
+ break;
1206
+ }
1207
+ case "INSIDE_LITERAL": {
1208
+ const partialLiteral = input.substring(literalStart, input.length);
1209
+ if ("true".startsWith(partialLiteral)) {
1210
+ result += "true".slice(partialLiteral.length);
1211
+ } else if ("false".startsWith(partialLiteral)) {
1212
+ result += "false".slice(partialLiteral.length);
1213
+ } else if ("null".startsWith(partialLiteral)) {
1214
+ result += "null".slice(partialLiteral.length);
1215
+ }
1216
+ }
1217
+ }
1218
+ }
1219
+ return result;
1220
+ }
1221
+
1222
+ // core/util/parse-partial-json.ts
1223
+ function parsePartialJson(jsonText) {
1224
+ if (jsonText == null) {
1225
+ return void 0;
1226
+ }
1227
+ try {
1228
+ return import_secure_json_parse2.default.parse(jsonText);
1229
+ } catch (ignored) {
1230
+ try {
1231
+ const fixedJsonText = fixJson(jsonText);
1232
+ return import_secure_json_parse2.default.parse(fixedJsonText);
1233
+ } catch (ignored2) {
1234
+ }
1235
+ }
1236
+ return void 0;
1237
+ }
1238
+
1239
+ // core/generate-object/stream-object.ts
1240
+ async function experimental_streamObject({
1241
+ model,
1242
+ schema,
1243
+ mode,
1244
+ system,
1245
+ prompt,
1246
+ messages,
1247
+ maxRetries,
1248
+ abortSignal,
1249
+ ...settings
1250
+ }) {
1251
+ const retry = retryWithExponentialBackoff({ maxRetries });
1252
+ const jsonSchema = (0, import_zod_to_json_schema2.default)(schema);
1253
+ if (mode === "auto" || mode == null) {
1254
+ mode = model.defaultObjectGenerationMode;
1255
+ }
1256
+ let callOptions;
1257
+ let transformer;
1258
+ switch (mode) {
1259
+ case "json": {
1260
+ callOptions = {
1261
+ mode: { type: "object-json" },
1262
+ ...prepareCallSettings(settings),
1263
+ inputFormat: getInputFormat({ prompt, messages }),
1264
+ prompt: convertToLanguageModelPrompt({
1265
+ system: injectJsonSchemaIntoSystem({ system, schema: jsonSchema }),
1266
+ prompt,
1267
+ messages
1268
+ }),
1269
+ abortSignal
1270
+ };
1271
+ transformer = {
1272
+ transform: (chunk, controller) => {
1273
+ switch (chunk.type) {
1274
+ case "text-delta":
1275
+ controller.enqueue(chunk.textDelta);
1276
+ break;
1277
+ case "error":
1278
+ controller.enqueue(chunk);
1279
+ break;
1280
+ }
1281
+ }
1282
+ };
1283
+ break;
1284
+ }
1285
+ case "grammar": {
1286
+ callOptions = {
1287
+ mode: { type: "object-grammar", schema: jsonSchema },
1288
+ ...settings,
1289
+ inputFormat: getInputFormat({ prompt, messages }),
1290
+ prompt: convertToLanguageModelPrompt({
1291
+ system: injectJsonSchemaIntoSystem({ system, schema: jsonSchema }),
1292
+ prompt,
1293
+ messages
1294
+ }),
1295
+ abortSignal
1296
+ };
1297
+ transformer = {
1298
+ transform: (chunk, controller) => {
1299
+ switch (chunk.type) {
1300
+ case "text-delta":
1301
+ controller.enqueue(chunk.textDelta);
1302
+ break;
1303
+ case "error":
1304
+ controller.enqueue(chunk);
1305
+ break;
1306
+ }
1307
+ }
1308
+ };
1309
+ break;
1310
+ }
1311
+ case "tool": {
1312
+ callOptions = {
1313
+ mode: {
1314
+ type: "object-tool",
1315
+ tool: {
1316
+ type: "function",
1317
+ name: "json",
1318
+ description: "Respond with a JSON object.",
1319
+ parameters: jsonSchema
1320
+ }
1321
+ },
1322
+ ...settings,
1323
+ inputFormat: getInputFormat({ prompt, messages }),
1324
+ prompt: convertToLanguageModelPrompt({ system, prompt, messages }),
1325
+ abortSignal
1326
+ };
1327
+ transformer = {
1328
+ transform(chunk, controller) {
1329
+ switch (chunk.type) {
1330
+ case "tool-call-delta":
1331
+ controller.enqueue(chunk.argsTextDelta);
1332
+ break;
1333
+ case "error":
1334
+ controller.enqueue(chunk);
1335
+ break;
1336
+ }
1337
+ }
1338
+ };
1339
+ break;
1340
+ }
1341
+ case void 0: {
1342
+ throw new Error("Model does not have a default object generation mode.");
1343
+ }
1344
+ default: {
1345
+ const _exhaustiveCheck = mode;
1346
+ throw new Error(`Unsupported mode: ${_exhaustiveCheck}`);
1347
+ }
1348
+ }
1349
+ const result = await retry(() => model.doStream(callOptions));
1350
+ return new StreamObjectResult({
1351
+ stream: result.stream.pipeThrough(new TransformStream(transformer)),
1352
+ warnings: result.warnings
1353
+ });
1354
+ }
1355
+ var StreamObjectResult = class {
1356
+ constructor({
1357
+ stream,
1358
+ warnings
1359
+ }) {
1360
+ this.originalStream = stream;
1361
+ this.warnings = warnings;
1362
+ }
1363
+ get partialObjectStream() {
1364
+ let accumulatedText = "";
1365
+ let latestObject = void 0;
1366
+ return createAsyncIterableStream(this.originalStream, {
1367
+ transform(chunk, controller) {
1368
+ if (typeof chunk === "string") {
1369
+ accumulatedText += chunk;
1370
+ const currentObject = parsePartialJson(
1371
+ accumulatedText
1372
+ );
1373
+ if (!isDeepEqualData(latestObject, currentObject)) {
1374
+ latestObject = currentObject;
1375
+ controller.enqueue(currentObject);
1376
+ }
1377
+ }
1378
+ if (typeof chunk === "object" && chunk.type === "error") {
1379
+ throw chunk.error;
1380
+ }
1381
+ }
1382
+ });
1383
+ }
1384
+ };
1385
+
1386
+ // core/generate-text/generate-text.ts
1387
+ var import_zod_to_json_schema3 = __toESM(require("zod-to-json-schema"));
1388
+
1389
+ // core/generate-text/tool-call.ts
1390
+ function parseToolCall({
1391
+ toolCall,
1392
+ tools
1393
+ }) {
1394
+ const toolName = toolCall.toolName;
1395
+ if (tools == null) {
1396
+ throw new NoSuchToolError({
1397
+ message: `Tool ${toolCall.toolName} not found (no tools provided).`,
1398
+ toolName: toolCall.toolName
1399
+ });
1400
+ }
1401
+ const tool2 = tools[toolName];
1402
+ if (tool2 == null) {
1403
+ throw new NoSuchToolError({
1404
+ message: `Tool ${toolCall.toolName} not found.`,
1405
+ toolName: toolCall.toolName
1406
+ });
1407
+ }
1408
+ const parseResult = safeParseJSON({
1409
+ text: toolCall.args,
1410
+ schema: tool2.parameters
1411
+ });
1412
+ if (parseResult.success === false) {
1413
+ throw new InvalidToolArgumentsError({
1414
+ toolName,
1415
+ toolArgs: toolCall.args,
1416
+ cause: parseResult.error
1417
+ });
1418
+ }
1419
+ return {
1420
+ toolCallId: toolCall.toolCallId,
1421
+ toolName,
1422
+ args: parseResult.value
1423
+ };
1424
+ }
1425
+
1426
+ // core/generate-text/generate-text.ts
1427
+ async function experimental_generateText({
1428
+ model,
1429
+ tools,
1430
+ system,
1431
+ prompt,
1432
+ messages,
1433
+ maxRetries,
1434
+ abortSignal,
1435
+ ...settings
1436
+ }) {
1437
+ var _a, _b;
1438
+ const retry = retryWithExponentialBackoff({ maxRetries });
1439
+ const modelResponse = await retry(
1440
+ () => model.doGenerate({
1441
+ mode: {
1442
+ type: "regular",
1443
+ tools: tools == null ? void 0 : Object.entries(tools).map(([name, tool2]) => ({
1444
+ type: "function",
1445
+ name,
1446
+ description: tool2.description,
1447
+ parameters: (0, import_zod_to_json_schema3.default)(tool2.parameters)
1448
+ }))
1449
+ },
1450
+ ...prepareCallSettings(settings),
1451
+ inputFormat: getInputFormat({ prompt, messages }),
1452
+ prompt: convertToLanguageModelPrompt({
1453
+ system,
1454
+ prompt,
1455
+ messages
1456
+ }),
1457
+ abortSignal
1458
+ })
1459
+ );
1460
+ const toolCalls = [];
1461
+ for (const modelToolCall of (_a = modelResponse.toolCalls) != null ? _a : []) {
1462
+ toolCalls.push(parseToolCall({ toolCall: modelToolCall, tools }));
1463
+ }
1464
+ const toolResults = tools == null ? [] : await executeTools({ toolCalls, tools });
1465
+ return new GenerateTextResult({
1466
+ // Always return a string so that the caller doesn't have to check for undefined.
1467
+ // If they need to check if the model did not return any text,
1468
+ // they can check the length of the string:
1469
+ text: (_b = modelResponse.text) != null ? _b : "",
1470
+ toolCalls,
1471
+ toolResults,
1472
+ finishReason: modelResponse.finishReason,
1473
+ usage: calculateTokenUsage(modelResponse.usage),
1474
+ warnings: modelResponse.warnings
1475
+ });
1476
+ }
1477
+ async function executeTools({
1478
+ toolCalls,
1479
+ tools
1480
+ }) {
1481
+ const toolResults = await Promise.all(
1482
+ toolCalls.map(async (toolCall) => {
1483
+ const tool2 = tools[toolCall.toolName];
1484
+ if ((tool2 == null ? void 0 : tool2.execute) == null) {
1485
+ return void 0;
1486
+ }
1487
+ const result = await tool2.execute(toolCall.args);
1488
+ return {
1489
+ toolCallId: toolCall.toolCallId,
1490
+ toolName: toolCall.toolName,
1491
+ args: toolCall.args,
1492
+ result
1493
+ };
1494
+ })
1495
+ );
1496
+ return toolResults.filter(
1497
+ (result) => result != null
1498
+ );
1499
+ }
1500
+ var GenerateTextResult = class {
1501
+ constructor(options) {
1502
+ this.text = options.text;
1503
+ this.toolCalls = options.toolCalls;
1504
+ this.toolResults = options.toolResults;
1505
+ this.finishReason = options.finishReason;
1506
+ this.usage = options.usage;
1507
+ this.warnings = options.warnings;
1508
+ }
1509
+ };
1510
+
1511
+ // core/generate-text/stream-text.ts
1512
+ var import_zod_to_json_schema4 = __toESM(require("zod-to-json-schema"));
1513
+
1514
+ // shared/generate-id.ts
55
1515
  var import_non_secure = require("nanoid/non-secure");
1516
+ var generateId = (0, import_non_secure.customAlphabet)(
1517
+ "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
1518
+ 7
1519
+ );
1520
+
1521
+ // core/generate-text/run-tools-transformation.ts
1522
+ function runToolsTransformation({
1523
+ tools,
1524
+ generatorStream
1525
+ }) {
1526
+ let canClose = false;
1527
+ const outstandingToolCalls = /* @__PURE__ */ new Set();
1528
+ let toolResultsStreamController = null;
1529
+ const toolResultsStream = new ReadableStream({
1530
+ start(controller) {
1531
+ toolResultsStreamController = controller;
1532
+ }
1533
+ });
1534
+ const forwardStream = new TransformStream({
1535
+ transform(chunk, controller) {
1536
+ const chunkType = chunk.type;
1537
+ switch (chunkType) {
1538
+ case "text-delta":
1539
+ case "error": {
1540
+ controller.enqueue(chunk);
1541
+ break;
1542
+ }
1543
+ case "tool-call": {
1544
+ const toolName = chunk.toolName;
1545
+ if (tools == null) {
1546
+ toolResultsStreamController.enqueue({
1547
+ type: "error",
1548
+ error: new NoSuchToolError({
1549
+ message: `Tool ${chunk.toolName} not found (no tools provided).`,
1550
+ toolName: chunk.toolName
1551
+ })
1552
+ });
1553
+ break;
1554
+ }
1555
+ const tool2 = tools[toolName];
1556
+ if (tool2 == null) {
1557
+ toolResultsStreamController.enqueue({
1558
+ type: "error",
1559
+ error: new NoSuchToolError({
1560
+ message: `Tool ${chunk.toolName} not found.`,
1561
+ toolName: chunk.toolName
1562
+ })
1563
+ });
1564
+ break;
1565
+ }
1566
+ try {
1567
+ const toolCall = parseToolCall({
1568
+ toolCall: chunk,
1569
+ tools
1570
+ });
1571
+ controller.enqueue({
1572
+ type: "tool-call",
1573
+ ...toolCall
1574
+ });
1575
+ if (tool2.execute != null) {
1576
+ const toolExecutionId = generateId();
1577
+ outstandingToolCalls.add(toolExecutionId);
1578
+ tool2.execute(toolCall.args).then(
1579
+ (result) => {
1580
+ toolResultsStreamController.enqueue({
1581
+ type: "tool-result",
1582
+ ...toolCall,
1583
+ result
1584
+ });
1585
+ outstandingToolCalls.delete(toolExecutionId);
1586
+ if (canClose && outstandingToolCalls.size === 0) {
1587
+ toolResultsStreamController.close();
1588
+ }
1589
+ },
1590
+ (error) => {
1591
+ toolResultsStreamController.enqueue({
1592
+ type: "error",
1593
+ error
1594
+ });
1595
+ outstandingToolCalls.delete(toolExecutionId);
1596
+ if (canClose && outstandingToolCalls.size === 0) {
1597
+ toolResultsStreamController.close();
1598
+ }
1599
+ }
1600
+ );
1601
+ }
1602
+ } catch (error) {
1603
+ toolResultsStreamController.enqueue({
1604
+ type: "error",
1605
+ error
1606
+ });
1607
+ }
1608
+ break;
1609
+ }
1610
+ case "finish": {
1611
+ controller.enqueue({
1612
+ type: "finish",
1613
+ finishReason: chunk.finishReason,
1614
+ usage: {
1615
+ promptTokens: chunk.usage.promptTokens,
1616
+ completionTokens: chunk.usage.completionTokens,
1617
+ totalTokens: chunk.usage.promptTokens + chunk.usage.completionTokens
1618
+ }
1619
+ });
1620
+ break;
1621
+ }
1622
+ case "tool-call-delta": {
1623
+ break;
1624
+ }
1625
+ default: {
1626
+ const _exhaustiveCheck = chunkType;
1627
+ throw new Error(`Unhandled chunk type: ${_exhaustiveCheck}`);
1628
+ }
1629
+ }
1630
+ },
1631
+ flush() {
1632
+ canClose = true;
1633
+ if (outstandingToolCalls.size === 0) {
1634
+ toolResultsStreamController.close();
1635
+ }
1636
+ }
1637
+ });
1638
+ return new ReadableStream({
1639
+ async start(controller) {
1640
+ generatorStream.pipeThrough(forwardStream).pipeTo(
1641
+ new WritableStream({
1642
+ write(chunk) {
1643
+ controller.enqueue(chunk);
1644
+ },
1645
+ close() {
1646
+ }
1647
+ })
1648
+ );
1649
+ toolResultsStream.pipeTo(
1650
+ new WritableStream({
1651
+ write(chunk) {
1652
+ controller.enqueue(chunk);
1653
+ },
1654
+ close() {
1655
+ controller.close();
1656
+ }
1657
+ })
1658
+ );
1659
+ }
1660
+ });
1661
+ }
1662
+
1663
+ // core/generate-text/stream-text.ts
1664
+ async function experimental_streamText({
1665
+ model,
1666
+ tools,
1667
+ system,
1668
+ prompt,
1669
+ messages,
1670
+ maxRetries,
1671
+ abortSignal,
1672
+ ...settings
1673
+ }) {
1674
+ const retry = retryWithExponentialBackoff({ maxRetries });
1675
+ const { stream, warnings } = await retry(
1676
+ () => model.doStream({
1677
+ mode: {
1678
+ type: "regular",
1679
+ tools: tools == null ? void 0 : Object.entries(tools).map(([name, tool2]) => ({
1680
+ type: "function",
1681
+ name,
1682
+ description: tool2.description,
1683
+ parameters: (0, import_zod_to_json_schema4.default)(tool2.parameters)
1684
+ }))
1685
+ },
1686
+ ...prepareCallSettings(settings),
1687
+ inputFormat: getInputFormat({ prompt, messages }),
1688
+ prompt: convertToLanguageModelPrompt({
1689
+ system,
1690
+ prompt,
1691
+ messages
1692
+ }),
1693
+ abortSignal
1694
+ })
1695
+ );
1696
+ return new StreamTextResult({
1697
+ stream: runToolsTransformation({
1698
+ tools,
1699
+ generatorStream: stream
1700
+ }),
1701
+ warnings
1702
+ });
1703
+ }
1704
+ var StreamTextResult = class {
1705
+ constructor({
1706
+ stream,
1707
+ warnings
1708
+ }) {
1709
+ this.originalStream = stream;
1710
+ this.warnings = warnings;
1711
+ }
1712
+ get textStream() {
1713
+ return createAsyncIterableStream(this.originalStream, {
1714
+ transform(chunk, controller) {
1715
+ if (chunk.type === "text-delta") {
1716
+ if (chunk.textDelta.length > 0) {
1717
+ controller.enqueue(chunk.textDelta);
1718
+ }
1719
+ } else if (chunk.type === "error") {
1720
+ throw chunk.error;
1721
+ }
1722
+ }
1723
+ });
1724
+ }
1725
+ get fullStream() {
1726
+ return createAsyncIterableStream(this.originalStream, {
1727
+ transform(chunk, controller) {
1728
+ if (chunk.type === "text-delta") {
1729
+ if (chunk.textDelta.length > 0) {
1730
+ controller.enqueue(chunk);
1731
+ }
1732
+ } else {
1733
+ controller.enqueue(chunk);
1734
+ }
1735
+ }
1736
+ });
1737
+ }
1738
+ toAIStream(callbacks) {
1739
+ return readableFromAsyncIterable(this.textStream).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
1740
+ createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
1741
+ );
1742
+ }
1743
+ };
1744
+
1745
+ // core/tool/tool.ts
1746
+ function tool(tool2) {
1747
+ return tool2;
1748
+ }
56
1749
 
57
1750
  // shared/stream-parts.ts
58
1751
  var textStreamPart = {
@@ -235,10 +1928,6 @@ function formatStreamPart(type, value) {
235
1928
  }
236
1929
 
237
1930
  // shared/utils.ts
238
- var nanoid = (0, import_non_secure.customAlphabet)(
239
- "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
240
- 7
241
- );
242
1931
  function createChunkDecoder(complex) {
243
1932
  const decoder = new TextDecoder();
244
1933
  if (!complex) {
@@ -638,6 +2327,12 @@ async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
638
2327
  }
639
2328
  }
640
2329
  }
2330
+ function AWSBedrockAnthropicMessagesStream(response, callbacks) {
2331
+ return AWSBedrockStream(response, callbacks, (chunk) => {
2332
+ var _a;
2333
+ return (_a = chunk.delta) == null ? void 0 : _a.text;
2334
+ });
2335
+ }
641
2336
  function AWSBedrockAnthropicStream(response, callbacks) {
642
2337
  return AWSBedrockStream(response, callbacks, (chunk) => chunk.completion);
643
2338
  }
@@ -1111,13 +2806,13 @@ function createFunctionCallTransformer(callbacks) {
1111
2806
  const toolCalls = {
1112
2807
  tools: []
1113
2808
  };
1114
- for (const tool of payload.tool_calls) {
2809
+ for (const tool2 of payload.tool_calls) {
1115
2810
  toolCalls.tools.push({
1116
- id: tool.id,
2811
+ id: tool2.id,
1117
2812
  type: "function",
1118
2813
  func: {
1119
- name: tool.function.name,
1120
- arguments: JSON.parse(tool.function.arguments)
2814
+ name: tool2.function.name,
2815
+ arguments: JSON.parse(tool2.function.arguments)
1121
2816
  }
1122
2817
  });
1123
2818
  }
@@ -1288,7 +2983,7 @@ async function parseComplexResponse({
1288
2983
  abortControllerRef,
1289
2984
  update,
1290
2985
  onFinish,
1291
- generateId = nanoid,
2986
+ generateId: generateId2 = generateId,
1292
2987
  getCurrentDate = () => /* @__PURE__ */ new Date()
1293
2988
  }) {
1294
2989
  const createdAt = getCurrentDate();
@@ -1307,7 +3002,7 @@ async function parseComplexResponse({
1307
3002
  };
1308
3003
  } else {
1309
3004
  prefixMap["text"] = {
1310
- id: generateId(),
3005
+ id: generateId2(),
1311
3006
  role: "assistant",
1312
3007
  content: value,
1313
3008
  createdAt
@@ -1317,7 +3012,7 @@ async function parseComplexResponse({
1317
3012
  let functionCallMessage = null;
1318
3013
  if (type === "function_call") {
1319
3014
  prefixMap["function_call"] = {
1320
- id: generateId(),
3015
+ id: generateId2(),
1321
3016
  role: "assistant",
1322
3017
  content: "",
1323
3018
  function_call: value.function_call,
@@ -1329,7 +3024,7 @@ async function parseComplexResponse({
1329
3024
  let toolCallMessage = null;
1330
3025
  if (type === "tool_calls") {
1331
3026
  prefixMap["tool_calls"] = {
1332
- id: generateId(),
3027
+ id: generateId2(),
1333
3028
  role: "assistant",
1334
3029
  content: "",
1335
3030
  tool_calls: value.tool_calls,
@@ -1419,7 +3114,7 @@ var experimental_StreamingReactResponse = class {
1419
3114
  });
1420
3115
  lastPayload = payload;
1421
3116
  },
1422
- generateId: (_a = options.generateId) != null ? _a : nanoid,
3117
+ generateId: (_a = options.generateId) != null ? _a : generateId,
1423
3118
  onFinish: () => {
1424
3119
  if (lastPayload !== void 0) {
1425
3120
  resolveFunc({
@@ -1502,6 +3197,7 @@ function streamToResponse(res, response, init) {
1502
3197
  // Annotate the CommonJS export names for ESM import in node:
1503
3198
  0 && (module.exports = {
1504
3199
  AIStream,
3200
+ AWSBedrockAnthropicMessagesStream,
1505
3201
  AWSBedrockAnthropicStream,
1506
3202
  AWSBedrockCohereStream,
1507
3203
  AWSBedrockLlama2Stream,
@@ -1509,6 +3205,8 @@ function streamToResponse(res, response, init) {
1509
3205
  AnthropicStream,
1510
3206
  COMPLEX_HEADER,
1511
3207
  CohereStream,
3208
+ GenerateObjectResult,
3209
+ GenerateTextResult,
1512
3210
  GoogleGenerativeAIStream,
1513
3211
  HuggingFaceStream,
1514
3212
  InkeepStream,
@@ -1516,7 +3214,11 @@ function streamToResponse(res, response, init) {
1516
3214
  MistralStream,
1517
3215
  OpenAIStream,
1518
3216
  ReplicateStream,
3217
+ StreamObjectResult,
3218
+ StreamTextResult,
1519
3219
  StreamingTextResponse,
3220
+ convertDataContentToBase64String,
3221
+ convertDataContentToUint8Array,
1520
3222
  createCallbacksTransformer,
1521
3223
  createChunkDecoder,
1522
3224
  createEventStreamTransformer,
@@ -1524,10 +3226,16 @@ function streamToResponse(res, response, init) {
1524
3226
  experimental_AssistantResponse,
1525
3227
  experimental_StreamData,
1526
3228
  experimental_StreamingReactResponse,
3229
+ experimental_generateObject,
3230
+ experimental_generateText,
3231
+ experimental_streamObject,
3232
+ experimental_streamText,
3233
+ generateId,
1527
3234
  isStreamStringEqualToType,
1528
3235
  nanoid,
1529
3236
  readableFromAsyncIterable,
1530
3237
  streamToResponse,
3238
+ tool,
1531
3239
  trimStartOfStreamHelper
1532
3240
  });
1533
3241
  //# sourceMappingURL=index.js.map