ai 3.3.5 → 3.3.7

This diff shows the published contents of two package versions as they appear in their public registry. It is provided for informational purposes only.
package/dist/index.js CHANGED
@@ -1,13 +1,11 @@
  "use strict";
- var __create = Object.create;
  var __defProp = Object.defineProperty;
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
  var __getOwnPropNames = Object.getOwnPropertyNames;
- var __getProtoOf = Object.getPrototypeOf;
  var __hasOwnProp = Object.prototype.hasOwnProperty;
  var __export = (target, all) => {
- for (var name9 in all)
- __defProp(target, name9, { get: all[name9], enumerable: true });
+ for (var name12 in all)
+ __defProp(target, name12, { get: all[name12], enumerable: true });
  };
  var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
@@ -17,22 +15,14 @@ var __copyProps = (to, from, except, desc) => {
  }
  return to;
  };
- var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
- // If the importer is in node compatibility mode or this is not an ESM
- // file that has been converted to a CommonJS file using a Babel-
- // compatible transform (i.e. "__esModule" has not been set), then set
- // "default" to the CommonJS "module.exports" for node compatibility.
- isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
- mod
- ));
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

  // streams/index.ts
  var streams_exports = {};
  __export(streams_exports, {
- AISDKError: () => import_provider11.AISDKError,
+ AISDKError: () => import_provider14.AISDKError,
  AIStream: () => AIStream,
- APICallError: () => import_provider11.APICallError,
+ APICallError: () => import_provider14.APICallError,
  AWSBedrockAnthropicMessagesStream: () => AWSBedrockAnthropicMessagesStream,
  AWSBedrockAnthropicStream: () => AWSBedrockAnthropicStream,
  AWSBedrockCohereStream: () => AWSBedrockCohereStream,
@@ -42,7 +32,7 @@ __export(streams_exports, {
  AssistantResponse: () => AssistantResponse,
  CohereStream: () => CohereStream,
  DownloadError: () => DownloadError,
- EmptyResponseBodyError: () => import_provider11.EmptyResponseBodyError,
+ EmptyResponseBodyError: () => import_provider14.EmptyResponseBodyError,
  GoogleGenerativeAIStream: () => GoogleGenerativeAIStream,
  HuggingFaceStream: () => HuggingFaceStream,
  InkeepStream: () => InkeepStream,
@@ -50,13 +40,13 @@ __export(streams_exports, {
  InvalidDataContentError: () => InvalidDataContentError,
  InvalidMessageRoleError: () => InvalidMessageRoleError,
  InvalidModelIdError: () => InvalidModelIdError,
- InvalidPromptError: () => import_provider11.InvalidPromptError,
- InvalidResponseDataError: () => import_provider11.InvalidResponseDataError,
+ InvalidPromptError: () => import_provider14.InvalidPromptError,
+ InvalidResponseDataError: () => import_provider14.InvalidResponseDataError,
  InvalidToolArgumentsError: () => InvalidToolArgumentsError,
- JSONParseError: () => import_provider11.JSONParseError,
+ JSONParseError: () => import_provider14.JSONParseError,
  LangChainAdapter: () => langchain_adapter_exports,
  LangChainStream: () => LangChainStream,
- LoadAPIKeyError: () => import_provider11.LoadAPIKeyError,
+ LoadAPIKeyError: () => import_provider14.LoadAPIKeyError,
  MistralStream: () => MistralStream,
  NoObjectGeneratedError: () => NoObjectGeneratedError,
  NoSuchModelError: () => NoSuchModelError,
@@ -67,8 +57,8 @@ __export(streams_exports, {
  RetryError: () => RetryError,
  StreamData: () => StreamData2,
  StreamingTextResponse: () => StreamingTextResponse,
- TypeValidationError: () => import_provider11.TypeValidationError,
- UnsupportedFunctionalityError: () => import_provider11.UnsupportedFunctionalityError,
+ TypeValidationError: () => import_provider14.TypeValidationError,
+ UnsupportedFunctionalityError: () => import_provider14.UnsupportedFunctionalityError,
  convertDataContentToBase64String: () => convertDataContentToBase64String,
  convertDataContentToUint8Array: () => convertDataContentToUint8Array,
  convertToCoreMessages: () => convertToCoreMessages,
@@ -87,15 +77,15 @@ __export(streams_exports, {
  experimental_generateText: () => experimental_generateText,
  experimental_streamObject: () => experimental_streamObject,
  experimental_streamText: () => experimental_streamText,
- formatStreamPart: () => import_ui_utils6.formatStreamPart,
+ formatStreamPart: () => import_ui_utils10.formatStreamPart,
  generateId: () => generateId2,
  generateObject: () => generateObject,
  generateText: () => generateText,
- jsonSchema: () => jsonSchema,
+ jsonSchema: () => import_ui_utils6.jsonSchema,
  nanoid: () => nanoid,
- parseComplexResponse: () => import_ui_utils6.parseComplexResponse,
- parseStreamPart: () => import_ui_utils6.parseStreamPart,
- readDataStream: () => import_ui_utils6.readDataStream,
+ parseComplexResponse: () => import_ui_utils10.parseComplexResponse,
+ parseStreamPart: () => import_ui_utils10.parseStreamPart,
+ readDataStream: () => import_ui_utils10.readDataStream,
  readableFromAsyncIterable: () => readableFromAsyncIterable,
  streamObject: () => streamObject,
  streamText: () => streamText,
@@ -104,8 +94,11 @@ __export(streams_exports, {
  trimStartOfStreamHelper: () => trimStartOfStreamHelper
  });
  module.exports = __toCommonJS(streams_exports);
+ var import_ui_utils10 = require("@ai-sdk/ui-utils");
+ var import_provider_utils7 = require("@ai-sdk/provider-utils");
+
+ // core/index.ts
  var import_ui_utils6 = require("@ai-sdk/ui-utils");
- var import_provider_utils8 = require("@ai-sdk/provider-utils");

  // util/retry-with-exponential-backoff.ts
  var import_provider2 = require("@ai-sdk/provider");
@@ -113,7 +106,7 @@ var import_provider_utils = require("@ai-sdk/provider-utils");

  // util/delay.ts
  async function delay(delayInMs) {
- return new Promise((resolve) => setTimeout(resolve, delayInMs));
+ return delayInMs === void 0 ? Promise.resolve() : new Promise((resolve) => setTimeout(resolve, delayInMs));
  }

  // util/retry-error.ts
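Note: the util/delay.ts change above makes the internal delay helper resolve immediately when called without a delay, instead of scheduling a setTimeout whose timeout is undefined (which Node and browsers treat as 0 ms, still deferring a macrotask). A minimal TypeScript sketch of the new behavior (delay is an internal helper, not part of the package's public API):

async function delay(delayInMs?: number): Promise<void> {
  // no timer at all when no delay was requested
  return delayInMs === undefined
    ? Promise.resolve()
    : new Promise((resolve) => setTimeout(resolve, delayInMs));
}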
@@ -228,7 +221,7 @@ function getBaseTelemetryAttributes({
  telemetry,
  headers
  }) {
- var _a9;
+ var _a12;
  return {
  "ai.model.provider": model.provider,
  "ai.model.id": model.modelId,
@@ -241,7 +234,7 @@ function getBaseTelemetryAttributes({
  "resource.name": telemetry == null ? void 0 : telemetry.functionId,
  "ai.telemetry.functionId": telemetry == null ? void 0 : telemetry.functionId,
  // add metadata as attributes:
- ...Object.entries((_a9 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a9 : {}).reduce(
+ ...Object.entries((_a12 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a12 : {}).reduce(
  (attributes, [key, value]) => {
  attributes[`ai.telemetry.metadata.${key}`] = value;
  return attributes;
@@ -266,7 +259,7 @@ var noopTracer = {
  startSpan() {
  return noopSpan;
  },
- startActiveSpan(name9, arg1, arg2, arg3) {
+ startActiveSpan(name12, arg1, arg2, arg3) {
  if (typeof arg1 === "function") {
  return arg1(noopSpan);
  }
@@ -334,13 +327,13 @@ function getTracer({ isEnabled }) {
  // core/telemetry/record-span.ts
  var import_api2 = require("@opentelemetry/api");
  function recordSpan({
- name: name9,
+ name: name12,
  tracer,
  attributes,
  fn,
  endWhenDone = true
  }) {
- return tracer.startActiveSpan(name9, { attributes }, async (span) => {
+ return tracer.startActiveSpan(name12, { attributes }, async (span) => {
  try {
  const result = await fn(span);
  if (endWhenDone) {
@@ -406,14 +399,14 @@ async function embed({
  headers,
  experimental_telemetry: telemetry
  }) {
- var _a9;
+ var _a12;
  const baseTelemetryAttributes = getBaseTelemetryAttributes({
  model,
  telemetry,
  headers,
  settings: { maxRetries }
  });
- const tracer = getTracer({ isEnabled: (_a9 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a9 : false });
+ const tracer = getTracer({ isEnabled: (_a12 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a12 : false });
  return recordSpan({
  name: "ai.embed",
  attributes: selectTelemetryAttributes({
@@ -446,14 +439,14 @@ async function embed({
  }),
  tracer,
  fn: async (doEmbedSpan) => {
- var _a10;
+ var _a13;
  const modelResponse = await model.doEmbed({
  values: [value],
  abortSignal,
  headers
  });
  const embedding2 = modelResponse.embeddings[0];
- const usage2 = (_a10 = modelResponse.usage) != null ? _a10 : { tokens: NaN };
+ const usage2 = (_a13 = modelResponse.usage) != null ? _a13 : { tokens: NaN };
  doEmbedSpan.setAttributes(
  selectTelemetryAttributes({
  telemetry,
@@ -519,14 +512,14 @@ async function embedMany({
  headers,
  experimental_telemetry: telemetry
  }) {
- var _a9;
+ var _a12;
  const baseTelemetryAttributes = getBaseTelemetryAttributes({
  model,
  telemetry,
  headers,
  settings: { maxRetries }
  });
- const tracer = getTracer({ isEnabled: (_a9 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a9 : false });
+ const tracer = getTracer({ isEnabled: (_a12 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a12 : false });
  return recordSpan({
  name: "ai.embedMany",
  attributes: selectTelemetryAttributes({
@@ -564,14 +557,14 @@ async function embedMany({
  }),
  tracer,
  fn: async (doEmbedSpan) => {
- var _a10;
+ var _a13;
  const modelResponse = await model.doEmbed({
  values,
  abortSignal,
  headers
  });
  const embeddings3 = modelResponse.embeddings;
- const usage2 = (_a10 = modelResponse.usage) != null ? _a10 : { tokens: NaN };
+ const usage2 = (_a13 = modelResponse.usage) != null ? _a13 : { tokens: NaN };
  doEmbedSpan.setAttributes(
  selectTelemetryAttributes({
  telemetry,
@@ -623,14 +616,14 @@ async function embedMany({
  }),
  tracer,
  fn: async (doEmbedSpan) => {
- var _a10;
+ var _a13;
  const modelResponse = await model.doEmbed({
  values: chunk,
  abortSignal,
  headers
  });
  const embeddings2 = modelResponse.embeddings;
- const usage2 = (_a10 = modelResponse.usage) != null ? _a10 : { tokens: NaN };
+ const usage2 = (_a13 = modelResponse.usage) != null ? _a13 : { tokens: NaN };
  doEmbedSpan.setAttributes(
  selectTelemetryAttributes({
  telemetry,
@@ -677,7 +670,8 @@ var DefaultEmbedManyResult = class {
  };

  // core/generate-object/generate-object.ts
- var import_provider_utils5 = require("@ai-sdk/provider-utils");
+ var import_provider_utils4 = require("@ai-sdk/provider-utils");
+ var import_ui_utils = require("@ai-sdk/ui-utils");

  // core/prompt/convert-to-language-model-prompt.ts
  var import_provider_utils3 = require("@ai-sdk/provider-utils");
@@ -732,7 +726,7 @@ async function download({
  url,
  fetchImplementation = fetch
  }) {
- var _a9;
+ var _a12;
  const urlText = url.toString();
  try {
  const response = await fetchImplementation(urlText);
@@ -745,7 +739,7 @@ async function download({
  }
  return {
  data: new Uint8Array(await response.arrayBuffer()),
- mimeType: (_a9 = response.headers.get("content-type")) != null ? _a9 : void 0
+ mimeType: (_a12 = response.headers.get("content-type")) != null ? _a12 : void 0
  };
  } catch (error) {
  if (DownloadError.isInstance(error)) {
@@ -942,7 +936,7 @@ function convertToLanguageModelMessage(message, downloadedImages) {
  role: "user",
  content: message.content.map(
  (part) => {
- var _a9, _b, _c;
+ var _a12, _b, _c;
  switch (part.type) {
  case "text": {
  return part;
@@ -960,7 +954,7 @@ function convertToLanguageModelMessage(message, downloadedImages) {
  return {
  type: "image",
  image: downloadedImage.data,
- mimeType: (_a9 = part.mimeType) != null ? _a9 : downloadedImage.mimeType
+ mimeType: (_a12 = part.mimeType) != null ? _a12 : downloadedImage.mimeType
  };
  }
  }
@@ -1264,8 +1258,8 @@ function prepareResponseHeaders(init, {
  contentType,
  dataStreamVersion
  }) {
- var _a9;
- const headers = new Headers((_a9 = init == null ? void 0 : init.headers) != null ? _a9 : {});
+ var _a12;
+ const headers = new Headers((_a12 = init == null ? void 0 : init.headers) != null ? _a12 : {});
  if (!headers.has("Content-Type")) {
  headers.set("Content-Type", contentType);
  }
@@ -1275,41 +1269,6 @@ function prepareResponseHeaders(init, {
  return headers;
  }

- // core/util/schema.ts
- var import_provider_utils4 = require("@ai-sdk/provider-utils");
- var import_zod_to_json_schema = __toESM(require("zod-to-json-schema"));
- var schemaSymbol = Symbol.for("vercel.ai.schema");
- function jsonSchema(jsonSchema2, {
- validate
- } = {}) {
- return {
- [schemaSymbol]: true,
- _type: void 0,
- // should never be used directly
- [import_provider_utils4.validatorSymbol]: true,
- jsonSchema: jsonSchema2,
- validate
- };
- }
- function isSchema(value) {
- return typeof value === "object" && value !== null && schemaSymbol in value && value[schemaSymbol] === true && "jsonSchema" in value && "validate" in value;
- }
- function asSchema(schema) {
- return isSchema(schema) ? schema : zodSchema(schema);
- }
- function zodSchema(zodSchema2) {
- return jsonSchema(
- // we assume that zodToJsonSchema will return a valid JSONSchema7:
- (0, import_zod_to_json_schema.default)(zodSchema2),
- {
- validate: (value) => {
- const result = zodSchema2.safeParse(value);
- return result.success ? { success: true, value: result.data } : { success: false, error: result.error };
- }
- }
- );
- }
-
  // core/generate-object/inject-json-schema-into-system.ts
  var DEFAULT_SCHEMA_PREFIX = "JSON schema:";
  var DEFAULT_SCHEMA_SUFFIX = "You MUST answer with a JSON object that matches the JSON schema above.";
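Note: core/util/schema.ts is removed above because jsonSchema/asSchema now live in @ai-sdk/ui-utils; the export map at the top of this file re-exports jsonSchema from there, so the public import path is unchanged. A minimal sketch of continued usage (the generic parameter and options shown reflect the pre-existing API, not something this diff adds):

import { jsonSchema } from "ai"; // now backed by @ai-sdk/ui-utils

const recipeSchema = jsonSchema<{ name: string }>({
  type: "object",
  properties: { name: { type: "string" } },
  required: ["name"],
});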
@@ -1380,15 +1339,15 @@ async function generateObject({
  experimental_telemetry: telemetry,
  ...settings
  }) {
- var _a9;
+ var _a12;
  const baseTelemetryAttributes = getBaseTelemetryAttributes({
  model,
  telemetry,
  headers,
  settings: { ...settings, maxRetries }
  });
- const schema = asSchema(inputSchema);
- const tracer = getTracer({ isEnabled: (_a9 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a9 : false });
+ const schema = (0, import_ui_utils.asSchema)(inputSchema);
+ const tracer = getTracer({ isEnabled: (_a12 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a12 : false });
  return recordSpan({
  name: "ai.generateObject",
  attributes: selectTelemetryAttributes({
@@ -1548,7 +1507,7 @@ async function generateObject({
  }),
  tracer,
  fn: async (span2) => {
- var _a10, _b;
+ var _a13, _b;
  const result2 = await model.doGenerate({
  mode: {
  type: "object-tool",
@@ -1565,7 +1524,7 @@ async function generateObject({
  abortSignal,
  headers
  });
- const objectText = (_b = (_a10 = result2.toolCalls) == null ? void 0 : _a10[0]) == null ? void 0 : _b.args;
+ const objectText = (_b = (_a13 = result2.toolCalls) == null ? void 0 : _a13[0]) == null ? void 0 : _b.args;
  if (objectText === void 0) {
  throw new NoObjectGeneratedError();
  }
@@ -1606,7 +1565,7 @@ async function generateObject({
  throw new Error(`Unsupported mode: ${_exhaustiveCheck}`);
  }
  }
- const parseResult = (0, import_provider_utils5.safeParseJSON)({ text: result, schema });
+ const parseResult = (0, import_provider_utils4.safeParseJSON)({ text: result, schema });
  if (!parseResult.success) {
  throw parseResult.error;
  }
@@ -1644,9 +1603,9 @@ var DefaultGenerateObjectResult = class {
  this.logprobs = options.logprobs;
  }
  toJsonResponse(init) {
- var _a9;
+ var _a12;
  return new Response(JSON.stringify(this.object), {
- status: (_a9 = init == null ? void 0 : init.status) != null ? _a9 : 200,
+ status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
  headers: prepareResponseHeaders(init, {
  contentType: "application/json; charset=utf-8"
  })
@@ -1656,8 +1615,8 @@ var DefaultGenerateObjectResult = class {
  var experimental_generateObject = generateObject;

  // core/generate-object/stream-object.ts
- var import_provider_utils6 = require("@ai-sdk/provider-utils");
- var import_ui_utils = require("@ai-sdk/ui-utils");
+ var import_provider_utils5 = require("@ai-sdk/provider-utils");
+ var import_ui_utils2 = require("@ai-sdk/ui-utils");

  // util/create-resolvable-promise.ts
  function createResolvablePromise() {
@@ -1697,17 +1656,17 @@ var DelayedPromise = class {
  return this.promise;
  }
  resolve(value) {
- var _a9;
+ var _a12;
  this.status = { type: "resolved", value };
  if (this.promise) {
- (_a9 = this._resolve) == null ? void 0 : _a9.call(this, value);
+ (_a12 = this._resolve) == null ? void 0 : _a12.call(this, value);
  }
  }
  reject(error) {
- var _a9;
+ var _a12;
  this.status = { type: "rejected", error };
  if (this.promise) {
- (_a9 = this._reject) == null ? void 0 : _a9.call(this, error);
+ (_a12 = this._reject) == null ? void 0 : _a12.call(this, error);
  }
  }
  };
@@ -1746,16 +1705,16 @@ async function streamObject({
  onFinish,
  ...settings
  }) {
- var _a9;
+ var _a12;
  const baseTelemetryAttributes = getBaseTelemetryAttributes({
  model,
  telemetry,
  headers,
  settings: { ...settings, maxRetries }
  });
- const tracer = getTracer({ isEnabled: (_a9 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a9 : false });
+ const tracer = getTracer({ isEnabled: (_a12 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a12 : false });
  const retry = retryWithExponentialBackoff({ maxRetries });
- const schema = asSchema(inputSchema);
+ const schema = (0, import_ui_utils2.asSchema)(inputSchema);
  return recordSpan({
  name: "ai.streamObject",
  attributes: selectTelemetryAttributes({
@@ -1960,10 +1919,10 @@ var DefaultStreamObjectResult = class {
  if (typeof chunk === "string") {
  accumulatedText += chunk;
  delta += chunk;
- const currentObject = (0, import_ui_utils.parsePartialJson)(
+ const currentObject = (0, import_ui_utils2.parsePartialJson)(
  accumulatedText
  );
- if (!(0, import_ui_utils.isDeepEqualData)(latestObject, currentObject)) {
+ if (!(0, import_ui_utils2.isDeepEqualData)(latestObject, currentObject)) {
  latestObject = currentObject;
  controller.enqueue({
  type: "object",
@@ -1989,7 +1948,7 @@ var DefaultStreamObjectResult = class {
  usage = calculateCompletionTokenUsage(chunk.usage);
  controller.enqueue({ ...chunk, usage });
  resolveUsage(usage);
- const validationResult = (0, import_provider_utils6.safeValidateTypes)({
+ const validationResult = (0, import_provider_utils5.safeValidateTypes)({
  value: latestObject,
  schema
  });
@@ -2115,8 +2074,8 @@ var DefaultStreamObjectResult = class {
  });
  }
  pipeTextStreamToResponse(response, init) {
- var _a9;
- response.writeHead((_a9 = init == null ? void 0 : init.status) != null ? _a9 : 200, {
+ var _a12;
+ response.writeHead((_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200, {
  "Content-Type": "text/plain; charset=utf-8",
  ...init == null ? void 0 : init.headers
  });
@@ -2138,9 +2097,9 @@ var DefaultStreamObjectResult = class {
  read();
  }
  toTextStreamResponse(init) {
- var _a9;
+ var _a12;
  return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
- status: (_a9 = init == null ? void 0 : init.status) != null ? _a9 : 200,
+ status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
  headers: prepareResponseHeaders(init, {
  contentType: "text/plain; charset=utf-8"
  })
@@ -2149,6 +2108,9 @@ var DefaultStreamObjectResult = class {
  };
  var experimental_streamObject = streamObject;

+ // core/prompt/prepare-tools-and-tool-choice.ts
+ var import_ui_utils3 = require("@ai-sdk/ui-utils");
+
  // core/util/is-non-empty-object.ts
  function isNonEmptyObject(object) {
  return object != null && Object.keys(object).length > 0;
@@ -2166,18 +2128,19 @@ function prepareToolsAndToolChoice({
  };
  }
  return {
- tools: Object.entries(tools).map(([name9, tool2]) => ({
+ tools: Object.entries(tools).map(([name12, tool2]) => ({
  type: "function",
- name: name9,
+ name: name12,
  description: tool2.description,
- parameters: asSchema(tool2.parameters).jsonSchema
+ parameters: (0, import_ui_utils3.asSchema)(tool2.parameters).jsonSchema
  })),
  toolChoice: toolChoice == null ? { type: "auto" } : typeof toolChoice === "string" ? { type: toolChoice } : { type: "tool", toolName: toolChoice.toolName }
  };
  }

  // core/generate-text/tool-call.ts
- var import_provider_utils7 = require("@ai-sdk/provider-utils");
+ var import_provider_utils6 = require("@ai-sdk/provider-utils");
+ var import_ui_utils4 = require("@ai-sdk/ui-utils");

  // errors/invalid-tool-arguments-error.ts
  var import_provider9 = require("@ai-sdk/provider");
@@ -2281,9 +2244,9 @@ function parseToolCall({
  availableTools: Object.keys(tools)
  });
  }
- const parseResult = (0, import_provider_utils7.safeParseJSON)({
+ const parseResult = (0, import_provider_utils6.safeParseJSON)({
  text: toolCall.args,
- schema: asSchema(tool2.parameters)
+ schema: (0, import_ui_utils4.asSchema)(tool2.parameters)
  });
  if (parseResult.success === false) {
  throw new InvalidToolArgumentsError({
@@ -2316,14 +2279,14 @@ async function generateText({
  experimental_telemetry: telemetry,
  ...settings
  }) {
- var _a9;
+ var _a12;
  const baseTelemetryAttributes = getBaseTelemetryAttributes({
  model,
  telemetry,
  headers,
  settings: { ...settings, maxRetries }
  });
- const tracer = getTracer({ isEnabled: (_a9 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a9 : false });
+ const tracer = getTracer({ isEnabled: (_a12 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a12 : false });
  return recordSpan({
  name: "ai.generateText",
  attributes: selectTelemetryAttributes({
@@ -2343,7 +2306,7 @@ async function generateText({
  }),
  tracer,
  fn: async (span) => {
- var _a10, _b, _c, _d;
+ var _a13, _b, _c, _d;
  const retry = retryWithExponentialBackoff({ maxRetries });
  const validatedPrompt = getValidatedPrompt({
  system,
@@ -2429,7 +2392,7 @@ async function generateText({
  }
  })
  );
- currentToolCalls = ((_a10 = currentModelResponse.toolCalls) != null ? _a10 : []).map(
+ currentToolCalls = ((_a13 = currentModelResponse.toolCalls) != null ? _a13 : []).map(
  (modelToolCall) => parseToolCall({ toolCall: modelToolCall, tools })
  );
  currentToolResults = tools == null ? [] : await executeTools({
@@ -2691,7 +2654,7 @@ function mergeStreams(stream1, stream2) {
  }

  // core/generate-text/run-tools-transformation.ts
- var import_ui_utils2 = require("@ai-sdk/ui-utils");
+ var import_ui_utils5 = require("@ai-sdk/ui-utils");
  function runToolsTransformation({
  tools,
  generatorStream,
@@ -2763,7 +2726,7 @@ function runToolsTransformation({
  });
  controller.enqueue(toolCall);
  if (tool2.execute != null) {
- const toolExecutionId = (0, import_ui_utils2.generateId)();
+ const toolExecutionId = (0, import_ui_utils5.generateId)();
  outstandingToolCalls.add(toolExecutionId);
  recordSpan({
  name: "ai.toolCall",
@@ -2890,17 +2853,18 @@ async function streamText({
  headers,
  experimental_telemetry: telemetry,
  experimental_toolCallStreaming: toolCallStreaming = false,
+ onChunk,
  onFinish,
  ...settings
  }) {
- var _a9;
+ var _a12;
  const baseTelemetryAttributes = getBaseTelemetryAttributes({
  model,
  telemetry,
  headers,
  settings: { ...settings, maxRetries }
  });
- const tracer = getTracer({ isEnabled: (_a9 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a9 : false });
+ const tracer = getTracer({ isEnabled: (_a12 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a12 : false });
  return recordSpan({
  name: "ai.streamText",
  attributes: selectTelemetryAttributes({
@@ -2981,6 +2945,7 @@ async function streamText({
  }),
  warnings,
  rawResponse,
+ onChunk,
  onFinish,
  rootSpan,
  doStreamSpan,
@@ -2994,6 +2959,7 @@ var DefaultStreamTextResult = class {
  stream,
  warnings,
  rawResponse,
+ onChunk,
  onFinish,
  rootSpan,
  doStreamSpan,
@@ -3001,7 +2967,6 @@ var DefaultStreamTextResult = class {
  }) {
  this.warnings = warnings;
  this.rawResponse = rawResponse;
- this.onFinish = onFinish;
  const { resolve: resolveUsage, promise: usagePromise } = createResolvablePromise();
  this.usage = usagePromise;
  const { resolve: resolveFinishReason, promise: finishReasonPromise } = createResolvablePromise();
@@ -3018,25 +2983,30 @@ var DefaultStreamTextResult = class {
  const toolCalls = [];
  const toolResults = [];
  let firstChunk = true;
- const self = this;
  this.originalStream = stream.pipeThrough(
  new TransformStream({
  async transform(chunk, controller) {
- controller.enqueue(chunk);
  if (firstChunk) {
  firstChunk = false;
  doStreamSpan.addEvent("ai.stream.firstChunk");
  }
+ if (chunk.type === "text-delta" && chunk.textDelta.length === 0) {
+ return;
+ }
+ controller.enqueue(chunk);
  const chunkType = chunk.type;
  switch (chunkType) {
  case "text-delta":
  text += chunk.textDelta;
+ await (onChunk == null ? void 0 : onChunk({ chunk }));
  break;
  case "tool-call":
  toolCalls.push(chunk);
+ await (onChunk == null ? void 0 : onChunk({ chunk }));
  break;
  case "tool-result":
  toolResults.push(chunk);
+ await (onChunk == null ? void 0 : onChunk({ chunk }));
  break;
  case "finish":
  usage = chunk.usage;
@@ -3047,7 +3017,10 @@ var DefaultStreamTextResult = class {
  resolveToolCalls(toolCalls);
  break;
  case "tool-call-streaming-start":
- case "tool-call-delta":
+ case "tool-call-delta": {
+ await (onChunk == null ? void 0 : onChunk({ chunk }));
+ break;
+ }
  case "error":
  break;
  default: {
@@ -3058,7 +3031,6 @@ var DefaultStreamTextResult = class {
  },
  // invoke onFinish callback and resolve toolResults promise when the stream is about to close:
  async flush(controller) {
- var _a9;
  try {
  const finalUsage = usage != null ? usage : {
  promptTokens: NaN,
@@ -3097,7 +3069,7 @@ var DefaultStreamTextResult = class {
  })
  );
  resolveToolResults(toolResults);
- await ((_a9 = self.onFinish) == null ? void 0 : _a9.call(self, {
+ await (onFinish == null ? void 0 : onFinish({
  finishReason: finalFinishReason,
  usage: finalUsage,
  text,
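Note: the streamText changes above add an onChunk callback. It is threaded through to DefaultStreamTextResult and awaited for text-delta, tool-call, tool-result, tool-call-streaming-start, and tool-call-delta chunks; finish and error chunks do not invoke it. A minimal usage sketch (the provider and model id are illustrative, not part of this diff):

import { streamText } from "ai";
import { openai } from "@ai-sdk/openai"; // illustrative provider

const result = await streamText({
  model: openai("gpt-4o-mini"),
  prompt: "Write a haiku about package diffs.",
  onChunk({ chunk }) {
    // called once per forwarded stream part, in order
    if (chunk.type === "text-delta") {
      process.stdout.write(chunk.textDelta);
    }
  },
});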
@@ -3136,9 +3108,7 @@ var DefaultStreamTextResult = class {
  return createAsyncIterableStream(this.teeStream(), {
  transform(chunk, controller) {
  if (chunk.type === "text-delta") {
- if (chunk.textDelta.length > 0) {
- controller.enqueue(chunk.textDelta);
- }
+ controller.enqueue(chunk.textDelta);
  } else if (chunk.type === "error") {
  controller.error(chunk.error);
  }
@@ -3148,13 +3118,7 @@ var DefaultStreamTextResult = class {
  get fullStream() {
  return createAsyncIterableStream(this.teeStream(), {
  transform(chunk, controller) {
- if (chunk.type === "text-delta") {
- if (chunk.textDelta.length > 0) {
- controller.enqueue(chunk);
- }
- } else {
- controller.enqueue(chunk);
- }
+ controller.enqueue(chunk);
  }
  });
  }
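Note: empty text deltas are now dropped once at the source (the chunk.textDelta.length === 0 guard in the transform earlier), so textStream and fullStream above forward every chunk unconditionally instead of each filtering again. Consumer-visible behavior is unchanged; continuing the streamText sketch from the previous note:

for await (const part of result.fullStream) {
  if (part.type === "text-delta") {
    // never the empty string after this change
    process.stdout.write(part.textDelta);
  }
}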
@@ -3195,11 +3159,11 @@ var DefaultStreamTextResult = class {
  const chunkType = chunk.type;
  switch (chunkType) {
  case "text-delta":
- controller.enqueue((0, import_ui_utils6.formatStreamPart)("text", chunk.textDelta));
+ controller.enqueue((0, import_ui_utils10.formatStreamPart)("text", chunk.textDelta));
  break;
  case "tool-call-streaming-start":
  controller.enqueue(
- (0, import_ui_utils6.formatStreamPart)("tool_call_streaming_start", {
+ (0, import_ui_utils10.formatStreamPart)("tool_call_streaming_start", {
  toolCallId: chunk.toolCallId,
  toolName: chunk.toolName
  })
@@ -3207,7 +3171,7 @@ var DefaultStreamTextResult = class {
  break;
  case "tool-call-delta":
  controller.enqueue(
- (0, import_ui_utils6.formatStreamPart)("tool_call_delta", {
+ (0, import_ui_utils10.formatStreamPart)("tool_call_delta", {
  toolCallId: chunk.toolCallId,
  argsTextDelta: chunk.argsTextDelta
  })
@@ -3215,7 +3179,7 @@ var DefaultStreamTextResult = class {
  break;
  case "tool-call":
  controller.enqueue(
- (0, import_ui_utils6.formatStreamPart)("tool_call", {
+ (0, import_ui_utils10.formatStreamPart)("tool_call", {
  toolCallId: chunk.toolCallId,
  toolName: chunk.toolName,
  args: chunk.args
@@ -3224,7 +3188,7 @@ var DefaultStreamTextResult = class {
  break;
  case "tool-result":
  controller.enqueue(
- (0, import_ui_utils6.formatStreamPart)("tool_result", {
+ (0, import_ui_utils10.formatStreamPart)("tool_result", {
  toolCallId: chunk.toolCallId,
  result: chunk.result
  })
@@ -3232,12 +3196,12 @@ var DefaultStreamTextResult = class {
  break;
  case "error":
  controller.enqueue(
- (0, import_ui_utils6.formatStreamPart)("error", getErrorMessage4(chunk.error))
+ (0, import_ui_utils10.formatStreamPart)("error", getErrorMessage4(chunk.error))
  );
  break;
  case "finish":
  controller.enqueue(
- (0, import_ui_utils6.formatStreamPart)("finish_message", {
+ (0, import_ui_utils10.formatStreamPart)("finish_message", {
  finishReason: chunk.finishReason,
  usage: {
  promptTokens: chunk.usage.promptTokens,
@@ -3259,8 +3223,8 @@ var DefaultStreamTextResult = class {
  return this.pipeDataStreamToResponse(response, init);
  }
  pipeDataStreamToResponse(response, init) {
- var _a9;
- response.writeHead((_a9 = init == null ? void 0 : init.status) != null ? _a9 : 200, {
+ var _a12;
+ response.writeHead((_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200, {
  "Content-Type": "text/plain; charset=utf-8",
  ...init == null ? void 0 : init.headers
  });
@@ -3282,8 +3246,8 @@ var DefaultStreamTextResult = class {
  read();
  }
  pipeTextStreamToResponse(response, init) {
- var _a9;
- response.writeHead((_a9 = init == null ? void 0 : init.status) != null ? _a9 : 200, {
+ var _a12;
+ response.writeHead((_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200, {
  "Content-Type": "text/plain; charset=utf-8",
  ...init == null ? void 0 : init.headers
  });
@@ -3308,7 +3272,7 @@ var DefaultStreamTextResult = class {
  return this.toDataStreamResponse(options);
  }
  toDataStreamResponse(options) {
- var _a9;
+ var _a12;
  const init = options == null ? void 0 : "init" in options ? options.init : {
  headers: "headers" in options ? options.headers : void 0,
  status: "status" in options ? options.status : void 0,
@@ -3318,7 +3282,7 @@ var DefaultStreamTextResult = class {
  const getErrorMessage4 = options == null ? void 0 : "getErrorMessage" in options ? options.getErrorMessage : void 0;
  const stream = data ? mergeStreams(data.stream, this.toDataStream({ getErrorMessage: getErrorMessage4 })) : this.toDataStream({ getErrorMessage: getErrorMessage4 });
  return new Response(stream, {
- status: (_a9 = init == null ? void 0 : init.status) != null ? _a9 : 200,
+ status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
  statusText: init == null ? void 0 : init.statusText,
  headers: prepareResponseHeaders(init, {
  contentType: "text/plain; charset=utf-8",
@@ -3327,9 +3291,9 @@ var DefaultStreamTextResult = class {
  });
  }
  toTextStreamResponse(init) {
- var _a9;
+ var _a12;
  return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
- status: (_a9 = init == null ? void 0 : init.status) != null ? _a9 : 200,
+ status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
  headers: prepareResponseHeaders(init, {
  contentType: "text/plain; charset=utf-8"
  })
@@ -3340,7 +3304,7 @@ var experimental_streamText = streamText;

  // core/prompt/attachments-to-parts.ts
  function attachmentsToParts(attachments) {
- var _a9, _b, _c;
+ var _a12, _b, _c;
  const parts = [];
  for (const attachment of attachments) {
  let url;
@@ -3352,7 +3316,7 @@ function attachmentsToParts(attachments) {
  switch (url.protocol) {
  case "http:":
  case "https:": {
- if ((_a9 = attachment.contentType) == null ? void 0 : _a9.startsWith("image/")) {
+ if ((_a12 = attachment.contentType) == null ? void 0 : _a12.startsWith("image/")) {
  parts.push({ type: "image", image: url });
  }
  break;
@@ -3461,18 +3425,32 @@ function convertToCoreMessages(messages) {
  }

  // core/registry/invalid-model-id-error.ts
- var InvalidModelIdError = class extends Error {
+ var import_provider11 = require("@ai-sdk/provider");
+ var name9 = "AI_InvalidModelIdError";
+ var marker9 = `vercel.ai.error.${name9}`;
+ var symbol9 = Symbol.for(marker9);
+ var _a9;
+ var InvalidModelIdError = class extends import_provider11.AISDKError {
  constructor({
  id,
  message = `Invalid model id: ${id}`
  }) {
- super(message);
- this.name = "AI_InvalidModelIdError";
+ super({ name: name9, message });
+ this[_a9] = true;
  this.id = id;
  }
+ static isInstance(error) {
+ return import_provider11.AISDKError.hasMarker(error, marker9);
+ }
+ /**
+ * @deprecated use `isInstance` instead
+ */
  static isInvalidModelIdError(error) {
- return error instanceof Error && error.name === "AI_InvalidModelIdError" && typeof error.id === "string";
+ return error instanceof Error && error.name === name9 && typeof error.id === "string";
  }
+ /**
+ * @deprecated Do not use this method. It will be removed in the next major version.
+ */
  toJSON() {
  return {
  name: this.name,
@@ -3482,22 +3460,37 @@ var InvalidModelIdError = class extends Error {
  };
  }
  };
+ _a9 = symbol9;

  // core/registry/no-such-model-error.ts
- var NoSuchModelError = class extends Error {
+ var import_provider12 = require("@ai-sdk/provider");
+ var name10 = "AI_NoSuchModelError";
+ var marker10 = `vercel.ai.error.${name10}`;
+ var symbol10 = Symbol.for(marker10);
+ var _a10;
+ var NoSuchModelError = class extends import_provider12.AISDKError {
  constructor({
  modelId,
  modelType,
  message = `No such ${modelType}: ${modelId}`
  }) {
- super(message);
- this.name = "AI_NoSuchModelError";
+ super({ name: name10, message });
+ this[_a10] = true;
  this.modelId = modelId;
  this.modelType = modelType;
  }
+ static isInstance(error) {
+ return import_provider12.AISDKError.hasMarker(error, marker10);
+ }
+ /**
+ * @deprecated use `isInstance` instead
+ */
  static isNoSuchModelError(error) {
- return error instanceof Error && error.name === "AI_NoSuchModelError" && typeof error.modelId === "string" && typeof error.modelType === "string";
+ return error instanceof Error && error.name === name10 && typeof error.modelId === "string" && typeof error.modelType === "string";
  }
+ /**
+ * @deprecated Do not use this method. It will be removed in the next major version.
+ */
  toJSON() {
  return {
  name: this.name,
@@ -3508,22 +3501,37 @@ var NoSuchModelError = class extends Error {
  };
  }
  };
+ _a10 = symbol10;

  // core/registry/no-such-provider-error.ts
- var NoSuchProviderError = class extends Error {
+ var import_provider13 = require("@ai-sdk/provider");
+ var name11 = "AI_NoSuchProviderError";
+ var marker11 = `vercel.ai.error.${name11}`;
+ var symbol11 = Symbol.for(marker11);
+ var _a11;
+ var NoSuchProviderError = class extends import_provider13.AISDKError {
  constructor({
  providerId,
  availableProviders,
  message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`
  }) {
- super(message);
- this.name = "AI_NoSuchProviderError";
+ super({ name: name11, message });
+ this[_a11] = true;
  this.providerId = providerId;
  this.availableProviders = availableProviders;
  }
+ static isInstance(error) {
+ return import_provider13.AISDKError.hasMarker(error, marker11);
+ }
+ /**
+ * @deprecated use `isInstance` instead
+ */
  static isNoSuchProviderError(error) {
- return error instanceof Error && error.name === "AI_NoSuchProviderError" && typeof error.providerId === "string" && Array.isArray(error.availableProviders);
+ return error instanceof Error && error.name === name11 && typeof error.providerId === "string" && Array.isArray(error.availableProviders);
  }
+ /**
+ * @deprecated Do not use this method. It will be removed in the next major version.
+ */
  toJSON() {
  return {
  name: this.name,
@@ -3534,6 +3542,7 @@ var NoSuchProviderError = class extends Error {
  };
  }
  };
+ _a11 = symbol11;

  // core/registry/provider-registry.ts
  function experimental_createProviderRegistry(providers) {
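Note: the three registry errors above (InvalidModelIdError, NoSuchModelError, NoSuchProviderError) now extend AISDKError from @ai-sdk/provider and are identified through a Symbol.for marker, so detection keeps working even when multiple copies of the package end up in node_modules (where instanceof and name checks are brittle). The old static checkers and toJSON remain but are deprecated in favor of the static isInstance. A minimal sketch of the preferred check (assumes a registry created with experimental_createProviderRegistry, as in the next note):

import { NoSuchModelError } from "ai";

try {
  registry.languageModel("unknown:model");
} catch (error) {
  if (NoSuchModelError.isInstance(error)) {
    console.error(`missing model: ${error.modelId}`);
  } else {
    throw error;
  }
}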
@@ -3548,7 +3557,10 @@ var DefaultProviderRegistry = class {
  constructor() {
  this.providers = {};
  }
- registerProvider({ id, provider }) {
+ registerProvider({
+ id,
+ provider
+ }) {
  this.providers[id] = provider;
  }
  getProvider(id) {
@@ -3569,26 +3581,33 @@ var DefaultProviderRegistry = class {
  return [id.slice(0, index), id.slice(index + 1)];
  }
  languageModel(id) {
- var _a9, _b;
+ var _a12, _b;
  const [providerId, modelId] = this.splitId(id);
- const model = (_b = (_a9 = this.getProvider(providerId)).languageModel) == null ? void 0 : _b.call(_a9, modelId);
+ const model = (_b = (_a12 = this.getProvider(providerId)).languageModel) == null ? void 0 : _b.call(_a12, modelId);
  if (model == null) {
- throw new NoSuchModelError({ modelId: id, modelType: "language model" });
+ throw new NoSuchModelError({ modelId: id, modelType: "languageModel" });
  }
  return model;
  }
  textEmbeddingModel(id) {
- var _a9, _b;
+ var _a12, _b, _c;
  const [providerId, modelId] = this.splitId(id);
- const model = (_b = (_a9 = this.getProvider(providerId)).textEmbedding) == null ? void 0 : _b.call(_a9, modelId);
+ const provider = this.getProvider(providerId);
+ const model = (_c = (_a12 = provider.textEmbeddingModel) == null ? void 0 : _a12.call(provider, modelId)) != null ? _c : (_b = provider.textEmbedding) == null ? void 0 : _b.call(provider, modelId);
  if (model == null) {
  throw new NoSuchModelError({
  modelId: id,
- modelType: "text embedding model"
+ modelType: "textEmbeddingModel"
  });
  }
  return model;
  }
+ /**
+ * @deprecated Use `textEmbeddingModel` instead.
+ */
+ textEmbedding(id) {
+ return this.textEmbeddingModel(id);
+ }
  };

  // core/tool/tool.ts
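Note: DefaultProviderRegistry.textEmbeddingModel above now prefers the provider's textEmbeddingModel method and falls back to the older textEmbedding hook, while registry.textEmbedding(id) survives only as a deprecated alias. The NoSuchModelError modelType strings also change from "language model" / "text embedding model" to "languageModel" / "textEmbeddingModel". A minimal usage sketch (the OpenAI provider and model id are illustrative):

import { experimental_createProviderRegistry as createProviderRegistry } from "ai";
import { openai } from "@ai-sdk/openai"; // illustrative provider

const registry = createProviderRegistry({ openai });

// preferred accessor; "provider:model" ids are split on the separator
const embeddingModel = registry.textEmbeddingModel("openai:text-embedding-3-small");

// still works, but deprecated:
// registry.textEmbedding("openai:text-embedding-3-small");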
@@ -3616,7 +3635,7 @@ function magnitude(vector) {
  }

  // errors/index.ts
- var import_provider11 = require("@ai-sdk/provider");
+ var import_provider14 = require("@ai-sdk/provider");

  // streams/ai-stream.ts
  var import_eventsource_parser = require("eventsource-parser");
@@ -3734,15 +3753,19 @@ function readableFromAsyncIterable(iterable) {
  controller.enqueue(value);
  },
  async cancel(reason) {
- var _a9;
- await ((_a9 = it.return) == null ? void 0 : _a9.call(it, reason));
+ var _a12;
+ await ((_a12 = it.return) == null ? void 0 : _a12.call(it, reason));
  }
  });
  }

  // streams/stream-data.ts
- var import_ui_utils3 = require("@ai-sdk/ui-utils");
- var STREAM_DATA_WARNING_TIME_MS = 15 * 1e3;
+ var import_ui_utils7 = require("@ai-sdk/ui-utils");
+
+ // util/constants.ts
+ var HANGING_STREAM_WARNING_TIME_MS = 15 * 1e3;
+
+ // streams/stream-data.ts
  var StreamData2 = class {
  constructor() {
  this.encoder = new TextEncoder();
@@ -3758,7 +3781,7 @@ var StreamData2 = class {
  console.warn(
  "The data stream is hanging. Did you forget to close it with `data.close()`?"
  );
- }, STREAM_DATA_WARNING_TIME_MS);
+ }, HANGING_STREAM_WARNING_TIME_MS);
  }
  },
  pull: (controller) => {
@@ -3789,7 +3812,7 @@ var StreamData2 = class {
  throw new Error("Stream controller is not initialized.");
  }
  this.controller.enqueue(
- this.encoder.encode((0, import_ui_utils3.formatStreamPart)("data", [value]))
+ this.encoder.encode((0, import_ui_utils7.formatStreamPart)("data", [value]))
  );
  }
  appendMessageAnnotation(value) {
@@ -3800,7 +3823,7 @@ var StreamData2 = class {
  throw new Error("Stream controller is not initialized.");
  }
  this.controller.enqueue(
- this.encoder.encode((0, import_ui_utils3.formatStreamPart)("message_annotations", [value]))
+ this.encoder.encode((0, import_ui_utils7.formatStreamPart)("message_annotations", [value]))
  );
  }
  };
@@ -3810,7 +3833,7 @@ function createStreamDataTransformer() {
  return new TransformStream({
  transform: async (chunk, controller) => {
  const message = decoder.decode(chunk);
- controller.enqueue(encoder.encode((0, import_ui_utils3.formatStreamPart)("text", message)));
+ controller.enqueue(encoder.encode((0, import_ui_utils7.formatStreamPart)("text", message)));
  }
  });
  }
@@ -3864,36 +3887,36 @@ function AnthropicStream(res, cb) {
  }

  // streams/assistant-response.ts
- var import_ui_utils4 = require("@ai-sdk/ui-utils");
+ var import_ui_utils8 = require("@ai-sdk/ui-utils");
  function AssistantResponse({ threadId, messageId }, process2) {
  const stream = new ReadableStream({
  async start(controller) {
- var _a9;
+ var _a12;
  const textEncoder = new TextEncoder();
  const sendMessage = (message) => {
  controller.enqueue(
- textEncoder.encode((0, import_ui_utils4.formatStreamPart)("assistant_message", message))
+ textEncoder.encode((0, import_ui_utils8.formatStreamPart)("assistant_message", message))
  );
  };
  const sendDataMessage = (message) => {
  controller.enqueue(
- textEncoder.encode((0, import_ui_utils4.formatStreamPart)("data_message", message))
+ textEncoder.encode((0, import_ui_utils8.formatStreamPart)("data_message", message))
  );
  };
  const sendError = (errorMessage) => {
  controller.enqueue(
- textEncoder.encode((0, import_ui_utils4.formatStreamPart)("error", errorMessage))
+ textEncoder.encode((0, import_ui_utils8.formatStreamPart)("error", errorMessage))
  );
  };
  const forwardStream = async (stream2) => {
- var _a10, _b;
+ var _a13, _b;
  let result = void 0;
  for await (const value of stream2) {
  switch (value.event) {
  case "thread.message.created": {
  controller.enqueue(
  textEncoder.encode(
- (0, import_ui_utils4.formatStreamPart)("assistant_message", {
+ (0, import_ui_utils8.formatStreamPart)("assistant_message", {
  id: value.data.id,
  role: "assistant",
  content: [{ type: "text", text: { value: "" } }]
@@ -3903,11 +3926,11 @@ function AssistantResponse({ threadId, messageId }, process2) {
  break;
  }
  case "thread.message.delta": {
- const content = (_a10 = value.data.delta.content) == null ? void 0 : _a10[0];
+ const content = (_a13 = value.data.delta.content) == null ? void 0 : _a13[0];
  if ((content == null ? void 0 : content.type) === "text" && ((_b = content.text) == null ? void 0 : _b.value) != null) {
  controller.enqueue(
  textEncoder.encode(
- (0, import_ui_utils4.formatStreamPart)("text", content.text.value)
+ (0, import_ui_utils8.formatStreamPart)("text", content.text.value)
  )
  );
  }
@@ -3924,7 +3947,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
  };
  controller.enqueue(
  textEncoder.encode(
- (0, import_ui_utils4.formatStreamPart)("assistant_control_data", {
+ (0, import_ui_utils8.formatStreamPart)("assistant_control_data", {
  threadId,
  messageId
  })
@@ -3939,7 +3962,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
  forwardStream
  });
  } catch (error) {
- sendError((_a9 = error.message) != null ? _a9 : `${error}`);
+ sendError((_a12 = error.message) != null ? _a12 : `${error}`);
  } finally {
  controller.close();
  }
@@ -3960,9 +3983,9 @@ var experimental_AssistantResponse = AssistantResponse;

  // streams/aws-bedrock-stream.ts
  async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
- var _a9, _b;
+ var _a12, _b;
  const decoder = new TextDecoder();
- for await (const chunk of (_a9 = response.body) != null ? _a9 : []) {
+ for await (const chunk of (_a12 = response.body) != null ? _a12 : []) {
  const bytes = (_b = chunk.chunk) == null ? void 0 : _b.bytes;
  if (bytes != null) {
  const chunkText = decoder.decode(bytes);
@@ -3976,8 +3999,8 @@ async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
  }
  }
  function AWSBedrockAnthropicMessagesStream(response, callbacks) {
  return AWSBedrockStream(response, callbacks, (chunk) => {
- var _a9;
- return (_a9 = chunk.delta) == null ? void 0 : _a9.text;
+ var _a12;
+ return (_a12 = chunk.delta) == null ? void 0 : _a12.text;
  });
  }
  function AWSBedrockAnthropicStream(response, callbacks) {
@@ -4024,8 +4047,8 @@ async function readAndProcessLines(reader, controller) {
  controller.close();
  }
  function createParser2(res) {
- var _a9;
- const reader = (_a9 = res.body) == null ? void 0 : _a9.getReader();
+ var _a12;
+ const reader = (_a12 = res.body) == null ? void 0 : _a12.getReader();
  return new ReadableStream({
  async start(controller) {
  if (!reader) {
@@ -4055,9 +4078,9 @@ function CohereStream(reader, callbacks) {

  // streams/google-generative-ai-stream.ts
  async function* streamable3(response) {
- var _a9, _b, _c;
+ var _a12, _b, _c;
  for await (const chunk of response.stream) {
- const parts = (_c = (_b = (_a9 = chunk.candidates) == null ? void 0 : _a9[0]) == null ? void 0 : _b.content) == null ? void 0 : _c.parts;
+ const parts = (_c = (_b = (_a12 = chunk.candidates) == null ? void 0 : _a12[0]) == null ? void 0 : _b.content) == null ? void 0 : _c.parts;
  if (parts === void 0) {
  continue;
  }
@@ -4076,13 +4099,13 @@ function createParser3(res) {
  const trimStartOfStream = trimStartOfStreamHelper();
  return new ReadableStream({
  async pull(controller) {
- var _a9, _b;
+ var _a12, _b;
  const { value, done } = await res.next();
  if (done) {
  controller.close();
  return;
  }
- const text = trimStartOfStream((_b = (_a9 = value.token) == null ? void 0 : _a9.text) != null ? _b : "");
+ const text = trimStartOfStream((_b = (_a12 = value.token) == null ? void 0 : _a12.text) != null ? _b : "");
  if (!text)
  return;
  if (value.generated_text != null && value.generated_text.length > 0) {
@@ -4107,11 +4130,11 @@ function InkeepStream(res, callbacks) {
  let chat_session_id = "";
  let records_cited;
  const inkeepEventParser = (data, options) => {
- var _a9, _b;
+ var _a12, _b;
  const { event } = options;
  if (event === "records_cited") {
  records_cited = JSON.parse(data);
- (_a9 = callbacks == null ? void 0 : callbacks.onRecordsCited) == null ? void 0 : _a9.call(callbacks, records_cited);
+ (_a12 = callbacks == null ? void 0 : callbacks.onRecordsCited) == null ? void 0 : _a12.call(callbacks, records_cited);
  }
  if (event === "message_chunk") {
  const inkeepMessageChunk = JSON.parse(data);
@@ -4124,12 +4147,12 @@ function InkeepStream(res, callbacks) {
  passThroughCallbacks = {
  ...passThroughCallbacks,
  onFinal: (completion) => {
- var _a9;
+ var _a12;
  const inkeepOnFinalMetadata = {
  chat_session_id,
  records_cited
  };
- (_a9 = callbacks == null ? void 0 : callbacks.onFinal) == null ? void 0 : _a9.call(callbacks, completion, inkeepOnFinalMetadata);
+ (_a12 = callbacks == null ? void 0 : callbacks.onFinal) == null ? void 0 : _a12.call(callbacks, completion, inkeepOnFinalMetadata);
  }
  };
  return AIStream(res, inkeepEventParser, passThroughCallbacks).pipeThrough(
@@ -4151,7 +4174,7 @@ function toDataStream(stream, callbacks) {
  return stream.pipeThrough(
  new TransformStream({
  transform: async (value, controller) => {
- var _a9;
+ var _a12;
  if (typeof value === "string") {
  controller.enqueue(value);
  return;
@@ -4159,7 +4182,7 @@ function toDataStream(stream, callbacks) {
  if ("event" in value) {
  if (value.event === "on_chat_model_stream") {
  forwardAIMessageChunk(
- (_a9 = value.data) == null ? void 0 : _a9.chunk,
+ (_a12 = value.data) == null ? void 0 : _a12.chunk,
  controller
  );
  }
@@ -4171,13 +4194,13 @@ function toDataStream(stream, callbacks) {
  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
  }
  function toDataStreamResponse(stream, options) {
- var _a9;
+ var _a12;
  const dataStream = toDataStream(stream, options == null ? void 0 : options.callbacks);
  const data = options == null ? void 0 : options.data;
  const init = options == null ? void 0 : options.init;
  const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;
  return new Response(responseStream, {
- status: (_a9 = init == null ? void 0 : init.status) != null ? _a9 : 200,
+ status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
  statusText: init == null ? void 0 : init.statusText,
  headers: prepareResponseHeaders(init, {
  contentType: "text/plain; charset=utf-8",
@@ -4259,9 +4282,9 @@ function LangChainStream(callbacks) {

  // streams/mistral-stream.ts
  async function* streamable4(stream) {
- var _a9, _b;
+ var _a12, _b;
  for await (const chunk of stream) {
- const content = (_b = (_a9 = chunk.choices[0]) == null ? void 0 : _a9.delta) == null ? void 0 : _b.content;
+ const content = (_b = (_a12 = chunk.choices[0]) == null ? void 0 : _a12.delta) == null ? void 0 : _b.content;
  if (content === void 0 || content === "") {
  continue;
  }
@@ -4274,7 +4297,7 @@ function MistralStream(response, callbacks) {
  }

  // streams/openai-stream.ts
- var import_ui_utils5 = require("@ai-sdk/ui-utils");
+ var import_ui_utils9 = require("@ai-sdk/ui-utils");
  function parseOpenAIStream() {
  const extract = chunkToText();
  return (data) => extract(JSON.parse(data));
@@ -4291,10 +4314,10 @@ async function* streamable5(stream) {
  model: chunk.model,
  // not exposed by Azure API
  choices: chunk.choices.map((choice) => {
- var _a9, _b, _c, _d, _e, _f, _g;
+ var _a12, _b, _c, _d, _e, _f, _g;
  return {
  delta: {
- content: (_a9 = choice.delta) == null ? void 0 : _a9.content,
+ content: (_a12 = choice.delta) == null ? void 0 : _a12.content,
  function_call: (_b = choice.delta) == null ? void 0 : _b.functionCall,
  role: (_c = choice.delta) == null ? void 0 : _c.role,
  tool_calls: ((_e = (_d = choice.delta) == null ? void 0 : _d.toolCalls) == null ? void 0 : _e.length) ? (_g = (_f = choice.delta) == null ? void 0 : _f.toolCalls) == null ? void 0 : _g.map((toolCall, index) => ({
@@ -4319,9 +4342,9 @@ function chunkToText() {
  const trimStartOfStream = trimStartOfStreamHelper();
  let isFunctionStreamingIn;
  return (json) => {
- var _a9, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
+ var _a12, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
  if (isChatCompletionChunk(json)) {
- const delta = (_a9 = json.choices[0]) == null ? void 0 : _a9.delta;
+ const delta = (_a12 = json.choices[0]) == null ? void 0 : _a12.delta;
  if ((_b = delta.function_call) == null ? void 0 : _b.name) {
  isFunctionStreamingIn = true;
  return {
@@ -4425,7 +4448,7 @@ function createFunctionCallTransformer(callbacks) {
  let aggregatedFinalCompletionResponse = "";
  let isFunctionStreamingIn = false;
  let functionCallMessages = callbacks[__internal__OpenAIFnMessagesSymbol] || [];
- const decode = (0, import_ui_utils5.createChunkDecoder)();
+ const decode = (0, import_ui_utils9.createChunkDecoder)();
  return new TransformStream({
  async transform(chunk, controller) {
  const message = decode(chunk);
@@ -4439,7 +4462,7 @@ function createFunctionCallTransformer(callbacks) {
  }
  if (!isFunctionStreamingIn) {
  controller.enqueue(
- textEncoder.encode((0, import_ui_utils5.formatStreamPart)("text", message))
+ textEncoder.encode((0, import_ui_utils9.formatStreamPart)("text", message))
  );
  return;
  } else {
@@ -4550,7 +4573,7 @@ function createFunctionCallTransformer(callbacks) {
  if (!functionResponse) {
  controller.enqueue(
  textEncoder.encode(
- (0, import_ui_utils5.formatStreamPart)(
+ (0, import_ui_utils9.formatStreamPart)(
  payload.function_call ? "function_call" : "tool_calls",
  // parse to prevent double-encoding:
  JSON.parse(aggregatedResponse)
@@ -4560,7 +4583,7 @@ function createFunctionCallTransformer(callbacks) {
  return;
  } else if (typeof functionResponse === "string") {
  controller.enqueue(
- textEncoder.encode((0, import_ui_utils5.formatStreamPart)("text", functionResponse))
+ textEncoder.encode((0, import_ui_utils9.formatStreamPart)("text", functionResponse))
  );
  aggregatedFinalCompletionResponse = functionResponse;
  return;
@@ -4594,8 +4617,8 @@ function createFunctionCallTransformer(callbacks) {

  // streams/replicate-stream.ts
  async function ReplicateStream(res, cb, options) {
- var _a9;
- const url = (_a9 = res.urls) == null ? void 0 : _a9.stream;
+ var _a12;
+ const url = (_a12 = res.urls) == null ? void 0 : _a12.stream;
  if (!url) {
  if (res.error)
  throw new Error(res.error);
@@ -4616,8 +4639,8 @@ async function ReplicateStream(res, cb, options) {

  // streams/stream-to-response.ts
  function streamToResponse(res, response, init, data) {
- var _a9;
- response.writeHead((_a9 = init == null ? void 0 : init.status) != null ? _a9 : 200, {
+ var _a12;
+ response.writeHead((_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200, {
  "Content-Type": "text/plain; charset=utf-8",
  ...init == null ? void 0 : init.headers
  });
@@ -4657,8 +4680,8 @@ var StreamingTextResponse = class extends Response {
  };

  // streams/index.ts
- var generateId2 = import_provider_utils8.generateId;
- var nanoid = import_provider_utils8.generateId;
+ var generateId2 = import_provider_utils7.generateId;
+ var nanoid = import_provider_utils7.generateId;
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
  AISDKError,