ai 3.3.4 → 3.3.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -1,7 +1,7 @@
  var __defProp = Object.defineProperty;
  var __export = (target, all) => {
- for (var name9 in all)
- __defProp(target, name9, { get: all[name9], enumerable: true });
+ for (var name12 in all)
+ __defProp(target, name12, { get: all[name12], enumerable: true });
  };

  // streams/index.ts
@@ -13,6 +13,9 @@ import {
  } from "@ai-sdk/ui-utils";
  import { generateId as generateIdImpl } from "@ai-sdk/provider-utils";

+ // core/index.ts
+ import { jsonSchema } from "@ai-sdk/ui-utils";
+
  // util/retry-with-exponential-backoff.ts
  import { APICallError } from "@ai-sdk/provider";
  import { getErrorMessage, isAbortError } from "@ai-sdk/provider-utils";
@@ -134,7 +137,7 @@ function getBaseTelemetryAttributes({
  telemetry,
  headers
  }) {
- var _a9;
+ var _a12;
  return {
  "ai.model.provider": model.provider,
  "ai.model.id": model.modelId,
@@ -147,7 +150,7 @@
  "resource.name": telemetry == null ? void 0 : telemetry.functionId,
  "ai.telemetry.functionId": telemetry == null ? void 0 : telemetry.functionId,
  // add metadata as attributes:
- ...Object.entries((_a9 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a9 : {}).reduce(
+ ...Object.entries((_a12 = telemetry == null ? void 0 : telemetry.metadata) != null ? _a12 : {}).reduce(
  (attributes, [key, value]) => {
  attributes[`ai.telemetry.metadata.${key}`] = value;
  return attributes;
@@ -172,7 +175,7 @@ var noopTracer = {
  startSpan() {
  return noopSpan;
  },
- startActiveSpan(name9, arg1, arg2, arg3) {
+ startActiveSpan(name12, arg1, arg2, arg3) {
  if (typeof arg1 === "function") {
  return arg1(noopSpan);
  }
@@ -240,13 +243,13 @@ function getTracer({ isEnabled }) {
  // core/telemetry/record-span.ts
  import { SpanStatusCode } from "@opentelemetry/api";
  function recordSpan({
- name: name9,
+ name: name12,
  tracer,
  attributes,
  fn,
  endWhenDone = true
  }) {
- return tracer.startActiveSpan(name9, { attributes }, async (span) => {
+ return tracer.startActiveSpan(name12, { attributes }, async (span) => {
  try {
  const result = await fn(span);
  if (endWhenDone) {
@@ -312,14 +315,14 @@ async function embed({
  headers,
  experimental_telemetry: telemetry
  }) {
- var _a9;
+ var _a12;
  const baseTelemetryAttributes = getBaseTelemetryAttributes({
  model,
  telemetry,
  headers,
  settings: { maxRetries }
  });
- const tracer = getTracer({ isEnabled: (_a9 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a9 : false });
+ const tracer = getTracer({ isEnabled: (_a12 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a12 : false });
  return recordSpan({
  name: "ai.embed",
  attributes: selectTelemetryAttributes({
@@ -352,14 +355,14 @@
  }),
  tracer,
  fn: async (doEmbedSpan) => {
- var _a10;
+ var _a13;
  const modelResponse = await model.doEmbed({
  values: [value],
  abortSignal,
  headers
  });
  const embedding2 = modelResponse.embeddings[0];
- const usage2 = (_a10 = modelResponse.usage) != null ? _a10 : { tokens: NaN };
+ const usage2 = (_a13 = modelResponse.usage) != null ? _a13 : { tokens: NaN };
  doEmbedSpan.setAttributes(
  selectTelemetryAttributes({
  telemetry,
@@ -425,14 +428,14 @@ async function embedMany({
  headers,
  experimental_telemetry: telemetry
  }) {
- var _a9;
+ var _a12;
  const baseTelemetryAttributes = getBaseTelemetryAttributes({
  model,
  telemetry,
  headers,
  settings: { maxRetries }
  });
- const tracer = getTracer({ isEnabled: (_a9 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a9 : false });
+ const tracer = getTracer({ isEnabled: (_a12 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a12 : false });
  return recordSpan({
  name: "ai.embedMany",
  attributes: selectTelemetryAttributes({
@@ -470,14 +473,14 @@
  }),
  tracer,
  fn: async (doEmbedSpan) => {
- var _a10;
+ var _a13;
  const modelResponse = await model.doEmbed({
  values,
  abortSignal,
  headers
  });
  const embeddings3 = modelResponse.embeddings;
- const usage2 = (_a10 = modelResponse.usage) != null ? _a10 : { tokens: NaN };
+ const usage2 = (_a13 = modelResponse.usage) != null ? _a13 : { tokens: NaN };
  doEmbedSpan.setAttributes(
  selectTelemetryAttributes({
  telemetry,
@@ -529,14 +532,14 @@
  }),
  tracer,
  fn: async (doEmbedSpan) => {
- var _a10;
+ var _a13;
  const modelResponse = await model.doEmbed({
  values: chunk,
  abortSignal,
  headers
  });
  const embeddings2 = modelResponse.embeddings;
- const usage2 = (_a10 = modelResponse.usage) != null ? _a10 : { tokens: NaN };
+ const usage2 = (_a13 = modelResponse.usage) != null ? _a13 : { tokens: NaN };
  doEmbedSpan.setAttributes(
  selectTelemetryAttributes({
  telemetry,
@@ -584,6 +587,7 @@ var DefaultEmbedManyResult = class {

  // core/generate-object/generate-object.ts
  import { safeParseJSON } from "@ai-sdk/provider-utils";
+ import { asSchema } from "@ai-sdk/ui-utils";

  // core/prompt/convert-to-language-model-prompt.ts
  import { getErrorMessage as getErrorMessage2 } from "@ai-sdk/provider-utils";
@@ -638,7 +642,7 @@ async function download({
  url,
  fetchImplementation = fetch
  }) {
- var _a9;
+ var _a12;
  const urlText = url.toString();
  try {
  const response = await fetchImplementation(urlText);
@@ -651,7 +655,7 @@
  }
  return {
  data: new Uint8Array(await response.arrayBuffer()),
- mimeType: (_a9 = response.headers.get("content-type")) != null ? _a9 : void 0
+ mimeType: (_a12 = response.headers.get("content-type")) != null ? _a12 : void 0
  };
  } catch (error) {
  if (DownloadError.isInstance(error)) {
@@ -851,7 +855,7 @@ function convertToLanguageModelMessage(message, downloadedImages) {
  role: "user",
  content: message.content.map(
  (part) => {
- var _a9, _b, _c;
+ var _a12, _b, _c;
  switch (part.type) {
  case "text": {
  return part;
@@ -869,7 +873,7 @@ function convertToLanguageModelMessage(message, downloadedImages) {
  return {
  type: "image",
  image: downloadedImage.data,
- mimeType: (_a9 = part.mimeType) != null ? _a9 : downloadedImage.mimeType
+ mimeType: (_a12 = part.mimeType) != null ? _a12 : downloadedImage.mimeType
  };
  }
  }
@@ -1173,8 +1177,8 @@ function prepareResponseHeaders(init, {
  contentType,
  dataStreamVersion
  }) {
- var _a9;
- const headers = new Headers((_a9 = init == null ? void 0 : init.headers) != null ? _a9 : {});
+ var _a12;
+ const headers = new Headers((_a12 = init == null ? void 0 : init.headers) != null ? _a12 : {});
  if (!headers.has("Content-Type")) {
  headers.set("Content-Type", contentType);
  }
@@ -1184,41 +1188,6 @@ function prepareResponseHeaders(init, {
  return headers;
  }

- // core/util/schema.ts
- import { validatorSymbol } from "@ai-sdk/provider-utils";
- import zodToJsonSchema from "zod-to-json-schema";
- var schemaSymbol = Symbol.for("vercel.ai.schema");
- function jsonSchema(jsonSchema2, {
- validate
- } = {}) {
- return {
- [schemaSymbol]: true,
- _type: void 0,
- // should never be used directly
- [validatorSymbol]: true,
- jsonSchema: jsonSchema2,
- validate
- };
- }
- function isSchema(value) {
- return typeof value === "object" && value !== null && schemaSymbol in value && value[schemaSymbol] === true && "jsonSchema" in value && "validate" in value;
- }
- function asSchema(schema) {
- return isSchema(schema) ? schema : zodSchema(schema);
- }
- function zodSchema(zodSchema2) {
- return jsonSchema(
- // we assume that zodToJsonSchema will return a valid JSONSchema7:
- zodToJsonSchema(zodSchema2),
- {
- validate: (value) => {
- const result = zodSchema2.safeParse(value);
- return result.success ? { success: true, value: result.data } : { success: false, error: result.error };
- }
- }
- );
- }
-
  // core/generate-object/inject-json-schema-into-system.ts
  var DEFAULT_SCHEMA_PREFIX = "JSON schema:";
  var DEFAULT_SCHEMA_SUFFIX = "You MUST answer with a JSON object that matches the JSON schema above.";
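The removal above relocates the schema helpers rather than deleting them: jsonSchema, asSchema, and zodSchema now live in @ai-sdk/ui-utils and are imported from there throughout this bundle (see the added imports in earlier hunks), so consumer-visible behavior should be unchanged. A minimal sketch, assuming jsonSchema remains re-exported from the "ai" package as the added core/index.ts import suggests:

// Sketch (assumption: "ai" re-exports jsonSchema from @ai-sdk/ui-utils).
import { jsonSchema } from "ai";

// A hand-written JSON schema works the same as before the relocation:
const recipeSchema = jsonSchema({
  type: "object",
  properties: { name: { type: "string" } },
  required: ["name"],
});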
@@ -1289,7 +1258,7 @@ async function generateObject({
  experimental_telemetry: telemetry,
  ...settings
  }) {
- var _a9;
+ var _a12;
  const baseTelemetryAttributes = getBaseTelemetryAttributes({
  model,
  telemetry,
@@ -1297,7 +1266,7 @@
  settings: { ...settings, maxRetries }
  });
  const schema = asSchema(inputSchema);
- const tracer = getTracer({ isEnabled: (_a9 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a9 : false });
+ const tracer = getTracer({ isEnabled: (_a12 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a12 : false });
  return recordSpan({
  name: "ai.generateObject",
  attributes: selectTelemetryAttributes({
@@ -1457,7 +1426,7 @@
  }),
  tracer,
  fn: async (span2) => {
- var _a10, _b;
+ var _a13, _b;
  const result2 = await model.doGenerate({
  mode: {
  type: "object-tool",
@@ -1474,7 +1443,7 @@
  abortSignal,
  headers
  });
- const objectText = (_b = (_a10 = result2.toolCalls) == null ? void 0 : _a10[0]) == null ? void 0 : _b.args;
+ const objectText = (_b = (_a13 = result2.toolCalls) == null ? void 0 : _a13[0]) == null ? void 0 : _b.args;
  if (objectText === void 0) {
  throw new NoObjectGeneratedError();
  }
@@ -1553,9 +1522,9 @@ var DefaultGenerateObjectResult = class {
  this.logprobs = options.logprobs;
  }
  toJsonResponse(init) {
- var _a9;
+ var _a12;
  return new Response(JSON.stringify(this.object), {
- status: (_a9 = init == null ? void 0 : init.status) != null ? _a9 : 200,
+ status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
  headers: prepareResponseHeaders(init, {
  contentType: "application/json; charset=utf-8"
  })
@@ -1567,6 +1536,7 @@ var experimental_generateObject = generateObject;
  // core/generate-object/stream-object.ts
  import { safeValidateTypes } from "@ai-sdk/provider-utils";
  import {
+ asSchema as asSchema2,
  isDeepEqualData,
  parsePartialJson
  } from "@ai-sdk/ui-utils";
@@ -1609,17 +1579,17 @@ var DelayedPromise = class {
  return this.promise;
  }
  resolve(value) {
- var _a9;
+ var _a12;
  this.status = { type: "resolved", value };
  if (this.promise) {
- (_a9 = this._resolve) == null ? void 0 : _a9.call(this, value);
+ (_a12 = this._resolve) == null ? void 0 : _a12.call(this, value);
  }
  }
  reject(error) {
- var _a9;
+ var _a12;
  this.status = { type: "rejected", error };
  if (this.promise) {
- (_a9 = this._reject) == null ? void 0 : _a9.call(this, error);
+ (_a12 = this._reject) == null ? void 0 : _a12.call(this, error);
  }
  }
  };
@@ -1658,16 +1628,16 @@ async function streamObject({
  onFinish,
  ...settings
  }) {
- var _a9;
+ var _a12;
  const baseTelemetryAttributes = getBaseTelemetryAttributes({
  model,
  telemetry,
  headers,
  settings: { ...settings, maxRetries }
  });
- const tracer = getTracer({ isEnabled: (_a9 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a9 : false });
+ const tracer = getTracer({ isEnabled: (_a12 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a12 : false });
  const retry = retryWithExponentialBackoff({ maxRetries });
- const schema = asSchema(inputSchema);
+ const schema = asSchema2(inputSchema);
  return recordSpan({
  name: "ai.streamObject",
  attributes: selectTelemetryAttributes({
@@ -2027,8 +1997,8 @@ var DefaultStreamObjectResult = class {
  });
  }
  pipeTextStreamToResponse(response, init) {
- var _a9;
- response.writeHead((_a9 = init == null ? void 0 : init.status) != null ? _a9 : 200, {
+ var _a12;
+ response.writeHead((_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200, {
  "Content-Type": "text/plain; charset=utf-8",
  ...init == null ? void 0 : init.headers
  });
@@ -2050,9 +2020,9 @@
  read();
  }
  toTextStreamResponse(init) {
- var _a9;
+ var _a12;
  return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
- status: (_a9 = init == null ? void 0 : init.status) != null ? _a9 : 200,
+ status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
  headers: prepareResponseHeaders(init, {
  contentType: "text/plain; charset=utf-8"
  })
@@ -2061,6 +2031,9 @@
  };
  var experimental_streamObject = streamObject;

+ // core/prompt/prepare-tools-and-tool-choice.ts
+ import { asSchema as asSchema3 } from "@ai-sdk/ui-utils";
+
  // core/util/is-non-empty-object.ts
  function isNonEmptyObject(object) {
  return object != null && Object.keys(object).length > 0;
@@ -2078,11 +2051,11 @@ function prepareToolsAndToolChoice({
  };
  }
  return {
- tools: Object.entries(tools).map(([name9, tool2]) => ({
+ tools: Object.entries(tools).map(([name12, tool2]) => ({
  type: "function",
- name: name9,
+ name: name12,
  description: tool2.description,
- parameters: asSchema(tool2.parameters).jsonSchema
+ parameters: asSchema3(tool2.parameters).jsonSchema
  })),
  toolChoice: toolChoice == null ? { type: "auto" } : typeof toolChoice === "string" ? { type: toolChoice } : { type: "tool", toolName: toolChoice.toolName }
  };
@@ -2090,6 +2063,7 @@

  // core/generate-text/tool-call.ts
  import { safeParseJSON as safeParseJSON2 } from "@ai-sdk/provider-utils";
+ import { asSchema as asSchema4 } from "@ai-sdk/ui-utils";

  // errors/invalid-tool-arguments-error.ts
  import { AISDKError as AISDKError7, getErrorMessage as getErrorMessage3 } from "@ai-sdk/provider";
@@ -2195,7 +2169,7 @@ function parseToolCall({
  }
  const parseResult = safeParseJSON2({
  text: toolCall.args,
- schema: asSchema(tool2.parameters)
+ schema: asSchema4(tool2.parameters)
  });
  if (parseResult.success === false) {
  throw new InvalidToolArgumentsError({
@@ -2228,14 +2202,14 @@ async function generateText({
  experimental_telemetry: telemetry,
  ...settings
  }) {
- var _a9;
+ var _a12;
  const baseTelemetryAttributes = getBaseTelemetryAttributes({
  model,
  telemetry,
  headers,
  settings: { ...settings, maxRetries }
  });
- const tracer = getTracer({ isEnabled: (_a9 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a9 : false });
+ const tracer = getTracer({ isEnabled: (_a12 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a12 : false });
  return recordSpan({
  name: "ai.generateText",
  attributes: selectTelemetryAttributes({
@@ -2255,7 +2229,7 @@
  }),
  tracer,
  fn: async (span) => {
- var _a10, _b, _c, _d;
+ var _a13, _b, _c, _d;
  const retry = retryWithExponentialBackoff({ maxRetries });
  const validatedPrompt = getValidatedPrompt({
  system,
@@ -2341,7 +2315,7 @@
  }
  })
  );
- currentToolCalls = ((_a10 = currentModelResponse.toolCalls) != null ? _a10 : []).map(
+ currentToolCalls = ((_a13 = currentModelResponse.toolCalls) != null ? _a13 : []).map(
  (modelToolCall) => parseToolCall({ toolCall: modelToolCall, tools })
  );
  currentToolResults = tools == null ? [] : await executeTools({
@@ -2805,14 +2779,14 @@ async function streamText({
  onFinish,
  ...settings
  }) {
- var _a9;
+ var _a12;
  const baseTelemetryAttributes = getBaseTelemetryAttributes({
  model,
  telemetry,
  headers,
  settings: { ...settings, maxRetries }
  });
- const tracer = getTracer({ isEnabled: (_a9 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a9 : false });
+ const tracer = getTracer({ isEnabled: (_a12 = telemetry == null ? void 0 : telemetry.isEnabled) != null ? _a12 : false });
  return recordSpan({
  name: "ai.streamText",
  attributes: selectTelemetryAttributes({
@@ -2970,7 +2944,7 @@ var DefaultStreamTextResult = class {
  },
  // invoke onFinish callback and resolve toolResults promise when the stream is about to close:
  async flush(controller) {
- var _a9;
+ var _a12;
  try {
  const finalUsage = usage != null ? usage : {
  promptTokens: NaN,
@@ -3009,7 +2983,7 @@ var DefaultStreamTextResult = class {
  })
  );
  resolveToolResults(toolResults);
- await ((_a9 = self.onFinish) == null ? void 0 : _a9.call(self, {
+ await ((_a12 = self.onFinish) == null ? void 0 : _a12.call(self, {
  finishReason: finalFinishReason,
  usage: finalUsage,
  text,
@@ -3071,6 +3045,13 @@ var DefaultStreamTextResult = class {
  });
  }
  toAIStream(callbacks = {}) {
+ return this.toDataStream({ callbacks });
+ }
+ toDataStream({
+ callbacks = {},
+ getErrorMessage: getErrorMessage4 = () => ""
+ // mask error messages for safety by default
+ } = {}) {
  let aggregatedResponse = "";
  const callbackTransformer = new TransformStream({
  async start() {
@@ -3137,7 +3118,7 @@ var DefaultStreamTextResult = class {
  break;
  case "error":
  controller.enqueue(
- formatStreamPart("error", JSON.stringify(chunk.error))
+ formatStreamPart("error", getErrorMessage4(chunk.error))
  );
  break;
  case "finish":
@@ -3164,12 +3145,12 @@ var DefaultStreamTextResult = class {
  return this.pipeDataStreamToResponse(response, init);
  }
  pipeDataStreamToResponse(response, init) {
- var _a9;
- response.writeHead((_a9 = init == null ? void 0 : init.status) != null ? _a9 : 200, {
+ var _a12;
+ response.writeHead((_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200, {
  "Content-Type": "text/plain; charset=utf-8",
  ...init == null ? void 0 : init.headers
  });
- const reader = this.toAIStream().getReader();
+ const reader = this.toDataStream().getReader();
  const read = async () => {
  try {
  while (true) {
@@ -3187,8 +3168,8 @@ var DefaultStreamTextResult = class {
  read();
  }
  pipeTextStreamToResponse(response, init) {
- var _a9;
- response.writeHead((_a9 = init == null ? void 0 : init.status) != null ? _a9 : 200, {
+ var _a12;
+ response.writeHead((_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200, {
  "Content-Type": "text/plain; charset=utf-8",
  ...init == null ? void 0 : init.headers
  });
@@ -3213,16 +3194,17 @@ var DefaultStreamTextResult = class {
  return this.toDataStreamResponse(options);
  }
  toDataStreamResponse(options) {
- var _a9;
+ var _a12;
  const init = options == null ? void 0 : "init" in options ? options.init : {
  headers: "headers" in options ? options.headers : void 0,
  status: "status" in options ? options.status : void 0,
  statusText: "statusText" in options ? options.statusText : void 0
  };
  const data = options == null ? void 0 : "data" in options ? options.data : void 0;
- const stream = data ? mergeStreams(data.stream, this.toAIStream()) : this.toAIStream();
+ const getErrorMessage4 = options == null ? void 0 : "getErrorMessage" in options ? options.getErrorMessage : void 0;
+ const stream = data ? mergeStreams(data.stream, this.toDataStream({ getErrorMessage: getErrorMessage4 })) : this.toDataStream({ getErrorMessage: getErrorMessage4 });
  return new Response(stream, {
- status: (_a9 = init == null ? void 0 : init.status) != null ? _a9 : 200,
+ status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
  statusText: init == null ? void 0 : init.statusText,
  headers: prepareResponseHeaders(init, {
  contentType: "text/plain; charset=utf-8",
@@ -3231,9 +3213,9 @@ var DefaultStreamTextResult = class {
  });
  }
  toTextStreamResponse(init) {
- var _a9;
+ var _a12;
  return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
- status: (_a9 = init == null ? void 0 : init.status) != null ? _a9 : 200,
+ status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
  headers: prepareResponseHeaders(init, {
  contentType: "text/plain; charset=utf-8"
  })
@@ -3244,7 +3226,7 @@ var experimental_streamText = streamText;

  // core/prompt/attachments-to-parts.ts
  function attachmentsToParts(attachments) {
- var _a9, _b, _c;
+ var _a12, _b, _c;
  const parts = [];
  for (const attachment of attachments) {
  let url;
@@ -3256,7 +3238,7 @@ function attachmentsToParts(attachments) {
  switch (url.protocol) {
  case "http:":
  case "https:": {
- if ((_a9 = attachment.contentType) == null ? void 0 : _a9.startsWith("image/")) {
+ if ((_a12 = attachment.contentType) == null ? void 0 : _a12.startsWith("image/")) {
  parts.push({ type: "image", image: url });
  }
  break;
@@ -3365,18 +3347,32 @@ function convertToCoreMessages(messages) {
  }

  // core/registry/invalid-model-id-error.ts
- var InvalidModelIdError = class extends Error {
+ import { AISDKError as AISDKError9 } from "@ai-sdk/provider";
+ var name9 = "AI_InvalidModelIdError";
+ var marker9 = `vercel.ai.error.${name9}`;
+ var symbol9 = Symbol.for(marker9);
+ var _a9;
+ var InvalidModelIdError = class extends AISDKError9 {
  constructor({
  id,
  message = `Invalid model id: ${id}`
  }) {
- super(message);
- this.name = "AI_InvalidModelIdError";
+ super({ name: name9, message });
+ this[_a9] = true;
  this.id = id;
  }
+ static isInstance(error) {
+ return AISDKError9.hasMarker(error, marker9);
+ }
+ /**
+ * @deprecated use `isInstance` instead
+ */
  static isInvalidModelIdError(error) {
- return error instanceof Error && error.name === "AI_InvalidModelIdError" && typeof error.id === "string";
+ return error instanceof Error && error.name === name9 && typeof error.id === "string";
  }
+ /**
+ * @deprecated Do not use this method. It will be removed in the next major version.
+ */
  toJSON() {
  return {
  name: this.name,
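This hunk, together with the matching ones for NoSuchModelError and NoSuchProviderError below, moves the registry errors onto the marker-based AISDKError pattern: each class registers a Symbol.for marker and gains a static isInstance check, while the old name-based checks and toJSON remain but are deprecated. A brief sketch of the intended check (the failing id and import shape are illustrative; the sketch assumes splitId rejects ids without a ":" separator, per the InvalidModelIdError message above):

// Sketch: marker-based isInstance survives duplicated module copies,
// where instanceof or error.name comparisons can fail.
import { experimental_createProviderRegistry as createProviderRegistry, InvalidModelIdError } from "ai";

const registry = createProviderRegistry({}); // providers omitted for brevity
try {
  registry.languageModel("gpt-4"); // no "provider:model" separator (illustrative)
} catch (error) {
  if (InvalidModelIdError.isInstance(error)) {
    console.error("Invalid model id:", error.id);
  }
}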
@@ -3386,22 +3382,37 @@
  };
  }
  };
+ _a9 = symbol9;

  // core/registry/no-such-model-error.ts
- var NoSuchModelError = class extends Error {
+ import { AISDKError as AISDKError10 } from "@ai-sdk/provider";
+ var name10 = "AI_NoSuchModelError";
+ var marker10 = `vercel.ai.error.${name10}`;
+ var symbol10 = Symbol.for(marker10);
+ var _a10;
+ var NoSuchModelError = class extends AISDKError10 {
  constructor({
  modelId,
  modelType,
  message = `No such ${modelType}: ${modelId}`
  }) {
- super(message);
- this.name = "AI_NoSuchModelError";
+ super({ name: name10, message });
+ this[_a10] = true;
  this.modelId = modelId;
  this.modelType = modelType;
  }
+ static isInstance(error) {
+ return AISDKError10.hasMarker(error, marker10);
+ }
+ /**
+ * @deprecated use `isInstance` instead
+ */
  static isNoSuchModelError(error) {
- return error instanceof Error && error.name === "AI_NoSuchModelError" && typeof error.modelId === "string" && typeof error.modelType === "string";
+ return error instanceof Error && error.name === name10 && typeof error.modelId === "string" && typeof error.modelType === "string";
  }
+ /**
+ * @deprecated Do not use this method. It will be removed in the next major version.
+ */
  toJSON() {
  return {
  name: this.name,
@@ -3412,22 +3423,37 @@ var NoSuchModelError = class extends Error {
  };
  }
  };
+ _a10 = symbol10;

  // core/registry/no-such-provider-error.ts
- var NoSuchProviderError = class extends Error {
+ import { AISDKError as AISDKError11 } from "@ai-sdk/provider";
+ var name11 = "AI_NoSuchProviderError";
+ var marker11 = `vercel.ai.error.${name11}`;
+ var symbol11 = Symbol.for(marker11);
+ var _a11;
+ var NoSuchProviderError = class extends AISDKError11 {
  constructor({
  providerId,
  availableProviders,
  message = `No such provider: ${providerId} (available providers: ${availableProviders.join()})`
  }) {
- super(message);
- this.name = "AI_NoSuchProviderError";
+ super({ name: name11, message });
+ this[_a11] = true;
  this.providerId = providerId;
  this.availableProviders = availableProviders;
  }
+ static isInstance(error) {
+ return AISDKError11.hasMarker(error, marker11);
+ }
+ /**
+ * @deprecated use `isInstance` instead
+ */
  static isNoSuchProviderError(error) {
- return error instanceof Error && error.name === "AI_NoSuchProviderError" && typeof error.providerId === "string" && Array.isArray(error.availableProviders);
+ return error instanceof Error && error.name === name11 && typeof error.providerId === "string" && Array.isArray(error.availableProviders);
  }
+ /**
+ * @deprecated Do not use this method. It will be removed in the next major version.
+ */
  toJSON() {
  return {
  name: this.name,
@@ -3438,6 +3464,7 @@ var NoSuchProviderError = class extends Error {
  };
  }
  };
+ _a11 = symbol11;

  // core/registry/provider-registry.ts
  function experimental_createProviderRegistry(providers) {
@@ -3452,7 +3479,10 @@ var DefaultProviderRegistry = class {
  constructor() {
  this.providers = {};
  }
- registerProvider({ id, provider }) {
+ registerProvider({
+ id,
+ provider
+ }) {
  this.providers[id] = provider;
  }
  getProvider(id) {
@@ -3473,26 +3503,33 @@
  return [id.slice(0, index), id.slice(index + 1)];
  }
  languageModel(id) {
- var _a9, _b;
+ var _a12, _b;
  const [providerId, modelId] = this.splitId(id);
- const model = (_b = (_a9 = this.getProvider(providerId)).languageModel) == null ? void 0 : _b.call(_a9, modelId);
+ const model = (_b = (_a12 = this.getProvider(providerId)).languageModel) == null ? void 0 : _b.call(_a12, modelId);
  if (model == null) {
- throw new NoSuchModelError({ modelId: id, modelType: "language model" });
+ throw new NoSuchModelError({ modelId: id, modelType: "languageModel" });
  }
  return model;
  }
  textEmbeddingModel(id) {
- var _a9, _b;
+ var _a12, _b, _c;
  const [providerId, modelId] = this.splitId(id);
- const model = (_b = (_a9 = this.getProvider(providerId)).textEmbedding) == null ? void 0 : _b.call(_a9, modelId);
+ const provider = this.getProvider(providerId);
+ const model = (_c = (_a12 = provider.textEmbeddingModel) == null ? void 0 : _a12.call(provider, modelId)) != null ? _c : (_b = provider.textEmbedding) == null ? void 0 : _b.call(provider, modelId);
  if (model == null) {
  throw new NoSuchModelError({
  modelId: id,
- modelType: "text embedding model"
+ modelType: "textEmbeddingModel"
  });
  }
  return model;
  }
+ /**
+ * @deprecated Use `textEmbeddingModel` instead.
+ */
+ textEmbedding(id) {
+ return this.textEmbeddingModel(id);
+ }
  };

  // core/tool/tool.ts
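The registry hunk above renames the embedding lookup to textEmbeddingModel, prefers a provider's textEmbeddingModel method and falls back to the legacy textEmbedding one, keeps a deprecated registry.textEmbedding alias, and tightens the NoSuchModelError modelType strings to "languageModel" / "textEmbeddingModel". A hedged sketch of the preferred call path (the provider setup and model id are illustrative assumptions):

// Sketch: preferred lookup after this change; registry.textEmbedding(id)
// still resolves but is deprecated.
import { embedMany, experimental_createProviderRegistry as createProviderRegistry } from "ai";
import { openai } from "@ai-sdk/openai"; // provider choice is an assumption

const registry = createProviderRegistry({ openai });
const { embeddings } = await embedMany({
  model: registry.textEmbeddingModel("openai:text-embedding-3-small"),
  values: ["hello", "world"],
});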
@@ -3521,7 +3558,7 @@ function magnitude(vector) {

  // errors/index.ts
  import {
- AISDKError as AISDKError9,
+ AISDKError as AISDKError12,
  APICallError as APICallError2,
  EmptyResponseBodyError,
  InvalidPromptError as InvalidPromptError2,
@@ -3650,8 +3687,8 @@ function readableFromAsyncIterable(iterable) {
  controller.enqueue(value);
  },
  async cancel(reason) {
- var _a9;
- await ((_a9 = it.return) == null ? void 0 : _a9.call(it, reason));
+ var _a12;
+ await ((_a12 = it.return) == null ? void 0 : _a12.call(it, reason));
  }
  });
  }
@@ -3786,7 +3823,7 @@ import {
  function AssistantResponse({ threadId, messageId }, process2) {
  const stream = new ReadableStream({
  async start(controller) {
- var _a9;
+ var _a12;
  const textEncoder = new TextEncoder();
  const sendMessage = (message) => {
  controller.enqueue(
@@ -3804,7 +3841,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
  );
  };
  const forwardStream = async (stream2) => {
- var _a10, _b;
+ var _a13, _b;
  let result = void 0;
  for await (const value of stream2) {
  switch (value.event) {
@@ -3821,7 +3858,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
  break;
  }
  case "thread.message.delta": {
- const content = (_a10 = value.data.delta.content) == null ? void 0 : _a10[0];
+ const content = (_a13 = value.data.delta.content) == null ? void 0 : _a13[0];
  if ((content == null ? void 0 : content.type) === "text" && ((_b = content.text) == null ? void 0 : _b.value) != null) {
  controller.enqueue(
  textEncoder.encode(
@@ -3857,7 +3894,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
  forwardStream
  });
  } catch (error) {
- sendError((_a9 = error.message) != null ? _a9 : `${error}`);
+ sendError((_a12 = error.message) != null ? _a12 : `${error}`);
  } finally {
  controller.close();
  }
@@ -3878,9 +3915,9 @@ var experimental_AssistantResponse = AssistantResponse;

  // streams/aws-bedrock-stream.ts
  async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
- var _a9, _b;
+ var _a12, _b;
  const decoder = new TextDecoder();
- for await (const chunk of (_a9 = response.body) != null ? _a9 : []) {
+ for await (const chunk of (_a12 = response.body) != null ? _a12 : []) {
  const bytes = (_b = chunk.chunk) == null ? void 0 : _b.bytes;
  if (bytes != null) {
  const chunkText = decoder.decode(bytes);
@@ -3894,8 +3931,8 @@ async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
  }
  function AWSBedrockAnthropicMessagesStream(response, callbacks) {
  return AWSBedrockStream(response, callbacks, (chunk) => {
- var _a9;
- return (_a9 = chunk.delta) == null ? void 0 : _a9.text;
+ var _a12;
+ return (_a12 = chunk.delta) == null ? void 0 : _a12.text;
  });
  }
  function AWSBedrockAnthropicStream(response, callbacks) {
@@ -3942,8 +3979,8 @@ async function readAndProcessLines(reader, controller) {
  controller.close();
  }
  function createParser2(res) {
- var _a9;
- const reader = (_a9 = res.body) == null ? void 0 : _a9.getReader();
+ var _a12;
+ const reader = (_a12 = res.body) == null ? void 0 : _a12.getReader();
  return new ReadableStream({
  async start(controller) {
  if (!reader) {
@@ -3973,9 +4010,9 @@ function CohereStream(reader, callbacks) {

  // streams/google-generative-ai-stream.ts
  async function* streamable3(response) {
- var _a9, _b, _c;
+ var _a12, _b, _c;
  for await (const chunk of response.stream) {
- const parts = (_c = (_b = (_a9 = chunk.candidates) == null ? void 0 : _a9[0]) == null ? void 0 : _b.content) == null ? void 0 : _c.parts;
+ const parts = (_c = (_b = (_a12 = chunk.candidates) == null ? void 0 : _a12[0]) == null ? void 0 : _b.content) == null ? void 0 : _c.parts;
  if (parts === void 0) {
  continue;
  }
@@ -3994,13 +4031,13 @@ function createParser3(res) {
  const trimStartOfStream = trimStartOfStreamHelper();
  return new ReadableStream({
  async pull(controller) {
- var _a9, _b;
+ var _a12, _b;
  const { value, done } = await res.next();
  if (done) {
  controller.close();
  return;
  }
- const text = trimStartOfStream((_b = (_a9 = value.token) == null ? void 0 : _a9.text) != null ? _b : "");
+ const text = trimStartOfStream((_b = (_a12 = value.token) == null ? void 0 : _a12.text) != null ? _b : "");
  if (!text)
  return;
  if (value.generated_text != null && value.generated_text.length > 0) {
@@ -4025,11 +4062,11 @@ function InkeepStream(res, callbacks) {
  let chat_session_id = "";
  let records_cited;
  const inkeepEventParser = (data, options) => {
- var _a9, _b;
+ var _a12, _b;
  const { event } = options;
  if (event === "records_cited") {
  records_cited = JSON.parse(data);
- (_a9 = callbacks == null ? void 0 : callbacks.onRecordsCited) == null ? void 0 : _a9.call(callbacks, records_cited);
+ (_a12 = callbacks == null ? void 0 : callbacks.onRecordsCited) == null ? void 0 : _a12.call(callbacks, records_cited);
  }
  if (event === "message_chunk") {
  const inkeepMessageChunk = JSON.parse(data);
@@ -4042,12 +4079,12 @@ function InkeepStream(res, callbacks) {
  passThroughCallbacks = {
  ...passThroughCallbacks,
  onFinal: (completion) => {
- var _a9;
+ var _a12;
  const inkeepOnFinalMetadata = {
  chat_session_id,
  records_cited
  };
- (_a9 = callbacks == null ? void 0 : callbacks.onFinal) == null ? void 0 : _a9.call(callbacks, completion, inkeepOnFinalMetadata);
+ (_a12 = callbacks == null ? void 0 : callbacks.onFinal) == null ? void 0 : _a12.call(callbacks, completion, inkeepOnFinalMetadata);
  }
  };
  return AIStream(res, inkeepEventParser, passThroughCallbacks).pipeThrough(
@@ -4069,7 +4106,7 @@ function toDataStream(stream, callbacks) {
  return stream.pipeThrough(
  new TransformStream({
  transform: async (value, controller) => {
- var _a9;
+ var _a12;
  if (typeof value === "string") {
  controller.enqueue(value);
  return;
@@ -4077,7 +4114,7 @@ function toDataStream(stream, callbacks) {
  if ("event" in value) {
  if (value.event === "on_chat_model_stream") {
  forwardAIMessageChunk(
- (_a9 = value.data) == null ? void 0 : _a9.chunk,
+ (_a12 = value.data) == null ? void 0 : _a12.chunk,
  controller
  );
  }
@@ -4089,13 +4126,13 @@ function toDataStream(stream, callbacks) {
  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
  }
  function toDataStreamResponse(stream, options) {
- var _a9;
+ var _a12;
  const dataStream = toDataStream(stream, options == null ? void 0 : options.callbacks);
  const data = options == null ? void 0 : options.data;
  const init = options == null ? void 0 : options.init;
  const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;
  return new Response(responseStream, {
- status: (_a9 = init == null ? void 0 : init.status) != null ? _a9 : 200,
+ status: (_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200,
  statusText: init == null ? void 0 : init.statusText,
  headers: prepareResponseHeaders(init, {
  contentType: "text/plain; charset=utf-8",
@@ -4177,9 +4214,9 @@ function LangChainStream(callbacks) {

  // streams/mistral-stream.ts
  async function* streamable4(stream) {
- var _a9, _b;
+ var _a12, _b;
  for await (const chunk of stream) {
- const content = (_b = (_a9 = chunk.choices[0]) == null ? void 0 : _a9.delta) == null ? void 0 : _b.content;
+ const content = (_b = (_a12 = chunk.choices[0]) == null ? void 0 : _a12.delta) == null ? void 0 : _b.content;
  if (content === void 0 || content === "") {
  continue;
  }
@@ -4212,10 +4249,10 @@ async function* streamable5(stream) {
  model: chunk.model,
  // not exposed by Azure API
  choices: chunk.choices.map((choice) => {
- var _a9, _b, _c, _d, _e, _f, _g;
+ var _a12, _b, _c, _d, _e, _f, _g;
  return {
  delta: {
- content: (_a9 = choice.delta) == null ? void 0 : _a9.content,
+ content: (_a12 = choice.delta) == null ? void 0 : _a12.content,
  function_call: (_b = choice.delta) == null ? void 0 : _b.functionCall,
  role: (_c = choice.delta) == null ? void 0 : _c.role,
  tool_calls: ((_e = (_d = choice.delta) == null ? void 0 : _d.toolCalls) == null ? void 0 : _e.length) ? (_g = (_f = choice.delta) == null ? void 0 : _f.toolCalls) == null ? void 0 : _g.map((toolCall, index) => ({
@@ -4240,9 +4277,9 @@ function chunkToText() {
  const trimStartOfStream = trimStartOfStreamHelper();
  let isFunctionStreamingIn;
  return (json) => {
- var _a9, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
+ var _a12, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
  if (isChatCompletionChunk(json)) {
- const delta = (_a9 = json.choices[0]) == null ? void 0 : _a9.delta;
+ const delta = (_a12 = json.choices[0]) == null ? void 0 : _a12.delta;
  if ((_b = delta.function_call) == null ? void 0 : _b.name) {
  isFunctionStreamingIn = true;
  return {
@@ -4515,8 +4552,8 @@ function createFunctionCallTransformer(callbacks) {

  // streams/replicate-stream.ts
  async function ReplicateStream(res, cb, options) {
- var _a9;
- const url = (_a9 = res.urls) == null ? void 0 : _a9.stream;
+ var _a12;
+ const url = (_a12 = res.urls) == null ? void 0 : _a12.stream;
  if (!url) {
  if (res.error)
  throw new Error(res.error);
@@ -4537,8 +4574,8 @@ async function ReplicateStream(res, cb, options) {

  // streams/stream-to-response.ts
  function streamToResponse(res, response, init, data) {
- var _a9;
- response.writeHead((_a9 = init == null ? void 0 : init.status) != null ? _a9 : 200, {
+ var _a12;
+ response.writeHead((_a12 = init == null ? void 0 : init.status) != null ? _a12 : 200, {
  "Content-Type": "text/plain; charset=utf-8",
  ...init == null ? void 0 : init.headers
  });
@@ -4581,7 +4618,7 @@ var StreamingTextResponse = class extends Response {
  var generateId2 = generateIdImpl;
  var nanoid = generateIdImpl;
  export {
- AISDKError9 as AISDKError,
+ AISDKError12 as AISDKError,
  AIStream,
  APICallError2 as APICallError,
  AWSBedrockAnthropicMessagesStream,