ai 3.3.3 → 3.3.5

This diff shows the changes between publicly released versions of this package as they appear in one of the supported public registries. It is provided for informational purposes only.
package/dist/index.mjs CHANGED
@@ -1277,6 +1277,8 @@ _a6 = symbol6;
1277
1277
  async function generateObject({
1278
1278
  model,
1279
1279
  schema: inputSchema,
1280
+ schemaName,
1281
+ schemaDescription,
1280
1282
  mode,
1281
1283
  system,
1282
1284
  prompt,
@@ -1313,6 +1315,8 @@ async function generateObject({
1313
1315
  "ai.schema": {
1314
1316
  input: () => JSON.stringify(schema.jsonSchema)
1315
1317
  },
1318
+ "ai.schema.name": schemaName,
1319
+ "ai.schema.description": schemaDescription,
1316
1320
  "ai.settings.mode": mode
1317
1321
  }
1318
1322
  }),
@@ -1372,7 +1376,12 @@ async function generateObject({
1372
1376
  tracer,
1373
1377
  fn: async (span2) => {
1374
1378
  const result2 = await model.doGenerate({
1375
- mode: { type: "object-json", schema: schema.jsonSchema },
1379
+ mode: {
1380
+ type: "object-json",
1381
+ schema: schema.jsonSchema,
1382
+ name: schemaName,
1383
+ description: schemaDescription
1384
+ },
1376
1385
  ...prepareCallSettings(settings),
1377
1386
  inputFormat,
1378
1387
  prompt: promptMessages,
@@ -1454,8 +1463,8 @@ async function generateObject({
1454
1463
  type: "object-tool",
1455
1464
  tool: {
1456
1465
  type: "function",
1457
- name: "json",
1458
- description: "Respond with a JSON object.",
1466
+ name: schemaName != null ? schemaName : "json",
1467
+ description: schemaDescription != null ? schemaDescription : "Respond with a JSON object.",
1459
1468
  parameters: schema.jsonSchema
1460
1469
  }
1461
1470
  },
@@ -1636,6 +1645,8 @@ function createAsyncIterableStream(source, transformer) {
1636
1645
  async function streamObject({
1637
1646
  model,
1638
1647
  schema: inputSchema,
1648
+ schemaName,
1649
+ schemaDescription,
1639
1650
  mode,
1640
1651
  system,
1641
1652
  prompt,
@@ -1672,6 +1683,8 @@ async function streamObject({
1672
1683
  input: () => JSON.stringify({ system, prompt, messages })
1673
1684
  },
1674
1685
  "ai.schema": { input: () => JSON.stringify(schema.jsonSchema) },
1686
+ "ai.schema.name": schemaName,
1687
+ "ai.schema.description": schemaDescription,
1675
1688
  "ai.settings.mode": mode
1676
1689
  }
1677
1690
  }),
@@ -1694,7 +1707,12 @@ async function streamObject({
1694
1707
  messages
1695
1708
  });
1696
1709
  callOptions = {
1697
- mode: { type: "object-json", schema: schema.jsonSchema },
1710
+ mode: {
1711
+ type: "object-json",
1712
+ schema: schema.jsonSchema,
1713
+ name: schemaName,
1714
+ description: schemaDescription
1715
+ },
1698
1716
  ...prepareCallSettings(settings),
1699
1717
  inputFormat: validatedPrompt.type,
1700
1718
  prompt: await convertToLanguageModelPrompt({
@@ -1730,8 +1748,8 @@ async function streamObject({
1730
1748
  type: "object-tool",
1731
1749
  tool: {
1732
1750
  type: "function",
1733
- name: "json",
1734
- description: "Respond with a JSON object.",
1751
+ name: schemaName != null ? schemaName : "json",
1752
+ description: schemaDescription != null ? schemaDescription : "Respond with a JSON object.",
1735
1753
  parameters: schema.jsonSchema
1736
1754
  }
1737
1755
  },
@@ -3053,6 +3071,13 @@ var DefaultStreamTextResult = class {
3053
3071
  });
3054
3072
  }
3055
3073
  toAIStream(callbacks = {}) {
3074
+ return this.toDataStream({ callbacks });
3075
+ }
3076
+ toDataStream({
3077
+ callbacks = {},
3078
+ getErrorMessage: getErrorMessage4 = () => ""
3079
+ // mask error messages for safety by default
3080
+ } = {}) {
3056
3081
  let aggregatedResponse = "";
3057
3082
  const callbackTransformer = new TransformStream({
3058
3083
  async start() {
@@ -3119,7 +3144,7 @@ var DefaultStreamTextResult = class {
3119
3144
  break;
3120
3145
  case "error":
3121
3146
  controller.enqueue(
3122
- formatStreamPart("error", JSON.stringify(chunk.error))
3147
+ formatStreamPart("error", getErrorMessage4(chunk.error))
3123
3148
  );
3124
3149
  break;
3125
3150
  case "finish":
@@ -3151,7 +3176,7 @@ var DefaultStreamTextResult = class {
3151
3176
  "Content-Type": "text/plain; charset=utf-8",
3152
3177
  ...init == null ? void 0 : init.headers
3153
3178
  });
3154
- const reader = this.toAIStream().getReader();
3179
+ const reader = this.toDataStream().getReader();
3155
3180
  const read = async () => {
3156
3181
  try {
3157
3182
  while (true) {
@@ -3202,7 +3227,8 @@ var DefaultStreamTextResult = class {
3202
3227
  statusText: "statusText" in options ? options.statusText : void 0
3203
3228
  };
3204
3229
  const data = options == null ? void 0 : "data" in options ? options.data : void 0;
3205
- const stream = data ? mergeStreams(data.stream, this.toAIStream()) : this.toAIStream();
3230
+ const getErrorMessage4 = options == null ? void 0 : "getErrorMessage" in options ? options.getErrorMessage : void 0;
3231
+ const stream = data ? mergeStreams(data.stream, this.toDataStream({ getErrorMessage: getErrorMessage4 })) : this.toDataStream({ getErrorMessage: getErrorMessage4 });
3206
3232
  return new Response(stream, {
3207
3233
  status: (_a9 = init == null ? void 0 : init.status) != null ? _a9 : 200,
3208
3234
  statusText: init == null ? void 0 : init.statusText,
@@ -3640,6 +3666,7 @@ function readableFromAsyncIterable(iterable) {
3640
3666
 
3641
3667
  // streams/stream-data.ts
3642
3668
  import { formatStreamPart as formatStreamPart2 } from "@ai-sdk/ui-utils";
3669
+ var STREAM_DATA_WARNING_TIME_MS = 15 * 1e3;
3643
3670
  var StreamData2 = class {
3644
3671
  constructor() {
3645
3672
  this.encoder = new TextEncoder();
@@ -3655,7 +3682,7 @@ var StreamData2 = class {
3655
3682
  console.warn(
3656
3683
  "The data stream is hanging. Did you forget to close it with `data.close()`?"
3657
3684
  );
3658
- }, 3e3);
3685
+ }, STREAM_DATA_WARNING_TIME_MS);
3659
3686
  }
3660
3687
  },
3661
3688
  pull: (controller) => {