ai 4.3.2 → 5.0.0-canary.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -7,10 +7,8 @@ var __export = (target, all) => {
 // core/index.ts
 import { createIdGenerator as createIdGenerator5, generateId as generateId2 } from "@ai-sdk/provider-utils";
 import {
-  formatAssistantStreamPart,
   formatDataStreamPart as formatDataStreamPart3,
   jsonSchema as jsonSchema2,
-  parseAssistantStreamPart,
   parseDataStreamPart,
   processDataStream,
   processTextStream,
@@ -4702,25 +4700,6 @@ function asArray(value) {
   return value === void 0 ? [] : Array.isArray(value) ? value : [value];
 }

-// util/consume-stream.ts
-async function consumeStream({
-  stream,
-  onError
-}) {
-  const reader = stream.getReader();
-  try {
-    while (true) {
-      const { done } = await reader.read();
-      if (done)
-        break;
-    }
-  } catch (error) {
-    onError == null ? void 0 : onError(error);
-  } finally {
-    reader.releaseLock();
-  }
-}
-
 // core/util/merge-streams.ts
 function mergeStreams(stream1, stream2) {
   const reader1 = stream1.getReader();
@@ -5962,15 +5941,9 @@ var DefaultStreamTextResult = class {
       )
     );
   }
-  async consumeStream(options) {
-    var _a17;
-    try {
-      await consumeStream({
-        stream: this.fullStream,
-        onError: options == null ? void 0 : options.onError
-      });
-    } catch (error) {
-      (_a17 = options == null ? void 0 : options.onError) == null ? void 0 : _a17.call(options, error);
+  async consumeStream() {
+    const stream = this.fullStream;
+    for await (const part of stream) {
     }
   }
   get experimental_partialOutputStream() {
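
Note: the hunk above pairs with the earlier removal of util/consume-stream.ts. In 4.3.2, consumeStream() forwarded an onError callback to that helper; in the canary build it simply drains this.fullStream with a for await loop. A caller that passed onError would presumably handle errors around the call instead. A minimal sketch, assuming result is the value returned by streamText (model setup omitted; the naming here is hypothetical):

  // ai@4.3.2
  await result.consumeStream({ onError: (error) => console.error(error) });

  // ai@5.0.0-canary.1: no options argument; a stream error would presumably
  // surface as a rejection of the awaited call, so wrap it yourself.
  try {
    await result.consumeStream();
  } catch (error) {
    console.error(error);
  }
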
@@ -6279,7 +6252,7 @@ function defaultSettingsMiddleware({
   settings
 }) {
   return {
-    middlewareVersion: "v1",
+    middlewareVersion: "v2",
     transformParams: async ({ params }) => {
       var _a17;
       return {
@@ -6324,7 +6297,7 @@ function extractReasoningMiddleware({
   const openingTag = `<${tagName}>`;
   const closingTag = `</${tagName}>`;
   return {
-    middlewareVersion: "v1",
+    middlewareVersion: "v2",
     wrapGenerate: async ({ doGenerate }) => {
       const { text: rawText, ...rest } = await doGenerate();
       if (rawText == null) {
@@ -6410,7 +6383,7 @@ function extractReasoningMiddleware({
 // core/middleware/simulate-streaming-middleware.ts
 function simulateStreamingMiddleware() {
   return {
-    middlewareVersion: "v1",
+    middlewareVersion: "v2",
     wrapStream: async ({ doGenerate }) => {
       const result = await doGenerate();
       const simulatedStream = new ReadableStream({
@@ -6515,7 +6488,7 @@ var doWrap = ({
     return transformParams ? await transformParams({ params, type }) : params;
   }
   return {
-    specificationVersion: "v1",
+    specificationVersion: "v2",
     provider: providerId != null ? providerId : model.provider,
     modelId: modelId != null ? modelId : model.modelId,
     defaultObjectGenerationMode: model.defaultObjectGenerationMode,
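
Note: the four hunks above bump the version tags reported by the built-in middleware (defaultSettingsMiddleware, extractReasoningMiddleware, simulateStreamingMiddleware) and by the doWrap wrapper from "v1" to "v2". A hand-rolled middleware object written against 4.3.2 would presumably need the same field value to match; a minimal sketch modeled on the transformParams shape visible above (the logging body is purely illustrative):

  const logParamsMiddleware = {
    middlewareVersion: "v2", // reported as "v1" by the 4.3.2 build
    transformParams: async ({ params }) => {
      // Illustrative only: inspect params before they reach the model.
      console.log("transformParams", params);
      return params;
    }
  };
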
@@ -7490,104 +7463,6 @@ function simulateReadableStream({
   });
 }

-// streams/assistant-response.ts
-import {
-  formatAssistantStreamPart as formatAssistantStreamPart2
-} from "@ai-sdk/ui-utils";
-function AssistantResponse({ threadId, messageId }, process2) {
-  const stream = new ReadableStream({
-    async start(controller) {
-      var _a17;
-      const textEncoder = new TextEncoder();
-      const sendMessage = (message) => {
-        controller.enqueue(
-          textEncoder.encode(
-            formatAssistantStreamPart2("assistant_message", message)
-          )
-        );
-      };
-      const sendDataMessage = (message) => {
-        controller.enqueue(
-          textEncoder.encode(
-            formatAssistantStreamPart2("data_message", message)
-          )
-        );
-      };
-      const sendError = (errorMessage) => {
-        controller.enqueue(
-          textEncoder.encode(formatAssistantStreamPart2("error", errorMessage))
-        );
-      };
-      const forwardStream = async (stream2) => {
-        var _a18, _b;
-        let result = void 0;
-        for await (const value of stream2) {
-          switch (value.event) {
-            case "thread.message.created": {
-              controller.enqueue(
-                textEncoder.encode(
-                  formatAssistantStreamPart2("assistant_message", {
-                    id: value.data.id,
-                    role: "assistant",
-                    content: [{ type: "text", text: { value: "" } }]
-                  })
-                )
-              );
-              break;
-            }
-            case "thread.message.delta": {
-              const content = (_a18 = value.data.delta.content) == null ? void 0 : _a18[0];
-              if ((content == null ? void 0 : content.type) === "text" && ((_b = content.text) == null ? void 0 : _b.value) != null) {
-                controller.enqueue(
-                  textEncoder.encode(
-                    formatAssistantStreamPart2("text", content.text.value)
-                  )
-                );
-              }
-              break;
-            }
-            case "thread.run.completed":
-            case "thread.run.requires_action": {
-              result = value.data;
-              break;
-            }
-          }
-        }
-        return result;
-      };
-      controller.enqueue(
-        textEncoder.encode(
-          formatAssistantStreamPart2("assistant_control_data", {
-            threadId,
-            messageId
-          })
-        )
-      );
-      try {
-        await process2({
-          sendMessage,
-          sendDataMessage,
-          forwardStream
-        });
-      } catch (error) {
-        sendError((_a17 = error.message) != null ? _a17 : `${error}`);
-      } finally {
-        controller.close();
-      }
-    },
-    pull(controller) {
-    },
-    cancel() {
-    }
-  });
-  return new Response(stream, {
-    status: 200,
-    headers: {
-      "Content-Type": "text/plain; charset=utf-8"
-    }
-  });
-}
-
 // streams/langchain-adapter.ts
 var langchain_adapter_exports = {};
 __export(langchain_adapter_exports, {
@@ -7827,7 +7702,6 @@ var StreamData = class {
 export {
   AISDKError16 as AISDKError,
   APICallError2 as APICallError,
-  AssistantResponse,
   DownloadError,
   EmptyResponseBodyError,
   InvalidArgumentError,
@@ -7880,13 +7754,11 @@ export {
   generateImage as experimental_generateImage,
   experimental_wrapLanguageModel,
   extractReasoningMiddleware,
-  formatAssistantStreamPart,
   formatDataStreamPart3 as formatDataStreamPart,
   generateId2 as generateId,
   generateObject,
   generateText,
   jsonSchema2 as jsonSchema,
-  parseAssistantStreamPart,
   parseDataStreamPart,
   pipeDataStreamToResponse,
   processDataStream,
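
Note: together with the removal of streams/assistant-response.ts above, the last two hunks drop AssistantResponse, formatAssistantStreamPart, and parseAssistantStreamPart from the package's export list, so an import like the following, which resolves against 4.3.2, would presumably fail against 5.0.0-canary.1:

  // Valid for ai@4.3.2; these named exports are gone from the canary bundle.
  import {
    AssistantResponse,
    formatAssistantStreamPart,
    parseAssistantStreamPart
  } from "ai";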