ai 4.0.0-canary.2 → 4.0.0-canary.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -21,7 +21,6 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
  var streams_exports = {};
  __export(streams_exports, {
  AISDKError: () => import_provider13.AISDKError,
- AIStream: () => AIStream,
  APICallError: () => import_provider13.APICallError,
  AssistantResponse: () => AssistantResponse,
  DownloadError: () => DownloadError,
@@ -43,14 +42,12 @@ __export(streams_exports, {
  NoSuchProviderError: () => NoSuchProviderError,
  NoSuchToolError: () => NoSuchToolError,
  RetryError: () => RetryError,
- StreamData: () => StreamData2,
+ StreamData: () => StreamData,
  StreamingTextResponse: () => StreamingTextResponse,
  TypeValidationError: () => import_provider13.TypeValidationError,
  UnsupportedFunctionalityError: () => import_provider13.UnsupportedFunctionalityError,
  convertToCoreMessages: () => convertToCoreMessages,
  cosineSimilarity: () => cosineSimilarity,
- createCallbacksTransformer: () => createCallbacksTransformer,
- createEventStreamTransformer: () => createEventStreamTransformer,
  createStreamDataTransformer: () => createStreamDataTransformer,
  embed: () => embed,
  embedMany: () => embedMany,
@@ -59,32 +56,26 @@ __export(streams_exports, {
  experimental_createModelRegistry: () => experimental_createModelRegistry,
  experimental_createProviderRegistry: () => experimental_createProviderRegistry,
  experimental_customProvider: () => experimental_customProvider,
- experimental_generateObject: () => experimental_generateObject,
- experimental_generateText: () => experimental_generateText,
- experimental_streamObject: () => experimental_streamObject,
- experimental_streamText: () => experimental_streamText,
  experimental_wrapLanguageModel: () => experimental_wrapLanguageModel,
- formatStreamPart: () => import_ui_utils9.formatStreamPart,
- generateId: () => generateId2,
+ formatStreamPart: () => import_ui_utils10.formatStreamPart,
+ generateId: () => import_provider_utils11.generateId,
  generateObject: () => generateObject,
  generateText: () => generateText,
- jsonSchema: () => import_ui_utils6.jsonSchema,
- parseStreamPart: () => import_ui_utils9.parseStreamPart,
- processDataProtocolResponse: () => import_ui_utils9.processDataProtocolResponse,
- readDataStream: () => import_ui_utils9.readDataStream,
- readableFromAsyncIterable: () => readableFromAsyncIterable,
+ jsonSchema: () => import_ui_utils7.jsonSchema,
+ parseStreamPart: () => import_ui_utils10.parseStreamPart,
+ processDataProtocolResponse: () => import_ui_utils10.processDataProtocolResponse,
+ readDataStream: () => import_ui_utils10.readDataStream,
  streamObject: () => streamObject,
  streamText: () => streamText,
  streamToResponse: () => streamToResponse,
- tool: () => tool,
- trimStartOfStreamHelper: () => trimStartOfStreamHelper
+ tool: () => tool
  });
  module.exports = __toCommonJS(streams_exports);
- var import_ui_utils9 = require("@ai-sdk/ui-utils");
- var import_provider_utils10 = require("@ai-sdk/provider-utils");
+ var import_ui_utils10 = require("@ai-sdk/ui-utils");
+ var import_provider_utils11 = require("@ai-sdk/provider-utils");

  // core/index.ts
- var import_ui_utils6 = require("@ai-sdk/ui-utils");
+ var import_ui_utils7 = require("@ai-sdk/ui-utils");

  // util/retry-with-exponential-backoff.ts
  var import_provider2 = require("@ai-sdk/provider");
@@ -1469,9 +1460,7 @@ function convertToCoreMessages(messages, options) {
  });
  break;
  }
- case "function":
- case "data":
- case "tool": {
+ case "data": {
  break;
  }
  default: {
@@ -2361,7 +2350,6 @@ var DefaultGenerateObjectResult = class {
  });
  }
  };
- var experimental_generateObject = generateObject;

  // core/generate-object/stream-object.ts
  var import_provider_utils6 = require("@ai-sdk/provider-utils");
@@ -2981,7 +2969,6 @@ var DefaultStreamObjectResult = class {
  });
  }
  };
- var experimental_streamObject = streamObject;

  // core/generate-text/generate-text.ts
  var import_provider_utils8 = require("@ai-sdk/provider-utils");
@@ -3012,12 +2999,6 @@ var InvalidToolArgumentsError = class extends import_provider11.AISDKError {
  static isInstance(error) {
  return import_provider11.AISDKError.hasMarker(error, marker8);
  }
- /**
- * @deprecated use `isInstance` instead
- */
- static isInvalidToolArgumentsError(error) {
- return error instanceof Error && error.name === name8 && typeof error.toolName === "string" && typeof error.toolArgs === "string";
- }
  };
  _a8 = symbol8;

@@ -3193,9 +3174,7 @@ async function generateText({
  maxRetries,
  abortSignal,
  headers,
- maxAutomaticRoundtrips = 0,
- maxToolRoundtrips = maxAutomaticRoundtrips,
- maxSteps = maxToolRoundtrips != null ? maxToolRoundtrips + 1 : 1,
+ maxSteps = 1,
  experimental_continuationSteps,
  experimental_continueSteps: continueSteps = experimental_continuationSteps != null ? experimental_continuationSteps : false,
  experimental_telemetry: telemetry,
@@ -3563,7 +3542,6 @@ var DefaultGenerateTextResult = class {
  this.request = options.request;
  this.response = options.response;
  this.responseMessages = options.responseMessages;
- this.roundtrips = options.steps;
  this.steps = options.steps;
  this.experimental_providerMetadata = options.providerMetadata;
  this.rawResponse = {
@@ -3572,10 +3550,10 @@ var DefaultGenerateTextResult = class {
  this.logprobs = options.logprobs;
  }
  };
- var experimental_generateText = generateText;

  // core/generate-text/stream-text.ts
  var import_provider_utils9 = require("@ai-sdk/provider-utils");
+ var import_ui_utils6 = require("@ai-sdk/ui-utils");

  // core/util/create-stitchable-stream.ts
  function createStitchableStream() {
@@ -3932,8 +3910,7 @@ async function streamText({
  maxRetries,
  abortSignal,
  headers,
- maxToolRoundtrips = 0,
- maxSteps = maxToolRoundtrips != null ? maxToolRoundtrips + 1 : 1,
+ maxSteps = 1,
  experimental_continueSteps: continueSteps = false,
  experimental_telemetry: telemetry,
  experimental_providerMetadata: providerMetadata,
@@ -4579,37 +4556,18 @@ var DefaultStreamTextResult = class {
  }
  });
  }
- toAIStream(callbacks = {}) {
- return this.toDataStreamInternal({ callbacks });
- }
  toDataStreamInternal({
- callbacks = {},
  getErrorMessage: getErrorMessage3 = () => "",
  // mask error messages for safety by default
  sendUsage = true
  } = {}) {
  let aggregatedResponse = "";
  const callbackTransformer = new TransformStream({
- async start() {
- if (callbacks.onStart)
- await callbacks.onStart();
- },
  async transform(chunk, controller) {
  controller.enqueue(chunk);
  if (chunk.type === "text-delta") {
- const textDelta = chunk.textDelta;
- aggregatedResponse += textDelta;
- if (callbacks.onToken)
- await callbacks.onToken(textDelta);
- if (callbacks.onText)
- await callbacks.onText(textDelta);
+ aggregatedResponse += chunk.textDelta;
  }
- },
- async flush() {
- if (callbacks.onCompletion)
- await callbacks.onCompletion(aggregatedResponse);
- if (callbacks.onFinal)
- await callbacks.onFinal(aggregatedResponse);
  }
  });
  const streamPartsTransformer = new TransformStream({
@@ -4617,12 +4575,12 @@ var DefaultStreamTextResult = class {
  const chunkType = chunk.type;
  switch (chunkType) {
  case "text-delta": {
- controller.enqueue((0, import_ui_utils9.formatStreamPart)("text", chunk.textDelta));
+ controller.enqueue((0, import_ui_utils6.formatStreamPart)("text", chunk.textDelta));
  break;
  }
  case "tool-call-streaming-start": {
  controller.enqueue(
- (0, import_ui_utils9.formatStreamPart)("tool_call_streaming_start", {
+ (0, import_ui_utils6.formatStreamPart)("tool_call_streaming_start", {
  toolCallId: chunk.toolCallId,
  toolName: chunk.toolName
  })
@@ -4631,7 +4589,7 @@ var DefaultStreamTextResult = class {
  }
  case "tool-call-delta": {
  controller.enqueue(
- (0, import_ui_utils9.formatStreamPart)("tool_call_delta", {
+ (0, import_ui_utils6.formatStreamPart)("tool_call_delta", {
  toolCallId: chunk.toolCallId,
  argsTextDelta: chunk.argsTextDelta
  })
@@ -4640,7 +4598,7 @@ var DefaultStreamTextResult = class {
  }
  case "tool-call": {
  controller.enqueue(
- (0, import_ui_utils9.formatStreamPart)("tool_call", {
+ (0, import_ui_utils6.formatStreamPart)("tool_call", {
  toolCallId: chunk.toolCallId,
  toolName: chunk.toolName,
  args: chunk.args
@@ -4650,7 +4608,7 @@ var DefaultStreamTextResult = class {
  }
  case "tool-result": {
  controller.enqueue(
- (0, import_ui_utils9.formatStreamPart)("tool_result", {
+ (0, import_ui_utils6.formatStreamPart)("tool_result", {
  toolCallId: chunk.toolCallId,
  result: chunk.result
  })
@@ -4659,13 +4617,13 @@ var DefaultStreamTextResult = class {
  }
  case "error": {
  controller.enqueue(
- (0, import_ui_utils9.formatStreamPart)("error", getErrorMessage3(chunk.error))
+ (0, import_ui_utils6.formatStreamPart)("error", getErrorMessage3(chunk.error))
  );
  break;
  }
  case "step-finish": {
  controller.enqueue(
- (0, import_ui_utils9.formatStreamPart)("finish_step", {
+ (0, import_ui_utils6.formatStreamPart)("finish_step", {
  finishReason: chunk.finishReason,
  usage: sendUsage ? {
  promptTokens: chunk.usage.promptTokens,
@@ -4678,7 +4636,7 @@ var DefaultStreamTextResult = class {
  }
  case "finish": {
  controller.enqueue(
- (0, import_ui_utils9.formatStreamPart)("finish_message", {
+ (0, import_ui_utils6.formatStreamPart)("finish_message", {
  finishReason: chunk.finishReason,
  usage: sendUsage ? {
  promptTokens: chunk.usage.promptTokens,
@@ -4697,9 +4655,6 @@ var DefaultStreamTextResult = class {
  });
  return this.fullStream.pipeThrough(callbackTransformer).pipeThrough(streamPartsTransformer).pipeThrough(new TextEncoderStream());
  }
- pipeAIStreamToResponse(response, init) {
- return this.pipeDataStreamToResponse(response, init);
- }
  pipeDataStreamToResponse(response, options) {
  const init = options == null ? void 0 : "init" in options ? options.init : {
  headers: "headers" in options ? options.headers : void 0,
@@ -4731,9 +4686,6 @@ var DefaultStreamTextResult = class {
  stream: this.textStream.pipeThrough(new TextEncoderStream())
  });
  }
- toAIStreamResponse(options) {
- return this.toDataStreamResponse(options);
- }
  toDataStream(options) {
  const stream = this.toDataStreamInternal({
  getErrorMessage: options == null ? void 0 : options.getErrorMessage,
@@ -4773,7 +4725,6 @@ var DefaultStreamTextResult = class {
  });
  }
  };
- var experimental_streamText = streamText;

  // core/middleware/wrap-language-model.ts
  var experimental_wrapLanguageModel = ({
@@ -4960,123 +4911,8 @@ function magnitude(vector) {
  return Math.sqrt(dotProduct(vector, vector));
  }

- // streams/ai-stream.ts
- var import_eventsource_parser = require("eventsource-parser");
- function createEventStreamTransformer(customParser) {
- const textDecoder = new TextDecoder();
- let eventSourceParser;
- return new TransformStream({
- async start(controller) {
- eventSourceParser = (0, import_eventsource_parser.createParser)(
- (event) => {
- if ("data" in event && event.type === "event" && event.data === "[DONE]" || // Replicate doesn't send [DONE] but does send a 'done' event
- // @see https://replicate.com/docs/streaming
- event.event === "done") {
- controller.terminate();
- return;
- }
- if ("data" in event) {
- const parsedMessage = customParser ? customParser(event.data, {
- event: event.event
- }) : event.data;
- if (parsedMessage)
- controller.enqueue(parsedMessage);
- }
- }
- );
- },
- transform(chunk) {
- eventSourceParser.feed(textDecoder.decode(chunk));
- }
- });
- }
- function createCallbacksTransformer(cb) {
- const textEncoder = new TextEncoder();
- let aggregatedResponse = "";
- const callbacks = cb || {};
- return new TransformStream({
- async start() {
- if (callbacks.onStart)
- await callbacks.onStart();
- },
- async transform(message, controller) {
- const content = typeof message === "string" ? message : message.content;
- controller.enqueue(textEncoder.encode(content));
- aggregatedResponse += content;
- if (callbacks.onToken)
- await callbacks.onToken(content);
- if (callbacks.onText && typeof message === "string") {
- await callbacks.onText(message);
- }
- },
- async flush() {
- if (callbacks.onCompletion) {
- await callbacks.onCompletion(aggregatedResponse);
- }
- }
- });
- }
- function trimStartOfStreamHelper() {
- let isStreamStart = true;
- return (text) => {
- if (isStreamStart) {
- text = text.trimStart();
- if (text)
- isStreamStart = false;
- }
- return text;
- };
- }
- function AIStream(response, customParser, callbacks) {
- if (!response.ok) {
- if (response.body) {
- const reader = response.body.getReader();
- return new ReadableStream({
- async start(controller) {
- const { done, value } = await reader.read();
- if (!done) {
- const errorText = new TextDecoder().decode(value);
- controller.error(new Error(`Response error: ${errorText}`));
- }
- }
- });
- } else {
- return new ReadableStream({
- start(controller) {
- controller.error(new Error("Response error: No response body"));
- }
- });
- }
- }
- const responseBodyStream = response.body || createEmptyReadableStream();
- return responseBodyStream.pipeThrough(createEventStreamTransformer(customParser)).pipeThrough(createCallbacksTransformer(callbacks));
- }
- function createEmptyReadableStream() {
- return new ReadableStream({
- start(controller) {
- controller.close();
- }
- });
- }
- function readableFromAsyncIterable(iterable) {
- let it = iterable[Symbol.asyncIterator]();
- return new ReadableStream({
- async pull(controller) {
- const { done, value } = await it.next();
- if (done)
- controller.close();
- else
- controller.enqueue(value);
- },
- async cancel(reason) {
- var _a11;
- await ((_a11 = it.return) == null ? void 0 : _a11.call(it, reason));
- }
- });
- }
-
  // streams/assistant-response.ts
- var import_ui_utils7 = require("@ai-sdk/ui-utils");
+ var import_ui_utils8 = require("@ai-sdk/ui-utils");
  function AssistantResponse({ threadId, messageId }, process2) {
  const stream = new ReadableStream({
  async start(controller) {
@@ -5084,17 +4920,17 @@ function AssistantResponse({ threadId, messageId }, process2) {
  const textEncoder = new TextEncoder();
  const sendMessage = (message) => {
  controller.enqueue(
- textEncoder.encode((0, import_ui_utils7.formatStreamPart)("assistant_message", message))
+ textEncoder.encode((0, import_ui_utils8.formatStreamPart)("assistant_message", message))
  );
  };
  const sendDataMessage = (message) => {
  controller.enqueue(
- textEncoder.encode((0, import_ui_utils7.formatStreamPart)("data_message", message))
+ textEncoder.encode((0, import_ui_utils8.formatStreamPart)("data_message", message))
  );
  };
  const sendError = (errorMessage) => {
  controller.enqueue(
- textEncoder.encode((0, import_ui_utils7.formatStreamPart)("error", errorMessage))
+ textEncoder.encode((0, import_ui_utils8.formatStreamPart)("error", errorMessage))
  );
  };
  const forwardStream = async (stream2) => {
@@ -5105,7 +4941,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
  case "thread.message.created": {
  controller.enqueue(
  textEncoder.encode(
- (0, import_ui_utils7.formatStreamPart)("assistant_message", {
+ (0, import_ui_utils8.formatStreamPart)("assistant_message", {
  id: value.data.id,
  role: "assistant",
  content: [{ type: "text", text: { value: "" } }]
@@ -5119,7 +4955,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
  if ((content == null ? void 0 : content.type) === "text" && ((_b = content.text) == null ? void 0 : _b.value) != null) {
  controller.enqueue(
  textEncoder.encode(
- (0, import_ui_utils7.formatStreamPart)("text", content.text.value)
+ (0, import_ui_utils8.formatStreamPart)("text", content.text.value)
  )
  );
  }
@@ -5136,7 +4972,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
  };
  controller.enqueue(
  textEncoder.encode(
- (0, import_ui_utils7.formatStreamPart)("assistant_control_data", {
+ (0, import_ui_utils8.formatStreamPart)("assistant_control_data", {
  threadId,
  messageId
  })
@@ -5178,14 +5014,40 @@ __export(langchain_adapter_exports, {
  toDataStreamResponse: () => toDataStreamResponse
  });

+ // streams/stream-callbacks.ts
+ function createCallbacksTransformer(callbacks = {}) {
+ const textEncoder = new TextEncoder();
+ let aggregatedResponse = "";
+ return new TransformStream({
+ async start() {
+ if (callbacks.onStart)
+ await callbacks.onStart();
+ },
+ async transform(message, controller) {
+ controller.enqueue(textEncoder.encode(message));
+ aggregatedResponse += message;
+ if (callbacks.onToken)
+ await callbacks.onToken(message);
+ if (callbacks.onText && typeof message === "string") {
+ await callbacks.onText(message);
+ }
+ },
+ async flush() {
+ if (callbacks.onCompletion) {
+ await callbacks.onCompletion(aggregatedResponse);
+ }
+ }
+ });
+ }
+
  // streams/stream-data.ts
- var import_ui_utils8 = require("@ai-sdk/ui-utils");
+ var import_ui_utils9 = require("@ai-sdk/ui-utils");

  // util/constants.ts
  var HANGING_STREAM_WARNING_TIME_MS = 15 * 1e3;

  // streams/stream-data.ts
- var StreamData2 = class {
+ var StreamData = class {
  constructor() {
  this.encoder = new TextEncoder();
  this.controller = null;
@@ -5231,7 +5093,7 @@ var StreamData2 = class {
  throw new Error("Stream controller is not initialized.");
  }
  this.controller.enqueue(
- this.encoder.encode((0, import_ui_utils8.formatStreamPart)("data", [value]))
+ this.encoder.encode((0, import_ui_utils9.formatStreamPart)("data", [value]))
  );
  }
  appendMessageAnnotation(value) {
@@ -5242,7 +5104,7 @@ var StreamData2 = class {
  throw new Error("Stream controller is not initialized.");
  }
  this.controller.enqueue(
- this.encoder.encode((0, import_ui_utils8.formatStreamPart)("message_annotations", [value]))
+ this.encoder.encode((0, import_ui_utils9.formatStreamPart)("message_annotations", [value]))
  );
  }
  };
@@ -5252,11 +5114,11 @@ function createStreamDataTransformer() {
  return new TransformStream({
  transform: async (chunk, controller) => {
  const message = decoder.decode(chunk);
- controller.enqueue(encoder.encode((0, import_ui_utils8.formatStreamPart)("text", message)));
+ controller.enqueue(encoder.encode((0, import_ui_utils9.formatStreamPart)("text", message)));
  }
  });
  }
- var experimental_StreamData = class extends StreamData2 {
+ var experimental_StreamData = class extends StreamData {
  };

  // streams/langchain-adapter.ts
@@ -5320,8 +5182,16 @@ __export(llamaindex_adapter_exports, {
  toDataStream: () => toDataStream2,
  toDataStreamResponse: () => toDataStreamResponse2
  });
+ var import_provider_utils10 = require("@ai-sdk/provider-utils");
  function toDataStream2(stream, callbacks) {
- return toReadableStream(stream).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
+ const trimStart = trimStartOfStream();
+ return (0, import_provider_utils10.convertAsyncIteratorToReadableStream)(stream[Symbol.asyncIterator]()).pipeThrough(
+ new TransformStream({
+ async transform(message, controller) {
+ controller.enqueue(trimStart(message.delta));
+ }
+ })
+ ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
  }
  function toDataStreamResponse2(stream, options = {}) {
  var _a11;
@@ -5337,23 +5207,16 @@ function toDataStreamResponse2(stream, options = {}) {
  })
  });
  }
- function toReadableStream(res) {
- const it = res[Symbol.asyncIterator]();
- const trimStartOfStream = trimStartOfStreamHelper();
- return new ReadableStream({
- async pull(controller) {
- var _a11;
- const { value, done } = await it.next();
- if (done) {
- controller.close();
- return;
- }
- const text = trimStartOfStream((_a11 = value.delta) != null ? _a11 : "");
- if (text) {
- controller.enqueue(text);
- }
+ function trimStartOfStream() {
+ let isStreamStart = true;
+ return (text) => {
+ if (isStreamStart) {
+ text = text.trimStart();
+ if (text)
+ isStreamStart = false;
  }
- });
+ return text;
+ };
  }

  // streams/stream-to-response.ts
@@ -5397,13 +5260,9 @@ var StreamingTextResponse = class extends Response {
  });
  }
  };
-
- // streams/index.ts
- var generateId2 = import_provider_utils10.generateId;
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
  AISDKError,
- AIStream,
  APICallError,
  AssistantResponse,
  DownloadError,
@@ -5431,8 +5290,6 @@ var generateId2 = import_provider_utils10.generateId;
  UnsupportedFunctionalityError,
  convertToCoreMessages,
  cosineSimilarity,
- createCallbacksTransformer,
- createEventStreamTransformer,
  createStreamDataTransformer,
  embed,
  embedMany,
@@ -5441,10 +5298,6 @@ var generateId2 = import_provider_utils10.generateId;
  experimental_createModelRegistry,
  experimental_createProviderRegistry,
  experimental_customProvider,
- experimental_generateObject,
- experimental_generateText,
- experimental_streamObject,
- experimental_streamText,
  experimental_wrapLanguageModel,
  formatStreamPart,
  generateId,
@@ -5454,11 +5307,9 @@ var generateId2 = import_provider_utils10.generateId;
  parseStreamPart,
  processDataProtocolResponse,
  readDataStream,
- readableFromAsyncIterable,
  streamObject,
  streamText,
  streamToResponse,
- tool,
- trimStartOfStreamHelper
+ tool
  });
  //# sourceMappingURL=index.js.map