ai 3.4.32 → 4.0.0-canary.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -3333,7 +3333,7 @@ async function generateText({
  }),
  tracer,
  fn: async (span) => {
- var _a11, _b, _c, _d, _e, _f, _g;
+ var _a11, _b, _c, _d, _e, _f;
  const retry = retryWithExponentialBackoff({ maxRetries });
  const mode = {
  type: "regular",
@@ -3487,12 +3487,15 @@ async function generateText({
  nextStepType = "tool-result";
  }
  }
- const stepText = nextStepType === "continue" ? removeTextAfterLastWhitespace((_b = currentModelResponse.text) != null ? _b : "") : (_c = currentModelResponse.text) != null ? _c : "";
+ const originalText = (_b = currentModelResponse.text) != null ? _b : "";
+ const stepTextLeadingWhitespaceTrimmed = stepType === "continue" && // only for continue steps
+ text.trimEnd() !== text ? originalText.trimStart() : originalText;
+ const stepText = nextStepType === "continue" ? removeTextAfterLastWhitespace(stepTextLeadingWhitespaceTrimmed) : stepTextLeadingWhitespaceTrimmed;
  text = nextStepType === "continue" || stepType === "continue" ? text + stepText : stepText;
  if (stepType === "continue") {
  const lastMessage = responseMessages[responseMessages.length - 1];
  if (typeof lastMessage.content === "string") {
- lastMessage.content = text;
+ lastMessage.content += stepText;
  } else {
  lastMessage.content.push({
  text: stepText,
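
The generateText hunk above changes how multi-step "continue" output is stitched together: when the accumulated text already ends in whitespace, the continuation's leading whitespace is trimmed, and the last assistant message is now appended to (content += stepText) instead of being overwritten with the full accumulated text. A minimal standalone sketch of that joining rule (hypothetical helper name, not part of the SDK):

    // Sketch of the continue-step joining rule introduced above (hypothetical helper).
    function joinContinueStep(accumulatedText, continuationText) {
      // Trim the continuation's leading whitespace only when the accumulated
      // text already ends in whitespace, so "foo " + " bar" becomes "foo bar"
      // instead of "foo  bar".
      const stepText =
        accumulatedText.trimEnd() !== accumulatedText
          ? continuationText.trimStart()
          : continuationText;
      return accumulatedText + stepText;
    }
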
@@ -3518,10 +3521,10 @@ async function generateText({
  usage: currentUsage,
  warnings: currentModelResponse.warnings,
  logprobs: currentModelResponse.logprobs,
- request: (_d = currentModelResponse.request) != null ? _d : {},
+ request: (_c = currentModelResponse.request) != null ? _c : {},
  response: {
  ...currentModelResponse.response,
- headers: (_e = currentModelResponse.rawResponse) == null ? void 0 : _e.headers,
+ headers: (_d = currentModelResponse.rawResponse) == null ? void 0 : _d.headers,
  // deep clone msgs to avoid mutating past messages in multi-step:
  messages: JSON.parse(JSON.stringify(responseMessages))
  },
@@ -3563,10 +3566,10 @@ async function generateText({
  finishReason: currentModelResponse.finishReason,
  usage,
  warnings: currentModelResponse.warnings,
- request: (_f = currentModelResponse.request) != null ? _f : {},
+ request: (_e = currentModelResponse.request) != null ? _e : {},
  response: {
  ...currentModelResponse.response,
- headers: (_g = currentModelResponse.rawResponse) == null ? void 0 : _g.headers,
+ headers: (_f = currentModelResponse.rawResponse) == null ? void 0 : _f.headers,
  messages: responseMessages
  },
  logprobs: currentModelResponse.logprobs,
@@ -4266,7 +4269,8 @@ var DefaultStreamTextResult = class {
  },
  stepType,
  previousStepText = "",
- stepRequest
+ stepRequest,
+ hasLeadingWhitespace
  }) {
  const stepToolCalls = [];
  const stepToolResults = [];
@@ -4288,6 +4292,8 @@ var DefaultStreamTextResult = class {
  };
  let chunkBuffer = "";
  let chunkTextPublished = false;
+ let inWhitespacePrefix = true;
+ let hasWhitespaceSuffix = false;
  async function publishTextChunk({
  controller,
  chunk
@@ -4296,6 +4302,7 @@ var DefaultStreamTextResult = class {
  stepText += chunk.textDelta;
  fullStepText += chunk.textDelta;
  chunkTextPublished = true;
+ hasWhitespaceSuffix = chunk.textDelta.trimEnd() !== chunk.textDelta;
  await (onChunk == null ? void 0 : onChunk({ chunk }));
  }
  addStream(
@@ -4324,7 +4331,12 @@ var DefaultStreamTextResult = class {
  switch (chunkType) {
  case "text-delta": {
  if (continueSteps) {
- chunkBuffer += chunk.textDelta;
+ const trimmedChunkText = inWhitespacePrefix && hasLeadingWhitespace ? chunk.textDelta.trimStart() : chunk.textDelta;
+ if (trimmedChunkText.length === 0) {
+ break;
+ }
+ inWhitespacePrefix = false;
+ chunkBuffer += trimmedChunkText;
  const split = splitOnLastWhitespace(chunkBuffer);
  if (split != null) {
  chunkBuffer = split.suffix;
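
The streaming path mirrors the generateText rule: each step records whether its last published text delta ended in whitespace (hasWhitespaceSuffix) and hands that to the next step as hasLeadingWhitespace, which then drops whitespace-only prefixes from incoming deltas before they reach the chunk buffer. A standalone sketch of that trimming state machine (hypothetical names, outside the SDK):

    // Standalone sketch of the inWhitespacePrefix / hasLeadingWhitespace logic above.
    function makeDeltaTrimmer(hasLeadingWhitespace) {
      let inWhitespacePrefix = true;
      return (textDelta) => {
        const trimmed =
          inWhitespacePrefix && hasLeadingWhitespace
            ? textDelta.trimStart()
            : textDelta;
        if (trimmed.length === 0) {
          return null; // whitespace-only delta at the start of a continue step: skip it
        }
        inWhitespacePrefix = false; // real text seen; stop trimming
        return trimmed;
      };
    }
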
@@ -4467,7 +4479,7 @@ var DefaultStreamTextResult = class {
  if (stepType === "continue") {
  const lastMessage = responseMessages[responseMessages.length - 1];
  if (typeof lastMessage.content === "string") {
- lastMessage.content = stepText;
+ lastMessage.content += stepText;
  } else {
  lastMessage.content.push({
  text: stepText,
@@ -4528,7 +4540,8 @@ var DefaultStreamTextResult = class {
  usage: combinedUsage,
  stepType: nextStepType,
  previousStepText: fullStepText,
- stepRequest: result.request
+ stepRequest: result.request,
+ hasLeadingWhitespace: hasWhitespaceSuffix
  });
  return;
  }
@@ -4619,7 +4632,8 @@ var DefaultStreamTextResult = class {
  responseMessages: [],
  usage: void 0,
  stepType: "initial",
- stepRequest: request
+ stepRequest: request,
+ hasLeadingWhitespace: false
  });
  }
  /**
@@ -5106,19 +5120,12 @@ function createCallbacksTransformer(cb) {
  }
  },
  async flush() {
- const isOpenAICallbacks = isOfTypeOpenAIStreamCallbacks(callbacks);
  if (callbacks.onCompletion) {
  await callbacks.onCompletion(aggregatedResponse);
  }
- if (callbacks.onFinal && !isOpenAICallbacks) {
- await callbacks.onFinal(aggregatedResponse);
- }
  }
  });
  }
- function isOfTypeOpenAIStreamCallbacks(callbacks) {
- return "experimental_onFunctionCall" in callbacks;
- }
  function trimStartOfStreamHelper() {
  let isStreamStart = true;
  return (text) => {
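
With isOfTypeOpenAIStreamCallbacks gone, createCallbacksTransformer's flush now fires only onCompletion; onFinal is no longer invoked from this shared transformer. An illustration of the resulting behavior (createCallbacksTransformer is an internal helper of this bundle, and sourceTextStream stands in for any text stream):

    // Illustration only: after this change, flushing the transformer invokes
    // onCompletion but never onFinal.
    const stream = sourceTextStream.pipeThrough(
      createCallbacksTransformer({
        onCompletion: async (completion) => {
          console.log("stream finished:", completion); // still fires on flush
        },
        onFinal: async () => {
          // no longer called by this transformer in 4.0.0-canary.0
        },
      })
    );
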
@@ -5178,136 +5185,9 @@ function readableFromAsyncIterable(iterable) {
  });
  }

- // streams/stream-data.ts
- import { formatStreamPart as formatStreamPart2 } from "@ai-sdk/ui-utils";
-
- // util/constants.ts
- var HANGING_STREAM_WARNING_TIME_MS = 15 * 1e3;
-
- // streams/stream-data.ts
- var StreamData2 = class {
- constructor() {
- this.encoder = new TextEncoder();
- this.controller = null;
- this.isClosed = false;
- this.warningTimeout = null;
- const self = this;
- this.stream = new ReadableStream({
- start: async (controller) => {
- self.controller = controller;
- if (process.env.NODE_ENV === "development") {
- self.warningTimeout = setTimeout(() => {
- console.warn(
- "The data stream is hanging. Did you forget to close it with `data.close()`?"
- );
- }, HANGING_STREAM_WARNING_TIME_MS);
- }
- },
- pull: (controller) => {
- },
- cancel: (reason) => {
- this.isClosed = true;
- }
- });
- }
- async close() {
- if (this.isClosed) {
- throw new Error("Data Stream has already been closed.");
- }
- if (!this.controller) {
- throw new Error("Stream controller is not initialized.");
- }
- this.controller.close();
- this.isClosed = true;
- if (this.warningTimeout) {
- clearTimeout(this.warningTimeout);
- }
- }
- append(value) {
- if (this.isClosed) {
- throw new Error("Data Stream has already been closed.");
- }
- if (!this.controller) {
- throw new Error("Stream controller is not initialized.");
- }
- this.controller.enqueue(
- this.encoder.encode(formatStreamPart2("data", [value]))
- );
- }
- appendMessageAnnotation(value) {
- if (this.isClosed) {
- throw new Error("Data Stream has already been closed.");
- }
- if (!this.controller) {
- throw new Error("Stream controller is not initialized.");
- }
- this.controller.enqueue(
- this.encoder.encode(formatStreamPart2("message_annotations", [value]))
- );
- }
- };
- function createStreamDataTransformer() {
- const encoder = new TextEncoder();
- const decoder = new TextDecoder();
- return new TransformStream({
- transform: async (chunk, controller) => {
- const message = decoder.decode(chunk);
- controller.enqueue(encoder.encode(formatStreamPart2("text", message)));
- }
- });
- }
- var experimental_StreamData = class extends StreamData2 {
- };
-
- // streams/anthropic-stream.ts
- function parseAnthropicStream() {
- let previous = "";
- return (data) => {
- const json = JSON.parse(data);
- if ("error" in json) {
- throw new Error(`${json.error.type}: ${json.error.message}`);
- }
- if (!("completion" in json)) {
- return;
- }
- const text = json.completion;
- if (!previous || text.length > previous.length && text.startsWith(previous)) {
- const delta = text.slice(previous.length);
- previous = text;
- return delta;
- }
- return text;
- };
- }
- async function* streamable(stream) {
- for await (const chunk of stream) {
- if ("completion" in chunk) {
- const text = chunk.completion;
- if (text)
- yield text;
- } else if ("delta" in chunk) {
- const { delta } = chunk;
- if ("text" in delta) {
- const text = delta.text;
- if (text)
- yield text;
- }
- }
- }
- }
- function AnthropicStream(res, cb) {
- if (Symbol.asyncIterator in res) {
- return readableFromAsyncIterable(streamable(res)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer());
- } else {
- return AIStream(res, parseAnthropicStream(), cb).pipeThrough(
- createStreamDataTransformer()
- );
- }
- }
-
  // streams/assistant-response.ts
  import {
- formatStreamPart as formatStreamPart3
+ formatStreamPart as formatStreamPart2
  } from "@ai-sdk/ui-utils";
  function AssistantResponse({ threadId, messageId }, process2) {
  const stream = new ReadableStream({
@@ -5316,17 +5196,17 @@ function AssistantResponse({ threadId, messageId }, process2) {
  const textEncoder = new TextEncoder();
  const sendMessage = (message) => {
  controller.enqueue(
- textEncoder.encode(formatStreamPart3("assistant_message", message))
+ textEncoder.encode(formatStreamPart2("assistant_message", message))
  );
  };
  const sendDataMessage = (message) => {
  controller.enqueue(
- textEncoder.encode(formatStreamPart3("data_message", message))
+ textEncoder.encode(formatStreamPart2("data_message", message))
  );
  };
  const sendError = (errorMessage) => {
  controller.enqueue(
- textEncoder.encode(formatStreamPart3("error", errorMessage))
+ textEncoder.encode(formatStreamPart2("error", errorMessage))
  );
  };
  const forwardStream = async (stream2) => {
@@ -5337,7 +5217,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
  case "thread.message.created": {
  controller.enqueue(
  textEncoder.encode(
- formatStreamPart3("assistant_message", {
+ formatStreamPart2("assistant_message", {
  id: value.data.id,
  role: "assistant",
  content: [{ type: "text", text: { value: "" } }]
@@ -5351,7 +5231,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
  if ((content == null ? void 0 : content.type) === "text" && ((_b = content.text) == null ? void 0 : _b.value) != null) {
  controller.enqueue(
  textEncoder.encode(
- formatStreamPart3("text", content.text.value)
+ formatStreamPart2("text", content.text.value)
  )
  );
  }
@@ -5368,7 +5248,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
  };
  controller.enqueue(
  textEncoder.encode(
- formatStreamPart3("assistant_control_data", {
+ formatStreamPart2("assistant_control_data", {
  threadId,
  messageId
  })
@@ -5402,192 +5282,96 @@ function AssistantResponse({ threadId, messageId }, process2) {
  }
  var experimental_AssistantResponse = AssistantResponse;

- // streams/aws-bedrock-stream.ts
- async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
- var _a11, _b;
- const decoder = new TextDecoder();
- for await (const chunk of (_a11 = response.body) != null ? _a11 : []) {
- const bytes = (_b = chunk.chunk) == null ? void 0 : _b.bytes;
- if (bytes != null) {
- const chunkText = decoder.decode(bytes);
- const chunkJSON = JSON.parse(chunkText);
- const delta = extractTextDeltaFromChunk(chunkJSON);
- if (delta != null) {
- yield delta;
+ // streams/langchain-adapter.ts
+ var langchain_adapter_exports = {};
+ __export(langchain_adapter_exports, {
+ toAIStream: () => toAIStream,
+ toDataStream: () => toDataStream,
+ toDataStreamResponse: () => toDataStreamResponse
+ });
+
+ // streams/stream-data.ts
+ import { formatStreamPart as formatStreamPart3 } from "@ai-sdk/ui-utils";
+
+ // util/constants.ts
+ var HANGING_STREAM_WARNING_TIME_MS = 15 * 1e3;
+
+ // streams/stream-data.ts
+ var StreamData2 = class {
+ constructor() {
+ this.encoder = new TextEncoder();
+ this.controller = null;
+ this.isClosed = false;
+ this.warningTimeout = null;
+ const self = this;
+ this.stream = new ReadableStream({
+ start: async (controller) => {
+ self.controller = controller;
+ if (process.env.NODE_ENV === "development") {
+ self.warningTimeout = setTimeout(() => {
+ console.warn(
+ "The data stream is hanging. Did you forget to close it with `data.close()`?"
+ );
+ }, HANGING_STREAM_WARNING_TIME_MS);
+ }
+ },
+ pull: (controller) => {
+ },
+ cancel: (reason) => {
+ this.isClosed = true;
  }
- }
+ });
  }
- }
- function AWSBedrockAnthropicMessagesStream(response, callbacks) {
- return AWSBedrockStream(response, callbacks, (chunk) => {
- var _a11;
- return (_a11 = chunk.delta) == null ? void 0 : _a11.text;
- });
- }
- function AWSBedrockAnthropicStream(response, callbacks) {
- return AWSBedrockStream(response, callbacks, (chunk) => chunk.completion);
- }
- function AWSBedrockCohereStream(response, callbacks) {
- return AWSBedrockStream(response, callbacks, (chunk) => chunk == null ? void 0 : chunk.text);
- }
- function AWSBedrockLlama2Stream(response, callbacks) {
- return AWSBedrockStream(response, callbacks, (chunk) => chunk.generation);
- }
- function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
- return readableFromAsyncIterable(
- asDeltaIterable(response, extractTextDeltaFromChunk)
- ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
- }
-
- // streams/cohere-stream.ts
- var utf8Decoder = new TextDecoder("utf-8");
- async function processLines(lines, controller) {
- for (const line of lines) {
- const { text, is_finished } = JSON.parse(line);
- if (!is_finished) {
- controller.enqueue(text);
+ async close() {
+ if (this.isClosed) {
+ throw new Error("Data Stream has already been closed.");
  }
- }
- }
- async function readAndProcessLines(reader, controller) {
- let segment = "";
- while (true) {
- const { value: chunk, done } = await reader.read();
- if (done) {
- break;
+ if (!this.controller) {
+ throw new Error("Stream controller is not initialized.");
+ }
+ this.controller.close();
+ this.isClosed = true;
+ if (this.warningTimeout) {
+ clearTimeout(this.warningTimeout);
  }
- segment += utf8Decoder.decode(chunk, { stream: true });
- const linesArray = segment.split(/\r\n|\n|\r/g);
- segment = linesArray.pop() || "";
- await processLines(linesArray, controller);
- }
- if (segment) {
- const linesArray = [segment];
- await processLines(linesArray, controller);
  }
- controller.close();
- }
- function createParser2(res) {
- var _a11;
- const reader = (_a11 = res.body) == null ? void 0 : _a11.getReader();
- return new ReadableStream({
- async start(controller) {
- if (!reader) {
- controller.close();
- return;
- }
- await readAndProcessLines(reader, controller);
+ append(value) {
+ if (this.isClosed) {
+ throw new Error("Data Stream has already been closed.");
  }
- });
- }
- async function* streamable2(stream) {
- for await (const chunk of stream) {
- if (chunk.eventType === "text-generation") {
- const text = chunk.text;
- if (text)
- yield text;
+ if (!this.controller) {
+ throw new Error("Stream controller is not initialized.");
  }
+ this.controller.enqueue(
+ this.encoder.encode(formatStreamPart3("data", [value]))
+ );
  }
- }
- function CohereStream(reader, callbacks) {
- if (Symbol.asyncIterator in reader) {
- return readableFromAsyncIterable(streamable2(reader)).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
- } else {
- return createParser2(reader).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
- }
- }
-
- // streams/google-generative-ai-stream.ts
- async function* streamable3(response) {
- var _a11, _b, _c;
- for await (const chunk of response.stream) {
- const parts = (_c = (_b = (_a11 = chunk.candidates) == null ? void 0 : _a11[0]) == null ? void 0 : _b.content) == null ? void 0 : _c.parts;
- if (parts === void 0) {
- continue;
+ appendMessageAnnotation(value) {
+ if (this.isClosed) {
+ throw new Error("Data Stream has already been closed.");
  }
- const firstPart = parts[0];
- if (typeof firstPart.text === "string") {
- yield firstPart.text;
+ if (!this.controller) {
+ throw new Error("Stream controller is not initialized.");
  }
+ this.controller.enqueue(
+ this.encoder.encode(formatStreamPart3("message_annotations", [value]))
+ );
  }
- }
- function GoogleGenerativeAIStream(response, cb) {
- return readableFromAsyncIterable(streamable3(response)).pipeThrough(createCallbacksTransformer(cb)).pipeThrough(createStreamDataTransformer());
- }
-
- // streams/huggingface-stream.ts
- function createParser3(res) {
- const trimStartOfStream = trimStartOfStreamHelper();
- return new ReadableStream({
- async pull(controller) {
- var _a11, _b;
- const { value, done } = await res.next();
- if (done) {
- controller.close();
- return;
- }
- const text = trimStartOfStream((_b = (_a11 = value.token) == null ? void 0 : _a11.text) != null ? _b : "");
- if (!text)
- return;
- if (value.generated_text != null && value.generated_text.length > 0) {
- return;
- }
- if (text === "</s>" || text === "<|endoftext|>" || text === "<|end|>") {
- return;
- }
- controller.enqueue(text);
+ };
+ function createStreamDataTransformer() {
+ const encoder = new TextEncoder();
+ const decoder = new TextDecoder();
+ return new TransformStream({
+ transform: async (chunk, controller) => {
+ const message = decoder.decode(chunk);
+ controller.enqueue(encoder.encode(formatStreamPart3("text", message)));
  }
  });
  }
- function HuggingFaceStream(res, callbacks) {
- return createParser3(res).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
- }
-
- // streams/inkeep-stream.ts
- function InkeepStream(res, callbacks) {
- if (!res.body) {
- throw new Error("Response body is null");
- }
- let chat_session_id = "";
- let records_cited;
- const inkeepEventParser = (data, options) => {
- var _a11, _b;
- const { event } = options;
- if (event === "records_cited") {
- records_cited = JSON.parse(data);
- (_a11 = callbacks == null ? void 0 : callbacks.onRecordsCited) == null ? void 0 : _a11.call(callbacks, records_cited);
- }
- if (event === "message_chunk") {
- const inkeepMessageChunk = JSON.parse(data);
- chat_session_id = (_b = inkeepMessageChunk.chat_session_id) != null ? _b : chat_session_id;
- return inkeepMessageChunk.content_chunk;
- }
- return;
- };
- let { onRecordsCited, ...passThroughCallbacks } = callbacks || {};
- passThroughCallbacks = {
- ...passThroughCallbacks,
- onFinal: (completion) => {
- var _a11;
- const inkeepOnFinalMetadata = {
- chat_session_id,
- records_cited
- };
- (_a11 = callbacks == null ? void 0 : callbacks.onFinal) == null ? void 0 : _a11.call(callbacks, completion, inkeepOnFinalMetadata);
- }
- };
- return AIStream(res, inkeepEventParser, passThroughCallbacks).pipeThrough(
- createStreamDataTransformer()
- );
- }
+ var experimental_StreamData = class extends StreamData2 {
+ };

  // streams/langchain-adapter.ts
- var langchain_adapter_exports = {};
- __export(langchain_adapter_exports, {
- toAIStream: () => toAIStream,
- toDataStream: () => toDataStream,
- toDataStreamResponse: () => toDataStreamResponse
- });
  function toAIStream(stream, callbacks) {
  return toDataStream(stream, callbacks);
  }
@@ -5684,425 +5468,6 @@ function toReadableStream(res) {
  });
  }

- // streams/langchain-stream.ts
- function LangChainStream(callbacks) {
- const stream = new TransformStream();
- const writer = stream.writable.getWriter();
- const runs = /* @__PURE__ */ new Set();
- const handleError = async (e, runId) => {
- runs.delete(runId);
- await writer.ready;
- await writer.abort(e);
- };
- const handleStart = async (runId) => {
- runs.add(runId);
- };
- const handleEnd = async (runId) => {
- runs.delete(runId);
- if (runs.size === 0) {
- await writer.ready;
- await writer.close();
- }
- };
- return {
- stream: stream.readable.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer()),
- writer,
- handlers: {
- handleLLMNewToken: async (token) => {
- await writer.ready;
- await writer.write(token);
- },
- handleLLMStart: async (_llm, _prompts, runId) => {
- handleStart(runId);
- },
- handleLLMEnd: async (_output, runId) => {
- await handleEnd(runId);
- },
- handleLLMError: async (e, runId) => {
- await handleError(e, runId);
- },
- handleChainStart: async (_chain, _inputs, runId) => {
- handleStart(runId);
- },
- handleChainEnd: async (_outputs, runId) => {
- await handleEnd(runId);
- },
- handleChainError: async (e, runId) => {
- await handleError(e, runId);
- },
- handleToolStart: async (_tool, _input, runId) => {
- handleStart(runId);
- },
- handleToolEnd: async (_output, runId) => {
- await handleEnd(runId);
- },
- handleToolError: async (e, runId) => {
- await handleError(e, runId);
- }
- }
- };
- }
-
- // streams/mistral-stream.ts
- async function* streamable4(stream) {
- var _a11, _b;
- for await (const chunk of stream) {
- const content = (_b = (_a11 = chunk.choices[0]) == null ? void 0 : _a11.delta) == null ? void 0 : _b.content;
- if (content === void 0 || content === "") {
- continue;
- }
- yield content;
- }
- }
- function MistralStream(response, callbacks) {
- const stream = readableFromAsyncIterable(streamable4(response));
- return stream.pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
- }
-
- // streams/openai-stream.ts
- import {
- createChunkDecoder,
- formatStreamPart as formatStreamPart4
- } from "@ai-sdk/ui-utils";
- function parseOpenAIStream() {
- const extract = chunkToText();
- return (data) => extract(JSON.parse(data));
- }
- async function* streamable5(stream) {
- const extract = chunkToText();
- for await (let chunk of stream) {
- if ("promptFilterResults" in chunk) {
- chunk = {
- id: chunk.id,
- created: chunk.created.getDate(),
- object: chunk.object,
- // not exposed by Azure API
- model: chunk.model,
- // not exposed by Azure API
- choices: chunk.choices.map((choice) => {
- var _a11, _b, _c, _d, _e, _f, _g;
- return {
- delta: {
- content: (_a11 = choice.delta) == null ? void 0 : _a11.content,
- function_call: (_b = choice.delta) == null ? void 0 : _b.functionCall,
- role: (_c = choice.delta) == null ? void 0 : _c.role,
- tool_calls: ((_e = (_d = choice.delta) == null ? void 0 : _d.toolCalls) == null ? void 0 : _e.length) ? (_g = (_f = choice.delta) == null ? void 0 : _f.toolCalls) == null ? void 0 : _g.map((toolCall, index) => ({
- index,
- id: toolCall.id,
- function: toolCall.function,
- type: toolCall.type
- })) : void 0
- },
- finish_reason: choice.finishReason,
- index: choice.index
- };
- })
- };
- }
- const text = extract(chunk);
- if (text)
- yield text;
- }
- }
- function chunkToText() {
- const trimStartOfStream = trimStartOfStreamHelper();
- let isFunctionStreamingIn;
- return (json) => {
- var _a11, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
- if (isChatCompletionChunk(json)) {
- const delta = (_a11 = json.choices[0]) == null ? void 0 : _a11.delta;
- if ((_b = delta.function_call) == null ? void 0 : _b.name) {
- isFunctionStreamingIn = true;
- return {
- isText: false,
- content: `{"function_call": {"name": "${delta.function_call.name}", "arguments": "`
- };
- } else if ((_e = (_d = (_c = delta.tool_calls) == null ? void 0 : _c[0]) == null ? void 0 : _d.function) == null ? void 0 : _e.name) {
- isFunctionStreamingIn = true;
- const toolCall = delta.tool_calls[0];
- if (toolCall.index === 0) {
- return {
- isText: false,
- content: `{"tool_calls":[ {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_f = toolCall.function) == null ? void 0 : _f.name}", "arguments": "`
- };
- } else {
- return {
- isText: false,
- content: `"}}, {"id": "${toolCall.id}", "type": "function", "function": {"name": "${(_g = toolCall.function) == null ? void 0 : _g.name}", "arguments": "`
- };
- }
- } else if ((_h = delta.function_call) == null ? void 0 : _h.arguments) {
- return {
- isText: false,
- content: cleanupArguments((_i = delta.function_call) == null ? void 0 : _i.arguments)
- };
- } else if ((_l = (_k = (_j = delta.tool_calls) == null ? void 0 : _j[0]) == null ? void 0 : _k.function) == null ? void 0 : _l.arguments) {
- return {
- isText: false,
- content: cleanupArguments((_o = (_n = (_m = delta.tool_calls) == null ? void 0 : _m[0]) == null ? void 0 : _n.function) == null ? void 0 : _o.arguments)
- };
- } else if (isFunctionStreamingIn && (((_p = json.choices[0]) == null ? void 0 : _p.finish_reason) === "function_call" || ((_q = json.choices[0]) == null ? void 0 : _q.finish_reason) === "stop")) {
- isFunctionStreamingIn = false;
- return {
- isText: false,
- content: '"}}'
- };
- } else if (isFunctionStreamingIn && ((_r = json.choices[0]) == null ? void 0 : _r.finish_reason) === "tool_calls") {
- isFunctionStreamingIn = false;
- return {
- isText: false,
- content: '"}}]}'
- };
- }
- }
- const text = trimStartOfStream(
- isChatCompletionChunk(json) && json.choices[0].delta.content ? json.choices[0].delta.content : isCompletion(json) ? json.choices[0].text : ""
- );
- return text;
- };
- function cleanupArguments(argumentChunk) {
- let escapedPartialJson = argumentChunk.replace(/\\/g, "\\\\").replace(/\//g, "\\/").replace(/"/g, '\\"').replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t").replace(/\f/g, "\\f");
- return `${escapedPartialJson}`;
- }
- }
- var __internal__OpenAIFnMessagesSymbol = Symbol(
- "internal_openai_fn_messages"
- );
- function isChatCompletionChunk(data) {
- return "choices" in data && data.choices && data.choices[0] && "delta" in data.choices[0];
- }
- function isCompletion(data) {
- return "choices" in data && data.choices && data.choices[0] && "text" in data.choices[0];
- }
- function OpenAIStream(res, callbacks) {
- const cb = callbacks;
- let stream;
- if (Symbol.asyncIterator in res) {
- stream = readableFromAsyncIterable(streamable5(res)).pipeThrough(
- createCallbacksTransformer(
- (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
- ...cb,
- onFinal: void 0
- } : {
- ...cb
- }
- )
- );
- } else {
- stream = AIStream(
- res,
- parseOpenAIStream(),
- (cb == null ? void 0 : cb.experimental_onFunctionCall) || (cb == null ? void 0 : cb.experimental_onToolCall) ? {
- ...cb,
- onFinal: void 0
- } : {
- ...cb
- }
- );
- }
- if (cb && (cb.experimental_onFunctionCall || cb.experimental_onToolCall)) {
- const functionCallTransformer = createFunctionCallTransformer(cb);
- return stream.pipeThrough(functionCallTransformer);
- } else {
- return stream.pipeThrough(createStreamDataTransformer());
- }
- }
- function createFunctionCallTransformer(callbacks) {
- const textEncoder = new TextEncoder();
- let isFirstChunk = true;
- let aggregatedResponse = "";
- let aggregatedFinalCompletionResponse = "";
- let isFunctionStreamingIn = false;
- let functionCallMessages = callbacks[__internal__OpenAIFnMessagesSymbol] || [];
- const decode = createChunkDecoder();
- return new TransformStream({
- async transform(chunk, controller) {
- const message = decode(chunk);
- aggregatedFinalCompletionResponse += message;
- const shouldHandleAsFunction = isFirstChunk && (message.startsWith('{"function_call":') || message.startsWith('{"tool_calls":'));
- if (shouldHandleAsFunction) {
- isFunctionStreamingIn = true;
- aggregatedResponse += message;
- isFirstChunk = false;
- return;
- }
- if (!isFunctionStreamingIn) {
- controller.enqueue(
- textEncoder.encode(formatStreamPart4("text", message))
- );
- return;
- } else {
- aggregatedResponse += message;
- }
- },
- async flush(controller) {
- try {
- if (!isFirstChunk && isFunctionStreamingIn && (callbacks.experimental_onFunctionCall || callbacks.experimental_onToolCall)) {
- isFunctionStreamingIn = false;
- const payload = JSON.parse(aggregatedResponse);
- let newFunctionCallMessages = [
- ...functionCallMessages
- ];
- let functionResponse = void 0;
- if (callbacks.experimental_onFunctionCall) {
- if (payload.function_call === void 0) {
- console.warn(
- "experimental_onFunctionCall should not be defined when using tools"
- );
- }
- const argumentsPayload = JSON.parse(
- payload.function_call.arguments
- );
- functionResponse = await callbacks.experimental_onFunctionCall(
- {
- name: payload.function_call.name,
- arguments: argumentsPayload
- },
- (result) => {
- newFunctionCallMessages = [
- ...functionCallMessages,
- {
- role: "assistant",
- content: "",
- function_call: payload.function_call
- },
- {
- role: "function",
- name: payload.function_call.name,
- content: JSON.stringify(result)
- }
- ];
- return newFunctionCallMessages;
- }
- );
- }
- if (callbacks.experimental_onToolCall) {
- const toolCalls = {
- tools: []
- };
- for (const tool2 of payload.tool_calls) {
- toolCalls.tools.push({
- id: tool2.id,
- type: "function",
- func: {
- name: tool2.function.name,
- arguments: JSON.parse(tool2.function.arguments)
- }
- });
- }
- let responseIndex = 0;
- try {
- functionResponse = await callbacks.experimental_onToolCall(
- toolCalls,
- (result) => {
- if (result) {
- const { tool_call_id, function_name, tool_call_result } = result;
- newFunctionCallMessages = [
- ...newFunctionCallMessages,
- // Only append the assistant message if it's the first response
- ...responseIndex === 0 ? [
- {
- role: "assistant",
- content: "",
- tool_calls: payload.tool_calls.map(
- (tc) => ({
- id: tc.id,
- type: "function",
- function: {
- name: tc.function.name,
- // we send the arguments an object to the user, but as the API expects a string, we need to stringify it
- arguments: JSON.stringify(
- tc.function.arguments
- )
- }
- })
- )
- }
- ] : [],
- // Append the function call result message
- {
- role: "tool",
- tool_call_id,
- name: function_name,
- content: JSON.stringify(tool_call_result)
- }
- ];
- responseIndex++;
- }
- return newFunctionCallMessages;
- }
- );
- } catch (e) {
- console.error("Error calling experimental_onToolCall:", e);
- }
- }
- if (!functionResponse) {
- controller.enqueue(
- textEncoder.encode(
- formatStreamPart4(
- payload.function_call ? "function_call" : "tool_calls",
- // parse to prevent double-encoding:
- JSON.parse(aggregatedResponse)
- )
- )
- );
- return;
- } else if (typeof functionResponse === "string") {
- controller.enqueue(
- textEncoder.encode(formatStreamPart4("text", functionResponse))
- );
- aggregatedFinalCompletionResponse = functionResponse;
- return;
- }
- const filteredCallbacks = {
- ...callbacks,
- onStart: void 0
- };
- callbacks.onFinal = void 0;
- const openAIStream = OpenAIStream(functionResponse, {
- ...filteredCallbacks,
- [__internal__OpenAIFnMessagesSymbol]: newFunctionCallMessages
- });
- const reader = openAIStream.getReader();
- while (true) {
- const { done, value } = await reader.read();
- if (done) {
- break;
- }
- controller.enqueue(value);
- }
- }
- } finally {
- if (callbacks.onFinal && aggregatedFinalCompletionResponse) {
- await callbacks.onFinal(aggregatedFinalCompletionResponse);
- }
- }
- }
- });
- }
-
- // streams/replicate-stream.ts
- async function ReplicateStream(res, cb, options) {
- var _a11;
- const url = (_a11 = res.urls) == null ? void 0 : _a11.stream;
- if (!url) {
- if (res.error)
- throw new Error(res.error);
- else
- throw new Error("Missing stream URL in Replicate response");
- }
- const eventStream = await fetch(url, {
- method: "GET",
- headers: {
- Accept: "text/event-stream",
- ...options == null ? void 0 : options.headers
- }
- });
- return AIStream(eventStream, void 0, cb).pipeThrough(
- createStreamDataTransformer()
- );
- }
-
  // streams/stream-to-response.ts
  function streamToResponse(res, response, init, data) {
  var _a11;
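
The removals above drop the legacy provider streams (LangChainStream, MistralStream, OpenAIStream, ReplicateStream). For LangChain, the LangChainAdapter export that survives this diff covers the same use case; a hedged migration sketch (the model and request wiring are assumed, not taken from this package):

    // Hedged migration sketch: LangChainStream is removed, but LangChainAdapter
    // (exported with toAIStream/toDataStream/toDataStreamResponse above) remains.
    import { LangChainAdapter } from "ai";

    export async function POST(req) {
      const { prompt } = await req.json();
      // `model` is an assumed LangChain chat model instance with a .stream() method.
      const stream = await model.stream(prompt);
      return LangChainAdapter.toDataStreamResponse(stream);
    }
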
@@ -6147,24 +5512,13 @@ var StreamingTextResponse = class extends Response {

  // streams/index.ts
  var generateId2 = generateIdImpl;
- var nanoid = generateIdImpl;
  export {
  AISDKError10 as AISDKError,
  AIStream,
  APICallError2 as APICallError,
- AWSBedrockAnthropicMessagesStream,
- AWSBedrockAnthropicStream,
- AWSBedrockCohereStream,
- AWSBedrockLlama2Stream,
- AWSBedrockStream,
- AnthropicStream,
  AssistantResponse,
- CohereStream,
  DownloadError,
  EmptyResponseBodyError,
- GoogleGenerativeAIStream,
- HuggingFaceStream,
- InkeepStream,
  InvalidArgumentError,
  InvalidDataContentError,
  InvalidMessageRoleError,
@@ -6173,18 +5527,14 @@ export {
  InvalidToolArgumentsError,
  JSONParseError,
  langchain_adapter_exports as LangChainAdapter,
- LangChainStream,
  llamaindex_adapter_exports as LlamaIndexAdapter,
  LoadAPIKeyError,
  MessageConversionError,
- MistralStream,
  NoContentGeneratedError,
  NoObjectGeneratedError,
  NoSuchModelError,
  NoSuchProviderError,
  NoSuchToolError,
- OpenAIStream,
- ReplicateStream,
  RetryError,
  StreamData2 as StreamData,
  StreamingTextResponse,
@@ -6212,7 +5562,6 @@ export {
  generateObject,
  generateText,
  jsonSchema,
- nanoid,
  parseStreamPart,
  processDataProtocolResponse,
  readDataStream,
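
Taken together, the export changes remove the deprecated nanoid alias (generateId remains) and every dedicated provider stream: OpenAIStream, AnthropicStream, the AWSBedrock*Stream family, CohereStream, GoogleGenerativeAIStream, HuggingFaceStream, InkeepStream, MistralStream, and ReplicateStream. A hedged sketch of the unified replacement path via streamText, assuming the @ai-sdk/openai provider package:

    // Hedged sketch, not from this diff: provider-specific streams give way to
    // the unified streamText call plus a provider package.
    import { streamText } from "ai";
    import { openai } from "@ai-sdk/openai"; // assumed provider package

    export async function POST(req) {
      const { prompt } = await req.json();
      const result = streamText({
        model: openai("gpt-4o-mini"), // any supported model id
        prompt,
      });
      // replaces the old `new StreamingTextResponse(OpenAIStream(...))` pattern
      return result.toDataStreamResponse();
    }
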