ai 4.2.10 → 5.0.0-canary.0

This diff shows the published contents of the two package versions as they appear in their public registries and is provided for informational purposes only.
package/dist/index.mjs CHANGED
@@ -7,10 +7,8 @@ var __export = (target, all) => {
 // core/index.ts
 import { createIdGenerator as createIdGenerator5, generateId as generateId2 } from "@ai-sdk/provider-utils";
 import {
-  formatAssistantStreamPart,
   formatDataStreamPart as formatDataStreamPart3,
   jsonSchema as jsonSchema2,
-  parseAssistantStreamPart,
   parseDataStreamPart,
   processDataStream,
   processTextStream,
@@ -1078,14 +1076,11 @@ var DownloadError = class extends AISDKError5 {
 _a5 = symbol5;
 
 // util/download.ts
-async function download({
-  url,
-  fetchImplementation = fetch
-}) {
+async function download({ url }) {
   var _a17;
   const urlText = url.toString();
   try {
-    const response = await fetchImplementation(urlText);
+    const response = await fetch(urlText);
     if (!response.ok) {
       throw new DownloadError({
         url: urlText,
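
The download helper loses its fetchImplementation parameter in this hunk and now always calls the global fetch. A minimal sketch of how test code that previously injected a custom fetch might adapt, assuming a runtime with globalThis.fetch and Response (Node 18+ or a browser); the stubbed body and headers are illustrative only:

const originalFetch = globalThis.fetch;
globalThis.fetch = async () =>
  new Response(new Uint8Array([0x89, 0x50, 0x4e, 0x47]), {
    status: 200,
    headers: { "content-type": "image/png" }
  });
try {
  // exercise code that triggers the SDK's internal download() here;
  // it now always resolves through globalThis.fetch
} finally {
  globalThis.fetch = originalFetch;
}
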
@@ -4635,7 +4630,7 @@ function smoothStream({
   let buffer = "";
   return new TransformStream({
     async transform(chunk, controller) {
-      if (chunk.type === "step-finish") {
+      if (chunk.type !== "text-delta") {
         if (buffer.length > 0) {
           controller.enqueue({ type: "text-delta", textDelta: buffer });
           buffer = "";
@@ -4643,10 +4638,6 @@ function smoothStream({
         controller.enqueue(chunk);
         return;
       }
-      if (chunk.type !== "text-delta") {
-        controller.enqueue(chunk);
-        return;
-      }
       buffer += chunk.textDelta;
       let match;
       while ((match = chunkingRegexp.exec(buffer)) != null) {
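
Taken together, the two smoothStream hunks above mean that any non-text-delta chunk now flushes the buffered text before being forwarded, where previously only step-finish chunks did. A standalone sketch of the resulting transform logic, with an assumed word-boundary chunking regexp (the actual regexp and chunking options are defined outside these hunks):

const chunkingRegexp = /[^\s]*\s/m; // assumed word-level chunking
let buffer = "";
function transform(chunk, enqueue) {
  if (chunk.type !== "text-delta") {
    // any non-text chunk (step-finish, tool calls, ...) flushes buffered text first
    if (buffer.length > 0) {
      enqueue({ type: "text-delta", textDelta: buffer });
      buffer = "";
    }
    enqueue(chunk);
    return;
  }
  buffer += chunk.textDelta;
  let match;
  while ((match = chunkingRegexp.exec(buffer)) != null) {
    enqueue({ type: "text-delta", textDelta: match[0] });
    buffer = buffer.slice(match[0].length);
  }
}
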
@@ -7433,104 +7424,6 @@ function simulateReadableStream({
   });
 }
 
-// streams/assistant-response.ts
-import {
-  formatAssistantStreamPart as formatAssistantStreamPart2
-} from "@ai-sdk/ui-utils";
-function AssistantResponse({ threadId, messageId }, process2) {
-  const stream = new ReadableStream({
-    async start(controller) {
-      var _a17;
-      const textEncoder = new TextEncoder();
-      const sendMessage = (message) => {
-        controller.enqueue(
-          textEncoder.encode(
-            formatAssistantStreamPart2("assistant_message", message)
-          )
-        );
-      };
-      const sendDataMessage = (message) => {
-        controller.enqueue(
-          textEncoder.encode(
-            formatAssistantStreamPart2("data_message", message)
-          )
-        );
-      };
-      const sendError = (errorMessage) => {
-        controller.enqueue(
-          textEncoder.encode(formatAssistantStreamPart2("error", errorMessage))
-        );
-      };
-      const forwardStream = async (stream2) => {
-        var _a18, _b;
-        let result = void 0;
-        for await (const value of stream2) {
-          switch (value.event) {
-            case "thread.message.created": {
-              controller.enqueue(
-                textEncoder.encode(
-                  formatAssistantStreamPart2("assistant_message", {
-                    id: value.data.id,
-                    role: "assistant",
-                    content: [{ type: "text", text: { value: "" } }]
-                  })
-                )
-              );
-              break;
-            }
-            case "thread.message.delta": {
-              const content = (_a18 = value.data.delta.content) == null ? void 0 : _a18[0];
-              if ((content == null ? void 0 : content.type) === "text" && ((_b = content.text) == null ? void 0 : _b.value) != null) {
-                controller.enqueue(
-                  textEncoder.encode(
-                    formatAssistantStreamPart2("text", content.text.value)
-                  )
-                );
-              }
-              break;
-            }
-            case "thread.run.completed":
-            case "thread.run.requires_action": {
-              result = value.data;
-              break;
-            }
-          }
-        }
-        return result;
-      };
-      controller.enqueue(
-        textEncoder.encode(
-          formatAssistantStreamPart2("assistant_control_data", {
-            threadId,
-            messageId
-          })
-        )
-      );
-      try {
-        await process2({
-          sendMessage,
-          sendDataMessage,
-          forwardStream
-        });
-      } catch (error) {
-        sendError((_a17 = error.message) != null ? _a17 : `${error}`);
-      } finally {
-        controller.close();
-      }
-    },
-    pull(controller) {
-    },
-    cancel() {
-    }
-  });
-  return new Response(stream, {
-    status: 200,
-    headers: {
-      "Content-Type": "text/plain; charset=utf-8"
-    }
-  });
-}
-
 // streams/langchain-adapter.ts
 var langchain_adapter_exports = {};
 __export(langchain_adapter_exports, {
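
This hunk removes the streams/assistant-response.ts helper (the OpenAI Assistants streaming response) entirely. A stripped-down local stand-in along these lines is one way to keep existing routes working; it omits forwardStream for brevity, and it assumes formatAssistantStreamPart is still importable from @ai-sdk/ui-utils in the installed version, which this diff does not confirm for the 5.x canary line:

import { formatAssistantStreamPart } from "@ai-sdk/ui-utils"; // availability on 5.x is an assumption

function assistantResponse({ threadId, messageId }, run) {
  const stream = new ReadableStream({
    async start(controller) {
      const encoder = new TextEncoder();
      // encode one assistant stream part and enqueue it
      const send = (type, value) =>
        controller.enqueue(encoder.encode(formatAssistantStreamPart(type, value)));
      send("assistant_control_data", { threadId, messageId });
      try {
        await run({
          sendMessage: (message) => send("assistant_message", message),
          sendDataMessage: (message) => send("data_message", message)
        });
      } catch (error) {
        send("error", error instanceof Error ? error.message : String(error));
      } finally {
        controller.close();
      }
    }
  });
  return new Response(stream, {
    status: 200,
    headers: { "Content-Type": "text/plain; charset=utf-8" }
  });
}
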
@@ -7770,7 +7663,6 @@ var StreamData = class {
 export {
   AISDKError16 as AISDKError,
   APICallError2 as APICallError,
-  AssistantResponse,
   DownloadError,
   EmptyResponseBodyError,
   InvalidArgumentError,
@@ -7823,13 +7715,11 @@ export {
   generateImage as experimental_generateImage,
   experimental_wrapLanguageModel,
   extractReasoningMiddleware,
-  formatAssistantStreamPart,
   formatDataStreamPart3 as formatDataStreamPart,
   generateId2 as generateId,
   generateObject,
   generateText,
   jsonSchema2 as jsonSchema,
-  parseAssistantStreamPart,
   parseDataStreamPart,
   processDataStream,
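
formatAssistantStreamPart and parseAssistantStreamPart also drop out of the package's public export list here. Code that imported them from "ai" could point at the underlying UI-utils package instead; whether @ai-sdk/ui-utils keeps exporting them on the 5.x canary line is an assumption this diff does not settle:

// before: import { formatAssistantStreamPart, parseAssistantStreamPart } from "ai";
import { formatAssistantStreamPart, parseAssistantStreamPart } from "@ai-sdk/ui-utils";

const wire = formatAssistantStreamPart("text", "hello"); // wire-format string for a "text" part
console.log(parseAssistantStreamPart(wire.trimEnd()));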