ai 5.0.0-alpha.8 → 5.0.0-alpha.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -683,11 +683,43 @@ async function callCompletionApi({
  }
  }

- // src/ui/chat-store.ts
+ // src/ui/chat.ts
  import {
  generateId as generateIdFunc
  } from "@ai-sdk/provider-utils";

+ // src/util/serial-job-executor.ts
+ var SerialJobExecutor = class {
+ constructor() {
+ this.queue = [];
+ this.isProcessing = false;
+ }
+ async processQueue() {
+ if (this.isProcessing) {
+ return;
+ }
+ this.isProcessing = true;
+ while (this.queue.length > 0) {
+ await this.queue[0]();
+ this.queue.shift();
+ }
+ this.isProcessing = false;
+ }
+ async run(job) {
+ return new Promise((resolve, reject) => {
+ this.queue.push(async () => {
+ try {
+ await job();
+ resolve();
+ } catch (error) {
+ reject(error);
+ }
+ });
+ void this.processQueue();
+ });
+ }
+ };
+
  // src/ui/process-ui-message-stream.ts
  import {
  validateTypes
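
Note: the SerialJobExecutor added above runs queued async jobs strictly one at a time. A minimal usage sketch (illustrative only, not part of the diff; assumes the class is in scope):

// Both jobs are queued immediately, but the second starts only after the first resolves.
const executor = new SerialJobExecutor();
void executor.run(async () => {
  await new Promise((resolve) => setTimeout(resolve, 100));
  console.log("job 1 done");
});
void executor.run(async () => {
  console.log("job 2 runs after job 1");
});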
@@ -1347,6 +1379,9 @@ function shouldResubmitMessages({
  );
  }
  function isAssistantMessageWithCompletedToolCalls(message) {
+ if (!message) {
+ return false;
+ }
  if (message.role !== "assistant") {
  return false;
  }
@@ -1357,242 +1392,298 @@ function isAssistantMessageWithCompletedToolCalls(message) {
  return lastStepToolInvocations.length > 0 && lastStepToolInvocations.every((part) => "result" in part.toolInvocation);
  }

- // src/ui/chat-store.ts
- var ChatStore = class {
+ // src/ui/default-chat-transport.ts
+ import {
+ parseJsonEventStream as parseJsonEventStream2
+ } from "@ai-sdk/provider-utils";
+ var getOriginalFetch2 = () => fetch;
+ async function fetchUIMessageStream({
+ api,
+ body,
+ credentials,
+ headers,
+ abortController,
+ fetch: fetch2 = getOriginalFetch2(),
+ requestType = "generate"
+ }) {
+ var _a17, _b, _c;
+ const response = requestType === "resume" ? await fetch2(`${api}?chatId=${body.chatId}`, {
+ method: "GET",
+ headers: {
+ "Content-Type": "application/json",
+ ...headers
+ },
+ signal: (_a17 = abortController == null ? void 0 : abortController()) == null ? void 0 : _a17.signal,
+ credentials
+ }) : await fetch2(api, {
+ method: "POST",
+ body: JSON.stringify(body),
+ headers: {
+ "Content-Type": "application/json",
+ ...headers
+ },
+ signal: (_b = abortController == null ? void 0 : abortController()) == null ? void 0 : _b.signal,
+ credentials
+ });
+ if (!response.ok) {
+ throw new Error(
+ (_c = await response.text()) != null ? _c : "Failed to fetch the chat response."
+ );
+ }
+ if (!response.body) {
+ throw new Error("The response body is empty.");
+ }
+ return parseJsonEventStream2({
+ stream: response.body,
+ schema: uiMessageStreamPartSchema
+ }).pipeThrough(
+ new TransformStream({
+ async transform(part, controller) {
+ if (!part.success) {
+ throw part.error;
+ }
+ controller.enqueue(part.value);
+ }
+ })
+ );
+ }
+ var DefaultChatTransport = class {
  constructor({
- chats = {},
- generateId: generateId3,
- transport,
+ api = "/api/chat",
+ credentials,
+ headers,
+ body,
+ fetch: fetch2,
+ prepareRequestBody
+ } = {}) {
+ this.api = api;
+ this.credentials = credentials;
+ this.headers = headers;
+ this.body = body;
+ this.fetch = fetch2;
+ this.prepareRequestBody = prepareRequestBody;
+ }
+ submitMessages({
+ chatId,
+ messages,
+ abortController,
+ body,
+ headers,
+ requestType
+ }) {
+ var _a17, _b;
+ return fetchUIMessageStream({
+ api: this.api,
+ headers: {
+ ...this.headers,
+ ...headers
+ },
+ body: (_b = (_a17 = this.prepareRequestBody) == null ? void 0 : _a17.call(this, {
+ chatId,
+ messages,
+ ...this.body,
+ ...body
+ })) != null ? _b : {
+ chatId,
+ messages,
+ ...this.body,
+ ...body
+ },
+ credentials: this.credentials,
+ abortController: () => abortController,
+ fetch: this.fetch,
+ requestType
+ });
+ }
+ };
+
+ // src/ui/chat.ts
+ var AbstractChat = class {
+ constructor({
+ generateId: generateId3 = generateIdFunc,
+ id = generateId3(),
+ transport = new DefaultChatTransport(),
  maxSteps = 1,
  messageMetadataSchema,
  dataPartSchemas,
- createChat
+ state,
+ onError,
+ onToolCall,
+ onFinish
  }) {
- this.createChat = createChat;
- this.chats = new Map(
- Object.entries(chats).map(([id, chat]) => [
- id,
- this.createChat({ messages: chat.messages })
- ])
- );
+ this.subscribers = /* @__PURE__ */ new Set();
+ this.activeResponse = void 0;
+ this.jobExecutor = new SerialJobExecutor();
+ this.removeAssistantResponse = () => {
+ const lastMessage = this.state.messages[this.state.messages.length - 1];
+ if (lastMessage == null) {
+ throw new Error("Cannot remove assistant response from empty chat");
+ }
+ if (lastMessage.role !== "assistant") {
+ throw new Error("Last message is not an assistant message");
+ }
+ this.state.popMessage();
+ this.emit({ type: "messages-changed" });
+ };
+ /**
+ * Append a user message to the chat list. This triggers the API call to fetch
+ * the assistant's response.
+ */
+ this.append = async (message, { headers, body } = {}) => {
+ var _a17;
+ this.state.pushMessage({ ...message, id: (_a17 = message.id) != null ? _a17 : this.generateId() });
+ this.emit({ type: "messages-changed" });
+ await this.triggerRequest({
+ headers,
+ body,
+ requestType: "generate"
+ });
+ };
+ /**
+ * Reload the last AI chat response for the given chat history. If the last
+ * message isn't from the assistant, it will request the API to generate a
+ * new response.
+ */
+ this.reload = async ({
+ headers,
+ body
+ } = {}) => {
+ if (this.lastMessage === void 0) {
+ return;
+ }
+ if (this.lastMessage.role === "assistant") {
+ this.state.popMessage();
+ this.emit({ type: "messages-changed" });
+ }
+ await this.triggerRequest({
+ requestType: "generate",
+ headers,
+ body
+ });
+ };
+ /**
+ * Resume an ongoing chat generation stream. This does not resume an aborted generation.
+ */
+ this.experimental_resume = async ({
+ headers,
+ body
+ } = {}) => {
+ await this.triggerRequest({
+ requestType: "resume",
+ headers,
+ body
+ });
+ };
+ this.addToolResult = async ({
+ toolCallId,
+ result
+ }) => {
+ this.jobExecutor.run(async () => {
+ updateToolCallResult({
+ messages: this.state.messages,
+ toolCallId,
+ toolResult: result
+ });
+ this.messages = this.state.messages;
+ if (this.status === "submitted" || this.status === "streaming") {
+ return;
+ }
+ const lastMessage = this.lastMessage;
+ if (isAssistantMessageWithCompletedToolCalls(lastMessage)) {
+ this.triggerRequest({
+ requestType: "generate"
+ });
+ }
+ });
+ };
+ /**
+ * Abort the current request immediately, keep the generated tokens if any.
+ */
+ this.stop = async () => {
+ var _a17;
+ if (this.status !== "streaming" && this.status !== "submitted")
+ return;
+ if ((_a17 = this.activeResponse) == null ? void 0 : _a17.abortController) {
+ this.activeResponse.abortController.abort();
+ this.activeResponse.abortController = void 0;
+ }
+ };
+ this.id = id;
  this.maxSteps = maxSteps;
  this.transport = transport;
- this.subscribers = /* @__PURE__ */ new Set();
- this.generateId = generateId3 != null ? generateId3 : generateIdFunc;
+ this.generateId = generateId3;
  this.messageMetadataSchema = messageMetadataSchema;
  this.dataPartSchemas = dataPartSchemas;
+ this.state = state;
+ this.onError = onError;
+ this.onToolCall = onToolCall;
+ this.onFinish = onFinish;
  }
- hasChat(id) {
- return this.chats.has(id);
- }
- addChat(id, messages) {
- this.chats.set(id, this.createChat({ messages }));
- }
- getChats() {
- return Array.from(this.chats.entries());
- }
- get chatCount() {
- return this.chats.size;
- }
- getStatus(id) {
- return this.getChat(id).status;
+ /**
+ * Hook status:
+ *
+ * - `submitted`: The message has been sent to the API and we're awaiting the start of the response stream.
+ * - `streaming`: The response is actively streaming in from the API, receiving chunks of data.
+ * - `ready`: The full response has been received and processed; a new user message can be submitted.
+ * - `error`: An error occurred during the API request, preventing successful completion.
+ */
+ get status() {
+ return this.state.status;
  }
  setStatus({
- id,
  status,
  error
  }) {
- const state = this.getChat(id);
- if (state.status === status)
+ if (this.status === status)
  return;
- state.setStatus(status);
- state.setError(error);
- this.emit({ type: "chat-status-changed", chatId: id, error });
+ this.state.status = status;
+ this.state.error = error;
+ this.emit({ type: "status-changed" });
  }
- getError(id) {
- return this.getChat(id).error;
+ get error() {
+ return this.state.error;
  }
- getMessages(id) {
- return this.getChat(id).messages;
+ get messages() {
+ return this.state.messages;
  }
- getLastMessage(id) {
- const chat = this.getChat(id);
- return chat.messages[chat.messages.length - 1];
+ get lastMessage() {
+ return this.state.messages[this.state.messages.length - 1];
  }
  subscribe(subscriber) {
  this.subscribers.add(subscriber);
  return () => this.subscribers.delete(subscriber);
  }
- setMessages({
- id,
- messages
- }) {
- this.getChat(id).setMessages(messages);
- this.emit({ type: "chat-messages-changed", chatId: id });
- }
- removeAssistantResponse(id) {
- const chat = this.getChat(id);
- const lastMessage = chat.messages[chat.messages.length - 1];
- if (lastMessage == null) {
- throw new Error("Cannot remove assistant response from empty chat");
- }
- if (lastMessage.role !== "assistant") {
- throw new Error("Last message is not an assistant message");
- }
- chat.popMessage();
- this.emit({ type: "chat-messages-changed", chatId: id });
- }
- async submitMessage({
- chatId,
- message,
- headers,
- body,
- onError,
- onToolCall,
- onFinish
- }) {
- var _a17;
- const chat = this.getChat(chatId);
- chat.pushMessage({ ...message, id: (_a17 = message.id) != null ? _a17 : this.generateId() });
- this.emit({
- type: "chat-messages-changed",
- chatId
- });
- await this.triggerRequest({
- chatId,
- headers,
- body,
- requestType: "generate",
- onError,
- onToolCall,
- onFinish
- });
- }
- async resubmitLastUserMessage({
- chatId,
- headers,
- body,
- onError,
- onToolCall,
- onFinish
- }) {
- const chat = this.getChat(chatId);
- if (chat.messages[chat.messages.length - 1].role === "assistant") {
- chat.popMessage();
- this.emit({
- type: "chat-messages-changed",
- chatId
- });
- }
- if (chat.messages.length === 0) {
- return;
- }
- return this.triggerRequest({
- chatId,
- requestType: "generate",
- headers,
- body,
- onError,
- onToolCall,
- onFinish
- });
- }
- async resumeStream({
- chatId,
- headers,
- body,
- onError,
- onToolCall,
- onFinish
- }) {
- return this.triggerRequest({
- chatId,
- requestType: "resume",
- headers,
- body,
- onError,
- onToolCall,
- onFinish
- });
- }
- async addToolResult({
- chatId,
- toolCallId,
- result
- }) {
- const chat = this.getChat(chatId);
- chat.jobExecutor.run(async () => {
- updateToolCallResult({
- messages: chat.messages,
- toolCallId,
- toolResult: result
- });
- this.setMessages({
- id: chatId,
- messages: chat.messages
- });
- if (chat.status === "submitted" || chat.status === "streaming") {
- return;
- }
- const lastMessage = chat.messages[chat.messages.length - 1];
- if (isAssistantMessageWithCompletedToolCalls(lastMessage)) {
- this.triggerRequest({
- requestType: "generate",
- chatId
- });
- }
- });
- }
- async stopStream({ chatId }) {
- var _a17;
- const chat = this.getChat(chatId);
- if (chat.status !== "streaming" && chat.status !== "submitted")
- return;
- if ((_a17 = chat.activeResponse) == null ? void 0 : _a17.abortController) {
- chat.activeResponse.abortController.abort();
- chat.activeResponse.abortController = void 0;
- }
+ set messages(messages) {
+ this.state.messages = messages;
+ this.emit({ type: "messages-changed" });
  }
  emit(event) {
  for (const subscriber of this.subscribers) {
- subscriber.onChatChanged(event);
- }
- }
- getChat(id) {
- if (!this.hasChat(id)) {
- this.addChat(id, []);
+ subscriber.onChange(event);
  }
- return this.chats.get(id);
  }
  async triggerRequest({
- chatId,
  requestType,
  headers,
- body,
- onError,
- onToolCall,
- onFinish
+ body
  }) {
- const chat = this.getChat(chatId);
- this.setStatus({ id: chatId, status: "submitted", error: void 0 });
- const messageCount = chat.messages.length;
- const lastMessage = chat.messages[chat.messages.length - 1];
- const maxStep = lastMessage.parts.filter(
- (part) => part.type === "step-start"
- ).length;
+ var _a17, _b;
+ this.setStatus({ status: "submitted", error: void 0 });
+ const messageCount = this.state.messages.length;
+ const lastMessage = this.lastMessage;
+ const maxStep = (_a17 = lastMessage == null ? void 0 : lastMessage.parts.filter((part) => part.type === "step-start").length) != null ? _a17 : 0;
  try {
- const lastMessage2 = chat.messages[chat.messages.length - 1];
  const activeResponse = {
  state: createStreamingUIMessageState({
- lastMessage: chat.snapshot ? chat.snapshot(lastMessage2) : lastMessage2,
+ lastMessage: this.state.snapshot(lastMessage),
  newMessageId: this.generateId()
  }),
  abortController: new AbortController()
  };
- chat.setActiveResponse(activeResponse);
+ this.activeResponse = activeResponse;
  const stream = await this.transport.submitMessages({
- chatId,
- messages: chat.messages,
+ chatId: this.id,
+ messages: this.state.messages,
  body,
  headers,
  abortController: activeResponse.abortController,
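
Note: DefaultChatTransport now defaults api to "/api/chat" and takes all constructor options as optional, which is what lets AbstractChat fall back to new DefaultChatTransport() above. A minimal construction sketch (illustrative only, not part of the diff; the header value is a placeholder):

const transport = new DefaultChatTransport({
  api: "/api/chat",
  headers: { Authorization: "Bearer <token>" }, // placeholder credential header
  // Optional: shape the request body yourself; when omitted, the transport
  // sends { chatId, messages, ...this.body, ...body }.
  prepareRequestBody: ({ chatId, messages }) => ({ chatId, messages }),
});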
@@ -1600,23 +1691,23 @@ var ChatStore = class {
  });
  const runUpdateMessageJob = (job) => (
  // serialize the job execution to avoid race conditions:
- chat.jobExecutor.run(
+ this.jobExecutor.run(
  () => job({
  state: activeResponse.state,
  write: () => {
- this.setStatus({ id: chatId, status: "streaming" });
- const replaceLastMessage = activeResponse.state.message.id === chat.messages[chat.messages.length - 1].id;
+ var _a18;
+ this.setStatus({ status: "streaming" });
+ const replaceLastMessage = activeResponse.state.message.id === ((_a18 = this.lastMessage) == null ? void 0 : _a18.id);
  if (replaceLastMessage) {
- chat.replaceMessage(
- chat.messages.length - 1,
+ this.state.replaceMessage(
+ this.state.messages.length - 1,
  activeResponse.state.message
  );
  } else {
- chat.pushMessage(activeResponse.state.message);
+ this.state.pushMessage(activeResponse.state.message);
  }
  this.emit({
- type: "chat-messages-changed",
- chatId
+ type: "messages-changed"
  });
  }
  })
@@ -1625,7 +1716,7 @@ var ChatStore = class {
  await consumeStream({
  stream: processUIMessageStream({
  stream,
- onToolCall,
+ onToolCall: this.onToolCall,
  messageMetadataSchema: this.messageMetadataSchema,
  dataPartSchemas: this.dataPartSchemas,
  runUpdateMessageJob
@@ -1634,32 +1725,29 @@ var ChatStore = class {
  throw error;
  }
  });
- onFinish == null ? void 0 : onFinish({ message: activeResponse.state.message });
- this.setStatus({ id: chatId, status: "ready" });
+ (_b = this.onFinish) == null ? void 0 : _b.call(this, { message: activeResponse.state.message });
+ this.setStatus({ status: "ready" });
  } catch (err) {
+ console.error(err);
  if (err.name === "AbortError") {
- this.setStatus({ id: chatId, status: "ready" });
+ this.setStatus({ status: "ready" });
  return null;
  }
- if (onError && err instanceof Error) {
- onError(err);
+ if (this.onError && err instanceof Error) {
+ this.onError(err);
  }
- this.setStatus({ id: chatId, status: "error", error: err });
+ this.setStatus({ status: "error", error: err });
  } finally {
- chat.setActiveResponse(void 0);
+ this.activeResponse = void 0;
  }
  if (shouldResubmitMessages({
  originalMaxToolInvocationStep: maxStep,
  originalMessageCount: messageCount,
  maxSteps: this.maxSteps,
- messages: chat.messages
+ messages: this.state.messages
  })) {
  await this.triggerRequest({
- chatId,
  requestType,
- onError,
- onToolCall,
- onFinish,
  headers,
  body
  });
@@ -1861,147 +1949,6 @@ function convertToModelMessages(messages, options) {
  }
  var convertToCoreMessages = convertToModelMessages;

- // src/ui/default-chat-store-options.ts
- import {
- generateId as generateIdFunc2
- } from "@ai-sdk/provider-utils";
-
- // src/ui/default-chat-transport.ts
- import {
- parseJsonEventStream as parseJsonEventStream2
- } from "@ai-sdk/provider-utils";
- var getOriginalFetch2 = () => fetch;
- async function fetchUIMessageStream({
- api,
- body,
- credentials,
- headers,
- abortController,
- fetch: fetch2 = getOriginalFetch2(),
- requestType = "generate"
- }) {
- var _a17, _b, _c;
- const response = requestType === "resume" ? await fetch2(`${api}?chatId=${body.chatId}`, {
- method: "GET",
- headers: {
- "Content-Type": "application/json",
- ...headers
- },
- signal: (_a17 = abortController == null ? void 0 : abortController()) == null ? void 0 : _a17.signal,
- credentials
- }) : await fetch2(api, {
- method: "POST",
- body: JSON.stringify(body),
- headers: {
- "Content-Type": "application/json",
- ...headers
- },
- signal: (_b = abortController == null ? void 0 : abortController()) == null ? void 0 : _b.signal,
- credentials
- });
- if (!response.ok) {
- throw new Error(
- (_c = await response.text()) != null ? _c : "Failed to fetch the chat response."
- );
- }
- if (!response.body) {
- throw new Error("The response body is empty.");
- }
- return parseJsonEventStream2({
- stream: response.body,
- schema: uiMessageStreamPartSchema
- }).pipeThrough(
- new TransformStream({
- async transform(part, controller) {
- if (!part.success) {
- throw part.error;
- }
- controller.enqueue(part.value);
- }
- })
- );
- }
- var DefaultChatTransport = class {
- constructor({
- api,
- credentials,
- headers,
- body,
- fetch: fetch2,
- prepareRequestBody
- }) {
- this.api = api;
- this.credentials = credentials;
- this.headers = headers;
- this.body = body;
- this.fetch = fetch2;
- this.prepareRequestBody = prepareRequestBody;
- }
- submitMessages({
- chatId,
- messages,
- abortController,
- body,
- headers,
- requestType
- }) {
- var _a17, _b;
- return fetchUIMessageStream({
- api: this.api,
- headers: {
- ...this.headers,
- ...headers
- },
- body: (_b = (_a17 = this.prepareRequestBody) == null ? void 0 : _a17.call(this, {
- chatId,
- messages,
- ...this.body,
- ...body
- })) != null ? _b : {
- chatId,
- messages,
- ...this.body,
- ...body
- },
- credentials: this.credentials,
- abortController: () => abortController,
- fetch: this.fetch,
- requestType
- });
- }
- };
-
- // src/ui/default-chat-store-options.ts
- function defaultChatStoreOptions({
- api = "/api/chat",
- fetch: fetch2,
- credentials,
- headers,
- body,
- prepareRequestBody,
- generateId: generateId3 = generateIdFunc2,
- messageMetadataSchema,
- maxSteps = 1,
- dataPartSchemas,
- chats
- }) {
- return () => ({
- transport: new DefaultChatTransport({
- api,
- fetch: fetch2,
- credentials,
- headers,
- body,
- prepareRequestBody
- }),
- generateId: generateId3,
- messageMetadataSchema,
- dataPartSchemas,
- maxSteps,
- chats
- });
- }
-
  // src/ui/transform-text-to-ui-message-stream.ts
  function transformTextToUiMessageStream({
  stream
@@ -2371,38 +2318,6 @@ function isDeepEqualData(obj1, obj2) {
  return true;
  }

- // src/util/serial-job-executor.ts
- var SerialJobExecutor = class {
- constructor() {
- this.queue = [];
- this.isProcessing = false;
- }
- async processQueue() {
- if (this.isProcessing) {
- return;
- }
- this.isProcessing = true;
- while (this.queue.length > 0) {
- await this.queue[0]();
- this.queue.shift();
- }
- this.isProcessing = false;
- }
- async run(job) {
- return new Promise((resolve, reject) => {
- this.queue.push(async () => {
- try {
- await job();
- resolve();
- } catch (error) {
- reject(error);
- }
- });
- void this.processQueue();
- });
- }
- };
-
  // src/util/simulate-readable-stream.ts
  import { delay as delayFunction } from "@ai-sdk/provider-utils";
  function simulateReadableStream({
@@ -8545,7 +8460,7 @@ var DefaultTranscriptionResult = class {
  export {
  AISDKError16 as AISDKError,
  APICallError,
- ChatStore,
+ AbstractChat,
  DefaultChatTransport,
  DownloadError,
  EmptyResponseBodyError,
@@ -8595,7 +8510,6 @@ export {
  createUIMessageStream,
  createUIMessageStreamResponse,
  customProvider,
- defaultChatStoreOptions,
  defaultSettingsMiddleware,
  embed,
  embedMany,