ai 5.0.0-alpha.4 → 5.0.0-alpha.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +27 -0
- package/dist/index.d.mts +226 -169
- package/dist/index.d.ts +226 -169
- package/dist/index.js +291 -229
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +295 -230
- package/dist/index.mjs.map +1 -1
- package/package.json +4 -3
package/dist/index.mjs
CHANGED
@@ -962,12 +962,12 @@ function getToolInvocations(message) {
 // src/ui/process-ui-message-stream.ts
 function createStreamingUIMessageState({
   lastMessage,
-  newMessageId = "
+  newMessageId = ""
 } = {}) {
   var _a17;
   const isContinuation = (lastMessage == null ? void 0 : lastMessage.role) === "assistant";
   const step = isContinuation ? 1 + ((_a17 = extractMaxToolInvocationStep(getToolInvocations(lastMessage))) != null ? _a17 : 0) : 0;
-  const message = isContinuation ?
+  const message = isContinuation ? lastMessage : {
     id: newMessageId,
     metadata: {},
     role: "assistant",
@@ -1347,7 +1347,7 @@ async function consumeUIMessageStream({
   messageMetadataSchema
 }) {
   const state = createStreamingUIMessageState({
-    lastMessage,
+    lastMessage: lastMessage ? structuredClone(lastMessage) : void 0,
     newMessageId: generateId3()
   });
   const runUpdateMessageJob = async (job) => {
@@ -1550,38 +1550,6 @@ import {
   generateId as generateIdFunc
 } from "@ai-sdk/provider-utils";
 
-// src/util/serial-job-executor.ts
-var SerialJobExecutor = class {
-  constructor() {
-    this.queue = [];
-    this.isProcessing = false;
-  }
-  async processQueue() {
-    if (this.isProcessing) {
-      return;
-    }
-    this.isProcessing = true;
-    while (this.queue.length > 0) {
-      await this.queue[0]();
-      this.queue.shift();
-    }
-    this.isProcessing = false;
-  }
-  async run(job) {
-    return new Promise((resolve, reject) => {
-      this.queue.push(async () => {
-        try {
-          await job();
-          resolve();
-        } catch (error) {
-          reject(error);
-        }
-      });
-      void this.processQueue();
-    });
-  }
-};
-
 // src/ui/should-resubmit-messages.ts
 function shouldResubmitMessages({
   originalMaxToolInvocationStep,
@@ -1639,18 +1607,14 @@ var ChatStore = class {
     transport,
     maxSteps = 1,
     messageMetadataSchema,
-    dataPartSchemas
+    dataPartSchemas,
+    createChat
   }) {
+    this.createChat = createChat;
     this.chats = new Map(
-      Object.entries(chats).map(([id,
+      Object.entries(chats).map(([id, chat]) => [
         id,
-        {
-          messages: [...state.messages],
-          status: "ready",
-          activeResponse: void 0,
-          error: void 0,
-          jobExecutor: new SerialJobExecutor()
-        }
+        this.createChat({ messages: chat.messages })
       ])
     );
     this.maxSteps = maxSteps;
@@ -1664,11 +1628,7 @@ var ChatStore = class {
     return this.chats.has(id);
   }
   addChat(id, messages) {
-    this.chats.set(id, {
-      messages,
-      status: "ready",
-      jobExecutor: new SerialJobExecutor()
-    });
+    this.chats.set(id, this.createChat({ messages }));
   }
   getChats() {
     return Array.from(this.chats.entries());
@@ -1677,28 +1637,28 @@ var ChatStore = class {
     return this.chats.size;
   }
   getStatus(id) {
-    return this.
+    return this.getChatState(id).status;
   }
   setStatus({
     id,
     status,
     error
   }) {
-    const
-    if (
+    const state = this.getChatState(id);
+    if (state.status === status)
       return;
-
-
+    state.setStatus(status);
+    state.setError(error);
     this.emit({ type: "chat-status-changed", chatId: id, error });
   }
   getError(id) {
-    return this.
+    return this.getChatState(id).error;
   }
   getMessages(id) {
-    return this.
+    return this.getChatState(id).messages;
   }
   getLastMessage(id) {
-    const chat = this.
+    const chat = this.getChatState(id);
     return chat.messages[chat.messages.length - 1];
   }
   subscribe(subscriber) {
@@ -1709,11 +1669,11 @@ var ChatStore = class {
     id,
     messages
   }) {
-    this.
+    this.getChatState(id).setMessages(messages);
     this.emit({ type: "chat-messages-changed", chatId: id });
   }
   removeAssistantResponse(id) {
-    const chat = this.
+    const chat = this.getChatState(id);
     const lastMessage = chat.messages[chat.messages.length - 1];
     if (lastMessage == null) {
       throw new Error("Cannot remove assistant response from empty chat");
@@ -1721,7 +1681,8 @@ var ChatStore = class {
     if (lastMessage.role !== "assistant") {
       throw new Error("Last message is not an assistant message");
     }
-
+    chat.popMessage();
+    this.emit({ type: "chat-messages-changed", chatId: id });
   }
   async submitMessage({
     chatId,
@@ -1733,14 +1694,14 @@ var ChatStore = class {
     onFinish
   }) {
     var _a17;
-    const
-
+    const state = this.getChatState(chatId);
+    state.pushMessage({ ...message, id: (_a17 = message.id) != null ? _a17 : this.generateId() });
+    this.emit({
+      type: "chat-messages-changed",
+      chatId
+    });
     await this.triggerRequest({
       chatId,
-      messages: currentMessages.concat({
-        ...message,
-        id: (_a17 = message.id) != null ? _a17 : this.generateId()
-      }),
       headers,
       body,
       requestType: "generate",
@@ -1757,15 +1718,20 @@ var ChatStore = class {
     onToolCall,
     onFinish
   }) {
-    const
-
-
+    const chat = this.getChatState(chatId);
+    if (chat.messages[chat.messages.length - 1].role === "assistant") {
+      chat.popMessage();
+      this.emit({
+        type: "chat-messages-changed",
+        chatId
+      });
+    }
+    if (chat.messages.length === 0) {
       return;
     }
     return this.triggerRequest({
       chatId,
       requestType: "generate",
-      messages: messagesToSubmit,
       headers,
       body,
       onError,
|
|
1781
1747
|
onToolCall,
|
1782
1748
|
onFinish
|
1783
1749
|
}) {
|
1784
|
-
const chat = this.getChat(chatId);
|
1785
|
-
const currentMessages = chat.messages;
|
1786
1750
|
return this.triggerRequest({
|
1787
1751
|
chatId,
|
1788
|
-
messages: currentMessages,
|
1789
1752
|
requestType: "resume",
|
1790
1753
|
headers,
|
1791
1754
|
body,
|
@@ -1799,22 +1762,23 @@ var ChatStore = class {
|
|
1799
1762
|
toolCallId,
|
1800
1763
|
result
|
1801
1764
|
}) {
|
1802
|
-
const chat = this.
|
1765
|
+
const chat = this.getChatState(chatId);
|
1803
1766
|
chat.jobExecutor.run(async () => {
|
1804
|
-
const currentMessages = chat.messages;
|
1805
1767
|
updateToolCallResult({
|
1806
|
-
messages:
|
1768
|
+
messages: chat.messages,
|
1807
1769
|
toolCallId,
|
1808
1770
|
toolResult: result
|
1809
1771
|
});
|
1810
|
-
this.setMessages({
|
1772
|
+
this.setMessages({
|
1773
|
+
id: chatId,
|
1774
|
+
messages: chat.messages
|
1775
|
+
});
|
1811
1776
|
if (chat.status === "submitted" || chat.status === "streaming") {
|
1812
1777
|
return;
|
1813
1778
|
}
|
1814
|
-
const lastMessage =
|
1779
|
+
const lastMessage = chat.messages[chat.messages.length - 1];
|
1815
1780
|
if (isAssistantMessageWithCompletedToolCalls(lastMessage)) {
|
1816
|
-
|
1817
|
-
messages: currentMessages,
|
1781
|
+
this.triggerRequest({
|
1818
1782
|
requestType: "generate",
|
1819
1783
|
chatId
|
1820
1784
|
});
|
@@ -1823,7 +1787,7 @@ var ChatStore = class {
   }
   async stopStream({ chatId }) {
     var _a17;
-    const chat = this.
+    const chat = this.getChatState(chatId);
     if (chat.status !== "streaming" && chat.status !== "submitted")
       return;
     if ((_a17 = chat.activeResponse) == null ? void 0 : _a17.abortController) {
@@ -1836,15 +1800,14 @@ var ChatStore = class {
       subscriber.onChatChanged(event);
     }
   }
-
+  getChatState(id) {
     if (!this.hasChat(id)) {
-
+      this.addChat(id, []);
     }
     return this.chats.get(id);
   }
   async triggerRequest({
     chatId,
-    messages: chatMessages,
     requestType,
     headers,
     body,
@@ -1852,26 +1815,25 @@ var ChatStore = class {
     onToolCall,
     onFinish
   }) {
-    const
-    const chat = this.getChat(chatId);
-    this.setMessages({ id: chatId, messages: chatMessages });
+    const chat = this.getChatState(chatId);
     this.setStatus({ id: chatId, status: "submitted", error: void 0 });
-    const messageCount =
+    const messageCount = chat.messages.length;
     const maxStep = extractMaxToolInvocationStep(
-      getToolInvocations(
+      getToolInvocations(chat.messages[chat.messages.length - 1])
     );
     try {
+      const lastMessage = chat.messages[chat.messages.length - 1];
       const activeResponse = {
         state: createStreamingUIMessageState({
-          lastMessage:
-          newMessageId:
+          lastMessage: chat.snapshot ? chat.snapshot(lastMessage) : lastMessage,
+          newMessageId: this.generateId()
         }),
         abortController: new AbortController()
       };
-      chat.activeResponse
-      const stream = await
+      chat.setActiveResponse(activeResponse);
+      const stream = await this.transport.submitMessages({
         chatId,
-        messages:
+        messages: chat.messages,
         body,
         headers,
         abortController: activeResponse.abortController,
@@ -1883,15 +1845,19 @@ var ChatStore = class {
         () => job({
           state: activeResponse.state,
           write: () => {
-
-            const replaceLastMessage = activeResponse.state.message.id ===
-
-
-
-
-
-
+            this.setStatus({ id: chatId, status: "streaming" });
+            const replaceLastMessage = activeResponse.state.message.id === chat.messages[chat.messages.length - 1].id;
+            if (replaceLastMessage) {
+              chat.replaceMessage(
+                chat.messages.length - 1,
+                activeResponse.state.message
+              );
+            } else {
+              chat.pushMessage(activeResponse.state.message);
+            }
+            this.emit({
+              type: "chat-messages-changed",
+              chatId
             });
           }
         })
@@ -1901,8 +1867,8 @@ var ChatStore = class {
         stream: processUIMessageStream({
           stream,
           onToolCall,
-          messageMetadataSchema:
-          dataPartSchemas:
+          messageMetadataSchema: this.messageMetadataSchema,
+          dataPartSchemas: this.dataPartSchemas,
           runUpdateMessageJob
         }),
         onError: (error) => {
@@ -1921,24 +1887,22 @@ var ChatStore = class {
       }
       this.setStatus({ id: chatId, status: "error", error: err });
     } finally {
-      chat.
+      chat.setActiveResponse(void 0);
     }
-    const currentMessages = self.getMessages(chatId);
     if (shouldResubmitMessages({
       originalMaxToolInvocationStep: maxStep,
       originalMessageCount: messageCount,
-      maxSteps:
-      messages:
+      maxSteps: this.maxSteps,
+      messages: chat.messages
     })) {
-      await
+      await this.triggerRequest({
         chatId,
         requestType,
         onError,
         onToolCall,
         onFinish,
         headers,
-        body
-        messages: currentMessages
+        body
       });
     }
   }
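The `ChatStore` refactor above replaces the inline `{ messages, status, jobExecutor }` records with an injected `createChat` factory and routes every accessor through the new `getChatState`. A minimal sketch of the chat-state contract implied by those call sites; only the method names come from the compiled code above, the type shapes are assumptions:

```ts
// Hypothetical shape inferred from call sites (setStatus, setError, setMessages,
// pushMessage, popMessage, replaceMessage, setActiveResponse, optional snapshot);
// the actual internal types in ai@5.0.0-alpha.6 may differ.
type ChatStatus = "ready" | "submitted" | "streaming" | "error";

interface ChatState<UI_MESSAGE> {
  messages: UI_MESSAGE[];
  status: ChatStatus;
  error: Error | undefined;
  activeResponse: unknown;
  jobExecutor: { run(job: () => Promise<void>): Promise<void> };
  setStatus(status: ChatStatus): void;
  setError(error: Error | undefined): void;
  setMessages(messages: UI_MESSAGE[]): void;
  pushMessage(message: UI_MESSAGE): void;
  popMessage(): void;
  replaceMessage(index: number, message: UI_MESSAGE): void;
  setActiveResponse(response: unknown): void;
  snapshot?: <T>(value: T) => T; // used to freeze the last message before streaming
}
```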
@@ -2230,24 +2194,24 @@ function convertToModelMessages(messages, options) {
 }
 var convertToCoreMessages = convertToModelMessages;
 
-// src/ui/default-chat-store.ts
+// src/ui/default-chat-store-options.ts
 import {
   generateId as generateIdFunc2
 } from "@ai-sdk/provider-utils";
-function
-  api,
+function defaultChatStoreOptions({
+  api = "/api/chat",
   fetch: fetch2,
   credentials,
   headers,
   body,
   prepareRequestBody,
   generateId: generateId3 = generateIdFunc2,
-  dataPartSchemas,
   messageMetadataSchema,
   maxSteps = 1,
+  dataPartSchemas,
   chats
 }) {
-  return
+  return () => ({
   transport: new DefaultChatTransport({
     api,
     fetch: fetch2,
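`defaultChatStore` becomes `defaultChatStoreOptions`, which defaults `api` to `"/api/chat"` and returns a factory of store options rather than a store instance, so each consumer can build a fresh configuration. A minimal usage sketch; the option values are illustrative:

```ts
import { defaultChatStoreOptions } from "ai";

// Returns a () => options factory; api defaults to "/api/chat" when omitted.
const chatStoreOptions = defaultChatStoreOptions({
  maxSteps: 3, // illustrative value
  headers: { "x-app": "demo" }, // illustrative value
});
```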
@@ -2264,11 +2228,57 @@ function defaultChatStore({
   });
 }
 
+// src/ui-message-stream/handle-ui-message-stream-finish.ts
+function handleUIMessageStreamFinish({
+  newMessageId,
+  originalMessages = [],
+  onFinish,
+  stream
+}) {
+  if (onFinish == null) {
+    return stream;
+  }
+  const lastMessage = originalMessages[originalMessages.length - 1];
+  const isContinuation = (lastMessage == null ? void 0 : lastMessage.role) === "assistant";
+  const messageId = isContinuation ? lastMessage.id : newMessageId;
+  const state = createStreamingUIMessageState({
+    lastMessage: structuredClone(lastMessage),
+    newMessageId: messageId
+  });
+  const runUpdateMessageJob = async (job) => {
+    await job({ state, write: () => {
+    } });
+  };
+  return processUIMessageStream({
+    stream,
+    runUpdateMessageJob
+  }).pipeThrough(
+    new TransformStream({
+      transform(chunk, controller) {
+        controller.enqueue(chunk);
+      },
+      flush() {
+        const isContinuation2 = state.message.id === (lastMessage == null ? void 0 : lastMessage.id);
+        onFinish({
+          isContinuation: isContinuation2,
+          responseMessage: state.message,
+          messages: [
+            ...isContinuation2 ? originalMessages.slice(0, -1) : originalMessages,
+            state.message
+          ]
+        });
+      }
+    })
+  );
+}
+
 // src/ui-message-stream/create-ui-message-stream.ts
 function createUIMessageStream({
   execute,
-  onError = () => "An error occurred."
+  onError = () => "An error occurred.",
   // mask error messages for safety by default
+  originalMessages,
+  onFinish
 }) {
   let controller;
   const ongoingStreamPromises = [];
@@ -2285,25 +2295,27 @@ function createUIMessageStream({
   }
   try {
     const result = execute({
-
-
-
-
-
-      (
-
-
-
-
-
-
-
-
-
-
-
-
+      writer: {
+        write(part) {
+          safeEnqueue(part);
+        },
+        merge(streamArg) {
+          ongoingStreamPromises.push(
+            (async () => {
+              const reader = streamArg.getReader();
+              while (true) {
+                const { done, value } = await reader.read();
+                if (done)
+                  break;
+                safeEnqueue(value);
+              }
+            })().catch((error) => {
+              safeEnqueue({ type: "error", errorText: onError(error) });
+            })
+          );
+        },
+        onError
+      }
     });
     if (result) {
       ongoingStreamPromises.push(
@@ -2327,7 +2339,12 @@ function createUIMessageStream({
     } catch (error) {
     }
   });
-  return
+  return handleUIMessageStreamFinish({
+    stream,
+    newMessageId: "",
+    originalMessages,
+    onFinish
+  });
 }
 
 // src/ui-message-stream/ui-message-stream-headers.ts
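`createUIMessageStream` now accepts `originalMessages` and `onFinish` and pipes its output through the new `handleUIMessageStreamFinish`, so callers can observe the fully assembled response message when the stream flushes. A sketch based on the signature above; the part payload written here is illustrative, not a documented shape:

```ts
import { createUIMessageStream } from "ai";

const stream = createUIMessageStream({
  originalMessages: [], // prior UI messages; enables continuation bookkeeping
  execute: ({ writer }) => {
    // writer.write / writer.merge / writer.onError per the compiled code above
    writer.write({ type: "text", text: "Hello" } as any); // illustrative part shape
  },
  onFinish: ({ isContinuation, responseMessage, messages }) => {
    // invoked on flush with the assembled response message and the full list
    console.log(isContinuation, responseMessage.id, messages.length);
  },
});
```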
@@ -2392,6 +2409,32 @@ function pipeUIMessageStreamToResponse({
   });
 }
 
+// src/util/cosine-similarity.ts
+function cosineSimilarity(vector1, vector2) {
+  if (vector1.length !== vector2.length) {
+    throw new InvalidArgumentError({
+      parameter: "vector1,vector2",
+      value: { vector1Length: vector1.length, vector2Length: vector2.length },
+      message: `Vectors must have the same length`
+    });
+  }
+  const n = vector1.length;
+  if (n === 0) {
+    return 0;
+  }
+  let magnitudeSquared1 = 0;
+  let magnitudeSquared2 = 0;
+  let dotProduct = 0;
+  for (let i = 0; i < n; i++) {
+    const value1 = vector1[i];
+    const value2 = vector2[i];
+    magnitudeSquared1 += value1 * value1;
+    magnitudeSquared2 += value2 * value2;
+    dotProduct += value1 * value2;
+  }
+  return magnitudeSquared1 === 0 || magnitudeSquared2 === 0 ? 0 : dotProduct / (Math.sqrt(magnitudeSquared1) * Math.sqrt(magnitudeSquared2));
+}
+
 // src/util/data-url.ts
 function getTextFromDataUrl(dataUrl) {
   const [header, base64Content] = dataUrl.split(",");
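`cosineSimilarity` (relocated from its old position later in the bundle) returns the cosine of the angle between two equal-length vectors; per the implementation above, empty or zero-magnitude vectors yield 0 and mismatched lengths throw. For example:

```ts
import { cosineSimilarity } from "ai";

cosineSimilarity([1, 2, 3], [2, 4, 6]); //  1  (same direction)
cosineSimilarity([1, 0], [0, 1]);       //  0  (orthogonal)
cosineSimilarity([1, 0], [-1, 0]);      // -1  (opposite)
cosineSimilarity([], []);               //  0  (empty-vector guard)
// Vectors of different lengths throw InvalidArgumentError.
```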
@@ -2441,31 +2484,37 @@ function isDeepEqualData(obj1, obj2) {
   return true;
 }
 
-// src/util/
-
-
-
-
-      value: { vector1Length: vector1.length, vector2Length: vector2.length },
-      message: `Vectors must have the same length`
-    });
+// src/util/serial-job-executor.ts
+var SerialJobExecutor = class {
+  constructor() {
+    this.queue = [];
+    this.isProcessing = false;
   }
-
-
-
+  async processQueue() {
+    if (this.isProcessing) {
+      return;
+    }
+    this.isProcessing = true;
+    while (this.queue.length > 0) {
+      await this.queue[0]();
+      this.queue.shift();
+    }
+    this.isProcessing = false;
   }
-
-
-
-
-
-
-
-
-
+  async run(job) {
+    return new Promise((resolve, reject) => {
+      this.queue.push(async () => {
+        try {
+          await job();
+          resolve();
+        } catch (error) {
+          reject(error);
+        }
+      });
+      void this.processQueue();
+    });
   }
-
-  }
+};
 
 // src/util/simulate-readable-stream.ts
 import { delay as delayFunction } from "@ai-sdk/provider-utils";
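`SerialJobExecutor` keeps the same implementation but moves down the bundle and is now a public export (see the `export` hunk at the end of this diff). It runs async jobs strictly in submission order, one at a time; a small usage sketch:

```ts
import { SerialJobExecutor } from "ai";

const executor = new SerialJobExecutor();
const order: number[] = [];

// Both jobs are queued immediately, but the second only starts
// after the first resolves, even though run() is called concurrently.
await Promise.all([
  executor.run(async () => {
    await new Promise((resolve) => setTimeout(resolve, 50)); // slow first job
    order.push(1);
  }),
  executor.run(async () => {
    order.push(2);
  }),
]);

console.log(order); // [1, 2]
```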
@@ -3471,6 +3520,15 @@ function convertToLanguageModelV2DataContent(content) {
   }
   return { data: content, mediaType: void 0 };
 }
+function convertDataContentToBase64String(content) {
+  if (typeof content === "string") {
+    return content;
+  }
+  if (content instanceof ArrayBuffer) {
+    return convertUint8ArrayToBase642(new Uint8Array(content));
+  }
+  return convertUint8ArrayToBase642(content);
+}
 function convertDataContentToUint8Array(content) {
   if (content instanceof Uint8Array) {
     return content;
@@ -3988,6 +4046,21 @@ async function standardizePrompt(prompt) {
   };
 }
 
+// core/telemetry/stringify-for-telemetry.ts
+function stringifyForTelemetry(prompt) {
+  return JSON.stringify(
+    prompt.map((message) => ({
+      ...message,
+      content: typeof message.content === "string" ? message.content : message.content.map(
+        (part) => part.type === "file" ? {
+          ...part,
+          data: part.data instanceof Uint8Array ? convertDataContentToBase64String(part.data) : part.data
+        } : part
+      )
+    }))
+  );
+}
+
 // core/generate-object/output-strategy.ts
 import {
   isJSONArray,
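`stringifyForTelemetry` is internal (not exported); it base64-encodes `Uint8Array` file parts before serializing a prompt into span attributes, so telemetry never records raw byte arrays. A behavior sketch derived from the source above:

```ts
// Behavior sketch only; stringifyForTelemetry is not part of the public API.
const prompt = [
  {
    role: "user",
    content: [
      { type: "text", text: "Summarize this file" },
      { type: "file", mediaType: "application/pdf", data: new Uint8Array([1, 2, 3]) },
    ],
  },
];
// Serializing this prompt for the "ai.prompt.messages" attribute emits the
// file part with data: "AQID" (base64 of [1, 2, 3]) rather than {"0":1,"1":2,"2":3}.
```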
@@ -4389,11 +4462,17 @@ function validateObjectGenerationInput({
   }
 }
 
+// core/prompt/resolve-language-model.ts
+import { gateway } from "@ai-sdk/gateway";
+function resolveLanguageModel(model) {
+  return typeof model === "string" ? gateway.languageModel(model) : model;
+}
+
 // core/generate-object/generate-object.ts
 var originalGenerateId = createIdGenerator({ prefix: "aiobj", size: 24 });
 async function generateObject(options) {
   const {
-    model,
+    model: modelArg,
     output = "object",
     system,
     prompt,
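`resolveLanguageModel` lets `generateObject`, `streamObject`, `generateText`, and `streamText` accept a plain model id string, resolved through `@ai-sdk/gateway` via `gateway.languageModel(id)`; provider-created model instances pass through unchanged. A sketch; the model id is illustrative:

```ts
import { generateText } from "ai";

// A string model argument is now routed through the AI Gateway;
// a provider model instance still works exactly as before.
const { text } = await generateText({
  model: "openai/gpt-4o-mini", // illustrative gateway model id
  prompt: "Say hello.",
});
```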
@@ -4410,6 +4489,7 @@ async function generateObject(options) {
   } = {},
   ...settings
 } = options;
+const model = resolveLanguageModel(modelArg);
 const enumValues = "enum" in options ? options.enum : void 0;
 const {
   schema: inputSchema,
@@ -4488,7 +4568,7 @@ async function generateObject(options) {
   }),
   ...baseTelemetryAttributes,
   "ai.prompt.messages": {
-    input: () =>
+    input: () => stringifyForTelemetry(promptMessages)
   },
   // standardized gen-ai llm span attributes:
   "gen_ai.system": model.provider,
@@ -4869,7 +4949,7 @@ function streamObject(options) {
 }
 var DefaultStreamObjectResult = class {
   constructor({
-    model,
+    model: modelArg,
     headers,
     telemetry,
     settings,
@@ -4894,6 +4974,7 @@ var DefaultStreamObjectResult = class {
     this._warnings = new DelayedPromise();
     this._request = new DelayedPromise();
     this._response = new DelayedPromise();
+    const model = resolveLanguageModel(modelArg);
     const { maxRetries, retry } = prepareRetries({
       maxRetries: maxRetriesArg
     });
@@ -4990,7 +5071,7 @@ var DefaultStreamObjectResult = class {
   }),
   ...baseTelemetryAttributes,
   "ai.prompt.messages": {
-    input: () =>
+    input: () => stringifyForTelemetry(callOptions.prompt)
   },
   // standardized gen-ai llm span attributes:
   "gen_ai.system": model.provider,
@@ -5711,7 +5792,7 @@ var originalGenerateId3 = createIdGenerator3({
   size: 24
 });
 async function generateText({
-  model,
+  model: modelArg,
   tools,
   toolChoice,
   system,
@@ -5736,6 +5817,7 @@ async function generateText({
   onStepFinish,
   ...settings
 }) {
+  const model = resolveLanguageModel(modelArg);
   const stopConditions = asArray(stopWhen);
   const { maxRetries, retry } = prepareRetries({ maxRetries: maxRetriesArg });
   const callSettings = prepareCallSettings(settings);
@@ -5772,7 +5854,7 @@ async function generateText({
   }),
   tracer,
   fn: async (span) => {
-    var _a17, _b, _c, _d;
+    var _a17, _b, _c, _d, _e;
     const callSettings2 = prepareCallSettings(settings);
     let currentModelResponse;
     let currentToolCalls = [];
@@ -5791,16 +5873,18 @@ async function generateText({
     }));
     const promptMessages = await convertToLanguageModelPrompt({
       prompt: {
-        system: initialPrompt.system,
+        system: (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.system) != null ? _a17 : initialPrompt.system,
         messages: stepInputMessages
       },
       supportedUrls: await model.supportedUrls
     });
-    const stepModel = (
+    const stepModel = resolveLanguageModel(
+      (_b = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _b : model
+    );
     const { toolChoice: stepToolChoice, tools: stepTools } = prepareToolsAndToolChoice({
       tools,
-      toolChoice: (
-      activeTools: (
+      toolChoice: (_c = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _c : toolChoice,
+      activeTools: (_d = prepareStepResult == null ? void 0 : prepareStepResult.activeTools) != null ? _d : activeTools
     });
     currentModelResponse = await retry(
       () => {
@@ -5820,7 +5904,7 @@ async function generateText({
     "ai.model.id": stepModel.modelId,
     // prompt:
     "ai.prompt.messages": {
-      input: () =>
+      input: () => stringifyForTelemetry(promptMessages)
     },
     "ai.prompt.tools": {
       // convert the language model level tools:
@@ -5843,7 +5927,7 @@ async function generateText({
   }),
   tracer,
   fn: async (span2) => {
-    var _a19, _b2, _c2, _d2,
+    var _a19, _b2, _c2, _d2, _e2, _f, _g, _h;
     const result = await stepModel.doGenerate({
       ...callSettings2,
       tools: stepTools,
@@ -5857,7 +5941,7 @@ async function generateText({
     const responseData = {
       id: (_b2 = (_a19 = result.response) == null ? void 0 : _a19.id) != null ? _b2 : generateId3(),
       timestamp: (_d2 = (_c2 = result.response) == null ? void 0 : _c2.timestamp) != null ? _d2 : currentDate(),
-      modelId: (_f = (
+      modelId: (_f = (_e2 = result.response) == null ? void 0 : _e2.modelId) != null ? _f : stepModel.modelId,
       headers: (_g = result.response) == null ? void 0 : _g.headers,
       body: (_h = result.response) == null ? void 0 : _h.body
     };
@@ -5933,7 +6017,7 @@ async function generateText({
     usage: currentModelResponse.usage,
     warnings: currentModelResponse.warnings,
     providerMetadata: currentModelResponse.providerMetadata,
-    request: (
+    request: (_e = currentModelResponse.request) != null ? _e : {},
     response: {
       ...currentModelResponse.response,
       // deep clone msgs to avoid mutating past messages in multi-step:
@@ -6529,7 +6613,7 @@ function streamText({
   ...settings
 }) {
   return new DefaultStreamTextResult({
-    model,
+    model: resolveLanguageModel(model),
     telemetry,
     headers,
     settings,
@@ -6844,7 +6928,7 @@ var DefaultStreamTextResult = class {
     responseMessages,
     usage
   }) {
-    var _a17, _b, _c;
+    var _a17, _b, _c, _d;
     stepFinish = new DelayedPromise();
     const initialPrompt = await standardizePrompt({
       system,
@@ -6862,16 +6946,18 @@ var DefaultStreamTextResult = class {
     }));
     const promptMessages = await convertToLanguageModelPrompt({
       prompt: {
-        system: initialPrompt.system,
+        system: (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.system) != null ? _a17 : initialPrompt.system,
         messages: stepInputMessages
       },
       supportedUrls: await model.supportedUrls
     });
-    const stepModel = (
+    const stepModel = resolveLanguageModel(
+      (_b = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _b : model
+    );
     const { toolChoice: stepToolChoice, tools: stepTools } = prepareToolsAndToolChoice({
       tools,
-      toolChoice: (
-      activeTools: (
+      toolChoice: (_c = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _c : toolChoice,
+      activeTools: (_d = prepareStepResult == null ? void 0 : prepareStepResult.activeTools) != null ? _d : activeTools
     });
     const {
       result: { stream: stream2, response, request },
@@ -6893,7 +6979,7 @@ var DefaultStreamTextResult = class {
     "ai.model.id": stepModel.modelId,
     // prompt:
     "ai.prompt.messages": {
-      input: () =>
+      input: () => stringifyForTelemetry(promptMessages)
     },
     "ai.prompt.tools": {
       // convert the language model level tools:
@@ -6977,7 +7063,7 @@ var DefaultStreamTextResult = class {
     streamWithToolResults.pipeThrough(
       new TransformStream({
         async transform(chunk, controller) {
-          var _a18, _b2, _c2,
+          var _a18, _b2, _c2, _d2;
           if (chunk.type === "stream-start") {
             warnings = chunk.warnings;
             return;
@@ -7054,7 +7140,7 @@ var DefaultStreamTextResult = class {
     doStreamSpan.addEvent("ai.stream.finish");
     doStreamSpan.setAttributes({
       "ai.response.msToFinish": msToFinish,
-      "ai.response.avgOutputTokensPerSecond": 1e3 * ((
+      "ai.response.avgOutputTokensPerSecond": 1e3 * ((_d2 = stepUsage.outputTokens) != null ? _d2 : 0) / msToFinish
     });
     break;
   }
@@ -7304,14 +7390,14 @@ var DefaultStreamTextResult = class {
     messageMetadata,
     sendReasoning = false,
     sendSources = false,
-
-
+    sendStart = true,
+    sendFinish = true,
     onError = () => "An error occurred."
     // mask error messages for safety by default
   } = {}) {
     const lastMessage = originalMessages[originalMessages.length - 1];
     const isContinuation = (lastMessage == null ? void 0 : lastMessage.role) === "assistant";
-    const messageId = isContinuation ? lastMessage.id : newMessageId;
+    const messageId = isContinuation ? lastMessage.id : newMessageId != null ? newMessageId : this.generateId();
     const baseStream = this.fullStream.pipeThrough(
       new TransformStream({
         transform: async (part, controller) => {
@@ -7417,7 +7503,7 @@ var DefaultStreamTextResult = class {
     break;
   }
   case "start": {
-    if (
+    if (sendStart) {
       const metadata = messageMetadata == null ? void 0 : messageMetadata({ part });
       controller.enqueue({
         type: "start",
@@ -7428,7 +7514,7 @@ var DefaultStreamTextResult = class {
     break;
   }
   case "finish": {
-    if (
+    if (sendFinish) {
      const metadata = messageMetadata == null ? void 0 : messageMetadata({ part });
      controller.enqueue({
        type: "finish",
@@ -7445,38 +7531,12 @@ var DefaultStreamTextResult = class {
       }
     })
   );
-
-    return baseStream;
-  }
-  const state = createStreamingUIMessageState({
-    lastMessage,
-    newMessageId: messageId != null ? messageId : this.generateId()
-  });
-  const runUpdateMessageJob = async (job) => {
-    await job({ state, write: () => {
-    } });
-  };
-  return processUIMessageStream({
+    return handleUIMessageStreamFinish({
      stream: baseStream,
-
-
-
-
-      controller.enqueue(chunk);
-    },
-    flush() {
-      const isContinuation2 = state.message.id === (lastMessage == null ? void 0 : lastMessage.id);
-      onFinish({
-        isContinuation: isContinuation2,
-        responseMessage: state.message,
-        messages: [
-          ...isContinuation2 ? originalMessages.slice(0, -1) : originalMessages,
-          state.message
-        ]
-      });
-    }
-  })
-  );
+      newMessageId: messageId,
+      originalMessages,
+      onFinish
+    });
   }
   pipeUIMessageStreamToResponse(response, {
     newMessageId,
@@ -7485,8 +7545,8 @@ var DefaultStreamTextResult = class {
     messageMetadata,
     sendReasoning,
     sendSources,
-
-
+    sendFinish,
+    sendStart,
     onError,
     ...init
   } = {}) {
@@ -7499,8 +7559,8 @@ var DefaultStreamTextResult = class {
       messageMetadata,
       sendReasoning,
       sendSources,
-
-
+      sendFinish,
+      sendStart,
       onError
     }),
     ...init
@@ -7520,8 +7580,8 @@ var DefaultStreamTextResult = class {
     messageMetadata,
     sendReasoning,
     sendSources,
-
-
+    sendFinish,
+    sendStart,
     onError,
     ...init
   } = {}) {
@@ -7533,8 +7593,8 @@ var DefaultStreamTextResult = class {
       messageMetadata,
       sendReasoning,
       sendSources,
-
-
+      sendFinish,
+      sendStart,
       onError
     }),
     ...init
@@ -7777,7 +7837,9 @@ var doWrap = ({
 };
 
 // core/registry/custom-provider.ts
-import {
+import {
+  NoSuchModelError as NoSuchModelError2
+} from "@ai-sdk/provider";
 function customProvider({
   languageModels,
   textEmbeddingModels,
@@ -7842,7 +7904,9 @@ var NoSuchProviderError = class extends NoSuchModelError3 {
 _a16 = symbol16;
 
 // core/registry/provider-registry.ts
-import {
+import {
+  NoSuchModelError as NoSuchModelError4
+} from "@ai-sdk/provider";
 function createProviderRegistry(providers, {
   separator = ":"
 } = {}) {
@@ -8581,6 +8645,7 @@ export {
   NoSuchToolError,
   output_exports as Output,
   RetryError,
+  SerialJobExecutor,
   TextStreamChatTransport,
   ToolCallRepairError,
   ToolExecutionError,
@@ -8606,7 +8671,7 @@ export {
   createUIMessageStream,
   createUIMessageStreamResponse,
   customProvider,
-
+  defaultChatStoreOptions,
   defaultSettingsMiddleware,
   embed,
   embedMany,