ai 5.0.0-alpha.14 → 5.0.0-alpha.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +22 -0
- package/dist/index.d.mts +127 -119
- package/dist/index.d.ts +127 -119
- package/dist/index.js +225 -223
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +226 -226
- package/dist/index.mjs.map +1 -1
- package/package.json +4 -4
package/dist/index.mjs
CHANGED
```diff
@@ -756,103 +756,127 @@ async function convertFileListToFileUIParts(files) {
 }
 
 // src/ui/default-chat-transport.ts
-import {
-  parseJsonEventStream as parseJsonEventStream2
-} from "@ai-sdk/provider-utils";
+import { parseJsonEventStream as parseJsonEventStream2 } from "@ai-sdk/provider-utils";
+
+// src/ui/http-chat-transport.ts
 var getOriginalFetch2 = () => fetch;
-async function fetchUIMessageStream({
-  api,
-  body,
-  credentials,
-  headers,
-  abortSignal,
-  fetch: fetch2 = getOriginalFetch2(),
-  requestType = "generate"
-}) {
-  var _a17;
-  const response = requestType === "resume" ? await fetch2(`${api}?id=${body.id}`, {
-    method: "GET",
-    headers: {
-      "Content-Type": "application/json",
-      ...headers
-    },
-    signal: abortSignal,
-    credentials
-  }) : await fetch2(api, {
-    method: "POST",
-    body: JSON.stringify(body),
-    headers: {
-      "Content-Type": "application/json",
-      ...headers
-    },
-    signal: abortSignal,
-    credentials
-  });
-  if (!response.ok) {
-    throw new Error(
-      (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
-    );
-  }
-  if (!response.body) {
-    throw new Error("The response body is empty.");
-  }
-  return parseJsonEventStream2({
-    stream: response.body,
-    schema: uiMessageStreamPartSchema
-  }).pipeThrough(
-    new TransformStream({
-      async transform(part, controller) {
-        if (!part.success) {
-          throw part.error;
-        }
-        controller.enqueue(part.value);
-      }
-    })
-  );
-}
-var DefaultChatTransport = class {
+var HttpChatTransport = class {
   constructor({
     api = "/api/chat",
     credentials,
     headers,
     body,
-    fetch: fetch2,
-    prepareRequest
-  }) {
+    fetch: fetch2 = getOriginalFetch2(),
+    prepareSendMessagesRequest,
+    prepareReconnectToStreamRequest
+  }) {
    this.api = api;
     this.credentials = credentials;
     this.headers = headers;
     this.body = body;
     this.fetch = fetch2;
-    this.prepareRequest = prepareRequest;
+    this.prepareSendMessagesRequest = prepareSendMessagesRequest;
+    this.prepareReconnectToStreamRequest = prepareReconnectToStreamRequest;
   }
-  async submitMessages({
-    chatId,
-    messages,
+  async sendMessages({
     abortSignal,
-    metadata,
-    headers,
-    body,
-    requestType
+    ...options
   }) {
-    var _a17, _b;
-    const preparedRequest = (_a17 = this.prepareRequest) == null ? void 0 : _a17.call(this, {
-      id: chatId,
-      messages,
-      body: { ...this.body, ...body },
-      headers: { ...this.headers, ...headers },
+    var _a17, _b, _c, _d;
+    const preparedRequest = (_a17 = this.prepareSendMessagesRequest) == null ? void 0 : _a17.call(this, {
+      api: this.api,
+      id: options.chatId,
+      messages: options.messages,
+      body: { ...this.body, ...options.body },
+      headers: { ...this.headers, ...options.headers },
       credentials: this.credentials,
-      requestMetadata: metadata
+      requestMetadata: options.metadata,
+      trigger: options.trigger,
+      messageId: options.messageId
     });
-    return fetchUIMessageStream({
+    const api = (_b = preparedRequest == null ? void 0 : preparedRequest.api) != null ? _b : this.api;
+    const headers = (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? preparedRequest.headers : { ...this.headers, ...options.headers };
+    const body = (preparedRequest == null ? void 0 : preparedRequest.body) !== void 0 ? preparedRequest.body : {
+      ...this.body,
+      ...options.body,
+      id: options.chatId,
+      messages: options.messages,
+      trigger: options.trigger,
+      messageId: options.messageId
+    };
+    const credentials = (_c = preparedRequest == null ? void 0 : preparedRequest.credentials) != null ? _c : this.credentials;
+    const response = await fetch(api, {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json",
+        ...headers
+      },
+      body: JSON.stringify(body),
+      credentials,
+      signal: abortSignal
+    });
+    if (!response.ok) {
+      throw new Error(
+        (_d = await response.text()) != null ? _d : "Failed to fetch the chat response."
+      );
+    }
+    if (!response.body) {
+      throw new Error("The response body is empty.");
+    }
+    return this.processResponseStream(response.body);
+  }
+  async reconnectToStream(options) {
+    var _a17, _b, _c, _d;
+    const preparedRequest = (_a17 = this.prepareReconnectToStreamRequest) == null ? void 0 : _a17.call(this, {
       api: this.api,
-      body: (preparedRequest == null ? void 0 : preparedRequest.body) !== void 0 ? preparedRequest.body : { ...this.body, ...body },
-      headers: (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? preparedRequest.headers : { ...this.headers, ...headers },
-      credentials: (_b = preparedRequest == null ? void 0 : preparedRequest.credentials) != null ? _b : this.credentials,
-      abortSignal,
-      fetch: this.fetch,
-      requestType
+      id: options.chatId,
+      body: { ...this.body, ...options.body },
+      headers: { ...this.headers, ...options.headers },
+      credentials: this.credentials,
+      requestMetadata: options.metadata
+    });
+    const api = (_b = preparedRequest == null ? void 0 : preparedRequest.api) != null ? _b : `${this.api}/${options.chatId}/stream`;
+    const headers = (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? preparedRequest.headers : { ...this.headers, ...options.headers };
+    const credentials = (_c = preparedRequest == null ? void 0 : preparedRequest.credentials) != null ? _c : this.credentials;
+    const response = await fetch(api, {
+      method: "GET",
+      headers,
+      credentials
     });
+    if (response.status === 204) {
+      return null;
+    }
+    if (!response.ok) {
+      throw new Error(
+        (_d = await response.text()) != null ? _d : "Failed to fetch the chat response."
+      );
+    }
+    if (!response.body) {
+      throw new Error("The response body is empty.");
+    }
+    return this.processResponseStream(response.body);
+  }
+};
+
+// src/ui/default-chat-transport.ts
+var DefaultChatTransport = class extends HttpChatTransport {
+  constructor(options = {}) {
+    super(options);
+  }
+  processResponseStream(stream) {
+    return parseJsonEventStream2({
+      stream,
+      schema: uiMessageStreamPartSchema
+    }).pipeThrough(
+      new TransformStream({
+        async transform(part, controller) {
+          if (!part.success) {
+            throw part.error;
+          }
+          controller.enqueue(part.value);
+        }
+      })
+    );
   }
 };
 
```
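This hunk extracts the HTTP plumbing of `DefaultChatTransport` into a shared `HttpChatTransport` base class: the subclass only supplies `processResponseStream`, the old `requestType`-style `submitMessages` becomes `sendMessages` plus a separate `reconnectToStream`, and the single `prepareRequest` hook splits into `prepareSendMessagesRequest` and `prepareReconnectToStreamRequest`. A minimal usage sketch, assuming the option and callback-field names visible above are the public surface (the `.d.ts` changes are not shown here):

```ts
import { DefaultChatTransport } from "ai";

// Option names and callback fields are read off the hunk above; the exact
// TypeScript types are an assumption for illustration.
const transport = new DefaultChatTransport({
  api: "/api/chat",
  headers: { "x-app-version": "5.0.0-alpha.15" },
  // Runs before every POST; returned fields override the default request.
  prepareSendMessagesRequest: ({ id, messages, trigger, messageId }) => ({
    body: { id, trigger, messageId, messages: messages.slice(-10) },
  }),
  // Runs before reconnecting; the default GET targets `${api}/${chatId}/stream`.
  prepareReconnectToStreamRequest: ({ id }) => ({
    api: `/api/chat/${id}/stream`,
  }),
});
```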
```diff
@@ -1556,22 +1580,14 @@ var AbstractChat = class {
   }) {
     this.activeResponse = void 0;
     this.jobExecutor = new SerialJobExecutor();
-    this.removeAssistantResponse = () => {
-      const lastMessage = this.state.messages[this.state.messages.length - 1];
-      if (lastMessage == null) {
-        throw new Error("Cannot remove assistant response from empty chat");
-      }
-      if (lastMessage.role !== "assistant") {
-        throw new Error("Last message is not an assistant message");
-      }
-      this.state.popMessage();
-    };
     /**
-     * Appends a new user message to the chat list. This triggers the API call to fetch
+     * Appends or replaces a user message to the chat list. This triggers the API call to fetch
      * the assistant's response.
+     *
+     * If a messageId is provided, the message will be replaced.
      */
     this.sendMessage = async (message, options = {}) => {
-      var _a17, _b;
+      var _a17, _b, _c;
       let uiMessage;
       if ("text" in message || "files" in message) {
         const fileParts = Array.isArray(message.files) ? message.files : await convertFileListToFileUIParts(message.files);
```
```diff
@@ -1584,30 +1600,65 @@ var AbstractChat = class {
       } else {
         uiMessage = message;
       }
-      this.state.pushMessage({
-        ...uiMessage,
-        id: (_a17 = uiMessage.id) != null ? _a17 : this.generateId(),
-        role: (_b = uiMessage.role) != null ? _b : "user"
+      if (message.messageId != null) {
+        const messageIndex = this.state.messages.findIndex(
+          (m) => m.id === message.messageId
+        );
+        if (messageIndex === -1) {
+          throw new Error(`message with id ${message.messageId} not found`);
+        }
+        if (this.state.messages[messageIndex].role !== "user") {
+          throw new Error(
+            `message with id ${message.messageId} is not a user message`
+          );
+        }
+        this.state.messages = this.state.messages.slice(0, messageIndex + 1);
+        this.state.replaceMessage(messageIndex, {
+          ...uiMessage,
+          id: message.messageId,
+          role: (_a17 = uiMessage.role) != null ? _a17 : "user"
+        });
+      } else {
+        this.state.pushMessage({
+          ...uiMessage,
+          id: (_b = uiMessage.id) != null ? _b : this.generateId(),
+          role: (_c = uiMessage.role) != null ? _c : "user"
+        });
+      }
+      await this.makeRequest({
+        trigger: "submit-user-message",
+        messageId: message.messageId,
+        ...options
       });
-      await this.triggerRequest({ requestType: "generate", ...options });
     };
     /**
-     * Regenerate the last assistant message.
+     * Regenerate the assistant message with the provided message id.
+     * If no message id is provided, the last assistant message will be regenerated.
      */
-    this.regenerate = async (options = {}) => {
-      if (this.lastMessage === void 0) {
-        return;
+    this.regenerate = async ({
+      messageId,
+      ...options
+    } = {}) => {
+      const messageIndex = messageId == null ? this.state.messages.length - 1 : this.state.messages.findIndex((message) => message.id === messageId);
+      if (messageIndex === -1) {
+        throw new Error(`message ${messageId} not found`);
       }
-      if (this.lastMessage.role === "assistant") {
-        this.state.popMessage();
-      }
-      await this.triggerRequest({ requestType: "generate", ...options });
+      this.state.messages = this.state.messages.slice(
+        0,
+        // if the message is a user message, we need to include it in the request:
+        this.messages[messageIndex].role === "assistant" ? messageIndex : messageIndex + 1
+      );
+      await this.makeRequest({
+        trigger: "regenerate-assistant-message",
+        messageId,
+        ...options
+      });
     };
     /**
-     *
+     * Attempt to resume an ongoing streaming response.
      */
-    this.resumeStream = async (options = {}) => {
-      await this.triggerRequest({ requestType: "resume", ...options });
+    this.resumeStream = async (options = {}) => {
+      await this.makeRequest({ trigger: "resume-stream", ...options });
     };
     this.addToolResult = async ({
       toolCallId,
```
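This hunk replaces the old `requestType`-based `triggerRequest` calls with named triggers: `sendMessage` can now replace an existing user message via `messageId` (truncating everything after it), `regenerate` accepts an optional `messageId`, and `resumeStream` maps to the `"resume-stream"` trigger. A usage sketch, where `chat` stands for any concrete `AbstractChat` subclass and the method shapes are read off the hunk rather than the package's type declarations:

```ts
declare const chat: {
  sendMessage(message: { text?: string; messageId?: string }, options?: object): Promise<void>;
  regenerate(options?: { messageId?: string }): Promise<void>;
  resumeStream(options?: object): Promise<void>;
};

// Append a new user message ("submit-user-message" trigger):
await chat.sendMessage({ text: "Hello!" });

// Replace an existing user message and drop everything after it:
await chat.sendMessage({ text: "Hello again!", messageId: "msg-123" });

// Regenerate a specific assistant message, or the last one when omitted:
await chat.regenerate({ messageId: "msg-456" });
await chat.regenerate();

// Try to reattach to a response stream that is still running server-side:
await chat.resumeStream();
```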
```diff
@@ -1625,8 +1676,8 @@ var AbstractChat = class {
     }
     const lastMessage = this.lastMessage;
     if (isAssistantMessageWithCompletedToolCalls(lastMessage)) {
-      this.triggerRequest({
-        requestType: "generate"
+      this.makeRequest({
+        trigger: "submit-tool-result"
       });
     }
   });
```
```diff
@@ -1685,11 +1736,12 @@ var AbstractChat = class {
   set messages(messages) {
     this.state.messages = messages;
   }
-  async triggerRequest({
-    requestType,
+  async makeRequest({
+    trigger,
     metadata,
     headers,
-    body
+    body,
+    messageId
   }) {
     var _a17, _b;
     this.setStatus({ status: "submitted", error: void 0 });
```
```diff
@@ -1705,15 +1757,30 @@ var AbstractChat = class {
       abortController: new AbortController()
     };
     this.activeResponse = activeResponse;
-    const stream = await this.transport.submitMessages({
-      chatId: this.id,
-      messages: this.state.messages,
-      abortSignal: activeResponse.abortController.signal,
-      metadata,
-      headers,
-      body,
-      requestType
-    });
+    let stream;
+    if (trigger === "resume-stream") {
+      const reconnect = await this.transport.reconnectToStream({
+        chatId: this.id,
+        metadata,
+        headers,
+        body
+      });
+      if (reconnect == null) {
+        return;
+      }
+      stream = reconnect;
+    } else {
+      stream = await this.transport.sendMessages({
+        chatId: this.id,
+        messages: this.state.messages,
+        abortSignal: activeResponse.abortController.signal,
+        metadata,
+        headers,
+        body,
+        trigger,
+        messageId
+      });
+    }
     const runUpdateMessageJob = (job) => (
       // serialize the job execution to avoid race conditions:
       this.jobExecutor.run(
```
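`makeRequest` now dispatches on the trigger: `"resume-stream"` goes through `transport.reconnectToStream`, which may resolve to `null` (nothing to resume, mirroring the 204 handling in `HttpChatTransport`), and everything else goes through `transport.sendMessages`. A hypothetical minimal transport illustrating that contract; the method names and a subset of the option fields come from the hunk, the types are assumptions:

```ts
const loggingTransport = {
  async sendMessages(options: {
    chatId: string;
    messages: unknown[];
    abortSignal: AbortSignal;
    trigger: string;
    messageId?: string;
  }): Promise<ReadableStream<unknown>> {
    console.log(`send: trigger=${options.trigger}, chat=${options.chatId}`);
    return new ReadableStream(); // placeholder for a UI message part stream
  },
  async reconnectToStream(options: {
    chatId: string;
  }): Promise<ReadableStream<unknown> | null> {
    // null tells makeRequest there is no active stream; it returns quietly.
    return null;
  },
};
```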
```diff
@@ -1768,11 +1835,12 @@ var AbstractChat = class {
         maxSteps: this.maxSteps,
         messages: this.state.messages
       })) {
-        await this.triggerRequest({
-          requestType,
+        await this.makeRequest({
           metadata,
           headers,
-          body
+          body,
+          // secondary requests are triggered by automatic tool execution
+          trigger: "submit-tool-result"
         });
       }
     }
```
```diff
@@ -1949,89 +2017,13 @@ function transformTextToUiMessageStream({
 }
 
 // src/ui/text-stream-chat-transport.ts
-var getOriginalFetch3 = () => fetch;
-async function fetchTextStream({
-  api,
-  body,
-  credentials,
-  headers,
-  abortSignal,
-  fetch: fetch2 = getOriginalFetch3(),
-  requestType = "generate"
-}) {
-  var _a17;
-  const response = requestType === "resume" ? await fetch2(`${api}?chatId=${body.chatId}`, {
-    method: "GET",
-    headers: {
-      "Content-Type": "application/json",
-      ...headers
-    },
-    signal: abortSignal,
-    credentials
-  }) : await fetch2(api, {
-    method: "POST",
-    body: JSON.stringify(body),
-    headers: {
-      "Content-Type": "application/json",
-      ...headers
-    },
-    signal: abortSignal,
-    credentials
-  });
-  if (!response.ok) {
-    throw new Error(
-      (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
-    );
-  }
-  if (!response.body) {
-    throw new Error("The response body is empty.");
-  }
-  return transformTextToUiMessageStream({
-    stream: response.body.pipeThrough(new TextDecoderStream())
-  });
-}
-var TextStreamChatTransport = class {
-  constructor({
-    api,
-    credentials,
-    headers,
-    body,
-    fetch: fetch2,
-    prepareRequest
-  }) {
-    this.api = api;
-    this.credentials = credentials;
-    this.headers = headers;
-    this.body = body;
-    this.fetch = fetch2;
-    this.prepareRequest = prepareRequest;
+var TextStreamChatTransport = class extends HttpChatTransport {
+  constructor(options = {}) {
+    super(options);
   }
-  async submitMessages({
-    chatId,
-    messages,
-    abortSignal,
-    metadata,
-    headers,
-    body,
-    requestType
-  }) {
-    var _a17, _b;
-    const preparedRequest = (_a17 = this.prepareRequest) == null ? void 0 : _a17.call(this, {
-      id: chatId,
-      messages,
-      body: { ...this.body, ...body },
-      headers: { ...this.headers, ...headers },
-      credentials: this.credentials,
-      requestMetadata: metadata
-    });
-    return fetchTextStream({
-      api: this.api,
-      body: (preparedRequest == null ? void 0 : preparedRequest.body) !== void 0 ? preparedRequest.body : { ...this.body, ...body },
-      headers: (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? preparedRequest.headers : { ...this.headers, ...headers },
-      credentials: (_b = preparedRequest == null ? void 0 : preparedRequest.credentials) != null ? _b : this.credentials,
-      abortSignal,
-      fetch: this.fetch,
-      requestType
+  processResponseStream(stream) {
+    return transformTextToUiMessageStream({
+      stream: stream.pipeThrough(new TextDecoderStream())
     });
   }
 };
```
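With the shared base class in place, `TextStreamChatTransport` shrinks to a `processResponseStream` override that decodes plain text instead of parsing JSON SSE parts. It now takes the common `HttpChatTransport` constructor options; a sketch, assuming the options object shown above is the public surface:

```ts
import { TextStreamChatTransport } from "ai";

const transport = new TextStreamChatTransport({
  api: "/api/completion",
  headers: { authorization: "Bearer <token>" },
});
```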
```diff
@@ -2198,7 +2190,7 @@ var JsonToSseTransformStream = class extends TransformStream {
 };
 
 // src/ui-message-stream/ui-message-stream-headers.ts
-var uiMessageStreamHeaders = {
+var UI_MESSAGE_STREAM_HEADERS = {
   "content-type": "text/event-stream",
   "cache-control": "no-cache",
   connection: "keep-alive",
```
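The header map is renamed to `UI_MESSAGE_STREAM_HEADERS` and, per the export-list hunk at the end of this diff, becomes a public export, so hand-rolled endpoints can reuse it. A sketch:

```ts
import { UI_MESSAGE_STREAM_HEADERS } from "ai";

declare const sseBody: ReadableStream<Uint8Array>; // an already-encoded SSE body

// Serve a custom SSE response with the same headers the SDK uses:
const response = new Response(sseBody, { headers: UI_MESSAGE_STREAM_HEADERS });
```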
```diff
@@ -2212,16 +2204,20 @@ function createUIMessageStreamResponse({
   status,
   statusText,
   headers,
-  stream
+  stream,
+  consumeSseStream
 }) {
-  return new Response(
-    stream.pipeThrough(new JsonToSseTransformStream()).pipeThrough(new TextEncoderStream()),
-    {
-      status,
-      statusText,
-      headers: prepareHeaders(headers, uiMessageStreamHeaders)
-    }
-  );
+  let sseStream = stream.pipeThrough(new JsonToSseTransformStream());
+  if (consumeSseStream) {
+    const [stream1, stream2] = sseStream.tee();
+    sseStream = stream1;
+    consumeSseStream({ stream: stream2 });
+  }
+  return new Response(sseStream.pipeThrough(new TextEncoderStream()), {
+    status,
+    statusText,
+    headers: prepareHeaders(headers, UI_MESSAGE_STREAM_HEADERS)
+  });
 }
 
 // src/ui-message-stream/pipe-ui-message-stream-to-response.ts
```
```diff
@@ -2230,16 +2226,23 @@ function pipeUIMessageStreamToResponse({
   status,
   statusText,
   headers,
-  stream
+  stream,
+  consumeSseStream
 }) {
+  let sseStream = stream.pipeThrough(new JsonToSseTransformStream());
+  if (consumeSseStream) {
+    const [stream1, stream2] = sseStream.tee();
+    sseStream = stream1;
+    consumeSseStream({ stream: stream2 });
+  }
   writeToServerResponse({
     response,
     status,
     statusText,
     headers: Object.fromEntries(
-      prepareHeaders(headers, uiMessageStreamHeaders).entries()
+      prepareHeaders(headers, UI_MESSAGE_STREAM_HEADERS).entries()
     ),
-    stream: stream.pipeThrough(new JsonToSseTransformStream()).pipeThrough(new TextEncoderStream())
+    stream: sseStream.pipeThrough(new TextEncoderStream())
   });
 }
 
```
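Both stream helpers gain a `consumeSseStream` callback that receives one branch of `sseStream.tee()` before encoding, which is the natural hook for persisting a response so a client can later reattach via `reconnectToStream`. A sketch, where `saveStreamToStore` is a hypothetical persistence helper (not part of the package):

```ts
import { createUIMessageStreamResponse } from "ai";

declare function saveStreamToStore(chatId: string, stream: ReadableStream<string>): void;
declare const chatId: string;
declare const uiMessageStream: ReadableStream<unknown>; // UI message stream parts

const response = createUIMessageStreamResponse({
  stream: uiMessageStream,
  // Receives the second branch of the tee; runs concurrently with the response.
  consumeSseStream: ({ stream }) => {
    saveStreamToStore(chatId, stream);
  },
});
```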
```diff
@@ -3659,14 +3662,11 @@ function prepareCallSettings({
 
 // core/prompt/resolve-language-model.ts
 import { gateway } from "@ai-sdk/gateway";
-var GLOBAL_DEFAULT_PROVIDER = Symbol(
-  "vercel.ai.global.defaultProvider"
-);
 function resolveLanguageModel(model) {
   if (typeof model !== "string") {
     return model;
   }
-  const globalProvider = globalThis[GLOBAL_DEFAULT_PROVIDER];
+  const globalProvider = globalThis.AI_SDK_DEFAULT_PROVIDER;
   return (globalProvider != null ? globalProvider : gateway).languageModel(model);
 }
 
```
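The `GLOBAL_DEFAULT_PROVIDER` symbol (and its export, removed in the export-list hunk below) is replaced by a plain `globalThis.AI_SDK_DEFAULT_PROVIDER` property. A sketch of installing a global default, assuming any provider object that exposes `languageModel(modelId)`:

```ts
declare const myProvider: { languageModel(modelId: string): unknown };

// resolveLanguageModel falls back to the gateway when this is unset:
(globalThis as { AI_SDK_DEFAULT_PROVIDER?: unknown }).AI_SDK_DEFAULT_PROVIDER = myProvider;

// From here on, string model ids resolve through myProvider:
// resolveLanguageModel("gpt-4o") -> myProvider.languageModel("gpt-4o")
```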
```diff
@@ -7906,7 +7906,7 @@ function tool(tool2) {
 }
 
 // core/tool/mcp/mcp-sse-transport.ts
-import {
+import { EventSourceParserStream } from "@ai-sdk/provider-utils";
 
 // core/tool/mcp/json-rpc-message.ts
 import { z as z10 } from "zod";
```
```diff
@@ -8077,7 +8077,7 @@ var SseMCPTransport = class {
         (_b = this.onerror) == null ? void 0 : _b.call(this, error);
         return reject(error);
       }
-      const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough(
+      const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough(new EventSourceParserStream());
       const reader = stream.getReader();
       const processEvents = async () => {
         var _a18, _b2, _c2;
```
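`EventSourceParserStream` is now imported from `@ai-sdk/provider-utils` (the old import source is truncated in this view). The parsing pipeline keeps the same shape: bytes to text to parsed SSE events. A sketch:

```ts
import { EventSourceParserStream } from "@ai-sdk/provider-utils";

declare const response: Response; // an SSE response with a non-null body

const events = response.body!
  .pipeThrough(new TextDecoderStream())
  .pipeThrough(new EventSourceParserStream());

// Each chunk read from `events` is a parsed server-sent event (with a `data` string).
const reader = events.getReader();
```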
```diff
@@ -8533,7 +8533,6 @@ export {
   DefaultChatTransport,
   DownloadError,
   EmptyResponseBodyError,
-  GLOBAL_DEFAULT_PROVIDER,
   InvalidArgumentError,
   InvalidDataContentError,
   InvalidMessageRoleError,
```
```diff
@@ -8560,6 +8559,7 @@ export {
   ToolCallRepairError,
   ToolExecutionError,
   TypeValidationError,
+  UI_MESSAGE_STREAM_HEADERS,
   UnsupportedFunctionalityError,
   asSchema5 as asSchema,
   assistantModelMessageSchema,
```