ai 5.0.0-alpha.14 → 5.0.0-alpha.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +22 -0
- package/dist/index.d.mts +127 -119
- package/dist/index.d.ts +127 -119
- package/dist/index.js +225 -223
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +226 -226
- package/dist/index.mjs.map +1 -1
- package/package.json +4 -4
package/dist/index.js
CHANGED
@@ -26,7 +26,6 @@ __export(src_exports, {
   DefaultChatTransport: () => DefaultChatTransport,
   DownloadError: () => DownloadError,
   EmptyResponseBodyError: () => import_provider16.EmptyResponseBodyError,
-  GLOBAL_DEFAULT_PROVIDER: () => GLOBAL_DEFAULT_PROVIDER,
   InvalidArgumentError: () => InvalidArgumentError,
   InvalidDataContentError: () => InvalidDataContentError,
   InvalidMessageRoleError: () => InvalidMessageRoleError,
@@ -53,6 +52,7 @@ __export(src_exports, {
   ToolCallRepairError: () => ToolCallRepairError,
   ToolExecutionError: () => ToolExecutionError,
   TypeValidationError: () => import_provider16.TypeValidationError,
+  UI_MESSAGE_STREAM_HEADERS: () => UI_MESSAGE_STREAM_HEADERS,
   UnsupportedFunctionalityError: () => import_provider16.UnsupportedFunctionalityError,
   asSchema: () => import_provider_utils26.asSchema,
   assistantModelMessageSchema: () => assistantModelMessageSchema,
@@ -841,100 +841,126 @@ async function convertFileListToFileUIParts(files) {
 
 // src/ui/default-chat-transport.ts
 var import_provider_utils2 = require("@ai-sdk/provider-utils");
+
+// src/ui/http-chat-transport.ts
 var getOriginalFetch2 = () => fetch;
-async function fetchUIMessageStream({
-  api,
-  body,
-  credentials,
-  headers,
-  abortSignal,
-  fetch: fetch2 = getOriginalFetch2(),
-  requestType = "generate"
-}) {
-  var _a17;
-  const response = requestType === "resume" ? await fetch2(`${api}?id=${body.id}`, {
-    method: "GET",
-    headers: {
-      "Content-Type": "application/json",
-      ...headers
-    },
-    signal: abortSignal,
-    credentials
-  }) : await fetch2(api, {
-    method: "POST",
-    body: JSON.stringify(body),
-    headers: {
-      "Content-Type": "application/json",
-      ...headers
-    },
-    signal: abortSignal,
-    credentials
-  });
-  if (!response.ok) {
-    throw new Error(
-      (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
-    );
-  }
-  if (!response.body) {
-    throw new Error("The response body is empty.");
-  }
-  return (0, import_provider_utils2.parseJsonEventStream)({
-    stream: response.body,
-    schema: uiMessageStreamPartSchema
-  }).pipeThrough(
-    new TransformStream({
-      async transform(part, controller) {
-        if (!part.success) {
-          throw part.error;
-        }
-        controller.enqueue(part.value);
-      }
-    })
-  );
-}
-var DefaultChatTransport = class {
+var HttpChatTransport = class {
   constructor({
     api = "/api/chat",
     credentials,
     headers,
     body,
-    fetch: fetch2,
-    prepareRequest
-  }) {
+    fetch: fetch2 = getOriginalFetch2(),
+    prepareSendMessagesRequest,
+    prepareReconnectToStreamRequest
+  }) {
     this.api = api;
     this.credentials = credentials;
     this.headers = headers;
     this.body = body;
    this.fetch = fetch2;
-    this.prepareRequest = prepareRequest;
+    this.prepareSendMessagesRequest = prepareSendMessagesRequest;
+    this.prepareReconnectToStreamRequest = prepareReconnectToStreamRequest;
   }
-  submitMessages({
-    chatId,
-    messages,
+  async sendMessages({
     abortSignal,
-    metadata,
-    headers,
-    body,
-    requestType
+    ...options
   }) {
-    var _a17, _b;
-    const preparedRequest = (_a17 = this.prepareRequest) == null ? void 0 : _a17.call(this, {
-      id: chatId,
-      messages,
-      body: { ...this.body, ...body },
-      headers: { ...this.headers, ...headers },
+    var _a17, _b, _c, _d;
+    const preparedRequest = (_a17 = this.prepareSendMessagesRequest) == null ? void 0 : _a17.call(this, {
+      api: this.api,
+      id: options.chatId,
+      messages: options.messages,
+      body: { ...this.body, ...options.body },
+      headers: { ...this.headers, ...options.headers },
       credentials: this.credentials,
-      requestMetadata: metadata
+      requestMetadata: options.metadata,
+      trigger: options.trigger,
+      messageId: options.messageId
+    });
+    const api = (_b = preparedRequest == null ? void 0 : preparedRequest.api) != null ? _b : this.api;
+    const headers = (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? preparedRequest.headers : { ...this.headers, ...options.headers };
+    const body = (preparedRequest == null ? void 0 : preparedRequest.body) !== void 0 ? preparedRequest.body : {
+      ...this.body,
+      ...options.body,
+      id: options.chatId,
+      messages: options.messages,
+      trigger: options.trigger,
+      messageId: options.messageId
+    };
+    const credentials = (_c = preparedRequest == null ? void 0 : preparedRequest.credentials) != null ? _c : this.credentials;
+    const response = await fetch(api, {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json",
+        ...headers
+      },
+      body: JSON.stringify(body),
+      credentials,
+      signal: abortSignal
     });
-    return fetchUIMessageStream({
+    if (!response.ok) {
+      throw new Error(
+        (_d = await response.text()) != null ? _d : "Failed to fetch the chat response."
+      );
+    }
+    if (!response.body) {
+      throw new Error("The response body is empty.");
+    }
+    return this.processResponseStream(response.body);
+  }
+  async reconnectToStream(options) {
+    var _a17, _b, _c, _d;
+    const preparedRequest = (_a17 = this.prepareReconnectToStreamRequest) == null ? void 0 : _a17.call(this, {
       api: this.api,
-      body: (preparedRequest == null ? void 0 : preparedRequest.body) !== void 0 ? preparedRequest.body : { ...this.body, ...body },
-      headers: (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? preparedRequest.headers : { ...this.headers, ...headers },
-      credentials: (_b = preparedRequest == null ? void 0 : preparedRequest.credentials) != null ? _b : this.credentials,
-      abortSignal,
-      fetch: this.fetch,
-      requestType
+      id: options.chatId,
+      body: { ...this.body, ...options.body },
+      headers: { ...this.headers, ...options.headers },
+      credentials: this.credentials,
+      requestMetadata: options.metadata
     });
+    const api = (_b = preparedRequest == null ? void 0 : preparedRequest.api) != null ? _b : `${this.api}/${options.chatId}/stream`;
+    const headers = (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? preparedRequest.headers : { ...this.headers, ...options.headers };
+    const credentials = (_c = preparedRequest == null ? void 0 : preparedRequest.credentials) != null ? _c : this.credentials;
+    const response = await fetch(api, {
+      method: "GET",
+      headers,
+      credentials
+    });
+    if (response.status === 204) {
+      return null;
+    }
+    if (!response.ok) {
+      throw new Error(
+        (_d = await response.text()) != null ? _d : "Failed to fetch the chat response."
+      );
+    }
+    if (!response.body) {
+      throw new Error("The response body is empty.");
+    }
+    return this.processResponseStream(response.body);
+  }
+};
+
+// src/ui/default-chat-transport.ts
+var DefaultChatTransport = class extends HttpChatTransport {
+  constructor(options = {}) {
+    super(options);
+  }
+  processResponseStream(stream) {
+    return (0, import_provider_utils2.parseJsonEventStream)({
+      stream,
+      schema: uiMessageStreamPartSchema
+    }).pipeThrough(
+      new TransformStream({
+        async transform(part, controller) {
+          if (!part.success) {
+            throw part.error;
+          }
+          controller.enqueue(part.value);
+        }
+      })
+    );
   }
 };
 
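Net effect of this hunk: the one-off fetchUIMessageStream helper becomes an HttpChatTransport base class whose sendMessages/reconnectToStream methods hand request construction to the new optional prepareSendMessagesRequest and prepareReconnectToStreamRequest callbacks. A minimal TypeScript sketch of the send-side hook; the callback fields and return shape come from the added code above, while the endpoint and auth header are illustrative assumptions:

import { DefaultChatTransport } from 'ai';

const transport = new DefaultChatTransport({
  api: '/api/chat', // the default, shown for clarity
  prepareSendMessagesRequest({ id, messages, trigger, messageId, headers }) {
    return {
      // fields that are not returned fall back to the transport-level defaults
      headers: { ...headers, Authorization: 'Bearer <token>' }, // hypothetical auth header
      // send only the latest message; assumes the server keeps the history
      body: { id, message: messages[messages.length - 1], trigger, messageId },
    };
  },
});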
@@ -1636,22 +1662,14 @@ var AbstractChat = class {
   }) {
     this.activeResponse = void 0;
     this.jobExecutor = new SerialJobExecutor();
-    this.removeAssistantResponse = () => {
-      const lastMessage = this.state.messages[this.state.messages.length - 1];
-      if (lastMessage == null) {
-        throw new Error("Cannot remove assistant response from empty chat");
-      }
-      if (lastMessage.role !== "assistant") {
-        throw new Error("Last message is not an assistant message");
-      }
-      this.state.popMessage();
-    };
     /**
-     *
+     * Appends or replaces a user message to the chat list. This triggers the API call to fetch
      * the assistant's response.
+     *
+     * If a messageId is provided, the message will be replaced.
      */
     this.sendMessage = async (message, options = {}) => {
-      var _a17, _b;
+      var _a17, _b, _c;
      let uiMessage;
       if ("text" in message || "files" in message) {
         const fileParts = Array.isArray(message.files) ? message.files : await convertFileListToFileUIParts(message.files);
@@ -1664,30 +1682,65 @@ var AbstractChat = class {
       } else {
         uiMessage = message;
       }
-      this.state.pushMessage({
-        ...uiMessage,
-        id: (_a17 = uiMessage.id) != null ? _a17 : this.generateId(),
-        role: (_b = uiMessage.role) != null ? _b : "user"
+      if (message.messageId != null) {
+        const messageIndex = this.state.messages.findIndex(
+          (m) => m.id === message.messageId
+        );
+        if (messageIndex === -1) {
+          throw new Error(`message with id ${message.messageId} not found`);
+        }
+        if (this.state.messages[messageIndex].role !== "user") {
+          throw new Error(
+            `message with id ${message.messageId} is not a user message`
+          );
+        }
+        this.state.messages = this.state.messages.slice(0, messageIndex + 1);
+        this.state.replaceMessage(messageIndex, {
+          ...uiMessage,
+          id: message.messageId,
+          role: (_a17 = uiMessage.role) != null ? _a17 : "user"
+        });
+      } else {
+        this.state.pushMessage({
+          ...uiMessage,
+          id: (_b = uiMessage.id) != null ? _b : this.generateId(),
+          role: (_c = uiMessage.role) != null ? _c : "user"
+        });
+      }
+      await this.makeRequest({
+        trigger: "submit-user-message",
+        messageId: message.messageId,
+        ...options
       });
-      await this.triggerRequest({ requestType: "generate", ...options });
     };
     /**
-     * Regenerate the
+     * Regenerate the assistant message with the provided message id.
+     * If no message id is provided, the last assistant message will be regenerated.
      */
-    this.
-
-
-
-
-
+    this.regenerate = async ({
+      messageId,
+      ...options
+    } = {}) => {
+      const messageIndex = messageId == null ? this.state.messages.length - 1 : this.state.messages.findIndex((message) => message.id === messageId);
+      if (messageIndex === -1) {
+        throw new Error(`message ${messageId} not found`);
       }
-
+      this.state.messages = this.state.messages.slice(
+        0,
+        // if the message is a user message, we need to include it in the request:
+        this.messages[messageIndex].role === "assistant" ? messageIndex : messageIndex + 1
+      );
+      await this.makeRequest({
+        trigger: "regenerate-assistant-message",
+        messageId,
+        ...options
+      });
     };
     /**
-     *
+     * Attempt to resume an ongoing streaming response.
      */
-    this.
-      await this.
+    this.resumeStream = async (options = {}) => {
+      await this.makeRequest({ trigger: "resume-stream", ...options });
     };
     this.addToolResult = async ({
       toolCallId,
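The rewritten sendMessage either appends a new user message or, when message.messageId is set, replaces that user message and truncates everything after it before re-fetching; regenerate and resumeStream now funnel into the same makeRequest with dedicated triggers. A sketch of the edit-and-resubmit flow, assuming an AbstractChat-based chat instance (e.g. from useChat); the ids are illustrative:

// Replace an earlier *user* message and regenerate from that point.
await chat.sendMessage({
  text: 'Actually, write it in TypeScript.',
  messageId: 'user-msg-42', // hypothetical id; must belong to a user message
});

// Regenerate a specific assistant message, or omit the id for the last one.
await chat.regenerate({ messageId: 'assistant-msg-43' }); // hypothetical id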
@@ -1705,8 +1758,8 @@ var AbstractChat = class {
       }
       const lastMessage = this.lastMessage;
       if (isAssistantMessageWithCompletedToolCalls(lastMessage)) {
-        this.triggerRequest({
-          requestType: "generate"
+        this.makeRequest({
+          trigger: "submit-tool-result"
         });
       }
     });
@@ -1765,11 +1818,12 @@ var AbstractChat = class {
   set messages(messages) {
     this.state.messages = messages;
   }
-  async triggerRequest({
-    requestType,
+  async makeRequest({
+    trigger,
     metadata,
     headers,
-    body
+    body,
+    messageId
   }) {
     var _a17, _b;
     this.setStatus({ status: "submitted", error: void 0 });
@@ -1785,15 +1839,30 @@ var AbstractChat = class {
       abortController: new AbortController()
     };
     this.activeResponse = activeResponse;
-    const stream = await this.transport.submitMessages({
-      chatId: this.id,
-      messages: this.state.messages,
-      abortSignal: activeResponse.abortController.signal,
-      metadata,
-      headers,
-      body,
-      requestType
-    });
+    let stream;
+    if (trigger === "resume-stream") {
+      const reconnect = await this.transport.reconnectToStream({
+        chatId: this.id,
+        metadata,
+        headers,
+        body
+      });
+      if (reconnect == null) {
+        return;
+      }
+      stream = reconnect;
+    } else {
+      stream = await this.transport.sendMessages({
+        chatId: this.id,
+        messages: this.state.messages,
+        abortSignal: activeResponse.abortController.signal,
+        metadata,
+        headers,
+        body,
+        trigger,
+        messageId
+      });
+    }
     const runUpdateMessageJob = (job) => (
       // serialize the job execution to avoid race conditions:
       this.jobExecutor.run(
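makeRequest now branches on the trigger: "resume-stream" goes through transport.reconnectToStream, which by default GETs `${api}/${chatId}/stream` and resolves to null on a 204, in which case the request is quietly abandoned. A sketch, assuming the server exposes such a stream endpoint:

// Resume an assistant response that was interrupted, e.g. by a page reload.
// A 204 from GET /api/chat/<chatId>/stream means there is nothing to resume.
await chat.resumeStream();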
@@ -1848,11 +1917,12 @@ var AbstractChat = class {
         maxSteps: this.maxSteps,
         messages: this.state.messages
       })) {
-        await this.triggerRequest({
-          requestType,
+        await this.makeRequest({
           metadata,
           headers,
-          body
+          body,
+          // secondary requests are triggered by automatic tool execution
+          trigger: "submit-tool-result"
         });
       }
     }
@@ -2029,89 +2099,13 @@ function transformTextToUiMessageStream({
 }
 
 // src/ui/text-stream-chat-transport.ts
-var getOriginalFetch3 = () => fetch;
-async function fetchTextStream({
-  api,
-  body,
-  credentials,
-  headers,
-  abortSignal,
-  fetch: fetch2 = getOriginalFetch3(),
-  requestType = "generate"
-}) {
-  var _a17;
-  const response = requestType === "resume" ? await fetch2(`${api}?chatId=${body.chatId}`, {
-    method: "GET",
-    headers: {
-      "Content-Type": "application/json",
-      ...headers
-    },
-    signal: abortSignal,
-    credentials
-  }) : await fetch2(api, {
-    method: "POST",
-    body: JSON.stringify(body),
-    headers: {
-      "Content-Type": "application/json",
-      ...headers
-    },
-    signal: abortSignal,
-    credentials
-  });
-  if (!response.ok) {
-    throw new Error(
-      (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
-    );
-  }
-  if (!response.body) {
-    throw new Error("The response body is empty.");
-  }
-  return transformTextToUiMessageStream({
-    stream: response.body.pipeThrough(new TextDecoderStream())
-  });
-}
-var TextStreamChatTransport = class {
-  constructor({
-    api,
-    credentials,
-    headers,
-    body,
-    fetch: fetch2,
-    prepareRequest
-  }) {
-    this.api = api;
-    this.credentials = credentials;
-    this.headers = headers;
-    this.body = body;
-    this.fetch = fetch2;
-    this.prepareRequest = prepareRequest;
+var TextStreamChatTransport = class extends HttpChatTransport {
+  constructor(options = {}) {
+    super(options);
   }
-  submitMessages({
-    chatId,
-    messages,
-    abortSignal,
-    metadata,
-    headers,
-    body,
-    requestType
-  }) {
-    var _a17, _b;
-    const preparedRequest = (_a17 = this.prepareRequest) == null ? void 0 : _a17.call(this, {
-      id: chatId,
-      messages,
-      body: { ...this.body, ...body },
-      headers: { ...this.headers, ...headers },
-      credentials: this.credentials,
-      requestMetadata: metadata
-    });
-    return fetchTextStream({
-      api: this.api,
-      body: (preparedRequest == null ? void 0 : preparedRequest.body) !== void 0 ? preparedRequest.body : { ...this.body, ...body },
-      headers: (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? preparedRequest.headers : { ...this.headers, ...headers },
-      credentials: (_b = preparedRequest == null ? void 0 : preparedRequest.credentials) != null ? _b : this.credentials,
-      abortSignal,
-      fetch: this.fetch,
-      requestType
+  processResponseStream(stream) {
+    return transformTextToUiMessageStream({
+      stream: stream.pipeThrough(new TextDecoderStream())
     });
   }
 };
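After this hunk the two shipped transports differ only in processResponseStream; all HTTP handling, including the new prepare hooks, is inherited from HttpChatTransport. A sketch; the endpoint and body shape are assumptions:

import { TextStreamChatTransport } from 'ai';

// Plain-text streaming endpoint; chunks are decoded and emitted as text deltas.
const transport = new TextStreamChatTransport({
  api: '/api/completion', // illustrative endpoint
  prepareSendMessagesRequest: ({ id, messages }) => ({
    body: { id, prompt: messages[messages.length - 1] }, // hypothetical server contract
  }),
});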
@@ -2276,7 +2270,7 @@ var JsonToSseTransformStream = class extends TransformStream {
 };
 
 // src/ui-message-stream/ui-message-stream-headers.ts
-var
+var UI_MESSAGE_STREAM_HEADERS = {
   "content-type": "text/event-stream",
   "cache-control": "no-cache",
   connection: "keep-alive",
@@ -2290,16 +2284,20 @@ function createUIMessageStreamResponse({
   status,
   statusText,
   headers,
-  stream
+  stream,
+  consumeSseStream
 }) {
-
-
-
-
-
-
-
-
+  let sseStream = stream.pipeThrough(new JsonToSseTransformStream());
+  if (consumeSseStream) {
+    const [stream1, stream2] = sseStream.tee();
+    sseStream = stream1;
+    consumeSseStream({ stream: stream2 });
+  }
+  return new Response(sseStream.pipeThrough(new TextEncoderStream()), {
+    status,
+    statusText,
+    headers: prepareHeaders(headers, UI_MESSAGE_STREAM_HEADERS)
+  });
 }
 
 // src/ui-message-stream/pipe-ui-message-stream-to-response.ts
@@ -2308,16 +2306,23 @@ function pipeUIMessageStreamToResponse({
   status,
   statusText,
   headers,
-  stream
+  stream,
+  consumeSseStream
 }) {
+  let sseStream = stream.pipeThrough(new JsonToSseTransformStream());
+  if (consumeSseStream) {
+    const [stream1, stream2] = sseStream.tee();
+    sseStream = stream1;
+    consumeSseStream({ stream: stream2 });
+  }
   writeToServerResponse({
     response,
     status,
     statusText,
     headers: Object.fromEntries(
-      prepareHeaders(headers,
+      prepareHeaders(headers, UI_MESSAGE_STREAM_HEADERS).entries()
     ),
-    stream:
+    stream: sseStream.pipeThrough(new TextEncoderStream())
   });
 }
 
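Both createUIMessageStreamResponse and pipeUIMessageStreamToResponse gain a consumeSseStream callback: the SSE stream is tee'd so one branch is sent to the client while the other can be observed server-side, which is the building block for the resumable streams that reconnectToStream consumes. A sketch in which the stream source and the persistence helper are illustrative stand-ins:

import { createUIMessageStreamResponse } from 'ai';

export function POST(req: Request) {
  return createUIMessageStreamResponse({
    stream: uiMessageStream, // assumed: a ReadableStream of UI message chunks
    consumeSseStream: async ({ stream }) => {
      // Drain the teed SSE branch so a client can resume it later.
      const reader = stream.getReader();
      for (let r = await reader.read(); !r.done; r = await reader.read()) {
        await persistSseChunk(r.value); // hypothetical persistence helper
      }
    },
  });
}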
@@ -3725,14 +3730,11 @@ function prepareCallSettings({
 
 // core/prompt/resolve-language-model.ts
 var import_gateway = require("@ai-sdk/gateway");
-var GLOBAL_DEFAULT_PROVIDER = Symbol(
-  "vercel.ai.global.defaultProvider"
-);
 function resolveLanguageModel(model) {
   if (typeof model !== "string") {
     return model;
   }
-  const globalProvider = globalThis[GLOBAL_DEFAULT_PROVIDER];
+  const globalProvider = globalThis.AI_SDK_DEFAULT_PROVIDER;
   return (globalProvider != null ? globalProvider : import_gateway.gateway).languageModel(model);
 }
 
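The global default provider is now read from a plain globalThis property rather than the removed GLOBAL_DEFAULT_PROVIDER symbol, so it can be set without importing anything from ai. A sketch, with openai as an illustrative provider:

import { openai } from '@ai-sdk/openai'; // illustrative provider choice

// Bare model id strings now resolve through this provider instead of the gateway.
(globalThis as any).AI_SDK_DEFAULT_PROVIDER = openai;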
@@ -8118,7 +8120,7 @@ var SseMCPTransport = class {
           (_b = this.onerror) == null ? void 0 : _b.call(this, error);
           return reject(error);
         }
-        const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough(
+        const stream = response.body.pipeThrough(new TextDecoderStream()).pipeThrough(new import_provider_utils24.EventSourceParserStream());
         const reader = stream.getReader();
         const processEvents = async () => {
           var _a18, _b2, _c2;
@@ -8575,7 +8577,6 @@ var DefaultTranscriptionResult = class {
   DefaultChatTransport,
   DownloadError,
   EmptyResponseBodyError,
-  GLOBAL_DEFAULT_PROVIDER,
   InvalidArgumentError,
   InvalidDataContentError,
   InvalidMessageRoleError,
@@ -8602,6 +8603,7 @@ var DefaultTranscriptionResult = class {
   ToolCallRepairError,
   ToolExecutionError,
   TypeValidationError,
+  UI_MESSAGE_STREAM_HEADERS,
   UnsupportedFunctionalityError,
   asSchema,
   assistantModelMessageSchema,