ai 5.0.0-canary.21 → 5.0.0-canary.22
- package/CHANGELOG.md +9 -0
- package/README.md +1 -1
- package/dist/index.d.mts +574 -593
- package/dist/index.d.ts +574 -593
- package/dist/index.js +190 -202
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +187 -199
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
@@ -63,11 +63,11 @@ __export(src_exports, {
   coreToolMessageSchema: () => coreToolMessageSchema,
   coreUserMessageSchema: () => coreUserMessageSchema,
   cosineSimilarity: () => cosineSimilarity,
-  createDataStream: () => createDataStream,
-  createDataStreamResponse: () => createDataStreamResponse,
   createIdGenerator: () => import_provider_utils25.createIdGenerator,
   createProviderRegistry: () => createProviderRegistry,
   createTextStreamResponse: () => createTextStreamResponse,
+  createUIMessageStream: () => createUIMessageStream,
+  createUIMessageStreamResponse: () => createUIMessageStreamResponse,
   customProvider: () => customProvider,
   defaultSettingsMiddleware: () => defaultSettingsMiddleware,
   embed: () => embed,
@@ -90,8 +90,8 @@ __export(src_exports, {
   jsonSchema: () => import_provider_utils25.jsonSchema,
   modelMessageSchema: () => modelMessageSchema,
   parsePartialJson: () => parsePartialJson,
-  pipeDataStreamToResponse: () => pipeDataStreamToResponse,
   pipeTextStreamToResponse: () => pipeTextStreamToResponse,
+  pipeUIMessageStreamToResponse: () => pipeUIMessageStreamToResponse,
   processTextStream: () => processTextStream,
   shouldResubmitMessages: () => shouldResubmitMessages,
   simulateReadableStream: () => simulateReadableStream,
@@ -109,172 +109,6 @@ __export(src_exports, {
 module.exports = __toCommonJS(src_exports);
 var import_provider_utils25 = require("@ai-sdk/provider-utils");

-// src/data-stream/create-data-stream.ts
-function createDataStream({
-  execute,
-  onError = () => "An error occurred."
-  // mask error messages for safety by default
-}) {
-  let controller;
-  const ongoingStreamPromises = [];
-  const stream = new ReadableStream({
-    start(controllerArg) {
-      controller = controllerArg;
-    }
-  });
-  function safeEnqueue(data) {
-    try {
-      controller.enqueue(data);
-    } catch (error) {
-    }
-  }
-  try {
-    const result = execute({
-      write(part) {
-        safeEnqueue(part);
-      },
-      merge(streamArg) {
-        ongoingStreamPromises.push(
-          (async () => {
-            const reader = streamArg.getReader();
-            while (true) {
-              const { done, value } = await reader.read();
-              if (done)
-                break;
-              safeEnqueue(value);
-            }
-          })().catch((error) => {
-            safeEnqueue({ type: "error", value: onError(error) });
-          })
-        );
-      },
-      onError
-    });
-    if (result) {
-      ongoingStreamPromises.push(
-        result.catch((error) => {
-          safeEnqueue({ type: "error", value: onError(error) });
-        })
-      );
-    }
-  } catch (error) {
-    safeEnqueue({ type: "error", value: onError(error) });
-  }
-  const waitForStreams = new Promise(async (resolve) => {
-    while (ongoingStreamPromises.length > 0) {
-      await ongoingStreamPromises.shift();
-    }
-    resolve();
-  });
-  waitForStreams.finally(() => {
-    try {
-      controller.close();
-    } catch (error) {
-    }
-  });
-  return stream;
-}
-
-// src/util/prepare-headers.ts
-function prepareHeaders(headers, defaultHeaders) {
-  const responseHeaders = new Headers(headers != null ? headers : {});
-  for (const [key, value] of Object.entries(defaultHeaders)) {
-    if (!responseHeaders.has(key)) {
-      responseHeaders.set(key, value);
-    }
-  }
-  return responseHeaders;
-}
-
-// src/data-stream/data-stream-headers.ts
-var dataStreamHeaders = {
-  "content-type": "text/event-stream",
-  "cache-control": "no-cache",
-  connection: "keep-alive",
-  "x-vercel-ai-data-stream": "v2",
-  "x-accel-buffering": "no"
-  // disable nginx buffering
-};
-
-// src/data-stream/json-to-sse-transform-stream.ts
-var JsonToSseTransformStream = class extends TransformStream {
-  constructor() {
-    super({
-      transform(part, controller) {
-        controller.enqueue(`data: ${JSON.stringify(part)}
-
-`);
-      },
-      flush(controller) {
-        controller.enqueue("data: [DONE]\n\n");
-      }
-    });
-  }
-};
-
-// src/data-stream/create-data-stream-response.ts
-function createDataStreamResponse({
-  status,
-  statusText,
-  headers,
-  dataStream
-}) {
-  return new Response(
-    dataStream.pipeThrough(new JsonToSseTransformStream()).pipeThrough(new TextEncoderStream()),
-    {
-      status,
-      statusText,
-      headers: prepareHeaders(headers, dataStreamHeaders)
-    }
-  );
-}
-
-// src/util/write-to-server-response.ts
-function writeToServerResponse({
-  response,
-  status,
-  statusText,
-  headers,
-  stream
-}) {
-  response.writeHead(status != null ? status : 200, statusText, headers);
-  const reader = stream.getReader();
-  const read = async () => {
-    try {
-      while (true) {
-        const { done, value } = await reader.read();
-        if (done)
-          break;
-        response.write(value);
-      }
-    } catch (error) {
-      throw error;
-    } finally {
-      response.end();
-    }
-  };
-  read();
-}
-
-// src/data-stream/pipe-data-stream-to-response.ts
-function pipeDataStreamToResponse({
-  response,
-  status,
-  statusText,
-  headers,
-  dataStream
-}) {
-  writeToServerResponse({
-    response,
-    status,
-    statusText,
-    headers: Object.fromEntries(
-      prepareHeaders(headers, dataStreamHeaders).entries()
-    ),
-    stream: dataStream.pipeThrough(new JsonToSseTransformStream()).pipeThrough(new TextEncoderStream())
-  });
-}
-
 // src/error/index.ts
 var import_provider16 = require("@ai-sdk/provider");

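The data-stream helpers removed in this hunk are not dropped outright: they reappear later in this diff under `ui-message-stream` names, and the `dataStream` option on the response/pipe helpers becomes `stream`. A minimal migration sketch, assuming only the exported names and option shapes visible in this diff (the handler and the emitted part are illustrative):

```ts
import { createUIMessageStream, createUIMessageStreamResponse } from 'ai';

// canary.21 (removed above): createDataStream / createDataStreamResponse({ dataStream })
// canary.22 (added below):   createUIMessageStream / createUIMessageStreamResponse({ stream })
export function handler(): Response {
  const stream = createUIMessageStream({
    execute: async ({ write }) => {
      // 'text' is one of the part variants in uiMessageStreamPartSchema further down this diff
      write({ type: 'text', value: 'Hello' });
    },
    onError: () => 'An error occurred.', // errors are masked by default, as in the source
  });
  return createUIMessageStreamResponse({ stream });
}
```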
@@ -625,6 +459,17 @@ var RetryError = class extends import_provider15.AISDKError {
 };
 _a15 = symbol15;

+// src/util/prepare-headers.ts
+function prepareHeaders(headers, defaultHeaders) {
+  const responseHeaders = new Headers(headers != null ? headers : {});
+  for (const [key, value] of Object.entries(defaultHeaders)) {
+    if (!responseHeaders.has(key)) {
+      responseHeaders.set(key, value);
+    }
+  }
+  return responseHeaders;
+}
+
 // src/text-stream/create-text-stream-response.ts
 function createTextStreamResponse({
   status,
@@ -641,6 +486,33 @@ function createTextStreamResponse({
   });
 }

+// src/util/write-to-server-response.ts
+function writeToServerResponse({
+  response,
+  status,
+  statusText,
+  headers,
+  stream
+}) {
+  response.writeHead(status != null ? status : 200, statusText, headers);
+  const reader = stream.getReader();
+  const read = async () => {
+    try {
+      while (true) {
+        const { done, value } = await reader.read();
+        if (done)
+          break;
+        response.write(value);
+      }
+    } catch (error) {
+      throw error;
+    } finally {
+      response.end();
+    }
+  };
+  read();
+}
+
 // src/text-stream/pipe-text-stream-to-response.ts
 function pipeTextStreamToResponse({
   response,
@@ -676,7 +548,7 @@ function appendClientMessage({
 // src/ui/call-chat-api.ts
 var import_provider_utils4 = require("@ai-sdk/provider-utils");

-// src/
+// src/ui-message-stream/ui-message-stream-parts.ts
 var import_zod = require("zod");
 var toolCallSchema = import_zod.z.object({
   toolCallId: import_zod.z.string(),
@@ -697,7 +569,7 @@ var sourceSchema = import_zod.z.object({
   providerMetadata: import_zod.z.any().optional()
   // Use z.any() for generic metadata
 });
-var
+var uiMessageStreamPartSchema = import_zod.z.discriminatedUnion("type", [
   import_zod.z.object({
     type: import_zod.z.literal("text"),
     value: import_zod.z.string()
@@ -796,7 +668,7 @@ async function consumeStream({
   }
 }

-// src/ui/process-
+// src/ui/process-ui-message-stream.ts
 var import_provider_utils2 = require("@ai-sdk/provider-utils");

 // src/util/merge-objects.ts
@@ -1184,8 +1056,8 @@ function getToolInvocations(message) {
   ).map((part) => part.toolInvocation);
 }

-// src/ui/process-
-function
+// src/ui/process-ui-message-stream.ts
+function processUIMessageStream({
   stream,
   onUpdate,
   onToolCall,
@@ -1470,11 +1342,10 @@ var getOriginalFetch = () => fetch;
 async function callChatApi({
   api,
   body,
-  streamProtocol = "
+  streamProtocol = "ui-message",
   credentials,
   headers,
   abortController,
-  onResponse,
   onUpdate,
   onFinish,
   onToolCall,
@@ -1503,9 +1374,6 @@ async function callChatApi({
     signal: (_b = abortController == null ? void 0 : abortController()) == null ? void 0 : _b.signal,
     credentials
   });
-  if (onResponse != null) {
-    await onResponse(response);
-  }
   if (!response.ok) {
     throw new Error(
       (_c = await response.text()) != null ? _c : "Failed to fetch the chat response."
@@ -1524,12 +1392,12 @@ async function callChatApi({
       });
       return;
     }
-    case "
+    case "ui-message": {
       await consumeStream({
-        stream:
+        stream: processUIMessageStream({
           stream: (0, import_provider_utils4.parseJsonEventStream)({
             stream: response.body,
-            schema:
+            schema: uiMessageStreamPartSchema
           }).pipeThrough(
             new TransformStream({
               async transform(part, controller) {
@@ -1587,7 +1455,6 @@ async function callCompletionApi({
   setLoading,
   setError,
   setAbortController,
-  onResponse,
   onFinish,
   onError,
   fetch: fetch2 = getOriginalFetch2()
@@ -1614,13 +1481,6 @@ async function callCompletionApi({
   }).catch((err) => {
     throw err;
   });
-  if (onResponse) {
-    try {
-      await onResponse(response);
-    } catch (err) {
-      throw err;
-    }
-  }
   if (!response.ok) {
     throw new Error(
       (_a17 = await response.text()) != null ? _a17 : "Failed to fetch the chat response."
@@ -1645,7 +1505,7 @@ async function callCompletionApi({
   await consumeStream({
     stream: (0, import_provider_utils5.parseJsonEventStream)({
       stream: response.body,
-      schema:
+      schema: uiMessageStreamPartSchema
     }).pipeThrough(
       new TransformStream({
         async transform(part) {
@@ -1929,6 +1789,134 @@ function updateToolCallResult({
   };
 }

+// src/ui-message-stream/create-ui-message-stream.ts
+function createUIMessageStream({
+  execute,
+  onError = () => "An error occurred."
+  // mask error messages for safety by default
+}) {
+  let controller;
+  const ongoingStreamPromises = [];
+  const stream = new ReadableStream({
+    start(controllerArg) {
+      controller = controllerArg;
+    }
+  });
+  function safeEnqueue(data) {
+    try {
+      controller.enqueue(data);
+    } catch (error) {
+    }
+  }
+  try {
+    const result = execute({
+      write(part) {
+        safeEnqueue(part);
+      },
+      merge(streamArg) {
+        ongoingStreamPromises.push(
+          (async () => {
+            const reader = streamArg.getReader();
+            while (true) {
+              const { done, value } = await reader.read();
+              if (done)
+                break;
+              safeEnqueue(value);
+            }
+          })().catch((error) => {
+            safeEnqueue({ type: "error", value: onError(error) });
+          })
+        );
+      },
+      onError
+    });
+    if (result) {
+      ongoingStreamPromises.push(
+        result.catch((error) => {
+          safeEnqueue({ type: "error", value: onError(error) });
+        })
+      );
+    }
+  } catch (error) {
+    safeEnqueue({ type: "error", value: onError(error) });
+  }
+  const waitForStreams = new Promise(async (resolve) => {
+    while (ongoingStreamPromises.length > 0) {
+      await ongoingStreamPromises.shift();
+    }
+    resolve();
+  });
+  waitForStreams.finally(() => {
+    try {
+      controller.close();
+    } catch (error) {
+    }
+  });
+  return stream;
+}
+
+// src/ui-message-stream/ui-message-stream-headers.ts
+var uiMessageStreamHeaders = {
+  "content-type": "text/event-stream",
+  "cache-control": "no-cache",
+  connection: "keep-alive",
+  "x-vercel-ai-ui-message-stream": "v1",
+  "x-accel-buffering": "no"
+  // disable nginx buffering
+};
+
+// src/ui-message-stream/json-to-sse-transform-stream.ts
+var JsonToSseTransformStream = class extends TransformStream {
+  constructor() {
+    super({
+      transform(part, controller) {
+        controller.enqueue(`data: ${JSON.stringify(part)}
+
+`);
+      },
+      flush(controller) {
+        controller.enqueue("data: [DONE]\n\n");
+      }
+    });
+  }
+};
+
+// src/ui-message-stream/create-ui-message-stream-response.ts
+function createUIMessageStreamResponse({
+  status,
+  statusText,
+  headers,
+  stream
+}) {
+  return new Response(
+    stream.pipeThrough(new JsonToSseTransformStream()).pipeThrough(new TextEncoderStream()),
+    {
+      status,
+      statusText,
+      headers: prepareHeaders(headers, uiMessageStreamHeaders)
+    }
+  );
+}
+
+// src/ui-message-stream/pipe-ui-message-stream-to-response.ts
+function pipeUIMessageStreamToResponse({
+  response,
+  status,
+  statusText,
+  headers,
+  stream
+}) {
+  writeToServerResponse({
+    response,
+    status,
+    statusText,
+    headers: Object.fromEntries(
+      prepareHeaders(headers, uiMessageStreamHeaders).entries()
+    ),
+    stream: stream.pipeThrough(new JsonToSseTransformStream()).pipeThrough(new TextEncoderStream())
+  });
+}
+
 // src/util/data-url.ts
 function getTextFromDataUrl(dataUrl) {
   const [header, base64Content] = dataUrl.split(",");
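In the implementation added above, `execute` receives both `write` and `merge`: `merge` drains another stream of parts into the same output and converts its errors into `{ type: "error" }` parts via `onError`, while `pipeUIMessageStreamToResponse` applies `uiMessageStreamHeaders` and writes the SSE frames to a Node server response. A hedged sketch wiring the two together (the HTTP server and the merged stream are illustrative only):

```ts
import { createServer } from 'node:http';
import { createUIMessageStream, pipeUIMessageStreamToResponse } from 'ai';

createServer((_req, res) => {
  const stream = createUIMessageStream({
    execute: ({ write, merge }) => {
      write({ type: 'text', value: 'direct part' });

      // Illustrative second source; merge() forwards its parts into the same stream.
      merge(
        new ReadableStream({
          start(controller) {
            controller.enqueue({ type: 'text', value: 'merged part' });
            controller.close();
          },
        }),
      );
    },
  });

  // Sends the headers from uiMessageStreamHeaders (text/event-stream,
  // x-vercel-ai-ui-message-stream: v1, ...) and streams `data: {...}` frames,
  // terminated by the `data: [DONE]` emitted in JsonToSseTransformStream's flush().
  pipeUIMessageStreamToResponse({ response: res, stream });
}).listen(3000);
```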
@@ -6764,7 +6752,7 @@ var DefaultStreamTextResult = class {
       )
     );
   }
-
+  toUIMessageStream({
     newMessageId,
     originalMessages = [],
     onFinish,
@@ -6916,7 +6904,7 @@ var DefaultStreamTextResult = class {
         }
       })
     );
-    return onFinish == null ? baseStream :
+    return onFinish == null ? baseStream : processUIMessageStream({
       stream: baseStream,
       lastMessage,
       newMessageId: messageId != null ? messageId : this.generateId(),
@@ -6933,7 +6921,7 @@ var DefaultStreamTextResult = class {
       }
     });
   }
-
+  pipeUIMessageStreamToResponse(response, {
     newMessageId,
     originalMessages,
     onFinish,
@@ -6945,9 +6933,9 @@ var DefaultStreamTextResult = class {
     onError,
     ...init
   } = {}) {
-
+    pipeUIMessageStreamToResponse({
       response,
-
+      stream: this.toUIMessageStream({
         newMessageId,
         originalMessages,
         onFinish,
@@ -6968,7 +6956,7 @@ var DefaultStreamTextResult = class {
       ...init
     });
   }
-
+  toUIMessageStreamResponse({
     newMessageId,
     originalMessages,
     onFinish,
@@ -6980,8 +6968,8 @@ var DefaultStreamTextResult = class {
     onError,
     ...init
   } = {}) {
-    return
-
+    return createUIMessageStreamResponse({
+      stream: this.toUIMessageStream({
         newMessageId,
         originalMessages,
         onFinish,
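On the `streamText` result (`DefaultStreamTextResult`), the stream, response, and pipe methods now route through the UI message stream path above: `toUIMessageStream`, `toUIMessageStreamResponse`, and `pipeUIMessageStreamToResponse`. A minimal route-handler sketch, assuming the usual `streamText({ model, prompt })` call shape; the model type is derived from `streamText`'s own signature rather than named explicitly:

```ts
import { streamText } from 'ai';

type ChatModel = Parameters<typeof streamText>[0]['model'];

export function chatHandler(model: ChatModel, prompt: string): Response {
  const result = streamText({ model, prompt });

  // Per the hunk above, this builds
  // createUIMessageStreamResponse({ stream: this.toUIMessageStream({ ... }) });
  // options such as originalMessages and onFinish remain optional.
  return result.toUIMessageStreamResponse();
}
```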
@@ -8053,11 +8041,11 @@ var DefaultTranscriptionResult = class {
   coreToolMessageSchema,
   coreUserMessageSchema,
   cosineSimilarity,
-  createDataStream,
-  createDataStreamResponse,
   createIdGenerator,
   createProviderRegistry,
   createTextStreamResponse,
+  createUIMessageStream,
+  createUIMessageStreamResponse,
   customProvider,
   defaultSettingsMiddleware,
   embed,
@@ -8080,8 +8068,8 @@ var DefaultTranscriptionResult = class {
   jsonSchema,
   modelMessageSchema,
   parsePartialJson,
-  pipeDataStreamToResponse,
   pipeTextStreamToResponse,
+  pipeUIMessageStreamToResponse,
   processTextStream,
   shouldResubmitMessages,
   simulateReadableStream,