ai 5.0.0-alpha.2 → 5.0.0-alpha.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +32 -0
- package/dist/index.d.mts +125 -42
- package/dist/index.d.ts +125 -42
- package/dist/index.js +224 -118
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +222 -117
- package/dist/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/index.mjs
CHANGED
@@ -504,9 +504,8 @@ var uiMessageStreamPartSchema = z.union([
     providerMetadata: z.record(z.any()).optional()
   }),
   z.object({
-    type: z.literal("source"),
-    sourceType: z.literal("url"),
-    id: z.string(),
+    type: z.literal("source-url"),
+    sourceId: z.string(),
     url: z.string(),
     title: z.string().optional(),
     providerMetadata: z.any().optional()
@@ -1062,16 +1061,13 @@ function processUIMessageStream({
           write();
           break;
         }
-        case "source": {
+        case "source-url": {
           state.message.parts.push({
-            type: "source",
-            source: {
-              sourceType: "url",
-              id: part.id,
-              url: part.url,
-              title: part.title,
-              providerMetadata: part.providerMetadata
-            }
+            type: "source-url",
+            sourceId: part.sourceId,
+            url: part.url,
+            title: part.title,
+            providerMetadata: part.providerMetadata
           });
           write();
           break;
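For orientation (illustrative, not part of the diff): after this change a source part sits flat on the message instead of wrapping a nested source object. All field values below are invented:

  // hypothetical "source-url" UI message part
  const part = {
    type: "source-url",
    sourceId: "src-1",
    url: "https://example.com",
    title: "Example",            // optional
    providerMetadata: undefined  // optional
  };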
@@ -1254,7 +1250,6 @@ var getOriginalFetch = () => fetch;
 async function fetchUIMessageStream({
   api,
   body,
-  streamProtocol = "ui-message",
   credentials,
   headers,
   abortController,
@@ -1288,9 +1283,7 @@ async function fetchUIMessageStream({
   if (!response.body) {
     throw new Error("The response body is empty.");
   }
-  return streamProtocol === "text" ? transformTextToUiMessageStream({
-    stream: response.body.pipeThrough(new TextDecoderStream())
-  }) : parseJsonEventStream({
+  return parseJsonEventStream({
     stream: response.body,
     schema: uiMessageStreamPartSchema
   }).pipeThrough(
@@ -1304,6 +1297,46 @@ async function fetchUIMessageStream({
     })
   );
 }
+async function fetchTextStream({
+  api,
+  body,
+  credentials,
+  headers,
+  abortController,
+  fetch: fetch2 = getOriginalFetch(),
+  requestType = "generate"
+}) {
+  var _a17, _b, _c;
+  const response = requestType === "resume" ? await fetch2(`${api}?chatId=${body.chatId}`, {
+    method: "GET",
+    headers: {
+      "Content-Type": "application/json",
+      ...headers
+    },
+    signal: (_a17 = abortController == null ? void 0 : abortController()) == null ? void 0 : _a17.signal,
+    credentials
+  }) : await fetch2(api, {
+    method: "POST",
+    body: JSON.stringify(body),
+    headers: {
+      "Content-Type": "application/json",
+      ...headers
+    },
+    signal: (_b = abortController == null ? void 0 : abortController()) == null ? void 0 : _b.signal,
+    credentials
+  });
+  if (!response.ok) {
+    throw new Error(
+      (_c = await response.text()) != null ? _c : "Failed to fetch the chat response."
+    );
+  }
+  if (!response.body) {
+    throw new Error("The response body is empty.");
+  }
+  return transformTextToUiMessageStream({
+    stream: response.body.pipeThrough(new TextDecoderStream())
+  });
+}
 async function consumeUIMessageStream({
   stream,
   onUpdate,
@@ -1354,10 +1387,17 @@ async function callChatApi({
   requestType = "generate",
   messageMetadataSchema
 }) {
-  const stream = await fetchUIMessageStream({
+  const stream = streamProtocol === "text" ? await fetchTextStream({
+    api,
+    body,
+    credentials,
+    headers,
+    abortController,
+    fetch: fetch2,
+    requestType
+  }) : await fetchUIMessageStream({
     api,
     body,
-    streamProtocol,
     credentials,
     headers,
     abortController,
@@ -1546,18 +1586,18 @@ var SerialJobExecutor = class {
 function shouldResubmitMessages({
   originalMaxToolInvocationStep,
   originalMessageCount,
-  maxSteps: maxSteps2,
+  maxSteps,
   messages
 }) {
   var _a17;
   const lastMessage = messages[messages.length - 1];
   return (
     // check if the feature is enabled:
-    maxSteps2 > 1 && // ensure there is a last message:
+    maxSteps > 1 && // ensure there is a last message:
     lastMessage != null && // ensure we actually have new steps (to prevent infinite loops in case of errors):
     (messages.length > originalMessageCount || extractMaxToolInvocationStep(getToolInvocations(lastMessage)) !== originalMaxToolInvocationStep) && // check that next step is possible:
     isAssistantMessageWithCompletedToolCalls(lastMessage) && // limit the number of automatic steps:
-    ((_a17 = extractMaxToolInvocationStep(getToolInvocations(lastMessage))) != null ? _a17 : 0) < maxSteps2
+    ((_a17 = extractMaxToolInvocationStep(getToolInvocations(lastMessage))) != null ? _a17 : 0) < maxSteps
   );
 }
 function isAssistantMessageWithCompletedToolCalls(message) {
@@ -1597,7 +1637,7 @@ var ChatStore = class {
     chats = {},
     generateId: generateId3,
     transport,
-    maxSteps: maxSteps2,
+    maxSteps = 1,
     messageMetadataSchema,
     dataPartSchemas
   }) {
@@ -1613,7 +1653,7 @@ var ChatStore = class {
         }
       ])
     );
-    this.maxSteps = maxSteps2;
+    this.maxSteps = maxSteps;
     this.transport = transport;
     this.subscribers = /* @__PURE__ */ new Set();
     this.generateId = generateId3 != null ? generateId3 : generateIdFunc;
@@ -1911,7 +1951,6 @@ var DefaultChatTransport = class {
     credentials,
     headers,
     body,
-    streamProtocol,
     fetch: fetch2,
     prepareRequestBody
   }) {
@@ -1919,7 +1958,6 @@ var DefaultChatTransport = class {
     this.credentials = credentials;
     this.headers = headers;
     this.body = body;
-    this.streamProtocol = streamProtocol;
     this.fetch = fetch2;
     this.prepareRequestBody = prepareRequestBody;
   }
@@ -1949,7 +1987,55 @@ var DefaultChatTransport = class {
         ...this.body,
         ...body
       },
-      streamProtocol: this.streamProtocol,
+      credentials: this.credentials,
+      abortController: () => abortController,
+      fetch: this.fetch,
+      requestType
+    });
+  }
+};
+var TextStreamChatTransport = class {
+  constructor({
+    api,
+    credentials,
+    headers,
+    body,
+    fetch: fetch2,
+    prepareRequestBody
+  }) {
+    this.api = api;
+    this.credentials = credentials;
+    this.headers = headers;
+    this.body = body;
+    this.fetch = fetch2;
+    this.prepareRequestBody = prepareRequestBody;
+  }
+  submitMessages({
+    chatId,
+    messages,
+    abortController,
+    body,
+    headers,
+    requestType
+  }) {
+    var _a17, _b;
+    return fetchTextStream({
+      api: this.api,
+      headers: {
+        ...this.headers,
+        ...headers
+      },
+      body: (_b = (_a17 = this.prepareRequestBody) == null ? void 0 : _a17.call(this, {
+        chatId,
+        messages,
+        ...this.body,
+        ...body
+      })) != null ? _b : {
+        chatId,
+        messages,
+        ...this.body,
+        ...body
+      },
      credentials: this.credentials,
      abortController: () => abortController,
      fetch: this.fetch,
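Illustrative usage of the new transport (the endpoint and header are invented; the option shape mirrors the constructor above):

  import { TextStreamChatTransport } from "ai";

  // assumes a server route that streams plain text rather than UI message events
  const transport = new TextStreamChatTransport({
    api: "/api/chat-text",
    headers: { "x-example": "demo" }
  });
  // submitMessages(...) resolves to a UI message stream that fetchTextStream
  // builds from the text response via transformTextToUiMessageStream.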
@@ -2151,7 +2237,6 @@ import {
 function defaultChatStore({
   api,
   fetch: fetch2,
-  streamProtocol = "ui-message",
   credentials,
   headers,
   body,
@@ -2159,14 +2244,13 @@ function defaultChatStore({
   generateId: generateId3 = generateIdFunc2,
   dataPartSchemas,
   messageMetadataSchema,
-  maxSteps: maxSteps2,
+  maxSteps = 1,
   chats
 }) {
   return new ChatStore({
     transport: new DefaultChatTransport({
       api,
       fetch: fetch2,
-      streamProtocol,
       credentials,
       headers,
       body,
@@ -2175,7 +2259,7 @@ function defaultChatStore({
     generateId: generateId3,
     messageMetadataSchema,
     dataPartSchemas,
-    maxSteps: maxSteps2,
+    maxSteps,
     chats
   });
 }
@@ -4684,11 +4768,11 @@ var DelayedPromise = class {
     this._resolve = void 0;
     this._reject = void 0;
   }
-  get value() {
-    if (this.promise) {
-      return this.promise;
+  get promise() {
+    if (this._promise) {
+      return this._promise;
     }
-    this.promise = new Promise((resolve, reject) => {
+    this._promise = new Promise((resolve, reject) => {
       if (this.status.type === "resolved") {
         resolve(this.status.value);
       } else if (this.status.type === "rejected") {
@@ -4697,19 +4781,19 @@ var DelayedPromise = class {
       this._resolve = resolve;
       this._reject = reject;
     });
-    return this.promise;
+    return this._promise;
   }
   resolve(value) {
     var _a17;
     this.status = { type: "resolved", value };
-    if (this.promise) {
+    if (this._promise) {
       (_a17 = this._resolve) == null ? void 0 : _a17.call(this, value);
     }
   }
   reject(error) {
     var _a17;
     this.status = { type: "rejected", error };
-    if (this.promise) {
+    if (this._promise) {
       (_a17 = this._reject) == null ? void 0 : _a17.call(this, error);
     }
   }
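A minimal behavioral sketch of the renamed accessor (consumer code invented): the underlying Promise is allocated lazily on first access to .promise and settles from the recorded status, so resolving before anyone reads it is safe:

  const delayed = new DelayedPromise();
  delayed.resolve(42);            // records status; no Promise allocated yet
  delayed.promise.then((value) => {
    console.log(value);           // 42, settled immediately from the stored status
  });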
@@ -4804,12 +4888,12 @@ var DefaultStreamObjectResult = class {
     currentDate,
     now: now2
   }) {
-    this.objectPromise = new DelayedPromise();
-    this.usagePromise = new DelayedPromise();
-    this.providerMetadataPromise = new DelayedPromise();
-    this.warningsPromise = new DelayedPromise();
-    this.requestPromise = new DelayedPromise();
-    this.responsePromise = new DelayedPromise();
+    this._object = new DelayedPromise();
+    this._usage = new DelayedPromise();
+    this._providerMetadata = new DelayedPromise();
+    this._warnings = new DelayedPromise();
+    this._request = new DelayedPromise();
+    this._response = new DelayedPromise();
     const { maxRetries, retry } = prepareRetries({
       maxRetries: maxRetriesArg
     });
@@ -4928,7 +5012,7 @@ var DefaultStreamObjectResult = class {
           })
         })
       );
-      self.requestPromise.resolve(request != null ? request : {});
+      self._request.resolve(request != null ? request : {});
       let warnings;
       let usage = {
         inputTokens: void 0,
@@ -5021,9 +5105,9 @@ var DefaultStreamObjectResult = class {
             usage,
             response: fullResponse
           });
-          self.usagePromise.resolve(usage);
-          self.providerMetadataPromise.resolve(providerMetadata);
-          self.responsePromise.resolve({
+          self._usage.resolve(usage);
+          self._providerMetadata.resolve(providerMetadata);
+          self._response.resolve({
             ...fullResponse,
             headers: response == null ? void 0 : response.headers
           });
@@ -5037,7 +5121,7 @@ var DefaultStreamObjectResult = class {
             );
             if (validationResult.success) {
               object2 = validationResult.value;
-              self.objectPromise.resolve(object2);
+              self._object.resolve(object2);
             } else {
               error = new NoObjectGeneratedError({
                 message: "No object generated: response did not match schema.",
@@ -5047,7 +5131,7 @@ var DefaultStreamObjectResult = class {
                 usage,
                 finishReason
               });
-              self.objectPromise.reject(error);
+              self._object.reject(error);
             }
             break;
           }
@@ -5142,22 +5226,22 @@ var DefaultStreamObjectResult = class {
     this.outputStrategy = outputStrategy;
   }
   get object() {
-    return this.objectPromise.value;
+    return this._object.promise;
   }
   get usage() {
-    return this.usagePromise.value;
+    return this._usage.promise;
   }
   get providerMetadata() {
-    return this.providerMetadataPromise.value;
+    return this._providerMetadata.promise;
   }
   get warnings() {
-    return this.warningsPromise.value;
+    return this._warnings.promise;
   }
   get request() {
-    return this.requestPromise.value;
+    return this._request.promise;
   }
   get response() {
-    return this.responsePromise.value;
+    return this._response.promise;
   }
   get partialObjectStream() {
     return createAsyncIterableStream(
@@ -5319,6 +5403,11 @@ var DefaultSpeechResult = class {
 // core/generate-text/generate-text.ts
 import { createIdGenerator as createIdGenerator3 } from "@ai-sdk/provider-utils";
 
+// src/util/as-array.ts
+function asArray(value) {
+  return value === void 0 ? [] : Array.isArray(value) ? value : [value];
+}
+
 // core/prompt/prepare-tools-and-tool-choice.ts
 import { asSchema as asSchema2 } from "@ai-sdk/provider-utils";
 
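The hoisted helper normalizes "one or many" options (used below for stopWhen and for transforms), for example:

  asArray(void 0);   // []
  asArray(fn);       // [fn]   (fn stands for any non-array value)
  asArray([f, g]);   // [f, g] (arrays pass through unchanged)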
@@ -5540,8 +5629,8 @@ var DefaultStepResult = class {
 };
 
 // core/generate-text/stop-condition.ts
-function maxSteps(maxSteps2) {
-  return ({ steps }) => steps.length >= maxSteps2;
+function stepCountIs(stepCount) {
+  return ({ steps }) => steps.length === stepCount;
 }
 function hasToolCall(toolName) {
   return ({ steps }) => {
@@ -5551,6 +5640,12 @@ function hasToolCall(toolName) {
     )) != null ? _c : false;
   };
 }
+async function isStopConditionMet({
+  stopConditions,
+  steps
+}) {
+  return (await Promise.all(stopConditions.map((condition) => condition({ steps })))).some((result) => result);
+}
 
 // core/generate-text/to-response-messages.ts
 function toResponseMessages({
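An illustrative caller-side sketch of the new stop conditions (model and tools are placeholders): stopWhen takes a single condition or an array, and isStopConditionMet ends the loop as soon as any condition reports true:

  import { generateText, stepCountIs, hasToolCall } from "ai";

  const result = await generateText({
    model,    // placeholder model instance
    tools,    // placeholder tool set
    prompt: "...",
    // stop after 5 steps, or earlier once the "weather" tool was called:
    stopWhen: [stepCountIs(5), hasToolCall("weather")]
  });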
@@ -5625,12 +5720,14 @@ async function generateText({
   maxRetries: maxRetriesArg,
   abortSignal,
   headers,
-  continueUntil = maxSteps(1),
+  stopWhen = stepCountIs(1),
   experimental_output: output,
   experimental_telemetry: telemetry,
   providerOptions,
-  experimental_activeTools: activeTools,
-  experimental_prepareStep: prepareStep,
+  experimental_activeTools,
+  activeTools = experimental_activeTools,
+  experimental_prepareStep,
+  prepareStep = experimental_prepareStep,
   experimental_repairToolCall: repairToolCall,
   _internal: {
     generateId: generateId3 = originalGenerateId3,
@@ -5639,6 +5736,7 @@ async function generateText({
   onStepFinish,
   ...settings
 }) {
+  const stopConditions = asArray(stopWhen);
   const { maxRetries, retry } = prepareRetries({ maxRetries: maxRetriesArg });
   const callSettings = prepareCallSettings(settings);
   const baseTelemetryAttributes = getBaseTelemetryAttributes({
@@ -5702,7 +5800,7 @@ async function generateText({
       const { toolChoice: stepToolChoice, tools: stepTools } = prepareToolsAndToolChoice({
         tools,
         toolChoice: (_b = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _b : toolChoice,
-        activeTools: (_c = prepareStepResult == null ? void 0 : prepareStepResult.experimental_activeTools) != null ? _c : activeTools
+        activeTools: (_c = prepareStepResult == null ? void 0 : prepareStepResult.activeTools) != null ? _c : activeTools
       });
       currentModelResponse = await retry(
         () => {
@@ -5847,8 +5945,8 @@ async function generateText({
     } while (
       // there are tool calls:
       currentToolCalls.length > 0 && // all current tool calls have results:
-      currentToolResults.length === currentToolCalls.length && // continue until the condition is met:
-      !await continueUntil({ steps })
+      currentToolResults.length === currentToolCalls.length && // continue until a stop condition is met:
+      !await isStopConditionMet({ stopConditions, steps })
     );
     span.setAttributes(
       selectTelemetryAttributes({
@@ -6198,11 +6296,6 @@ function smoothStream({
 // core/generate-text/stream-text.ts
 import { createIdGenerator as createIdGenerator4 } from "@ai-sdk/provider-utils";
 
-// src/util/as-array.ts
-function asArray(value) {
-  return value === void 0 ? [] : Array.isArray(value) ? value : [value];
-}
-
 // core/generate-text/run-tools-transformation.ts
 import { generateId } from "@ai-sdk/provider-utils";
 function runToolsTransformation({
@@ -6413,13 +6506,15 @@ function streamText({
   maxRetries,
   abortSignal,
   headers,
-  maxSteps: maxSteps2 = 1,
+  stopWhen = stepCountIs(1),
   experimental_output: output,
   experimental_telemetry: telemetry,
+  prepareStep,
   providerOptions,
   experimental_toolCallStreaming = false,
   toolCallStreaming = experimental_toolCallStreaming,
-  experimental_activeTools: activeTools,
+  experimental_activeTools,
+  activeTools = experimental_activeTools,
   experimental_repairToolCall: repairToolCall,
   experimental_transform: transform,
   onChunk,
@@ -6449,9 +6544,10 @@ function streamText({
     transforms: asArray(transform),
     activeTools,
     repairToolCall,
-    maxSteps: maxSteps2,
+    stopConditions: asArray(stopWhen),
     output,
     providerOptions,
+    prepareStep,
     onChunk,
     onError,
     onFinish,
@@ -6526,9 +6622,10 @@ var DefaultStreamTextResult = class {
     transforms,
     activeTools,
     repairToolCall,
-    maxSteps: maxSteps2,
+    stopConditions,
     output,
     providerOptions,
+    prepareStep,
     now: now2,
     currentDate,
     generateId: generateId3,
@@ -6537,18 +6634,12 @@ var DefaultStreamTextResult = class {
     onFinish,
     onStepFinish
   }) {
-    this.totalUsagePromise = new DelayedPromise();
-    this.finishReasonPromise = new DelayedPromise();
-    this.stepsPromise = new DelayedPromise();
-    if (maxSteps2 < 1) {
-      throw new InvalidArgumentError({
-        parameter: "maxSteps",
-        value: maxSteps2,
-        message: "maxSteps must be at least 1"
-      });
-    }
+    this._totalUsage = new DelayedPromise();
+    this._finishReason = new DelayedPromise();
+    this._steps = new DelayedPromise();
     this.output = output;
     this.generateId = generateId3;
+    let stepFinish;
     let activeReasoningPart = void 0;
     let recordedContent = [];
     const recordedResponseMessages = [];
@@ -6630,6 +6721,7 @@ var DefaultStreamTextResult = class {
         recordedContent = [];
         activeReasoningPart = void 0;
         recordedResponseMessages.push(...stepMessages);
+        stepFinish.resolve();
       }
       if (part.type === "finish") {
         recordedTotalUsage = part.totalUsage;
@@ -6647,9 +6739,9 @@ var DefaultStreamTextResult = class {
         outputTokens: void 0,
         totalTokens: void 0
       };
-      self.finishReasonPromise.resolve(finishReason);
-      self.totalUsagePromise.resolve(totalUsage);
-      self.stepsPromise.resolve(recordedSteps);
+      self._finishReason.resolve(finishReason);
+      self._totalUsage.resolve(totalUsage);
+      self._steps.resolve(recordedSteps);
       const finalStep = recordedSteps[recordedSteps.length - 1];
       await (onFinish == null ? void 0 : onFinish({
         finishReason,
@@ -6740,8 +6832,7 @@ var DefaultStreamTextResult = class {
           // specific settings that only make sense on the outer level:
           "ai.prompt": {
             input: () => JSON.stringify({ system, prompt, messages })
-          },
-          "ai.settings.maxSteps": maxSteps2
+          }
         }
       }),
       tracer,
@@ -6753,6 +6844,8 @@ var DefaultStreamTextResult = class {
       responseMessages,
       usage
     }) {
+      var _a17, _b, _c;
+      stepFinish = new DelayedPromise();
       const initialPrompt = await standardizePrompt({
         system,
         prompt,
@@ -6762,6 +6855,11 @@ var DefaultStreamTextResult = class {
         ...initialPrompt.messages,
         ...responseMessages
       ];
+      const prepareStepResult = await (prepareStep == null ? void 0 : prepareStep({
+        model,
+        steps: recordedSteps,
+        stepNumber: recordedSteps.length
+      }));
       const promptMessages = await convertToLanguageModelPrompt({
         prompt: {
           system: initialPrompt.system,
@@ -6769,9 +6867,12 @@ var DefaultStreamTextResult = class {
         },
         supportedUrls: await model.supportedUrls
       });
-      const toolsAndToolChoice = {
-        ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
-      };
+      const stepModel = (_a17 = prepareStepResult == null ? void 0 : prepareStepResult.model) != null ? _a17 : model;
+      const { toolChoice: stepToolChoice, tools: stepTools } = prepareToolsAndToolChoice({
+        tools,
+        toolChoice: (_b = prepareStepResult == null ? void 0 : prepareStepResult.toolChoice) != null ? _b : toolChoice,
+        activeTools: (_c = prepareStepResult == null ? void 0 : prepareStepResult.activeTools) != null ? _c : activeTools
+      });
       const {
         result: { stream: stream2, response, request },
         doStreamSpan,
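An illustrative prepareStep callback for streamText (placeholders invented): per step it may swap the model, tool choice, or active tools, and any field left unset falls back to the top-level option, as the _a17/_b/_c fallbacks above show:

  const result = streamText({
    model,    // placeholder default model
    tools,
    prompt: "...",
    prepareStep: ({ steps, stepNumber }) =>
      stepNumber === 0
        ? { toolChoice: "required" } // e.g. force a tool call on the first step
        : { activeTools: [] }        // then disable tools for later steps
  });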
@@ -6787,24 +6888,23 @@ var DefaultStreamTextResult = class {
             telemetry
           }),
           ...baseTelemetryAttributes,
+          // model:
+          "ai.model.provider": stepModel.provider,
+          "ai.model.id": stepModel.modelId,
+          // prompt:
           "ai.prompt.messages": {
             input: () => JSON.stringify(promptMessages)
           },
           "ai.prompt.tools": {
             // convert the language model level tools:
-            input: () => {
-              var _a17;
-              return (_a17 = toolsAndToolChoice.tools) == null ? void 0 : _a17.map(
-                (tool2) => JSON.stringify(tool2)
-              );
-            }
+            input: () => stepTools == null ? void 0 : stepTools.map((tool2) => JSON.stringify(tool2))
           },
           "ai.prompt.toolChoice": {
-            input: () => toolsAndToolChoice.toolChoice != null ? JSON.stringify(toolsAndToolChoice.toolChoice) : void 0
+            input: () => stepToolChoice != null ? JSON.stringify(stepToolChoice) : void 0
           },
           // standardized gen-ai llm span attributes:
-          "gen_ai.system": model.provider,
-          "gen_ai.request.model": model.modelId,
+          "gen_ai.system": stepModel.provider,
+          "gen_ai.request.model": stepModel.modelId,
           "gen_ai.request.frequency_penalty": callSettings.frequencyPenalty,
           "gen_ai.request.max_tokens": callSettings.maxOutputTokens,
           "gen_ai.request.presence_penalty": callSettings.presencePenalty,
@@ -6821,9 +6921,10 @@ var DefaultStreamTextResult = class {
         startTimestampMs: now2(),
         // get before the call
         doStreamSpan: doStreamSpan2,
-        result: await model.doStream({
+        result: await stepModel.doStream({
           ...callSettings,
-          ...toolsAndToolChoice,
+          tools: stepTools,
+          toolChoice: stepToolChoice,
           responseFormat: output == null ? void 0 : output.responseFormat,
           prompt: promptMessages,
           providerOptions,
|
|
6834
6935
|
}
|
6835
6936
|
})
|
6836
6937
|
);
|
6837
|
-
const
|
6938
|
+
const streamWithToolResults = runToolsTransformation({
|
6838
6939
|
tools,
|
6839
6940
|
generatorStream: stream2,
|
6840
6941
|
toolCallStreaming,
|
@@ -6873,10 +6974,10 @@ var DefaultStreamTextResult = class {
           stepText += chunk.text;
         }
         self.addStream(
-          transformedStream.pipeThrough(
+          streamWithToolResults.pipeThrough(
             new TransformStream({
               async transform(chunk, controller) {
-                var _a17, _b, _c, _d;
+                var _a18, _b2, _c2, _d;
                 if (chunk.type === "stream-start") {
                   warnings = chunk.warnings;
                   return;
@@ -6939,9 +7040,9 @@ var DefaultStreamTextResult = class {
                 }
                 case "response-metadata": {
                   stepResponse = {
-                    id: (_a17 = chunk.id) != null ? _a17 : stepResponse.id,
-                    timestamp: (_b = chunk.timestamp) != null ? _b : stepResponse.timestamp,
-                    modelId: (_c = chunk.modelId) != null ? _c : stepResponse.modelId
+                    id: (_a18 = chunk.id) != null ? _a18 : stepResponse.id,
+                    timestamp: (_b2 = chunk.timestamp) != null ? _b2 : stepResponse.timestamp,
+                    modelId: (_c2 = chunk.modelId) != null ? _c2 : stepResponse.modelId
                   };
                   break;
                 }
@@ -7028,9 +7129,13 @@ var DefaultStreamTextResult = class {
                   }
                 });
                 const combinedUsage = addLanguageModelUsage(usage, stepUsage);
-                if (recordedSteps.length < maxSteps2 && // there are tool calls:
-                  stepToolCalls.length > 0 && // all current tool calls have results:
-                  stepToolResults.length === stepToolCalls.length) {
+                await stepFinish.promise;
+                if (stepToolCalls.length > 0 && // all current tool calls have results:
+                  stepToolResults.length === stepToolCalls.length && // continue until a stop condition is met:
+                  !await isStopConditionMet({
+                    stopConditions,
+                    steps: recordedSteps
+                  })) {
                   responseMessages.push(
                     ...toResponseMessages({
                       content: stepContent,
@@ -7078,7 +7183,7 @@ var DefaultStreamTextResult = class {
     });
   }
   get steps() {
-    return this.stepsPromise.value;
+    return this._steps.promise;
   }
   get finalStep() {
     return this.steps.then((steps) => steps[steps.length - 1]);
@@ -7123,10 +7228,10 @@ var DefaultStreamTextResult = class {
     return this.finalStep.then((step) => step.response);
   }
   get totalUsage() {
-    return this.totalUsagePromise.value;
+    return this._totalUsage.promise;
   }
   get finishReason() {
-    return this.finishReasonPromise.value;
+    return this._finishReason.promise;
   }
   /**
   Split out a new stream from the original stream.
@@ -7246,9 +7351,8 @@ var DefaultStreamTextResult = class {
           case "source": {
             if (sendSources) {
               controller.enqueue({
-                type: "source",
-                sourceType: "url",
-                id: part.id,
+                type: "source-url",
+                sourceId: part.id,
                 url: part.url,
                 title: part.title,
                 providerMetadata: part.providerMetadata
@@ -8477,6 +8581,7 @@ export {
   NoSuchToolError,
   output_exports as Output,
   RetryError,
+  TextStreamChatTransport,
   ToolCallRepairError,
   ToolExecutionError,
   TypeValidationError,
@@ -8522,7 +8627,6 @@ export {
   isAssistantMessageWithCompletedToolCalls,
   isDeepEqualData,
   jsonSchema2 as jsonSchema,
-  maxSteps,
   modelMessageSchema,
   parsePartialJson,
   pipeTextStreamToResponse,
@@ -8531,6 +8635,7 @@ export {
   simulateReadableStream,
   simulateStreamingMiddleware,
   smoothStream,
+  stepCountIs,
   streamObject,
   streamText,
   systemModelMessageSchema,