ai 3.3.23 → 3.3.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.js +26 -17
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +26 -17
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
```diff
@@ -2013,6 +2013,12 @@ var DelayedPromise = class {
   }
 };
 
+// core/util/now.ts
+function now() {
+  var _a11, _b;
+  return (_b = (_a11 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a11.now()) != null ? _b : Date.now();
+}
+
 // core/generate-object/stream-object.ts
 async function streamObject({
   model,
```
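The added helper is easier to read in source form. A minimal sketch of the equivalent TypeScript, reconstructed from the compiled output above (the actual `core/util/now.ts` source is not part of this diff):

```ts
// Sketch only: reconstructed from the compiled output above, not the published
// core/util/now.ts source. Prefers the monotonic, sub-millisecond
// globalThis.performance.now() clock and falls back to Date.now() in
// environments without a performance object.
export function now(): number {
  return globalThis?.performance?.now() ?? Date.now();
}
```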
```diff
@@ -2174,7 +2180,7 @@ async function streamObject({
   const {
     result: { stream, warnings, rawResponse },
     doStreamSpan,
-
+    startTimestampMs
   } = await retry(
     () => recordSpan({
       name: "ai.streamObject.doStream",
@@ -2204,8 +2210,7 @@ async function streamObject({
       tracer,
       endWhenDone: false,
       fn: async (doStreamSpan2) => ({
-
-        // get before the call
+        startTimestampMs: now(),
         doStreamSpan: doStreamSpan2,
         result: await model.doStream(callOptions)
       })
@@ -2220,7 +2225,7 @@ async function streamObject({
     rootSpan,
     doStreamSpan,
     telemetry,
-
+    startTimestampMs
   });
   }
 });
@@ -2235,7 +2240,7 @@ var DefaultStreamObjectResult = class {
     rootSpan,
     doStreamSpan,
     telemetry,
-
+    startTimestampMs
   }) {
     this.warnings = warnings;
     this.rawResponse = rawResponse;
@@ -2264,7 +2269,7 @@ var DefaultStreamObjectResult = class {
       new TransformStream({
         async transform(chunk, controller) {
           if (isFirstChunk) {
-            const msToFirstChunk =
+            const msToFirstChunk = now() - startTimestampMs;
             isFirstChunk = false;
             doStreamSpan.addEvent("ai.stream.firstChunk", {
               "ai.stream.msToFirstChunk": msToFirstChunk
@@ -3322,7 +3327,7 @@ async function streamText({
       const {
         result: { stream: stream2, warnings: warnings2, rawResponse: rawResponse2 },
         doStreamSpan: doStreamSpan2,
-
+        startTimestampMs: startTimestampMs2
       } = await retry(
         () => recordSpan({
           name: "ai.streamText.doStream",
@@ -3351,7 +3356,7 @@ async function streamText({
           tracer,
           endWhenDone: false,
           fn: async (doStreamSpan3) => ({
-
+            startTimestampMs: now(),
             // get before the call
             doStreamSpan: doStreamSpan3,
             result: await model.doStream({
@@ -3381,7 +3386,7 @@ async function streamText({
           rawResponse: rawResponse2
         },
         doStreamSpan: doStreamSpan2,
-
+        startTimestampMs: startTimestampMs2
       };
     };
     const promptMessages = await convertToLanguageModelPrompt({
@@ -3391,7 +3396,7 @@ async function streamText({
     const {
       result: { stream, warnings, rawResponse },
       doStreamSpan,
-
+      startTimestampMs
     } = await startRoundtrip({
       promptType: validatePrompt({ system, prompt, messages }).type,
       promptMessages
@@ -3405,7 +3410,7 @@ async function streamText({
       rootSpan,
       doStreamSpan,
       telemetry,
-
+      startTimestampMs,
       maxToolRoundtrips,
       startRoundtrip,
       promptMessages
@@ -3423,7 +3428,7 @@ var DefaultStreamTextResult = class {
     rootSpan,
     doStreamSpan,
     telemetry,
-
+    startTimestampMs,
     maxToolRoundtrips,
     startRoundtrip,
     promptMessages
@@ -3454,7 +3459,7 @@ var DefaultStreamTextResult = class {
     const self = this;
     function addRoundtripStream({
       stream: stream2,
-      startTimestamp
+      startTimestamp,
       doStreamSpan: doStreamSpan2,
       currentToolRoundtrip,
       promptMessages: promptMessages2,
@@ -3481,7 +3486,7 @@ var DefaultStreamTextResult = class {
       new TransformStream({
         async transform(chunk, controller) {
           if (roundtripFirstChunk) {
-            const msToFirstChunk =
+            const msToFirstChunk = now() - startTimestamp;
             roundtripFirstChunk = false;
             doStreamSpan2.addEvent("ai.stream.firstChunk", {
               "ai.stream.msToFirstChunk": msToFirstChunk
@@ -3585,7 +3590,11 @@ var DefaultStreamTextResult = class {
             (message) => convertToLanguageModelMessage(message, null)
           )
         );
-        const {
+        const {
+          result,
+          doStreamSpan: doStreamSpan3,
+          startTimestampMs: startTimestamp2
+        } = await startRoundtrip({
           promptType: "messages",
           promptMessages: promptMessages2
         });
@@ -3593,7 +3602,7 @@ var DefaultStreamTextResult = class {
         self.rawResponse = result.rawResponse;
         addRoundtripStream({
           stream: result.stream,
-          startTimestamp:
+          startTimestamp: startTimestamp2,
          doStreamSpan: doStreamSpan3,
          currentToolRoundtrip: currentToolRoundtrip + 1,
          promptMessages: promptMessages2,
@@ -3656,7 +3665,7 @@ var DefaultStreamTextResult = class {
     }
     addRoundtripStream({
       stream,
-      startTimestamp,
+      startTimestamp: startTimestampMs,
       doStreamSpan,
       currentToolRoundtrip: 0,
       promptMessages,
```
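Across both streamObject and streamText the change follows one pattern: a timestamp is taken with `now()` immediately before `model.doStream(...)` is invoked, threaded through as `startTimestampMs`, and used in the first TransformStream to compute the `ai.stream.msToFirstChunk` telemetry value. A minimal self-contained sketch of that pattern (helper and parameter names here are illustrative, not taken from the package):

```ts
// Illustrative sketch of the first-chunk timing pattern used in this diff.
// `onFirstChunk` stands in for doStreamSpan.addEvent("ai.stream.firstChunk", ...).
const nowMs = (): number => globalThis?.performance?.now() ?? Date.now();

function firstChunkTimer<T>(
  startTimestampMs: number,
  onFirstChunk: (msToFirstChunk: number) => void,
): TransformStream<T, T> {
  let isFirstChunk = true;
  return new TransformStream<T, T>({
    transform(chunk, controller) {
      if (isFirstChunk) {
        isFirstChunk = false;
        // Elapsed time since the timestamp captured right before the provider call.
        onFirstChunk(nowMs() - startTimestampMs);
      }
      controller.enqueue(chunk);
    },
  });
}

// Usage: capture the start time before the provider call, then pipe the
// provider stream through the timer.
// const startTimestampMs = nowMs();
// const timed = providerStream.pipeThrough(
//   firstChunkTimer(startTimestampMs, (ms) => console.log("msToFirstChunk", ms)),
// );
```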