ai 3.3.23 → 3.3.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/dist/index.js +26 -17
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +26 -17
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
package/dist/index.js
CHANGED
```diff
@@ -2083,6 +2083,12 @@ var DelayedPromise = class {
   }
 };
 
+// core/util/now.ts
+function now() {
+  var _a11, _b;
+  return (_b = (_a11 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a11.now()) != null ? _b : Date.now();
+}
+
 // core/generate-object/stream-object.ts
 async function streamObject({
   model,
```
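The new `now()` helper is the heart of this patch: it prefers the high-resolution `performance.now()` clock when the runtime exposes a `performance` global and falls back to `Date.now()` otherwise. The diff only contains the bundled output, so the following is a reconstruction of what `core/util/now.ts` plausibly looks like in source form, not the published file:

```ts
// core/util/now.ts (reconstructed from the bundled output above, not part of this diff)
// Prefer the monotonic high-resolution clock when the runtime provides one;
// fall back to Date.now() in environments without a performance global.
export function now(): number {
  return globalThis?.performance?.now() ?? Date.now();
}
```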
```diff
@@ -2244,7 +2250,7 @@ async function streamObject({
       const {
         result: { stream, warnings, rawResponse },
         doStreamSpan,
-
+        startTimestampMs
       } = await retry(
         () => recordSpan({
           name: "ai.streamObject.doStream",
@@ -2274,8 +2280,7 @@ async function streamObject({
           tracer,
           endWhenDone: false,
           fn: async (doStreamSpan2) => ({
-
-            // get before the call
+            startTimestampMs: now(),
             doStreamSpan: doStreamSpan2,
             result: await model.doStream(callOptions)
           })
@@ -2290,7 +2295,7 @@ async function streamObject({
         rootSpan,
         doStreamSpan,
         telemetry,
-
+        startTimestampMs
       });
     }
   });
@@ -2305,7 +2310,7 @@ var DefaultStreamObjectResult = class {
     rootSpan,
     doStreamSpan,
     telemetry,
-
+    startTimestampMs
   }) {
     this.warnings = warnings;
     this.rawResponse = rawResponse;
@@ -2334,7 +2339,7 @@ var DefaultStreamObjectResult = class {
       new TransformStream({
         async transform(chunk, controller) {
           if (isFirstChunk) {
-            const msToFirstChunk =
+            const msToFirstChunk = now() - startTimestampMs;
             isFirstChunk = false;
             doStreamSpan.addEvent("ai.stream.firstChunk", {
               "ai.stream.msToFirstChunk": msToFirstChunk
```
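These `streamObject` hunks thread a `startTimestampMs`, captured via `now()` immediately before `model.doStream()`, into the result's `TransformStream`, where the elapsed time to the first streamed chunk is recorded as the `ai.stream.firstChunk` span event. The sketch below isolates that measurement pattern in standalone form; `measureFirstChunk`, `onFirstChunk`, and `providerStream` are illustrative names, not SDK APIs. The remaining hunks apply the same change to `streamText`.

```ts
// Illustrative only: a standalone version of the first-chunk timing pattern.
function now(): number {
  return globalThis?.performance?.now() ?? Date.now();
}

function measureFirstChunk<T>(
  startTimestampMs: number,
  onFirstChunk: (msToFirstChunk: number) => void
): TransformStream<T, T> {
  let isFirstChunk = true;
  return new TransformStream<T, T>({
    transform(chunk, controller) {
      if (isFirstChunk) {
        isFirstChunk = false;
        // In the SDK this value becomes the "ai.stream.msToFirstChunk"
        // attribute of the "ai.stream.firstChunk" span event.
        onFirstChunk(now() - startTimestampMs);
      }
      controller.enqueue(chunk);
    }
  });
}

// Usage sketch: capture the timestamp before calling the provider,
// then pipe the provider stream through the measuring transform.
// const startTimestampMs = now();
// const measured = providerStream.pipeThrough(
//   measureFirstChunk(startTimestampMs, (ms) => console.log("msToFirstChunk", ms))
// );
```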
```diff
@@ -3392,7 +3397,7 @@ async function streamText({
     const {
       result: { stream: stream2, warnings: warnings2, rawResponse: rawResponse2 },
       doStreamSpan: doStreamSpan2,
-
+      startTimestampMs: startTimestamp2
     } = await retry(
       () => recordSpan({
         name: "ai.streamText.doStream",
@@ -3421,7 +3426,7 @@ async function streamText({
         tracer,
         endWhenDone: false,
         fn: async (doStreamSpan3) => ({
-
+          startTimestampMs: now(),
           // get before the call
           doStreamSpan: doStreamSpan3,
           result: await model.doStream({
@@ -3451,7 +3456,7 @@ async function streamText({
         rawResponse: rawResponse2
       },
       doStreamSpan: doStreamSpan2,
-
+      startTimestampMs: startTimestamp2
     };
   };
   const promptMessages = await convertToLanguageModelPrompt({
@@ -3461,7 +3466,7 @@ async function streamText({
   const {
     result: { stream, warnings, rawResponse },
     doStreamSpan,
-
+    startTimestampMs
   } = await startRoundtrip({
     promptType: validatePrompt({ system, prompt, messages }).type,
     promptMessages
@@ -3475,7 +3480,7 @@ async function streamText({
     rootSpan,
     doStreamSpan,
     telemetry,
-
+    startTimestampMs,
     maxToolRoundtrips,
     startRoundtrip,
     promptMessages
@@ -3493,7 +3498,7 @@ var DefaultStreamTextResult = class {
    rootSpan,
    doStreamSpan,
    telemetry,
-
+    startTimestampMs,
    maxToolRoundtrips,
    startRoundtrip,
    promptMessages
@@ -3524,7 +3529,7 @@ var DefaultStreamTextResult = class {
     const self = this;
     function addRoundtripStream({
       stream: stream2,
-      startTimestamp
+      startTimestamp,
       doStreamSpan: doStreamSpan2,
       currentToolRoundtrip,
       promptMessages: promptMessages2,
@@ -3551,7 +3556,7 @@ var DefaultStreamTextResult = class {
       new TransformStream({
         async transform(chunk, controller) {
           if (roundtripFirstChunk) {
-            const msToFirstChunk =
+            const msToFirstChunk = now() - startTimestamp;
             roundtripFirstChunk = false;
             doStreamSpan2.addEvent("ai.stream.firstChunk", {
               "ai.stream.msToFirstChunk": msToFirstChunk
@@ -3655,7 +3660,11 @@ var DefaultStreamTextResult = class {
             (message) => convertToLanguageModelMessage(message, null)
           )
         );
-        const {
+        const {
+          result,
+          doStreamSpan: doStreamSpan3,
+          startTimestampMs: startTimestamp2
+        } = await startRoundtrip({
           promptType: "messages",
           promptMessages: promptMessages2
         });
@@ -3663,7 +3672,7 @@ var DefaultStreamTextResult = class {
         self.rawResponse = result.rawResponse;
         addRoundtripStream({
           stream: result.stream,
-          startTimestamp:
+          startTimestamp: startTimestamp2,
           doStreamSpan: doStreamSpan3,
           currentToolRoundtrip: currentToolRoundtrip + 1,
           promptMessages: promptMessages2,
@@ -3726,7 +3735,7 @@ var DefaultStreamTextResult = class {
     }
     addRoundtripStream({
       stream,
-      startTimestamp,
+      startTimestamp: startTimestampMs,
       doStreamSpan,
       currentToolRoundtrip: 0,
       promptMessages,
```
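For downstream users, the `ai.stream.firstChunk` event (with its `ai.stream.msToFirstChunk` attribute) lands on the `ai.streamText.doStream` and `ai.streamObject.doStream` spans, which are only emitted when experimental telemetry is enabled. A minimal usage sketch, assuming `@ai-sdk/openai` as the provider and the standard OpenTelemetry Node tracing packages (none of which are part of this diff):

```ts
import { NodeTracerProvider } from "@opentelemetry/sdk-trace-node";
import { ConsoleSpanExporter, SimpleSpanProcessor } from "@opentelemetry/sdk-trace-base";
import { openai } from "@ai-sdk/openai";
import { streamText } from "ai";

// Register a tracer so the ai.streamText.* spans (and their
// ai.stream.firstChunk events) are exported somewhere visible.
const provider = new NodeTracerProvider();
provider.addSpanProcessor(new SimpleSpanProcessor(new ConsoleSpanExporter()));
provider.register();

const result = await streamText({
  model: openai("gpt-4o-mini"), // assumed provider/model for this sketch
  prompt: "Write a haiku about latency.",
  experimental_telemetry: { isEnabled: true },
});

for await (const textPart of result.textStream) {
  process.stdout.write(textPart);
}
```

With a tracer registered, runtimes without a `performance` global (the case this release addresses) still report first-chunk latency, just at `Date.now()` millisecond resolution.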