ai 3.3.20 → 3.3.22
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +18 -0
- package/dist/index.d.mts +49 -21
- package/dist/index.d.ts +49 -21
- package/dist/index.js +401 -210
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +401 -208
- package/dist/index.mjs.map +1 -1
- package/package.json +8 -8
package/dist/index.js
CHANGED
@@ -73,17 +73,15 @@ __export(streams_exports, {
   experimental_createProviderRegistry: () => experimental_createProviderRegistry,
   experimental_customProvider: () => experimental_customProvider,
   experimental_generateObject: () => experimental_generateObject,
-  experimental_generateText: () => experimental_generateText,
   experimental_streamObject: () => experimental_streamObject,
-  experimental_streamText: () => experimental_streamText,
   formatStreamPart: () => import_ui_utils10.formatStreamPart,
   generateId: () => generateId2,
   generateObject: () => generateObject,
   generateText: () => generateText,
   jsonSchema: () => import_ui_utils6.jsonSchema,
   nanoid: () => nanoid,
-  parseComplexResponse: () => import_ui_utils10.parseComplexResponse,
   parseStreamPart: () => import_ui_utils10.parseStreamPart,
+  processDataProtocolResponse: () => import_ui_utils10.processDataProtocolResponse,
   readDataStream: () => import_ui_utils10.readDataStream,
   readableFromAsyncIterable: () => readableFromAsyncIterable,
   streamObject: () => streamObject,
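The hunk above removes the long-deprecated `experimental_generateText` and `experimental_streamText` aliases and swaps the `parseComplexResponse` re-export for `processDataProtocolResponse` from `@ai-sdk/ui-utils`. Because the removed aliases were plain re-assignments (see the `var experimental_streamText = streamText;` removal further down), migration is a rename. A minimal sketch, assuming the `@ai-sdk/openai` provider package and a hypothetical model id:

```ts
import { openai } from '@ai-sdk/openai'; // assumed provider package
import { generateText, streamText } from 'ai';

const model = openai('gpt-4o-mini'); // hypothetical model id

// before (<= 3.3.20): await experimental_generateText({ model, prompt })
// after: same call signature under the stable names
const { text } = await generateText({ model, prompt: 'Hello!' });
const { textStream } = await streamText({ model, prompt: 'Hello!' });
```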
@@ -2551,6 +2549,31 @@ function prepareToolsAndToolChoice({
   };
 }
 
+// core/generate-text/to-response-messages.ts
+function toResponseMessages({
+  text = "",
+  toolCalls,
+  toolResults
+}) {
+  const responseMessages = [];
+  responseMessages.push({
+    role: "assistant",
+    content: [{ type: "text", text }, ...toolCalls]
+  });
+  if (toolResults.length > 0) {
+    responseMessages.push({
+      role: "tool",
+      content: toolResults.map((result) => ({
+        type: "tool-result",
+        toolCallId: result.toolCallId,
+        toolName: result.toolName,
+        result: result.result
+      }))
+    });
+  }
+  return responseMessages;
+}
+
 // core/generate-text/tool-call.ts
 var import_provider_utils7 = require("@ai-sdk/provider-utils");
 var import_ui_utils4 = require("@ai-sdk/ui-utils");
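`toResponseMessages`, split into its own module above, converts a single model roundtrip into chat messages: one assistant message carrying the text plus any tool calls, and, when results exist, one tool message carrying the tool results. A sketch with made-up values, assuming the function above is in scope (it is internal to the bundle, not exported):

```ts
// Hypothetical inputs; the shapes mirror what the bundle passes in.
const messages = toResponseMessages({
  text: 'Checking the weather...',
  toolCalls: [{
    type: 'tool-call',
    toolCallId: 'call-1',
    toolName: 'weather',
    args: { city: 'Berlin' },
  }],
  toolResults: [{ toolCallId: 'call-1', toolName: 'weather', result: { tempC: 21 } }],
});
// messages[0] -> { role: 'assistant', content: [{ type: 'text', text: '...' }, ...toolCalls] }
// messages[1] -> { role: 'tool', content: [{ type: 'tool-result', toolCallId: 'call-1', ... }] }
```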
@@ -2719,7 +2742,7 @@ async function generateText({
         }),
         tracer,
         fn: async (span) => {
-          var _a12, _b, _c
+          var _a12, _b, _c;
           const retry = retryWithExponentialBackoff({ maxRetries });
           const validatedPrompt = validatePrompt({
             system,
@@ -2830,7 +2853,7 @@ async function generateText({
             logprobs: currentModelResponse.logprobs
           });
           const newResponseMessages = toResponseMessages({
-            text:
+            text: currentModelResponse.text,
             toolCalls: currentToolCalls,
             toolResults: currentToolResults
           });
@@ -2866,7 +2889,7 @@ async function generateText({
           // Always return a string so that the caller doesn't have to check for undefined.
           // If they need to check if the model did not return any text,
           // they can check the length of the string:
-          text: (
+          text: (_c = currentModelResponse.text) != null ? _c : "",
           toolCalls: currentToolCalls,
           toolResults: currentToolResults,
           finishReason: currentModelResponse.finishReason,
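The inline comment spells out the contract: `generateText` always resolves `text` to a string, with `""` standing in for a missing completion (the `?? ""` fallback compiled to `(_c = ...) != null ? _c : ""` above). A consumer sketch, assuming the `@ai-sdk/openai` provider:

```ts
import { openai } from '@ai-sdk/openai'; // assumed provider package
import { generateText } from 'ai';

const result = await generateText({
  model: openai('gpt-4o-mini'), // hypothetical model id
  prompt: 'Answer with a tool call if one is needed.',
});

// No undefined check needed: "did the model emit text?" is a length check.
if (result.text.length === 0) {
  console.log('no text returned; inspect result.toolCalls instead');
}
```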
@@ -2955,30 +2978,68 @@ var DefaultGenerateTextResult = class {
     this.experimental_providerMetadata = options.providerMetadata;
   }
 };
- … (22 removed lines not captured by the diff viewer)
+
+// core/util/create-stitchable-stream.ts
+function createStitchableStream() {
+  let innerStreamReaders = [];
+  let controller = null;
+  let isClosed = false;
+  const processPull = async () => {
+    if (isClosed && innerStreamReaders.length === 0) {
+      controller == null ? void 0 : controller.close();
+      return;
+    }
+    if (innerStreamReaders.length === 0) {
+      return;
+    }
+    try {
+      const { value, done } = await innerStreamReaders[0].read();
+      if (done) {
+        innerStreamReaders.shift();
+        if (innerStreamReaders.length > 0) {
+          await processPull();
+        } else if (isClosed) {
+          controller == null ? void 0 : controller.close();
+        }
+      } else {
+        controller == null ? void 0 : controller.enqueue(value);
+      }
+    } catch (error) {
+      controller == null ? void 0 : controller.error(error);
+      innerStreamReaders.shift();
+      if (isClosed && innerStreamReaders.length === 0) {
+        controller == null ? void 0 : controller.close();
+      }
+    }
+  };
+  return {
+    stream: new ReadableStream({
+      start(controllerParam) {
+        controller = controllerParam;
+      },
+      pull: processPull,
+      async cancel() {
+        for (const reader of innerStreamReaders) {
+          await reader.cancel();
+        }
+        innerStreamReaders = [];
+        isClosed = true;
+      }
+    }),
+    addStream: (innerStream) => {
+      if (isClosed) {
+        throw new Error("Cannot add inner stream: outer stream is closed");
+      }
+      innerStreamReaders.push(innerStream.getReader());
+    },
+    close: () => {
+      isClosed = true;
+      if (innerStreamReaders.length === 0) {
+        controller == null ? void 0 : controller.close();
+      }
+    }
+  };
 }
-var experimental_generateText = generateText;
 
 // core/util/merge-streams.ts
 function mergeStreams(stream1, stream2) {
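`createStitchableStream` is the new internal primitive behind multi-roundtrip streaming: consumers hold one outer `ReadableStream` while the SDK appends one inner stream per roundtrip; inner streams are drained strictly in insertion order, and `close()` ends the outer stream only after the queue empties. A minimal sketch of that contract, assuming the function above is in scope (it is not exported from `ai`):

```ts
const { stream, addStream, close } = createStitchableStream();

addStream(new ReadableStream({
  start(c) { c.enqueue('roundtrip 1, '); c.close(); },
}));
addStream(new ReadableStream({
  start(c) { c.enqueue('roundtrip 2'); c.close(); },
}));
close(); // no more inner streams; the outer stream ends once both drain

const reader = stream.getReader();
for (let r = await reader.read(); !r.done; r = await reader.read()) {
  process.stdout.write(r.value); // "roundtrip 1, roundtrip 2"
}
```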
@@ -3267,6 +3328,7 @@ async function streamText({
   maxRetries,
   abortSignal,
   headers,
+  maxToolRoundtrips = 0,
   experimental_telemetry: telemetry,
   experimental_toolCallStreaming: toolCallStreaming = false,
   onChunk,
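This one-line addition is the headline feature of the release: `streamText` now accepts `maxToolRoundtrips` (default `0`, preserving the old single-shot behavior). When every tool call in a roundtrip has a result and the limit has not been reached, the SDK feeds the results back to the model and keeps streaming. A usage sketch, assuming the `@ai-sdk/openai` provider and a made-up `weather` tool:

```ts
import { openai } from '@ai-sdk/openai'; // assumed provider package
import { streamText, tool } from 'ai';
import { z } from 'zod';

const result = await streamText({
  model: openai('gpt-4o-mini'), // hypothetical model id
  maxToolRoundtrips: 2, // allow up to two tool-result -> model continuations
  tools: {
    weather: tool({
      description: 'Get the weather for a city',
      parameters: z.object({ city: z.string() }),
      execute: async ({ city }) => ({ city, tempC: 21 }), // stubbed result
    }),
  },
  prompt: 'What is the weather in Berlin?',
});

// The text stream now spans all roundtrips, stitched into one stream.
for await (const delta of result.textStream) {
  process.stdout.write(delta);
}
```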
@@ -3298,68 +3360,89 @@ async function streamText({
       endWhenDone: false,
       fn: async (rootSpan) => {
         const retry = retryWithExponentialBackoff({ maxRetries });
-        const
+        const startRoundtrip = async ({
+          promptMessages: promptMessages2,
+          promptType
+        }) => {
+          const {
+            result: { stream: stream2, warnings: warnings2, rawResponse: rawResponse2 },
+            doStreamSpan: doStreamSpan2,
+            startTimestamp: startTimestamp2
+          } = await retry(
+            () => recordSpan({
+              name: "ai.streamText.doStream",
+              attributes: selectTelemetryAttributes({
+                telemetry,
+                attributes: {
+                  ...assembleOperationName({
+                    operationId: "ai.streamText.doStream",
+                    telemetry
+                  }),
+                  ...baseTelemetryAttributes,
+                  "ai.prompt.format": {
+                    input: () => promptType
+                  },
+                  "ai.prompt.messages": {
+                    input: () => JSON.stringify(promptMessages2)
+                  },
+                  // standardized gen-ai llm span attributes:
+                  "gen_ai.request.model": model.modelId,
+                  "gen_ai.system": model.provider,
+                  "gen_ai.request.max_tokens": settings.maxTokens,
+                  "gen_ai.request.temperature": settings.temperature,
+                  "gen_ai.request.top_p": settings.topP
+                }
+              }),
+              tracer,
+              endWhenDone: false,
+              fn: async (doStreamSpan3) => ({
+                startTimestamp: performance.now(),
+                // get before the call
+                doStreamSpan: doStreamSpan3,
+                result: await model.doStream({
+                  mode: {
+                    type: "regular",
+                    ...prepareToolsAndToolChoice({ tools, toolChoice })
+                  },
+                  ...prepareCallSettings(settings),
+                  inputFormat: promptType,
+                  prompt: promptMessages2,
+                  abortSignal,
+                  headers
+                })
+              })
+            })
+          );
+          return {
+            result: {
+              stream: runToolsTransformation({
+                tools,
+                generatorStream: stream2,
+                toolCallStreaming,
+                tracer,
+                telemetry
+              }),
+              warnings: warnings2,
+              rawResponse: rawResponse2
+            },
+            doStreamSpan: doStreamSpan2,
+            startTimestamp: startTimestamp2
+          };
+        };
         const promptMessages = await convertToLanguageModelPrompt({
-          prompt:
+          prompt: validatePrompt({ system, prompt, messages }),
           modelSupportsImageUrls: model.supportsImageUrls
         });
         const {
           result: { stream, warnings, rawResponse },
           doStreamSpan,
           startTimestamp
-        } = await
-        (
- … (2 removed lines not captured by the diff viewer)
-            telemetry,
-            attributes: {
-              ...assembleOperationName({
-                operationId: "ai.streamText.doStream",
-                telemetry
-              }),
-              ...baseTelemetryAttributes,
-              "ai.prompt.format": {
-                input: () => validatedPrompt.type
-              },
-              "ai.prompt.messages": {
-                input: () => JSON.stringify(promptMessages)
-              },
-              // standardized gen-ai llm span attributes:
-              "gen_ai.request.model": model.modelId,
-              "gen_ai.system": model.provider,
-              "gen_ai.request.max_tokens": settings.maxTokens,
-              "gen_ai.request.temperature": settings.temperature,
-              "gen_ai.request.top_p": settings.topP
-            }
-          }),
-          tracer,
-          endWhenDone: false,
-          fn: async (doStreamSpan2) => ({
-            startTimestamp: performance.now(),
-            // get before the call
-            doStreamSpan: doStreamSpan2,
-            result: await model.doStream({
-              mode: {
-                type: "regular",
-                ...prepareToolsAndToolChoice({ tools, toolChoice })
-              },
-              ...prepareCallSettings(settings),
-              inputFormat: validatedPrompt.type,
-              prompt: promptMessages,
-              abortSignal,
-              headers
-            })
-          })
-        })
-        );
+        } = await startRoundtrip({
+          promptType: validatePrompt({ system, prompt, messages }).type,
+          promptMessages
+        });
         return new DefaultStreamTextResult({
-          stream
-          tools,
-          generatorStream: stream,
-          toolCallStreaming,
-          tracer,
-          telemetry
-        }),
+          stream,
           warnings,
           rawResponse,
           onChunk,
@@ -3367,7 +3450,10 @@ async function streamText({
           rootSpan,
           doStreamSpan,
           telemetry,
-          startTimestamp
+          startTimestamp,
+          maxToolRoundtrips,
+          startRoundtrip,
+          promptMessages
         });
       }
     });
@@ -3382,7 +3468,10 @@ var DefaultStreamTextResult = class {
     rootSpan,
     doStreamSpan,
     telemetry,
-    startTimestamp
+    startTimestamp,
+    maxToolRoundtrips,
+    startRoundtrip,
+    promptMessages
   }) {
     this.warnings = warnings;
     this.rawResponse = rawResponse;
@@ -3401,129 +3490,223 @@ var DefaultStreamTextResult = class {
       promise: providerMetadataPromise
     } = createResolvablePromise();
     this.experimental_providerMetadata = providerMetadataPromise;
- … (76 removed lines not captured by the diff viewer)
-            "ai.usage.promptTokens": finalUsage.promptTokens,
-            "ai.usage.completionTokens": finalUsage.completionTokens,
-            "ai.result.text": { output: () => text },
-            "ai.result.toolCalls": { output: () => telemetryToolCalls },
-            // standardized gen-ai llm span attributes:
-            "gen_ai.response.finish_reasons": [finalFinishReason],
-            "gen_ai.usage.prompt_tokens": finalUsage.promptTokens,
-            "gen_ai.usage.completion_tokens": finalUsage.completionTokens
+    const {
+      stream: stitchableStream,
+      addStream,
+      close: closeStitchableStream
+    } = createStitchableStream();
+    this.originalStream = stitchableStream;
+    const self = this;
+    function addRoundtripStream({
+      stream: stream2,
+      startTimestamp: startTimestamp2,
+      doStreamSpan: doStreamSpan2,
+      currentToolRoundtrip,
+      promptMessages: promptMessages2,
+      usage = {
+        promptTokens: 0,
+        completionTokens: 0,
+        totalTokens: 0
+      }
+    }) {
+      const roundtripToolCalls = [];
+      const roundtripToolResults = [];
+      let roundtripFinishReason = "unknown";
+      let roundtripUsage = {
+        promptTokens: 0,
+        completionTokens: 0,
+        totalTokens: 0
+      };
+      let roundtripProviderMetadata;
+      let roundtripFirstChunk = true;
+      let roundtripText = "";
+      let roundtripLogProbs;
+      addStream(
+        stream2.pipeThrough(
+          new TransformStream({
+            async transform(chunk, controller) {
+              if (roundtripFirstChunk) {
+                const msToFirstChunk = performance.now() - startTimestamp2;
+                roundtripFirstChunk = false;
+                doStreamSpan2.addEvent("ai.stream.firstChunk", {
+                  "ai.stream.msToFirstChunk": msToFirstChunk
+                });
+                doStreamSpan2.setAttributes({
+                  "ai.stream.msToFirstChunk": msToFirstChunk
+                });
+              }
+              if (chunk.type === "text-delta" && chunk.textDelta.length === 0) {
+                return;
+              }
+              const chunkType = chunk.type;
+              switch (chunkType) {
+                case "text-delta":
+                  controller.enqueue(chunk);
+                  roundtripText += chunk.textDelta;
+                  await (onChunk == null ? void 0 : onChunk({ chunk }));
+                  break;
+                case "tool-call":
+                  controller.enqueue(chunk);
+                  roundtripToolCalls.push(chunk);
+                  await (onChunk == null ? void 0 : onChunk({ chunk }));
+                  break;
+                case "tool-result":
+                  controller.enqueue(chunk);
+                  roundtripToolResults.push(chunk);
+                  await (onChunk == null ? void 0 : onChunk({ chunk }));
+                  break;
+                case "finish":
+                  roundtripUsage = chunk.usage;
+                  roundtripFinishReason = chunk.finishReason;
+                  roundtripProviderMetadata = chunk.experimental_providerMetadata;
+                  roundtripLogProbs = chunk.logprobs;
+                  break;
+                case "tool-call-streaming-start":
+                case "tool-call-delta": {
+                  controller.enqueue(chunk);
+                  await (onChunk == null ? void 0 : onChunk({ chunk }));
+                  break;
                 }
- … (7 removed lines not captured by the diff viewer)
-                "ai.finishReason": finalFinishReason,
-                "ai.usage.promptTokens": finalUsage.promptTokens,
-                "ai.usage.completionTokens": finalUsage.completionTokens,
-                "ai.result.text": { output: () => text },
-                "ai.result.toolCalls": { output: () => telemetryToolCalls }
+                case "error":
+                  controller.enqueue(chunk);
+                  roundtripFinishReason = "error";
+                  break;
+                default: {
+                  const exhaustiveCheck = chunkType;
+                  throw new Error(`Unknown chunk type: ${exhaustiveCheck}`);
                 }
-              }
- … (24 removed lines not captured by the diff viewer)
+              }
+            },
+            // invoke onFinish callback and resolve toolResults promise when the stream is about to close:
+            async flush(controller) {
+              controller.enqueue({
+                type: "roundtrip-finish",
+                finishReason: roundtripFinishReason,
+                usage: roundtripUsage,
+                experimental_providerMetadata: roundtripProviderMetadata,
+                logprobs: roundtripLogProbs
+              });
+              const telemetryToolCalls = roundtripToolCalls.length > 0 ? JSON.stringify(roundtripToolCalls) : void 0;
+              try {
+                doStreamSpan2.setAttributes(
+                  selectTelemetryAttributes({
+                    telemetry,
+                    attributes: {
+                      "ai.finishReason": roundtripFinishReason,
+                      "ai.usage.promptTokens": roundtripUsage.promptTokens,
+                      "ai.usage.completionTokens": roundtripUsage.completionTokens,
+                      "ai.result.text": { output: () => roundtripText },
+                      "ai.result.toolCalls": {
+                        output: () => telemetryToolCalls
+                      },
+                      // standardized gen-ai llm span attributes:
+                      "gen_ai.response.finish_reasons": [roundtripFinishReason],
+                      "gen_ai.usage.prompt_tokens": roundtripUsage.promptTokens,
+                      "gen_ai.usage.completion_tokens": roundtripUsage.completionTokens
+                    }
+                  })
+                );
+              } catch (error) {
+              } finally {
+                doStreamSpan2.end();
+              }
+              const combinedUsage = {
+                promptTokens: usage.promptTokens + roundtripUsage.promptTokens,
+                completionTokens: usage.completionTokens + roundtripUsage.completionTokens,
+                totalTokens: usage.totalTokens + roundtripUsage.totalTokens
+              };
+              if (
+                // there are tool calls:
+                roundtripToolCalls.length > 0 && // all current tool calls have results:
+                roundtripToolResults.length === roundtripToolCalls.length && // the number of roundtrips is less than the maximum:
+                currentToolRoundtrip < maxToolRoundtrips
+              ) {
+                promptMessages2.push(
+                  ...toResponseMessages({
+                    text: roundtripText,
+                    toolCalls: roundtripToolCalls,
+                    toolResults: roundtripToolResults
+                  }).map(
+                    (message) => convertToLanguageModelMessage(message, null)
+                  )
+                );
+                const { result, doStreamSpan: doStreamSpan3, startTimestamp: startTimestamp3 } = await startRoundtrip({
+                  promptType: "messages",
+                  promptMessages: promptMessages2
+                });
+                self.warnings = result.warnings;
+                self.rawResponse = result.rawResponse;
+                addRoundtripStream({
+                  stream: result.stream,
+                  startTimestamp: startTimestamp3,
+                  doStreamSpan: doStreamSpan3,
+                  currentToolRoundtrip: currentToolRoundtrip + 1,
+                  promptMessages: promptMessages2,
+                  usage: combinedUsage
+                });
+                return;
+              }
+              try {
+                controller.enqueue({
+                  type: "finish",
+                  finishReason: roundtripFinishReason,
+                  usage: combinedUsage,
+                  experimental_providerMetadata: roundtripProviderMetadata,
+                  logprobs: roundtripLogProbs
+                });
+                closeStitchableStream();
+                rootSpan.setAttributes(
+                  selectTelemetryAttributes({
+                    telemetry,
+                    attributes: {
+                      "ai.finishReason": roundtripFinishReason,
+                      "ai.usage.promptTokens": combinedUsage.promptTokens,
+                      "ai.usage.completionTokens": combinedUsage.completionTokens,
+                      "ai.result.text": { output: () => roundtripText },
+                      "ai.result.toolCalls": {
+                        output: () => telemetryToolCalls
+                      }
+                    }
+                  })
+                );
+                resolveUsage(combinedUsage);
+                resolveFinishReason(roundtripFinishReason);
+                resolveText(roundtripText);
+                resolveToolCalls(roundtripToolCalls);
+                resolveProviderMetadata(roundtripProviderMetadata);
+                resolveToolResults(roundtripToolResults);
+                await (onFinish == null ? void 0 : onFinish({
+                  finishReason: roundtripFinishReason,
+                  usage: combinedUsage,
+                  text: roundtripText,
+                  toolCalls: roundtripToolCalls,
+                  // The tool results are inferred as a never[] type, because they are
+                  // optional and the execute method with an inferred result type is
+                  // optional as well. Therefore we need to cast the toolResults to any.
+                  // The type exposed to the users will be correctly inferred.
+                  toolResults: roundtripToolResults,
+                  rawResponse,
+                  warnings,
+                  experimental_providerMetadata: roundtripProviderMetadata
+                }));
+              } catch (error) {
+                controller.error(error);
+              } finally {
+                rootSpan.end();
+              }
+            }
+          })
+        )
+      );
+    }
+    addRoundtripStream({
+      stream,
+      startTimestamp,
+      doStreamSpan,
+      currentToolRoundtrip: 0,
+      promptMessages,
+      usage: void 0
+    });
   }
   /**
   Split out a new stream from the original stream.
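A user-visible consequence of `addRoundtripStream`: promises such as `result.usage` and the `onFinish` callback now settle once, after the final roundtrip, with token usage summed across roundtrips (`combinedUsage`), while `text`, `toolCalls`, `toolResults`, and `finishReason` come from the last roundtrip. A consumer sketch, with provider, model id, and tools as in the earlier sketches:

```ts
import { openai } from '@ai-sdk/openai'; // assumed provider package
import { streamText } from 'ai';

const result = await streamText({
  model: openai('gpt-4o-mini'), // hypothetical model id
  maxToolRoundtrips: 2,
  // tools: { weather } as sketched earlier
  prompt: 'What is the weather in Berlin?',
  onFinish({ finishReason, usage, text, toolCalls, toolResults }) {
    // usage is summed over all roundtrips (combinedUsage above);
    // the other fields reflect the final roundtrip.
    console.log(finishReason, usage.totalTokens);
  },
});
```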
@@ -3633,6 +3816,17 @@ var DefaultStreamTextResult = class {
           (0, import_ui_utils10.formatStreamPart)("error", getErrorMessage4(chunk.error))
         );
         break;
+      case "roundtrip-finish":
+        controller.enqueue(
+          (0, import_ui_utils10.formatStreamPart)("finish_roundtrip", {
+            finishReason: chunk.finishReason,
+            usage: {
+              promptTokens: chunk.usage.promptTokens,
+              completionTokens: chunk.usage.completionTokens
+            }
+          })
+        );
+        break;
       case "finish":
         controller.enqueue(
           (0, import_ui_utils10.formatStreamPart)("finish_message", {
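For consumers of the data stream protocol, each roundtrip now terminates with its own `finish_roundtrip` part ahead of the final `finish_message`. A sketch of the payload; `formatStreamPart` is re-exported from `ai`, and the exact wire prefix (shown here as `e:`, an assumption) is defined by `@ai-sdk/ui-utils`:

```ts
import { formatStreamPart } from 'ai';

// Illustrative values; the shape matches the enqueue in the hunk above.
const part = formatStreamPart('finish_roundtrip', {
  finishReason: 'tool-calls',
  usage: { promptTokens: 120, completionTokens: 45 },
});
// `part` is one line of the streaming response, e.g.:
// e:{"finishReason":"tool-calls","usage":{"promptTokens":120,"completionTokens":45}}
```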
@@ -3734,7 +3928,6 @@ var DefaultStreamTextResult = class {
     });
   }
 };
-var experimental_streamText = streamText;
 
 // core/prompt/attachments-to-parts.ts
 function attachmentsToParts(attachments) {
@@ -5164,17 +5357,15 @@ var nanoid = import_provider_utils8.generateId;
   experimental_createProviderRegistry,
   experimental_customProvider,
   experimental_generateObject,
-  experimental_generateText,
   experimental_streamObject,
-  experimental_streamText,
   formatStreamPart,
   generateId,
   generateObject,
   generateText,
   jsonSchema,
   nanoid,
-  parseComplexResponse,
   parseStreamPart,
+  processDataProtocolResponse,
   readDataStream,
   readableFromAsyncIterable,
   streamObject,