ai 6.0.0-beta.163 → 6.0.0-beta.165
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +20 -0
- package/dist/index.d.mts +49 -32
- package/dist/index.d.ts +49 -32
- package/dist/index.js +121 -75
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +121 -75
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +1 -1
- package/dist/internal/index.mjs +1 -1
- package/package.json +4 -4
package/dist/index.mjs
CHANGED

@@ -480,6 +480,7 @@ function asLanguageModelV3(model)
     const result = await target.doGenerate(...args);
     return {
       ...result,
+      finishReason: convertV2FinishReasonToV3(result.finishReason),
       usage: convertV2UsageToV3(result.usage)
     };
   };
@@ -505,6 +506,7 @@ function convertV2StreamToV3(stream)
         case "finish":
           controller.enqueue({
             ...chunk,
+            finishReason: convertV2FinishReasonToV3(chunk.finishReason),
             usage: convertV2UsageToV3(chunk.usage)
           });
           break;
@@ -516,6 +518,12 @@ function convertV2StreamToV3(stream)
     })
   );
 }
+function convertV2FinishReasonToV3(finishReason) {
+  return {
+    unified: finishReason === "unknown" ? "other" : finishReason,
+    raw: void 0
+  };
+}
 function convertV2UsageToV3(usage) {
   return {
     inputTokens: {
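The new `convertV2FinishReasonToV3` adapter shows the shape change that drives most of this release: a V3 finish reason is no longer a bare string but an object with a normalized `unified` value and an optional provider-specific `raw` value. A minimal TypeScript sketch of that shape; the union members before "content-filter" are assumed, since this diff only shows the tail of the enum, and the real type lives in `@ai-sdk/provider`:

```ts
// Illustrative only; not the SDK's published type declarations.
type V3FinishReason = {
  unified: "stop" | "length" | "content-filter" | "tool-calls" | "error" | "other";
  raw?: string; // provider-reported reason; undefined for adapted V2 models
};

function convertV2FinishReasonToV3(finishReason: string): V3FinishReason {
  return {
    unified: finishReason === "unknown" ? "other" : (finishReason as V3FinishReason["unified"]),
    raw: undefined,
  };
}
```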
@@ -844,7 +852,7 @@ import {
 } from "@ai-sdk/provider-utils";
 
 // src/version.ts
-var VERSION = true ? "6.0.0-beta.163" : "0.0.0-test";
+var VERSION = true ? "6.0.0-beta.165" : "0.0.0-test";
 
 // src/util/download/download.ts
 var download = async ({ url }) => {
@@ -3209,6 +3217,7 @@ var DefaultStepResult = class {
   constructor({
     content,
     finishReason,
+    rawFinishReason,
     usage,
     warnings,
     request,
@@ -3217,6 +3226,7 @@ var DefaultStepResult = class {
   }) {
     this.content = content;
     this.finishReason = finishReason;
+    this.rawFinishReason = rawFinishReason;
     this.usage = usage;
     this.warnings = warnings;
     this.request = request;
@@ -3634,7 +3644,7 @@ async function generateText({
       await selectTelemetryAttributes({
         telemetry,
         attributes: {
-          "ai.response.finishReason": result.finishReason,
+          "ai.response.finishReason": result.finishReason.unified,
           "ai.response.text": {
             output: () => extractTextContent(result.content)
           },
@@ -3654,7 +3664,9 @@ async function generateText({
           "ai.usage.promptTokens": result.usage.inputTokens.total,
           "ai.usage.completionTokens": result.usage.outputTokens.total,
           // standardized gen-ai llm span attributes:
-          "gen_ai.response.finish_reasons": [result.finishReason],
+          "gen_ai.response.finish_reasons": [
+            result.finishReason.unified
+          ],
           "gen_ai.response.id": responseData.id,
           "gen_ai.response.model": responseData.modelId,
           "gen_ai.usage.input_tokens": result.usage.inputTokens.total,
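Telemetry only records the normalized value: both `ai.response.finishReason` and the standardized `gen_ai.response.finish_reasons` array are now fed from `finishReason.unified`, while the raw provider value stays out of the spans. A sketch of the same mapping outside the SDK, using the OpenTelemetry API directly rather than the SDK's internal `selectTelemetryAttributes` helper:

```ts
import { trace } from "@opentelemetry/api";

// Hypothetical helper mirroring the attribute names in the hunks above.
function recordFinishReason(finishReason: { unified: string; raw?: string }) {
  const span = trace.getActiveSpan();
  span?.setAttribute("ai.response.finishReason", finishReason.unified);
  span?.setAttribute("gen_ai.response.finish_reasons", [finishReason.unified]);
  // finishReason.raw is intentionally not written to the span in this release.
}
```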
@@ -3778,7 +3790,8 @@ async function generateText({
     );
     const currentStepResult = new DefaultStepResult({
       content: stepContent,
-      finishReason: currentModelResponse.finishReason,
+      finishReason: currentModelResponse.finishReason.unified,
+      rawFinishReason: currentModelResponse.finishReason.raw,
       usage: asLanguageModelUsage(currentModelResponse.usage),
       warnings: currentModelResponse.warnings,
       providerMetadata: currentModelResponse.providerMetadata,
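At the step level the object is split back apart: each step keeps the familiar string `finishReason` (the unified value) and gains a separate `rawFinishReason` field for the provider value. A sketch of the split, assuming the field names shown above:

```ts
// Hypothetical helper; DefaultStepResult in the bundle does this inline.
function splitFinishReason(v3: { unified: string; raw?: string }) {
  return {
    finishReason: v3.unified,  // normalized string, as in earlier releases
    rawFinishReason: v3.raw,   // provider-specific string, may be undefined
  };
}
```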
@@ -3807,7 +3820,7 @@ async function generateText({
         await selectTelemetryAttributes({
           telemetry,
           attributes: {
-            "ai.response.finishReason": currentModelResponse.finishReason,
+            "ai.response.finishReason": currentModelResponse.finishReason.unified,
             "ai.response.text": {
               output: () => extractTextContent(currentModelResponse.content)
             },
@@ -3841,6 +3854,7 @@ async function generateText({
     );
     await (onFinish == null ? void 0 : onFinish({
       finishReason: lastStep.finishReason,
+      rawFinishReason: lastStep.rawFinishReason,
       usage: lastStep.usage,
       content: lastStep.content,
       text: lastStep.text,
@@ -3959,6 +3973,9 @@ var DefaultGenerateTextResult = class {
   get finishReason() {
     return this.finalStep.finishReason;
   }
+  get rawFinishReason() {
+    return this.finalStep.rawFinishReason;
+  }
   get warnings() {
     return this.finalStep.warnings;
   }
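Both the `onFinish` callback and the `generateText` result therefore expose the provider value next to the normalized one. A usage sketch, assuming the public result type mirrors the getters above; `LanguageModel` is the SDK's exported model type and the prompt is illustrative:

```ts
import { generateText, type LanguageModel } from "ai";

async function report(model: LanguageModel) {
  const result = await generateText({ model, prompt: "Write a haiku." });
  console.log(result.finishReason);    // normalized value, e.g. "stop", "length", "tool-calls"
  console.log(result.rawFinishReason); // provider-specific value, possibly undefined
}
```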
@@ -4004,68 +4021,73 @@ function asContent({
   toolApprovalRequests,
   tools
 }) {
-  … (removed code not captured in this diff view)
-    }
-  }
-  … (removed code not captured in this diff view)
-        error: part.result,
-        providerExecuted: true,
-        dynamic: part.dynamic
-      };
-    }
-    return {
-      type: "tool-result",
+  const contentParts = [];
+  for (const part of content) {
+    switch (part.type) {
+      case "text":
+      case "reasoning":
+      case "source":
+        contentParts.push(part);
+        break;
+      case "file": {
+        contentParts.push({
+          type: "file",
+          file: new DefaultGeneratedFile(part),
+          ...part.providerMetadata != null ? { providerMetadata: part.providerMetadata } : {}
+        });
+        break;
+      }
+      case "tool-call": {
+        contentParts.push(
+          toolCalls.find((toolCall) => toolCall.toolCallId === part.toolCallId)
+        );
+        break;
+      }
+      case "tool-result": {
+        const toolCall = toolCalls.find(
+          (toolCall2) => toolCall2.toolCallId === part.toolCallId
+        );
+        if (toolCall == null) {
+          const tool2 = tools == null ? void 0 : tools[part.toolName];
+          const supportsDeferredResults = (tool2 == null ? void 0 : tool2.type) === "provider" && tool2.supportsDeferredResults;
+          if (!supportsDeferredResults) {
+            throw new Error(`Tool call ${part.toolCallId} not found.`);
+          }
+          if (part.isError) {
+            contentParts.push({
+              type: "tool-error",
               toolCallId: part.toolCallId,
               toolName: part.toolName,
               input: void 0,
-  …
+              error: part.result,
               providerExecuted: true,
               dynamic: part.dynamic
-      };
-    }
-  …
-  …
-      type: "tool-error",
+            });
+          } else {
+            contentParts.push({
+              type: "tool-result",
               toolCallId: part.toolCallId,
               toolName: part.toolName,
-      input:
-  …
+              input: void 0,
+              output: part.result,
               providerExecuted: true,
-      dynamic:
-      };
+              dynamic: part.dynamic
+            });
           }
-  …
+          break;
+        }
+        if (part.isError) {
+          contentParts.push({
+            type: "tool-error",
+            toolCallId: part.toolCallId,
+            toolName: part.toolName,
+            input: toolCall.input,
+            error: part.result,
+            providerExecuted: true,
+            dynamic: toolCall.dynamic
+          });
+        } else {
+          contentParts.push({
             type: "tool-result",
             toolCallId: part.toolCallId,
             toolName: part.toolName,
@@ -4073,13 +4095,16 @@ function asContent({
             output: part.result,
             providerExecuted: true,
             dynamic: toolCall.dynamic
-          };
+          });
         }
+        break;
       }
-  …
-  …
-  …
-  …
+      case "tool-approval-request": {
+        break;
+      }
+    }
+  }
+  return [...contentParts, ...toolOutputs, ...toolApprovalRequests];
 }
 
 // src/generate-text/stream-text.ts
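The rewritten `asContent` collects parts into an explicit `contentParts` array and adds two behaviors: `tool-approval-request` parts are skipped in the switch (they are appended separately via `toolApprovalRequests`), and a provider-executed tool result without a matching tool call is now tolerated when the tool declares `supportsDeferredResults`. A behavioral sketch of that branch, standalone and not the SDK's internal code, with stand-in types that model only the fields the branch reads:

```ts
type ProviderTool = { type?: string; supportsDeferredResults?: boolean };

function assertDeferredResultAllowed(tool: ProviderTool | undefined, toolCallId: string): void {
  const supportsDeferredResults =
    tool != null && tool.type === "provider" && tool.supportsDeferredResults === true;
  if (!supportsDeferredResults) {
    // same failure mode as before: an unmatched tool result is an error
    throw new Error(`Tool call ${toolCallId} not found.`);
  }
  // otherwise the result is emitted with input: undefined, since no call was recorded
}
```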
@@ -4388,8 +4413,7 @@ var uiMessageChunkSchema = lazySchema(
             "content-filter",
             "tool-calls",
             "error",
-            "other",
-            "unknown"
+            "other"
           ]).optional(),
           messageMetadata: z7.unknown().optional()
         }),
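On the UI message stream, "unknown" is dropped from the finish-reason enum, leaving "other" as the only catch-all. A sketch of the narrowed schema with Zod; the values before "content-filter" are assumed, since the diff only shows the tail of the list:

```ts
import { z } from "zod";

// Assumed full enum; the diff confirms "content-filter" through "other".
const finishReasonSchema = z
  .enum(["stop", "length", "content-filter", "tool-calls", "error", "other"])
  .optional();
```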
@@ -5265,7 +5289,8 @@ function runToolsTransformation({
         case "finish": {
           finishChunk = {
             type: "finish",
-            finishReason: chunk.finishReason,
+            finishReason: chunk.finishReason.unified,
+            rawFinishReason: chunk.finishReason.raw,
             usage: asLanguageModelUsage(chunk.usage),
             providerMetadata: chunk.providerMetadata
           };
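For streaming, `runToolsTransformation` performs the same split when it builds the internal "finish" chunk, so downstream consumers see both fields. The resulting chunk shape implied by the diff, with field types approximated:

```ts
// Approximate shape of the internal "finish" chunk after this change.
type FinishChunk = {
  type: "finish";
  finishReason: string;      // unified value (chunk.finishReason.unified)
  rawFinishReason?: string;  // provider value (chunk.finishReason.raw)
  usage: unknown;            // result of asLanguageModelUsage(chunk.usage)
  providerMetadata?: unknown;
};
```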
@@ -5370,6 +5395,9 @@ function runToolsTransformation({
           }
           break;
         }
+        case "tool-approval-request": {
+          break;
+        }
         default: {
           const _exhaustiveCheck = chunkType;
           throw new Error(`Unhandled chunk type: ${_exhaustiveCheck}`);
@@ -5578,6 +5606,7 @@ var DefaultStreamTextResult = class {
   }) {
     this._totalUsage = new DelayedPromise();
     this._finishReason = new DelayedPromise();
+    this._rawFinishReason = new DelayedPromise();
     this._steps = new DelayedPromise();
     this.outputSpecification = output;
     this.includeRawChunks = includeRawChunks;
@@ -5586,6 +5615,7 @@ var DefaultStreamTextResult = class {
     let recordedContent = [];
     const recordedResponseMessages = [];
     let recordedFinishReason = void 0;
+    let recordedRawFinishReason = void 0;
     let recordedTotalUsage = void 0;
     let recordedRequest = {};
     let recordedWarnings = [];
@@ -5714,6 +5744,7 @@ var DefaultStreamTextResult = class {
         const currentStepResult = new DefaultStepResult({
           content: recordedContent,
           finishReason: part.finishReason,
+          rawFinishReason: part.rawFinishReason,
           usage: part.usage,
           warnings: recordedWarnings,
           request: recordedRequest,
@@ -5736,6 +5767,7 @@ var DefaultStreamTextResult = class {
         if (part.type === "finish") {
           recordedTotalUsage = part.totalUsage;
           recordedFinishReason = part.finishReason;
+          recordedRawFinishReason = part.rawFinishReason;
         }
       },
       async flush(controller) {
@@ -5745,18 +5777,21 @@ var DefaultStreamTextResult = class {
             message: "No output generated. Check the stream for errors."
           });
           self._finishReason.reject(error);
+          self._rawFinishReason.reject(error);
           self._totalUsage.reject(error);
           self._steps.reject(error);
           return;
         }
-        const finishReason = recordedFinishReason != null ? recordedFinishReason : "unknown";
+        const finishReason = recordedFinishReason != null ? recordedFinishReason : "other";
         const totalUsage = recordedTotalUsage != null ? recordedTotalUsage : createNullLanguageModelUsage();
         self._finishReason.resolve(finishReason);
+        self._rawFinishReason.resolve(recordedRawFinishReason);
         self._totalUsage.resolve(totalUsage);
         self._steps.resolve(recordedSteps);
         const finalStep = recordedSteps[recordedSteps.length - 1];
         await (onFinish == null ? void 0 : onFinish({
-          finishReason,
+          finishReason: finalStep.finishReason,
+          rawFinishReason: finalStep.rawFinishReason,
           totalUsage,
           usage: finalStep.usage,
           content: finalStep.content,
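Two observable changes in `streamText` fall out of this flush logic: the fallback finish reason is now "other", and `onFinish` reports the final step's `finishReason` together with a new `rawFinishReason`. A usage sketch, assuming the public callback mirrors the call above; `LanguageModel` is the SDK's exported model type and the prompt is illustrative:

```ts
import { streamText, type LanguageModel } from "ai";

async function run(model: LanguageModel) {
  const result = streamText({
    model,
    prompt: "Summarize the release notes.",
    onFinish({ finishReason, rawFinishReason, totalUsage }) {
      console.log(finishReason);    // final step's normalized reason, e.g. "stop"
      console.log(rawFinishReason); // provider-specific reason, may be undefined
      console.log(totalUsage);
    },
  });
  return await result.text; // consume the stream so onFinish fires
}
```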
@@ -6081,7 +6116,8 @@ var DefaultStreamTextResult = class {
         const stepToolOutputs = [];
         let warnings;
         const activeToolCallToolNames = {};
-        let stepFinishReason = "unknown";
+        let stepFinishReason = "other";
+        let stepRawFinishReason = void 0;
         let stepUsage = createNullLanguageModelUsage();
         let stepProviderMetadata;
         let stepFirstChunk = true;
@@ -6177,6 +6213,7 @@ var DefaultStreamTextResult = class {
             case "finish": {
               stepUsage = chunk.usage;
               stepFinishReason = chunk.finishReason;
+              stepRawFinishReason = chunk.rawFinishReason;
               stepProviderMetadata = chunk.providerMetadata;
               const msToFinish = now2() - startTimestampMs;
               doStreamSpan.addEvent("ai.stream.finish");
@@ -6289,6 +6326,7 @@ var DefaultStreamTextResult = class {
             controller.enqueue({
               type: "finish-step",
               finishReason: stepFinishReason,
+              rawFinishReason: stepRawFinishReason,
               usage: stepUsage,
               providerMetadata: stepProviderMetadata,
               response: {
@@ -6360,6 +6398,7 @@ var DefaultStreamTextResult = class {
           controller.enqueue({
             type: "finish",
             finishReason: stepFinishReason,
+            rawFinishReason: stepRawFinishReason,
             totalUsage: combinedUsage
           });
           self.closeStream();
@@ -6453,6 +6492,10 @@ var DefaultStreamTextResult = class {
     this.consumeStream();
     return this._finishReason.promise;
   }
+  get rawFinishReason() {
+    this.consumeStream();
+    return this._rawFinishReason.promise;
+  }
   /**
   Split out a new stream from the original stream.
   The original stream is replaced to allow for further splitting,
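The `rawFinishReason` promise behaves like the existing `finishReason` getter: reading it calls `consumeStream()`, so it only resolves after the stream has been drained. A usage sketch assuming those two getters:

```ts
// Both getters consume the stream, so awaiting them drains it the same way
// awaiting finishReason or totalUsage already did.
async function finishInfo(result: {
  finishReason: Promise<string>;
  rawFinishReason: Promise<string | undefined>;
}) {
  const [unified, raw] = await Promise.all([result.finishReason, result.rawFinishReason]);
  return { unified, raw };
}
```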
@@ -8918,14 +8961,14 @@ async function generateObject(options) {
           message: "No object generated: the model did not return a response.",
           response: responseData,
           usage: asLanguageModelUsage(result2.usage),
-          finishReason: result2.finishReason
+          finishReason: result2.finishReason.unified
         });
       }
       span2.setAttributes(
         await selectTelemetryAttributes({
           telemetry,
           attributes: {
-            "ai.response.finishReason": result2.finishReason,
+            "ai.response.finishReason": result2.finishReason.unified,
             "ai.response.object": { output: () => text2 },
             "ai.response.id": responseData.id,
             "ai.response.model": responseData.modelId,
@@ -8937,7 +8980,9 @@ async function generateObject(options) {
             "ai.usage.promptTokens": result2.usage.inputTokens.total,
             "ai.usage.completionTokens": result2.usage.outputTokens.total,
             // standardized gen-ai llm span attributes:
-            "gen_ai.response.finish_reasons": [result2.finishReason],
+            "gen_ai.response.finish_reasons": [
+              result2.finishReason.unified
+            ],
             "gen_ai.response.id": responseData.id,
             "gen_ai.response.model": responseData.modelId,
             "gen_ai.usage.input_tokens": result2.usage.inputTokens.total,
@@ -8955,7 +9000,7 @@ async function generateObject(options) {
         })
       );
       result = generateResult.objectText;
-      finishReason = generateResult.finishReason;
+      finishReason = generateResult.finishReason.unified;
       usage = asLanguageModelUsage(generateResult.usage);
       warnings = generateResult.warnings;
       resultProviderMetadata = generateResult.providerMetadata;
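`generateObject` keeps exposing a plain string `finishReason`; it is simply sourced from the unified value now, and the `NoObjectGeneratedError` and telemetry paths follow suit. A usage sketch; the schema and prompt are illustrative, and `LanguageModel` is the SDK's exported model type:

```ts
import { generateObject, type LanguageModel } from "ai";
import { z } from "zod";

async function getRecipeName(model: LanguageModel) {
  const { object, finishReason } = await generateObject({
    model,
    schema: z.object({ name: z.string() }),
    prompt: "Invent a recipe name.",
  });
  console.log(finishReason); // still a string, e.g. "stop" or "other"
  return object.name;
}
```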
@@ -9472,11 +9517,12 @@ var DefaultStreamObjectResult = class {
           if (textDelta !== "") {
             controller.enqueue({ type: "text-delta", textDelta });
           }
-          finishReason = chunk.finishReason;
+          finishReason = chunk.finishReason.unified;
           usage = asLanguageModelUsage(chunk.usage);
           providerMetadata = chunk.providerMetadata;
           controller.enqueue({
             ...chunk,
+            finishReason: chunk.finishReason.unified,
             usage,
             response: fullResponse
           });
@@ -9492,7 +9538,7 @@ var DefaultStreamObjectResult = class {
           ...fullResponse,
           headers: response == null ? void 0 : response.headers
         });
-        self._finishReason.resolve(finishReason != null ? finishReason : "unknown");
+        self._finishReason.resolve(finishReason != null ? finishReason : "other");
         try {
           object2 = await parseAndValidateObjectResultWithRepair(
             accumulatedText,