ai 6.0.0-beta.163 → 6.0.0-beta.165
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +20 -0
- package/dist/index.d.mts +49 -32
- package/dist/index.d.ts +49 -32
- package/dist/index.js +121 -75
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +121 -75
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +1 -1
- package/dist/internal/index.mjs +1 -1
- package/package.json +4 -4
package/dist/index.js
CHANGED
@@ -584,6 +584,7 @@ function asLanguageModelV3(model)
       const result = await target.doGenerate(...args);
       return {
         ...result,
+        finishReason: convertV2FinishReasonToV3(result.finishReason),
         usage: convertV2UsageToV3(result.usage)
       };
     };
@@ -609,6 +610,7 @@ function convertV2StreamToV3(stream)
         case "finish":
           controller.enqueue({
             ...chunk,
+            finishReason: convertV2FinishReasonToV3(chunk.finishReason),
             usage: convertV2UsageToV3(chunk.usage)
           });
           break;
@@ -620,6 +622,12 @@ function convertV2StreamToV3(stream)
     })
   );
 }
+function convertV2FinishReasonToV3(finishReason) {
+  return {
+    unified: finishReason === "unknown" ? "other" : finishReason,
+    raw: void 0
+  };
+}
 function convertV2UsageToV3(usage) {
   return {
     inputTokens: {
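The adapter added above captures the central change in this release: a language-model `finishReason` is no longer a bare string but an object with a normalized `unified` value and a provider-supplied `raw` value. A rough TypeScript sketch of that shape as implied by this diff; the type names, and any union members not visible in this diff, are assumptions:

```ts
// Sketch only: names are illustrative, not copied from the package's .d.ts.
type UnifiedFinishReason =
  | "stop"           // assumed; not visible in this diff
  | "length"         // assumed; not visible in this diff
  | "content-filter"
  | "tool-calls"
  | "error"
  | "other";         // V2's "unknown" is folded into "other" by the adapter above

interface FinishReasonV3 {
  unified: UnifiedFinishReason; // normalized, cross-provider value
  raw: string | undefined;      // provider's original value; void 0 for converted V2 results
}
```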
@@ -943,7 +951,7 @@ var import_provider_utils3 = require("@ai-sdk/provider-utils");
 var import_provider_utils4 = require("@ai-sdk/provider-utils");

 // src/version.ts
-var VERSION = true ? "6.0.0-beta.163" : "0.0.0-test";
+var VERSION = true ? "6.0.0-beta.165" : "0.0.0-test";

 // src/util/download/download.ts
 var download = async ({ url }) => {
@@ -3289,6 +3297,7 @@ var DefaultStepResult = class {
   constructor({
     content,
     finishReason,
+    rawFinishReason,
     usage,
     warnings,
     request,
@@ -3297,6 +3306,7 @@ var DefaultStepResult = class {
   }) {
     this.content = content;
     this.finishReason = finishReason;
+    this.rawFinishReason = rawFinishReason;
     this.usage = usage;
     this.warnings = warnings;
     this.request = request;
@@ -3714,7 +3724,7 @@ async function generateText({
         await selectTelemetryAttributes({
           telemetry,
           attributes: {
-            "ai.response.finishReason": result.finishReason,
+            "ai.response.finishReason": result.finishReason.unified,
            "ai.response.text": {
              output: () => extractTextContent(result.content)
            },
@@ -3734,7 +3744,9 @@ async function generateText({
            "ai.usage.promptTokens": result.usage.inputTokens.total,
            "ai.usage.completionTokens": result.usage.outputTokens.total,
            // standardized gen-ai llm span attributes:
-            "gen_ai.response.finish_reasons": [result.finishReason],
+            "gen_ai.response.finish_reasons": [
+              result.finishReason.unified
+            ],
            "gen_ai.response.id": responseData.id,
            "gen_ai.response.model": responseData.modelId,
            "gen_ai.usage.input_tokens": result.usage.inputTokens.total,
@@ -3858,7 +3870,8 @@ async function generateText({
       );
       const currentStepResult = new DefaultStepResult({
         content: stepContent,
-        finishReason: currentModelResponse.finishReason,
+        finishReason: currentModelResponse.finishReason.unified,
+        rawFinishReason: currentModelResponse.finishReason.raw,
         usage: asLanguageModelUsage(currentModelResponse.usage),
         warnings: currentModelResponse.warnings,
         providerMetadata: currentModelResponse.providerMetadata,
@@ -3887,7 +3900,7 @@ async function generateText({
         await selectTelemetryAttributes({
           telemetry,
           attributes: {
-            "ai.response.finishReason": currentModelResponse.finishReason,
+            "ai.response.finishReason": currentModelResponse.finishReason.unified,
            "ai.response.text": {
              output: () => extractTextContent(currentModelResponse.content)
            },
@@ -3921,6 +3934,7 @@ async function generateText({
     );
     await (onFinish == null ? void 0 : onFinish({
       finishReason: lastStep.finishReason,
+      rawFinishReason: lastStep.rawFinishReason,
       usage: lastStep.usage,
       content: lastStep.content,
       text: lastStep.text,
@@ -4039,6 +4053,9 @@ var DefaultGenerateTextResult = class {
   get finishReason() {
     return this.finalStep.finishReason;
   }
+  get rawFinishReason() {
+    return this.finalStep.rawFinishReason;
+  }
   get warnings() {
     return this.finalStep.warnings;
   }
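With the `rawFinishReason` getter added to `DefaultGenerateTextResult` and the extra `onFinish` argument above, callers of `generateText` can read both the normalized and the provider-reported value. A minimal usage sketch, assuming the public API mirrors these internals; the provider import, model id, and prompt are placeholders:

```ts
import { generateText } from "ai";
import { openai } from "@ai-sdk/openai"; // illustrative provider choice

const result = await generateText({
  model: openai("gpt-4o-mini"), // placeholder model id
  prompt: "Write a haiku about semver.",
  onFinish: ({ finishReason, rawFinishReason }) => {
    // finishReason: normalized value from the last step (e.g. "stop" or "other")
    // rawFinishReason: the provider-reported value, possibly undefined
    console.log("finished:", finishReason, rawFinishReason);
  },
});

console.log(result.finishReason);                   // finalStep.finishReason (normalized)
console.log(result.rawFinishReason);                // finalStep.rawFinishReason (provider value)
console.log(result.steps.at(-1)?.rawFinishReason);  // each step result records it as well
```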
@@ -4084,68 +4101,73 @@ function asContent({
   toolApprovalRequests,
   tools
 }) {
-
-
-
-
-
-
-
-
-
-
-
-
-}
-}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-error: part.result,
-providerExecuted: true,
-dynamic: part.dynamic
-};
-}
-return {
-type: "tool-result",
+  const contentParts = [];
+  for (const part of content) {
+    switch (part.type) {
+      case "text":
+      case "reasoning":
+      case "source":
+        contentParts.push(part);
+        break;
+      case "file": {
+        contentParts.push({
+          type: "file",
+          file: new DefaultGeneratedFile(part),
+          ...part.providerMetadata != null ? { providerMetadata: part.providerMetadata } : {}
+        });
+        break;
+      }
+      case "tool-call": {
+        contentParts.push(
+          toolCalls.find((toolCall) => toolCall.toolCallId === part.toolCallId)
+        );
+        break;
+      }
+      case "tool-result": {
+        const toolCall = toolCalls.find(
+          (toolCall2) => toolCall2.toolCallId === part.toolCallId
+        );
+        if (toolCall == null) {
+          const tool2 = tools == null ? void 0 : tools[part.toolName];
+          const supportsDeferredResults = (tool2 == null ? void 0 : tool2.type) === "provider" && tool2.supportsDeferredResults;
+          if (!supportsDeferredResults) {
+            throw new Error(`Tool call ${part.toolCallId} not found.`);
+          }
+          if (part.isError) {
+            contentParts.push({
+              type: "tool-error",
               toolCallId: part.toolCallId,
               toolName: part.toolName,
               input: void 0,
-
+              error: part.result,
               providerExecuted: true,
               dynamic: part.dynamic
-};
-}
-
-
-type: "tool-error",
+            });
+          } else {
+            contentParts.push({
+              type: "tool-result",
               toolCallId: part.toolCallId,
               toolName: part.toolName,
-input:
-
+              input: void 0,
+              output: part.result,
               providerExecuted: true,
-dynamic:
-};
+              dynamic: part.dynamic
+            });
           }
-
+          break;
+        }
+        if (part.isError) {
+          contentParts.push({
+            type: "tool-error",
+            toolCallId: part.toolCallId,
+            toolName: part.toolName,
+            input: toolCall.input,
+            error: part.result,
+            providerExecuted: true,
+            dynamic: toolCall.dynamic
+          });
+        } else {
+          contentParts.push({
            type: "tool-result",
            toolCallId: part.toolCallId,
            toolName: part.toolName,
@@ -4153,13 +4175,16 @@ function asContent({
            output: part.result,
            providerExecuted: true,
            dynamic: toolCall.dynamic
-};
+          });
         }
+        break;
       }
-
-
-
-
+      case "tool-approval-request": {
+        break;
+      }
+    }
+  }
+  return [...contentParts, ...toolOutputs, ...toolApprovalRequests];
 }

 // src/generate-text/stream-text.ts
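Besides restructuring the loop, the rewritten `asContent` above changes how a provider-executed tool result without a matching tool call is handled: it is now accepted when the tool is a provider-defined tool that declares `supportsDeferredResults`; only otherwise does the `Tool call ... not found.` error remain. A standalone restatement of that guard; the function name and parameter type below are illustrative, not package exports:

```ts
// Restates the check added inside asContent above.
type ToolLike = { type?: string; supportsDeferredResults?: boolean };

function acceptsDeferredToolResult(tool: ToolLike | undefined): boolean {
  // Only provider-defined tools that opt in may report a result for a tool
  // call that never appeared in this step's content (a "deferred" result).
  return tool?.type === "provider" && tool.supportsDeferredResults === true;
}
```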
@@ -4462,8 +4487,7 @@ var uiMessageChunkSchema = (0, import_provider_utils16.lazySchema)(
           "content-filter",
           "tool-calls",
           "error",
-          "other",
-          "unknown"
+          "other"
         ]).optional(),
         messageMetadata: import_v47.z.unknown().optional()
       }),
@@ -5337,7 +5361,8 @@ function runToolsTransformation({
         case "finish": {
           finishChunk = {
             type: "finish",
-            finishReason: chunk.finishReason,
+            finishReason: chunk.finishReason.unified,
+            rawFinishReason: chunk.finishReason.raw,
            usage: asLanguageModelUsage(chunk.usage),
            providerMetadata: chunk.providerMetadata
          };
@@ -5442,6 +5467,9 @@ function runToolsTransformation({
           }
           break;
         }
+        case "tool-approval-request": {
+          break;
+        }
         default: {
           const _exhaustiveCheck = chunkType;
           throw new Error(`Unhandled chunk type: ${_exhaustiveCheck}`);
@@ -5650,6 +5678,7 @@ var DefaultStreamTextResult = class {
   }) {
     this._totalUsage = new import_provider_utils19.DelayedPromise();
     this._finishReason = new import_provider_utils19.DelayedPromise();
+    this._rawFinishReason = new import_provider_utils19.DelayedPromise();
     this._steps = new import_provider_utils19.DelayedPromise();
     this.outputSpecification = output;
     this.includeRawChunks = includeRawChunks;
@@ -5658,6 +5687,7 @@ var DefaultStreamTextResult = class {
     let recordedContent = [];
     const recordedResponseMessages = [];
     let recordedFinishReason = void 0;
+    let recordedRawFinishReason = void 0;
     let recordedTotalUsage = void 0;
     let recordedRequest = {};
     let recordedWarnings = [];
@@ -5786,6 +5816,7 @@ var DefaultStreamTextResult = class {
         const currentStepResult = new DefaultStepResult({
           content: recordedContent,
           finishReason: part.finishReason,
+          rawFinishReason: part.rawFinishReason,
          usage: part.usage,
          warnings: recordedWarnings,
          request: recordedRequest,
@@ -5808,6 +5839,7 @@ var DefaultStreamTextResult = class {
         if (part.type === "finish") {
           recordedTotalUsage = part.totalUsage;
           recordedFinishReason = part.finishReason;
+          recordedRawFinishReason = part.rawFinishReason;
         }
       },
       async flush(controller) {
@@ -5817,18 +5849,21 @@ var DefaultStreamTextResult = class {
             message: "No output generated. Check the stream for errors."
           });
           self._finishReason.reject(error);
+          self._rawFinishReason.reject(error);
          self._totalUsage.reject(error);
          self._steps.reject(error);
          return;
         }
-        const finishReason = recordedFinishReason != null ? recordedFinishReason : "unknown";
+        const finishReason = recordedFinishReason != null ? recordedFinishReason : "other";
         const totalUsage = recordedTotalUsage != null ? recordedTotalUsage : createNullLanguageModelUsage();
         self._finishReason.resolve(finishReason);
+        self._rawFinishReason.resolve(recordedRawFinishReason);
         self._totalUsage.resolve(totalUsage);
         self._steps.resolve(recordedSteps);
         const finalStep = recordedSteps[recordedSteps.length - 1];
         await (onFinish == null ? void 0 : onFinish({
-          finishReason,
+          finishReason: finalStep.finishReason,
+          rawFinishReason: finalStep.rawFinishReason,
          totalUsage,
          usage: finalStep.usage,
          content: finalStep.content,
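On the streaming side, `onFinish` now receives the final step's normalized `finishReason` plus the new `rawFinishReason`, as wired up above. A short sketch, assuming the public `streamText` options mirror this internal call; the provider import, model id, and prompt are placeholders:

```ts
import { streamText } from "ai";
import { openai } from "@ai-sdk/openai"; // illustrative provider choice

const result = streamText({
  model: openai("gpt-4o-mini"), // placeholder model id
  prompt: "Summarize the finish-reason change in one sentence.",
  onFinish: ({ finishReason, rawFinishReason, totalUsage }) => {
    // finishReason: finalStep.finishReason (previously the locally tracked value)
    // rawFinishReason: finalStep.rawFinishReason (new in this release)
    console.log(finishReason, rawFinishReason, totalUsage);
  },
});

for await (const text of result.textStream) {
  process.stdout.write(text);
}
```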
@@ -6153,7 +6188,8 @@ var DefaultStreamTextResult = class {
         const stepToolOutputs = [];
         let warnings;
         const activeToolCallToolNames = {};
-        let stepFinishReason = "unknown";
+        let stepFinishReason = "other";
+        let stepRawFinishReason = void 0;
         let stepUsage = createNullLanguageModelUsage();
         let stepProviderMetadata;
         let stepFirstChunk = true;
@@ -6249,6 +6285,7 @@ var DefaultStreamTextResult = class {
             case "finish": {
               stepUsage = chunk.usage;
               stepFinishReason = chunk.finishReason;
+              stepRawFinishReason = chunk.rawFinishReason;
              stepProviderMetadata = chunk.providerMetadata;
              const msToFinish = now2() - startTimestampMs;
              doStreamSpan.addEvent("ai.stream.finish");
@@ -6361,6 +6398,7 @@ var DefaultStreamTextResult = class {
             controller.enqueue({
               type: "finish-step",
               finishReason: stepFinishReason,
+              rawFinishReason: stepRawFinishReason,
              usage: stepUsage,
              providerMetadata: stepProviderMetadata,
              response: {
@@ -6432,6 +6470,7 @@ var DefaultStreamTextResult = class {
         controller.enqueue({
           type: "finish",
           finishReason: stepFinishReason,
+          rawFinishReason: stepRawFinishReason,
          totalUsage: combinedUsage
         });
         self.closeStream();
@@ -6525,6 +6564,10 @@ var DefaultStreamTextResult = class {
     this.consumeStream();
     return this._finishReason.promise;
   }
+  get rawFinishReason() {
+    this.consumeStream();
+    return this._rawFinishReason.promise;
+  }
   /**
   Split out a new stream from the original stream.
   The original stream is replaced to allow for further splitting,
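Stream consumers get the same information in two more places: `finish-step` and `finish` parts now carry `rawFinishReason`, and the result exposes a `rawFinishReason` promise next to `finishReason` (like its sibling, the getter consumes the stream). A sketch under the same assumptions as above:

```ts
import { streamText } from "ai";
import { openai } from "@ai-sdk/openai"; // illustrative provider choice

const result = streamText({
  model: openai("gpt-4o-mini"), // placeholder model id
  prompt: "Explain finish reasons briefly.",
});

for await (const part of result.fullStream) {
  if (part.type === "finish-step" || part.type === "finish") {
    // Both part types now include rawFinishReason next to finishReason.
    console.log(part.type, part.finishReason, part.rawFinishReason);
  }
}

// Resolves once the stream has been consumed; may be undefined if the
// provider did not report a raw value.
console.log(await result.rawFinishReason);
```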
@@ -8967,14 +9010,14 @@ async function generateObject(options) {
             message: "No object generated: the model did not return a response.",
             response: responseData,
             usage: asLanguageModelUsage(result2.usage),
-            finishReason: result2.finishReason
+            finishReason: result2.finishReason.unified
           });
         }
         span2.setAttributes(
           await selectTelemetryAttributes({
             telemetry,
             attributes: {
-              "ai.response.finishReason": result2.finishReason,
+              "ai.response.finishReason": result2.finishReason.unified,
              "ai.response.object": { output: () => text2 },
              "ai.response.id": responseData.id,
              "ai.response.model": responseData.modelId,
@@ -8986,7 +9029,9 @@ async function generateObject(options) {
              "ai.usage.promptTokens": result2.usage.inputTokens.total,
              "ai.usage.completionTokens": result2.usage.outputTokens.total,
              // standardized gen-ai llm span attributes:
-              "gen_ai.response.finish_reasons": [result2.finishReason],
+              "gen_ai.response.finish_reasons": [
+                result2.finishReason.unified
+              ],
              "gen_ai.response.id": responseData.id,
              "gen_ai.response.model": responseData.modelId,
              "gen_ai.usage.input_tokens": result2.usage.inputTokens.total,
@@ -9004,7 +9049,7 @@ async function generateObject(options) {
       })
     );
     result = generateResult.objectText;
-    finishReason = generateResult.finishReason;
+    finishReason = generateResult.finishReason.unified;
     usage = asLanguageModelUsage(generateResult.usage);
     warnings = generateResult.warnings;
     resultProviderMetadata = generateResult.providerMetadata;
@@ -9518,11 +9563,12 @@ var DefaultStreamObjectResult = class {
             if (textDelta !== "") {
               controller.enqueue({ type: "text-delta", textDelta });
             }
-            finishReason = chunk.finishReason;
+            finishReason = chunk.finishReason.unified;
            usage = asLanguageModelUsage(chunk.usage);
            providerMetadata = chunk.providerMetadata;
            controller.enqueue({
              ...chunk,
+              finishReason: chunk.finishReason.unified,
              usage,
              response: fullResponse
            });
@@ -9538,7 +9584,7 @@ var DefaultStreamObjectResult = class {
           ...fullResponse,
           headers: response == null ? void 0 : response.headers
         });
-        self._finishReason.resolve(finishReason != null ? finishReason : "unknown");
+        self._finishReason.resolve(finishReason != null ? finishReason : "other");
        try {
          object2 = await parseAndValidateObjectResultWithRepair(
            accumulatedText,