ai 4.0.0-canary.11 → 4.0.0-canary.12
This diff shows the changes between publicly released versions of this package as they appear in their public registry. It is provided for informational purposes only.
- package/CHANGELOG.md +9 -0
- package/README.md +9 -13
- package/dist/index.d.mts +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +86 -119
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +89 -122
- package/dist/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/index.mjs (CHANGED)
@@ -6,10 +6,12 @@ var __export = (target, all) => {
 
 // streams/index.ts
 import {
-
-
-
-
+  formatAssistantStreamPart as formatAssistantStreamPart2,
+  formatDataStreamPart as formatDataStreamPart3,
+  parseAssistantStreamPart,
+  parseDataStreamPart,
+  processDataStream,
+  processTextStream
 } from "@ai-sdk/ui-utils";
 import { generateId as generateId2 } from "@ai-sdk/provider-utils";
 
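This hunk swaps the old stream-part helpers for the renamed ones in `@ai-sdk/ui-utils` (the removed import names were not captured by the diff viewer). A minimal round-trip sketch of the renamed helpers as re-exported by `ai`; the exact wire strings are an assumption based on the `<code>:<json>\n` data-stream protocol:

```ts
import { formatDataStreamPart, parseDataStreamPart } from "ai";

// Format a text part for the data-stream protocol.
const wire = formatDataStreamPart("text", "Hello");
console.log(wire); // e.g. '0:"Hello"\n'

// parseDataStreamPart is assumed to take a single line; trim the newline.
const part = parseDataStreamPart(wire.trim());
console.log(part.type, part.value); // "text" "Hello"
```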
@@ -3576,7 +3578,7 @@ var DefaultGenerateTextResult = class {
 
 // core/generate-text/stream-text.ts
 import { createIdGenerator as createIdGenerator4 } from "@ai-sdk/provider-utils";
-import {
+import { formatDataStreamPart } from "@ai-sdk/ui-utils";
 
 // core/util/merge-streams.ts
 function mergeStreams(stream1, stream2) {
@@ -3887,13 +3889,6 @@ function streamText({
   } = {},
   ...settings
 }) {
-  if (maxSteps < 1) {
-    throw new InvalidArgumentError({
-      parameter: "maxSteps",
-      value: maxSteps,
-      message: "maxSteps must be at least 1"
-    });
-  }
   return new DefaultStreamTextResult({
     model,
     telemetry,
@@ -3955,6 +3950,13 @@ var DefaultStreamTextResult = class {
     this.responsePromise = new DelayedPromise();
     this.stepsPromise = new DelayedPromise();
     this.stitchableStream = createStitchableStream();
+    if (maxSteps < 1) {
+      throw new InvalidArgumentError({
+        parameter: "maxSteps",
+        value: maxSteps,
+        message: "maxSteps must be at least 1"
+      });
+    }
     const tracer = getTracer(telemetry);
     const baseTelemetryAttributes = getBaseTelemetryAttributes({
       model,
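The `maxSteps < 1` guard moves from the `streamText` factory into the `DefaultStreamTextResult` constructor. Because `streamText` still constructs the result synchronously, an invalid value should keep throwing before any streaming starts; a sketch (the provider import is illustrative):

```ts
import { streamText } from "ai";
import { openai } from "@ai-sdk/openai"; // illustrative provider

try {
  // The check now runs in the DefaultStreamTextResult constructor, which
  // streamText invokes synchronously, so this still throws right here.
  streamText({
    model: openai("gpt-4o-mini"),
    prompt: "Hello",
    maxSteps: 0,
  });
} catch (error) {
  console.error(error); // InvalidArgumentError: maxSteps must be at least 1
}
```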
@@ -3967,7 +3969,6 @@ var DefaultStreamTextResult = class {
       tools
     });
     const self = this;
-    const stepResults = [];
     recordSpan({
       name: "ai.streamText",
       attributes: selectTelemetryAttributes({
@@ -3986,9 +3987,15 @@ var DefaultStreamTextResult = class {
       endWhenDone: false,
       fn: async (rootSpan) => {
         const retry = retryWithExponentialBackoff({ maxRetries });
-        const
-
-
+        const stepResults = [];
+        async function streamStep({
+          currentStep,
+          responseMessages,
+          usage,
+          stepType,
+          previousStepText,
+          hasLeadingWhitespace
+        }) {
           const promptFormat = responseMessages.length === 0 ? initialPrompt.type : "messages";
           const promptMessages = await convertToLanguageModelPrompt({
             prompt: {
@@ -4004,9 +4011,9 @@ var DefaultStreamTextResult = class {
             ...prepareToolsAndToolChoice({ tools, toolChoice, activeTools })
           };
           const {
-            result: { stream
-            doStreamSpan
-            startTimestampMs
+            result: { stream, warnings, rawResponse, request },
+            doStreamSpan,
+            startTimestampMs
           } = await retry(
             () => recordSpan({
               name: "ai.streamText.doStream",
@@ -4048,10 +4055,10 @@ var DefaultStreamTextResult = class {
               }),
               tracer,
               endWhenDone: false,
-              fn: async (
+              fn: async (doStreamSpan2) => ({
                 startTimestampMs: now2(),
                 // get before the call
-                doStreamSpan:
+                doStreamSpan: doStreamSpan2,
                 result: await model.doStream({
                   mode,
                   ...prepareCallSettings(settings),
@@ -4064,47 +4071,15 @@ var DefaultStreamTextResult = class {
               })
             })
           );
-
-
-
-
-
-
-
-
-
-              }),
-              warnings: warnings2,
-              request: request2 != null ? request2 : {},
-              rawResponse: rawResponse2
-            },
-            doStreamSpan: doStreamSpan2,
-            startTimestampMs: startTimestampMs2
-          };
-        };
-        const {
-          result: { stream, warnings, rawResponse, request },
-          doStreamSpan,
-          startTimestampMs
-        } = await startStep({ responseMessages: [] });
-        function addStepStream({
-          stream: stream2,
-          startTimestamp,
-          doStreamSpan: doStreamSpan2,
-          currentStep,
-          responseMessages,
-          usage = {
-            promptTokens: 0,
-            completionTokens: 0,
-            totalTokens: 0
-          },
-          stepType,
-          previousStepText = "",
-          stepRequest,
-          hasLeadingWhitespace,
-          warnings: warnings2,
-          response
-        }) {
+          const transformedStream = runToolsTransformation({
+            tools,
+            generatorStream: stream,
+            toolCallStreaming,
+            tracer,
+            telemetry,
+            abortSignal
+          });
+          const stepRequest = request != null ? request : {};
           const stepToolCalls = [];
           const stepToolResults = [];
           let stepFinishReason = "unknown";
@@ -4139,17 +4114,17 @@ var DefaultStreamTextResult = class {
             await (onChunk == null ? void 0 : onChunk({ chunk }));
           }
           self.stitchableStream.addStream(
-
+            transformedStream.pipeThrough(
              new TransformStream({
                async transform(chunk, controller) {
                  var _a11, _b, _c;
                  if (stepFirstChunk) {
-                    const msToFirstChunk = now2() -
+                    const msToFirstChunk = now2() - startTimestampMs;
                    stepFirstChunk = false;
-
+                    doStreamSpan.addEvent("ai.stream.firstChunk", {
                      "ai.response.msToFirstChunk": msToFirstChunk
                    });
-
+                    doStreamSpan.setAttributes({
                      "ai.response.msToFirstChunk": msToFirstChunk
                    });
                  }
@@ -4207,9 +4182,9 @@ var DefaultStreamTextResult = class {
                  stepFinishReason = chunk.finishReason;
                  stepProviderMetadata = chunk.experimental_providerMetadata;
                  stepLogProbs = chunk.logprobs;
-                  const msToFinish = now2() -
-
-
+                  const msToFinish = now2() - startTimestampMs;
+                  doStreamSpan.addEvent("ai.stream.finish");
+                  doStreamSpan.setAttributes({
                    "ai.response.msToFinish": msToFinish,
                    "ai.response.avgCompletionTokensPerSecond": 1e3 * stepUsage.completionTokens / msToFinish
                  });
@@ -4260,7 +4235,7 @@ var DefaultStreamTextResult = class {
                    chunkBuffer = "";
                  }
                  try {
-
+                    doStreamSpan.setAttributes(
                      selectTelemetryAttributes({
                        telemetry,
                        attributes: {
@@ -4285,7 +4260,7 @@ var DefaultStreamTextResult = class {
                    );
                  } catch (error) {
                  } finally {
-
+                    doStreamSpan.end();
                  }
                  controller.enqueue({
                    type: "step-finish",
@@ -4325,12 +4300,12 @@ var DefaultStreamTextResult = class {
                    toolResults: stepToolResults,
                    finishReason: stepFinishReason,
                    usage: stepUsage,
-                    warnings
+                    warnings,
                    logprobs: stepLogProbs,
                    request: stepRequest,
                    response: {
                      ...stepResponse,
-                      headers:
+                      headers: rawResponse == null ? void 0 : rawResponse.headers,
                      // deep clone msgs to avoid mutating past messages in multi-step:
                      messages: JSON.parse(JSON.stringify(responseMessages))
                    },
@@ -4345,26 +4320,13 @@ var DefaultStreamTextResult = class {
                    totalTokens: usage.totalTokens + stepUsage.totalTokens
                  };
                  if (nextStepType !== "done") {
-
-                    result,
-                    doStreamSpan: doStreamSpan3,
-                    startTimestampMs: startTimestamp2
-                  } = await startStep({ responseMessages });
-                  warnings2 = result.warnings;
-                  response = result.rawResponse;
-                  addStepStream({
-                    stream: result.stream,
-                    startTimestamp: startTimestamp2,
-                    doStreamSpan: doStreamSpan3,
+                    await streamStep({
                      currentStep: currentStep + 1,
                      responseMessages,
                      usage: combinedUsage,
                      stepType: nextStepType,
                      previousStepText: fullStepText,
-
-                      hasLeadingWhitespace: hasWhitespaceSuffix,
-                      warnings: warnings2,
-                      response
+                      hasLeadingWhitespace: hasWhitespaceSuffix
                    });
                    return;
                  }
@@ -4407,7 +4369,7 @@ var DefaultStreamTextResult = class {
                    messages: responseMessages
                  });
                  self.stepsPromise.resolve(stepResults);
-                  self.warningsPromise.resolve(
+                  self.warningsPromise.resolve(warnings != null ? warnings : []);
                  await (onFinish == null ? void 0 : onFinish({
                    finishReason: stepFinishReason,
                    logprobs: stepLogProbs,
@@ -4425,7 +4387,7 @@ var DefaultStreamTextResult = class {
                      headers: rawResponse == null ? void 0 : rawResponse.headers,
                      messages: responseMessages
                    },
-                    warnings
+                    warnings,
                    experimental_providerMetadata: stepProviderMetadata,
                    steps: stepResults
                  }));
@@ -4439,18 +4401,17 @@ var DefaultStreamTextResult = class {
             )
           );
         }
-
-          stream,
-          startTimestamp: startTimestampMs,
-          doStreamSpan,
+        await streamStep({
           currentStep: 0,
           responseMessages: [],
-          usage:
+          usage: {
+            promptTokens: 0,
+            completionTokens: 0,
+            totalTokens: 0
+          },
+          previousStepText: "",
           stepType: "initial",
-
-          hasLeadingWhitespace: false,
-          warnings,
-          response: rawResponse
+          hasLeadingWhitespace: false
         });
       }
     }).catch((error) => {
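The hunks above replace the `startStep`/`addStepStream` pair with a single recursive `streamStep` function that owns one step's model call, tool transformation, and stream stitching, then either recurses with accumulated usage or finishes. A simplified sketch of that recursion pattern (all names here are illustrative, not the SDK's internals):

```ts
type Usage = { promptTokens: number; completionTokens: number; totalTokens: number };

async function runOneModelCall(): Promise<Usage> {
  // placeholder for the real doStream call + runToolsTransformation handling
  return { promptTokens: 1, completionTokens: 1, totalTokens: 2 };
}

async function streamStepSketch(opts: {
  currentStep: number;
  usage: Usage;
  maxSteps: number;
}): Promise<void> {
  const stepUsage = await runOneModelCall();
  // accumulate usage across steps, as combinedUsage does in the diff
  const combined: Usage = {
    promptTokens: opts.usage.promptTokens + stepUsage.promptTokens,
    completionTokens: opts.usage.completionTokens + stepUsage.completionTokens,
    totalTokens: opts.usage.totalTokens + stepUsage.totalTokens,
  };
  // recurse while more steps are allowed (the real check also looks at
  // tool calls and finish reasons)
  if (opts.currentStep + 1 < opts.maxSteps) {
    await streamStepSketch({ ...opts, currentStep: opts.currentStep + 1, usage: combined });
    return;
  }
  // final step: resolve result promises, fire onFinish, close the stream
}

// kick off like the diff does: step 0 with zeroed usage
void streamStepSketch({
  currentStep: 0,
  usage: { promptTokens: 0, completionTokens: 0, totalTokens: 0 },
  maxSteps: 3,
});
```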
@@ -4544,12 +4505,12 @@ var DefaultStreamTextResult = class {
         const chunkType = chunk.type;
         switch (chunkType) {
           case "text-delta": {
-            controller.enqueue(
+            controller.enqueue(formatDataStreamPart("text", chunk.textDelta));
             break;
           }
           case "tool-call-streaming-start": {
             controller.enqueue(
-
+              formatDataStreamPart("tool_call_streaming_start", {
                 toolCallId: chunk.toolCallId,
                 toolName: chunk.toolName
               })
@@ -4558,7 +4519,7 @@ var DefaultStreamTextResult = class {
           }
           case "tool-call-delta": {
             controller.enqueue(
-
+              formatDataStreamPart("tool_call_delta", {
                 toolCallId: chunk.toolCallId,
                 argsTextDelta: chunk.argsTextDelta
               })
@@ -4567,7 +4528,7 @@ var DefaultStreamTextResult = class {
           }
           case "tool-call": {
             controller.enqueue(
-
+              formatDataStreamPart("tool_call", {
                 toolCallId: chunk.toolCallId,
                 toolName: chunk.toolName,
                 args: chunk.args
@@ -4577,7 +4538,7 @@ var DefaultStreamTextResult = class {
           }
           case "tool-result": {
             controller.enqueue(
-
+              formatDataStreamPart("tool_result", {
                 toolCallId: chunk.toolCallId,
                 result: chunk.result
               })
@@ -4586,13 +4547,13 @@ var DefaultStreamTextResult = class {
           }
           case "error": {
             controller.enqueue(
-
+              formatDataStreamPart("error", getErrorMessage3(chunk.error))
             );
             break;
           }
           case "step-finish": {
             controller.enqueue(
-
+              formatDataStreamPart("finish_step", {
                 finishReason: chunk.finishReason,
                 usage: sendUsage ? {
                   promptTokens: chunk.usage.promptTokens,
@@ -4605,7 +4566,7 @@ var DefaultStreamTextResult = class {
           }
           case "finish": {
             controller.enqueue(
-
+              formatDataStreamPart("finish_message", {
                 finishReason: chunk.finishReason,
                 usage: sendUsage ? {
                   promptTokens: chunk.usage.promptTokens,
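These cases serialize `streamText` chunks with the renamed `formatDataStreamPart`. A sketch of the resulting wire format for a short text-only run; the part codes shown in the comments (`0` for text, `d` for finish_message) are assumptions based on the data-stream protocol:

```ts
import { formatDataStreamPart } from "ai";

// Each part serializes as "<code>:<json>\n"; concatenated parts form the
// response body that data-stream consumers read.
const body =
  formatDataStreamPart("text", "Hello, ") +
  formatDataStreamPart("text", "world!") +
  formatDataStreamPart("finish_message", {
    finishReason: "stop",
    usage: { promptTokens: 3, completionTokens: 2 },
  });

console.log(body);
// 0:"Hello, "
// 0:"world!"
// d:{"finishReason":"stop","usage":{"promptTokens":3,"completionTokens":2}}
```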
@@ -4875,7 +4836,7 @@ function magnitude(vector) {
 
 // streams/assistant-response.ts
 import {
-
+  formatAssistantStreamPart
 } from "@ai-sdk/ui-utils";
 function AssistantResponse({ threadId, messageId }, process2) {
   const stream = new ReadableStream({
@@ -4884,17 +4845,21 @@ function AssistantResponse({ threadId, messageId }, process2) {
       const textEncoder = new TextEncoder();
       const sendMessage = (message) => {
         controller.enqueue(
-          textEncoder.encode(
+          textEncoder.encode(
+            formatAssistantStreamPart("assistant_message", message)
+          )
         );
       };
       const sendDataMessage = (message) => {
         controller.enqueue(
-          textEncoder.encode(
+          textEncoder.encode(
+            formatAssistantStreamPart("data_message", message)
+          )
         );
       };
       const sendError = (errorMessage) => {
         controller.enqueue(
-          textEncoder.encode(
+          textEncoder.encode(formatAssistantStreamPart("error", errorMessage))
         );
       };
       const forwardStream = async (stream2) => {
@@ -4905,7 +4870,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
           case "thread.message.created": {
             controller.enqueue(
               textEncoder.encode(
-
+                formatAssistantStreamPart("assistant_message", {
                   id: value.data.id,
                   role: "assistant",
                   content: [{ type: "text", text: { value: "" } }]
@@ -4919,7 +4884,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
           if ((content == null ? void 0 : content.type) === "text" && ((_b = content.text) == null ? void 0 : _b.value) != null) {
             controller.enqueue(
               textEncoder.encode(
-
+                formatAssistantStreamPart("text", content.text.value)
               )
             );
           }
@@ -4936,7 +4901,7 @@ function AssistantResponse({ threadId, messageId }, process2) {
           };
           controller.enqueue(
             textEncoder.encode(
-
+              formatAssistantStreamPart("assistant_control_data", {
                 threadId,
                 messageId
               })
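`AssistantResponse` now encodes with the renamed `formatAssistantStreamPart`. A hedged usage sketch; the handler shape and the callback parameters (`forwardStream`, `sendDataMessage`) are assumptions inferred from the code above:

```ts
import { AssistantResponse } from "ai";

// Hypothetical route handler; threadId/messageId would come from your
// assistants run. forwardStream pipes provider events through the
// formatAssistantStreamPart encoding shown in the diff.
export async function POST(req: Request) {
  const { threadId, messageId } = await req.json(); // illustrative inputs
  return AssistantResponse(
    { threadId, messageId },
    async ({ forwardStream, sendDataMessage }) => {
      // await forwardStream(providerRunStream); // forward provider events
      sendDataMessage({
        role: "data",
        data: { status: "started" }, // shape of the data message is an assumption
      });
    }
  );
}
```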
@@ -5001,7 +4966,7 @@ function createCallbacksTransformer(callbacks = {}) {
 }
 
 // streams/stream-data.ts
-import {
+import { formatDataStreamPart as formatDataStreamPart2 } from "@ai-sdk/ui-utils";
 
 // util/constants.ts
 var HANGING_STREAM_WARNING_TIME_MS = 15 * 1e3;
@@ -5053,7 +5018,7 @@ var StreamData = class {
       throw new Error("Stream controller is not initialized.");
     }
     this.controller.enqueue(
-      this.encoder.encode(
+      this.encoder.encode(formatDataStreamPart2("data", [value]))
     );
   }
   appendMessageAnnotation(value) {
@@ -5064,7 +5029,7 @@ var StreamData = class {
       throw new Error("Stream controller is not initialized.");
     }
     this.controller.enqueue(
-      this.encoder.encode(
+      this.encoder.encode(formatDataStreamPart2("message_annotations", [value]))
     );
   }
 };
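`StreamData.append` and `appendMessageAnnotation` likewise route through the renamed formatter. A brief usage sketch (the part codes in the comments are assumptions):

```ts
import { StreamData } from "ai";

const data = new StreamData();
data.append({ source: "kb-article-42" }); // emits a "data" part, e.g. 2:[{...}]
data.appendMessageAnnotation({ confidence: 0.9 }); // e.g. 8:[{...}]
await data.close(); // close so the merged response stream can end
```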
@@ -5074,7 +5039,7 @@ function createStreamDataTransformer() {
   return new TransformStream({
     transform: async (chunk, controller) => {
       const message = decoder.decode(chunk);
-      controller.enqueue(encoder.encode(
+      controller.enqueue(encoder.encode(formatDataStreamPart2("text", message)));
     }
   });
 }
@@ -5207,14 +5172,16 @@ export {
   experimental_createProviderRegistry,
   experimental_customProvider,
   experimental_wrapLanguageModel,
-
+  formatAssistantStreamPart2 as formatAssistantStreamPart,
+  formatDataStreamPart3 as formatDataStreamPart,
   generateId2 as generateId,
   generateObject,
   generateText,
   jsonSchema,
-
-
-
+  parseAssistantStreamPart,
+  parseDataStreamPart,
+  processDataStream,
+  processTextStream,
   streamObject,
   streamText,
   tool
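The updated export list confirms the new public surface: `formatAssistantStreamPart`, `formatDataStreamPart`, `parseAssistantStreamPart`, `parseDataStreamPart`, `processDataStream`, and `processTextStream`. A hedged consumer sketch for `processDataStream`; the `on…Part` callback names mirror the part types above and should be treated as assumptions:

```ts
import { processDataStream } from "ai";

// Consume a data-stream response body part by part.
async function readChat(response: Response) {
  await processDataStream({
    stream: response.body!,
    onTextPart: (text) => process.stdout.write(text),
    onToolCallPart: (toolCall) => console.log("tool call:", toolCall),
    onFinishMessagePart: (finish) => console.log("finish:", finish.finishReason),
  });
}
```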