ai 5.0.0-beta.22 → 5.0.0-beta.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +19 -0
- package/dist/bin/ai.js +69 -21
- package/dist/bin/ai.js.map +1 -1
- package/dist/bin/ai.min.js +10 -10
- package/dist/index.d.mts +61 -39
- package/dist/index.d.ts +61 -39
- package/dist/index.js +71 -21
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +72 -22
- package/dist/index.mjs.map +1 -1
- package/package.json +3 -3
package/dist/index.d.ts
CHANGED
@@ -1004,13 +1004,13 @@ Callback that is set using the `onError` option.
 */
 type StreamTextOnErrorCallback = (event: {
     error: unknown;
-}) =>
+}) => PromiseLike<void> | void;
 /**
 Callback that is set using the `onStepFinish` option.

 @param stepResult - The result of the step.
 */
-type StreamTextOnStepFinishCallback<TOOLS extends ToolSet> = (stepResult: StepResult<TOOLS>) =>
+type StreamTextOnStepFinishCallback<TOOLS extends ToolSet> = (stepResult: StepResult<TOOLS>) => PromiseLike<void> | void;
 /**
 Callback that is set using the `onChunk` option.

@@ -1020,7 +1020,7 @@ type StreamTextOnChunkCallback<TOOLS extends ToolSet> = (event: {
     chunk: Extract<TextStreamPart<TOOLS>, {
         type: 'text' | 'reasoning' | 'source' | 'tool-call' | 'tool-input-start' | 'tool-input-delta' | 'tool-result' | 'raw';
     }>;
-}) =>
+}) => PromiseLike<void> | void;
 /**
 Callback that is set using the `onFinish` option.

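The stream callbacks above now declare `PromiseLike<void> | void` as their return type, so `async` handlers satisfy the types directly. A minimal sketch, assuming any provider model instance and placeholder logging (not part of this package):

```ts
import { streamText, type LanguageModel } from 'ai';

async function run(model: LanguageModel) {
  const result = streamText({
    model,
    prompt: 'Write a haiku about package diffs.',
    // async handlers now match the declared callback types
    onChunk: async ({ chunk }) => {
      console.log('chunk type:', chunk.type);
    },
    onError: async ({ error }) => {
      console.error('stream error:', error);
    },
  });

  for await (const text of result.textStream) {
    process.stdout.write(text);
  }
}
```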
@@ -1035,7 +1035,18 @@ Details for all steps.
 Total usage for all steps. This is the sum of the usage of all steps.
     */
     readonly totalUsage: LanguageModelUsage;
-}) =>
+}) => PromiseLike<void> | void;
+/**
+Callback that is set using the `onAbort` option.
+
+@param event - The event that is passed to the callback.
+*/
+type StreamTextOnAbortCallback<TOOLS extends ToolSet> = (event: {
+    /**
+Details for all previously finished steps.
+    */
+    readonly steps: StepResult<TOOLS>[];
+}) => PromiseLike<void> | void;
 /**
 Generate a text and call tools for a given prompt using a language model.

@@ -1084,7 +1095,7 @@ If set and supported by the model, calls will generate deterministic results.
 @return
 A result object for accessing different stream types and additional information.
 */
-declare function streamText<TOOLS extends ToolSet, OUTPUT = never, PARTIAL_OUTPUT = never>({ model, tools, toolChoice, system, prompt, messages, maxRetries, abortSignal, headers, stopWhen, experimental_output: output, experimental_telemetry: telemetry, prepareStep, providerOptions, experimental_activeTools, activeTools, experimental_repairToolCall: repairToolCall, experimental_transform: transform, includeRawChunks, onChunk, onError, onFinish, onStepFinish, _internal: { now, generateId, currentDate, }, ...settings }: CallSettings & Prompt & {
+declare function streamText<TOOLS extends ToolSet, OUTPUT = never, PARTIAL_OUTPUT = never>({ model, tools, toolChoice, system, prompt, messages, maxRetries, abortSignal, headers, stopWhen, experimental_output: output, experimental_telemetry: telemetry, prepareStep, providerOptions, experimental_activeTools, activeTools, experimental_repairToolCall: repairToolCall, experimental_transform: transform, includeRawChunks, onChunk, onError, onFinish, onAbort, onStepFinish, _internal: { now, generateId, currentDate, }, ...settings }: CallSettings & Prompt & {
     /**
 The language model to use.
     */
@@ -1174,6 +1185,7 @@ Callback that is called when the LLM response and all request tool executions
 The usage is the combined usage of all steps.
     */
     onFinish?: StreamTextOnFinishCallback<TOOLS>;
+    onAbort?: StreamTextOnAbortCallback<TOOLS>;
     /**
 Callback that is called when each step (LLM call) is finished, including intermediate steps.
     */
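A hedged sketch of the new `onAbort` hook: it receives the steps that finished before the abort, and, with the rest of this diff, the stream ends with an `'abort'` part instead of rejecting. The model and the timing are placeholders:

```ts
import { streamText, type LanguageModel } from 'ai';

async function runWithAbort(model: LanguageModel) {
  const controller = new AbortController();

  const result = streamText({
    model,
    prompt: 'Write a long story.',
    abortSignal: controller.signal,
    onAbort: ({ steps }) => {
      // called when the stream is aborted via abortSignal;
      // `steps` holds the steps that completed before the abort
      console.log(`aborted after ${steps.length} finished step(s)`);
    },
  });

  // simulate a user cancelling the request
  setTimeout(() => controller.abort(), 1000);

  for await (const part of result.fullStream) {
    if (part.type === 'abort') {
      console.log('stream ended with an abort part');
    }
  }
}
```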
@@ -1463,6 +1475,8 @@ type UIMessageChunk<METADATA = unknown, DATA_TYPES extends UIDataTypes = UIDataT
 } | {
     type: 'finish';
     messageMetadata?: METADATA;
+} | {
+    type: 'abort';
 } | {
     type: 'message-metadata';
     messageMetadata: METADATA;
@@ -1479,6 +1493,27 @@ type AsyncIterableStream<T> = AsyncIterable<T> & ReadableStream<T>;

 type ErrorHandler = (error: unknown) => void;

+type UIMessageStreamOnFinishCallback<UI_MESSAGE extends UIMessage> = (event: {
+    /**
+     * The updated list of UI messages.
+     */
+    messages: UI_MESSAGE[];
+    /**
+     * Indicates whether the response message is a continuation of the last original message,
+     * or if a new message was created.
+     */
+    isContinuation: boolean;
+    /**
+     * Indicates whether the stream was aborted.
+     */
+    isAborted: boolean;
+    /**
+     * The message that was sent to the client as a response
+     * (including the original message if it was extended).
+     */
+    responseMessage: UI_MESSAGE;
+}) => PromiseLike<void> | void;
+
 type UIMessageStreamOptions<UI_MESSAGE extends UIMessage> = {
     /**
     * The original messages. If they are provided, persistence mode is assumed,
@@ -1492,22 +1527,7 @@ type UIMessageStreamOptions<UI_MESSAGE extends UIMessage> = {
     * the original messages are provided and the last message is an assistant message).
     */
     generateMessageId?: IdGenerator;
-    onFinish?:
-    /**
-     * The updates list of UI messages.
-     */
-    messages: UI_MESSAGE[];
-    /**
-     * Indicates whether the response message is a continuation of the last original message,
-     * or if a new message was created.
-     */
-    isContinuation: boolean;
-    /**
-     * The message that was sent to the client as a response
-     * (including the original message if it was extended).
-     */
-    responseMessage: UI_MESSAGE;
-    }) => void;
+    onFinish?: UIMessageStreamOnFinishCallback<UI_MESSAGE>;
     /**
     * Extracts message metadata that will be send to the client.
     *
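The inline `onFinish` option type was extracted into the exported `UIMessageStreamOnFinishCallback`, which also gains an `isAborted` flag. A minimal persistence-style handler, sketched with a hypothetical `saveChat` helper:

```ts
import type { UIMessage, UIMessageStreamOnFinishCallback } from 'ai';

// hypothetical storage helper, not part of the package
declare function saveChat(messages: UIMessage[]): Promise<void>;

const onFinish: UIMessageStreamOnFinishCallback<UIMessage> = async ({
  messages,
  responseMessage,
  isContinuation,
  isAborted,
}) => {
  if (isAborted) {
    // true when the stream ended with the new 'abort' chunk
    console.warn('aborted response, persisting partial message', responseMessage.id);
  }
  console.log('continuation of previous message:', isContinuation);
  await saveChat(messages);
};

// later, e.g.: result.toUIMessageStream({ originalMessages, onFinish })
```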
@@ -1698,7 +1718,7 @@ interface StreamTextResult<TOOLS extends ToolSet, PARTIAL_OUTPUT> {

     @return A UI message stream.
     */
-    toUIMessageStream<UI_MESSAGE extends UIMessage>(options?: UIMessageStreamOptions<UI_MESSAGE>):
+    toUIMessageStream<UI_MESSAGE extends UIMessage>(options?: UIMessageStreamOptions<UI_MESSAGE>): AsyncIterableStream<InferUIMessageChunk<UI_MESSAGE>>;
     /**
     Writes UI message stream output to a Node.js response-like object.
     @param response A Node.js response-like object (ServerResponse).
@@ -1806,6 +1826,8 @@ type TextStreamPart<TOOLS extends ToolSet> = {
     type: 'finish';
     finishReason: FinishReason;
     totalUsage: LanguageModelUsage;
+} | {
+    type: 'abort';
 } | {
     type: 'error';
     error: unknown;
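`TextStreamPart` (the element type of `result.fullStream`) gains the `'abort'` variant, so consumers can distinguish an aborted stream from an error. A hedged sketch:

```ts
import { streamText, type LanguageModel } from 'ai';

async function consumeFullStream(model: LanguageModel, abortSignal?: AbortSignal) {
  const result = streamText({ model, prompt: 'Explain streams.', abortSignal });

  for await (const part of result.fullStream) {
    if (part.type === 'abort') {
      // new in this release: the stream closes with an abort part
      console.log('[aborted]');
    } else if (part.type === 'error') {
      console.error('[error]', part.error);
    }
  }
}
```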
@@ -2638,6 +2660,21 @@ functionality that can be fully encapsulated in the provider.
     };
 }): Promise<GenerateObjectResult<RESULT>>;

+/**
+ * Consumes a ReadableStream until it's fully read.
+ *
+ * This function reads the stream chunk by chunk until the stream is exhausted.
+ * It doesn't process or return the data from the stream; it simply ensures
+ * that the entire stream is read.
+ *
+ * @param {ReadableStream} stream - The ReadableStream to be consumed.
+ * @returns {Promise<void>} A promise that resolves when the stream is fully consumed.
+ */
+declare function consumeStream({ stream, onError, }: {
+    stream: ReadableStream;
+    onError?: (error: unknown) => void;
+}): Promise<void>;
+
 /**
 * Calculates the cosine similarity between two vectors. This is a useful metric for
 * comparing the similarity of two vectors such as embeddings.
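`consumeStream` is now part of the public surface (it also appears in the updated export list further down). One plausible use, sketched under the assumption that you want the stream fully read even when no client consumes its chunks:

```ts
import { consumeStream, streamText, type LanguageModel } from 'ai';

async function runToCompletion(model: LanguageModel) {
  const result = streamText({ model, prompt: 'Summarize the changelog.' });

  // drain the UI message stream without using its chunks
  await consumeStream({
    stream: result.toUIMessageStream(),
    onError: (error) => console.error('error while consuming stream', error),
  });
}
```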
@@ -3588,22 +3625,7 @@ declare function createUIMessageStream<UI_MESSAGE extends UIMessage>({ execute,
     * and a message ID is provided for the response message.
     */
     originalMessages?: UI_MESSAGE[];
-    onFinish?:
-    /**
-     * The updates list of UI messages.
-     */
-    messages: UI_MESSAGE[];
-    /**
-     * Indicates whether the response message is a continuation of the last original message,
-     * or if a new message was created.
-     */
-    isContinuation: boolean;
-    /**
-     * The message that was sent to the client as a response
-     * (including the original message if it was extended).
-     */
-    responseMessage: UI_MESSAGE;
-    }) => void;
+    onFinish?: UIMessageStreamOnFinishCallback<UI_MESSAGE>;
     generateId?: IdGenerator;
 }): ReadableStream<InferUIMessageChunk<UI_MESSAGE>>;

@@ -4032,4 +4054,4 @@ declare global {
     var AI_SDK_DEFAULT_PROVIDER: ProviderV2 | undefined;
 }

-export { AbstractChat, AsyncIterableStream, CallSettings, CallWarning, ChatInit, ChatOnDataCallback, ChatOnErrorCallback, ChatOnFinishCallback, ChatOnToolCallCallback, ChatRequestOptions, ChatState, ChatStatus, ChatTransport, ChunkDetector, CompletionRequestOptions, CoreAssistantMessage, CoreMessage, CoreSystemMessage, CoreToolMessage, CoreUserMessage, CreateUIMessage, DataUIPart, DeepPartial, DefaultChatTransport, DownloadError, EmbedManyResult, EmbedResult, Embedding, EmbeddingModel, EmbeddingModelUsage, ErrorHandler, Agent as Experimental_Agent, AgentSettings as Experimental_AgentSettings, GenerateImageResult as Experimental_GenerateImageResult, GeneratedFile as Experimental_GeneratedImage, SpeechResult as Experimental_SpeechResult, TranscriptionResult as Experimental_TranscriptionResult, FileUIPart, FinishReason, GenerateObjectResult, GenerateTextOnStepFinishCallback, GenerateTextResult, GeneratedAudioFile, GeneratedFile, HttpChatTransport, HttpChatTransportInitOptions, ImageModel, ImageGenerationWarning as ImageModelCallWarning, ImageModelProviderMetadata, ImageModelResponseMetadata, InferUIDataParts, InferUIMessageChunk, InferUITool, InferUITools, InvalidArgumentError, InvalidDataContentError, InvalidMessageRoleError, InvalidStreamPartError, InvalidToolInputError, JSONRPCError, JSONRPCMessage, JSONRPCNotification, JSONRPCRequest, JSONRPCResponse, JSONValue, JsonToSseTransformStream, LanguageModel, LanguageModelRequestMetadata, LanguageModelResponseMetadata, LanguageModelUsage, MCPClientError, MCPTransport, MessageConversionError, NoImageGeneratedError, NoObjectGeneratedError, NoOutputSpecifiedError, NoSuchProviderError, NoSuchToolError, ObjectStreamPart, output as Output, PrepareReconnectToStreamRequest, PrepareSendMessagesRequest, PrepareStepFunction, PrepareStepResult, Prompt, Provider, ProviderMetadata, ProviderRegistryProvider, ReasoningUIPart, RepairTextFunction, RetryError, SerialJobExecutor, SourceDocumentUIPart, SourceUrlUIPart, SpeechModel, SpeechModelResponseMetadata, SpeechWarning, StepResult, StepStartUIPart, StopCondition, StreamObjectOnFinishCallback, StreamObjectResult, StreamTextOnChunkCallback, StreamTextOnErrorCallback, StreamTextOnFinishCallback, StreamTextOnStepFinishCallback, StreamTextResult, StreamTextTransform, TelemetrySettings, TextStreamChatTransport, TextStreamPart, TextUIPart, ToolCallRepairError, ToolCallRepairFunction, ToolCallUnion, ToolChoice, ToolErrorUnion, ToolResultUnion, ToolSet, ToolUIPart, TranscriptionModel, TranscriptionModelResponseMetadata, TranscriptionWarning, UIDataPartSchemas, UIDataTypes, UIMessage, UIMessageChunk, UIMessagePart, UIMessageStreamOptions, UIMessageStreamWriter, UITool, UITools, UI_MESSAGE_STREAM_HEADERS, UnsupportedModelVersionError, UseCompletionOptions, assistantModelMessageSchema, callCompletionApi, convertFileListToFileUIParts, convertToCoreMessages, convertToModelMessages, coreAssistantMessageSchema, coreMessageSchema, coreSystemMessageSchema, coreToolMessageSchema, coreUserMessageSchema, cosineSimilarity, createProviderRegistry, createTextStreamResponse, createUIMessageStream, createUIMessageStreamResponse, customProvider, defaultSettingsMiddleware, embed, embedMany, createMCPClient as experimental_createMCPClient, experimental_createProviderRegistry, experimental_customProvider, generateImage as experimental_generateImage, generateSpeech as experimental_generateSpeech, transcribe as experimental_transcribe, extractReasoningMiddleware, generateObject, generateText, getTextFromDataUrl, getToolName, hasToolCall, isDeepEqualData, isToolUIPart, modelMessageSchema, parsePartialJson, pipeTextStreamToResponse, pipeUIMessageStreamToResponse, readUIMessageStream, simulateReadableStream, simulateStreamingMiddleware, smoothStream, stepCountIs, streamObject, streamText, systemModelMessageSchema, toolModelMessageSchema, userModelMessageSchema, wrapLanguageModel, wrapProvider };
+export { AbstractChat, AsyncIterableStream, CallSettings, CallWarning, ChatInit, ChatOnDataCallback, ChatOnErrorCallback, ChatOnFinishCallback, ChatOnToolCallCallback, ChatRequestOptions, ChatState, ChatStatus, ChatTransport, ChunkDetector, CompletionRequestOptions, CoreAssistantMessage, CoreMessage, CoreSystemMessage, CoreToolMessage, CoreUserMessage, CreateUIMessage, DataUIPart, DeepPartial, DefaultChatTransport, DownloadError, EmbedManyResult, EmbedResult, Embedding, EmbeddingModel, EmbeddingModelUsage, ErrorHandler, Agent as Experimental_Agent, AgentSettings as Experimental_AgentSettings, GenerateImageResult as Experimental_GenerateImageResult, GeneratedFile as Experimental_GeneratedImage, SpeechResult as Experimental_SpeechResult, TranscriptionResult as Experimental_TranscriptionResult, FileUIPart, FinishReason, GenerateObjectResult, GenerateTextOnStepFinishCallback, GenerateTextResult, GeneratedAudioFile, GeneratedFile, HttpChatTransport, HttpChatTransportInitOptions, ImageModel, ImageGenerationWarning as ImageModelCallWarning, ImageModelProviderMetadata, ImageModelResponseMetadata, InferUIDataParts, InferUIMessageChunk, InferUITool, InferUITools, InvalidArgumentError, InvalidDataContentError, InvalidMessageRoleError, InvalidStreamPartError, InvalidToolInputError, JSONRPCError, JSONRPCMessage, JSONRPCNotification, JSONRPCRequest, JSONRPCResponse, JSONValue, JsonToSseTransformStream, LanguageModel, LanguageModelRequestMetadata, LanguageModelResponseMetadata, LanguageModelUsage, MCPClientError, MCPTransport, MessageConversionError, NoImageGeneratedError, NoObjectGeneratedError, NoOutputSpecifiedError, NoSuchProviderError, NoSuchToolError, ObjectStreamPart, output as Output, PrepareReconnectToStreamRequest, PrepareSendMessagesRequest, PrepareStepFunction, PrepareStepResult, Prompt, Provider, ProviderMetadata, ProviderRegistryProvider, ReasoningUIPart, RepairTextFunction, RetryError, SerialJobExecutor, SourceDocumentUIPart, SourceUrlUIPart, SpeechModel, SpeechModelResponseMetadata, SpeechWarning, StepResult, StepStartUIPart, StopCondition, StreamObjectOnFinishCallback, StreamObjectResult, StreamTextOnChunkCallback, StreamTextOnErrorCallback, StreamTextOnFinishCallback, StreamTextOnStepFinishCallback, StreamTextResult, StreamTextTransform, TelemetrySettings, TextStreamChatTransport, TextStreamPart, TextUIPart, ToolCallRepairError, ToolCallRepairFunction, ToolCallUnion, ToolChoice, ToolErrorUnion, ToolResultUnion, ToolSet, ToolUIPart, TranscriptionModel, TranscriptionModelResponseMetadata, TranscriptionWarning, UIDataPartSchemas, UIDataTypes, UIMessage, UIMessageChunk, UIMessagePart, UIMessageStreamOnFinishCallback, UIMessageStreamOptions, UIMessageStreamWriter, UITool, UITools, UI_MESSAGE_STREAM_HEADERS, UnsupportedModelVersionError, UseCompletionOptions, assistantModelMessageSchema, callCompletionApi, consumeStream, convertFileListToFileUIParts, convertToCoreMessages, convertToModelMessages, coreAssistantMessageSchema, coreMessageSchema, coreSystemMessageSchema, coreToolMessageSchema, coreUserMessageSchema, cosineSimilarity, createProviderRegistry, createTextStreamResponse, createUIMessageStream, createUIMessageStreamResponse, customProvider, defaultSettingsMiddleware, embed, embedMany, createMCPClient as experimental_createMCPClient, experimental_createProviderRegistry, experimental_customProvider, generateImage as experimental_generateImage, generateSpeech as experimental_generateSpeech, transcribe as experimental_transcribe, extractReasoningMiddleware, generateObject, generateText, getTextFromDataUrl, getToolName, hasToolCall, isDeepEqualData, isToolUIPart, modelMessageSchema, parsePartialJson, pipeTextStreamToResponse, pipeUIMessageStreamToResponse, readUIMessageStream, simulateReadableStream, simulateStreamingMiddleware, smoothStream, stepCountIs, streamObject, streamText, systemModelMessageSchema, toolModelMessageSchema, userModelMessageSchema, wrapLanguageModel, wrapProvider };
package/dist/index.js
CHANGED
@@ -59,6 +59,7 @@ __export(src_exports, {
   asSchema: () => import_provider_utils29.asSchema,
   assistantModelMessageSchema: () => assistantModelMessageSchema,
   callCompletionApi: () => callCompletionApi,
+  consumeStream: () => consumeStream,
   convertFileListToFileUIParts: () => convertFileListToFileUIParts,
   convertToCoreMessages: () => convertToCoreMessages,
   convertToModelMessages: () => convertToModelMessages,
@@ -2736,6 +2737,9 @@ var uiMessageChunkSchema = import_v47.z.union([
     type: import_v47.z.literal("finish"),
     messageMetadata: import_v47.z.unknown().optional()
   }),
+  import_v47.z.strictObject({
+    type: import_v47.z.literal("abort")
+  }),
   import_v47.z.strictObject({
     type: import_v47.z.literal("message-metadata"),
     messageMetadata: import_v47.z.unknown()
@@ -3120,7 +3124,7 @@ function isToolUIPart(part) {
   return part.type.startsWith("tool-");
 }
 function getToolName(part) {
-  return part.type.split("-")
+  return part.type.split("-").slice(1).join("-");
 }

 // src/ui/process-ui-message-stream.ts
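The `getToolName` fix keeps every segment after the `tool-` prefix, so tool names that themselves contain dashes are no longer truncated. A small sketch of the effect (the part literal is a hand-made stand-in for a `ToolUIPart`):

```ts
import { getToolName, type ToolUIPart } from 'ai';

// UI tool parts have a type of the form `tool-${toolName}`
const part = { type: 'tool-get-weather-forecast' } as unknown as ToolUIPart;

// everything after the 'tool-' prefix is now returned intact
console.log(getToolName(part)); // 'get-weather-forecast'
```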
@@ -3479,6 +3483,7 @@ function handleUIMessageStreamFinish({
   } else {
     messageId = lastMessage.id;
   }
+  let isAborted = false;
   const idInjectedStream = stream.pipeThrough(
     new TransformStream({
       transform(chunk, controller) {
@@ -3488,6 +3493,9 @@ function handleUIMessageStreamFinish({
             startChunk.messageId = messageId;
           }
         }
+        if (chunk.type === "abort") {
+          isAborted = true;
+        }
         controller.enqueue(chunk);
       }
     })
@@ -3513,9 +3521,10 @@ function handleUIMessageStreamFinish({
       transform(chunk, controller) {
         controller.enqueue(chunk);
       },
-      flush() {
+      async flush() {
         const isContinuation = state.message.id === (lastMessage == null ? void 0 : lastMessage.id);
-        onFinish({
+        await onFinish({
+          isAborted,
           isContinuation,
           responseMessage: state.message,
           messages: [
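`flush()` is now `async` and awaits `onFinish`, and the callback also receives the recorded `isAborted` flag. That suggests asynchronous persistence work can live directly in `onFinish` without racing the end of the stream. A hedged sketch with a hypothetical storage helper:

```ts
import type { UIMessage, UIMessageStreamOptions } from 'ai';

// hypothetical storage helper, not part of the package
declare function persistMessages(messages: UIMessage[]): Promise<void>;

const streamOptions: UIMessageStreamOptions<UIMessage> = {
  onFinish: async ({ messages, isAborted }) => {
    // the compiled flush() above awaits this promise before the stream completes
    if (!isAborted) {
      await persistMessages(messages);
    }
  },
};
```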
@@ -3609,6 +3618,13 @@ function createStitchableStream() {
   let controller = null;
   let isClosed = false;
   let waitForNewStream = createResolvablePromise();
+  const terminate = () => {
+    isClosed = true;
+    waitForNewStream.resolve();
+    innerStreamReaders.forEach((reader) => reader.cancel());
+    innerStreamReaders = [];
+    controller == null ? void 0 : controller.close();
+  };
   const processPull = async () => {
     if (isClosed && innerStreamReaders.length === 0) {
       controller == null ? void 0 : controller.close();
@@ -3634,9 +3650,7 @@ function createStitchableStream() {
     } catch (error) {
       controller == null ? void 0 : controller.error(error);
       innerStreamReaders.shift();
-
-      controller == null ? void 0 : controller.close();
-    }
+      terminate();
     }
   };
   return {
@@ -3675,13 +3689,7 @@ function createStitchableStream() {
     * Immediately close the outer stream. This will cancel all inner streams
     * and close the outer stream.
     */
-    terminate
-      isClosed = true;
-      waitForNewStream.resolve();
-      innerStreamReaders.forEach((reader) => reader.cancel());
-      innerStreamReaders = [];
-      controller == null ? void 0 : controller.close();
-    }
+    terminate
   };
 }

@@ -3729,6 +3737,30 @@ function now() {
   return (_b = (_a16 = globalThis == null ? void 0 : globalThis.performance) == null ? void 0 : _a16.now()) != null ? _b : Date.now();
 }

+// src/util/filter-stream-errors.ts
+function filterStreamErrors(readable, onError) {
+  return new ReadableStream({
+    async start(controller) {
+      const reader = readable.getReader();
+      try {
+        while (true) {
+          const { done, value } = await reader.read();
+          if (done) {
+            controller.close();
+            break;
+          }
+          controller.enqueue(value);
+        }
+      } catch (error) {
+        await onError({ error, controller });
+      }
+    },
+    cancel(reason) {
+      return readable.cancel(reason);
+    }
+  });
+}
+
 // src/generate-text/run-tools-transformation.ts
 var import_provider_utils12 = require("@ai-sdk/provider-utils");
 function runToolsTransformation({
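The new bundled helper (from `src/util/filter-stream-errors.ts`, per the comment) re-emits chunks from a source stream and routes read errors to a callback together with the output controller, instead of letting them propagate. A typed sketch of the same pattern; the type names are my own assumptions, the logic mirrors the compiled output above:

```ts
type StreamErrorHandler<T> = (args: {
  error: unknown;
  controller: ReadableStreamDefaultController<T>;
}) => PromiseLike<void> | void;

function filterStreamErrors<T>(
  readable: ReadableStream<T>,
  onError: StreamErrorHandler<T>,
): ReadableStream<T> {
  return new ReadableStream<T>({
    async start(controller) {
      const reader = readable.getReader();
      try {
        while (true) {
          const { done, value } = await reader.read();
          if (done) {
            controller.close();
            break;
          }
          controller.enqueue(value);
        }
      } catch (error) {
        // the caller decides: re-emit, swallow, or replace the error
        // (streamText uses this to turn abort errors into an 'abort' chunk)
        await onError({ error, controller });
      }
    },
    cancel(reason) {
      return readable.cancel(reason);
    },
  });
}
```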
@@ -3977,6 +4009,7 @@ function streamText({
     console.error(error);
   },
   onFinish,
+  onAbort,
   onStepFinish,
   _internal: {
     now: now2 = now,
@@ -4008,6 +4041,7 @@ function streamText({
     onChunk,
     onError,
     onFinish,
+    onAbort,
     onStepFinish,
     now: now2,
     currentDate,
@@ -4106,6 +4140,7 @@ var DefaultStreamTextResult = class {
     onChunk,
     onError,
     onFinish,
+    onAbort,
     onStepFinish
   }) {
     this._totalUsage = new DelayedPromise();
@@ -4113,7 +4148,6 @@ var DefaultStreamTextResult = class {
     this._steps = new DelayedPromise();
     this.output = output;
     this.includeRawChunks = includeRawChunks;
-    this.generateId = generateId3;
     let stepFinish;
     let recordedContent = [];
     const recordedResponseMessages = [];
@@ -4316,6 +4350,15 @@ var DefaultStreamTextResult = class {
     this.addStream = stitchableStream.addStream;
     this.closeStream = stitchableStream.close;
     let stream = stitchableStream.stream;
+    stream = filterStreamErrors(stream, ({ error, controller }) => {
+      if ((0, import_provider_utils13.isAbortError)(error) && (abortSignal == null ? void 0 : abortSignal.aborted)) {
+        onAbort == null ? void 0 : onAbort({ steps: recordedSteps });
+        controller.enqueue({ type: "abort" });
+        controller.close();
+      } else {
+        controller.error(error);
+      }
+    });
     stream = stream.pipeThrough(
       new TransformStream({
         start(controller) {
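This is where aborts get wired end to end: when the inner stream fails with an abort error and `abortSignal` is aborted, `onAbort` fires with the recorded steps and an `{ type: "abort" }` chunk is enqueued, which downstream surfaces as `isAborted: true` in the UI message stream `onFinish`. A hedged server-side sketch; the handler shape is an assumption, and it assumes `pipeUIMessageStreamToResponse` accepts the same UI message stream options as `toUIMessageStream`:

```ts
import type { ServerResponse } from 'node:http';
import { streamText, type LanguageModel, type UIMessage } from 'ai';

function handleChatRequest(
  response: ServerResponse,
  model: LanguageModel,
  originalMessages: UIMessage[],
  abortSignal: AbortSignal, // e.g. derived from the incoming request
) {
  const result = streamText({
    model,
    prompt: 'Answer the user.',
    abortSignal,
    onAbort: ({ steps }) => {
      console.log(`client aborted after ${steps.length} finished step(s)`);
    },
  });

  result.pipeUIMessageStreamToResponse(response, {
    originalMessages,
    onFinish: ({ isAborted, messages }) => {
      console.log('stream done', { isAborted, messageCount: messages.length });
    },
  });
}
```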
@@ -5058,6 +5101,10 @@ var DefaultStreamTextResult = class {
             }
             break;
           }
+          case "abort": {
+            controller.enqueue(part);
+            break;
+          }
           case "tool-input-end": {
             break;
           }
@@ -5078,13 +5125,15 @@ var DefaultStreamTextResult = class {
         }
       })
     );
-    return
-
-
-
-
-
-
+    return createAsyncIterableStream(
+      handleUIMessageStreamFinish({
+        stream: baseStream,
+        messageId: responseMessageId != null ? responseMessageId : generateMessageId == null ? void 0 : generateMessageId(),
+        originalMessages,
+        onFinish,
+        onError
+      })
+    );
   }
   pipeUIMessageStreamToResponse(response, {
     originalMessages,
@@ -9326,6 +9375,7 @@ function readUIMessageStream({
   asSchema,
   assistantModelMessageSchema,
   callCompletionApi,
+  consumeStream,
   convertFileListToFileUIParts,
   convertToCoreMessages,
   convertToModelMessages,