@futurity/chat-react 0.0.3 → 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +120 -53
- package/dist/index.d.ts +80 -6
- package/dist/index.js +426 -12
- package/package.json +8 -4
- package/src/index.ts +7 -0
- package/src/stream-accumulator.ts +448 -0
- package/src/types.ts +3 -0
- package/src/useStreamChat.ts +135 -23
|
@@ -0,0 +1,448 @@
|
|
|
1
|
+
import {
|
|
2
|
+
type MessagePart,
|
|
3
|
+
Z_DataEndSplitSchema,
|
|
4
|
+
Z_DataSplitSchema,
|
|
5
|
+
Z_DataSubagentPartSchema,
|
|
6
|
+
} from "@futurity/chat-protocol";
|
|
7
|
+
|
|
8
|
+
/**
 * A message part after preprocessing: either a plain part rendered at its
 * original position, or a run of subagent parts collapsed into one group.
 */
export type ProcessedPart =
  | { type: "regular"; part: MessagePart; originalIndex: number }
  | {
      type: "split-group";
      // Optional display heading/subheading carried over from the data-split part.
      title?: string;
      subtitle?: string;
      // Accumulated inner parts belonging to this subagent group.
      parts: MessagePart[];
      // Index of the opening data-split part in the source parts array.
      startIndex: number;
      // Index of the matching data-endsplit, or parts.length while the group is still open.
      endIndex: number;
      desktopSessionId?: string;
    };
|
|
19
|
+
|
|
20
|
+
// biome-ignore lint/suspicious/noExplicitAny: raw stream protocol chunks have dynamic shapes
|
|
21
|
+
type Chunk = Record<string, any>;
|
|
22
|
+
|
|
23
|
+
/** Narrow `unknown` to a `Chunk` whose `type` is a string. */
|
|
24
|
+
export function isTypedChunk(x: unknown): x is Chunk & { type: string } {
|
|
25
|
+
return (
|
|
26
|
+
typeof x === "object" &&
|
|
27
|
+
x !== null &&
|
|
28
|
+
"type" in x &&
|
|
29
|
+
typeof x.type === "string"
|
|
30
|
+
);
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
/**
|
|
34
|
+
* Incrementally accumulates raw UI message stream protocol chunks into
|
|
35
|
+
* rendered message parts. Maintains internal state so repeated calls with a
|
|
36
|
+
* growing `rawParts` array only process newly appended chunks.
|
|
37
|
+
*
|
|
38
|
+
* Internally tracks mutable objects via `Chunk` maps (text-by-id, tool-by-id)
|
|
39
|
+
* and casts to `MessagePart` at the output boundary. The constructed shapes
|
|
40
|
+
* conform to the `MessagePart` union at runtime.
|
|
41
|
+
*/
|
|
42
|
+
export class StreamAccumulator {
|
|
43
|
+
readonly parts: MessagePart[] = [];
|
|
44
|
+
private textById = new Map<string, Chunk>();
|
|
45
|
+
private reasoningById = new Map<string, Chunk>();
|
|
46
|
+
private toolById = new Map<string, Chunk>();
|
|
47
|
+
private partialToolText = new Map<string, string>();
|
|
48
|
+
private processedCount = 0;
|
|
49
|
+
|
|
50
|
+
private push(part: Chunk): void {
|
|
51
|
+
this.parts.push(part as MessagePart);
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
/** Process any new raw chunks and return the full accumulated parts array. */
|
|
55
|
+
accumulate(rawParts: unknown[]): MessagePart[] {
|
|
56
|
+
for (let i = this.processedCount; i < rawParts.length; i++) {
|
|
57
|
+
const chunk = rawParts[i];
|
|
58
|
+
if (!isTypedChunk(chunk)) {
|
|
59
|
+
this.parts.push(chunk as MessagePart);
|
|
60
|
+
continue;
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
switch (chunk.type) {
|
|
64
|
+
// --- Text accumulation ---
|
|
65
|
+
case "text-start": {
|
|
66
|
+
const part = { type: "text", text: "", state: "streaming" };
|
|
67
|
+
this.textById.set(chunk.id, part);
|
|
68
|
+
this.push(part);
|
|
69
|
+
break;
|
|
70
|
+
}
|
|
71
|
+
case "text-delta": {
|
|
72
|
+
const part = this.textById.get(chunk.id);
|
|
73
|
+
if (part) part.text += chunk.delta;
|
|
74
|
+
break;
|
|
75
|
+
}
|
|
76
|
+
case "text-end": {
|
|
77
|
+
const part = this.textById.get(chunk.id);
|
|
78
|
+
if (part) part.state = "done";
|
|
79
|
+
break;
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
// --- Reasoning accumulation ---
|
|
83
|
+
case "reasoning-start": {
|
|
84
|
+
const part = { type: "reasoning", text: "", state: "streaming" };
|
|
85
|
+
this.reasoningById.set(chunk.id, part);
|
|
86
|
+
this.push(part);
|
|
87
|
+
break;
|
|
88
|
+
}
|
|
89
|
+
case "reasoning-delta": {
|
|
90
|
+
const part = this.reasoningById.get(chunk.id);
|
|
91
|
+
if (part) part.text += chunk.delta;
|
|
92
|
+
break;
|
|
93
|
+
}
|
|
94
|
+
case "reasoning-end": {
|
|
95
|
+
const part = this.reasoningById.get(chunk.id);
|
|
96
|
+
if (part) part.state = "done";
|
|
97
|
+
break;
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
// --- Tool invocation accumulation ---
|
|
101
|
+
case "tool-input-start": {
|
|
102
|
+
const typeName = chunk.dynamic
|
|
103
|
+
? "dynamic-tool"
|
|
104
|
+
: `tool-${chunk.toolName}`;
|
|
105
|
+
const part: Chunk = {
|
|
106
|
+
type: typeName,
|
|
107
|
+
toolCallId: chunk.toolCallId,
|
|
108
|
+
toolName: chunk.toolName,
|
|
109
|
+
state: "input-streaming",
|
|
110
|
+
input: undefined,
|
|
111
|
+
providerExecuted: chunk.providerExecuted,
|
|
112
|
+
title: chunk.title,
|
|
113
|
+
};
|
|
114
|
+
this.toolById.set(chunk.toolCallId, part);
|
|
115
|
+
this.partialToolText.set(chunk.toolCallId, "");
|
|
116
|
+
this.push(part);
|
|
117
|
+
break;
|
|
118
|
+
}
|
|
119
|
+
case "tool-input-delta": {
|
|
120
|
+
const text =
|
|
121
|
+
(this.partialToolText.get(chunk.toolCallId) ?? "") +
|
|
122
|
+
(chunk.inputTextDelta ?? "");
|
|
123
|
+
this.partialToolText.set(chunk.toolCallId, text);
|
|
124
|
+
const part = this.toolById.get(chunk.toolCallId);
|
|
125
|
+
if (part) {
|
|
126
|
+
try {
|
|
127
|
+
part.input = JSON.parse(text);
|
|
128
|
+
} catch {
|
|
129
|
+
// Partial JSON — leave input as-is until complete
|
|
130
|
+
}
|
|
131
|
+
}
|
|
132
|
+
break;
|
|
133
|
+
}
|
|
134
|
+
case "tool-input-available": {
|
|
135
|
+
const typeName = chunk.dynamic
|
|
136
|
+
? "dynamic-tool"
|
|
137
|
+
: `tool-${chunk.toolName}`;
|
|
138
|
+
let part = this.toolById.get(chunk.toolCallId);
|
|
139
|
+
if (part) {
|
|
140
|
+
part.state = "input-available";
|
|
141
|
+
part.input = chunk.input;
|
|
142
|
+
part.providerExecuted = chunk.providerExecuted;
|
|
143
|
+
} else {
|
|
144
|
+
part = {
|
|
145
|
+
type: typeName,
|
|
146
|
+
toolCallId: chunk.toolCallId,
|
|
147
|
+
toolName: chunk.toolName,
|
|
148
|
+
state: "input-available",
|
|
149
|
+
input: chunk.input,
|
|
150
|
+
providerExecuted: chunk.providerExecuted,
|
|
151
|
+
title: chunk.title,
|
|
152
|
+
};
|
|
153
|
+
this.toolById.set(chunk.toolCallId, part);
|
|
154
|
+
this.push(part);
|
|
155
|
+
}
|
|
156
|
+
break;
|
|
157
|
+
}
|
|
158
|
+
case "tool-input-error": {
|
|
159
|
+
const part = this.toolById.get(chunk.toolCallId);
|
|
160
|
+
if (part) {
|
|
161
|
+
part.state = "output-error";
|
|
162
|
+
part.errorText = chunk.errorText;
|
|
163
|
+
part.input = chunk.input;
|
|
164
|
+
}
|
|
165
|
+
break;
|
|
166
|
+
}
|
|
167
|
+
case "tool-output-available": {
|
|
168
|
+
const part = this.toolById.get(chunk.toolCallId);
|
|
169
|
+
if (part) {
|
|
170
|
+
part.state = "output-available";
|
|
171
|
+
part.output = chunk.output;
|
|
172
|
+
part.preliminary = chunk.preliminary;
|
|
173
|
+
}
|
|
174
|
+
break;
|
|
175
|
+
}
|
|
176
|
+
case "tool-output-error": {
|
|
177
|
+
const part = this.toolById.get(chunk.toolCallId);
|
|
178
|
+
if (part) {
|
|
179
|
+
part.state = "output-error";
|
|
180
|
+
part.errorText = chunk.errorText;
|
|
181
|
+
}
|
|
182
|
+
break;
|
|
183
|
+
}
|
|
184
|
+
|
|
185
|
+
// --- Pass-through parts ---
|
|
186
|
+
case "file":
|
|
187
|
+
this.push({
|
|
188
|
+
type: "file",
|
|
189
|
+
mediaType: chunk.mediaType,
|
|
190
|
+
url: chunk.url,
|
|
191
|
+
filename: chunk.filename,
|
|
192
|
+
});
|
|
193
|
+
break;
|
|
194
|
+
case "source-url":
|
|
195
|
+
this.push({
|
|
196
|
+
type: "source-url",
|
|
197
|
+
sourceId: chunk.sourceId,
|
|
198
|
+
url: chunk.url,
|
|
199
|
+
title: chunk.title,
|
|
200
|
+
});
|
|
201
|
+
break;
|
|
202
|
+
case "source-document":
|
|
203
|
+
this.push({
|
|
204
|
+
type: "source-document",
|
|
205
|
+
sourceId: chunk.sourceId,
|
|
206
|
+
mediaType: chunk.mediaType,
|
|
207
|
+
title: chunk.title,
|
|
208
|
+
filename: chunk.filename,
|
|
209
|
+
});
|
|
210
|
+
break;
|
|
211
|
+
case "start-step":
|
|
212
|
+
this.push({ type: "step-start" });
|
|
213
|
+
break;
|
|
214
|
+
|
|
215
|
+
// --- Ignored protocol chunks ---
|
|
216
|
+
case "finish-step":
|
|
217
|
+
case "start":
|
|
218
|
+
case "finish":
|
|
219
|
+
case "error":
|
|
220
|
+
break;
|
|
221
|
+
|
|
222
|
+
// --- Unknown / custom data parts → pass through ---
|
|
223
|
+
default:
|
|
224
|
+
if (chunk.type.startsWith("data-")) {
|
|
225
|
+
this.push(chunk);
|
|
226
|
+
}
|
|
227
|
+
break;
|
|
228
|
+
}
|
|
229
|
+
}
|
|
230
|
+
|
|
231
|
+
this.processedCount = rawParts.length;
|
|
232
|
+
return this.parts;
|
|
233
|
+
}
|
|
234
|
+
}
|
|
235
|
+
|
|
236
|
+
/** Mutable bookkeeping for one subagent split group discovered during pass 1. */
type PendingGroup = {
  /** Index of the opening data-split part in the raw parts array. */
  startIndex: number;
  title?: string;
  subtitle?: string;
  desktopSessionId?: string;
  /** Subagent identifier linking data-split / data-endsplit / data-subagent-part. */
  subAgentId: string;
  /** Raw stream protocol chunks from data-subagent-part messages (pre-accumulation). */
  innerParts: unknown[];
  /** Index of the matching data-endsplit, or null while the group is still open. */
  endIndex: number | null;
};
|
|
246
|
+
|
|
247
|
+
/** Incremental state threaded through repeated {@link incrementalPreprocess} calls. */
export type PreprocessorState = {
  /** Id of the message this state belongs to (null when none was supplied). */
  messageId: string | null;
  /** How many raw parts pass 1 has already scanned. */
  scannedLength: number;
  /** Indices consumed by subAgentId-based groups. */
  claimedIndices: Set<number>;
  /** Groups still collecting inner parts (keyed by subAgentId). */
  openGroups: Map<string, PendingGroup>;
  /** All groups keyed by their startIndex (for pass 2 lookup). */
  groupsByStartIndex: Map<number, PendingGroup>;
  /** Reverse lookup: subAgentId → group (includes closed groups). */
  groupBySubAgentId: Map<string, PendingGroup>;
  /** Per-group stream accumulators (keyed by group startIndex). */
  accumulators: Map<number, StreamAccumulator>;
};
|
|
262
|
+
|
|
263
|
+
export function createPreprocessorState(messageId?: string): PreprocessorState {
|
|
264
|
+
return {
|
|
265
|
+
messageId: messageId ?? null,
|
|
266
|
+
scannedLength: 0,
|
|
267
|
+
claimedIndices: new Set(),
|
|
268
|
+
openGroups: new Map(),
|
|
269
|
+
groupsByStartIndex: new Map(),
|
|
270
|
+
groupBySubAgentId: new Map(),
|
|
271
|
+
accumulators: new Map(),
|
|
272
|
+
};
|
|
273
|
+
}
|
|
274
|
+
|
|
275
|
+
/**
 * Incrementally preprocess message parts to group subagent content.
 *
 * Pass 1 (incremental): only scans parts appended since the last call,
 * building group data structures without re-parsing already-seen parts.
 *
 * Per-group accumulation (incremental): each group's stream protocol chunks
 * are accumulated via a stateful {@link StreamAccumulator} that only processes
 * new chunks on each call.
 *
 * Pass 2 (rebuild): constructs the output array from cached state. This is
 * cheap — O(n) Map/Set lookups with no Zod parsing for subAgentId groups.
 *
 * Falls back to legacy greedy scanning for old messages without subAgentId.
 *
 * @param state Mutable per-message state; updated in place across calls.
 * @param parts Full (growing) raw parts array for the message.
 * @returns Processed parts with subagent runs collapsed into split-groups.
 */
export function incrementalPreprocess(
  state: PreprocessorState,
  parts: MessagePart[],
): ProcessedPart[] {
  // --- Incremental Pass 1: only scan newly appended parts ---
  for (let i = state.scannedLength; i < parts.length; i++) {
    const part = parts[i];
    if (!isTypedChunk(part)) continue;

    // New-style split with a subAgentId: open a group and claim this index.
    if (part.type === "data-split") {
      const splitParse = Z_DataSplitSchema.safeParse(part);
      if (splitParse.success && splitParse.data.data.subAgentId) {
        const sid = splitParse.data.data.subAgentId;
        const group: PendingGroup = {
          startIndex: i,
          title: splitParse.data.data.title,
          subtitle: splitParse.data.data.subtitle,
          desktopSessionId: splitParse.data.data.desktopSessionId,
          subAgentId: sid,
          innerParts: [],
          endIndex: null,
        };
        state.openGroups.set(sid, group);
        state.groupsByStartIndex.set(i, group);
        state.groupBySubAgentId.set(sid, group);
        state.claimedIndices.add(i);
        continue;
      }
      // Parse failure or no subAgentId → falls through to pass 2's legacy path.
    }

    // Matching end marker: close the open group and record where it ended.
    if (part.type === "data-endsplit") {
      const endSplitParse = Z_DataEndSplitSchema.safeParse(part);
      if (endSplitParse.success && endSplitParse.data?.data?.subAgentId) {
        const sid = endSplitParse.data.data.subAgentId;
        const group = state.openGroups.get(sid);
        if (group) {
          group.endIndex = i;
          // Keep the split's session id if it had one; otherwise take the endsplit's.
          group.desktopSessionId =
            group.desktopSessionId ?? endSplitParse.data.data.desktopSessionId;
          state.openGroups.delete(sid);
        }
        // Claim the index even when no matching group was open (orphaned endsplit).
        state.claimedIndices.add(i);
        continue;
      }
    }

    // Inner subagent chunk: route it to its group's raw-chunk buffer.
    if (part.type === "data-subagent-part") {
      const subagentParse = Z_DataSubagentPartSchema.safeParse(part);
      if (subagentParse.success) {
        const sid = subagentParse.data.data.subAgentId;
        // Prefer open group, fall back to closed group (late-arriving part)
        const group =
          state.openGroups.get(sid) ?? state.groupBySubAgentId.get(sid);
        if (group) {
          group.innerParts.push(subagentParse.data.data.part);
        }
        state.claimedIndices.add(i);
      }
    }
  }
  state.scannedLength = parts.length;

  // --- Pass 2: build output array using cached accumulators ---
  const processed: ProcessedPart[] = [];

  let i = 0;
  while (i < parts.length) {
    // Check if a subAgentId group starts here
    const groupAtIndex = state.groupsByStartIndex.get(i);
    if (groupAtIndex) {
      // Get or create accumulator for this group
      let acc = state.accumulators.get(groupAtIndex.startIndex);
      if (!acc) {
        acc = new StreamAccumulator();
        state.accumulators.set(groupAtIndex.startIndex, acc);
      }

      processed.push({
        type: "split-group",
        title: groupAtIndex.title,
        subtitle: groupAtIndex.subtitle,
        // Incrementally accumulate — only processes newly added innerParts
        parts: acc.accumulate(groupAtIndex.innerParts),
        startIndex: groupAtIndex.startIndex,
        // A still-open group extends to the end of what we've seen so far.
        endIndex: groupAtIndex.endIndex ?? parts.length,
        desktopSessionId: groupAtIndex.desktopSessionId,
      });
      // Don't jump to endIndex — parallel subagent groups have overlapping
      // index ranges. All inner data-subagent-part indices are claimed, so
      // advancing one-by-one correctly skips them while still visiting other
      // groups whose startIndex falls within this range.
      i++;
      continue;
    }

    // Skip any already-claimed index (subagent-part or endsplit handled above)
    if (state.claimedIndices.has(i)) {
      i++;
      continue;
    }

    const part = parts[i];
    const type: string | undefined = isTypedChunk(part) ? part.type : undefined;

    // Legacy: data-split WITHOUT subAgentId → greedy first-match (backward compat)
    if (type === "data-split") {
      const splitParse = Z_DataSplitSchema.safeParse(part);
      if (splitParse.success && !splitParse.data.data.subAgentId) {
        let endIndex = parts.length;
        let desktopSessionId = splitParse.data.data.desktopSessionId;
        // Scan forward for the first unclaimed legacy (no-subAgentId) endsplit.
        for (
          let searchIndex = i + 1;
          searchIndex < parts.length;
          searchIndex++
        ) {
          if (state.claimedIndices.has(searchIndex)) continue;
          const checkPart = parts[searchIndex];
          const endParse = Z_DataEndSplitSchema.safeParse(checkPart);
          if (endParse.success && !endParse.data?.data?.subAgentId) {
            endIndex = searchIndex;
            desktopSessionId =
              desktopSessionId ?? endParse.data?.data?.desktopSessionId;
            break;
          }
        }

        // Everything between split and endsplit belongs to the group, minus
        // indices already claimed by subAgentId-based groups.
        const groupedParts = parts.slice(i + 1, endIndex).filter((_, idx) => {
          const absIdx = i + 1 + idx;
          return !state.claimedIndices.has(absIdx);
        });

        processed.push({
          type: "split-group",
          title: splitParse.data.data.title,
          subtitle: splitParse.data.data.subtitle,
          parts: groupedParts,
          startIndex: i,
          endIndex,
          desktopSessionId,
        });

        // Resume after the endsplit marker (or at the array end if none found).
        i = endIndex < parts.length ? endIndex + 1 : endIndex;
        continue;
      }
    }

    // Orphaned data-endsplit without subAgentId → skip
    if (type === "data-endsplit") {
      i++;
      continue;
    }

    // Regular part
    processed.push({ type: "regular", part, originalIndex: i });
    i++;
  }

  return processed;
}
|
package/src/types.ts
CHANGED
|
@@ -8,6 +8,7 @@ import type {
|
|
|
8
8
|
} from "@futurity/chat-protocol";
|
|
9
9
|
import { type MessagePart, Z_MessagePart } from "@futurity/chat-protocol";
|
|
10
10
|
import { z } from "zod";
|
|
11
|
+
import type { ProcessedPart } from "./stream-accumulator";
|
|
11
12
|
|
|
12
13
|
export type { MessagePart };
|
|
13
14
|
|
|
@@ -22,6 +23,8 @@ export type ChatMessage = {
|
|
|
22
23
|
id: string;
|
|
23
24
|
role: "user" | "assistant" | "system";
|
|
24
25
|
parts: MessagePart[];
|
|
26
|
+
/** Pre-computed processed parts with subagent grouping and stream accumulation. */
|
|
27
|
+
processedParts: ProcessedPart[];
|
|
25
28
|
createdAt?: Date;
|
|
26
29
|
metadata?: MessageMetadata;
|
|
27
30
|
};
|