@assistant-ui/react-langgraph 0.5.4 → 0.5.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/types.ts CHANGED
@@ -14,12 +14,12 @@ export type LangChainToolCall = {
   args: ReadonlyJSONObject;
 };
 
-type MessageContentText = {
+export type MessageContentText = {
   type: "text";
   text: string;
 };
 
-type MessageContentImageUrl = {
+export type MessageContentImageUrl = {
   type: "image_url";
   image_url: string | { url: string };
 };
@@ -28,9 +28,21 @@ type MessageContentToolUse = {
   type: "tool_use";
 };
 
+export enum LangGraphKnownEventTypes {
+  Messages = "messages",
+  MessagesPartial = "messages/partial",
+  MessagesComplete = "messages/complete",
+  Metadata = "metadata",
+  Updates = "updates",
+}
+type CustomEventType = string;
+
+export type EventType = LangGraphKnownEventTypes | CustomEventType;
+
 type UserMessageContentComplex = MessageContentText | MessageContentImageUrl;
 type AssistantMessageContentComplex =
   | MessageContentText
+  | MessageContentImageUrl
   | MessageContentToolUse;
 
 type UserMessageContent = string | UserMessageContentComplex[];
@@ -54,6 +66,7 @@ export type LangChainMessage =
       tool_call_id: string;
       name: string;
       artifact?: any;
+      status: "success" | "error";
     }
   | {
       id?: string;
@@ -64,13 +77,22 @@ export type LangChainMessage =
     };
 
 export type LangChainMessageChunk = {
-  id: string;
+  id?: string | undefined;
   type: "AIMessageChunk";
-  content: (AssistantMessageContentComplex & { index: number })[];
-  tool_call_chunks: LangChainToolCallChunk[];
+  content?: AssistantMessageContent | undefined;
+  tool_call_chunks?: LangChainToolCallChunk[] | undefined;
 };
 
 export type LangChainEvent = {
-  event: "messages/partial" | "messages/complete";
+  event:
+    | LangGraphKnownEventTypes.MessagesPartial
+    | LangGraphKnownEventTypes.MessagesComplete;
   data: LangChainMessage[];
 };
+
+type LangGraphTupleMetadata = Record<string, unknown>;
+
+export type LangChainMessageTupleEvent = {
+  event: LangGraphKnownEventTypes.Messages;
+  data: [LangChainMessageChunk, LangGraphTupleMetadata];
+};
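Note: the hunk below adds a new vitest suite exercising useLangGraphMessages together with appendLangChainChunk. As a quick orientation to the event types exported above, here is a minimal consumer sketch; it assumes these types are re-exported from the package entry point, and the handleEvent helper is hypothetical, not part of the package:

// Assumption: types re-exported from the package root; otherwise import from src/types.
import {
  LangGraphKnownEventTypes,
  type EventType,
  type LangChainEvent,
  type LangChainMessageTupleEvent,
} from "@assistant-ui/react-langgraph";

// Hypothetical helper (not part of the package): narrow a stream event
// by its `event` tag. Known tags come from LangGraphKnownEventTypes;
// any other string is treated as a custom event.
function handleEvent(ev: { event: EventType; data: unknown }): void {
  switch (ev.event) {
    case LangGraphKnownEventTypes.Messages: {
      // "messages" events carry a [chunk, metadata] tuple.
      const [chunk] = (ev as LangChainMessageTupleEvent).data;
      console.log("chunk", chunk.id, chunk.content);
      break;
    }
    case LangGraphKnownEventTypes.MessagesPartial:
    case LangGraphKnownEventTypes.MessagesComplete:
      // partial/complete events carry full LangChainMessage arrays.
      console.log("messages", (ev as LangChainEvent).data.length);
      break;
    default:
      // Custom (free-form string) event type; ignore here.
      break;
  }
}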
@@ -0,0 +1,626 @@
+import { describe, it, expect } from "vitest";
+import { act, renderHook, waitFor } from "@testing-library/react";
+
+import {
+  LangGraphMessagesEvent,
+  useLangGraphMessages,
+} from "./useLangGraphMessages";
+import { appendLangChainChunk } from "./appendLangChainChunk";
+import {
+  LangChainMessage,
+  MessageContentImageUrl,
+  MessageContentText,
+} from "./types";
+
+const metadataEvent = {
+  event: "metadata",
+  data: {
+    thread_id: "123",
+    run_attempt: 1,
+  },
+};
+
+const mockStreamCallbackFactory = (
+  events: Array<LangGraphMessagesEvent<LangChainMessage>>,
+) =>
+  async function* () {
+    for (const event of events) {
+      yield event;
+    }
+  };
+
+describe("useLangGraphMessages", {}, () => {
+  it("processes chunks correctly", async () => {
+    const mockStreamCallback = mockStreamCallbackFactory([
+      metadataEvent,
+      {
+        event: "messages",
+        data: [
+          {
+            id: "run-1",
+            content: "",
+            additional_kwargs: {},
+            response_metadata: { model_name: "claude-3-7-sonnet-latest" },
+            type: "AIMessageChunk",
+            name: null,
+            tool_calls: [],
+            invalid_tool_calls: [],
+            tool_call_chunks: [],
+          },
+          {
+            run_attempt: 1,
+          },
+        ],
+      },
+    ]);
+
+    const { result } = renderHook(() =>
+      useLangGraphMessages({
+        stream: mockStreamCallback,
+        appendMessage: appendLangChainChunk,
+      }),
+    );
+
+    act(() => {
+      result.current.sendMessage(
+        [
+          {
+            type: "human",
+            content: "Hello, world!",
+          },
+        ],
+        {},
+      );
+    });
+
+    await waitFor(() => {
+      expect(result.current.messages.length).toEqual(2);
+      expect(result.current.messages[0].type).toEqual("human");
+      expect(result.current.messages[1].type).toEqual("ai");
+      expect(result.current.messages[1].content).toEqual("");
+    });
+  });
+
+  it("appends chunks w/ same id", async () => {
+    const mockStreamCallback = mockStreamCallbackFactory([
+      metadataEvent,
+      {
+        event: "messages",
+        data: [
+          {
+            id: "run-1",
+            content: "",
+            additional_kwargs: {},
+            response_metadata: { model_name: "claude-3-7-sonnet-latest" },
+            type: "AIMessageChunk",
+            name: null,
+            tool_calls: [],
+            invalid_tool_calls: [],
+            tool_call_chunks: [],
+          },
+          {
+            run_attempt: 1,
+          },
+        ],
+      },
+      {
+        event: "messages",
+        data: [
+          {
+            id: "run-1",
+            content: "Hello!",
+            additional_kwargs: {},
+            response_metadata: { model_name: "claude-3-7-sonnet-latest" },
+            type: "AIMessageChunk",
+            name: null,
+            tool_calls: [],
+            invalid_tool_calls: [],
+            tool_call_chunks: [],
+          },
+          {
+            run_attempt: 1,
+          },
+        ],
+      },
+      {
+        event: "messages",
+        data: [
+          {
+            id: "run-1",
+            content: " How may I assist you today?",
+            additional_kwargs: {},
+            response_metadata: { model_name: "claude-3-7-sonnet-latest" },
+            type: "AIMessageChunk",
+            name: null,
+            tool_calls: [],
+            invalid_tool_calls: [],
+            tool_call_chunks: [],
+          },
+          {
+            run_attempt: 1,
+          },
+        ],
+      },
+    ]);
+
+    const { result } = renderHook(() =>
+      useLangGraphMessages({
+        stream: mockStreamCallback,
+        appendMessage: appendLangChainChunk,
+      }),
+    );
+
+    act(() => {
+      result.current.sendMessage(
+        [
+          {
+            type: "human",
+            content: "Hello!",
+          },
+        ],
+        {},
+      );
+    });
+
+    await waitFor(() => {
+      expect(result.current.messages.length).toEqual(2);
+      expect(result.current.messages[0].type).toEqual("human");
+      expect(result.current.messages[1].type).toEqual("ai");
+      expect(
+        (result.current.messages[1].content[0] as MessageContentText).type,
+      ).toEqual("text");
+      expect(
+        (result.current.messages[1].content[0] as MessageContentText).text,
+      ).toEqual("Hello! How may I assist you today?");
+    });
+  });
+
+  it("separates chunks w/ different ids", async () => {
+    const mockStreamCallback = mockStreamCallbackFactory([
+      metadataEvent,
+      {
+        event: "messages",
+        data: [
+          {
+            id: "run-1",
+            content: "",
+            additional_kwargs: {},
+            response_metadata: { model_name: "claude-3-7-sonnet-latest" },
+            type: "AIMessageChunk",
+            name: null,
+            tool_calls: [],
+            invalid_tool_calls: [],
+            tool_call_chunks: [],
+          },
+          {
+            run_attempt: 1,
+          },
+        ],
+      },
+      {
+        event: "messages",
+        data: [
+          {
+            id: "run-1",
+            content: "Hello!",
+            additional_kwargs: {},
+            response_metadata: { model_name: "claude-3-7-sonnet-latest" },
+            type: "AIMessageChunk",
+            name: null,
+            tool_calls: [],
+            invalid_tool_calls: [],
+            tool_call_chunks: [],
+          },
+          {
+            run_attempt: 1,
+          },
+        ],
+      },
+      {
+        event: "messages",
+        data: [
+          {
+            id: "run-2",
+            content: " How may I assist you today?",
+            additional_kwargs: {},
+            response_metadata: { model_name: "claude-3-7-sonnet-latest" },
+            type: "AIMessageChunk",
+            name: null,
+            tool_calls: [],
+            invalid_tool_calls: [],
+            tool_call_chunks: [],
+          },
+          {
+            run_attempt: 1,
+          },
+        ],
+      },
+    ]);
+
+    const { result } = renderHook(() =>
+      useLangGraphMessages({
+        stream: mockStreamCallback,
+        appendMessage: appendLangChainChunk,
+      }),
+    );
+
+    act(() => {
+      result.current.sendMessage(
+        [
+          {
+            type: "human",
+            content: "Hello!",
+          },
+        ],
+        {},
+      );
+    });
+
+    await waitFor(() => {
+      expect(result.current.messages.length).toEqual(3);
+      expect(result.current.messages[0].type).toEqual("human");
+      expect(result.current.messages[1].type).toEqual("ai");
+      expect(result.current.messages[2].type).toEqual("ai");
+      expect(
+        (result.current.messages[1].content[0] as MessageContentText).type,
+      ).toEqual("text");
+      expect(
+        (result.current.messages[1].content[0] as MessageContentText).text,
+      ).toEqual("Hello!");
+      expect(result.current.messages[2].content as string).toEqual(
+        " How may I assist you today?",
+      );
+    });
+  });
+
+  it("handles a mix of text and image chunks - start with text", async () => {
+    const mockStreamCallback = mockStreamCallbackFactory([
+      metadataEvent,
+      {
+        event: "messages",
+        data: [
+          {
+            id: "run-1",
+            content: "",
+            additional_kwargs: {},
+            response_metadata: { model_name: "claude-3-7-sonnet-latest" },
+            type: "AIMessageChunk",
+            name: null,
+            tool_calls: [],
+            invalid_tool_calls: [],
+            tool_call_chunks: [],
+          },
+          {
+            run_attempt: 1,
+          },
+        ],
+      },
+      {
+        event: "messages",
+        data: [
+          {
+            id: "run-1",
+            content: "Hello!",
+            additional_kwargs: {},
+            response_metadata: { model_name: "claude-3-7-sonnet-latest" },
+            type: "AIMessageChunk",
+            name: null,
+            tool_calls: [],
+            invalid_tool_calls: [],
+            tool_call_chunks: [],
+          },
+          {
+            run_attempt: 1,
+          },
+        ],
+      },
+      {
+        event: "messages",
+        data: [
+          {
+            id: "run-1",
+            content: " How may I assist you today?",
+            additional_kwargs: {},
+            response_metadata: { model_name: "claude-3-7-sonnet-latest" },
+            type: "AIMessageChunk",
+            name: null,
+            tool_calls: [],
+            invalid_tool_calls: [],
+            tool_call_chunks: [],
+          },
+          {
+            run_attempt: 1,
+          },
+        ],
+      },
+      {
+        event: "messages",
+        data: [
+          {
+            id: "run-1",
+            content: [
+              {
+                type: "image_url",
+                image_url: { url: "https://example.com/image.png" },
+              },
+            ],
+            type: "AIMessageChunk",
+            name: null,
+            tool_calls: [],
+            invalid_tool_calls: [],
+            tool_call_chunks: [],
+          },
+          {
+            run_attempt: 1,
+          },
+        ],
+      },
+    ]);
+
+    const { result } = renderHook(() =>
+      useLangGraphMessages({
+        stream: mockStreamCallback,
+        appendMessage: appendLangChainChunk,
+      }),
+    );
+
+    act(() => {
+      result.current.sendMessage(
+        [
+          {
+            type: "human",
+            content: "Hello!",
+          },
+        ],
+        {},
+      );
+    });
+
+    await waitFor(() => {
+      expect(result.current.messages.length).toEqual(2);
+      expect(result.current.messages[0].type).toEqual("human");
+      expect(result.current.messages[1].type).toEqual("ai");
+      expect(
+        (result.current.messages[1].content[0] as MessageContentText).type,
+      ).toEqual("text");
+      expect(
+        (result.current.messages[1].content[0] as MessageContentText).text,
+      ).toEqual("Hello! How may I assist you today?");
+      expect(
+        (result.current.messages[1].content[1] as MessageContentImageUrl).type,
+      ).toEqual("image_url");
+      const imageChunkContent = result.current.messages[1]
+        .content[1] as MessageContentImageUrl;
+      expect(typeof imageChunkContent.image_url).toEqual("object");
+      expect(
+        (
+          (result.current.messages[1].content[1] as MessageContentImageUrl)
+            .image_url as { url: string }
+        ).url,
+      ).toEqual("https://example.com/image.png");
+    });
+  });
+
+  it("handles a mix of text and image chunks - start with image", async () => {
+    const mockStreamCallback = mockStreamCallbackFactory([
+      metadataEvent,
+      {
+        event: "messages",
+        data: [
+          {
+            id: "run-1",
+            content: [
+              {
+                type: "image_url",
+                image_url: { url: "https://example.com/image.png" },
+              },
+            ],
+            type: "AIMessageChunk",
+            name: null,
+            tool_calls: [],
+            invalid_tool_calls: [],
+            tool_call_chunks: [],
+          },
+          {
+            run_attempt: 1,
+          },
+        ],
+      },
+      {
+        event: "messages",
+        data: [
+          {
+            id: "run-1",
+            content: "",
+            additional_kwargs: {},
+            response_metadata: { model_name: "claude-3-7-sonnet-latest" },
+            type: "AIMessageChunk",
+            name: null,
+            tool_calls: [],
+            invalid_tool_calls: [],
+            tool_call_chunks: [],
+          },
+          {
+            run_attempt: 1,
+          },
+        ],
+      },
+      {
+        event: "messages",
+        data: [
+          {
+            id: "run-1",
+            content: "Hello!",
+            additional_kwargs: {},
+            response_metadata: { model_name: "claude-3-7-sonnet-latest" },
+            type: "AIMessageChunk",
+            name: null,
+            tool_calls: [],
+            invalid_tool_calls: [],
+            tool_call_chunks: [],
+          },
+          {
+            run_attempt: 1,
+          },
+        ],
+      },
+      {
+        event: "messages",
+        data: [
+          {
+            id: "run-1",
+            content: " How may I assist you today?",
+            additional_kwargs: {},
+            response_metadata: { model_name: "claude-3-7-sonnet-latest" },
+            type: "AIMessageChunk",
+            name: null,
+            tool_calls: [],
+            invalid_tool_calls: [],
+            tool_call_chunks: [],
+          },
+          {
+            run_attempt: 1,
+          },
+        ],
+      },
+    ]);
+
+    const { result } = renderHook(() =>
+      useLangGraphMessages({
+        stream: mockStreamCallback,
+        appendMessage: appendLangChainChunk,
+      }),
+    );
+
+    act(() => {
+      result.current.sendMessage(
+        [
+          {
+            type: "human",
+            content: "Hello!",
+          },
+        ],
+        {},
+      );
+    });
+
+    await waitFor(() => {
+      expect(result.current.messages.length).toEqual(2);
+      expect(result.current.messages[0].type).toEqual("human");
+      expect(result.current.messages[1].type).toEqual("ai");
+      expect(
+        (result.current.messages[1].content[0] as MessageContentImageUrl).type,
+      ).toEqual("image_url");
+      const imageChunkContent = result.current.messages[1]
+        .content[0] as MessageContentImageUrl;
+      expect(typeof imageChunkContent.image_url).toEqual("object");
+      expect(
+        (
+          (result.current.messages[1].content[0] as MessageContentImageUrl)
+            .image_url as { url: string }
+        ).url,
+      ).toEqual("https://example.com/image.png");
+      expect(
+        (result.current.messages[1].content[1] as MessageContentText).type,
+      ).toEqual("text");
+      expect(
+        (result.current.messages[1].content[1] as MessageContentText).text,
+      ).toEqual("Hello! How may I assist you today?");
+    });
+  });
+
+  it("processes a mix of chunks and messages", async () => {
+    const mockStreamCallback = mockStreamCallbackFactory([
+      metadataEvent,
+      {
+        event: "messages",
+        data: [
+          {
+            id: "run-1",
+            content: "",
+            additional_kwargs: {},
+            response_metadata: { model_name: "claude-3-7-sonnet-latest" },
+            type: "AIMessageChunk",
+            name: null,
+            tool_calls: [],
+            invalid_tool_calls: [],
+            tool_call_chunks: [],
+          },
+          {
+            run_attempt: 1,
+          },
+        ],
+      },
+      {
+        event: "messages",
+        data: [
+          {
+            id: "run-1",
+            content: "Hello!",
+            additional_kwargs: {},
+            response_metadata: { model_name: "claude-3-7-sonnet-latest" },
+            type: "AIMessageChunk",
+            name: null,
+            tool_calls: [],
+            invalid_tool_calls: [],
+            tool_call_chunks: [],
+          },
+          {
+            run_attempt: 1,
+          },
+        ],
+      },
+      {
+        event: "messages/complete",
+        data: [
+          {
+            id: "run-2",
+            content: [{ type: "text", text: "How may I assist you today?" }],
+            additional_kwargs: {},
+            response_metadata: { model_name: "claude-3-7-sonnet-latest" },
+            type: "ai",
+            name: null,
+            tool_calls: [],
+            invalid_tool_calls: [],
+            tool_call_chunks: [],
+          },
+        ],
+      },
+    ]);
+
+    const { result } = renderHook(() =>
+      useLangGraphMessages({
+        stream: mockStreamCallback,
+        appendMessage: appendLangChainChunk,
+      }),
+    );
+
+    act(() => {
+      result.current.sendMessage(
+        [
+          {
+            type: "human",
+            content: "Hello!",
+          },
+        ],
+        {},
+      );
+    });
+
+    await waitFor(() => {
+      expect(result.current.messages.length).toEqual(3);
+      expect(result.current.messages[0].type).toEqual("human");
+      expect(result.current.messages[1].type).toEqual("ai");
+      expect(result.current.messages[2].type).toEqual("ai");
+      expect(
+        (result.current.messages[1].content[0] as MessageContentText).type,
+      ).toEqual("text");
+      expect(
+        (result.current.messages[1].content[0] as MessageContentText).text,
+      ).toEqual("Hello!");
+      expect(
+        (result.current.messages[2].content[0] as MessageContentText).text,
+      ).toEqual("How may I assist you today?");
+    });
+  });
+});