@assistant-ui/react-langgraph 0.5.5 → 0.5.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. package/dist/LangGraphMessageAccumulator.d.ts +0 -4
  2. package/dist/LangGraphMessageAccumulator.d.ts.map +1 -1
  3. package/dist/LangGraphMessageAccumulator.js.map +1 -1
  4. package/dist/appendLangChainChunk.d.ts.map +1 -1
  5. package/dist/appendLangChainChunk.js +23 -10
  6. package/dist/appendLangChainChunk.js.map +1 -1
  7. package/dist/convertLangChainMessages.d.ts.map +1 -1
  8. package/dist/convertLangChainMessages.js +4 -0
  9. package/dist/convertLangChainMessages.js.map +1 -1
  10. package/dist/testUtils.d.ts +4 -0
  11. package/dist/testUtils.d.ts.map +1 -0
  12. package/dist/testUtils.js +10 -0
  13. package/dist/testUtils.js.map +1 -0
  14. package/dist/types.d.ts +29 -11
  15. package/dist/types.d.ts.map +1 -1
  16. package/dist/types.js +14 -0
  17. package/dist/types.js.map +1 -1
  18. package/dist/useLangGraphMessages.d.ts +9 -2
  19. package/dist/useLangGraphMessages.d.ts.map +1 -1
  20. package/dist/useLangGraphMessages.js +72 -10
  21. package/dist/useLangGraphMessages.js.map +1 -1
  22. package/dist/useLangGraphRuntime.d.ts +23 -2
  23. package/dist/useLangGraphRuntime.d.ts.map +1 -1
  24. package/dist/useLangGraphRuntime.js +4 -2
  25. package/dist/useLangGraphRuntime.js.map +1 -1
  26. package/package.json +14 -8
  27. package/src/LangGraphMessageAccumulator.ts +0 -11
  28. package/src/appendLangChainChunk.ts +31 -11
  29. package/src/convertLangChainMessages.ts +4 -0
  30. package/src/testUtils.ts +11 -0
  31. package/src/types.ts +42 -8
  32. package/src/useLangGraphMessages.test.ts +611 -0
  33. package/src/useLangGraphMessages.ts +98 -19
  34. package/src/useLangGraphRuntime.test.tsx +276 -0
  35. package/src/useLangGraphRuntime.ts +33 -1
@@ -0,0 +1,611 @@
1
+ import { describe, it, expect } from "vitest";
2
+ import { act, renderHook, waitFor } from "@testing-library/react";
3
+
4
+ import { useLangGraphMessages } from "./useLangGraphMessages";
5
+ import { appendLangChainChunk } from "./appendLangChainChunk";
6
+ import { MessageContentImageUrl, MessageContentText } from "./types";
7
+ import { mockStreamCallbackFactory } from "./testUtils";
8
+
9
+ const metadataEvent = {
10
+ event: "metadata",
11
+ data: {
12
+ thread_id: "123",
13
+ run_attempt: 1,
14
+ },
15
+ };
16
+
17
+ describe("useLangGraphMessages", {}, () => {
18
+ it("processes chunks correctly", async () => {
19
+ const mockStreamCallback = mockStreamCallbackFactory([
20
+ metadataEvent,
21
+ {
22
+ event: "messages",
23
+ data: [
24
+ {
25
+ id: "run-1",
26
+ content: "",
27
+ additional_kwargs: {},
28
+ response_metadata: { model_name: "claude-3-7-sonnet-latest" },
29
+ type: "AIMessageChunk",
30
+ name: null,
31
+ tool_calls: [],
32
+ invalid_tool_calls: [],
33
+ tool_call_chunks: [],
34
+ },
35
+ {
36
+ run_attempt: 1,
37
+ },
38
+ ],
39
+ },
40
+ ]);
41
+
42
+ const { result } = renderHook(() =>
43
+ useLangGraphMessages({
44
+ stream: mockStreamCallback,
45
+ appendMessage: appendLangChainChunk,
46
+ }),
47
+ );
48
+
49
+ act(() => {
50
+ result.current.sendMessage(
51
+ [
52
+ {
53
+ type: "human",
54
+ content: "Hello, world!",
55
+ },
56
+ ],
57
+ {},
58
+ );
59
+ });
60
+
61
+ await waitFor(() => {
62
+ expect(result.current.messages.length).toEqual(2);
63
+ expect(result.current.messages[0].type).toEqual("human");
64
+ expect(result.current.messages[1].type).toEqual("ai");
65
+ expect(result.current.messages[1].content).toEqual("");
66
+ });
67
+ });
68
+
69
+ it("appends chunks w/ same id", async () => {
70
+ const mockStreamCallback = mockStreamCallbackFactory([
71
+ metadataEvent,
72
+ {
73
+ event: "messages",
74
+ data: [
75
+ {
76
+ id: "run-1",
77
+ content: "",
78
+ additional_kwargs: {},
79
+ response_metadata: { model_name: "claude-3-7-sonnet-latest" },
80
+ type: "AIMessageChunk",
81
+ name: null,
82
+ tool_calls: [],
83
+ invalid_tool_calls: [],
84
+ tool_call_chunks: [],
85
+ },
86
+ {
87
+ run_attempt: 1,
88
+ },
89
+ ],
90
+ },
91
+ {
92
+ event: "messages",
93
+ data: [
94
+ {
95
+ id: "run-1",
96
+ content: "Hello!",
97
+ additional_kwargs: {},
98
+ response_metadata: { model_name: "claude-3-7-sonnet-latest" },
99
+ type: "AIMessageChunk",
100
+ name: null,
101
+ tool_calls: [],
102
+ invalid_tool_calls: [],
103
+ tool_call_chunks: [],
104
+ },
105
+ {
106
+ run_attempt: 1,
107
+ },
108
+ ],
109
+ },
110
+ {
111
+ event: "messages",
112
+ data: [
113
+ {
114
+ id: "run-1",
115
+ content: " How may I assist you today?",
116
+ additional_kwargs: {},
117
+ response_metadata: { model_name: "claude-3-7-sonnet-latest" },
118
+ type: "AIMessageChunk",
119
+ name: null,
120
+ tool_calls: [],
121
+ invalid_tool_calls: [],
122
+ tool_call_chunks: [],
123
+ },
124
+ {
125
+ run_attempt: 1,
126
+ },
127
+ ],
128
+ },
129
+ ]);
130
+
131
+ const { result } = renderHook(() =>
132
+ useLangGraphMessages({
133
+ stream: mockStreamCallback,
134
+ appendMessage: appendLangChainChunk,
135
+ }),
136
+ );
137
+
138
+ act(() => {
139
+ result.current.sendMessage(
140
+ [
141
+ {
142
+ type: "human",
143
+ content: "Hello!",
144
+ },
145
+ ],
146
+ {},
147
+ );
148
+ });
149
+
150
+ await waitFor(() => {
151
+ expect(result.current.messages.length).toEqual(2);
152
+ expect(result.current.messages[0].type).toEqual("human");
153
+ expect(result.current.messages[1].type).toEqual("ai");
154
+ expect(
155
+ (result.current.messages[1].content[0] as MessageContentText).type,
156
+ ).toEqual("text");
157
+ expect(
158
+ (result.current.messages[1].content[0] as MessageContentText).text,
159
+ ).toEqual("Hello! How may I assist you today?");
160
+ });
161
+ });
162
+
163
+ it("separates chunks w/ different ids", async () => {
164
+ const mockStreamCallback = mockStreamCallbackFactory([
165
+ metadataEvent,
166
+ {
167
+ event: "messages",
168
+ data: [
169
+ {
170
+ id: "run-1",
171
+ content: "",
172
+ additional_kwargs: {},
173
+ response_metadata: { model_name: "claude-3-7-sonnet-latest" },
174
+ type: "AIMessageChunk",
175
+ name: null,
176
+ tool_calls: [],
177
+ invalid_tool_calls: [],
178
+ tool_call_chunks: [],
179
+ },
180
+ {
181
+ run_attempt: 1,
182
+ },
183
+ ],
184
+ },
185
+ {
186
+ event: "messages",
187
+ data: [
188
+ {
189
+ id: "run-1",
190
+ content: "Hello!",
191
+ additional_kwargs: {},
192
+ response_metadata: { model_name: "claude-3-7-sonnet-latest" },
193
+ type: "AIMessageChunk",
194
+ name: null,
195
+ tool_calls: [],
196
+ invalid_tool_calls: [],
197
+ tool_call_chunks: [],
198
+ },
199
+ {
200
+ run_attempt: 1,
201
+ },
202
+ ],
203
+ },
204
+ {
205
+ event: "messages",
206
+ data: [
207
+ {
208
+ id: "run-2",
209
+ content: " How may I assist you today?",
210
+ additional_kwargs: {},
211
+ response_metadata: { model_name: "claude-3-7-sonnet-latest" },
212
+ type: "AIMessageChunk",
213
+ name: null,
214
+ tool_calls: [],
215
+ invalid_tool_calls: [],
216
+ tool_call_chunks: [],
217
+ },
218
+ {
219
+ run_attempt: 1,
220
+ },
221
+ ],
222
+ },
223
+ ]);
224
+
225
+ const { result } = renderHook(() =>
226
+ useLangGraphMessages({
227
+ stream: mockStreamCallback,
228
+ appendMessage: appendLangChainChunk,
229
+ }),
230
+ );
231
+
232
+ act(() => {
233
+ result.current.sendMessage(
234
+ [
235
+ {
236
+ type: "human",
237
+ content: "Hello!",
238
+ },
239
+ ],
240
+ {},
241
+ );
242
+ });
243
+
244
+ await waitFor(() => {
245
+ expect(result.current.messages.length).toEqual(3);
246
+ expect(result.current.messages[0].type).toEqual("human");
247
+ expect(result.current.messages[1].type).toEqual("ai");
248
+ expect(result.current.messages[2].type).toEqual("ai");
249
+ expect(
250
+ (result.current.messages[1].content[0] as MessageContentText).type,
251
+ ).toEqual("text");
252
+ expect(
253
+ (result.current.messages[1].content[0] as MessageContentText).text,
254
+ ).toEqual("Hello!");
255
+ expect(result.current.messages[2].content as string).toEqual(
256
+ " How may I assist you today?",
257
+ );
258
+ });
259
+ });
260
+
261
+ it("handles a mix of text and image chunks - start with text", async () => {
262
+ const mockStreamCallback = mockStreamCallbackFactory([
263
+ metadataEvent,
264
+ {
265
+ event: "messages",
266
+ data: [
267
+ {
268
+ id: "run-1",
269
+ content: "",
270
+ additional_kwargs: {},
271
+ response_metadata: { model_name: "claude-3-7-sonnet-latest" },
272
+ type: "AIMessageChunk",
273
+ name: null,
274
+ tool_calls: [],
275
+ invalid_tool_calls: [],
276
+ tool_call_chunks: [],
277
+ },
278
+ {
279
+ run_attempt: 1,
280
+ },
281
+ ],
282
+ },
283
+ {
284
+ event: "messages",
285
+ data: [
286
+ {
287
+ id: "run-1",
288
+ content: "Hello!",
289
+ additional_kwargs: {},
290
+ response_metadata: { model_name: "claude-3-7-sonnet-latest" },
291
+ type: "AIMessageChunk",
292
+ name: null,
293
+ tool_calls: [],
294
+ invalid_tool_calls: [],
295
+ tool_call_chunks: [],
296
+ },
297
+ {
298
+ run_attempt: 1,
299
+ },
300
+ ],
301
+ },
302
+ {
303
+ event: "messages",
304
+ data: [
305
+ {
306
+ id: "run-1",
307
+ content: " How may I assist you today?",
308
+ additional_kwargs: {},
309
+ response_metadata: { model_name: "claude-3-7-sonnet-latest" },
310
+ type: "AIMessageChunk",
311
+ name: null,
312
+ tool_calls: [],
313
+ invalid_tool_calls: [],
314
+ tool_call_chunks: [],
315
+ },
316
+ {
317
+ run_attempt: 1,
318
+ },
319
+ ],
320
+ },
321
+ {
322
+ event: "messages",
323
+ data: [
324
+ {
325
+ id: "run-1",
326
+ content: [
327
+ {
328
+ type: "image_url",
329
+ image_url: { url: "https://example.com/image.png" },
330
+ },
331
+ ],
332
+ type: "AIMessageChunk",
333
+ name: null,
334
+ tool_calls: [],
335
+ invalid_tool_calls: [],
336
+ tool_call_chunks: [],
337
+ },
338
+ {
339
+ run_attempt: 1,
340
+ },
341
+ ],
342
+ },
343
+ ]);
344
+
345
+ const { result } = renderHook(() =>
346
+ useLangGraphMessages({
347
+ stream: mockStreamCallback,
348
+ appendMessage: appendLangChainChunk,
349
+ }),
350
+ );
351
+
352
+ act(() => {
353
+ result.current.sendMessage(
354
+ [
355
+ {
356
+ type: "human",
357
+ content: "Hello!",
358
+ },
359
+ ],
360
+ {},
361
+ );
362
+ });
363
+
364
+ await waitFor(() => {
365
+ expect(result.current.messages.length).toEqual(2);
366
+ expect(result.current.messages[0].type).toEqual("human");
367
+ expect(result.current.messages[1].type).toEqual("ai");
368
+ expect(
369
+ (result.current.messages[1].content[0] as MessageContentText).type,
370
+ ).toEqual("text");
371
+ expect(
372
+ (result.current.messages[1].content[0] as MessageContentText).text,
373
+ ).toEqual("Hello! How may I assist you today?");
374
+ expect(
375
+ (result.current.messages[1].content[1] as MessageContentImageUrl).type,
376
+ ).toEqual("image_url");
377
+ const imageChunkContent = result.current.messages[1]
378
+ .content[1] as MessageContentImageUrl;
379
+ expect(typeof imageChunkContent.image_url).toEqual("object");
380
+ expect(
381
+ (
382
+ (result.current.messages[1].content[1] as MessageContentImageUrl)
383
+ .image_url as { url: string }
384
+ ).url,
385
+ ).toEqual("https://example.com/image.png");
386
+ });
387
+ });
388
+
389
+ it("handles a mix of text and image chunks - start with image", async () => {
390
+ const mockStreamCallback = mockStreamCallbackFactory([
391
+ metadataEvent,
392
+ {
393
+ event: "messages",
394
+ data: [
395
+ {
396
+ id: "run-1",
397
+ content: [
398
+ {
399
+ type: "image_url",
400
+ image_url: { url: "https://example.com/image.png" },
401
+ },
402
+ ],
403
+ type: "AIMessageChunk",
404
+ name: null,
405
+ tool_calls: [],
406
+ invalid_tool_calls: [],
407
+ tool_call_chunks: [],
408
+ },
409
+ {
410
+ run_attempt: 1,
411
+ },
412
+ ],
413
+ },
414
+ {
415
+ event: "messages",
416
+ data: [
417
+ {
418
+ id: "run-1",
419
+ content: "",
420
+ additional_kwargs: {},
421
+ response_metadata: { model_name: "claude-3-7-sonnet-latest" },
422
+ type: "AIMessageChunk",
423
+ name: null,
424
+ tool_calls: [],
425
+ invalid_tool_calls: [],
426
+ tool_call_chunks: [],
427
+ },
428
+ {
429
+ run_attempt: 1,
430
+ },
431
+ ],
432
+ },
433
+ {
434
+ event: "messages",
435
+ data: [
436
+ {
437
+ id: "run-1",
438
+ content: "Hello!",
439
+ additional_kwargs: {},
440
+ response_metadata: { model_name: "claude-3-7-sonnet-latest" },
441
+ type: "AIMessageChunk",
442
+ name: null,
443
+ tool_calls: [],
444
+ invalid_tool_calls: [],
445
+ tool_call_chunks: [],
446
+ },
447
+ {
448
+ run_attempt: 1,
449
+ },
450
+ ],
451
+ },
452
+ {
453
+ event: "messages",
454
+ data: [
455
+ {
456
+ id: "run-1",
457
+ content: " How may I assist you today?",
458
+ additional_kwargs: {},
459
+ response_metadata: { model_name: "claude-3-7-sonnet-latest" },
460
+ type: "AIMessageChunk",
461
+ name: null,
462
+ tool_calls: [],
463
+ invalid_tool_calls: [],
464
+ tool_call_chunks: [],
465
+ },
466
+ {
467
+ run_attempt: 1,
468
+ },
469
+ ],
470
+ },
471
+ ]);
472
+
473
+ const { result } = renderHook(() =>
474
+ useLangGraphMessages({
475
+ stream: mockStreamCallback,
476
+ appendMessage: appendLangChainChunk,
477
+ }),
478
+ );
479
+
480
+ act(() => {
481
+ result.current.sendMessage(
482
+ [
483
+ {
484
+ type: "human",
485
+ content: "Hello!",
486
+ },
487
+ ],
488
+ {},
489
+ );
490
+ });
491
+
492
+ await waitFor(() => {
493
+ expect(result.current.messages.length).toEqual(2);
494
+ expect(result.current.messages[0].type).toEqual("human");
495
+ expect(result.current.messages[1].type).toEqual("ai");
496
+ expect(
497
+ (result.current.messages[1].content[0] as MessageContentImageUrl).type,
498
+ ).toEqual("image_url");
499
+ const imageChunkContent = result.current.messages[1]
500
+ .content[0] as MessageContentImageUrl;
501
+ expect(typeof imageChunkContent.image_url).toEqual("object");
502
+ expect(
503
+ (
504
+ (result.current.messages[1].content[0] as MessageContentImageUrl)
505
+ .image_url as { url: string }
506
+ ).url,
507
+ ).toEqual("https://example.com/image.png");
508
+ expect(
509
+ (result.current.messages[1].content[1] as MessageContentText).type,
510
+ ).toEqual("text");
511
+ expect(
512
+ (result.current.messages[1].content[1] as MessageContentText).text,
513
+ ).toEqual("Hello! How may I assist you today?");
514
+ });
515
+ });
516
+
517
+ it("processes a mix of chunks and messages", async () => {
518
+ const mockStreamCallback = mockStreamCallbackFactory([
519
+ metadataEvent,
520
+ {
521
+ event: "messages",
522
+ data: [
523
+ {
524
+ id: "run-1",
525
+ content: "",
526
+ additional_kwargs: {},
527
+ response_metadata: { model_name: "claude-3-7-sonnet-latest" },
528
+ type: "AIMessageChunk",
529
+ name: null,
530
+ tool_calls: [],
531
+ invalid_tool_calls: [],
532
+ tool_call_chunks: [],
533
+ },
534
+ {
535
+ run_attempt: 1,
536
+ },
537
+ ],
538
+ },
539
+ {
540
+ event: "messages",
541
+ data: [
542
+ {
543
+ id: "run-1",
544
+ content: "Hello!",
545
+ additional_kwargs: {},
546
+ response_metadata: { model_name: "claude-3-7-sonnet-latest" },
547
+ type: "AIMessageChunk",
548
+ name: null,
549
+ tool_calls: [],
550
+ invalid_tool_calls: [],
551
+ tool_call_chunks: [],
552
+ },
553
+ {
554
+ run_attempt: 1,
555
+ },
556
+ ],
557
+ },
558
+ {
559
+ event: "messages/complete",
560
+ data: [
561
+ {
562
+ id: "run-2",
563
+ content: [{ type: "text", text: "How may I assist you today?" }],
564
+ additional_kwargs: {},
565
+ response_metadata: { model_name: "claude-3-7-sonnet-latest" },
566
+ type: "ai",
567
+ name: null,
568
+ tool_calls: [],
569
+ invalid_tool_calls: [],
570
+ tool_call_chunks: [],
571
+ },
572
+ ],
573
+ },
574
+ ]);
575
+
576
+ const { result } = renderHook(() =>
577
+ useLangGraphMessages({
578
+ stream: mockStreamCallback,
579
+ appendMessage: appendLangChainChunk,
580
+ }),
581
+ );
582
+
583
+ act(() => {
584
+ result.current.sendMessage(
585
+ [
586
+ {
587
+ type: "human",
588
+ content: "Hello!",
589
+ },
590
+ ],
591
+ {},
592
+ );
593
+ });
594
+
595
+ await waitFor(() => {
596
+ expect(result.current.messages.length).toEqual(3);
597
+ expect(result.current.messages[0].type).toEqual("human");
598
+ expect(result.current.messages[1].type).toEqual("ai");
599
+ expect(result.current.messages[2].type).toEqual("ai");
600
+ expect(
601
+ (result.current.messages[1].content[0] as MessageContentText).type,
602
+ ).toEqual("text");
603
+ expect(
604
+ (result.current.messages[1].content[0] as MessageContentText).text,
605
+ ).toEqual("Hello!");
606
+ expect(
607
+ (result.current.messages[2].content[0] as MessageContentText).text,
608
+ ).toEqual("How may I assist you today?");
609
+ });
610
+ });
611
+ });