@langchain/langgraph-sdk 0.0.77 → 0.0.78

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -209,7 +209,18 @@ const useControllableThreadId = (options) => {
     return [options.threadId ?? null, onThreadId];
 };
 function useStream(options) {
-    let { assistantId, messagesKey, onError, onFinish } = options;
+    let { assistantId, messagesKey, onCreated, onError, onFinish } = options;
+    const reconnectOnMountRef = (0, react_1.useRef)(options.reconnectOnMount);
+    const runMetadataStorage = (0, react_1.useMemo)(() => {
+        if (typeof window === "undefined")
+            return null;
+        const storage = reconnectOnMountRef.current;
+        if (storage === true)
+            return window.sessionStorage;
+        if (typeof storage === "function")
+            return storage();
+        return null;
+    }, []);
     messagesKey ??= "messages";
     const client = (0, react_1.useMemo)(() => options.client ??
         new client_js_1.Client({
@@ -268,6 +279,7 @@ function useStream(options) {
     };
     // TODO: this should be done on the server to avoid pagination
     // TODO: should we permit adapter? SWR / React Query?
+    // TODO: make this only when branching is expected
     const history = useThreadHistory(threadId, client, clearCallbackRef, submittingRef);
     const getMessages = (0, react_1.useMemo)(() => {
         return (value) => Array.isArray(value[messagesKey])
@@ -322,69 +334,26 @@ function useStream(options) {
             };
         });
     })();
-    const stop = (0, react_1.useCallback)(() => {
+    const stop = () => {
         if (abortRef.current != null)
             abortRef.current.abort();
         abortRef.current = null;
-    }, []);
-    const submit = async (values, submitOptions) => {
+        if (runMetadataStorage && threadId) {
+            const runId = runMetadataStorage.getItem(`lg:stream:${threadId}`);
+            if (runId)
+                client.runs.cancel(threadId, runId);
+            runMetadataStorage.removeItem(`lg:stream:${threadId}`);
+        }
+    };
+    async function consumeStream(action) {
         try {
             setIsLoading(true);
             setStreamError(undefined);
             submittingRef.current = true;
             abortRef.current = new AbortController();
-            // Unbranch things
-            const newPath = submitOptions?.checkpoint?.checkpoint_id
-                ? branchByCheckpoint[submitOptions?.checkpoint?.checkpoint_id]?.branch
-                : undefined;
-            if (newPath != null)
-                setBranch(newPath ?? "");
-            // Assumption: we're setting the initial value
-            // Used for instant feedback
-            setStreamValues(() => {
-                const values = { ...historyValues };
-                if (submitOptions?.optimisticValues != null) {
-                    return {
-                        ...values,
-                        ...(typeof submitOptions.optimisticValues === "function"
-                            ? submitOptions.optimisticValues(values)
-                            : submitOptions.optimisticValues),
-                    };
-                }
-                return values;
-            });
-            let usableThreadId = threadId;
-            if (!usableThreadId) {
-                const thread = await client.threads.create();
-                onThreadId(thread.thread_id);
-                usableThreadId = thread.thread_id;
-            }
-            const streamMode = unique([
-                ...(submitOptions?.streamMode ?? []),
-                ...trackStreamModeRef.current,
-                ...callbackStreamMode,
-            ]);
-            const checkpoint = submitOptions?.checkpoint ?? threadHead?.checkpoint ?? undefined;
-            // @ts-expect-error
-            if (checkpoint != null)
-                delete checkpoint.thread_id;
-            const run = client.runs.stream(usableThreadId, assistantId, {
-                input: values,
-                config: submitOptions?.config,
-                command: submitOptions?.command,
-                interruptBefore: submitOptions?.interruptBefore,
-                interruptAfter: submitOptions?.interruptAfter,
-                metadata: submitOptions?.metadata,
-                multitaskStrategy: submitOptions?.multitaskStrategy,
-                onCompletion: submitOptions?.onCompletion,
-                onDisconnect: submitOptions?.onDisconnect ?? "cancel",
-                signal: abortRef.current.signal,
-                checkpoint,
-                streamMode,
-                streamSubgraphs: submitOptions?.streamSubgraphs,
-            });
+            const run = await action(abortRef.current.signal);
             let streamError;
-            for await (const { event, data } of run) {
+            for await (const { event, data } of run.stream) {
                 if (event === "error") {
                     streamError = new StreamError(data);
                     break;
@@ -431,7 +400,7 @@ function useStream(options) {
                 }
             }
             // TODO: stream created checkpoints to avoid an unnecessary network request
-            const result = await history.mutate(usableThreadId);
+            const result = await run.onSuccess();
             setStreamValues(null);
             if (streamError != null)
                 throw streamError;
@@ -454,7 +423,127 @@ function useStream(options) {
             submittingRef.current = false;
             abortRef.current = null;
         }
+    }
+    const joinStream = async (runId, lastEventId) => {
+        lastEventId ??= "-1";
+        if (!threadId)
+            return;
+        await consumeStream(async (signal) => {
+            const stream = client.runs.joinStream(threadId, runId, {
+                signal,
+                lastEventId,
+            });
+            return {
+                onSuccess: () => {
+                    runMetadataStorage?.removeItem(`lg:stream:${threadId}`);
+                    return history.mutate(threadId);
+                },
+                stream,
+            };
+        });
     };
+    const submit = async (values, submitOptions) => {
+        await consumeStream(async (signal) => {
+            // Unbranch things
+            const newPath = submitOptions?.checkpoint?.checkpoint_id
+                ? branchByCheckpoint[submitOptions?.checkpoint?.checkpoint_id]?.branch
+                : undefined;
+            if (newPath != null)
+                setBranch(newPath ?? "");
+            // Assumption: we're setting the initial value
+            // Used for instant feedback
+            setStreamValues(() => {
+                const values = { ...historyValues };
+                if (submitOptions?.optimisticValues != null) {
+                    return {
+                        ...values,
+                        ...(typeof submitOptions.optimisticValues === "function"
+                            ? submitOptions.optimisticValues(values)
+                            : submitOptions.optimisticValues),
+                    };
+                }
+                return values;
+            });
+            let usableThreadId = threadId;
+            if (!usableThreadId) {
+                const thread = await client.threads.create();
+                onThreadId(thread.thread_id);
+                usableThreadId = thread.thread_id;
+            }
+            const streamMode = unique([
+                ...(submitOptions?.streamMode ?? []),
+                ...trackStreamModeRef.current,
+                ...callbackStreamMode,
+            ]);
+            const checkpoint = submitOptions?.checkpoint ?? threadHead?.checkpoint ?? undefined;
+            // @ts-expect-error
+            if (checkpoint != null)
+                delete checkpoint.thread_id;
+            let rejoinKey;
+            const stream = client.runs.stream(usableThreadId, assistantId, {
+                input: values,
+                config: submitOptions?.config,
+                command: submitOptions?.command,
+                interruptBefore: submitOptions?.interruptBefore,
+                interruptAfter: submitOptions?.interruptAfter,
+                metadata: submitOptions?.metadata,
+                multitaskStrategy: submitOptions?.multitaskStrategy,
+                onCompletion: submitOptions?.onCompletion,
+                onDisconnect: submitOptions?.onDisconnect ??
+                    (runMetadataStorage ? "continue" : "cancel"),
+                signal,
+                checkpoint,
+                streamMode,
+                streamSubgraphs: submitOptions?.streamSubgraphs,
+                streamResumable: submitOptions?.streamResumable ?? !!runMetadataStorage,
+                onRunCreated(params) {
+                    const runParams = {
+                        run_id: params.run_id,
+                        thread_id: params.thread_id ?? usableThreadId,
+                    };
+                    if (runMetadataStorage) {
+                        rejoinKey = `lg:stream:${runParams.thread_id}`;
+                        runMetadataStorage.setItem(rejoinKey, runParams.run_id);
+                    }
+                    onCreated?.(runParams);
+                },
+            });
+            return {
+                stream,
+                onSuccess: () => {
+                    if (rejoinKey)
+                        runMetadataStorage?.removeItem(rejoinKey);
+                    return history.mutate(usableThreadId);
+                },
+            };
+        });
+    };
+    const reconnectKey = (0, react_1.useMemo)(() => {
+        if (!runMetadataStorage || isLoading)
+            return undefined;
+        if (typeof window === "undefined")
+            return undefined;
+        const runId = runMetadataStorage?.getItem(`lg:stream:${threadId}`);
+        if (!runId)
+            return undefined;
+        return { runId, threadId };
+    }, [runMetadataStorage, isLoading, threadId]);
+    const shouldReconnect = !!runMetadataStorage;
+    const reconnectRef = (0, react_1.useRef)({ threadId, shouldReconnect });
+    const joinStreamRef = (0, react_1.useRef)(joinStream);
+    joinStreamRef.current = joinStream;
+    (0, react_1.useEffect)(() => {
+        // reset shouldReconnect when switching threads
+        if (reconnectRef.current.threadId !== threadId) {
+            reconnectRef.current = { threadId, shouldReconnect };
+        }
+    }, [threadId, shouldReconnect]);
+    (0, react_1.useEffect)(() => {
+        if (reconnectKey && reconnectRef.current.shouldReconnect) {
+            reconnectRef.current.shouldReconnect = false;
+            joinStreamRef.current?.(reconnectKey.runId);
+        }
+    }, [reconnectKey]);
     const error = streamError ?? historyError;
     const values = streamValues ?? historyValues;
     return {
@@ -468,6 +557,7 @@ function useStream(options) {
         isLoading,
         stop,
         submit,
+        joinStream,
         branch,
         setBranch,
         history: flatHistory,
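
Taken together, the runtime changes above replace the inline streaming loop in submit with a shared consumeStream(action) helper that both submit and the new joinStream feed: each caller returns the stream to iterate plus an onSuccess callback that refreshes thread history. When reconnectOnMount resolves to a storage, the in-flight run ID is kept under a lg:stream:<thread_id> key, stop() cancels that run and clears the key, and a mount-time effect rejoins the stream once per thread. The same edits are applied again further below to the ES module build of the hook. As a rough sketch of the bookkeeping (IDs are made up):

    // Hypothetical sessionStorage contents while a resumable run is in flight
    sessionStorage.getItem(`lg:stream:${threadId}`); // => "<run_id>" while streaming, null after success or stop()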
@@ -93,6 +93,13 @@ export interface UseStreamOptions<StateType extends Record<string, unknown> = Re
      * Callback that is called when the stream is finished.
      */
     onFinish?: (state: ThreadState<StateType>) => void;
+    /**
+     * Callback that is called when a new stream is created.
+     */
+    onCreated?: (run: {
+        run_id: string;
+        thread_id: string;
+    }) => void;
     /**
      * Callback that is called when an update event is received.
      */
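
The matching type declarations follow. onCreated fires as soon as the run is acknowledged, before any events arrive, which is what the runtime code above uses to persist the rejoin key. A minimal consumer-side sketch, assuming a local deployment; apiUrl and the logging are illustrative, only assistantId and onCreated come from this diff:

    import { useStream } from "@langchain/langgraph-sdk/react";

    const stream = useStream({
      apiUrl: "http://localhost:2024", // assumed deployment URL
      assistantId: "agent",
      onCreated: (run) => {
        // run_id and thread_id are available immediately
        console.log(`run ${run.run_id} created on thread ${run.thread_id}`);
      },
    });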
@@ -125,6 +132,13 @@ export interface UseStreamOptions<StateType extends Record<string, unknown> = Re
      * Callback that is called when the thread ID is updated (ie when a new thread is created).
      */
     onThreadId?: (threadId: string) => void;
+    /** Will reconnect the stream on mount */
+    reconnectOnMount?: boolean | (() => RunMetadataStorage);
+}
+interface RunMetadataStorage {
+    getItem(key: `lg:stream:${string}`): string | null;
+    setItem(key: `lg:stream:${string}`, value: string): void;
+    removeItem(key: `lg:stream:${string}`): void;
 }
 export interface UseStream<StateType extends Record<string, unknown> = Record<string, unknown>, Bag extends BagTemplate = BagTemplate> {
     /**
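
reconnectOnMount: true stores the run metadata in window.sessionStorage; passing a factory lets the application supply any object matching RunMetadataStorage (window.localStorage already satisfies it). A hedged sketch that keeps the rejoin key across full page reloads; options other than reconnectOnMount are illustrative:

    const stream = useStream({
      apiUrl: "http://localhost:2024", // assumed
      assistantId: "agent",
      threadId,                        // controlled thread ID from application state (assumed)
      reconnectOnMount: () => window.localStorage,
    });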
@@ -190,6 +204,10 @@ export interface UseStream<StateType extends Record<string, unknown> = Record<st
      * The ID of the assistant to use.
      */
     assistantId: string;
+    /**
+     * Join an active stream.
+     */
+    joinStream: (runId: string) => Promise<void>;
 }
 type ConfigWithConfigurable<ConfigurableType extends Record<string, unknown>> = Config & {
     configurable?: ConfigurableType;
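
Besides the automatic reconnect on mount, joinStream can be called directly, for example with a run ID captured earlier via onCreated; per the implementation above it returns early unless the hook already has a threadId, and it resolves once the rejoined stream finishes. A rough sketch with a hypothetical storage key:

    // Re-attach to a run whose ID the application stored itself (key name is illustrative)
    const savedRunId = window.localStorage.getItem("my-app:last-run-id");
    if (savedRunId) {
      await stream.joinStream(savedRunId);
    }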
@@ -213,6 +231,7 @@ interface SubmitOptions<StateType extends Record<string, unknown> = Record<strin
      * @default false
      */
     streamSubgraphs?: boolean;
+    streamResumable?: boolean;
 }
 export declare function useStream<StateType extends Record<string, unknown> = Record<string, unknown>, Bag extends {
     ConfigurableType?: Record<string, unknown>;
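
streamResumable is forwarded to client.runs.stream and, per the runtime change above, defaults to true whenever a run-metadata storage is configured; it can also be forced per call. A minimal sketch, with the message payload following the SDK's usual messages shape:

    stream.submit(
      { messages: [{ type: "human", content: "Hello" }] },
      { streamResumable: true },
    );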
@@ -206,7 +206,18 @@ const useControllableThreadId = (options) => {
     return [options.threadId ?? null, onThreadId];
 };
 export function useStream(options) {
-    let { assistantId, messagesKey, onError, onFinish } = options;
+    let { assistantId, messagesKey, onCreated, onError, onFinish } = options;
+    const reconnectOnMountRef = useRef(options.reconnectOnMount);
+    const runMetadataStorage = useMemo(() => {
+        if (typeof window === "undefined")
+            return null;
+        const storage = reconnectOnMountRef.current;
+        if (storage === true)
+            return window.sessionStorage;
+        if (typeof storage === "function")
+            return storage();
+        return null;
+    }, []);
     messagesKey ??= "messages";
     const client = useMemo(() => options.client ??
         new Client({
@@ -265,6 +276,7 @@ export function useStream(options) {
     };
     // TODO: this should be done on the server to avoid pagination
     // TODO: should we permit adapter? SWR / React Query?
+    // TODO: make this only when branching is expected
     const history = useThreadHistory(threadId, client, clearCallbackRef, submittingRef);
     const getMessages = useMemo(() => {
         return (value) => Array.isArray(value[messagesKey])
@@ -319,69 +331,26 @@ export function useStream(options) {
             };
         });
     })();
-    const stop = useCallback(() => {
+    const stop = () => {
         if (abortRef.current != null)
             abortRef.current.abort();
         abortRef.current = null;
-    }, []);
-    const submit = async (values, submitOptions) => {
+        if (runMetadataStorage && threadId) {
+            const runId = runMetadataStorage.getItem(`lg:stream:${threadId}`);
+            if (runId)
+                client.runs.cancel(threadId, runId);
+            runMetadataStorage.removeItem(`lg:stream:${threadId}`);
+        }
+    };
+    async function consumeStream(action) {
         try {
             setIsLoading(true);
             setStreamError(undefined);
             submittingRef.current = true;
             abortRef.current = new AbortController();
-            // Unbranch things
-            const newPath = submitOptions?.checkpoint?.checkpoint_id
-                ? branchByCheckpoint[submitOptions?.checkpoint?.checkpoint_id]?.branch
-                : undefined;
-            if (newPath != null)
-                setBranch(newPath ?? "");
-            // Assumption: we're setting the initial value
-            // Used for instant feedback
-            setStreamValues(() => {
-                const values = { ...historyValues };
-                if (submitOptions?.optimisticValues != null) {
-                    return {
-                        ...values,
-                        ...(typeof submitOptions.optimisticValues === "function"
-                            ? submitOptions.optimisticValues(values)
-                            : submitOptions.optimisticValues),
-                    };
-                }
-                return values;
-            });
-            let usableThreadId = threadId;
-            if (!usableThreadId) {
-                const thread = await client.threads.create();
-                onThreadId(thread.thread_id);
-                usableThreadId = thread.thread_id;
-            }
-            const streamMode = unique([
-                ...(submitOptions?.streamMode ?? []),
-                ...trackStreamModeRef.current,
-                ...callbackStreamMode,
-            ]);
-            const checkpoint = submitOptions?.checkpoint ?? threadHead?.checkpoint ?? undefined;
-            // @ts-expect-error
-            if (checkpoint != null)
-                delete checkpoint.thread_id;
-            const run = client.runs.stream(usableThreadId, assistantId, {
-                input: values,
-                config: submitOptions?.config,
-                command: submitOptions?.command,
-                interruptBefore: submitOptions?.interruptBefore,
-                interruptAfter: submitOptions?.interruptAfter,
-                metadata: submitOptions?.metadata,
-                multitaskStrategy: submitOptions?.multitaskStrategy,
-                onCompletion: submitOptions?.onCompletion,
-                onDisconnect: submitOptions?.onDisconnect ?? "cancel",
-                signal: abortRef.current.signal,
-                checkpoint,
-                streamMode,
-                streamSubgraphs: submitOptions?.streamSubgraphs,
-            });
+            const run = await action(abortRef.current.signal);
             let streamError;
-            for await (const { event, data } of run) {
+            for await (const { event, data } of run.stream) {
                 if (event === "error") {
                     streamError = new StreamError(data);
                     break;
@@ -428,7 +397,7 @@ export function useStream(options) {
                 }
             }
             // TODO: stream created checkpoints to avoid an unnecessary network request
-            const result = await history.mutate(usableThreadId);
+            const result = await run.onSuccess();
             setStreamValues(null);
             if (streamError != null)
                 throw streamError;
@@ -451,7 +420,127 @@ export function useStream(options) {
             submittingRef.current = false;
             abortRef.current = null;
         }
+    }
+    const joinStream = async (runId, lastEventId) => {
+        lastEventId ??= "-1";
+        if (!threadId)
+            return;
+        await consumeStream(async (signal) => {
+            const stream = client.runs.joinStream(threadId, runId, {
+                signal,
+                lastEventId,
+            });
+            return {
+                onSuccess: () => {
+                    runMetadataStorage?.removeItem(`lg:stream:${threadId}`);
+                    return history.mutate(threadId);
+                },
+                stream,
+            };
+        });
     };
+    const submit = async (values, submitOptions) => {
+        await consumeStream(async (signal) => {
+            // Unbranch things
+            const newPath = submitOptions?.checkpoint?.checkpoint_id
+                ? branchByCheckpoint[submitOptions?.checkpoint?.checkpoint_id]?.branch
+                : undefined;
+            if (newPath != null)
+                setBranch(newPath ?? "");
+            // Assumption: we're setting the initial value
+            // Used for instant feedback
+            setStreamValues(() => {
+                const values = { ...historyValues };
+                if (submitOptions?.optimisticValues != null) {
+                    return {
+                        ...values,
+                        ...(typeof submitOptions.optimisticValues === "function"
+                            ? submitOptions.optimisticValues(values)
+                            : submitOptions.optimisticValues),
+                    };
+                }
+                return values;
+            });
+            let usableThreadId = threadId;
+            if (!usableThreadId) {
+                const thread = await client.threads.create();
+                onThreadId(thread.thread_id);
+                usableThreadId = thread.thread_id;
+            }
+            const streamMode = unique([
+                ...(submitOptions?.streamMode ?? []),
+                ...trackStreamModeRef.current,
+                ...callbackStreamMode,
+            ]);
+            const checkpoint = submitOptions?.checkpoint ?? threadHead?.checkpoint ?? undefined;
+            // @ts-expect-error
+            if (checkpoint != null)
+                delete checkpoint.thread_id;
+            let rejoinKey;
+            const stream = client.runs.stream(usableThreadId, assistantId, {
+                input: values,
+                config: submitOptions?.config,
+                command: submitOptions?.command,
+                interruptBefore: submitOptions?.interruptBefore,
+                interruptAfter: submitOptions?.interruptAfter,
+                metadata: submitOptions?.metadata,
+                multitaskStrategy: submitOptions?.multitaskStrategy,
+                onCompletion: submitOptions?.onCompletion,
+                onDisconnect: submitOptions?.onDisconnect ??
+                    (runMetadataStorage ? "continue" : "cancel"),
+                signal,
+                checkpoint,
+                streamMode,
+                streamSubgraphs: submitOptions?.streamSubgraphs,
+                streamResumable: submitOptions?.streamResumable ?? !!runMetadataStorage,
+                onRunCreated(params) {
+                    const runParams = {
+                        run_id: params.run_id,
+                        thread_id: params.thread_id ?? usableThreadId,
+                    };
+                    if (runMetadataStorage) {
+                        rejoinKey = `lg:stream:${runParams.thread_id}`;
+                        runMetadataStorage.setItem(rejoinKey, runParams.run_id);
+                    }
+                    onCreated?.(runParams);
+                },
+            });
+            return {
+                stream,
+                onSuccess: () => {
+                    if (rejoinKey)
+                        runMetadataStorage?.removeItem(rejoinKey);
+                    return history.mutate(usableThreadId);
+                },
+            };
+        });
+    };
+    const reconnectKey = useMemo(() => {
+        if (!runMetadataStorage || isLoading)
+            return undefined;
+        if (typeof window === "undefined")
+            return undefined;
+        const runId = runMetadataStorage?.getItem(`lg:stream:${threadId}`);
+        if (!runId)
+            return undefined;
+        return { runId, threadId };
+    }, [runMetadataStorage, isLoading, threadId]);
+    const shouldReconnect = !!runMetadataStorage;
+    const reconnectRef = useRef({ threadId, shouldReconnect });
+    const joinStreamRef = useRef(joinStream);
+    joinStreamRef.current = joinStream;
+    useEffect(() => {
+        // reset shouldReconnect when switching threads
+        if (reconnectRef.current.threadId !== threadId) {
+            reconnectRef.current = { threadId, shouldReconnect };
+        }
+    }, [threadId, shouldReconnect]);
+    useEffect(() => {
+        if (reconnectKey && reconnectRef.current.shouldReconnect) {
+            reconnectRef.current.shouldReconnect = false;
+            joinStreamRef.current?.(reconnectKey.runId);
+        }
+    }, [reconnectKey]);
     const error = streamError ?? historyError;
     const values = streamValues ?? historyValues;
     return {
@@ -465,6 +554,7 @@ export function useStream(options) {
         isLoading,
         stop,
         submit,
+        joinStream,
         branch,
         setBranch,
         history: flatHistory,
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@langchain/langgraph-sdk",
-  "version": "0.0.77",
+  "version": "0.0.78",
   "description": "Client library for interacting with the LangGraph API",
   "type": "module",
   "packageManager": "yarn@1.22.19",