@langchain/langgraph-sdk 0.0.91 → 0.0.93

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,17 @@
  # @langchain/langgraph-sdk

+ ## 0.0.93
+
+ ### Patch Changes
+
+ - d53c891: Fix useStream race condition when flushing messages
+
+ ## 0.0.92
+
+ ### Patch Changes
+
+ - 603daa6: Make history fetching configurable in useStream via `fetchStateHistory`
+
  ## 0.0.91

  ### Patch Changes
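For context on the 0.0.92 entry above, here is a minimal sketch of how the new `fetchStateHistory` option can be passed to `useStream` from the React entry point; the `apiUrl` and `assistantId` values are placeholders, not part of this diff.

```ts
// Sketch only: the apiUrl/assistantId values below are placeholders.
import { useStream } from "@langchain/langgraph-sdk/react";

function Chat() {
  const stream = useStream({
    apiUrl: "http://localhost:2024",
    assistantId: "agent",
    messagesKey: "messages",
    // Cap the thread-history fetch at 25 entries per load.
    // Other accepted values: true (default, up to 1000 entries) and
    // false (fetch only the latest thread state, no branch tree).
    fetchStateHistory: { limit: 25 },
  });

  return stream.isLoading ? "Streaming…" : null;
}
```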
@@ -87,6 +87,17 @@ function findLastIndex(array, predicate) {
  }
  function getBranchSequence(history) {
  const childrenMap = {};
+ // Short circuit if there's only a singular one state
+ // TODO: I think we can make this more generalizable for all `fetchStateHistory` values.
+ if (history.length <= 1) {
+ return {
+ rootSequence: {
+ type: "sequence",
+ items: history.map((value) => ({ type: "node", value, path: [] })),
+ },
+ paths: [],
+ };
+ }
  // First pass - collect nodes for each checkpoint
  history.forEach((state) => {
  const checkpointId = state.parent_checkpoint?.checkpoint_id ?? "$";
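The short circuit above returns the same shape as the full branch computation but skips it when at most one state is present, which is always the case once history fetching is disabled. A minimal, self-contained sketch of that return shape, using simplified stand-in types rather than the SDK's own:

```ts
// Simplified stand-ins for the SDK's state and branch-tree types (assumption).
interface ThreadState {
  checkpoint: { checkpoint_id: string };
  parent_checkpoint?: { checkpoint_id: string } | null;
}
type BranchNode = { type: "node"; value: ThreadState; path: string[] };
type Sequence = { type: "sequence"; items: BranchNode[] };

function shortCircuit(history: ThreadState[]): { rootSequence: Sequence; paths: string[][] } {
  // With zero or one state there is nothing to branch on, so the tree is a
  // single-item (or empty) sequence and there are no alternate paths.
  return {
    rootSequence: {
      type: "sequence",
      items: history.map((value) => ({ type: "node", value, path: [] })),
    },
    paths: [],
  };
}

// A history fetched with `fetchStateHistory: false` contains only the latest state.
const latestOnly: ThreadState[] = [
  { checkpoint: { checkpoint_id: "ckpt-1" }, parent_checkpoint: null },
];
console.log(shortCircuit(latestOnly).rootSequence.items.length); // 1
```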
@@ -171,10 +182,16 @@ function getBranchView(sequence, paths, branch) {
  }
  return { history, branchByCheckpoint };
  }
- function fetchHistory(client, threadId) {
- return client.threads.getHistory(threadId, { limit: 1000 });
+ function fetchHistory(client, threadId, options) {
+ if (options?.limit === false) {
+ return client.threads
+ .getState(threadId)
+ .then((state) => [state]);
+ }
+ const limit = typeof options?.limit === "number" ? options.limit : 1000;
+ return client.threads.getHistory(threadId, { limit });
  }
- function useThreadHistory(threadId, client, clearCallbackRef, submittingRef) {
+ function useThreadHistory(threadId, client, limit, clearCallbackRef, submittingRef) {
  const [history, setHistory] = (0, react_1.useState)([]);
  const clientHash = (0, client_js_1.getClientConfigHash)(client);
  const clientRef = (0, react_1.useRef)(client);
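A sketch of the three limit cases the new `fetchHistory` handles, written against a pared-down stand-in for the client's `threads` API (not the SDK's full `Client` type): `false` swaps the history call for a single `getState`, a number is forwarded as-is, and anything else falls back to the previous hard-coded 1000.

```ts
// Stand-in interface for the two thread calls used by fetchHistory (assumption).
interface ThreadsClient<State> {
  getState(threadId: string): Promise<State>;
  getHistory(threadId: string, options: { limit: number }): Promise<State[]>;
}

async function fetchHistorySketch<State>(
  threads: ThreadsClient<State>,
  threadId: string,
  limit: number | boolean | undefined,
): Promise<State[]> {
  if (limit === false) {
    // `fetchStateHistory: false`: a single getState call, wrapped in an array
    // so callers keep receiving a list of states.
    const state = await threads.getState(threadId);
    return [state];
  }
  // A numeric limit is forwarded as-is; true/undefined fall back to 1000.
  const resolved = typeof limit === "number" ? limit : 1000;
  return threads.getHistory(threadId, { limit: resolved });
}
```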
@@ -182,7 +199,9 @@ function useThreadHistory(threadId, client, clearCallbackRef, submittingRef) {
  const fetcher = (0, react_1.useCallback)((threadId) => {
  if (threadId != null) {
  const client = clientRef.current;
- return fetchHistory(client, threadId).then((history) => {
+ return fetchHistory(client, threadId, {
+ limit,
+ }).then((history) => {
  setHistory(history);
  return history;
  });
@@ -190,12 +209,12 @@ function useThreadHistory(threadId, client, clearCallbackRef, submittingRef) {
  setHistory([]);
  clearCallbackRef.current?.();
  return Promise.resolve([]);
- }, [clearCallbackRef]);
+ }, [clearCallbackRef, limit]);
  (0, react_1.useEffect)(() => {
  if (submittingRef.current)
  return;
  void fetcher(threadId);
- }, [fetcher, clientHash, submittingRef, threadId]);
+ }, [fetcher, clientHash, limit, submittingRef, threadId]);
  return {
  data: history,
  mutate: (mutateId) => fetcher(mutateId ?? threadId),
@@ -241,8 +260,9 @@ function useStreamValuesState() {
  return [values?.[0] ?? null, setStreamValues, mutate];
  }
  function useStream(options) {
- // eslint-disable-next-line prefer-const
- let { assistantId, messagesKey, onCreated, onError, onFinish } = options;
+ let { messagesKey } = options;
+ const { assistantId, fetchStateHistory } = options;
+ const { onCreated, onError, onFinish } = options;
  const reconnectOnMountRef = (0, react_1.useRef)(options.reconnectOnMount);
  const runMetadataStorage = (0, react_1.useMemo)(() => {
  if (typeof window === "undefined")
@@ -309,11 +329,12 @@ function useStream(options) {
  clearCallbackRef.current = () => {
  setStreamError(undefined);
  setStreamValues(null);
+ messageManagerRef.current.clear();
  };
- // TODO: this should be done on the server to avoid pagination
- // TODO: should we permit adapter? SWR / React Query?
- // TODO: make this only when branching is expected
- const history = useThreadHistory(threadId, client, clearCallbackRef, submittingRef);
+ const historyLimit = typeof fetchStateHistory === "object" && fetchStateHistory != null
+ ? fetchStateHistory.limit ?? true
+ : fetchStateHistory ?? true;
+ const history = useThreadHistory(threadId, client, historyLimit, clearCallbackRef, submittingRef);
  const getMessages = (0, react_1.useMemo)(() => {
  return (value) => Array.isArray(value[messagesKey])
  ? value[messagesKey]
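The ternary above collapses the public `fetchStateHistory` option into the boolean-or-number `historyLimit` that `useThreadHistory` receives. A standalone sketch of that normalization, with the option type written out as an assumption based on the `.d.ts` hunk further down:

```ts
// Assumed option type, mirroring the declaration added in the .d.ts hunk below.
type FetchStateHistory = boolean | { limit: number } | undefined;

function toHistoryLimit(fetchStateHistory: FetchStateHistory): boolean | number {
  if (typeof fetchStateHistory === "object" && fetchStateHistory != null) {
    return fetchStateHistory.limit ?? true;
  }
  return fetchStateHistory ?? true;
}

console.log(toHistoryLimit(undefined));     // true  -> default 1000-entry fetch
console.log(toHistoryLimit(true));          // true
console.log(toHistoryLimit({ limit: 25 })); // 25    -> getHistory({ limit: 25 })
console.log(toHistoryLimit(false));         // false -> getState only
```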
@@ -456,8 +477,6 @@ function useStream(options) {
  }
  finally {
  setIsLoading(false);
- // Assumption: messages are already handled, we can clear the manager
- messageManagerRef.current.clear();
  submittingRef.current = false;
  abortRef.current = null;
  }
@@ -606,7 +625,12 @@ function useStream(options) {
  branch,
  setBranch,
  history: flatHistory,
- experimental_branchTree: rootSequence,
+ get experimental_branchTree() {
+ if (historyLimit === false) {
+ throw new Error("`experimental_branchTree` is not available when `fetchStateHistory` is set to `false`");
+ }
+ return rootSequence;
+ },
  get interrupt() {
  // Don't show the interrupt if the stream is loading
  if (isLoading)
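Because `experimental_branchTree` is now a throwing getter, consumers that may run with `fetchStateHistory: false` need to guard the property access. A hedged consumer-side sketch (the `stream` parameter stands for a `useStream` result):

```ts
// When useStream is configured with `fetchStateHistory: false`, reading
// `experimental_branchTree` throws instead of returning a tree built from a
// single state. Guard the access where the history setting isn't known.
function getBranchTreeOrNull(stream: { experimental_branchTree: unknown }): unknown {
  try {
    return stream.experimental_branchTree;
  } catch {
    // History fetching is disabled; there is no branch tree to render.
    return null;
  }
}
```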
@@ -167,6 +167,15 @@ export interface UseStreamOptions<StateType extends Record<string, unknown> = Re
  * cached UI display without server fetches.
  */
  initialValues?: StateType | null;
+ /**
+ * Whether to fetch the history of the thread.
+ * If true, the history will be fetched from the server. Defaults to 1000 entries.
+ * If false, only the last state will be fetched from the server.
+ * @default true
+ */
+ fetchStateHistory?: boolean | {
+ limit: number;
+ };
  }
  interface RunMetadataStorage {
  getItem(key: `lg:stream:${string}`): string | null;
@@ -83,6 +83,17 @@ function findLastIndex(array, predicate) {
  }
  function getBranchSequence(history) {
  const childrenMap = {};
+ // Short circuit if there's only a singular one state
+ // TODO: I think we can make this more generalizable for all `fetchStateHistory` values.
+ if (history.length <= 1) {
+ return {
+ rootSequence: {
+ type: "sequence",
+ items: history.map((value) => ({ type: "node", value, path: [] })),
+ },
+ paths: [],
+ };
+ }
  // First pass - collect nodes for each checkpoint
  history.forEach((state) => {
  const checkpointId = state.parent_checkpoint?.checkpoint_id ?? "$";
@@ -167,10 +178,16 @@ function getBranchView(sequence, paths, branch) {
  }
  return { history, branchByCheckpoint };
  }
- function fetchHistory(client, threadId) {
- return client.threads.getHistory(threadId, { limit: 1000 });
+ function fetchHistory(client, threadId, options) {
+ if (options?.limit === false) {
+ return client.threads
+ .getState(threadId)
+ .then((state) => [state]);
+ }
+ const limit = typeof options?.limit === "number" ? options.limit : 1000;
+ return client.threads.getHistory(threadId, { limit });
  }
- function useThreadHistory(threadId, client, clearCallbackRef, submittingRef) {
+ function useThreadHistory(threadId, client, limit, clearCallbackRef, submittingRef) {
  const [history, setHistory] = useState([]);
  const clientHash = getClientConfigHash(client);
  const clientRef = useRef(client);
@@ -178,7 +195,9 @@ function useThreadHistory(threadId, client, clearCallbackRef, submittingRef) {
  const fetcher = useCallback((threadId) => {
  if (threadId != null) {
  const client = clientRef.current;
- return fetchHistory(client, threadId).then((history) => {
+ return fetchHistory(client, threadId, {
+ limit,
+ }).then((history) => {
  setHistory(history);
  return history;
  });
@@ -186,12 +205,12 @@ function useThreadHistory(threadId, client, clearCallbackRef, submittingRef) {
  setHistory([]);
  clearCallbackRef.current?.();
  return Promise.resolve([]);
- }, [clearCallbackRef]);
+ }, [clearCallbackRef, limit]);
  useEffect(() => {
  if (submittingRef.current)
  return;
  void fetcher(threadId);
- }, [fetcher, clientHash, submittingRef, threadId]);
+ }, [fetcher, clientHash, limit, submittingRef, threadId]);
  return {
  data: history,
  mutate: (mutateId) => fetcher(mutateId ?? threadId),
@@ -237,8 +256,9 @@ function useStreamValuesState() {
  return [values?.[0] ?? null, setStreamValues, mutate];
  }
  export function useStream(options) {
- // eslint-disable-next-line prefer-const
- let { assistantId, messagesKey, onCreated, onError, onFinish } = options;
+ let { messagesKey } = options;
+ const { assistantId, fetchStateHistory } = options;
+ const { onCreated, onError, onFinish } = options;
  const reconnectOnMountRef = useRef(options.reconnectOnMount);
  const runMetadataStorage = useMemo(() => {
  if (typeof window === "undefined")
@@ -305,11 +325,12 @@ export function useStream(options) {
  clearCallbackRef.current = () => {
  setStreamError(undefined);
  setStreamValues(null);
+ messageManagerRef.current.clear();
  };
- // TODO: this should be done on the server to avoid pagination
- // TODO: should we permit adapter? SWR / React Query?
- // TODO: make this only when branching is expected
- const history = useThreadHistory(threadId, client, clearCallbackRef, submittingRef);
+ const historyLimit = typeof fetchStateHistory === "object" && fetchStateHistory != null
+ ? fetchStateHistory.limit ?? true
+ : fetchStateHistory ?? true;
+ const history = useThreadHistory(threadId, client, historyLimit, clearCallbackRef, submittingRef);
  const getMessages = useMemo(() => {
  return (value) => Array.isArray(value[messagesKey])
  ? value[messagesKey]
@@ -452,8 +473,6 @@ export function useStream(options) {
  }
  finally {
  setIsLoading(false);
- // Assumption: messages are already handled, we can clear the manager
- messageManagerRef.current.clear();
  submittingRef.current = false;
  abortRef.current = null;
  }
@@ -602,7 +621,12 @@ export function useStream(options) {
  branch,
  setBranch,
  history: flatHistory,
- experimental_branchTree: rootSequence,
+ get experimental_branchTree() {
+ if (historyLimit === false) {
+ throw new Error("`experimental_branchTree` is not available when `fetchStateHistory` is set to `false`");
+ }
+ return rootSequence;
+ },
  get interrupt() {
  // Don't show the interrupt if the stream is loading
  if (isLoading)
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@langchain/langgraph-sdk",
- "version": "0.0.91",
+ "version": "0.0.93",
  "description": "Client library for interacting with the LangGraph API",
  "type": "module",
  "scripts": {