@langchain/langgraph-sdk 0.1.4 → 0.1.6

This diff shows the content of publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions exactly as they appear in the public registry.
@@ -0,0 +1,485 @@
+ "use strict";
+ /* __LC_ALLOW_ENTRYPOINT_SIDE_EFFECTS__ */
+ "use client";
+ /* __LC_ALLOW_ENTRYPOINT_SIDE_EFFECTS__ */
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.useStreamLGP = useStreamLGP;
+ const react_1 = require("react");
+ const utils_js_1 = require("./utils.cjs");
+ const errors_js_1 = require("./errors.cjs");
+ const branching_js_1 = require("./branching.cjs");
+ const manager_js_1 = require("./manager.cjs");
+ const client_js_1 = require("../client.cjs");
+ const messages_js_1 = require("./messages.cjs");
+ const thread_js_1 = require("./thread.cjs");
+ function getFetchHistoryKey(client, threadId, limit) {
+     return [(0, client_js_1.getClientConfigHash)(client), threadId, limit].join(":");
+ }
+ function fetchHistory(client, threadId, options) {
+     if (options?.limit === false) {
+         return client.threads.getState(threadId).then((state) => {
+             if (state.checkpoint == null)
+                 return [];
+             return [state];
+         });
+     }
+     const limit = typeof options?.limit === "number" ? options.limit : 10;
+     return client.threads.getHistory(threadId, { limit });
+ }
+ function useThreadHistory(client, threadId, limit, options) {
+     const key = getFetchHistoryKey(client, threadId, limit);
+     const [state, setState] = (0, react_1.useState)(() => ({
+         data: undefined,
+         error: undefined,
+         isLoading: threadId != null,
+     }));
+     const clientRef = (0, react_1.useRef)(client);
+     clientRef.current = client;
+     const onErrorRef = (0, react_1.useRef)(options?.onError);
+     onErrorRef.current = options?.onError;
+     const fetcher = (0, react_1.useCallback)((threadId, limit) => {
+         if (threadId != null) {
+             const client = clientRef.current;
+             setState((state) => ({ ...state, isLoading: true }));
+             return fetchHistory(client, threadId, { limit }).then((data) => {
+                 setState({ data, error: undefined, isLoading: false });
+                 return data;
+             }, (error) => {
+                 setState(({ data }) => ({ data, error, isLoading: false }));
+                 onErrorRef.current?.(error);
+                 return Promise.reject(error);
+             });
+         }
+         setState({ data: undefined, error: undefined, isLoading: false });
+         return Promise.resolve([]);
+     }, []);
+     (0, react_1.useEffect)(() => {
+         // Skip if a stream is already in progress, no need to fetch history
+         if (options.submittingRef.current != null &&
+             options.submittingRef.current === threadId) {
+             return;
+         }
+         void fetcher(threadId, limit);
+         // The `threadId` and `limit` arguments are already present in `key`
+         // Thus we don't need to include them in the dependency array
+         // eslint-disable-next-line react-hooks/exhaustive-deps
+     }, [fetcher, key]);
+     return {
+         data: state.data,
+         error: state.error,
+         isLoading: state.isLoading,
+         mutate: (mutateId) => fetcher(mutateId ?? threadId, limit),
+     };
+ }
+ function useStreamLGP(options) {
+     const reconnectOnMountRef = (0, react_1.useRef)(options.reconnectOnMount);
+     const runMetadataStorage = (0, react_1.useMemo)(() => {
+         if (typeof window === "undefined")
+             return null;
+         const storage = reconnectOnMountRef.current;
+         if (storage === true)
+             return window.sessionStorage;
+         if (typeof storage === "function")
+             return storage();
+         return null;
+     }, []);
+     const client = (0, react_1.useMemo)(() => options.client ??
+         new client_js_1.Client({
+             apiUrl: options.apiUrl,
+             apiKey: options.apiKey,
+             callerOptions: options.callerOptions,
+             defaultHeaders: options.defaultHeaders,
+         }), [
+         options.client,
+         options.apiKey,
+         options.apiUrl,
+         options.callerOptions,
+         options.defaultHeaders,
+     ]);
+     const [messageManager] = (0, react_1.useState)(() => new messages_js_1.MessageTupleManager());
+     const [stream] = (0, react_1.useState)(() => new manager_js_1.StreamManager(messageManager));
+     (0, react_1.useSyncExternalStore)(stream.subscribe, stream.getSnapshot, stream.getSnapshot);
+     const [threadId, onThreadId] = (0, thread_js_1.useControllableThreadId)(options);
+     const trackStreamModeRef = (0, react_1.useRef)([]);
+     const trackStreamMode = (0, react_1.useCallback)((...mode) => {
+         const ref = trackStreamModeRef.current;
+         for (const m of mode) {
+             if (!ref.includes(m))
+                 ref.push(m);
+         }
+     }, []);
+     const hasUpdateListener = options.onUpdateEvent != null;
+     const hasCustomListener = options.onCustomEvent != null;
+     const hasLangChainListener = options.onLangChainEvent != null;
+     const hasDebugListener = options.onDebugEvent != null;
+     const hasCheckpointListener = options.onCheckpointEvent != null;
+     const hasTaskListener = options.onTaskEvent != null;
+     const callbackStreamMode = (0, react_1.useMemo)(() => {
+         const modes = [];
+         if (hasUpdateListener)
+             modes.push("updates");
+         if (hasCustomListener)
+             modes.push("custom");
+         if (hasLangChainListener)
+             modes.push("events");
+         if (hasDebugListener)
+             modes.push("debug");
+         if (hasCheckpointListener)
+             modes.push("checkpoints");
+         if (hasTaskListener)
+             modes.push("tasks");
+         return modes;
+     }, [
+         hasUpdateListener,
+         hasCustomListener,
+         hasLangChainListener,
+         hasDebugListener,
+         hasCheckpointListener,
+         hasTaskListener,
+     ]);
+     const clearCallbackRef = (0, react_1.useRef)(null);
+     clearCallbackRef.current = stream.clear;
+     const threadIdRef = (0, react_1.useRef)(threadId);
+     const threadIdStreamingRef = (0, react_1.useRef)(null);
+     // Cancel the stream if thread ID has changed
+     (0, react_1.useEffect)(() => {
+         if (threadIdRef.current !== threadId) {
+             threadIdRef.current = threadId;
+             stream.clear();
+         }
+     }, [threadId, stream]);
+     const historyLimit = typeof options.fetchStateHistory === "object" &&
+         options.fetchStateHistory != null
+         ? options.fetchStateHistory.limit ?? false
+         : options.fetchStateHistory ?? false;
+     const history = useThreadHistory(client, threadId, historyLimit, {
+         submittingRef: threadIdStreamingRef,
+         onError: options.onError,
+     });
+     const getMessages = (value) => {
+         const messagesKey = options.messagesKey ?? "messages";
+         return Array.isArray(value[messagesKey]) ? value[messagesKey] : [];
+     };
+     const setMessages = (current, messages) => {
+         const messagesKey = options.messagesKey ?? "messages";
+         return { ...current, [messagesKey]: messages };
+     };
+     const [branch, setBranch] = (0, react_1.useState)("");
+     const branchContext = (0, branching_js_1.getBranchContext)(branch, history.data);
+     const historyValues = branchContext.threadHead?.values ??
+         options.initialValues ??
+         {};
+     const historyError = (() => {
+         const error = branchContext.threadHead?.tasks?.at(-1)?.error;
+         if (error == null)
+             return undefined;
+         try {
+             const parsed = JSON.parse(error);
+             if (errors_js_1.StreamError.isStructuredError(parsed))
+                 return new errors_js_1.StreamError(parsed);
+             return parsed;
+         }
+         catch {
+             // do nothing
+         }
+         return error;
+     })();
+     const messageMetadata = (() => {
+         const alreadyShown = new Set();
+         return getMessages(historyValues).map((message, idx) => {
+             const messageId = message.id ?? idx;
+             // Find the first checkpoint where the message was seen
+             const firstSeenState = (0, utils_js_1.findLast)(history.data ?? [], (state) => getMessages(state.values)
+                 .map((m, idx) => m.id ?? idx)
+                 .includes(messageId));
+             const checkpointId = firstSeenState?.checkpoint?.checkpoint_id;
+             let branch = checkpointId != null
+                 ? branchContext.branchByCheckpoint[checkpointId]
+                 : undefined;
+             if (!branch?.branch?.length)
+                 branch = undefined;
+             // serialize branches
+             const optionsShown = branch?.branchOptions?.flat(2).join(",");
+             if (optionsShown) {
+                 if (alreadyShown.has(optionsShown))
+                     branch = undefined;
+                 alreadyShown.add(optionsShown);
+             }
+             return {
+                 messageId: messageId.toString(),
+                 firstSeenState,
+                 branch: branch?.branch,
+                 branchOptions: branch?.branchOptions,
+             };
+         });
+     })();
+     const stop = () => stream.stop(historyValues, {
+         onStop: (args) => {
+             if (runMetadataStorage && threadId) {
+                 const runId = runMetadataStorage.getItem(`lg:stream:${threadId}`);
+                 if (runId)
+                     void client.runs.cancel(threadId, runId);
+                 runMetadataStorage.removeItem(`lg:stream:${threadId}`);
+             }
+             options.onStop?.(args);
+         },
+     });
+     // --- TRANSPORT ---
+     const submit = async (values, submitOptions) => {
+         // Unbranch things
+         const checkpointId = submitOptions?.checkpoint?.checkpoint_id;
+         setBranch(checkpointId != null
+             ? branchContext.branchByCheckpoint[checkpointId]?.branch ?? ""
+             : "");
+         stream.setStreamValues(() => {
+             if (submitOptions?.optimisticValues != null) {
+                 return {
+                     ...historyValues,
+                     ...(typeof submitOptions.optimisticValues === "function"
+                         ? submitOptions.optimisticValues(historyValues)
+                         : submitOptions.optimisticValues),
+                 };
+             }
+             return { ...historyValues };
+         });
+         // When `fetchStateHistory` is requested, thus we assume that branching
+         // is enabled. We then need to include the implicit branch.
+         const includeImplicitBranch = historyLimit === true || typeof historyLimit === "number";
+         let callbackMeta;
+         let rejoinKey;
+         let usableThreadId = threadId;
+         await stream.start(async (signal) => {
+             if (!usableThreadId) {
+                 const thread = await client.threads.create({
+                     threadId: submitOptions?.threadId,
+                     metadata: submitOptions?.metadata,
+                 });
+                 usableThreadId = thread.thread_id;
+                 // Pre-emptively update the thread ID before
+                 // stream cancellation is kicked off and thread
+                 // is being refetched
+                 threadIdRef.current = usableThreadId;
+                 threadIdStreamingRef.current = usableThreadId;
+                 onThreadId(usableThreadId);
+             }
+             if (!usableThreadId) {
+                 throw new Error("Failed to obtain valid thread ID.");
+             }
+             threadIdStreamingRef.current = usableThreadId;
+             const streamMode = (0, utils_js_1.unique)([
+                 ...(submitOptions?.streamMode ?? []),
+                 ...trackStreamModeRef.current,
+                 ...callbackStreamMode,
+             ]);
+             let checkpoint = submitOptions?.checkpoint ??
+                 (includeImplicitBranch
+                     ? branchContext.threadHead?.checkpoint
+                     : undefined) ??
+                 undefined;
+             // Avoid specifying a checkpoint if user explicitly set it to null
+             if (submitOptions?.checkpoint === null)
+                 checkpoint = undefined;
+             // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+             // @ts-expect-error
+             if (checkpoint != null)
+                 delete checkpoint.thread_id;
+             const streamResumable = submitOptions?.streamResumable ?? !!runMetadataStorage;
+             return client.runs.stream(usableThreadId, options.assistantId, {
+                 input: values,
+                 config: submitOptions?.config,
+                 context: submitOptions?.context,
+                 command: submitOptions?.command,
+                 interruptBefore: submitOptions?.interruptBefore,
+                 interruptAfter: submitOptions?.interruptAfter,
+                 metadata: submitOptions?.metadata,
+                 multitaskStrategy: submitOptions?.multitaskStrategy,
+                 onCompletion: submitOptions?.onCompletion,
+                 onDisconnect: submitOptions?.onDisconnect ??
+                     (streamResumable ? "continue" : "cancel"),
+                 signal,
+                 checkpoint,
+                 streamMode,
+                 streamSubgraphs: submitOptions?.streamSubgraphs,
+                 streamResumable,
+                 durability: submitOptions?.durability,
+                 onRunCreated(params) {
+                     callbackMeta = {
+                         run_id: params.run_id,
+                         thread_id: params.thread_id ?? usableThreadId,
+                     };
+                     if (runMetadataStorage) {
+                         rejoinKey = `lg:stream:${usableThreadId}`;
+                         runMetadataStorage.setItem(rejoinKey, callbackMeta.run_id);
+                     }
+                     options.onCreated?.(callbackMeta);
+                 },
+             });
+         }, {
+             getMessages,
+             setMessages,
+             initialValues: historyValues,
+             callbacks: options,
+             async onSuccess() {
+                 if (rejoinKey)
+                     runMetadataStorage?.removeItem(rejoinKey);
+                 const shouldRefetch =
+                     // We're expecting the whole thread state in onFinish
+                     options.onFinish != null ||
+                     // We're fetching history, thus we need the latest checkpoint
+                     // to ensure we're not accidentally submitting to a wrong branch
+                     includeImplicitBranch;
+                 if (shouldRefetch) {
+                     const newHistory = await history.mutate(usableThreadId);
+                     const lastHead = newHistory.at(0);
+                     if (lastHead) {
+                         // We now have the latest update from /history
+                         // Thus we can clear the local stream state
+                         options.onFinish?.(lastHead, callbackMeta);
+                         return null;
+                     }
+                 }
+                 return undefined;
+             },
+             onError(error) {
+                 options.onError?.(error, callbackMeta);
+             },
+             onFinish() {
+                 threadIdStreamingRef.current = null;
+             },
+         });
+     };
+     const joinStream = async (runId, lastEventId, joinOptions) => {
+         // eslint-disable-next-line no-param-reassign
+         lastEventId ??= "-1";
+         if (!threadId)
+             return;
+         const callbackMeta = {
+             thread_id: threadId,
+             run_id: runId,
+         };
+         await stream.start(async (signal) => {
+             threadIdStreamingRef.current = threadId;
+             return client.runs.joinStream(threadId, runId, {
+                 signal,
+                 lastEventId,
+                 streamMode: joinOptions?.streamMode,
+             });
+         }, {
+             getMessages,
+             setMessages,
+             initialValues: historyValues,
+             callbacks: options,
+             async onSuccess() {
+                 runMetadataStorage?.removeItem(`lg:stream:${threadId}`);
+                 const newHistory = await history.mutate(threadId);
+                 const lastHead = newHistory.at(0);
+                 if (lastHead)
+                     options.onFinish?.(lastHead, callbackMeta);
+             },
+             onError(error) {
+                 options.onError?.(error, callbackMeta);
+             },
+             onFinish() {
+                 threadIdStreamingRef.current = null;
+             },
+         });
+     };
+     const reconnectKey = (0, react_1.useMemo)(() => {
+         if (!runMetadataStorage || stream.isLoading)
+             return undefined;
+         if (typeof window === "undefined")
+             return undefined;
+         const runId = runMetadataStorage?.getItem(`lg:stream:${threadId}`);
+         if (!runId)
+             return undefined;
+         return { runId, threadId };
+     }, [runMetadataStorage, stream.isLoading, threadId]);
+     const shouldReconnect = !!runMetadataStorage;
+     const reconnectRef = (0, react_1.useRef)({ threadId, shouldReconnect });
+     const joinStreamRef = (0, react_1.useRef)(joinStream);
+     joinStreamRef.current = joinStream;
+     (0, react_1.useEffect)(() => {
+         // reset shouldReconnect when switching threads
+         if (reconnectRef.current.threadId !== threadId) {
+             reconnectRef.current = { threadId, shouldReconnect };
+         }
+     }, [threadId, shouldReconnect]);
+     (0, react_1.useEffect)(() => {
+         if (reconnectKey && reconnectRef.current.shouldReconnect) {
+             reconnectRef.current.shouldReconnect = false;
+             void joinStreamRef.current?.(reconnectKey.runId);
+         }
+     }, [reconnectKey]);
+     const error = stream.error ?? historyError ?? history.error;
+     const values = stream.values ?? historyValues;
+     return {
+         get values() {
+             trackStreamMode("values");
+             return values;
+         },
+         client,
+         assistantId: options.assistantId,
+         error,
+         isLoading: stream.isLoading,
+         stop,
+         submit,
+         joinStream,
+         branch,
+         setBranch,
+         get history() {
+             if (historyLimit === false) {
+                 throw new Error("`fetchStateHistory` must be set to `true` to use `history`");
+             }
+             return branchContext.flatHistory;
+         },
+         isThreadLoading: history.isLoading && history.data == null,
+         get experimental_branchTree() {
+             if (historyLimit === false) {
+                 throw new Error("`fetchStateHistory` must be set to `true` to use `experimental_branchTree`");
+             }
+             return branchContext.branchTree;
+         },
+         get interrupt() {
+             if (values != null &&
+                 "__interrupt__" in values &&
+                 Array.isArray(values.__interrupt__)) {
+                 const valueInterrupts = values.__interrupt__;
+                 if (valueInterrupts.length === 0)
+                     return { when: "breakpoint" };
+                 if (valueInterrupts.length === 1)
+                     return valueInterrupts[0];
+                 // TODO: fix the typing of interrupts if multiple interrupts are returned
+                 return valueInterrupts;
+             }
+             // If we're deferring to old interrupt detection logic, don't show the interrupt if the stream is loading
+             if (stream.isLoading)
+                 return undefined;
+             const interrupts = branchContext.threadHead?.tasks?.at(-1)?.interrupts;
+             if (interrupts == null || interrupts.length === 0) {
+                 // check if there's a next task present
+                 const next = branchContext.threadHead?.next ?? [];
+                 if (!next.length || error != null)
+                     return undefined;
+                 return { when: "breakpoint" };
+             }
+             // Return only the current interrupt
+             return interrupts.at(-1);
+         },
+         get messages() {
+             trackStreamMode("messages-tuple", "values");
+             return getMessages(values);
+         },
+         getMessagesMetadata(message, index) {
+             trackStreamMode("values");
+             const streamMetadata = messageManager.get(message.id)?.metadata;
+             const historyMetadata = messageMetadata?.find((m) => m.messageId === (message.id ?? index));
+             if (streamMetadata != null || historyMetadata != null) {
+                 return {
+                     ...historyMetadata,
+                     streamMetadata,
+                 };
+             }
+             return undefined;
+         },
+     };
+ }
@@ -0,0 +1,7 @@
+ import type { BagTemplate, UseStreamOptions, UseStream } from "./types.js";
+ export declare function useStreamLGP<StateType extends Record<string, unknown> = Record<string, unknown>, Bag extends {
+     ConfigurableType?: Record<string, unknown>;
+     InterruptType?: unknown;
+     CustomEventType?: unknown;
+     UpdateType?: unknown;
+ } = BagTemplate>(options: UseStreamOptions<StateType, Bag>): UseStream<StateType, Bag>;
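
For orientation, the sketch below shows how the new hook might be consumed from a React component. It is not part of the published diff: the import path and all option values are assumptions, and only the option and return names visible in the compiled code above (assistantId, apiUrl, messagesKey, fetchStateHistory, reconnectOnMount, onError, messages, submit, stop, isLoading) are taken from it.

// Hypothetical usage sketch (TSX), not part of the diff above.
// The entrypoint path is an assumption; check the package's exports map.
import { useStreamLGP } from "@langchain/langgraph-sdk/react";

export function Chat() {
  const stream = useStreamLGP({
    apiUrl: "http://localhost:2024",  // assumed deployment URL
    assistantId: "agent",             // assumed assistant/graph id
    messagesKey: "messages",          // default key read by getMessages/setMessages
    fetchStateHistory: { limit: 10 }, // matches the hook's default history page size
    reconnectOnMount: true,           // stores run ids in sessionStorage under `lg:stream:${threadId}`
    onError: (error) => console.error(error),
  });

  return (
    <div>
      {stream.messages.map((message, index) => (
        <div key={message.id ?? index}>{String(message.content)}</div>
      ))}
      <button
        disabled={stream.isLoading}
        onClick={() =>
          void stream.submit({ messages: [{ type: "human", content: "Hello" }] })
        }
      >
        Send
      </button>
      <button onClick={() => void stream.stop()}>Stop</button>
    </div>
  );
}

With reconnectOnMount enabled as in this sketch, an unmount during an active run leaves the run id in sessionStorage, and the reconnect effect in the compiled code above rejoins the stream via joinStream on the next mount.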