@langchain/react 0.3.4 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (98)
  1. package/README.md +48 -523
  2. package/dist/context.cjs +12 -30
  3. package/dist/context.cjs.map +1 -1
  4. package/dist/context.d.cts +22 -39
  5. package/dist/context.d.cts.map +1 -1
  6. package/dist/context.d.ts +22 -39
  7. package/dist/context.d.ts.map +1 -1
  8. package/dist/context.js +11 -29
  9. package/dist/context.js.map +1 -1
  10. package/dist/index.cjs +29 -30
  11. package/dist/index.d.cts +10 -7
  12. package/dist/index.d.cts.map +1 -1
  13. package/dist/index.d.ts +10 -7
  14. package/dist/index.d.ts.map +1 -1
  15. package/dist/index.js +10 -6
  16. package/dist/selectors.cjs +178 -0
  17. package/dist/selectors.cjs.map +1 -0
  18. package/dist/selectors.d.cts +183 -0
  19. package/dist/selectors.d.cts.map +1 -0
  20. package/dist/selectors.d.ts +183 -0
  21. package/dist/selectors.d.ts.map +1 -0
  22. package/dist/selectors.js +168 -0
  23. package/dist/selectors.js.map +1 -0
  24. package/dist/suspense-stream.cjs +34 -159
  25. package/dist/suspense-stream.cjs.map +1 -1
  26. package/dist/suspense-stream.d.cts +15 -71
  27. package/dist/suspense-stream.d.cts.map +1 -1
  28. package/dist/suspense-stream.d.ts +15 -71
  29. package/dist/suspense-stream.d.ts.map +1 -1
  30. package/dist/suspense-stream.js +35 -158
  31. package/dist/suspense-stream.js.map +1 -1
  32. package/dist/use-audio-player.cjs +679 -0
  33. package/dist/use-audio-player.cjs.map +1 -0
  34. package/dist/use-audio-player.d.cts +161 -0
  35. package/dist/use-audio-player.d.cts.map +1 -0
  36. package/dist/use-audio-player.d.ts +161 -0
  37. package/dist/use-audio-player.d.ts.map +1 -0
  38. package/dist/use-audio-player.js +679 -0
  39. package/dist/use-audio-player.js.map +1 -0
  40. package/dist/use-media-url.cjs +49 -0
  41. package/dist/use-media-url.cjs.map +1 -0
  42. package/dist/use-media-url.d.cts +28 -0
  43. package/dist/use-media-url.d.cts.map +1 -0
  44. package/dist/use-media-url.d.ts +28 -0
  45. package/dist/use-media-url.d.ts.map +1 -0
  46. package/dist/use-media-url.js +49 -0
  47. package/dist/use-media-url.js.map +1 -0
  48. package/dist/use-projection.cjs +41 -0
  49. package/dist/use-projection.cjs.map +1 -0
  50. package/dist/use-projection.d.cts +27 -0
  51. package/dist/use-projection.d.cts.map +1 -0
  52. package/dist/use-projection.d.ts +27 -0
  53. package/dist/use-projection.d.ts.map +1 -0
  54. package/dist/use-projection.js +41 -0
  55. package/dist/use-projection.js.map +1 -0
  56. package/dist/use-stream.cjs +185 -0
  57. package/dist/use-stream.cjs.map +1 -0
  58. package/dist/use-stream.d.cts +184 -0
  59. package/dist/use-stream.d.cts.map +1 -0
  60. package/dist/use-stream.d.ts +184 -0
  61. package/dist/use-stream.d.ts.map +1 -0
  62. package/dist/use-stream.js +183 -0
  63. package/dist/use-stream.js.map +1 -0
  64. package/dist/use-video-player.cjs +218 -0
  65. package/dist/use-video-player.cjs.map +1 -0
  66. package/dist/use-video-player.d.cts +65 -0
  67. package/dist/use-video-player.d.cts.map +1 -0
  68. package/dist/use-video-player.d.ts +65 -0
  69. package/dist/use-video-player.d.ts.map +1 -0
  70. package/dist/use-video-player.js +218 -0
  71. package/dist/use-video-player.js.map +1 -0
  72. package/package.json +9 -8
  73. package/dist/stream.cjs +0 -18
  74. package/dist/stream.cjs.map +0 -1
  75. package/dist/stream.custom.cjs +0 -209
  76. package/dist/stream.custom.cjs.map +0 -1
  77. package/dist/stream.custom.d.cts +0 -3
  78. package/dist/stream.custom.d.ts +0 -3
  79. package/dist/stream.custom.js +0 -209
  80. package/dist/stream.custom.js.map +0 -1
  81. package/dist/stream.d.cts +0 -174
  82. package/dist/stream.d.cts.map +0 -1
  83. package/dist/stream.d.ts +0 -174
  84. package/dist/stream.d.ts.map +0 -1
  85. package/dist/stream.js +0 -18
  86. package/dist/stream.js.map +0 -1
  87. package/dist/stream.lgp.cjs +0 -671
  88. package/dist/stream.lgp.cjs.map +0 -1
  89. package/dist/stream.lgp.js +0 -671
  90. package/dist/stream.lgp.js.map +0 -1
  91. package/dist/thread.cjs +0 -18
  92. package/dist/thread.cjs.map +0 -1
  93. package/dist/thread.js +0 -18
  94. package/dist/thread.js.map +0 -1
  95. package/dist/types.d.cts +0 -109
  96. package/dist/types.d.cts.map +0 -1
  97. package/dist/types.d.ts +0 -109
  98. package/dist/types.d.ts.map +0 -1
@@ -1,671 +0,0 @@
1
- "use client";
2
- const require_thread = require("./thread.cjs");
3
- let react = require("react");
4
- let _langchain_langgraph_sdk_client = require("@langchain/langgraph-sdk/client");
5
- let _langchain_langgraph_sdk_ui = require("@langchain/langgraph-sdk/ui");
6
- let _langchain_langgraph_sdk_utils = require("@langchain/langgraph-sdk/utils");
7
- let _langchain_langgraph_sdk = require("@langchain/langgraph-sdk");
8
- //#region src/stream.lgp.tsx
9
- function getFetchHistoryKey(client, threadId, limit) {
10
- return [
11
- (0, _langchain_langgraph_sdk_client.getClientConfigHash)(client),
12
- threadId,
13
- limit
14
- ].join(":");
15
- }
16
- function fetchHistory(client, threadId, options) {
17
- if (options?.limit === false) return client.threads.getState(threadId).then((state) => {
18
- if (state.checkpoint == null) return [];
19
- return [state];
20
- });
21
- const limit = typeof options?.limit === "number" ? options.limit : 10;
22
- return client.threads.getHistory(threadId, { limit });
23
- }
24
- function useThreadHistory(client, threadId, limit, options) {
25
- const key = getFetchHistoryKey(client, threadId, limit);
26
- const [state, setState] = (0, react.useState)(() => ({
27
- key: void 0,
28
- data: void 0,
29
- error: void 0,
30
- isLoading: threadId != null
31
- }));
32
- const clientRef = (0, react.useRef)(client);
33
- clientRef.current = client;
34
- const onErrorRef = (0, react.useRef)(options?.onError);
35
- onErrorRef.current = options?.onError;
36
- const fetcher = (0, react.useCallback)((threadId, limit) => {
37
- if (options.passthrough) return Promise.resolve([]);
38
- const client = clientRef.current;
39
- const key = getFetchHistoryKey(client, threadId, limit);
40
- if (threadId != null) {
41
- setState((state) => {
42
- if (state.key === key) return {
43
- ...state,
44
- isLoading: true
45
- };
46
- return {
47
- key,
48
- data: void 0,
49
- error: void 0,
50
- isLoading: true
51
- };
52
- });
53
- return fetchHistory(client, threadId, { limit }).then((data) => {
54
- setState((state) => {
55
- if (state.key !== key) return state;
56
- return {
57
- key,
58
- data,
59
- error: void 0,
60
- isLoading: false
61
- };
62
- });
63
- return data;
64
- }, (error) => {
65
- setState((state) => {
66
- if (state.key !== key) return state;
67
- return {
68
- key,
69
- data: state.data,
70
- error,
71
- isLoading: false
72
- };
73
- });
74
- onErrorRef.current?.(error);
75
- return Promise.reject(error);
76
- });
77
- }
78
- setState({
79
- key,
80
- data: void 0,
81
- error: void 0,
82
- isLoading: false
83
- });
84
- return Promise.resolve([]);
85
- }, [options.passthrough]);
86
- (0, react.useEffect)(() => {
87
- if (options.submittingRef.current != null && options.submittingRef.current === threadId) return;
88
- fetcher(threadId, limit);
89
- }, [fetcher, key]);
90
- return {
91
- data: state.data,
92
- error: state.error,
93
- isLoading: state.isLoading,
94
- mutate: (mutateId) => fetcher(mutateId ?? threadId, limit)
95
- };
96
- }
97
- function useTrackStreamMode() {
98
- const trackStreamModeRef = (0, react.useRef)([]);
99
- return [trackStreamModeRef, (0, react.useCallback)((...mode) => {
100
- const ref = trackStreamModeRef.current;
101
- for (const m of mode) if (!ref.includes(m)) ref.push(m);
102
- }, [])];
103
- }
104
- function useCallbackStreamMode(options) {
105
- const hasUpdateListener = options.onUpdateEvent != null;
106
- const hasCustomListener = options.onCustomEvent != null;
107
- const hasLangChainListener = options.onLangChainEvent != null;
108
- const hasDebugListener = options.onDebugEvent != null;
109
- const hasCheckpointListener = options.onCheckpointEvent != null;
110
- const hasTaskListener = options.onTaskEvent != null;
111
- return (0, react.useMemo)(() => {
112
- const modes = [];
113
- if (hasUpdateListener) modes.push("updates");
114
- if (hasCustomListener) modes.push("custom");
115
- if (hasLangChainListener) modes.push("events");
116
- if (hasDebugListener) modes.push("debug");
117
- if (hasCheckpointListener) modes.push("checkpoints");
118
- if (hasTaskListener) modes.push("tasks");
119
- return modes;
120
- }, [
121
- hasUpdateListener,
122
- hasCustomListener,
123
- hasLangChainListener,
124
- hasDebugListener,
125
- hasCheckpointListener,
126
- hasTaskListener
127
- ]);
128
- }
129
- function useStreamLGP(options) {
130
- const reconnectOnMountRef = (0, react.useRef)(options.reconnectOnMount);
131
- const runMetadataStorage = (0, react.useMemo)(() => {
132
- if (typeof window === "undefined") return null;
133
- const storage = reconnectOnMountRef.current;
134
- if (storage === true) return window.sessionStorage;
135
- if (typeof storage === "function") return storage();
136
- return null;
137
- }, []);
138
- const client = (0, react.useMemo)(() => options.client ?? new _langchain_langgraph_sdk_client.Client({
139
- apiUrl: options.apiUrl,
140
- apiKey: options.apiKey,
141
- callerOptions: options.callerOptions,
142
- defaultHeaders: options.defaultHeaders
143
- }), [
144
- options.client,
145
- options.apiKey,
146
- options.apiUrl,
147
- options.callerOptions,
148
- options.defaultHeaders
149
- ]);
150
- const [messageManager] = (0, react.useState)(() => new _langchain_langgraph_sdk_ui.MessageTupleManager());
151
- const [stream] = (0, react.useState)(() => new _langchain_langgraph_sdk_ui.StreamManager(messageManager, {
152
- throttle: options.throttle ?? false,
153
- subagentToolNames: options.subagentToolNames,
154
- filterSubagentMessages: options.filterSubagentMessages,
155
- toMessage: options.toMessage ?? _langchain_langgraph_sdk_ui.toMessageClass
156
- }));
157
- const [pendingRuns] = (0, react.useState)(() => new _langchain_langgraph_sdk_ui.PendingRunsTracker());
158
- (0, react.useSyncExternalStore)(pendingRuns.subscribe, pendingRuns.getSnapshot, pendingRuns.getSnapshot);
159
- const [trackStreamModeRef, trackStreamMode] = useTrackStreamMode();
160
- const callbackStreamMode = useCallbackStreamMode(options);
161
- const getMessages = (value) => {
162
- const messagesKey = options.messagesKey ?? "messages";
163
- return Array.isArray(value[messagesKey]) ? value[messagesKey] : [];
164
- };
165
- const setMessages = (current, messages) => {
166
- const messagesKey = options.messagesKey ?? "messages";
167
- return {
168
- ...current,
169
- [messagesKey]: messages
170
- };
171
- };
172
- (0, react.useSyncExternalStore)(stream.subscribe, stream.getSnapshot, stream.getSnapshot);
173
- const [threadId, onThreadId] = require_thread.useControllableThreadId(options);
174
- const threadIdRef = (0, react.useRef)(threadId);
175
- const threadIdStreamingRef = (0, react.useRef)(null);
176
- const threadIdPromiseRef = (0, react.useRef)(null);
177
- (0, react.useEffect)(() => {
178
- if (threadIdRef.current !== threadId) {
179
- threadIdRef.current = threadId;
180
- stream.clear();
181
- }
182
- }, [threadId, stream]);
183
- const switchThread = (0, react.useCallback)((newThreadId) => {
184
- if (newThreadId !== threadIdRef.current) {
185
- const prevThreadId = threadIdRef.current;
186
- threadIdRef.current = newThreadId;
187
- stream.clear();
188
- const removed = pendingRuns.removeAll();
189
- if (prevThreadId && removed.length > 0) Promise.all(removed.map((e) => client.runs.cancel(prevThreadId, e.id)));
190
- onThreadId(newThreadId);
191
- }
192
- }, [
193
- stream,
194
- pendingRuns,
195
- onThreadId,
196
- client
197
- ]);
198
- const historyLimit = typeof options.fetchStateHistory === "object" && options.fetchStateHistory != null ? options.fetchStateHistory.limit ?? false : options.fetchStateHistory ?? false;
199
- const builtInHistory = useThreadHistory(client, threadId, historyLimit, {
200
- passthrough: options.thread != null,
201
- submittingRef: threadIdStreamingRef,
202
- onError: options.onError
203
- });
204
- const history = options.thread ?? builtInHistory;
205
- const [branch, setBranch] = (0, react.useState)("");
206
- const branchContext = (0, _langchain_langgraph_sdk_ui.getBranchContext)(branch, history.data ?? void 0);
207
- const [toolProgressMap, setToolProgressMap] = (0, react.useState)(/* @__PURE__ */ new Map());
208
- const handleToolEvent = (0, react.useCallback)((data) => {
209
- setToolProgressMap((prev) => {
210
- const next = new Map(prev);
211
- const key = data.toolCallId ?? data.name;
212
- const existing = next.get(key);
213
- switch (data.event) {
214
- case "on_tool_start":
215
- next.set(key, {
216
- toolCallId: data.toolCallId,
217
- name: data.name,
218
- state: "starting",
219
- input: data.input
220
- });
221
- break;
222
- case "on_tool_event":
223
- if (existing) next.set(key, {
224
- ...existing,
225
- state: "running",
226
- data: data.data
227
- });
228
- break;
229
- case "on_tool_end":
230
- if (existing) next.set(key, {
231
- ...existing,
232
- state: "completed",
233
- result: data.output
234
- });
235
- break;
236
- case "on_tool_error":
237
- if (existing) next.set(key, {
238
- ...existing,
239
- state: "error",
240
- error: data.error
241
- });
242
- break;
243
- default: throw new Error(`Unexpected tool event: ${data.event}`);
244
- }
245
- return next;
246
- });
247
- }, []);
248
- const historyValues = branchContext.threadHead?.values ?? options.initialValues ?? {};
249
- const historyMessages = getMessages(historyValues);
250
- const shouldReconstructSubagents = options.filterSubagentMessages && !stream.isLoading && !history.isLoading && historyMessages.length > 0;
251
- (0, react.useEffect)(() => {
252
- if (shouldReconstructSubagents) {
253
- stream.reconstructSubagents(historyMessages, { skipIfPopulated: true });
254
- if (historyLimit !== false && threadId) {
255
- const controller = new AbortController();
256
- stream.fetchSubagentHistory(client.threads, threadId, {
257
- messagesKey: options.messagesKey ?? "messages",
258
- historyLimit: typeof historyLimit === "number" ? historyLimit : void 0,
259
- signal: controller.signal
260
- });
261
- return () => controller.abort();
262
- }
263
- }
264
- }, [shouldReconstructSubagents, historyMessages.length]);
265
- const historyError = (() => {
266
- const error = branchContext.threadHead?.tasks?.at(-1)?.error;
267
- if (error == null) return void 0;
268
- try {
269
- const parsed = JSON.parse(error);
270
- if (_langchain_langgraph_sdk_ui.StreamError.isStructuredError(parsed)) return new _langchain_langgraph_sdk_ui.StreamError(parsed);
271
- return parsed;
272
- } catch {}
273
- return error;
274
- })();
275
- const messageMetadata = (0, _langchain_langgraph_sdk_ui.getMessagesMetadataMap)({
276
- initialValues: options.initialValues,
277
- history: history.data,
278
- getMessages,
279
- branchContext
280
- });
281
- const stop = () => stream.stop(historyValues, { onStop: (args) => {
282
- if (runMetadataStorage && threadId) {
283
- const runId = runMetadataStorage.getItem(`lg:stream:${threadId}`);
284
- if (runId) client.runs.cancel(threadId, runId);
285
- runMetadataStorage.removeItem(`lg:stream:${threadId}`);
286
- }
287
- options.onStop?.(args);
288
- } });
289
- const submitDirect = async (values, submitOptions) => {
290
- setToolProgressMap(/* @__PURE__ */ new Map());
291
- const checkpointId = submitOptions?.checkpoint?.checkpoint_id;
292
- setBranch(checkpointId != null ? branchContext.branchByCheckpoint[checkpointId]?.branch ?? "" : "");
293
- const includeImplicitBranch = historyLimit === true || typeof historyLimit === "number";
294
- const shouldRefetch = includeImplicitBranch || (0, _langchain_langgraph_sdk_ui.onFinishRequiresThreadState)(options.onFinish);
295
- let callbackMeta;
296
- let rejoinKey;
297
- let usableThreadId = threadId;
298
- const shouldAbortPrevious = (submitOptions?.multitaskStrategy === "interrupt" || submitOptions?.multitaskStrategy === "rollback") && stream.isLoading;
299
- await stream.start(async (signal) => {
300
- stream.setStreamValues((values) => {
301
- const prev = {
302
- ...historyValues,
303
- ...values
304
- };
305
- if (submitOptions?.optimisticValues != null) return {
306
- ...prev,
307
- ...typeof submitOptions.optimisticValues === "function" ? submitOptions.optimisticValues(prev) : submitOptions.optimisticValues
308
- };
309
- return { ...prev };
310
- });
311
- if (!usableThreadId) {
312
- const threadPromise = client.threads.create({
313
- threadId: submitOptions?.threadId,
314
- metadata: submitOptions?.metadata,
315
- signal
316
- });
317
- threadIdPromiseRef.current = threadPromise.then((t) => t.thread_id);
318
- usableThreadId = (await threadPromise).thread_id;
319
- threadIdRef.current = usableThreadId;
320
- threadIdStreamingRef.current = usableThreadId;
321
- onThreadId(usableThreadId);
322
- }
323
- if (!usableThreadId) throw new Error("Failed to obtain valid thread ID.");
324
- threadIdStreamingRef.current = usableThreadId;
325
- const streamMode = (0, _langchain_langgraph_sdk_ui.unique)([
326
- ...submitOptions?.streamMode ?? [],
327
- ...trackStreamModeRef.current,
328
- ...callbackStreamMode
329
- ]);
330
- let checkpoint = submitOptions?.checkpoint ?? (includeImplicitBranch ? branchContext.threadHead?.checkpoint : void 0) ?? void 0;
331
- if (submitOptions?.checkpoint === null) checkpoint = void 0;
332
- if (checkpoint != null) delete checkpoint.thread_id;
333
- const streamResumable = submitOptions?.streamResumable ?? !!runMetadataStorage;
334
- return client.runs.stream(usableThreadId, options.assistantId, {
335
- input: values,
336
- config: submitOptions?.config,
337
- context: submitOptions?.context,
338
- command: submitOptions?.command,
339
- interruptBefore: submitOptions?.interruptBefore,
340
- interruptAfter: submitOptions?.interruptAfter,
341
- metadata: submitOptions?.metadata,
342
- multitaskStrategy: submitOptions?.multitaskStrategy,
343
- onCompletion: submitOptions?.onCompletion,
344
- onDisconnect: submitOptions?.onDisconnect ?? (streamResumable ? "continue" : "cancel"),
345
- signal,
346
- checkpoint,
347
- streamMode,
348
- streamSubgraphs: submitOptions?.streamSubgraphs,
349
- streamResumable,
350
- durability: submitOptions?.durability,
351
- onRunCreated(params) {
352
- callbackMeta = {
353
- run_id: params.run_id,
354
- thread_id: params.thread_id ?? usableThreadId
355
- };
356
- if (runMetadataStorage) {
357
- rejoinKey = `lg:stream:${usableThreadId}`;
358
- runMetadataStorage.setItem(rejoinKey, callbackMeta.run_id);
359
- }
360
- options.onCreated?.(callbackMeta);
361
- }
362
- });
363
- }, {
364
- getMessages,
365
- setMessages,
366
- initialValues: historyValues,
367
- callbacks: {
368
- ...options,
369
- onToolEvent: (data, opts) => {
370
- handleToolEvent(data);
371
- options.onToolEvent?.(data, opts);
372
- }
373
- },
374
- async onSuccess() {
375
- if (rejoinKey) runMetadataStorage?.removeItem(rejoinKey);
376
- if (shouldRefetch) {
377
- const lastHead = (await history.mutate(usableThreadId))?.at(0);
378
- if (lastHead) {
379
- options.onFinish?.(lastHead, callbackMeta);
380
- return null;
381
- }
382
- } else if (options.onFinish != null && !(0, _langchain_langgraph_sdk_ui.onFinishRequiresThreadState)(options.onFinish)) options.onFinish(void 0, callbackMeta);
383
- },
384
- onError(error) {
385
- options.onError?.(error, callbackMeta);
386
- submitOptions?.onError?.(error, callbackMeta);
387
- },
388
- onFinish() {
389
- threadIdStreamingRef.current = null;
390
- }
391
- }, { abortPrevious: shouldAbortPrevious });
392
- };
393
- const submitDirectRef = (0, react.useRef)(submitDirect);
394
- submitDirectRef.current = submitDirect;
395
- const submittingRef = (0, react.useRef)(false);
396
- const drainQueueRef = (0, react.useRef)(() => {});
397
- const submit = async (values, submitOptions) => {
398
- if (stream.isLoading || submittingRef.current) {
399
- if (submitOptions?.multitaskStrategy === "interrupt" || submitOptions?.multitaskStrategy === "rollback") {
400
- submittingRef.current = true;
401
- try {
402
- await submitDirect(values, submitOptions);
403
- } finally {
404
- submittingRef.current = false;
405
- }
406
- return;
407
- }
408
- let usableThreadId = threadIdRef.current ?? threadId;
409
- if (!usableThreadId && threadIdPromiseRef.current) usableThreadId = await threadIdPromiseRef.current;
410
- if (usableThreadId) {
411
- try {
412
- const run = await client.runs.create(usableThreadId, options.assistantId, {
413
- input: values,
414
- config: submitOptions?.config,
415
- context: submitOptions?.context,
416
- command: submitOptions?.command,
417
- interruptBefore: submitOptions?.interruptBefore,
418
- interruptAfter: submitOptions?.interruptAfter,
419
- metadata: submitOptions?.metadata,
420
- multitaskStrategy: "enqueue",
421
- streamResumable: true,
422
- streamSubgraphs: submitOptions?.streamSubgraphs,
423
- durability: submitOptions?.durability
424
- });
425
- pendingRuns.add({
426
- id: run.run_id,
427
- values,
428
- options: submitOptions,
429
- createdAt: new Date(run.created_at)
430
- });
431
- } catch (error) {
432
- options.onError?.(error, void 0);
433
- submitOptions?.onError?.(error, void 0);
434
- }
435
- return;
436
- }
437
- }
438
- submittingRef.current = true;
439
- try {
440
- await submitDirect(values, submitOptions);
441
- } finally {
442
- submittingRef.current = false;
443
- drainQueueRef.current();
444
- }
445
- };
446
- const joinStream = async (runId, lastEventId, joinOptions) => {
447
- setToolProgressMap(/* @__PURE__ */ new Map());
448
- lastEventId ??= "-1";
449
- if (!threadId) return;
450
- const callbackMeta = {
451
- thread_id: threadId,
452
- run_id: runId
453
- };
454
- const shouldRefetchJoin = historyLimit === true || typeof historyLimit === "number" || (0, _langchain_langgraph_sdk_ui.onFinishRequiresThreadState)(options.onFinish);
455
- await stream.start(async (signal) => {
456
- threadIdStreamingRef.current = threadId;
457
- const stream = client.runs.joinStream(threadId, runId, {
458
- signal,
459
- lastEventId,
460
- streamMode: joinOptions?.streamMode
461
- });
462
- return joinOptions?.filter != null ? (0, _langchain_langgraph_sdk_ui.filterStream)(stream, joinOptions.filter) : stream;
463
- }, {
464
- getMessages,
465
- setMessages,
466
- initialValues: historyValues,
467
- callbacks: {
468
- ...options,
469
- onToolEvent: (data, opts) => {
470
- handleToolEvent(data);
471
- options.onToolEvent?.(data, opts);
472
- }
473
- },
474
- async onSuccess() {
475
- runMetadataStorage?.removeItem(`lg:stream:${threadId}`);
476
- if (!shouldRefetchJoin) {
477
- if (options.onFinish != null && !(0, _langchain_langgraph_sdk_ui.onFinishRequiresThreadState)(options.onFinish)) options.onFinish(void 0, callbackMeta);
478
- return;
479
- }
480
- const lastHead = (await history.mutate(threadId))?.at(0);
481
- if (lastHead) options.onFinish?.(lastHead, callbackMeta);
482
- },
483
- onError(error) {
484
- options.onError?.(error, callbackMeta);
485
- },
486
- onFinish() {
487
- threadIdStreamingRef.current = null;
488
- }
489
- });
490
- };
491
- const joinStreamRef = (0, react.useRef)(joinStream);
492
- joinStreamRef.current = joinStream;
493
- const drainQueue = () => {
494
- if (!stream.isLoading && !submittingRef.current && pendingRuns.size > 0) {
495
- const next = pendingRuns.shift();
496
- if (next) {
497
- submittingRef.current = true;
498
- joinStreamRef.current(next.id).finally(() => {
499
- submittingRef.current = false;
500
- drainQueue();
501
- });
502
- }
503
- }
504
- };
505
- drainQueueRef.current = drainQueue;
506
- (0, react.useEffect)(() => {
507
- drainQueueRef.current();
508
- }, [stream.isLoading, pendingRuns.size]);
509
- const reconnectKey = (0, react.useMemo)(() => {
510
- if (!runMetadataStorage || stream.isLoading) return void 0;
511
- if (typeof window === "undefined") return void 0;
512
- const runId = runMetadataStorage?.getItem(`lg:stream:${threadId}`);
513
- if (!runId) return void 0;
514
- return {
515
- runId,
516
- threadId
517
- };
518
- }, [
519
- runMetadataStorage,
520
- stream.isLoading,
521
- threadId
522
- ]);
523
- const shouldReconnect = !!runMetadataStorage;
524
- const reconnectRef = (0, react.useRef)({
525
- threadId,
526
- shouldReconnect
527
- });
528
- (0, react.useEffect)(() => {
529
- if (reconnectRef.current.threadId !== threadId) reconnectRef.current = {
530
- threadId,
531
- shouldReconnect
532
- };
533
- }, [threadId, shouldReconnect]);
534
- (0, react.useEffect)(() => {
535
- if (reconnectKey && reconnectRef.current.shouldReconnect) {
536
- reconnectRef.current.shouldReconnect = false;
537
- joinStreamRef.current?.(reconnectKey.runId);
538
- }
539
- }, [reconnectKey]);
540
- const error = stream.error ?? historyError ?? history.error;
541
- const values = stream.values ?? historyValues;
542
- const handledToolsRef = (0, react.useRef)(/* @__PURE__ */ new Set());
543
- (0, react.useEffect)(() => {
544
- handledToolsRef.current.clear();
545
- }, [threadId]);
546
- (0, react.useEffect)(() => {
547
- (0, _langchain_langgraph_sdk.flushPendingHeadlessToolInterrupts)(values, options.tools, handledToolsRef.current, {
548
- onTool: options.onTool,
549
- defer: (run) => {
550
- Promise.resolve().then(run);
551
- },
552
- resumeSubmit: (command) => submit(null, {
553
- multitaskStrategy: "interrupt",
554
- command
555
- })
556
- });
557
- }, [
558
- options.onTool,
559
- options.tools,
560
- submit,
561
- values
562
- ]);
563
- return {
564
- get values() {
565
- trackStreamMode("values");
566
- return values;
567
- },
568
- client,
569
- assistantId: options.assistantId,
570
- error,
571
- isLoading: stream.isLoading,
572
- stop,
573
- submit,
574
- switchThread,
575
- joinStream,
576
- branch,
577
- setBranch,
578
- get history() {
579
- if (historyLimit === false) throw new Error("`fetchStateHistory` must be set to `true` to use `history`");
580
- return (0, _langchain_langgraph_sdk_ui.ensureHistoryMessageInstances)(branchContext.flatHistory, options.messagesKey ?? "messages");
581
- },
582
- isThreadLoading: history.isLoading && history.data == null,
583
- get experimental_branchTree() {
584
- if (historyLimit === false) throw new Error("`fetchStateHistory` must be set to `true` to use `experimental_branchTree`");
585
- return branchContext.branchTree;
586
- },
587
- get interrupts() {
588
- if (values != null && "__interrupt__" in values && Array.isArray(values.__interrupt__)) return (0, _langchain_langgraph_sdk_ui.userFacingInterruptsFromValuesArray)(values.__interrupt__);
589
- if (stream.isLoading) return [];
590
- const taskInterrupts = (0, _langchain_langgraph_sdk_ui.userFacingInterruptsFromThreadTasks)((branchContext.threadHead?.tasks ?? []).flatMap((t) => t.interrupts ?? []));
591
- if (taskInterrupts != null) return taskInterrupts;
592
- if (!(branchContext.threadHead?.next ?? []).length || error != null) return [];
593
- return [{ when: "breakpoint" }];
594
- },
595
- get interrupt() {
596
- return (0, _langchain_langgraph_sdk_ui.extractInterrupts)(values, {
597
- error,
598
- isLoading: stream.isLoading,
599
- threadState: branchContext.threadHead
600
- });
601
- },
602
- get messages() {
603
- trackStreamMode("messages-tuple", "values");
604
- return (0, _langchain_langgraph_sdk_ui.ensureMessageInstances)(getMessages(values));
605
- },
606
- get toolCalls() {
607
- trackStreamMode("messages-tuple", "values");
608
- return (0, _langchain_langgraph_sdk_utils.getToolCallsWithResults)(getMessages(values));
609
- },
610
- get toolProgress() {
611
- trackStreamMode("tools");
612
- return Array.from(toolProgressMap.values());
613
- },
614
- getToolCalls(message) {
615
- trackStreamMode("messages-tuple", "values");
616
- return (0, _langchain_langgraph_sdk_utils.getToolCallsWithResults)(getMessages(values)).filter((tc) => tc.aiMessage.id === message.id);
617
- },
618
- getMessagesMetadata(message, index) {
619
- trackStreamMode("values");
620
- const streamMetadata = messageManager.get(message.id)?.metadata;
621
- const historyMetadata = messageMetadata?.find((m) => m.messageId === (message.id ?? index));
622
- if (streamMetadata != null || historyMetadata != null) return {
623
- ...historyMetadata,
624
- streamMetadata
625
- };
626
- },
627
- get subagents() {
628
- trackStreamMode("updates", "messages-tuple");
629
- return stream.getSubagents();
630
- },
631
- get activeSubagents() {
632
- trackStreamMode("updates", "messages-tuple");
633
- return stream.getActiveSubagents();
634
- },
635
- getSubagent(toolCallId) {
636
- trackStreamMode("updates", "messages-tuple");
637
- return stream.getSubagent(toolCallId);
638
- },
639
- getSubagentsByType(type) {
640
- trackStreamMode("updates", "messages-tuple");
641
- return stream.getSubagentsByType(type);
642
- },
643
- getSubagentsByMessage(messageId) {
644
- trackStreamMode("updates", "messages-tuple");
645
- return stream.getSubagentsByMessage(messageId);
646
- },
647
- queue: {
648
- get entries() {
649
- return pendingRuns.entries;
650
- },
651
- get size() {
652
- return pendingRuns.size;
653
- },
654
- async cancel(id) {
655
- const usableThreadId = threadIdRef.current ?? threadId;
656
- const removed = pendingRuns.remove(id);
657
- if (removed && usableThreadId) await client.runs.cancel(usableThreadId, id);
658
- return removed;
659
- },
660
- async clear() {
661
- const usableThreadId = threadIdRef.current ?? threadId;
662
- const removed = pendingRuns.removeAll();
663
- if (usableThreadId && removed.length > 0) await Promise.all(removed.map((e) => client.runs.cancel(usableThreadId, e.id)));
664
- }
665
- }
666
- };
667
- }
668
- //#endregion
669
- exports.useStreamLGP = useStreamLGP;
670
-
671
- //# sourceMappingURL=stream.lgp.cjs.map