@langchain/langgraph-sdk 1.7.4 → 1.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/react/stream.custom.cjs +21 -1
- package/dist/react/stream.custom.cjs.map +1 -1
- package/dist/react/stream.custom.js +21 -1
- package/dist/react/stream.custom.js.map +1 -1
- package/dist/react/stream.lgp.cjs +11 -1
- package/dist/react/stream.lgp.cjs.map +1 -1
- package/dist/react/stream.lgp.js +11 -1
- package/dist/react/stream.lgp.js.map +1 -1
- package/dist/ui/index.cjs +4 -0
- package/dist/ui/index.d.cts +3 -1
- package/dist/ui/index.d.ts +3 -1
- package/dist/ui/index.js +3 -1
- package/dist/ui/manager.cjs +181 -0
- package/dist/ui/manager.cjs.map +1 -1
- package/dist/ui/manager.d.cts +41 -0
- package/dist/ui/manager.d.cts.map +1 -1
- package/dist/ui/manager.d.ts +41 -0
- package/dist/ui/manager.d.ts.map +1 -1
- package/dist/ui/manager.js +181 -0
- package/dist/ui/manager.js.map +1 -1
- package/dist/ui/orchestrator-custom.cjs +372 -0
- package/dist/ui/orchestrator-custom.cjs.map +1 -0
- package/dist/ui/orchestrator-custom.d.cts +185 -0
- package/dist/ui/orchestrator-custom.d.cts.map +1 -0
- package/dist/ui/orchestrator-custom.d.ts +185 -0
- package/dist/ui/orchestrator-custom.d.ts.map +1 -0
- package/dist/ui/orchestrator-custom.js +372 -0
- package/dist/ui/orchestrator-custom.js.map +1 -0
- package/dist/ui/orchestrator.cjs +866 -0
- package/dist/ui/orchestrator.cjs.map +1 -0
- package/dist/ui/orchestrator.d.cts +366 -0
- package/dist/ui/orchestrator.d.cts.map +1 -0
- package/dist/ui/orchestrator.d.ts +366 -0
- package/dist/ui/orchestrator.d.ts.map +1 -0
- package/dist/ui/orchestrator.js +866 -0
- package/dist/ui/orchestrator.js.map +1 -0
- package/dist/ui/subagents.cjs +24 -1
- package/dist/ui/subagents.cjs.map +1 -1
- package/dist/ui/subagents.d.cts +13 -0
- package/dist/ui/subagents.d.cts.map +1 -1
- package/dist/ui/subagents.d.ts +13 -0
- package/dist/ui/subagents.d.ts.map +1 -1
- package/dist/ui/subagents.js +24 -1
- package/dist/ui/subagents.js.map +1 -1
- package/dist/ui/types.d.cts +3 -2
- package/dist/ui/types.d.cts.map +1 -1
- package/dist/ui/types.d.ts +3 -2
- package/dist/ui/types.d.ts.map +1 -1
- package/package.json +2 -6
|
@@ -0,0 +1,866 @@
|
|
|
1
|
+
import { StreamError } from "./errors.js";
|
|
2
|
+
import { MessageTupleManager, ensureHistoryMessageInstances, ensureMessageInstances, toMessageClass } from "./messages.js";
|
|
3
|
+
import { getToolCallsWithResults } from "../utils/tools.js";
|
|
4
|
+
import { StreamManager } from "./manager.js";
|
|
5
|
+
import { filterStream, unique } from "./utils.js";
|
|
6
|
+
import { getBranchContext, getMessagesMetadataMap } from "./branching.js";
|
|
7
|
+
import { extractInterrupts } from "./interrupts.js";
|
|
8
|
+
import { PendingRunsTracker } from "./queue.js";
|
|
9
|
+
//#region src/ui/orchestrator.ts
|
|
10
|
+
/**
 * Load thread state history from the platform.
 *
 * When `options.limit` is `false`, only the latest state is fetched via
 * `threads.getState`; it is returned as a single-element array, or an empty
 * array when that state carries no checkpoint. Otherwise the full history is
 * requested via `threads.getHistory`, capped at `options.limit` when it is a
 * number, or at 10 entries by default.
 *
 * @param client - Platform client exposing `threads.getState` / `threads.getHistory`.
 * @param threadId - The ID of the thread to fetch the history of.
 * @param options - Optional `{ limit }` configuration.
 * @returns A promise resolving to an array of thread states.
 */
async function fetchHistory(client, threadId, options) {
	if (options?.limit === false) {
		const state = await client.threads.getState(threadId);
		return state.checkpoint == null ? [] : [state];
	}
	const limit = typeof options?.limit === "number" ? options.limit : 10;
	return client.threads.getHistory(threadId, { limit });
}
|
|
25
|
+
/**
 * Determine where run metadata should be persisted for reconnect support.
 *
 * Resolution order:
 * - outside a browser environment (no `globalThis.window`), no storage exists;
 * - `reconnectOnMount === true` selects `window.sessionStorage`;
 * - a function value is treated as a factory and invoked to obtain storage;
 * - any other value disables persistence.
 *
 * @param reconnectOnMount - The `reconnectOnMount` option value.
 * @returns The resolved storage, or `null` when persistence is disabled.
 */
function resolveRunMetadataStorage(reconnectOnMount) {
	if (typeof globalThis.window === "undefined") return null;
	switch (typeof reconnectOnMount) {
		case "boolean":
			return reconnectOnMount ? globalThis.window.sessionStorage : null;
		case "function":
			return reconnectOnMount();
		default:
			return null;
	}
}
|
|
36
|
+
/**
 * Derive the set of stream modes implied by the configured event callbacks.
 *
 * Each known callback option maps to one stream mode; a mode is included
 * exactly when its callback is present and truthy on `options`. The result
 * preserves a fixed order: updates, custom, checkpoints, tasks, debug, events.
 *
 * @param options - The stream options carrying optional event callbacks.
 * @returns The stream modes required to serve the configured callbacks.
 */
function resolveCallbackStreamModes(options) {
	const callbackToMode = [
		["onUpdateEvent", "updates"],
		["onCustomEvent", "custom"],
		["onCheckpointEvent", "checkpoints"],
		["onTaskEvent", "tasks"],
		["onDebugEvent", "debug"],
		["onLangChainEvent", "events"]
	];
	return callbackToMode
		.filter(([callbackName]) => options[callbackName])
		.map(([, mode]) => mode);
}
|
|
51
|
+
/**
|
|
52
|
+
* Framework-agnostic orchestrator for LangGraph Platform streams.
|
|
53
|
+
*
|
|
54
|
+
* Encapsulates all business logic shared across React, Vue, Svelte, and Angular:
|
|
55
|
+
* thread management, history fetching, stream lifecycle, queue management,
|
|
56
|
+
* branching, subagent management, and auto-reconnect.
|
|
57
|
+
*
|
|
58
|
+
* Framework adapters subscribe to state changes via {@link subscribe} and
|
|
59
|
+
* map the orchestrator's getters to framework-specific reactive primitives.
|
|
60
|
+
*/
|
|
61
|
+
var StreamOrchestrator = class {
|
|
62
|
+
stream;
|
|
63
|
+
messageManager;
|
|
64
|
+
pendingRuns;
|
|
65
|
+
#options;
|
|
66
|
+
#accessors;
|
|
67
|
+
historyLimit;
|
|
68
|
+
#runMetadataStorage;
|
|
69
|
+
#callbackStreamModes;
|
|
70
|
+
#trackedStreamModes = [];
|
|
71
|
+
#threadId;
|
|
72
|
+
#threadIdPromise = null;
|
|
73
|
+
#threadIdStreaming = null;
|
|
74
|
+
#history;
|
|
75
|
+
#branch = "";
|
|
76
|
+
#submitting = false;
|
|
77
|
+
#listeners = /* @__PURE__ */ new Set();
|
|
78
|
+
#version = 0;
|
|
79
|
+
#streamUnsub = null;
|
|
80
|
+
#queueUnsub = null;
|
|
81
|
+
#disposed = false;
|
|
82
|
+
/**
|
|
83
|
+
* Create a new StreamOrchestrator.
|
|
84
|
+
*
|
|
85
|
+
* @param options - Configuration options for the stream, including callbacks,
|
|
86
|
+
* throttle settings, reconnect behaviour, and subagent filters.
|
|
87
|
+
* @param accessors - Framework-specific accessors that resolve reactive
|
|
88
|
+
* primitives (client, assistant ID, messages key) at call time.
|
|
89
|
+
*/
|
|
90
|
+
constructor(options, accessors) {
|
|
91
|
+
this.#options = options;
|
|
92
|
+
this.#accessors = accessors;
|
|
93
|
+
this.#runMetadataStorage = resolveRunMetadataStorage(options.reconnectOnMount);
|
|
94
|
+
this.#callbackStreamModes = resolveCallbackStreamModes(options);
|
|
95
|
+
this.historyLimit = typeof options.fetchStateHistory === "object" && options.fetchStateHistory != null ? options.fetchStateHistory.limit ?? false : options.fetchStateHistory ?? false;
|
|
96
|
+
this.messageManager = new MessageTupleManager();
|
|
97
|
+
this.stream = new StreamManager(this.messageManager, {
|
|
98
|
+
throttle: options.throttle ?? false,
|
|
99
|
+
subagentToolNames: options.subagentToolNames,
|
|
100
|
+
filterSubagentMessages: options.filterSubagentMessages,
|
|
101
|
+
toMessage: options.toMessage ?? toMessageClass
|
|
102
|
+
});
|
|
103
|
+
this.pendingRuns = new PendingRunsTracker();
|
|
104
|
+
this.#threadId = void 0;
|
|
105
|
+
this.#history = {
|
|
106
|
+
data: void 0,
|
|
107
|
+
error: void 0,
|
|
108
|
+
isLoading: false,
|
|
109
|
+
mutate: this.#mutate
|
|
110
|
+
};
|
|
111
|
+
this.#streamUnsub = this.stream.subscribe(() => {
|
|
112
|
+
this.#notify();
|
|
113
|
+
});
|
|
114
|
+
this.#queueUnsub = this.pendingRuns.subscribe(() => {
|
|
115
|
+
this.#notify();
|
|
116
|
+
});
|
|
117
|
+
}
|
|
118
|
+
/**
|
|
119
|
+
* Register a listener that is called whenever the orchestrator's internal
|
|
120
|
+
* state changes (stream updates, queue changes, history mutations, etc.).
|
|
121
|
+
*
|
|
122
|
+
* @param listener - Callback invoked on every state change.
|
|
123
|
+
* @returns An unsubscribe function that removes the listener.
|
|
124
|
+
*/
|
|
125
|
+
subscribe(listener) {
|
|
126
|
+
this.#listeners.add(listener);
|
|
127
|
+
return () => {
|
|
128
|
+
this.#listeners.delete(listener);
|
|
129
|
+
};
|
|
130
|
+
}
|
|
131
|
+
/**
|
|
132
|
+
* Return the current version number, incremented on every state change.
|
|
133
|
+
* Useful as a React `useSyncExternalStore` snapshot.
|
|
134
|
+
*
|
|
135
|
+
* @returns The current monotonically increasing version counter.
|
|
136
|
+
*/
|
|
137
|
+
getSnapshot() {
|
|
138
|
+
return this.#version;
|
|
139
|
+
}
|
|
140
|
+
/**
|
|
141
|
+
* Increment the version counter and invoke all registered listeners.
|
|
142
|
+
* No-op if the orchestrator has been disposed.
|
|
143
|
+
*/
|
|
144
|
+
#notify() {
|
|
145
|
+
if (this.#disposed) return;
|
|
146
|
+
this.#version += 1;
|
|
147
|
+
for (const listener of this.#listeners) listener();
|
|
148
|
+
}
|
|
149
|
+
/**
|
|
150
|
+
* The current thread ID, or `undefined` if no thread is active.
|
|
151
|
+
*/
|
|
152
|
+
get threadId() {
|
|
153
|
+
return this.#threadId;
|
|
154
|
+
}
|
|
155
|
+
/**
|
|
156
|
+
* Update thread ID from an external source (e.g. reactive prop change).
|
|
157
|
+
* Clears the current stream and triggers a history fetch.
|
|
158
|
+
* @param newId - The new thread ID to set.
|
|
159
|
+
* @returns The new thread ID.
|
|
160
|
+
*/
|
|
161
|
+
setThreadId(newId) {
|
|
162
|
+
if (newId === this.#threadId) return;
|
|
163
|
+
this.#threadId = newId;
|
|
164
|
+
this.stream.clear();
|
|
165
|
+
this.#fetchHistoryForThread(newId);
|
|
166
|
+
this.#notify();
|
|
167
|
+
}
|
|
168
|
+
/**
|
|
169
|
+
* Update the thread ID from within a submit flow. Sets both the
|
|
170
|
+
* streaming and canonical thread IDs, fires the `onThreadId` callback,
|
|
171
|
+
* and notifies listeners.
|
|
172
|
+
*
|
|
173
|
+
* @param newId - The newly created or resolved thread ID.
|
|
174
|
+
*/
|
|
175
|
+
#setThreadIdFromSubmit(newId) {
|
|
176
|
+
this.#threadIdStreaming = newId;
|
|
177
|
+
this.#threadId = newId;
|
|
178
|
+
this.#options.onThreadId?.(newId);
|
|
179
|
+
this.#notify();
|
|
180
|
+
}
|
|
181
|
+
#fetchHistoryForThread(threadId) {
|
|
182
|
+
if (this.#threadIdStreaming != null && this.#threadIdStreaming === threadId) return;
|
|
183
|
+
if (threadId != null) {
|
|
184
|
+
this.#history = {
|
|
185
|
+
...this.#history,
|
|
186
|
+
isLoading: true,
|
|
187
|
+
mutate: this.#mutate
|
|
188
|
+
};
|
|
189
|
+
this.#notify();
|
|
190
|
+
this.#mutate(threadId);
|
|
191
|
+
} else {
|
|
192
|
+
this.#history = {
|
|
193
|
+
data: void 0,
|
|
194
|
+
error: void 0,
|
|
195
|
+
isLoading: false,
|
|
196
|
+
mutate: this.#mutate
|
|
197
|
+
};
|
|
198
|
+
this.#notify();
|
|
199
|
+
}
|
|
200
|
+
}
|
|
201
|
+
/**
|
|
202
|
+
* The current thread history fetch state, including data, loading status,
|
|
203
|
+
* error, and a {@link UseStreamThread.mutate | mutate} function to
|
|
204
|
+
* manually re-fetch.
|
|
205
|
+
*/
|
|
206
|
+
get historyData() {
|
|
207
|
+
return this.#history;
|
|
208
|
+
}
|
|
209
|
+
async #mutate(mutateId) {
|
|
210
|
+
const tid = mutateId ?? this.#threadId;
|
|
211
|
+
if (!tid) return void 0;
|
|
212
|
+
try {
|
|
213
|
+
const data = await fetchHistory(this.#accessors.getClient(), tid, { limit: this.historyLimit });
|
|
214
|
+
this.#history = {
|
|
215
|
+
data,
|
|
216
|
+
error: void 0,
|
|
217
|
+
isLoading: false,
|
|
218
|
+
mutate: this.#mutate
|
|
219
|
+
};
|
|
220
|
+
this.#notify();
|
|
221
|
+
return data;
|
|
222
|
+
} catch (err) {
|
|
223
|
+
this.#history = {
|
|
224
|
+
...this.#history,
|
|
225
|
+
error: err,
|
|
226
|
+
isLoading: false
|
|
227
|
+
};
|
|
228
|
+
this.#notify();
|
|
229
|
+
this.#options.onError?.(err, void 0);
|
|
230
|
+
return;
|
|
231
|
+
}
|
|
232
|
+
}
|
|
233
|
+
/**
|
|
234
|
+
* Trigger initial history fetch for the current thread ID.
|
|
235
|
+
* Should be called once after construction when the initial threadId is known.
|
|
236
|
+
*/
|
|
237
|
+
initThreadId(threadId) {
|
|
238
|
+
this.#threadId = threadId;
|
|
239
|
+
this.#fetchHistoryForThread(threadId);
|
|
240
|
+
}
|
|
241
|
+
/**
|
|
242
|
+
* The currently active branch identifier. An empty string represents
|
|
243
|
+
* the main (default) branch.
|
|
244
|
+
*/
|
|
245
|
+
get branch() {
|
|
246
|
+
return this.#branch;
|
|
247
|
+
}
|
|
248
|
+
/**
|
|
249
|
+
* Set the active branch and notify listeners if the value changed.
|
|
250
|
+
*
|
|
251
|
+
* @param value - The branch identifier to switch to.
|
|
252
|
+
*/
|
|
253
|
+
setBranch(value) {
|
|
254
|
+
if (value === this.#branch) return;
|
|
255
|
+
this.#branch = value;
|
|
256
|
+
this.#notify();
|
|
257
|
+
}
|
|
258
|
+
/**
|
|
259
|
+
* Derived branch context computed from the current branch and thread
|
|
260
|
+
* history. Contains the thread head, branch tree, and checkpoint-to-branch
|
|
261
|
+
* mapping for the active branch.
|
|
262
|
+
*/
|
|
263
|
+
get branchContext() {
|
|
264
|
+
return getBranchContext(this.#branch, this.#history.data ?? void 0);
|
|
265
|
+
}
|
|
266
|
+
#getMessages(value) {
|
|
267
|
+
const messagesKey = this.#accessors.getMessagesKey();
|
|
268
|
+
return Array.isArray(value[messagesKey]) ? value[messagesKey] : [];
|
|
269
|
+
}
|
|
270
|
+
#setMessages(current, messages) {
|
|
271
|
+
const messagesKey = this.#accessors.getMessagesKey();
|
|
272
|
+
return {
|
|
273
|
+
...current,
|
|
274
|
+
[messagesKey]: messages
|
|
275
|
+
};
|
|
276
|
+
}
|
|
277
|
+
/**
|
|
278
|
+
* The state values from the thread head of the current branch history,
|
|
279
|
+
* falling back to {@link AnyStreamOptions.initialValues | initialValues}
|
|
280
|
+
* or an empty object.
|
|
281
|
+
*/
|
|
282
|
+
get historyValues() {
|
|
283
|
+
return this.branchContext.threadHead?.values ?? this.#options.initialValues ?? {};
|
|
284
|
+
}
|
|
285
|
+
/**
|
|
286
|
+
* The error from the last task in the thread head, if any.
|
|
287
|
+
* Attempts to parse structured {@link StreamError} instances from JSON.
|
|
288
|
+
*/
|
|
289
|
+
get historyError() {
|
|
290
|
+
const error = this.branchContext.threadHead?.tasks?.at(-1)?.error;
|
|
291
|
+
if (error == null) return void 0;
|
|
292
|
+
try {
|
|
293
|
+
const parsed = JSON.parse(error);
|
|
294
|
+
if (StreamError.isStructuredError(parsed)) return new StreamError(parsed);
|
|
295
|
+
return parsed;
|
|
296
|
+
} catch {}
|
|
297
|
+
return error;
|
|
298
|
+
}
|
|
299
|
+
/**
|
|
300
|
+
* The latest state values received from the active stream, or `null` if
|
|
301
|
+
* no stream is running or no values have been received yet.
|
|
302
|
+
*/
|
|
303
|
+
get streamValues() {
|
|
304
|
+
return this.stream.values;
|
|
305
|
+
}
|
|
306
|
+
/**
|
|
307
|
+
* The error from the active stream, if one occurred during streaming.
|
|
308
|
+
*/
|
|
309
|
+
get streamError() {
|
|
310
|
+
return this.stream.error;
|
|
311
|
+
}
|
|
312
|
+
/**
|
|
313
|
+
* The merged state values, preferring live stream values over history.
|
|
314
|
+
* This is the primary way to read the current thread state.
|
|
315
|
+
*/
|
|
316
|
+
get values() {
|
|
317
|
+
return this.stream.values ?? this.historyValues;
|
|
318
|
+
}
|
|
319
|
+
/**
|
|
320
|
+
* The first available error from the stream, history, or thread fetch.
|
|
321
|
+
* Returns `undefined` when no error is present.
|
|
322
|
+
*/
|
|
323
|
+
get error() {
|
|
324
|
+
return this.stream.error ?? this.historyError ?? this.#history.error;
|
|
325
|
+
}
|
|
326
|
+
/**
|
|
327
|
+
* Whether the stream is currently active and receiving events.
|
|
328
|
+
*/
|
|
329
|
+
get isLoading() {
|
|
330
|
+
return this.stream.isLoading;
|
|
331
|
+
}
|
|
332
|
+
/**
|
|
333
|
+
* The messages array extracted from the current {@link values} using the
|
|
334
|
+
* configured messages key.
|
|
335
|
+
*/
|
|
336
|
+
get messages() {
|
|
337
|
+
return this.#getMessages(this.values);
|
|
338
|
+
}
|
|
339
|
+
/**
|
|
340
|
+
* The current messages converted to LangChain {@link BaseMessage} instances.
|
|
341
|
+
* Automatically tracks the `"messages-tuple"` stream mode.
|
|
342
|
+
*/
|
|
343
|
+
get messageInstances() {
|
|
344
|
+
this.trackStreamMode("messages-tuple");
|
|
345
|
+
return ensureMessageInstances(this.messages);
|
|
346
|
+
}
|
|
347
|
+
/**
|
|
348
|
+
* All tool calls with their corresponding results extracted from
|
|
349
|
+
* the current messages. Automatically tracks the `"messages-tuple"`
|
|
350
|
+
* stream mode.
|
|
351
|
+
*/
|
|
352
|
+
get toolCalls() {
|
|
353
|
+
this.trackStreamMode("messages-tuple");
|
|
354
|
+
return getToolCallsWithResults(this.#getMessages(this.values));
|
|
355
|
+
}
|
|
356
|
+
/**
|
|
357
|
+
* Get tool calls with results for a specific AI message.
|
|
358
|
+
* Automatically tracks the `"messages-tuple"` stream mode.
|
|
359
|
+
*
|
|
360
|
+
* @param message - The AI message to extract tool calls from.
|
|
361
|
+
* @returns Tool calls whose AI message ID matches the given message.
|
|
362
|
+
*/
|
|
363
|
+
getToolCalls(message) {
|
|
364
|
+
this.trackStreamMode("messages-tuple");
|
|
365
|
+
return getToolCallsWithResults(this.#getMessages(this.values)).filter((tc) => tc.aiMessage.id === message.id);
|
|
366
|
+
}
|
|
367
|
+
/**
|
|
368
|
+
* All active interrupts for the current thread state.
|
|
369
|
+
* Returns an empty array when the stream is loading or no interrupts
|
|
370
|
+
* are present. Falls back to a `{ when: "breakpoint" }` sentinel when
|
|
371
|
+
* there are pending next nodes but no explicit interrupt data.
|
|
372
|
+
*/
|
|
373
|
+
get interrupts() {
|
|
374
|
+
const v = this.values;
|
|
375
|
+
if (v != null && "__interrupt__" in v && Array.isArray(v.__interrupt__)) {
|
|
376
|
+
const valueInterrupts = v.__interrupt__;
|
|
377
|
+
if (valueInterrupts.length === 0) return [{ when: "breakpoint" }];
|
|
378
|
+
return valueInterrupts;
|
|
379
|
+
}
|
|
380
|
+
if (this.isLoading) return [];
|
|
381
|
+
const allInterrupts = (this.branchContext.threadHead?.tasks ?? []).flatMap((t) => t.interrupts ?? []);
|
|
382
|
+
if (allInterrupts.length > 0) return allInterrupts;
|
|
383
|
+
if (!(this.branchContext.threadHead?.next ?? []).length || this.error != null) return [];
|
|
384
|
+
return [{ when: "breakpoint" }];
|
|
385
|
+
}
|
|
386
|
+
/**
|
|
387
|
+
* The single most relevant interrupt for the current thread state,
|
|
388
|
+
* or `undefined` if no interrupt is active. Convenience accessor that
|
|
389
|
+
* delegates to {@link extractInterrupts}.
|
|
390
|
+
*/
|
|
391
|
+
get interrupt() {
|
|
392
|
+
return extractInterrupts(this.values, {
|
|
393
|
+
isLoading: this.isLoading,
|
|
394
|
+
threadState: this.branchContext.threadHead,
|
|
395
|
+
error: this.error
|
|
396
|
+
});
|
|
397
|
+
}
|
|
398
|
+
/**
|
|
399
|
+
* Flattened history messages as LangChain {@link BaseMessage} instances,
|
|
400
|
+
* ordered chronologically across all branch checkpoints.
|
|
401
|
+
*
|
|
402
|
+
* @throws If `fetchStateHistory` was not enabled in the options.
|
|
403
|
+
*/
|
|
404
|
+
get flatHistory() {
|
|
405
|
+
if (this.historyLimit === false) throw new Error("`fetchStateHistory` must be set to `true` to use `history`");
|
|
406
|
+
return ensureHistoryMessageInstances(this.branchContext.flatHistory, this.#accessors.getMessagesKey());
|
|
407
|
+
}
|
|
408
|
+
/**
|
|
409
|
+
* Whether the initial thread history is still being loaded and no data
|
|
410
|
+
* is available yet. Returns `false` once the first fetch completes.
|
|
411
|
+
*/
|
|
412
|
+
get isThreadLoading() {
|
|
413
|
+
return this.#history.isLoading && this.#history.data == null;
|
|
414
|
+
}
|
|
415
|
+
/**
|
|
416
|
+
* The full branch tree structure for the current thread history.
|
|
417
|
+
*
|
|
418
|
+
* @experimental This API may change in future releases.
|
|
419
|
+
* @throws If `fetchStateHistory` was not enabled in the options.
|
|
420
|
+
*/
|
|
421
|
+
get experimental_branchTree() {
|
|
422
|
+
if (this.historyLimit === false) throw new Error("`fetchStateHistory` must be set to `true` to use `experimental_branchTree`");
|
|
423
|
+
return this.branchContext.branchTree;
|
|
424
|
+
}
|
|
425
|
+
/**
|
|
426
|
+
* A map of metadata entries for all messages, derived from history
|
|
427
|
+
* and branch context. Used internally by {@link getMessagesMetadata}.
|
|
428
|
+
*/
|
|
429
|
+
get messageMetadata() {
|
|
430
|
+
return getMessagesMetadataMap({
|
|
431
|
+
initialValues: this.#options.initialValues,
|
|
432
|
+
history: this.#history.data,
|
|
433
|
+
getMessages: (value) => this.#getMessages(value),
|
|
434
|
+
branchContext: this.branchContext
|
|
435
|
+
});
|
|
436
|
+
}
|
|
437
|
+
/**
|
|
438
|
+
* Look up metadata for a specific message, merging stream-time metadata
|
|
439
|
+
* with history-derived metadata.
|
|
440
|
+
*
|
|
441
|
+
* @param message - The message to look up metadata for.
|
|
442
|
+
* @param index - Optional positional index used as a fallback identifier.
|
|
443
|
+
* @returns The merged metadata, or `undefined` if none is available.
|
|
444
|
+
*/
|
|
445
|
+
getMessagesMetadata(message, index) {
|
|
446
|
+
const streamMetadata = this.messageManager.get(message.id)?.metadata;
|
|
447
|
+
const historyMetadata = this.messageMetadata?.find((m) => m.messageId === (message.id ?? index));
|
|
448
|
+
if (streamMetadata != null || historyMetadata != null) return {
|
|
449
|
+
...historyMetadata,
|
|
450
|
+
streamMetadata
|
|
451
|
+
};
|
|
452
|
+
}
|
|
453
|
+
/**
|
|
454
|
+
* The list of pending run entries currently waiting in the queue.
|
|
455
|
+
*/
|
|
456
|
+
get queueEntries() {
|
|
457
|
+
return this.pendingRuns.entries;
|
|
458
|
+
}
|
|
459
|
+
/**
|
|
460
|
+
* The number of pending runs in the queue.
|
|
461
|
+
*/
|
|
462
|
+
get queueSize() {
|
|
463
|
+
return this.pendingRuns.size;
|
|
464
|
+
}
|
|
465
|
+
/**
|
|
466
|
+
* Cancel and remove a specific pending run from the queue.
|
|
467
|
+
* If the run exists and a thread is active, the run is also cancelled
|
|
468
|
+
* on the server.
|
|
469
|
+
*
|
|
470
|
+
* @param id - The run ID to cancel.
|
|
471
|
+
* @returns `true` if the run was found and removed, `false` otherwise.
|
|
472
|
+
*/
|
|
473
|
+
async cancelQueueItem(id) {
|
|
474
|
+
const tid = this.#threadId;
|
|
475
|
+
const removed = this.pendingRuns.remove(id);
|
|
476
|
+
if (removed && tid) await this.#accessors.getClient().runs.cancel(tid, id);
|
|
477
|
+
return removed;
|
|
478
|
+
}
|
|
479
|
+
/**
|
|
480
|
+
* Remove all pending runs from the queue and cancel them on the server.
|
|
481
|
+
*/
|
|
482
|
+
async clearQueue() {
|
|
483
|
+
const tid = this.#threadId;
|
|
484
|
+
const removed = this.pendingRuns.removeAll();
|
|
485
|
+
if (tid && removed.length > 0) await Promise.all(removed.map((e) => this.#accessors.getClient().runs.cancel(tid, e.id)));
|
|
486
|
+
}
|
|
487
|
+
/**
|
|
488
|
+
* A map of all known subagent stream interfaces, keyed by tool call ID.
|
|
489
|
+
*/
|
|
490
|
+
get subagents() {
|
|
491
|
+
return this.stream.getSubagents();
|
|
492
|
+
}
|
|
493
|
+
/**
|
|
494
|
+
* The subset of subagents that are currently active (streaming).
|
|
495
|
+
*/
|
|
496
|
+
get activeSubagents() {
|
|
497
|
+
return this.stream.getActiveSubagents();
|
|
498
|
+
}
|
|
499
|
+
/**
|
|
500
|
+
* Retrieve a specific subagent stream interface by its tool call ID.
|
|
501
|
+
*
|
|
502
|
+
* @param toolCallId - The tool call ID that spawned the subagent.
|
|
503
|
+
* @returns The subagent interface, or `undefined` if not found.
|
|
504
|
+
*/
|
|
505
|
+
getSubagent(toolCallId) {
|
|
506
|
+
return this.stream.getSubagent(toolCallId);
|
|
507
|
+
}
|
|
508
|
+
/**
|
|
509
|
+
* Retrieve all subagent stream interfaces that match a given agent type.
|
|
510
|
+
*
|
|
511
|
+
* @param type - The agent type name to filter by.
|
|
512
|
+
* @returns An array of matching subagent interfaces.
|
|
513
|
+
*/
|
|
514
|
+
getSubagentsByType(type) {
|
|
515
|
+
return this.stream.getSubagentsByType(type);
|
|
516
|
+
}
|
|
517
|
+
/**
|
|
518
|
+
* Retrieve all subagent stream interfaces associated with a specific
|
|
519
|
+
* AI message.
|
|
520
|
+
*
|
|
521
|
+
* @param messageId - The ID of the parent AI message.
|
|
522
|
+
* @returns An array of subagent interfaces spawned by that message.
|
|
523
|
+
*/
|
|
524
|
+
getSubagentsByMessage(messageId) {
|
|
525
|
+
return this.stream.getSubagentsByMessage(messageId);
|
|
526
|
+
}
|
|
527
|
+
/**
|
|
528
|
+
* Reconstruct subagents from history messages if applicable.
|
|
529
|
+
* Call this when history finishes loading and the stream isn't active.
|
|
530
|
+
* Returns an AbortController for cancelling the subagent history fetch,
|
|
531
|
+
* or null if no reconstruction was needed.
|
|
532
|
+
*/
|
|
533
|
+
reconstructSubagentsIfNeeded() {
|
|
534
|
+
const hvMessages = this.#getMessages(this.historyValues);
|
|
535
|
+
if (!(this.#options.filterSubagentMessages && !this.isLoading && !this.#history.isLoading && hvMessages.length > 0)) return null;
|
|
536
|
+
this.stream.reconstructSubagents(hvMessages, { skipIfPopulated: true });
|
|
537
|
+
const tid = this.#threadId;
|
|
538
|
+
if (tid) {
|
|
539
|
+
const controller = new AbortController();
|
|
540
|
+
this.stream.fetchSubagentHistory(this.#accessors.getClient().threads, tid, {
|
|
541
|
+
messagesKey: this.#accessors.getMessagesKey(),
|
|
542
|
+
signal: controller.signal
|
|
543
|
+
});
|
|
544
|
+
return controller;
|
|
545
|
+
}
|
|
546
|
+
return null;
|
|
547
|
+
}
|
|
548
|
+
/**
|
|
549
|
+
* Register additional stream modes that should be included in future
|
|
550
|
+
* stream requests. Modes are deduplicated automatically.
|
|
551
|
+
*
|
|
552
|
+
* @param modes - One or more stream modes to track.
|
|
553
|
+
*/
|
|
554
|
+
trackStreamMode(...modes) {
|
|
555
|
+
for (const mode of modes) if (!this.#trackedStreamModes.includes(mode)) this.#trackedStreamModes.push(mode);
|
|
556
|
+
}
|
|
557
|
+
/**
|
|
558
|
+
* Stop the currently active stream. If reconnect metadata storage is
|
|
559
|
+
* configured, also cancels the run on the server and cleans up stored
|
|
560
|
+
* run metadata.
|
|
561
|
+
*/
|
|
562
|
+
stop() {
|
|
563
|
+
this.stream.stop(this.historyValues, { onStop: (args) => {
|
|
564
|
+
if (this.#runMetadataStorage && this.#threadId) {
|
|
565
|
+
const runId = this.#runMetadataStorage.getItem(`lg:stream:${this.#threadId}`);
|
|
566
|
+
if (runId) this.#accessors.getClient().runs.cancel(this.#threadId, runId);
|
|
567
|
+
this.#runMetadataStorage.removeItem(`lg:stream:${this.#threadId}`);
|
|
568
|
+
}
|
|
569
|
+
this.#options.onStop?.(args);
|
|
570
|
+
} });
|
|
571
|
+
}
|
|
572
|
+
/**
|
|
573
|
+
* Join an existing run's event stream by run ID. Used for reconnecting
|
|
574
|
+
* to in-progress runs or consuming queued runs.
|
|
575
|
+
*
|
|
576
|
+
* @param runId - The ID of the run to join.
|
|
577
|
+
* @param lastEventId - The last event ID received, for resuming mid-stream.
|
|
578
|
+
* Defaults to `"-1"` (start from the beginning).
|
|
579
|
+
* @param joinOptions - Additional options for stream mode and event filtering.
|
|
580
|
+
*/
|
|
581
|
+
async joinStream(runId, lastEventId, joinOptions) {
|
|
582
|
+
lastEventId ??= "-1";
|
|
583
|
+
const tid = this.#threadId;
|
|
584
|
+
if (!tid) return;
|
|
585
|
+
this.#threadIdStreaming = tid;
|
|
586
|
+
const callbackMeta = {
|
|
587
|
+
thread_id: tid,
|
|
588
|
+
run_id: runId
|
|
589
|
+
};
|
|
590
|
+
const client = this.#accessors.getClient();
|
|
591
|
+
await this.stream.start(async (signal) => {
|
|
592
|
+
const rawStream = client.runs.joinStream(tid, runId, {
|
|
593
|
+
signal,
|
|
594
|
+
lastEventId,
|
|
595
|
+
streamMode: joinOptions?.streamMode
|
|
596
|
+
});
|
|
597
|
+
return joinOptions?.filter != null ? filterStream(rawStream, joinOptions.filter) : rawStream;
|
|
598
|
+
}, {
|
|
599
|
+
getMessages: (value) => this.#getMessages(value),
|
|
600
|
+
setMessages: (current, messages) => this.#setMessages(current, messages),
|
|
601
|
+
initialValues: this.historyValues,
|
|
602
|
+
callbacks: this.#options,
|
|
603
|
+
onSuccess: async () => {
|
|
604
|
+
this.#runMetadataStorage?.removeItem(`lg:stream:${tid}`);
|
|
605
|
+
const lastHead = (await this.#mutate(tid))?.at(0);
|
|
606
|
+
if (lastHead) this.#options.onFinish?.(lastHead, callbackMeta);
|
|
607
|
+
},
|
|
608
|
+
onError: (error) => {
|
|
609
|
+
this.#options.onError?.(error, callbackMeta);
|
|
610
|
+
},
|
|
611
|
+
onFinish: () => {
|
|
612
|
+
this.#threadIdStreaming = null;
|
|
613
|
+
}
|
|
614
|
+
});
|
|
615
|
+
}
|
|
616
|
+
/**
|
|
617
|
+
* Submit input values directly to the LangGraph Platform, creating a new
|
|
618
|
+
* thread if necessary. Starts a streaming run and processes events until
|
|
619
|
+
* completion. Unlike {@link submit}, this does not handle queueing — if
|
|
620
|
+
* a stream is already active, a concurrent run will be started.
|
|
621
|
+
*
|
|
622
|
+
* @param values - The state values to send as run input.
|
|
623
|
+
* @param submitOptions - Optional configuration for the run (config,
|
|
624
|
+
* checkpoint, multitask strategy, optimistic values, etc.).
|
|
625
|
+
*/
|
|
626
|
+
submitDirect(values, submitOptions) {
|
|
627
|
+
const currentBranchContext = this.branchContext;
|
|
628
|
+
const checkpointId = submitOptions?.checkpoint?.checkpoint_id;
|
|
629
|
+
this.#branch = checkpointId != null ? currentBranchContext.branchByCheckpoint[checkpointId]?.branch ?? "" : "";
|
|
630
|
+
const includeImplicitBranch = this.historyLimit === true || typeof this.historyLimit === "number";
|
|
631
|
+
const shouldRefetch = this.#options.onFinish != null || includeImplicitBranch;
|
|
632
|
+
let checkpoint = submitOptions?.checkpoint ?? (includeImplicitBranch ? currentBranchContext.threadHead?.checkpoint : void 0) ?? void 0;
|
|
633
|
+
if (submitOptions?.checkpoint === null) checkpoint = void 0;
|
|
634
|
+
if (checkpoint != null) delete checkpoint.thread_id;
|
|
635
|
+
let callbackMeta;
|
|
636
|
+
let rejoinKey;
|
|
637
|
+
let usableThreadId;
|
|
638
|
+
const client = this.#accessors.getClient();
|
|
639
|
+
const assistantId = this.#accessors.getAssistantId();
|
|
640
|
+
return this.stream.start(async (signal) => {
|
|
641
|
+
usableThreadId = this.#threadId;
|
|
642
|
+
if (usableThreadId) this.#threadIdStreaming = usableThreadId;
|
|
643
|
+
if (!usableThreadId) {
|
|
644
|
+
const threadPromise = client.threads.create({
|
|
645
|
+
threadId: submitOptions?.threadId,
|
|
646
|
+
metadata: submitOptions?.metadata
|
|
647
|
+
});
|
|
648
|
+
this.#threadIdPromise = threadPromise.then((t) => t.thread_id);
|
|
649
|
+
usableThreadId = (await threadPromise).thread_id;
|
|
650
|
+
this.#setThreadIdFromSubmit(usableThreadId);
|
|
651
|
+
}
|
|
652
|
+
const streamMode = unique([
|
|
653
|
+
"values",
|
|
654
|
+
"updates",
|
|
655
|
+
...submitOptions?.streamMode ?? [],
|
|
656
|
+
...this.#trackedStreamModes,
|
|
657
|
+
...this.#callbackStreamModes
|
|
658
|
+
]);
|
|
659
|
+
this.stream.setStreamValues(() => {
|
|
660
|
+
const prev = {
|
|
661
|
+
...this.historyValues,
|
|
662
|
+
...this.stream.values
|
|
663
|
+
};
|
|
664
|
+
if (submitOptions?.optimisticValues != null) return {
|
|
665
|
+
...prev,
|
|
666
|
+
...typeof submitOptions.optimisticValues === "function" ? submitOptions.optimisticValues(prev) : submitOptions.optimisticValues
|
|
667
|
+
};
|
|
668
|
+
return { ...prev };
|
|
669
|
+
});
|
|
670
|
+
const streamResumable = submitOptions?.streamResumable ?? !!this.#runMetadataStorage;
|
|
671
|
+
return client.runs.stream(usableThreadId, assistantId, {
|
|
672
|
+
input: values,
|
|
673
|
+
config: submitOptions?.config,
|
|
674
|
+
context: submitOptions?.context,
|
|
675
|
+
command: submitOptions?.command,
|
|
676
|
+
interruptBefore: submitOptions?.interruptBefore,
|
|
677
|
+
interruptAfter: submitOptions?.interruptAfter,
|
|
678
|
+
metadata: submitOptions?.metadata,
|
|
679
|
+
multitaskStrategy: submitOptions?.multitaskStrategy,
|
|
680
|
+
onCompletion: submitOptions?.onCompletion,
|
|
681
|
+
onDisconnect: submitOptions?.onDisconnect ?? (streamResumable ? "continue" : "cancel"),
|
|
682
|
+
signal,
|
|
683
|
+
checkpoint,
|
|
684
|
+
streamMode,
|
|
685
|
+
streamSubgraphs: submitOptions?.streamSubgraphs,
|
|
686
|
+
streamResumable,
|
|
687
|
+
durability: submitOptions?.durability,
|
|
688
|
+
onRunCreated: (params) => {
|
|
689
|
+
callbackMeta = {
|
|
690
|
+
run_id: params.run_id,
|
|
691
|
+
thread_id: params.thread_id ?? usableThreadId
|
|
692
|
+
};
|
|
693
|
+
if (this.#runMetadataStorage) {
|
|
694
|
+
rejoinKey = `lg:stream:${usableThreadId}`;
|
|
695
|
+
this.#runMetadataStorage.setItem(rejoinKey, callbackMeta.run_id);
|
|
696
|
+
}
|
|
697
|
+
this.#options.onCreated?.(callbackMeta);
|
|
698
|
+
}
|
|
699
|
+
});
|
|
700
|
+
}, {
|
|
701
|
+
getMessages: (value) => this.#getMessages(value),
|
|
702
|
+
setMessages: (current, messages) => this.#setMessages(current, messages),
|
|
703
|
+
initialValues: this.historyValues,
|
|
704
|
+
callbacks: this.#options,
|
|
705
|
+
onSuccess: async () => {
|
|
706
|
+
if (rejoinKey) this.#runMetadataStorage?.removeItem(rejoinKey);
|
|
707
|
+
if (shouldRefetch && usableThreadId) {
|
|
708
|
+
const lastHead = (await this.#mutate(usableThreadId))?.at(0);
|
|
709
|
+
if (lastHead) {
|
|
710
|
+
this.#options.onFinish?.(lastHead, callbackMeta);
|
|
711
|
+
return null;
|
|
712
|
+
}
|
|
713
|
+
}
|
|
714
|
+
},
|
|
715
|
+
onError: (error) => {
|
|
716
|
+
this.#options.onError?.(error, callbackMeta);
|
|
717
|
+
submitOptions?.onError?.(error, callbackMeta);
|
|
718
|
+
},
|
|
719
|
+
onFinish: () => {
|
|
720
|
+
this.#threadIdStreaming = null;
|
|
721
|
+
}
|
|
722
|
+
});
|
|
723
|
+
}
|
|
724
|
+
/**
 * Pop the next queued run and join its stream, but only when the
 * orchestrator is fully idle (no active stream and no in-flight
 * submit). Re-entered from the `.finally` handler below so queued
 * runs are drained strictly one at a time.
 */
#drainQueue() {
  // Guard: never start a queued run while something is streaming or
  // while a submit is mid-flight; bail out when the queue is empty.
  if (!this.isLoading && !this.#submitting && this.pendingRuns.size > 0) {
    const next = this.pendingRuns.shift();
    if (next) {
      // Mark as busy for the duration of the joined stream so a
      // concurrent submit() enqueues instead of racing this run.
      this.#submitting = true;
      // NOTE(review): if joinStream rejects, `.finally` re-propagates the
      // rejection on this unobserved chain — presumably errors are already
      // surfaced via the onError callback inside joinStream; confirm.
      this.joinStream(next.id).finally(() => {
        this.#submitting = false;
        // Recurse to keep draining until the queue is exhausted.
        this.#drainQueue();
      });
    }
  }
}
|
|
736
|
+
/**
|
|
737
|
+
* Trigger queue draining. Framework adapters should call this
|
|
738
|
+
* when isLoading or queue size changes.
|
|
739
|
+
*/
|
|
740
|
+
drainQueue() {
|
|
741
|
+
this.#drainQueue();
|
|
742
|
+
}
|
|
743
|
+
/**
|
|
744
|
+
* Submit input values with automatic queue management. If a stream is
|
|
745
|
+
* already active, the run is enqueued (unless the multitask strategy
|
|
746
|
+
* is `"interrupt"` or `"rollback"`, in which case the current run is
|
|
747
|
+
* replaced). Queued runs are drained sequentially via {@link drainQueue}.
|
|
748
|
+
*
|
|
749
|
+
* @param values - The state values to send as run input.
|
|
750
|
+
* @param submitOptions - Optional configuration for the run.
|
|
751
|
+
* @returns The result of {@link submitDirect} if the run was started
|
|
752
|
+
* immediately, or `void` if the run was enqueued.
|
|
753
|
+
*/
|
|
754
|
+
async submit(values, submitOptions) {
|
|
755
|
+
if (this.stream.isLoading || this.#submitting) {
|
|
756
|
+
if (submitOptions?.multitaskStrategy === "interrupt" || submitOptions?.multitaskStrategy === "rollback") {
|
|
757
|
+
this.#submitting = true;
|
|
758
|
+
try {
|
|
759
|
+
await this.submitDirect(values, submitOptions);
|
|
760
|
+
} finally {
|
|
761
|
+
this.#submitting = false;
|
|
762
|
+
}
|
|
763
|
+
return;
|
|
764
|
+
}
|
|
765
|
+
let usableThreadId = this.#threadId;
|
|
766
|
+
if (!usableThreadId && this.#threadIdPromise) usableThreadId = await this.#threadIdPromise;
|
|
767
|
+
if (usableThreadId) {
|
|
768
|
+
const client = this.#accessors.getClient();
|
|
769
|
+
const assistantId = this.#accessors.getAssistantId();
|
|
770
|
+
try {
|
|
771
|
+
const run = await client.runs.create(usableThreadId, assistantId, {
|
|
772
|
+
input: values,
|
|
773
|
+
config: submitOptions?.config,
|
|
774
|
+
context: submitOptions?.context,
|
|
775
|
+
command: submitOptions?.command,
|
|
776
|
+
interruptBefore: submitOptions?.interruptBefore,
|
|
777
|
+
interruptAfter: submitOptions?.interruptAfter,
|
|
778
|
+
metadata: submitOptions?.metadata,
|
|
779
|
+
multitaskStrategy: "enqueue",
|
|
780
|
+
streamResumable: true,
|
|
781
|
+
streamSubgraphs: submitOptions?.streamSubgraphs,
|
|
782
|
+
durability: submitOptions?.durability
|
|
783
|
+
});
|
|
784
|
+
this.pendingRuns.add({
|
|
785
|
+
id: run.run_id,
|
|
786
|
+
values,
|
|
787
|
+
options: submitOptions,
|
|
788
|
+
createdAt: new Date(run.created_at)
|
|
789
|
+
});
|
|
790
|
+
} catch (error) {
|
|
791
|
+
this.#options.onError?.(error, void 0);
|
|
792
|
+
submitOptions?.onError?.(error, void 0);
|
|
793
|
+
}
|
|
794
|
+
return;
|
|
795
|
+
}
|
|
796
|
+
}
|
|
797
|
+
this.#submitting = true;
|
|
798
|
+
const result = this.submitDirect(values, submitOptions);
|
|
799
|
+
Promise.resolve(result).finally(() => {
|
|
800
|
+
this.#submitting = false;
|
|
801
|
+
this.#drainQueue();
|
|
802
|
+
});
|
|
803
|
+
return result;
|
|
804
|
+
}
|
|
805
|
+
/**
|
|
806
|
+
* Switch to a different thread (or clear the current thread).
|
|
807
|
+
* Clears the active stream, cancels all queued runs on the previous
|
|
808
|
+
* thread, fetches history for the new thread, and notifies the
|
|
809
|
+
* {@link AnyStreamOptions.onThreadId | onThreadId} callback.
|
|
810
|
+
*
|
|
811
|
+
* @param newThreadId - The thread ID to switch to, or `null` to clear.
|
|
812
|
+
*/
|
|
813
|
+
switchThread(newThreadId) {
|
|
814
|
+
if (newThreadId !== (this.#threadId ?? null)) {
|
|
815
|
+
const prevThreadId = this.#threadId;
|
|
816
|
+
this.#threadId = newThreadId ?? void 0;
|
|
817
|
+
this.stream.clear();
|
|
818
|
+
const removed = this.pendingRuns.removeAll();
|
|
819
|
+
if (prevThreadId && removed.length > 0) {
|
|
820
|
+
const client = this.#accessors.getClient();
|
|
821
|
+
Promise.all(removed.map((e) => client.runs.cancel(prevThreadId, e.id)));
|
|
822
|
+
}
|
|
823
|
+
this.#fetchHistoryForThread(this.#threadId);
|
|
824
|
+
if (newThreadId != null) this.#options.onThreadId?.(newThreadId);
|
|
825
|
+
this.#notify();
|
|
826
|
+
}
|
|
827
|
+
}
|
|
828
|
+
/**
|
|
829
|
+
* Attempt to reconnect to a previously running stream.
|
|
830
|
+
* Returns true if a reconnection was initiated.
|
|
831
|
+
*/
|
|
832
|
+
tryReconnect() {
|
|
833
|
+
if (this.#runMetadataStorage && this.#threadId) {
|
|
834
|
+
const runId = this.#runMetadataStorage.getItem(`lg:stream:${this.#threadId}`);
|
|
835
|
+
if (runId) {
|
|
836
|
+
this.joinStream(runId);
|
|
837
|
+
return true;
|
|
838
|
+
}
|
|
839
|
+
}
|
|
840
|
+
return false;
|
|
841
|
+
}
|
|
842
|
+
/**
|
|
843
|
+
* Whether reconnect-on-mount behaviour is enabled (i.e. run metadata
|
|
844
|
+
* storage is available).
|
|
845
|
+
*/
|
|
846
|
+
get shouldReconnect() {
|
|
847
|
+
return !!this.#runMetadataStorage;
|
|
848
|
+
}
|
|
849
|
+
/**
|
|
850
|
+
* Tear down the orchestrator: stop the active stream, remove all
|
|
851
|
+
* internal subscriptions, and mark the instance as disposed.
|
|
852
|
+
* After calling this method, the orchestrator should not be reused.
|
|
853
|
+
*/
|
|
854
|
+
dispose() {
|
|
855
|
+
this.#disposed = true;
|
|
856
|
+
this.#streamUnsub?.();
|
|
857
|
+
this.#queueUnsub?.();
|
|
858
|
+
this.#streamUnsub = null;
|
|
859
|
+
this.#queueUnsub = null;
|
|
860
|
+
this.stop();
|
|
861
|
+
}
|
|
862
|
+
};
|
|
863
|
+
//#endregion
|
|
864
|
+
export { StreamOrchestrator };
|
|
865
|
+
|
|
866
|
+
//# sourceMappingURL=orchestrator.js.map
|