@langchain/langgraph-sdk 0.0.45 → 0.0.46

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -203,15 +203,25 @@ const useControllableThreadId = (options) => {
         _setLocalThreadId(threadId);
         onThreadIdRef.current?.(threadId);
     }, []);
-    if (typeof options?.threadId === "undefined") {
+    if (!options || !("threadId" in options)) {
         return [localThreadId, onThreadId];
     }
-    return [options.threadId, onThreadId];
+    return [options.threadId ?? null, onThreadId];
 };
 function useStream(options) {
     let { assistantId, messagesKey, onError, onFinish } = options;
     messagesKey ??= "messages";
-    const client = (0, react_1.useMemo)(() => new client_js_1.Client({ apiUrl: options.apiUrl, apiKey: options.apiKey }), [options.apiKey, options.apiUrl]);
+    const client = (0, react_1.useMemo)(() => new client_js_1.Client({
+        apiUrl: options.apiUrl,
+        apiKey: options.apiKey,
+        callerOptions: options.callerOptions,
+        defaultHeaders: options.defaultHeaders,
+    }), [
+        options.apiKey,
+        options.apiUrl,
+        options.callerOptions,
+        options.defaultHeaders,
+    ]);
     const [threadId, onThreadId] = useControllableThreadId(options);
     const [branch, setBranch] = (0, react_1.useState)("");
     const [isLoading, setIsLoading] = (0, react_1.useState)(false);
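The hunk above changes two things in the compiled hook: `useControllableThreadId` now treats a missing `threadId` key (rather than an `undefined` value) as "uncontrolled", and the memoized `Client` forwards `callerOptions` and `defaultHeaders` from the hook options. A minimal sketch of constructing the SDK `Client` with those same fields directly; the URL, header name, and retry count are illustrative placeholders, and the `maxRetries` field is assumed to be part of `ClientConfig["callerOptions"]`:

```ts
import { Client } from "@langchain/langgraph-sdk";

// Sketch: the same ClientConfig fields the hook now passes through.
const client = new Client({
  apiUrl: "http://localhost:2024",                  // placeholder deployment URL
  apiKey: process.env.LANGGRAPH_API_KEY,
  defaultHeaders: { "x-example-tenant": "acme" },   // attached to every request
  callerOptions: { maxRetries: 2 },                 // assumed retry-tuning field
});
```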
@@ -220,10 +230,13 @@ function useStream(options) {
     const messageManagerRef = (0, react_1.useRef)(new MessageTupleManager());
     const submittingRef = (0, react_1.useRef)(false);
     const abortRef = (0, react_1.useRef)(null);
-    const trackStreamModeRef = (0, react_1.useRef)(["values"]);
-    const trackStreamMode = (0, react_1.useCallback)((mode) => {
-        if (!trackStreamModeRef.current.includes(mode))
-            trackStreamModeRef.current.push(mode);
+    const trackStreamModeRef = (0, react_1.useRef)([]);
+    const trackStreamMode = (0, react_1.useCallback)((...mode) => {
+        for (const m of mode) {
+            if (!trackStreamModeRef.current.includes(m)) {
+                trackStreamModeRef.current.push(m);
+            }
+        }
     }, []);
     const hasUpdateListener = options.onUpdateEvent != null;
     const hasCustomListener = options.onCustomEvent != null;
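`trackStreamMode` is now variadic and the backing ref starts empty instead of defaulting to `["values"]`, so a stream mode is only requested once something actually reads it. A standalone sketch of the accumulation logic outside React, restricted to the two modes visible in this diff:

```ts
// Sketch of the variadic tracking logic above (no React involved).
type TrackedMode = "values" | "messages-tuple";

const tracked: TrackedMode[] = [];

function trackStreamMode(...modes: TrackedMode[]) {
  for (const m of modes) {
    if (!tracked.includes(m)) tracked.push(m); // de-duplicated accumulation
  }
}

trackStreamMode("messages-tuple", "values");
trackStreamMode("values"); // already tracked, not added again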
@@ -434,17 +447,22 @@ function useStream(options) {
             if (isLoading)
                 return undefined;
             const interrupts = threadHead?.tasks?.at(-1)?.interrupts;
-            if (interrupts == null || interrupts.length === 0)
-                return undefined;
+            if (interrupts == null || interrupts.length === 0) {
+                // check if there's a next task present
+                const next = threadHead?.next ?? [];
+                if (!next.length || error != null)
+                    return undefined;
+                return { when: "breakpoint" };
+            }
             // Return only the current interrupt
             return interrupts.at(-1);
         },
         get messages() {
-            trackStreamMode("messages-tuple");
+            trackStreamMode("messages-tuple", "values");
             return getMessages(values);
         },
         getMessagesMetadata(message, index) {
-            trackStreamMode("messages-tuple");
+            trackStreamMode("messages-tuple", "values");
             return messageMetadata?.find((m) => m.messageId === (message.id ?? index));
         },
     };
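The interrupt getter previously returned `undefined` whenever the head task carried no interrupts. It now also reports a plain breakpoint: if there is no stored interrupt payload but the thread still has a pending `next` task and no error, it returns `{ when: "breakpoint" }`. A hedged consumer-side sketch, assuming the getter is exposed as `interrupt` on the hook's return value (inferred from the hunk) and that the hook is imported from the package's react entrypoint; the URL and assistant ID are placeholders:

```ts
import { useStream } from "@langchain/langgraph-sdk/react";

// Illustrative custom hook: turn the interrupt state into a status string.
function useInterruptStatus() {
  const stream = useStream({
    apiUrl: "http://localhost:2024", // placeholder
    assistantId: "agent",            // placeholder
  });

  const interrupt = stream.interrupt;
  if (interrupt == null) return null;                       // nothing pending
  if (interrupt.when === "breakpoint") {
    return "Paused at a breakpoint (no interrupt payload)"; // new in 0.0.46
  }
  return `Interrupted: ${JSON.stringify(interrupt.value)}`; // e.g. human-in-the-loop prompt
}
```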
@@ -66,6 +66,14 @@ interface UseStreamOptions<StateType extends Record<string, unknown> = Record<st
      * The API key to use.
      */
     apiKey?: ClientConfig["apiKey"];
+    /**
+     * Custom call options, such as custom fetch implementation.
+     */
+    callerOptions?: ClientConfig["callerOptions"];
+    /**
+     * Default headers to send with requests.
+     */
+    defaultHeaders?: ClientConfig["defaultHeaders"];
     /**
      * Specify the key within the state that contains messages.
      * Defaults to "messages".
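With the typings updated, the two new options can be passed straight to the hook. A sketch of wiring them up, assuming the react entrypoint import; the header value, retry count, and assistant ID are placeholders, and the `callerOptions` field shape is assumed to match `ClientConfig["callerOptions"]`:

```ts
import { useStream } from "@langchain/langgraph-sdk/react";

// Sketch: the new pass-through options on UseStreamOptions.
const stream = useStream({
  apiUrl: "http://localhost:2024",
  assistantId: "agent",                                // placeholder
  messagesKey: "messages",
  defaultHeaders: { Authorization: "Bearer <token>" }, // sent with every request
  callerOptions: { maxRetries: 2 },                    // assumed retry-tuning field
});
```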
@@ -200,15 +200,25 @@ const useControllableThreadId = (options) => {
         _setLocalThreadId(threadId);
         onThreadIdRef.current?.(threadId);
     }, []);
-    if (typeof options?.threadId === "undefined") {
+    if (!options || !("threadId" in options)) {
         return [localThreadId, onThreadId];
     }
-    return [options.threadId, onThreadId];
+    return [options.threadId ?? null, onThreadId];
 };
 export function useStream(options) {
     let { assistantId, messagesKey, onError, onFinish } = options;
     messagesKey ??= "messages";
-    const client = useMemo(() => new Client({ apiUrl: options.apiUrl, apiKey: options.apiKey }), [options.apiKey, options.apiUrl]);
+    const client = useMemo(() => new Client({
+        apiUrl: options.apiUrl,
+        apiKey: options.apiKey,
+        callerOptions: options.callerOptions,
+        defaultHeaders: options.defaultHeaders,
+    }), [
+        options.apiKey,
+        options.apiUrl,
+        options.callerOptions,
+        options.defaultHeaders,
+    ]);
     const [threadId, onThreadId] = useControllableThreadId(options);
     const [branch, setBranch] = useState("");
     const [isLoading, setIsLoading] = useState(false);
@@ -217,10 +227,13 @@ export function useStream(options) {
     const messageManagerRef = useRef(new MessageTupleManager());
     const submittingRef = useRef(false);
     const abortRef = useRef(null);
-    const trackStreamModeRef = useRef(["values"]);
-    const trackStreamMode = useCallback((mode) => {
-        if (!trackStreamModeRef.current.includes(mode))
-            trackStreamModeRef.current.push(mode);
+    const trackStreamModeRef = useRef([]);
+    const trackStreamMode = useCallback((...mode) => {
+        for (const m of mode) {
+            if (!trackStreamModeRef.current.includes(m)) {
+                trackStreamModeRef.current.push(m);
+            }
+        }
     }, []);
     const hasUpdateListener = options.onUpdateEvent != null;
     const hasCustomListener = options.onCustomEvent != null;
@@ -431,17 +444,22 @@ export function useStream(options) {
             if (isLoading)
                 return undefined;
             const interrupts = threadHead?.tasks?.at(-1)?.interrupts;
-            if (interrupts == null || interrupts.length === 0)
-                return undefined;
+            if (interrupts == null || interrupts.length === 0) {
+                // check if there's a next task present
+                const next = threadHead?.next ?? [];
+                if (!next.length || error != null)
+                    return undefined;
+                return { when: "breakpoint" };
+            }
             // Return only the current interrupt
             return interrupts.at(-1);
         },
         get messages() {
-            trackStreamMode("messages-tuple");
+            trackStreamMode("messages-tuple", "values");
             return getMessages(values);
         },
         getMessagesMetadata(message, index) {
-            trackStreamMode("messages-tuple");
+            trackStreamMode("messages-tuple", "values");
            return messageMetadata?.find((m) => m.messageId === (message.id ?? index));
         },
     };
package/dist/schema.d.ts CHANGED
@@ -104,9 +104,9 @@ export interface AssistantGraph {
  * An interrupt thrown inside a thread.
  */
 export interface Interrupt<TValue = unknown> {
-    value: TValue;
-    when: "during";
-    resumable: boolean;
+    value?: TValue;
+    when: "during" | (string & {});
+    resumable?: boolean;
     ns?: string[];
 }
 export interface Thread<ValuesType = DefaultValues> {
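The `Interrupt` shape is loosened to match the new runtime behavior: `value` and `resumable` become optional, and `when` widens from the literal `"during"` to `"during" | (string & {})`, so values like `"breakpoint"` type-check while `"during"` stays in editor completions. A short narrowing sketch; the helper name is illustrative and the import path assumes the schema type is re-exported from the package root:

```ts
import type { Interrupt } from "@langchain/langgraph-sdk";

// Illustrative helper: distinguish a real interrupt payload from a bare breakpoint.
function describeInterrupt(interrupt: Interrupt<string> | undefined): string {
  if (interrupt == null) return "no pending interrupt";
  if (interrupt.when === "breakpoint") return "paused at a breakpoint";
  // `value` is now optional, so guard before using it
  return interrupt.value != null
    ? `interrupted during run: ${interrupt.value}`
    : "interrupted during run";
}
```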
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@langchain/langgraph-sdk",
-  "version": "0.0.45",
+  "version": "0.0.46",
   "description": "Client library for interacting with the LangGraph API",
   "type": "module",
   "packageManager": "yarn@1.22.19",