@langchain/langgraph-sdk 0.1.0 → 0.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,24 @@
  # @langchain/langgraph-sdk

+ ## 0.1.2
+
+ ### Patch Changes
+
+ - 3b1e137: Add `description` field for assistants auth handlers
+
+ ## 0.1.1
+
+ ### Patch Changes
+
+ - 7de6680: Fix `onRequest` not being called when streaming runs or threads (#1585)
+ - df8b662: Fix interrupts not being exposed in `useStream["interrupt"]` when `fetchStateHistory: false`
+ - 572de43: feat(threads): add `ids` filter to Threads.search
+
+ - SDK: `ThreadsClient.search` now accepts `ids?: string[]` and forwards it to `/threads/search`.
+ - API: `/threads/search` schema accepts `ids` and storage filters by provided thread IDs.
+
+ This enables fetching a specific set of threads directly via the search endpoint, while remaining backward compatible.
+
  ## 0.1.0

  ### Minor Changes
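
The `ids` filter from the `572de43` entry above is a client-level addition; a minimal usage sketch, assuming a LangGraph server at a hypothetical local URL and illustrative thread IDs:

```ts
import { Client } from "@langchain/langgraph-sdk";

// Hypothetical server URL and thread IDs, for illustration only.
const client = new Client({ apiUrl: "http://localhost:2024" });

// `ids` is forwarded to POST /threads/search alongside the existing filters,
// so a specific set of threads can be fetched in one call.
const threads = await client.threads.search({
  ids: ["thread-a", "thread-b"],
  limit: 10,
});

console.log(threads.map((t) => t.thread_id));
```

Existing callers that omit `ids` are unaffected, which is the backward compatibility the entry refers to.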
@@ -19,6 +19,7 @@ interface AssistantCreate {
  context?: Maybe<unknown>;
  if_exists?: Maybe<"raise" | "do_nothing">;
  name?: Maybe<string>;
+ description?: Maybe<string>;
  graph_id: string;
  }
  /**
@@ -38,6 +39,7 @@ interface AssistantUpdate {
  context?: Maybe<unknown>;
  graph_id?: Maybe<string>;
  name?: Maybe<string>;
+ description?: Maybe<string>;
  version?: Maybe<number>;
  }
  /**
@@ -89,6 +91,7 @@ interface ThreadDelete {
  */
  interface ThreadSearch {
  thread_id?: Maybe<string>;
+ ids?: Maybe<string[]>;
  status?: Maybe<"idle" | "busy" | "interrupted" | "error" | (string & {})>;
  metadata?: Maybe<Record<string, unknown>>;
  values?: Maybe<Record<string, unknown>>;
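
The interface changes above are to the auth handler payload types, so the new `description` field is what an assistants handler would now see. A hedged sketch, assuming the `Auth` builder exported from `@langchain/langgraph-sdk/auth` and the `assistants:create` event name; the owner filter returned at the end is illustrative only:

```ts
import { Auth } from "@langchain/langgraph-sdk/auth";

export const auth = new Auth().on("assistants:create", async ({ value, user }) => {
  // `value` matches the AssistantCreate shape above, including the new optional field.
  console.log(`${user.identity} creating assistant:`, value.name, value.description);

  // Typical handler result: a metadata filter scoping the resource to its creator.
  return { owner: user.identity };
});
```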
package/dist/client.cjs CHANGED
@@ -563,6 +563,7 @@ class ThreadsClient extends BaseClient {
  method: "POST",
  json: {
  metadata: query?.metadata ?? undefined,
+ ids: query?.ids ?? undefined,
  limit: query?.limit ?? 10,
  offset: query?.offset ?? 0,
  status: query?.status,
@@ -671,7 +672,7 @@ class ThreadsClient extends BaseClient {
  async *joinStream(threadId, options
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  ) {
- const response = await this.asyncCaller.fetch(...this.prepareFetchOptions(`/threads/${threadId}/stream`, {
+ let [url, init] = this.prepareFetchOptions(`/threads/${threadId}/stream`, {
  method: "GET",
  headers: options?.lastEventId
  ? { "Last-Event-ID": options.lastEventId }
@@ -679,7 +680,10 @@ class ThreadsClient extends BaseClient {
  params: options?.streamMode
  ? { stream_mode: options.streamMode }
  : undefined,
- }));
+ });
+ if (this.onRequest != null)
+ init = await this.onRequest(url, init);
+ const response = await this.asyncCaller.fetch(url, init);
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const stream = (response.body || new ReadableStream({ start: (ctrl) => ctrl.close() }))
  .pipeThrough((0, sse_js_1.BytesLineDecoder)())
@@ -722,12 +726,15 @@ class RunsClient extends BaseClient {
  durability: payload?.durability,
  };
  const endpoint = threadId == null ? `/runs/stream` : `/threads/${threadId}/runs/stream`;
- const response = await this.asyncCaller.fetch(...this.prepareFetchOptions(endpoint, {
+ let [url, init] = this.prepareFetchOptions(endpoint, {
  method: "POST",
  json,
  timeoutMs: null,
  signal: payload?.signal,
- }));
+ });
+ if (this.onRequest != null)
+ init = await this.onRequest(url, init);
+ const response = await this.asyncCaller.fetch(url, init);
  const runMetadata = getRunMetadataFromResponse(response);
  if (runMetadata)
  payload?.onRunCreated?.(runMetadata);
@@ -939,7 +946,7 @@ class RunsClient extends BaseClient {
  options instanceof AbortSignal
  ? { signal: options }
  : options;
- const response = await this.asyncCaller.fetch(...this.prepareFetchOptions(threadId != null
+ let [url, init] = this.prepareFetchOptions(threadId != null
  ? `/threads/${threadId}/runs/${runId}/stream`
  : `/runs/${runId}/stream`, {
  method: "GET",
@@ -952,7 +959,10 @@ class RunsClient extends BaseClient {
  cancel_on_disconnect: opts?.cancelOnDisconnect ? "1" : "0",
  stream_mode: opts?.streamMode,
  },
- }));
+ });
+ if (this.onRequest != null)
+ init = await this.onRequest(url, init);
+ const response = await this.asyncCaller.fetch(url, init);
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const stream = (response.body || new ReadableStream({ start: (ctrl) => ctrl.close() }))
  .pipeThrough((0, sse_js_1.BytesLineDecoder)())
@@ -1172,14 +1182,17 @@ class UiClient extends BaseClient {
  }
  async getComponent(assistantId, agentName) {
  return UiClient.getOrCached(`${this.apiUrl}-${assistantId}-${agentName}`, async () => {
- const response = await this.asyncCaller.fetch(...this.prepareFetchOptions(`/ui/${assistantId}`, {
+ let [url, init] = this.prepareFetchOptions(`/ui/${assistantId}`, {
  headers: {
  Accept: "text/html",
  "Content-Type": "application/json",
  },
  method: "POST",
  json: { name: agentName },
- }));
+ });
+ if (this.onRequest != null)
+ init = await this.onRequest(url, init);
+ const response = await this.asyncCaller.fetch(url, init);
  return response.text();
  });
  }
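
Each of the streaming call sites above now runs the prepared request through the same hook before fetching, which is the `onRequest` fix from 0.1.1. A consumer-side sketch, assuming `onRequest` is accepted in the `Client` constructor options (as the `this.onRequest` references imply) with a `(url, init) => RequestInit` shape:

```ts
import { Client } from "@langchain/langgraph-sdk";

const client = new Client({
  apiUrl: "http://localhost:2024", // hypothetical local server
  onRequest: async (url, init) => {
    // With this fix, the hook also fires for streamed runs/threads/UI requests.
    console.log(`[langgraph-sdk] ${init.method ?? "GET"} ${url}`);
    return init;
  },
});

// Both streaming and non-streaming calls now pass through the hook, e.g.:
// client.runs.stream(threadId, assistantId, { input }) or client.threads.joinStream(threadId)
```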
package/dist/client.d.ts CHANGED
@@ -297,6 +297,10 @@ export declare class ThreadsClient<TStateType = DefaultValues, TUpdateType = TSt
  * Metadata to filter threads by.
  */
  metadata?: Metadata;
+ /**
+ * Filter by specific thread IDs.
+ */
+ ids?: string[];
  /**
  * Maximum number of threads to return.
  * Defaults to 10
package/dist/client.js CHANGED
@@ -556,6 +556,7 @@ export class ThreadsClient extends BaseClient {
  method: "POST",
  json: {
  metadata: query?.metadata ?? undefined,
+ ids: query?.ids ?? undefined,
  limit: query?.limit ?? 10,
  offset: query?.offset ?? 0,
  status: query?.status,
@@ -664,7 +665,7 @@ export class ThreadsClient extends BaseClient {
  async *joinStream(threadId, options
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  ) {
- const response = await this.asyncCaller.fetch(...this.prepareFetchOptions(`/threads/${threadId}/stream`, {
+ let [url, init] = this.prepareFetchOptions(`/threads/${threadId}/stream`, {
  method: "GET",
  headers: options?.lastEventId
  ? { "Last-Event-ID": options.lastEventId }
@@ -672,7 +673,10 @@ export class ThreadsClient extends BaseClient {
  params: options?.streamMode
  ? { stream_mode: options.streamMode }
  : undefined,
- }));
+ });
+ if (this.onRequest != null)
+ init = await this.onRequest(url, init);
+ const response = await this.asyncCaller.fetch(url, init);
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const stream = (response.body || new ReadableStream({ start: (ctrl) => ctrl.close() }))
  .pipeThrough(BytesLineDecoder())
@@ -714,12 +718,15 @@ export class RunsClient extends BaseClient {
  durability: payload?.durability,
  };
  const endpoint = threadId == null ? `/runs/stream` : `/threads/${threadId}/runs/stream`;
- const response = await this.asyncCaller.fetch(...this.prepareFetchOptions(endpoint, {
+ let [url, init] = this.prepareFetchOptions(endpoint, {
  method: "POST",
  json,
  timeoutMs: null,
  signal: payload?.signal,
- }));
+ });
+ if (this.onRequest != null)
+ init = await this.onRequest(url, init);
+ const response = await this.asyncCaller.fetch(url, init);
  const runMetadata = getRunMetadataFromResponse(response);
  if (runMetadata)
  payload?.onRunCreated?.(runMetadata);
@@ -931,7 +938,7 @@ export class RunsClient extends BaseClient {
  options instanceof AbortSignal
  ? { signal: options }
  : options;
- const response = await this.asyncCaller.fetch(...this.prepareFetchOptions(threadId != null
+ let [url, init] = this.prepareFetchOptions(threadId != null
  ? `/threads/${threadId}/runs/${runId}/stream`
  : `/runs/${runId}/stream`, {
  method: "GET",
@@ -944,7 +951,10 @@ export class RunsClient extends BaseClient {
  cancel_on_disconnect: opts?.cancelOnDisconnect ? "1" : "0",
  stream_mode: opts?.streamMode,
  },
- }));
+ });
+ if (this.onRequest != null)
+ init = await this.onRequest(url, init);
+ const response = await this.asyncCaller.fetch(url, init);
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const stream = (response.body || new ReadableStream({ start: (ctrl) => ctrl.close() }))
  .pipeThrough(BytesLineDecoder())
@@ -1162,14 +1172,17 @@ class UiClient extends BaseClient {
  }
  async getComponent(assistantId, agentName) {
  return UiClient.getOrCached(`${this.apiUrl}-${assistantId}-${agentName}`, async () => {
- const response = await this.asyncCaller.fetch(...this.prepareFetchOptions(`/ui/${assistantId}`, {
+ let [url, init] = this.prepareFetchOptions(`/ui/${assistantId}`, {
  headers: {
  Accept: "text/html",
  "Content-Type": "application/json",
  },
  method: "POST",
  json: { name: agentName },
- }));
+ });
+ if (this.onRequest != null)
+ init = await this.onRequest(url, init);
+ const response = await this.asyncCaller.fetch(url, init);
  return response.text();
  });
  }
@@ -138,10 +138,12 @@ class StreamManager {
  options.callbacks.onDebugEvent?.(data, { namespace });
  }
  if (event === "values") {
- // don't update values on interrupt values event
- if ("__interrupt__" in data)
- continue;
- this.setStreamValues(data);
+ if ("__interrupt__" in data) {
+ this.setStreamValues((prev) => ({ ...prev, ...data }));
+ }
+ else {
+ this.setStreamValues(data);
+ }
  }
  if (this.matchEventType("messages", event, data)) {
  const [serialized, metadata] = data;
@@ -135,10 +135,12 @@ export class StreamManager {
  options.callbacks.onDebugEvent?.(data, { namespace });
  }
  if (event === "values") {
- // don't update values on interrupt values event
- if ("__interrupt__" in data)
- continue;
- this.setStreamValues(data);
+ if ("__interrupt__" in data) {
+ this.setStreamValues((prev) => ({ ...prev, ...data }));
+ }
+ else {
+ this.setStreamValues(data);
+ }
  }
  if (this.matchEventType("messages", event, data)) {
  const [serialized, metadata] = data;
@@ -422,7 +422,18 @@ function useStream(options) {
  return branchContext.branchTree;
  },
  get interrupt() {
- // Don't show the interrupt if the stream is loading
+ if (values != null &&
+ "__interrupt__" in values &&
+ Array.isArray(values.__interrupt__)) {
+ const valueInterrupts = values.__interrupt__;
+ if (valueInterrupts.length === 0)
+ return { when: "breakpoint" };
+ if (valueInterrupts.length === 1)
+ return valueInterrupts[0];
+ // TODO: fix the typing of interrupts if multiple interrupts are returned
+ return valueInterrupts;
+ }
+ // If we're deferring to old interrupt detection logic, don't show the interrupt if the stream is loading
  if (stream.isLoading)
  return undefined;
  const interrupts = branchContext.threadHead?.tasks?.at(-1)?.interrupts;
@@ -418,7 +418,18 @@ export function useStream(options) {
  return branchContext.branchTree;
  },
  get interrupt() {
- // Don't show the interrupt if the stream is loading
+ if (values != null &&
+ "__interrupt__" in values &&
+ Array.isArray(values.__interrupt__)) {
+ const valueInterrupts = values.__interrupt__;
+ if (valueInterrupts.length === 0)
+ return { when: "breakpoint" };
+ if (valueInterrupts.length === 1)
+ return valueInterrupts[0];
+ // TODO: fix the typing of interrupts if multiple interrupts are returned
+ return valueInterrupts;
+ }
+ // If we're deferring to old interrupt detection logic, don't show the interrupt if the stream is loading
  if (stream.isLoading)
  return undefined;
  const interrupts = branchContext.threadHead?.tasks?.at(-1)?.interrupts;
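
Combined with the StreamManager change above, interrupt payloads arriving on the `values` event are now merged into the stream state and surfaced through `useStream(...).interrupt`, even with `fetchStateHistory: false`. A rough usage sketch (hook call shown outside a component body for brevity), assuming a hypothetical `agent` assistant and local server:

```ts
import { useStream } from "@langchain/langgraph-sdk/react";

// Inside a React component:
const stream = useStream({
  apiUrl: "http://localhost:2024", // hypothetical local server
  assistantId: "agent",            // hypothetical assistant/graph name
  fetchStateHistory: false,        // previously hid interrupts; they now come from stream values
});

if (stream.interrupt) {
  // An interrupt reached the client mid-stream; resume it with a human-provided value.
  console.log("interrupted with:", stream.interrupt);
  // stream.submit(undefined, { command: { resume: "approved" } });
}
```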
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@langchain/langgraph-sdk",
- "version": "0.1.0",
+ "version": "0.1.2",
  "description": "Client library for interacting with the LangGraph API",
  "type": "module",
  "scripts": {