langgraph-api 0.0.45__py3-none-any.whl → 0.0.46__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
langgraph_api/api/ui.py CHANGED
@@ -1,34 +1,39 @@
1
1
  import json
2
2
  import os
3
- from functools import lru_cache
4
- from pathlib import Path
5
3
  from typing import TypedDict
6
4
 
5
+ from anyio import open_file
7
6
  from orjson import loads
8
7
  from starlette.responses import Response
9
8
  from starlette.routing import BaseRoute, Mount
10
9
  from starlette.staticfiles import StaticFiles
11
10
 
11
+ from langgraph_api.js.ui import UI_PUBLIC_DIR, UI_SCHEMAS_FILE
12
12
  from langgraph_api.route import ApiRequest, ApiRoute
13
13
 
14
- # Get path to built UI assets
15
- UI_DIR = Path(os.path.dirname(__file__)).parent / "js" / "ui"
16
- SCHEMAS_FILE = Path(os.path.dirname(__file__)).parent / "js" / "client.ui.schemas.json"
17
-
18
14
 
19
15
  class UiSchema(TypedDict):
20
16
  name: str
21
17
  assets: list[str]
22
18
 
23
19
 
24
- @lru_cache(maxsize=1)
25
- def load_ui_schemas() -> dict[str, UiSchema]:
20
+ _UI_SCHEMAS_CACHE: dict[str, UiSchema] | None = None
21
+
22
+
23
+ async def load_ui_schemas() -> dict[str, UiSchema]:
26
24
  """Load and cache UI schema mappings from JSON file."""
27
- if not SCHEMAS_FILE.exists():
28
- return {}
25
+ global _UI_SCHEMAS_CACHE
26
+
27
+ if _UI_SCHEMAS_CACHE is not None:
28
+ return _UI_SCHEMAS_CACHE
29
+
30
+ if not UI_SCHEMAS_FILE.exists():
31
+ _UI_SCHEMAS_CACHE = {}
32
+ else:
33
+ async with await open_file(UI_SCHEMAS_FILE, mode="r") as f:
34
+ _UI_SCHEMAS_CACHE = loads(await f.read())
29
35
 
30
- with open(SCHEMAS_FILE) as f:
31
- return loads(f.read())
36
+ return _UI_SCHEMAS_CACHE
32
37
 
33
38
 
34
39
  async def handle_ui(request: ApiRequest) -> Response:
@@ -38,7 +43,7 @@ async def handle_ui(request: ApiRequest) -> Response:
38
43
  message = await request.json(schema=None)
39
44
 
40
45
  # Load UI file paths from schema
41
- schemas = load_ui_schemas()
46
+ schemas = await load_ui_schemas()
42
47
 
43
48
  if graph_id not in schemas:
44
49
  return Response(f"UI not found for graph '{graph_id}'", status_code=404)
@@ -64,5 +69,5 @@ async def handle_ui(request: ApiRequest) -> Response:
64
69
 
65
70
  ui_routes: list[BaseRoute] = [
66
71
  ApiRoute("/ui/{graph_id}", handle_ui, methods=["POST"]),
67
- Mount("/ui", StaticFiles(directory=UI_DIR, check_dir=False)),
72
+ Mount("/ui", StaticFiles(directory=UI_PUBLIC_DIR, check_dir=False)),
68
73
  ]
langgraph_api/cli.py CHANGED
@@ -129,6 +129,8 @@ def run_server(
129
129
  store: typing.Optional["StoreConfig"] = None,
130
130
  auth: AuthConfig | None = None,
131
131
  http: typing.Optional["HttpConfig"] = None,
132
+ ui: dict | None = None,
133
+ ui_config: dict | None = None,
132
134
  studio_url: str | None = None,
133
135
  disable_persistence: bool = False,
134
136
  allow_blocking: bool = False,
@@ -190,6 +192,9 @@ def run_server(
190
192
  LANGSMITH_LANGGRAPH_API_VARIANT="local_dev",
191
193
  LANGGRAPH_AUTH=json.dumps(auth) if auth else None,
192
194
  LANGGRAPH_HTTP=json.dumps(http) if http else None,
195
+ LANGGRAPH_UI=json.dumps(ui) if ui else None,
196
+ LANGGRAPH_UI_CONFIG=json.dumps(ui_config) if ui_config else None,
197
+ LANGGRAPH_UI_BUNDLER="true",
193
198
  LANGGRAPH_API_URL=local_url,
194
199
  LANGGRAPH_DISABLE_FILE_PERSISTENCE=str(disable_persistence).lower(),
195
200
  # If true, we will not raise on blocking IO calls (via blockbuster)
@@ -349,6 +354,8 @@ def main():
349
354
 
350
355
  graphs = config_data.get("graphs", {})
351
356
  auth = config_data.get("auth")
357
+ ui = config_data.get("ui")
358
+ ui_config = config_data.get("ui_config")
352
359
  run_server(
353
360
  args.host,
354
361
  args.port,
@@ -360,6 +367,8 @@ def main():
360
367
  wait_for_client=args.wait_for_client,
361
368
  env=config_data.get("env", None),
362
369
  auth=auth,
370
+ ui=ui,
371
+ ui_config=ui_config,
363
372
  )
364
373
 
365
374
 
langgraph_api/config.py CHANGED
@@ -325,3 +325,6 @@ USES_INDEXING = (
325
325
  USES_CUSTOM_APP = HTTP_CONFIG and HTTP_CONFIG.get("app")
326
326
 
327
327
  API_VARIANT = env("LANGSMITH_LANGGRAPH_API_VARIANT", cast=str, default="")
328
+
329
+ # UI
330
+ UI_USE_BUNDLER = env("LANGGRAPH_UI_BUNDLER", cast=bool, default=False)
langgraph_api/graph.py CHANGED
@@ -289,7 +289,7 @@ def _handle_exception(task: asyncio.Task) -> None:
289
289
 
290
290
 
291
291
  async def stop_remote_graphs() -> None:
292
- logger.info("Cancelling remote graphs")
292
+ logger.info("Shutting down remote graphs")
293
293
  for task in js_bg_tasks:
294
294
  task.cancel("Stopping remote graphs.")
295
295
 
@@ -9,7 +9,7 @@ import {
9
9
  resolveGraph,
10
10
  runGraphSchemaWorker,
11
11
  } from "./src/graph.mts";
12
- import { build } from "@langchain/langgraph-api/ui/bundler";
12
+ import { build } from "@langchain/langgraph-ui";
13
13
 
14
14
  const __dirname = new URL(".", import.meta.url).pathname;
15
15
 
@@ -19,6 +19,7 @@ async function main() {
19
19
  );
20
20
 
21
21
  const GRAPH_SCHEMAS: Record<string, Record<string, GraphSchema> | false> = {};
22
+ let failed = false;
22
23
 
23
24
  try {
24
25
  await Promise.all(
@@ -47,59 +48,21 @@ async function main() {
47
48
  );
48
49
  } catch (error) {
49
50
  console.error(`Error resolving graphs: ${error}`);
50
- process.exit(1);
51
+ failed = true;
51
52
  }
52
53
 
53
- const uiSpecs = z
54
- .record(z.string())
55
- .parse(JSON.parse(process.env.LANGGRAPH_UI || "{}"));
56
-
57
- const uiConfig = z
58
- .object({ shared: z.array(z.string()).optional() })
59
- .parse(JSON.parse(process.env.LANGGRAPH_UI_CONFIG || "{}"));
60
-
61
- if (Object.keys(uiSpecs).length > 0) {
62
- try {
63
- const schemas: Record<string, { assets: string[]; name: string }> = {};
64
- await Promise.all(
65
- Object.entries(uiSpecs).map(async ([graphId, userPath]) => {
66
- console.info(`[${graphId}]: Building UI`);
67
- const files = await build(graphId, {
68
- userPath,
69
- cwd: process.cwd(),
70
- config: uiConfig,
71
- });
72
- await Promise.all([
73
- ...files.map(async (item) => {
74
- const folder = path.resolve(__dirname, "ui", graphId);
75
- const source = path.resolve(folder, item.basename);
76
-
77
- await fs.mkdir(path.dirname(source), { recursive: true });
78
- await fs.writeFile(source, item.contents);
79
-
80
- schemas[graphId] ??= { assets: [], name: graphId };
81
-
82
- const relative = path.relative(
83
- path.resolve(__dirname, "ui", graphId),
84
- source
85
- );
86
-
87
- schemas[graphId].assets.push(relative);
88
- }),
89
- ]);
90
- })
91
- );
54
+ // Build Gen UI assets
55
+ try {
56
+ console.info("Checking for UI assets");
57
+ await fs.mkdir(path.resolve(__dirname, "ui"), { recursive: true });
92
58
 
93
- await fs.writeFile(
94
- path.resolve(__dirname, "client.ui.schemas.json"),
95
- JSON.stringify(schemas),
96
- { encoding: "utf-8" }
97
- );
98
- } catch (error) {
99
- console.error(`Error building UI: ${error}`);
100
- process.exit(1);
101
- }
59
+ await build({ output: path.resolve(__dirname, "ui") });
60
+ } catch (error) {
61
+ console.error(`Error building UI: ${error}`);
62
+ failed = true;
102
63
  }
64
+
65
+ if (failed) process.exit(1);
103
66
  }
104
67
 
105
68
  main();
@@ -24,7 +24,8 @@
24
24
  "undici": "^6.21.1",
25
25
  "uuid": "^10.0.0",
26
26
  "winston": "^3.17.0",
27
- "@langchain/langgraph-api": "~0.0.19",
27
+ "@langchain/langgraph-api": "~0.0.20",
28
+ "@langchain/langgraph-ui": "~0.0.20",
28
29
  "zod": "^3.23.8"
29
30
  },
30
31
  "resolutions": {
@@ -12,7 +12,7 @@ import { randomUUID } from "crypto";
12
12
 
13
13
  const sql = postgres(
14
14
  process.env.POSTGRES_URI ??
15
- "postgres://postgres:postgres@127.0.0.1:5433/postgres?sslmode=disable",
15
+ "postgres://postgres:postgres@127.0.0.1:5433/postgres?sslmode=disable"
16
16
  );
17
17
 
18
18
  const client = new Client<any>({
@@ -61,7 +61,7 @@ describe("assistants", () => {
61
61
 
62
62
  await client.assistants.delete(res.assistant_id);
63
63
  await expect(() => client.assistants.get(res.assistant_id)).rejects.toThrow(
64
- "HTTP 404: Not Found",
64
+ "HTTP 404: Not Found"
65
65
  );
66
66
  });
67
67
 
@@ -130,7 +130,7 @@ describe("assistants", () => {
130
130
 
131
131
  await client.assistants.delete(res.assistant_id);
132
132
  await expect(() => client.assistants.get(res.assistant_id)).rejects.toThrow(
133
- "HTTP 404: Not Found",
133
+ "HTTP 404: Not Found"
134
134
  );
135
135
  });
136
136
 
@@ -154,7 +154,7 @@ describe("assistants", () => {
154
154
  });
155
155
  expect(search.length).toBeGreaterThanOrEqual(1);
156
156
  expect(search.every((i) => i.assistant_id !== create.assistant_id)).toBe(
157
- true,
157
+ true
158
158
  );
159
159
  });
160
160
 
@@ -257,7 +257,7 @@ describe("threads copy", () => {
257
257
 
258
258
  const copiedThread = await client.threads.copy(thread.thread_id);
259
259
  const copiedThreadState = await client.threads.getState(
260
- copiedThread.thread_id,
260
+ copiedThread.thread_id
261
261
  );
262
262
 
263
263
  // check copied thread state matches expected output
@@ -338,11 +338,11 @@ describe("threads copy", () => {
338
338
 
339
339
  // test that copied thread has original as well as new values
340
340
  const copiedThreadState = await client.threads.getState<AgentState>(
341
- copiedThread.thread_id,
341
+ copiedThread.thread_id
342
342
  );
343
343
 
344
344
  const copiedThreadStateMessages = copiedThreadState.values.messages.map(
345
- (m) => m.content,
345
+ (m) => m.content
346
346
  );
347
347
  expect(copiedThreadStateMessages).toEqual([
348
348
  // original messages
@@ -359,7 +359,7 @@ describe("threads copy", () => {
359
359
 
360
360
  // test that the new run on the copied thread doesn't affect the original one
361
361
  const currentOriginalThreadState = await client.threads.getState(
362
- thread.thread_id,
362
+ thread.thread_id
363
363
  );
364
364
  expect(currentOriginalThreadState).toEqual(originalThreadState);
365
365
  });
@@ -378,7 +378,7 @@ describe("threads copy", () => {
378
378
  });
379
379
 
380
380
  const history = await client.threads.getHistory<AgentState>(
381
- thread.thread_id,
381
+ thread.thread_id
382
382
  );
383
383
  expect(history.length).toBe(5);
384
384
  expect(history[0].values.messages.length).toBe(4);
@@ -394,11 +394,11 @@ describe("threads copy", () => {
394
394
  });
395
395
 
396
396
  const fullHistory = await client.threads.getHistory<AgentState>(
397
- thread.thread_id,
397
+ thread.thread_id
398
398
  );
399
399
  const filteredHistory = await client.threads.getHistory<AgentState>(
400
400
  thread.thread_id,
401
- { metadata: runMetadata },
401
+ { metadata: runMetadata }
402
402
  );
403
403
 
404
404
  expect(fullHistory.length).toBe(10);
@@ -429,13 +429,13 @@ describe("threads copy", () => {
429
429
  });
430
430
 
431
431
  const copiedThreadState = await client.threads.getState<AgentState>(
432
- copyThread.thread_id,
432
+ copyThread.thread_id
433
433
  );
434
434
  expect(copiedThreadState.values.messages[0].content).toBe("bar");
435
435
 
436
436
  // test that updating the copied thread doesn't affect the original one
437
437
  const currentOriginalThreadState = await client.threads.getState(
438
- thread.thread_id,
438
+ thread.thread_id
439
439
  );
440
440
  expect(currentOriginalThreadState).toEqual(originalState);
441
441
  });
@@ -462,7 +462,7 @@ describe("runs", () => {
462
462
  input: { messages: [{ type: "human", content: "bar" }] },
463
463
  config: globalConfig,
464
464
  afterSeconds: 10,
465
- },
465
+ }
466
466
  );
467
467
 
468
468
  let runs = await client.runs.list(thread.thread_id);
@@ -486,7 +486,7 @@ describe("runs", () => {
486
486
  const stream = client.runs.stream(
487
487
  thread.thread_id,
488
488
  assistant.assistant_id,
489
- { input, streamMode: "values", config: globalConfig },
489
+ { input, streamMode: "values", config: globalConfig }
490
490
  );
491
491
 
492
492
  let runId: string | null = null;
@@ -534,7 +534,7 @@ describe("runs", () => {
534
534
  client.runs.wait(thread.thread_id, assistant.assistant_id, {
535
535
  input,
536
536
  config: { ...globalConfig, recursion_limit: 1 },
537
- }),
537
+ })
538
538
  ).rejects.toThrowError(/GraphRecursionError/);
539
539
  const threadUpdated = await client.threads.get(thread.thread_id);
540
540
  expect(threadUpdated.status).toBe("error");
@@ -549,7 +549,7 @@ describe("runs", () => {
549
549
  const values = await client.runs.wait(
550
550
  thread.thread_id,
551
551
  assistant.assistant_id,
552
- { input, config: globalConfig },
552
+ { input, config: globalConfig }
553
553
  );
554
554
 
555
555
  expect(Array.isArray((values as any).messages)).toBe(true);
@@ -566,7 +566,7 @@ describe("runs", () => {
566
566
  const stream = client.runs.stream(
567
567
  thread.thread_id,
568
568
  assistant.assistant_id,
569
- { input, streamMode: "updates", config: globalConfig },
569
+ { input, streamMode: "updates", config: globalConfig }
570
570
  );
571
571
 
572
572
  let runId: string | null = null;
@@ -606,20 +606,20 @@ describe("runs", () => {
606
606
  const stream = client.runs.stream(
607
607
  thread.thread_id,
608
608
  assistant.assistant_id,
609
- { input, streamMode: "events", config: globalConfig },
609
+ { input, streamMode: "events", config: globalConfig }
610
610
  );
611
611
 
612
612
  const events = await gatherIterator(stream);
613
613
  expect(new Set(events.map((i) => i.event))).toEqual(
614
- new Set(["metadata", "events"]),
614
+ new Set(["metadata", "events"])
615
615
  );
616
616
 
617
617
  expect(
618
618
  new Set(
619
619
  events
620
620
  .filter((i) => i.event === "events")
621
- .map((i) => (i.data as any).event),
622
- ),
621
+ .map((i) => (i.data as any).event)
622
+ )
623
623
  ).toEqual(
624
624
  new Set([
625
625
  "on_chain_start",
@@ -627,7 +627,7 @@ describe("runs", () => {
627
627
  "on_chat_model_end",
628
628
  "on_chat_model_start",
629
629
  "on_chat_model_stream",
630
- ]),
630
+ ])
631
631
  );
632
632
  });
633
633
 
@@ -640,7 +640,7 @@ describe("runs", () => {
640
640
  const stream = client.runs.stream(
641
641
  thread.thread_id,
642
642
  assistant.assistant_id,
643
- { input, streamMode: "messages", config: globalConfig },
643
+ { input, streamMode: "messages", config: globalConfig }
644
644
  );
645
645
 
646
646
  let runId: string | null = null;
@@ -681,7 +681,7 @@ describe("runs", () => {
681
681
  "messages/metadata",
682
682
  "messages/partial",
683
683
  "messages/complete",
684
- ]),
684
+ ])
685
685
  );
686
686
 
687
687
  expect(runId).not.toBeNull();
@@ -698,13 +698,13 @@ describe("runs", () => {
698
698
  const stream = await client.runs.stream(
699
699
  thread.thread_id,
700
700
  assistant.assistant_id,
701
- { input, streamMode: "messages-tuple", config: globalConfig },
701
+ { input, streamMode: "messages-tuple", config: globalConfig }
702
702
  );
703
703
 
704
704
  const chunks = await gatherIterator(stream);
705
705
  const runId = findLast(
706
706
  chunks,
707
- (i): i is FeedbackStreamEvent => i.event === "metadata",
707
+ (i): i is FeedbackStreamEvent => i.event === "metadata"
708
708
  )?.data.run_id;
709
709
  expect(runId).not.toBeNull();
710
710
 
@@ -735,7 +735,7 @@ describe("runs", () => {
735
735
  const stream = await client.runs.stream(
736
736
  thread.thread_id,
737
737
  assistant.assistant_id,
738
- { input, streamMode: ["messages", "values"], config: globalConfig },
738
+ { input, streamMode: ["messages", "values"], config: globalConfig }
739
739
  );
740
740
 
741
741
  const chunks = await gatherIterator(stream);
@@ -744,7 +744,7 @@ describe("runs", () => {
744
744
 
745
745
  const messages: BaseMessage[] = findLast(
746
746
  chunks,
747
- (i) => i.event === "values",
747
+ (i) => i.event === "values"
748
748
  )?.data.messages;
749
749
 
750
750
  expect(messages.length).toBe(4);
@@ -761,7 +761,7 @@ describe("runs", () => {
761
761
  "messages/partial",
762
762
  "messages/complete",
763
763
  "values",
764
- ]),
764
+ ])
765
765
  );
766
766
 
767
767
  const run = await client.runs.get(thread.thread_id, runId!);
@@ -786,7 +786,7 @@ describe("runs", () => {
786
786
  input,
787
787
  interruptBefore: ["tool"],
788
788
  config: globalConfig,
789
- }),
789
+ })
790
790
  );
791
791
 
792
792
  expect(chunks.filter((i) => i.event === "error").length).toBe(0);
@@ -806,7 +806,7 @@ describe("runs", () => {
806
806
  client.runs.stream(thread.thread_id, assistant.assistant_id, {
807
807
  input: null,
808
808
  config: globalConfig,
809
- }),
809
+ })
810
810
  );
811
811
 
812
812
  expect(chunks.filter((i) => i.event === "error").length).toBe(0);
@@ -818,7 +818,7 @@ describe("runs", () => {
818
818
 
819
819
  const threadAfterContinue = await client.threads.get(thread.thread_id);
820
820
  expect(threadAfterContinue.status).toBe("idle");
821
- },
821
+ }
822
822
  );
823
823
 
824
824
  it.concurrent("human in the loop - modification", async () => {
@@ -836,7 +836,7 @@ describe("runs", () => {
836
836
  input,
837
837
  interruptBefore: ["tool"],
838
838
  config: globalConfig,
839
- }),
839
+ })
840
840
  );
841
841
 
842
842
  expect(chunks.filter((i) => i.event === "error").length).toBe(0);
@@ -844,7 +844,7 @@ describe("runs", () => {
844
844
  // edit the last message
845
845
  const lastMessage = findLast(
846
846
  chunks,
847
- (i) => i.event === "values",
847
+ (i) => i.event === "values"
848
848
  )?.data.messages.at(-1);
849
849
  lastMessage.content = "modified";
850
850
 
@@ -861,7 +861,7 @@ describe("runs", () => {
861
861
  expect(modifiedThread.metadata?.modified).toBe(true);
862
862
 
863
863
  const stateAfterModify = await client.threads.getState<AgentState>(
864
- thread.thread_id,
864
+ thread.thread_id
865
865
  );
866
866
  expect(stateAfterModify.values.messages.at(-1)?.content).toBe("modified");
867
867
  expect(stateAfterModify.next).toEqual(["tool"]);
@@ -874,7 +874,7 @@ describe("runs", () => {
874
874
  client.runs.stream(thread.thread_id, assistant.assistant_id, {
875
875
  input: null,
876
876
  config: globalConfig,
877
- }),
877
+ })
878
878
  );
879
879
 
880
880
  const threadAfterContinue = await client.threads.get(thread.thread_id);
@@ -889,7 +889,7 @@ describe("runs", () => {
889
889
 
890
890
  // get the history
891
891
  const history = await client.threads.getHistory<AgentState>(
892
- thread.thread_id,
892
+ thread.thread_id
893
893
  );
894
894
  expect(history.length).toBe(6);
895
895
  expect(history[0].next.length).toBe(0);
@@ -935,7 +935,7 @@ describe("shared state", () => {
935
935
  const res1 = (await client.runs.wait(
936
936
  thread.thread_id,
937
937
  assistant.assistant_id,
938
- { input, config },
938
+ { input, config }
939
939
  )) as Awaited<Record<string, any>>;
940
940
  expect(res1.sharedStateValue).toBe(null);
941
941
 
@@ -943,7 +943,7 @@ describe("shared state", () => {
943
943
  const res2 = (await client.runs.wait(
944
944
  thread.thread_id,
945
945
  assistant.assistant_id,
946
- { input, config },
946
+ { input, config }
947
947
  )) as Awaited<Record<string, any>>;
948
948
  expect(res2.sharedStateValue).toBe(config.configurable.user_id);
949
949
  });
@@ -961,7 +961,7 @@ describe("shared state", () => {
961
961
  const res1 = (await client.runs.wait(
962
962
  thread.thread_id,
963
963
  assistant.assistant_id,
964
- { input, config: config1 },
964
+ { input, config: config1 }
965
965
  )) as Awaited<Record<string, any>>;
966
966
 
967
967
  // Run with the same thread id but a new config
@@ -969,7 +969,7 @@ describe("shared state", () => {
969
969
  const res2 = (await client.runs.wait(
970
970
  thread.thread_id,
971
971
  assistant.assistant_id,
972
- { input, config: config2 },
972
+ { input, config: config2 }
973
973
  )) as Awaited<Record<string, any>>;
974
974
 
975
975
  expect(res1.sharedStateValue).toBe(config1.configurable.user_id);
@@ -995,12 +995,12 @@ describe("shared state", () => {
995
995
  const res1 = (await client.runs.wait(
996
996
  thread.thread_id,
997
997
  assistant.assistant_id,
998
- { input, config },
998
+ { input, config }
999
999
  )) as Awaited<Record<string, any>>;
1000
1000
  expect(res1.sharedStateFromStoreConfig).toBeDefined();
1001
1001
  expect(res1.sharedStateFromStoreConfig.id).toBeDefined();
1002
1002
  expect(res1.sharedStateFromStoreConfig.id).toBe(
1003
- config.configurable.user_id,
1003
+ config.configurable.user_id
1004
1004
  );
1005
1005
  });
1006
1006
 
@@ -1025,12 +1025,12 @@ describe("shared state", () => {
1025
1025
  const res1 = (await client.runs.wait(
1026
1026
  thread.thread_id,
1027
1027
  assistant.assistant_id,
1028
- { input, config },
1028
+ { input, config }
1029
1029
  )) as Awaited<Record<string, any>>;
1030
1030
  expect(res1.sharedStateFromStoreConfig).toBeDefined();
1031
1031
  expect(res1.sharedStateFromStoreConfig.id).toBeDefined();
1032
1032
  expect(res1.sharedStateFromStoreConfig.id).toBe(
1033
- config.configurable.user_id,
1033
+ config.configurable.user_id
1034
1034
  );
1035
1035
 
1036
1036
  // Fetch data from store client
@@ -1122,10 +1122,10 @@ describe("StoreClient", () => {
1122
1122
  expect(searchResAfterPut.items[0].createdAt).toBeDefined();
1123
1123
  expect(searchResAfterPut.items[0].updatedAt).toBeDefined();
1124
1124
  expect(
1125
- new Date(searchResAfterPut.items[0].createdAt).getTime(),
1125
+ new Date(searchResAfterPut.items[0].createdAt).getTime()
1126
1126
  ).toBeLessThanOrEqual(Date.now());
1127
1127
  expect(
1128
- new Date(searchResAfterPut.items[0].updatedAt).getTime(),
1128
+ new Date(searchResAfterPut.items[0].updatedAt).getTime()
1129
1129
  ).toBeLessThanOrEqual(Date.now());
1130
1130
 
1131
1131
  const updatedValue = { foo: "baz" };
@@ -1142,7 +1142,7 @@ describe("StoreClient", () => {
1142
1142
  expect(searchResAfterUpdate.items[0].value).toEqual(updatedValue);
1143
1143
 
1144
1144
  expect(
1145
- new Date(searchResAfterUpdate.items[0].updatedAt).getTime(),
1145
+ new Date(searchResAfterUpdate.items[0].updatedAt).getTime()
1146
1146
  ).toBeGreaterThan(new Date(searchResAfterPut.items[0].updatedAt).getTime());
1147
1147
 
1148
1148
  const listResAfterPut = await client.store.listNamespaces();
@@ -1166,12 +1166,12 @@ describe("subgraphs", () => {
1166
1166
  const assistant = await client.assistants.create({ graphId: "nested" });
1167
1167
 
1168
1168
  expect(
1169
- Object.keys(await client.assistants.getSubgraphs(assistant.assistant_id)),
1169
+ Object.keys(await client.assistants.getSubgraphs(assistant.assistant_id))
1170
1170
  ).toEqual(["gp_two"]);
1171
1171
 
1172
1172
  const subgraphs = await client.assistants.getSubgraphs(
1173
1173
  assistant.assistant_id,
1174
- { recurse: true },
1174
+ { recurse: true }
1175
1175
  );
1176
1176
 
1177
1177
  expect(Object.keys(subgraphs)).toEqual(["gp_two", "gp_two|p_two"]);
@@ -1216,7 +1216,7 @@ describe("subgraphs", () => {
1216
1216
  messages: [{ role: "human", content: "SF", id: "initial-message" }],
1217
1217
  },
1218
1218
  interruptBefore: ["tool"],
1219
- }),
1219
+ })
1220
1220
  );
1221
1221
 
1222
1222
  for (const chunk of chunks) {
@@ -1285,7 +1285,7 @@ describe("subgraphs", () => {
1285
1285
  const stateRecursive = await client.threads.getState(
1286
1286
  thread.thread_id,
1287
1287
  undefined,
1288
- { subgraphs: true },
1288
+ { subgraphs: true }
1289
1289
  );
1290
1290
 
1291
1291
  expect(stateRecursive.next).toEqual(["weather_graph"]);
@@ -1343,7 +1343,7 @@ describe("subgraphs", () => {
1343
1343
  input: null,
1344
1344
  streamMode: ["values", "updates"],
1345
1345
  streamSubgraphs: true,
1346
- }),
1346
+ })
1347
1347
  );
1348
1348
 
1349
1349
  expect(chunksSubgraph.filter((i) => i.event === "error")).toEqual([]);
@@ -1352,7 +1352,7 @@ describe("subgraphs", () => {
1352
1352
  type ChunkType = (typeof chunksSubgraph)[number];
1353
1353
  const continueMessages = findLast(
1354
1354
  chunksSubgraph,
1355
- (i): i is ChunkType & { event: "values" } => i.event === "values",
1355
+ (i): i is ChunkType & { event: "values" } => i.event === "values"
1356
1356
  )?.data.messages;
1357
1357
 
1358
1358
  expect(continueMessages.length).toBe(2);
@@ -1500,7 +1500,7 @@ describe("subgraphs", () => {
1500
1500
 
1501
1501
  // run until the interrupt (same as before)
1502
1502
  let chunks = await gatherIterator(
1503
- client.runs.stream(thread.thread_id, assistant.assistant_id, { input }),
1503
+ client.runs.stream(thread.thread_id, assistant.assistant_id, { input })
1504
1504
  );
1505
1505
  expect(chunks.filter((i) => i.event === "error")).toEqual([]);
1506
1506
 
@@ -1532,7 +1532,7 @@ describe("subgraphs", () => {
1532
1532
  // get inner state after update
1533
1533
  const innerState = await client.threads.getState<{ city: string }>(
1534
1534
  thread.thread_id,
1535
- state.tasks[0].checkpoint ?? undefined,
1535
+ state.tasks[0].checkpoint ?? undefined
1536
1536
  );
1537
1537
 
1538
1538
  expect(innerState.values.city).toBe("LA");
@@ -1554,7 +1554,7 @@ describe("subgraphs", () => {
1554
1554
  chunks = await gatherIterator(
1555
1555
  client.runs.stream(thread.thread_id, assistant.assistant_id, {
1556
1556
  input: null,
1557
- }),
1557
+ })
1558
1558
  );
1559
1559
 
1560
1560
  expect(chunks.filter((i) => i.event === "error")).toEqual([]);
@@ -1639,7 +1639,7 @@ describe("subgraphs", () => {
1639
1639
  const stream = await gatherIterator(
1640
1640
  client.runs.stream(thread.thread_id, assistant.assistant_id, {
1641
1641
  command: { resume: "i want to resume" },
1642
- }),
1642
+ })
1643
1643
  );
1644
1644
 
1645
1645
  expect(stream.at(-1)?.event).toBe("values");
@@ -1656,7 +1656,7 @@ describe("errors", () => {
1656
1656
  client.runs.stream(thread.thread_id, assistant.assistant_id, {
1657
1657
  input: { messages: [] },
1658
1658
  streamMode: ["debug", "events"],
1659
- }),
1659
+ })
1660
1660
  );
1661
1661
 
1662
1662
  expect(stream.at(-1)).toMatchObject({
@@ -1675,7 +1675,7 @@ describe("errors", () => {
1675
1675
  const run = await client.runs.create(
1676
1676
  thread.thread_id,
1677
1677
  assistant.assistant_id,
1678
- { input: { messages: [] } },
1678
+ { input: { messages: [] } }
1679
1679
  );
1680
1680
 
1681
1681
  await client.runs.join(thread.thread_id, run.run_id);
@@ -1690,11 +1690,11 @@ describe("errors", () => {
1690
1690
  const run = await client.runs.create(
1691
1691
  thread.thread_id,
1692
1692
  assistant.assistant_id,
1693
- { input: { messages: [] } },
1693
+ { input: { messages: [] } }
1694
1694
  );
1695
1695
 
1696
1696
  const stream = await gatherIterator(
1697
- client.runs.joinStream(thread.thread_id, run.run_id),
1697
+ client.runs.joinStream(thread.thread_id, run.run_id)
1698
1698
  );
1699
1699
 
1700
1700
  expect(stream.at(-1)).toMatchObject({
@@ -1724,7 +1724,7 @@ describe("long running tasks", () => {
1724
1724
  {
1725
1725
  input: { messages: [], delay },
1726
1726
  config: globalConfig,
1727
- },
1727
+ }
1728
1728
  );
1729
1729
 
1730
1730
  await client.runs.join(thread.thread_id, run.run_id);
@@ -1740,7 +1740,7 @@ describe("long running tasks", () => {
1740
1740
  expect(runResult.values.messages).toMatchObject([
1741
1741
  { content: `finished after ${delay}ms` },
1742
1742
  ]);
1743
- },
1743
+ }
1744
1744
  );
1745
1745
  });
1746
1746
 
@@ -1765,7 +1765,7 @@ it("unusual newline termination characters", async () => {
1765
1765
  expect(history.length).toBe(1);
1766
1766
  expect(history[0].values.messages.length).toBe(1);
1767
1767
  expect(history[0].values.messages[0].content).toBe(
1768
- "Page break characters: \n\r\x0b\x0c\x1c\x1d\x1e\x85\u2028\u2029",
1768
+ "Page break characters: \n\r\x0b\x0c\x1c\x1d\x1e\x85\u2028\u2029"
1769
1769
  );
1770
1770
  });
1771
1771
 
@@ -1786,7 +1786,7 @@ describe.skip("command update state", () => {
1786
1786
  client.runs.stream(thread.thread_id, assistant.assistant_id, {
1787
1787
  command: { update: { keyOne: "value3", keyTwo: "value4" } },
1788
1788
  config: globalConfig,
1789
- }),
1789
+ })
1790
1790
  );
1791
1791
  expect(stream.filter((chunk) => chunk.event === "error")).toEqual([]);
1792
1792
 
@@ -1814,7 +1814,7 @@ describe.skip("command update state", () => {
1814
1814
  ],
1815
1815
  },
1816
1816
  config: globalConfig,
1817
- }),
1817
+ })
1818
1818
  );
1819
1819
 
1820
1820
  expect(stream.filter((chunk) => chunk.event === "error")).toEqual([]);
@@ -1840,14 +1840,14 @@ it("dynamic graph", async () => {
1840
1840
  client.runs.stream(null, defaultAssistant.assistant_id, {
1841
1841
  input: { messages: ["input"] },
1842
1842
  streamMode: ["updates"],
1843
- }),
1843
+ })
1844
1844
  );
1845
1845
 
1846
1846
  expect
1847
1847
  .soft(
1848
1848
  updates
1849
1849
  .filter((i) => i.event === "updates")
1850
- .flatMap((i) => Object.keys(i.data)),
1850
+ .flatMap((i) => Object.keys(i.data))
1851
1851
  )
1852
1852
  .toEqual(expect.arrayContaining(["default"]));
1853
1853
 
@@ -1856,14 +1856,14 @@ it("dynamic graph", async () => {
1856
1856
  input: { messages: ["input"] },
1857
1857
  config: { configurable: { nodeName: "runtime" } },
1858
1858
  streamMode: ["updates"],
1859
- }),
1859
+ })
1860
1860
  );
1861
1861
 
1862
1862
  expect
1863
1863
  .soft(
1864
1864
  updates
1865
1865
  .filter((i) => i.event === "updates")
1866
- .flatMap((i) => Object.keys(i.data)),
1866
+ .flatMap((i) => Object.keys(i.data))
1867
1867
  )
1868
1868
  .toEqual(expect.arrayContaining(["runtime"]));
1869
1869
 
@@ -1877,14 +1877,14 @@ it("dynamic graph", async () => {
1877
1877
  client.runs.stream(thread.thread_id, configAssistant.assistant_id, {
1878
1878
  input: { messages: ["input"], configurable: { nodeName: "assistant" } },
1879
1879
  streamMode: ["updates"],
1880
- }),
1880
+ })
1881
1881
  );
1882
1882
 
1883
1883
  expect
1884
1884
  .soft(
1885
1885
  updates
1886
1886
  .filter((i) => i.event === "updates")
1887
- .flatMap((i) => Object.keys(i.data)),
1887
+ .flatMap((i) => Object.keys(i.data))
1888
1888
  )
1889
1889
  .toEqual(expect.arrayContaining(["assistant"]));
1890
1890
 
@@ -1900,3 +1900,23 @@ it("dynamic graph", async () => {
1900
1900
  const state = await client.threads.getState(thread.thread_id);
1901
1901
  expect(state.values.messages).toEqual(["input", "assistant", "update"]);
1902
1902
  });
1903
+
1904
+ it("generative ui", async () => {
1905
+ const ui = await client["~ui"].getComponent("agent", "weather-component");
1906
+ expect(ui).toEqual(
1907
+ `<script src="//localhost:9123/ui/agent/entrypoint.js" onload='__LGUI_agent.render("weather-component", "{{shadowRootId}}")'></script>\n<link rel="stylesheet" href="//localhost:9123/ui/agent/entrypoint.css" />`
1908
+ );
1909
+
1910
+ const match = /src="(?<src>[^"]+)"/.exec(ui);
1911
+ let jsFile = match?.groups?.src;
1912
+ if (!jsFile) throw new Error("No JS file found");
1913
+ if (jsFile.startsWith("//")) jsFile = "http:" + jsFile;
1914
+
1915
+ // Used to manually pass runtime dependencies
1916
+ const js = await fetch(jsFile).then((a) => a.text());
1917
+ expect(js).contains(`globalThis[Symbol.for("LGUI_REQUIRE")]`);
1918
+
1919
+ await expect(() =>
1920
+ client["~ui"].getComponent("non-existent", "none")
1921
+ ).rejects.toThrow();
1922
+ });
@@ -36,6 +36,7 @@ services:
36
36
  RUN yarn install --frozen-lockfile
37
37
  ENV LANGSERVE_GRAPHS='{"agent":"./agent.mts:graph", "nested": "./nested.mts:graph", "weather": "./weather.mts:graph", "error": "./error.mts:graph", "delay": "./delay.mts:graph", "dynamic": "./dynamic.mts:graph"}'
38
38
  ENV LANGGRAPH_CONFIG='{"agent": {"configurable": {"model_name": "openai"}}}'
39
+ ENV LANGGRAPH_UI='{"agent": "./agent.ui.tsx"}'
39
40
  RUN tsx /api/langgraph_api/js/build.mts
40
41
  depends_on:
41
42
  langgraph-postgres:
@@ -3,5 +3,8 @@
3
3
  "dependencies": {
4
4
  "@langchain/core": "^0.3.40",
5
5
  "@langchain/langgraph": "^0.2.49"
6
+ },
7
+ "devDependencies": {
8
+ "tailwindcss": "^4.1.1"
6
9
  }
7
10
  }
@@ -206,6 +206,11 @@ supports-color@^7.1.0:
206
206
  dependencies:
207
207
  has-flag "^4.0.0"
208
208
 
209
+ tailwindcss@^4.1.1:
210
+ version "4.1.1"
211
+ resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-4.1.1.tgz#f2bae12b32687e63afa37c4f638b89520104e83f"
212
+ integrity sha512-QNbdmeS979Efzim2g/bEvfuh+fTcIdp1y7gA+sb6OYSW74rt7Cr7M78AKdf6HqWT3d5AiTb7SwTT3sLQxr4/qw==
213
+
209
214
  uuid@^10.0.0:
210
215
  version "10.0.0"
211
216
  resolved "https://registry.yarnpkg.com/uuid/-/uuid-10.0.0.tgz#5a95aa454e6e002725c79055fd42aaba30ca6294"
langgraph_api/js/ui.py ADDED
@@ -0,0 +1,93 @@
1
+ import asyncio
2
+ import os
3
+ import shutil
4
+ import sys
5
+ from pathlib import Path
6
+
7
+ import structlog
8
+
9
+ from langgraph_api.config import UI_USE_BUNDLER
10
+
11
logger = structlog.stdlib.get_logger(__name__)

# Strong references to background bundler tasks so they are not
# garbage-collected while still running.
bg_tasks: set[asyncio.Task] = set()

# Root directory holding bundled UI assets: a local .langgraph_api cache
# when the dev bundler is enabled, otherwise the prebuilt assets shipped
# alongside this module.
_ui_base = (
    os.path.abspath(".langgraph_api")
    if UI_USE_BUNDLER
    else os.path.dirname(__file__)
)
UI_ROOT_DIR = Path(_ui_base) / "ui"

# Static assets served over HTTP and the schema manifest written by the bundler.
UI_PUBLIC_DIR = UI_ROOT_DIR / "public"
UI_SCHEMAS_FILE = UI_ROOT_DIR / "schemas.json"
25
+
26
+
27
async def start_ui_bundler() -> None:
    """Spawn the background UI bundler task when dev-mode bundling is enabled.

    Does nothing unless the bundler is enabled and the ``LANGGRAPH_UI``
    environment variable is set (only configured by the in-memory server,
    see ``langgraph_api/cli.py``).
    """
    if not (UI_USE_BUNDLER and os.getenv("LANGGRAPH_UI")):
        return

    logger.info("Starting UI bundler")

    task = asyncio.create_task(_start_ui_bundler_process(), name="ui-bundler")
    task.add_done_callback(_handle_exception)
    # Hold a strong reference so the running task is not garbage-collected.
    bg_tasks.add(task)
39
+
40
+
41
async def stop_ui_bundler() -> None:
    """Request cancellation of every tracked background bundler task."""
    for bundler_task in list(bg_tasks):
        bundler_task.cancel()
44
+
45
+
46
async def _start_ui_bundler_process() -> None:
    """Run ``npx @langchain/langgraph-ui watch`` until this task is cancelled.

    The watcher is expected to run for the whole server lifetime, so even a
    clean exit of the subprocess is treated as a failure.

    Raises:
        FileNotFoundError: if ``npx`` cannot be found on PATH.
        RuntimeError: if the bundler subprocess exits on its own.
        asyncio.CancelledError: re-raised after terminating the subprocess.
    """
    npx_path = shutil.which("npx")
    if npx_path is None:
        raise FileNotFoundError(
            "To run LangGraph with UI support, Node.js and npm are required. "
            "Please install Node.js from https://nodejs.org/ (this will include npm and npx). "
            "After installation, restart your terminal and try again."
        )

    # Create the output directory up-front. exist_ok avoids the
    # check-then-create race of a separate exists() test, and parents=True
    # tolerates a missing .langgraph_api cache directory.
    UI_ROOT_DIR.mkdir(parents=True, exist_ok=True)

    process = None
    try:
        process = await asyncio.create_subprocess_exec(
            npx_path,
            "-y",
            "@langchain/langgraph-ui@latest",
            "watch",
            "-o",
            UI_ROOT_DIR,
            env=os.environ,
        )
        logger.info("Started UI bundler process [%d]", process.pid)

        # The watcher should never return; any exit code is an error.
        code = await process.wait()
        raise RuntimeError(f"UI bundler process exited with code {code}")

    except asyncio.CancelledError:
        logger.info(
            "Shutting down UI bundler process [%d]",
            process.pid if process is not None else -1,
        )
        if process is not None:
            try:
                process.terminate()
                await process.wait()
            except ProcessLookupError:
                # Subprocess already exited; nothing left to clean up.
                pass
        raise
83
+
84
+
85
def _handle_exception(task: asyncio.Task) -> None:
    """Done-callback for the bundler task: drop its reference and exit.

    The bundler is expected to run for the server's whole lifetime, so any
    completion — success, failure, or cancellation — ends the process.
    """
    try:
        # Surfaces the task's outcome: re-raises its exception, if any.
        task.result()
    except asyncio.CancelledError:
        # Cancellation is the orderly-shutdown path; don't treat as an error.
        pass
    finally:
        bg_tasks.discard(task)
        # if the task died either with exception or not, we should exit
        sys.exit(1)
@@ -31,6 +31,11 @@
31
31
  resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.6.0.tgz#ec6cd237440700bc23ca23087f513c75508958b0"
32
32
  integrity sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA==
33
33
 
34
+ "@commander-js/extra-typings@^13.0.0":
35
+ version "13.1.0"
36
+ resolved "https://registry.yarnpkg.com/@commander-js/extra-typings/-/extra-typings-13.1.0.tgz#026e29b04401c92fc4307223fbaadf1ff3e5551e"
37
+ integrity sha512-q5P52BYb1hwVWE6dtID7VvuJWrlfbCv4klj7BjUUOqMz4jbSZD4C9fJ9lRjL2jnBGTg+gDDlaXN51rkWcLk4fg==
38
+
34
39
  "@dabh/diagnostics@^2.0.2":
35
40
  version "2.0.3"
36
41
  resolved "https://registry.yarnpkg.com/@dabh/diagnostics/-/diagnostics-2.0.3.tgz#7f7e97ee9a725dffc7808d93668cc984e1dc477a"
@@ -198,20 +203,19 @@
198
203
  zod "^3.22.4"
199
204
  zod-to-json-schema "^3.22.3"
200
205
 
201
- "@langchain/langgraph-api@~0.0.19":
202
- version "0.0.19"
203
- resolved "https://registry.yarnpkg.com/@langchain/langgraph-api/-/langgraph-api-0.0.19.tgz#e4b8054a1f4e7943bc53787fb6335470b55f4b1d"
204
- integrity sha512-vsWnVUMVdr3i0ooI3Pcq4tbRKyRiDFGtUyyusCs2zZ/AHNIOV1ieHVTYubhZ04F+jNEwe0SwSMqtM0/Vg+cbTw==
206
+ "@langchain/langgraph-api@~0.0.20":
207
+ version "0.0.20"
208
+ resolved "https://registry.yarnpkg.com/@langchain/langgraph-api/-/langgraph-api-0.0.20.tgz#32e5cfa736ea938d53fa61285bf673901f37e029"
209
+ integrity sha512-MZA/6/th0oliK4UJb3SiEWwnMb/HO3sTr73uQxGoRJwjqCKJgYrnXHkU1eunclPcCeG86DanbEf/Rsn7UR4WeA==
205
210
  dependencies:
206
211
  "@babel/code-frame" "^7.26.2"
207
212
  "@hono/node-server" "^1.12.0"
208
213
  "@hono/zod-validator" "^0.2.2"
214
+ "@langchain/langgraph-ui" "0.0.20"
209
215
  "@types/json-schema" "^7.0.15"
210
216
  "@typescript/vfs" "^1.6.0"
211
217
  dedent "^1.5.3"
212
218
  dotenv "^16.4.7"
213
- esbuild "^0.25.0"
214
- esbuild-plugin-tailwindcss "^2.0.1"
215
219
  exit-hook "^4.0.0"
216
220
  hono "^4.5.4"
217
221
  langsmith "^0.2.15"
@@ -258,6 +262,17 @@
258
262
  p-retry "4"
259
263
  uuid "^9.0.0"
260
264
 
265
+ "@langchain/langgraph-ui@0.0.20", "@langchain/langgraph-ui@~0.0.20":
266
+ version "0.0.20"
267
+ resolved "https://registry.yarnpkg.com/@langchain/langgraph-ui/-/langgraph-ui-0.0.20.tgz#9075acef91c287cfafa81d4eef34a6a51ba1a4d1"
268
+ integrity sha512-x6fF6STKx0gsCAtEqEwyIib6klwLboqvpXsobHVeFOx6qGqOGsVKbq6rgCIKxQz8tZeSQVXslas4/20SFUjwKA==
269
+ dependencies:
270
+ "@commander-js/extra-typings" "^13.0.0"
271
+ commander "^13.0.0"
272
+ esbuild "^0.25.0"
273
+ esbuild-plugin-tailwindcss "^2.0.1"
274
+ zod "^3.23.8"
275
+
261
276
  "@langchain/langgraph@^0.2.49":
262
277
  version "0.2.54"
263
278
  resolved "https://registry.yarnpkg.com/@langchain/langgraph/-/langgraph-0.2.54.tgz#f57a9b471808c122ee5ae4506ed05cc75f1578bd"
@@ -729,6 +744,11 @@ commander@^10.0.1:
729
744
  resolved "https://registry.yarnpkg.com/commander/-/commander-10.0.1.tgz#881ee46b4f77d1c1dccc5823433aa39b022cbe06"
730
745
  integrity sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==
731
746
 
747
+ commander@^13.0.0:
748
+ version "13.1.0"
749
+ resolved "https://registry.yarnpkg.com/commander/-/commander-13.1.0.tgz#776167db68c78f38dcce1f9b8d7b8b9a488abf46"
750
+ integrity sha512-/rFeCpNJQbhSZjGVwO9RFV3xPqbnERS8MmIQzCtD/zl6gpJuV/bMLuN92oG3F7d8oDEHHRrujSXNUr8fpjntKw==
751
+
732
752
  console-table-printer@^2.12.1:
733
753
  version "2.12.1"
734
754
  resolved "https://registry.yarnpkg.com/console-table-printer/-/console-table-printer-2.12.1.tgz#4a9646537a246a6d8de57075d4fae1e08abae267"
langgraph_api/lifespan.py CHANGED
@@ -11,6 +11,7 @@ from langgraph_api.asyncio import SimpleTaskGroup, set_event_loop
11
11
  from langgraph_api.cron_scheduler import cron_scheduler
12
12
  from langgraph_api.graph import collect_graphs_from_env, stop_remote_graphs
13
13
  from langgraph_api.http import start_http_client, stop_http_client
14
+ from langgraph_api.js.ui import start_ui_bundler, stop_ui_bundler
14
15
  from langgraph_api.metadata import metadata_loop
15
16
  from langgraph_api.thread_ttl import thread_ttl_sweep_loop
16
17
  from langgraph_license.validation import get_license_status, plus_features_enabled
@@ -46,6 +47,7 @@ async def lifespan(
46
47
  await start_http_client()
47
48
  await start_pool()
48
49
  await collect_graphs_from_env(True)
50
+ await start_ui_bundler()
49
51
  try:
50
52
  async with SimpleTaskGroup(
51
53
  cancel=True, taskset=taskset, taskgroup_name="Lifespan"
@@ -66,6 +68,7 @@ async def lifespan(
66
68
 
67
69
  yield
68
70
  finally:
71
+ await stop_ui_bundler()
69
72
  await stop_remote_graphs()
70
73
  await stop_http_client()
71
74
  await stop_pool()
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: langgraph-api
3
- Version: 0.0.45
3
+ Version: 0.0.46
4
4
  Summary:
5
5
  License: Elastic-2.0
6
6
  Author: Nuno Campos
@@ -13,13 +13,13 @@ Classifier: Programming Language :: Python :: 3.12
13
13
  Classifier: Programming Language :: Python :: 3.13
14
14
  Requires-Dist: blockbuster (>=1.5.24,<2.0.0)
15
15
  Requires-Dist: cloudpickle (>=3.0.0,<4.0.0)
16
- Requires-Dist: cryptography (>=43.0.3,<44.0.0)
16
+ Requires-Dist: cryptography (>=42.0.0,<45.0)
17
17
  Requires-Dist: httpx (>=0.25.0)
18
18
  Requires-Dist: jsonschema-rs (>=0.20.0,<0.30)
19
19
  Requires-Dist: langchain-core (>=0.2.38,<0.4.0)
20
20
  Requires-Dist: langgraph (>=0.2.56,<0.4.0)
21
21
  Requires-Dist: langgraph-checkpoint (>=2.0.23,<3.0)
22
- Requires-Dist: langgraph-sdk (>=0.1.59,<0.2.0)
22
+ Requires-Dist: langgraph-sdk (>=0.1.61,<0.2.0)
23
23
  Requires-Dist: langsmith (>=0.1.63,<0.4.0)
24
24
  Requires-Dist: orjson (>=3.9.7)
25
25
  Requires-Dist: pyjwt (>=2.9.0,<3.0.0)
@@ -8,7 +8,7 @@ langgraph_api/api/openapi.py,sha256=f9gfmWN2AMKNUpLCpSgZuw_aeOF9jCXPdOtFT5PaTWM,
8
8
  langgraph_api/api/runs.py,sha256=qAXfgZjjaMBfwPnlnAogvemtKZZeshNSIMQqcW20tGs,18010
9
9
  langgraph_api/api/store.py,sha256=XNNZFRlvgReidq9u7mg-i7pjwz21BdP9Qw3Jr5Ra9Fk,5447
10
10
  langgraph_api/api/threads.py,sha256=QbAy7MRupWKDUhmC6_LKU2ExJbLJ6Z-CJG2gSpcMXtc,9163
11
- langgraph_api/api/ui.py,sha256=LiOZVewKOPbKEykCm30hCEaOA7vuS_Ti5hB32EEy4vw,2082
11
+ langgraph_api/api/ui.py,sha256=kdCQ-p0voxAqIFc72aqqzdPGH2v-yEBKzjRE6cUPvpU,2201
12
12
  langgraph_api/asyncio.py,sha256=h0eZ7aoDGnJpoxnHLZABVlj1jQ78UxjgiHntTmAEWek,8613
13
13
  langgraph_api/auth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
14
14
  langgraph_api/auth/custom.py,sha256=sPYkF-xTdGIn_WMSE2tfR_azg_azzXPRWwUfCye6GQ8,21401
@@ -18,20 +18,20 @@ langgraph_api/auth/langsmith/client.py,sha256=eKchvAom7hdkUXauD8vHNceBDDUijrFgdT
18
18
  langgraph_api/auth/middleware.py,sha256=jU8aDSIZHdzCGdifejRF7ndHkSjBtqIHcBwFIuUdHEA,1875
19
19
  langgraph_api/auth/noop.py,sha256=Bk6Nf3p8D_iMVy_OyfPlyiJp_aEwzL-sHrbxoXpCbac,586
20
20
  langgraph_api/auth/studio_user.py,sha256=FzFQRROKDlA9JjtBuwyZvk6Mbwno5M9RVYjDO6FU3F8,186
21
- langgraph_api/cli.py,sha256=X76TYnErQPjRkZhK-n6gthCUmtItkwqVrRPvwZswF5Y,12335
21
+ langgraph_api/cli.py,sha256=W8rf3v4rMwv9W-tK_9yqcc7waQezMMlpBoy64WSGLMw,12682
22
22
  langgraph_api/command.py,sha256=3O9v3i0OPa96ARyJ_oJbLXkfO8rPgDhLCswgO9koTFA,768
23
- langgraph_api/config.py,sha256=N6P_6jtr6rNGC7T5lSWoCbaTQhXbWi-F4YmH0NyPbAI,10141
23
+ langgraph_api/config.py,sha256=OJ6_9UCkPYNpUEB17EIU1nO0zQd3_T3G1Dr-ag0J8hE,10218
24
24
  langgraph_api/cron_scheduler.py,sha256=9yzbbGxzNgJdIg4ZT7yu2oTwT_wRuPxD1c2sbbd52xs,2630
25
25
  langgraph_api/errors.py,sha256=Bu_i5drgNTyJcLiyrwVE_6-XrSU50BHf9TDpttki9wQ,1690
26
- langgraph_api/graph.py,sha256=7NKAjtslNZq65J1tYhvJfpcP_P8JVTRra7vOfGuUWvc,18824
26
+ langgraph_api/graph.py,sha256=GnvG44jEGKpRMXZLp9QC__B_PGJteM7K3aJP-q22EvE,18827
27
27
  langgraph_api/http.py,sha256=gYbxxjY8aLnsXeJymcJ7G7Nj_yToOGpPYQqmZ1_ggfA,5240
28
28
  langgraph_api/js/.gitignore,sha256=l5yI6G_V6F1600I1IjiUKn87f4uYIrBAYU1MOyBBhg4,59
29
29
  langgraph_api/js/base.py,sha256=xkBp5bwRrbpMFaAMViEU-qIlnsJuu3X_G8sa1pqNZK0,227
30
- langgraph_api/js/build.mts,sha256=PZGeFTOhmRIBxkbFaaUOpTacqg1Z7kUkZWTU2l9a7FY,3077
30
+ langgraph_api/js/build.mts,sha256=oyP1GTnWAWl_dC4ny-h_9WxvWfZ8eJihGdrVdmMe81k,1834
31
31
  langgraph_api/js/client.mts,sha256=n6ecWwJBP2wp1jCaam55GxffH-YesVrV-7ZTml-k4Oc,26137
32
32
  langgraph_api/js/errors.py,sha256=Cm1TKWlUCwZReDC5AQ6SgNIVGD27Qov2xcgHyf8-GXo,361
33
33
  langgraph_api/js/global.d.ts,sha256=yDusqAyzVYhxfwqqcERUzucu2Pw9ma3-ug4DFyUvQfs,167
34
- langgraph_api/js/package.json,sha256=uA_2KV86yMJmqfQd9Xe4w2PPjH6DmttBpSxi1PlSq3U,1224
34
+ langgraph_api/js/package.json,sha256=oNcWe7UIoJtqzoIVr47px6-1n8KziqwEhyi4wBpzTQ0,1266
35
35
  langgraph_api/js/remote.py,sha256=zvjMINAYOFpniQ_cZ3MxIOjwKI83xsQzU7pPiKsoCmQ,24623
36
36
  langgraph_api/js/schema.py,sha256=7idnv7URlYUdSNMBXQcw7E4SxaPxCq_Oxwnlml8q5ik,408
37
37
  langgraph_api/js/src/graph.mts,sha256=otgztTNzNJpeF2IOrpNuuwbSbpAy4eFE5dHtUd7eQwU,3742
@@ -44,8 +44,8 @@ langgraph_api/js/src/utils/importMap.mts,sha256=pX4TGOyUpuuWF82kXcxcv3-8mgusRezO
44
44
  langgraph_api/js/src/utils/pythonSchemas.mts,sha256=98IW7Z_VP7L_CHNRMb3_MsiV3BgLE2JsWQY_PQcRR3o,685
45
45
  langgraph_api/js/src/utils/serde.mts,sha256=OuyyO9btvwWd55rU_H4x91dFEJiaPxL-lL9O6Zgo908,742
46
46
  langgraph_api/js/sse.py,sha256=lsfp4nyJyA1COmlKG9e2gJnTttf_HGCB5wyH8OZBER8,4105
47
- langgraph_api/js/tests/api.test.mts,sha256=GlCTOTuSGc2m2vdsw6PSZrDPgfcvJi0A5v4A9xGaZi4,60004
48
- langgraph_api/js/tests/compose-postgres.yml,sha256=pV1dW6aCgTTJ1WHSDeCqlVgFE9PbyWW5WbwrsiJcgoA,1772
47
+ langgraph_api/js/tests/api.test.mts,sha256=52DcdgJ7b_I53Zl5XyYnMbyA0iofgVoxcVcvQSN4BGo,60752
48
+ langgraph_api/js/tests/compose-postgres.yml,sha256=uMjmqdZ-rOImuLrfNaE9gsX-F-xlU35OVx7nWknpbdM,1827
49
49
  langgraph_api/js/tests/graphs/.gitignore,sha256=26J8MarZNXh7snXD5eTpV3CPFTht5Znv8dtHYCLNfkw,12
50
50
  langgraph_api/js/tests/graphs/agent.css,sha256=QgcOC0W7IBsrg4pSqqpull-WTgtULZfx_lF_5ZxLdag,23
51
51
  langgraph_api/js/tests/graphs/agent.mts,sha256=E9WMv0alMv0njUEECqEsqoRk9NXJUgXW7SyQJ3GOZ8k,5396
@@ -55,13 +55,14 @@ langgraph_api/js/tests/graphs/dynamic.mts,sha256=Wf_-keF7lkEfp_iyI45nlFGCeU8ARLQ
55
55
  langgraph_api/js/tests/graphs/error.mts,sha256=l4tk89449dj1BnEF_0ZcfPt0Ikk1gl8L1RaSnRfr3xo,487
56
56
  langgraph_api/js/tests/graphs/langgraph.json,sha256=iZL7XpAy3-QnCUHCRSj__Fxp3A-JPuYBJ_XQIxeyQfU,227
57
57
  langgraph_api/js/tests/graphs/nested.mts,sha256=4G7jSOSaFVQAza-_ARbK-Iai1biLlF2DIPDZXf7PLIY,1245
58
- langgraph_api/js/tests/graphs/package.json,sha256=Kv2kdlTNeWl00vYQAhngorQ6rLab4SMc7g1AgZslrHQ,118
58
+ langgraph_api/js/tests/graphs/package.json,sha256=SSYv9rN8XLeCKnVctKKwnksvy0RMh-Z9pC0j1lG17PM,174
59
59
  langgraph_api/js/tests/graphs/weather.mts,sha256=A7mLK3xW8h5B-ZyJNAyX2M2fJJwzPJzXs4DYesJwreQ,1655
60
- langgraph_api/js/tests/graphs/yarn.lock,sha256=i2AAIgXA3XBLM8-oU45wgUefCSG-Tne4ghWHmUCUKVk,10407
60
+ langgraph_api/js/tests/graphs/yarn.lock,sha256=PWHTM2Wg9mdiovQBC7yMXfb382ffGux8z7hoQE6oWAQ,10673
61
61
  langgraph_api/js/tests/parser.test.mts,sha256=dEC8KTqKygeb1u39ZvpPqCT4HtfPD947nLmITt2buxA,27883
62
62
  langgraph_api/js/tests/utils.mts,sha256=q1V9gvT63v95onlfK9W4iv3n9ZJO3h-0RD9TdDYuRyY,439
63
- langgraph_api/js/yarn.lock,sha256=JtMDtRVX9kr9lK0rbWRQsGHYbkQSmzNa4NwTunBv58U,82773
64
- langgraph_api/lifespan.py,sha256=rrmxgNrAxWa_D1H8AZTBKXoQoyiLeDv_C5z-byuxTWM,2788
63
+ langgraph_api/js/ui.py,sha256=XNT8iBcyT8XmbIqSQUWd-j_00HsaWB2vRTVabwFBkik,2439
64
+ langgraph_api/js/yarn.lock,sha256=m4erGgHd30Enxdu1wKnohnzotZiEDvXh-H83qitf_W4,83812
65
+ langgraph_api/lifespan.py,sha256=SfVZj0SQdsIfYwvN5s4dfxGMb7MMlGe40DghCGDFaTQ,2915
65
66
  langgraph_api/logging.py,sha256=JJIzbNIgLCN6ClQ3tA-Mm5ffuBGvpRDSZsEvnIlsuu4,3693
66
67
  langgraph_api/metadata.py,sha256=bAeN3NwibBuXUVPjOEbEUJMnhUXe_VdTGw508VNeav4,3655
67
68
  langgraph_api/middleware/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -96,8 +97,8 @@ langgraph_storage/retry.py,sha256=XmldOP4e_H5s264CagJRVnQMDFcEJR_dldVR1Hm5XvM,76
96
97
  langgraph_storage/store.py,sha256=JB9jZ87GE19MVN9wgl3-esgR2eIkeipws9q6qsPWkgc,3399
97
98
  logging.json,sha256=3RNjSADZmDq38eHePMm1CbP6qZ71AmpBtLwCmKU9Zgo,379
98
99
  openapi.json,sha256=P4Gy9hD4vXpGEjaMT1zLpw4ISNJ08RYB5BsVya1Wp_8,132113
99
- langgraph_api-0.0.45.dist-info/LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
100
- langgraph_api-0.0.45.dist-info/METADATA,sha256=xJGTqPMwi0HyPVcI5NVw0xDR3daSVgERYggEuHQR5j4,4167
101
- langgraph_api-0.0.45.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
102
- langgraph_api-0.0.45.dist-info/entry_points.txt,sha256=3EYLgj89DfzqJHHYGxPH4A_fEtClvlRbWRUHaXO7hj4,77
103
- langgraph_api-0.0.45.dist-info/RECORD,,
100
+ langgraph_api-0.0.46.dist-info/LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
101
+ langgraph_api-0.0.46.dist-info/METADATA,sha256=YyzsDHSIz-8lj9Np7lEguVJ-Poz_eBpgtQXm4MpvhDM,4165
102
+ langgraph_api-0.0.46.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
103
+ langgraph_api-0.0.46.dist-info/entry_points.txt,sha256=3EYLgj89DfzqJHHYGxPH4A_fEtClvlRbWRUHaXO7hj4,77
104
+ langgraph_api-0.0.46.dist-info/RECORD,,