langgraph-api 0.4.46__py3-none-any.whl → 0.4.48__py3-none-any.whl

This diff shows the changes between publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.

Potentially problematic release: this version of langgraph-api might be problematic.

@@ -4,7 +4,7 @@ from __future__ import annotations
 
 import asyncio
 import functools
-from collections.abc import AsyncIterator
+from collections.abc import AsyncIterator, Sequence
 from datetime import UTC
 from http import HTTPStatus
 from typing import Any
@@ -16,6 +16,7 @@ from google.protobuf.json_format import MessageToDict
 from google.protobuf.struct_pb2 import Struct  # type: ignore[import]
 from grpc import StatusCode
 from grpc.aio import AioRpcError
+from langgraph.pregel.debug import CheckpointPayload
 from langgraph_sdk.schema import Config
 from starlette.exceptions import HTTPException
 
@@ -25,7 +26,11 @@ from langgraph_api.schema import (
     Context,
     MetadataInput,
     OnConflictBehavior,
+    Thread,
+    ThreadSelectField,
+    ThreadStatus,
 )
+from langgraph_api.serde import json_dumpb, json_loads
 
 from .client import GrpcClient
 from .generated import core_api_pb2 as pb
@@ -57,7 +62,7 @@ def map_configurable(config: Config) -> Struct:
     extra_dict = {k: v for k, v in (config or {}).items() if k not in base_keys}
 
     kwargs: dict[str, Any] = dict(
-        tags=config.get("tags"),
+        tags=pb.Tags(values=config.get("tags")),
         recursion_limit=config.get("recursion_limit"),
         configurable=(
             dict_to_struct(config.get("configurable", {}))
@@ -120,8 +125,8 @@ def _runnable_config_to_user_dict(cfg: pb.Config | None) -> dict[str, Any]:
 
     out: dict[str, Any] = {}
     # tags
-    if cfg.tags:
-        out["tags"] = list(cfg.tags)
+    if cfg.tags and cfg.tags.values:
+        out["tags"] = list(cfg.tags.values)
     # recursion_limit (preserve presence of 0 if set)
     try:
         if cfg.HasField("recursion_limit"):
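
The two tag changes above are a matched pair: the writer now wraps the tag list in a pb.Tags message, and the reader unwraps it via cfg.tags.values. A minimal sketch of the round trip, written as if it ran inside the same module so the existing pb alias is in scope (it assumes core_api_pb2 defines Tags with a repeated values field, which is what the new code implies):

# Writer side, as in map_configurable: wrap the user-supplied tag list.
tags_msg = pb.Tags(values=["prod", "beta"])

# Reader side, mirroring the `cfg.tags and cfg.tags.values` guard in
# _runnable_config_to_user_dict before converting back to a plain list.
tags = list(tags_msg.values) if tags_msg.values else None
assert tags == ["prod", "beta"]
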
@@ -138,7 +143,6 @@ def _runnable_config_to_user_dict(cfg: pb.Config | None) -> dict[str, Any]:
         extra = orjson.loads(cfg.extra)
         if isinstance(extra, dict) and extra:
             out.update(extra)
-
     return out
 
 
@@ -162,6 +166,248 @@ def proto_to_assistant(proto_assistant: pb.Assistant) -> Assistant:
     }
 
 
+THREAD_STATUS_TO_PB = {
+    "idle": pb.ThreadStatus.THREAD_STATUS_IDLE,
+    "busy": pb.ThreadStatus.THREAD_STATUS_BUSY,
+    "interrupted": pb.ThreadStatus.THREAD_STATUS_INTERRUPTED,
+    "error": pb.ThreadStatus.THREAD_STATUS_ERROR,
+}
+
+THREAD_STATUS_FROM_PB = {
+    pb.ThreadStatus.THREAD_STATUS_IDLE: "idle",
+    pb.ThreadStatus.THREAD_STATUS_BUSY: "busy",
+    pb.ThreadStatus.THREAD_STATUS_INTERRUPTED: "interrupted",
+    pb.ThreadStatus.THREAD_STATUS_ERROR: "error",
+}
+
+THREAD_SORT_BY_MAP = {
+    "thread_id": pb.ThreadsSortBy.THREADS_SORT_BY_THREAD_ID,
+    "created_at": pb.ThreadsSortBy.THREADS_SORT_BY_CREATED_AT,
+    "updated_at": pb.ThreadsSortBy.THREADS_SORT_BY_UPDATED_AT,
+    "status": pb.ThreadsSortBy.THREADS_SORT_BY_STATUS,
+}
+
+THREAD_TTL_STRATEGY_MAP = {"delete": pb.ThreadTTLStrategy.THREAD_TTL_STRATEGY_DELETE}
+
+
+def _map_thread_status(status: ThreadStatus | None) -> pb.ThreadStatus | None:
+    if status is None:
+        return None
+    return THREAD_STATUS_TO_PB.get(status)
+
+
+def _map_threads_sort_by(sort_by: str | None) -> pb.ThreadsSortBy:
+    if not sort_by:
+        return pb.ThreadsSortBy.THREADS_SORT_BY_CREATED_AT
+    return THREAD_SORT_BY_MAP.get(
+        sort_by.lower(), pb.ThreadsSortBy.THREADS_SORT_BY_CREATED_AT
+    )
+
+
+def _map_thread_ttl(ttl: dict[str, Any] | None) -> pb.ThreadTTLConfig | None:
+    if not ttl:
+        return None
+
+    config = pb.ThreadTTLConfig()
+    strategy = ttl.get("strategy")
+    if strategy:
+        mapped_strategy = THREAD_TTL_STRATEGY_MAP.get(str(strategy).lower())
+        if mapped_strategy is None:
+            raise HTTPException(
+                status_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                detail=f"Invalid thread TTL strategy: {strategy}. Expected one of ['delete']",
+            )
+        config.strategy = mapped_strategy
+
+    ttl_value = ttl.get("ttl", ttl.get("default_ttl"))
+    if ttl_value is not None:
+        config.default_ttl = float(ttl_value)
+
+    sweep_interval = ttl.get("sweep_interval_minutes")
+    if sweep_interval is not None:
+        config.sweep_interval_minutes = int(sweep_interval)
+
+    return config
+
+
+def fragment_to_value(fragment: pb.Fragment | None) -> Any:
+    if fragment is None or not fragment.value:
+        return {}
+    try:
+        return json_loads(fragment.value)
+    except orjson.JSONDecodeError:
+        logger.warning("Failed to decode fragment", fragment=fragment.value)
+        return {}
+
+
+def _proto_interrupts_to_dict(
+    interrupts_map: dict[str, pb.Interrupts],
+) -> dict[str, list[dict[str, Any]]]:
+    out: dict[str, list[dict[str, Any]]] = {}
+    for key, interrupts in interrupts_map.items():
+        entries: list[dict[str, Any]] = []
+        for interrupt in interrupts.interrupts:
+            entry: dict[str, Any] = {
+                "id": interrupt.id or None,
+                "value": json_loads(interrupt.value),
+            }
+            if interrupt.when:
+                entry["when"] = interrupt.when
+            if interrupt.resumable:
+                entry["resumable"] = interrupt.resumable
+            if interrupt.ns:
+                entry["ns"] = list(interrupt.ns)
+            entries.append(entry)
+        out[key] = entries
+    return out
+
+
+def proto_to_thread(proto_thread: pb.Thread) -> Thread:
+    """Convert protobuf Thread to API dictionary format."""
+    thread_id = (
+        UUID(proto_thread.thread_id.value)
+        if proto_thread.HasField("thread_id")
+        else None
+    )
+    if thread_id is None:
+        raise HTTPException(
+            status_code=HTTPStatus.INTERNAL_SERVER_ERROR,
+            detail="Thread response missing thread_id",
+        )
+    created_at = (
+        proto_thread.created_at.ToDatetime(tzinfo=UTC)
+        if proto_thread.HasField("created_at")
+        else None
+    )
+    updated_at = (
+        proto_thread.updated_at.ToDatetime(tzinfo=UTC)
+        if proto_thread.HasField("updated_at")
+        else None
+    )
+    status = THREAD_STATUS_FROM_PB.get(proto_thread.status, "idle")
+
+    return {
+        "thread_id": thread_id,
+        "created_at": created_at,
+        "updated_at": updated_at,
+        "metadata": fragment_to_value(proto_thread.metadata),
+        "config": fragment_to_value(proto_thread.config),
+        "error": fragment_to_value(proto_thread.error),
+        "status": status,  # type: ignore[typeddict-item]
+        "values": fragment_to_value(proto_thread.values),
+        "interrupts": _proto_interrupts_to_dict(dict(proto_thread.interrupts)),
+    }
+
+
+def _checkpoint_metadata_to_pb(
+    metadata: dict[str, Any] | None,
+) -> pb.CheckpointMetadata | None:
+    if not metadata:
+        return None
+
+    message = pb.CheckpointMetadata()
+    source = metadata.get("source")
+    if source is not None:
+        if isinstance(source, str):
+            enum_key = f"CHECKPOINT_SOURCE_{source.upper()}"
+            try:
+                message.source = pb.CheckpointSource.Value(enum_key)
+            except ValueError:
+                logger.warning(
+                    "Unknown checkpoint source enum, defaulting to unspecified",
+                    source=source,
+                )
+        elif isinstance(source, int):
+            try:
+                message.source = pb.CheckpointSource(source)
+            except ValueError:
+                logger.warning(
+                    "Unknown checkpoint source value, defaulting to unspecified",
+                    source=source,
+                )
+    if step := metadata.get("step"):
+        message.step = int(step)
+    parents = metadata.get("parents")
+    if isinstance(parents, dict):
+        message.parents.update({str(k): str(v) for k, v in parents.items()})
+    return message
+
+
+def _checkpoint_tasks_to_pb(tasks: Sequence[dict[str, Any]]) -> list[pb.CheckpointTask]:
+    task_messages: list[pb.CheckpointTask] = []
+    for task in tasks:
+        message = pb.CheckpointTask(
+            id=task.get("id", ""),
+            name=task.get("name", ""),
+        )
+        if task.get("error"):
+            message.error = str(task["error"])
+        interrupts = task.get("interrupts") or []
+        for interrupt in interrupts:
+            message.interrupts.append(dict_to_struct(interrupt))
+        if task.get("state"):
+            message.state.CopyFrom(dict_to_struct(task["state"]))
+        task_messages.append(message)
+    return task_messages
+
+
+def checkpoint_to_pb(
+    checkpoint: CheckpointPayload | None,
+) -> pb.CheckpointPayload | None:
+    if checkpoint is None:
+        return None
+
+    message = pb.CheckpointPayload()
+
+    config = checkpoint.get("config")
+    if config:
+        message.config.CopyFrom(map_configurable(config))
+
+    metadata = _checkpoint_metadata_to_pb(checkpoint.get("metadata"))
+    if metadata:
+        message.metadata.CopyFrom(metadata)
+
+    values = checkpoint.get("values")
+    if values:
+        message.values.CopyFrom(dict_to_struct(values))
+
+    next_nodes = checkpoint.get("next")
+    if next_nodes:
+        message.next.extend([str(n) for n in next_nodes])
+
+    parent_config = checkpoint.get("parent_config")
+    if parent_config:
+        message.parent_config.CopyFrom(map_configurable(parent_config))
+
+    tasks = checkpoint.get("tasks")
+    if tasks:
+        message.tasks.extend(_checkpoint_tasks_to_pb(tasks))
+
+    return message
+
+
+def exception_to_struct(exception: BaseException | None) -> Struct | None:
+    if exception is None:
+        return None
+    try:
+        payload = orjson.loads(json_dumpb(exception))
+    except orjson.JSONDecodeError:
+        payload = {"error": type(exception).__name__, "message": str(exception)}
+    return dict_to_struct(payload)
+
+
+def _filter_thread_fields(
+    thread: Thread, select: list[ThreadSelectField] | None
+) -> dict[str, Any]:
+    if not select:
+        return thread
+    return {field: thread[field] for field in select if field in thread}
+
+
+def _normalize_uuid(value: UUID | str) -> str:
+    return str(value) if isinstance(value, UUID) else str(UUID(str(value)))
+
+
 def _map_sort_by(sort_by: str | None) -> pb.AssistantsSortBy:
     """Map string sort_by to protobuf enum."""
     if not sort_by:
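
Most of the helpers added in this hunk are gRPC mapping code, but two are pure Python and easy to sanity-check in isolation. A small illustrative sketch, written as if it ran inside the same module so the private helpers are in scope (the thread dict and UUID values are made up for the example):

from uuid import UUID

# Field projection: with no `select` list the thread passes through untouched,
# otherwise only the requested fields are kept.
thread = {"thread_id": UUID(int=1), "status": "idle", "metadata": {"owner": "a"}}
assert _filter_thread_fields(thread, None) == thread
assert _filter_thread_fields(thread, ["thread_id", "status"]) == {
    "thread_id": UUID(int=1),
    "status": "idle",
}

# UUID normalization accepts either a UUID instance or any string the UUID
# constructor accepts, and always returns the canonical dashed string form.
assert _normalize_uuid(UUID(int=1)) == "00000000-0000-0000-0000-000000000001"
assert _normalize_uuid("00000000000000000000000000000001") == (
    "00000000-0000-0000-0000-000000000001"
)
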
@@ -608,3 +854,334 @@ class Assistants(Authenticated):
             response = await client.assistants.Count(request)
 
         return int(response.count)
+
+
+def _json_contains(container: Any, subset: dict[str, Any]) -> bool:
+    if not subset:
+        return True
+    if not isinstance(container, dict):
+        return False
+    for key, value in subset.items():
+        if key not in container:
+            return False
+        candidate = container[key]
+        if isinstance(value, dict):
+            if not _json_contains(candidate, value):
+                return False
+        else:
+            if candidate != value:
+                return False
+    return True
+
+
+@grpc_error_guard
+class Threads(Authenticated):
+    """gRPC-based threads operations."""
+
+    resource = "threads"
+
+    @staticmethod
+    async def search(
+        conn,  # Not used in gRPC implementation
+        *,
+        ids: list[str] | list[UUID] | None = None,
+        metadata: MetadataInput,
+        values: MetadataInput,
+        status: ThreadStatus | None,
+        limit: int,
+        offset: int,
+        sort_by: str | None = None,
+        sort_order: str | None = None,
+        select: list[ThreadSelectField] | None = None,
+        ctx: Any = None,
+    ) -> tuple[AsyncIterator[Thread], int | None]:  # type: ignore[return-value]
+        metadata = metadata or {}
+        values = values or {}
+
+        auth_filters = await Threads.handle_event(
+            ctx,
+            "search",
+            {
+                "metadata": metadata,
+                "values": values,
+                "status": status,
+                "limit": limit,
+                "offset": offset,
+            },
+        )
+
+        if ids:
+            normalized_ids = [_normalize_uuid(thread_id) for thread_id in ids]
+            threads: list[Thread] = []
+            async with GrpcClient() as client:
+                for thread_id in normalized_ids:
+                    request = pb.GetThreadRequest(
+                        thread_id=pb.UUID(value=_normalize_uuid(thread_id)),
+                        filters=auth_filters or {},
+                    )
+                    response = await client.threads.Get(request)
+                    thread = proto_to_thread(response)
+
+                    if status and thread["status"] != status:
+                        continue
+                    if metadata and not _json_contains(thread["metadata"], metadata):
+                        continue
+                    if values and not _json_contains(
+                        thread.get("values") or {}, values
+                    ):
+                        continue
+                    threads.append(thread)
+
+            total = len(threads)
+            paginated = threads[offset : offset + limit]
+            cursor = offset + limit if total > offset + limit else None
+
+            async def generate_results():
+                for thread in paginated:
+                    yield _filter_thread_fields(thread, select)
+
+            return generate_results(), cursor
+
+        request_kwargs: dict[str, Any] = {
+            "filters": auth_filters or {},
+            "metadata": dict_to_struct(metadata),
+            "values": dict_to_struct(values),
+            "limit": limit,
+            "offset": offset,
+            "sort_by": _map_threads_sort_by(sort_by),
+            "sort_order": _map_sort_order(sort_order),
+        }
+
+        if status:
+            mapped_status = _map_thread_status(status)
+            if mapped_status is None:
+                raise HTTPException(
+                    status_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                    detail=f"Invalid thread status: {status}",
+                )
+            request_kwargs["status"] = mapped_status
+
+        if select:
+            request_kwargs["select"] = select
+
+        async with GrpcClient() as client:
+            response = await client.threads.Search(
+                pb.SearchThreadsRequest(**request_kwargs)
+            )
+
+        threads = [proto_to_thread(thread) for thread in response.threads]
+        cursor = offset + limit if len(threads) == limit else None
+
+        async def generate_results():
+            for thread in threads:
+                yield _filter_thread_fields(thread, select)
+
+        return generate_results(), cursor
+
+    @staticmethod
+    async def count(
+        conn,  # Not used
+        *,
+        metadata: MetadataInput,
+        values: MetadataInput,
+        status: ThreadStatus | None,
+        ctx: Any = None,
+    ) -> int:  # type: ignore[override]
+        metadata = metadata or {}
+        values = values or {}
+
+        auth_filters = await Threads.handle_event(
+            ctx,
+            "search",
+            {
+                "metadata": metadata,
+                "values": values,
+                "status": status,
+            },
+        )
+
+        request_kwargs: dict[str, Any] = {
+            "filters": auth_filters or {},
+            "metadata": dict_to_struct(metadata),
+            "values": dict_to_struct(values),
+        }
+        if status:
+            mapped_status = _map_thread_status(status)
+            if mapped_status is None:
+                raise HTTPException(
+                    status_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+                    detail=f"Invalid thread status: {status}",
+                )
+            request_kwargs["status"] = mapped_status
+
+        async with GrpcClient() as client:
+            response = await client.threads.Count(
+                pb.CountThreadsRequest(**request_kwargs)
+            )
+
+        return int(response.count)
+
+    @staticmethod
+    async def get(
+        conn,  # Not used
+        thread_id: UUID | str,
+        ctx: Any = None,
+    ) -> AsyncIterator[Thread]:  # type: ignore[return-value]
+        auth_filters = await Threads.handle_event(
+            ctx, "read", {"thread_id": str(thread_id)}
+        )
+
+        request = pb.GetThreadRequest(
+            thread_id=pb.UUID(value=_normalize_uuid(thread_id)),
+            filters=auth_filters or {},
+        )
+        async with GrpcClient() as client:
+            response = await client.threads.Get(request)
+
+        thread = proto_to_thread(response)
+
+        async def generate_result():
+            yield thread
+
+        return generate_result()
+
+    @staticmethod
+    async def put(
+        conn,  # Not used
+        thread_id: UUID | str,
+        *,
+        metadata: MetadataInput,
+        if_exists: OnConflictBehavior,
+        ttl: dict[str, Any] | None = None,
+        ctx: Any = None,
+    ) -> AsyncIterator[Thread]:  # type: ignore[return-value]
+        metadata = metadata or {}
+
+        auth_filters = await Threads.handle_event(
+            ctx,
+            "create",
+            {
+                "thread_id": str(thread_id),
+                "metadata": metadata,
+                "if_exists": if_exists,
+            },
+        )
+
+        request = pb.CreateThreadRequest(
+            thread_id=pb.UUID(value=_normalize_uuid(thread_id)),
+            filters=auth_filters or {},
+            if_exists=map_if_exists(if_exists),
+            metadata=dict_to_struct(metadata),
+        )
+        ttl_config = _map_thread_ttl(ttl)
+        if ttl_config is not None:
+            request.ttl.CopyFrom(ttl_config)
+
+        async with GrpcClient() as client:
+            response = await client.threads.Create(request)
+        thread = proto_to_thread(response)
+
+        async def generate_result():
+            yield thread
+
+        return generate_result()
+
+    @staticmethod
+    async def patch(
+        conn,  # Not used
+        thread_id: UUID | str,
+        *,
+        metadata: MetadataInput,
+        ttl: dict[str, Any] | None = None,
+        ctx: Any = None,
+    ) -> AsyncIterator[Thread]:  # type: ignore[return-value]
+        metadata = metadata or {}
+
+        auth_filters = await Threads.handle_event(
+            ctx,
+            "update",
+            {
+                "thread_id": str(thread_id),
+                "metadata": metadata,
+            },
+        )
+
+        request = pb.PatchThreadRequest(
+            thread_id=pb.UUID(value=_normalize_uuid(thread_id)),
+            filters=auth_filters or {},
+        )
+
+        if metadata:
+            request.metadata.CopyFrom(dict_to_struct(metadata))
+
+        ttl_config = _map_thread_ttl(ttl)
+        if ttl_config is not None:
+            request.ttl.CopyFrom(ttl_config)
+
+        async with GrpcClient() as client:
+            response = await client.threads.Patch(request)
+
+        thread = proto_to_thread(response)
+
+        async def generate_result():
+            yield thread
+
+        return generate_result()
+
+    @staticmethod
+    async def delete(
+        conn,  # Not used
+        thread_id: UUID | str,
+        ctx: Any = None,
+    ) -> AsyncIterator[UUID]:  # type: ignore[return-value]
+        auth_filters = await Threads.handle_event(
+            ctx,
+            "delete",
+            {
+                "thread_id": str(thread_id),
+            },
+        )
+
+        request = pb.DeleteThreadRequest(
+            thread_id=pb.UUID(value=_normalize_uuid(thread_id)),
+            filters=auth_filters or {},
+        )
+
+        async with GrpcClient() as client:
+            response = await client.threads.Delete(request)
+
+        deleted_id = UUID(response.value)
+
+        async def generate_result():
+            yield deleted_id
+
+        return generate_result()
+
+    @staticmethod
+    async def copy(
+        conn,  # Not used
+        thread_id: UUID | str,
+        ctx: Any = None,
+    ) -> AsyncIterator[Thread]:  # type: ignore[return-value]
+        auth_filters = await Threads.handle_event(
+            ctx,
+            "read",
+            {
+                "thread_id": str(thread_id),
+            },
+        )
+
+        request = pb.CopyThreadRequest(
+            thread_id=pb.UUID(value=_normalize_uuid(thread_id)),
+            filters=auth_filters or {},
+        )
+
+        async with GrpcClient() as client:
+            response = await client.threads.Copy(request)
+
+        thread = proto_to_thread(response)
+
+        async def generate_result():
+            yield thread
+
+        return generate_result()
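
The new Threads class mirrors the existing Assistants surface: each operation returns an async iterator (plus a pagination cursor for search), and conn is accepted only for signature compatibility with the non-gRPC backends. A hedged sketch of how a caller might drive it, assuming a running gRPC backend; the status/sort values and the None connection are illustrative, not prescribed by the diff:

import asyncio

async def list_idle_threads() -> None:
    # conn is unused by the gRPC implementation, so None is passed through;
    # ctx handling (auth filters) is left at its default here.
    results, cursor = await Threads.search(
        None,
        metadata={},
        values={},
        status="idle",
        limit=20,
        offset=0,
        sort_by="created_at",
        sort_order="desc",
    )
    async for thread in results:
        print(thread["thread_id"], thread["status"])
    print("next offset:", cursor)

asyncio.run(list_idle_threads())
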
@@ -1,7 +1,7 @@
 /// <reference types="./global.d.ts" />
 import "./src/preload.mjs";
 
-import { z } from "zod";
+import { z } from "zod/v3";
 import * as fs from "node:fs/promises";
 import * as path from "node:path";
 import { type GraphSchema, resolveGraph } from "./src/graph.mts";
@@ -6,7 +6,7 @@ import * as path from "node:path";
 import * as url from "node:url";
 import { createLogger, format, transports } from "winston";
 import { gracefulExit } from "exit-hook";
-import { z } from "zod";
+import { z } from "zod/v3";
 
 const logger = createLogger({
   level: "debug",
@@ -1,6 +1,6 @@
 /// <reference types="./global.d.ts" />
 
-import { z } from "zod";
+import { z } from "zod/v3";
 import { Context, Hono } from "hono";
 import { serve } from "@hono/node-server";
 import { zValidator } from "@hono/zod-validator";
@@ -9,16 +9,16 @@
   "dependencies": {
     "@hono/node-server": "^1.12.0",
     "@hono/zod-validator": "^0.2.2",
-    "@langchain/core": "^0.3.59",
-    "@langchain/langgraph": "^0.2.65",
-    "@langchain/langgraph-api": "~0.0.59",
-    "@langchain/langgraph-ui": "~0.0.59",
-    "@langchain/langgraph-checkpoint": "~0.0.18",
+    "@langchain/core": "^1.0.1",
+    "@langchain/langgraph": "^1.0.0",
+    "@langchain/langgraph-api": "^1.0.3",
+    "@langchain/langgraph-ui": "^1.0.3",
+    "@langchain/langgraph-checkpoint": "^1.0.0",
     "@types/json-schema": "^7.0.15",
    "@typescript/vfs": "^1.6.0",
    "dedent": "^1.5.3",
    "exit-hook": "^4.0.0",
-    "hono": "^4.9.7",
+    "hono": "^4.10.2",
    "p-queue": "^8.0.1",
    "p-retry": "^6.2.0",
    "tsx": "^4.19.3",
@@ -44,4 +44,4 @@
     "vitest": "^3.0.5"
   },
   "packageManager": "yarn@1.22.22+sha512.a6b2f7906b721bba3d67d4aff083df04dad64c399707841b7acf00f6b133b7ac24255f2652fa22ae3534329dc6180534e98d17432037ff6fd140556e2bb3137e"
-}
+}