langgraph-api 0.0.22__py3-none-any.whl → 0.0.23__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Note: the registry flags this version of langgraph-api as potentially problematic.

langgraph_api/queue.py CHANGED
@@ -8,7 +8,7 @@ import structlog
 from langgraph.pregel.debug import CheckpointPayload, TaskResultPayload

 from langgraph_api.auth.custom import SimpleUser, normalize_user
-from langgraph_api.config import BG_JOB_DELAY, STATS_INTERVAL_SECS
+from langgraph_api.config import BG_JOB_HEARTBEAT, STATS_INTERVAL_SECS
 from langgraph_api.errors import (
     UserInterrupt,
     UserRollback,
@@ -21,7 +21,7 @@ from langgraph_api.stream import (
     astream_state,
     consume,
 )
-from langgraph_api.utils import AsyncConnectionProto, set_auth_ctx, with_user
+from langgraph_api.utils import set_auth_ctx, with_user
 from langgraph_storage.database import connect
 from langgraph_storage.ops import Runs, Threads
 from langgraph_storage.retry import RETRIABLE_EXCEPTIONS
@@ -46,6 +46,7 @@ def ms(after: datetime, before: datetime) -> int:
 async def queue(concurrency: int, timeout: float):
     loop = asyncio.get_running_loop()
     last_stats_secs: int | None = None
+    last_sweep_secs: int | None = None
     semaphore = asyncio.Semaphore(concurrency)

     def cleanup(task: asyncio.Task):
@@ -95,8 +96,15 @@ async def queue(concurrency: int, timeout: float):

     await logger.ainfo(f"Starting {concurrency} background workers")
     try:
+        tup: tuple[Run, int] | None = None
         while True:
             try:
+                # check if we need to sweep runs
+                do_sweep = (
+                    last_sweep_secs is None
+                    or loop.time() - last_sweep_secs > BG_JOB_HEARTBEAT * 2
+                )
+                # check if we need to update stats
                 if calc_stats := (
                     last_stats_secs is None
                     or loop.time() - last_stats_secs > STATS_INTERVAL_SECS
@@ -109,16 +117,16 @@ async def queue(concurrency: int, timeout: float):
                         available=concurrency - active,
                         active=active,
                     )
+                # wait for semaphore to respect concurrency
                 await semaphore.acquire()
                 exit = AsyncExitStack()
-                conn = await exit.enter_async_context(connect())
-                if calc_stats:
-                    stats = await Runs.stats(conn)
-                    await logger.ainfo("Queue stats", **stats)
-                if tup := await exit.enter_async_context(Runs.next(conn)):
+                # skip the wait, if 1st time, or got a run last time
+                wait = tup is None and last_stats_secs is not None
+                # try to get a run, handle it
+                if tup := await exit.enter_async_context(Runs.next(wait=wait)):
                     run_, attempt_ = tup
                     task = asyncio.create_task(
-                        worker(timeout, exit, conn, run_, attempt_),
+                        worker(timeout, exit, run_, attempt_),
                         name=f"run-{run_['run_id']}-attempt-{attempt_}",
                     )
                     task.add_done_callback(cleanup)
@@ -126,13 +134,23 @@ async def queue(concurrency: int, timeout: float):
                 else:
                     semaphore.release()
                     await exit.aclose()
-                    await asyncio.sleep(BG_JOB_DELAY)
+                # run stats and sweep if needed
+                if calc_stats or do_sweep:
+                    async with connect() as conn:
+                        # update stats if needed
+                        if calc_stats:
+                            stats = await Runs.stats(conn)
+                            await logger.ainfo("Queue stats", **stats)
+                        # sweep runs if needed
+                        if do_sweep:
+                            last_sweep_secs = loop.time()
+                            run_ids = await Runs.sweep(conn)
+                            logger.info("Sweeped runs", run_ids=run_ids)
             except Exception as exc:
                 # keep trying to run the scheduler indefinitely
                 logger.exception("Background worker scheduler failed", exc_info=exc)
                 semaphore.release()
                 await exit.aclose()
-                await asyncio.sleep(BG_JOB_DELAY)
     finally:
         logger.info("Shutting down background workers")
         for task in WORKERS:
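Taken together, these queue.py hunks drop the fixed BG_JOB_DELAY sleep: the poll delay now lives inside Runs.next, and a heartbeat check drives stats and sweeping over a short-lived connection. Below is a simplified, non-authoritative sketch of the resulting loop; only names visible in this diff are used, the semaphore and error handling are elided, and the bookkeeping placement is approximate.

# Simplified sketch of the reworked scheduler loop; not the verbatim implementation.
import asyncio
from contextlib import AsyncExitStack

from langgraph_api.config import BG_JOB_HEARTBEAT, STATS_INTERVAL_SECS
from langgraph_storage.database import connect
from langgraph_storage.ops import Runs

async def scheduler_sketch(concurrency: int, timeout: float) -> None:
    loop = asyncio.get_running_loop()
    last_stats_secs = last_sweep_secs = None
    tup = None
    while True:
        # heartbeat-style checks replace the unconditional BG_JOB_DELAY sleep
        do_sweep = last_sweep_secs is None or loop.time() - last_sweep_secs > BG_JOB_HEARTBEAT * 2
        calc_stats = last_stats_secs is None or loop.time() - last_stats_secs > STATS_INTERVAL_SECS
        # only wait inside Runs.next when the previous pass found nothing
        wait = tup is None and last_stats_secs is not None
        exit = AsyncExitStack()
        if tup := await exit.enter_async_context(Runs.next(wait=wait)):
            run_, attempt_ = tup
            ...  # spawn worker(timeout, exit, run_, attempt_); the worker closes `exit`
        else:
            await exit.aclose()
        if calc_stats or do_sweep:
            async with connect() as conn:  # connection held only for stats/sweep
                if calc_stats:
                    last_stats_secs = loop.time()  # exact placement lives in unchanged code
                    stats = await Runs.stats(conn)
                if do_sweep:
                    last_sweep_secs = loop.time()
                    run_ids = await Runs.sweep(conn)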
@@ -187,7 +205,6 @@ async def set_auth_ctx_for_run(
 async def worker(
     timeout: float,
     exit: AsyncExitStack,
-    conn: AsyncConnectionProto,
     run: Run,
     attempt: int,
 ) -> WorkerResult:
@@ -201,7 +218,12 @@ async def worker(
     run_started_at = datetime.now(UTC)
     run_ended_at: str | None = None

-    async with set_auth_ctx_for_run(run["kwargs"]), Runs.enter(run_id) as done, exit:
+    async with (
+        connect() as conn,
+        set_auth_ctx_for_run(run["kwargs"]),
+        Runs.enter(run_id) as done,
+        exit,
+    ):
         temporary = run["kwargs"].get("temporary", False)
         run_created_at = run["created_at"].isoformat()
         await logger.ainfo(
@@ -323,9 +345,8 @@ async def worker(
                 run_ended_at=run_ended_at,
                 run_exec_ms=ms(datetime.now(UTC), run_started_at),
             )
+            await Runs.set_status(conn, run_id, "pending")
             raise
-            # Note we re-raise here, thus marking the run
-            # as available to be picked up by another worker
         except Exception as exc:
             exception = exc
             status = "error"
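Because each worker now owns its connection, the retriable-failure path re-queues the run explicitly instead of relying on the re-raise alone (the old comment to that effect is dropped). A hedged sketch of that path; execute_run is a hypothetical stand-in for the execution step, which this diff does not show:

try:
    await execute_run(run)  # hypothetical stand-in for the actual execution step
except RETRIABLE_EXCEPTIONS:
    # make the run visible to other workers again, then propagate for retry handling
    await Runs.set_status(conn, run_id, "pending")
    raise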
langgraph_api/schema.py CHANGED
@@ -188,5 +188,6 @@ class ThreadUpdateResponse(TypedDict):

 class QueueStats(TypedDict):
     n_pending: int
+    n_running: int
     max_age_secs: datetime | None
     med_age_secs: datetime | None
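For reference, the updated QueueStats shape as a standalone snippet; the field names and types are copied from the diff, and the example values are illustrative only:

from datetime import datetime
from typing import TypedDict

class QueueStats(TypedDict):
    n_pending: int
    n_running: int  # new in 0.0.23
    max_age_secs: datetime | None
    med_age_secs: datetime | None

stats: QueueStats = {
    "n_pending": 3,
    "n_running": 1,
    "max_age_secs": None,
    "med_age_secs": None,
}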
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: langgraph-api
-Version: 0.0.22
+Version: 0.0.23
 Summary:
 License: Elastic-2.0
 Author: Nuno Campos
@@ -17,7 +17,7 @@ langgraph_api/auth/middleware.py,sha256=qc7SbaFoeWaqxS1wbjZ2PPQ4iI2p9T0shWL7c6g0
 langgraph_api/auth/noop.py,sha256=Bk6Nf3p8D_iMVy_OyfPlyiJp_aEwzL-sHrbxoXpCbac,586
 langgraph_api/auth/studio_user.py,sha256=FzFQRROKDlA9JjtBuwyZvk6Mbwno5M9RVYjDO6FU3F8,186
 langgraph_api/cli.py,sha256=r7NJVIdTQ9mQ6_X01tk_I0ktlgn9odH0B8J53oaySz4,12022
-langgraph_api/config.py,sha256=vVANO2F7O94XvnXc0krwQrE8o7E-nEJtZQVGLbMoxZc,4272
+langgraph_api/config.py,sha256=mkuhRHjUHA5cjzcSy0nYYS6K28FZ_3_FRl9s0sg_030,4225
 langgraph_api/cron_scheduler.py,sha256=MW41-TSGUe5OuXycFTy7Ax7ypxHVAv-0ImLonRT8h8o,2629
 langgraph_api/errors.py,sha256=Bu_i5drgNTyJcLiyrwVE_6-XrSU50BHf9TDpttki9wQ,1690
 langgraph_api/graph.py,sha256=FombjYQkqj8jrXJFEVkl3m2UyFcq5nSVNswR2HoRsQY,16385
@@ -25,14 +25,14 @@ langgraph_api/http.py,sha256=XrbyxpjtfSvnaWWh5ZLGpgZmY83WoDCrP_1GPguNiXI,4712
 langgraph_api/js/.gitignore,sha256=qAah3Fq0HWAlfRj5ktZyC6QRQIsAolGLRGcRukA1XJI,33
 langgraph_api/js/base.py,sha256=BpE8-xkUp8HFPRjSKx1tfUQubvoV4jYl6OwZdre3veI,209
 langgraph_api/js/build.mts,sha256=43_LQDjmtEyV6Sj6IZo7Mp6Y8zrsBbMP_5sWXQ_xBsY,1372
-langgraph_api/js/client.mts,sha256=AWDSJHAFFtCKU8JkSg0NEPt85jtovk64SYDesIALtW0,22684
+langgraph_api/js/client.mts,sha256=5cJgJLmER3fpf-QQU8v2cjR593ROXcQk6vXd71cntsw,22772
 langgraph_api/js/client.new.mts,sha256=FskIvfdS4MvzoD07fJuHMoz26Puh_Mkwh2Lh6jKFj60,23658
 langgraph_api/js/errors.py,sha256=Cm1TKWlUCwZReDC5AQ6SgNIVGD27Qov2xcgHyf8-GXo,361
 langgraph_api/js/global.d.ts,sha256=zR_zLYfpzyPfxpEFth5RgZoyfGulIXyZYPRf7cU0K0Y,106
-langgraph_api/js/package.json,sha256=Z7Yluyxl4WbrcagLzBNB2ZL0cihX9dWkUn08vYrLoUw,840
+langgraph_api/js/package.json,sha256=AmpkMzr96yF9xZ7bCrSApF-j7PJH6WeALn9HpPGBnmQ,840
 langgraph_api/js/remote.py,sha256=D9cqcEgXau-fm_trpNwCHMra5BXntgUa469lgs_a9JQ,622
 langgraph_api/js/remote_new.py,sha256=T_Vr8459bax1C9xxqz_ZYmGivq5Vhspg2Iu9TL0Qc-Q,22707
-langgraph_api/js/remote_old.py,sha256=2a-3ooAYUZs8aPsfnXafbBd4pP7lRmokoU7TiO7P9Js,22546
+langgraph_api/js/remote_old.py,sha256=wC0wpYR4xv6Xqqq1PboIluViVeJ9ETFVTUEl53FSZyo,22578
 langgraph_api/js/schema.py,sha256=7idnv7URlYUdSNMBXQcw7E4SxaPxCq_Oxwnlml8q5ik,408
 langgraph_api/js/server_sent_events.py,sha256=DLgXOHauemt7706vnfDUCG1GI3TidKycSizccdz9KgA,3702
 langgraph_api/js/src/graph.mts,sha256=J-M-vYHj1G5tyonPUym3ePNGqGYtspPCrZOgr92xKb4,3171
@@ -44,7 +44,7 @@ langgraph_api/js/src/schema/types.template.mts,sha256=oAwnwWOgkEAQ3EouB8dG5Mdg4H
 langgraph_api/js/src/utils/importMap.mts,sha256=pX4TGOyUpuuWF82kXcxcv3-8mgusRezOGe6Uklm2O5A,1644
 langgraph_api/js/src/utils/pythonSchemas.mts,sha256=98IW7Z_VP7L_CHNRMb3_MsiV3BgLE2JsWQY_PQcRR3o,685
 langgraph_api/js/src/utils/serde.mts,sha256=OuyyO9btvwWd55rU_H4x91dFEJiaPxL-lL9O6Zgo908,742
-langgraph_api/js/tests/api.test.mts,sha256=qJdISmuKo7AWZmR4thY9eq8h4EL5bWKlhI5x7LCJ8dQ,56919
+langgraph_api/js/tests/api.test.mts,sha256=fiQDSdfJj6DhOqQGghFq0RknIYnTPQ7nH_QfBjs7kAk,56931
 langgraph_api/js/tests/compose-postgres.yml,sha256=pbNfeqVUqhWILBuUdwAgQOYsVU_fgkCVm0YlTgU8og8,1721
 langgraph_api/js/tests/graphs/.gitignore,sha256=26J8MarZNXh7snXD5eTpV3CPFTht5Znv8dtHYCLNfkw,12
 langgraph_api/js/tests/graphs/agent.mts,sha256=E9WMv0alMv0njUEECqEsqoRk9NXJUgXW7SyQJ3GOZ8k,5396
@@ -55,9 +55,9 @@ langgraph_api/js/tests/graphs/nested.mts,sha256=4G7jSOSaFVQAza-_ARbK-Iai1biLlF2D
 langgraph_api/js/tests/graphs/package.json,sha256=U1e03aGujf7LHM9j_01fwbD7hcC59kxJM4ZvUECVF3o,118
 langgraph_api/js/tests/graphs/weather.mts,sha256=A7mLK3xW8h5B-ZyJNAyX2M2fJJwzPJzXs4DYesJwreQ,1655
 langgraph_api/js/tests/graphs/yarn.lock,sha256=N-LBwqNAve0B_VvVZvSAGVkCCo9AQO6VDM-AznGVVLQ,10407
-langgraph_api/js/tests/parser.test.mts,sha256=3zAbboUNhI-cY3hj4Ssr7J-sQXCBTeeI1ItrkG0Ftuk,26257
+langgraph_api/js/tests/parser.test.mts,sha256=dEC8KTqKygeb1u39ZvpPqCT4HtfPD947nLmITt2buxA,27883
 langgraph_api/js/tests/utils.mts,sha256=2kTybJ3O7Yfe1q3ehDouqV54ibXkNzsPZ_wBZLJvY-4,421
-langgraph_api/js/yarn.lock,sha256=slQ8Gr_G-_8tyLgjhE6ABtvVAnv0PTZzei5Eo6Xh-1g,105950
+langgraph_api/js/yarn.lock,sha256=SaJYDS1TmC5J94zGeoi2gHC3_lnF-h8xQcVND6wWd1c,102827
 langgraph_api/lifespan.py,sha256=Uj7NV-NqxxD1fgx_umM9pVqclcy-VlqrIxDljyj2he0,1820
 langgraph_api/logging.py,sha256=tiDNrEFwqaIdL5ywZv908OXlzzfXsPCws9GXeoFtBV8,3367
 langgraph_api/metadata.py,sha256=wvNCHvejBiO_VaAF12kbdlNDn1QXH4Fh_buvHD9J60U,3276
@@ -67,9 +67,9 @@ langgraph_api/middleware/private_network.py,sha256=eYgdyU8AzU2XJu362i1L8aSFoQRiV
 langgraph_api/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langgraph_api/models/run.py,sha256=1TzTmagDXFQD_LhIMRdguZHmrPSzztq1wiMjF63d2fc,9843
 langgraph_api/patch.py,sha256=94ddcTSZJe22JcpjxiSNjFZdYVnmeoWjk4IX4iBSoyk,1249
-langgraph_api/queue.py,sha256=qRuM09mz8o7CkrBIwg-9lV3SW0TehdVGgeXvHc_adYk,13647
+langgraph_api/queue.py,sha256=2sw9HB2cYVBhYUNA3F7lcJAgRjhQJXhA_HNGhFt2BW8,14508
 langgraph_api/route.py,sha256=fM4qYCGbmH0a3_cV8uKocb1sLklehxO6HhdRXqLK6OM,4421
-langgraph_api/schema.py,sha256=4aZCFY-dxd_nTot71bdcd9S8QCIgKajuRyj0p2QfgJ4,5291
+langgraph_api/schema.py,sha256=mgam5lpuqZnrNWMm_0nQ95683gCnCvQNRKbiuFj7z8Q,5310
 langgraph_api/serde.py,sha256=VoJ7Z1IuqrQGXFzEP1qijAITtWCrmjtVqlCRuScjXJI,3533
 langgraph_api/server.py,sha256=mKJWBuHN5HFHCQIL_FtH04wyFabR4mR6WmQcioI-_Ns,2071
 langgraph_api/sse.py,sha256=2wNodCOP2eg7a9mpSu0S3FQ0CHk2BBV_vv0UtIgJIcc,4034
@@ -83,15 +83,15 @@ langgraph_license/validation.py,sha256=Uu_G8UGO_WTlLsBEY0gTVWjRR4czYGfw5YAD3HLZo
 langgraph_storage/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 langgraph_storage/checkpoint.py,sha256=V4t2GwYEJdPCHbhq_4Udhlv0TWKDzlMu_rlNPdTDc50,3589
 langgraph_storage/database.py,sha256=Nr5zE9Fur3-tESkqe7xNXMf2QlBuw3H0CUie7jVa6Q4,6003
-langgraph_storage/ops.py,sha256=Pv829coa6wCYKKdYBR0IZaKMtkgw1g9EmOIayI_GJXg,68242
+langgraph_storage/ops.py,sha256=Lp6ICiPq-Td8mE7dxBRRvkw6vLh1-hkIJBw-b6XrN84,68533
 langgraph_storage/queue.py,sha256=6cTZ0ubHu3S1T43yxHMVOwsQsDaJupByiU0sTUFFls8,3261
 langgraph_storage/retry.py,sha256=uvYFuXJ-T6S1QY1ZwkZHyZQbsvS-Ab68LSbzbUUSI2E,696
 langgraph_storage/store.py,sha256=D-p3cWc_umamkKp-6Cz3cAriSACpvM5nxUIvND6PuxE,2710
 langgraph_storage/ttl_dict.py,sha256=FlpEY8EANeXWKo_G5nmIotPquABZGyIJyk6HD9u6vqY,1533
 logging.json,sha256=3RNjSADZmDq38eHePMm1CbP6qZ71AmpBtLwCmKU9Zgo,379
 openapi.json,sha256=L1Ap1o6oAg2vKqjWohq4gl1-GpxezjlddS_oZHjS-xE,125227
-langgraph_api-0.0.22.dist-info/LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
-langgraph_api-0.0.22.dist-info/METADATA,sha256=7fHD4Lr32sZxoJaG4mBi-sUDq4WF6l-6cfK6BGHi-jI,4038
-langgraph_api-0.0.22.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-langgraph_api-0.0.22.dist-info/entry_points.txt,sha256=3EYLgj89DfzqJHHYGxPH4A_fEtClvlRbWRUHaXO7hj4,77
-langgraph_api-0.0.22.dist-info/RECORD,,
+langgraph_api-0.0.23.dist-info/LICENSE,sha256=ZPwVR73Biwm3sK6vR54djCrhaRiM4cAD2zvOQZV8Xis,3859
+langgraph_api-0.0.23.dist-info/METADATA,sha256=mwWP3njPolhdWlLEDl7uQZMF-ptzkDXFRfU_BRaDSF0,4038
+langgraph_api-0.0.23.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+langgraph_api-0.0.23.dist-info/entry_points.txt,sha256=3EYLgj89DfzqJHHYGxPH4A_fEtClvlRbWRUHaXO7hj4,77
+langgraph_api-0.0.23.dist-info/RECORD,,
langgraph_storage/ops.py CHANGED
@@ -1160,63 +1160,65 @@ class Runs(Authenticated):

     @asynccontextmanager
     @staticmethod
-    async def next(conn: InMemConnectionProto) -> AsyncIterator[tuple[Run, int] | None]:
+    async def next(wait: bool) -> AsyncIterator[tuple[Run, int] | None]:
         """Get the next run from the queue, and the attempt number.
         1 is the first attempt, 2 is the first retry, etc."""
         now = datetime.now(UTC)

-        pending_runs = sorted(
-            [
-                run
-                for run in conn.store["runs"]
-                if run["status"] == "pending" and run.get("created_at", now) < now
-            ],
-            key=lambda x: x.get("created_at", datetime.min),
-        )
+        if wait:
+            await asyncio.sleep(0.5)
+        else:
+            await asyncio.sleep(0)

-        if not pending_runs:
-            yield None
-            return
+        async with connect() as conn:
+            pending_runs = sorted(
+                [
+                    run
+                    for run in conn.store["runs"]
+                    if run["status"] == "pending" and run.get("created_at", now) < now
+                ],
+                key=lambda x: x.get("created_at", datetime.min),
+            )

-        # Try to lock and get the first available run
-        for run in pending_runs:
-            run_id = run["run_id"]
-            thread_id = run["thread_id"]
-            lock = conn.locks[thread_id]
-            acquired = lock.acquire(blocking=False)
-            if not acquired:
-                continue
-            try:
-                if run["status"] != "pending":
+            if not pending_runs:
+                yield None
+                return
+
+            # Try to lock and get the first available run
+            for run in pending_runs:
+                run_id = run["run_id"]
+                thread_id = run["thread_id"]
+                lock = conn.locks[thread_id]
+                acquired = lock.acquire(blocking=False)
+                if not acquired:
                     continue
+                try:
+                    if run["status"] != "pending":
+                        continue

-                thread = next(
-                    (
-                        t
-                        for t in conn.store["threads"]
-                        if t["thread_id"] == run["thread_id"]
-                    ),
-                    None,
-                )
-
-                if thread is None:
-                    await logger.awarning(
-                        "Unexpected missing thread in Runs.next",
-                        thread_id=run["thread_id"],
+                    thread = next(
+                        (
+                            t
+                            for t in conn.store["threads"]
+                            if t["thread_id"] == run["thread_id"]
+                        ),
+                        None,
                     )
-                    continue

-                # Increment attempt counter
-                attempt = await conn.retry_counter.increment(run_id)
-                enriched_run = {
-                    **run,
-                    "thread_created_at": thread.get("created_at", now),
-                }
-                yield enriched_run, attempt
-            finally:
-                lock.release()
-            return
-        yield None
+                    if thread is None:
+                        await logger.awarning(
+                            "Unexpected missing thread in Runs.next",
+                            thread_id=run["thread_id"],
+                        )
+                        continue
+
+                    # Increment attempt counter
+                    attempt = await conn.retry_counter.increment(run_id)
+                    yield run, attempt
+                finally:
+                    lock.release()
+                return
+            yield None

     @asynccontextmanager
     @staticmethod
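Runs.next is now consumed as an async context manager that owns the poll delay itself. A minimal usage sketch under that assumption (queue.py actually enters it on an AsyncExitStack so the thread lock can outlive the block):

async with Runs.next(wait=True) as tup:
    if tup is None:
        ...  # nothing runnable; the 0.5 s sleep already happened inside next()
    else:
        run, attempt = tup
        ...  # hand the run to a worker while the thread lock is held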
@@ -1247,6 +1249,11 @@ class Runs(Authenticated):
             # Clean up this queue
             await stream_manager.remove_queue(run_id, queue)

+    @staticmethod
+    async def sweep(conn: InMemConnectionProto) -> list[UUID]:
+        """Sweep runs that are no longer running"""
+        return []
+
     @staticmethod
     def _merge_jsonb(*objects: dict) -> dict:
         """Mimics PostgreSQL's JSONB merge behavior"""