letta-nightly 0.7.28.dev20250601104146__py3-none-any.whl → 0.7.29.dev20250602104315__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- letta/__init__.py +1 -1
- letta/jobs/llm_batch_job_polling.py +5 -2
- letta/jobs/scheduler.py +20 -21
- letta/services/llm_batch_manager.py +11 -2
- letta/settings.py +2 -0
- {letta_nightly-0.7.28.dev20250601104146.dist-info → letta_nightly-0.7.29.dev20250602104315.dist-info}/METADATA +1 -1
- {letta_nightly-0.7.28.dev20250601104146.dist-info → letta_nightly-0.7.29.dev20250602104315.dist-info}/RECORD +10 -10
- {letta_nightly-0.7.28.dev20250601104146.dist-info → letta_nightly-0.7.29.dev20250602104315.dist-info}/LICENSE +0 -0
- {letta_nightly-0.7.28.dev20250601104146.dist-info → letta_nightly-0.7.29.dev20250602104315.dist-info}/WHEEL +0 -0
- {letta_nightly-0.7.28.dev20250601104146.dist-info → letta_nightly-0.7.29.dev20250602104315.dist-info}/entry_points.txt +0 -0
letta/jobs/llm_batch_job_polling.py
CHANGED
@@ -11,6 +11,7 @@ from letta.schemas.letta_response import LettaBatchResponse
 from letta.schemas.llm_batch_job import LLMBatchJob
 from letta.schemas.user import User
 from letta.server.server import SyncServer
+from letta.settings import settings
 
 logger = get_logger(__name__)
 
@@ -180,7 +181,9 @@ async def poll_running_llm_batches(server: "SyncServer") -> List[LettaBatchResponse]:
 
     try:
         # 1. Retrieve running batch jobs
-        batches = await server.batch_manager.list_running_llm_batches_async()
+        batches = await server.batch_manager.list_running_llm_batches_async(
+            weeks=max(settings.batch_job_polling_lookback_weeks, 1), batch_size=settings.batch_job_polling_batch_size
+        )
         metrics.total_batches = len(batches)
 
         # TODO: Expand to more providers
@@ -235,4 +238,4 @@ async def poll_running_llm_batches(server: "SyncServer") -> List[LettaBatchResponse]:
         logger.exception("[Poll BatchJob] Unhandled error in poll_running_llm_batches", exc_info=e)
     finally:
         # 7. Log metrics summary
-        metrics.log_summary()
+        metrics.log_summary()
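Note on the change above: the poller now bounds its scan with two new settings instead of listing every running batch. A minimal sketch of the clamping at the call site (resolve_polling_window is a hypothetical helper; only the settings fields come from the diff):

    from typing import Optional, Tuple

    def resolve_polling_window(lookback_weeks: int, batch_size: Optional[int]) -> Tuple[int, Optional[int]]:
        # Mirrors the call site: max(..., 1) guarantees at least a one-week
        # lookback even if the setting is zero or negative.
        return max(lookback_weeks, 1), batch_size

    assert resolve_polling_window(0, None) == (1, None)  # clamped up to 1
    assert resolve_polling_window(2, 50) == (2, 50)      # passed through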
letta/jobs/scheduler.py
CHANGED
@@ -4,11 +4,10 @@ from typing import Optional
 
 from apscheduler.schedulers.asyncio import AsyncIOScheduler
 from apscheduler.triggers.interval import IntervalTrigger
-from sqlalchemy import text
 
 from letta.jobs.llm_batch_job_polling import poll_running_llm_batches
 from letta.log import get_logger
-from letta.server.db import
+from letta.server.db import db_context
 from letta.server.server import SyncServer
 from letta.settings import settings
 
@@ -35,16 +34,18 @@ async def _try_acquire_lock_and_start_scheduler(server: SyncServer) -> bool:
     acquired_lock = False
     try:
         # Use a temporary connection context for the attempt initially
-
-
+        with db_context() as session:
+            engine = session.get_bind()
+            # Get raw connection - MUST be kept open if lock is acquired
+            raw_conn = engine.raw_connection()
+            cur = raw_conn.cursor()
 
-
-
-            result = await session.execute(sql, {"lock_key": ADVISORY_LOCK_KEY})
-            acquired_lock = result.scalar_one()
+            cur.execute("SELECT pg_try_advisory_lock(CAST(%s AS bigint))", (ADVISORY_LOCK_KEY,))
+            acquired_lock = cur.fetchone()[0]
 
         if not acquired_lock:
-
+            cur.close()
+            raw_conn.close()
             logger.info("Scheduler lock held by another instance.")
             return False
 
@@ -105,14 +106,14 @@ async def _try_acquire_lock_and_start_scheduler(server: SyncServer) -> bool:
         # Clean up temporary resources if lock wasn't acquired or error occurred before storing
         if cur:
             try:
-
-            except
-
+                cur.close()
+            except:
+                pass
         if raw_conn:
             try:
-
-            except
-
+                raw_conn.close()
+            except:
+                pass
 
 
 async def _background_lock_retry_loop(server: SyncServer):
@@ -160,9 +161,7 @@ async def _release_advisory_lock():
     try:
         if not lock_conn.closed:
             if not lock_cur.closed:
-
-                unlock_sql = text("SELECT pg_advisory_unlock(CAST(:lock_key AS bigint))")
-                lock_cur.execute(unlock_sql, {"lock_key": ADVISORY_LOCK_KEY})
+                lock_cur.execute("SELECT pg_advisory_unlock(CAST(%s AS bigint))", (ADVISORY_LOCK_KEY,))
                 lock_cur.fetchone()  # Consume result
                 lock_conn.commit()
                 logger.info(f"Executed pg_advisory_unlock for lock {ADVISORY_LOCK_KEY}")
@@ -176,12 +175,12 @@ async def _release_advisory_lock():
     # Ensure resources are closed regardless of unlock success
     try:
         if lock_cur and not lock_cur.closed:
-
+            lock_cur.close()
     except Exception as e:
         logger.error(f"Error closing advisory lock cursor: {e}", exc_info=True)
     try:
         if lock_conn and not lock_conn.closed:
-
+            lock_conn.close()
             logger.info("Closed database connection that held advisory lock.")
     except Exception as e:
         logger.error(f"Error closing advisory lock connection: {e}", exc_info=True)
@@ -253,4 +252,4 @@ async def shutdown_scheduler_and_release_lock():
     try:
         scheduler.shutdown(wait=False)
     except:
-        pass
+        pass
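The substantive change above is the move from an async SQLAlchemy session to a raw DBAPI cursor for the leader-election lock. Postgres advisory locks are session-scoped: pg_try_advisory_lock is held only as long as the connection that acquired it, so the raw connection must stay open while this instance runs the scheduler, and closing it (as the cleanup paths do) releases the lock implicitly even if pg_advisory_unlock never runs. A self-contained sketch of the same pattern using psycopg2 directly rather than Letta's db_context (the DSN and key are placeholders):

    import psycopg2

    ADVISORY_LOCK_KEY = 0x12345678  # placeholder; any stable bigint shared by all instances

    def try_acquire_leader_lock(dsn: str):
        """Return (conn, cur) holding the advisory lock, or None if another
        instance holds it. The caller must keep conn open while it leads."""
        conn = psycopg2.connect(dsn)
        cur = conn.cursor()
        cur.execute("SELECT pg_try_advisory_lock(CAST(%s AS bigint))", (ADVISORY_LOCK_KEY,))
        if not cur.fetchone()[0]:
            cur.close()
            conn.close()  # safe to close: the lock was never acquired
            return None
        return conn, cur

    def release_leader_lock(conn, cur) -> None:
        cur.execute("SELECT pg_advisory_unlock(CAST(%s AS bigint))", (ADVISORY_LOCK_KEY,))
        cur.fetchone()  # consume the boolean result, as the scheduler does
        conn.commit()
        cur.close()
        conn.close()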
letta/services/llm_batch_manager.py
CHANGED
@@ -205,14 +205,23 @@ class LLMBatchManager:
 
     @enforce_types
     @trace_method
-    async def list_running_llm_batches_async(
-
+    async def list_running_llm_batches_async(
+        self, actor: Optional[PydanticUser] = None, weeks: Optional[int] = None, batch_size: Optional[int] = None
+    ) -> List[PydanticLLMBatchJob]:
+        """Return all running LLM batch jobs, optionally filtered by actor's organization and recent weeks."""
         async with db_registry.async_session() as session:
             query = select(LLMBatchJob).where(LLMBatchJob.status == JobStatus.running)
 
             if actor is not None:
                 query = query.where(LLMBatchJob.organization_id == actor.organization_id)
 
+            if weeks is not None:
+                cutoff_datetime = datetime.datetime.utcnow() - datetime.timedelta(weeks=weeks)
+                query = query.where(LLMBatchJob.created_at >= cutoff_datetime)
+
+            if batch_size is not None:
+                query = query.limit(batch_size)
+
             results = await session.execute(query)
             return [batch.to_pydantic() for batch in results.scalars().all()]
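Both new parameters default to None, so existing call sites keep their behavior; only the poller passes them. A usage sketch, assuming an already-constructed LLMBatchManager named batch_manager:

    async def example(batch_manager):
        # Running batches created in the last two weeks, at most 100 of them.
        recent = await batch_manager.list_running_llm_batches_async(weeks=2, batch_size=100)
        # No filters: every running batch, exactly as before this change.
        all_running = await batch_manager.list_running_llm_batches_async()
        return recent, all_running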
letta/settings.py
CHANGED
@@ -228,6 +228,8 @@ class Settings(BaseSettings):
     enable_batch_job_polling: bool = False
     poll_running_llm_batches_interval_seconds: int = 5 * 60
     poll_lock_retry_interval_seconds: int = 5 * 60
+    batch_job_polling_lookback_weeks: int = 2
+    batch_job_polling_batch_size: Optional[int] = None
 
     @property
     def letta_pg_uri(self) -> str:
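These are ordinary pydantic-settings fields, so they should be overridable from the environment without code changes. A sketch, assuming Letta's usual letta_ env prefix applies (an assumption, not shown in this diff) and that the variables are set before letta is first imported:

    import os

    # Hypothetical overrides; variable names assume the letta_ env prefix.
    os.environ["LETTA_BATCH_JOB_POLLING_LOOKBACK_WEEKS"] = "4"  # default: 2
    os.environ["LETTA_BATCH_JOB_POLLING_BATCH_SIZE"] = "100"    # default: None (no cap)

    from letta.settings import settings

    assert settings.batch_job_polling_lookback_weeks == 4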
{letta_nightly-0.7.28.dev20250601104146.dist-info → letta_nightly-0.7.29.dev20250602104315.dist-info}/RECORD
CHANGED
@@ -1,4 +1,4 @@
-letta/__init__.py,sha256=
+letta/__init__.py,sha256=UKatW_2XhjOKyRRGdXEXeM6M1DjImKw3757mkNUcEGo,888
 letta/agent.py,sha256=2r6xovRHeUnmWZ6WJoIP217ryse5Q3Bkco1JXiV599w,87459
 letta/agents/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 letta/agents/base_agent.py,sha256=mdFEpYBVyFjmt6BzO9YrpJnH99RkBWZ9gnP3Q_bnbBI,5505
@@ -65,8 +65,8 @@ letta/interfaces/openai_streaming_interface.py,sha256=vdEaD_yOZ1UqPY75on_d1_nc45
 letta/interfaces/utils.py,sha256=c6jvO0dBYHh8DQnlN-B0qeNC64d3CSunhfqlFA4pJTY,278
 letta/jobs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 letta/jobs/helpers.py,sha256=kO4aj954xsQ1RAmkjY6LQQ7JEIGuhaxB1e9pzrYKHAY,914
-letta/jobs/llm_batch_job_polling.py,sha256=
-letta/jobs/scheduler.py,sha256=
+letta/jobs/llm_batch_job_polling.py,sha256=c8bTQo9gID7KllHY-gozLsOqSHrK80C9aWU3LKNULuY,10298
+letta/jobs/scheduler.py,sha256=ZGCmdJgXvUF8kyUjJQpvTaerG_xf44xgIOSndvdbBak,10239
 letta/jobs/types.py,sha256=K8GKEnqEgAT6Kq4F2hUrBC4ZAFM9OkfOjVMStzxKuXQ,742
 letta/llm_api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 letta/llm_api/anthropic.py,sha256=Lvk2_d3UD32Q-PjWpATOjMp2Qf1SUPmXJUCn28UgB-Y,47281
@@ -311,7 +311,7 @@ letta/services/helpers/noop_helper.py,sha256=OZ6wZLsdNEAg9Q2t5oFTOMK6jp-YUMBPdoy
 letta/services/helpers/tool_execution_helper.py,sha256=JdH6VTWFrXfwPWsWNSZFKuRFhhXp8qiDYWjbPc8PLLI,7649
 letta/services/identity_manager.py,sha256=WZl5wTz_572FelNjnwyDX9QEDguZOsQvc9QfQTasGNI,10331
 letta/services/job_manager.py,sha256=EsbMPkJwmF-5-qEKKT0c0qaHxBKJmOzB_0Is41gBTFc,22494
-letta/services/llm_batch_manager.py,sha256=
+letta/services/llm_batch_manager.py,sha256=tb1VtMQIwiXU7vsCy8vRKWShUwnHICvcajmubEr6IiM,20811
 letta/services/mcp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 letta/services/mcp/base_client.py,sha256=YoRb9eKKTGaLxaMVtuH5UcC74iXyWlcyYbC5xOeGr4k,2571
 letta/services/mcp/sse_client.py,sha256=Vj0AgaadgMnpFQOWkSoPfeOI00ZvURMf3TIU7fv_DN8,1012
@@ -339,15 +339,15 @@ letta/services/tool_sandbox/base.py,sha256=pUnPFkEg9I5ktMuT4AOOxbTnTmZTGcTA2phLe
 letta/services/tool_sandbox/e2b_sandbox.py,sha256=x7dkuHmFUTC75N7g0rxAe5CpuWfqWmC0UuxmUE-XRNQ,8214
 letta/services/tool_sandbox/local_sandbox.py,sha256=4CzQfDHLd7nNAoKhWJyU55ERB3I0OYzS4F3S-BJoZfg,10552
 letta/services/user_manager.py,sha256=vO8UHk5-TzVv0zMHDTBjj1hxsXjlrG5lxAVctq7tJho,7858
-letta/settings.py,sha256=
+letta/settings.py,sha256=4aYgzuMvFwQ1Ic-dijruAHCbEnIpGsko11xN8pSzud0,9150
 letta/streaming_interface.py,sha256=c-T7zoMTXGXFwDWJJXrv7UypeMPXwPOmNHeuuh0b9zk,16398
 letta/streaming_utils.py,sha256=jLqFTVhUL76FeOuYk8TaRQHmPTf3HSRc2EoJwxJNK6U,11946
 letta/system.py,sha256=mKxmvvekuP8mdgsebRINGBoFbUdJhxLJ260crPBNVyk,8386
 letta/tracing.py,sha256=YMb9KgoBVz7nwCPwnErk2EJEKMiQ_ohctW1nOwhHd1Y,8458
 letta/types/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 letta/utils.py,sha256=W8J1FfhRADFqoyx3J8-Z1_aWyG433PBoEh_b5wdOZIg,32262
-letta_nightly-0.7.
-letta_nightly-0.7.
-letta_nightly-0.7.
-letta_nightly-0.7.
-letta_nightly-0.7.
+letta_nightly-0.7.29.dev20250602104315.dist-info/LICENSE,sha256=mExtuZ_GYJgDEI38GWdiEYZizZS4KkVt2SF1g_GPNhI,10759
+letta_nightly-0.7.29.dev20250602104315.dist-info/METADATA,sha256=TWxh1DsU0Go1Gc9GeVGcNBJ4e8QwWJme8hG8TyWPThg,22374
+letta_nightly-0.7.29.dev20250602104315.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+letta_nightly-0.7.29.dev20250602104315.dist-info/entry_points.txt,sha256=2zdiyGNEZGV5oYBuS-y2nAAgjDgcC9yM_mHJBFSRt5U,40
+letta_nightly-0.7.29.dev20250602104315.dist-info/RECORD,,