letta-nightly 0.7.15.dev20250514104255__py3-none-any.whl → 0.7.16.dev20250515205957__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- letta/__init__.py +1 -1
- letta/agent.py +12 -0
- letta/agents/helpers.py +48 -5
- letta/agents/letta_agent.py +46 -18
- letta/agents/letta_agent_batch.py +44 -26
- letta/agents/voice_sleeptime_agent.py +6 -4
- letta/client/client.py +16 -1
- letta/constants.py +3 -0
- letta/functions/async_composio_toolset.py +1 -1
- letta/groups/sleeptime_multi_agent.py +1 -0
- letta/interfaces/anthropic_streaming_interface.py +40 -6
- letta/jobs/llm_batch_job_polling.py +6 -2
- letta/orm/agent.py +102 -1
- letta/orm/block.py +3 -0
- letta/orm/sqlalchemy_base.py +365 -133
- letta/schemas/agent.py +10 -2
- letta/schemas/block.py +3 -0
- letta/schemas/memory.py +7 -2
- letta/server/rest_api/routers/v1/agents.py +13 -13
- letta/server/rest_api/routers/v1/messages.py +6 -6
- letta/server/rest_api/routers/v1/tools.py +3 -3
- letta/server/server.py +74 -0
- letta/services/agent_manager.py +421 -7
- letta/services/block_manager.py +12 -8
- letta/services/helpers/agent_manager_helper.py +19 -0
- letta/services/job_manager.py +99 -0
- letta/services/llm_batch_manager.py +28 -27
- letta/services/message_manager.py +51 -19
- letta/services/tool_executor/tool_executor.py +19 -1
- letta/services/tool_manager.py +13 -3
- letta/types/__init__.py +0 -0
- {letta_nightly-0.7.15.dev20250514104255.dist-info → letta_nightly-0.7.16.dev20250515205957.dist-info}/METADATA +3 -3
- {letta_nightly-0.7.15.dev20250514104255.dist-info → letta_nightly-0.7.16.dev20250515205957.dist-info}/RECORD +36 -35
- {letta_nightly-0.7.15.dev20250514104255.dist-info → letta_nightly-0.7.16.dev20250515205957.dist-info}/LICENSE +0 -0
- {letta_nightly-0.7.15.dev20250514104255.dist-info → letta_nightly-0.7.16.dev20250515205957.dist-info}/WHEEL +0 -0
- {letta_nightly-0.7.15.dev20250514104255.dist-info → letta_nightly-0.7.16.dev20250515205957.dist-info}/entry_points.txt +0 -0
letta/services/job_manager.py
CHANGED
@@ -44,6 +44,19 @@ class JobManager:
             job.create(session, actor=actor)  # Save job in the database
             return job.to_pydantic()
 
+    @enforce_types
+    async def create_job_async(
+        self, pydantic_job: Union[PydanticJob, PydanticRun, PydanticBatchJob], actor: PydanticUser
+    ) -> Union[PydanticJob, PydanticRun, PydanticBatchJob]:
+        """Create a new job based on the JobCreate schema."""
+        async with db_registry.async_session() as session:
+            # Associate the job with the user
+            pydantic_job.user_id = actor.id
+            job_data = pydantic_job.model_dump(to_orm=True)
+            job = JobModel(**job_data)
+            await job.create_async(session, actor=actor)  # Save job in the database
+            return job.to_pydantic()
+
     @enforce_types
     def update_job_by_id(self, job_id: str, job_update: JobUpdate, actor: PydanticUser) -> PydanticJob:
         """Update a job by its ID with the given JobUpdate object."""
@@ -68,6 +81,30 @@ class JobManager:
 
             return job.to_pydantic()
 
+    @enforce_types
+    async def update_job_by_id_async(self, job_id: str, job_update: JobUpdate, actor: PydanticUser) -> PydanticJob:
+        """Update a job by its ID with the given JobUpdate object asynchronously."""
+        async with db_registry.async_session() as session:
+            # Fetch the job by ID
+            job = await self._verify_job_access_async(session=session, job_id=job_id, actor=actor, access=["write"])
+
+            # Update job attributes with only the fields that were explicitly set
+            update_data = job_update.model_dump(to_orm=True, exclude_unset=True, exclude_none=True)
+
+            # Automatically update the completion timestamp if status is set to 'completed'
+            for key, value in update_data.items():
+                setattr(job, key, value)
+
+            if update_data.get("status") == JobStatus.completed and not job.completed_at:
+                job.completed_at = get_utc_time()
+                if job.callback_url:
+                    await self._dispatch_callback_async(session, job)
+
+            # Save the updated job to the database
+            await job.update_async(db_session=session, actor=actor)
+
+            return job.to_pydantic()
+
     @enforce_types
     def get_job_by_id(self, job_id: str, actor: PydanticUser) -> PydanticJob:
         """Fetch a job by its ID."""
@@ -76,6 +113,14 @@ class JobManager:
             job = JobModel.read(db_session=session, identifier=job_id, actor=actor, access_type=AccessType.USER)
             return job.to_pydantic()
 
+    @enforce_types
+    async def get_job_by_id_async(self, job_id: str, actor: PydanticUser) -> PydanticJob:
+        """Fetch a job by its ID asynchronously."""
+        async with db_registry.async_session() as session:
+            # Retrieve job by ID using the Job model's read method
+            job = await JobModel.read_async(db_session=session, identifier=job_id, actor=actor, access_type=AccessType.USER)
+            return job.to_pydantic()
+
     @enforce_types
     def list_jobs(
         self,
@@ -438,6 +483,35 @@ class JobManager:
             raise NoResultFound(f"Job with id {job_id} does not exist or user does not have access")
         return job
 
+    async def _verify_job_access_async(
+        self,
+        session: Session,
+        job_id: str,
+        actor: PydanticUser,
+        access: List[Literal["read", "write", "delete"]] = ["read"],
+    ) -> JobModel:
+        """
+        Verify that a job exists and the user has the required access.
+
+        Args:
+            session: The database session
+            job_id: The ID of the job to verify
+            actor: The user making the request
+
+        Returns:
+            The job if it exists and the user has access
+
+        Raises:
+            NoResultFound: If the job does not exist or user does not have access
+        """
+        job_query = select(JobModel).where(JobModel.id == job_id)
+        job_query = JobModel.apply_access_predicate(job_query, actor, access, AccessType.USER)
+        result = await session.execute(job_query)
+        job = result.scalar_one_or_none()
+        if not job:
+            raise NoResultFound(f"Job with id {job_id} does not exist or user does not have access")
+        return job
+
     def _get_run_request_config(self, run_id: str) -> LettaRequestConfig:
         """
         Get the request config for a job.
@@ -476,3 +550,28 @@ class JobManager:
 
             session.add(job)
             session.commit()
+
+    async def _dispatch_callback_async(self, session, job: JobModel) -> None:
+        """
+        POST a standard JSON payload to job.callback_url
+        and record timestamp + HTTP status asynchronously.
+        """
+
+        payload = {
+            "job_id": job.id,
+            "status": job.status,
+            "completed_at": job.completed_at.isoformat(),
+        }
+        try:
+            import httpx
+
+            async with httpx.AsyncClient() as client:
+                resp = await client.post(job.callback_url, json=payload, timeout=5.0)
+                job.callback_sent_at = get_utc_time()
+                job.callback_status_code = resp.status_code
+
+        except Exception:
+            return
+
+        session.add(job)
+        await session.commit()
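
A rough usage sketch of the new async job lifecycle above (not part of the diff). The import paths, the no-argument JobManager()/Job() constructors, and how `actor` is obtained are assumptions; the *_async method names and signatures come from the hunks shown here.

from letta.schemas.enums import JobStatus
from letta.schemas.job import Job, JobUpdate
from letta.schemas.user import User
from letta.services.job_manager import JobManager


async def run_job_lifecycle(manager: JobManager, actor: User) -> None:
    # Create a job without blocking the event loop.
    job = await manager.create_job_async(pydantic_job=Job(), actor=actor)

    # ... do the work the job tracks ...

    # Marking the job completed sets completed_at and, when a callback_url is
    # configured on the job, fires _dispatch_callback_async as shown above.
    await manager.update_job_by_id_async(
        job_id=job.id,
        job_update=JobUpdate(status=JobStatus.completed),
        actor=actor,
    )

    # Re-read the final state through the async read path.
    final = await manager.get_job_by_id_async(job_id=job.id, actor=actor)
    print(final.status, final.completed_at)

# Run with: asyncio.run(run_job_lifecycle(JobManager(), actor)) against a configured Letta database.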
letta/services/llm_batch_manager.py
CHANGED
@@ -2,7 +2,7 @@ import datetime
 from typing import Any, Dict, List, Optional, Tuple
 
 from anthropic.types.beta.messages import BetaMessageBatch, BetaMessageBatchIndividualResponse
-from sqlalchemy import desc, func, tuple_
+from sqlalchemy import desc, func, select, tuple_
 
 from letta.jobs.types import BatchPollingResult, ItemUpdateInfo, RequestStatusUpdateInfo, StepStatusUpdateInfo
 from letta.log import get_logger
@@ -26,7 +26,7 @@ class LLMBatchManager:
     """Manager for handling both LLMBatchJob and LLMBatchItem operations."""
 
     @enforce_types
-    def create_llm_batch_job(
+    async def create_llm_batch_job_async(
         self,
         llm_provider: ProviderType,
         create_batch_response: BetaMessageBatch,
@@ -35,7 +35,7 @@ class LLMBatchManager:
         status: JobStatus = JobStatus.created,
     ) -> PydanticLLMBatchJob:
         """Create a new LLM batch job."""
-        with db_registry.session() as session:
+        async with db_registry.async_session() as session:
             batch = LLMBatchJob(
                 status=status,
                 llm_provider=llm_provider,
@@ -43,14 +43,14 @@ class LLMBatchManager:
                 organization_id=actor.organization_id,
                 letta_batch_job_id=letta_batch_job_id,
             )
-            batch.create(session, actor=actor)
+            await batch.create_async(session, actor=actor)
             return batch.to_pydantic()
 
     @enforce_types
-    def get_llm_batch_job_by_id(self, llm_batch_id: str, actor: Optional[PydanticUser] = None) -> PydanticLLMBatchJob:
+    async def get_llm_batch_job_by_id_async(self, llm_batch_id: str, actor: Optional[PydanticUser] = None) -> PydanticLLMBatchJob:
         """Retrieve a single batch job by ID."""
-        with db_registry.session() as session:
-            batch = LLMBatchJob.read(db_session=session, identifier=llm_batch_id, actor=actor)
+        async with db_registry.async_session() as session:
+            batch = await LLMBatchJob.read_async(db_session=session, identifier=llm_batch_id, actor=actor)
             return batch.to_pydantic()
 
     @enforce_types
@@ -197,16 +197,16 @@ class LLMBatchManager:
         return [message.to_pydantic() for message in results]
 
     @enforce_types
-    def list_running_llm_batches(self, actor: Optional[PydanticUser] = None) -> List[PydanticLLMBatchJob]:
+    async def list_running_llm_batches_async(self, actor: Optional[PydanticUser] = None) -> List[PydanticLLMBatchJob]:
         """Return all running LLM batch jobs, optionally filtered by actor's organization."""
-        with db_registry.session() as session:
-            query = session.query(LLMBatchJob).filter(LLMBatchJob.status == JobStatus.running)
+        async with db_registry.async_session() as session:
+            query = select(LLMBatchJob).where(LLMBatchJob.status == JobStatus.running)
 
             if actor is not None:
-                query = query.filter(LLMBatchJob.organization_id == actor.organization_id)
+                query = query.where(LLMBatchJob.organization_id == actor.organization_id)
 
-            results = query.all()
-            return [batch.to_pydantic() for batch in results]
+            results = await session.execute(query)
+            return [batch.to_pydantic() for batch in results.scalars().all()]
 
     @enforce_types
     def create_llm_batch_item(
@@ -234,7 +234,9 @@ class LLMBatchManager:
         return item.to_pydantic()
 
     @enforce_types
-    def create_llm_batch_items_bulk(self, llm_batch_items: List[PydanticLLMBatchItem], actor: PydanticUser) -> List[PydanticLLMBatchItem]:
+    async def create_llm_batch_items_bulk_async(
+        self, llm_batch_items: List[PydanticLLMBatchItem], actor: PydanticUser
+    ) -> List[PydanticLLMBatchItem]:
         """
         Create multiple batch items in bulk for better performance.
 
@@ -245,7 +247,7 @@ class LLMBatchManager:
         Returns:
             List of created batch items as Pydantic models
         """
-        with db_registry.session() as session:
+        async with db_registry.async_session() as session:
            # Convert Pydantic models to ORM objects
            orm_items = []
            for item in llm_batch_items:
@@ -261,8 +263,7 @@ class LLMBatchManager:
                )
                orm_items.append(orm_item)
 
-
-            created_items = LLMBatchItem.batch_create(orm_items, session, actor=actor)
+            created_items = await LLMBatchItem.batch_create_async(orm_items, session, actor=actor)
 
            # Convert back to Pydantic models
            return [item.to_pydantic() for item in created_items]
@@ -300,7 +301,7 @@ class LLMBatchManager:
            return item.update(db_session=session, actor=actor).to_pydantic()
 
     @enforce_types
-    def list_llm_batch_items(
+    async def list_llm_batch_items_async(
         self,
         llm_batch_id: str,
         limit: Optional[int] = None,
@@ -321,29 +322,29 @@ class LLMBatchManager:
 
         The results are ordered by their id in ascending order.
         """
-        with db_registry.session() as session:
-            query = session.query(LLMBatchItem).filter(LLMBatchItem.llm_batch_id == llm_batch_id)
+        async with db_registry.async_session() as session:
+            query = select(LLMBatchItem).where(LLMBatchItem.llm_batch_id == llm_batch_id)
 
            if actor is not None:
-                query = query.filter(LLMBatchItem.organization_id == actor.organization_id)
+                query = query.where(LLMBatchItem.organization_id == actor.organization_id)
 
            # Additional optional filters
            if agent_id is not None:
-                query = query.filter(LLMBatchItem.agent_id == agent_id)
+                query = query.where(LLMBatchItem.agent_id == agent_id)
            if request_status is not None:
-                query = query.filter(LLMBatchItem.request_status == request_status)
+                query = query.where(LLMBatchItem.request_status == request_status)
            if step_status is not None:
-                query = query.filter(LLMBatchItem.step_status == step_status)
+                query = query.where(LLMBatchItem.step_status == step_status)
            if after is not None:
-                query = query.filter(LLMBatchItem.id > after)
+                query = query.where(LLMBatchItem.id > after)
 
            query = query.order_by(LLMBatchItem.id.asc())
 
            if limit is not None:
                query = query.limit(limit)
 
-            results = query.all()
-            return [item.to_pydantic() for item in results]
+            results = await session.execute(query)
+            return [item.to_pydantic() for item in results.scalars()]
 
     def bulk_update_llm_batch_items(
         self, llm_batch_id_agent_id_pairs: List[Tuple[str, str]], field_updates: List[Dict[str, Any]], strict: bool = True
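
A rough usage sketch of the async batch-polling methods above (not part of the diff). LLMBatchManager's import path and construction, and how `actor` is obtained, are assumptions; the method names and parameters are taken from the hunks.

from letta.schemas.user import User
from letta.services.llm_batch_manager import LLMBatchManager


async def poll_running_batches(manager: LLMBatchManager, actor: User) -> None:
    # All LLM batch jobs currently in the running state for this actor's org.
    running = await manager.list_running_llm_batches_async(actor=actor)

    for batch in running:
        # Page through the batch items with keyset pagination on item id.
        after = None
        while True:
            items = await manager.list_llm_batch_items_async(
                llm_batch_id=batch.id, limit=100, actor=actor, after=after
            )
            if not items:
                break
            print(f"batch {batch.id}: {len(items)} items")
            after = items[-1].id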
letta/services/message_manager.py
CHANGED
@@ -36,15 +36,29 @@ class MessageManager:
         """Fetch messages by ID and return them in the requested order."""
         with db_registry.session() as session:
             results = MessageModel.list(db_session=session, id=message_ids, organization_id=actor.organization_id, limit=len(message_ids))
+            return self._get_messages_by_id_postprocess(results, message_ids)
 
-            if len(results) != len(message_ids):
-                logger.warning(
-                    f"Expected {len(message_ids)} messages, but found {len(results)}. Missing ids={set(message_ids) - set([r.id for r in results])}"
-                )
+    @enforce_types
+    async def get_messages_by_ids_async(self, message_ids: List[str], actor: PydanticUser) -> List[PydanticMessage]:
+        """Fetch messages by ID and return them in the requested order. Async version of above function."""
+        async with db_registry.async_session() as session:
+            results = await MessageModel.list_async(
+                db_session=session, id=message_ids, organization_id=actor.organization_id, limit=len(message_ids)
+            )
+            return self._get_messages_by_id_postprocess(results, message_ids)
 
-            # Sort results directly based on message_ids
-            result_dict = {msg.id: msg.to_pydantic() for msg in results}
-            return list(filter(lambda x: x is not None, [result_dict.get(msg_id, None) for msg_id in message_ids]))
+    def _get_messages_by_id_postprocess(
+        self,
+        results: List[MessageModel],
+        message_ids: List[str],
+    ) -> List[PydanticMessage]:
+        if len(results) != len(message_ids):
+            logger.warning(
+                f"Expected {len(message_ids)} messages, but found {len(results)}. Missing ids={set(message_ids) - set([r.id for r in results])}"
+            )
+        # Sort results directly based on message_ids
+        result_dict = {msg.id: msg.to_pydantic() for msg in results}
+        return list(filter(lambda x: x is not None, [result_dict.get(msg_id, None) for msg_id in message_ids]))
 
     @enforce_types
     def create_message(self, pydantic_msg: PydanticMessage, actor: PydanticUser) -> PydanticMessage:
@@ -57,11 +71,20 @@ class MessageManager:
             msg.create(session, actor=actor)  # Persist to database
             return msg.to_pydantic()
 
+    def _create_many_preprocess(self, pydantic_msgs: List[PydanticMessage], actor: PydanticUser) -> List[MessageModel]:
+        # Create ORM model instances for all messages
+        orm_messages = []
+        for pydantic_msg in pydantic_msgs:
+            # Set the organization id of the Pydantic message
+            pydantic_msg.organization_id = actor.organization_id
+            msg_data = pydantic_msg.model_dump(to_orm=True)
+            orm_messages.append(MessageModel(**msg_data))
+        return orm_messages
+
     @enforce_types
     def create_many_messages(self, pydantic_msgs: List[PydanticMessage], actor: PydanticUser) -> List[PydanticMessage]:
         """
         Create multiple messages in a single database transaction.
-
         Args:
             pydantic_msgs: List of Pydantic message models to create
             actor: User performing the action
@@ -69,23 +92,32 @@ class MessageManager:
         Returns:
             List of created Pydantic message models
         """
-
         if not pydantic_msgs:
             return []
 
-
-        orm_messages = []
-        for pydantic_msg in pydantic_msgs:
-            # Set the organization id of the Pydantic message
-            pydantic_msg.organization_id = actor.organization_id
-            msg_data = pydantic_msg.model_dump(to_orm=True)
-            orm_messages.append(MessageModel(**msg_data))
-
-        # Use the batch_create method for efficient creation
+        orm_messages = self._create_many_preprocess(pydantic_msgs, actor)
         with db_registry.session() as session:
             created_messages = MessageModel.batch_create(orm_messages, session, actor=actor)
+            return [msg.to_pydantic() for msg in created_messages]
+
+    @enforce_types
+    async def create_many_messages_async(self, pydantic_msgs: List[PydanticMessage], actor: PydanticUser) -> List[PydanticMessage]:
+        """
+        Create multiple messages in a single database transaction asynchronously.
+
+        Args:
+            pydantic_msgs: List of Pydantic message models to create
+            actor: User performing the action
+
+        Returns:
+            List of created Pydantic message models
+        """
+        if not pydantic_msgs:
+            return []
 
-
+        orm_messages = self._create_many_preprocess(pydantic_msgs, actor)
+        async with db_registry.async_session() as session:
+            created_messages = await MessageModel.batch_create_async(orm_messages, session, actor=actor)
             return [msg.to_pydantic() for msg in created_messages]
 
     @enforce_types
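
A rough usage sketch of the async message helpers above (not part of the diff). The import paths and how `actor`/`messages` are built are assumptions; the method names come from the hunks, and the ordering behavior is documented in _get_messages_by_id_postprocess.

from typing import List

from letta.schemas.message import Message
from letta.schemas.user import User
from letta.services.message_manager import MessageManager


async def roundtrip_messages(manager: MessageManager, messages: List[Message], actor: User) -> None:
    # One transaction; batch_create_async is used under the hood.
    created = await manager.create_many_messages_async(messages, actor=actor)

    # Results come back in the order of the ids requested; ids that are missing
    # are logged as a warning and dropped rather than raising.
    fetched = await manager.get_messages_by_ids_async([m.id for m in created], actor=actor)
    assert [m.id for m in fetched] == [m.id for m in created]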
letta/services/tool_executor/tool_executor.py
CHANGED
@@ -3,7 +3,12 @@ import traceback
 from abc import ABC, abstractmethod
 from typing import Any, Dict, Optional
 
-from letta.constants import COMPOSIO_ENTITY_ENV_VAR_KEY, CORE_MEMORY_LINE_NUMBER_WARNING, RETRIEVAL_QUERY_DEFAULT_PAGE_SIZE
+from letta.constants import (
+    COMPOSIO_ENTITY_ENV_VAR_KEY,
+    CORE_MEMORY_LINE_NUMBER_WARNING,
+    READ_ONLY_BLOCK_EDIT_ERROR,
+    RETRIEVAL_QUERY_DEFAULT_PAGE_SIZE,
+)
 from letta.functions.ast_parsers import coerce_dict_args_by_annotations, get_function_annotations_from_source
 from letta.functions.composio_helpers import execute_composio_action_async, generate_composio_action_from_func_name
 from letta.helpers.composio_helpers import get_composio_api_key
@@ -203,6 +208,8 @@ class LettaCoreToolExecutor(ToolExecutor):
         Returns:
             Optional[str]: None is always returned as this function does not produce a response.
         """
+        if agent_state.memory.get_block(label).read_only:
+            raise ValueError(f"{READ_ONLY_BLOCK_EDIT_ERROR}")
         current_value = str(agent_state.memory.get_block(label).value)
         new_value = current_value + "\n" + str(content)
         agent_state.memory.update_block_value(label=label, value=new_value)
@@ -228,6 +235,8 @@ class LettaCoreToolExecutor(ToolExecutor):
         Returns:
             Optional[str]: None is always returned as this function does not produce a response.
         """
+        if agent_state.memory.get_block(label).read_only:
+            raise ValueError(f"{READ_ONLY_BLOCK_EDIT_ERROR}")
         current_value = str(agent_state.memory.get_block(label).value)
         if old_content not in current_value:
             raise ValueError(f"Old content '{old_content}' not found in memory block '{label}'")
@@ -260,6 +269,9 @@ class LettaCoreToolExecutor(ToolExecutor):
         """
         import re
 
+        if agent_state.memory.get_block(label).read_only:
+            raise ValueError(f"{READ_ONLY_BLOCK_EDIT_ERROR}")
+
         if bool(re.search(r"\nLine \d+: ", old_str)):
             raise ValueError(
                 "old_str contains a line number prefix, which is not allowed. "
@@ -349,6 +361,9 @@ class LettaCoreToolExecutor(ToolExecutor):
         """
         import re
 
+        if agent_state.memory.get_block(label).read_only:
+            raise ValueError(f"{READ_ONLY_BLOCK_EDIT_ERROR}")
+
         if bool(re.search(r"\nLine \d+: ", new_str)):
             raise ValueError(
                 "new_str contains a line number prefix, which is not allowed. Do not "
@@ -426,6 +441,9 @@ class LettaCoreToolExecutor(ToolExecutor):
         """
         import re
 
+        if agent_state.memory.get_block(label).read_only:
+            raise ValueError(f"{READ_ONLY_BLOCK_EDIT_ERROR}")
+
         if bool(re.search(r"\nLine \d+: ", new_memory)):
             raise ValueError(
                 "new_memory contains a line number prefix, which is not allowed. Do not "
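
An illustrative standalone version of the read-only guard added above (not part of the diff). The Memory/constants import paths are assumptions; get_block, update_block_value, the read_only flag, and READ_ONLY_BLOCK_EDIT_ERROR all appear in this release's changes (schemas/block.py, schemas/memory.py, constants.py).

from letta.constants import READ_ONLY_BLOCK_EDIT_ERROR
from letta.schemas.memory import Memory


def append_to_block(memory: Memory, label: str, content: str) -> None:
    # Mirror of the check the executor now performs before any core-memory edit:
    # a block flagged read_only can no longer be modified by memory tools.
    if memory.get_block(label).read_only:
        raise ValueError(f"{READ_ONLY_BLOCK_EDIT_ERROR}")
    current_value = str(memory.get_block(label).value)
    memory.update_block_value(label=label, value=current_value + "\n" + str(content))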
letta/services/tool_manager.py
CHANGED
@@ -115,6 +115,16 @@ class ToolManager:
         except NoResultFound:
             return None
 
+    @enforce_types
+    async def get_tool_by_name_async(self, tool_name: str, actor: PydanticUser) -> Optional[PydanticTool]:
+        """Retrieve a tool by its name and a user. We derive the organization from the user, and retrieve that tool."""
+        try:
+            async with db_registry.async_session() as session:
+                tool = await ToolModel.read_async(db_session=session, name=tool_name, actor=actor)
+                return tool.to_pydantic()
+        except NoResultFound:
+            return None
+
     @enforce_types
     def get_tool_id_by_name(self, tool_name: str, actor: PydanticUser) -> Optional[str]:
         """Retrieve a tool by its name and a user. We derive the organization from the user, and retrieve that tool."""
@@ -126,10 +136,10 @@ class ToolManager:
             return None
 
     @enforce_types
-    def list_tools(self, actor: PydanticUser, after: Optional[str] = None, limit: Optional[int] = 50) -> List[PydanticTool]:
+    async def list_tools_async(self, actor: PydanticUser, after: Optional[str] = None, limit: Optional[int] = 50) -> List[PydanticTool]:
         """List all tools with optional pagination."""
-        with db_registry.session() as session:
-            tools = ToolModel.list(
+        async with db_registry.async_session() as session:
+            tools = await ToolModel.list_async(
                 db_session=session,
                 after=after,
                 limit=limit,
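
A rough usage sketch of the async tool listing above (not part of the diff). The import paths, ToolManager construction, and keyset pagination on the tool id are assumptions; the method name and parameters come from the hunks.

from typing import List

from letta.schemas.tool import Tool
from letta.schemas.user import User
from letta.services.tool_manager import ToolManager


async def list_all_tools(manager: ToolManager, actor: User) -> List[Tool]:
    tools: List[Tool] = []
    after = None
    while True:
        page = await manager.list_tools_async(actor=actor, after=after, limit=50)
        if not page:
            break
        tools.extend(page)
        after = page[-1].id  # paginate from the last tool id seen
    return tools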
letta/types/__init__.py
ADDED
File without changes
{letta_nightly-0.7.15.dev20250514104255.dist-info → letta_nightly-0.7.16.dev20250515205957.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: letta-nightly
-Version: 0.7.15.dev20250514104255
+Version: 0.7.16.dev20250515205957
 Summary: Create LLM agents with long-term memory and custom tools
 License: Apache License
 Author: Letta Team
@@ -51,7 +51,7 @@ Requires-Dist: isort (>=5.13.2,<6.0.0) ; extra == "dev" or extra == "all"
 Requires-Dist: jinja2 (>=3.1.5,<4.0.0)
 Requires-Dist: langchain (>=0.3.7,<0.4.0) ; extra == "external-tools" or extra == "desktop" or extra == "all"
 Requires-Dist: langchain-community (>=0.3.7,<0.4.0) ; extra == "external-tools" or extra == "desktop" or extra == "all"
-Requires-Dist: letta_client (>=0.1.
+Requires-Dist: letta_client (>=0.1.143,<0.2.0)
 Requires-Dist: llama-index (>=0.12.2,<0.13.0)
 Requires-Dist: llama-index-embeddings-openai (>=0.3.1,<0.4.0)
 Requires-Dist: locust (>=2.31.5,<3.0.0) ; extra == "dev" or extra == "desktop" or extra == "all"
@@ -77,7 +77,7 @@ Requires-Dist: pydantic (>=2.10.6,<3.0.0)
 Requires-Dist: pydantic-settings (>=2.2.1,<3.0.0)
 Requires-Dist: pyhumps (>=3.8.0,<4.0.0)
 Requires-Dist: pyright (>=1.1.347,<2.0.0) ; extra == "dev" or extra == "desktop" or extra == "all"
-Requires-Dist: pytest-asyncio (>=0.
+Requires-Dist: pytest-asyncio (>=0.24.0,<0.25.0) ; extra == "dev" or extra == "all"
 Requires-Dist: pytest-order (>=1.2.0,<2.0.0) ; extra == "dev" or extra == "all"
 Requires-Dist: python-box (>=7.1.1,<8.0.0)
 Requires-Dist: python-multipart (>=0.0.19,<0.0.20)