letta-nightly 0.6.13.dev20250122185528__py3-none-any.whl → 0.6.14.dev20250123041709__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- letta/__init__.py +2 -2
- letta/agent.py +69 -100
- letta/chat_only_agent.py +1 -1
- letta/client/client.py +153 -137
- letta/constants.py +1 -8
- letta/data_sources/connectors.py +1 -1
- letta/functions/helpers.py +29 -4
- letta/functions/schema_generator.py +55 -0
- letta/llm_api/helpers.py +51 -1
- letta/memory.py +9 -7
- letta/orm/agent.py +2 -2
- letta/orm/block.py +3 -1
- letta/orm/custom_columns.py +5 -4
- letta/orm/enums.py +1 -0
- letta/orm/message.py +2 -2
- letta/orm/sqlalchemy_base.py +5 -0
- letta/schemas/agent.py +3 -3
- letta/schemas/block.py +2 -2
- letta/schemas/environment_variables.py +1 -1
- letta/schemas/job.py +1 -1
- letta/schemas/letta_base.py +6 -0
- letta/schemas/letta_message.py +6 -6
- letta/schemas/memory.py +3 -2
- letta/schemas/message.py +21 -13
- letta/schemas/passage.py +1 -1
- letta/schemas/source.py +4 -4
- letta/schemas/tool.py +38 -43
- letta/server/rest_api/app.py +1 -16
- letta/server/rest_api/routers/v1/agents.py +101 -84
- letta/server/rest_api/routers/v1/blocks.py +8 -46
- letta/server/rest_api/routers/v1/jobs.py +4 -4
- letta/server/rest_api/routers/v1/providers.py +2 -2
- letta/server/rest_api/routers/v1/runs.py +6 -6
- letta/server/rest_api/routers/v1/sources.py +8 -38
- letta/server/rest_api/routers/v1/tags.py +1 -1
- letta/server/rest_api/routers/v1/tools.py +6 -7
- letta/server/server.py +3 -3
- letta/services/agent_manager.py +43 -9
- letta/services/block_manager.py +3 -3
- letta/services/job_manager.py +5 -3
- letta/services/organization_manager.py +1 -1
- letta/services/passage_manager.py +3 -3
- letta/services/provider_manager.py +2 -2
- letta/services/sandbox_config_manager.py +2 -2
- letta/services/source_manager.py +3 -3
- letta/services/tool_execution_sandbox.py +3 -1
- letta/services/tool_manager.py +8 -3
- letta/services/user_manager.py +2 -2
- letta/settings.py +29 -0
- letta/system.py +2 -2
- {letta_nightly-0.6.13.dev20250122185528.dist-info → letta_nightly-0.6.14.dev20250123041709.dist-info}/METADATA +1 -1
- {letta_nightly-0.6.13.dev20250122185528.dist-info → letta_nightly-0.6.14.dev20250123041709.dist-info}/RECORD +55 -61
- letta/server/rest_api/routers/openai/__init__.py +0 -0
- letta/server/rest_api/routers/openai/assistants/__init__.py +0 -0
- letta/server/rest_api/routers/openai/assistants/assistants.py +0 -115
- letta/server/rest_api/routers/openai/assistants/schemas.py +0 -115
- letta/server/rest_api/routers/openai/chat_completions/__init__.py +0 -0
- letta/server/rest_api/routers/openai/chat_completions/chat_completions.py +0 -120
- {letta_nightly-0.6.13.dev20250122185528.dist-info → letta_nightly-0.6.14.dev20250123041709.dist-info}/LICENSE +0 -0
- {letta_nightly-0.6.13.dev20250122185528.dist-info → letta_nightly-0.6.14.dev20250123041709.dist-info}/WHEEL +0 -0
- {letta_nightly-0.6.13.dev20250122185528.dist-info → letta_nightly-0.6.14.dev20250123041709.dist-info}/entry_points.txt +0 -0
@@ -45,8 +45,8 @@ def create_provider(
     return provider


-@router.patch("/", tags=["providers"], response_model=Provider, operation_id="
-def
+@router.patch("/", tags=["providers"], response_model=Provider, operation_id="modify_provider")
+def modify_provider(
     request: ProviderUpdate = Body(...),
     server: "SyncServer" = Depends(get_letta_server),
 ):
letta/server/rest_api/routers/v1/runs.py
CHANGED

@@ -43,8 +43,8 @@ def list_active_runs(
     return [Run.from_job(job) for job in active_runs]


-@router.get("/{run_id}", response_model=Run, operation_id="
-def
+@router.get("/{run_id}", response_model=Run, operation_id="retrieve_run")
+def retrieve_run(
     run_id: str,
     user_id: Optional[str] = Header(None, alias="user_id"),
     server: "SyncServer" = Depends(get_letta_server),

@@ -69,9 +69,9 @@ RunMessagesResponse = Annotated[
 @router.get(
     "/{run_id}/messages",
     response_model=RunMessagesResponse,
-    operation_id="
+    operation_id="list_run_messages",
 )
-async def
+async def list_run_messages(
     run_id: str,
     server: "SyncServer" = Depends(get_letta_server),
     user_id: Optional[str] = Header(None, alias="user_id"),

@@ -111,8 +111,8 @@ async def get_run_messages(
         raise HTTPException(status_code=404, detail=str(e))


-@router.get("/{run_id}/usage", response_model=UsageStatistics, operation_id="
-def
+@router.get("/{run_id}/usage", response_model=UsageStatistics, operation_id="retrieve_run_usage")
+def retrieve_run_usage(
     run_id: str,
     user_id: Optional[str] = Header(None, alias="user_id"),
     server: "SyncServer" = Depends(get_letta_server),
letta/server/rest_api/routers/v1/sources.py
CHANGED

@@ -19,8 +19,8 @@ from letta.utils import sanitize_filename
 router = APIRouter(prefix="/sources", tags=["sources"])


-@router.get("/{source_id}", response_model=Source, operation_id="
-def
+@router.get("/{source_id}", response_model=Source, operation_id="retrieve_source")
+def retrieve_source(
     source_id: str,
     server: "SyncServer" = Depends(get_letta_server),
     user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present

@@ -81,8 +81,8 @@ def create_source(
     return server.source_manager.create_source(source=source, actor=actor)


-@router.patch("/{source_id}", response_model=Source, operation_id="
-def
+@router.patch("/{source_id}", response_model=Source, operation_id="modify_source")
+def modify_source(
     source_id: str,
     source: SourceUpdate,
     server: "SyncServer" = Depends(get_letta_server),

@@ -111,36 +111,6 @@ def delete_source(
     server.delete_source(source_id=source_id, actor=actor)


-@router.post("/{source_id}/attach", response_model=Source, operation_id="attach_agent_to_source")
-def attach_source_to_agent(
-    source_id: str,
-    agent_id: str = Query(..., description="The unique identifier of the agent to attach the source to."),
-    server: "SyncServer" = Depends(get_letta_server),
-    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
-):
-    """
-    Attach a data source to an existing agent.
-    """
-    actor = server.user_manager.get_user_or_default(user_id=user_id)
-    server.agent_manager.attach_source(source_id=source_id, agent_id=agent_id, actor=actor)
-    return server.source_manager.get_source_by_id(source_id=source_id, actor=actor)
-
-
-@router.post("/{source_id}/detach", response_model=Source, operation_id="detach_agent_from_source")
-def detach_source_from_agent(
-    source_id: str,
-    agent_id: str = Query(..., description="The unique identifier of the agent to detach the source from."),
-    server: "SyncServer" = Depends(get_letta_server),
-    user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
-) -> None:
-    """
-    Detach a data source from an existing agent.
-    """
-    actor = server.user_manager.get_user_or_default(user_id=user_id)
-    server.agent_manager.detach_source(agent_id=agent_id, source_id=source_id, actor=actor)
-    return server.source_manager.get_source_by_id(source_id=source_id, actor=actor)
-
-
 @router.post("/{source_id}/upload", response_model=Job, operation_id="upload_file_to_source")
 def upload_file_to_source(
     file: UploadFile,

@@ -161,7 +131,7 @@ def upload_file_to_source(
     # create job
     job = Job(
         user_id=actor.id,
-
+        metadata={"type": "embedding", "filename": file.filename, "source_id": source_id},
         completed_at=None,
     )
     job_id = job.id

@@ -178,7 +148,7 @@ def upload_file_to_source(


 @router.get("/{source_id}/passages", response_model=List[Passage], operation_id="list_source_passages")
-def
+def list_source_passages(
     source_id: str,
     server: SyncServer = Depends(get_letta_server),
     user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present

@@ -191,8 +161,8 @@ def list_passages(
     return passages


-@router.get("/{source_id}/files", response_model=List[FileMetadata], operation_id="
-def
+@router.get("/{source_id}/files", response_model=List[FileMetadata], operation_id="list_source_files")
+def list_source_files(
     source_id: str,
     limit: int = Query(1000, description="Number of files to return"),
     cursor: Optional[str] = Query(None, description="Pagination cursor to fetch the next set of results"),
letta/server/rest_api/routers/v1/tags.py
CHANGED

@@ -12,7 +12,7 @@ router = APIRouter(prefix="/tags", tags=["tag", "admin"])


 @router.get("/", tags=["admin"], response_model=List[str], operation_id="list_tags")
-def
+def list_tags(
     cursor: Optional[str] = Query(None),
     limit: Optional[int] = Query(50),
     server: "SyncServer" = Depends(get_letta_server),
letta/server/rest_api/routers/v1/tools.py
CHANGED

@@ -31,8 +31,8 @@ def delete_tool(
     server.tool_manager.delete_tool_by_id(tool_id=tool_id, actor=actor)


-@router.get("/{tool_id}", response_model=Tool, operation_id="
-def
+@router.get("/{tool_id}", response_model=Tool, operation_id="retrieve_tool")
+def retrieve_tool(
     tool_id: str,
     server: SyncServer = Depends(get_letta_server),
     user_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present

@@ -122,8 +122,8 @@ def upsert_tool(
         raise HTTPException(status_code=500, detail=f"An unexpected error occurred: {str(e)}")


-@router.patch("/{tool_id}", response_model=Tool, operation_id="
-def
+@router.patch("/{tool_id}", response_model=Tool, operation_id="modify_tool")
+def modify_tool(
     tool_id: str,
     request: ToolUpdate = Body(...),
     server: SyncServer = Depends(get_letta_server),

@@ -220,11 +220,10 @@ def add_composio_tool(
     Add a new Composio tool by action name (Composio refers to each tool as an `Action`)
     """
     actor = server.user_manager.get_user_or_default(user_id=user_id)
-    composio_api_key = get_composio_key(server, actor=actor)

     try:
-        tool_create = ToolCreate.from_composio(action_name=composio_action_name
-        return server.tool_manager.
+        tool_create = ToolCreate.from_composio(action_name=composio_action_name)
+        return server.tool_manager.create_or_update_composio_tool(pydantic_tool=Tool(**tool_create.model_dump()), actor=actor)
     except EnumStringNotFound as e:
         raise HTTPException(
             status_code=400,  # Bad Request
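Note: the router changes above are mostly renames of the OpenAPI operation_id (get_* to retrieve_*, update_* to modify_*). That changes the method names emitted by generated SDK clients, but the HTTP routes and request shapes stay the same, so a plain HTTP call is unaffected. A minimal sketch, assuming a locally running Letta server and that the runs router is mounted under /v1/runs; the base URL, run id, and user id below are placeholders, not values from this diff:

# Placeholder values throughout; only the route shape comes from the diff above.
import requests

BASE_URL = "http://localhost:8283/v1"  # assumed local server address
resp = requests.get(
    f"{BASE_URL}/runs/run-123/usage",   # handled by retrieve_run_usage
    headers={"user_id": "user-abc"},    # user_id is read from this header
)
resp.raise_for_status()
print(resp.json())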
letta/server/server.py
CHANGED
@@ -956,8 +956,8 @@ class SyncServer(Server):

         # update job status
         job.status = JobStatus.completed
-        job.
-        job.
+        job.metadata["num_passages"] = num_passages
+        job.metadata["num_documents"] = num_documents
         self.job_manager.update_job_by_id(job_id=job_id, job_update=JobUpdate(**job.model_dump()), actor=actor)

         # update all agents who have this source attached

@@ -1019,7 +1019,7 @@ class SyncServer(Server):
         attached_agents = [{"id": agent.id, "name": agent.name} for agent in agents]

         # Overwrite metadata field, should be empty anyways
-        source.
+        source.metadata = dict(
             num_documents=num_documents,
             num_passages=num_passages,
             attached_agents=attached_agents,
letta/services/agent_manager.py
CHANGED
@@ -25,6 +25,7 @@ from letta.schemas.message import Message as PydanticMessage
 from letta.schemas.message import MessageCreate
 from letta.schemas.passage import Passage as PydanticPassage
 from letta.schemas.source import Source as PydanticSource
+from letta.schemas.tool import Tool as PydanticTool
 from letta.schemas.tool_rule import ToolRule as PydanticToolRule
 from letta.schemas.user import User as PydanticUser
 from letta.services.block_manager import BlockManager

@@ -81,7 +82,7 @@ class AgentManager:
         block_ids = list(agent_create.block_ids or [])  # Create a local copy to avoid modifying the original
         if agent_create.memory_blocks:
             for create_block in agent_create.memory_blocks:
-                block = self.block_manager.create_or_update_block(PydanticBlock(**create_block.model_dump()), actor=actor)
+                block = self.block_manager.create_or_update_block(PydanticBlock(**create_block.model_dump(to_orm=True)), actor=actor)
                 block_ids.append(block.id)

         # TODO: Remove this block once we deprecate the legacy `tools` field

@@ -116,7 +117,7 @@ class AgentManager:
             source_ids=agent_create.source_ids or [],
             tags=agent_create.tags or [],
             description=agent_create.description,
-
+            metadata=agent_create.metadata,
             tool_rules=agent_create.tool_rules,
             actor=actor,
         )

@@ -176,7 +177,7 @@ class AgentManager:
         source_ids: List[str],
         tags: List[str],
         description: Optional[str] = None,
-
+        metadata: Optional[Dict] = None,
         tool_rules: Optional[List[PydanticToolRule]] = None,
     ) -> PydanticAgentState:
         """Create a new agent."""

@@ -190,7 +191,7 @@ class AgentManager:
                 "embedding_config": embedding_config,
                 "organization_id": actor.organization_id,
                 "description": description,
-                "metadata_":
+                "metadata_": metadata,
                 "tool_rules": tool_rules,
             }

@@ -241,11 +242,14 @@ class AgentManager:
             agent = AgentModel.read(db_session=session, identifier=agent_id, actor=actor)

             # Update scalar fields directly
-            scalar_fields = {"name", "system", "llm_config", "embedding_config", "message_ids", "tool_rules", "description", "
+            scalar_fields = {"name", "system", "llm_config", "embedding_config", "message_ids", "tool_rules", "description", "metadata"}
             for field in scalar_fields:
                 value = getattr(agent_update, field, None)
                 if value is not None:
-
+                    if field == "metadata":
+                        setattr(agent, "metadata_", value)
+                    else:
+                        setattr(agent, field, value)

             # Update relationships using _process_relationship and _process_tags
             if agent_update.tool_ids is not None:

@@ -464,6 +468,12 @@ class AgentManager:
         new_messages = [message_ids[0]] + message_ids[num:]  # 0 is system message
         return self.set_in_context_messages(agent_id=agent_id, message_ids=new_messages, actor=actor)

+    @enforce_types
+    def trim_all_in_context_messages_except_system(self, agent_id: str, actor: PydanticUser) -> PydanticAgentState:
+        message_ids = self.get_agent_by_id(agent_id=agent_id, actor=actor).message_ids
+        new_messages = [message_ids[0]]  # 0 is system message
+        return self.set_in_context_messages(agent_id=agent_id, message_ids=new_messages, actor=actor)
+
     @enforce_types
     def prepend_to_in_context_messages(self, messages: List[PydanticMessage], agent_id: str, actor: PydanticUser) -> PydanticAgentState:
         message_ids = self.get_agent_by_id(agent_id=agent_id, actor=actor).message_ids

@@ -531,7 +541,7 @@ class AgentManager:
     # Source Management
     # ======================================================================================================================
     @enforce_types
-    def attach_source(self, agent_id: str, source_id: str, actor: PydanticUser) ->
+    def attach_source(self, agent_id: str, source_id: str, actor: PydanticUser) -> PydanticAgentState:
         """
         Attaches a source to an agent.

@@ -561,6 +571,7 @@ class AgentManager:

             # Commit the changes
             agent.update(session, actor=actor)
+            return agent.to_pydantic()

     @enforce_types
     def list_attached_sources(self, agent_id: str, actor: PydanticUser) -> List[PydanticSource]:

@@ -582,7 +593,7 @@ class AgentManager:
         return [source.to_pydantic() for source in agent.sources]

     @enforce_types
-    def detach_source(self, agent_id: str, source_id: str, actor: PydanticUser) ->
+    def detach_source(self, agent_id: str, source_id: str, actor: PydanticUser) -> PydanticAgentState:
         """
         Detaches a source from an agent.

@@ -596,10 +607,17 @@ class AgentManager:
             agent = AgentModel.read(db_session=session, identifier=agent_id, actor=actor)

             # Remove the source from the relationship
-
+            remaining_sources = [s for s in agent.sources if s.id != source_id]
+
+            if len(remaining_sources) == len(agent.sources):  # Source ID was not in the relationship
+                logger.warning(f"Attempted to remove unattached source id={source_id} from agent id={agent_id} by actor={actor}")
+
+            # Update the sources relationship
+            agent.sources = remaining_sources

             # Commit the changes
             agent.update(session, actor=actor)
+            return agent.to_pydantic()

     # ======================================================================================================================
     # Block management

@@ -1005,6 +1023,22 @@ class AgentManager:
             agent.update(session, actor=actor)
             return agent.to_pydantic()

+    @enforce_types
+    def list_attached_tools(self, agent_id: str, actor: PydanticUser) -> List[PydanticTool]:
+        """
+        List all tools attached to an agent.
+
+        Args:
+            agent_id: ID of the agent to list tools for.
+            actor: User performing the action.
+
+        Returns:
+            List[PydanticTool]: List of tools attached to the agent.
+        """
+        with self.session_maker() as session:
+            agent = AgentModel.read(db_session=session, identifier=agent_id, actor=actor)
+            return [tool.to_pydantic() for tool in agent.tools]
+
     # ======================================================================================================================
     # Tag Management
     # ======================================================================================================================
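Note: the `metadata` handling above (Pydantic field `metadata`, ORM column `metadata_`) and the recurring `model_dump(to_orm=True)` calls in the service managers below suggest that the schemas now expose `metadata` publicly while the SQLAlchemy models keep the trailing-underscore column, with `to_orm=True` translating between the two (plausibly part of the `letta/schemas/letta_base.py +6` change). A minimal hypothetical sketch of that idea; the class and override below are illustrative assumptions, not the actual Letta implementation:

# Hypothetical sketch only: a base schema whose model_dump(to_orm=True)
# renames fields that collide with SQLAlchemy reserved names.
from typing import Any
from pydantic import BaseModel

class OrmFriendlyBase(BaseModel):
    def model_dump(self, to_orm: bool = False, **kwargs: Any) -> dict:
        data = super().model_dump(**kwargs)
        if to_orm and "metadata" in data:
            data["metadata_"] = data.pop("metadata")  # ORM column is metadata_
        return data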
letta/services/block_manager.py
CHANGED
@@ -24,11 +24,11 @@ class BlockManager:
         """Create a new block based on the Block schema."""
         db_block = self.get_block_by_id(block.id, actor)
         if db_block:
-            update_data = BlockUpdate(**block.model_dump(exclude_none=True))
+            update_data = BlockUpdate(**block.model_dump(to_orm=True, exclude_none=True))
             self.update_block(block.id, update_data, actor)
         else:
             with self.session_maker() as session:
-                data = block.model_dump(exclude_none=True)
+                data = block.model_dump(to_orm=True, exclude_none=True)
                 block = BlockModel(**data, organization_id=actor.organization_id)
                 block.create(session, actor=actor)
                 return block.to_pydantic()

@@ -40,7 +40,7 @@ class BlockManager:

         with self.session_maker() as session:
             block = BlockModel.read(db_session=session, identifier=block_id, actor=actor)
-            update_data = block_update.model_dump(exclude_unset=True, exclude_none=True)
+            update_data = block_update.model_dump(to_orm=True, exclude_unset=True, exclude_none=True)

             for key, value in update_data.items():
                 setattr(block, key, value)
letta/services/job_manager.py
CHANGED
@@ -39,7 +39,7 @@ class JobManager:
         with self.session_maker() as session:
             # Associate the job with the user
             pydantic_job.user_id = actor.id
-            job_data = pydantic_job.model_dump()
+            job_data = pydantic_job.model_dump(to_orm=True)
             job = JobModel(**job_data)
             job.create(session, actor=actor)  # Save job in the database
             return job.to_pydantic()

@@ -52,7 +52,7 @@ class JobManager:
             job = self._verify_job_access(session=session, job_id=job_id, actor=actor, access=["write"])

             # Update job attributes with only the fields that were explicitly set
-            update_data = job_update.model_dump(exclude_unset=True, exclude_none=True)
+            update_data = job_update.model_dump(to_orm=True, exclude_unset=True, exclude_none=True)

             # Automatically update the completion timestamp if status is set to 'completed'
             if update_data.get("status") == JobStatus.completed and not job.completed_at:

@@ -62,7 +62,9 @@ class JobManager:
                 setattr(job, key, value)

             # Save the updated job to the database
-
+            job.update(db_session=session)  # TODO: Add this later , actor=actor)
+
+            return job.to_pydantic()

     @enforce_types
     def get_job_by_id(self, job_id: str, actor: PydanticUser) -> PydanticJob:

letta/services/organization_manager.py
CHANGED

@@ -44,7 +44,7 @@ class OrganizationManager:
     @enforce_types
     def _create_organization(self, pydantic_org: PydanticOrganization) -> PydanticOrganization:
         with self.session_maker() as session:
-            org = OrganizationModel(**pydantic_org.model_dump())
+            org = OrganizationModel(**pydantic_org.model_dump(to_orm=True))
             org.create(session)
             return org.to_pydantic()

letta/services/passage_manager.py
CHANGED

@@ -38,14 +38,14 @@ class PassageManager:
     def create_passage(self, pydantic_passage: PydanticPassage, actor: PydanticUser) -> PydanticPassage:
         """Create a new passage in the appropriate table based on whether it has agent_id or source_id."""
         # Common fields for both passage types
-        data = pydantic_passage.model_dump()
+        data = pydantic_passage.model_dump(to_orm=True)
         common_fields = {
             "id": data.get("id"),
             "text": data["text"],
             "embedding": data["embedding"],
             "embedding_config": data["embedding_config"],
             "organization_id": data["organization_id"],
-            "metadata_": data.get("
+            "metadata_": data.get("metadata", {}),
             "is_deleted": data.get("is_deleted", False),
             "created_at": data.get("created_at", datetime.utcnow()),
         }

@@ -145,7 +145,7 @@ class PassageManager:
             raise ValueError(f"Passage with id {passage_id} does not exist.")

         # Update the database record with values from the provided record
-        update_data = passage.model_dump(exclude_unset=True, exclude_none=True)
+        update_data = passage.model_dump(to_orm=True, exclude_unset=True, exclude_none=True)
         for key, value in update_data.items():
             setattr(curr_passage, key, value)

letta/services/provider_manager.py
CHANGED

@@ -24,7 +24,7 @@ class ProviderManager:
             # Lazily create the provider id prior to persistence
             provider.resolve_identifier()

-            new_provider = ProviderModel(**provider.model_dump(exclude_unset=True))
+            new_provider = ProviderModel(**provider.model_dump(to_orm=True, exclude_unset=True))
             new_provider.create(session)
             return new_provider.to_pydantic()

@@ -36,7 +36,7 @@ class ProviderManager:
             existing_provider = ProviderModel.read(db_session=session, identifier=provider_update.id)

             # Update only the fields that are provided in ProviderUpdate
-            update_data = provider_update.model_dump(exclude_unset=True, exclude_none=True)
+            update_data = provider_update.model_dump(to_orm=True, exclude_unset=True, exclude_none=True)
             for key, value in update_data.items():
                 setattr(existing_provider, key, value)

letta/services/sandbox_config_manager.py
CHANGED

@@ -172,7 +172,7 @@ class SandboxConfigManager:
             return db_env_var
         else:
             with self.session_maker() as session:
-                env_var = SandboxEnvVarModel(**env_var.model_dump(exclude_none=True))
+                env_var = SandboxEnvVarModel(**env_var.model_dump(to_orm=True, exclude_none=True))
                 env_var.create(session, actor=actor)
                 return env_var.to_pydantic()

@@ -183,7 +183,7 @@ class SandboxConfigManager:
         """Update an existing sandbox environment variable."""
         with self.session_maker() as session:
             env_var = SandboxEnvVarModel.read(db_session=session, identifier=env_var_id, actor=actor)
-            update_data = env_var_update.model_dump(exclude_unset=True, exclude_none=True)
+            update_data = env_var_update.model_dump(to_orm=True, exclude_unset=True, exclude_none=True)
             update_data = {key: value for key, value in update_data.items() if getattr(env_var, key) != value}

             if update_data:
letta/services/source_manager.py
CHANGED
@@ -30,7 +30,7 @@ class SourceManager:
         with self.session_maker() as session:
             # Provide default embedding config if not given
             source.organization_id = actor.organization_id
-            source = SourceModel(**source.model_dump(exclude_none=True))
+            source = SourceModel(**source.model_dump(to_orm=True, exclude_none=True))
             source.create(session, actor=actor)
             return source.to_pydantic()

@@ -41,7 +41,7 @@ class SourceManager:
             source = SourceModel.read(db_session=session, identifier=source_id, actor=actor)

             # get update dictionary
-            update_data = source_update.model_dump(exclude_unset=True, exclude_none=True)
+            update_data = source_update.model_dump(to_orm=True, exclude_unset=True, exclude_none=True)
             # Remove redundant update fields
             update_data = {key: value for key, value in update_data.items() if getattr(source, key) != value}

@@ -132,7 +132,7 @@ class SourceManager:
         else:
             with self.session_maker() as session:
                 file_metadata.organization_id = actor.organization_id
-                file_metadata = FileMetadataModel(**file_metadata.model_dump(exclude_none=True))
+                file_metadata = FileMetadataModel(**file_metadata.model_dump(to_orm=True, exclude_none=True))
                 file_metadata.create(session, actor=actor)
                 return file_metadata.to_pydantic()

letta/services/tool_execution_sandbox.py
CHANGED

@@ -364,7 +364,9 @@ class ToolExecutionSandbox:
             sbx = Sandbox(sandbox_config.get_e2b_config().template, metadata={self.METADATA_CONFIG_STATE_KEY: state_hash})
         else:
             # no template
-            sbx = Sandbox(
+            sbx = Sandbox(
+                metadata={self.METADATA_CONFIG_STATE_KEY: state_hash}, **e2b_config.model_dump(to_orm=True, exclude={"pip_requirements"})
+            )

         # install pip requirements
         if e2b_config.pip_requirements:
letta/services/tool_manager.py
CHANGED
@@ -39,7 +39,7 @@ class ToolManager:
         tool = self.get_tool_by_name(tool_name=pydantic_tool.name, actor=actor)
         if tool:
             # Put to dict and remove fields that should not be reset
-            update_data = pydantic_tool.model_dump(exclude_unset=True, exclude_none=True)
+            update_data = pydantic_tool.model_dump(to_orm=True, exclude_unset=True, exclude_none=True)

             # If there's anything to update
             if update_data:

@@ -53,6 +53,11 @@ class ToolManager:

         return tool

+    @enforce_types
+    def create_or_update_composio_tool(self, pydantic_tool: PydanticTool, actor: PydanticUser) -> PydanticTool:
+        pydantic_tool.tool_type = ToolType.EXTERNAL_COMPOSIO
+        return self.create_or_update_tool(pydantic_tool, actor)
+
     @enforce_types
     def create_tool(self, pydantic_tool: PydanticTool, actor: PydanticUser) -> PydanticTool:
         """Create a new tool based on the ToolCreate schema."""

@@ -62,7 +67,7 @@ class ToolManager:
             # Auto-generate description if not provided
             if pydantic_tool.description is None:
                 pydantic_tool.description = pydantic_tool.json_schema.get("description", None)
-            tool_data = pydantic_tool.model_dump()
+            tool_data = pydantic_tool.model_dump(to_orm=True)

             tool = ToolModel(**tool_data)
             tool.create(session, actor=actor)  # Re-raise other database-related errors

@@ -107,7 +112,7 @@ class ToolManager:
             tool = ToolModel.read(db_session=session, identifier=tool_id, actor=actor)

             # Update tool attributes with only the fields that were explicitly set
-            update_data = tool_update.model_dump(exclude_none=True)
+            update_data = tool_update.model_dump(to_orm=True, exclude_none=True)
             for key, value in update_data.items():
                 setattr(tool, key, value)
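Note: `create_or_update_composio_tool` only stamps the tool as `ToolType.EXTERNAL_COMPOSIO` before the regular upsert, which is what the updated `add_composio_tool` route above relies on. A hedged sketch of that flow as a helper; the action name is a placeholder and the caller must already hold a SyncServer instance and an actor:

# Sketch of the flow shown in the diff; action_name and the server/actor
# arguments are supplied by the caller, error handling omitted.
from letta.schemas.tool import Tool, ToolCreate

def register_composio_action(server, actor, action_name: str) -> Tool:
    tool_create = ToolCreate.from_composio(action_name=action_name)
    # The manager forces tool_type = ToolType.EXTERNAL_COMPOSIO, then upserts.
    return server.tool_manager.create_or_update_composio_tool(
        pydantic_tool=Tool(**tool_create.model_dump()), actor=actor
    )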
letta/services/user_manager.py
CHANGED
@@ -45,7 +45,7 @@ class UserManager:
     def create_user(self, pydantic_user: PydanticUser) -> PydanticUser:
         """Create a new user if it doesn't already exist."""
         with self.session_maker() as session:
-            new_user = UserModel(**pydantic_user.model_dump())
+            new_user = UserModel(**pydantic_user.model_dump(to_orm=True))
             new_user.create(session)
             return new_user.to_pydantic()

@@ -57,7 +57,7 @@ class UserManager:
             existing_user = UserModel.read(db_session=session, identifier=user_update.id)

             # Update only the fields that are provided in UserUpdate
-            update_data = user_update.model_dump(exclude_unset=True, exclude_none=True)
+            update_data = user_update.model_dump(to_orm=True, exclude_unset=True, exclude_none=True)
             for key, value in update_data.items():
                 setattr(existing_user, key, value)
letta/settings.py
CHANGED
@@ -18,6 +18,34 @@ class ToolSettings(BaseSettings):
     local_sandbox_dir: Optional[str] = None


+class SummarizerSettings(BaseSettings):
+    model_config = SettingsConfigDict(env_prefix="letta_summarizer_", extra="ignore")
+
+    # Controls if we should evict all messages
+    # TODO: Can refactor this into an enum if we have a bunch of different kinds of summarizers
+    evict_all_messages: bool = False
+
+    # The maximum number of retries for the summarizer
+    # If we reach this cutoff, it probably means that the summarizer is not compressing down the in-context messages any further
+    # And we throw a fatal error
+    max_summarizer_retries: int = 3
+
+    # When to warn the model that a summarize command will happen soon
+    # The amount of tokens before a system warning about upcoming truncation is sent to Letta
+    memory_warning_threshold: float = 0.75
+
+    # Whether to send the system memory warning message
+    send_memory_warning_message: bool = False
+
+    # The desired memory pressure to summarize down to
+    desired_memory_token_pressure: float = 0.3
+
+    # The number of messages at the end to keep
+    # Even when summarizing, we may want to keep a handful of recent messages
+    # These serve as in-context examples of how to use functions / what user messages look like
+    keep_last_n_messages: int = 0
+
+
 class ModelSettings(BaseSettings):

     model_config = SettingsConfigDict(env_file=".env", extra="ignore")

@@ -147,3 +175,4 @@ settings = Settings(_env_parse_none_str="None")
 test_settings = TestSettings()
 model_settings = ModelSettings()
 tool_settings = ToolSettings()
+summarizer_settings = SummarizerSettings()
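Note: `SummarizerSettings` is a pydantic-settings class with `env_prefix="letta_summarizer_"`, so each field can be overridden through environment variables. A small illustrative sketch; the values are examples, not release defaults:

# Illustrative only: override summarizer behavior via environment variables,
# then import the module-level settings object added in this release.
import os

os.environ["LETTA_SUMMARIZER_EVICT_ALL_MESSAGES"] = "true"
os.environ["LETTA_SUMMARIZER_DESIRED_MEMORY_TOKEN_PRESSURE"] = "0.5"

from letta.settings import summarizer_settings  # instantiated at import time

print(summarizer_settings.evict_all_messages)             # True
print(summarizer_settings.desired_memory_token_pressure)  # 0.5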
letta/system.py
CHANGED
@@ -161,10 +161,10 @@ def package_system_message(system_message, message_type="system_alert", time=None):
     return json.dumps(packaged_message)


-def package_summarize_message(summary,
+def package_summarize_message(summary, summary_message_count, hidden_message_count, total_message_count, timestamp=None):
     context_message = (
         f"Note: prior messages ({hidden_message_count} of {total_message_count} total messages) have been hidden from view due to conversation memory constraints.\n"
-        + f"The following is a summary of the previous {
+        + f"The following is a summary of the previous {summary_message_count} messages:\n {summary}"
     )

     formatted_time = get_local_time() if timestamp is None else timestamp
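Note: with the restored signature, `package_summarize_message` takes the summary text plus three message counts and an optional timestamp. A hedged usage sketch; the counts and summary string are invented, and the packaged result is assumed to be the JSON string produced by the surrounding helpers:

# Illustrative call only; argument values are made up for the example.
from letta.system import package_summarize_message

packed = package_summarize_message(
    summary="The user introduced themselves and asked about memory limits.",
    summary_message_count=40,   # messages covered by the summary
    hidden_message_count=40,    # messages hidden from the context window
    total_message_count=55,     # total messages in the conversation
    timestamp=None,             # defaults to the local time
)
print(packed)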