letta-nightly 0.6.53.dev20250418104238__py3-none-any.whl → 0.6.54.dev20250419104029__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- letta/__init__.py +1 -1
- letta/agent.py +6 -31
- letta/agents/letta_agent.py +1 -0
- letta/agents/letta_agent_batch.py +369 -18
- letta/constants.py +15 -4
- letta/functions/function_sets/base.py +168 -21
- letta/groups/sleeptime_multi_agent.py +3 -3
- letta/helpers/converters.py +1 -1
- letta/helpers/message_helper.py +1 -0
- letta/jobs/llm_batch_job_polling.py +39 -10
- letta/jobs/scheduler.py +54 -13
- letta/jobs/types.py +26 -6
- letta/llm_api/anthropic_client.py +3 -1
- letta/llm_api/llm_api_tools.py +7 -1
- letta/llm_api/openai.py +2 -0
- letta/orm/agent.py +5 -29
- letta/orm/base.py +2 -2
- letta/orm/enums.py +1 -0
- letta/orm/job.py +5 -0
- letta/orm/llm_batch_items.py +2 -2
- letta/orm/llm_batch_job.py +5 -2
- letta/orm/message.py +12 -4
- letta/orm/passage.py +0 -6
- letta/orm/sqlalchemy_base.py +0 -3
- letta/personas/examples/sleeptime_doc_persona.txt +2 -0
- letta/prompts/system/sleeptime.txt +20 -11
- letta/prompts/system/sleeptime_doc_ingest.txt +35 -0
- letta/schemas/agent.py +24 -1
- letta/schemas/enums.py +3 -1
- letta/schemas/job.py +39 -0
- letta/schemas/letta_message.py +24 -7
- letta/schemas/letta_request.py +7 -2
- letta/schemas/letta_response.py +3 -1
- letta/schemas/llm_batch_job.py +4 -3
- letta/schemas/llm_config.py +6 -2
- letta/schemas/message.py +11 -1
- letta/schemas/providers.py +10 -58
- letta/serialize_schemas/marshmallow_agent.py +25 -22
- letta/serialize_schemas/marshmallow_message.py +1 -1
- letta/server/db.py +75 -49
- letta/server/rest_api/app.py +1 -0
- letta/server/rest_api/interface.py +7 -2
- letta/server/rest_api/routers/v1/__init__.py +2 -0
- letta/server/rest_api/routers/v1/agents.py +33 -6
- letta/server/rest_api/routers/v1/messages.py +132 -0
- letta/server/rest_api/routers/v1/sources.py +21 -2
- letta/server/rest_api/utils.py +23 -10
- letta/server/server.py +67 -21
- letta/services/agent_manager.py +44 -21
- letta/services/group_manager.py +2 -2
- letta/services/helpers/agent_manager_helper.py +5 -3
- letta/services/job_manager.py +34 -5
- letta/services/llm_batch_manager.py +200 -57
- letta/services/message_manager.py +23 -1
- letta/services/passage_manager.py +2 -2
- letta/services/tool_executor/tool_execution_manager.py +13 -3
- letta/services/tool_executor/tool_execution_sandbox.py +0 -1
- letta/services/tool_executor/tool_executor.py +48 -9
- letta/services/tool_sandbox/base.py +24 -6
- letta/services/tool_sandbox/e2b_sandbox.py +25 -5
- letta/services/tool_sandbox/local_sandbox.py +23 -7
- letta/settings.py +2 -2
- {letta_nightly-0.6.53.dev20250418104238.dist-info → letta_nightly-0.6.54.dev20250419104029.dist-info}/METADATA +2 -1
- {letta_nightly-0.6.53.dev20250418104238.dist-info → letta_nightly-0.6.54.dev20250419104029.dist-info}/RECORD +67 -65
- letta/sleeptime_agent.py +0 -61
- {letta_nightly-0.6.53.dev20250418104238.dist-info → letta_nightly-0.6.54.dev20250419104029.dist-info}/LICENSE +0 -0
- {letta_nightly-0.6.53.dev20250418104238.dist-info → letta_nightly-0.6.54.dev20250419104029.dist-info}/WHEEL +0 -0
- {letta_nightly-0.6.53.dev20250418104238.dist-info → letta_nightly-0.6.54.dev20250419104029.dist-info}/entry_points.txt +0 -0
@@ -121,7 +121,15 @@ def delete_source(
     Delete a data source.
     """
     actor = server.user_manager.get_user_or_default(user_id=actor_id)
-
+    source = server.source_manager.get_source_by_id(source_id=source_id)
+    agents = server.source_manager.list_attached_agents(source_id=source_id, actor=actor)
+    for agent in agents:
+        if agent.enable_sleeptime:
+            try:
+                block = server.agent_manager.get_block_with_label(agent_id=agent.id, block_label=source.name, actor=actor)
+                server.block_manager.delete_block(block.id, actor)
+            except:
+                pass
     server.delete_source(source_id=source_id, actor=actor)

@@ -151,8 +159,9 @@ def upload_file_to_source(
     job_id = job.id
     server.job_manager.create_job(job, actor=actor)

-    # create background
+    # create background tasks
     background_tasks.add_task(load_file_to_source_async, server, source_id=source.id, file=file, job_id=job.id, bytes=bytes, actor=actor)
+    background_tasks.add_task(sleeptime_document_ingest_async, server, source_id, actor)

     # return job information
     # Is this necessary? Can we just return the job from create_job?

@@ -196,6 +205,7 @@ def list_source_files(
 def delete_file_from_source(
     source_id: str,
     file_id: str,
+    background_tasks: BackgroundTasks,
     server: "SyncServer" = Depends(get_letta_server),
     actor_id: Optional[str] = Header(None, alias="user_id"),  # Extract user_id from header, default to None if not present
 ):

@@ -205,6 +215,7 @@ def delete_file_from_source(
     actor = server.user_manager.get_user_or_default(user_id=actor_id)

     deleted_file = server.source_manager.delete_file(file_id=file_id, actor=actor)
+    background_tasks.add_task(sleeptime_document_ingest_async, server, source_id, actor, clear_history=True)
     if deleted_file is None:
         raise HTTPException(status_code=404, detail=f"File with id={file_id} not found.")

@@ -222,3 +233,11 @@ def load_file_to_source_async(server: SyncServer, source_id: str, job_id: str, f

     # Pass the file to load_file_to_source
     server.load_file_to_source(source_id, file_path, job_id, actor)
+
+
+def sleeptime_document_ingest_async(server: SyncServer, source_id: str, actor: User, clear_history: bool = False):
+    source = server.source_manager.get_source_by_id(source_id=source_id)
+    agents = server.source_manager.list_attached_agents(source_id=source_id, actor=actor)
+    for agent in agents:
+        if agent.enable_sleeptime:
+            server.sleeptime_document_ingest(agent, source, actor, clear_history)
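Note: the new sleeptime_document_ingest_async helper above is scheduled through FastAPI's BackgroundTasks, so the upload and delete endpoints can return immediately while ingestion runs after the response is sent. A minimal sketch of that pattern (hypothetical route and task names, not code from this package):

from fastapi import BackgroundTasks, FastAPI

app = FastAPI()


def ingest_after_response(source_id: str) -> None:
    # Stand-in for slow work such as parsing a file and feeding passages to an agent.
    print(f"ingesting source {source_id}")


@app.post("/sources/{source_id}/upload")
def upload(source_id: str, background_tasks: BackgroundTasks) -> dict:
    # add_task queues the callable; FastAPI invokes it only after the response has been sent.
    background_tasks.add_task(ingest_after_response, source_id)
    return {"status": "accepted", "source_id": source_id}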
letta/server/rest_api/utils.py
CHANGED
@@ -153,6 +153,7 @@ def create_input_messages(input_messages: List[MessageCreate], agent_id: str, ac
             content=input_message.content,
             name=input_message.name,
             otid=input_message.otid,
+            sender_id=input_message.sender_id,
             organization_id=actor.organization_id,
             agent_id=agent_id,
             model=None,

@@ -224,22 +225,34 @@ def create_letta_messages_from_llm_response(
     messages.append(tool_message)

     if add_heartbeat_request_system_message:
-
-
-            role=MessageRole.user,
-            content=[TextContent(text=get_heartbeat(text_content))],
-            organization_id=actor.organization_id,
-            agent_id=agent_id,
-            model=model,
-            tool_calls=[],
-            tool_call_id=None,
-            created_at=get_utc_time(),
+        heartbeat_system_message = create_heartbeat_system_message(
+            agent_id=agent_id, model=model, function_call_success=function_call_success, actor=actor
         )
         messages.append(heartbeat_system_message)

     return messages


+def create_heartbeat_system_message(
+    agent_id: str,
+    model: str,
+    function_call_success: bool,
+    actor: User,
+) -> Message:
+    text_content = REQ_HEARTBEAT_MESSAGE if function_call_success else FUNC_FAILED_HEARTBEAT_MESSAGE
+    heartbeat_system_message = Message(
+        role=MessageRole.user,
+        content=[TextContent(text=get_heartbeat(text_content))],
+        organization_id=actor.organization_id,
+        agent_id=agent_id,
+        model=model,
+        tool_calls=[],
+        tool_call_id=None,
+        created_at=get_utc_time(),
+    )
+    return heartbeat_system_message
+
+
 def create_assistant_messages_from_openai_response(
     response_text: str,
     agent_id: str,
letta/server/server.py
CHANGED
@@ -34,8 +34,9 @@ from letta.interface import AgentInterface  # abstract
 from letta.interface import CLIInterface  # for printing to terminal
 from letta.log import get_logger
 from letta.orm.errors import NoResultFound
+from letta.prompts.gpt_system import get_system_text
 from letta.schemas.agent import AgentState, AgentType, CreateAgent, UpdateAgent
-from letta.schemas.block import BlockUpdate, CreateBlock
+from letta.schemas.block import Block, BlockUpdate, CreateBlock
 from letta.schemas.embedding_config import EmbeddingConfig

 # openai schemas

@@ -67,7 +68,7 @@ from letta.schemas.providers import (
     TogetherProvider,
     VLLMChatCompletionsProvider,
     VLLMCompletionsProvider,
-
+    XAIProvider,
 )
 from letta.schemas.sandbox_config import SandboxType
 from letta.schemas.source import Source

@@ -94,9 +95,8 @@ from letta.services.tool_executor.tool_execution_sandbox import ToolExecutionSan
 from letta.services.tool_manager import ToolManager
 from letta.services.user_manager import UserManager
 from letta.settings import model_settings, settings, tool_settings
-from letta.sleeptime_agent import SleeptimeAgent
 from letta.tracing import log_event, trace_method
-from letta.utils import get_friendly_error_msg, make_key
+from letta.utils import get_friendly_error_msg, get_persona_text, make_key

 config = LettaConfig.load()
 logger = get_logger(__name__)

@@ -321,7 +321,7 @@ class SyncServer(Server):
         if model_settings.deepseek_api_key:
             self._enabled_providers.append(DeepSeekProvider(api_key=model_settings.deepseek_api_key))
         if model_settings.xai_api_key:
-            self._enabled_providers.append(
+            self._enabled_providers.append(XAIProvider(api_key=model_settings.xai_api_key))

         # For MCP
         """Initialize the MCP clients (there may be multiple)"""

@@ -365,14 +365,7 @@ class SyncServer(Server):
         )

         interface = interface or self.default_interface_factory()
-
-            agent = Agent(agent_state=agent_state, interface=interface, user=actor, mcp_clients=self.mcp_clients)
-        elif agent_state.agent_type == AgentType.sleeptime_agent:
-            agent = SleeptimeAgent(agent_state=agent_state, interface=interface, user=actor)
-        else:
-            raise ValueError(f"Invalid agent type {agent_state.agent_type}")
-
-        return agent
+        return Agent(agent_state=agent_state, interface=interface, user=actor, mcp_clients=self.mcp_clients)

     def _step(
         self,

@@ -996,12 +989,6 @@ class SyncServer(Server):
         connector = DirectoryConnector(input_files=[file_path])
         num_passages, num_documents = self.load_data(user_id=source.created_by_id, source_name=source.name, connector=connector)

-        # update job status
-        job.status = JobStatus.completed
-        job.metadata["num_passages"] = num_passages
-        job.metadata["num_documents"] = num_documents
-        self.job_manager.update_job_by_id(job_id=job_id, job_update=JobUpdate(**job.model_dump()), actor=actor)
-
         # update all agents who have this source attached
         agent_states = self.source_manager.list_attached_agents(source_id=source_id, actor=actor)
         for agent_state in agent_states:

@@ -1009,15 +996,74 @@

             # Attach source to agent
             curr_passage_size = self.agent_manager.passage_size(actor=actor, agent_id=agent_id)
-            self.agent_manager.attach_source(agent_id=agent_state.id, source_id=source_id, actor=actor)
+            agent_state = self.agent_manager.attach_source(agent_id=agent_state.id, source_id=source_id, actor=actor)
             new_passage_size = self.agent_manager.passage_size(actor=actor, agent_id=agent_id)
             assert new_passage_size >= curr_passage_size  # in case empty files are added

             # rebuild system prompt and force
-            self.agent_manager.rebuild_system_prompt(agent_id=agent_id, actor=actor, force=True)
+            agent_state = self.agent_manager.rebuild_system_prompt(agent_id=agent_id, actor=actor, force=True)
+
+        # update job status
+        job.status = JobStatus.completed
+        job.metadata["num_passages"] = num_passages
+        job.metadata["num_documents"] = num_documents
+        self.job_manager.update_job_by_id(job_id=job_id, job_update=JobUpdate(**job.model_dump()), actor=actor)

         return job

+    def sleeptime_document_ingest(self, main_agent: AgentState, source: Source, actor: User, clear_history: bool = False) -> None:
+        sleeptime_agent = self.create_document_sleeptime_agent(main_agent, source, actor, clear_history)
+        agent = self.load_agent(agent_id=sleeptime_agent.id, actor=actor)
+        for passage in self.list_data_source_passages(source_id=source.id, user_id=actor.id):
+            agent.step(
+                messages=[
+                    Message(
+                        role="user",
+                        content=[TextContent(text=passage.text)],
+                        agent_id=sleeptime_agent.id,
+                    ),
+                ]
+            )
+        self.agent_manager.delete_agent(agent_id=sleeptime_agent.id, actor=actor)
+
+    def create_document_sleeptime_agent(
+        self, main_agent: AgentState, source: Source, actor: User, clear_history: bool = False
+    ) -> AgentState:
+        try:
+            block = self.agent_manager.get_block_with_label(agent_id=main_agent.id, block_label=source.name, actor=actor)
+        except:
+            block = self.block_manager.create_or_update_block(Block(label=source.name, value=""), actor=actor)
+            self.agent_manager.attach_block(agent_id=main_agent.id, block_id=block.id, actor=actor)
+
+        if clear_history and block.value != "":
+            block = self.block_manager.update_block(block_id=block.id, block=BlockUpdate(value=""))
+
+        request = CreateAgent(
+            name=main_agent.name + "-doc-sleeptime",
+            system=get_system_text("sleeptime_doc_ingest"),
+            agent_type=AgentType.sleeptime_agent,
+            block_ids=[block.id],
+            memory_blocks=[
+                CreateBlock(
+                    label="persona",
+                    value=get_persona_text("sleeptime_doc_persona"),
+                ),
+                CreateBlock(
+                    label="instructions",
+                    value=source.description,
+                ),
+            ],
+            llm_config=main_agent.llm_config,
+            embedding_config=main_agent.embedding_config,
+            project_id=main_agent.project_id,
+            include_base_tools=False,
+            tools=constants.BASE_SLEEPTIME_TOOLS,
+        )
+        return self.agent_manager.create_agent(
+            agent_create=request,
+            actor=actor,
+        )
+
     def load_data(
         self,
         user_id: str,
letta/services/agent_manager.py
CHANGED
@@ -30,10 +30,11 @@ from letta.orm.sandbox_config import AgentEnvironmentVariable as AgentEnvironmen
 from letta.orm.sqlalchemy_base import AccessType
 from letta.orm.sqlite_functions import adapt_array
 from letta.schemas.agent import AgentState as PydanticAgentState
-from letta.schemas.agent import AgentType, CreateAgent, UpdateAgent
+from letta.schemas.agent import AgentType, CreateAgent, UpdateAgent, get_prompt_template_for_agent_type
 from letta.schemas.block import Block as PydanticBlock
 from letta.schemas.block import BlockUpdate
 from letta.schemas.embedding_config import EmbeddingConfig
+from letta.schemas.group import Group as PydanticGroup
 from letta.schemas.group import ManagerType
 from letta.schemas.llm_config import LLMConfig
 from letta.schemas.memory import Memory

@@ -43,11 +44,11 @@ from letta.schemas.passage import Passage as PydanticPassage
 from letta.schemas.source import Source as PydanticSource
 from letta.schemas.tool import Tool as PydanticTool
 from letta.schemas.tool_rule import ContinueToolRule as PydanticContinueToolRule
-from letta.schemas.tool_rule import ParentToolRule as PydanticParentToolRule
 from letta.schemas.tool_rule import TerminalToolRule as PydanticTerminalToolRule
 from letta.schemas.tool_rule import ToolRule as PydanticToolRule
 from letta.schemas.user import User as PydanticUser
 from letta.serialize_schemas import MarshmallowAgentSchema
+from letta.serialize_schemas.marshmallow_message import SerializedMessageSchema
 from letta.serialize_schemas.marshmallow_tool import SerializedToolSchema
 from letta.serialize_schemas.pydantic_agent_schema import AgentSchema
 from letta.services.block_manager import BlockManager

@@ -158,14 +159,11 @@ class AgentManager:
         if agent_create.include_base_tool_rules:
             # apply default tool rules
             for tool_name in tool_names:
-                if tool_name == "send_message" or tool_name == "send_message_to_agent_async" or tool_name == "
+                if tool_name == "send_message" or tool_name == "send_message_to_agent_async" or tool_name == "memory_finish_edits":
                     tool_rules.append(PydanticTerminalToolRule(tool_name=tool_name))
                 elif tool_name in BASE_TOOLS + BASE_MEMORY_TOOLS + BASE_SLEEPTIME_TOOLS:
                     tool_rules.append(PydanticContinueToolRule(tool_name=tool_name))

-        if agent_create.agent_type == AgentType.sleeptime_agent:
-            tool_rules.append(PydanticParentToolRule(tool_name="view_core_memory_with_line_numbers", children=["core_memory_insert"]))
-
         # if custom rules, check tool rules are valid
         if agent_create.tool_rules:
             check_supports_structured_output(model=agent_create.llm_config.model, tool_rules=agent_create.tool_rules)

@@ -304,7 +302,7 @@ class AgentManager:
             agent_update.system = derive_system_message(
                 agent_type=agent_state.agent_type,
                 enable_sleeptime=agent_update.enable_sleeptime,
-                system=agent_update.system,
+                system=agent_update.system or agent_state.system,
             )
         if agent_update.system and agent_update.system != agent_state.system:
             agent_state = self.rebuild_system_prompt(agent_id=agent_state.id, actor=actor, force=True, update_timestamp=False)

@@ -422,7 +420,8 @@ class AgentManager:
         query = _apply_tag_filter(query, tags, match_all_tags)
         query = _apply_pagination(query, before, after, session, ascending=ascending)

-
+        if limit:
+            query = query.limit(limit)

         agents = session.execute(query).scalars().all()
         return [agent.to_pydantic(include_relationships=include_relationships) for agent in agents]

@@ -530,7 +529,6 @@ class AgentManager:
     @enforce_types
     def serialize(self, agent_id: str, actor: PydanticUser) -> AgentSchema:
         with self.session_maker() as session:
-            # Retrieve the agent
             agent = AgentModel.read(db_session=session, identifier=agent_id, actor=actor)
             schema = MarshmallowAgentSchema(session=session, actor=actor)
             data = schema.dump(agent)

@@ -546,12 +544,24 @@
         project_id: Optional[str] = None,
         strip_messages: Optional[bool] = False,
     ) -> PydanticAgentState:
-
-        tool_data_list =
+        serialized_agent_dict = serialized_agent.model_dump()
+        tool_data_list = serialized_agent_dict.pop("tools", [])
+        messages = serialized_agent_dict.pop(MarshmallowAgentSchema.FIELD_MESSAGES, [])
+
+        for msg in messages:
+            msg[MarshmallowAgentSchema.FIELD_ID] = SerializedMessageSchema.generate_id()  # Generate new ID
+
+        message_ids = []
+        in_context_message_indices = serialized_agent_dict.pop(MarshmallowAgentSchema.FIELD_IN_CONTEXT_INDICES)
+        for idx in in_context_message_indices:
+            message_ids.append(messages[idx][MarshmallowAgentSchema.FIELD_ID])
+
+        serialized_agent_dict[MarshmallowAgentSchema.FIELD_MESSAGE_IDS] = message_ids

         with self.session_maker() as session:
             schema = MarshmallowAgentSchema(session=session, actor=actor)
-            agent = schema.load(
+            agent = schema.load(serialized_agent_dict, session=session)
+
             if append_copy_suffix:
                 agent.name += "_copy"
             if project_id:

@@ -561,17 +571,23 @@
                 # we want to strip all but the first (system) message
                 agent.message_ids = [agent.message_ids[0]]
             agent = agent.create(session, actor=actor)
+
             pydantic_agent = agent.to_pydantic()

+            pyd_msgs = []
+            message_schema = SerializedMessageSchema(session=session, actor=actor)
+
+            for serialized_message in messages:
+                pydantic_message = message_schema.load(serialized_message, session=session).to_pydantic()
+                pydantic_message.agent_id = agent.id
+                pyd_msgs.append(pydantic_message)
+            self.message_manager.create_many_messages(pyd_msgs, actor=actor)
+
             # Need to do this separately as there's some fancy upsert logic that SqlAlchemy cannot handle
             for tool_data in tool_data_list:
                 pydantic_tool = SerializedToolSchema(actor=actor).load(tool_data, transient=True).to_pydantic()

                 existing_pydantic_tool = self.tool_manager.get_tool_by_name(pydantic_tool.name, actor=actor)
-                # If the tool exists
-                # AND EITHER:
-                # 1) override_existing_tools is set to False
-                # 2) existing_pydantic_tool is NOT any type of Letta core tool
                 if existing_pydantic_tool and (
                     existing_pydantic_tool.tool_type in {ToolType.LETTA_CORE, ToolType.LETTA_MULTI_AGENT_CORE, ToolType.LETTA_MEMORY_CORE}
                     or not override_existing_tools

@@ -642,6 +658,14 @@
         # Return the updated agent state
         return agent.to_pydantic()

+    @enforce_types
+    def list_groups(self, agent_id: str, actor: PydanticUser, manager_type: Optional[str] = None) -> List[PydanticGroup]:
+        with self.session_maker() as session:
+            agent = AgentModel.read(db_session=session, identifier=agent_id, actor=actor)
+            if manager_type:
+                return [group.to_pydantic() for group in agent.groups if group.manager_type == manager_type]
+            return [group.to_pydantic() for group in agent.groups]
+
     # ======================================================================================================================
     # In Context Messages Management
     # ======================================================================================================================

@@ -781,10 +805,6 @@
             # Retrieve the existing agent (will raise NoResultFound if invalid)
             agent = AgentModel.read(db_session=session, identifier=agent_id, actor=actor)

-            # Because of cascade="all, delete-orphan" on agent.messages, setting
-            # this relationship to an empty list will physically remove them from the DB.
-            agent.messages = []
-
             # Also clear out the message_ids field to keep in-context memory consistent
             agent.message_ids = []

@@ -793,6 +813,8 @@

             agent_state = agent.to_pydantic()

+            self.message_manager.delete_all_messages_for_agent(agent_id=agent_id, actor=actor)
+
             if add_default_initial_messages:
                 return self.append_initial_message_sequence_to_in_context_messages(actor, agent_state)
             else:

@@ -833,7 +855,8 @@

         # refresh memory from DB (using block ids)
         agent_state.memory = Memory(
-            blocks=[self.block_manager.get_block_by_id(block.id, actor=actor) for block in agent_state.memory.get_blocks()]
+            blocks=[self.block_manager.get_block_by_id(block.id, actor=actor) for block in agent_state.memory.get_blocks()],
+            prompt_template=get_prompt_template_for_agent_type(agent_state.agent_type),
         )

         # NOTE: don't do this since re-buildin the memory is handled at the start of the step
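Note on the deserialize changes above: the serialized agent stores which messages were in context by position (FIELD_IN_CONTEXT_INDICES), and every imported message receives a freshly generated ID, so the positions are resolved back into IDs only after regeneration. A small standalone sketch of that remapping idea (toy data, not the package's schema classes):

import uuid

# Toy serialized payload: messages plus the positions that were in context.
serialized = {
    "messages": [{"text": "system prompt"}, {"text": "hello"}, {"text": "hi there"}],
    "in_context_indices": [0, 2],
}

# Mint a brand-new ID for every imported message so old IDs cannot collide in the target database.
for msg in serialized["messages"]:
    msg["id"] = f"message-{uuid.uuid4()}"

# Resolve the stored positions into the freshly generated IDs.
in_context_ids = [serialized["messages"][i]["id"] for i in serialized["in_context_indices"]]
print(in_context_ids)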
letta/services/group_manager.py
CHANGED
@@ -76,7 +76,7 @@ class GroupManager:
                 new_group.manager_agent_id = group.manager_config.manager_agent_id
                 new_group.sleeptime_agent_frequency = group.manager_config.sleeptime_agent_frequency
                 if new_group.sleeptime_agent_frequency:
-                    new_group.turns_counter =
+                    new_group.turns_counter = -1
             case _:
                 raise ValueError(f"Unsupported manager type: {group.manager_config.manager_type}")

@@ -113,7 +113,7 @@ class GroupManager:
                 manager_agent_id = group_update.manager_config.manager_agent_id
                 sleeptime_agent_frequency = group_update.manager_config.sleeptime_agent_frequency
                 if sleeptime_agent_frequency and group.turns_counter is None:
-                    group.turns_counter =
+                    group.turns_counter = -1
             case _:
                 raise ValueError(f"Unsupported manager type: {group_update.manager_config.manager_type}")
letta/services/helpers/agent_manager_helper.py
CHANGED
@@ -194,10 +194,9 @@ def compile_system_message(
     variables[IN_CONTEXT_MEMORY_KEYWORD] = full_memory_string

     if template_format == "f-string":
-
+        memory_variable_string = "{" + IN_CONTEXT_MEMORY_KEYWORD + "}"
         # Catch the special case where the system prompt is unformatted
         if append_icm_if_missing:
-            memory_variable_string = "{" + IN_CONTEXT_MEMORY_KEYWORD + "}"
             if memory_variable_string not in system_prompt:
                 # In this case, append it to the end to make sure memory is still injected
                 # warnings.warn(f"{IN_CONTEXT_MEMORY_KEYWORD} variable was missing from system prompt, appending instead")

@@ -205,7 +204,10 @@

         # render the variables using the built-in templater
         try:
-
+            if user_defined_variables:
+                formatted_prompt = safe_format(system_prompt, variables)
+            else:
+                formatted_prompt = system_prompt.replace(memory_variable_string, full_memory_string)
         except Exception as e:
             raise ValueError(f"Failed to format system prompt - {str(e)}. System prompt value:\n{system_prompt}")
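Note on the compile_system_message change above: when there are no user-defined variables, the memory placeholder is now substituted with a plain str.replace instead of running the whole prompt through the templater; one practical effect is that literal braces elsewhere in the prompt (JSON examples, code snippets) cannot be misread as template fields. A standalone illustration of the difference (this sketch uses str.format, not the package's safe_format helper):

IN_CONTEXT_MEMORY_KEYWORD = "CORE_MEMORY"

system_prompt = 'Reply with JSON like {"done": true}.\n{CORE_MEMORY}'
full_memory_string = "<persona>core memory goes here</persona>"

# Formatter-style substitution trips over the unrelated {"done": true} braces.
try:
    system_prompt.format(CORE_MEMORY=full_memory_string)
except KeyError as exc:
    print(f"format() failed on unrelated braces: {exc}")

# A plain replacement only touches the one placeholder and leaves other braces alone.
memory_variable_string = "{" + IN_CONTEXT_MEMORY_KEYWORD + "}"
print(system_prompt.replace(memory_variable_string, full_memory_string))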
letta/services/job_manager.py
CHANGED
@@ -15,6 +15,7 @@ from letta.orm.sqlalchemy_base import AccessType
 from letta.orm.step import Step
 from letta.orm.step import Step as StepModel
 from letta.schemas.enums import JobStatus, MessageRole
+from letta.schemas.job import BatchJob as PydanticBatchJob
 from letta.schemas.job import Job as PydanticJob
 from letta.schemas.job import JobUpdate, LettaRequestConfig
 from letta.schemas.letta_message import LettaMessage

@@ -36,7 +37,9 @@ class JobManager:
         self.session_maker = db_context

     @enforce_types
-    def create_job(
+    def create_job(
+        self, pydantic_job: Union[PydanticJob, PydanticRun, PydanticBatchJob], actor: PydanticUser
+    ) -> Union[PydanticJob, PydanticRun, PydanticBatchJob]:
         """Create a new job based on the JobCreate schema."""
         with self.session_maker() as session:
             # Associate the job with the user

@@ -57,14 +60,16 @@ class JobManager:
             update_data = job_update.model_dump(to_orm=True, exclude_unset=True, exclude_none=True)

             # Automatically update the completion timestamp if status is set to 'completed'
-            if update_data.get("status") == JobStatus.completed and not job.completed_at:
-                job.completed_at = get_utc_time()
-
             for key, value in update_data.items():
                 setattr(job, key, value)

+            if update_data.get("status") == JobStatus.completed and not job.completed_at:
+                job.completed_at = get_utc_time()
+                if job.callback_url:
+                    self._dispatch_callback(session, job)
+
             # Save the updated job to the database
-            job.update(db_session=session
+            job.update(db_session=session, actor=actor)

             return job.to_pydantic()

@@ -452,3 +457,27 @@ class JobManager:
             job = session.query(JobModel).filter(JobModel.id == run_id).first()
             request_config = job.request_config or LettaRequestConfig()
             return request_config
+
+    def _dispatch_callback(self, session: Session, job: JobModel) -> None:
+        """
+        POST a standard JSON payload to job.callback_url
+        and record timestamp + HTTP status.
+        """
+
+        payload = {
+            "job_id": job.id,
+            "status": job.status,
+            "completed_at": job.completed_at.isoformat(),
+        }
+        try:
+            import httpx
+
+            resp = httpx.post(job.callback_url, json=payload, timeout=5.0)
+            job.callback_sent_at = get_utc_time()
+            job.callback_status_code = resp.status_code
+
+        except Exception:
+            return
+
+        session.add(job)
+        session.commit()