letta-nightly 0.12.1.dev20251023104211__py3-none-any.whl → 0.13.0.dev20251024223017__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Note: this version of letta-nightly has been flagged as a potentially problematic release.
- letta/__init__.py +2 -3
- letta/adapters/letta_llm_adapter.py +1 -0
- letta/adapters/simple_llm_request_adapter.py +8 -5
- letta/adapters/simple_llm_stream_adapter.py +22 -6
- letta/agents/agent_loop.py +10 -3
- letta/agents/base_agent.py +4 -1
- letta/agents/helpers.py +41 -9
- letta/agents/letta_agent.py +11 -10
- letta/agents/letta_agent_v2.py +47 -37
- letta/agents/letta_agent_v3.py +395 -300
- letta/agents/voice_agent.py +8 -6
- letta/agents/voice_sleeptime_agent.py +3 -3
- letta/constants.py +30 -7
- letta/errors.py +20 -0
- letta/functions/function_sets/base.py +55 -3
- letta/functions/mcp_client/types.py +33 -57
- letta/functions/schema_generator.py +135 -23
- letta/groups/sleeptime_multi_agent_v3.py +6 -11
- letta/groups/sleeptime_multi_agent_v4.py +227 -0
- letta/helpers/converters.py +78 -4
- letta/helpers/crypto_utils.py +6 -2
- letta/interfaces/anthropic_parallel_tool_call_streaming_interface.py +9 -11
- letta/interfaces/anthropic_streaming_interface.py +3 -4
- letta/interfaces/gemini_streaming_interface.py +4 -6
- letta/interfaces/openai_streaming_interface.py +63 -28
- letta/llm_api/anthropic_client.py +7 -4
- letta/llm_api/deepseek_client.py +6 -4
- letta/llm_api/google_ai_client.py +3 -12
- letta/llm_api/google_vertex_client.py +1 -1
- letta/llm_api/helpers.py +90 -61
- letta/llm_api/llm_api_tools.py +4 -1
- letta/llm_api/openai.py +12 -12
- letta/llm_api/openai_client.py +53 -16
- letta/local_llm/constants.py +4 -3
- letta/local_llm/json_parser.py +5 -2
- letta/local_llm/utils.py +2 -3
- letta/log.py +171 -7
- letta/orm/agent.py +43 -9
- letta/orm/archive.py +4 -0
- letta/orm/custom_columns.py +15 -0
- letta/orm/identity.py +11 -11
- letta/orm/mcp_server.py +9 -0
- letta/orm/message.py +6 -1
- letta/orm/run_metrics.py +7 -2
- letta/orm/sqlalchemy_base.py +2 -2
- letta/orm/tool.py +3 -0
- letta/otel/tracing.py +2 -0
- letta/prompts/prompt_generator.py +7 -2
- letta/schemas/agent.py +41 -10
- letta/schemas/agent_file.py +3 -0
- letta/schemas/archive.py +4 -2
- letta/schemas/block.py +2 -1
- letta/schemas/enums.py +36 -3
- letta/schemas/file.py +3 -3
- letta/schemas/folder.py +2 -1
- letta/schemas/group.py +2 -1
- letta/schemas/identity.py +18 -9
- letta/schemas/job.py +3 -1
- letta/schemas/letta_message.py +71 -12
- letta/schemas/letta_request.py +7 -3
- letta/schemas/letta_stop_reason.py +0 -25
- letta/schemas/llm_config.py +8 -2
- letta/schemas/mcp.py +80 -83
- letta/schemas/mcp_server.py +349 -0
- letta/schemas/memory.py +20 -8
- letta/schemas/message.py +212 -67
- letta/schemas/providers/anthropic.py +13 -6
- letta/schemas/providers/azure.py +6 -4
- letta/schemas/providers/base.py +8 -4
- letta/schemas/providers/bedrock.py +6 -2
- letta/schemas/providers/cerebras.py +7 -3
- letta/schemas/providers/deepseek.py +2 -1
- letta/schemas/providers/google_gemini.py +15 -6
- letta/schemas/providers/groq.py +2 -1
- letta/schemas/providers/lmstudio.py +9 -6
- letta/schemas/providers/mistral.py +2 -1
- letta/schemas/providers/openai.py +7 -2
- letta/schemas/providers/together.py +9 -3
- letta/schemas/providers/xai.py +7 -3
- letta/schemas/run.py +7 -2
- letta/schemas/run_metrics.py +2 -1
- letta/schemas/sandbox_config.py +2 -2
- letta/schemas/secret.py +3 -158
- letta/schemas/source.py +2 -2
- letta/schemas/step.py +2 -2
- letta/schemas/tool.py +24 -1
- letta/schemas/usage.py +0 -1
- letta/server/rest_api/app.py +123 -7
- letta/server/rest_api/dependencies.py +3 -0
- letta/server/rest_api/interface.py +7 -4
- letta/server/rest_api/redis_stream_manager.py +16 -1
- letta/server/rest_api/routers/v1/__init__.py +7 -0
- letta/server/rest_api/routers/v1/agents.py +332 -322
- letta/server/rest_api/routers/v1/archives.py +127 -40
- letta/server/rest_api/routers/v1/blocks.py +54 -6
- letta/server/rest_api/routers/v1/chat_completions.py +146 -0
- letta/server/rest_api/routers/v1/folders.py +27 -35
- letta/server/rest_api/routers/v1/groups.py +23 -35
- letta/server/rest_api/routers/v1/identities.py +24 -10
- letta/server/rest_api/routers/v1/internal_runs.py +107 -0
- letta/server/rest_api/routers/v1/internal_templates.py +162 -179
- letta/server/rest_api/routers/v1/jobs.py +15 -27
- letta/server/rest_api/routers/v1/mcp_servers.py +309 -0
- letta/server/rest_api/routers/v1/messages.py +23 -34
- letta/server/rest_api/routers/v1/organizations.py +6 -27
- letta/server/rest_api/routers/v1/providers.py +35 -62
- letta/server/rest_api/routers/v1/runs.py +30 -43
- letta/server/rest_api/routers/v1/sandbox_configs.py +6 -4
- letta/server/rest_api/routers/v1/sources.py +26 -42
- letta/server/rest_api/routers/v1/steps.py +16 -29
- letta/server/rest_api/routers/v1/tools.py +17 -13
- letta/server/rest_api/routers/v1/users.py +5 -17
- letta/server/rest_api/routers/v1/voice.py +18 -27
- letta/server/rest_api/streaming_response.py +5 -2
- letta/server/rest_api/utils.py +187 -25
- letta/server/server.py +27 -22
- letta/server/ws_api/server.py +5 -4
- letta/services/agent_manager.py +148 -26
- letta/services/agent_serialization_manager.py +6 -1
- letta/services/archive_manager.py +168 -15
- letta/services/block_manager.py +14 -4
- letta/services/file_manager.py +33 -29
- letta/services/group_manager.py +10 -0
- letta/services/helpers/agent_manager_helper.py +65 -11
- letta/services/identity_manager.py +105 -4
- letta/services/job_manager.py +11 -1
- letta/services/mcp/base_client.py +2 -2
- letta/services/mcp/oauth_utils.py +33 -8
- letta/services/mcp_manager.py +174 -78
- letta/services/mcp_server_manager.py +1331 -0
- letta/services/message_manager.py +109 -4
- letta/services/organization_manager.py +4 -4
- letta/services/passage_manager.py +9 -25
- letta/services/provider_manager.py +91 -15
- letta/services/run_manager.py +72 -15
- letta/services/sandbox_config_manager.py +45 -3
- letta/services/source_manager.py +15 -8
- letta/services/step_manager.py +24 -1
- letta/services/streaming_service.py +581 -0
- letta/services/summarizer/summarizer.py +1 -1
- letta/services/tool_executor/core_tool_executor.py +111 -0
- letta/services/tool_executor/files_tool_executor.py +5 -3
- letta/services/tool_executor/sandbox_tool_executor.py +2 -2
- letta/services/tool_executor/tool_execution_manager.py +1 -1
- letta/services/tool_manager.py +10 -3
- letta/services/tool_sandbox/base.py +61 -1
- letta/services/tool_sandbox/local_sandbox.py +1 -3
- letta/services/user_manager.py +2 -2
- letta/settings.py +49 -5
- letta/system.py +14 -5
- letta/utils.py +73 -1
- letta/validators.py +105 -0
- {letta_nightly-0.12.1.dev20251023104211.dist-info → letta_nightly-0.13.0.dev20251024223017.dist-info}/METADATA +4 -2
- {letta_nightly-0.12.1.dev20251023104211.dist-info → letta_nightly-0.13.0.dev20251024223017.dist-info}/RECORD +157 -151
- letta/schemas/letta_ping.py +0 -28
- letta/server/rest_api/routers/openai/chat_completions/__init__.py +0 -0
- {letta_nightly-0.12.1.dev20251023104211.dist-info → letta_nightly-0.13.0.dev20251024223017.dist-info}/WHEEL +0 -0
- {letta_nightly-0.12.1.dev20251023104211.dist-info → letta_nightly-0.13.0.dev20251024223017.dist-info}/entry_points.txt +0 -0
- {letta_nightly-0.12.1.dev20251023104211.dist-info → letta_nightly-0.13.0.dev20251024223017.dist-info}/licenses/LICENSE +0 -0
letta/services/archive_manager.py
CHANGED

@@ -1,17 +1,22 @@
+import asyncio
+from datetime import datetime
 from typing import List, Optional

-from sqlalchemy import select
+from sqlalchemy import delete, or_, select

 from letta.helpers.tpuf_client import should_use_tpuf
 from letta.log import get_logger
 from letta.orm import ArchivalPassage, Archive as ArchiveModel, ArchivesAgents
 from letta.otel.tracing import trace_method
+from letta.schemas.agent import AgentState as PydanticAgentState
 from letta.schemas.archive import Archive as PydanticArchive
-from letta.schemas.
+from letta.schemas.embedding_config import EmbeddingConfig
+from letta.schemas.enums import PrimitiveType, VectorDBProvider
 from letta.schemas.user import User as PydanticUser
 from letta.server.db import db_registry
-from letta.settings import settings
+from letta.settings import DatabaseChoice, settings
 from letta.utils import enforce_types
+from letta.validators import raise_on_invalid_id

 logger = get_logger(__name__)

@@ -24,6 +29,7 @@ class ArchiveManager:
     async def create_archive_async(
         self,
         name: str,
+        embedding_config: EmbeddingConfig,
         description: Optional[str] = None,
         actor: PydanticUser = None,
     ) -> PydanticArchive:
@@ -38,6 +44,7 @@
                 description=description,
                 organization_id=actor.organization_id,
                 vector_db_provider=vector_db_provider,
+                embedding_config=embedding_config,
             )
             await archive.create_async(session, actor=actor)
             return archive.to_pydantic()
@@ -47,6 +54,7 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="archive_id", expected_prefix=PrimitiveType.ARCHIVE)
     async def get_archive_by_id_async(
         self,
         archive_id: str,
@@ -63,6 +71,7 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="archive_id", expected_prefix=PrimitiveType.ARCHIVE)
     async def update_archive_async(
         self,
         archive_id: str,
@@ -89,6 +98,7 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="agent_id", expected_prefix=PrimitiveType.AGENT)
     async def list_archives_async(
         self,
         *,
@@ -136,6 +146,8 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="agent_id", expected_prefix=PrimitiveType.AGENT)
+    @raise_on_invalid_id(param_name="archive_id", expected_prefix=PrimitiveType.ARCHIVE)
     async def attach_agent_to_archive_async(
         self,
         agent_id: str,
@@ -172,6 +184,34 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="agent_id", expected_prefix=PrimitiveType.AGENT)
+    @raise_on_invalid_id(param_name="archive_id", expected_prefix=PrimitiveType.ARCHIVE)
+    async def detach_agent_from_archive_async(
+        self,
+        agent_id: str,
+        archive_id: str,
+        actor: PydanticUser = None,
+    ) -> None:
+        """Detach an agent from an archive."""
+        async with db_registry.async_session() as session:
+            # Delete the relationship directly
+            result = await session.execute(
+                delete(ArchivesAgents).where(
+                    ArchivesAgents.agent_id == agent_id,
+                    ArchivesAgents.archive_id == archive_id,
+                )
+            )
+
+            if result.rowcount == 0:
+                logger.warning(f"Attempted to detach unattached agent {agent_id} from archive {archive_id}")
+            else:
+                logger.info(f"Detached agent {agent_id} from archive {archive_id}")
+
+            await session.commit()
+
+    @enforce_types
+    @trace_method
+    @raise_on_invalid_id(param_name="agent_id", expected_prefix=PrimitiveType.AGENT)
     async def get_default_archive_for_agent_async(
         self,
         agent_id: str,
@@ -204,6 +244,7 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="archive_id", expected_prefix=PrimitiveType.ARCHIVE)
     async def delete_archive_async(
         self,
         archive_id: str,
@@ -219,12 +260,51 @@
             await archive_model.hard_delete_async(session, actor=actor)
             logger.info(f"Deleted archive {archive_id}")

+    @enforce_types
+    @trace_method
+    @raise_on_invalid_id(param_name="archive_id", expected_prefix=PrimitiveType.ARCHIVE)
+    @raise_on_invalid_id(param_name="passage_id", expected_prefix=PrimitiveType.PASSAGE)
+    async def delete_passage_from_archive_async(
+        self,
+        archive_id: str,
+        passage_id: str,
+        actor: PydanticUser = None,
+        strict_mode: bool = False,
+    ) -> None:
+        """Delete a passage from an archive.
+
+        Args:
+            archive_id: ID of the archive containing the passage
+            passage_id: ID of the passage to delete
+            actor: User performing the operation
+            strict_mode: If True, raise errors on Turbopuffer failures
+
+        Raises:
+            NoResultFound: If archive or passage not found
+            ValueError: If passage does not belong to the specified archive
+        """
+        from letta.services.passage_manager import PassageManager
+
+        await self.get_archive_by_id_async(archive_id=archive_id, actor=actor)
+
+        passage_manager = PassageManager()
+        passage = await passage_manager.get_agent_passage_by_id_async(passage_id=passage_id, actor=actor)
+
+        if passage.archive_id != archive_id:
+            raise ValueError(f"Passage {passage_id} does not belong to archive {archive_id}")
+
+        await passage_manager.delete_agent_passage_by_id_async(
+            passage_id=passage_id,
+            actor=actor,
+            strict_mode=strict_mode,
+        )
+        logger.info(f"Deleted passage {passage_id} from archive {archive_id}")
+
     @enforce_types
     @trace_method
     async def get_or_create_default_archive_for_agent_async(
         self,
-
-        agent_name: Optional[str] = None,
+        agent_state: PydanticAgentState,
         actor: PydanticUser = None,
     ) -> PydanticArchive:
         """Get the agent's default archive, creating one if it doesn't exist."""
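The two methods added above are thin wrappers around existing primitives: detaching deletes the ArchivesAgents row directly, and passage deletion verifies archive membership before delegating to PassageManager. A minimal caller-side sketch, assuming you already have a PydanticUser actor and valid "agent-…", "archive-…", and "passage-…" IDs (the actor and IDs below are placeholders, not values from this diff):

from letta.services.archive_manager import ArchiveManager


async def cleanup(actor, agent_id: str, archive_id: str, passage_id: str) -> None:
    manager = ArchiveManager()

    # Remove a single passage; raises ValueError if it belongs to a different archive.
    await manager.delete_passage_from_archive_async(
        archive_id=archive_id,
        passage_id=passage_id,
        actor=actor,
        strict_mode=False,  # per the docstring, True surfaces Turbopuffer failures
    )

    # Drop the agent<->archive link; logs a warning if the agent was not attached.
    await manager.detach_agent_from_archive_async(
        agent_id=agent_id,
        archive_id=archive_id,
        actor=actor,
    )

Because both methods are wrapped in @raise_on_invalid_id, an ID with the wrong prefix should be rejected before any database work is done.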
@@ -236,14 +316,14 @@
         agent_manager = AgentManager()

         archive_ids = await agent_manager.get_agent_archive_ids_async(
-            agent_id=
+            agent_id=agent_state.id,
             actor=actor,
         )

         if archive_ids:
             # TODO: Remove this check once we support multiple archives per agent
             if len(archive_ids) > 1:
-                raise ValueError(f"Agent {
+                raise ValueError(f"Agent {agent_state.id} has multiple archives, which is not yet supported")
             # Get the archive
             archive = await self.get_archive_by_id_async(
                 archive_id=archive_ids[0],
@@ -252,9 +332,10 @@
             return archive

         # Create a default archive for this agent
-        archive_name = f"{
+        archive_name = f"{agent_state.name}'s Archive"
        archive = await self.create_archive_async(
            name=archive_name,
+            embedding_config=agent_state.embedding_config,
            description="Default archive created automatically",
            actor=actor,
        )
@@ -262,7 +343,7 @@
         try:
             # Attach the agent to the archive as owner
             await self.attach_agent_to_archive_async(
-                agent_id=
+                agent_id=agent_state.id,
                 archive_id=archive.id,
                 is_owner=True,
                 actor=actor,
@@ -271,12 +352,12 @@
         except IntegrityError:
             # race condition: another concurrent request already created and attached an archive
             # clean up the orphaned archive we just created
-            logger.info(f"Race condition detected for agent {
+            logger.info(f"Race condition detected for agent {agent_state.id}, cleaning up orphaned archive {archive.id}")
             await self.delete_archive_async(archive_id=archive.id, actor=actor)

             # fetch the existing archive that was created by the concurrent request
             archive_ids = await agent_manager.get_agent_archive_ids_async(
-                agent_id=
+                agent_id=agent_state.id,
                 actor=actor,
             )
         if archive_ids:
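The default-archive helper now takes the full AgentState instead of separate agent_id/agent_name arguments, which is what lets it seed the new archive with the agent's embedding_config. A sketch of the new call shape (agent_state and actor are assumed to come from AgentManager and your auth layer; they are not defined in this diff):

from letta.services.archive_manager import ArchiveManager


async def ensure_default_archive(agent_state, actor):
    # agent_state is a letta.schemas.agent.AgentState; its id, name, and
    # embedding_config are all consumed by the manager internally.
    return await ArchiveManager().get_or_create_default_archive_for_agent_async(
        agent_state=agent_state,
        actor=actor,
    )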
@@ -291,15 +372,86 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="archive_id", expected_prefix=PrimitiveType.ARCHIVE)
     async def get_agents_for_archive_async(
         self,
         archive_id: str,
         actor: PydanticUser,
-
-
+        before: Optional[str] = None,
+        after: Optional[str] = None,
+        limit: Optional[int] = 50,
+        ascending: bool = False,
+        include: List[str] = [],
+    ) -> List[PydanticAgentState]:
+        """Get agents that have access to an archive with pagination support.
+
+        Uses a subquery approach to avoid expensive JOINs.
+        """
+        from letta.orm import Agent as AgentModel
+
         async with db_registry.async_session() as session:
-
-
+            # Start with a basic query using subquery instead of JOIN
+            query = (
+                select(AgentModel)
+                .where(AgentModel.id.in_(select(ArchivesAgents.agent_id).where(ArchivesAgents.archive_id == archive_id)))
+                .where(AgentModel.organization_id == actor.organization_id)
+            )
+
+            # Apply pagination using cursor-based approach
+            if after:
+                result = (await session.execute(select(AgentModel.created_at, AgentModel.id).where(AgentModel.id == after))).first()
+                if result:
+                    after_sort_value, after_id = result
+                    # SQLite does not support as granular timestamping, so we need to round the timestamp
+                    if settings.database_engine is DatabaseChoice.SQLITE and isinstance(after_sort_value, datetime):
+                        after_sort_value = after_sort_value.strftime("%Y-%m-%d %H:%M:%S")
+
+                    if ascending:
+                        query = query.where(
+                            AgentModel.created_at > after_sort_value,
+                            or_(AgentModel.created_at == after_sort_value, AgentModel.id > after_id),
+                        )
+                    else:
+                        query = query.where(
+                            AgentModel.created_at < after_sort_value,
+                            or_(AgentModel.created_at == after_sort_value, AgentModel.id < after_id),
+                        )
+
+            if before:
+                result = (await session.execute(select(AgentModel.created_at, AgentModel.id).where(AgentModel.id == before))).first()
+                if result:
+                    before_sort_value, before_id = result
+                    # SQLite does not support as granular timestamping, so we need to round the timestamp
+                    if settings.database_engine is DatabaseChoice.SQLITE and isinstance(before_sort_value, datetime):
+                        before_sort_value = before_sort_value.strftime("%Y-%m-%d %H:%M:%S")
+
+                    if ascending:
+                        query = query.where(
+                            AgentModel.created_at < before_sort_value,
+                            or_(AgentModel.created_at == before_sort_value, AgentModel.id < before_id),
+                        )
+                    else:
+                        query = query.where(
+                            AgentModel.created_at > before_sort_value,
+                            or_(AgentModel.created_at == before_sort_value, AgentModel.id > before_id),
+                        )
+
+            # Apply sorting
+            if ascending:
+                query = query.order_by(AgentModel.created_at.asc(), AgentModel.id.asc())
+            else:
+                query = query.order_by(AgentModel.created_at.desc(), AgentModel.id.desc())
+
+            # Apply limit
+            if limit:
+                query = query.limit(limit)
+
+            # Execute the query
+            result = await session.execute(query)
+            agents_orm = result.scalars().all()
+
+            agents = await asyncio.gather(*[agent.to_pydantic_async(include_relationships=[], include=include) for agent in agents_orm])
+            return agents

     @enforce_types
     @trace_method
@@ -333,6 +485,7 @@ class ArchiveManager:

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="archive_id", expected_prefix=PrimitiveType.ARCHIVE)
     async def get_or_set_vector_db_namespace_async(
         self,
         archive_id: str,
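get_agents_for_archive_async now implements keyset (cursor) pagination: the cursor is an agent ID, the manager looks up that row's (created_at, id) pair, and the next page filters on that composite key instead of using OFFSET. A paging loop might look like the sketch below (archive_id and actor are placeholders, and the page-size heuristic is an assumption, not part of the diff):

from letta.services.archive_manager import ArchiveManager


async def iter_archive_agents(archive_id: str, actor, page_size: int = 50):
    manager = ArchiveManager()
    cursor = None
    while True:
        page = await manager.get_agents_for_archive_async(
            archive_id=archive_id,
            actor=actor,
            after=cursor,      # ID of the last agent from the previous page
            limit=page_size,
            ascending=True,    # oldest agents first
        )
        if not page:
            break
        for agent in page:
            yield agent
        if len(page) < page_size:
            break
        cursor = page[-1].id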
letta/services/block_manager.py
CHANGED

@@ -15,11 +15,12 @@ from letta.orm.errors import NoResultFound
 from letta.otel.tracing import trace_method
 from letta.schemas.agent import AgentState as PydanticAgentState
 from letta.schemas.block import Block as PydanticBlock, BlockUpdate
-from letta.schemas.enums import ActorType
+from letta.schemas.enums import ActorType, PrimitiveType
 from letta.schemas.user import User as PydanticUser
 from letta.server.db import db_registry
 from letta.settings import DatabaseChoice, settings
 from letta.utils import enforce_types
+from letta.validators import raise_on_invalid_id

 logger = get_logger(__name__)

@@ -134,10 +135,9 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="block_id", expected_prefix=PrimitiveType.BLOCK)
     async def update_block_async(self, block_id: str, block_update: BlockUpdate, actor: PydanticUser) -> PydanticBlock:
         """Update a block by its ID with the given BlockUpdate object."""
-        # Safety check for block
-
         async with db_registry.async_session() as session:
             block = await BlockModel.read_async(db_session=session, identifier=block_id, actor=actor)
             update_data = block_update.model_dump(to_orm=True, exclude_unset=True, exclude_none=True)
@@ -155,6 +155,7 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="block_id", expected_prefix=PrimitiveType.BLOCK)
     async def delete_block_async(self, block_id: str, actor: PydanticUser) -> None:
         """Delete a block by its ID."""
         async with db_registry.async_session() as session:
@@ -353,6 +354,7 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="block_id", expected_prefix=PrimitiveType.BLOCK)
     async def get_block_by_id_async(self, block_id: str, actor: Optional[PydanticUser] = None) -> Optional[PydanticBlock]:
         """Retrieve a block by its name."""
         async with db_registry.async_session() as session:
@@ -412,11 +414,13 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="block_id", expected_prefix=PrimitiveType.BLOCK)
     async def get_agents_for_block_async(
         self,
         block_id: str,
         actor: PydanticUser,
         include_relationships: Optional[List[str]] = None,
+        include: List[str] = [],
         before: Optional[str] = None,
         after: Optional[str] = None,
         limit: Optional[int] = 50,
@@ -498,7 +502,9 @@
             result = await session.execute(query)
             agents_orm = result.scalars().all()

-            agents = await asyncio.gather(
+            agents = await asyncio.gather(
+                *[agent.to_pydantic_async(include_relationships=include_relationships, include=include) for agent in agents_orm]
+            )
             return agents

     @enforce_types
@@ -595,6 +601,8 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="block_id", expected_prefix=PrimitiveType.BLOCK)
+    @raise_on_invalid_id(param_name="agent_id", expected_prefix=PrimitiveType.AGENT)
     async def checkpoint_block_async(
         self,
         block_id: str,
@@ -703,6 +711,7 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="block_id", expected_prefix=PrimitiveType.BLOCK)
     async def undo_checkpoint_block(
         self, block_id: str, actor: PydanticUser, use_preloaded_block: Optional[BlockModel] = None
     ) -> PydanticBlock:
@@ -753,6 +762,7 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="block_id", expected_prefix=PrimitiveType.BLOCK)
     async def redo_checkpoint_block(
         self, block_id: str, actor: PydanticUser, use_preloaded_block: Optional[BlockModel] = None
     ) -> PydanticBlock:
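Every hunk in this file follows the same pattern: an ID-prefix guard layered on top of @enforce_types and @trace_method. The real decorator lives in letta/validators.py, which is not included in this excerpt. The sketch below only illustrates the general shape such a guard could take; the PrimitiveType enum values, the prefix convention, and the ValueError are assumptions for illustration, not the actual letta implementation:

import functools
import inspect
from enum import Enum


class PrimitiveType(str, Enum):
    # Illustrative prefixes only; the real enum is letta.schemas.enums.PrimitiveType.
    AGENT = "agent"
    BLOCK = "block"
    ARCHIVE = "archive"


def raise_on_invalid_id(param_name: str, expected_prefix: PrimitiveType):
    """Reject IDs whose prefix does not match the expected primitive type."""

    def decorator(func):
        # Assumes the wrapped function is a coroutine, as all decorated methods in this diff are.
        @functools.wraps(func)
        async def wrapper(*args, **kwargs):
            bound = inspect.signature(func).bind_partial(*args, **kwargs)
            value = bound.arguments.get(param_name)
            # Only validate when the argument was actually supplied.
            if value is not None and not str(value).startswith(f"{expected_prefix.value}-"):
                raise ValueError(f"{param_name} must start with '{expected_prefix.value}-', got {value!r}")
            return await func(*args, **kwargs)

        return wrapper

    return decorator

Stacking two of these (as checkpoint_block_async does) simply validates two different parameters before the method body runs.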
letta/services/file_manager.py
CHANGED

@@ -15,7 +15,7 @@ from letta.orm.errors import NoResultFound
 from letta.orm.file import FileContent as FileContentModel, FileMetadata as FileMetadataModel
 from letta.orm.sqlalchemy_base import AccessType
 from letta.otel.tracing import trace_method
-from letta.schemas.enums import FileProcessingStatus
+from letta.schemas.enums import FileProcessingStatus, PrimitiveType
 from letta.schemas.file import FileMetadata as PydanticFileMetadata
 from letta.schemas.source import Source as PydanticSource
 from letta.schemas.source_metadata import FileStats, OrganizationSourcesStats, SourceStats
@@ -23,6 +23,7 @@ from letta.schemas.user import User as PydanticUser
 from letta.server.db import db_registry
 from letta.settings import settings
 from letta.utils import enforce_types
+from letta.validators import raise_on_invalid_id

 logger = get_logger(__name__)

@@ -60,7 +61,11 @@
         text: Optional[str] = None,
     ) -> PydanticFileMetadata:
         # short-circuit if it already exists
-
+        try:
+            existing = await self.get_file_by_id(file_metadata.id, actor=actor)
+        except NoResultFound:
+            existing = None
+
         if existing:
             return existing

@@ -89,6 +94,7 @@
     # TODO: We make actor optional for now, but should most likely be enforced due to security reasons
     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="file_id", expected_prefix=PrimitiveType.FILE)
     # @async_redis_cache(
     #     key_func=lambda self, file_id, actor=None, include_content=False, strip_directory_prefix=False: f"{file_id}:{actor.organization_id if actor else 'none'}:{include_content}:{strip_directory_prefix}",
     #     prefix="file_content",
@@ -105,38 +111,33 @@
         lazy SELECT (avoids MissingGreenlet).
         """
         async with db_registry.async_session() as session:
-
-
-
-
-
-
-
-
-
-
-                    actor,
-                    access=["read"],
-                    access_type=AccessType.ORGANIZATION,
-                )
-
-            result = await session.execute(query)
-            file_orm = result.scalar_one()
-            else:
-                # fast path (metadata only)
-                file_orm = await FileMetadataModel.read_async(
-                    db_session=session,
-                    identifier=file_id,
-                    actor=actor,
+            if include_content:
+                # explicit eager load
+                query = select(FileMetadataModel).where(FileMetadataModel.id == file_id).options(selectinload(FileMetadataModel.content))
+                # apply org-scoping if actor provided
+                if actor:
+                    query = FileMetadataModel.apply_access_predicate(
+                        query,
+                        actor,
+                        access=["read"],
+                        access_type=AccessType.ORGANIZATION,
                     )

-
+                result = await session.execute(query)
+                file_orm = result.scalar_one()
+            else:
+                # fast path (metadata only)
+                file_orm = await FileMetadataModel.read_async(
+                    db_session=session,
+                    identifier=file_id,
+                    actor=actor,
+                )

-
-            return None
+            return await file_orm.to_pydantic_async(include_content=include_content, strip_directory_prefix=strip_directory_prefix)

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="file_id", expected_prefix=PrimitiveType.FILE)
     async def update_file_status(
         self,
         *,
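Two behavioral details stand out in the hunks above: get_file_by_id now always resolves to a Pydantic FileMetadata (the old "return None" fallback is gone), and the create path treats a missing row as a NoResultFound exception rather than a None result. A caller-side sketch under those assumptions (file_id and actor are placeholders; note that the eager include_content path goes through scalar_one(), so the exact exception type on a miss may differ from the letta.orm.errors.NoResultFound caught here):

from letta.orm.errors import NoResultFound
from letta.services.file_manager import FileManager


async def fetch_file(file_id: str, actor):
    manager = FileManager()
    try:
        # include_content=True takes the explicit eager-load branch shown in the diff.
        return await manager.get_file_by_id(file_id, actor=actor, include_content=True)
    except NoResultFound:
        # Callers can no longer rely on a None return value to signal a missing file.
        return None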
@@ -173,7 +174,6 @@
         * 1st round-trip → UPDATE with optional state validation
         * 2nd round-trip → SELECT fresh row (same as read_async) if update succeeded
         """
-
         if processing_status is None and error_message is None and total_chunks is None and chunks_embedded is None:
             raise ValueError("Nothing to update")

@@ -355,6 +355,7 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="file_id", expected_prefix=PrimitiveType.FILE)
     async def upsert_file_content(
         self,
         *,
@@ -400,6 +401,7 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="source_id", expected_prefix=PrimitiveType.SOURCE)
     async def list_files(
         self,
         source_id: str,
@@ -457,6 +459,7 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="file_id", expected_prefix=PrimitiveType.FILE)
     async def delete_file(self, file_id: str, actor: PydanticUser) -> PydanticFileMetadata:
         """Delete a file by its ID."""
         async with db_registry.async_session() as session:
@@ -511,6 +514,7 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="source_id", expected_prefix=PrimitiveType.SOURCE)
     # @async_redis_cache(
     #     key_func=lambda self, original_filename, source_id, actor: f"{original_filename}:{source_id}:{actor.organization_id}",
     #     prefix="file_by_name",
letta/services/group_manager.py
CHANGED

@@ -9,6 +9,7 @@ from letta.orm.errors import NoResultFound
 from letta.orm.group import Group as GroupModel
 from letta.orm.message import Message as MessageModel
 from letta.otel.tracing import trace_method
+from letta.schemas.enums import PrimitiveType
 from letta.schemas.group import Group as PydanticGroup, GroupCreate, GroupUpdate, InternalTemplateGroupCreate, ManagerType
 from letta.schemas.letta_message import LettaMessage
 from letta.schemas.message import Message as PydanticMessage
@@ -16,6 +17,7 @@ from letta.schemas.user import User as PydanticUser
 from letta.server.db import db_registry
 from letta.settings import DatabaseChoice, settings
 from letta.utils import enforce_types
+from letta.validators import raise_on_invalid_id


 class GroupManager:
@@ -62,6 +64,7 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="group_id", expected_prefix=PrimitiveType.GROUP)
     async def retrieve_group_async(self, group_id: str, actor: PydanticUser) -> PydanticGroup:
         async with db_registry.async_session() as session:
             group = await GroupModel.read_async(db_session=session, identifier=group_id, actor=actor)
@@ -119,6 +122,7 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="group_id", expected_prefix=PrimitiveType.GROUP)
     async def modify_group_async(self, group_id: str, group_update: GroupUpdate, actor: PydanticUser) -> PydanticGroup:
         async with db_registry.async_session() as session:
             group = await GroupModel.read_async(db_session=session, identifier=group_id, actor=actor)
@@ -182,6 +186,7 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="group_id", expected_prefix=PrimitiveType.GROUP)
     async def delete_group_async(self, group_id: str, actor: PydanticUser) -> None:
         async with db_registry.async_session() as session:
             group = await GroupModel.read_async(db_session=session, identifier=group_id, actor=actor)
@@ -189,6 +194,7 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="group_id", expected_prefix=PrimitiveType.GROUP)
     async def list_group_messages_async(
         self,
         actor: PydanticUser,
@@ -226,6 +232,7 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="group_id", expected_prefix=PrimitiveType.GROUP)
     async def reset_messages_async(self, group_id: str, actor: PydanticUser) -> None:
         async with db_registry.async_session() as session:
             # Ensure group is loadable by user
@@ -241,6 +248,7 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="group_id", expected_prefix=PrimitiveType.GROUP)
     async def bump_turns_counter_async(self, group_id: str, actor: PydanticUser) -> int:
         async with db_registry.async_session() as session:
             # Ensure group is loadable by user
@@ -253,6 +261,8 @@

     @enforce_types
     @trace_method
+    @raise_on_invalid_id(param_name="group_id", expected_prefix=PrimitiveType.GROUP)
+    @raise_on_invalid_id(param_name="last_processed_message_id", expected_prefix=PrimitiveType.MESSAGE)
     async def get_last_processed_message_id_and_update_async(
         self, group_id: str, last_processed_message_id: str, actor: PydanticUser
     ) -> str:
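All of the GroupManager changes are additive guards: each public method now validates its group_id (and, for the last method, the message cursor as well) before a database session is opened. A rough caller-side sketch of the intended effect; the actor is a placeholder, and the exact exception type raised by letta.validators is not shown in this diff, so the except clause below is deliberately broad:

from letta.services.group_manager import GroupManager


async def safe_retrieve(group_id: str, actor):
    manager = GroupManager()
    try:
        # A malformed ID such as "agent-123" should now be rejected by the
        # decorator before any database access happens.
        return await manager.retrieve_group_async(group_id=group_id, actor=actor)
    except Exception as exc:  # actual exception type comes from letta.validators (not shown here)
        print(f"invalid group id {group_id!r}: {exc}")
        return None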