letta-nightly 0.9.0.dev20250725104508__py3-none-any.whl → 0.9.1.dev20250727063635__py3-none-any.whl
This diff shows the content changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- letta/__init__.py +1 -1
- letta/agents/base_agent.py +1 -1
- letta/agents/letta_agent.py +6 -0
- letta/helpers/datetime_helpers.py +1 -1
- letta/helpers/json_helpers.py +1 -1
- letta/orm/agent.py +2 -3
- letta/orm/agents_tags.py +1 -0
- letta/orm/block.py +2 -2
- letta/orm/group.py +2 -2
- letta/orm/identity.py +3 -4
- letta/orm/mcp_oauth.py +62 -0
- letta/orm/step.py +2 -4
- letta/schemas/agent_file.py +31 -5
- letta/schemas/block.py +3 -0
- letta/schemas/enums.py +4 -0
- letta/schemas/group.py +3 -0
- letta/schemas/mcp.py +70 -0
- letta/schemas/memory.py +35 -0
- letta/schemas/message.py +98 -91
- letta/schemas/providers/openai.py +1 -1
- letta/server/rest_api/app.py +19 -21
- letta/server/rest_api/middleware/__init__.py +4 -0
- letta/server/rest_api/middleware/check_password.py +24 -0
- letta/server/rest_api/middleware/profiler_context.py +25 -0
- letta/server/rest_api/routers/v1/blocks.py +2 -0
- letta/server/rest_api/routers/v1/groups.py +1 -1
- letta/server/rest_api/routers/v1/sources.py +26 -0
- letta/server/rest_api/routers/v1/tools.py +224 -23
- letta/services/agent_manager.py +15 -9
- letta/services/agent_serialization_manager.py +84 -3
- letta/services/block_manager.py +4 -0
- letta/services/file_manager.py +23 -13
- letta/services/file_processor/file_processor.py +12 -10
- letta/services/mcp/base_client.py +20 -28
- letta/services/mcp/oauth_utils.py +433 -0
- letta/services/mcp/sse_client.py +12 -1
- letta/services/mcp/streamable_http_client.py +17 -5
- letta/services/mcp/types.py +9 -0
- letta/services/mcp_manager.py +304 -42
- letta/services/provider_manager.py +2 -2
- letta/services/tool_executor/tool_executor.py +6 -2
- letta/services/tool_manager.py +8 -4
- letta/services/tool_sandbox/base.py +3 -3
- letta/services/tool_sandbox/e2b_sandbox.py +1 -1
- letta/services/tool_sandbox/local_sandbox.py +16 -9
- letta/settings.py +11 -1
- letta/system.py +1 -1
- letta/templates/template_helper.py +25 -1
- letta/utils.py +19 -35
- {letta_nightly-0.9.0.dev20250725104508.dist-info → letta_nightly-0.9.1.dev20250727063635.dist-info}/METADATA +3 -2
- {letta_nightly-0.9.0.dev20250725104508.dist-info → letta_nightly-0.9.1.dev20250727063635.dist-info}/RECORD +54 -49
- {letta_nightly-0.9.0.dev20250725104508.dist-info → letta_nightly-0.9.1.dev20250727063635.dist-info}/LICENSE +0 -0
- {letta_nightly-0.9.0.dev20250725104508.dist-info → letta_nightly-0.9.1.dev20250727063635.dist-info}/WHEEL +0 -0
- {letta_nightly-0.9.0.dev20250725104508.dist-info → letta_nightly-0.9.1.dev20250727063635.dist-info}/entry_points.txt +0 -0
letta/services/mcp_manager.py
CHANGED
@@ -1,15 +1,29 @@
 import json
 import os
+import secrets
+import uuid
+from datetime import datetime, timedelta
 from typing import Any, Dict, List, Optional, Tuple, Union
 
+from fastapi import HTTPException
 from sqlalchemy import null
 
 import letta.constants as constants
 from letta.functions.mcp_client.types import MCPServerType, MCPTool, SSEServerConfig, StdioServerConfig, StreamableHTTPServerConfig
 from letta.log import get_logger
 from letta.orm.errors import NoResultFound
+from letta.orm.mcp_oauth import MCPOAuth, OAuthSessionStatus
 from letta.orm.mcp_server import MCPServer as MCPServerModel
-from letta.schemas.mcp import
+from letta.schemas.mcp import (
+    MCPOAuthSession,
+    MCPOAuthSessionCreate,
+    MCPOAuthSessionUpdate,
+    MCPServer,
+    UpdateMCPServer,
+    UpdateSSEMCPServer,
+    UpdateStdioMCPServer,
+    UpdateStreamableHTTPMCPServer,
+)
 from letta.schemas.tool import Tool as PydanticTool
 from letta.schemas.tool import ToolCreate
 from letta.schemas.user import User as PydanticUser
@@ -34,27 +48,21 @@ class MCPManager:
     @enforce_types
     async def list_mcp_server_tools(self, mcp_server_name: str, actor: PydanticUser) -> List[MCPTool]:
         """Get a list of all tools for a specific MCP server."""
-        mcp_client
-        tools = await mcp_client.list_tools()
-        # TODO: change to pydantic tools
-        await mcp_client.cleanup()
-        return tools
+        try:
+            mcp_server_id = await self.get_mcp_server_id_by_name(mcp_server_name, actor=actor)
+            mcp_config = await self.get_mcp_server_by_id_async(mcp_server_id, actor=actor)
+            server_config = mcp_config.to_config()
+            mcp_client = await self.get_mcp_client(server_config, actor)
+            await mcp_client.connect_to_server()
+
+            # list tools
+            tools = await mcp_client.list_tools()
+            return tools
+        except Exception as e:
+            logger.error(f"Error listing tools for MCP server {mcp_server_name}: {e}")
+            return []
+        finally:
+            await mcp_client.cleanup()
 
     @enforce_types
     async def execute_mcp_server_tool(
@@ -72,38 +80,37 @@ class MCPManager:
         # read from config file
         mcp_config = self.read_mcp_config()
         if mcp_server_name not in mcp_config:
-            print("MCP server not found in config.", mcp_config)
             raise ValueError(f"MCP server {mcp_server_name} not found in config.")
         server_config = mcp_config[mcp_server_name]
-        elif isinstance(server_config, StreamableHTTPServerConfig):
-            async with AsyncStreamableHTTPMCPClient(server_config=server_config) as mcp_client:
-                result, success = await mcp_client.execute_tool(tool_name, tool_args)
-                logger.info(f"MCP Result: {result}, Success: {success}")
-                return result, success
-        else:
-            raise ValueError(f"Unsupported server config type: {type(server_config)}")
+        mcp_client = await self.get_mcp_client(server_config, actor)
+        await mcp_client.connect_to_server()
+
+        # call tool
+        result, success = await mcp_client.execute_tool(tool_name, tool_args)
+        logger.info(f"MCP Result: {result}, Success: {success}")
+        # TODO: change to pydantic tool
+
+        await mcp_client.cleanup()
+
+        return result, success
 
     @enforce_types
     async def add_tool_from_mcp_server(self, mcp_server_name: str, mcp_tool_name: str, actor: PydanticUser) -> PydanticTool:
         """Add a tool from an MCP server to the Letta tool registry."""
+        # get the MCP server ID, we should migrate to use the server_id instead of the name
+        mcp_server_id = await self.get_mcp_server_id_by_name(mcp_server_name, actor=actor)
+        if not mcp_server_id:
+            raise ValueError(f"MCP server '{mcp_server_name}' not found")
+
         mcp_tools = await self.list_mcp_server_tools(mcp_server_name, actor=actor)
 
         for mcp_tool in mcp_tools:
             if mcp_tool.name == mcp_tool_name:
                 tool_create = ToolCreate.from_mcp(mcp_server_name=mcp_server_name, mcp_tool=mcp_tool)
-                return await self.tool_manager.create_mcp_tool_async(
+                return await self.tool_manager.create_mcp_tool_async(
+                    tool_create=tool_create, mcp_server_name=mcp_server_name, mcp_server_id=mcp_server_id, actor=actor
+                )
 
         # failed to add - handle error?
         return None
@@ -223,6 +230,18 @@ class MCPManager:
         # Convert the SQLAlchemy Tool object to PydanticTool
         return mcp_server.to_pydantic()
 
+    @enforce_types
+    async def get_mcp_servers_by_ids(self, mcp_server_ids: List[str], actor: PydanticUser) -> List[MCPServer]:
+        """Fetch multiple MCP servers by their IDs in a single query."""
+        if not mcp_server_ids:
+            return []
+
+        async with db_registry.async_session() as session:
+            mcp_servers = await MCPServerModel.list_async(
+                db_session=session, organization_id=actor.organization_id, id=mcp_server_ids  # This will use the IN operator
+            )
+            return [mcp_server.to_pydantic() for mcp_server in mcp_servers]
+
     @enforce_types
     async def get_mcp_server(self, mcp_server_name: str, actor: PydanticUser) -> PydanticTool:
         """Get a tool by name."""
@@ -319,3 +338,246 @@ class MCPManager:
                 logger.error(f"Failed to parse server params for MCP server {server_name} (skipping): {e}")
                 continue
         return mcp_server_list
+
+    async def get_mcp_client(
+        self,
+        server_config: Union[SSEServerConfig, StdioServerConfig, StreamableHTTPServerConfig],
+        actor: PydanticUser,
+        oauth_provider: Optional[Any] = None,
+    ) -> Union[AsyncSSEMCPClient, AsyncStdioMCPClient, AsyncStreamableHTTPMCPClient]:
+        """
+        Helper function to create the appropriate MCP client based on server configuration.
+
+        Args:
+            server_config: The server configuration object
+            actor: The user making the request
+            oauth_provider: Optional OAuth provider for authentication
+
+        Returns:
+            The appropriate MCP client instance
+
+        Raises:
+            ValueError: If server config type is not supported
+        """
+        # If no OAuth provider is provided, check if we have stored OAuth credentials
+        if oauth_provider is None and hasattr(server_config, "server_url"):
+            oauth_session = await self.get_oauth_session_by_server(server_config.server_url, actor)
+            if oauth_session and oauth_session.access_token:
+                # Create OAuth provider from stored credentials
+                from letta.services.mcp.oauth_utils import create_oauth_provider
+
+                oauth_provider = await create_oauth_provider(
+                    session_id=oauth_session.id,
+                    server_url=oauth_session.server_url,
+                    redirect_uri=oauth_session.redirect_uri,
+                    mcp_manager=self,
+                    actor=actor,
+                )
+
+        if server_config.type == MCPServerType.SSE:
+            server_config = SSEServerConfig(**server_config.model_dump())
+            return AsyncSSEMCPClient(server_config=server_config, oauth_provider=oauth_provider)
+        elif server_config.type == MCPServerType.STDIO:
+            server_config = StdioServerConfig(**server_config.model_dump())
+            return AsyncStdioMCPClient(server_config=server_config, oauth_provider=oauth_provider)
+        elif server_config.type == MCPServerType.STREAMABLE_HTTP:
+            server_config = StreamableHTTPServerConfig(**server_config.model_dump())
+            return AsyncStreamableHTTPMCPClient(server_config=server_config, oauth_provider=oauth_provider)
+        else:
+            raise ValueError(f"Unsupported server config type: {type(server_config)}")
+
+    # OAuth-related methods
+    @enforce_types
+    async def create_oauth_session(self, session_create: MCPOAuthSessionCreate, actor: PydanticUser) -> MCPOAuthSession:
+        """Create a new OAuth session for MCP server authentication."""
+        async with db_registry.async_session() as session:
+            # Create the OAuth session with a unique state
+            oauth_session = MCPOAuth(
+                id="mcp-oauth-" + str(uuid.uuid4())[:8],
+                state=secrets.token_urlsafe(32),
+                server_url=session_create.server_url,
+                server_name=session_create.server_name,
+                user_id=session_create.user_id,
+                organization_id=session_create.organization_id,
+                status=OAuthSessionStatus.PENDING,
+                created_at=datetime.now(),
+                updated_at=datetime.now(),
+            )
+            oauth_session = await oauth_session.create_async(session, actor=actor)
+
+            # Convert to Pydantic model
+            return MCPOAuthSession(
+                id=oauth_session.id,
+                state=oauth_session.state,
+                server_url=oauth_session.server_url,
+                server_name=oauth_session.server_name,
+                user_id=oauth_session.user_id,
+                organization_id=oauth_session.organization_id,
+                status=oauth_session.status,
+                created_at=oauth_session.created_at,
+                updated_at=oauth_session.updated_at,
+            )
+
+    @enforce_types
+    async def get_oauth_session_by_id(self, session_id: str, actor: PydanticUser) -> Optional[MCPOAuthSession]:
+        """Get an OAuth session by its ID."""
+        async with db_registry.async_session() as session:
+            try:
+                oauth_session = await MCPOAuth.read_async(db_session=session, identifier=session_id, actor=actor)
+                return MCPOAuthSession(
+                    id=oauth_session.id,
+                    state=oauth_session.state,
+                    server_url=oauth_session.server_url,
+                    server_name=oauth_session.server_name,
+                    user_id=oauth_session.user_id,
+                    organization_id=oauth_session.organization_id,
+                    authorization_url=oauth_session.authorization_url,
+                    authorization_code=oauth_session.authorization_code,
+                    access_token=oauth_session.access_token,
+                    refresh_token=oauth_session.refresh_token,
+                    token_type=oauth_session.token_type,
+                    expires_at=oauth_session.expires_at,
+                    scope=oauth_session.scope,
+                    client_id=oauth_session.client_id,
+                    client_secret=oauth_session.client_secret,
+                    redirect_uri=oauth_session.redirect_uri,
+                    status=oauth_session.status,
+                    created_at=oauth_session.created_at,
+                    updated_at=oauth_session.updated_at,
+                )
+            except NoResultFound:
+                return None
+
+    @enforce_types
+    async def get_oauth_session_by_server(self, server_url: str, actor: PydanticUser) -> Optional[MCPOAuthSession]:
+        """Get the latest OAuth session by server URL, organization, and user."""
+        from sqlalchemy import desc, select
+
+        async with db_registry.async_session() as session:
+            # Query for OAuth session matching organization, user, server URL, and status
+            # Order by updated_at desc to get the most recent record
+            result = await session.execute(
+                select(MCPOAuth)
+                .where(
+                    MCPOAuth.organization_id == actor.organization_id,
+                    MCPOAuth.user_id == actor.id,
+                    MCPOAuth.server_url == server_url,
+                    MCPOAuth.status == OAuthSessionStatus.AUTHORIZED,
+                )
+                .order_by(desc(MCPOAuth.updated_at))
+                .limit(1)
+            )
+            oauth_session = result.scalar_one_or_none()
+
+            if not oauth_session:
+                return None
+
+            return MCPOAuthSession(
+                id=oauth_session.id,
+                state=oauth_session.state,
+                server_url=oauth_session.server_url,
+                server_name=oauth_session.server_name,
+                user_id=oauth_session.user_id,
+                organization_id=oauth_session.organization_id,
+                authorization_url=oauth_session.authorization_url,
+                authorization_code=oauth_session.authorization_code,
+                access_token=oauth_session.access_token,
+                refresh_token=oauth_session.refresh_token,
+                token_type=oauth_session.token_type,
+                expires_at=oauth_session.expires_at,
+                scope=oauth_session.scope,
+                client_id=oauth_session.client_id,
+                client_secret=oauth_session.client_secret,
+                redirect_uri=oauth_session.redirect_uri,
+                status=oauth_session.status,
+                created_at=oauth_session.created_at,
+                updated_at=oauth_session.updated_at,
+            )
+
+    @enforce_types
+    async def update_oauth_session(self, session_id: str, session_update: MCPOAuthSessionUpdate, actor: PydanticUser) -> MCPOAuthSession:
+        """Update an existing OAuth session."""
+        async with db_registry.async_session() as session:
+            oauth_session = await MCPOAuth.read_async(db_session=session, identifier=session_id, actor=actor)
+
+            # Update fields that are provided
+            if session_update.authorization_url is not None:
+                oauth_session.authorization_url = session_update.authorization_url
+            if session_update.authorization_code is not None:
+                oauth_session.authorization_code = session_update.authorization_code
+            if session_update.access_token is not None:
+                oauth_session.access_token = session_update.access_token
+            if session_update.refresh_token is not None:
+                oauth_session.refresh_token = session_update.refresh_token
+            if session_update.token_type is not None:
+                oauth_session.token_type = session_update.token_type
+            if session_update.expires_at is not None:
+                oauth_session.expires_at = session_update.expires_at
+            if session_update.scope is not None:
+                oauth_session.scope = session_update.scope
+            if session_update.client_id is not None:
+                oauth_session.client_id = session_update.client_id
+            if session_update.client_secret is not None:
+                oauth_session.client_secret = session_update.client_secret
+            if session_update.redirect_uri is not None:
+                oauth_session.redirect_uri = session_update.redirect_uri
+            if session_update.status is not None:
+                oauth_session.status = session_update.status
+
+            # Always update the updated_at timestamp
+            oauth_session.updated_at = datetime.now()
+
+            oauth_session = await oauth_session.update_async(db_session=session, actor=actor)
+
+            return MCPOAuthSession(
+                id=oauth_session.id,
+                state=oauth_session.state,
+                server_url=oauth_session.server_url,
+                server_name=oauth_session.server_name,
+                user_id=oauth_session.user_id,
+                organization_id=oauth_session.organization_id,
+                authorization_url=oauth_session.authorization_url,
+                authorization_code=oauth_session.authorization_code,
+                access_token=oauth_session.access_token,
+                refresh_token=oauth_session.refresh_token,
+                token_type=oauth_session.token_type,
+                expires_at=oauth_session.expires_at,
+                scope=oauth_session.scope,
+                client_id=oauth_session.client_id,
+                client_secret=oauth_session.client_secret,
+                redirect_uri=oauth_session.redirect_uri,
+                status=oauth_session.status,
+                created_at=oauth_session.created_at,
+                updated_at=oauth_session.updated_at,
+            )
+
+    @enforce_types
+    async def delete_oauth_session(self, session_id: str, actor: PydanticUser) -> None:
+        """Delete an OAuth session."""
+        async with db_registry.async_session() as session:
+            try:
+                oauth_session = await MCPOAuth.read_async(db_session=session, identifier=session_id, actor=actor)
+                await oauth_session.hard_delete_async(db_session=session, actor=actor)
+            except NoResultFound:
+                raise ValueError(f"OAuth session with id {session_id} not found.")
+
+    @enforce_types
+    async def cleanup_expired_oauth_sessions(self, max_age_hours: int = 24) -> int:
+        """Clean up expired OAuth sessions and return the count of deleted sessions."""
+        cutoff_time = datetime.now() - timedelta(hours=max_age_hours)
+
+        async with db_registry.async_session() as session:
+            from sqlalchemy import select
+
+            # Find expired sessions
+            result = await session.execute(select(MCPOAuth).where(MCPOAuth.created_at < cutoff_time))
+            expired_sessions = result.scalars().all()
+
+            # Delete expired sessions using async ORM method
+            for oauth_session in expired_sessions:
+                await oauth_session.hard_delete_async(db_session=session, actor=None)
+
+            if expired_sessions:
+                logger.info(f"Cleaned up {len(expired_sessions)} expired OAuth sessions")
+
+            return len(expired_sessions)
letta/services/provider_manager.py
CHANGED
@@ -213,12 +213,12 @@ class ProviderManager:
             provider_type=provider_check.provider_type,
             api_key=provider_check.api_key,
             provider_category=ProviderCategory.byok,
+            access_key=provider_check.access_key,  # This contains the access key ID for Bedrock
             region=provider_check.region,
         ).cast_to_subtype()
 
         # TODO: add more string sanity checks here before we hit actual endpoints
         if not provider.api_key:
-            raise ValueError("API key is required")
+            raise ValueError("API key is required!")
 
         await provider.check_api_key()
letta/services/tool_executor/tool_executor.py
CHANGED
@@ -36,7 +36,10 @@ class SandboxToolExecutor(ToolExecutor):
     ) -> ToolExecutionResult:
 
         # Store original memory state
+        if agent_state:
+            orig_memory_str = await agent_state.memory.compile_async()
+        else:
+            orig_memory_str = None
 
         try:
             # Prepare function arguments
@@ -58,7 +61,8 @@ class SandboxToolExecutor(ToolExecutor):
 
         # Verify memory integrity
         if agent_state:
+            new_memory_str = await agent_state.memory.compile_async()
+            assert orig_memory_str == new_memory_str, "Memory should not be modified in a sandbox tool"
 
         # Update agent memory if needed
         if tool_execution_result.agent_state is not None:
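
Generic, self-contained sketch of the snapshot-and-verify pattern used above: serialize the memory state before the sandboxed call and assert it is unchanged afterwards. The Memory class here is a stand-in for Letta's async Memory.compile_async(), not the real implementation.

```python
# Stand-in types; Letta's real Memory exposes compile_async() instead of compile().
class Memory:
    def __init__(self, blocks: dict):
        self.blocks = dict(blocks)

    def compile(self) -> str:
        # Deterministic serialization so snapshots are comparable
        return "\n".join(f"{k}: {v}" for k, v in sorted(self.blocks.items()))


def run_sandboxed(memory: Memory, tool):
    orig_memory_str = memory.compile()   # snapshot before execution
    result = tool()                      # sandboxed tool must not touch memory
    new_memory_str = memory.compile()
    assert orig_memory_str == new_memory_str, "Memory should not be modified in a sandbox tool"
    return result


print(run_sandboxed(Memory({"persona": "helpful"}), lambda: "ok"))
```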
letta/services/tool_manager.py
CHANGED
@@ -106,8 +106,10 @@ class ToolManager:
 
     @enforce_types
     @trace_method
-    def create_or_update_mcp_tool(
+    def create_or_update_mcp_tool(
+        self, tool_create: ToolCreate, mcp_server_name: str, mcp_server_id: str, actor: PydanticUser
+    ) -> PydanticTool:
+        metadata = {MCP_TOOL_TAG_NAME_PREFIX: {"server_name": mcp_server_name, "server_id": mcp_server_id}}
         return self.create_or_update_tool(
             PydanticTool(
                 tool_type=ToolType.EXTERNAL_MCP, name=tool_create.json_schema["name"], metadata_=metadata, **tool_create.model_dump()
@@ -116,8 +118,10 @@ class ToolManager:
         )
 
     @enforce_types
-    async def create_mcp_tool_async(
+    async def create_mcp_tool_async(
+        self, tool_create: ToolCreate, mcp_server_name: str, mcp_server_id: str, actor: PydanticUser
+    ) -> PydanticTool:
+        metadata = {MCP_TOOL_TAG_NAME_PREFIX: {"server_name": mcp_server_name, "server_id": mcp_server_id}}
         return await self.create_or_update_tool_async(
             PydanticTool(
                 tool_type=ToolType.EXTERNAL_MCP, name=tool_create.json_schema["name"], metadata_=metadata, **tool_create.model_dump()
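
Hypothetical helper (not in the diff) showing how the metadata written by create_mcp_tool_async could be read back to recover a tool's MCP origin. The literal value of MCP_TOOL_TAG_NAME_PREFIX shown below is an assumption; in Letta the constant is imported from letta.constants.

```python
from typing import Optional, Tuple

MCP_TOOL_TAG_NAME_PREFIX = "mcp"  # assumed value; the real constant lives in letta.constants


def mcp_origin(tool_metadata: Optional[dict]) -> Optional[Tuple[str, str]]:
    """Return (server_name, server_id) if the tool was registered from an MCP server."""
    entry = (tool_metadata or {}).get(MCP_TOOL_TAG_NAME_PREFIX)
    if not entry:
        return None
    return entry["server_name"], entry["server_id"]


print(mcp_origin({"mcp": {"server_name": "weather-server", "server_id": "mcp_server-1234"}}))
```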
letta/services/tool_sandbox/base.py
CHANGED
@@ -74,12 +74,12 @@ class AsyncToolSandboxBase(ABC):
         """
         raise NotImplementedError
 
-    def generate_execution_script(self, agent_state: Optional[AgentState], wrap_print_with_markers: bool = False) -> str:
+    async def generate_execution_script(self, agent_state: Optional[AgentState], wrap_print_with_markers: bool = False) -> str:
         """
         Generate code to run inside of execution sandbox. Serialize the agent state and arguments, call the tool,
         then base64-encode/pickle the result. Runs a jinja2 template constructing the python file.
         """
-        from letta.templates.template_helper import
+        from letta.templates.template_helper import render_template_async
 
         # Select the appropriate template based on whether the function is async
         TEMPLATE_NAME = "sandbox_code_file_async.py.j2" if self.is_async_function else "sandbox_code_file.py.j2"
@@ -106,7 +106,7 @@ class AsyncToolSandboxBase(ABC):
 
         agent_state_pickle = pickle.dumps(agent_state) if self.inject_agent_state else None
 
-        return
+        return await render_template_async(
             TEMPLATE_NAME,
             future_import=future_import,
             inject_agent_state=self.inject_agent_state,
letta/services/tool_sandbox/e2b_sandbox.py
CHANGED
@@ -92,7 +92,7 @@ class AsyncToolSandboxE2B(AsyncToolSandboxBase):
         # Finally, get any that are passed explicitly into the `run` function call
         if additional_env_vars:
             env_vars.update(additional_env_vars)
-        code = self.generate_execution_script(agent_state=agent_state)
+        code = await self.generate_execution_script(agent_state=agent_state)
 
         try:
             log_event(
letta/services/tool_sandbox/local_sandbox.py
CHANGED
@@ -99,8 +99,8 @@ class AsyncToolSandboxLocal(AsyncToolSandboxBase):
 
         # Make sure sandbox directory exists
         sandbox_dir = os.path.expanduser(local_configs.sandbox_dir)
-        if not os.path.exists(sandbox_dir)
-            os.makedirs
+        if not await asyncio.to_thread(lambda: os.path.exists(sandbox_dir) and os.path.isdir(sandbox_dir)):
+            await asyncio.to_thread(os.makedirs, sandbox_dir)
 
         # If using a virtual environment, ensure it's prepared in parallel
         venv_preparation_task = None
@@ -109,11 +109,18 @@ class AsyncToolSandboxLocal(AsyncToolSandboxBase):
         venv_preparation_task = asyncio.create_task(self._prepare_venv(local_configs, venv_path, env))
 
         # Generate and write execution script (always with markers, since we rely on stdout)
+        code = await self.generate_execution_script(agent_state=agent_state, wrap_print_with_markers=True)
+
+        async def write_temp_file(dir, content):
+            def _write():
+                with tempfile.NamedTemporaryFile(mode="w", dir=dir, suffix=".py", delete=False) as temp_file:
+                    temp_file.write(content)
+                    temp_file.flush()
+                    return temp_file.name
+
+            return await asyncio.to_thread(_write)
+
+        temp_file_path = await write_temp_file(sandbox_dir, code)
 
         try:
             # If we started a venv preparation task, wait for it to complete
@@ -159,14 +166,14 @@ class AsyncToolSandboxLocal(AsyncToolSandboxBase):
         from letta.settings import settings
 
         if not settings.debug:
-            os.remove
+            await asyncio.to_thread(os.remove, temp_file_path)
 
     @trace_method
     async def _prepare_venv(self, local_configs, venv_path: str, env: Dict[str, str]):
         """
         Prepare virtual environment asynchronously (in a background thread).
         """
-        if self.force_recreate_venv or not os.path.isdir
+        if self.force_recreate_venv or not await asyncio.to_thread(os.path.isdir, venv_path):
             sandbox_dir = os.path.expanduser(local_configs.sandbox_dir)
             log_event(name="start create_venv_for_local_sandbox", attributes={"venv_path": venv_path})
             await asyncio.to_thread(
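
Self-contained sketch of the pattern adopted above: push blocking filesystem work (directory checks, temp-file writes, deletes) onto a worker thread with asyncio.to_thread so the event loop stays responsive. It uses only the standard library; the paths are placeholders.

```python
import asyncio
import os
import tempfile


async def write_script(sandbox_dir: str, code: str) -> str:
    # Directory check and creation happen off the event loop
    if not await asyncio.to_thread(os.path.isdir, sandbox_dir):
        await asyncio.to_thread(os.makedirs, sandbox_dir)

    def _write() -> str:
        with tempfile.NamedTemporaryFile(mode="w", dir=sandbox_dir, suffix=".py", delete=False) as f:
            f.write(code)
            f.flush()
            return f.name

    return await asyncio.to_thread(_write)


async def main():
    path = await write_script(os.path.join(tempfile.gettempdir(), "sandbox-demo"), "print('hello')")
    print(path)
    await asyncio.to_thread(os.remove, path)  # mirrors the non-debug cleanup branch


asyncio.run(main())
```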
letta/settings.py
CHANGED
@@ -242,7 +242,7 @@ class Settings(BaseSettings):
     uvicorn_reload: bool = False
     uvicorn_timeout_keep_alive: int = 5
 
-    use_uvloop: bool = Field(default=
+    use_uvloop: bool = Field(default=False, description="Enable uvloop as asyncio event loop.")
     use_granian: bool = Field(default=False, description="Use Granian for workers")
     sqlalchemy_tracing: bool = False
 
@@ -278,6 +278,10 @@ class Settings(BaseSettings):
     pinecone_agent_index: Optional[str] = "recall"
     upsert_pinecone_indices: bool = False
 
+    # File processing timeout settings
+    file_processing_timeout_minutes: int = 30
+    file_processing_timeout_error_message: str = "File processing timed out after {} minutes. Please try again."
+
     @property
     def letta_pg_uri(self) -> str:
         if self.pg_uri:
@@ -327,6 +331,11 @@ class LogSettings(BaseSettings):
     verbose_telemetry_logging: bool = Field(False)
 
 
+class TelemetrySettings(BaseSettings):
+    model_config = SettingsConfigDict(env_prefix="letta_telemetry_", extra="ignore")
+    profiler: bool | None = Field(False, description="Enable use of the profiler.")
+
+
 # singleton
 settings = Settings(_env_parse_none_str="None")
 test_settings = TestSettings()
@@ -334,3 +343,4 @@ model_settings = ModelSettings()
 tool_settings = ToolSettings()
 summarizer_settings = SummarizerSettings()
 log_settings = LogSettings()
+telemetry_settings = TelemetrySettings()
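
Minimal sketch of how the new TelemetrySettings is expected to be configured: pydantic-settings maps fields to environment variables via env_prefix, so profiler is read from LETTA_TELEMETRY_PROFILER (matching is case-insensitive by default). The class mirrors the one added above; running it standalone is purely illustrative.

```python
import os

from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict


class TelemetrySettings(BaseSettings):
    model_config = SettingsConfigDict(env_prefix="letta_telemetry_", extra="ignore")
    profiler: bool | None = Field(False, description="Enable use of the profiler.")


os.environ["LETTA_TELEMETRY_PROFILER"] = "true"
print(TelemetrySettings().profiler)  # True
```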
letta/system.py
CHANGED
@@ -141,7 +141,7 @@ def package_user_message(
     return json_dumps(packaged_message)
 
 
-def package_function_response(was_success, response_string, timezone):
+def package_function_response(was_success: bool, response_string: str, timezone: str | None) -> str:
     formatted_time = get_local_time(timezone=timezone)
     packaged_message = {
         "status": "OK" if was_success else "Failed",
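
Hedged sketch of the payload shape package_function_response appears to build, based on the fields visible in this hunk (a status flag plus a local timestamp). The "message" and "time" key names and the plain json.dumps/datetime calls are assumptions standing in for Letta's json_dumps and get_local_time helpers.

```python
import json
from datetime import datetime


def package_function_response(was_success: bool, response_string: str, timezone: str | None = None) -> str:
    # `timezone` is accepted for signature parity but unused in this stand-in
    packaged_message = {
        "status": "OK" if was_success else "Failed",
        "message": response_string,          # assumed key name
        "time": datetime.now().isoformat(),  # stand-in for get_local_time(timezone=timezone)
    }
    return json.dumps(packaged_message)


print(package_function_response(True, "42"))
```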
letta/templates/template_helper.py
CHANGED
@@ -1,8 +1,10 @@
 import os
 
-from jinja2 import Environment, FileSystemLoader, StrictUndefined
+from jinja2 import Environment, FileSystemLoader, StrictUndefined, Template
 
 TEMPLATE_DIR = os.path.dirname(__file__)
+
+# Synchronous environment (for backward compatibility)
 jinja_env = Environment(
     loader=FileSystemLoader(TEMPLATE_DIR),
     undefined=StrictUndefined,
@@ -10,7 +12,29 @@ jinja_env = Environment(
     lstrip_blocks=True,
 )
 
+# Async-enabled environment
+jinja_async_env = Environment(
+    loader=FileSystemLoader(TEMPLATE_DIR),
+    undefined=StrictUndefined,
+    trim_blocks=True,
+    lstrip_blocks=True,
+    enable_async=True,  # Enable async support
+)
+
 
 def render_template(template_name: str, **kwargs):
+    """Synchronous template rendering function (kept for backward compatibility)"""
     template = jinja_env.get_template(template_name)
     return template.render(**kwargs)
+
+
+async def render_template_async(template_name: str, **kwargs):
+    """Asynchronous template rendering function that doesn't block the event loop"""
+    template = jinja_async_env.get_template(template_name)
+    return await template.render_async(**kwargs)
+
+
+async def render_string_async(template_string: str, **kwargs):
+    """Asynchronously render a template from a string"""
+    template = Template(template_string, enable_async=True)
+    return await template.render_async(**kwargs)
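
Self-contained example of the jinja2 async rendering pattern the new helpers rely on: an Environment (or Template) created with enable_async=True exposes render_async(), which can be awaited instead of blocking the event loop. A string template is used here so the example runs without Letta's template directory.

```python
import asyncio

from jinja2 import Environment


async def main():
    env = Environment(enable_async=True)
    template = env.from_string("Hello {{ name }}, you have {{ count }} tools.")
    print(await template.render_async(name="Letta", count=3))


asyncio.run(main())
```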