letta-nightly 0.7.13.dev20250511104036__py3-none-any.whl → 0.7.14.dev20250513020711__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- letta/__init__.py +1 -1
- letta/agent.py +14 -17
- letta/agents/base_agent.py +112 -1
- letta/agents/letta_agent.py +35 -55
- letta/agents/letta_agent_batch.py +22 -45
- letta/agents/voice_agent.py +10 -42
- letta/functions/schema_generator.py +7 -3
- letta/llm_api/anthropic.py +4 -2
- letta/llm_api/openai.py +4 -2
- letta/orm/agents_tags.py +5 -2
- letta/orm/blocks_agents.py +3 -1
- letta/orm/sqlalchemy_base.py +91 -1
- letta/schemas/message.py +1 -1
- letta/serialize_schemas/marshmallow_agent.py +4 -4
- letta/server/db.py +180 -88
- letta/server/rest_api/app.py +6 -3
- letta/server/rest_api/chat_completions_interface.py +1 -0
- letta/server/rest_api/interface.py +54 -16
- letta/server/rest_api/routers/v1/sources.py +1 -0
- letta/server/server.py +1 -2
- letta/services/agent_manager.py +40 -31
- letta/services/block_manager.py +61 -34
- letta/services/group_manager.py +11 -15
- letta/services/identity_manager.py +9 -13
- letta/services/job_manager.py +12 -17
- letta/services/llm_batch_manager.py +17 -21
- letta/services/message_manager.py +53 -31
- letta/services/organization_manager.py +7 -14
- letta/services/passage_manager.py +6 -10
- letta/services/provider_manager.py +5 -9
- letta/services/sandbox_config_manager.py +13 -17
- letta/services/source_manager.py +13 -17
- letta/services/step_manager.py +5 -9
- letta/services/tool_manager.py +9 -14
- letta/services/user_manager.py +7 -12
- letta/settings.py +2 -0
- letta/streaming_interface.py +2 -0
- letta/utils.py +1 -1
- {letta_nightly-0.7.13.dev20250511104036.dist-info → letta_nightly-0.7.14.dev20250513020711.dist-info}/METADATA +2 -1
- {letta_nightly-0.7.13.dev20250511104036.dist-info → letta_nightly-0.7.14.dev20250513020711.dist-info}/RECORD +43 -43
- {letta_nightly-0.7.13.dev20250511104036.dist-info → letta_nightly-0.7.14.dev20250513020711.dist-info}/LICENSE +0 -0
- {letta_nightly-0.7.13.dev20250511104036.dist-info → letta_nightly-0.7.14.dev20250513020711.dist-info}/WHEEL +0 -0
- {letta_nightly-0.7.13.dev20250511104036.dist-info → letta_nightly-0.7.14.dev20250513020711.dist-info}/entry_points.txt +0 -0
letta/orm/blocks_agents.py
CHANGED
@@ -1,4 +1,4 @@
-from sqlalchemy import ForeignKey, ForeignKeyConstraint, String, UniqueConstraint
+from sqlalchemy import ForeignKey, ForeignKeyConstraint, Index, String, UniqueConstraint
 from sqlalchemy.orm import Mapped, mapped_column
 
 from letta.orm.base import Base
@@ -18,6 +18,8 @@ class BlocksAgents(Base):
             ["block_id", "block_label"], ["block.id", "block.label"], name="fk_block_id_label", deferrable=True, initially="DEFERRED"
         ),
         UniqueConstraint("agent_id", "block_id", name="unique_agent_block"),
+        Index("ix_blocks_agents_block_label_agent_id", "block_label", "agent_id"),
+        Index("ix_blocks_block_label", "block_label"),
     )
 
     # unique agent + block label
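The two new indexes cover lookups of agent-block rows by block_label, alone and together with agent_id. A minimal sketch of the kind of query they are meant to accelerate; the query itself is illustrative and not part of the package, though the column names follow the ORM definition above:

from sqlalchemy import select

from letta.orm.blocks_agents import BlocksAgents


def find_block_mapping(session, agent_id: str, block_label: str):
    # The composite index (block_label, agent_id) lets the database resolve this
    # filter without scanning the whole blocks_agents table.
    stmt = select(BlocksAgents).where(
        BlocksAgents.block_label == block_label,
        BlocksAgents.agent_id == agent_id,
    )
    return session.execute(stmt).scalar_one_or_none()
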
letta/orm/sqlalchemy_base.py
CHANGED
@@ -6,6 +6,7 @@ from typing import TYPE_CHECKING, List, Literal, Optional, Tuple, Union
 
 from sqlalchemy import String, and_, func, or_, select
 from sqlalchemy.exc import DBAPIError, IntegrityError, TimeoutError
+from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy.orm import Mapped, Session, mapped_column
 
 from letta.log import get_logger
@@ -300,6 +301,44 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
             raise NoResultFound(f"{cls.__name__} not found with {', '.join(conditions if conditions else ['no conditions'])}")
         return found[0]
 
+    @classmethod
+    @handle_db_timeout
+    async def read_async(
+        cls,
+        db_session: "Session",
+        identifier: Optional[str] = None,
+        actor: Optional["User"] = None,
+        access: Optional[List[Literal["read", "write", "admin"]]] = ["read"],
+        access_type: AccessType = AccessType.ORGANIZATION,
+        **kwargs,
+    ) -> "SqlalchemyBase":
+        """The primary accessor for an ORM record. Async version of read method.
+        Args:
+            db_session: the database session to use when retrieving the record
+            identifier: the identifier of the record to read, can be the id string or the UUID object for backwards compatibility
+            actor: if specified, results will be scoped only to records the user is able to access
+            access: if actor is specified, records will be filtered to the minimum permission level for the actor
+            kwargs: additional arguments to pass to the read, used for more complex objects
+        Returns:
+            The matching object
+        Raises:
+            NoResultFound: if the object is not found
+        """
+        # this is ok because read_multiple will check if the
+        identifiers = [] if identifier is None else [identifier]
+        found = await cls.read_multiple_async(db_session, identifiers, actor, access, access_type, **kwargs)
+        if len(found) == 0:
+            # for backwards compatibility.
+            conditions = []
+            if identifier:
+                conditions.append(f"id={identifier}")
+            if actor:
+                conditions.append(f"access level in {access} for {actor}")
+            if hasattr(cls, "is_deleted"):
+                conditions.append("is_deleted=False")
+            raise NoResultFound(f"{cls.__name__} not found with {', '.join(conditions if conditions else ['no conditions'])}")
+        return found[0]
+
     @classmethod
     @handle_db_timeout
     def read_multiple(
@@ -323,6 +362,38 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
         Raises:
             NoResultFound: if the object is not found
         """
+        query, query_conditions = cls._read_multiple_preprocess(identifiers, actor, access, access_type, **kwargs)
+        results = db_session.execute(query).scalars().all()
+        return cls._read_multiple_postprocess(results, identifiers, query_conditions)
+
+    @classmethod
+    @handle_db_timeout
+    async def read_multiple_async(
+        cls,
+        db_session: "AsyncSession",
+        identifiers: List[str] = [],
+        actor: Optional["User"] = None,
+        access: Optional[List[Literal["read", "write", "admin"]]] = ["read"],
+        access_type: AccessType = AccessType.ORGANIZATION,
+        **kwargs,
+    ) -> List["SqlalchemyBase"]:
+        """
+        Async version of read_multiple(...)
+        The primary accessor for ORM record(s)
+        """
+        query, query_conditions = cls._read_multiple_preprocess(identifiers, actor, access, access_type, **kwargs)
+        results = await db_session.execute(query)
+        return cls._read_multiple_postprocess(results.scalars().all(), identifiers, query_conditions)
+
+    @classmethod
+    def _read_multiple_preprocess(
+        cls,
+        identifiers: List[str],
+        actor: Optional["User"],
+        access: Optional[List[Literal["read", "write", "admin"]]],
+        access_type: AccessType,
+        **kwargs,
+    ):
         logger.debug(f"Reading {cls.__name__} with ID(s): {identifiers} with actor={actor}")
 
         # Start the query
@@ -350,7 +421,10 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
             query = query.where(cls.is_deleted == False)
             query_conditions.append("is_deleted=False")
 
-
+        return query, query_conditions
+
+    @classmethod
+    def _read_multiple_postprocess(cls, results, identifiers: List[str], query_conditions) -> List["SqlalchemyBase"]:
         if results:  # if empty list a.k.a. no results
             if len(identifiers) > 0:
                 # find which identifiers were not found
@@ -471,6 +545,22 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
         db_session.refresh(self)
         return self
 
+    @handle_db_timeout
+    async def update_async(self, db_session: AsyncSession, actor: "User | None" = None, no_commit: bool = False) -> "SqlalchemyBase":
+        """Async version of update function"""
+        logger.debug(...)
+        if actor:
+            self._set_created_and_updated_by_fields(actor.id)
+        self.set_updated_at()
+
+        db_session.add(self)
+        if no_commit:
+            await db_session.flush()
+        else:
+            await db_session.commit()
+        await db_session.refresh(self)
+        return self
+
     @classmethod
     @handle_db_timeout
     def size(
letta/schemas/message.py
CHANGED
@@ -404,7 +404,7 @@ class Message(BaseMessage):
             stdout=self.tool_returns[0].stdout if self.tool_returns else None,
             stderr=self.tool_returns[0].stderr if self.tool_returns else None,
             name=self.name,
-            otid=self.id
+            otid=Message.generate_otid_from_id(self.id, len(messages)),
             sender_id=self.sender_id,
             step_id=self.step_id,
         )
letta/serialize_schemas/marshmallow_agent.py
CHANGED
@@ -1,6 +1,7 @@
 from typing import Dict
 
 from marshmallow import fields, post_dump, pre_load
+from sqlalchemy.orm import sessionmaker
 
 import letta
 from letta.orm import Agent
@@ -14,7 +15,6 @@ from letta.serialize_schemas.marshmallow_custom_fields import EmbeddingConfigFie
 from letta.serialize_schemas.marshmallow_message import SerializedMessageSchema
 from letta.serialize_schemas.marshmallow_tag import SerializedAgentTagSchema
 from letta.serialize_schemas.marshmallow_tool import SerializedToolSchema
-from letta.server.db import SessionLocal
 
 
 class MarshmallowAgentSchema(BaseSchema):
@@ -41,7 +41,7 @@
     tool_exec_environment_variables = fields.List(fields.Nested(SerializedAgentEnvironmentVariableSchema))
     tags = fields.List(fields.Nested(SerializedAgentTagSchema))
 
-    def __init__(self, *args, session:
+    def __init__(self, *args, session: sessionmaker, actor: User, **kwargs):
         super().__init__(*args, actor=actor, **kwargs)
         self.session = session
 
@@ -60,9 +60,9 @@
         After dumping the agent, load all its Message rows and serialize them here.
         """
         # TODO: This is hacky, but want to move fast, please refactor moving forward
-        from letta.server.db import
+        from letta.server.db import db_registry
 
-        with
+        with db_registry.session() as session:
             agent_id = data.get("id")
             msgs = (
                 session.query(MessageModel)
letta/server/db.py
CHANGED
@@ -1,28 +1,19 @@
 import os
 import threading
-from contextlib import contextmanager
+from contextlib import asynccontextmanager, contextmanager
+from typing import Any, AsyncGenerator, Generator
 
 from rich.console import Console
 from rich.panel import Panel
 from rich.text import Text
-from sqlalchemy import create_engine
+from sqlalchemy import Engine, create_engine
+from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker, create_async_engine
 from sqlalchemy.orm import sessionmaker
 
 from letta.config import LettaConfig
 from letta.log import get_logger
-from letta.orm import Base
 from letta.settings import settings
 
-# Use globals for the lock and initialization flag
-_engine_lock = threading.Lock()
-_engine_initialized = False
-
-# Create variables in global scope but don't initialize them yet
-config = LettaConfig.load()
-logger = get_logger(__name__)
-engine = None
-SessionLocal = None
-
 
 def print_sqlite_schema_error():
     """Print a formatted error message for SQLite schema issues"""
@@ -54,86 +45,187 @@ def db_error_handler():
         exit(1)
 
 
-… (old lines 57-119, removed by this change, are not rendered in this diff view)
+class DatabaseRegistry:
+    """Registry for database connections and sessions.
+
+    This class manages both synchronous and asynchronous database connections
+    and provides context managers for session handling.
+    """
+
+    def __init__(self):
+        self._engines: dict[str, Engine] = {}
+        self._async_engines: dict[str, AsyncEngine] = {}
+        self._session_factories: dict[str, sessionmaker] = {}
+        self._async_session_factories: dict[str, async_sessionmaker] = {}
+        self._initialized: dict[str, bool] = {"sync": False, "async": False}
+        self._lock = threading.Lock()
+        self.config = LettaConfig.load()
+        self.logger = get_logger(__name__)
+
+    def initialize_sync(self, force: bool = False) -> None:
+        """Initialize the synchronous database engine if not already initialized."""
+        with self._lock:
+            if self._initialized.get("sync") and not force:
+                return
+
+            # Postgres engine
+            if settings.letta_pg_uri_no_default:
+                self.logger.info("Creating postgres engine")
+                self.config.recall_storage_type = "postgres"
+                self.config.recall_storage_uri = settings.letta_pg_uri_no_default
+                self.config.archival_storage_type = "postgres"
+                self.config.archival_storage_uri = settings.letta_pg_uri_no_default
+
+                engine = create_engine(
+                    settings.letta_pg_uri,
+                    # f"{settings.letta_pg_uri}?options=-c%20client_encoding=UTF8",
+                    pool_size=settings.pg_pool_size,
+                    max_overflow=settings.pg_max_overflow,
+                    pool_timeout=settings.pg_pool_timeout,
+                    pool_recycle=settings.pg_pool_recycle,
+                    echo=settings.pg_echo,
+                    # connect_args={"client_encoding": "utf8"},
+                )
+
+                self._engines["default"] = engine
+            # SQLite engine
+            else:
+                from letta.orm import Base
+
+                # TODO: don't rely on config storage
+                engine_path = "sqlite:///" + os.path.join(self.config.recall_storage_path, "sqlite.db")
+                self.logger.info("Creating sqlite engine " + engine_path)
+
+                engine = create_engine(engine_path)
+
+                # Wrap the engine with error handling
+                self._wrap_sqlite_engine(engine)
+
+                Base.metadata.create_all(bind=engine)
+                self._engines["default"] = engine
+
+            # Create session factory
+            self._session_factories["default"] = sessionmaker(autocommit=False, autoflush=False, bind=self._engines["default"])
+            self._initialized["sync"] = True
+
+    def initialize_async(self, force: bool = False) -> None:
+        """Initialize the asynchronous database engine if not already initialized."""
+        with self._lock:
+            if self._initialized.get("async") and not force:
+                return
+
+            if settings.letta_pg_uri_no_default:
+                self.logger.info("Creating async postgres engine")
+
+                # Create async engine - convert URI to async format
+                pg_uri = settings.letta_pg_uri
+                if pg_uri.startswith("postgresql://"):
+                    async_pg_uri = pg_uri.replace("postgresql://", "postgresql+asyncpg://")
+                else:
+                    async_pg_uri = f"postgresql+asyncpg://{pg_uri.split('://', 1)[1]}" if "://" in pg_uri else pg_uri
+
+                async_engine = create_async_engine(
+                    async_pg_uri,
+                    pool_size=settings.pg_pool_size,
+                    max_overflow=settings.pg_max_overflow,
+                    pool_timeout=settings.pg_pool_timeout,
+                    pool_recycle=settings.pg_pool_recycle,
+                    echo=settings.pg_echo,
+                )
+
+                self._async_engines["default"] = async_engine
+
+                # Create async session factory
+                self._async_session_factories["default"] = async_sessionmaker(
+                    autocommit=False, autoflush=False, bind=self._async_engines["default"], class_=AsyncSession
+                )
+                self._initialized["async"] = True
+            else:
+                self.logger.warning("Async SQLite is currently not supported. Please use PostgreSQL for async database operations.")
+                # TODO (cliandy): unclear around async sqlite support in sqlalchemy, we will not currently support this
+                self._initialized["async"] = False
+
+    def _wrap_sqlite_engine(self, engine: Engine) -> None:
+        """Wrap SQLite engine with error handling."""
+        original_connect = engine.connect
+
+        def wrapped_connect(*args, **kwargs):
+            with db_error_handler():
+                connection = original_connect(*args, **kwargs)
+                original_execute = connection.execute
+
+                def wrapped_execute(*args, **kwargs):
+                    with db_error_handler():
+                        return original_execute(*args, **kwargs)
+
+                connection.execute = wrapped_execute
+                return connection
+
+        engine.connect = wrapped_connect
+
+    def get_engine(self, name: str = "default") -> Engine:
+        """Get a database engine by name."""
+        self.initialize_sync()
+        return self._engines.get(name)
+
+    def get_async_engine(self, name: str = "default") -> AsyncEngine:
+        """Get an async database engine by name."""
+        self.initialize_async()
+        return self._async_engines.get(name)
+
+    def get_session_factory(self, name: str = "default") -> sessionmaker:
+        """Get a session factory by name."""
+        self.initialize_sync()
+        return self._session_factories.get(name)
+
+    def get_async_session_factory(self, name: str = "default") -> async_sessionmaker:
+        """Get an async session factory by name."""
+        self.initialize_async()
+        return self._async_session_factories.get(name)
+
+    @contextmanager
+    def session(self, name: str = "default") -> Generator[Any, None, None]:
+        """Context manager for database sessions."""
+        session_factory = self.get_session_factory(name)
+        if not session_factory:
+            raise ValueError(f"No session factory found for '{name}'")
+
+        session = session_factory()
+        try:
+            yield session
+        finally:
+            session.close()
+
+    @asynccontextmanager
+    async def async_session(self, name: str = "default") -> AsyncGenerator[AsyncSession, None]:
+        """Async context manager for database sessions."""
+        session_factory = self.get_async_session_factory(name)
+        if not session_factory:
+            raise ValueError(f"No async session factory found for '{name}' or async database is not configured")
+
+        session = session_factory()
+        try:
+            yield session
+        finally:
+            await session.close()
+
+
+# Create a singleton instance
+db_registry = DatabaseRegistry()
 
 
 def get_db():
-    """Get a database session
-
+    """Get a database session."""
+    with db_registry.session() as session:
+        yield session
 
-    # Make sure engine is initialized
-    if not _engine_initialized:
-        initialize_engine()
 
-
-
-
-        yield
-    finally:
-        db.close()
+async def get_db_async():
+    """Get an async database session."""
+    async with db_registry.async_session() as session:
+        yield session
 
 
-#
+# Prefer calling db_registry.session() or db_registry.async_session() directly
+# This is for backwards compatibility
 db_context = contextmanager(get_db)
letta/server/rest_api/app.py
CHANGED
@@ -184,11 +184,14 @@ def create_application() -> "FastAPI":
     @app.exception_handler(Exception)
     async def generic_error_handler(request: Request, exc: Exception):
         # Log the actual error for debugging
-        log.error(f"Unhandled error: {exc}", exc_info=True)
-        print(f"Unhandled error: {exc}")
+        log.error(f"Unhandled error: {str(exc)}", exc_info=True)
+        print(f"Unhandled error: {str(exc)}")
+
+        import traceback
 
         # Print the stack trace
-        print(f"Stack trace: {
+        print(f"Stack trace: {traceback.format_exc()}")
+
         if (os.getenv("SENTRY_DSN") is not None) and (os.getenv("SENTRY_DSN") != ""):
             import sentry_sdk
 
letta/server/rest_api/chat_completions_interface.py
CHANGED
@@ -162,6 +162,7 @@ class ChatCompletionsStreamingInterface(AgentChunkStreamingInterface):
         expect_reasoning_content: bool = False,
         name: Optional[str] = None,
         message_index: int = 0,
+        prev_message_type: Optional[str] = None,
     ) -> None:
         """
         Called externally with a ChatCompletionChunkResponse. Transforms
|