letta-nightly 0.6.4.dev20241216104246__py3-none-any.whl → 0.6.5.dev20241218055539__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Note: this version of letta-nightly has been flagged as potentially problematic.
- letta/__init__.py +1 -1
- letta/agent.py +95 -101
- letta/client/client.py +1 -0
- letta/constants.py +6 -1
- letta/embeddings.py +3 -9
- letta/functions/function_sets/base.py +11 -57
- letta/functions/schema_generator.py +2 -6
- letta/llm_api/anthropic.py +38 -13
- letta/llm_api/llm_api_tools.py +12 -1
- letta/local_llm/function_parser.py +2 -2
- letta/orm/__init__.py +1 -1
- letta/orm/agent.py +19 -1
- letta/orm/errors.py +8 -0
- letta/orm/file.py +3 -2
- letta/orm/mixins.py +3 -14
- letta/orm/organization.py +19 -3
- letta/orm/passage.py +59 -23
- letta/orm/source.py +4 -0
- letta/orm/sqlalchemy_base.py +25 -18
- letta/prompts/system/memgpt_modified_chat.txt +1 -1
- letta/prompts/system/memgpt_modified_o1.txt +1 -1
- letta/providers.py +2 -0
- letta/schemas/agent.py +35 -0
- letta/schemas/embedding_config.py +20 -2
- letta/schemas/passage.py +1 -1
- letta/schemas/sandbox_config.py +2 -1
- letta/server/rest_api/app.py +43 -5
- letta/server/rest_api/routers/v1/tools.py +1 -1
- letta/server/rest_api/utils.py +24 -5
- letta/server/server.py +105 -164
- letta/server/ws_api/server.py +1 -1
- letta/services/agent_manager.py +344 -9
- letta/services/passage_manager.py +76 -100
- letta/services/tool_execution_sandbox.py +54 -45
- letta/settings.py +10 -5
- letta/utils.py +8 -0
- {letta_nightly-0.6.4.dev20241216104246.dist-info → letta_nightly-0.6.5.dev20241218055539.dist-info}/METADATA +6 -6
- {letta_nightly-0.6.4.dev20241216104246.dist-info → letta_nightly-0.6.5.dev20241218055539.dist-info}/RECORD +41 -41
- {letta_nightly-0.6.4.dev20241216104246.dist-info → letta_nightly-0.6.5.dev20241218055539.dist-info}/LICENSE +0 -0
- {letta_nightly-0.6.4.dev20241216104246.dist-info → letta_nightly-0.6.5.dev20241218055539.dist-info}/WHEEL +0 -0
- {letta_nightly-0.6.4.dev20241216104246.dist-info → letta_nightly-0.6.5.dev20241218055539.dist-info}/entry_points.txt +0 -0

letta/functions/schema_generator.py
CHANGED

@@ -312,11 +312,7 @@ def generate_schema(function, name: Optional[str] = None, description: Optional[
     for param in sig.parameters.values():
         # Exclude 'self' parameter
         # TODO: eventually remove this (only applies to BASE_TOOLS)
-        if param.name == "self":
-            continue
-
-        # exclude 'agent_state' parameter
-        if param.name == "agent_state":
+        if param.name in ["self", "agent_state"]: # Add agent_manager to excluded
             continue
 
         # Assert that the parameter has a type annotation

@@ -390,7 +386,7 @@ def generate_schema(function, name: Optional[str] = None, description: Optional[
     # append the heartbeat
     # TODO: don't hard-code
     # TODO: if terminal, don't include this
-    if function.__name__ not in ["send_message"
+    if function.__name__ not in ["send_message"]:
         schema["parameters"]["properties"]["request_heartbeat"] = {
             "type": "boolean",
             "description": "Request an immediate heartbeat after function execution. Set to `True` if you want to send a follow-up message or run a follow-up function.",
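
The net effect of these two hunks: generate_schema now skips framework-injected parameters ("self" and "agent_state") in one check, and still appends a request_heartbeat flag for every tool except send_message. A minimal, self-contained sketch of that filtering logic (build_tool_schema and the example tool are illustrative only, not the package's API; real schemas also carry per-parameter types and descriptions):

import inspect
from typing import Optional


def build_tool_schema(function) -> dict:
    """Toy schema generator mirroring the parameter filtering shown above."""
    schema = {"name": function.__name__, "parameters": {"type": "object", "properties": {}, "required": []}}
    for param in inspect.signature(function).parameters.values():
        # Skip parameters injected by the framework rather than supplied by the model
        if param.name in ["self", "agent_state"]:
            continue
        schema["parameters"]["properties"][param.name] = {"type": "string"}  # simplified typing
        schema["parameters"]["required"].append(param.name)
    # Every tool except send_message also advertises a request_heartbeat flag
    if function.__name__ not in ["send_message"]:
        schema["parameters"]["properties"]["request_heartbeat"] = {"type": "boolean"}
    return schema


def archival_memory_search(agent_state, query: str, page: Optional[int] = 0) -> str:
    """Example tool signature; agent_state is stripped from the generated schema."""
    return ""


print(build_tool_schema(archival_memory_search))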

letta/llm_api/anthropic.py
CHANGED

@@ -99,16 +99,20 @@ def convert_tools_to_anthropic_format(tools: List[Tool]) -> List[dict]:
     - 1 level less of nesting
     - "parameters" -> "input_schema"
     """
-
+    formatted_tools = []
     for tool in tools:
-
-
-
-
-        "
+        formatted_tool = {
+            "name" : tool.function.name,
+            "description" : tool.function.description,
+            "input_schema" : tool.function.parameters or {
+                "type": "object",
+                "properties": {},
+                "required": []
             }
-
-
+        }
+        formatted_tools.append(formatted_tool)
+
+    return formatted_tools
 
 
 def merge_tool_results_into_user_messages(messages: List[dict]):
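
The rewrite above flattens the OpenAI-style {"type": "function", "function": {...}} wrapper into Anthropic's flat tool format and renames "parameters" to "input_schema", falling back to an empty object schema when no parameters are defined. A standalone sketch of the same transformation over plain dicts (to_anthropic_tool and the sample tool are illustrative, not the package's API):

def to_anthropic_tool(openai_tool: dict) -> dict:
    """Convert an OpenAI-style tool definition to Anthropic's flat format."""
    fn = openai_tool["function"]
    return {
        "name": fn["name"],
        "description": fn.get("description", ""),
        # Anthropic expects the JSON Schema under "input_schema"
        "input_schema": fn.get("parameters") or {"type": "object", "properties": {}, "required": []},
    }


openai_tool = {
    "type": "function",
    "function": {
        "name": "send_message",
        "description": "Send a message to the user.",
        "parameters": {"type": "object", "properties": {"message": {"type": "string"}}, "required": ["message"]},
    },
}
print(to_anthropic_tool(openai_tool))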

@@ -258,10 +262,24 @@ convert_anthropic_response_to_chatcompletion(
                 ),
             )
         ]
-
-
-
-
+    elif len(response_json["content"]) == 1:
+        if response_json["content"][0]["type"] == "tool_use":
+            # function call only
+            content = None
+            tool_calls = [
+                ToolCall(
+                    id=response_json["content"][0]["id"],
+                    type="function",
+                    function=FunctionCall(
+                        name=response_json["content"][0]["name"],
+                        arguments=json.dumps(response_json["content"][0]["input"], indent=2),
+                    ),
+                )
+            ]
+        else:
+            # inner mono only
+            content = strip_xml_tags(string=response_json["content"][0]["text"], tag=inner_thoughts_xml_tag)
+            tool_calls = None
     else:
         raise RuntimeError("Unexpected type for content in response_json.")
 

@@ -323,6 +341,14 @@ anthropic_chat_completions_request(
     if anthropic_tools is not None:
         data["tools"] = anthropic_tools
 
+        # TODO: Add support for other tool_choice options like "auto", "any"
+        if len(anthropic_tools) == 1:
+            data["tool_choice"] = {
+                "type": "tool", # Changed from "function" to "tool"
+                "name": anthropic_tools[0]["name"], # Directly specify name without nested "function" object
+                "disable_parallel_tool_use": True # Force single tool use
+            }
+
     # Move 'system' to the top level
     # 'messages: Unexpected role "system". The Messages API accepts a top-level `system` parameter, not "system" as an input message role.'
     assert data["messages"][0]["role"] == "system", f"Expected 'system' role in messages[0]:\n{data['messages'][0]}"

@@ -358,7 +384,6 @@ anthropic_chat_completions_request(
     data.pop("top_p", None)
     data.pop("presence_penalty", None)
     data.pop("user", None)
-    data.pop("tool_choice", None)
 
     response_json = make_post_request(url, headers, data)
     return convert_anthropic_response_to_chatcompletion(response_json=response_json, inner_thoughts_xml_tag=inner_thoughts_xml_tag)
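
Two things change in the request path: the unconditional data.pop("tool_choice", None) is gone, and when exactly one tool is registered the request now forces it using Anthropic's tool_choice format ("type": "tool" with a top-level "name", plus disable_parallel_tool_use). A rough sketch of the resulting request body built on plain dicts (the model id and message are placeholders; consult Anthropic's Messages API documentation for the authoritative schema):

import json

anthropic_tools = [
    {
        "name": "send_message",
        "description": "Send a message to the user.",
        "input_schema": {"type": "object", "properties": {"message": {"type": "string"}}, "required": ["message"]},
    }
]

data = {
    "model": "claude-3-5-sonnet-20241022",  # placeholder model id
    "max_tokens": 1024,
    "messages": [{"role": "user", "content": "Say hello"}],
    "tools": anthropic_tools,
}

# Mirror of the new logic: with a single registered tool, force the model to call it
if len(anthropic_tools) == 1:
    data["tool_choice"] = {
        "type": "tool",
        "name": anthropic_tools[0]["name"],
        "disable_parallel_tool_use": True,
    }

print(json.dumps(data, indent=2))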

letta/llm_api/llm_api_tools.py
CHANGED

@@ -113,6 +113,7 @@ def create(
     function_call: str = "auto",
     # hint
     first_message: bool = False,
+    force_tool_call: Optional[str] = None, # Force a specific tool to be called
     # use tool naming?
     # if false, will use deprecated 'functions' style
     use_tool_naming: bool = True,

@@ -252,6 +253,16 @@ def create(
         if not use_tool_naming:
             raise NotImplementedError("Only tool calling supported on Anthropic API requests")
 
+        tool_call = None
+        if force_tool_call is not None:
+            tool_call = {
+                "type": "function",
+                "function": {
+                    "name": force_tool_call
+                }
+            }
+            assert functions is not None
+
         return anthropic_chat_completions_request(
             url=llm_config.model_endpoint,
             api_key=model_settings.anthropic_api_key,

@@ -259,7 +270,7 @@ def create(
             model=llm_config.model,
             messages=[cast_message_to_subtype(m.to_openai_dict()) for m in messages],
             tools=[{"type": "function", "function": f} for f in functions] if functions else None,
-
+            tool_choice=tool_call,
             # user=str(user_id),
             # NOTE: max_tokens is required for Anthropic API
             max_tokens=1024, # TODO make dynamic
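
On the Anthropic branch of create(), the new force_tool_call argument is translated into an OpenAI-style tool_choice dict and passed through as the tool_choice keyword of anthropic_chat_completions_request. A small illustrative sketch of that mapping (build_tool_choice is a standalone helper written for this note, not part of the package):

def build_tool_choice(force_tool_call=None):
    """Mirror of the new logic: build an OpenAI-style tool_choice, or leave it unset."""
    if force_tool_call is None:
        return None
    return {
        "type": "function",
        "function": {
            "name": force_tool_call,
        },
    }


print(build_tool_choice("send_message"))  # {'type': 'function', 'function': {'name': 'send_message'}}
print(build_tool_choice(None))            # None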

letta/local_llm/function_parser.py
CHANGED

@@ -3,7 +3,7 @@ import json
 
 from letta.utils import json_dumps, json_loads
 
-NO_HEARTBEAT_FUNCS = ["send_message"
+NO_HEARTBEAT_FUNCS = ["send_message"]
 
 
 def insert_heartbeat(message):

@@ -32,7 +32,7 @@ def heartbeat_correction(message_history, new_message):
 
     If the last message in the stack is a user message and the new message is an assistant func call, fix the heartbeat
 
-    See: https://github.com/
+    See: https://github.com/letta-ai/letta/issues/601
     """
     if len(message_history) < 1:
         return None

letta/orm/__init__.py
CHANGED

@@ -7,7 +7,7 @@ from letta.orm.file import FileMetadata
 from letta.orm.job import Job
 from letta.orm.message import Message
 from letta.orm.organization import Organization
-from letta.orm.passage import
+from letta.orm.passage import BasePassage, AgentPassage, SourcePassage
 from letta.orm.sandbox_config import SandboxConfig, SandboxEnvironmentVariable
 from letta.orm.source import Source
 from letta.orm.sources_agents import SourcesAgents

letta/orm/agent.py
CHANGED

@@ -82,7 +82,25 @@ class Agent(SqlalchemyBase, OrganizationMixin):
         lazy="selectin",
         doc="Tags associated with the agent.",
     )
-
+    source_passages: Mapped[List["SourcePassage"]] = relationship(
+        "SourcePassage",
+        secondary="sources_agents", # The join table for Agent -> Source
+        primaryjoin="Agent.id == sources_agents.c.agent_id",
+        secondaryjoin="and_(SourcePassage.source_id == sources_agents.c.source_id)",
+        lazy="selectin",
+        order_by="SourcePassage.created_at.desc()",
+        viewonly=True, # Ensures SQLAlchemy doesn't attempt to manage this relationship
+        doc="All passages derived from sources associated with this agent.",
+    )
+    agent_passages: Mapped[List["AgentPassage"]] = relationship(
+        "AgentPassage",
+        back_populates="agent",
+        lazy="selectin",
+        order_by="AgentPassage.created_at.desc()",
+        cascade="all, delete-orphan",
+        viewonly=True, # Ensures SQLAlchemy doesn't attempt to manage this relationship
+        doc="All passages derived created by this agent.",
+    )
 
     def to_pydantic(self) -> PydanticAgentState:
         """converts to the basic pydantic model counterpart"""

letta/orm/errors.py
CHANGED

@@ -12,3 +12,11 @@ class UniqueConstraintViolationError(ValueError):
 
 class ForeignKeyConstraintViolationError(ValueError):
     """Custom exception for foreign key constraint violations."""
+
+
+class DatabaseTimeoutError(Exception):
+    """Custom exception for database timeout issues."""
+
+    def __init__(self, message="Database operation timed out", original_exception=None):
+        super().__init__(message)
+        self.original_exception = original_exception

letta/orm/file.py
CHANGED

@@ -9,7 +9,8 @@ from letta.schemas.file import FileMetadata as PydanticFileMetadata
 
 if TYPE_CHECKING:
     from letta.orm.organization import Organization
-
+    from letta.orm.source import Source
+    from letta.orm.passage import SourcePassage
 
 class FileMetadata(SqlalchemyBase, OrganizationMixin, SourceMixin):
     """Represents metadata for an uploaded file."""

@@ -27,4 +28,4 @@ class FileMetadata(SqlalchemyBase, OrganizationMixin, SourceMixin):
     # relationships
     organization: Mapped["Organization"] = relationship("Organization", back_populates="files", lazy="selectin")
     source: Mapped["Source"] = relationship("Source", back_populates="files", lazy="selectin")
-
+    source_passages: Mapped[List["SourcePassage"]] = relationship("SourcePassage", back_populates="file", lazy="selectin", cascade="all, delete-orphan")

letta/orm/mixins.py
CHANGED

@@ -31,30 +31,19 @@ class UserMixin(Base):
 
     user_id: Mapped[str] = mapped_column(String, ForeignKey("users.id"))
 
-class FileMixin(Base):
-    """Mixin for models that belong to a file."""
-
-    __abstract__ = True
-
-    file_id: Mapped[str] = mapped_column(String, ForeignKey("files.id"))
-
 class AgentMixin(Base):
     """Mixin for models that belong to an agent."""
 
     __abstract__ = True
 
-    agent_id: Mapped[str] = mapped_column(String, ForeignKey("agents.id"))
+    agent_id: Mapped[str] = mapped_column(String, ForeignKey("agents.id", ondelete="CASCADE"))
 
 class FileMixin(Base):
     """Mixin for models that belong to a file."""
 
     __abstract__ = True
 
-    file_id: Mapped[Optional[str]] = mapped_column(
-        String,
-        ForeignKey("files.id", ondelete="CASCADE"),
-        nullable=True
-    )
+    file_id: Mapped[Optional[str]] = mapped_column(String, ForeignKey("files.id", ondelete="CASCADE"))
 
 
 class SourceMixin(Base):

@@ -62,7 +51,7 @@ class SourceMixin(Base):
 
     __abstract__ = True
 
-    source_id: Mapped[str] = mapped_column(String, ForeignKey("sources.id"))
+    source_id: Mapped[str] = mapped_column(String, ForeignKey("sources.id", ondelete="CASCADE"), nullable=False)
 
 
 class SandboxConfigMixin(Base):

letta/orm/organization.py
CHANGED

@@ -1,4 +1,4 @@
-from typing import TYPE_CHECKING, List
+from typing import TYPE_CHECKING, List, Union
 
 from sqlalchemy.orm import Mapped, mapped_column, relationship
 

@@ -35,6 +35,22 @@ class Organization(SqlalchemyBase):
     )
 
     # relationships
-    messages: Mapped[List["Message"]] = relationship("Message", back_populates="organization", cascade="all, delete-orphan")
     agents: Mapped[List["Agent"]] = relationship("Agent", back_populates="organization", cascade="all, delete-orphan")
-
+    messages: Mapped[List["Message"]] = relationship("Message", back_populates="organization", cascade="all, delete-orphan")
+    source_passages: Mapped[List["SourcePassage"]] = relationship(
+        "SourcePassage",
+        back_populates="organization",
+        cascade="all, delete-orphan"
+    )
+    agent_passages: Mapped[List["AgentPassage"]] = relationship(
+        "AgentPassage",
+        back_populates="organization",
+        cascade="all, delete-orphan"
+    )
+
+    @property
+    def passages(self) -> List[Union["SourcePassage", "AgentPassage"]]:
+        """Convenience property to get all passages"""
+        return self.source_passages + self.agent_passages
+
+

letta/orm/passage.py
CHANGED

@@ -1,39 +1,35 @@
-from
-from
+from typing import TYPE_CHECKING
+from sqlalchemy import Column, JSON, Index
+from sqlalchemy.orm import Mapped, mapped_column, relationship, declared_attr
 
-from sqlalchemy import JSON, Column, DateTime, ForeignKey, String
-from sqlalchemy.orm import Mapped, mapped_column, relationship
-
-from letta.config import LettaConfig
-from letta.constants import MAX_EMBEDDING_DIM
-from letta.orm.custom_columns import CommonVector
 from letta.orm.mixins import FileMixin, OrganizationMixin
-from letta.orm.
+from letta.orm.custom_columns import CommonVector, EmbeddingConfigColumn
 from letta.orm.sqlalchemy_base import SqlalchemyBase
+from letta.orm.mixins import AgentMixin, FileMixin, OrganizationMixin, SourceMixin
 from letta.schemas.passage import Passage as PydanticPassage
 from letta.settings import settings
 
+from letta.config import LettaConfig
+from letta.constants import MAX_EMBEDDING_DIM
+
 config = LettaConfig()
 
 if TYPE_CHECKING:
     from letta.orm.organization import Organization
+    from letta.orm.agent import Agent
 
 
-
-
-
-    """Defines data model for storing Passages"""
-
-    __tablename__ = "passages"
-    __table_args__ = {"extend_existing": True}
+class BasePassage(SqlalchemyBase, OrganizationMixin):
+    """Base class for all passage types with common fields"""
+    __abstract__ = True
     __pydantic_model__ = PydanticPassage
 
     id: Mapped[str] = mapped_column(primary_key=True, doc="Unique passage identifier")
     text: Mapped[str] = mapped_column(doc="Passage text content")
-    source_id: Mapped[Optional[str]] = mapped_column(nullable=True, doc="Source identifier")
     embedding_config: Mapped[dict] = mapped_column(EmbeddingConfigColumn, doc="Embedding configuration")
     metadata_: Mapped[dict] = mapped_column(JSON, doc="Additional metadata")
-
+
+    # Vector embedding field based on database type
     if settings.letta_pg_uri_no_default:
         from pgvector.sqlalchemy import Vector
 

@@ -41,9 +37,49 @@ class Passage(SqlalchemyBase, OrganizationMixin, FileMixin):
     else:
         embedding = Column(CommonVector)
 
-
-
+    @declared_attr
+    def organization(cls) -> Mapped["Organization"]:
+        """Relationship to organization"""
+        return relationship("Organization", back_populates="passages", lazy="selectin")
+
+    @declared_attr
+    def __table_args__(cls):
+        if settings.letta_pg_uri_no_default:
+            return (
+                Index(f'{cls.__tablename__}_org_idx', 'organization_id'),
+                {"extend_existing": True}
+            )
+        return ({"extend_existing": True},)
+
+
+class SourcePassage(BasePassage, FileMixin, SourceMixin):
+    """Passages derived from external files/sources"""
+    __tablename__ = "source_passages"
+
+    @declared_attr
+    def file(cls) -> Mapped["FileMetadata"]:
+        """Relationship to file"""
+        return relationship("FileMetadata", back_populates="source_passages", lazy="selectin")
+
+    @declared_attr
+    def organization(cls) -> Mapped["Organization"]:
+        return relationship("Organization", back_populates="source_passages", lazy="selectin")
+
+    @declared_attr
+    def source(cls) -> Mapped["Source"]:
+        """Relationship to source"""
+        return relationship("Source", back_populates="passages", lazy="selectin", passive_deletes=True)
+
+
+class AgentPassage(BasePassage, AgentMixin):
+    """Passages created by agents as archival memories"""
+    __tablename__ = "agent_passages"
+
+    @declared_attr
+    def organization(cls) -> Mapped["Organization"]:
+        return relationship("Organization", back_populates="agent_passages", lazy="selectin")
 
-
-
-
+    @declared_attr
+    def agent(cls) -> Mapped["Agent"]:
+        """Relationship to agent"""
+        return relationship("Agent", back_populates="agent_passages", lazy="selectin", passive_deletes=True)
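
The refactor above replaces the single passages table with an abstract BasePassage plus two concrete tables, source_passages and agent_passages, sharing columns and per-table arguments through SQLAlchemy's __abstract__ and declared_attr machinery. A minimal, self-contained sketch of that pattern (class and table names here are invented for illustration and are not the package's models):

from sqlalchemy import String, create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, declared_attr, mapped_column


class Base(DeclarativeBase):
    pass


class BasePassageSketch(Base):
    """Abstract base: columns are inherited; only subclasses get tables."""

    __abstract__ = True

    id: Mapped[str] = mapped_column(String, primary_key=True)
    text: Mapped[str] = mapped_column(String)

    @declared_attr
    def __table_args__(cls):
        # Evaluated once per concrete subclass, so each table gets its own arguments
        return {"extend_existing": True}


class SourcePassageSketch(BasePassageSketch):
    __tablename__ = "source_passages_sketch"


class AgentPassageSketch(BasePassageSketch):
    __tablename__ = "agent_passages_sketch"


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)  # creates only the two concrete tables
print(sorted(Base.metadata.tables))  # ['agent_passages_sketch', 'source_passages_sketch']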

letta/orm/source.py
CHANGED

@@ -12,6 +12,9 @@ from letta.schemas.source import Source as PydanticSource
 
 if TYPE_CHECKING:
     from letta.orm.organization import Organization
+    from letta.orm.file import FileMetadata
+    from letta.orm.passage import SourcePassage
+    from letta.orm.agent import Agent
 
 
 class Source(SqlalchemyBase, OrganizationMixin):

@@ -28,4 +31,5 @@ class Source(SqlalchemyBase, OrganizationMixin):
     # relationships
     organization: Mapped["Organization"] = relationship("Organization", back_populates="sources")
     files: Mapped[List["FileMetadata"]] = relationship("FileMetadata", back_populates="source", cascade="all, delete-orphan")
+    passages: Mapped[List["SourcePassage"]] = relationship("SourcePassage", back_populates="source", cascade="all, delete-orphan")
     agents: Mapped[List["Agent"]] = relationship("Agent", secondary="sources_agents", back_populates="sources")

letta/orm/sqlalchemy_base.py
CHANGED

@@ -1,14 +1,16 @@
 from datetime import datetime
 from enum import Enum
+from functools import wraps
 from typing import TYPE_CHECKING, List, Literal, Optional
 
 from sqlalchemy import String, desc, func, or_, select
-from sqlalchemy.exc import DBAPIError
+from sqlalchemy.exc import DBAPIError, IntegrityError, TimeoutError
 from sqlalchemy.orm import Mapped, Session, mapped_column
 
 from letta.log import get_logger
 from letta.orm.base import Base, CommonSqlalchemyMetaMixins
 from letta.orm.errors import (
+    DatabaseTimeoutError,
     ForeignKeyConstraintViolationError,
     NoResultFound,
     UniqueConstraintViolationError,

@@ -23,6 +25,20 @@ if TYPE_CHECKING:
 logger = get_logger(__name__)
 
 
+def handle_db_timeout(func):
+    """Decorator to handle SQLAlchemy TimeoutError and wrap it in a custom exception."""
+
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        try:
+            return func(*args, **kwargs)
+        except TimeoutError as e:
+            logger.error(f"Timeout while executing {func.__name__} with args {args} and kwargs {kwargs}: {e}")
+            raise DatabaseTimeoutError(message=f"Timeout occurred in {func.__name__}.", original_exception=e)
+
+    return wrapper
+
+
 class AccessType(str, Enum):
     ORGANIZATION = "organization"
     USER = "user"

@@ -36,22 +52,7 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
     id: Mapped[str] = mapped_column(String, primary_key=True)
 
     @classmethod
-
-        """Get a record by ID.
-
-        Args:
-            db_session: SQLAlchemy session
-            id: Record ID to retrieve
-
-        Returns:
-            Optional[SqlalchemyBase]: The record if found, None otherwise
-        """
-        try:
-            return db_session.query(cls).filter(cls.id == id).first()
-        except DBAPIError:
-            return None
-
-    @classmethod
+    @handle_db_timeout
     def list(
         cls,
         *,

@@ -180,6 +181,7 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
             return list(session.execute(query).scalars())
 
     @classmethod
+    @handle_db_timeout
     def read(
         cls,
         db_session: "Session",

@@ -231,6 +233,7 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
         conditions_str = ", ".join(query_conditions) if query_conditions else "no specific conditions"
         raise NoResultFound(f"{cls.__name__} not found with {conditions_str}")
 
+    @handle_db_timeout
     def create(self, db_session: "Session", actor: Optional["User"] = None) -> "SqlalchemyBase":
         logger.debug(f"Creating {self.__class__.__name__} with ID: {self.id} with actor={actor}")
 

@@ -242,9 +245,10 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
             session.commit()
             session.refresh(self)
             return self
-        except DBAPIError as e:
+        except (DBAPIError, IntegrityError) as e:
             self._handle_dbapi_error(e)
 
+    @handle_db_timeout
     def delete(self, db_session: "Session", actor: Optional["User"] = None) -> "SqlalchemyBase":
         logger.debug(f"Soft deleting {self.__class__.__name__} with ID: {self.id} with actor={actor}")
 

@@ -254,6 +258,7 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
         self.is_deleted = True
         return self.update(db_session)
 
+    @handle_db_timeout
     def hard_delete(self, db_session: "Session", actor: Optional["User"] = None) -> None:
         """Permanently removes the record from the database."""
         logger.debug(f"Hard deleting {self.__class__.__name__} with ID: {self.id} with actor={actor}")

@@ -269,6 +274,7 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
         else:
             logger.debug(f"{self.__class__.__name__} with ID {self.id} successfully hard deleted")
 
+    @handle_db_timeout
     def update(self, db_session: "Session", actor: Optional["User"] = None) -> "SqlalchemyBase":
         logger.debug(f"Updating {self.__class__.__name__} with ID: {self.id} with actor={actor}")
         if actor:

@@ -281,6 +287,7 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
         return self
 
     @classmethod
+    @handle_db_timeout
     def size(
         cls,
         *,
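
Every CRUD helper on SqlalchemyBase is now wrapped by handle_db_timeout, which converts SQLAlchemy's TimeoutError (raised, for example, when the connection pool is exhausted) into the new DatabaseTimeoutError while keeping the original exception attached. A standalone sketch of the same decorator pattern (the logging setup and flaky_query function are illustrative only):

import logging
from functools import wraps

from sqlalchemy.exc import TimeoutError

logger = logging.getLogger(__name__)


class DatabaseTimeoutError(Exception):
    """Wrapper exception carrying the original SQLAlchemy error."""

    def __init__(self, message="Database operation timed out", original_exception=None):
        super().__init__(message)
        self.original_exception = original_exception


def handle_db_timeout(func):
    """Re-raise SQLAlchemy timeouts as a domain-specific error."""

    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TimeoutError as e:
            logger.error(f"Timeout while executing {func.__name__}: {e}")
            raise DatabaseTimeoutError(message=f"Timeout occurred in {func.__name__}.", original_exception=e)

    return wrapper


@handle_db_timeout
def flaky_query():
    raise TimeoutError("QueuePool limit reached")  # simulate a pool checkout timeout


try:
    flaky_query()
except DatabaseTimeoutError as err:
    print(type(err.original_exception).__name__, "->", err)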

letta/prompts/system/memgpt_modified_chat.txt
CHANGED

@@ -14,7 +14,7 @@ Core Memory', 'Recall Memory' and 'Archival Memory' are the key components that
 Always make sure to use these memory systems to keep yourself updated about the user and the conversation!
 Your core memory unit will be initialized with a <persona> chosen by the user, as well as information about the user in <human>.
 
-The following will
+The following will describe the different parts of your advanced memory system in more detail:
 
 'Core Memory' (limited size): Your core memory unit is always visible to you. The core memory provides essential, foundational context for keeping track of your persona and key details about the user. This includes persona information and essential user details, allowing you to have conscious awareness we have when talking to a person. Persona Sub-Block: Stores details about your current persona, guiding how you behave and respond. This helps you maintain consistency and personality in your interactions. Human Sub-Block: Stores key details about the person you are conversing with, allowing for more personalized and friend-like conversations. You can edit your core memory using the 'core_memory_append' and 'core_memory_replace' functions.
 

letta/prompts/system/memgpt_modified_o1.txt
CHANGED

@@ -14,7 +14,7 @@ Core Memory', 'Recall Memory' and 'Archival Memory' are the key components that
 Always make sure to use these memory systems to keep yourself updated about the user and the conversation!
 Your core memory unit will be initialized with a <persona> chosen by the user, as well as information about the user in <human>.
 
-The following will
+The following will describe the different parts of your advanced memory system in more detail:
 
 'Core Memory' (limited size): Your core memory unit is always visible to you. The core memory provides essential, foundational context for keeping track of your persona and key details about the user. This includes persona information and essential user details, allowing you to have conscious awareness we have when talking to a person. Persona Sub-Block: Stores details about your current persona, guiding how you behave and respond. This helps you maintain consistency and personality in your interactions. Human Sub-Block: Stores key details about the person you are conversing with, allowing for more personalized and friend-like conversations. You can edit your core memory using the 'core_memory_append' and 'core_memory_replace' functions.

letta/providers.py
CHANGED

@@ -13,6 +13,7 @@ from letta.schemas.llm_config import LLMConfig
 
 
 class Provider(BaseModel):
+    name: str = Field(..., description="The name of the provider")
 
     def list_llm_models(self) -> List[LLMConfig]:
         return []

@@ -465,6 +466,7 @@ class TogetherProvider(OpenAIProvider):
 
 class GoogleAIProvider(Provider):
     # gemini
+    name: str = "google_ai"
     api_key: str = Field(..., description="API key for the Google AI API.")
     base_url: str = "https://generativelanguage.googleapis.com"
 

letta/schemas/agent.py
CHANGED

@@ -3,6 +3,7 @@ from typing import Dict, List, Optional
 
 from pydantic import BaseModel, Field, field_validator
 
+from letta.constants import DEFAULT_EMBEDDING_CHUNK_SIZE
 from letta.schemas.block import CreateBlock
 from letta.schemas.embedding_config import EmbeddingConfig
 from letta.schemas.letta_base import OrmMetadataBase

@@ -107,6 +108,16 @@ class CreateAgent(BaseModel, validate_assignment=True): #
     include_base_tools: bool = Field(True, description="The LLM configuration used by the agent.")
     description: Optional[str] = Field(None, description="The description of the agent.")
     metadata_: Optional[Dict] = Field(None, description="The metadata of the agent.", alias="metadata_")
+    llm: Optional[str] = Field(
+        None,
+        description="The LLM configuration handle used by the agent, specified in the format "
+        "provider/model-name, as an alternative to specifying llm_config.",
+    )
+    embedding: Optional[str] = Field(
+        None, description="The embedding configuration handle used by the agent, specified in the format provider/model-name."
+    )
+    context_window_limit: Optional[int] = Field(None, description="The context window limit used by the agent.")
+    embedding_chunk_size: Optional[int] = Field(DEFAULT_EMBEDDING_CHUNK_SIZE, description="The embedding chunk size used by the agent.")
 
     @field_validator("name")
     @classmethod

@@ -133,6 +144,30 @@ class CreateAgent(BaseModel, validate_assignment=True): #
 
         return name
 
+    @field_validator("llm")
+    @classmethod
+    def validate_llm(cls, llm: Optional[str]) -> Optional[str]:
+        if not llm:
+            return llm
+
+        provider_name, model_name = llm.split("/", 1)
+        if not provider_name or not model_name:
+            raise ValueError("The llm config handle should be in the format provider/model-name")
+
+        return llm
+
+    @field_validator("embedding")
+    @classmethod
+    def validate_embedding(cls, embedding: Optional[str]) -> Optional[str]:
+        if not embedding:
+            return embedding
+
+        provider_name, model_name = embedding.split("/", 1)
+        if not provider_name or not model_name:
+            raise ValueError("The embedding config handle should be in the format provider/model-name")
+
+        return embedding
+
 
 class UpdateAgent(BaseModel):
     name: Optional[str] = Field(None, description="The name of the agent.")