letta-nightly 0.5.4.dev20241126104249__py3-none-any.whl → 0.5.4.dev20241128000451__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of letta-nightly might be problematic.

Files changed (46)
  1. letta/__init__.py +1 -1
  2. letta/agent.py +102 -140
  3. letta/agent_store/chroma.py +2 -0
  4. letta/cli/cli.py +3 -5
  5. letta/client/client.py +360 -117
  6. letta/config.py +2 -2
  7. letta/constants.py +5 -0
  8. letta/errors.py +12 -0
  9. letta/functions/function_sets/base.py +38 -1
  10. letta/functions/functions.py +4 -6
  11. letta/functions/schema_generator.py +6 -5
  12. letta/helpers/tool_rule_solver.py +6 -5
  13. letta/main.py +1 -1
  14. letta/metadata.py +45 -42
  15. letta/o1_agent.py +1 -4
  16. letta/orm/block.py +2 -1
  17. letta/orm/blocks_agents.py +4 -1
  18. letta/orm/sqlalchemy_base.py +13 -0
  19. letta/persistence_manager.py +1 -0
  20. letta/schemas/agent.py +57 -52
  21. letta/schemas/block.py +70 -26
  22. letta/schemas/enums.py +14 -0
  23. letta/schemas/letta_base.py +1 -1
  24. letta/schemas/letta_request.py +11 -23
  25. letta/schemas/letta_response.py +1 -2
  26. letta/schemas/memory.py +31 -100
  27. letta/schemas/message.py +3 -3
  28. letta/schemas/tool_rule.py +13 -5
  29. letta/server/rest_api/interface.py +12 -19
  30. letta/server/rest_api/routers/openai/assistants/threads.py +2 -3
  31. letta/server/rest_api/routers/openai/chat_completions/chat_completions.py +0 -2
  32. letta/server/rest_api/routers/v1/agents.py +100 -94
  33. letta/server/rest_api/routers/v1/blocks.py +50 -5
  34. letta/server/rest_api/routers/v1/tools.py +14 -3
  35. letta/server/server.py +246 -460
  36. letta/server/static_files/assets/index-9fa459a2.js +1 -1
  37. letta/services/block_manager.py +23 -4
  38. letta/services/blocks_agents_manager.py +23 -1
  39. letta/services/per_agent_lock_manager.py +18 -0
  40. letta/services/tool_execution_sandbox.py +1 -1
  41. letta/services/tool_manager.py +2 -1
  42. {letta_nightly-0.5.4.dev20241126104249.dist-info → letta_nightly-0.5.4.dev20241128000451.dist-info}/METADATA +1 -1
  43. {letta_nightly-0.5.4.dev20241126104249.dist-info → letta_nightly-0.5.4.dev20241128000451.dist-info}/RECORD +46 -45
  44. {letta_nightly-0.5.4.dev20241126104249.dist-info → letta_nightly-0.5.4.dev20241128000451.dist-info}/LICENSE +0 -0
  45. {letta_nightly-0.5.4.dev20241126104249.dist-info → letta_nightly-0.5.4.dev20241128000451.dist-info}/WHEEL +0 -0
  46. {letta_nightly-0.5.4.dev20241126104249.dist-info → letta_nightly-0.5.4.dev20241128000451.dist-info}/entry_points.txt +0 -0
letta/config.py CHANGED
@@ -16,7 +16,7 @@ from letta.constants import (
      LETTA_DIR,
  )
  from letta.log import get_logger
- from letta.schemas.agent import AgentState
+ from letta.schemas.agent import PersistedAgentState
  from letta.schemas.embedding_config import EmbeddingConfig
  from letta.schemas.llm_config import LLMConfig
 
@@ -434,7 +434,7 @@ class AgentConfig:
          json.dump(vars(self), f, indent=4)
 
      def to_agent_state(self):
-         return AgentState(
+         return PersistedAgentState(
              name=self.name,
              preset=self.preset,
              persona=self.persona,
letta/constants.py CHANGED
@@ -38,6 +38,8 @@ DEFAULT_PRESET = "memgpt_chat"
 
  # Base tools that cannot be edited, as they access agent state directly
  BASE_TOOLS = ["send_message", "conversation_search", "conversation_search_date", "archival_memory_insert", "archival_memory_search"]
+ # Base memory tools CAN be edited, and are added by default by the server
+ BASE_MEMORY_TOOLS = ["core_memory_append", "core_memory_replace"]
 
  # The name of the tool used to send message to the user
  # May not be relevant in cases where the agent has multiple ways to message to user (send_imessage, send_discord_mesasge, ...)
@@ -127,6 +129,9 @@ MESSAGE_SUMMARY_REQUEST_ACK = "Understood, I will respond with a summary of the
  # These serve as in-context examples of how to use functions / what user messages look like
  MESSAGE_SUMMARY_TRUNC_KEEP_N_LAST = 3
 
+ # Maximum length of an error message
+ MAX_ERROR_MESSAGE_CHAR_LIMIT = 500
+
  # Default memory limits
  CORE_MEMORY_PERSONA_CHAR_LIMIT: int = 5000
  CORE_MEMORY_HUMAN_CHAR_LIMIT: int = 5000
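
As a quick illustration of the two new constants, here is a hedged sketch of how downstream code could consume them; the helper name `truncate_error_message` and the concatenation of the two tool lists are illustrative, not calls this diff itself makes:

```python
from letta.constants import BASE_MEMORY_TOOLS, BASE_TOOLS, MAX_ERROR_MESSAGE_CHAR_LIMIT

# Per the comment above, the editable memory tools are attached by the server
# in addition to the non-editable base tools.
default_tool_names = BASE_TOOLS + BASE_MEMORY_TOOLS

def truncate_error_message(msg: str) -> str:
    """Illustrative helper: cap an error string at the new character limit."""
    if len(msg) > MAX_ERROR_MESSAGE_CHAR_LIMIT:
        return msg[:MAX_ERROR_MESSAGE_CHAR_LIMIT] + "... [truncated]"
    return msg
```
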
letta/errors.py CHANGED
@@ -10,6 +10,18 @@ class LettaError(Exception):
      """Base class for all Letta related errors."""
 
 
+ class LettaToolCreateError(LettaError):
+     """Error raised when a tool cannot be created."""
+
+     default_error_message = "Error creating tool."
+
+     def __init__(self, message=None):
+         if message is None:
+             message = self.default_error_message
+         self.message = message
+         super().__init__(self.message)
+
+
  class LLMError(LettaError):
      pass
 
letta/functions/function_sets/base.py CHANGED
@@ -11,7 +11,7 @@ from letta.constants import MAX_PAUSE_HEARTBEATS
  # If the function fails, throw an exception
 
 
- def send_message(self: Agent, message: str) -> Optional[str]:
+ def send_message(self: "Agent", message: str) -> Optional[str]:
      """
      Sends a message to the human user.
 
@@ -172,3 +172,40 @@ def archival_memory_search(self: Agent, query: str, page: Optional[int] = 0) ->
      results_formatted = [f"timestamp: {d['timestamp']}, memory: {d['content']}" for d in results]
      results_str = f"{results_pref} {json_dumps(results_formatted)}"
      return results_str
+
+
+ def core_memory_append(agent_state: "AgentState", label: str, content: str) -> Optional[str]:  # type: ignore
+     """
+     Append to the contents of core memory.
+
+     Args:
+         label (str): Section of the memory to be edited (persona or human).
+         content (str): Content to write to the memory. All unicode (including emojis) are supported.
+
+     Returns:
+         Optional[str]: None is always returned as this function does not produce a response.
+     """
+     current_value = str(agent_state.memory.get_block(label).value)
+     new_value = current_value + "\n" + str(content)
+     agent_state.memory.update_block_value(label=label, value=new_value)
+     return None
+
+
+ def core_memory_replace(agent_state: "AgentState", label: str, old_content: str, new_content: str) -> Optional[str]:  # type: ignore
+     """
+     Replace the contents of core memory. To delete memories, use an empty string for new_content.
+
+     Args:
+         label (str): Section of the memory to be edited (persona or human).
+         old_content (str): String to replace. Must be an exact match.
+         new_content (str): Content to write to the memory. All unicode (including emojis) are supported.
+
+     Returns:
+         Optional[str]: None is always returned as this function does not produce a response.
+     """
+     current_value = str(agent_state.memory.get_block(label).value)
+     if old_content not in current_value:
+         raise ValueError(f"Old content '{old_content}' not found in memory block '{label}'")
+     new_value = current_value.replace(str(old_content), str(new_content))
+     agent_state.memory.update_block_value(label=label, value=new_value)
+     return None
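
To show what the two new memory tools actually do, here is a minimal sketch that drives them with stand-in objects; `FakeMemory` only mimics the two methods the tools rely on (`get_block(label).value` and `update_block_value`) and is not the package's Memory class:

```python
from types import SimpleNamespace

from letta.functions.function_sets.base import core_memory_append, core_memory_replace

class FakeMemory:
    """Stand-in for illustration: just enough surface for the two tools above."""
    def __init__(self, blocks: dict):
        self._blocks = blocks

    def get_block(self, label: str):
        return SimpleNamespace(value=self._blocks[label])

    def update_block_value(self, label: str, value: str):
        self._blocks[label] = value

agent_state = SimpleNamespace(memory=FakeMemory({"human": "Name: Sam", "persona": "Helpful assistant"}))

core_memory_append(agent_state, label="human", content="Prefers short answers")
core_memory_replace(agent_state, label="human", old_content="Sam", new_content="Samantha")

print(agent_state.memory.get_block("human").value)
# Name: Samantha
# Prefers short answers
```
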
letta/functions/functions.py CHANGED
@@ -3,9 +3,10 @@ import inspect
  import os
  from textwrap import dedent  # remove indentation
  from types import ModuleType
- from typing import Optional, List
+ from typing import Dict, List, Optional
 
  from letta.constants import CLI_WARNING_PREFIX
+ from letta.errors import LettaToolCreateError
  from letta.functions.schema_generator import generate_schema
 
 
@@ -13,10 +14,7 @@ def derive_openai_json_schema(source_code: str, name: Optional[str] = None) -> d
      # auto-generate openai schema
      try:
          # Define a custom environment with necessary imports
-         env = {
-             "Optional": Optional,  # Add any other required imports here
-             "List": List
-         }
+         env = {"Optional": Optional, "List": List, "Dict": Dict}  # Add any other required imports here
 
          env.update(globals())
          exec(source_code, env)
@@ -29,7 +27,7 @@ def derive_openai_json_schema(source_code: str, name: Optional[str] = None) -> d
          json_schema = generate_schema(func, name=name)
          return json_schema
      except Exception as e:
-         raise RuntimeError(f"Failed to execute source code: {e}")
+         raise LettaToolCreateError(f"Failed to derive JSON schema from source code: {e}")
 
 
  def parse_source_code(func) -> str:
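
With the failure path now raising the typed `LettaToolCreateError` instead of a bare `RuntimeError`, callers can catch it specifically. A small sketch; the broken source string is only an example:

```python
from letta.errors import LettaToolCreateError
from letta.functions.functions import derive_openai_json_schema

bad_source = """
def my_tool(query: str) -> str
    return query  # missing ':' above -> SyntaxError when exec'd
"""

try:
    derive_openai_json_schema(source_code=bad_source, name="my_tool")
except LettaToolCreateError as e:
    # Previously this surfaced as RuntimeError; the message still carries the cause.
    print(f"tool creation failed: {e.message}")
```
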
letta/functions/schema_generator.py CHANGED
@@ -131,11 +131,12 @@ def generate_schema(function, name: Optional[str] = None, description: Optional[
          else:
              # Add parameter details to the schema
              param_doc = next((d for d in docstring.params if d.arg_name == param.name), None)
-             schema["parameters"]["properties"][param.name] = {
-                 # "type": "string" if param.annotation == str else str(param.annotation),
-                 "type": type_to_json_schema_type(param.annotation) if param.annotation != inspect.Parameter.empty else "string",
-                 "description": param_doc.description,
-             }
+             if param_doc:
+                 schema["parameters"]["properties"][param.name] = {
+                     # "type": "string" if param.annotation == str else str(param.annotation),
+                     "type": type_to_json_schema_type(param.annotation) if param.annotation != inspect.Parameter.empty else "string",
+                     "description": param_doc.description,
+                 }
              if param.default == inspect.Parameter.empty:
                  schema["parameters"]["required"].append(param.name)
 
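
The new `if param_doc:` guard means a parameter with no matching docstring entry is simply skipped, where previously `param_doc.description` would fail on an undocumented parameter. A hedged illustration, assuming no other part of `generate_schema` enforces full documentation:

```python
from letta.functions.schema_generator import generate_schema

def lookup_weather(city: str, units: str = "metric") -> str:
    """
    Look up the weather for a city.

    Args:
        city (str): Name of the city.

    Returns:
        str: A short weather summary.
    """
    return f"Weather in {city} ({units})"

schema = generate_schema(lookup_weather)
# "city" is documented and appears under properties; "units" has no docstring
# entry, so it is skipped instead of raising.
print(list(schema["parameters"]["properties"].keys()))
```
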
letta/helpers/tool_rule_solver.py CHANGED
@@ -2,11 +2,12 @@ from typing import Dict, List, Optional, Set
 
  from pydantic import BaseModel, Field
 
+ from letta.schemas.enums import ToolRuleType
  from letta.schemas.tool_rule import (
      BaseToolRule,
+     ChildToolRule,
      InitToolRule,
      TerminalToolRule,
-     ToolRule,
  )
 
 
@@ -21,7 +22,7 @@ class ToolRulesSolver(BaseModel):
      init_tool_rules: List[InitToolRule] = Field(
          default_factory=list, description="Initial tool rules to be used at the start of tool execution."
      )
-     tool_rules: List[ToolRule] = Field(
+     tool_rules: List[ChildToolRule] = Field(
          default_factory=list, description="Standard tool rules for controlling execution sequence and allowed transitions."
      )
      terminal_tool_rules: List[TerminalToolRule] = Field(
@@ -33,11 +34,11 @@ class ToolRulesSolver(BaseModel):
          super().__init__(**kwargs)
          # Separate the provided tool rules into init, standard, and terminal categories
          for rule in tool_rules:
-             if isinstance(rule, InitToolRule):
+             if rule.type == ToolRuleType.run_first:
                  self.init_tool_rules.append(rule)
-             elif isinstance(rule, ToolRule):
+             elif rule.type == ToolRuleType.constrain_child_tools:
                  self.tool_rules.append(rule)
-             elif isinstance(rule, TerminalToolRule):
+             elif rule.type == ToolRuleType.exit_loop:
                  self.terminal_tool_rules.append(rule)
 
          # Validate the tool rules to ensure they form a DAG
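
A short sketch of how the reworked solver buckets rules by their `type` value. The constructor keyword (`tool_rules=`) and the rule field names (`tool_name`, `children`) are inferred from the deserializer and assertions elsewhere in this diff, so treat them as assumptions:

```python
from letta.helpers.tool_rule_solver import ToolRulesSolver
from letta.schemas.tool_rule import ChildToolRule, InitToolRule, TerminalToolRule

rules = [
    InitToolRule(tool_name="core_memory_append"),                              # type == run_first
    ChildToolRule(tool_name="core_memory_append", children=["send_message"]),  # type == constrain_child_tools
    TerminalToolRule(tool_name="send_message"),                                # type == exit_loop
]

# Bucketing now keys off rule.type (a ToolRuleType enum) rather than isinstance checks.
solver = ToolRulesSolver(tool_rules=rules)
print(len(solver.init_tool_rules), len(solver.tool_rules), len(solver.terminal_tool_rules))  # 1 1 1
```
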
letta/main.py CHANGED
@@ -189,7 +189,7 @@ def run_agent_loop(
 
          elif user_input.lower() == "/memory":
              print(f"\nDumping memory contents:\n")
-             print(f"{letta_agent.memory.compile()}")
+             print(f"{letta_agent.agent_state.memory.compile()}")
              print(f"{letta_agent.persistence_manager.archival_memory.compile()}")
              print(f"{letta_agent.persistence_manager.recall_memory.compile()}")
              continue
letta/metadata.py CHANGED
@@ -3,28 +3,23 @@
  import os
  import secrets
  import warnings
- from typing import List, Optional
+ from typing import List, Optional, Union
 
  from sqlalchemy import JSON, Column, DateTime, Index, String, TypeDecorator
  from sqlalchemy.sql import func
 
  from letta.config import LettaConfig
  from letta.orm.base import Base
- from letta.schemas.agent import AgentState
+ from letta.schemas.agent import PersistedAgentState
  from letta.schemas.api_key import APIKey
  from letta.schemas.embedding_config import EmbeddingConfig
- from letta.schemas.enums import JobStatus
+ from letta.schemas.enums import JobStatus, ToolRuleType
  from letta.schemas.job import Job
  from letta.schemas.llm_config import LLMConfig
- from letta.schemas.memory import Memory
  from letta.schemas.openai.chat_completions import ToolCall, ToolCallFunction
- from letta.schemas.tool_rule import (
-     BaseToolRule,
-     InitToolRule,
-     TerminalToolRule,
-     ToolRule,
- )
+ from letta.schemas.tool_rule import ChildToolRule, InitToolRule, TerminalToolRule
  from letta.schemas.user import User
+ from letta.services.per_agent_lock_manager import PerAgentLockManager
  from letta.settings import settings
  from letta.utils import enforce_types, get_utc_time, printd
 
@@ -164,28 +159,35 @@ class ToolRulesColumn(TypeDecorator):
      def load_dialect_impl(self, dialect):
          return dialect.type_descriptor(JSON())
 
-     def process_bind_param(self, value: List[BaseToolRule], dialect):
+     def process_bind_param(self, value, dialect):
          """Convert a list of ToolRules to JSON-serializable format."""
          if value:
-             return [rule.model_dump() for rule in value]
+             data = [rule.model_dump() for rule in value]
+             for d in data:
+                 d["type"] = d["type"].value
+
+             for d in data:
+                 assert not (d["type"] == "ToolRule" and "children" not in d), "ToolRule does not have children field"
+             return data
          return value
 
-     def process_result_value(self, value, dialect) -> List[BaseToolRule]:
+     def process_result_value(self, value, dialect) -> List[Union[ChildToolRule, InitToolRule, TerminalToolRule]]:
          """Convert JSON back to a list of ToolRules."""
          if value:
              return [self.deserialize_tool_rule(rule_data) for rule_data in value]
          return value
 
      @staticmethod
-     def deserialize_tool_rule(data: dict) -> BaseToolRule:
+     def deserialize_tool_rule(data: dict) -> Union[ChildToolRule, InitToolRule, TerminalToolRule]:
          """Deserialize a dictionary to the appropriate ToolRule subclass based on the 'type'."""
-         rule_type = data.get("type")  # Remove 'type' field if it exists since it is a class var
-         if rule_type == "InitToolRule":
+         rule_type = ToolRuleType(data.get("type"))  # Remove 'type' field if it exists since it is a class var
+         if rule_type == ToolRuleType.run_first:
              return InitToolRule(**data)
-         elif rule_type == "TerminalToolRule":
+         elif rule_type == ToolRuleType.exit_loop:
              return TerminalToolRule(**data)
-         elif rule_type == "ToolRule":
-             return ToolRule(**data)
+         elif rule_type == ToolRuleType.constrain_child_tools:
+             rule = ChildToolRule(**data)
+             return rule
          else:
              raise ValueError(f"Unknown tool rule type: {rule_type}")
 
@@ -204,7 +206,6 @@ class AgentModel(Base):
 
      # state (context compilation)
      message_ids = Column(JSON)
-     memory = Column(JSON)
      system = Column(String)
 
      # configs
@@ -216,7 +217,7 @@
      metadata_ = Column(JSON)
 
      # tools
-     tools = Column(JSON)
+     tool_names = Column(JSON)
      tool_rules = Column(ToolRulesColumn)
 
      Index(__tablename__ + "_idx_user", user_id),
@@ -224,24 +225,22 @@
      def __repr__(self) -> str:
          return f"<Agent(id='{self.id}', name='{self.name}')>"
 
-     def to_record(self) -> AgentState:
-         agent_state = AgentState(
+     def to_record(self) -> PersistedAgentState:
+         agent_state = PersistedAgentState(
              id=self.id,
              user_id=self.user_id,
              name=self.name,
              created_at=self.created_at,
              description=self.description,
              message_ids=self.message_ids,
-             memory=Memory.load(self.memory),  # load dictionary
              system=self.system,
-             tools=self.tools,
+             tool_names=self.tool_names,
              tool_rules=self.tool_rules,
              agent_type=self.agent_type,
              llm_config=self.llm_config,
              embedding_config=self.embedding_config,
              metadata_=self.metadata_,
          )
-         assert isinstance(agent_state.memory, Memory), f"Memory object is not of type Memory: {type(agent_state.memory)}"
          return agent_state
 
 
@@ -346,18 +345,18 @@ class MetadataStore:
          return tokens
 
      @enforce_types
-     def create_agent(self, agent: AgentState):
+     def create_agent(self, agent: PersistedAgentState):
          # insert into agent table
          # make sure agent.name does not already exist for user user_id
          with self.session_maker() as session:
              if session.query(AgentModel).filter(AgentModel.name == agent.name).filter(AgentModel.user_id == agent.user_id).count() > 0:
                  raise ValueError(f"Agent with name {agent.name} already exists")
              fields = vars(agent)
-             fields["memory"] = agent.memory.to_dict()
-             if "_internal_memory" in fields:
-                 del fields["_internal_memory"]
-             else:
-                 warnings.warn(f"Agent {agent.id} has no _internal_memory field")
+             # fields["memory"] = agent.memory.to_dict()
+             # if "_internal_memory" in fields:
+             #     del fields["_internal_memory"]
+             # else:
+             #     warnings.warn(f"Agent {agent.id} has no _internal_memory field")
              if "tags" in fields:
                  del fields["tags"]
              else:
@@ -366,15 +365,15 @@
              session.commit()
 
      @enforce_types
-     def update_agent(self, agent: AgentState):
+     def update_agent(self, agent: PersistedAgentState):
          with self.session_maker() as session:
              fields = vars(agent)
-             if isinstance(agent.memory, Memory):  # TODO: this is nasty but this whole class will soon be removed so whatever
-                 fields["memory"] = agent.memory.to_dict()
-             if "_internal_memory" in fields:
-                 del fields["_internal_memory"]
-             else:
-                 warnings.warn(f"Agent {agent.id} has no _internal_memory field")
+             # if isinstance(agent.memory, Memory):  # TODO: this is nasty but this whole class will soon be removed so whatever
+             #     fields["memory"] = agent.memory.to_dict()
+             # if "_internal_memory" in fields:
+             #     del fields["_internal_memory"]
+             # else:
+             #     warnings.warn(f"Agent {agent.id} has no _internal_memory field")
              if "tags" in fields:
                  del fields["tags"]
              else:
@@ -383,7 +382,11 @@
              session.commit()
 
      @enforce_types
-     def delete_agent(self, agent_id: str):
+     def delete_agent(self, agent_id: str, per_agent_lock_manager: PerAgentLockManager):
+         # TODO: Remove this once Agent is on the ORM
+         # TODO: To prevent unbounded growth
+         per_agent_lock_manager.clear_lock(agent_id)
+
          with self.session_maker() as session:
 
              # delete agents
@@ -395,7 +398,7 @@
              session.commit()
 
      @enforce_types
-     def list_agents(self, user_id: str) -> List[AgentState]:
+     def list_agents(self, user_id: str) -> List[PersistedAgentState]:
          with self.session_maker() as session:
              results = session.query(AgentModel).filter(AgentModel.user_id == user_id).all()
              return [r.to_record() for r in results]
@@ -403,7 +406,7 @@
      @enforce_types
      def get_agent(
          self, agent_id: Optional[str] = None, agent_name: Optional[str] = None, user_id: Optional[str] = None
-     ) -> Optional[AgentState]:
+     ) -> Optional[PersistedAgentState]:
          with self.session_maker() as session:
              if agent_id:
                  results = session.query(AgentModel).filter(AgentModel.id == agent_id).all()
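
For context, the round trip `ToolRulesColumn` now performs: on write, each rule's enum-valued `type` is flattened to its string value; on read, `deserialize_tool_rule` maps that string back to a concrete rule class. A sketch that mirrors the serializer code above, with the same field-name caveat as earlier:

```python
from letta.metadata import ToolRulesColumn
from letta.schemas.enums import ToolRuleType
from letta.schemas.tool_rule import ChildToolRule

rule = ChildToolRule(tool_name="core_memory_append", children=["send_message"])

# What process_bind_param stores: a plain dict with the enum collapsed to a string.
stored = rule.model_dump()
stored["type"] = stored["type"].value
assert stored["type"] == ToolRuleType.constrain_child_tools.value

# What process_result_value rebuilds from that stored dict.
restored = ToolRulesColumn.deserialize_tool_rule(stored)
assert isinstance(restored, ChildToolRule)
```
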
letta/o1_agent.py CHANGED
@@ -6,7 +6,6 @@ from letta.metadata import MetadataStore
  from letta.schemas.agent import AgentState
  from letta.schemas.message import Message
  from letta.schemas.openai.chat_completion_response import UsageStatistics
- from letta.schemas.tool import Tool
  from letta.schemas.usage import LettaUsageStatistics
  from letta.schemas.user import User
 
@@ -45,13 +44,11 @@ class O1Agent(Agent):
          interface: AgentInterface,
          agent_state: AgentState,
          user: User,
-         tools: List[Tool] = [],
          max_thinking_steps: int = 10,
          first_message_verify_mono: bool = False,
      ):
-         super().__init__(interface, agent_state, tools, user)
+         super().__init__(interface, agent_state, user)
          self.max_thinking_steps = max_thinking_steps
-         self.tools = tools
          self.first_message_verify_mono = first_message_verify_mono
 
      def step(
letta/orm/block.py CHANGED
@@ -10,7 +10,7 @@ from letta.schemas.block import Block as PydanticBlock
  from letta.schemas.block import Human, Persona
 
  if TYPE_CHECKING:
-     from letta.orm.organization import Organization
+     from letta.orm import BlocksAgents, Organization
 
 
  class Block(OrganizationMixin, SqlalchemyBase):
@@ -35,6 +35,7 @@ class Block(OrganizationMixin, SqlalchemyBase):
 
      # relationships
      organization: Mapped[Optional["Organization"]] = relationship("Organization")
+     blocks_agents: Mapped[list["BlocksAgents"]] = relationship("BlocksAgents", back_populates="block", cascade="all, delete")
 
      def to_pydantic(self) -> Type:
          match self.label:
letta/orm/blocks_agents.py CHANGED
@@ -1,5 +1,5 @@
  from sqlalchemy import ForeignKey, ForeignKeyConstraint, String, UniqueConstraint
- from sqlalchemy.orm import Mapped, mapped_column
+ from sqlalchemy.orm import Mapped, mapped_column, relationship
 
  from letta.orm.sqlalchemy_base import SqlalchemyBase
  from letta.schemas.blocks_agents import BlocksAgents as PydanticBlocksAgents
@@ -27,3 +27,6 @@ class BlocksAgents(SqlalchemyBase):
      agent_id: Mapped[str] = mapped_column(String, ForeignKey("agents.id"), primary_key=True)
      block_id: Mapped[str] = mapped_column(String, primary_key=True)
      block_label: Mapped[str] = mapped_column(String, primary_key=True)
+
+     # relationships
+     block: Mapped["Block"] = relationship("Block", back_populates="blocks_agents")
letta/orm/sqlalchemy_base.py CHANGED
@@ -180,6 +180,19 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
          """Handle database errors and raise appropriate custom exceptions."""
          orig = e.orig  # Extract the original error from the DBAPIError
          error_code = None
+         error_message = str(orig) if orig else str(e)
+         logger.info(f"Handling DBAPIError: {error_message}")
+
+         # Handle SQLite-specific errors
+         if "UNIQUE constraint failed" in error_message:
+             raise UniqueConstraintViolationError(
+                 f"A unique constraint was violated for {cls.__name__}. Check your input for duplicates: {e}"
+             ) from e
+
+         if "FOREIGN KEY constraint failed" in error_message:
+             raise ForeignKeyConstraintViolationError(
+                 f"A foreign key constraint was violated for {cls.__name__}. Check your input for missing or invalid references: {e}"
+             ) from e
 
          # For psycopg2
          if hasattr(orig, "pgcode"):
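
These SQLite string matches mean constraint failures now surface as the same typed exceptions the psycopg2 branch raises. A hedged sketch of catching them; the import path `letta.orm.errors` is an assumption (the classes are referenced but not defined in this hunk):

```python
# Assumed import location for the typed exceptions referenced above.
from letta.orm.errors import (
    ForeignKeyConstraintViolationError,
    UniqueConstraintViolationError,
)

def create_once(create_fn, *args, **kwargs):
    """Illustration only: any create path routed through SqlalchemyBase's error
    handler now raises typed errors on SQLite constraint violations."""
    try:
        return create_fn(*args, **kwargs)
    except UniqueConstraintViolationError as err:
        print(f"duplicate rejected: {err}")
    except ForeignKeyConstraintViolationError as err:
        print(f"missing or invalid reference: {err}")
```
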
letta/persistence_manager.py CHANGED
@@ -121,6 +121,7 @@ class LocalStateManager(PersistenceManager):
          # self.messages = [self.messages[0]] + added_messages + self.messages[1:]
 
          # add to recall memory
+         self.recall_memory.insert_many([m for m in added_messages])
 
      def append_to_messages(self, added_messages: List[Message]):
          # first tag with timestamps