letta-nightly 0.5.4.dev20241121104201__py3-none-any.whl → 0.5.4.dev20241123104112__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.

Note: this version of letta-nightly was automatically flagged as a potentially problematic release.
Files changed (40)
  1. letta/agent.py +48 -25
  2. letta/agent_store/db.py +1 -1
  3. letta/client/client.py +361 -7
  4. letta/constants.py +5 -14
  5. letta/functions/helpers.py +5 -42
  6. letta/functions/schema_generator.py +24 -4
  7. letta/local_llm/utils.py +6 -3
  8. letta/log.py +7 -9
  9. letta/metadata.py +17 -4
  10. letta/orm/__init__.py +2 -0
  11. letta/orm/block.py +5 -2
  12. letta/orm/blocks_agents.py +29 -0
  13. letta/orm/mixins.py +8 -0
  14. letta/orm/organization.py +8 -1
  15. letta/orm/sandbox_config.py +56 -0
  16. letta/orm/sqlalchemy_base.py +9 -3
  17. letta/schemas/block.py +15 -1
  18. letta/schemas/blocks_agents.py +32 -0
  19. letta/schemas/letta_base.py +9 -0
  20. letta/schemas/memory.py +42 -8
  21. letta/schemas/sandbox_config.py +114 -0
  22. letta/schemas/tool.py +2 -45
  23. letta/server/rest_api/routers/v1/__init__.py +4 -9
  24. letta/server/rest_api/routers/v1/agents.py +85 -1
  25. letta/server/rest_api/routers/v1/sandbox_configs.py +108 -0
  26. letta/server/rest_api/routers/v1/tools.py +3 -5
  27. letta/server/rest_api/utils.py +6 -0
  28. letta/server/server.py +159 -12
  29. letta/services/block_manager.py +3 -1
  30. letta/services/blocks_agents_manager.py +84 -0
  31. letta/services/sandbox_config_manager.py +256 -0
  32. letta/services/tool_execution_sandbox.py +326 -0
  33. letta/services/tool_manager.py +10 -10
  34. letta/services/tool_sandbox_env/.gitkeep +0 -0
  35. letta/settings.py +4 -0
  36. {letta_nightly-0.5.4.dev20241121104201.dist-info → letta_nightly-0.5.4.dev20241123104112.dist-info}/METADATA +28 -27
  37. {letta_nightly-0.5.4.dev20241121104201.dist-info → letta_nightly-0.5.4.dev20241123104112.dist-info}/RECORD +40 -31
  38. {letta_nightly-0.5.4.dev20241121104201.dist-info → letta_nightly-0.5.4.dev20241123104112.dist-info}/LICENSE +0 -0
  39. {letta_nightly-0.5.4.dev20241121104201.dist-info → letta_nightly-0.5.4.dev20241123104112.dist-info}/WHEEL +0 -0
  40. {letta_nightly-0.5.4.dev20241121104201.dist-info → letta_nightly-0.5.4.dev20241123104112.dist-info}/entry_points.txt +0 -0
letta/functions/helpers.py CHANGED
@@ -13,8 +13,6 @@ def generate_composio_tool_wrapper(action: "ActionType") -> tuple[str, str]:

  wrapper_function_str = f"""
  def {func_name}(**kwargs):
- if 'self' in kwargs:
- del kwargs['self']
  from composio import Action, App, Tag
  from composio_langchain import ComposioToolSet

@@ -46,38 +44,6 @@ def generate_langchain_tool_wrapper(
  # Combine all parts into the wrapper function
  wrapper_function_str = f"""
  def {func_name}(**kwargs):
- if 'self' in kwargs:
- del kwargs['self']
- import importlib
- {import_statement}
- {extra_module_imports}
- {tool_instantiation}
- {run_call}
- """
-
- # Compile safety check
- assert_code_gen_compilable(wrapper_function_str)
-
- return func_name, wrapper_function_str
-
-
- def generate_crewai_tool_wrapper(tool: "CrewAIBaseTool", additional_imports_module_attr_map: dict[str, str] = None) -> tuple[str, str]:
- tool_name = tool.__class__.__name__
- import_statement = f"from crewai_tools import {tool_name}"
- extra_module_imports = generate_import_code(additional_imports_module_attr_map)
-
- # Safety check that user has passed in all required imports:
- assert_all_classes_are_imported(tool, additional_imports_module_attr_map)
-
- tool_instantiation = f"tool = {generate_imported_tool_instantiation_call_str(tool)}"
- run_call = f"return tool._run(**kwargs)"
- func_name = humps.decamelize(tool_name)
-
- # Combine all parts into the wrapper function
- wrapper_function_str = f"""
- def {func_name}(**kwargs):
- if 'self' in kwargs:
- del kwargs['self']
  import importlib
  {import_statement}
  {extra_module_imports}
@@ -98,9 +64,7 @@ def assert_code_gen_compilable(code_str):
  print(f"Syntax error in code: {e}")


- def assert_all_classes_are_imported(
- tool: Union["LangChainBaseTool", "CrewAIBaseTool"], additional_imports_module_attr_map: dict[str, str]
- ) -> None:
+ def assert_all_classes_are_imported(tool: Union["LangChainBaseTool"], additional_imports_module_attr_map: dict[str, str]) -> None:
  # Safety check that user has passed in all required imports:
  tool_name = tool.__class__.__name__
  current_class_imports = {tool_name}
@@ -114,7 +78,7 @@ def assert_all_classes_are_imported(
  raise RuntimeError(err_msg)


- def find_required_class_names_for_import(obj: Union["LangChainBaseTool", "CrewAIBaseTool", BaseModel]) -> list[str]:
+ def find_required_class_names_for_import(obj: Union["LangChainBaseTool", BaseModel]) -> list[str]:
  """
  Finds all the class names for required imports when instantiating the `obj`.
  NOTE: This does not return the full import path, only the class name.
@@ -202,10 +166,10 @@ def generate_imported_tool_instantiation_call_str(obj: Any) -> Optional[str]:
  else:
  # Otherwise, if it is none of the above, that usually means it is a custom Python class that is NOT a BaseModel
  # Thus, we cannot get enough information about it to stringify it
- # This may cause issues, but we are making the assumption that any of these custom Python types are handled correctly by the parent library, such as LangChain or CrewAI
+ # This may cause issues, but we are making the assumption that any of these custom Python types are handled correctly by the parent library, such as LangChain
  # An example would be that WikipediaAPIWrapper has an argument that is a wikipedia (pip install wikipedia) object
  # We cannot stringify this easily, but WikipediaAPIWrapper handles the setting of this parameter internally
- # This assumption seems fair to me, since usually they are external imports, and LangChain and CrewAI should be bundling those as module-level imports within the tool
+ # This assumption seems fair to me, since usually they are external imports, and LangChain should be bundling those as module-level imports within the tool
  # We throw a warning here anyway and provide the class name
  print(
  f"[WARNING] Skipping parsing unknown class {obj.__class__.__name__} (does not inherit from the Pydantic BaseModel and is not a basic Python type)"
@@ -219,10 +183,9 @@ def generate_imported_tool_instantiation_call_str(obj: Any) -> Optional[str]:


  def is_base_model(obj: Any):
- from crewai_tools.tools.base_tool import BaseModel as CrewAiBaseModel
  from langchain_core.pydantic_v1 import BaseModel as LangChainBaseModel

- return isinstance(obj, BaseModel) or isinstance(obj, LangChainBaseModel) or isinstance(obj, CrewAiBaseModel)
+ return isinstance(obj, BaseModel) or isinstance(obj, LangChainBaseModel)


  def generate_import_code(module_attr_map: Optional[dict]):
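Net effect of the helpers.py hunks: CrewAI support is removed wholesale, and the generated wrapper functions no longer strip a stray 'self' kwarg. As a rough sketch of the surviving generation pattern (hypothetical tool name, simplified instantiation; not the library's exact output):

    import textwrap
    import humps  # pyhumps; the real helpers use it to decamelize class names

    def generate_demo_tool_wrapper(tool_class_name: str) -> tuple[str, str]:
        # Mirrors generate_langchain_tool_wrapper's shape after this change:
        # imports + instantiation + _run call, with no 'self' handling.
        func_name = humps.decamelize(tool_class_name)  # "WikipediaQueryRun" -> "wikipedia_query_run"
        wrapper_function_str = textwrap.dedent(f"""
            def {func_name}(**kwargs):
                import importlib
                from langchain_community.tools import {tool_class_name}
                tool = {tool_class_name}()
                return tool._run(**kwargs)
            """)
        compile(wrapper_function_str, "<generated>", "exec")  # same compile safety check
        return func_name, wrapper_function_str

    name, source = generate_demo_tool_wrapper("WikipediaQueryRun")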
letta/functions/schema_generator.py CHANGED
@@ -1,5 +1,5 @@
  import inspect
- from typing import Any, Dict, Optional, Type, Union, get_args, get_origin
+ from typing import Any, Dict, List, Optional, Type, Union, get_args, get_origin

  from docstring_parser import parse
  from pydantic import BaseModel
@@ -38,15 +38,29 @@ def type_to_json_schema_type(py_type):

  # Mapping of Python types to JSON schema types
  type_map = {
+ # Basic types
  int: "integer",
  str: "string",
  bool: "boolean",
  float: "number",
- list[str]: "array",
- # Add more mappings as needed
+ # Collections
+ List[str]: "array",
+ List[int]: "array",
+ list: "array",
+ tuple: "array",
+ set: "array",
+ # Dictionaries
+ dict: "object",
+ Dict[str, Any]: "object",
+ # Special types
+ None: "null",
+ type(None): "null",
+ # Optional types
+ # Optional[str]: "string", # NOTE: caught above ^
+ Union[str, None]: "string",
  }
  if py_type not in type_map:
- raise ValueError(f"Python type {py_type} has no corresponding JSON schema type")
+ raise ValueError(f"Python type {py_type} has no corresponding JSON schema type - full map: {type_map}")

  return type_map.get(py_type, "string") # Default to "string" if type not in map

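A note on the new map keys: parameterized generics such as List[str] and Union[str, None] are hashable typing objects, so they work as plain dict keys, and Optional[str] is literally the same object as Union[str, None]. A standalone sketch of the expanded lookup (re-implemented here, not imported from letta):

    from typing import Any, Dict, List, Optional, Union

    type_map = {
        int: "integer", str: "string", bool: "boolean", float: "number",
        List[str]: "array", List[int]: "array", list: "array", tuple: "array", set: "array",
        dict: "object", Dict[str, Any]: "object",
        None: "null", type(None): "null",
        Union[str, None]: "string",
    }

    assert type_map[List[int]] == "array"
    assert type_map[Optional[str]] == "string"  # Optional[str] is Union[str, None]
    assert type_map[type(None)] == "null"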
@@ -93,9 +107,14 @@ def generate_schema(function, name: Optional[str] = None, description: Optional[

  for param in sig.parameters.values():
  # Exclude 'self' parameter
+ # TODO: eventually remove this (only applies to BASE_TOOLS)
  if param.name == "self":
  continue

+ # exclude 'agent_state' parameter
+ if param.name == "agent_state":
+ continue
+
  # Assert that the parameter has a type annotation
  if param.annotation == inspect.Parameter.empty:
  raise TypeError(f"Parameter '{param.name}' in function '{function.__name__}' lacks a type annotation")
@@ -129,6 +148,7 @@ def generate_schema(function, name: Optional[str] = None, description: Optional[

  # append the heartbeat
  # TODO: don't hard-code
+ # TODO: if terminal, don't include this
  if function.__name__ not in ["send_message", "pause_heartbeats"]:
  schema["parameters"]["properties"]["request_heartbeat"] = {
  "type": "boolean",
letta/local_llm/utils.py CHANGED
@@ -88,16 +88,19 @@ def num_tokens_from_functions(functions: List[dict], model: str = "gpt-4"):
  try:
  encoding = tiktoken.encoding_for_model(model)
  except KeyError:
- print("Warning: model not found. Using cl100k_base encoding.")
+ warnings.warn("Warning: model not found. Using cl100k_base encoding.")
  encoding = tiktoken.get_encoding("cl100k_base")

  num_tokens = 0
  for function in functions:
  function_tokens = len(encoding.encode(function["name"]))
  if function["description"]:
- function_tokens += len(encoding.encode(function["description"]))
+ if not isinstance(function["description"], str):
+ warnings.warn(f"Function {function['name']} has non-string description: {function['description']}")
+ else:
+ function_tokens += len(encoding.encode(function["description"]))
  else:
- raise ValueError(f"Function {function['name']} has no description, function: {function}")
+ warnings.warn(f"Function {function['name']} has no description, function: {function}")

  if "parameters" in function:
  parameters = function["parameters"]
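This hunk demotes two hard failures to warnings, so token counting no longer aborts on a missing or non-string description. The new control flow in isolation (assumes tiktoken is installed):

    import warnings
    import tiktoken

    def description_tokens(function: dict, encoding) -> int:
        tokens = len(encoding.encode(function["name"]))
        description = function.get("description")
        if description:
            if not isinstance(description, str):
                warnings.warn(f"Function {function['name']} has non-string description: {description!r}")
            else:
                tokens += len(encoding.encode(description))
        else:
            warnings.warn(f"Function {function['name']} has no description")  # previously a ValueError
        return tokens

    enc = tiktoken.get_encoding("cl100k_base")
    print(description_tokens({"name": "send_message"}, enc))  # warns, still returns a count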
letta/log.py CHANGED
@@ -23,12 +23,10 @@ def _setup_logfile() -> "Path":
  # TODO: production logging should be much less invasive
  DEVELOPMENT_LOGGING = {
  "version": 1,
- "disable_existing_loggers": True,
+ "disable_existing_loggers": False, # Allow capturing from all loggers
  "formatters": {
  "standard": {"format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s"},
- "no_datetime": {
- "format": "%(name)s - %(levelname)s - %(message)s",
- },
+ "no_datetime": {"format": "%(name)s - %(levelname)s - %(message)s"},
  },
  "handlers": {
  "console": {
@@ -46,14 +44,14 @@ DEVELOPMENT_LOGGING = {
  "formatter": "standard",
  },
  },
+ "root": { # Root logger handles all logs
+ "level": logging.DEBUG if settings.debug else logging.INFO,
+ "handlers": ["console", "file"],
+ },
  "loggers": {
  "Letta": {
  "level": logging.DEBUG if settings.debug else logging.INFO,
- "handlers": [
- "console",
- "file",
- ],
- "propagate": False,
+ "propagate": True, # Let logs bubble up to root
  },
  "uvicorn": {
  "level": "CRITICAL",
letta/metadata.py CHANGED
@@ -2,6 +2,7 @@

  import os
  import secrets
+ import warnings
  from typing import List, Optional

  from sqlalchemy import JSON, Column, DateTime, Index, String, TypeDecorator
@@ -353,8 +354,14 @@ class MetadataStore:
  raise ValueError(f"Agent with name {agent.name} already exists")
  fields = vars(agent)
  fields["memory"] = agent.memory.to_dict()
- del fields["_internal_memory"]
- del fields["tags"]
+ if "_internal_memory" in fields:
+ del fields["_internal_memory"]
+ else:
+ warnings.warn(f"Agent {agent.id} has no _internal_memory field")
+ if "tags" in fields:
+ del fields["tags"]
+ else:
+ warnings.warn(f"Agent {agent.id} has no tags field")
  session.add(AgentModel(**fields))
  session.commit()

@@ -364,8 +371,14 @@ class MetadataStore:
  fields = vars(agent)
  if isinstance(agent.memory, Memory): # TODO: this is nasty but this whole class will soon be removed so whatever
  fields["memory"] = agent.memory.to_dict()
- del fields["_internal_memory"]
- del fields["tags"]
+ if "_internal_memory" in fields:
+ del fields["_internal_memory"]
+ else:
+ warnings.warn(f"Agent {agent.id} has no _internal_memory field")
+ if "tags" in fields:
+ del fields["tags"]
+ else:
+ warnings.warn(f"Agent {agent.id} has no tags field")
  session.query(AgentModel).filter(AgentModel.id == agent.id).update(fields)
  session.commit()

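create_agent and update_agent now share the same defensive shape: delete the field if present, warn if absent, rather than risking a KeyError. The guarded-delete pattern in isolation:

    import warnings

    def strip_non_column_fields(fields: dict, agent_id: str) -> dict:
        # Same shape as the two MetadataStore hunks above.
        for key in ("_internal_memory", "tags"):
            if key in fields:
                del fields[key]
            else:
                warnings.warn(f"Agent {agent_id} has no {key} field")
        return fields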
letta/orm/__init__.py CHANGED
@@ -1,7 +1,9 @@
  from letta.orm.base import Base
  from letta.orm.block import Block
+ from letta.orm.blocks_agents import BlocksAgents
  from letta.orm.file import FileMetadata
  from letta.orm.organization import Organization
+ from letta.orm.sandbox_config import SandboxConfig, SandboxEnvironmentVariable
  from letta.orm.source import Source
  from letta.orm.tool import Tool
  from letta.orm.user import User
letta/orm/block.py CHANGED
@@ -1,8 +1,9 @@
  from typing import TYPE_CHECKING, Optional, Type

- from sqlalchemy import JSON, BigInteger, Integer
+ from sqlalchemy import JSON, BigInteger, Integer, UniqueConstraint
  from sqlalchemy.orm import Mapped, mapped_column, relationship

+ from letta.constants import CORE_MEMORY_BLOCK_CHAR_LIMIT
  from letta.orm.mixins import OrganizationMixin
  from letta.orm.sqlalchemy_base import SqlalchemyBase
  from letta.schemas.block import Block as PydanticBlock
@@ -17,6 +18,8 @@ class Block(OrganizationMixin, SqlalchemyBase):

  __tablename__ = "block"
  __pydantic_model__ = PydanticBlock
+ # This may seem redundant, but is necessary for the BlocksAgents composite FK relationship
+ __table_args__ = (UniqueConstraint("id", "label", name="unique_block_id_label"),)

  template_name: Mapped[Optional[str]] = mapped_column(
  nullable=True, doc="the unique name that identifies a block in a human-readable way"
@@ -27,7 +30,7 @@ class Block(OrganizationMixin, SqlalchemyBase):
  doc="whether the block is a template (e.g. saved human/persona options as baselines for other templates)", default=False
  )
  value: Mapped[str] = mapped_column(doc="Text content of the block for the respective section of core memory.")
- limit: Mapped[BigInteger] = mapped_column(Integer, default=2000, doc="Character limit of the block.")
+ limit: Mapped[BigInteger] = mapped_column(Integer, default=CORE_MEMORY_BLOCK_CHAR_LIMIT, doc="Character limit of the block.")
  metadata_: Mapped[Optional[dict]] = mapped_column(JSON, default={}, doc="arbitrary information related to the block.")

  # relationships
letta/orm/blocks_agents.py ADDED
@@ -0,0 +1,29 @@
+ from sqlalchemy import ForeignKey, ForeignKeyConstraint, String, UniqueConstraint
+ from sqlalchemy.orm import Mapped, mapped_column
+
+ from letta.orm.sqlalchemy_base import SqlalchemyBase
+ from letta.schemas.blocks_agents import BlocksAgents as PydanticBlocksAgents
+
+
+ class BlocksAgents(SqlalchemyBase):
+ """Agents must have one or many blocks to make up their core memory."""
+
+ __tablename__ = "blocks_agents"
+ __pydantic_model__ = PydanticBlocksAgents
+ __table_args__ = (
+ UniqueConstraint(
+ "agent_id",
+ "block_label",
+ name="unique_label_per_agent",
+ ),
+ ForeignKeyConstraint(
+ ["block_id", "block_label"],
+ ["block.id", "block.label"],
+ name="fk_block_id_label",
+ ),
+ )
+
+ # unique agent + block label
+ agent_id: Mapped[str] = mapped_column(String, ForeignKey("agents.id"), primary_key=True)
+ block_id: Mapped[str] = mapped_column(String, primary_key=True)
+ block_label: Mapped[str] = mapped_column(String, primary_key=True)
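Why Block needed the seemingly redundant UniqueConstraint("id", "label"): a composite foreign key must point at columns covered by a unique constraint on the target table. A self-contained sketch of the same pattern (SQLAlchemy 2.x):

    from sqlalchemy import Column, ForeignKeyConstraint, String, UniqueConstraint, create_engine
    from sqlalchemy.orm import DeclarativeBase

    class Base(DeclarativeBase):
        pass

    class Block(Base):
        __tablename__ = "block"
        # Without this, the composite FK below has no unique target to reference.
        __table_args__ = (UniqueConstraint("id", "label", name="unique_block_id_label"),)
        id = Column(String, primary_key=True)
        label = Column(String)

    class BlocksAgents(Base):
        __tablename__ = "blocks_agents"
        __table_args__ = (
            ForeignKeyConstraint(["block_id", "block_label"], ["block.id", "block.label"]),
        )
        agent_id = Column(String, primary_key=True)
        block_id = Column(String, primary_key=True)
        block_label = Column(String, primary_key=True)

    Base.metadata.create_all(create_engine("sqlite://"))  # composite FK resolves cleanly

The separate unique_label_per_agent constraint is what prevents an agent from holding two blocks with the same label.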
letta/orm/mixins.py CHANGED
@@ -37,3 +37,11 @@ class SourceMixin(Base):
  __abstract__ = True

  source_id: Mapped[str] = mapped_column(String, ForeignKey("sources.id"))
+
+
+ class SandboxConfigMixin(Base):
+ """Mixin for models that belong to a SandboxConfig."""
+
+ __abstract__ = True
+
+ sandbox_config_id: Mapped[str] = mapped_column(String, ForeignKey("sandbox_configs.id"))
letta/orm/organization.py CHANGED
@@ -2,12 +2,12 @@ from typing import TYPE_CHECKING, List

  from sqlalchemy.orm import Mapped, mapped_column, relationship

- from letta.orm.file import FileMetadata
  from letta.orm.sqlalchemy_base import SqlalchemyBase
  from letta.schemas.organization import Organization as PydanticOrganization

  if TYPE_CHECKING:

+ from letta.orm.file import FileMetadata
  from letta.orm.tool import Tool
  from letta.orm.user import User

@@ -27,6 +27,13 @@ class Organization(SqlalchemyBase):
  sources: Mapped[List["Source"]] = relationship("Source", back_populates="organization", cascade="all, delete-orphan")
  agents_tags: Mapped[List["AgentsTags"]] = relationship("AgentsTags", back_populates="organization", cascade="all, delete-orphan")
  files: Mapped[List["FileMetadata"]] = relationship("FileMetadata", back_populates="organization", cascade="all, delete-orphan")
+ sandbox_configs: Mapped[List["SandboxConfig"]] = relationship(
+ "SandboxConfig", back_populates="organization", cascade="all, delete-orphan"
+ )
+ sandbox_environment_variables: Mapped[List["SandboxEnvironmentVariable"]] = relationship(
+ "SandboxEnvironmentVariable", back_populates="organization", cascade="all, delete-orphan"
+ )
+
  # TODO: Map these relationships later when we actually make these models
  # below is just a suggestion
  # agents: Mapped[List["Agent"]] = relationship("Agent", back_populates="organization", cascade="all, delete-orphan")
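Moving the FileMetadata import under TYPE_CHECKING is the usual circular-import fix: the name is only needed for annotations, which type checkers (and SQLAlchemy's string-based relationship targets) resolve lazily. The general shape:

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Evaluated by type checkers only, never at runtime.
        from letta.orm.file import FileMetadata

    def first_file(org) -> "FileMetadata":  # string annotation avoids a runtime import
        return org.files[0]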
letta/orm/sandbox_config.py ADDED
@@ -0,0 +1,56 @@
+ from typing import TYPE_CHECKING, Dict, List, Optional
+
+ from sqlalchemy import JSON
+ from sqlalchemy import Enum as SqlEnum
+ from sqlalchemy import String, UniqueConstraint
+ from sqlalchemy.orm import Mapped, mapped_column, relationship
+
+ from letta.orm.mixins import OrganizationMixin, SandboxConfigMixin
+ from letta.orm.sqlalchemy_base import SqlalchemyBase
+ from letta.schemas.sandbox_config import SandboxConfig as PydanticSandboxConfig
+ from letta.schemas.sandbox_config import (
+ SandboxEnvironmentVariable as PydanticSandboxEnvironmentVariable,
+ )
+ from letta.schemas.sandbox_config import SandboxType
+
+ if TYPE_CHECKING:
+ from letta.orm.organization import Organization
+
+
+ class SandboxConfig(SqlalchemyBase, OrganizationMixin):
+ """ORM model for sandbox configurations with JSON storage for arbitrary config data."""
+
+ __tablename__ = "sandbox_configs"
+ __pydantic_model__ = PydanticSandboxConfig
+
+ # For now, we only allow one type of sandbox config per organization
+ __table_args__ = (UniqueConstraint("type", "organization_id", name="uix_type_organization"),)
+
+ id: Mapped[str] = mapped_column(String, primary_key=True, nullable=False)
+ type: Mapped[SandboxType] = mapped_column(SqlEnum(SandboxType), nullable=False, doc="The type of sandbox.")
+ config: Mapped[Dict] = mapped_column(JSON, nullable=False, doc="The JSON configuration data.")
+
+ # relationships
+ organization: Mapped["Organization"] = relationship("Organization", back_populates="sandbox_configs")
+ sandbox_environment_variables: Mapped[List["SandboxEnvironmentVariable"]] = relationship(
+ "SandboxEnvironmentVariable", back_populates="sandbox_config", cascade="all, delete-orphan"
+ )
+
+
+ class SandboxEnvironmentVariable(SqlalchemyBase, OrganizationMixin, SandboxConfigMixin):
+ """ORM model for environment variables associated with sandboxes."""
+
+ __tablename__ = "sandbox_environment_variables"
+ __pydantic_model__ = PydanticSandboxEnvironmentVariable
+
+ # We cannot have duplicate key names in the same sandbox, the env var would get overwritten
+ __table_args__ = (UniqueConstraint("key", "sandbox_config_id", name="uix_key_sandbox_config"),)
+
+ id: Mapped[str] = mapped_column(String, primary_key=True, nullable=False)
+ key: Mapped[str] = mapped_column(String, nullable=False, doc="The name of the environment variable.")
+ value: Mapped[str] = mapped_column(String, nullable=False, doc="The value of the environment variable.")
+ description: Mapped[Optional[str]] = mapped_column(String, nullable=True, doc="An optional description of the environment variable.")
+
+ # relationships
+ organization: Mapped["Organization"] = relationship("Organization", back_populates="sandbox_environment_variables")
+ sandbox_config: Mapped["SandboxConfig"] = relationship("SandboxConfig", back_populates="sandbox_environment_variables")
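Given the UniqueConstraint on (type, organization_id), each organization holds at most one config per sandbox type, which implies a get-or-create access pattern. The real logic presumably lives in the new SandboxConfigManager (+256 lines, not shown here); this is only a hypothetical sketch:

    from sqlalchemy import select
    from sqlalchemy.orm import Session

    def get_or_create_sandbox_config(session: Session, model, org_id: str, sandbox_type, config: dict):
        # 'model' is the SandboxConfig ORM class; id generation is elided in this sketch.
        row = session.scalar(
            select(model).where(model.organization_id == org_id, model.type == sandbox_type)
        )
        if row is None:
            row = model(organization_id=org_id, type=sandbox_type, config=config)
            session.add(row)
            session.commit()  # a concurrent duplicate would trip uix_type_organization here
        return row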
letta/orm/sqlalchemy_base.py CHANGED
@@ -11,7 +11,6 @@ if TYPE_CHECKING:
  from pydantic import BaseModel
  from sqlalchemy.orm import Session

- # from letta.orm.user import User

  logger = get_logger(__name__)

@@ -28,6 +27,7 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
  cls, *, db_session: "Session", cursor: Optional[str] = None, limit: Optional[int] = 50, **kwargs
  ) -> List[Type["SqlalchemyBase"]]:
  """List records with optional cursor (for pagination) and limit."""
+ logger.debug(f"Listing {cls.__name__} with kwarg filters {kwargs}")
  with db_session as session:
  # Start with the base query filtered by kwargs
  query = select(cls).filter_by(**kwargs)
@@ -67,6 +67,8 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
  Raises:
  NoResultFound: if the object is not found
  """
+ logger.debug(f"Reading {cls.__name__} with ID: {identifier} with actor={actor}")
+
  # Start the query
  query = select(cls)
  # Collect query conditions for better error reporting
@@ -96,6 +98,8 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
  raise NoResultFound(f"{cls.__name__} not found with {conditions_str}")

  def create(self, db_session: "Session", actor: Optional["User"] = None) -> Type["SqlalchemyBase"]:
+ logger.debug(f"Creating {self.__class__.__name__} with ID: {self.id} with actor={actor}")
+
  if actor:
  self._set_created_and_updated_by_fields(actor.id)

@@ -106,6 +110,8 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
  return self

  def delete(self, db_session: "Session", actor: Optional["User"] = None) -> Type["SqlalchemyBase"]:
+ logger.debug(f"Soft deleting {self.__class__.__name__} with ID: {self.id} with actor={actor}")
+
  if actor:
  self._set_created_and_updated_by_fields(actor.id)

@@ -114,8 +120,7 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):

  def hard_delete(self, db_session: "Session", actor: Optional["User"] = None) -> None:
  """Permanently removes the record from the database."""
- if actor:
- logger.info(f"User {actor.id} requested hard deletion of {self.__class__.__name__} with ID {self.id}")
+ logger.debug(f"Hard deleting {self.__class__.__name__} with ID: {self.id} with actor={actor}")

  with db_session as session:
  try:
@@ -129,6 +134,7 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
  logger.info(f"{self.__class__.__name__} with ID {self.id} successfully hard deleted")

  def update(self, db_session: "Session", actor: Optional["User"] = None) -> Type["SqlalchemyBase"]:
+ logger.debug(f"Updating {self.__class__.__name__} with ID: {self.id} with actor={actor}")
  if actor:
  self._set_created_and_updated_by_fields(actor.id)

letta/schemas/block.py CHANGED
@@ -1,6 +1,6 @@
  from typing import Optional

- from pydantic import Field, model_validator
+ from pydantic import BaseModel, Field, model_validator
  from typing_extensions import Self

  from letta.schemas.letta_base import LettaBase
@@ -95,6 +95,13 @@ class BlockCreate(BaseBlock):
  label: str = Field(..., description="Label of the block.")


+ class BlockLabelUpdate(BaseModel):
+ """Update the label of a block"""
+
+ current_label: str = Field(..., description="Current label of the block.")
+ new_label: str = Field(..., description="New label of the block.")
+
+
  class CreatePersona(BlockCreate):
  """Create a persona block"""

@@ -117,6 +124,13 @@ class BlockUpdate(BaseBlock):
  extra = "ignore" # Ignores extra fields


+ class BlockLimitUpdate(BaseModel):
+ """Update the limit of a block"""
+
+ label: str = Field(..., description="Label of the block.")
+ limit: int = Field(..., description="New limit of the block.")
+
+
  class UpdatePersona(BlockUpdate):
  """Update a persona block"""

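Both new models are plain request payloads: they subclass pydantic's BaseModel directly rather than LettaBase, so they carry no generated id, and they presumably back the new memory-block routes added in letta/server/rest_api/routers/v1/agents.py. Constructing one (class re-declared here for a self-contained example):

    from pydantic import BaseModel, Field

    class BlockLabelUpdate(BaseModel):
        """Update the label of a block"""

        current_label: str = Field(..., description="Current label of the block.")
        new_label: str = Field(..., description="New label of the block.")

    payload = BlockLabelUpdate(current_label="persona", new_label="assistant_persona")
    print(payload.model_dump())  # {'current_label': 'persona', 'new_label': 'assistant_persona'}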
letta/schemas/blocks_agents.py ADDED
@@ -0,0 +1,32 @@
+ from datetime import datetime
+ from typing import Optional
+
+ from pydantic import Field
+
+ from letta.schemas.letta_base import LettaBase
+
+
+ class BlocksAgentsBase(LettaBase):
+ __id_prefix__ = "blocks_agents"
+
+
+ class BlocksAgents(BlocksAgentsBase):
+ """
+ Schema representing the relationship between blocks and agents.
+
+ Parameters:
+ agent_id (str): The ID of the associated agent.
+ block_id (str): The ID of the associated block.
+ block_label (str): The label of the block.
+ created_at (datetime): The date this relationship was created.
+ updated_at (datetime): The date this relationship was last updated.
+ is_deleted (bool): Whether this block-agent relationship is deleted or not.
+ """
+
+ id: str = BlocksAgentsBase.generate_id_field()
+ agent_id: str = Field(..., description="The ID of the associated agent.")
+ block_id: str = Field(..., description="The ID of the associated block.")
+ block_label: str = Field(..., description="The label of the block.")
+ created_at: Optional[datetime] = Field(None, description="The creation date of the association.")
+ updated_at: Optional[datetime] = Field(None, description="The update date of the association.")
+ is_deleted: bool = Field(False, description="Whether this block-agent relationship is deleted or not.")
letta/schemas/letta_base.py CHANGED
@@ -1,4 +1,5 @@
  import uuid
+ from datetime import datetime
  from logging import getLogger
  from typing import Optional
  from uuid import UUID
@@ -80,3 +81,11 @@ class LettaBase(BaseModel):
  logger.warning(f"Bare UUIDs are deprecated, please use the full prefixed id ({cls.__id_prefix__})!")
  return f"{cls.__id_prefix__}-{v}"
  return v
+
+
+ class OrmMetadataBase(LettaBase):
+ # metadata fields
+ created_by_id: Optional[str] = Field(None, description="The id of the user that made this object.")
+ last_updated_by_id: Optional[str] = Field(None, description="The id of the user that made this object.")
+ created_at: Optional[datetime] = Field(None, description="The timestamp when the object was created.")
+ updated_at: Optional[datetime] = Field(None, description="The timestamp when the object was last updated.")
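OrmMetadataBase centralizes the audit fields that the ORM layer stamps (the created-by/updated-by ids that _set_created_and_updated_by_fields writes, plus timestamps), so schemas can inherit them instead of redeclaring four fields each. A minimal sketch with a hypothetical subclass, using a plain BaseModel stand-in:

    from datetime import datetime
    from typing import Optional

    from pydantic import BaseModel, Field

    class OrmMetadataBase(BaseModel):  # stand-in; the real class subclasses LettaBase
        created_by_id: Optional[str] = Field(None, description="The id of the user that made this object.")
        last_updated_by_id: Optional[str] = Field(None, description="The id of the user that last updated this object.")
        created_at: Optional[datetime] = Field(None, description="The timestamp when the object was created.")
        updated_at: Optional[datetime] = Field(None, description="The timestamp when the object was last updated.")

    class SandboxConfigSchema(OrmMetadataBase):  # hypothetical subclass
        id: str
        type: str

    cfg = SandboxConfigSchema(id="sandbox-123", type="e2b")
    print(cfg.created_at)  # None until populated from the ORM row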