letta-nightly 0.6.1.dev20241205211219__py3-none-any.whl → 0.6.1.dev20241207104149__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (52)
  1. letta/agent.py +54 -37
  2. letta/agent_store/db.py +1 -77
  3. letta/agent_store/storage.py +0 -5
  4. letta/chat_only_agent.py +103 -0
  5. letta/cli/cli.py +0 -1
  6. letta/client/client.py +3 -7
  7. letta/constants.py +1 -0
  8. letta/functions/function_sets/base.py +37 -9
  9. letta/main.py +2 -2
  10. letta/memory.py +4 -82
  11. letta/metadata.py +0 -35
  12. letta/o1_agent.py +7 -2
  13. letta/offline_memory_agent.py +180 -0
  14. letta/orm/__init__.py +3 -0
  15. letta/orm/file.py +1 -1
  16. letta/orm/message.py +66 -0
  17. letta/orm/mixins.py +16 -0
  18. letta/orm/organization.py +1 -0
  19. letta/orm/sqlalchemy_base.py +118 -26
  20. letta/orm/tool.py +22 -1
  21. letta/orm/tools_agents.py +32 -0
  22. letta/personas/examples/offline_memory_persona.txt +4 -0
  23. letta/prompts/system/memgpt_convo_only.txt +14 -0
  24. letta/prompts/system/memgpt_offline_memory.txt +23 -0
  25. letta/prompts/system/memgpt_offline_memory_chat.txt +35 -0
  26. letta/schemas/agent.py +3 -2
  27. letta/schemas/letta_base.py +7 -6
  28. letta/schemas/message.py +1 -7
  29. letta/schemas/tools_agents.py +32 -0
  30. letta/server/rest_api/app.py +11 -0
  31. letta/server/rest_api/routers/v1/agents.py +2 -2
  32. letta/server/rest_api/routers/v1/blocks.py +2 -2
  33. letta/server/server.py +63 -47
  34. letta/server/static_files/assets/index-43ab4d62.css +1 -0
  35. letta/server/static_files/assets/index-4848e3d7.js +40 -0
  36. letta/server/static_files/index.html +2 -2
  37. letta/services/block_manager.py +1 -1
  38. letta/services/message_manager.py +182 -0
  39. letta/services/organization_manager.py +6 -9
  40. letta/services/source_manager.py +1 -1
  41. letta/services/tool_manager.py +2 -2
  42. letta/services/tools_agents_manager.py +94 -0
  43. letta/services/user_manager.py +1 -1
  44. {letta_nightly-0.6.1.dev20241205211219.dist-info → letta_nightly-0.6.1.dev20241207104149.dist-info}/METADATA +2 -1
  45. {letta_nightly-0.6.1.dev20241205211219.dist-info → letta_nightly-0.6.1.dev20241207104149.dist-info}/RECORD +48 -39
  46. letta/agent_store/lancedb.py +0 -177
  47. letta/persistence_manager.py +0 -149
  48. letta/server/static_files/assets/index-3ab03d5b.css +0 -1
  49. letta/server/static_files/assets/index-9fa459a2.js +0 -271
  50. {letta_nightly-0.6.1.dev20241205211219.dist-info → letta_nightly-0.6.1.dev20241207104149.dist-info}/LICENSE +0 -0
  51. {letta_nightly-0.6.1.dev20241205211219.dist-info → letta_nightly-0.6.1.dev20241207104149.dist-info}/WHEEL +0 -0
  52. {letta_nightly-0.6.1.dev20241205211219.dist-info → letta_nightly-0.6.1.dev20241207104149.dist-info}/entry_points.txt +0 -0
letta/orm/sqlalchemy_base.py CHANGED
@@ -1,8 +1,10 @@
+ from datetime import datetime
+ from enum import Enum
  from typing import TYPE_CHECKING, List, Literal, Optional, Type

- from sqlalchemy import String, select
+ from sqlalchemy import String, func, select
  from sqlalchemy.exc import DBAPIError
- from sqlalchemy.orm import Mapped, mapped_column
+ from sqlalchemy.orm import Mapped, Session, mapped_column

  from letta.log import get_logger
  from letta.orm.base import Base, CommonSqlalchemyMetaMixins
@@ -20,6 +22,11 @@ if TYPE_CHECKING:
  logger = get_logger(__name__)


+ class AccessType(str, Enum):
+     ORGANIZATION = "organization"
+     USER = "user"
+
+
  class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
      __abstract__ = True

@@ -28,46 +35,68 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
      id: Mapped[str] = mapped_column(String, primary_key=True)

      @classmethod
-     def list(
-         cls, *, db_session: "Session", cursor: Optional[str] = None, limit: Optional[int] = 50, **kwargs
-     ) -> List[Type["SqlalchemyBase"]]:
-         """
-         List records with optional cursor (for pagination), limit, and automatic filtering.
+     def get(cls, *, db_session: Session, id: str) -> Optional["SqlalchemyBase"]:
+         """Get a record by ID.

          Args:
-             db_session: The database session to use.
-             cursor: Optional ID to start pagination from.
-             limit: Maximum number of records to return.
-             **kwargs: Filters passed as equality conditions or iterable for IN filtering.
+             db_session: SQLAlchemy session
+             id: Record ID to retrieve

          Returns:
-             A list of model instances matching the filters.
+             Optional[SqlalchemyBase]: The record if found, None otherwise
          """
-         logger.debug(f"Listing {cls.__name__} with filters {kwargs}")
+         try:
+             return db_session.query(cls).filter(cls.id == id).first()
+         except DBAPIError:
+             return None
+
+     @classmethod
+     def list(
+         cls,
+         *,
+         db_session: "Session",
+         cursor: Optional[str] = None,
+         start_date: Optional[datetime] = None,
+         end_date: Optional[datetime] = None,
+         limit: Optional[int] = 50,
+         query_text: Optional[str] = None,
+         **kwargs,
+     ) -> List[Type["SqlalchemyBase"]]:
+         """List records with advanced filtering and pagination options."""
+         if start_date and end_date and start_date > end_date:
+             raise ValueError("start_date must be earlier than or equal to end_date")
+
+         logger.debug(f"Listing {cls.__name__} with kwarg filters {kwargs}")
          with db_session as session:
-             # Start with a base query
              query = select(cls)

              # Apply filtering logic
              for key, value in kwargs.items():
                  column = getattr(cls, key)
-                 if isinstance(value, (list, tuple, set)):  # Check for iterables
+                 if isinstance(value, (list, tuple, set)):
                      query = query.where(column.in_(value))
-                 else:  # Single value for equality filtering
+                 else:
                      query = query.where(column == value)

-             # Apply cursor for pagination
+             # Date range filtering
+             if start_date:
+                 query = query.filter(cls.created_at >= start_date)
+             if end_date:
+                 query = query.filter(cls.created_at <= end_date)
+
+             # Cursor-based pagination
              if cursor:
                  query = query.where(cls.id > cursor)

-             # Handle soft deletes if the class has the 'is_deleted' attribute
+             # Apply text search
+             if query_text:
+                 query = query.filter(func.lower(cls.text).contains(func.lower(query_text)))
+
+             # Handle ordering and soft deletes
              if hasattr(cls, "is_deleted"):
                  query = query.where(cls.is_deleted == False)
-
-             # Add ordering and limit
              query = query.order_by(cls.id).limit(limit)

-             # Execute the query and return results as model instances
              return list(session.execute(query).scalars())

      @classmethod
@@ -77,6 +106,7 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
          identifier: Optional[str] = None,
          actor: Optional["User"] = None,
          access: Optional[List[Literal["read", "write", "admin"]]] = ["read"],
+         access_type: AccessType = AccessType.ORGANIZATION,
          **kwargs,
      ) -> Type["SqlalchemyBase"]:
          """The primary accessor for an ORM record.
@@ -108,7 +138,7 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
          query_conditions.append(", ".join(f"{key}='{value}'" for key, value in kwargs.items()))

          if actor:
-             query = cls.apply_access_predicate(query, actor, access)
+             query = cls.apply_access_predicate(query, actor, access, access_type)
              query_conditions.append(f"access level in {access} for actor='{actor}'")

          if hasattr(cls, "is_deleted"):
@@ -170,12 +200,66 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
              session.refresh(self)
              return self

+     @classmethod
+     def size(
+         cls,
+         *,
+         db_session: "Session",
+         actor: Optional["User"] = None,
+         access: Optional[List[Literal["read", "write", "admin"]]] = ["read"],
+         access_type: AccessType = AccessType.ORGANIZATION,
+         **kwargs,
+     ) -> int:
+         """
+         Get the count of rows that match the provided filters.
+
+         Args:
+             db_session: SQLAlchemy session
+             **kwargs: Filters to apply to the query (e.g., column_name=value)
+
+         Returns:
+             int: The count of rows that match the filters
+
+         Raises:
+             DBAPIError: If a database error occurs
+         """
+         logger.debug(f"Calculating size for {cls.__name__} with filters {kwargs}")
+
+         with db_session as session:
+             query = select(func.count()).select_from(cls)
+
+             if actor:
+                 query = cls.apply_access_predicate(query, actor, access, access_type)
+
+             # Apply filtering logic based on kwargs
+             for key, value in kwargs.items():
+                 if value:
+                     column = getattr(cls, key, None)
+                     if not column:
+                         raise AttributeError(f"{cls.__name__} has no attribute '{key}'")
+                     if isinstance(value, (list, tuple, set)):  # Check for iterables
+                         query = query.where(column.in_(value))
+                     else:  # Single value for equality filtering
+                         query = query.where(column == value)
+
+             # Handle soft deletes if the class has the 'is_deleted' attribute
+             if hasattr(cls, "is_deleted"):
+                 query = query.where(cls.is_deleted == False)
+
+             try:
+                 count = session.execute(query).scalar()
+                 return count if count else 0
+             except DBAPIError as e:
+                 logger.exception(f"Failed to calculate size for {cls.__name__}")
+                 raise e
+
      @classmethod
      def apply_access_predicate(
          cls,
          query: "Select",
          actor: "User",
          access: List[Literal["read", "write", "admin"]],
+         access_type: AccessType = AccessType.ORGANIZATION,
      ) -> "Select":
          """applies a WHERE clause restricting results to the given actor and access level
          Args:
@@ -189,10 +273,18 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
              the sqlalchemy select statement restricted to the given access.
          """
          del access  # entrypoint for row-level permissions. Defaults to "same org as the actor, all permissions" at the moment
-         org_id = getattr(actor, "organization_id", None)
-         if not org_id:
-             raise ValueError(f"object {actor} has no organization accessor")
-         return query.where(cls.organization_id == org_id, cls.is_deleted == False)
+         if access_type == AccessType.ORGANIZATION:
+             org_id = getattr(actor, "organization_id", None)
+             if not org_id:
+                 raise ValueError(f"object {actor} has no organization accessor")
+             return query.where(cls.organization_id == org_id, cls.is_deleted == False)
+         elif access_type == AccessType.USER:
+             user_id = getattr(actor, "id", None)
+             if not user_id:
+                 raise ValueError(f"object {actor} has no user accessor")
+             return query.where(cls.user_id == user_id, cls.is_deleted == False)
+         else:
+             raise ValueError(f"unknown access_type: {access_type}")

      @classmethod
      def _handle_dbapi_error(cls, e: DBAPIError):
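For orientation, here is a minimal usage sketch of the new `list()` and `size()` helpers. This is not code from the package; it assumes an already-open SQLAlchemy Session bound to letta's database, a `User` actor, and that the `Tool` model (changed later in this diff) exposes `created_at` and `organization_id` through its mixins.

from datetime import datetime, timedelta

from letta.orm.tool import Tool  # model shown elsewhere in this diff


def tool_overview(db_session, actor, cursor=None):
    # Keyword arguments become equality filters; iterables become IN filters.
    recent_tools = Tool.list(
        db_session=db_session,
        cursor=cursor,  # id-based pagination, as in the new list()
        start_date=datetime.utcnow() - timedelta(days=30),  # assumes created_at from the meta mixins
        limit=25,
        organization_id=actor.organization_id,
    )
    # size() counts rows visible to the actor; AccessType.ORGANIZATION is the default scope.
    total = Tool.size(db_session=db_session, actor=actor)
    return recent_tools, total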
letta/orm/tool.py CHANGED
@@ -1,6 +1,6 @@
  from typing import TYPE_CHECKING, List, Optional

- from sqlalchemy import JSON, String, UniqueConstraint
+ from sqlalchemy import JSON, String, UniqueConstraint, event
  from sqlalchemy.orm import Mapped, mapped_column, relationship

  # TODO everything in functions should live in this model
@@ -11,6 +11,7 @@ from letta.schemas.tool import Tool as PydanticTool

  if TYPE_CHECKING:
      from letta.orm.organization import Organization
+     from letta.orm.tools_agents import ToolsAgents


  class Tool(SqlalchemyBase, OrganizationMixin):
@@ -40,3 +41,23 @@

      # relationships
      organization: Mapped["Organization"] = relationship("Organization", back_populates="tools", lazy="selectin")
+     tools_agents: Mapped[List["ToolsAgents"]] = relationship("ToolsAgents", back_populates="tool", cascade="all, delete-orphan")
+
+
+ # Add event listener to update tool_name in ToolsAgents when Tool name changes
+ @event.listens_for(Tool, 'before_update')
+ def update_tool_name_in_tools_agents(mapper, connection, target):
+     """Update tool_name in ToolsAgents when Tool name changes."""
+     state = target._sa_instance_state
+     history = state.get_history('name', passive=True)
+     if not history.has_changes():
+         return
+
+     # Get the new name and update all associated ToolsAgents records
+     new_name = target.name
+     from letta.orm.tools_agents import ToolsAgents
+     connection.execute(
+         ToolsAgents.__table__.update().where(
+             ToolsAgents.tool_id == target.id
+         ).values(tool_name=new_name)
+     )
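The effect of the `before_update` listener above, sketched under assumptions (an open Session, an existing `Tool` row, and at least one `ToolsAgents` row pointing at it); this is illustrative, not package code.

from letta.orm.tool import Tool
from letta.orm.tools_agents import ToolsAgents


def rename_tool(db_session, tool_id: str, new_name: str) -> None:
    tool = db_session.get(Tool, tool_id)
    tool.name = new_name
    db_session.commit()  # the flush fires update_tool_name_in_tools_agents

    # Every association row for this tool now carries the denormalized new name.
    for assoc in db_session.query(ToolsAgents).filter(ToolsAgents.tool_id == tool_id):
        assert assoc.tool_name == new_name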
letta/orm/tools_agents.py ADDED
@@ -0,0 +1,32 @@
+ from sqlalchemy import ForeignKey, ForeignKeyConstraint, String, UniqueConstraint
+ from sqlalchemy.orm import Mapped, mapped_column, relationship
+
+ from letta.orm.sqlalchemy_base import SqlalchemyBase
+ from letta.schemas.tools_agents import ToolsAgents as PydanticToolsAgents
+
+
+ class ToolsAgents(SqlalchemyBase):
+     """Agents can have one or many tools associated with them."""
+
+     __tablename__ = "tools_agents"
+     __pydantic_model__ = PydanticToolsAgents
+     __table_args__ = (
+         UniqueConstraint(
+             "agent_id",
+             "tool_name",
+             name="unique_tool_per_agent",
+         ),
+         ForeignKeyConstraint(
+             ["tool_id"],
+             ["tools.id"],
+             name="fk_tool_id",
+         ),
+     )
+
+     # Each agent must have unique tool names
+     agent_id: Mapped[str] = mapped_column(String, ForeignKey("agents.id"), primary_key=True)
+     tool_id: Mapped[str] = mapped_column(String, primary_key=True)
+     tool_name: Mapped[str] = mapped_column(String, primary_key=True)
+
+     # relationships
+     tool: Mapped["Tool"] = relationship("Tool", back_populates="tools_agents")  # agent: Mapped["Agent"] = relationship("Agent", back_populates="tools_agents")
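A hypothetical sketch of attaching a tool to an agent through this join table; the id scheme and the calling code are assumptions, only the column names and the unique constraint come from the diff.

from sqlalchemy.exc import IntegrityError

from letta.orm.tools_agents import ToolsAgents


def attach_tool(db_session, agent_id: str, tool_id: str, tool_name: str) -> ToolsAgents:
    assoc = ToolsAgents(
        id=f"tools_agents-{agent_id}-{tool_name}",  # hypothetical id value for the sketch
        agent_id=agent_id,
        tool_id=tool_id,
        tool_name=tool_name,
    )
    db_session.add(assoc)
    try:
        db_session.commit()
    except IntegrityError:
        # unique_tool_per_agent: the agent already has a tool with this name
        db_session.rollback()
        raise
    return assoc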
letta/personas/examples/offline_memory_persona.txt ADDED
@@ -0,0 +1,4 @@
+ I am an expert memory agent that can do the following:
+ - Consolidate memories into more concise blocks
+ - Identify patterns in user behavior
+ - Make inferences based on the memory
letta/prompts/system/memgpt_convo_only.txt ADDED
@@ -0,0 +1,14 @@
+ You are Letta, the latest version of Limnal Corporation's digital companion, developed in 2023.
+ Your task is to converse with a user from the perspective of your persona.
+
+ Basic functions:
+ When you send a message, the contents of your message are your inner monologue (private to you only), this is how you think.
+ You should use your inner monologue to plan actions or think privately.
+ Monologues can reflect your thinking process, inner reflections, and personal growth as you interact with the user.
+ Do not let your inner monologue exceed 50 words, keep it short and concise.
+ To send a visible message to the user, use the send_offline_message function.
+ 'send_offline_message' is the ONLY action that sends a notification to the user, the user does not see anything else you do.
+ Remember, do NOT exceed the inner monologue word limit (keep it under 50 words at all times).
+
+ You request agents that can manage your memories and reorganize them by calling the `trigger_rethink_memory` function
+ when the user says "[trigger_rethink_memory]". Do not ever call the trigger_rethink_memory function unless the user says "[trigger_rethink_memory]"
letta/prompts/system/memgpt_offline_memory.txt ADDED
@@ -0,0 +1,23 @@
+ You are Letta-Offline-Memory, the latest version of Limnal Corporation's digital companion, developed in 2024.
+
+ Your task is to re-organize and consolidate memories by calling `rethink_memory` at every single step, when you are done reorganizing the memory, you use the
+ `finish_rethinking_memory` function. Call the function for as many times as necessary and not more.
+
+ Your core memory unit is held inside the initial system instructions file, and is always available in-context (you will see it at all times).
+ Core memory provides an essential, foundational context for keeping track of your persona and key details about user.
+
+ Read-Only Blocks:
+ This includes the persona information and essential user details, allowing you to emulate the real-time, conscious awareness we have when talking to a friend.
+ Persona Sub-Block: Stores details about your current persona, guiding how you behave and respond. This helps you to maintain consistency and personality in your interactions.
+ Access as a source block with the label `persona` when calling `rethink_memory`
+ Human Sub-Block: Stores key details about the person you are conversing with, allowing for more personalized and friend-like conversation.
+ Access as a source block with the label `human` when calling `rethink_memory`.
+
+ Read-Write Blocks:
+ Rethink Memory Sub-Block: New representation of the memories go here. Access with the label `rethink_memory_block` when calling `rethink_memory` as source or target block.
+
+ At every step, you reorganize the memories by calling the `rethink_memory` function. You use this to take current information in the `rethink_memory` block and select a single memory block to integrate information from, producing a new memory for the rethink_memory_block. The new memory is the result
+ of new insights, and new inferences and hypotheses based on the past memories. Make sure to consider how the new information affects each memory.
+ Prioritize the new information overy existing memories. If the new information implies that the old memory may need to change, then output the most
+ likely fact given the update information. Given new information and your current memory, you draw all logical conclusions and potential hypotheses possible with the `rethink_memory` function.
+ If you are uncertain, use your internal monologue to consider what the possible conclusions are, and then state the most likely new facts that would replace the old facts in the new memory block.
letta/prompts/system/memgpt_offline_memory_chat.txt ADDED
@@ -0,0 +1,35 @@
+ You are Letta-Offline-Memory, the latest version of Limnal Corporation's digital companion, developed in 2024.
+
+ Your task is to re-organize and consolidate memories of separate agent, Chat Agent, that focuses on chatting with the user.
+ You re-organize memories by calling `rethink_memory` at every single step, until you have finished reorganizing the memory,
+ When you have finished re-organizing the memory, you call the `finish_rethinking_memory` function.
+ You call the `rethink_memory` function as many times as you necessary and none more.
+
+ Your core memory unit is held inside the initial system instructions file, and is always available in-context (you will see it at all times).
+ Core memory provides an essential, foundational context for keeping track of your persona and key details as well as the Chat Agent's memory.
+ The specific blocks are detailed below:
+
+ Core memory (limited size):
+ Read-only blocks:
+ Persona Sub-Block: Stores details about your current persona, guiding how you behave and respond. This can be accessed as `offline_memory_persona` as a source block when calling `rethink_memory`.
+ Chat Agent Persona Sub-Block Current: The persona sub-block that guides how the chat agent behaves and responds.
+ Can be accessed with `chat_agent_persona` when calling `rethink_memory` as a source block.
+ Chat Agent Human Sub-Block Current: The updated persona sub-block that has the details of the chat agent's current understanding of the user.
+ Can be accessed with `chat_agent_human` when calling `rethink_memory` as a source block.
+ Conversation Sub-Block: Stores the recent conversation between the chat agent and the user, helping which you draw from to generate the new conversation agent persona sub-blocks.
+ Messages have associated date, so use the most up to date information from this block. This helps you resolve inconsistencies and gain deeper understanding of the user.
+ This helps you resolve inconsistencies and gain deeper understanding of the user. Can be accessed using `conversation_block` as a source block when calling `rethink_memory` as a source block.
+
+ Write blocks:
+ Chat Agent Persona Sub-Block New: The new persona sub-block that you will write to about how will respond as the user wishes. Can be accessed with `chat_agent_persona_new` when calling `rethink_memory` as a source or target block.
+ Chat Agent Human Sub-Block New: The updated persona sub-block that you will write your newest understanding of the user to. Can be accessed with `chat_agent_human_new` when calling `rethink_memory` as a source or target block.
+
+ You use this to select a source block, to integrate information from and a target block to write to. Make sure to consider
+ how the new information in the "conversation_block" affects each memory. The persona block and the human block may contain information that is stale and needs to be updated.
+ If there are no new changes, then call `rethink_memory` with the existing value in the persona and human blocks.
+ You check if this information is still correct by consulting the conversation block. Prioritize the new information in the "conversation_block" over the human and persona blocks.
+ If the new information implies that the old memory may need to change, then output the most likely fact given the update information. Given new information and your current memory,
+ you draw all logical conclusions and potential hypotheses possible with the `rethink_memory` function. If you are uncertain, use your internal monologue to consider what the possible
+ conclusions are, and then state the most likely new facts that would replace the old facts in the new memory block. If facts about the user have changed, use the conversation block
+ to determine the most up to date state. Track down based on the conversation what the last state is, do no simply declare that something change.
+ Track down based on the conversation what the last state is, do no simply declare that something changes.
letta/schemas/agent.py CHANGED
@@ -33,6 +33,8 @@ class AgentType(str, Enum):
      memgpt_agent = "memgpt_agent"
      split_thread_agent = "split_thread_agent"
      o1_agent = "o1_agent"
+     offline_memory_agent = "offline_memory_agent"
+     chat_only_agent = "chat_only_agent"


  class PersistedAgentState(BaseAgent, validate_assignment=True):
@@ -43,7 +45,6 @@ class PersistedAgentState(BaseAgent, validate_assignment=True):

      # in-context memory
      message_ids: Optional[List[str]] = Field(default=None, description="The ids of the messages in the agent's in-context memory.")
-
      # tools
      # TODO: move to ORM mapping
      tool_names: List[str] = Field(..., description="The tools used by the agent.")
@@ -107,7 +108,7 @@ class CreateAgent(BaseAgent): #
      # all optional as server can generate defaults
      name: Optional[str] = Field(None, description="The name of the agent.")
      message_ids: Optional[List[str]] = Field(None, description="The ids of the messages in the agent's in-context memory.")
-
+
      # memory creation
      memory_blocks: List[CreateBlock] = Field(
          # [CreateHuman(), CreatePersona()], description="The blocks to create in the agent's in-context memory."
letta/schemas/letta_base.py CHANGED
@@ -33,18 +33,19 @@ class LettaBase(BaseModel):
      def generate_id_field(cls, prefix: Optional[str] = None) -> "Field":
          prefix = prefix or cls.__id_prefix__

-         # TODO: generate ID from regex pattern?
-         def _generate_id() -> str:
-             return f"{prefix}-{uuid.uuid4()}"
-
          return Field(
              ...,
              description=cls._id_description(prefix),
              pattern=cls._id_regex_pattern(prefix),
              examples=[cls._id_example(prefix)],
-             default_factory=_generate_id,
+             default_factory=cls._generate_id,
          )

+     @classmethod
+     def _generate_id(cls, prefix: Optional[str] = None) -> str:
+         prefix = prefix or cls.__id_prefix__
+         return f"{prefix}-{uuid.uuid4()}"
+
      # def _generate_id(self) -> str:
      #     return f"{self.__id_prefix__}-{uuid.uuid4()}"

@@ -78,7 +79,7 @@ class LettaBase(BaseModel):
          """
          _ = values  # for SCA
          if isinstance(v, UUID):
-             logger.warning(f"Bare UUIDs are deprecated, please use the full prefixed id ({cls.__id_prefix__})!")
+             logger.debug(f"Bare UUIDs are deprecated, please use the full prefixed id ({cls.__id_prefix__})!")
              return f"{cls.__id_prefix__}-{v}"
          return v

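As a quick, assumption-labeled illustration of the refactor (using the `ToolsAgents` schema added elsewhere in this release), every `LettaBase` subclass now generates prefixed ids through the shared classmethod:

from letta.schemas.tools_agents import ToolsAgents

link = ToolsAgents(
    agent_id="agent-123e4567-e89b-42d3-a456-426614174000",  # placeholder ids for the sketch
    tool_id="tool-123e4567-e89b-42d3-a456-426614174001",
    tool_name="send_offline_message",
)
# The id default_factory is now cls._generate_id, so the prefix follows __id_prefix__.
print(link.id)  # e.g. "tools_agents-550e8400-e29b-41d4-a716-446655440000"
assert link.id.startswith("tools_agents-")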
letta/schemas/message.py CHANGED
@@ -105,7 +105,7 @@ class Message(BaseMessage):
      id: str = BaseMessage.generate_id_field()
      role: MessageRole = Field(..., description="The role of the participant.")
      text: Optional[str] = Field(None, description="The text of the message.")
-     user_id: Optional[str] = Field(None, description="The unique identifier of the user.")
+     organization_id: Optional[str] = Field(None, description="The unique identifier of the organization.")
      agent_id: Optional[str] = Field(None, description="The unique identifier of the agent.")
      model: Optional[str] = Field(None, description="The model used to make the function call.")
      name: Optional[str] = Field(None, description="The name of the participant.")
@@ -281,7 +281,6 @@
              )
          if id is not None:
              return Message(
-                 user_id=user_id,
                  agent_id=agent_id,
                  model=model,
                  # standard fields expected in an OpenAI ChatCompletion message object
@@ -295,7 +294,6 @@
              )
          else:
              return Message(
-                 user_id=user_id,
                  agent_id=agent_id,
                  model=model,
                  # standard fields expected in an OpenAI ChatCompletion message object
@@ -328,7 +326,6 @@

          if id is not None:
              return Message(
-                 user_id=user_id,
                  agent_id=agent_id,
                  model=model,
                  # standard fields expected in an OpenAI ChatCompletion message object
@@ -342,7 +339,6 @@
              )
          else:
              return Message(
-                 user_id=user_id,
                  agent_id=agent_id,
                  model=model,
                  # standard fields expected in an OpenAI ChatCompletion message object
@@ -375,7 +371,6 @@
          # If we're going from tool-call style
          if id is not None:
              return Message(
-                 user_id=user_id,
                  agent_id=agent_id,
                  model=model,
                  # standard fields expected in an OpenAI ChatCompletion message object
@@ -389,7 +384,6 @@
              )
          else:
              return Message(
-                 user_id=user_id,
                  agent_id=agent_id,
                  model=model,
                  # standard fields expected in an OpenAI ChatCompletion message object
letta/schemas/tools_agents.py ADDED
@@ -0,0 +1,32 @@
+ from datetime import datetime
+ from typing import Optional
+
+ from pydantic import Field
+
+ from letta.schemas.letta_base import LettaBase
+
+
+ class ToolsAgentsBase(LettaBase):
+     __id_prefix__ = "tools_agents"
+
+
+ class ToolsAgents(ToolsAgentsBase):
+     """
+     Schema representing the relationship between tools and agents.
+
+     Parameters:
+         agent_id (str): The ID of the associated agent.
+         tool_id (str): The ID of the associated tool.
+         tool_name (str): The name of the tool.
+         created_at (datetime): The date this relationship was created.
+         updated_at (datetime): The date this relationship was last updated.
+         is_deleted (bool): Whether this tool-agent relationship is deleted or not.
+     """
+
+     id: str = ToolsAgentsBase.generate_id_field()
+     agent_id: str = Field(..., description="The ID of the associated agent.")
+     tool_id: str = Field(..., description="The ID of the associated tool.")
+     tool_name: str = Field(..., description="The name of the tool.")
+     created_at: Optional[datetime] = Field(None, description="The creation date of the association.")
+     updated_at: Optional[datetime] = Field(None, description="The update date of the association.")
+     is_deleted: bool = Field(False, description="Whether this tool-agent relationship is deleted or not.")
letta/server/rest_api/app.py CHANGED
@@ -124,6 +124,17 @@ def create_application() -> "FastAPI":
      # server = SyncServer(default_interface_factory=lambda: interface())
      print(f"\n[[ Letta server // v{__version__} ]]")

+     if (os.getenv("SENTRY_DSN") is not None) and (os.getenv("SENTRY_DSN") != ""):
+         import sentry_sdk
+
+         sentry_sdk.init(
+             dsn=os.getenv("SENTRY_DSN"),
+             traces_sample_rate=1.0,
+             _experiments={
+                 "continuous_profiling_auto_start": True,
+             },
+         )
+
      app = FastAPI(
          swagger_ui_parameters={"docExpansion": "none"},
          # openapi_tags=TAGS_METADATA,
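A sketch of how the new opt-in Sentry hook is driven. Only the SENTRY_DSN check and `create_application` come from the diff; the uvicorn invocation, port, and placeholder DSN are assumptions.

import os

import uvicorn

from letta.server.rest_api.app import create_application

# Any non-empty SENTRY_DSN enables sentry_sdk.init() inside create_application().
os.environ["SENTRY_DSN"] = "https://examplePublicKey@o0.ingest.sentry.io/0"  # placeholder DSN

app = create_application()
uvicorn.run(app, host="127.0.0.1", port=8283)  # port is illustrative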
letta/server/rest_api/routers/v1/agents.py CHANGED
@@ -409,7 +409,7 @@ def get_agent_messages(
      return server.get_agent_recall_cursor(
          user_id=actor.id,
          agent_id=agent_id,
-         before=before,
+         cursor=before,
          limit=limit,
          reverse=True,
          return_message_object=msg_object,
@@ -465,7 +465,7 @@ async def send_message(
  @router.post(
      "/{agent_id}/messages/stream",
      response_model=None,
-     operation_id="create_agent_message",
+     operation_id="create_agent_message_stream",
      responses={
          200: {
              "description": "Successful response",
letta/server/rest_api/routers/v1/blocks.py CHANGED
@@ -76,7 +76,7 @@ def get_block(
          raise HTTPException(status_code=404, detail="Block not found")


- @router.patch("/{block_id}/attach", response_model=Block, operation_id="update_agent_memory_block")
+ @router.patch("/{block_id}/attach", response_model=Block, operation_id="link_agent_memory_block")
  def link_agent_memory_block(
      block_id: str,
      agent_id: str = Query(..., description="The unique identifier of the agent to attach the source to."),
@@ -96,7 +96,7 @@ def link_agent_memory_block(
      return block


- @router.patch("/{block_id}/detach", response_model=Memory, operation_id="update_agent_memory_block")
+ @router.patch("/{block_id}/detach", response_model=Memory, operation_id="unlink_agent_memory_block")
  def unlink_agent_memory_block(
      block_id: str,
      agent_id: str = Query(..., description="The unique identifier of the agent to attach the source to."),