letta-nightly 0.5.1.dev20241030104135__py3-none-any.whl → 0.5.1.dev20241101104122__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of letta-nightly might be problematic.

Files changed (35)
  1. letta/agent.py +41 -2
  2. letta/client/client.py +98 -23
  3. letta/client/streaming.py +3 -0
  4. letta/constants.py +3 -0
  5. letta/functions/functions.py +4 -5
  6. letta/functions/schema_generator.py +4 -3
  7. letta/helpers/__init__.py +1 -0
  8. letta/helpers/tool_rule_solver.py +115 -0
  9. letta/llm_api/helpers.py +3 -1
  10. letta/llm_api/llm_api_tools.py +1 -2
  11. letta/llm_api/openai.py +5 -0
  12. letta/metadata.py +43 -1
  13. letta/orm/__init__.py +4 -0
  14. letta/orm/tool.py +0 -3
  15. letta/schemas/agent.py +5 -5
  16. letta/schemas/letta_response.py +3 -3
  17. letta/schemas/tool.py +4 -6
  18. letta/schemas/tool_rule.py +25 -0
  19. letta/server/rest_api/app.py +5 -3
  20. letta/server/rest_api/routers/v1/agents.py +16 -3
  21. letta/server/rest_api/routers/v1/organizations.py +2 -2
  22. letta/server/server.py +7 -43
  23. letta/server/startup.sh +3 -0
  24. letta/server/static_files/assets/{index-d6b3669a.js → index-9fa459a2.js} +66 -69
  25. letta/server/static_files/index.html +1 -1
  26. letta/services/tool_manager.py +21 -4
  27. {letta_nightly-0.5.1.dev20241030104135.dist-info → letta_nightly-0.5.1.dev20241101104122.dist-info}/METADATA +1 -1
  28. {letta_nightly-0.5.1.dev20241030104135.dist-info → letta_nightly-0.5.1.dev20241101104122.dist-info}/RECORD +31 -32
  29. letta/server/rest_api/admin/__init__.py +0 -0
  30. letta/server/rest_api/admin/agents.py +0 -21
  31. letta/server/rest_api/admin/tools.py +0 -82
  32. letta/server/rest_api/admin/users.py +0 -98
  33. {letta_nightly-0.5.1.dev20241030104135.dist-info → letta_nightly-0.5.1.dev20241101104122.dist-info}/LICENSE +0 -0
  34. {letta_nightly-0.5.1.dev20241030104135.dist-info → letta_nightly-0.5.1.dev20241101104122.dist-info}/WHEEL +0 -0
  35. {letta_nightly-0.5.1.dev20241030104135.dist-info → letta_nightly-0.5.1.dev20241101104122.dist-info}/entry_points.txt +0 -0
letta/metadata.py CHANGED
@@ -30,6 +30,12 @@ from letta.schemas.llm_config import LLMConfig
 from letta.schemas.memory import Memory
 from letta.schemas.openai.chat_completions import ToolCall, ToolCallFunction
 from letta.schemas.source import Source
+from letta.schemas.tool_rule import (
+    BaseToolRule,
+    InitToolRule,
+    TerminalToolRule,
+    ToolRule,
+)
 from letta.schemas.user import User
 from letta.settings import settings
 from letta.utils import enforce_types, get_utc_time, printd
@@ -196,6 +202,41 @@ def generate_api_key(prefix="sk-", length=51) -> str:
     return new_key


+class ToolRulesColumn(TypeDecorator):
+    """Custom type for storing a list of ToolRules as JSON"""
+
+    impl = JSON
+    cache_ok = True
+
+    def load_dialect_impl(self, dialect):
+        return dialect.type_descriptor(JSON())
+
+    def process_bind_param(self, value: List[BaseToolRule], dialect):
+        """Convert a list of ToolRules to JSON-serializable format."""
+        if value:
+            return [rule.model_dump() for rule in value]
+        return value
+
+    def process_result_value(self, value, dialect) -> List[BaseToolRule]:
+        """Convert JSON back to a list of ToolRules."""
+        if value:
+            return [self.deserialize_tool_rule(rule_data) for rule_data in value]
+        return value
+
+    @staticmethod
+    def deserialize_tool_rule(data: dict) -> BaseToolRule:
+        """Deserialize a dictionary to the appropriate ToolRule subclass based on the 'type'."""
+        rule_type = data.get("type")  # Remove 'type' field if it exists since it is a class var
+        if rule_type == "InitToolRule":
+            return InitToolRule(**data)
+        elif rule_type == "TerminalToolRule":
+            return TerminalToolRule(**data)
+        elif rule_type == "ToolRule":
+            return ToolRule(**data)
+        else:
+            raise ValueError(f"Unknown tool rule type: {rule_type}")
+
+
 class AgentModel(Base):
     """Defines data model for storing Passages (consisting of text, embedding)"""

@@ -212,7 +253,6 @@ class AgentModel(Base):
     message_ids = Column(JSON)
     memory = Column(JSON)
     system = Column(String)
-    tools = Column(JSON)

     # configs
     agent_type = Column(String)
@@ -224,6 +264,7 @@ class AgentModel(Base):

     # tools
     tools = Column(JSON)
+    tool_rules = Column(ToolRulesColumn)

     Index(__tablename__ + "_idx_user", user_id),

@@ -241,6 +282,7 @@ class AgentModel(Base):
             memory=Memory.load(self.memory),  # load dictionary
             system=self.system,
             tools=self.tools,
+            tool_rules=self.tool_rules,
             agent_type=self.agent_type,
             llm_config=self.llm_config,
             embedding_config=self.embedding_config,
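
As a concrete illustration of the round trip the new column performs, here is a minimal sketch (the tool names are placeholders, and calling the static deserializer directly is only for demonstration; SQLAlchemy normally invokes it through process_bind_param / process_result_value):

from letta.metadata import ToolRulesColumn
from letta.schemas.tool_rule import InitToolRule, TerminalToolRule, ToolRule

rules = [
    InitToolRule(tool_name="first_tool"),
    ToolRule(tool_name="first_tool", children=["final_tool"]),
    TerminalToolRule(tool_name="final_tool"),
]

# On write, process_bind_param stores each rule as a plain dict in the JSON column.
as_json = [rule.model_dump() for rule in rules]

# On read, process_result_value rebuilds the right subclass from each dict's "type" field.
restored = [ToolRulesColumn.deserialize_tool_rule(d) for d in as_json]
assert isinstance(restored[0], InitToolRule) and isinstance(restored[-1], TerminalToolRule)
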
letta/orm/__init__.py CHANGED
@@ -0,0 +1,4 @@
+from letta.orm.base import Base
+from letta.orm.organization import Organization
+from letta.orm.tool import Tool
+from letta.orm.user import User
letta/orm/tool.py CHANGED
@@ -38,8 +38,5 @@ class Tool(SqlalchemyBase, OrganizationMixin):
         String, nullable=True, doc="the module path from which this tool was derived in the codebase."
     )

-    # TODO: add terminal here eventually
-    # This was an intentional decision by Sarah
-
     # relationships
     organization: Mapped["Organization"] = relationship("Organization", back_populates="tools", lazy="selectin")
letta/schemas/agent.py CHANGED
@@ -11,6 +11,7 @@ from letta.schemas.llm_config import LLMConfig
 from letta.schemas.memory import Memory
 from letta.schemas.message import Message
 from letta.schemas.openai.chat_completion_response import UsageStatistics
+from letta.schemas.tool_rule import BaseToolRule


 class BaseAgent(LettaBase, validate_assignment=True):
@@ -61,6 +62,9 @@ class AgentState(BaseAgent, validate_assignment=True):
     # tools
     tools: List[str] = Field(..., description="The tools used by the agent.")

+    # tool rules
+    tool_rules: Optional[List[BaseToolRule]] = Field(default=None, description="The list of tool rules.")
+
     # system prompt
     system: str = Field(..., description="The system prompt used by the agent.")

@@ -104,6 +108,7 @@ class CreateAgent(BaseAgent):
     message_ids: Optional[List[uuid.UUID]] = Field(None, description="The ids of the messages in the agent's in-context memory.")
     memory: Optional[Memory] = Field(None, description="The in-context memory of the agent.")
     tools: Optional[List[str]] = Field(None, description="The tools used by the agent.")
+    tool_rules: Optional[List[BaseToolRule]] = Field(None, description="The tool rules governing the agent.")
     system: Optional[str] = Field(None, description="The system prompt used by the agent.")
     agent_type: Optional[AgentType] = Field(None, description="The type of agent.")
     llm_config: Optional[LLMConfig] = Field(None, description="The LLM configuration used by the agent.")
@@ -156,8 +161,3 @@ class AgentStepResponse(BaseModel):
         ..., description="Whether the agent step ended because the in-context memory is near its limit."
     )
     usage: UsageStatistics = Field(..., description="Usage statistics of the LLM call during the agent's step.")
-
-
-class RemoveToolsFromAgent(BaseModel):
-    agent_id: str = Field(..., description="The id of the agent.")
-    tool_ids: Optional[List[str]] = Field(None, description="The tools to be removed from the agent.")
letta/schemas/letta_response.py CHANGED
@@ -3,7 +3,7 @@ from typing import List, Union
 from pydantic import BaseModel, Field

 from letta.schemas.enums import MessageStreamStatus
-from letta.schemas.letta_message import LettaMessage
+from letta.schemas.letta_message import LettaMessage, LettaMessageUnion
 from letta.schemas.message import Message
 from letta.schemas.usage import LettaUsageStatistics
 from letta.utils import json_dumps
@@ -21,7 +21,7 @@ class LettaResponse(BaseModel):
         usage (LettaUsageStatistics): The usage statistics
     """

-    messages: Union[List[Message], List[LettaMessage]] = Field(..., description="The messages returned by the agent.")
+    messages: Union[List[Message], List[LettaMessageUnion]] = Field(..., description="The messages returned by the agent.")
     usage: LettaUsageStatistics = Field(..., description="The usage statistics of the agent.")

     def __str__(self):
@@ -36,4 +36,4 @@


 # The streaming response is either [DONE], [DONE_STEP], [DONE], an error, or a LettaMessage
-LettaStreamingResponse = Union[LettaMessage, MessageStreamStatus]
+LettaStreamingResponse = Union[LettaMessage, MessageStreamStatus, LettaUsageStatistics]
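
Since LettaStreamingResponse now also admits LettaUsageStatistics, a consumer has three shapes to branch on. A rough sketch of such a dispatch (the chunks iterable is a hypothetical stand-in for however the parsed stream items are obtained):

from letta.schemas.enums import MessageStreamStatus
from letta.schemas.letta_message import LettaMessage
from letta.schemas.usage import LettaUsageStatistics

def consume_stream(chunks):
    """chunks: any iterable yielding LettaStreamingResponse items (hypothetical helper)."""
    for chunk in chunks:
        if isinstance(chunk, LettaMessage):
            print("message:", chunk)
        elif isinstance(chunk, LettaUsageStatistics):
            print("usage:", chunk)  # newly part of the union in this release
        elif isinstance(chunk, MessageStreamStatus):
            print("status:", chunk)  # the [DONE] / [DONE_STEP] markers noted in the comment above
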
letta/schemas/tool.py CHANGED
@@ -10,7 +10,6 @@ from letta.functions.helpers import (
 from letta.functions.schema_generator import generate_schema_from_args_schema
 from letta.schemas.letta_base import LettaBase
 from letta.schemas.openai.chat_completions import ToolCall
-from letta.services.organization_manager import OrganizationManager


 class BaseTool(LettaBase):
@@ -69,10 +68,9 @@ class ToolCreate(LettaBase):
     json_schema: Optional[Dict] = Field(
         None, description="The JSON schema of the function (auto-generated from source_code if not provided)"
     )
-    terminal: Optional[bool] = Field(None, description="Whether the tool is a terminal tool (allow requesting heartbeats).")

     @classmethod
-    def from_composio(cls, action: "ActionType", organization_id: str = OrganizationManager.DEFAULT_ORG_ID) -> "ToolCreate":
+    def from_composio(cls, action: "ActionType") -> "ToolCreate":
         """
         Class method to create an instance of Letta-compatible Composio Tool.
         Check https://docs.composio.dev/introduction/intro/overview to look at options for from_composio
@@ -114,7 +112,6 @@ class ToolCreate(LettaBase):
         cls,
         langchain_tool: "LangChainBaseTool",
         additional_imports_module_attr_map: dict[str, str] = None,
-        organization_id: str = OrganizationManager.DEFAULT_ORG_ID,
     ) -> "ToolCreate":
         """
         Class method to create an instance of Tool from a Langchain tool (must be from langchain_community.tools).
@@ -147,7 +144,6 @@ class ToolCreate(LettaBase):
         cls,
         crewai_tool: "CrewAIBaseTool",
         additional_imports_module_attr_map: dict[str, str] = None,
-        organization_id: str = OrganizationManager.DEFAULT_ORG_ID,
     ) -> "ToolCreate":
         """
         Class method to create an instance of Tool from a crewAI BaseTool object.
@@ -212,5 +208,7 @@ class ToolUpdate(LettaBase):
     tags: Optional[List[str]] = Field(None, description="Metadata tags.")
     module: Optional[str] = Field(None, description="The source code of the function.")
     source_code: Optional[str] = Field(None, description="The source code of the function.")
-    json_schema: Optional[Dict] = Field(None, description="The JSON schema of the function.")
     source_type: Optional[str] = Field(None, description="The type of the source code.")
+    json_schema: Optional[Dict] = Field(
+        None, description="The JSON schema of the function (auto-generated from source_code if not provided)"
+    )
letta/schemas/tool_rule.py CHANGED
@@ -0,0 +1,25 @@
+from typing import List
+
+from pydantic import Field
+
+from letta.schemas.letta_base import LettaBase
+
+
+class BaseToolRule(LettaBase):
+    __id_prefix__ = "tool_rule"
+    tool_name: str = Field(..., description="The name of the tool. Must exist in the database for the user's organization.")
+
+
+class ToolRule(BaseToolRule):
+    type: str = Field("ToolRule")
+    children: List[str] = Field(..., description="The children tools that can be invoked.")
+
+
+class InitToolRule(BaseToolRule):
+    type: str = Field("InitToolRule")
+    """Represents the initial tool rule configuration."""
+
+
+class TerminalToolRule(BaseToolRule):
+    type: str = Field("TerminalToolRule")
+    """Represents a terminal tool rule configuration where if this tool gets called, it must end the agent loop."""
letta/server/rest_api/app.py CHANGED
@@ -8,6 +8,7 @@ import uvicorn
 from fastapi import FastAPI
 from starlette.middleware.cors import CORSMiddleware

+from letta.constants import ADMIN_PREFIX, API_PREFIX, OPENAI_API_PREFIX
 from letta.server.constants import REST_DEFAULT_PORT

 # NOTE(charles): these are extra routes that are not part of v1 but we still need to mount to pass tests
@@ -53,10 +54,11 @@ password = None
 # password = secrets.token_urlsafe(16)
 # #typer.secho(f"Generated admin server password for this session: {password}", fg=typer.colors.GREEN)

+import logging
+
+from fastapi import FastAPI

-ADMIN_PREFIX = "/v1/admin"
-API_PREFIX = "/v1"
-OPENAI_API_PREFIX = "/openai"
+log = logging.getLogger("uvicorn")


 def create_application() -> "FastAPI":
letta/server/rest_api/routers/v1/agents.py CHANGED
@@ -4,7 +4,6 @@ from typing import Dict, List, Optional, Union

 from fastapi import APIRouter, Body, Depends, Header, HTTPException, Query, status
 from fastapi.responses import JSONResponse, StreamingResponse
-from starlette.responses import StreamingResponse

 from letta.constants import DEFAULT_MESSAGE_TOOL, DEFAULT_MESSAGE_TOOL_KWARG
 from letta.schemas.agent import AgentState, CreateAgent, UpdateAgentState
@@ -359,7 +358,20 @@ def update_message(
     return server.update_agent_message(agent_id=agent_id, request=request)


-@router.post("/{agent_id}/messages", response_model=None, operation_id="create_agent_message")
+@router.post(
+    "/{agent_id}/messages",
+    response_model=None,
+    operation_id="create_agent_message",
+    responses={
+        200: {
+            "description": "Successful response",
+            "content": {
+                "application/json": {"schema": LettaResponse.model_json_schema()},  # Use model_json_schema() instead of model directly
+                "text/event-stream": {"description": "Server-Sent Events stream"},
+            },
+        }
+    },
+)
 async def send_message(
     agent_id: str,
     server: SyncServer = Depends(get_letta_server),
@@ -373,7 +385,7 @@ async def send_message(
     """
     actor = server.get_user_or_default(user_id=user_id)

-    return await send_message_to_agent(
+    result = await send_message_to_agent(
         server=server,
         agent_id=agent_id,
         user_id=actor.id,
@@ -386,6 +398,7 @@ async def send_message(
         assistant_message_function_name=request.assistant_message_function_name,
         assistant_message_function_kwarg=request.assistant_message_function_kwarg,
     )
+    return result


 # TODO: move this into server.py?
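
With both content types declared on the route, the same endpoint serves a JSON LettaResponse or a Server-Sent Events stream depending on the request. A rough client-side sketch (the base URL, agent id, and payload shape are assumptions, not taken from this diff; the [DONE]-style markers follow the comment in letta_response.py above):

import json

import requests

url = "http://localhost:8283/v1/agents/agent-123/messages"  # assumed local server and agent id
payload = {"messages": [{"role": "user", "text": "hello"}]}  # assumed request body shape

with requests.post(url, json=payload, stream=True, headers={"Accept": "text/event-stream"}) as resp:
    for raw in resp.iter_lines():
        if not raw.startswith(b"data: "):
            continue
        data = raw[len(b"data: "):]
        if data.startswith(b"["):  # status markers such as [DONE] / [DONE_STEP] are not JSON
            print(data.decode())
        else:
            print(json.loads(data))  # a message chunk, or usage statistics at the end of the stream
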
letta/server/rest_api/routers/v1/organizations.py CHANGED
@@ -22,7 +22,7 @@ def get_all_orgs(
     Get a list of all orgs in the database
     """
     try:
-        next_cursor, orgs = server.organization_manager.list_organizations(cursor=cursor, limit=limit)
+        orgs = server.organization_manager.list_organizations(cursor=cursor, limit=limit)
     except HTTPException:
         raise
     except Exception as e:
@@ -38,7 +38,7 @@ def create_org(
     """
     Create a new org in the database
     """
-    org = server.organization_manager.create_organization(request)
+    org = server.organization_manager.create_organization(name=request.name)
     return org

letta/server/server.py CHANGED
@@ -35,8 +35,9 @@ from letta.interface import AgentInterface  # abstract
 from letta.interface import CLIInterface  # for printing to terminal
 from letta.log import get_logger
 from letta.memory import get_memory_functions
-from letta.metadata import Base, MetadataStore
+from letta.metadata import MetadataStore
 from letta.o1_agent import O1Agent
+from letta.orm import Base
 from letta.orm.errors import NoResultFound
 from letta.prompts import gpt_system
 from letta.providers import (
@@ -169,6 +170,8 @@ from letta.settings import model_settings, settings, tool_settings

 config = LettaConfig.load()

+attach_base()
+
 if settings.letta_pg_uri_no_default:
     config.recall_storage_type = "postgres"
     config.recall_storage_uri = settings.letta_pg_uri_no_default
@@ -181,13 +184,10 @@ else:
     # TODO: don't rely on config storage
     engine = create_engine("sqlite:///" + os.path.join(config.recall_storage_path, "sqlite.db"))

+Base.metadata.create_all(bind=engine)

 SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

-attach_base()
-
-Base.metadata.create_all(bind=engine)
-

 # Dependency
 def get_db():
@@ -820,7 +820,7 @@ class SyncServer(Server):
                 continue
             source_code = parse_source_code(func)
             # memory functions are not terminal
-            json_schema = generate_schema(func, terminal=False, name=func_name)
+            json_schema = generate_schema(func, name=func_name)
             source_type = "python"
             tags = ["memory", "memgpt-base"]
             tool = self.tool_manager.create_or_update_tool(
@@ -842,6 +842,7 @@ class SyncServer(Server):
             name=request.name,
             user_id=user_id,
             tools=request.tools if request.tools else [],
+            tool_rules=request.tool_rules if request.tool_rules else [],
             agent_type=request.agent_type or AgentType.memgpt_agent,
             llm_config=llm_config,
             embedding_config=embedding_config,
@@ -1793,43 +1794,6 @@ class SyncServer(Server):
         letta_agent = self._get_or_load_agent(agent_id=agent_id)
         return letta_agent.update_message(request=request)

-        # TODO decide whether this should be done in the server.py or agent.py
-        # Reason to put it in agent.py:
-        # - we use the agent object's persistence_manager to update the message
-        # - it makes it easy to do things like `retry`, `rethink`, etc.
-        # Reason to put it in server.py:
-        # - fundamentally, we should be able to edit a message (without agent id)
-        #   in the server by directly accessing the DB / message store
-        """
-        message = letta_agent.persistence_manager.recall_memory.storage.get(id=request.id)
-        if message is None:
-            raise ValueError(f"Message with id {request.id} not found")
-
-        # Override fields
-        # NOTE: we try to do some sanity checking here (see asserts), but it's not foolproof
-        if request.role:
-            message.role = request.role
-        if request.text:
-            message.text = request.text
-        if request.name:
-            message.name = request.name
-        if request.tool_calls:
-            assert message.role == MessageRole.assistant, "Tool calls can only be added to assistant messages"
-            message.tool_calls = request.tool_calls
-        if request.tool_call_id:
-            assert message.role == MessageRole.tool, "tool_call_id can only be added to tool messages"
-            message.tool_call_id = request.tool_call_id
-
-        # Save the updated message
-        letta_agent.persistence_manager.recall_memory.storage.update(record=message)
-
-        # Return the updated message
-        updated_message = letta_agent.persistence_manager.recall_memory.storage.get(id=message.id)
-        if updated_message is None:
-            raise ValueError(f"Error persisting message - message with id {request.id} not found")
-        return updated_message
-        """
-
     def rewrite_agent_message(self, agent_id: str, new_text: str) -> Message:

         # Get the current message
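
The reordering above matters because Base now comes from letta.orm and Base.metadata.create_all only creates tables for models already registered on that Base at call time, so attach_base() and all model imports have to run first. A self-contained illustration of that pitfall (generic SQLAlchemy, not letta code):

from sqlalchemy import Column, Integer, String, create_engine, inspect
from sqlalchemy.orm import declarative_base

Base = declarative_base()
engine = create_engine("sqlite://")

# If Base.metadata.create_all(bind=engine) ran here, no tables would be created:
# nothing has been registered on Base yet.

class Tool(Base):
    __tablename__ = "tools"
    id = Column(Integer, primary_key=True)
    name = Column(String)

# Registering the model first, then creating tables, mirrors the ordering in server.py.
Base.metadata.create_all(bind=engine)
print(inspect(engine).get_table_names())  # ['tools']
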
letta/server/startup.sh CHANGED
@@ -1,5 +1,8 @@
 #!/bin/sh
 echo "Starting MEMGPT server..."
+
+alembic upgrade head
+
 if [ "$MEMGPT_ENVIRONMENT" = "DEVELOPMENT" ] ; then
     echo "Starting in development mode!"
     uvicorn letta.server.rest_api.app:app --reload --reload-dir /letta --host 0.0.0.0 --port 8283