letta-nightly 0.6.8.dev20250110190527__py3-none-any.whl → 0.6.8.dev20250111030335__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Note: this version of letta-nightly has been flagged as a potentially problematic release.

letta/agent.py CHANGED
@@ -7,11 +7,11 @@ from abc import ABC, abstractmethod
7
7
  from typing import List, Optional, Tuple, Union
8
8
 
9
9
  from letta.constants import (
10
- BASE_TOOLS,
11
10
  CLI_WARNING_PREFIX,
12
11
  ERROR_MESSAGE_PREFIX,
13
12
  FIRST_MESSAGE_ATTEMPTS,
14
13
  FUNC_FAILED_HEARTBEAT_MESSAGE,
14
+ LETTA_CORE_TOOL_MODULE_NAME,
15
15
  LLM_MAX_TOKENS,
16
16
  MESSAGE_SUMMARY_TRUNC_KEEP_N_LAST,
17
17
  MESSAGE_SUMMARY_TRUNC_TOKEN_FRAC,
@@ -19,6 +19,7 @@ from letta.constants import (
19
19
  REQ_HEARTBEAT_MESSAGE,
20
20
  )
21
21
  from letta.errors import ContextWindowExceededError
22
+ from letta.functions.functions import get_function_from_module
22
23
  from letta.helpers import ToolRulesSolver
23
24
  from letta.interface import AgentInterface
24
25
  from letta.llm_api.helpers import is_context_overflow_error
@@ -26,6 +27,7 @@ from letta.llm_api.llm_api_tools import create
26
27
  from letta.local_llm.utils import num_tokens_from_functions, num_tokens_from_messages
27
28
  from letta.memory import summarize_messages
28
29
  from letta.orm import User
30
+ from letta.orm.enums import ToolType
29
31
  from letta.schemas.agent import AgentState, AgentStepResponse, UpdateAgent
30
32
  from letta.schemas.block import BlockUpdate
31
33
  from letta.schemas.embedding_config import EmbeddingConfig
@@ -153,7 +155,7 @@ class Agent(BaseAgent):
153
155
  raise ValueError(f"Invalid JSON format in message: {msg.text}")
154
156
  return None
155
157
 
156
- def update_memory_if_change(self, new_memory: Memory) -> bool:
158
+ def update_memory_if_changed(self, new_memory: Memory) -> bool:
157
159
  """
158
160
  Update internal memory object and system prompt if there have been modifications.
159
161
 
@@ -192,39 +194,45 @@ class Agent(BaseAgent):
192
194
  Execute tool modifications and persist the state of the agent.
193
195
  Note: only some agent state modifications will be persisted, such as data in the AgentState ORM and block data
194
196
  """
195
- # TODO: Get rid of this. This whole piece is pretty shady, that we exec the function to just get the type hints for args.
196
- env = {}
197
- env.update(globals())
198
- exec(target_letta_tool.source_code, env)
199
- callable_func = env[target_letta_tool.json_schema["name"]]
200
- spec = inspect.getfullargspec(callable_func).annotations
201
- for name, arg in function_args.items():
202
- if isinstance(function_args[name], dict):
203
- function_args[name] = spec[name](**function_args[name])
204
-
205
197
  # TODO: add agent manager here
206
198
  orig_memory_str = self.agent_state.memory.compile()
207
199
 
208
200
  # TODO: need to have an AgentState object that actually has full access to the block data
209
201
  # this is because the sandbox tools need to be able to access block.value to edit this data
210
202
  try:
211
- # TODO: This is NO BUENO
212
- # TODO: Matching purely by names is extremely problematic, users can create tools with these names and run them in the agent loop
213
- # TODO: We will have probably have to match the function strings exactly for safety
214
- if function_name in BASE_TOOLS:
203
+ if target_letta_tool.tool_type == ToolType.LETTA_CORE:
215
204
  # base tools are allowed to access the `Agent` object and run on the database
205
+ callable_func = get_function_from_module(LETTA_CORE_TOOL_MODULE_NAME, function_name)
216
206
  function_args["self"] = self # need to attach self to arg since it's dynamically linked
217
207
  function_response = callable_func(**function_args)
208
+ elif target_letta_tool.tool_type == ToolType.LETTA_MEMORY_CORE:
209
+ callable_func = get_function_from_module(LETTA_CORE_TOOL_MODULE_NAME, function_name)
210
+ agent_state_copy = self.agent_state.__deepcopy__()
211
+ function_args["agent_state"] = agent_state_copy # need to attach self to arg since it's dynamically linked
212
+ function_response = callable_func(**function_args)
213
+ self.update_memory_if_changed(agent_state_copy.memory)
218
214
  else:
215
+ # TODO: Get rid of this. This whole piece is pretty shady, that we exec the function to just get the type hints for args.
216
+ env = {}
217
+ env.update(globals())
218
+ exec(target_letta_tool.source_code, env)
219
+ callable_func = env[target_letta_tool.json_schema["name"]]
220
+ spec = inspect.getfullargspec(callable_func).annotations
221
+ for name, arg in function_args.items():
222
+ if isinstance(function_args[name], dict):
223
+ function_args[name] = spec[name](**function_args[name])
224
+
219
225
  # execute tool in a sandbox
220
226
  # TODO: allow agent_state to specify which sandbox to execute tools in
221
- sandbox_run_result = ToolExecutionSandbox(function_name, function_args, self.user).run(
222
- agent_state=self.agent_state.__deepcopy__()
223
- )
227
+ # TODO: This is only temporary, can remove after we publish a pip package with this object
228
+ agent_state_copy = self.agent_state.__deepcopy__()
229
+ agent_state_copy.tools = []
230
+
231
+ sandbox_run_result = ToolExecutionSandbox(function_name, function_args, self.user).run(agent_state=agent_state_copy)
224
232
  function_response, updated_agent_state = sandbox_run_result.func_return, sandbox_run_result.agent_state
225
233
  assert orig_memory_str == self.agent_state.memory.compile(), "Memory should not be modified in a sandbox tool"
226
234
  if updated_agent_state is not None:
227
- self.update_memory_if_change(updated_agent_state.memory)
235
+ self.update_memory_if_changed(updated_agent_state.memory)
228
236
  except Exception as e:
229
237
  # Need to catch error here, or else trunction wont happen
230
238
  # TODO: modify to function execution error
@@ -677,7 +685,7 @@ class Agent(BaseAgent):
677
685
  current_persisted_memory = Memory(
678
686
  blocks=[self.block_manager.get_block_by_id(block.id, actor=self.user) for block in self.agent_state.memory.get_blocks()]
679
687
  ) # read blocks from DB
680
- self.update_memory_if_change(current_persisted_memory)
688
+ self.update_memory_if_changed(current_persisted_memory)
681
689
 
682
690
  # Step 1: add user message
683
691
  if isinstance(messages, Message):
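
The hunk above replaces name matching against BASE_TOOLS with dispatch on the tool's tool_type. Below is a minimal, standalone sketch of the new routing, assuming the imports shown in the diff; it is not the actual Agent method, and the sandbox branch for custom tools is omitted.

from letta.constants import LETTA_CORE_TOOL_MODULE_NAME
from letta.functions.functions import get_function_from_module
from letta.orm.enums import ToolType

def execute_tool(agent, target_letta_tool, function_name: str, function_args: dict):
    # Sketch only: mirrors the branching in the method shown above.
    if target_letta_tool.tool_type == ToolType.LETTA_CORE:
        # core tools run in-process and may touch the Agent object / database
        callable_func = get_function_from_module(LETTA_CORE_TOOL_MODULE_NAME, function_name)
        function_args["self"] = agent
        return callable_func(**function_args)
    elif target_letta_tool.tool_type == ToolType.LETTA_MEMORY_CORE:
        # memory tools receive a deep copy of agent_state; edits are merged back afterwards
        callable_func = get_function_from_module(LETTA_CORE_TOOL_MODULE_NAME, function_name)
        agent_state_copy = agent.agent_state.__deepcopy__()
        function_args["agent_state"] = agent_state_copy
        function_response = callable_func(**function_args)
        agent.update_memory_if_changed(agent_state_copy.memory)
        return function_response
    else:
        # custom tools still exec their source_code and run inside ToolExecutionSandbox
        raise NotImplementedError("sandbox path omitted in this sketch")
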
letta/constants.py CHANGED
@@ -2,12 +2,16 @@ import os
2
2
  from logging import CRITICAL, DEBUG, ERROR, INFO, NOTSET, WARN, WARNING
3
3
 
4
4
  LETTA_DIR = os.path.join(os.path.expanduser("~"), ".letta")
5
+ LETTA_DIR_TOOL_SANDBOX = os.path.join(LETTA_DIR, "tool_sandbox_dir")
5
6
 
6
7
  ADMIN_PREFIX = "/v1/admin"
7
8
  API_PREFIX = "/v1"
8
9
  OPENAI_API_PREFIX = "/openai"
9
10
 
10
11
  COMPOSIO_ENTITY_ENV_VAR_KEY = "COMPOSIO_ENTITY"
12
+ COMPOSIO_TOOL_TAG_NAME = "composio"
13
+
14
+ LETTA_CORE_TOOL_MODULE_NAME = "letta.functions.function_sets.base"
11
15
 
12
16
  # String in the error message for when the context window is too large
13
17
  # Example full message:
letta/functions/functions.py CHANGED
@@ -1,3 +1,4 @@
1
+ import importlib
1
2
  import inspect
2
3
  from textwrap import dedent # remove indentation
3
4
  from types import ModuleType
@@ -64,6 +65,70 @@ def parse_source_code(func) -> str:
64
65
  return source_code
65
66
 
66
67
 
68
+ def get_function_from_module(module_name: str, function_name: str):
69
+ """
70
+ Dynamically imports a function from a specified module.
71
+
72
+ Args:
73
+ module_name (str): The name of the module to import (e.g., 'base').
74
+ function_name (str): The name of the function to retrieve.
75
+
76
+ Returns:
77
+ Callable: The imported function.
78
+
79
+ Raises:
80
+ ModuleNotFoundError: If the specified module cannot be found.
81
+ AttributeError: If the function is not found in the module.
82
+ """
83
+ try:
84
+ # Dynamically import the module
85
+ module = importlib.import_module(module_name)
86
+ # Retrieve the function
87
+ return getattr(module, function_name)
88
+ except ModuleNotFoundError:
89
+ raise ModuleNotFoundError(f"Module '{module_name}' not found.")
90
+ except AttributeError:
91
+ raise AttributeError(f"Function '{function_name}' not found in module '{module_name}'.")
92
+
93
+
94
+ def get_json_schema_from_module(module_name: str, function_name: str) -> dict:
95
+ """
96
+ Dynamically loads a specific function from a module and generates its JSON schema.
97
+
98
+ Args:
99
+ module_name (str): The name of the module to import (e.g., 'base').
100
+ function_name (str): The name of the function to retrieve.
101
+
102
+ Returns:
103
+ dict: The JSON schema for the specified function.
104
+
105
+ Raises:
106
+ ModuleNotFoundError: If the specified module cannot be found.
107
+ AttributeError: If the function is not found in the module.
108
+ ValueError: If the attribute is not a user-defined function.
109
+ """
110
+ try:
111
+ # Dynamically import the module
112
+ module = importlib.import_module(module_name)
113
+
114
+ # Retrieve the function
115
+ attr = getattr(module, function_name, None)
116
+
117
+ # Check if it's a user-defined function
118
+ if not (inspect.isfunction(attr) and attr.__module__ == module.__name__):
119
+ raise ValueError(f"'{function_name}' is not a user-defined function in module '{module_name}'")
120
+
121
+ # Generate schema (assuming a `generate_schema` function exists)
122
+ generated_schema = generate_schema(attr)
123
+
124
+ return generated_schema
125
+
126
+ except ModuleNotFoundError:
127
+ raise ModuleNotFoundError(f"Module '{module_name}' not found.")
128
+ except AttributeError:
129
+ raise AttributeError(f"Function '{function_name}' not found in module '{module_name}'.")
130
+
131
+
67
132
  def load_function_set(module: ModuleType) -> dict:
68
133
  """Load the functions and generate schema for them, given a module object"""
69
134
  function_dict = {}
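
The two helpers added above allow built-in tool implementations and their schemas to be resolved by name at runtime. A hypothetical usage sketch, assuming send_message is one of the functions defined in letta.functions.function_sets.base:

from letta.constants import LETTA_CORE_TOOL_MODULE_NAME
from letta.functions.functions import (
    get_function_from_module,
    get_json_schema_from_module,
)

# Resolve the callable and its OpenAI-style schema from the module path
# defined in letta/constants.py ("letta.functions.function_sets.base").
send_message = get_function_from_module(LETTA_CORE_TOOL_MODULE_NAME, "send_message")
schema = get_json_schema_from_module(LETTA_CORE_TOOL_MODULE_NAME, "send_message")
assert schema["name"] == "send_message"  # expected to hold for generated schemas
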
letta/llm_api/google_ai.py CHANGED
@@ -264,6 +264,7 @@ def convert_google_ai_response_to_chatcompletion(
264
264
  """
265
265
  try:
266
266
  choices = []
267
+ index = 0
267
268
  for candidate in response_json["candidates"]:
268
269
  content = candidate["content"]
269
270
 
@@ -272,86 +273,87 @@ def convert_google_ai_response_to_chatcompletion(
272
273
 
273
274
  parts = content["parts"]
274
275
  # TODO support parts / multimodal
275
- assert len(parts) == 1, f"Multi-part not yet supported:\n{parts}"
276
- response_message = parts[0]
277
-
278
- # Convert the actual message style to OpenAI style
279
- if "functionCall" in response_message and response_message["functionCall"] is not None:
280
- function_call = response_message["functionCall"]
281
- assert isinstance(function_call, dict), function_call
282
- function_name = function_call["name"]
283
- assert isinstance(function_name, str), function_name
284
- function_args = function_call["args"]
285
- assert isinstance(function_args, dict), function_args
286
-
287
- # NOTE: this also involves stripping the inner monologue out of the function
288
- if pull_inner_thoughts_from_args:
289
- from letta.local_llm.constants import INNER_THOUGHTS_KWARG
290
-
291
- assert INNER_THOUGHTS_KWARG in function_args, f"Couldn't find inner thoughts in function args:\n{function_call}"
292
- inner_thoughts = function_args.pop(INNER_THOUGHTS_KWARG)
293
- assert inner_thoughts is not None, f"Expected non-null inner thoughts function arg:\n{function_call}"
294
- else:
295
- inner_thoughts = None
296
-
297
- # Google AI API doesn't generate tool call IDs
298
- openai_response_message = Message(
299
- role="assistant", # NOTE: "model" -> "assistant"
300
- content=inner_thoughts,
301
- tool_calls=[
302
- ToolCall(
303
- id=get_tool_call_id(),
304
- type="function",
305
- function=FunctionCall(
306
- name=function_name,
307
- arguments=clean_json_string_extra_backslash(json_dumps(function_args)),
308
- ),
309
- )
310
- ],
311
- )
312
-
313
- else:
276
+ # TODO support parallel tool calling natively
277
+ # TODO Alternative here is to throw away everything else except for the first part
278
+ for response_message in parts:
279
+ # Convert the actual message style to OpenAI style
280
+ if "functionCall" in response_message and response_message["functionCall"] is not None:
281
+ function_call = response_message["functionCall"]
282
+ assert isinstance(function_call, dict), function_call
283
+ function_name = function_call["name"]
284
+ assert isinstance(function_name, str), function_name
285
+ function_args = function_call["args"]
286
+ assert isinstance(function_args, dict), function_args
287
+
288
+ # NOTE: this also involves stripping the inner monologue out of the function
289
+ if pull_inner_thoughts_from_args:
290
+ from letta.local_llm.constants import INNER_THOUGHTS_KWARG
291
+
292
+ assert INNER_THOUGHTS_KWARG in function_args, f"Couldn't find inner thoughts in function args:\n{function_call}"
293
+ inner_thoughts = function_args.pop(INNER_THOUGHTS_KWARG)
294
+ assert inner_thoughts is not None, f"Expected non-null inner thoughts function arg:\n{function_call}"
295
+ else:
296
+ inner_thoughts = None
297
+
298
+ # Google AI API doesn't generate tool call IDs
299
+ openai_response_message = Message(
300
+ role="assistant", # NOTE: "model" -> "assistant"
301
+ content=inner_thoughts,
302
+ tool_calls=[
303
+ ToolCall(
304
+ id=get_tool_call_id(),
305
+ type="function",
306
+ function=FunctionCall(
307
+ name=function_name,
308
+ arguments=clean_json_string_extra_backslash(json_dumps(function_args)),
309
+ ),
310
+ )
311
+ ],
312
+ )
314
313
 
315
- # Inner thoughts are the content by default
316
- inner_thoughts = response_message["text"]
317
-
318
- # Google AI API doesn't generate tool call IDs
319
- openai_response_message = Message(
320
- role="assistant", # NOTE: "model" -> "assistant"
321
- content=inner_thoughts,
322
- )
314
+ else:
323
315
 
324
- # Google AI API uses different finish reason strings than OpenAI
325
- # OpenAI: 'stop', 'length', 'function_call', 'content_filter', null
326
- # see: https://platform.openai.com/docs/guides/text-generation/chat-completions-api
327
- # Google AI API: FINISH_REASON_UNSPECIFIED, STOP, MAX_TOKENS, SAFETY, RECITATION, OTHER
328
- # see: https://ai.google.dev/api/python/google/ai/generativelanguage/Candidate/FinishReason
329
- finish_reason = candidate["finishReason"]
330
- if finish_reason == "STOP":
331
- openai_finish_reason = (
332
- "function_call"
333
- if openai_response_message.tool_calls is not None and len(openai_response_message.tool_calls) > 0
334
- else "stop"
335
- )
336
- elif finish_reason == "MAX_TOKENS":
337
- openai_finish_reason = "length"
338
- elif finish_reason == "SAFETY":
339
- openai_finish_reason = "content_filter"
340
- elif finish_reason == "RECITATION":
341
- openai_finish_reason = "content_filter"
342
- else:
343
- raise ValueError(f"Unrecognized finish reason in Google AI response: {finish_reason}")
344
-
345
- choices.append(
346
- Choice(
347
- finish_reason=openai_finish_reason,
348
- index=candidate["index"],
349
- message=openai_response_message,
316
+ # Inner thoughts are the content by default
317
+ inner_thoughts = response_message["text"]
318
+
319
+ # Google AI API doesn't generate tool call IDs
320
+ openai_response_message = Message(
321
+ role="assistant", # NOTE: "model" -> "assistant"
322
+ content=inner_thoughts,
323
+ )
324
+
325
+ # Google AI API uses different finish reason strings than OpenAI
326
+ # OpenAI: 'stop', 'length', 'function_call', 'content_filter', null
327
+ # see: https://platform.openai.com/docs/guides/text-generation/chat-completions-api
328
+ # Google AI API: FINISH_REASON_UNSPECIFIED, STOP, MAX_TOKENS, SAFETY, RECITATION, OTHER
329
+ # see: https://ai.google.dev/api/python/google/ai/generativelanguage/Candidate/FinishReason
330
+ finish_reason = candidate["finishReason"]
331
+ if finish_reason == "STOP":
332
+ openai_finish_reason = (
333
+ "function_call"
334
+ if openai_response_message.tool_calls is not None and len(openai_response_message.tool_calls) > 0
335
+ else "stop"
336
+ )
337
+ elif finish_reason == "MAX_TOKENS":
338
+ openai_finish_reason = "length"
339
+ elif finish_reason == "SAFETY":
340
+ openai_finish_reason = "content_filter"
341
+ elif finish_reason == "RECITATION":
342
+ openai_finish_reason = "content_filter"
343
+ else:
344
+ raise ValueError(f"Unrecognized finish reason in Google AI response: {finish_reason}")
345
+
346
+ choices.append(
347
+ Choice(
348
+ finish_reason=openai_finish_reason,
349
+ index=index,
350
+ message=openai_response_message,
351
+ )
350
352
  )
351
- )
353
+ index += 1
352
354
 
353
- if len(choices) > 1:
354
- raise UserWarning(f"Unexpected number of candidates in response (expected 1, got {len(choices)})")
355
+ # if len(choices) > 1:
356
+ # raise UserWarning(f"Unexpected number of candidates in response (expected 1, got {len(choices)})")
355
357
 
356
358
  # NOTE: some of the Google AI APIs show UsageMetadata in the response, but it seems to not exist?
357
359
  # "usageMetadata": {
letta/orm/agent.py CHANGED
@@ -1,7 +1,7 @@
1
1
  import uuid
2
2
  from typing import TYPE_CHECKING, List, Optional
3
3
 
4
- from sqlalchemy import JSON, String, UniqueConstraint
4
+ from sqlalchemy import JSON, String
5
5
  from sqlalchemy.orm import Mapped, mapped_column, relationship
6
6
 
7
7
  from letta.orm.block import Block
@@ -27,7 +27,6 @@ if TYPE_CHECKING:
27
27
  class Agent(SqlalchemyBase, OrganizationMixin):
28
28
  __tablename__ = "agents"
29
29
  __pydantic_model__ = PydanticAgentState
30
- __table_args__ = (UniqueConstraint("organization_id", "name", name="unique_org_agent_name"),)
31
30
 
32
31
  # agent generates its own id
33
32
  # TODO: We want to migrate all the ORM models to do this, so we will need to move this to the SqlalchemyBase
@@ -109,6 +108,7 @@ class Agent(SqlalchemyBase, OrganizationMixin):
109
108
  """converts to the basic pydantic model counterpart"""
110
109
  state = {
111
110
  "id": self.id,
111
+ "organization_id": self.organization_id,
112
112
  "name": self.name,
113
113
  "description": self.description,
114
114
  "message_ids": self.message_ids,
letta/orm/base.py CHANGED
@@ -17,6 +17,16 @@ class CommonSqlalchemyMetaMixins(Base):
17
17
  updated_at: Mapped[Optional[datetime]] = mapped_column(DateTime(timezone=True), server_default=func.now(), server_onupdate=func.now())
18
18
  is_deleted: Mapped[bool] = mapped_column(Boolean, server_default=text("FALSE"))
19
19
 
20
+ def set_updated_at(self, timestamp: Optional[datetime] = None) -> None:
21
+ """
22
+ Set the updated_at timestamp for the model instance.
23
+
24
+ Args:
25
+ timestamp (Optional[datetime]): The timestamp to set.
26
+ If None, uses the current UTC time.
27
+ """
28
+ self.updated_at = timestamp or datetime.utcnow()
29
+
20
30
  def _set_created_and_updated_by_fields(self, actor_id: str) -> None:
21
31
  """Populate created_by_id and last_updated_by_id based on actor."""
22
32
  if not self.created_by_id:
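
The new set_updated_at helper lets callers stamp updated_at in Python (SqlalchemyBase.create() now does exactly that, as shown further below). A standalone sketch of the pattern, using a demo class rather than letta's actual mixin:

from datetime import datetime
from typing import Optional

class TimestampedDemo:
    """Demo only: mirrors CommonSqlalchemyMetaMixins.set_updated_at above."""

    updated_at: Optional[datetime] = None

    def set_updated_at(self, timestamp: Optional[datetime] = None) -> None:
        # default to "now" in UTC, otherwise honor the explicit timestamp
        self.updated_at = timestamp or datetime.utcnow()

row = TimestampedDemo()
row.set_updated_at()                       # stamp the current time
row.set_updated_at(datetime(2025, 1, 11))  # or pin an explicit timestamp
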
letta/orm/provider.py CHANGED
@@ -4,7 +4,7 @@ from sqlalchemy.orm import Mapped, mapped_column, relationship
4
4
 
5
5
  from letta.orm.mixins import OrganizationMixin
6
6
  from letta.orm.sqlalchemy_base import SqlalchemyBase
7
- from letta.providers import Provider as PydanticProvider
7
+ from letta.schemas.providers import Provider as PydanticProvider
8
8
 
9
9
  if TYPE_CHECKING:
10
10
  from letta.orm.organization import Organization
letta/orm/sqlalchemy_base.py CHANGED
@@ -100,9 +100,13 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
100
100
 
101
101
  if match_all_tags:
102
102
  # Match ALL tags - use subqueries
103
- for tag in tags:
104
- subquery = select(cls.tags.property.mapper.class_.agent_id).where(cls.tags.property.mapper.class_.tag == tag)
105
- query = query.filter(cls.id.in_(subquery))
103
+ subquery = (
104
+ select(cls.tags.property.mapper.class_.agent_id)
105
+ .where(cls.tags.property.mapper.class_.tag.in_(tags))
106
+ .group_by(cls.tags.property.mapper.class_.agent_id)
107
+ .having(func.count() == len(tags))
108
+ )
109
+ query = query.filter(cls.id.in_(subquery))
106
110
  else:
107
111
  # Match ANY tag - use join and filter
108
112
  query = (
@@ -275,6 +279,8 @@ class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
275
279
  if actor:
276
280
  self._set_created_and_updated_by_fields(actor.id)
277
281
 
282
+ self.set_updated_at()
283
+
278
284
  with db_session as session:
279
285
  session.add(self)
280
286
  session.commit()
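
The match_all_tags branch now issues a single grouped subquery (relational division: group the tag rows by agent and keep agents whose matching-tag count equals the number of requested tags) instead of one correlated subquery per tag. A standalone sketch of the same pattern, with a hypothetical association model named AgentsTags; the real method resolves that class from cls.tags.property.mapper.class_ as shown in the hunk:

from sqlalchemy import func, select

def agents_with_all_tags(AgentModel, AgentsTags, tags: list[str]):
    # keep only the agent_ids that carry every requested tag
    subquery = (
        select(AgentsTags.agent_id)
        .where(AgentsTags.tag.in_(tags))
        .group_by(AgentsTags.agent_id)
        .having(func.count() == len(tags))
    )
    return select(AgentModel).where(AgentModel.id.in_(subquery))
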
letta/orm/tool.py CHANGED
@@ -4,7 +4,7 @@ from sqlalchemy import JSON, String, UniqueConstraint
4
4
  from sqlalchemy.orm import Mapped, mapped_column, relationship
5
5
 
6
6
  # TODO everything in functions should live in this model
7
- from letta.orm.enums import ToolSourceType
7
+ from letta.orm.enums import ToolSourceType, ToolType
8
8
  from letta.orm.mixins import OrganizationMixin
9
9
  from letta.orm.sqlalchemy_base import SqlalchemyBase
10
10
  from letta.schemas.tool import Tool as PydanticTool
@@ -29,12 +29,17 @@ class Tool(SqlalchemyBase, OrganizationMixin):
29
29
  __table_args__ = (UniqueConstraint("name", "organization_id", name="uix_name_organization"),)
30
30
 
31
31
  name: Mapped[str] = mapped_column(doc="The display name of the tool.")
32
+ tool_type: Mapped[ToolType] = mapped_column(
33
+ String,
34
+ default=ToolType.CUSTOM,
35
+ doc="The type of tool. This affects whether or not we generate json_schema and source_code on the fly.",
36
+ )
32
37
  return_char_limit: Mapped[int] = mapped_column(nullable=True, doc="The maximum number of characters the tool can return.")
33
38
  description: Mapped[Optional[str]] = mapped_column(nullable=True, doc="The description of the tool.")
34
39
  tags: Mapped[List] = mapped_column(JSON, doc="Metadata tags used to filter tools.")
35
40
  source_type: Mapped[ToolSourceType] = mapped_column(String, doc="The type of the source code.", default=ToolSourceType.json)
36
41
  source_code: Mapped[Optional[str]] = mapped_column(String, doc="The source code of the function.")
37
- json_schema: Mapped[dict] = mapped_column(JSON, default=lambda: {}, doc="The OAI compatable JSON schema of the function.")
42
+ json_schema: Mapped[Optional[dict]] = mapped_column(JSON, default=lambda: {}, doc="The OAI compatable JSON schema of the function.")
38
43
  module: Mapped[Optional[str]] = mapped_column(
39
44
  String, nullable=True, doc="the module path from which this tool was derived in the codebase."
40
45
  )
letta/providers.py → letta/schemas/providers.py RENAMED
@@ -1,3 +1,4 @@
1
+ from datetime import datetime
1
2
  from typing import List, Optional
2
3
 
3
4
  from pydantic import Field, model_validator
@@ -8,7 +9,6 @@ from letta.llm_api.azure_openai_constants import AZURE_MODEL_TO_CONTEXT_LENGTH
8
9
  from letta.schemas.embedding_config import EmbeddingConfig
9
10
  from letta.schemas.letta_base import LettaBase
10
11
  from letta.schemas.llm_config import LLMConfig
11
- from letta.services.organization_manager import OrganizationManager
12
12
 
13
13
 
14
14
  class ProviderBase(LettaBase):
@@ -16,9 +16,15 @@ class ProviderBase(LettaBase):
16
16
 
17
17
 
18
18
  class Provider(ProviderBase):
19
+ id: Optional[str] = Field(None, description="The id of the provider, lazily created by the database manager.")
19
20
  name: str = Field(..., description="The name of the provider")
20
21
  api_key: Optional[str] = Field(None, description="API key used for requests to the provider.")
21
- organization_id: Optional[str] = Field(OrganizationManager.DEFAULT_ORG_ID, description="The organization id of the user")
22
+ organization_id: Optional[str] = Field(None, description="The organization id of the user")
23
+ updated_at: Optional[datetime] = Field(None, description="The last update timestamp of the provider.")
24
+
25
+ def resolve_identifier(self):
26
+ if not self.id:
27
+ self.id = ProviderBase._generate_id(prefix=ProviderBase.__id_prefix__)
22
28
 
23
29
  def list_llm_models(self) -> List[LLMConfig]:
24
30
  return []
@@ -40,7 +46,6 @@ class Provider(ProviderBase):
40
46
  class ProviderCreate(ProviderBase):
41
47
  name: str = Field(..., description="The name of the provider.")
42
48
  api_key: str = Field(..., description="API key used for requests to the provider.")
43
- organization_id: str = Field(..., description="The organization id that this provider information pertains to.")
44
49
 
45
50
 
46
51
  class ProviderUpdate(ProviderBase):
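
With id, organization_id, and updated_at now optional, provider ids are minted lazily rather than supplied by the caller. A short sketch, assuming the Provider schema shown above:

from letta.schemas.providers import Provider

provider = Provider(name="my-openai", api_key="sk-example")
assert provider.id is None      # no longer populated at construction time
provider.resolve_identifier()   # called by the manager just before persisting
assert provider.id is not None  # prefix is taken from ProviderBase.__id_prefix__
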
letta/schemas/tool.py CHANGED
@@ -1,11 +1,12 @@
1
- from typing import Dict, List, Optional
1
+ from typing import Any, Dict, List, Optional
2
2
 
3
3
  from pydantic import Field, model_validator
4
4
 
5
- from letta.constants import FUNCTION_RETURN_CHAR_LIMIT
6
- from letta.functions.functions import derive_openai_json_schema
5
+ from letta.constants import COMPOSIO_TOOL_TAG_NAME, FUNCTION_RETURN_CHAR_LIMIT, LETTA_CORE_TOOL_MODULE_NAME
6
+ from letta.functions.functions import derive_openai_json_schema, get_json_schema_from_module
7
7
  from letta.functions.helpers import generate_composio_tool_wrapper, generate_langchain_tool_wrapper
8
8
  from letta.functions.schema_generator import generate_schema_from_args_schema_v2
9
+ from letta.orm.enums import ToolType
9
10
  from letta.schemas.letta_base import LettaBase
10
11
  from letta.schemas.openai.chat_completions import ToolCall
11
12
 
@@ -28,6 +29,7 @@ class Tool(BaseTool):
28
29
  """
29
30
 
30
31
  id: str = BaseTool.generate_id_field()
32
+ tool_type: ToolType = Field(ToolType.CUSTOM, description="The type of the tool.")
31
33
  description: Optional[str] = Field(None, description="The description of the tool.")
32
34
  source_type: Optional[str] = Field(None, description="The type of the source code.")
33
35
  module: Optional[str] = Field(None, description="The module of the function.")
@@ -36,7 +38,7 @@ class Tool(BaseTool):
36
38
  tags: List[str] = Field([], description="Metadata tags.")
37
39
 
38
40
  # code
39
- source_code: str = Field(..., description="The source code of the function.")
41
+ source_code: Optional[str] = Field(None, description="The source code of the function.")
40
42
  json_schema: Optional[Dict] = Field(None, description="The JSON schema of the function.")
41
43
 
42
44
  # tool configuration
@@ -51,9 +53,19 @@ class Tool(BaseTool):
51
53
  """
52
54
  Populate missing fields: name, description, and json_schema.
53
55
  """
54
- # Derive JSON schema if not provided
55
- if not self.json_schema:
56
- self.json_schema = derive_openai_json_schema(source_code=self.source_code)
56
+ if self.tool_type == ToolType.CUSTOM:
57
+ # If it's a custom tool, we need to ensure source_code is present
58
+ if not self.source_code:
59
+ raise ValueError(f"Custom tool with id={self.id} is missing source_code field.")
60
+
61
+ # Always derive json_schema for freshest possible json_schema
62
+ # TODO: Instead of checking the tag, we should having `COMPOSIO` as a specific ToolType
63
+ # TODO: We skip this for Composio bc composio json schemas are derived differently
64
+ if not (COMPOSIO_TOOL_TAG_NAME in self.tags):
65
+ self.json_schema = derive_openai_json_schema(source_code=self.source_code)
66
+ elif self.tool_type in {ToolType.LETTA_CORE, ToolType.LETTA_MEMORY_CORE}:
67
+ # If it's letta core tool, we generate the json_schema on the fly here
68
+ self.json_schema = get_json_schema_from_module(module_name=LETTA_CORE_TOOL_MODULE_NAME, function_name=self.name)
57
69
 
58
70
  # Derive name from the JSON schema if not provided
59
71
  if not self.name:
@@ -125,7 +137,7 @@ class ToolCreate(LettaBase):
125
137
 
126
138
  description = composio_tool.description
127
139
  source_type = "python"
128
- tags = ["composio"]
140
+ tags = [COMPOSIO_TOOL_TAG_NAME]
129
141
  wrapper_func_name, wrapper_function_str = generate_composio_tool_wrapper(action_name)
130
142
  json_schema = generate_schema_from_args_schema_v2(composio_tool.args_schema, name=wrapper_func_name, description=description)
131
143
 
@@ -215,7 +227,7 @@ class ToolUpdate(LettaBase):
215
227
 
216
228
  class ToolRunFromSource(LettaBase):
217
229
  source_code: str = Field(..., description="The source code of the function.")
218
- args: Dict[str, str] = Field(..., description="The arguments to pass to the tool.")
230
+ args: Dict[str, Any] = Field(..., description="The arguments to pass to the tool.")
219
231
  env_vars: Dict[str, str] = Field(None, description="The environment variables to pass to the tool.")
220
232
  name: Optional[str] = Field(None, description="The name of the tool to run.")
221
233
  source_type: Optional[str] = Field(None, description="The type of the source code.")
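
Schema resolution now branches on tool_type: custom tools must carry source_code (and get their schema re-derived from it, except Composio wrappers), while Letta core/memory tools get their schema generated from the base module on the fly. A condensed restatement for reference; the validator in the diff above is authoritative:

from letta.constants import COMPOSIO_TOOL_TAG_NAME, LETTA_CORE_TOOL_MODULE_NAME
from letta.functions.functions import derive_openai_json_schema, get_json_schema_from_module
from letta.orm.enums import ToolType

def resolve_json_schema(tool):
    if tool.tool_type == ToolType.CUSTOM:
        if not tool.source_code:
            raise ValueError(f"Custom tool with id={tool.id} is missing source_code field.")
        if COMPOSIO_TOOL_TAG_NAME not in tool.tags:
            # always re-derive for custom tools, except Composio wrappers
            return derive_openai_json_schema(source_code=tool.source_code)
        return tool.json_schema
    if tool.tool_type in {ToolType.LETTA_CORE, ToolType.LETTA_MEMORY_CORE}:
        # built-in tools: schema comes straight from the module, on the fly
        return get_json_schema_from_module(LETTA_CORE_TOOL_MODULE_NAME, tool.name)
    return tool.json_schema
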
letta/server/rest_api/app.py CHANGED
@@ -244,10 +244,6 @@ def create_application() -> "FastAPI":
244
244
  # / static files
245
245
  mount_static_files(app)
246
246
 
247
- @app.on_event("startup")
248
- def on_startup():
249
- generate_openapi_schema(app)
250
-
251
247
  @app.on_event("shutdown")
252
248
  def on_shutdown():
253
249
  global server
letta/server/rest_api/interface.py CHANGED
@@ -198,10 +198,6 @@ class QueuingInterface(AgentInterface):
198
198
  assert is_utc_datetime(msg_obj.created_at), msg_obj.created_at
199
199
  new_message["date"] = msg_obj.created_at.isoformat()
200
200
  else:
201
- # FIXME this is a total hack
202
- assert self.buffer.qsize() > 1, "Tried to reach back to grab function call data, but couldn't find a buffer message."
203
- # TODO also should not be accessing protected member here
204
-
205
201
  new_message["id"] = self.buffer.queue[-1]["message_api"]["id"]
206
202
  # assert is_utc_datetime(msg_obj.created_at), msg_obj.created_at
207
203
  new_message["date"] = self.buffer.queue[-1]["message_api"]["date"]
letta/server/rest_api/routers/v1/__init__.py CHANGED
@@ -3,8 +3,19 @@ from letta.server.rest_api.routers.v1.blocks import router as blocks_router
3
3
  from letta.server.rest_api.routers.v1.health import router as health_router
4
4
  from letta.server.rest_api.routers.v1.jobs import router as jobs_router
5
5
  from letta.server.rest_api.routers.v1.llms import router as llm_router
6
+ from letta.server.rest_api.routers.v1.providers import router as providers_router
6
7
  from letta.server.rest_api.routers.v1.sandbox_configs import router as sandbox_configs_router
7
8
  from letta.server.rest_api.routers.v1.sources import router as sources_router
8
9
  from letta.server.rest_api.routers.v1.tools import router as tools_router
9
10
 
10
- ROUTERS = [tools_router, sources_router, agents_router, llm_router, blocks_router, jobs_router, health_router, sandbox_configs_router]
11
+ ROUTERS = [
12
+ tools_router,
13
+ sources_router,
14
+ agents_router,
15
+ llm_router,
16
+ blocks_router,
17
+ jobs_router,
18
+ health_router,
19
+ sandbox_configs_router,
20
+ providers_router,
21
+ ]
letta/server/rest_api/routers/v1/providers.py CHANGED
@@ -1,15 +1,17 @@
1
- from fastapi import APIRouter, Depends
1
+ from typing import TYPE_CHECKING, List, Optional
2
2
 
3
- from letta.providers import Provider, ProviderCreate, ProviderUpdate
3
+ from fastapi import APIRouter, Body, Depends, Header, HTTPException, Query
4
+
5
+ from letta.schemas.providers import Provider, ProviderCreate, ProviderUpdate
4
6
  from letta.server.rest_api.utils import get_letta_server
5
7
 
6
8
  if TYPE_CHECKING:
7
9
  from letta.server.server import SyncServer
8
10
 
9
- router = APIRouter(prefix="/providers", tags=["providers", "admin"])
11
+ router = APIRouter(prefix="/providers", tags=["providers"])
10
12
 
11
13
 
12
- @router.get("/", tags=["admin"], response_model=List[Provider], operation_id="list_providers")
14
+ @router.get("/", tags=["providers"], response_model=List[Provider], operation_id="list_providers")
13
15
  def list_providers(
14
16
  cursor: Optional[str] = Query(None),
15
17
  limit: Optional[int] = Query(50),
@@ -27,20 +29,23 @@ def list_providers(
27
29
  return providers
28
30
 
29
31
 
30
- @router.post("/", tags=["admin"], response_model=Provider, operation_id="create_provider")
32
+ @router.post("/", tags=["providers"], response_model=Provider, operation_id="create_provider")
31
33
  def create_provider(
32
34
  request: ProviderCreate = Body(...),
33
35
  server: "SyncServer" = Depends(get_letta_server),
36
+ user_id: Optional[str] = Header(None, alias="user_id"), # Extract user_id from header, default to None if not present
34
37
  ):
35
38
  """
36
39
  Create a new custom provider
37
40
  """
41
+ actor = server.user_manager.get_user_or_default(user_id=user_id)
42
+
38
43
  provider = Provider(**request.model_dump())
39
- provider = server.provider_manager.create_provider(provider)
44
+ provider = server.provider_manager.create_provider(provider, actor=actor)
40
45
  return provider
41
46
 
42
47
 
43
- @router.put("/", tags=["admin"], response_model=Provider, operation_id="update_provider")
48
+ @router.put("/", tags=["providers"], response_model=Provider, operation_id="update_provider")
44
49
  def update_provider(
45
50
  request: ProviderUpdate = Body(...),
46
51
  server: "SyncServer" = Depends(get_letta_server),
@@ -52,7 +57,7 @@ def update_provider(
52
57
  return provider
53
58
 
54
59
 
55
- @router.delete("/", tags=["admin"], response_model=Provider, operation_id="delete_provider")
60
+ @router.delete("/", tags=["providers"], response_model=None, operation_id="delete_provider")
56
61
  def delete_provider(
57
62
  provider_id: str = Query(..., description="The provider_id key to be deleted."),
58
63
  server: "SyncServer" = Depends(get_letta_server),
@@ -61,12 +66,8 @@ def delete_provider(
61
66
  Delete an existing custom provider
62
67
  """
63
68
  try:
64
- provider = server.provider_manager.get_provider_by_id(provider_id=provider_id)
65
- if provider is None:
66
- raise HTTPException(status_code=404, detail=f"Provider does not exist")
67
69
  server.provider_manager.delete_provider_by_id(provider_id=provider_id)
68
70
  except HTTPException:
69
71
  raise
70
72
  except Exception as e:
71
73
  raise HTTPException(status_code=500, detail=f"{e}")
72
- return user
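
The provider routes move off the admin tag, create_provider now reads an optional user_id header to resolve the acting user, and delete_provider no longer ends with a stray return user. A hypothetical client sketch, assuming a letta server on the default local port and a made-up user id:

import requests

base = "http://localhost:8283/v1/providers/"  # default local port assumed
headers = {"user_id": "user-00000000"}        # hypothetical; the header is optional

created = requests.post(
    base,
    json={"name": "my-openai", "api_key": "sk-example"},  # ProviderCreate no longer takes organization_id
    headers=headers,
).json()

requests.delete(base, params={"provider_id": created["id"]}, headers=headers)
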
letta/server/server.py CHANGED
@@ -25,19 +25,6 @@ from letta.log import get_logger
25
25
  from letta.offline_memory_agent import OfflineMemoryAgent
26
26
  from letta.orm import Base
27
27
  from letta.orm.errors import NoResultFound
28
- from letta.providers import (
29
- AnthropicProvider,
30
- AzureProvider,
31
- GoogleAIProvider,
32
- GroqProvider,
33
- LettaProvider,
34
- OllamaProvider,
35
- OpenAIProvider,
36
- Provider,
37
- TogetherProvider,
38
- VLLMChatCompletionsProvider,
39
- VLLMCompletionsProvider,
40
- )
41
28
  from letta.schemas.agent import AgentState, AgentType, CreateAgent
42
29
  from letta.schemas.block import BlockUpdate
43
30
  from letta.schemas.embedding_config import EmbeddingConfig
@@ -52,6 +39,19 @@ from letta.schemas.memory import ArchivalMemorySummary, ContextWindowOverview, M
52
39
  from letta.schemas.message import Message, MessageCreate, MessageRole, MessageUpdate
53
40
  from letta.schemas.organization import Organization
54
41
  from letta.schemas.passage import Passage
42
+ from letta.schemas.providers import (
43
+ AnthropicProvider,
44
+ AzureProvider,
45
+ GoogleAIProvider,
46
+ GroqProvider,
47
+ LettaProvider,
48
+ OllamaProvider,
49
+ OpenAIProvider,
50
+ Provider,
51
+ TogetherProvider,
52
+ VLLMChatCompletionsProvider,
53
+ VLLMCompletionsProvider,
54
+ )
55
55
  from letta.schemas.sandbox_config import SandboxType
56
56
  from letta.schemas.source import Source
57
57
  from letta.schemas.tool import Tool
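
For downstream imports, the practical effect of this hunk (together with the removal of letta/providers.py from the wheel, visible in the RECORD diff below) is a new module path for provider classes, roughly:

# before this release:
# from letta.providers import OpenAIProvider, Provider
# from this release on:
from letta.schemas.providers import OpenAIProvider, Provider
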
letta/services/provider_manager.py CHANGED
@@ -1,8 +1,9 @@
1
1
  from typing import List, Optional
2
2
 
3
3
  from letta.orm.provider import Provider as ProviderModel
4
- from letta.providers import Provider as PydanticProvider
5
- from letta.providers import ProviderUpdate
4
+ from letta.schemas.providers import Provider as PydanticProvider
5
+ from letta.schemas.providers import ProviderUpdate
6
+ from letta.schemas.user import User as PydanticUser
6
7
  from letta.utils import enforce_types
7
8
 
8
9
 
@@ -14,10 +15,16 @@ class ProviderManager:
14
15
  self.session_maker = db_context
15
16
 
16
17
  @enforce_types
17
- def create_provider(self, provider: PydanticProvider) -> PydanticProvider:
18
+ def create_provider(self, provider: PydanticProvider, actor: PydanticUser) -> PydanticProvider:
18
19
  """Create a new provider if it doesn't already exist."""
19
20
  with self.session_maker() as session:
20
- new_provider = ProviderModel(**provider.model_dump())
21
+ # Assign the organization id based on the actor
22
+ provider.organization_id = actor.organization_id
23
+
24
+ # Lazily create the provider id prior to persistence
25
+ provider.resolve_identifier()
26
+
27
+ new_provider = ProviderModel(**provider.model_dump(exclude_unset=True))
21
28
  new_provider.create(session)
22
29
  return new_provider.to_pydantic()
23
30
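
create_provider now requires an actor and derives the organization from it rather than from the request body. A server-side sketch with hypothetical server and actor objects (actor being a PydanticUser):

from letta.schemas.providers import Provider

provider = Provider(name="my-openai", api_key="sk-example")
created = server.provider_manager.create_provider(provider, actor=actor)
assert created.organization_id == actor.organization_id  # org comes from the actor
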
 
letta/services/sandbox_config_manager.py CHANGED
@@ -1,6 +1,6 @@
1
- from pathlib import Path
2
1
  from typing import Dict, List, Optional
3
2
 
3
+ from letta.constants import LETTA_DIR_TOOL_SANDBOX
4
4
  from letta.log import get_logger
5
5
  from letta.orm.errors import NoResultFound
6
6
  from letta.orm.sandbox_config import SandboxConfig as SandboxConfigModel
@@ -35,7 +35,7 @@ class SandboxConfigManager:
35
35
  default_config = {} # Empty
36
36
  else:
37
37
  # TODO: May want to move this to environment variables v.s. persisting in database
38
- default_local_sandbox_path = str(Path(__file__).parent / "tool_sandbox_env")
38
+ default_local_sandbox_path = LETTA_DIR_TOOL_SANDBOX
39
39
  default_config = LocalSandboxConfig(sandbox_dir=default_local_sandbox_path).model_dump(exclude_none=True)
40
40
 
41
41
  sandbox_config = self.create_or_update_sandbox_config(SandboxConfigCreate(config=default_config), actor=actor)
letta/services/tool_execution_sandbox.py CHANGED
@@ -119,8 +119,9 @@ class ToolExecutionSandbox:
119
119
  env.update(additional_env_vars)
120
120
 
121
121
  # Safety checks
122
- if not os.path.isdir(local_configs.sandbox_dir):
123
- raise FileNotFoundError(f"Sandbox directory does not exist: {local_configs.sandbox_dir}")
122
+ if not os.path.exists(local_configs.sandbox_dir) or not os.path.isdir(local_configs.sandbox_dir):
123
+ logger.warning(f"Sandbox directory does not exist, creating: {local_configs.sandbox_dir}")
124
+ os.makedirs(local_configs.sandbox_dir)
124
125
 
125
126
  # Write the code to a temp file in the sandbox_dir
126
127
  with tempfile.NamedTemporaryFile(mode="w", dir=local_configs.sandbox_dir, suffix=".py", delete=False) as temp_file:
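
Taken together with the new LETTA_DIR_TOOL_SANDBOX constant, the default local sandbox moves from a path inside the installed package to a directory under ~/.letta, and a missing directory is now created instead of raising. A rough illustration, assuming a standard home directory:

import os
from letta.constants import LETTA_DIR_TOOL_SANDBOX

print(LETTA_DIR_TOOL_SANDBOX)  # <home>/.letta/tool_sandbox_dir

# ToolExecutionSandbox now creates the directory on first use rather than failing:
if not os.path.isdir(LETTA_DIR_TOOL_SANDBOX):
    os.makedirs(LETTA_DIR_TOOL_SANDBOX)
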
letta/services/tool_manager.py CHANGED
@@ -1,10 +1,10 @@
1
1
  import importlib
2
- import inspect
3
2
  import warnings
4
3
  from typing import List, Optional
5
4
 
6
5
  from letta.constants import BASE_MEMORY_TOOLS, BASE_TOOLS
7
6
  from letta.functions.functions import derive_openai_json_schema, load_function_set
7
+ from letta.orm.enums import ToolType
8
8
 
9
9
  # TODO: Remove this once we translate all of these to the ORM
10
10
  from letta.orm.errors import NoResultFound
@@ -32,10 +32,10 @@ class ToolManager:
32
32
 
33
33
  self.session_maker = db_context
34
34
 
35
+ # TODO: Refactor this across the codebase to use CreateTool instead of passing in a Tool object
35
36
  @enforce_types
36
37
  def create_or_update_tool(self, pydantic_tool: PydanticTool, actor: PydanticUser) -> PydanticTool:
37
38
  """Create a new tool based on the ToolCreate schema."""
38
- # Derive json_schema
39
39
  tool = self.get_tool_by_name(tool_name=pydantic_tool.name, actor=actor)
40
40
  if tool:
41
41
  # Put to dict and remove fields that should not be reset
@@ -63,6 +63,7 @@ class ToolManager:
63
63
  if pydantic_tool.description is None:
64
64
  pydantic_tool.description = pydantic_tool.json_schema.get("description", None)
65
65
  tool_data = pydantic_tool.model_dump()
66
+
66
67
  tool = ToolModel(**tool_data)
67
68
  tool.create(session, actor=actor) # Re-raise other database-related errors
68
69
  return tool.to_pydantic()
@@ -113,8 +114,6 @@ class ToolManager:
113
114
  # If source code is changed and a new json_schema is not provided, we want to auto-refresh the schema
114
115
  if "source_code" in update_data.keys() and "json_schema" not in update_data.keys():
115
116
  pydantic_tool = tool.to_pydantic()
116
-
117
- update_data["name"] if "name" in update_data.keys() else None
118
117
  new_schema = derive_openai_json_schema(source_code=pydantic_tool.source_code)
119
118
 
120
119
  tool.json_schema = new_schema
@@ -155,12 +154,19 @@ class ToolManager:
155
154
  tools = []
156
155
  for name, schema in functions_to_schema.items():
157
156
  if name in BASE_TOOLS + BASE_MEMORY_TOOLS:
158
- # print([str(inspect.getsource(line)) for line in schema["imports"]])
159
- source_code = inspect.getsource(schema["python_function"])
160
157
  tags = [module_name]
161
158
  if module_name == "base":
162
159
  tags.append("letta-base")
163
160
 
161
+ # BASE_MEMORY_TOOLS should be executed in an e2b sandbox
162
+ # so they should NOT be letta_core tools, instead, treated as custom tools
163
+ if name in BASE_TOOLS:
164
+ tool_type = ToolType.LETTA_CORE
165
+ elif name in BASE_MEMORY_TOOLS:
166
+ tool_type = ToolType.LETTA_MEMORY_CORE
167
+ else:
168
+ raise ValueError(f"Tool name {name} is not in the list of base tool names: {BASE_TOOLS + BASE_MEMORY_TOOLS}")
169
+
164
170
  # create to tool
165
171
  tools.append(
166
172
  self.create_or_update_tool(
@@ -168,9 +174,7 @@ class ToolManager:
168
174
  name=name,
169
175
  tags=tags,
170
176
  source_type="python",
171
- module=schema["module"],
172
- source_code=source_code,
173
- json_schema=schema["json_schema"],
177
+ tool_type=tool_type,
174
178
  ),
175
179
  actor=actor,
176
180
  )
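
Base tools are now upserted without source_code, module, or json_schema; they carry a tool_type instead, which the schemas and the agent loop use to regenerate everything on the fly. A condensed, illustrative sketch of the classification:

from letta.constants import BASE_MEMORY_TOOLS, BASE_TOOLS
from letta.orm.enums import ToolType

def classify_base_tool(name: str) -> ToolType:
    if name in BASE_TOOLS:
        return ToolType.LETTA_CORE          # executed in-process against the Agent/DB
    if name in BASE_MEMORY_TOOLS:
        return ToolType.LETTA_MEMORY_CORE   # executed against a copy of agent_state
    raise ValueError(f"Tool name {name} is not in the list of base tool names")
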
letta/settings.py CHANGED
@@ -10,10 +10,13 @@ from letta.local_llm.constants import DEFAULT_WRAPPER_NAME
10
10
  class ToolSettings(BaseSettings):
11
11
  composio_api_key: Optional[str] = None
12
12
 
13
- # Sandbox configurations
13
+ # E2B Sandbox configurations
14
14
  e2b_api_key: Optional[str] = None
15
15
  e2b_sandbox_template_id: Optional[str] = None # Updated manually
16
16
 
17
+ # Local Sandbox configurations
18
+ local_sandbox_dir: Optional[str] = None
19
+
17
20
 
18
21
  class ModelSettings(BaseSettings):
19
22
 
@@ -59,6 +62,9 @@ class ModelSettings(BaseSettings):
59
62
  openllm_auth_type: Optional[str] = None
60
63
  openllm_api_key: Optional[str] = None
61
64
 
65
+ # disable openapi schema generation
66
+ disable_schema_generation: bool = False
67
+
62
68
 
63
69
  cors_origins = [
64
70
  "http://letta.localhost",
letta_nightly-0.6.8.dev20250110190527.dist-info/METADATA → letta_nightly-0.6.8.dev20250111030335.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: letta-nightly
3
- Version: 0.6.8.dev20250110190527
3
+ Version: 0.6.8.dev20250111030335
4
4
  Summary: Create LLM agents with long-term memory and custom tools
5
5
  License: Apache License
6
6
  Author: Letta Team
letta_nightly-0.6.8.dev20250110190527.dist-info/RECORD → letta_nightly-0.6.8.dev20250111030335.dist-info/RECORD RENAMED
@@ -1,6 +1,6 @@
1
1
  letta/__init__.py,sha256=-CRF0Bm3fnfx91Zwqgy6SjNRFRTorD37JtSJnVMpRGQ,991
2
2
  letta/__main__.py,sha256=6Hs2PV7EYc5Tid4g4OtcLXhqVHiNYTGzSBdoOnW2HXA,29
3
- letta/agent.py,sha256=fvd_atYh1BDgjDWxP3pJjCQ7cJFRWQigfI8TtmXQ0Ng,54838
3
+ letta/agent.py,sha256=QedEmRz_9UWl_0qZxz3nMK-eMSyVFMWugO2YrpNzSpI,55551
4
4
  letta/benchmark/benchmark.py,sha256=ebvnwfp3yezaXOQyGXkYCDYpsmre-b9hvNtnyx4xkG0,3701
5
5
  letta/benchmark/constants.py,sha256=aXc5gdpMGJT327VuxsT5FngbCK2J41PQYeICBO7g_RE,536
6
6
  letta/chat_only_agent.py,sha256=ECqJS7KzXOsNkJc9mv7reKbcxBI_PKP_PQyk95tsT1Y,4761
@@ -12,7 +12,7 @@ letta/client/client.py,sha256=oOwifm06z0bVpHnv3yR4o-tMQxD0C7_p41luQ5r-C5E,127098
12
12
  letta/client/streaming.py,sha256=Hz2j_hQZG2g7uhucjx2p3ybf2qjPT-vmIGCHGo87iCQ,4677
13
13
  letta/client/utils.py,sha256=VCGV-op5ZSmurd4yw7Vhf93XDQ0BkyBT8qsuV7EqfiU,2859
14
14
  letta/config.py,sha256=JFGY4TWW0Wm5fTbZamOwWqk5G8Nn-TXyhgByGoAqy2c,12375
15
- letta/constants.py,sha256=gl0uecmIZxO2si3gG9H-MRvHrH6jzFih3Uvfa02lewk,7071
15
+ letta/constants.py,sha256=FrbNM2ngNNI-MO9pYbBchSwqydKgNpGCW44qYMCUH14,7244
16
16
  letta/credentials.py,sha256=D9mlcPsdDWlIIXQQD8wSPE9M_QvsRrb0p3LB5i9OF5Q,5806
17
17
  letta/data_sources/connectors.py,sha256=L-WL-znjaRstMwSunHf3xDywjvgnbjnUR9rUpL6Ypo0,7023
18
18
  letta/data_sources/connectors_helper.py,sha256=2TQjCt74fCgT5sw1AP8PalDEk06jPBbhrPG4HVr-WLs,3371
@@ -21,7 +21,7 @@ letta/errors.py,sha256=bzHXK2co-cQOUWCjKjgphEJ-_-BihO3wTfwGfK570_Q,5093
21
21
  letta/functions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
22
22
  letta/functions/function_sets/base.py,sha256=bOiitkhzqYKwZBiRYrx29AlordiA5IrXw25eVSRK8BY,5984
23
23
  letta/functions/function_sets/extras.py,sha256=sNY5oavQ5ZmO5GpAtnm8hkWokqwqB8ClPB2VOL-B8MM,4719
24
- letta/functions/functions.py,sha256=evH6GKnIJwVVre1Xre2gaSIqREv4eNM4DiWOhn8PMqg,3299
24
+ letta/functions/functions.py,sha256=wxxo6MJXBfcPeEc1YYWK5ENOD3RFNTIc65RTDBo77x4,5673
25
25
  letta/functions/helpers.py,sha256=PkhzSHc8gZBxX2NHFaDYX0ZwUK9kuGQbz1zpIZ-KXVo,8860
26
26
  letta/functions/schema_generator.py,sha256=u8ix-spnLeP-epFMxP4jF23_aQDa-yCUIW2_O2CBOuo,17561
27
27
  letta/helpers/__init__.py,sha256=p0luQ1Oe3Skc6sH4O58aHHA3Qbkyjifpuq0DZ1GAY0U,59
@@ -35,7 +35,7 @@ letta/llm_api/anthropic.py,sha256=iq2iwEWEa53T7WX9ROyvzVzBgqfJP32tPqBdFBAHpG0,14
35
35
  letta/llm_api/azure_openai.py,sha256=Y1HKPog1XzM_f7ujUK_Gv2zQkoy5pU-1bKiUnvSxSrs,6297
36
36
  letta/llm_api/azure_openai_constants.py,sha256=oXtKrgBFHf744gyt5l1thILXgyi8NDNUrKEa2GGGpjw,278
37
37
  letta/llm_api/cohere.py,sha256=H5kzYH_aQAnGNq7lip7XyKGLEOKC318Iw0_tiTP6kc4,14772
38
- letta/llm_api/google_ai.py,sha256=0rkWKGzOGUvMXycbflqJMUQ3zFZ1VFLxWf0h7uGtPYQ,17543
38
+ letta/llm_api/google_ai.py,sha256=MIX4nmyC6448AvyPPSE8JZ_tzSpKJTArkZSfQGGoy0M,17920
39
39
  letta/llm_api/helpers.py,sha256=iP9EswPflaRzsmLqQuMGt1OCUQgPrPq1xTjrqmMKPiA,13675
40
40
  letta/llm_api/llm_api_tools.py,sha256=DnWsyowH_re5Zvd8-XNfGvB7KkjqcfdkqjmlVvM7_BU,18444
41
41
  letta/llm_api/mistral.py,sha256=fHdfD9ug-rQIk2qn8tRKay1U6w9maF11ryhKi91FfXM,1593
@@ -85,9 +85,9 @@ letta/openai_backcompat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG
85
85
  letta/openai_backcompat/openai_object.py,sha256=Y1ZS1sATP60qxJiOsjOP3NbwSzuzvkNAvb3DeuhM5Uk,13490
86
86
  letta/orm/__all__.py,sha256=2gh2MZTkA3Hw67VWVKK3JIStJOqTeLdpCvYSVYNeEDA,692
87
87
  letta/orm/__init__.py,sha256=cVnGGFjrAIuRpguw10l5moUG3Z5vKHqQjVsCv6uww3M,764
88
- letta/orm/agent.py,sha256=Uvez0vJGyuL20gVTSIvqYpVp1C2udLJlfQrGvLZCeH8,6162
88
+ letta/orm/agent.py,sha256=X-u2qUlI3O6RqGlZVWKSDK722YaovIHpuGy5yy9DQDE,6099
89
89
  letta/orm/agents_tags.py,sha256=dYSnHz4IWBjyOiQ4RJomX3P0QN76JTlEZEw5eJM6Emg,925
90
- letta/orm/base.py,sha256=47Pyo6iGPlJoT8oeXZWl3E7XHbPzcB81EueoFzeXbxE,2691
90
+ letta/orm/base.py,sha256=VjvxF9TwKF9Trf8BJkDgf7D6KrWWopOkUiF18J3IElk,3071
91
91
  letta/orm/block.py,sha256=U2fOXdab9ynQscOqzUo3xv1a_GjqHLIgoNSZq-U0mYg,3308
92
92
  letta/orm/blocks_agents.py,sha256=W0dykl9OchAofSuAYZD5zNmhyMabPr9LTJrz-I3A0m4,983
93
93
  letta/orm/custom_columns.py,sha256=dBYJn3yc1BIy7ZntIFfq9oEdQav-u0r412C2HyDeUPU,5056
@@ -99,13 +99,13 @@ letta/orm/message.py,sha256=Ui3mMZTAObP7RvmNnqCFudw8Pkt8IAPTlIjCSCsVDFA,1560
99
99
  letta/orm/mixins.py,sha256=9c79Kfr-Z1hL-SDYKeoptx_yMTbBwJJBo9nrKEzSDAc,1622
100
100
  letta/orm/organization.py,sha256=b12iASuskPnG2yHyFh8p2BFROkoqMPEYUFMuVcFPCHs,2897
101
101
  letta/orm/passage.py,sha256=tm5YhUozLR9hN7odGCqCniTl-3GDiFNz3LWAxullaGA,3132
102
- letta/orm/provider.py,sha256=Efy4dATyl1AbT_mGpKce5H7sKNqCHVQhkcSZpXgTw-k,797
102
+ letta/orm/provider.py,sha256=-qA9tvKTZgaM4D7CoDZZiA7zTgjaaWDV4jZvifQv_MM,805
103
103
  letta/orm/sandbox_config.py,sha256=DyOy_1_zCMlp13elCqPcuuA6OwUove6mrjhcpROTg50,4150
104
104
  letta/orm/source.py,sha256=xM3Iwy3xzYdoZja9BZrQwyAnPf5iksaQOs8HlNCvb_c,2031
105
105
  letta/orm/sources_agents.py,sha256=Ik_PokCBrXRd9wXWomeNeb8EtLUwjb9VMZ8LWXqpK5A,473
106
- letta/orm/sqlalchemy_base.py,sha256=VXGFfcdUTXFJcW10XHClHhsj7Y9pFhjeoX-Kj4ZPYKA,17753
106
+ letta/orm/sqlalchemy_base.py,sha256=opPgwG9DjKaL56x-h0teQBAIrs-IdaLZRAV0RaWoU-Q,17950
107
107
  letta/orm/sqlite_functions.py,sha256=JCScKiRlYCKxy9hChQ8wsk4GMKknZE24MunnG3fM1Gw,4255
108
- letta/orm/tool.py,sha256=qvDul85Gq0XORx6gyMGk0As3C1bSt9nASqezdPOikQ4,2216
108
+ letta/orm/tool.py,sha256=L2FwFC6gaKwTxcn3uyjQOCSdqD-3eOTigemnSKomZMU,2453
109
109
  letta/orm/tools_agents.py,sha256=r6t-V21w2_mG8n38zuUb5jOi_3hRxsjgezsLA4sg0m4,626
110
110
  letta/orm/user.py,sha256=rK5N5ViDxmesZMqVVHB7FcQNpcSoM-hB42MyI6q3MnI,1004
111
111
  letta/personas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -134,7 +134,6 @@ letta/prompts/system/memgpt_modified_chat.txt,sha256=F_yD4ZcR4aGDE3Z98tI7e609GYe
134
134
  letta/prompts/system/memgpt_modified_o1.txt,sha256=objnDgnxpF3-MmU28ZqZ7-TOG8UlHBM_HMyAdSDWK88,5492
135
135
  letta/prompts/system/memgpt_offline_memory.txt,sha256=rWEJeF-6aiinjkJM9hgLUYCmlEcC_HekYB1bjEUYq6M,2460
136
136
  letta/prompts/system/memgpt_offline_memory_chat.txt,sha256=ituh7gDuio7nC2UKFB7GpBq6crxb8bYedQfJ0ADoPgg,3949
137
- letta/providers.py,sha256=f-ZbMBmvX1rtVWuW-GS4mihFjWHg343keAzqVb9NgWY,27468
138
137
  letta/pytest.ini,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
139
138
  letta/schemas/agent.py,sha256=mDaOHz1TOb4VgaXJisI6OZXNFfK38W9A6jVCyrD2NBQ,11482
140
139
  letta/schemas/block.py,sha256=pVDH8jr5r-oxdX4cK9dX2wXyLBzgGKQOBWOzqZSeBog,5944
@@ -158,9 +157,10 @@ letta/schemas/openai/embedding_response.py,sha256=WKIZpXab1Av7v6sxKG8feW3ZtpQUNo
158
157
  letta/schemas/openai/openai.py,sha256=Hilo5BiLAGabzxCwnwfzK5QrWqwYD8epaEKFa4Pwndk,7970
159
158
  letta/schemas/organization.py,sha256=WWbUWVSp_VQRFwWN4fdHg1yObiV6x9rZnvIY8x5BPs0,746
160
159
  letta/schemas/passage.py,sha256=t_bSI8hpEuh-mj8bV8qOiIA1tAgyjGKrZMVe9l5oIaY,3675
160
+ letta/schemas/providers.py,sha256=YsVQWVjFlyT2I2H-wfD6AL3VP0TIrHkb-DsVp-H85Hc,27647
161
161
  letta/schemas/sandbox_config.py,sha256=v32V5T73X-VxhDk0g_1RGniK985KMvg2xyLVi1dvMQY,4215
162
162
  letta/schemas/source.py,sha256=B1VbaDJV-EGPv1nQXwCx_RAzeAJd50UqP_1m1cIRT8c,2854
163
- letta/schemas/tool.py,sha256=_2FaWTDdGtbKTh3-KHa57f5CKS_veWYtyS2Fk5ZwMFw,10349
163
+ letta/schemas/tool.py,sha256=v2y1vNQqa0AoCFHsakUcALiCRhn0lsmWqcVd7aPeAU8,11384
164
164
  letta/schemas/tool_rule.py,sha256=LJwi1T474-3zbFGiW7_fegyfduC3F2u7cdlBsV0U_IU,1679
165
165
  letta/schemas/usage.py,sha256=8oYRH-JX0PfjIu2zkT5Uu3UWQ7Unnz_uHiO8hRGI4m0,912
166
166
  letta/schemas/user.py,sha256=V32Tgl6oqB3KznkxUz12y7agkQicjzW7VocSpj78i6Q,1526
@@ -168,11 +168,11 @@ letta/server/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
168
168
  letta/server/constants.py,sha256=yAdGbLkzlOU_dLTx0lKDmAnj0ZgRXCEaIcPJWO69eaE,92
169
169
  letta/server/generate_openapi_schema.sh,sha256=0OtBhkC1g6CobVmNEd_m2B6sTdppjbJLXaM95icejvE,371
170
170
  letta/server/rest_api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
171
- letta/server/rest_api/app.py,sha256=8_wwXLzeL84a2NtN-LMW9t-yUHQ59GV72cC3DB1LTFQ,11096
171
+ letta/server/rest_api/app.py,sha256=xa34qWm8rpcb_pYnOL9k7sQ0qZAeVhH32uvbb6Xnu8g,11007
172
172
  letta/server/rest_api/auth/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
173
173
  letta/server/rest_api/auth/index.py,sha256=fQBGyVylGSRfEMLQ17cZzrHd5Y1xiVylvPqH5Rl-lXQ,1378
174
174
  letta/server/rest_api/auth_token.py,sha256=725EFEIiNj4dh70hrSd94UysmFD8vcJLrTRfNHkzxDo,774
175
- letta/server/rest_api/interface.py,sha256=1VswXnbsufSJEvK9Swq7_y2cmUcCBeGa5742u3NSXw8,45767
175
+ letta/server/rest_api/interface.py,sha256=imcpXkfEJboKVEFIHD5OPbS3F_fqjupWratm0zSFpwY,45525
176
176
  letta/server/rest_api/routers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
177
177
  letta/server/rest_api/routers/openai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
178
178
  letta/server/rest_api/routers/openai/assistants/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -180,21 +180,21 @@ letta/server/rest_api/routers/openai/assistants/assistants.py,sha256=PXv5vLFDa3p
180
180
  letta/server/rest_api/routers/openai/assistants/schemas.py,sha256=ZWUrmkvDMeywlxYhcp1hHzLXNgWpD8qWt80jRlhb7Rc,5605
181
181
  letta/server/rest_api/routers/openai/chat_completions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
182
182
  letta/server/rest_api/routers/openai/chat_completions/chat_completions.py,sha256=szOelaigEMNNzlvl5ZkS4KDzdyLhv2ATZ8XuaDPN3Jc,4820
183
- letta/server/rest_api/routers/v1/__init__.py,sha256=6Aw18C9of0ayRNyFYUK_-nxajeVLaYau1bLw-tg_OQ4,753
183
+ letta/server/rest_api/routers/v1/__init__.py,sha256=UX9dUw3ZO-h5up9VnoJFNucDgRlPh05PCUX9gTD8ynA,892
184
184
  letta/server/rest_api/routers/v1/agents.py,sha256=S79Bz0y-MGPQfzXuPPDNjJvBDBRQOKE_ixAqENx9J8Q,30986
185
185
  letta/server/rest_api/routers/v1/blocks.py,sha256=IJ2pppwNooaUjIwyBALnKL4sJ8idW8cVJlY-VH_J9HY,4803
186
186
  letta/server/rest_api/routers/v1/health.py,sha256=pKCuVESlVOhGIb4VC4K-H82eZqfghmT6kvj2iOkkKuc,401
187
187
  letta/server/rest_api/routers/v1/jobs.py,sha256=-tEyuIxlXZfPREeMks-sRzHwhKE2xxgzbXeEbBAS2Q8,2730
188
188
  letta/server/rest_api/routers/v1/llms.py,sha256=TcyvSx6MEM3je5F4DysL7ligmssL_pFlJaaO4uL95VY,877
189
189
  letta/server/rest_api/routers/v1/organizations.py,sha256=tyqVzXTpMtk3sKxI3Iz4aS6RhbGEbXDzFBB_CpW18v4,2080
190
- letta/server/rest_api/routers/v1/providers.py,sha256=7_921hS6AsvOG-9LeGsF61fM_8cP7Ez-AVUBX-G1iVs,2358
190
+ letta/server/rest_api/routers/v1/providers.py,sha256=ftYyPgqFinfPbjCS4y77xQaq2SPFj8k8wwaErieAppU,2445
191
191
  letta/server/rest_api/routers/v1/sandbox_configs.py,sha256=DJ8mz7HsXCuGypNaxTgoMW8xR1kMOwdVnon00srRdCo,5266
192
192
  letta/server/rest_api/routers/v1/sources.py,sha256=EJ-VCqiKtaiYuivZrWx1gYSNUKnWJuduSm-L_2ljhLc,9913
193
193
  letta/server/rest_api/routers/v1/tools.py,sha256=rGw3hDUFYmY_kmJw5hsQWnBOWQqJFdfT3bfKiXwXikg,12764
194
194
  letta/server/rest_api/routers/v1/users.py,sha256=EBQe9IfCG3kzHpKmotz4yVGZioXz3SCSRy5yEhJK8hU,2293
195
195
  letta/server/rest_api/static_files.py,sha256=NG8sN4Z5EJ8JVQdj19tkFa9iQ1kBPTab9f_CUxd_u4Q,3143
196
196
  letta/server/rest_api/utils.py,sha256=M4VE76TtC9qorvszq1x7MTBW9jSWgtUUCRahqpJNYNc,3953
197
- letta/server/server.py,sha256=H_-51emg5gHQryjuL0cw5J7XV8mEtzs1JOun_GhYOYQ,50272
197
+ letta/server/server.py,sha256=f6-IqdHScc9K3fIHlVkfMFwSJSo9zBn1W2Smg4GdGRA,50280
198
198
  letta/server/startup.sh,sha256=722uKJWB2C4q3vjn39De2zzPacaZNw_1fN1SpLGjKIo,1569
199
199
  letta/server/static_files/assets/index-048c9598.js,sha256=mR16XppvselwKCcNgONs4L7kZEVa4OEERm4lNZYtLSk,146819
200
200
  letta/server/static_files/assets/index-0e31b727.css,sha256=SBbja96uiQVLDhDOroHgM6NSl7tS4lpJRCREgSS_hA8,7672
@@ -216,20 +216,19 @@ letta/services/message_manager.py,sha256=a7U0MfgaNAdjbls7ZtUdS7eJ6prJaMkE0NIHgtz
216
216
  letta/services/organization_manager.py,sha256=hJI86_FErkRaW-VLBBmvmmciIXN59LN0mEMm78C36kQ,3331
217
217
  letta/services/passage_manager.py,sha256=Lq1caspE1VGmT7vlsUOuJVRflJZ122qfG0dmNGm_6o8,7691
218
218
  letta/services/per_agent_lock_manager.py,sha256=porM0cKKANQ1FvcGXOO_qM7ARk5Fgi1HVEAhXsAg9-4,546
219
- letta/services/provider_manager.py,sha256=wCYgl2toWsuOHwFGkz0i94G0hyWtFNyRLUzB2ElTckw,2589
220
- letta/services/sandbox_config_manager.py,sha256=RtJCiw43S80K_a8QgpQUH7P0jpi7UwoUV1fDjfyX1t4,13289
219
+ letta/services/provider_manager.py,sha256=QsiKXnl-TuVLpL1keAkkMmCbJj4KehGgqu-SZACgbg0,2926
220
+ letta/services/sandbox_config_manager.py,sha256=YWJES09KX5POXwL-hijaf87zW3az3Ioh8lWDeZYPY6k,13290
221
221
  letta/services/source_manager.py,sha256=ZtLQikeJjwAq49f0d4WxUzyUN3LZBqWCZI4a-AzEMWQ,7643
222
- letta/services/tool_execution_sandbox.py,sha256=380xw7ptSdrAbfolZ0l4jnwpzCfbdUfIkOh2tg2vbmI,23019
223
- letta/services/tool_manager.py,sha256=eKhvGYNBq8MOwfuk-GyqiTdEcxRn4srYvTJqj-HgTKA,7686
224
- letta/services/tool_sandbox_env/.gitkeep,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
222
+ letta/services/tool_execution_sandbox.py,sha256=ycAVNabaEI30t2MxYFi-7gLtselSpt3knJdPZvvmT-I,23120
223
+ letta/services/tool_manager.py,sha256=zLdqgo0pipmfyG3-8YkrEGxtrj2i7Iw81ibUV4BuaqM,7935
225
224
  letta/services/user_manager.py,sha256=oqLF9C4mGbN0TaGj7wMpb2RH2bUg6OJJcdyaWv370rQ,4272
226
- letta/settings.py,sha256=8WfAKNCG_ahCvnshMG5YvEQ9qYWs2ZFnWk9EOLOLwpA,3945
225
+ letta/settings.py,sha256=5R0VeNaoIz5KWPtsCk3LWHPCI9b70ifIiHd9p0y92Ak,4114
227
226
  letta/streaming_interface.py,sha256=lo2VAQRUJOdWTijwnXuKOC9uejqr2siUAEmZiQUXkj8,15710
228
227
  letta/streaming_utils.py,sha256=329fsvj1ZN0r0LpQtmMPZ2vSxkDBIUUwvGHZFkjm2I8,11745
229
228
  letta/system.py,sha256=buKYPqG5n2x41hVmWpu6JUpyd7vTWED9Km2_M7dLrvk,6960
230
229
  letta/utils.py,sha256=1-HhTZEw7j5fXI2ukdto0y1cZ-I8wpHKf7rqtdwFnT4,33382
231
- letta_nightly-0.6.8.dev20250110190527.dist-info/LICENSE,sha256=mExtuZ_GYJgDEI38GWdiEYZizZS4KkVt2SF1g_GPNhI,10759
232
- letta_nightly-0.6.8.dev20250110190527.dist-info/METADATA,sha256=jVpu1w9e5acpFTZtETXxGlx0ZrQGdwn7jpVJXts6DCI,21694
233
- letta_nightly-0.6.8.dev20250110190527.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
234
- letta_nightly-0.6.8.dev20250110190527.dist-info/entry_points.txt,sha256=2zdiyGNEZGV5oYBuS-y2nAAgjDgcC9yM_mHJBFSRt5U,40
235
- letta_nightly-0.6.8.dev20250110190527.dist-info/RECORD,,
230
+ letta_nightly-0.6.8.dev20250111030335.dist-info/LICENSE,sha256=mExtuZ_GYJgDEI38GWdiEYZizZS4KkVt2SF1g_GPNhI,10759
231
+ letta_nightly-0.6.8.dev20250111030335.dist-info/METADATA,sha256=OFRSl9XZNZcbMgMk-68iBg5XISR4Q5tJF7RoTF_t4jQ,21694
232
+ letta_nightly-0.6.8.dev20250111030335.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
233
+ letta_nightly-0.6.8.dev20250111030335.dist-info/entry_points.txt,sha256=2zdiyGNEZGV5oYBuS-y2nAAgjDgcC9yM_mHJBFSRt5U,40
234
+ letta_nightly-0.6.8.dev20250111030335.dist-info/RECORD,,