letta-nightly 0.5.0.dev20241022104124__py3-none-any.whl → 0.5.1.dev20241023193051__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Note: this release of letta-nightly has been flagged as potentially problematic; see the package registry's advisory page for details.
- letta/__init__.py +8 -3
- letta/agent_store/db.py +4 -2
- letta/cli/cli_config.py +2 -2
- letta/client/client.py +13 -0
- letta/constants.py +7 -4
- letta/embeddings.py +34 -16
- letta/llm_api/azure_openai.py +44 -4
- letta/llm_api/openai.py +7 -1
- letta/metadata.py +1 -145
- letta/orm/__all__.py +0 -0
- letta/orm/__init__.py +0 -0
- letta/orm/base.py +75 -0
- letta/orm/enums.py +8 -0
- letta/orm/errors.py +6 -0
- letta/orm/mixins.py +67 -0
- letta/orm/organization.py +28 -0
- letta/orm/sqlalchemy_base.py +204 -0
- letta/orm/user.py +25 -0
- letta/schemas/organization.py +3 -3
- letta/schemas/user.py +13 -6
- letta/server/rest_api/interface.py +47 -9
- letta/server/rest_api/routers/v1/organizations.py +5 -6
- letta/server/rest_api/routers/v1/users.py +6 -7
- letta/server/server.py +51 -85
- letta/services/__init__.py +0 -0
- letta/services/organization_manager.py +76 -0
- letta/services/user_manager.py +99 -0
- letta/settings.py +5 -0
- {letta_nightly-0.5.0.dev20241022104124.dist-info → letta_nightly-0.5.1.dev20241023193051.dist-info}/METADATA +2 -1
- {letta_nightly-0.5.0.dev20241022104124.dist-info → letta_nightly-0.5.1.dev20241023193051.dist-info}/RECORD +33 -23
- letta/base.py +0 -3
- letta/client/admin.py +0 -171
- {letta_nightly-0.5.0.dev20241022104124.dist-info → letta_nightly-0.5.1.dev20241023193051.dist-info}/LICENSE +0 -0
- {letta_nightly-0.5.0.dev20241022104124.dist-info → letta_nightly-0.5.1.dev20241023193051.dist-info}/WHEEL +0 -0
- {letta_nightly-0.5.0.dev20241022104124.dist-info → letta_nightly-0.5.1.dev20241023193051.dist-info}/entry_points.txt +0 -0
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
from typing import TYPE_CHECKING, List
|
|
2
|
+
|
|
3
|
+
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
|
4
|
+
|
|
5
|
+
from letta.orm.sqlalchemy_base import SqlalchemyBase
|
|
6
|
+
from letta.schemas.organization import Organization as PydanticOrganization
|
|
7
|
+
|
|
8
|
+
if TYPE_CHECKING:
|
|
9
|
+
|
|
10
|
+
from letta.orm.user import User
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class Organization(SqlalchemyBase):
    """Top-level tenant container.

    Sits at the root of the object tree: every entity in the system belongs
    to exactly one Organization.
    """

    __tablename__ = "organization"
    __pydantic_model__ = PydanticOrganization

    # Human-readable label shown in UIs and listings.
    name: Mapped[str] = mapped_column(doc="The display name of the organization.")

    # One-to-many: deleting an organization removes its users as well.
    users: Mapped[List["User"]] = relationship("User", back_populates="organization", cascade="all, delete-orphan")

    # TODO: wire up these relationships once the corresponding ORM models exist.
    # The commented lines below are a suggested shape, not active mappings.
    # agents: Mapped[List["Agent"]] = relationship("Agent", back_populates="organization", cascade="all, delete-orphan")
    # sources: Mapped[List["Source"]] = relationship("Source", back_populates="organization", cascade="all, delete-orphan")
    # tools: Mapped[List["Tool"]] = relationship("Tool", back_populates="organization", cascade="all, delete-orphan")
    # documents: Mapped[List["Document"]] = relationship("Document", back_populates="organization", cascade="all, delete-orphan")
|
|
@@ -0,0 +1,204 @@
|
|
|
1
|
+
from typing import TYPE_CHECKING, List, Literal, Optional, Type, Union
|
|
2
|
+
from uuid import UUID, uuid4
|
|
3
|
+
|
|
4
|
+
from humps import depascalize
|
|
5
|
+
from sqlalchemy import Boolean, String, select
|
|
6
|
+
from sqlalchemy.orm import Mapped, mapped_column
|
|
7
|
+
|
|
8
|
+
from letta.log import get_logger
|
|
9
|
+
from letta.orm.base import Base, CommonSqlalchemyMetaMixins
|
|
10
|
+
from letta.orm.errors import NoResultFound
|
|
11
|
+
from letta.orm.mixins import is_valid_uuid4
|
|
12
|
+
|
|
13
|
+
if TYPE_CHECKING:
|
|
14
|
+
from pydantic import BaseModel
|
|
15
|
+
from sqlalchemy.orm import Session
|
|
16
|
+
|
|
17
|
+
# from letta.orm.user import User
|
|
18
|
+
|
|
19
|
+
logger = get_logger(__name__)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
    """Abstract base for all Letta ORM models.

    Provides:
      * a prefixed public id (``<model_prefix>-<uuid4>``) backed by the raw
        ``_id`` primary-key column,
      * universal soft deletes via the ``deleted`` column,
      * generic CRUD helpers (``list``/``read``/``create``/``update``/``delete``),
      * conversion to the matching pydantic model (``to_pydantic``).
    """

    __abstract__ = True

    __order_by_default__ = "created_at"

    # Raw uuid4 primary key; callers see it through the prefixed `id` property.
    _id: Mapped[str] = mapped_column(String, primary_key=True, default=lambda: f"{uuid4()}")

    deleted: Mapped[bool] = mapped_column(Boolean, default=False, doc="Is this record deleted? Used for universal soft deletes.")

    @classmethod
    def __prefix__(cls) -> str:
        """Return the id prefix for this model, e.g. 'organization' for Organization."""
        return depascalize(cls.__name__)

    @property
    def id(self) -> Optional[str]:
        """The public, prefixed id (e.g. 'user-<uuid4>'), or None if unset."""
        if self._id:
            return f"{self.__prefix__()}-{self._id}"

    @id.setter
    def id(self, value: str) -> None:
        if not value:
            return
        prefix, id_ = value.split("-", 1)
        # Validate eagerly with real exceptions (asserts are stripped under -O).
        if prefix != self.__prefix__():
            raise ValueError(f"{prefix} is not a valid id prefix for {self.__class__.__name__}")
        if not is_valid_uuid4(id_):
            raise ValueError(f"{id_} is not a valid uuid4")
        self._id = id_

    @classmethod
    def list(
        cls, *, db_session: "Session", cursor: Optional[str] = None, limit: Optional[int] = 50, **kwargs
    ) -> List[Type["SqlalchemyBase"]]:
        """List records with optional cursor (for pagination) and limit.

        Args:
            db_session: the session used to run the query
            cursor: a public id; only rows with a strictly larger ``_id`` are returned
            limit: maximum number of rows to return
            kwargs: equality filters applied via ``filter_by``
        """
        with db_session as session:
            # Start with the base query filtered by kwargs
            query = select(cls).filter_by(**kwargs)

            # Add a cursor condition if provided
            if cursor:
                cursor_uuid = cls.get_uid_from_identifier(cursor)  # the cursor is a public id value
                query = query.where(cls._id > cursor_uuid)

            # Deterministic ordering is required for cursor pagination to be stable
            query = query.order_by(cls._id).limit(limit)

            # Exclude soft-deleted rows. NOTE: the column is named `deleted`
            # (declared above); the previous check for `is_deleted` never matched,
            # so soft-deleted rows leaked into results.
            if hasattr(cls, "deleted"):
                query = query.where(cls.deleted == False)  # noqa: E712 (SQLAlchemy needs the comparison, not `is`)

            # Execute the query and return the results as a list of model instances
            return list(session.execute(query).scalars())

    @classmethod
    def get_uid_from_identifier(cls, identifier: str, indifferent: Optional[bool] = False) -> str:
        """Strip the model prefix from a public id and return the bare uuid4 string.

        Args:
            identifier: the string identifier, such as `organization-xxxx-xx...`
            indifferent: if True, will not enforce the prefix check
        Raises:
            ValueError: if the identifier is not a valid id for this class
        """
        try:
            uuid_string = identifier.split("-", 1)[1] if indifferent else identifier.replace(f"{cls.__prefix__()}-", "")
            assert is_valid_uuid4(uuid_string)
            return uuid_string
        except (AttributeError, IndexError, AssertionError, ValueError) as e:
            # AttributeError: non-string input; IndexError: no '-' separator;
            # AssertionError: the remainder is not a valid uuid4. All of these
            # are normalized into one ValueError for callers.
            raise ValueError(f"{identifier} is not a valid identifier for class {cls.__name__}") from e

    @classmethod
    def read(
        cls,
        db_session: "Session",
        identifier: Union[str, UUID],
        actor: Optional["User"] = None,
        access: Optional[List[Literal["read", "write", "admin"]]] = ["read"],  # noqa: B006 — never mutated, kept for signature compatibility
        **kwargs,
    ) -> Type["SqlalchemyBase"]:
        """The primary accessor for an ORM record.

        Args:
            db_session: the database session to use when retrieving the record
            identifier: the identifier of the record to read, can be the id string or the UUID object for backwards compatibility
            actor: if specified, results will be scoped only to records the user is able to access
            access: if actor is specified, records will be filtered to the minimum permission level for the actor
            kwargs: additional arguments to pass to the read, used for more complex objects
        Returns:
            The matching object
        Raises:
            NoResultFound: if the object is not found
        """
        del kwargs  # arity for more complex reads
        # Coerce UUID objects to str before stripping the prefix; a bare UUID
        # string has no matching prefix to strip and passes validation as-is.
        identifier = cls.get_uid_from_identifier(str(identifier))
        query = select(cls).where(cls._id == identifier)
        # if actor:
        #     query = cls.apply_access_predicate(query, actor, access)
        # Exclude soft-deleted rows (column is `deleted`, see class body).
        if hasattr(cls, "deleted"):
            query = query.where(cls.deleted == False)  # noqa: E712
        if found := db_session.execute(query).scalar():
            return found
        raise NoResultFound(f"{cls.__name__} with id {identifier} not found")

    def create(self, db_session: "Session") -> Type["SqlalchemyBase"]:
        """Persist this (new) object and refresh it from the database."""
        # self._infer_organization(db_session)

        with db_session as session:
            session.add(self)
            session.commit()
            session.refresh(self)
            return self

    def delete(self, db_session: "Session") -> Type["SqlalchemyBase"]:
        """Soft-delete: flag the row as deleted and persist the change.

        FIX: the soft-delete column is ``deleted``; the previous code set an
        unmapped ``is_deleted`` attribute, so the flag was never written to
        the database.
        """
        self.deleted = True
        return self.update(db_session)

    def update(self, db_session: "Session") -> Type["SqlalchemyBase"]:
        """Flush pending changes on this object and refresh it from the database."""
        with db_session as session:
            session.add(self)
            session.commit()
            session.refresh(self)
            return self

    @classmethod
    def read_or_create(cls, *, db_session: "Session", **kwargs) -> Type["SqlalchemyBase"]:
        """get an instance by search criteria or create it if it doesn't exist"""
        identifier = kwargs.get("id", None)
        try:
            if identifier is None:
                # No id supplied: nothing to look up, fall through to creation.
                # (Previously None was passed to read() and blew up with ValueError.)
                raise NoResultFound
            return cls.read(db_session=db_session, identifier=identifier)
        except NoResultFound:
            # Only pass through kwargs that are actual mapped columns.
            clean_kwargs = {k: v for k, v in kwargs.items() if k in cls.__table__.columns}
            return cls(**clean_kwargs).create(db_session=db_session)

    # TODO: Add back later when access predicates are actually important
    # The idea behind this is that you can add a WHERE clause restricting the actions you can take, e.g. R/W
    # @classmethod
    # def apply_access_predicate(
    #     cls,
    #     query: "Select",
    #     actor: "User",
    #     access: List[Literal["read", "write", "admin"]],
    # ) -> "Select":
    #     """applies a WHERE clause restricting results to the given actor and access level
    #     Args:
    #         query: The initial sqlalchemy select statement
    #         actor: The user acting on the query. **Note**: this is called 'actor' to identify the
    #             person or system acting. Users can act on users, making naming very sticky otherwise.
    #         access:
    #             what mode of access should the query restrict to? This will be used with granular permissions,
    #             but because of how it will impact every query we want to be explicitly calling access ahead of time.
    #     Returns:
    #         the sqlalchemy select statement restricted to the given access.
    #     """
    #     del access  # entrypoint for row-level permissions. Defaults to "same org as the actor, all permissions" at the moment
    #     org_uid = getattr(actor, "_organization_id", getattr(actor.organization, "_id", None))
    #     if not org_uid:
    #         raise ValueError("object %s has no organization accessor", actor)
    #     return query.where(cls._organization_id == org_uid, cls.deleted == False)

    @property
    def __pydantic_model__(self) -> Type["BaseModel"]:
        # Concrete subclasses override this with their pydantic counterpart.
        raise NotImplementedError("Sqlalchemy models must declare a __pydantic_model__ property to be convertable.")

    def to_pydantic(self) -> Type["BaseModel"]:
        """converts to the basic pydantic model counterpart"""
        return self.__pydantic_model__.model_validate(self)

    def to_record(self) -> Type["BaseModel"]:
        """Deprecated accessor for to_pydantic"""
        logger.warning("to_record is deprecated, use to_pydantic instead.")
        return self.to_pydantic()

    # TODO: Look into this later and maybe add back?
    # def _infer_organization(self, db_session: "Session") -> None:
    #     """🪄 MAGIC ALERT! 🪄
    #     Because so much of the original API is centered around user scopes,
    #     this allows us to continue with that scope and then infer the org from the creating user.
    #
    #     IF a created_by_id is set, we will use that to infer the organization and magic set it at create time!
    #     If not do nothing to the object. Mutates in place.
    #     """
    #     if self.created_by_id and hasattr(self, "_organization_id"):
    #         try:
    #             from letta.orm.user import User  # to avoid circular import
    #
    #             created_by = User.read(db_session, self.created_by_id)
    #         except NoResultFound:
    #             logger.warning(f"User {self.created_by_id} not found, unable to infer organization.")
    #             return
    #         self._organization_id = created_by._organization_id
|
letta/orm/user.py
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
|
2
|
+
|
|
3
|
+
from letta.orm.mixins import OrganizationMixin
|
|
4
|
+
from letta.orm.organization import Organization
|
|
5
|
+
from letta.orm.sqlalchemy_base import SqlalchemyBase
|
|
6
|
+
from letta.schemas.user import User as PydanticUser
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class User(SqlalchemyBase, OrganizationMixin):
    """ORM mapping for a Letta user account.

    Each user belongs to one organization (via OrganizationMixin's foreign key).
    """

    __tablename__ = "user"
    __pydantic_model__ = PydanticUser

    # Required human-readable label for the account.
    name: Mapped[str] = mapped_column(nullable=False, doc="The display name of the user.")

    # relationships
    organization: Mapped["Organization"] = relationship("Organization", back_populates="users")

    # TODO: Add this back later potentially
    # agents: Mapped[List["Agent"]] = relationship(
    #     "Agent", secondary="users_agents", back_populates="users", doc="the agents associated with this user."
    # )
    # tokens: Mapped[List["Token"]] = relationship("Token", back_populates="user", doc="the tokens associated with this user.")
    # jobs: Mapped[List["Job"]] = relationship("Job", back_populates="user", doc="the jobs associated with this user.")
|
letta/schemas/organization.py
CHANGED
|
@@ -7,13 +7,13 @@ from letta.schemas.letta_base import LettaBase
|
|
|
7
7
|
|
|
8
8
|
|
|
9
9
|
class OrganizationBase(LettaBase):
|
|
10
|
-
__id_prefix__ = "
|
|
10
|
+
__id_prefix__ = "organization"
|
|
11
11
|
|
|
12
12
|
|
|
13
13
|
class Organization(OrganizationBase):
|
|
14
|
-
id: str =
|
|
14
|
+
id: str = Field(..., description="The id of the organization.")
|
|
15
15
|
name: str = Field(..., description="The name of the organization.")
|
|
16
|
-
created_at: datetime = Field(default_factory=datetime.utcnow, description="The creation date of the
|
|
16
|
+
created_at: datetime = Field(default_factory=datetime.utcnow, description="The creation date of the organization.")
|
|
17
17
|
|
|
18
18
|
|
|
19
19
|
class OrganizationCreate(OrganizationBase):
|
letta/schemas/user.py
CHANGED
|
@@ -3,6 +3,7 @@ from typing import Optional
|
|
|
3
3
|
|
|
4
4
|
from pydantic import Field
|
|
5
5
|
|
|
6
|
+
from letta.constants import DEFAULT_ORG_ID
|
|
6
7
|
from letta.schemas.letta_base import LettaBase
|
|
7
8
|
|
|
8
9
|
|
|
@@ -20,14 +21,20 @@ class User(UserBase):
|
|
|
20
21
|
created_at (datetime): The creation date of the user.
|
|
21
22
|
"""
|
|
22
23
|
|
|
23
|
-
id: str =
|
|
24
|
-
|
|
25
|
-
..., description="The organization id of the user"
|
|
26
|
-
) # TODO: dont make optional, and pass in default org ID
|
|
24
|
+
id: str = Field(..., description="The id of the user.")
|
|
25
|
+
organization_id: Optional[str] = Field(DEFAULT_ORG_ID, description="The organization id of the user")
|
|
27
26
|
name: str = Field(..., description="The name of the user.")
|
|
28
27
|
created_at: datetime = Field(default_factory=datetime.utcnow, description="The creation date of the user.")
|
|
28
|
+
updated_at: datetime = Field(default_factory=datetime.utcnow, description="The update date of the user.")
|
|
29
|
+
is_deleted: bool = Field(False, description="Whether this user is deleted or not.")
|
|
29
30
|
|
|
30
31
|
|
|
31
32
|
class UserCreate(UserBase):
|
|
32
|
-
name:
|
|
33
|
-
|
|
33
|
+
name: str = Field(..., description="The name of the user.")
|
|
34
|
+
organization_id: str = Field(..., description="The organization id of the user.")
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
class UserUpdate(UserBase):
|
|
38
|
+
id: str = Field(..., description="The id of the user to update.")
|
|
39
|
+
name: Optional[str] = Field(None, description="The new name of the user.")
|
|
40
|
+
organization_id: Optional[str] = Field(None, description="The new organization id of the user.")
|
|
@@ -312,11 +312,20 @@ class StreamingServerInterface(AgentChunkStreamingInterface):
|
|
|
312
312
|
# Two buffers used to make sure that the 'name' comes after the inner thoughts stream (if inner_thoughts_in_kwargs)
|
|
313
313
|
self.function_name_buffer = None
|
|
314
314
|
self.function_args_buffer = None
|
|
315
|
+
self.function_id_buffer = None
|
|
315
316
|
|
|
316
317
|
# extra prints
|
|
317
318
|
self.debug = False
|
|
318
319
|
self.timeout = 30
|
|
319
320
|
|
|
321
|
+
def _reset_inner_thoughts_json_reader(self):
|
|
322
|
+
# A buffer for accumulating function arguments (we want to buffer keys and run checks on each one)
|
|
323
|
+
self.function_args_reader = JSONInnerThoughtsExtractor(inner_thoughts_key=self.inner_thoughts_kwarg, wait_for_first_key=True)
|
|
324
|
+
# Two buffers used to make sure that the 'name' comes after the inner thoughts stream (if inner_thoughts_in_kwargs)
|
|
325
|
+
self.function_name_buffer = None
|
|
326
|
+
self.function_args_buffer = None
|
|
327
|
+
self.function_id_buffer = None
|
|
328
|
+
|
|
320
329
|
async def _create_generator(self) -> AsyncGenerator[Union[LettaMessage, LegacyLettaMessage, MessageStreamStatus], None]:
|
|
321
330
|
"""An asynchronous generator that yields chunks as they become available."""
|
|
322
331
|
while self._active:
|
|
@@ -376,6 +385,9 @@ class StreamingServerInterface(AgentChunkStreamingInterface):
|
|
|
376
385
|
if not self.streaming_chat_completion_mode and not self.nonstreaming_legacy_mode:
|
|
377
386
|
self._push_to_buffer(self.multi_step_gen_indicator)
|
|
378
387
|
|
|
388
|
+
# Wipe the inner thoughts buffers
|
|
389
|
+
self._reset_inner_thoughts_json_reader()
|
|
390
|
+
|
|
379
391
|
def step_complete(self):
|
|
380
392
|
"""Signal from the agent that one 'step' finished (step = LLM response + tool execution)"""
|
|
381
393
|
if not self.multi_step:
|
|
@@ -386,6 +398,9 @@ class StreamingServerInterface(AgentChunkStreamingInterface):
|
|
|
386
398
|
# signal that a new step has started in the stream
|
|
387
399
|
self._push_to_buffer(self.multi_step_indicator)
|
|
388
400
|
|
|
401
|
+
# Wipe the inner thoughts buffers
|
|
402
|
+
self._reset_inner_thoughts_json_reader()
|
|
403
|
+
|
|
389
404
|
def step_yield(self):
|
|
390
405
|
"""If multi_step, this is the true 'stream_end' function."""
|
|
391
406
|
self._active = False
|
|
@@ -498,6 +513,13 @@ class StreamingServerInterface(AgentChunkStreamingInterface):
|
|
|
498
513
|
else:
|
|
499
514
|
self.function_name_buffer += tool_call.function.name
|
|
500
515
|
|
|
516
|
+
if tool_call.id:
|
|
517
|
+
# Buffer until next time
|
|
518
|
+
if self.function_id_buffer is None:
|
|
519
|
+
self.function_id_buffer = tool_call.id
|
|
520
|
+
else:
|
|
521
|
+
self.function_id_buffer += tool_call.id
|
|
522
|
+
|
|
501
523
|
if tool_call.function.arguments:
|
|
502
524
|
updates_main_json, updates_inner_thoughts = self.function_args_reader.process_fragment(tool_call.function.arguments)
|
|
503
525
|
|
|
@@ -518,6 +540,7 @@ class StreamingServerInterface(AgentChunkStreamingInterface):
|
|
|
518
540
|
|
|
519
541
|
# If we have main_json, we should output a FunctionCallMessage
|
|
520
542
|
elif updates_main_json:
|
|
543
|
+
|
|
521
544
|
# If there's something in the function_name buffer, we should release it first
|
|
522
545
|
# NOTE: we could output it as part of a chunk that has both name and args,
|
|
523
546
|
# however the frontend may expect name first, then args, so to be
|
|
@@ -526,18 +549,23 @@ class StreamingServerInterface(AgentChunkStreamingInterface):
|
|
|
526
549
|
processed_chunk = FunctionCallMessage(
|
|
527
550
|
id=message_id,
|
|
528
551
|
date=message_date,
|
|
529
|
-
function_call=FunctionCallDelta(
|
|
552
|
+
function_call=FunctionCallDelta(
|
|
553
|
+
name=self.function_name_buffer,
|
|
554
|
+
arguments=None,
|
|
555
|
+
function_call_id=self.function_id_buffer,
|
|
556
|
+
),
|
|
530
557
|
)
|
|
531
558
|
# Clear the buffer
|
|
532
559
|
self.function_name_buffer = None
|
|
560
|
+
self.function_id_buffer = None
|
|
533
561
|
# Since we're clearing the name buffer, we should store
|
|
534
562
|
# any updates to the arguments inside a separate buffer
|
|
535
|
-
|
|
536
|
-
|
|
537
|
-
|
|
538
|
-
|
|
539
|
-
|
|
540
|
-
|
|
563
|
+
|
|
564
|
+
# Add any main_json updates to the arguments buffer
|
|
565
|
+
if self.function_args_buffer is None:
|
|
566
|
+
self.function_args_buffer = updates_main_json
|
|
567
|
+
else:
|
|
568
|
+
self.function_args_buffer += updates_main_json
|
|
541
569
|
|
|
542
570
|
# If there was nothing in the name buffer, we can proceed to
|
|
543
571
|
# output the arguments chunk as a FunctionCallMessage
|
|
@@ -550,17 +578,27 @@ class StreamingServerInterface(AgentChunkStreamingInterface):
|
|
|
550
578
|
processed_chunk = FunctionCallMessage(
|
|
551
579
|
id=message_id,
|
|
552
580
|
date=message_date,
|
|
553
|
-
function_call=FunctionCallDelta(
|
|
581
|
+
function_call=FunctionCallDelta(
|
|
582
|
+
name=None,
|
|
583
|
+
arguments=combined_chunk,
|
|
584
|
+
function_call_id=self.function_id_buffer,
|
|
585
|
+
),
|
|
554
586
|
)
|
|
555
587
|
# clear buffer
|
|
556
588
|
self.function_args_buffer = None
|
|
589
|
+
self.function_id_buffer = None
|
|
557
590
|
else:
|
|
558
591
|
# If there's no buffer to clear, just output a new chunk with new data
|
|
559
592
|
processed_chunk = FunctionCallMessage(
|
|
560
593
|
id=message_id,
|
|
561
594
|
date=message_date,
|
|
562
|
-
function_call=FunctionCallDelta(
|
|
595
|
+
function_call=FunctionCallDelta(
|
|
596
|
+
name=None,
|
|
597
|
+
arguments=updates_main_json,
|
|
598
|
+
function_call_id=self.function_id_buffer,
|
|
599
|
+
),
|
|
563
600
|
)
|
|
601
|
+
self.function_id_buffer = None
|
|
564
602
|
|
|
565
603
|
# # If there's something in the main_json buffer, we should add if to the arguments and release it together
|
|
566
604
|
# tool_call_delta = {}
|
|
@@ -22,7 +22,7 @@ def get_all_orgs(
|
|
|
22
22
|
Get a list of all orgs in the database
|
|
23
23
|
"""
|
|
24
24
|
try:
|
|
25
|
-
next_cursor, orgs = server.
|
|
25
|
+
next_cursor, orgs = server.organization_manager.list_organizations(cursor=cursor, limit=limit)
|
|
26
26
|
except HTTPException:
|
|
27
27
|
raise
|
|
28
28
|
except Exception as e:
|
|
@@ -38,22 +38,21 @@ def create_org(
|
|
|
38
38
|
"""
|
|
39
39
|
Create a new org in the database
|
|
40
40
|
"""
|
|
41
|
-
|
|
42
|
-
org = server.create_organization(request)
|
|
41
|
+
org = server.organization_manager.create_organization(request)
|
|
43
42
|
return org
|
|
44
43
|
|
|
45
44
|
|
|
46
|
-
@router.delete("/", tags=["admin"], response_model=Organization, operation_id="
|
|
45
|
+
@router.delete("/", tags=["admin"], response_model=Organization, operation_id="delete_organization_by_id")
|
|
47
46
|
def delete_org(
|
|
48
47
|
org_id: str = Query(..., description="The org_id key to be deleted."),
|
|
49
48
|
server: "SyncServer" = Depends(get_letta_server),
|
|
50
49
|
):
|
|
51
50
|
# TODO make a soft deletion, instead of a hard deletion
|
|
52
51
|
try:
|
|
53
|
-
org = server.
|
|
52
|
+
org = server.organization_manager.get_organization_by_id(org_id=org_id)
|
|
54
53
|
if org is None:
|
|
55
54
|
raise HTTPException(status_code=404, detail=f"Organization does not exist")
|
|
56
|
-
server.
|
|
55
|
+
server.organization_manager.delete_organization_by_id(org_id=org_id)
|
|
57
56
|
except HTTPException:
|
|
58
57
|
raise
|
|
59
58
|
except Exception as e:
|
|
@@ -26,7 +26,7 @@ router = APIRouter(prefix="/users", tags=["users", "admin"])
|
|
|
26
26
|
|
|
27
27
|
|
|
28
28
|
@router.get("/", tags=["admin"], response_model=List[User], operation_id="list_users")
|
|
29
|
-
def
|
|
29
|
+
def list_users(
|
|
30
30
|
cursor: Optional[str] = Query(None),
|
|
31
31
|
limit: Optional[int] = Query(50),
|
|
32
32
|
server: "SyncServer" = Depends(get_letta_server),
|
|
@@ -35,8 +35,7 @@ def get_all_users(
|
|
|
35
35
|
Get a list of all users in the database
|
|
36
36
|
"""
|
|
37
37
|
try:
|
|
38
|
-
next_cursor, users = server.
|
|
39
|
-
# processed_users = [{"user_id": user.id} for user in users]
|
|
38
|
+
next_cursor, users = server.user_manager.list_users(cursor=cursor, limit=limit)
|
|
40
39
|
except HTTPException:
|
|
41
40
|
raise
|
|
42
41
|
except Exception as e:
|
|
@@ -53,7 +52,7 @@ def create_user(
|
|
|
53
52
|
Create a new user in the database
|
|
54
53
|
"""
|
|
55
54
|
|
|
56
|
-
user = server.create_user(request)
|
|
55
|
+
user = server.user_manager.create_user(request)
|
|
57
56
|
return user
|
|
58
57
|
|
|
59
58
|
|
|
@@ -64,10 +63,10 @@ def delete_user(
|
|
|
64
63
|
):
|
|
65
64
|
# TODO make a soft deletion, instead of a hard deletion
|
|
66
65
|
try:
|
|
67
|
-
user = server.
|
|
66
|
+
user = server.user_manager.get_user_by_id(user_id=user_id)
|
|
68
67
|
if user is None:
|
|
69
68
|
raise HTTPException(status_code=404, detail=f"User does not exist")
|
|
70
|
-
server.
|
|
69
|
+
server.user_manager.delete_user_by_id(user_id=user_id)
|
|
71
70
|
except HTTPException:
|
|
72
71
|
raise
|
|
73
72
|
except Exception as e:
|
|
@@ -95,7 +94,7 @@ def get_api_keys(
|
|
|
95
94
|
"""
|
|
96
95
|
Get a list of all API keys for a user
|
|
97
96
|
"""
|
|
98
|
-
if server.
|
|
97
|
+
if server.user_manager.get_user_by_id(user_id=user_id) is None:
|
|
99
98
|
raise HTTPException(status_code=404, detail=f"User does not exist")
|
|
100
99
|
api_keys = server.ms.get_all_api_keys_for_user(user_id=user_id)
|
|
101
100
|
return api_keys
|