letta-nightly 0.5.0.dev20241022104124__py3-none-any.whl → 0.5.0.dev20241023104105__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of letta-nightly might be problematic.
- letta/__init__.py +7 -2
- letta/agent_store/db.py +4 -2
- letta/cli/cli_config.py +2 -2
- letta/client/client.py +13 -0
- letta/constants.py +4 -1
- letta/embeddings.py +34 -16
- letta/llm_api/azure_openai.py +44 -4
- letta/llm_api/openai.py +7 -1
- letta/metadata.py +1 -59
- letta/orm/__all__.py +0 -0
- letta/orm/__init__.py +0 -0
- letta/orm/base.py +75 -0
- letta/orm/enums.py +8 -0
- letta/orm/errors.py +2 -0
- letta/orm/mixins.py +40 -0
- letta/orm/organization.py +35 -0
- letta/orm/sqlalchemy_base.py +214 -0
- letta/schemas/organization.py +3 -3
- letta/server/rest_api/interface.py +47 -9
- letta/server/rest_api/routers/v1/organizations.py +4 -5
- letta/server/server.py +10 -25
- letta/services/__init__.py +0 -0
- letta/services/organization_manager.py +66 -0
- {letta_nightly-0.5.0.dev20241022104124.dist-info → letta_nightly-0.5.0.dev20241023104105.dist-info}/METADATA +2 -1
- {letta_nightly-0.5.0.dev20241022104124.dist-info → letta_nightly-0.5.0.dev20241023104105.dist-info}/RECORD +28 -20
- letta/base.py +0 -3
- letta/client/admin.py +0 -171
- {letta_nightly-0.5.0.dev20241022104124.dist-info → letta_nightly-0.5.0.dev20241023104105.dist-info}/LICENSE +0 -0
- {letta_nightly-0.5.0.dev20241022104124.dist-info → letta_nightly-0.5.0.dev20241023104105.dist-info}/WHEEL +0 -0
- {letta_nightly-0.5.0.dev20241022104124.dist-info → letta_nightly-0.5.0.dev20241023104105.dist-info}/entry_points.txt +0 -0
letta/orm/sqlalchemy_base.py
ADDED
@@ -0,0 +1,214 @@
+from typing import TYPE_CHECKING, List, Literal, Optional, Type, Union
+from uuid import UUID, uuid4
+
+from humps import depascalize
+from sqlalchemy import Boolean, String, select
+from sqlalchemy.orm import Mapped, mapped_column
+
+from letta.log import get_logger
+from letta.orm.base import Base, CommonSqlalchemyMetaMixins
+from letta.orm.errors import NoResultFound
+
+if TYPE_CHECKING:
+    from pydantic import BaseModel
+    from sqlalchemy.orm import Session
+
+    # from letta.orm.user import User
+
+logger = get_logger(__name__)
+
+
+class SqlalchemyBase(CommonSqlalchemyMetaMixins, Base):
+    __abstract__ = True
+
+    __order_by_default__ = "created_at"
+
+    _id: Mapped[str] = mapped_column(String, primary_key=True, default=lambda: f"{uuid4()}")
+
+    deleted: Mapped[bool] = mapped_column(Boolean, default=False, doc="Is this record deleted? Used for universal soft deletes.")
+
+    @classmethod
+    def __prefix__(cls) -> str:
+        return depascalize(cls.__name__)
+
+    @property
+    def id(self) -> Optional[str]:
+        if self._id:
+            return f"{self.__prefix__()}-{self._id}"
+
+    @id.setter
+    def id(self, value: str) -> None:
+        if not value:
+            return
+        prefix, id_ = value.split("-", 1)
+        assert prefix == self.__prefix__(), f"{prefix} is not a valid id prefix for {self.__class__.__name__}"
+        assert SqlalchemyBase.is_valid_uuid4(id_), f"{id_} is not a valid uuid4"
+        self._id = id_
+
+    @classmethod
+    def list(
+        cls, *, db_session: "Session", cursor: Optional[str] = None, limit: Optional[int] = 50, **kwargs
+    ) -> List[Type["SqlalchemyBase"]]:
+        """List records with optional cursor (for pagination) and limit."""
+        with db_session as session:
+            # Start with the base query filtered by kwargs
+            query = select(cls).filter_by(**kwargs)
+
+            # Add a cursor condition if provided
+            if cursor:
+                cursor_uuid = cls.get_uid_from_identifier(cursor)  # Assuming the cursor is an _id value
+                query = query.where(cls._id > cursor_uuid)
+
+            # Add a limit to the query if provided
+            query = query.order_by(cls._id).limit(limit)
+
+            # Handle soft deletes if the class has the 'is_deleted' attribute
+            if hasattr(cls, "is_deleted"):
+                query = query.where(cls.is_deleted == False)
+
+            # Execute the query and return the results as a list of model instances
+            return list(session.execute(query).scalars())
+
+    @classmethod
+    def get_uid_from_identifier(cls, identifier: str, indifferent: Optional[bool] = False) -> str:
+        """converts the id into a uuid object
+        Args:
+            identifier: the string identifier, such as `organization-xxxx-xx...`
+            indifferent: if True, will not enforce the prefix check
+        """
+        try:
+            uuid_string = identifier.split("-", 1)[1] if indifferent else identifier.replace(f"{cls.__prefix__()}-", "")
+            assert SqlalchemyBase.is_valid_uuid4(uuid_string)
+            return uuid_string
+        except ValueError as e:
+            raise ValueError(f"{identifier} is not a valid identifier for class {cls.__name__}") from e
+
+    @classmethod
+    def is_valid_uuid4(cls, uuid_string: str) -> bool:
+        try:
+            # Try to create a UUID object from the string
+            uuid_obj = UUID(uuid_string)
+            # Check if the UUID is version 4
+            return uuid_obj.version == 4
+        except ValueError:
+            # Raised if the string is not a valid UUID
+            return False
+
+    @classmethod
+    def read(
+        cls,
+        db_session: "Session",
+        identifier: Union[str, UUID],
+        actor: Optional["User"] = None,
+        access: Optional[List[Literal["read", "write", "admin"]]] = ["read"],
+        **kwargs,
+    ) -> Type["SqlalchemyBase"]:
+        """The primary accessor for an ORM record.
+        Args:
+            db_session: the database session to use when retrieving the record
+            identifier: the identifier of the record to read, can be the id string or the UUID object for backwards compatibility
+            actor: if specified, results will be scoped only to records the user is able to access
+            access: if actor is specified, records will be filtered to the minimum permission level for the actor
+            kwargs: additional arguments to pass to the read, used for more complex objects
+        Returns:
+            The matching object
+        Raises:
+            NoResultFound: if the object is not found
+        """
+        del kwargs  # arity for more complex reads
+        identifier = cls.get_uid_from_identifier(identifier)
+        query = select(cls).where(cls._id == identifier)
+        # if actor:
+        #     query = cls.apply_access_predicate(query, actor, access)
+        if hasattr(cls, "is_deleted"):
+            query = query.where(cls.is_deleted == False)
+        if found := db_session.execute(query).scalar():
+            return found
+        raise NoResultFound(f"{cls.__name__} with id {identifier} not found")
+
+    def create(self, db_session: "Session") -> Type["SqlalchemyBase"]:
+        # self._infer_organization(db_session)
+
+        with db_session as session:
+            session.add(self)
+            session.commit()
+            session.refresh(self)
+            return self
+
+    def delete(self, db_session: "Session") -> Type["SqlalchemyBase"]:
+        self.is_deleted = True
+        return self.update(db_session)
+
+    def update(self, db_session: "Session") -> Type["SqlalchemyBase"]:
+        with db_session as session:
+            session.add(self)
+            session.commit()
+            session.refresh(self)
+            return self
+
+    @classmethod
+    def read_or_create(cls, *, db_session: "Session", **kwargs) -> Type["SqlalchemyBase"]:
+        """get an instance by search criteria or create it if it doesn't exist"""
+        try:
+            return cls.read(db_session=db_session, identifier=kwargs.get("id", None))
+        except NoResultFound:
+            clean_kwargs = {k: v for k, v in kwargs.items() if k in cls.__table__.columns}
+            return cls(**clean_kwargs).create(db_session=db_session)
+
+    # TODO: Add back later when access predicates are actually important
+    # The idea behind this is that you can add a WHERE clause restricting the actions you can take, e.g. R/W
+    # @classmethod
+    # def apply_access_predicate(
+    #     cls,
+    #     query: "Select",
+    #     actor: "User",
+    #     access: List[Literal["read", "write", "admin"]],
+    # ) -> "Select":
+    #     """applies a WHERE clause restricting results to the given actor and access level
+    #     Args:
+    #         query: The initial sqlalchemy select statement
+    #         actor: The user acting on the query. **Note**: this is called 'actor' to identify the
+    #             person or system acting. Users can act on users, making naming very sticky otherwise.
+    #         access:
+    #             what mode of access should the query restrict to? This will be used with granular permissions,
+    #             but because of how it will impact every query we want to be explicitly calling access ahead of time.
+    #     Returns:
+    #         the sqlalchemy select statement restricted to the given access.
+    #     """
+    #     del access  # entrypoint for row-level permissions. Defaults to "same org as the actor, all permissions" at the moment
+    #     org_uid = getattr(actor, "_organization_id", getattr(actor.organization, "_id", None))
+    #     if not org_uid:
+    #         raise ValueError("object %s has no organization accessor", actor)
+    #     return query.where(cls._organization_id == org_uid, cls.is_deleted == False)
+
+    @property
+    def __pydantic_model__(self) -> Type["BaseModel"]:
+        raise NotImplementedError("Sqlalchemy models must declare a __pydantic_model__ property to be convertable.")
+
+    def to_pydantic(self) -> Type["BaseModel"]:
+        """converts to the basic pydantic model counterpart"""
+        return self.__pydantic_model__.model_validate(self)
+
+    def to_record(self) -> Type["BaseModel"]:
+        """Deprecated accessor for to_pydantic"""
+        logger.warning("to_record is deprecated, use to_pydantic instead.")
+        return self.to_pydantic()
+
+    # TODO: Look into this later and maybe add back?
+    # def _infer_organization(self, db_session: "Session") -> None:
+    #     """🪄 MAGIC ALERT! 🪄
+    #     Because so much of the original API is centered around user scopes,
+    #     this allows us to continue with that scope and then infer the org from the creating user.
+    #
+    #     IF a created_by_id is set, we will use that to infer the organization and magic set it at create time!
+    #     If not do nothing to the object. Mutates in place.
+    #     """
+    #     if self.created_by_id and hasattr(self, "_organization_id"):
+    #         try:
+    #             from letta.orm.user import User  # to avoid circular import
+    #
+    #             created_by = User.read(db_session, self.created_by_id)
+    #         except NoResultFound:
+    #             logger.warning(f"User {self.created_by_id} not found, unable to infer organization.")
+    #             return
+    #         self._organization_id = created_by._organization_id
letta/schemas/organization.py
CHANGED
@@ -7,13 +7,13 @@ from letta.schemas.letta_base import LettaBase


class OrganizationBase(LettaBase):
-    __id_prefix__ = "
+    __id_prefix__ = "organization"


class Organization(OrganizationBase):
-    id: str =
+    id: str = Field(..., description="The id of the organization.")
    name: str = Field(..., description="The name of the organization.")
-    created_at: datetime = Field(default_factory=datetime.utcnow, description="The creation date of the
+    created_at: datetime = Field(default_factory=datetime.utcnow, description="The creation date of the organization.")


class OrganizationCreate(OrganizationBase):
letta/server/rest_api/interface.py
CHANGED
@@ -312,11 +312,20 @@ class StreamingServerInterface(AgentChunkStreamingInterface):
        # Two buffers used to make sure that the 'name' comes after the inner thoughts stream (if inner_thoughts_in_kwargs)
        self.function_name_buffer = None
        self.function_args_buffer = None
+        self.function_id_buffer = None

        # extra prints
        self.debug = False
        self.timeout = 30

+    def _reset_inner_thoughts_json_reader(self):
+        # A buffer for accumulating function arguments (we want to buffer keys and run checks on each one)
+        self.function_args_reader = JSONInnerThoughtsExtractor(inner_thoughts_key=self.inner_thoughts_kwarg, wait_for_first_key=True)
+        # Two buffers used to make sure that the 'name' comes after the inner thoughts stream (if inner_thoughts_in_kwargs)
+        self.function_name_buffer = None
+        self.function_args_buffer = None
+        self.function_id_buffer = None
+
    async def _create_generator(self) -> AsyncGenerator[Union[LettaMessage, LegacyLettaMessage, MessageStreamStatus], None]:
        """An asynchronous generator that yields chunks as they become available."""
        while self._active:
@@ -376,6 +385,9 @@ class StreamingServerInterface(AgentChunkStreamingInterface):
        if not self.streaming_chat_completion_mode and not self.nonstreaming_legacy_mode:
            self._push_to_buffer(self.multi_step_gen_indicator)

+        # Wipe the inner thoughts buffers
+        self._reset_inner_thoughts_json_reader()
+
    def step_complete(self):
        """Signal from the agent that one 'step' finished (step = LLM response + tool execution)"""
        if not self.multi_step:
@@ -386,6 +398,9 @@ class StreamingServerInterface(AgentChunkStreamingInterface):
            # signal that a new step has started in the stream
            self._push_to_buffer(self.multi_step_indicator)

+        # Wipe the inner thoughts buffers
+        self._reset_inner_thoughts_json_reader()
+
    def step_yield(self):
        """If multi_step, this is the true 'stream_end' function."""
        self._active = False
@@ -498,6 +513,13 @@ class StreamingServerInterface(AgentChunkStreamingInterface):
                else:
                    self.function_name_buffer += tool_call.function.name

+            if tool_call.id:
+                # Buffer until next time
+                if self.function_id_buffer is None:
+                    self.function_id_buffer = tool_call.id
+                else:
+                    self.function_id_buffer += tool_call.id
+
            if tool_call.function.arguments:
                updates_main_json, updates_inner_thoughts = self.function_args_reader.process_fragment(tool_call.function.arguments)

@@ -518,6 +540,7 @@ class StreamingServerInterface(AgentChunkStreamingInterface):

                # If we have main_json, we should output a FunctionCallMessage
                elif updates_main_json:
+
                    # If there's something in the function_name buffer, we should release it first
                    # NOTE: we could output it as part of a chunk that has both name and args,
                    # however the frontend may expect name first, then args, so to be
@@ -526,18 +549,23 @@ class StreamingServerInterface(AgentChunkStreamingInterface):
                        processed_chunk = FunctionCallMessage(
                            id=message_id,
                            date=message_date,
-                            function_call=FunctionCallDelta(
+                            function_call=FunctionCallDelta(
+                                name=self.function_name_buffer,
+                                arguments=None,
+                                function_call_id=self.function_id_buffer,
+                            ),
                        )
                        # Clear the buffer
                        self.function_name_buffer = None
+                        self.function_id_buffer = None
                        # Since we're clearing the name buffer, we should store
                        # any updates to the arguments inside a separate buffer
-
-
-
-
-
-
+
+                        # Add any main_json updates to the arguments buffer
+                        if self.function_args_buffer is None:
+                            self.function_args_buffer = updates_main_json
+                        else:
+                            self.function_args_buffer += updates_main_json

                    # If there was nothing in the name buffer, we can proceed to
                    # output the arguments chunk as a FunctionCallMessage
@@ -550,17 +578,27 @@ class StreamingServerInterface(AgentChunkStreamingInterface):
                        processed_chunk = FunctionCallMessage(
                            id=message_id,
                            date=message_date,
-                            function_call=FunctionCallDelta(
+                            function_call=FunctionCallDelta(
+                                name=None,
+                                arguments=combined_chunk,
+                                function_call_id=self.function_id_buffer,
+                            ),
                        )
                        # clear buffer
                        self.function_args_buffer = None
+                        self.function_id_buffer = None
                    else:
                        # If there's no buffer to clear, just output a new chunk with new data
                        processed_chunk = FunctionCallMessage(
                            id=message_id,
                            date=message_date,
-                            function_call=FunctionCallDelta(
+                            function_call=FunctionCallDelta(
+                                name=None,
+                                arguments=updates_main_json,
+                                function_call_id=self.function_id_buffer,
+                            ),
                        )
+                        self.function_id_buffer = None

                    # # If there's something in the main_json buffer, we should add if to the arguments and release it together
                    # tool_call_delta = {}
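The interface.py changes above thread a new function_id_buffer through the streaming path: in a streamed tool call the id usually arrives on the first chunk, before the name and arguments have finished, so it is buffered and attached to whichever FunctionCallDelta is emitted first, then cleared. Below is a rough, self-contained sketch of that buffering idea; the ToolCallChunk dataclass and the plain-dict deltas are stand-ins for letta's actual chunk and FunctionCallMessage/FunctionCallDelta types.

from dataclasses import dataclass
from typing import Iterable, Iterator, Optional


@dataclass
class ToolCallChunk:
    # Hypothetical shape of one streamed tool-call fragment (not letta's actual type).
    id: Optional[str] = None
    name: Optional[str] = None
    arguments: Optional[str] = None


def stream_tool_call(chunks: Iterable[ToolCallChunk]) -> Iterator[dict]:
    name_buffer: Optional[str] = None
    id_buffer: Optional[str] = None
    for chunk in chunks:
        # Buffer name and id fragments until there is something to emit.
        if chunk.name:
            name_buffer = (name_buffer or "") + chunk.name
        if chunk.id:
            id_buffer = (id_buffer or "") + chunk.id
        if chunk.arguments:
            if name_buffer:
                # Release the buffered name first, carrying the buffered id with it.
                yield {"name": name_buffer, "arguments": None, "function_call_id": id_buffer}
                name_buffer, id_buffer = None, None
            # Argument deltas only carry the id if it has not been emitted yet.
            yield {"name": None, "arguments": chunk.arguments, "function_call_id": id_buffer}
            id_buffer = None


chunks = [
    ToolCallChunk(id="call_123", name="send_message"),
    ToolCallChunk(arguments='{"message": '),
    ToolCallChunk(arguments='"hi"}'),
]
for delta in stream_tool_call(chunks):
    print(delta)
# {'name': 'send_message', 'arguments': None, 'function_call_id': 'call_123'}
# {'name': None, 'arguments': '{"message": ', 'function_call_id': None}
# {'name': None, 'arguments': '"hi"}', 'function_call_id': None}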
letta/server/rest_api/routers/v1/organizations.py
CHANGED
@@ -22,7 +22,7 @@ def get_all_orgs(
    Get a list of all orgs in the database
    """
    try:
-        next_cursor, orgs = server.
+        next_cursor, orgs = server.organization_manager.list_organizations(cursor=cursor, limit=limit)
    except HTTPException:
        raise
    except Exception as e:
@@ -38,8 +38,7 @@ def create_org(
    """
    Create a new org in the database
    """
-
-    org = server.create_organization(request)
+    org = server.organization_manager.create_organization(request)
    return org


@@ -50,10 +49,10 @@ def delete_org(
):
    # TODO make a soft deletion, instead of a hard deletion
    try:
-        org = server.
+        org = server.organization_manager.get_organization_by_id(org_id=org_id)
        if org is None:
            raise HTTPException(status_code=404, detail=f"Organization does not exist")
-        server.
+        server.organization_manager.delete_organization(org_id=org_id)
    except HTTPException:
        raise
    except Exception as e:
letta/server/server.py
CHANGED
@@ -44,6 +44,7 @@ from letta.log import get_logger
from letta.memory import get_memory_functions
from letta.metadata import Base, MetadataStore
from letta.o1_agent import O1Agent
+from letta.orm.errors import NoResultFound
from letta.prompts import gpt_system
from letta.providers import (
    AnthropicProvider,
@@ -80,12 +81,12 @@ from letta.schemas.memory import (
    RecallMemorySummary,
)
from letta.schemas.message import Message, MessageCreate, MessageRole, UpdateMessage
-from letta.schemas.organization import Organization, OrganizationCreate
from letta.schemas.passage import Passage
from letta.schemas.source import Source, SourceCreate, SourceUpdate
from letta.schemas.tool import Tool, ToolCreate, ToolUpdate
from letta.schemas.usage import LettaUsageStatistics
from letta.schemas.user import User, UserCreate
+from letta.services.organization_manager import OrganizationManager
from letta.utils import create_random_username, json_dumps, json_loads

# from letta.llm_api_tools import openai_get_model_list, azure_openai_get_model_list, smart_urljoin
@@ -245,6 +246,9 @@ class SyncServer(Server):
        self.config = config
        self.ms = MetadataStore(self.config)

+        # Managers that interface with data models
+        self.organization_manager = OrganizationManager()
+
        # TODO: this should be removed
        # add global default tools (for admin)
        self.add_default_tools(module_name="base")
@@ -773,20 +777,6 @@ class SyncServer(Server):

        return user

-    def create_organization(self, request: OrganizationCreate) -> Organization:
-        """Create a new org using a config"""
-        if not request.name:
-            # auto-generate a name
-            request.name = create_random_username()
-        org = Organization(name=request.name)
-        self.ms.create_organization(org)
-        logger.info(f"Created new org from config: {org}")
-
-        # add default for the org
-        # TODO: add default data
-
-        return org
-
    def create_agent(
        self,
        request: CreateAgent,
@@ -2125,18 +2115,13 @@ class SyncServer(Server):

    def get_default_user(self) -> User:

-        from letta.constants import (
-            DEFAULT_ORG_ID,
-            DEFAULT_ORG_NAME,
-            DEFAULT_USER_ID,
-            DEFAULT_USER_NAME,
-        )
+        from letta.constants import DEFAULT_ORG_ID, DEFAULT_USER_ID, DEFAULT_USER_NAME

        # check if default org exists
-
-
-
-        self.
+        try:
+            self.organization_manager.get_organization_by_id(DEFAULT_ORG_ID)
+        except NoResultFound:
+            self.organization_manager.create_default_organization()

        # check if default user exists
        try:
letta/services/__init__.py
File without changes
letta/services/organization_manager.py
ADDED
@@ -0,0 +1,66 @@
+from typing import List, Optional
+
+from sqlalchemy.exc import NoResultFound
+
+from letta.constants import DEFAULT_ORG_ID, DEFAULT_ORG_NAME
+from letta.orm.organization import Organization
+from letta.schemas.organization import Organization as PydanticOrganization
+from letta.utils import create_random_username
+
+
+class OrganizationManager:
+    """Manager class to handle business logic related to Organizations."""
+
+    def __init__(self):
+        # This is probably horrible but we reuse this technique from metadata.py
+        # TODO: Please refactor this out
+        # I am currently working on a ORM refactor and would like to make a more minimal set of changes
+        # - Matt
+        from letta.server.server import db_context
+
+        self.session_maker = db_context
+
+    def get_organization_by_id(self, org_id: str) -> PydanticOrganization:
+        """Fetch an organization by ID."""
+        with self.session_maker() as session:
+            try:
+                organization = Organization.read(db_session=session, identifier=org_id)
+                return organization.to_pydantic()
+            except NoResultFound:
+                raise ValueError(f"Organization with id {org_id} not found.")
+
+    def create_organization(self, name: Optional[str] = None) -> PydanticOrganization:
+        """Create a new organization. If a name is provided, it is used, otherwise, a random one is generated."""
+        with self.session_maker() as session:
+            org = Organization(name=name if name else create_random_username())
+            org.create(session)
+            return org.to_pydantic()
+
+    def create_default_organization(self) -> PydanticOrganization:
+        """Create the default organization."""
+        with self.session_maker() as session:
+            org = Organization(name=DEFAULT_ORG_NAME)
+            org.id = DEFAULT_ORG_ID
+            org.create(session)
+            return org.to_pydantic()
+
+    def update_organization_name_using_id(self, org_id: str, name: Optional[str] = None) -> PydanticOrganization:
+        """Update an organization."""
+        with self.session_maker() as session:
+            organization = Organization.read(db_session=session, identifier=org_id)
+            if name:
+                organization.name = name
+            organization.update(session)
+            return organization.to_pydantic()
+
+    def delete_organization(self, org_id: str):
+        """Delete an organization by marking it as deleted."""
+        with self.session_maker() as session:
+            organization = Organization.read(db_session=session, identifier=org_id)
+            organization.delete(session)
+
+    def list_organizations(self, cursor: Optional[str] = None, limit: Optional[int] = 50) -> List[PydanticOrganization]:
+        """List organizations with pagination based on cursor (org_id) and limit."""
+        with self.session_maker() as session:
+            results = Organization.list(db_session=session, cursor=cursor, limit=limit)
+            return [org.to_pydantic() for org in results]
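A hypothetical usage sketch for the new OrganizationManager, assuming it is constructed inside a running letta server process so that letta.server.server.db_context (the session factory it borrows in __init__) is already initialized; the organization names and page size below are made up.

from letta.services.organization_manager import OrganizationManager

manager = OrganizationManager()

acme = manager.create_organization(name="acme")  # explicit name
auto = manager.create_organization()             # no name -> a random username is generated

# Cursor-based pagination: pass the last returned org id as the cursor for the next page.
page = manager.list_organizations(limit=10)
while page:
    for org in page:
        print(org.id, org.name)
    page = manager.list_organizations(cursor=page[-1].id, limit=10)

manager.update_organization_name_using_id(acme.id, name="acme-renamed")
manager.delete_organization(auto.id)  # per the docstring above, the org is marked deleted rather than removed

Note that list_organizations pages on the organization id itself, so the last id of one page doubles as the cursor for the next.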
{letta_nightly-0.5.0.dev20241022104124.dist-info → letta_nightly-0.5.0.dev20241023104105.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: letta-nightly
-Version: 0.5.0.dev20241022104124
+Version: 0.5.0.dev20241023104105
Summary: Create LLM agents with long-term memory and custom tools
License: Apache License
Author: Letta Team
@@ -55,6 +55,7 @@ Requires-Dist: prettytable (>=3.9.0,<4.0.0)
Requires-Dist: pyautogen (==0.2.22) ; extra == "autogen"
Requires-Dist: pydantic (>=2.7.4,<3.0.0)
Requires-Dist: pydantic-settings (>=2.2.1,<3.0.0)
+Requires-Dist: pyhumps (>=3.8.0,<4.0.0)
Requires-Dist: pymilvus (>=2.4.3,<3.0.0) ; extra == "milvus"
Requires-Dist: pyright (>=1.1.347,<2.0.0) ; extra == "dev"
Requires-Dist: pytest-asyncio (>=0.23.2,<0.24.0) ; extra == "dev"