letta-nightly 0.4.1.dev20241009104130__py3-none-any.whl → 0.4.1.dev20241011104054__py3-none-any.whl
This diff compares the contents of two publicly released versions of this package as published to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in that registry.
- letta/agent_store/db.py +23 -7
- letta/cli/cli.py +27 -3
- letta/cli/cli_config.py +1 -1098
- letta/client/utils.py +7 -2
- letta/constants.py +21 -0
- letta/embeddings.py +3 -0
- letta/interface.py +6 -2
- letta/llm_api/google_ai.py +1 -1
- letta/llm_api/helpers.py +11 -4
- letta/llm_api/llm_api_tools.py +2 -12
- letta/llm_api/openai.py +6 -2
- letta/local_llm/constants.py +3 -0
- letta/providers.py +48 -6
- letta/server/server.py +10 -3
- letta/settings.py +1 -1
- letta/streaming_interface.py +8 -4
- {letta_nightly-0.4.1.dev20241009104130.dist-info → letta_nightly-0.4.1.dev20241011104054.dist-info}/METADATA +1 -1
- {letta_nightly-0.4.1.dev20241009104130.dist-info → letta_nightly-0.4.1.dev20241011104054.dist-info}/RECORD +21 -24
- letta/configs/anthropic.json +0 -13
- letta/configs/letta_hosted.json +0 -11
- letta/configs/openai.json +0 -12
- {letta_nightly-0.4.1.dev20241009104130.dist-info → letta_nightly-0.4.1.dev20241011104054.dist-info}/LICENSE +0 -0
- {letta_nightly-0.4.1.dev20241009104130.dist-info → letta_nightly-0.4.1.dev20241011104054.dist-info}/WHEEL +0 -0
- {letta_nightly-0.4.1.dev20241009104130.dist-info → letta_nightly-0.4.1.dev20241011104054.dist-info}/entry_points.txt +0 -0
letta/agent_store/db.py
CHANGED
@@ -398,8 +398,6 @@ class PostgresStorageConnector(SQLStorageConnector):
         return records

     def insert_many(self, records, exists_ok=True, show_progress=False):
-        pass
-
         # TODO: this is terrible, should eventually be done the same way for all types (migrate to SQLModel)
         if len(records) == 0:
             return
@@ -506,18 +504,36 @@ class SQLLiteStorageConnector(SQLStorageConnector):
         # sqlite3.register_converter("UUID", lambda b: uuid.UUID(bytes_le=b))

     def insert_many(self, records, exists_ok=True, show_progress=False):
-        pass
-
         # TODO: this is terrible, should eventually be done the same way for all types (migrate to SQLModel)
         if len(records) == 0:
             return
+
+        added_ids = []  # avoid adding duplicates
+        # NOTE: this has not great performance due to the excessive commits
         with self.session_maker() as session:
             iterable = tqdm(records) if show_progress else records
             for record in iterable:
                 # db_record = self.db_model(**vars(record))
-
-
-
+
+                if record.id in added_ids:
+                    continue
+
+                existing_record = session.query(self.db_model).filter_by(id=record.id).first()
+                if existing_record:
+                    if exists_ok:
+                        fields = record.model_dump()
+                        fields.pop("id")
+                        session.query(self.db_model).filter(self.db_model.id == record.id).update(fields)
+                        session.commit()
+                    else:
+                        raise ValueError(f"Record with id {record.id} already exists.")
+
+                else:
+                    db_record = self.db_model(**record.dict())
+                    session.add(db_record)
+                    session.commit()
+
+                added_ids.append(record.id)

     def insert(self, record, exists_ok=True):
         self.insert_many([record], exists_ok=exists_ok)
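For orientation, here is a minimal, self-contained sketch of the upsert pattern the new `insert_many` applies: skip ids already handled in the batch, update an existing row when `exists_ok` is set, raise otherwise, and insert new rows with a commit per record. It uses plain SQLAlchemy; the `Record` model and its fields are hypothetical stand-ins, not letta's actual schema.

```python
# Sketch of the per-record upsert used by the new insert_many (plain SQLAlchemy;
# the Record model and its fields are hypothetical, not letta's actual schema).
from sqlalchemy import Column, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()


class Record(Base):
    __tablename__ = "records"
    id = Column(String, primary_key=True)
    text = Column(String)


engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)


def insert_many(records: list[dict], exists_ok: bool = True) -> None:
    added_ids = []  # avoid adding duplicates within the same batch
    with Session() as session:
        for fields in records:
            if fields["id"] in added_ids:
                continue
            existing = session.query(Record).filter_by(id=fields["id"]).first()
            if existing:
                if exists_ok:
                    # update everything except the primary key
                    updates = {k: v for k, v in fields.items() if k != "id"}
                    session.query(Record).filter(Record.id == fields["id"]).update(updates)
                    session.commit()
                else:
                    raise ValueError(f"Record with id {fields['id']} already exists.")
            else:
                session.add(Record(**fields))
                session.commit()
            added_ids.append(fields["id"])


insert_many([{"id": "a", "text": "first"}, {"id": "a", "text": "same batch, skipped"}])
insert_many([{"id": "a", "text": "updated"}], exists_ok=True)
```

Committing once per record is what the added NOTE comment warns about; batching the commits outside the loop would be the usual way to speed this up.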
letta/cli/cli.py
CHANGED
@@ -11,9 +11,12 @@ from letta import create_client
 from letta.agent import Agent, save_agent
 from letta.config import LettaConfig
 from letta.constants import CLI_WARNING_PREFIX, LETTA_DIR
+from letta.local_llm.constants import ASSISTANT_MESSAGE_CLI_SYMBOL
 from letta.log import get_logger
 from letta.metadata import MetadataStore
+from letta.schemas.embedding_config import EmbeddingConfig
 from letta.schemas.enums import OptionState
+from letta.schemas.llm_config import LLMConfig
 from letta.schemas.memory import ChatMemory, Memory
 from letta.server.server import logger as server_logger

@@ -232,25 +235,46 @@ def run(
         # choose from list of llm_configs
         llm_configs = client.list_llm_configs()
         llm_options = [llm_config.model for llm_config in llm_configs]
+
+        # TODO move into LLMConfig as a class method?
+        def prettify_llm_config(llm_config: LLMConfig) -> str:
+            return f"{llm_config.model}" + f" ({llm_config.model_endpoint})" if llm_config.model_endpoint else ""
+
+        llm_choices = [questionary.Choice(title=prettify_llm_config(llm_config), value=llm_config) for llm_config in llm_configs]
+
         # select model
         if len(llm_options) == 0:
             raise ValueError("No LLM models found. Please enable a provider.")
         elif len(llm_options) == 1:
             llm_model_name = llm_options[0]
         else:
-            llm_model_name = questionary.select("Select LLM model:", choices=
+            llm_model_name = questionary.select("Select LLM model:", choices=llm_choices).ask().model
         llm_config = [llm_config for llm_config in llm_configs if llm_config.model == llm_model_name][0]

         # choose form list of embedding configs
         embedding_configs = client.list_embedding_configs()
         embedding_options = [embedding_config.embedding_model for embedding_config in embedding_configs]
+
+        # TODO move into EmbeddingConfig as a class method?
+        def prettify_embed_config(embedding_config: EmbeddingConfig) -> str:
+            return (
+                f"{embedding_config.embedding_model}" + f" ({embedding_config.embedding_endpoint})"
+                if embedding_config.embedding_endpoint
+                else ""
+            )
+
+        embedding_choices = [
+            questionary.Choice(title=prettify_embed_config(embedding_config), value=embedding_config)
+            for embedding_config in embedding_configs
+        ]
+
         # select model
         if len(embedding_options) == 0:
             raise ValueError("No embedding models found. Please enable a provider.")
         elif len(embedding_options) == 1:
             embedding_model_name = embedding_options[0]
         else:
-            embedding_model_name = questionary.select("Select embedding model:", choices=
+            embedding_model_name = questionary.select("Select embedding model:", choices=embedding_choices).ask().embedding_model
         embedding_config = [
             embedding_config for embedding_config in embedding_configs if embedding_config.embedding_model == embedding_model_name
         ][0]
@@ -276,7 +300,7 @@ def run(
         memory = ChatMemory(human=human_obj.value, persona=persona_obj.value, limit=core_memory_limit)
         metadata = {"human": human_obj.name, "persona": persona_obj.name}

-        typer.secho(f"->
+        typer.secho(f"-> {ASSISTANT_MESSAGE_CLI_SYMBOL} Using persona profile: '{persona_obj.name}'", fg=typer.colors.WHITE)
         typer.secho(f"-> 🧑 Using human profile: '{human_obj.name}'", fg=typer.colors.WHITE)

         # add tools
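The cli.py change replaces plain string choices with `questionary.Choice` objects whose titles include the endpoint, so picking an entry yields the full config object. Below is a minimal sketch of that pattern; the `LLMConfig` dataclass and the sample configs are illustrative stand-ins, not letta's schema classes. (Note that in the released helper the trailing `if ... else ""` binds to the whole expression, so a config without an endpoint renders an empty title; the sketch uses an explicit branch instead.)

```python
# Sketch of the questionary.Choice selection pattern added to `letta run`
# (the LLMConfig dataclass and sample configs below are illustrative stand-ins).
from dataclasses import dataclass
from typing import Optional

import questionary


@dataclass
class LLMConfig:
    model: str
    model_endpoint: Optional[str] = None


def prettify_llm_config(llm_config: LLMConfig) -> str:
    # "model (endpoint)" when an endpoint is configured, otherwise just the model name
    if llm_config.model_endpoint:
        return f"{llm_config.model} ({llm_config.model_endpoint})"
    return llm_config.model


llm_configs = [
    LLMConfig(model="gpt-4", model_endpoint="https://api.openai.com/v1"),
    LLMConfig(model="local-model"),
]

# Each Choice displays the prettified title but yields the config object itself.
choices = [questionary.Choice(title=prettify_llm_config(cfg), value=cfg) for cfg in llm_configs]
selected = questionary.select("Select LLM model:", choices=choices).ask()
print(selected.model, selected.model_endpoint)
```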