letta-nightly 0.4.1.dev20241009104130__tar.gz → 0.4.1.dev20241011104054__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of letta-nightly might be problematic.
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/PKG-INFO +1 -1
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/agent_store/db.py +23 -7
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/cli/cli.py +27 -3
- letta_nightly-0.4.1.dev20241011104054/letta/cli/cli_config.py +228 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/client/utils.py +7 -2
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/constants.py +21 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/embeddings.py +3 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/interface.py +6 -2
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/llm_api/google_ai.py +1 -1
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/llm_api/helpers.py +11 -4
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/llm_api/llm_api_tools.py +2 -12
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/llm_api/openai.py +6 -2
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/constants.py +3 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/providers.py +48 -6
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/server.py +10 -3
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/settings.py +1 -1
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/streaming_interface.py +8 -4
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/pyproject.toml +1 -1
- letta_nightly-0.4.1.dev20241009104130/letta/cli/cli_config.py +0 -1325
- letta_nightly-0.4.1.dev20241009104130/letta/configs/anthropic.json +0 -13
- letta_nightly-0.4.1.dev20241009104130/letta/configs/letta_hosted.json +0 -11
- letta_nightly-0.4.1.dev20241009104130/letta/configs/openai.json +0 -12
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/LICENSE +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/README.md +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/__main__.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/agent.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/agent_store/chroma.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/agent_store/lancedb.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/agent_store/milvus.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/agent_store/qdrant.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/agent_store/storage.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/benchmark/benchmark.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/benchmark/constants.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/cli/cli_load.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/client/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/client/admin.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/client/client.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/client/streaming.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/config.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/credentials.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/data_sources/connectors.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/errors.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/functions/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/functions/function_sets/base.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/functions/function_sets/extras.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/functions/functions.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/functions/helpers.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/functions/schema_generator.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/humans/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/humans/examples/basic.txt +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/humans/examples/cs_phd.txt +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/llm_api/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/llm_api/anthropic.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/llm_api/azure_openai.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/llm_api/azure_openai_constants.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/llm_api/cohere.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/README.md +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/chat_completion_proxy.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/function_parser.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/grammars/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/grammars/gbnf_grammar_generator.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/grammars/json.gbnf +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/grammars/json_func_calls_with_inner_thoughts.gbnf +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/json_parser.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/koboldcpp/api.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/koboldcpp/settings.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/llamacpp/api.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/llamacpp/settings.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/llm_chat_completion_wrappers/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/llm_chat_completion_wrappers/airoboros.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/llm_chat_completion_wrappers/chatml.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/llm_chat_completion_wrappers/configurable_wrapper.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/llm_chat_completion_wrappers/dolphin.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/llm_chat_completion_wrappers/llama3.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/llm_chat_completion_wrappers/simple_summary_wrapper.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/llm_chat_completion_wrappers/wrapper_base.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/llm_chat_completion_wrappers/zephyr.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/lmstudio/api.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/lmstudio/settings.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/ollama/api.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/ollama/settings.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/settings/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/settings/deterministic_mirostat.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/settings/settings.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/settings/simple.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/utils.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/vllm/api.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/webui/api.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/webui/legacy_api.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/webui/legacy_settings.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/local_llm/webui/settings.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/log.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/main.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/memory.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/metadata.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/openai_backcompat/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/openai_backcompat/openai_object.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/persistence_manager.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/personas/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/personas/examples/anna_pa.txt +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/personas/examples/google_search_persona.txt +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/personas/examples/memgpt_doc.txt +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/personas/examples/memgpt_starter.txt +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/personas/examples/sam.txt +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/personas/examples/sam_pov.txt +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/personas/examples/sam_simple_pov_gpt35.txt +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/personas/examples/sqldb/test.db +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/prompts/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/prompts/gpt_summarize.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/prompts/gpt_system.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/prompts/system/memgpt_base.txt +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/prompts/system/memgpt_chat.txt +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/prompts/system/memgpt_chat_compressed.txt +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/prompts/system/memgpt_chat_fstring.txt +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/prompts/system/memgpt_doc.txt +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/prompts/system/memgpt_gpt35_extralong.txt +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/prompts/system/memgpt_intuitive_knowledge.txt +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/prompts/system/memgpt_modified_chat.txt +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/pytest.ini +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/agent.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/api_key.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/block.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/document.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/embedding_config.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/enums.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/health.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/job.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/letta_base.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/letta_message.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/letta_request.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/letta_response.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/llm_config.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/memory.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/message.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/openai/chat_completion_request.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/openai/chat_completion_response.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/openai/chat_completions.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/openai/embedding_response.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/openai/openai.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/organization.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/passage.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/source.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/tool.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/usage.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/schemas/user.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/constants.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/admin/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/admin/agents.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/admin/tools.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/admin/users.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/app.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/auth/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/auth/index.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/auth_token.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/interface.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/routers/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/routers/openai/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/routers/openai/assistants/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/routers/openai/assistants/assistants.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/routers/openai/assistants/schemas.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/routers/openai/assistants/threads.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/routers/openai/chat_completions/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/routers/openai/chat_completions/chat_completions.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/routers/v1/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/routers/v1/agents.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/routers/v1/blocks.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/routers/v1/health.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/routers/v1/jobs.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/routers/v1/llms.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/routers/v1/organizations.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/routers/v1/sources.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/routers/v1/tools.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/routers/v1/users.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/static_files.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/rest_api/utils.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/startup.sh +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/static_files/assets/index-3ab03d5b.css +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/static_files/assets/index-9a9c449b.js +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/static_files/favicon.ico +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/static_files/index.html +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/static_files/memgpt_logo_transparent.png +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/utils.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/ws_api/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/ws_api/example_client.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/ws_api/interface.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/ws_api/protocol.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/server/ws_api/server.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/system.py +0 -0
- {letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/utils.py +0 -0
{letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/agent_store/db.py
RENAMED

@@ -398,8 +398,6 @@ class PostgresStorageConnector(SQLStorageConnector):
         return records
 
     def insert_many(self, records, exists_ok=True, show_progress=False):
-        pass
-
         # TODO: this is terrible, should eventually be done the same way for all types (migrate to SQLModel)
         if len(records) == 0:
             return
@@ -506,18 +504,36 @@ class SQLLiteStorageConnector(SQLStorageConnector):
         # sqlite3.register_converter("UUID", lambda b: uuid.UUID(bytes_le=b))
 
     def insert_many(self, records, exists_ok=True, show_progress=False):
-        pass
-
         # TODO: this is terrible, should eventually be done the same way for all types (migrate to SQLModel)
         if len(records) == 0:
             return
+
+        added_ids = []  # avoid adding duplicates
+        # NOTE: this has not great performance due to the excessive commits
         with self.session_maker() as session:
             iterable = tqdm(records) if show_progress else records
             for record in iterable:
                 # db_record = self.db_model(**vars(record))
-
-
-
+
+                if record.id in added_ids:
+                    continue
+
+                existing_record = session.query(self.db_model).filter_by(id=record.id).first()
+                if existing_record:
+                    if exists_ok:
+                        fields = record.model_dump()
+                        fields.pop("id")
+                        session.query(self.db_model).filter(self.db_model.id == record.id).update(fields)
+                        session.commit()
+                    else:
+                        raise ValueError(f"Record with id {record.id} already exists.")
+
+                else:
+                    db_record = self.db_model(**record.dict())
+                    session.add(db_record)
+                    session.commit()
+
+                added_ids.append(record.id)
 
     def insert(self, record, exists_ok=True):
         self.insert_many([record], exists_ok=exists_ok)
{letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/cli/cli.py
RENAMED

@@ -11,9 +11,12 @@ from letta import create_client
 from letta.agent import Agent, save_agent
 from letta.config import LettaConfig
 from letta.constants import CLI_WARNING_PREFIX, LETTA_DIR
+from letta.local_llm.constants import ASSISTANT_MESSAGE_CLI_SYMBOL
 from letta.log import get_logger
 from letta.metadata import MetadataStore
+from letta.schemas.embedding_config import EmbeddingConfig
 from letta.schemas.enums import OptionState
+from letta.schemas.llm_config import LLMConfig
 from letta.schemas.memory import ChatMemory, Memory
 from letta.server.server import logger as server_logger
 
@@ -232,25 +235,46 @@ def run(
     # choose from list of llm_configs
     llm_configs = client.list_llm_configs()
     llm_options = [llm_config.model for llm_config in llm_configs]
+
+    # TODO move into LLMConfig as a class method?
+    def prettify_llm_config(llm_config: LLMConfig) -> str:
+        return f"{llm_config.model}" + f" ({llm_config.model_endpoint})" if llm_config.model_endpoint else ""
+
+    llm_choices = [questionary.Choice(title=prettify_llm_config(llm_config), value=llm_config) for llm_config in llm_configs]
+
     # select model
     if len(llm_options) == 0:
         raise ValueError("No LLM models found. Please enable a provider.")
     elif len(llm_options) == 1:
         llm_model_name = llm_options[0]
     else:
-        llm_model_name = questionary.select("Select LLM model:", choices=
+        llm_model_name = questionary.select("Select LLM model:", choices=llm_choices).ask().model
     llm_config = [llm_config for llm_config in llm_configs if llm_config.model == llm_model_name][0]
 
     # choose form list of embedding configs
     embedding_configs = client.list_embedding_configs()
     embedding_options = [embedding_config.embedding_model for embedding_config in embedding_configs]
+
+    # TODO move into EmbeddingConfig as a class method?
+    def prettify_embed_config(embedding_config: EmbeddingConfig) -> str:
+        return (
+            f"{embedding_config.embedding_model}" + f" ({embedding_config.embedding_endpoint})"
+            if embedding_config.embedding_endpoint
+            else ""
+        )
+
+    embedding_choices = [
+        questionary.Choice(title=prettify_embed_config(embedding_config), value=embedding_config)
+        for embedding_config in embedding_configs
+    ]
+
     # select model
     if len(embedding_options) == 0:
         raise ValueError("No embedding models found. Please enable a provider.")
     elif len(embedding_options) == 1:
         embedding_model_name = embedding_options[0]
     else:
-        embedding_model_name = questionary.select("Select embedding model:", choices=
+        embedding_model_name = questionary.select("Select embedding model:", choices=embedding_choices).ask().embedding_model
     embedding_config = [
         embedding_config for embedding_config in embedding_configs if embedding_config.embedding_model == embedding_model_name
     ][0]
@@ -276,7 +300,7 @@ def run(
     memory = ChatMemory(human=human_obj.value, persona=persona_obj.value, limit=core_memory_limit)
     metadata = {"human": human_obj.name, "persona": persona_obj.name}
 
-    typer.secho(f"->
+    typer.secho(f"-> {ASSISTANT_MESSAGE_CLI_SYMBOL} Using persona profile: '{persona_obj.name}'", fg=typer.colors.WHITE)
     typer.secho(f"-> 🧑 Using human profile: '{human_obj.name}'", fg=typer.colors.WHITE)
 
     # add tools
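
The selection change above passes whole config objects through questionary.Choice(value=...), so .ask() returns the chosen object rather than its label (hence the trailing .model / .embedding_model). A rough illustration of that pattern, using a made-up dataclass instead of Letta's LLMConfig:

from dataclasses import dataclass
from typing import Optional

import questionary


@dataclass
class ModelOption:  # stand-in for LLMConfig, purely illustrative
    model: str
    model_endpoint: Optional[str] = None


def prettify(opt: ModelOption) -> str:
    # human-readable label shown in the menu; the object itself travels as the value
    return opt.model + (f" ({opt.model_endpoint})" if opt.model_endpoint else "")


options = [
    ModelOption("gpt-4o-mini", "https://api.openai.com/v1"),
    ModelOption("letta-free", "https://inference.memgpt.ai"),
]
choices = [questionary.Choice(title=prettify(o), value=o) for o in options]

selected = questionary.select("Select LLM model:", choices=choices).ask()
print(selected.model)  # .ask() returns the ModelOption object, not the title string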
letta_nightly-0.4.1.dev20241011104054/letta/cli/cli_config.py

@@ -0,0 +1,228 @@
+import ast
+import os
+from enum import Enum
+from typing import Annotated, List, Optional
+
+import questionary
+import typer
+from prettytable.colortable import ColorTable, Themes
+from tqdm import tqdm
+
+from letta import utils
+
+app = typer.Typer()
+
+
+@app.command()
+def configure():
+    """Updates default Letta configurations
+
+    This function and quickstart should be the ONLY place where LettaConfig.save() is called
+    """
+    print("`letta configure` has been deprecated. Please see documentation on configuration, and run `letta run` instead.")
+
+
+class ListChoice(str, Enum):
+    agents = "agents"
+    humans = "humans"
+    personas = "personas"
+    sources = "sources"
+
+
+@app.command()
+def list(arg: Annotated[ListChoice, typer.Argument]):
+    from letta.client.client import create_client
+
+    client = create_client()
+    table = ColorTable(theme=Themes.OCEAN)
+    if arg == ListChoice.agents:
+        """List all agents"""
+        table.field_names = ["Name", "LLM Model", "Embedding Model", "Embedding Dim", "Persona", "Human", "Data Source", "Create Time"]
+        for agent in tqdm(client.list_agents()):
+            # TODO: add this function
+            sources = client.list_attached_sources(agent_id=agent.id)
+            source_names = [source.name for source in sources if source is not None]
+            table.add_row(
+                [
+                    agent.name,
+                    agent.llm_config.model,
+                    agent.embedding_config.embedding_model,
+                    agent.embedding_config.embedding_dim,
+                    agent.memory.get_block("persona").value[:100] + "...",
+                    agent.memory.get_block("human").value[:100] + "...",
+                    ",".join(source_names),
+                    utils.format_datetime(agent.created_at),
+                ]
+            )
+        print(table)
+    elif arg == ListChoice.humans:
+        """List all humans"""
+        table.field_names = ["Name", "Text"]
+        for human in client.list_humans():
+            table.add_row([human.name, human.value.replace("\n", "")[:100]])
+        print(table)
+    elif arg == ListChoice.personas:
+        """List all personas"""
+        table.field_names = ["Name", "Text"]
+        for persona in client.list_personas():
+            table.add_row([persona.name, persona.value.replace("\n", "")[:100]])
+        print(table)
+    elif arg == ListChoice.sources:
+        """List all data sources"""
+
+        # create table
+        table.field_names = ["Name", "Description", "Embedding Model", "Embedding Dim", "Created At"]
+        # TODO: eventually look accross all storage connections
+        # TODO: add data source stats
+        # TODO: connect to agents
+
+        # get all sources
+        for source in client.list_sources():
+            # get attached agents
+            table.add_row(
+                [
+                    source.name,
+                    source.description,
+                    source.embedding_config.embedding_model,
+                    source.embedding_config.embedding_dim,
+                    utils.format_datetime(source.created_at),
+                ]
+            )
+
+        print(table)
+    else:
+        raise ValueError(f"Unknown argument {arg}")
+    return table
+
+
+@app.command()
+def add_tool(
+    filename: str = typer.Option(..., help="Path to the Python file containing the function"),
+    name: Optional[str] = typer.Option(None, help="Name of the tool"),
+    update: bool = typer.Option(True, help="Update the tool if it already exists"),
+    tags: Optional[List[str]] = typer.Option(None, help="Tags for the tool"),
+):
+    """Add or update a tool from a Python file."""
+    from letta.client.client import create_client
+
+    client = create_client(base_url=os.getenv("MEMGPT_BASE_URL"), token=os.getenv("MEMGPT_SERVER_PASS"))
+
+    # 1. Parse the Python file
+    with open(filename, "r", encoding="utf-8") as file:
+        source_code = file.read()
+
+    # 2. Parse the source code to extract the function
+    # Note: here we assume it is one function only in the file.
+    module = ast.parse(source_code)
+    func_def = None
+    for node in module.body:
+        if isinstance(node, ast.FunctionDef):
+            func_def = node
+            break
+
+    if not func_def:
+        raise ValueError("No function found in the provided file")
+
+    # 3. Compile the function to make it callable
+    # Explanation courtesy of GPT-4:
+    # Compile the AST (Abstract Syntax Tree) node representing the function definition into a code object
+    # ast.Module creates a module node containing the function definition (func_def)
+    # compile converts the AST into a code object that can be executed by the Python interpreter
+    # The exec function executes the compiled code object in the current context,
+    # effectively defining the function within the current namespace
+    exec(compile(ast.Module([func_def], []), filename, "exec"))
+    # Retrieve the function object by evaluating its name in the current namespace
+    # eval looks up the function name in the current scope and returns the function object
+    func = eval(func_def.name)
+
+    # 4. Add or update the tool
+    tool = client.create_tool(func=func, name=name, tags=tags, update=update)
+    print(f"Tool {tool.name} added successfully")
+
+
+@app.command()
+def list_tools():
+    """List all available tools."""
+    from letta.client.client import create_client
+
+    client = create_client(base_url=os.getenv("MEMGPT_BASE_URL"), token=os.getenv("MEMGPT_SERVER_PASS"))
+
+    tools = client.list_tools()
+    for tool in tools:
+        print(f"Tool: {tool.name}")
+
+
+@app.command()
+def add(
+    option: str,  # [human, persona]
+    name: Annotated[str, typer.Option(help="Name of human/persona")],
+    text: Annotated[Optional[str], typer.Option(help="Text of human/persona")] = None,
+    filename: Annotated[Optional[str], typer.Option("-f", help="Specify filename")] = None,
+):
+    """Add a person/human"""
+    from letta.client.client import create_client
+
+    client = create_client(base_url=os.getenv("MEMGPT_BASE_URL"), token=os.getenv("MEMGPT_SERVER_PASS"))
+    if filename:  # read from file
+        assert text is None, "Cannot specify both text and filename"
+        with open(filename, "r", encoding="utf-8") as f:
+            text = f.read()
+    else:
+        assert text is not None, "Must specify either text or filename"
+    if option == "persona":
+        persona_id = client.get_persona_id(name)
+        if persona_id:
+            client.get_persona(persona_id)
+            # config if user wants to overwrite
+            if not questionary.confirm(f"Persona {name} already exists. Overwrite?").ask():
+                return
+            client.update_persona(persona_id, text=text)
+        else:
+            client.create_persona(name=name, text=text)
+
+    elif option == "human":
+        human_id = client.get_human_id(name)
+        if human_id:
+            human = client.get_human(human_id)
+            # config if user wants to overwrite
+            if not questionary.confirm(f"Human {name} already exists. Overwrite?").ask():
+                return
+            client.update_human(human_id, text=text)
+        else:
+            human = client.create_human(name=name, text=text)
+    else:
+        raise ValueError(f"Unknown kind {option}")
+
+
+@app.command()
+def delete(option: str, name: str):
+    """Delete a source from the archival memory."""
+    from letta.client.client import create_client
+
+    client = create_client(base_url=os.getenv("MEMGPT_BASE_URL"), token=os.getenv("MEMGPT_API_KEY"))
+    try:
+        # delete from metadata
+        if option == "source":
+            # delete metadata
+            source_id = client.get_source_id(name)
+            assert source_id is not None, f"Source {name} does not exist"
+            client.delete_source(source_id)
+        elif option == "agent":
+            agent_id = client.get_agent_id(name)
+            assert agent_id is not None, f"Agent {name} does not exist"
+            client.delete_agent(agent_id=agent_id)
+        elif option == "human":
+            human_id = client.get_human_id(name)
+            assert human_id is not None, f"Human {name} does not exist"
+            client.delete_human(human_id)
+        elif option == "persona":
+            persona_id = client.get_persona_id(name)
+            assert persona_id is not None, f"Persona {name} does not exist"
+            client.delete_persona(persona_id)
+        else:
+            raise ValueError(f"Option {option} not implemented")
+
+        typer.secho(f"Deleted {option} '{name}'", fg=typer.colors.GREEN)
+
+    except Exception as e:
+        typer.secho(f"Failed to delete {option}'{name}'\n{e}", fg=typer.colors.RED)
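
The add_tool command above pulls a single function out of a user-supplied file by parsing it with ast, compiling only the first FunctionDef, and executing it. A self-contained sketch of just that extraction step, independent of the Letta client and using an invented example function:

import ast

source_code = '''
def roll_d20() -> int:
    """Example tool: roll a 20-sided die."""
    import random
    return random.randint(1, 20)
'''

# Parse the module and take the first top-level function definition
module = ast.parse(source_code)
func_def = next((node for node in module.body if isinstance(node, ast.FunctionDef)), None)
if func_def is None:
    raise ValueError("No function found in the provided source")

# Compile just that function and execute it into an isolated namespace
namespace = {}
exec(compile(ast.Module(body=[func_def], type_ignores=[]), "<tool>", "exec"), namespace)
func = namespace[func_def.name]

print(func.__name__, func())  # e.g. "roll_d20 14"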
{letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/client/utils.py
RENAMED

@@ -2,6 +2,11 @@ from datetime import datetime
 
 from IPython.display import HTML, display
 
+from letta.local_llm.constants import (
+    ASSISTANT_MESSAGE_CLI_SYMBOL,
+    INNER_THOUGHTS_CLI_SYMBOL,
+)
+
 
 def pprint(messages):
     """Utility function for pretty-printing the output of client.send_message in notebooks"""
@@ -47,13 +52,13 @@ def pprint(messages):
             html_content += f"<p><strong>🛠️ [{date_formatted}] Function Return ({return_status}):</strong></p>"
             html_content += f"<p class='function-return'>{return_string}</p>"
         elif "internal_monologue" in message:
-            html_content += f"<p><strong
+            html_content += f"<p><strong>{INNER_THOUGHTS_CLI_SYMBOL} [{date_formatted}] Internal Monologue:</strong></p>"
             html_content += f"<p class='internal-monologue'>{message['internal_monologue']}</p>"
         elif "function_call" in message:
            html_content += f"<p><strong>🛠️ [[{date_formatted}] Function Call:</strong></p>"
            html_content += f"<p class='function-call'>{message['function_call']}</p>"
        elif "assistant_message" in message:
-            html_content += f"<p><strong
+            html_content += f"<p><strong>{ASSISTANT_MESSAGE_CLI_SYMBOL} [{date_formatted}] Assistant Message:</strong></p>"
            html_content += f"<p class='assistant-message'>{message['assistant_message']}</p>"
        html_content += "<br>"
    html_content += "</div>"
{letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/constants.py
RENAMED

@@ -75,6 +75,27 @@ NON_USER_MSG_PREFIX = "[This is an automated system message hidden from the user
 LLM_MAX_TOKENS = {
     "DEFAULT": 8192,
     ## OpenAI models: https://platform.openai.com/docs/models/overview
+    # "o1-preview
+    "chatgpt-4o-latest": 128000,
+    # "o1-preview-2024-09-12
+    "gpt-4o-2024-08-06": 128000,
+    "gpt-4-turbo-preview": 128000,
+    "gpt-4o": 128000,
+    "gpt-3.5-turbo-instruct": 16385,
+    "gpt-4-0125-preview": 128000,
+    "gpt-3.5-turbo-0125": 16385,
+    # "babbage-002": 128000,
+    # "davinci-002": 128000,
+    "gpt-4-turbo-2024-04-09": 128000,
+    # "gpt-4o-realtime-preview-2024-10-01
+    "gpt-4-turbo": 8192,
+    "gpt-4o-2024-05-13": 128000,
+    # "o1-mini
+    # "o1-mini-2024-09-12
+    # "gpt-3.5-turbo-instruct-0914
+    "gpt-4o-mini": 128000,
+    # "gpt-4o-realtime-preview
+    "gpt-4o-mini-2024-07-18": 128000,
     # gpt-4
     "gpt-4-1106-preview": 128000,
     "gpt-4": 8192,
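
The LLM_MAX_TOKENS table above is presumably consulted with a fallback to its DEFAULT entry when a model name is missing; a small sketch of that lookup pattern, with an abbreviated copy of the table rather than the full one:

LLM_MAX_TOKENS = {
    "DEFAULT": 8192,
    "gpt-4o": 128000,
    "gpt-4o-mini": 128000,
    "gpt-3.5-turbo-0125": 16385,
    "gpt-4": 8192,
}


def max_context_tokens(model: str) -> int:
    # fall back to the DEFAULT entry for models missing from the table
    return LLM_MAX_TOKENS.get(model, LLM_MAX_TOKENS["DEFAULT"])


print(max_context_tokens("gpt-4o"))            # 128000
print(max_context_tokens("some-local-model"))  # 8192 (DEFAULT)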
{letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/embeddings.py
RENAMED

@@ -91,6 +91,9 @@ class EmbeddingEndpoint:
             raise ValueError(
                 f"Embeddings endpoint was provided an invalid URL (set to: '{base_url}'). Make sure embedding_endpoint is set correctly in your Letta config."
             )
+        # TODO: find a neater solution - re-mapping for letta endpoint
+        if model == "letta-free":
+            model = "BAAI/bge-large-en-v1.5"
         self.model_name = model
         self._user = user
         self._base_url = base_url
{letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/interface.py
RENAMED

@@ -5,6 +5,10 @@ from typing import List, Optional
 from colorama import Fore, Style, init
 
 from letta.constants import CLI_WARNING_PREFIX
+from letta.local_llm.constants import (
+    ASSISTANT_MESSAGE_CLI_SYMBOL,
+    INNER_THOUGHTS_CLI_SYMBOL,
+)
 from letta.schemas.message import Message
 from letta.utils import json_loads, printd
 
@@ -79,14 +83,14 @@ class CLIInterface(AgentInterface):
     @staticmethod
     def internal_monologue(msg: str, msg_obj: Optional[Message] = None):
         # ANSI escape code for italic is '\x1B[3m'
-        fstr = f"\x1B[3m{Fore.LIGHTBLACK_EX}
+        fstr = f"\x1B[3m{Fore.LIGHTBLACK_EX}{INNER_THOUGHTS_CLI_SYMBOL} {{msg}}{Style.RESET_ALL}"
         if STRIP_UI:
             fstr = "{msg}"
         print(fstr.format(msg=msg))
 
     @staticmethod
     def assistant_message(msg: str, msg_obj: Optional[Message] = None):
-        fstr = f"{Fore.YELLOW}{Style.BRIGHT}
+        fstr = f"{Fore.YELLOW}{Style.BRIGHT}{ASSISTANT_MESSAGE_CLI_SYMBOL} {Fore.YELLOW}{{msg}}{Style.RESET_ALL}"
         if STRIP_UI:
             fstr = "{msg}"
         print(fstr.format(msg=msg))
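
Both format strings above rely on doubling braces inside an f-string so that {{msg}} survives as a literal {msg} placeholder for a later str.format call, while the colorama codes and the symbol are interpolated immediately. A tiny illustration, with a stand-in symbol:

from colorama import Fore, Style, init

init()  # enables ANSI handling on Windows; a no-op elsewhere

SYMBOL = "\N{THOUGHT BALLOON}"  # stand-in for INNER_THOUGHTS_CLI_SYMBOL

# The f-string interpolates Fore/Style/SYMBOL immediately, but the doubled
# braces leave a literal {msg} placeholder for a later str.format call.
fstr = f"\x1B[3m{Fore.LIGHTBLACK_EX}{SYMBOL} {{msg}}{Style.RESET_ALL}"
print(fstr.format(msg="Thinking about the user's request..."))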
{letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/llm_api/google_ai.py
RENAMED

@@ -436,7 +436,7 @@ def google_ai_chat_completions_request(
             response_json=response_json,
             model=data.get("model"),
             input_messages=data["contents"],
-            pull_inner_thoughts_from_args=
+            pull_inner_thoughts_from_args=inner_thoughts_in_kwargs,
         )
     except Exception as conversion_error:
         print(f"Error during response conversion: {conversion_error}")
{letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/llm_api/helpers.py
RENAMED

@@ -21,10 +21,17 @@ def make_post_request(url: str, headers: dict[str, str], data: dict[str, Any]) -
     # Raise for 4XX/5XX HTTP errors
     response.raise_for_status()
 
-    #
-
-
-
+    # Check if the response content type indicates JSON and attempt to parse it
+    content_type = response.headers.get("Content-Type", "")
+    if "application/json" in content_type.lower():
+        try:
+            response_data = response.json()  # Attempt to parse the response as JSON
+            printd(f"Response JSON: {response_data}")
+        except ValueError as json_err:
+            # Handle the case where the content type says JSON but the body is invalid
+            error_message = f"Failed to parse JSON despite Content-Type being {content_type}: {json_err}"
+            printd(error_message)
+            raise ValueError(error_message) from json_err
     else:
         error_message = f"Unexpected content type returned: {response.headers.get('Content-Type')}"
         printd(error_message)
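
The make_post_request change guards JSON parsing behind a Content-Type check, so an HTML error page or an empty body yields a clear error rather than an opaque decode failure. A standalone sketch of the same guard applied to a generic POST helper (httpbin.org is used purely as a demo endpoint):

import requests


def post_json(url: str, headers: dict, data: dict) -> dict:
    response = requests.post(url, headers=headers, json=data)
    response.raise_for_status()  # surface 4XX/5XX as exceptions

    content_type = response.headers.get("Content-Type", "")
    if "application/json" not in content_type.lower():
        raise ValueError(f"Unexpected content type returned: {content_type}")
    try:
        return response.json()
    except ValueError as json_err:
        # Content-Type claimed JSON, but the body did not parse
        raise ValueError(
            f"Failed to parse JSON despite Content-Type being {content_type}: {json_err}"
        ) from json_err


print(post_json("https://httpbin.org/post", headers={}, data={"ping": "pong"})["json"])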
{letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/llm_api/llm_api_tools.py
RENAMED

@@ -217,19 +217,14 @@ def create(
         if not use_tool_naming:
             raise NotImplementedError("Only tool calling supported on Google AI API requests")
 
-        # NOTE: until Google AI supports CoT / text alongside function calls,
-        # we need to put it in a kwarg (unless we want to split the message into two)
-        google_ai_inner_thoughts_in_kwarg = True
-
         if functions is not None:
             tools = [{"type": "function", "function": f} for f in functions]
             tools = [Tool(**t) for t in tools]
-            tools = convert_tools_to_google_ai_format(tools, inner_thoughts_in_kwargs=
+            tools = convert_tools_to_google_ai_format(tools, inner_thoughts_in_kwargs=True)
         else:
             tools = None
 
         return google_ai_chat_completions_request(
-            inner_thoughts_in_kwargs=google_ai_inner_thoughts_in_kwarg,
             base_url=llm_config.model_endpoint,
             model=llm_config.model,
             api_key=model_settings.gemini_api_key,
@@ -238,6 +233,7 @@ def create(
                 contents=[m.to_google_ai_dict() for m in messages],
                 tools=tools,
             ),
+            inner_thoughts_in_kwargs=True,
         )
 
     elif llm_config.model_endpoint_type == "anthropic":
@@ -246,12 +242,6 @@ def create(
         if not use_tool_naming:
             raise NotImplementedError("Only tool calling supported on Anthropic API requests")
 
-        if functions is not None:
-            tools = [{"type": "function", "function": f} for f in functions]
-            tools = [Tool(**t) for t in tools]
-        else:
-            tools = None
-
         return anthropic_chat_completions_request(
             url=llm_config.model_endpoint,
             api_key=model_settings.anthropic_api_key,
{letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/llm_api/openai.py
RENAMED

@@ -41,7 +41,9 @@ from letta.utils import smart_urljoin
 OPENAI_SSE_DONE = "[DONE]"
 
 
-def openai_get_model_list(
+def openai_get_model_list(
+    url: str, api_key: Union[str, None], fix_url: Optional[bool] = False, extra_params: Optional[dict] = None
+) -> dict:
     """https://platform.openai.com/docs/api-reference/models/list"""
     from letta.utils import printd
 
@@ -60,7 +62,8 @@ def openai_get_model_list(url: str, api_key: Union[str, None], fix_url: Optional
 
     printd(f"Sending request to {url}")
     try:
-
+        # TODO add query param "tool" to be true
+        response = requests.get(url, headers=headers, params=extra_params)
         response.raise_for_status()  # Raises HTTPError for 4XX/5XX status
         response = response.json()  # convert to dict from string
         printd(f"response = {response}")
@@ -145,6 +148,7 @@ def build_openai_chat_completions_request(
         import uuid
 
         data.user = str(uuid.UUID(int=0))
+        data.model = "memgpt-openai"
 
     return data
 
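
The new extra_params argument is forwarded as query parameters on the GET /models request, which is how the OpenRouter-specific supported_parameters=tools filter reaches the API. A rough sketch of that request shape; the environment variable and helper name below are placeholders, not Letta's code:

import os
from typing import Optional

import requests


def get_model_list(base_url: str, api_key: str, extra_params: Optional[dict] = None) -> dict:
    headers = {"Authorization": f"Bearer {api_key}"}
    # extra_params (e.g. {"supported_parameters": "tools"} for OpenRouter) become URL query params
    response = requests.get(f"{base_url}/models", headers=headers, params=extra_params)
    response.raise_for_status()
    return response.json()


models = get_model_list(
    "https://openrouter.ai/api/v1",
    api_key=os.environ["OPENROUTER_API_KEY"],  # placeholder environment variable
    extra_params={"supported_parameters": "tools"},
)
print([m["id"] for m in models["data"]][:5])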
{letta_nightly-0.4.1.dev20241009104130 → letta_nightly-0.4.1.dev20241011104054}/letta/providers.py
RENAMED

@@ -13,7 +13,6 @@ from letta.schemas.llm_config import LLMConfig
 
 
 class Provider(BaseModel):
-    base_url: str
 
     def list_llm_models(self):
         return []
@@ -25,26 +24,69 @@ class Provider(BaseModel):
         pass
 
 
+class LettaProvider(Provider):
+
+    name: str = "letta"
+
+    def list_llm_models(self) -> List[LLMConfig]:
+        return [
+            LLMConfig(
+                model="letta-free",  # NOTE: renamed
+                model_endpoint_type="openai",
+                model_endpoint="https://inference.memgpt.ai",
+                context_window=16384,
+            )
+        ]
+
+    def list_embedding_models(self):
+        return [
+            EmbeddingConfig(
+                embedding_model="letta-free",  # NOTE: renamed
+                embedding_endpoint_type="hugging-face",
+                embedding_endpoint="https://embeddings.memgpt.ai",
+                embedding_dim=1024,
+                embedding_chunk_size=300,
+            )
+        ]
+
+
 class OpenAIProvider(Provider):
     name: str = "openai"
     api_key: str = Field(..., description="API key for the OpenAI API.")
-    base_url: str = "
+    base_url: str = Field(..., description="Base URL for the OpenAI API.")
 
     def list_llm_models(self) -> List[LLMConfig]:
         from letta.llm_api.openai import openai_get_model_list
 
-
-
+        # Some hardcoded support for OpenRouter (so that we only get models with tool calling support)...
+        # See: https://openrouter.ai/docs/requests
+        extra_params = {"supported_parameters": "tools"} if "openrouter.ai" in self.base_url else None
+        response = openai_get_model_list(self.base_url, api_key=self.api_key, extra_params=extra_params)
+
+        assert "data" in response, f"OpenAI model query response missing 'data' field: {response}"
 
         configs = []
-        for
-
+        for model in response["data"]:
+            assert "id" in model, f"OpenAI model missing 'id' field: {model}"
+            model_name = model["id"]
+
+            if "context_length" in model:
+                # Context length is returned in OpenRouter as "context_length"
+                context_window_size = model["context_length"]
+            else:
+                context_window_size = self.get_model_context_window_size(model_name)
 
             if not context_window_size:
                 continue
             configs.append(
                 LLMConfig(model=model_name, model_endpoint_type="openai", model_endpoint=self.base_url, context_window=context_window_size)
             )
+
+        # for OpenAI, sort in reverse order
+        if self.base_url == "https://api.openai.com/v1":
+            # alphnumeric sort
+            configs.sort(key=lambda x: x.model, reverse=True)
+
         return configs
 
     def list_embedding_models(self) -> List[EmbeddingConfig]:
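
The OpenAIProvider change reads the context window from OpenRouter's context_length field when present and otherwise falls back to a per-model lookup, skipping models whose window cannot be determined. The sketch below mirrors that branching with a hypothetical lookup table standing in for get_model_context_window_size:

from typing import List, Optional

# hypothetical fallback table standing in for get_model_context_window_size()
KNOWN_CONTEXT_WINDOWS = {"gpt-4o": 128000, "gpt-4": 8192}


def context_window_for(model_entry: dict) -> Optional[int]:
    # OpenRouter includes "context_length" directly in its /models response
    if "context_length" in model_entry:
        return model_entry["context_length"]
    return KNOWN_CONTEXT_WINDOWS.get(model_entry["id"])


def build_configs(models: List[dict], base_url: str) -> List[dict]:
    configs = []
    for model in models:
        window = context_window_for(model)
        if not window:
            continue  # skip models whose context window cannot be determined
        configs.append({"model": model["id"], "model_endpoint": base_url, "context_window": window})
    return configs


print(build_configs(
    [{"id": "gpt-4o"}, {"id": "mystery-model"}, {"id": "qwen-72b", "context_length": 32768}],
    base_url="https://api.openai.com/v1",
))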