letta-nightly 0.4.1.dev20241004104123__tar.gz → 0.4.1.dev20241005104008__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of letta-nightly might be problematic. Click here for more details.
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/PKG-INFO +1 -1
- letta_nightly-0.4.1.dev20241005104008/letta/cli/cli.py +354 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/cli/cli_config.py +70 -27
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/client/client.py +103 -11
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/config.py +80 -80
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/constants.py +6 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/credentials.py +10 -1
- letta_nightly-0.4.1.dev20241005104008/letta/errors.py +84 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/llm_api/llm_api_tools.py +110 -52
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/chat_completion_proxy.py +0 -3
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/main.py +1 -2
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/metadata.py +12 -0
- letta_nightly-0.4.1.dev20241005104008/letta/providers.py +232 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/block.py +1 -1
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/letta_request.py +17 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/letta_response.py +11 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/llm_config.py +18 -2
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/message.py +40 -13
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/app.py +5 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/interface.py +115 -24
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/routers/v1/agents.py +36 -3
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/routers/v1/llms.py +6 -2
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/server.py +60 -87
- letta_nightly-0.4.1.dev20241005104008/letta/server/static_files/assets/index-3ab03d5b.css +1 -0
- letta_nightly-0.4.1.dev20241004104123/letta/server/static_files/assets/index-4d08d8a3.js → letta_nightly-0.4.1.dev20241005104008/letta/server/static_files/assets/index-9a9c449b.js +69 -69
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/static_files/index.html +2 -2
- letta_nightly-0.4.1.dev20241005104008/letta/settings.py +197 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/utils.py +6 -1
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/pyproject.toml +1 -1
- letta_nightly-0.4.1.dev20241004104123/letta/cli/cli.py +0 -689
- letta_nightly-0.4.1.dev20241004104123/letta/errors.py +0 -26
- letta_nightly-0.4.1.dev20241004104123/letta/local_llm/groq/api.py +0 -97
- letta_nightly-0.4.1.dev20241004104123/letta/server/static_files/assets/index-156816da.css +0 -1
- letta_nightly-0.4.1.dev20241004104123/letta/settings.py +0 -167
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/LICENSE +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/README.md +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/__main__.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/agent.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/agent_store/chroma.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/agent_store/db.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/agent_store/lancedb.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/agent_store/milvus.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/agent_store/qdrant.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/agent_store/storage.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/benchmark/benchmark.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/benchmark/constants.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/cli/cli_load.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/client/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/client/admin.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/client/streaming.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/client/utils.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/configs/anthropic.json +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/configs/letta_hosted.json +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/configs/openai.json +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/data_sources/connectors.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/embeddings.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/functions/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/functions/function_sets/base.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/functions/function_sets/extras.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/functions/functions.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/functions/helpers.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/functions/schema_generator.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/humans/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/humans/examples/basic.txt +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/humans/examples/cs_phd.txt +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/interface.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/llm_api/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/llm_api/anthropic.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/llm_api/azure_openai.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/llm_api/cohere.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/llm_api/google_ai.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/llm_api/openai.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/README.md +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/constants.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/function_parser.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/grammars/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/grammars/gbnf_grammar_generator.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/grammars/json.gbnf +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/grammars/json_func_calls_with_inner_thoughts.gbnf +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/json_parser.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/koboldcpp/api.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/koboldcpp/settings.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/llamacpp/api.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/llamacpp/settings.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/llm_chat_completion_wrappers/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/llm_chat_completion_wrappers/airoboros.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/llm_chat_completion_wrappers/chatml.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/llm_chat_completion_wrappers/configurable_wrapper.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/llm_chat_completion_wrappers/dolphin.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/llm_chat_completion_wrappers/llama3.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/llm_chat_completion_wrappers/simple_summary_wrapper.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/llm_chat_completion_wrappers/wrapper_base.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/llm_chat_completion_wrappers/zephyr.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/lmstudio/api.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/lmstudio/settings.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/ollama/api.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/ollama/settings.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/settings/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/settings/deterministic_mirostat.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/settings/settings.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/settings/simple.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/utils.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/vllm/api.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/webui/api.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/webui/legacy_api.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/webui/legacy_settings.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/local_llm/webui/settings.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/log.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/memory.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/openai_backcompat/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/openai_backcompat/openai_object.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/persistence_manager.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/personas/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/personas/examples/anna_pa.txt +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/personas/examples/google_search_persona.txt +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/personas/examples/memgpt_doc.txt +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/personas/examples/memgpt_starter.txt +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/personas/examples/sam.txt +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/personas/examples/sam_pov.txt +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/personas/examples/sam_simple_pov_gpt35.txt +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/personas/examples/sqldb/test.db +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/prompts/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/prompts/gpt_summarize.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/prompts/gpt_system.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/prompts/system/memgpt_base.txt +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/prompts/system/memgpt_chat.txt +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/prompts/system/memgpt_chat_compressed.txt +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/prompts/system/memgpt_chat_fstring.txt +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/prompts/system/memgpt_doc.txt +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/prompts/system/memgpt_gpt35_extralong.txt +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/prompts/system/memgpt_intuitive_knowledge.txt +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/prompts/system/memgpt_modified_chat.txt +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/pytest.ini +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/agent.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/api_key.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/document.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/embedding_config.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/enums.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/health.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/job.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/letta_base.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/letta_message.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/memory.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/openai/chat_completion_request.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/openai/chat_completion_response.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/openai/chat_completions.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/openai/embedding_response.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/openai/openai.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/organization.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/passage.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/source.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/tool.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/usage.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/schemas/user.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/constants.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/admin/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/admin/agents.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/admin/tools.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/admin/users.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/auth/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/auth/index.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/auth_token.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/routers/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/routers/openai/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/routers/openai/assistants/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/routers/openai/assistants/assistants.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/routers/openai/assistants/schemas.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/routers/openai/assistants/threads.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/routers/openai/chat_completions/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/routers/openai/chat_completions/chat_completions.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/routers/v1/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/routers/v1/blocks.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/routers/v1/health.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/routers/v1/jobs.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/routers/v1/organizations.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/routers/v1/sources.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/routers/v1/tools.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/routers/v1/users.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/static_files.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/rest_api/utils.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/startup.sh +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/static_files/favicon.ico +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/static_files/memgpt_logo_transparent.png +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/utils.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/ws_api/__init__.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/ws_api/example_client.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/ws_api/interface.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/ws_api/protocol.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/server/ws_api/server.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/streaming_interface.py +0 -0
- {letta_nightly-0.4.1.dev20241004104123 → letta_nightly-0.4.1.dev20241005104008}/letta/system.py +0 -0
|
@@ -0,0 +1,354 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import sys
|
|
3
|
+
from enum import Enum
|
|
4
|
+
from typing import Annotated, Optional
|
|
5
|
+
|
|
6
|
+
import questionary
|
|
7
|
+
import typer
|
|
8
|
+
|
|
9
|
+
import letta.utils as utils
|
|
10
|
+
from letta import create_client
|
|
11
|
+
from letta.agent import Agent, save_agent
|
|
12
|
+
from letta.config import LettaConfig
|
|
13
|
+
from letta.constants import CLI_WARNING_PREFIX, LETTA_DIR
|
|
14
|
+
from letta.log import get_logger
|
|
15
|
+
from letta.metadata import MetadataStore
|
|
16
|
+
from letta.schemas.enums import OptionState
|
|
17
|
+
from letta.schemas.memory import ChatMemory, Memory
|
|
18
|
+
from letta.server.server import logger as server_logger
|
|
19
|
+
|
|
20
|
+
# from letta.interface import CLIInterface as interface # for printing to terminal
|
|
21
|
+
from letta.streaming_interface import (
|
|
22
|
+
StreamingRefreshCLIInterface as interface, # for printing to terminal
|
|
23
|
+
)
|
|
24
|
+
from letta.utils import open_folder_in_explorer, printd
|
|
25
|
+
|
|
26
|
+
logger = get_logger(__name__)
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def open_folder():
    """Open a file-browser window showing the Letta home directory.

    Best-effort: any failure to launch the system viewer is reported to
    stdout rather than raised, so the CLI never crashes on this command.
    """
    try:
        target = LETTA_DIR
        print(f"Opening home folder: {target}")
        open_folder_in_explorer(target)
    except Exception as e:
        # Swallow the error deliberately; this is a convenience command.
        print(f"Failed to open folder with system viewer, error:\n{e}")
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
class ServerChoice(Enum):
    # CLI choice set for `letta server --type`: which server implementation to launch.
    rest_api = "rest"  # REST API server (started via letta.server.rest_api.app.start_server)
    ws_api = "websocket"  # deprecated; selecting this makes server() raise NotImplementedError
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def server(
    type: Annotated[ServerChoice, typer.Option(help="Server to run")] = "rest",
    port: Annotated[Optional[int], typer.Option(help="Port to run the server on")] = None,
    host: Annotated[Optional[str], typer.Option(help="Host to run the server on (default to localhost)")] = None,
    debug: Annotated[bool, typer.Option(help="Turn debugging output on")] = False,
    ade: Annotated[bool, typer.Option(help="Allows remote access")] = False,
):
    """Launch a Letta server process.

    Args:
        type: Which server implementation to run ("rest" or "websocket").
        port: Port to bind the REST server to; the app default is used when None.
        host: Host to bind the REST server to; defaults to localhost when None.
        debug: Turn debugging output on.
        ade: Allows remote access. NOTE(review): not consumed anywhere in this
            function body — presumably read elsewhere; confirm before removing.

    Raises:
        NotImplementedError: If the deprecated websocket server is requested.
    """
    if type == ServerChoice.rest_api:
        try:
            # Imported lazily so the CLI stays importable even when the REST
            # server's dependencies are unavailable.
            from letta.server.rest_api.app import start_server

            start_server(port=port, host=host, debug=debug)
        except KeyboardInterrupt:
            # Handle CTRL-C: exit cleanly instead of dumping a traceback.
            typer.secho("Terminating the server...")
            sys.exit(0)

    elif type == ServerChoice.ws_api:
        # Fixed typo in the error message ("suppport" -> "support").
        raise NotImplementedError("WS support deprecated")
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def run(
|
|
78
|
+
persona: Annotated[Optional[str], typer.Option(help="Specify persona")] = None,
|
|
79
|
+
agent: Annotated[Optional[str], typer.Option(help="Specify agent name")] = None,
|
|
80
|
+
human: Annotated[Optional[str], typer.Option(help="Specify human")] = None,
|
|
81
|
+
system: Annotated[Optional[str], typer.Option(help="Specify system prompt (raw text)")] = None,
|
|
82
|
+
system_file: Annotated[Optional[str], typer.Option(help="Specify raw text file containing system prompt")] = None,
|
|
83
|
+
# model flags
|
|
84
|
+
model: Annotated[Optional[str], typer.Option(help="Specify the LLM model")] = None,
|
|
85
|
+
model_wrapper: Annotated[Optional[str], typer.Option(help="Specify the LLM model wrapper")] = None,
|
|
86
|
+
model_endpoint: Annotated[Optional[str], typer.Option(help="Specify the LLM model endpoint")] = None,
|
|
87
|
+
model_endpoint_type: Annotated[Optional[str], typer.Option(help="Specify the LLM model endpoint type")] = None,
|
|
88
|
+
context_window: Annotated[
|
|
89
|
+
Optional[int], typer.Option(help="The context window of the LLM you are using (e.g. 8k for most Mistral 7B variants)")
|
|
90
|
+
] = None,
|
|
91
|
+
core_memory_limit: Annotated[
|
|
92
|
+
Optional[int], typer.Option(help="The character limit to each core-memory section (human/persona).")
|
|
93
|
+
] = 2000,
|
|
94
|
+
# other
|
|
95
|
+
first: Annotated[bool, typer.Option(help="Use --first to send the first message in the sequence")] = False,
|
|
96
|
+
strip_ui: Annotated[bool, typer.Option(help="Remove all the bells and whistles in CLI output (helpful for testing)")] = False,
|
|
97
|
+
debug: Annotated[bool, typer.Option(help="Use --debug to enable debugging output")] = False,
|
|
98
|
+
no_verify: Annotated[bool, typer.Option(help="Bypass message verification")] = False,
|
|
99
|
+
yes: Annotated[bool, typer.Option("-y", help="Skip confirmation prompt and use defaults")] = False,
|
|
100
|
+
# streaming
|
|
101
|
+
stream: Annotated[bool, typer.Option(help="Enables message streaming in the CLI (if the backend supports it)")] = False,
|
|
102
|
+
# whether or not to put the inner thoughts inside the function args
|
|
103
|
+
no_content: Annotated[
|
|
104
|
+
OptionState, typer.Option(help="Set to 'yes' for LLM APIs that omit the `content` field during tool calling")
|
|
105
|
+
] = OptionState.DEFAULT,
|
|
106
|
+
):
|
|
107
|
+
"""Start chatting with an Letta agent
|
|
108
|
+
|
|
109
|
+
Example usage: `letta run --agent myagent --data-source mydata --persona mypersona --human myhuman --model gpt-3.5-turbo`
|
|
110
|
+
|
|
111
|
+
:param persona: Specify persona
|
|
112
|
+
:param agent: Specify agent name (will load existing state if the agent exists, or create a new one with that name)
|
|
113
|
+
:param human: Specify human
|
|
114
|
+
:param model: Specify the LLM model
|
|
115
|
+
|
|
116
|
+
"""
|
|
117
|
+
|
|
118
|
+
# setup logger
|
|
119
|
+
# TODO: remove Utils Debug after global logging is complete.
|
|
120
|
+
utils.DEBUG = debug
|
|
121
|
+
# TODO: add logging command line options for runtime log level
|
|
122
|
+
|
|
123
|
+
if debug:
|
|
124
|
+
logger.setLevel(logging.DEBUG)
|
|
125
|
+
server_logger.setLevel(logging.DEBUG)
|
|
126
|
+
else:
|
|
127
|
+
logger.setLevel(logging.CRITICAL)
|
|
128
|
+
server_logger.setLevel(logging.CRITICAL)
|
|
129
|
+
|
|
130
|
+
# load config file
|
|
131
|
+
config = LettaConfig.load()
|
|
132
|
+
|
|
133
|
+
# read user id from config
|
|
134
|
+
ms = MetadataStore(config)
|
|
135
|
+
client = create_client()
|
|
136
|
+
|
|
137
|
+
# determine agent to use, if not provided
|
|
138
|
+
if not yes and not agent:
|
|
139
|
+
agents = client.list_agents()
|
|
140
|
+
agents = [a.name for a in agents]
|
|
141
|
+
|
|
142
|
+
if len(agents) > 0:
|
|
143
|
+
print()
|
|
144
|
+
select_agent = questionary.confirm("Would you like to select an existing agent?").ask()
|
|
145
|
+
if select_agent is None:
|
|
146
|
+
raise KeyboardInterrupt
|
|
147
|
+
if select_agent:
|
|
148
|
+
agent = questionary.select("Select agent:", choices=agents).ask()
|
|
149
|
+
|
|
150
|
+
# create agent config
|
|
151
|
+
if agent:
|
|
152
|
+
agent_id = client.get_agent_id(agent)
|
|
153
|
+
agent_state = client.get_agent(agent_id)
|
|
154
|
+
else:
|
|
155
|
+
agent_state = None
|
|
156
|
+
human = human if human else config.human
|
|
157
|
+
persona = persona if persona else config.persona
|
|
158
|
+
if agent and agent_state: # use existing agent
|
|
159
|
+
typer.secho(f"\n🔁 Using existing agent {agent}", fg=typer.colors.GREEN)
|
|
160
|
+
# agent_config = AgentConfig.load(agent)
|
|
161
|
+
# agent_state = ms.get_agent(agent_name=agent, user_id=user_id)
|
|
162
|
+
printd("Loading agent state:", agent_state.id)
|
|
163
|
+
printd("Agent state:", agent_state.name)
|
|
164
|
+
# printd("State path:", agent_config.save_state_dir())
|
|
165
|
+
# printd("Persistent manager path:", agent_config.save_persistence_manager_dir())
|
|
166
|
+
# printd("Index path:", agent_config.save_agent_index_dir())
|
|
167
|
+
# persistence_manager = LocalStateManager(agent_config).load() # TODO: implement load
|
|
168
|
+
# TODO: load prior agent state
|
|
169
|
+
|
|
170
|
+
# Allow overriding model specifics (model, model wrapper, model endpoint IP + type, context_window)
|
|
171
|
+
if model and model != agent_state.llm_config.model:
|
|
172
|
+
typer.secho(
|
|
173
|
+
f"{CLI_WARNING_PREFIX}Overriding existing model {agent_state.llm_config.model} with {model}", fg=typer.colors.YELLOW
|
|
174
|
+
)
|
|
175
|
+
agent_state.llm_config.model = model
|
|
176
|
+
if context_window is not None and int(context_window) != agent_state.llm_config.context_window:
|
|
177
|
+
typer.secho(
|
|
178
|
+
f"{CLI_WARNING_PREFIX}Overriding existing context window {agent_state.llm_config.context_window} with {context_window}",
|
|
179
|
+
fg=typer.colors.YELLOW,
|
|
180
|
+
)
|
|
181
|
+
agent_state.llm_config.context_window = context_window
|
|
182
|
+
if model_wrapper and model_wrapper != agent_state.llm_config.model_wrapper:
|
|
183
|
+
typer.secho(
|
|
184
|
+
f"{CLI_WARNING_PREFIX}Overriding existing model wrapper {agent_state.llm_config.model_wrapper} with {model_wrapper}",
|
|
185
|
+
fg=typer.colors.YELLOW,
|
|
186
|
+
)
|
|
187
|
+
agent_state.llm_config.model_wrapper = model_wrapper
|
|
188
|
+
if model_endpoint and model_endpoint != agent_state.llm_config.model_endpoint:
|
|
189
|
+
typer.secho(
|
|
190
|
+
f"{CLI_WARNING_PREFIX}Overriding existing model endpoint {agent_state.llm_config.model_endpoint} with {model_endpoint}",
|
|
191
|
+
fg=typer.colors.YELLOW,
|
|
192
|
+
)
|
|
193
|
+
agent_state.llm_config.model_endpoint = model_endpoint
|
|
194
|
+
if model_endpoint_type and model_endpoint_type != agent_state.llm_config.model_endpoint_type:
|
|
195
|
+
typer.secho(
|
|
196
|
+
f"{CLI_WARNING_PREFIX}Overriding existing model endpoint type {agent_state.llm_config.model_endpoint_type} with {model_endpoint_type}",
|
|
197
|
+
fg=typer.colors.YELLOW,
|
|
198
|
+
)
|
|
199
|
+
agent_state.llm_config.model_endpoint_type = model_endpoint_type
|
|
200
|
+
|
|
201
|
+
# NOTE: commented out because this seems dangerous - instead users should use /systemswap when in the CLI
|
|
202
|
+
# # user specified a new system prompt
|
|
203
|
+
# if system:
|
|
204
|
+
# # NOTE: agent_state.system is the ORIGINAL system prompt,
|
|
205
|
+
# # whereas agent_state.state["system"] is the LATEST system prompt
|
|
206
|
+
# existing_system_prompt = agent_state.state["system"] if "system" in agent_state.state else None
|
|
207
|
+
# if existing_system_prompt != system:
|
|
208
|
+
# # override
|
|
209
|
+
# agent_state.state["system"] = system
|
|
210
|
+
|
|
211
|
+
# Update the agent with any overrides
|
|
212
|
+
agent_state = client.update_agent(
|
|
213
|
+
agent_id=agent_state.id,
|
|
214
|
+
name=agent_state.name,
|
|
215
|
+
llm_config=agent_state.llm_config,
|
|
216
|
+
embedding_config=agent_state.embedding_config,
|
|
217
|
+
)
|
|
218
|
+
|
|
219
|
+
# create agent
|
|
220
|
+
tools = [ms.get_tool(tool_name, user_id=client.user_id) for tool_name in agent_state.tools]
|
|
221
|
+
letta_agent = Agent(agent_state=agent_state, interface=interface(), tools=tools)
|
|
222
|
+
|
|
223
|
+
else: # create new agent
|
|
224
|
+
# create new agent config: override defaults with args if provided
|
|
225
|
+
typer.secho("\n🧬 Creating new agent...", fg=typer.colors.WHITE)
|
|
226
|
+
|
|
227
|
+
agent_name = agent if agent else utils.create_random_username()
|
|
228
|
+
|
|
229
|
+
# create agent
|
|
230
|
+
client = create_client()
|
|
231
|
+
|
|
232
|
+
# choose from list of llm_configs
|
|
233
|
+
llm_configs = client.list_llm_configs()
|
|
234
|
+
llm_options = [llm_config.model for llm_config in llm_configs]
|
|
235
|
+
# select model
|
|
236
|
+
if len(llm_options) == 0:
|
|
237
|
+
raise ValueError("No LLM models found. Please enable a provider.")
|
|
238
|
+
elif len(llm_options) == 1:
|
|
239
|
+
llm_model_name = llm_options[0]
|
|
240
|
+
else:
|
|
241
|
+
llm_model_name = questionary.select("Select LLM model:", choices=llm_options).ask()
|
|
242
|
+
llm_config = [llm_config for llm_config in llm_configs if llm_config.model == llm_model_name][0]
|
|
243
|
+
|
|
244
|
+
# choose form list of embedding configs
|
|
245
|
+
embedding_configs = client.list_embedding_configs()
|
|
246
|
+
embedding_options = [embedding_config.embedding_model for embedding_config in embedding_configs]
|
|
247
|
+
# select model
|
|
248
|
+
if len(embedding_options) == 0:
|
|
249
|
+
raise ValueError("No embedding models found. Please enable a provider.")
|
|
250
|
+
elif len(embedding_options) == 1:
|
|
251
|
+
embedding_model_name = embedding_options[0]
|
|
252
|
+
else:
|
|
253
|
+
embedding_model_name = questionary.select("Select embedding model:", choices=embedding_options).ask()
|
|
254
|
+
embedding_config = [
|
|
255
|
+
embedding_config for embedding_config in embedding_configs if embedding_config.embedding_model == embedding_model_name
|
|
256
|
+
][0]
|
|
257
|
+
|
|
258
|
+
human_obj = client.get_human(client.get_human_id(name=human))
|
|
259
|
+
persona_obj = client.get_persona(client.get_persona_id(name=persona))
|
|
260
|
+
if human_obj is None:
|
|
261
|
+
typer.secho(f"Couldn't find human {human} in database, please run `letta add human`", fg=typer.colors.RED)
|
|
262
|
+
sys.exit(1)
|
|
263
|
+
if persona_obj is None:
|
|
264
|
+
typer.secho(f"Couldn't find persona {persona} in database, please run `letta add persona`", fg=typer.colors.RED)
|
|
265
|
+
sys.exit(1)
|
|
266
|
+
|
|
267
|
+
if system_file:
|
|
268
|
+
try:
|
|
269
|
+
with open(system_file, "r", encoding="utf-8") as file:
|
|
270
|
+
system = file.read().strip()
|
|
271
|
+
printd("Loaded system file successfully.")
|
|
272
|
+
except FileNotFoundError:
|
|
273
|
+
typer.secho(f"System file not found at {system_file}", fg=typer.colors.RED)
|
|
274
|
+
system_prompt = system if system else None
|
|
275
|
+
|
|
276
|
+
memory = ChatMemory(human=human_obj.value, persona=persona_obj.value, limit=core_memory_limit)
|
|
277
|
+
metadata = {"human": human_obj.name, "persona": persona_obj.name}
|
|
278
|
+
|
|
279
|
+
typer.secho(f"-> 🤖 Using persona profile: '{persona_obj.name}'", fg=typer.colors.WHITE)
|
|
280
|
+
typer.secho(f"-> 🧑 Using human profile: '{human_obj.name}'", fg=typer.colors.WHITE)
|
|
281
|
+
|
|
282
|
+
# add tools
|
|
283
|
+
agent_state = client.create_agent(
|
|
284
|
+
name=agent_name,
|
|
285
|
+
system=system_prompt,
|
|
286
|
+
embedding_config=embedding_config,
|
|
287
|
+
llm_config=llm_config,
|
|
288
|
+
memory=memory,
|
|
289
|
+
metadata=metadata,
|
|
290
|
+
)
|
|
291
|
+
assert isinstance(agent_state.memory, Memory), f"Expected Memory, got {type(agent_state.memory)}"
|
|
292
|
+
typer.secho(f"-> 🛠️ {len(agent_state.tools)} tools: {', '.join([t for t in agent_state.tools])}", fg=typer.colors.WHITE)
|
|
293
|
+
tools = [ms.get_tool(tool_name, user_id=client.user_id) for tool_name in agent_state.tools]
|
|
294
|
+
|
|
295
|
+
letta_agent = Agent(
|
|
296
|
+
interface=interface(),
|
|
297
|
+
agent_state=agent_state,
|
|
298
|
+
tools=tools,
|
|
299
|
+
# gpt-3.5-turbo tends to omit inner monologue, relax this requirement for now
|
|
300
|
+
first_message_verify_mono=True if (model is not None and "gpt-4" in model) else False,
|
|
301
|
+
)
|
|
302
|
+
save_agent(agent=letta_agent, ms=ms)
|
|
303
|
+
typer.secho(f"🎉 Created new agent '{letta_agent.agent_state.name}' (id={letta_agent.agent_state.id})", fg=typer.colors.GREEN)
|
|
304
|
+
|
|
305
|
+
# start event loop
|
|
306
|
+
from letta.main import run_agent_loop
|
|
307
|
+
|
|
308
|
+
print() # extra space
|
|
309
|
+
run_agent_loop(
|
|
310
|
+
letta_agent=letta_agent,
|
|
311
|
+
config=config,
|
|
312
|
+
first=first,
|
|
313
|
+
ms=ms,
|
|
314
|
+
no_verify=no_verify,
|
|
315
|
+
stream=stream,
|
|
316
|
+
inner_thoughts_in_kwargs=no_content,
|
|
317
|
+
) # TODO: add back no_verify
|
|
318
|
+
|
|
319
|
+
|
|
320
|
+
def delete_agent(
|
|
321
|
+
agent_name: Annotated[str, typer.Option(help="Specify agent to delete")],
|
|
322
|
+
user_id: Annotated[Optional[str], typer.Option(help="User ID to associate with the agent.")] = None,
|
|
323
|
+
):
|
|
324
|
+
"""Delete an agent from the database"""
|
|
325
|
+
# use client ID is no user_id provided
|
|
326
|
+
config = LettaConfig.load()
|
|
327
|
+
MetadataStore(config)
|
|
328
|
+
client = create_client(user_id=user_id)
|
|
329
|
+
agent = client.get_agent_by_name(agent_name)
|
|
330
|
+
if not agent:
|
|
331
|
+
typer.secho(f"Couldn't find agent named '{agent_name}' to delete", fg=typer.colors.RED)
|
|
332
|
+
sys.exit(1)
|
|
333
|
+
|
|
334
|
+
confirm = questionary.confirm(f"Are you sure you want to delete agent '{agent_name}' (id={agent.id})?", default=False).ask()
|
|
335
|
+
if confirm is None:
|
|
336
|
+
raise KeyboardInterrupt
|
|
337
|
+
if not confirm:
|
|
338
|
+
typer.secho(f"Cancelled agent deletion '{agent_name}' (id={agent.id})", fg=typer.colors.GREEN)
|
|
339
|
+
return
|
|
340
|
+
|
|
341
|
+
try:
|
|
342
|
+
# delete the agent
|
|
343
|
+
client.delete_agent(agent.id)
|
|
344
|
+
typer.secho(f"🕊️ Successfully deleted agent '{agent_name}' (id={agent.id})", fg=typer.colors.GREEN)
|
|
345
|
+
except Exception:
|
|
346
|
+
typer.secho(f"Failed to delete agent '{agent_name}' (id={agent.id})", fg=typer.colors.RED)
|
|
347
|
+
sys.exit(1)
|
|
348
|
+
|
|
349
|
+
|
|
350
|
+
def version():
|
|
351
|
+
import letta
|
|
352
|
+
|
|
353
|
+
print(letta.__version__)
|
|
354
|
+
return letta.__version__
|
|
@@ -35,8 +35,6 @@ from letta.local_llm.constants import (
|
|
|
35
35
|
DEFAULT_WRAPPER_NAME,
|
|
36
36
|
)
|
|
37
37
|
from letta.local_llm.utils import get_available_wrappers
|
|
38
|
-
from letta.schemas.embedding_config import EmbeddingConfig
|
|
39
|
-
from letta.schemas.llm_config import LLMConfig
|
|
40
38
|
from letta.server.utils import shorten_key_middle
|
|
41
39
|
|
|
42
40
|
app = typer.Typer()
|
|
@@ -71,7 +69,7 @@ def configure_llm_endpoint(config: LettaConfig, credentials: LettaCredentials):
|
|
|
71
69
|
model_endpoint_type, model_endpoint = None, None
|
|
72
70
|
|
|
73
71
|
# get default
|
|
74
|
-
default_model_endpoint_type =
|
|
72
|
+
default_model_endpoint_type = None
|
|
75
73
|
if (
|
|
76
74
|
config.default_llm_config
|
|
77
75
|
and config.default_llm_config.model_endpoint_type is not None
|
|
@@ -126,7 +124,41 @@ def configure_llm_endpoint(config: LettaConfig, credentials: LettaCredentials):
|
|
|
126
124
|
model_endpoint = questionary.text("Override default endpoint:", default=model_endpoint).ask()
|
|
127
125
|
if model_endpoint is None:
|
|
128
126
|
raise KeyboardInterrupt
|
|
129
|
-
|
|
127
|
+
|
|
128
|
+
elif provider == "groq":
|
|
129
|
+
groq_user_msg = "Enter your Groq API key (starts with 'gsk-', see https://console.groq.com/keys):"
|
|
130
|
+
# check for key
|
|
131
|
+
if credentials.groq_key is None:
|
|
132
|
+
# allow key to get pulled from env vars
|
|
133
|
+
groq_api_key = os.getenv("GROQ_API_KEY", None)
|
|
134
|
+
# if we still can't find it, ask for it as input
|
|
135
|
+
if groq_api_key is None:
|
|
136
|
+
while groq_api_key is None or len(groq_api_key) == 0:
|
|
137
|
+
# Ask for API key as input
|
|
138
|
+
groq_api_key = questionary.password(groq_user_msg).ask()
|
|
139
|
+
if groq_api_key is None:
|
|
140
|
+
raise KeyboardInterrupt
|
|
141
|
+
credentials.groq_key = groq_api_key
|
|
142
|
+
credentials.save()
|
|
143
|
+
else:
|
|
144
|
+
# Give the user an opportunity to overwrite the key
|
|
145
|
+
default_input = shorten_key_middle(credentials.groq_key) if credentials.groq_key.startswith("gsk-") else credentials.groq_key
|
|
146
|
+
groq_api_key = questionary.password(
|
|
147
|
+
groq_user_msg,
|
|
148
|
+
default=default_input,
|
|
149
|
+
).ask()
|
|
150
|
+
if groq_api_key is None:
|
|
151
|
+
raise KeyboardInterrupt
|
|
152
|
+
# If the user modified it, use the new one
|
|
153
|
+
if groq_api_key != default_input:
|
|
154
|
+
credentials.groq_key = groq_api_key
|
|
155
|
+
credentials.save()
|
|
156
|
+
|
|
157
|
+
model_endpoint_type = "groq"
|
|
158
|
+
model_endpoint = "https://api.groq.com/openai/v1"
|
|
159
|
+
model_endpoint = questionary.text("Override default endpoint:", default=model_endpoint).ask()
|
|
160
|
+
if model_endpoint is None:
|
|
161
|
+
raise KeyboardInterrupt
|
|
130
162
|
|
|
131
163
|
elif provider == "azure":
|
|
132
164
|
# check for necessary vars
|
|
@@ -392,6 +424,12 @@ def get_model_options(
|
|
|
392
424
|
fetched_model_options = cohere_get_model_list(url=model_endpoint, api_key=credentials.cohere_key)
|
|
393
425
|
model_options = [obj for obj in fetched_model_options]
|
|
394
426
|
|
|
427
|
+
elif model_endpoint_type == "groq":
|
|
428
|
+
if credentials.groq_key is None:
|
|
429
|
+
raise ValueError("Missing Groq API key")
|
|
430
|
+
fetched_model_options_response = openai_get_model_list(url=model_endpoint, api_key=credentials.groq_key, fix_url=True)
|
|
431
|
+
model_options = [obj["id"] for obj in fetched_model_options_response["data"]]
|
|
432
|
+
|
|
395
433
|
else:
|
|
396
434
|
# Attempt to do OpenAI endpoint style model fetching
|
|
397
435
|
# TODO support local auth with api-key header
|
|
@@ -555,10 +593,32 @@ def configure_model(config: LettaConfig, credentials: LettaCredentials, model_en
|
|
|
555
593
|
if model is None:
|
|
556
594
|
raise KeyboardInterrupt
|
|
557
595
|
|
|
596
|
+
# Groq support via /chat/completions + function calling endpoints
|
|
597
|
+
elif model_endpoint_type == "groq":
|
|
598
|
+
try:
|
|
599
|
+
fetched_model_options = get_model_options(
|
|
600
|
+
credentials=credentials, model_endpoint_type=model_endpoint_type, model_endpoint=model_endpoint
|
|
601
|
+
)
|
|
602
|
+
|
|
603
|
+
except Exception as e:
|
|
604
|
+
# NOTE: if this fails, it means the user's key is probably bad
|
|
605
|
+
typer.secho(
|
|
606
|
+
f"Failed to get model list from {model_endpoint} - make sure your API key and endpoints are correct!", fg=typer.colors.RED
|
|
607
|
+
)
|
|
608
|
+
raise e
|
|
609
|
+
|
|
610
|
+
model = questionary.select(
|
|
611
|
+
"Select default model:",
|
|
612
|
+
choices=fetched_model_options,
|
|
613
|
+
default=fetched_model_options[0],
|
|
614
|
+
).ask()
|
|
615
|
+
if model is None:
|
|
616
|
+
raise KeyboardInterrupt
|
|
617
|
+
|
|
558
618
|
else: # local models
|
|
559
619
|
|
|
560
620
|
# ask about local auth
|
|
561
|
-
if model_endpoint_type in ["groq"]: # TODO all llm engines under 'local' that will require api keys
|
|
621
|
+
if model_endpoint_type in ["groq-chat-compltions"]: # TODO all llm engines under 'local' that will require api keys
|
|
562
622
|
use_local_auth = True
|
|
563
623
|
local_auth_type = "bearer_token"
|
|
564
624
|
local_auth_key = questionary.password(
|
|
@@ -779,7 +839,7 @@ def configure_model(config: LettaConfig, credentials: LettaCredentials, model_en
|
|
|
779
839
|
def configure_embedding_endpoint(config: LettaConfig, credentials: LettaCredentials):
|
|
780
840
|
# configure embedding endpoint
|
|
781
841
|
|
|
782
|
-
default_embedding_endpoint_type =
|
|
842
|
+
default_embedding_endpoint_type = None
|
|
783
843
|
|
|
784
844
|
embedding_endpoint_type, embedding_endpoint, embedding_dim, embedding_model = None, None, None, None
|
|
785
845
|
embedding_provider = questionary.select(
|
|
@@ -844,9 +904,7 @@ def configure_embedding_endpoint(config: LettaConfig, credentials: LettaCredenti
|
|
|
844
904
|
raise KeyboardInterrupt
|
|
845
905
|
|
|
846
906
|
# get model type
|
|
847
|
-
default_embedding_model =
|
|
848
|
-
config.default_embedding_config.embedding_model if config.default_embedding_config else "BAAI/bge-large-en-v1.5"
|
|
849
|
-
)
|
|
907
|
+
default_embedding_model = "BAAI/bge-large-en-v1.5"
|
|
850
908
|
embedding_model = questionary.text(
|
|
851
909
|
"Enter HuggingFace model tag (e.g. BAAI/bge-large-en-v1.5):",
|
|
852
910
|
default=default_embedding_model,
|
|
@@ -855,7 +913,7 @@ def configure_embedding_endpoint(config: LettaConfig, credentials: LettaCredenti
|
|
|
855
913
|
raise KeyboardInterrupt
|
|
856
914
|
|
|
857
915
|
# get model dimentions
|
|
858
|
-
default_embedding_dim =
|
|
916
|
+
default_embedding_dim = "1024"
|
|
859
917
|
embedding_dim = questionary.text("Enter embedding model dimentions (e.g. 1024):", default=str(default_embedding_dim)).ask()
|
|
860
918
|
if embedding_dim is None:
|
|
861
919
|
raise KeyboardInterrupt
|
|
@@ -880,9 +938,7 @@ def configure_embedding_endpoint(config: LettaConfig, credentials: LettaCredenti
|
|
|
880
938
|
raise KeyboardInterrupt
|
|
881
939
|
|
|
882
940
|
# get model type
|
|
883
|
-
default_embedding_model =
|
|
884
|
-
config.default_embedding_config.embedding_model if config.default_embedding_config else "mxbai-embed-large"
|
|
885
|
-
)
|
|
941
|
+
default_embedding_model = "mxbai-embed-large"
|
|
886
942
|
embedding_model = questionary.text(
|
|
887
943
|
"Enter Ollama model tag (e.g. mxbai-embed-large):",
|
|
888
944
|
default=default_embedding_model,
|
|
@@ -891,7 +947,7 @@ def configure_embedding_endpoint(config: LettaConfig, credentials: LettaCredenti
|
|
|
891
947
|
raise KeyboardInterrupt
|
|
892
948
|
|
|
893
949
|
# get model dimensions
|
|
894
|
-
default_embedding_dim =
|
|
950
|
+
default_embedding_dim = "512"
|
|
895
951
|
embedding_dim = questionary.text("Enter embedding model dimensions (e.g. 512):", default=str(default_embedding_dim)).ask()
|
|
896
952
|
if embedding_dim is None:
|
|
897
953
|
raise KeyboardInterrupt
|
|
@@ -1040,19 +1096,6 @@ def configure():
|
|
|
1040
1096
|
|
|
1041
1097
|
# TODO: remove most of this (deplicated with User table)
|
|
1042
1098
|
config = LettaConfig(
|
|
1043
|
-
default_llm_config=LLMConfig(
|
|
1044
|
-
model=model,
|
|
1045
|
-
model_endpoint=model_endpoint,
|
|
1046
|
-
model_endpoint_type=model_endpoint_type,
|
|
1047
|
-
model_wrapper=model_wrapper,
|
|
1048
|
-
context_window=context_window,
|
|
1049
|
-
),
|
|
1050
|
-
default_embedding_config=EmbeddingConfig(
|
|
1051
|
-
embedding_endpoint_type=embedding_endpoint_type,
|
|
1052
|
-
embedding_endpoint=embedding_endpoint,
|
|
1053
|
-
embedding_dim=embedding_dim,
|
|
1054
|
-
embedding_model=embedding_model,
|
|
1055
|
-
),
|
|
1056
1099
|
# storage
|
|
1057
1100
|
archival_storage_type=archival_storage_type,
|
|
1058
1101
|
archival_storage_uri=archival_storage_uri,
|