letta-nightly 0.5.0.dev20241015104156__tar.gz → 0.5.0.dev20241016104103__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of letta-nightly has been flagged as potentially problematic.
- letta_nightly-0.5.0.dev20241016104103/PKG-INFO +203 -0
- letta_nightly-0.5.0.dev20241016104103/README.md +122 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/agent.py +170 -16
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/client/client.py +186 -42
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/client/utils.py +15 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/constants.py +1 -1
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/functions/functions.py +1 -1
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/functions/schema_generator.py +3 -2
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/main.py +6 -4
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/metadata.py +27 -3
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/agent.py +7 -3
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/memory.py +37 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/tool.py +4 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/routers/openai/assistants/threads.py +1 -1
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/routers/v1/agents.py +43 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/routers/v1/sources.py +28 -1
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/routers/v1/tools.py +1 -1
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/server.py +157 -94
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/pyproject.toml +1 -1
- letta_nightly-0.5.0.dev20241015104156/PKG-INFO +0 -105
- letta_nightly-0.5.0.dev20241015104156/README.md +0 -24
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/LICENSE +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/__init__.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/__main__.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/agent_store/chroma.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/agent_store/db.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/agent_store/lancedb.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/agent_store/milvus.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/agent_store/qdrant.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/agent_store/storage.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/base.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/benchmark/benchmark.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/benchmark/constants.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/cli/cli.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/cli/cli_config.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/cli/cli_load.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/client/__init__.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/client/admin.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/client/streaming.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/config.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/credentials.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/data_sources/connectors.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/data_sources/connectors_helper.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/embeddings.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/errors.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/functions/__init__.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/functions/function_sets/base.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/functions/function_sets/extras.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/functions/helpers.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/humans/__init__.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/humans/examples/basic.txt +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/humans/examples/cs_phd.txt +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/interface.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/llm_api/__init__.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/llm_api/anthropic.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/llm_api/azure_openai.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/llm_api/azure_openai_constants.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/llm_api/cohere.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/llm_api/google_ai.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/llm_api/helpers.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/llm_api/llm_api_tools.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/llm_api/mistral.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/llm_api/openai.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/README.md +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/__init__.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/chat_completion_proxy.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/constants.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/function_parser.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/grammars/__init__.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/grammars/gbnf_grammar_generator.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/grammars/json.gbnf +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/grammars/json_func_calls_with_inner_thoughts.gbnf +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/json_parser.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/koboldcpp/api.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/koboldcpp/settings.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/llamacpp/api.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/llamacpp/settings.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/llm_chat_completion_wrappers/__init__.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/llm_chat_completion_wrappers/airoboros.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/llm_chat_completion_wrappers/chatml.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/llm_chat_completion_wrappers/configurable_wrapper.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/llm_chat_completion_wrappers/dolphin.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/llm_chat_completion_wrappers/llama3.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/llm_chat_completion_wrappers/simple_summary_wrapper.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/llm_chat_completion_wrappers/wrapper_base.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/llm_chat_completion_wrappers/zephyr.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/lmstudio/api.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/lmstudio/settings.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/ollama/api.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/ollama/settings.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/settings/__init__.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/settings/deterministic_mirostat.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/settings/settings.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/settings/simple.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/utils.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/vllm/api.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/webui/api.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/webui/legacy_api.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/webui/legacy_settings.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/local_llm/webui/settings.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/log.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/memory.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/openai_backcompat/__init__.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/openai_backcompat/openai_object.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/persistence_manager.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/personas/__init__.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/personas/examples/anna_pa.txt +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/personas/examples/google_search_persona.txt +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/personas/examples/memgpt_doc.txt +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/personas/examples/memgpt_starter.txt +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/personas/examples/sam.txt +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/personas/examples/sam_pov.txt +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/personas/examples/sam_simple_pov_gpt35.txt +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/personas/examples/sqldb/test.db +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/prompts/__init__.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/prompts/gpt_summarize.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/prompts/gpt_system.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/prompts/system/memgpt_base.txt +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/prompts/system/memgpt_chat.txt +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/prompts/system/memgpt_chat_compressed.txt +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/prompts/system/memgpt_chat_fstring.txt +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/prompts/system/memgpt_doc.txt +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/prompts/system/memgpt_gpt35_extralong.txt +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/prompts/system/memgpt_intuitive_knowledge.txt +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/prompts/system/memgpt_modified_chat.txt +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/providers.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/pytest.ini +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/api_key.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/block.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/embedding_config.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/enums.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/file.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/health.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/job.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/letta_base.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/letta_message.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/letta_request.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/letta_response.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/llm_config.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/message.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/openai/chat_completion_request.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/openai/chat_completion_response.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/openai/chat_completions.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/openai/embedding_response.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/openai/openai.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/organization.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/passage.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/source.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/usage.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/schemas/user.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/__init__.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/constants.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/__init__.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/admin/__init__.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/admin/agents.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/admin/tools.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/admin/users.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/app.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/auth/__init__.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/auth/index.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/auth_token.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/interface.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/routers/__init__.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/routers/openai/__init__.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/routers/openai/assistants/__init__.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/routers/openai/assistants/assistants.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/routers/openai/assistants/schemas.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/routers/openai/chat_completions/__init__.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/routers/openai/chat_completions/chat_completions.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/routers/v1/__init__.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/routers/v1/blocks.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/routers/v1/health.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/routers/v1/jobs.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/routers/v1/llms.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/routers/v1/organizations.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/routers/v1/users.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/static_files.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/rest_api/utils.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/startup.sh +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/static_files/assets/index-3ab03d5b.css +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/static_files/assets/index-dc228d4a.js +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/static_files/favicon.ico +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/static_files/index.html +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/static_files/memgpt_logo_transparent.png +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/utils.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/ws_api/__init__.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/ws_api/example_client.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/ws_api/interface.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/ws_api/protocol.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/server/ws_api/server.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/settings.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/streaming_interface.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/system.py +0 -0
- {letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/utils.py +0 -0
letta_nightly-0.5.0.dev20241016104103/PKG-INFO
ADDED
@@ -0,0 +1,203 @@
Metadata-Version: 2.1
Name: letta-nightly
Version: 0.5.0.dev20241016104103
Summary: Create LLM agents with long-term memory and custom tools
License: Apache License
Author: Letta Team
Author-email: contact@letta.com
Requires-Python: >=3.10,<3.13
Classifier: License :: Other/Proprietary License
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Provides-Extra: autogen
Provides-Extra: dev
Provides-Extra: external-tools
Provides-Extra: milvus
Provides-Extra: ollama
Provides-Extra: postgres
Provides-Extra: qdrant
Provides-Extra: server
Provides-Extra: tests
Requires-Dist: alembic (>=1.13.3,<2.0.0)
Requires-Dist: autoflake (>=2.3.0,<3.0.0) ; extra == "dev"
Requires-Dist: black[jupyter] (>=24.2.0,<25.0.0) ; extra == "dev"
Requires-Dist: chromadb (>=0.4.24,<0.5.0)
Requires-Dist: composio-core (>=0.5.28,<0.6.0) ; extra == "external-tools"
Requires-Dist: composio-langchain (>=0.5.28,<0.6.0) ; extra == "external-tools"
Requires-Dist: crewai (>=0.41.1,<0.42.0) ; extra == "external-tools"
Requires-Dist: crewai-tools (>=0.8.3,<0.9.0) ; extra == "external-tools"
Requires-Dist: datasets (>=2.14.6,<3.0.0) ; extra == "dev"
Requires-Dist: demjson3 (>=3.0.6,<4.0.0)
Requires-Dist: docker (>=7.1.0,<8.0.0) ; extra == "external-tools"
Requires-Dist: docstring-parser (>=0.16,<0.17)
Requires-Dist: docx2txt (>=0.8,<0.9)
Requires-Dist: fastapi (>=0.104.1,<0.105.0) ; extra == "server"
Requires-Dist: html2text (>=2020.1.16,<2021.0.0)
Requires-Dist: httpx (>=0.27.2,<0.28.0)
Requires-Dist: httpx-sse (>=0.4.0,<0.5.0)
Requires-Dist: isort (>=5.13.2,<6.0.0) ; extra == "dev"
Requires-Dist: jinja2 (>=3.1.4,<4.0.0)
Requires-Dist: langchain (>=0.2.16,<0.3.0) ; extra == "external-tools"
Requires-Dist: langchain-community (>=0.2.17,<0.3.0) ; extra == "external-tools"
Requires-Dist: llama-index (>=0.11.9,<0.12.0)
Requires-Dist: llama-index-embeddings-ollama (>=0.3.1,<0.4.0) ; extra == "ollama"
Requires-Dist: llama-index-embeddings-openai (>=0.2.5,<0.3.0)
Requires-Dist: locust (>=2.31.5,<3.0.0)
Requires-Dist: nltk (>=3.8.1,<4.0.0)
Requires-Dist: numpy (>=1.26.2,<2.0.0)
Requires-Dist: pexpect (>=4.9.0,<5.0.0) ; extra == "dev"
Requires-Dist: pg8000 (>=1.30.3,<2.0.0) ; extra == "postgres"
Requires-Dist: pgvector (>=0.2.3,<0.3.0) ; extra == "postgres"
Requires-Dist: pre-commit (>=3.5.0,<4.0.0) ; extra == "dev"
Requires-Dist: prettytable (>=3.9.0,<4.0.0)
Requires-Dist: pyautogen (==0.2.22) ; extra == "autogen"
Requires-Dist: pydantic (>=2.7.4,<3.0.0)
Requires-Dist: pydantic-settings (>=2.2.1,<3.0.0)
Requires-Dist: pymilvus (>=2.4.3,<3.0.0) ; extra == "milvus"
Requires-Dist: pyright (>=1.1.347,<2.0.0) ; extra == "dev"
Requires-Dist: pytest-asyncio (>=0.23.2,<0.24.0) ; extra == "dev"
Requires-Dist: pytest-order (>=1.2.0,<2.0.0) ; extra == "dev"
Requires-Dist: python-box (>=7.1.1,<8.0.0)
Requires-Dist: python-multipart (>=0.0.9,<0.0.10)
Requires-Dist: pytz (>=2023.3.post1,<2024.0)
Requires-Dist: pyyaml (>=6.0.1,<7.0.0)
Requires-Dist: qdrant-client (>=1.9.1,<2.0.0) ; extra == "qdrant"
Requires-Dist: questionary (>=2.0.1,<3.0.0)
Requires-Dist: setuptools (>=68.2.2,<69.0.0)
Requires-Dist: sqlalchemy (>=2.0.25,<3.0.0)
Requires-Dist: sqlalchemy-json (>=0.7.0,<0.8.0)
Requires-Dist: sqlalchemy-utils (>=0.41.2,<0.42.0)
Requires-Dist: sqlmodel (>=0.0.16,<0.0.17)
Requires-Dist: tiktoken (>=0.7.0,<0.8.0)
Requires-Dist: tqdm (>=4.66.1,<5.0.0)
Requires-Dist: typer[all] (>=0.9.0,<0.10.0)
Requires-Dist: uvicorn (>=0.24.0.post1,<0.25.0) ; extra == "server"
Requires-Dist: websockets (>=12.0,<13.0) ; extra == "server"
Requires-Dist: wikipedia (>=1.4.0,<2.0.0) ; extra == "external-tools" or extra == "tests"
Description-Content-Type: text/markdown

<p align="center">
  <picture>
    <source media="(prefers-color-scheme: dark)" srcset="assets/Letta-logo-RGB_GreyonTransparent_cropped_small.png">
    <source media="(prefers-color-scheme: light)" srcset="assets/Letta-logo-RGB_OffBlackonTransparent_cropped_small.png">
    <img alt="Letta logo" src="assets/Letta-logo-RGB_GreyonOffBlack_cropped_small.png" width="500">
  </picture>
</p>

<div align="center">
<h1>Letta (previously MemGPT)</h1>

<h3>

[Homepage](https://letta.com) // [Documentation](https://docs.letta.com) // [Letta Cloud](https://forms.letta.com/early-access)

</h3>

**👾 Letta** is an open source framework for building stateful LLM applications. You can use Letta to build **stateful agents** with advanced reasoning capabilities and transparent long-term memory. The Letta framework is white box and model-agnostic.

[](https://discord.gg/letta)
[](https://twitter.com/Letta_AI)
[](https://arxiv.org/abs/2310.08560)
[](LICENSE)
[](https://github.com/cpacker/MemGPT/releases)
[](https://github.com/cpacker/MemGPT)

</div>

> [!NOTE]
> **Looking for MemGPT?** You're in the right place!
>
> The MemGPT package and Docker image have been renamed to `letta` to clarify the distinction between MemGPT agents and the API server / runtime that runs LLM agents as *services*.
>
> You use the **Letta _framework_** to create **MemGPT _agents_**. Read more about the relationship between MemGPT and Letta [here](https://www.letta.com/blog/memgpt-and-letta).

## ⚡ Quickstart

The two main ways to install Letta are through **pypi** (`pip`) or via **Docker**:
* **`pip`** (guide below) - the easiest way to try Letta, will default to using SQLite and ChromaDB for the database backends
* **Docker** (guide [here](https://docs.letta.com/install#run-letta-with-docker)) - recommended for production settings, will default to using Postgres (+ pgvector) for the database backend

### Step 1 - Install Letta using `pip`
```sh
$ pip install -U letta
```

### Step 2 - Set your environment variables for your chosen LLM / embedding providers
```sh
$ export OPENAI_API_KEY=sk-...
```

For Ollama (see our full [documentation](https://docs.letta.com/install) for examples of how to set up various providers):
```sh
$ export OLLAMA_BASE_URL=http://localhost:11434
```

### Step 3 - Run the Letta CLI

You can create agents and chat with them via the Letta CLI tool (`letta run`):
```sh
$ letta run
```
```
🧬 Creating new agent...
? Select LLM model: letta-free [type=openai] [ip=https://inference.memgpt.ai]
? Select embedding model: letta-free [type=hugging-face] [ip=https://embeddings.memgpt.ai]
->  🤖 Using persona profile: 'sam_pov'
->  🧑 Using human profile: 'basic'
->  🛠️ 8 tools: send_message, pause_heartbeats, conversation_search, conversation_search_date, archival_memory_insert, archival_memory_search, core_memory_append, core_memory_replace

🎉 Created new agent 'InspiringSpinach'

Hit enter to begin (will request first Letta message)

💭 User engaged. Recognize as Chad. Acknowledge presence and establish connection. Explore interests and engage in meaningful discourse.
🤖 Hello, Chad. It's nice to meet you. I am keen on learning and exploring new ideas and concepts. Is there a specific topic you'd like to discuss today?

> Enter your message: my name is Brad, not Chad...

💭 A discrepancy in identity. Reflect this change in core memory. Apologize for the oversight, and rectify the error.
⚡🧠 [function] updating memory with core_memory_replace
         First name: Chad
        → First name: Brad
💭 Apologize for the mistake in recognizing his name, and begin the conversation anew, addressing him by the correct name this time.
🤖 I apologize for the oversight, Brad. Let's restart. It's a pleasure to meet you, Brad. Is there a specific topic you'd like to discuss today?

> Enter your message:
```

### Step 4 - Run the Letta server

You can start the Letta API server with `letta server` (see the full API reference [here](https://docs.letta.com/api-reference)):
```sh
$ letta server
```
```
Initializing database...
Running: uvicorn server:app --host localhost --port 8283
INFO:     Started server process [47750]
INFO:     Waiting for application startup.
INFO:     Application startup complete.
INFO:     Uvicorn running on http://localhost:8283 (Press CTRL+C to quit)
```

When you start the Letta API server, the ADE (Agent Development Environment) will be available on `http://localhost:8283`:
<img alt="Screenshot of the Letta ADE (Agent Development Environment)" src="assets/letta_ade_screenshot.png" width="1600">

In Letta, all agents are stored/persisted in the same database, so the agents you create in the CLI are accessible via the API and ADE, and vice versa. Check out the [quickstart guide on our docs](https://docs.letta.com/quickstart) for a tutorial where you create an agent in the Letta CLI and message the same agent via the Letta API.

## 🤗 How to contribute

Letta is an open source project built by over a hundred contributors. There are many ways to get involved in the Letta OSS project!

* **Contribute to the project**: Interested in contributing? Start by reading our [Contribution Guidelines](https://github.com/cpacker/MemGPT/tree/main/CONTRIBUTING.md).
* **Ask a question**: Join our community on [Discord](https://discord.gg/letta) and direct your questions to the `#support` channel.
* **Report issues or suggest features**: Have an issue or a feature request? Please submit them through our [GitHub Issues page](https://github.com/cpacker/MemGPT/issues).
* **Explore the roadmap**: Curious about future developments? View and comment on our [project roadmap](https://github.com/cpacker/MemGPT/issues/1533).
* **Join community events**: Stay updated with the [event calendar](https://lu.ma/berkeley-llm-meetup) or follow our [Twitter account](https://twitter.com/Letta_AI).

---

***Legal notices**: By using Letta and related Letta services (such as the Letta endpoint or hosted service), you are agreeing to our [privacy policy](https://www.letta.com/privacy-policy) and [terms of service](https://www.letta.com/terms-of-service).*

letta_nightly-0.5.0.dev20241016104103/README.md
ADDED
@@ -0,0 +1,122 @@
<p align="center">
  <picture>
    <source media="(prefers-color-scheme: dark)" srcset="assets/Letta-logo-RGB_GreyonTransparent_cropped_small.png">
    <source media="(prefers-color-scheme: light)" srcset="assets/Letta-logo-RGB_OffBlackonTransparent_cropped_small.png">
    <img alt="Letta logo" src="assets/Letta-logo-RGB_GreyonOffBlack_cropped_small.png" width="500">
  </picture>
</p>

<div align="center">
<h1>Letta (previously MemGPT)</h1>

<h3>

[Homepage](https://letta.com) // [Documentation](https://docs.letta.com) // [Letta Cloud](https://forms.letta.com/early-access)

</h3>

**👾 Letta** is an open source framework for building stateful LLM applications. You can use Letta to build **stateful agents** with advanced reasoning capabilities and transparent long-term memory. The Letta framework is white box and model-agnostic.

[](https://discord.gg/letta)
[](https://twitter.com/Letta_AI)
[](https://arxiv.org/abs/2310.08560)
[](LICENSE)
[](https://github.com/cpacker/MemGPT/releases)
[](https://github.com/cpacker/MemGPT)

</div>

> [!NOTE]
> **Looking for MemGPT?** You're in the right place!
>
> The MemGPT package and Docker image have been renamed to `letta` to clarify the distinction between MemGPT agents and the API server / runtime that runs LLM agents as *services*.
>
> You use the **Letta _framework_** to create **MemGPT _agents_**. Read more about the relationship between MemGPT and Letta [here](https://www.letta.com/blog/memgpt-and-letta).

## ⚡ Quickstart

The two main ways to install Letta are through **pypi** (`pip`) or via **Docker**:
* **`pip`** (guide below) - the easiest way to try Letta, will default to using SQLite and ChromaDB for the database backends
* **Docker** (guide [here](https://docs.letta.com/install#run-letta-with-docker)) - recommended for production settings, will default to using Postgres (+ pgvector) for the database backend

### Step 1 - Install Letta using `pip`
```sh
$ pip install -U letta
```

### Step 2 - Set your environment variables for your chosen LLM / embedding providers
```sh
$ export OPENAI_API_KEY=sk-...
```

For Ollama (see our full [documentation](https://docs.letta.com/install) for examples of how to set up various providers):
```sh
$ export OLLAMA_BASE_URL=http://localhost:11434
```

### Step 3 - Run the Letta CLI

You can create agents and chat with them via the Letta CLI tool (`letta run`):
```sh
$ letta run
```
```
🧬 Creating new agent...
? Select LLM model: letta-free [type=openai] [ip=https://inference.memgpt.ai]
? Select embedding model: letta-free [type=hugging-face] [ip=https://embeddings.memgpt.ai]
->  🤖 Using persona profile: 'sam_pov'
->  🧑 Using human profile: 'basic'
->  🛠️ 8 tools: send_message, pause_heartbeats, conversation_search, conversation_search_date, archival_memory_insert, archival_memory_search, core_memory_append, core_memory_replace

🎉 Created new agent 'InspiringSpinach'

Hit enter to begin (will request first Letta message)

💭 User engaged. Recognize as Chad. Acknowledge presence and establish connection. Explore interests and engage in meaningful discourse.
🤖 Hello, Chad. It's nice to meet you. I am keen on learning and exploring new ideas and concepts. Is there a specific topic you'd like to discuss today?

> Enter your message: my name is Brad, not Chad...

💭 A discrepancy in identity. Reflect this change in core memory. Apologize for the oversight, and rectify the error.
⚡🧠 [function] updating memory with core_memory_replace
         First name: Chad
        → First name: Brad
💭 Apologize for the mistake in recognizing his name, and begin the conversation anew, addressing him by the correct name this time.
🤖 I apologize for the oversight, Brad. Let's restart. It's a pleasure to meet you, Brad. Is there a specific topic you'd like to discuss today?

> Enter your message:
```

### Step 4 - Run the Letta server

You can start the Letta API server with `letta server` (see the full API reference [here](https://docs.letta.com/api-reference)):
```sh
$ letta server
```
```
Initializing database...
Running: uvicorn server:app --host localhost --port 8283
INFO:     Started server process [47750]
INFO:     Waiting for application startup.
INFO:     Application startup complete.
INFO:     Uvicorn running on http://localhost:8283 (Press CTRL+C to quit)
```

When you start the Letta API server, the ADE (Agent Development Environment) will be available on `http://localhost:8283`:
<img alt="Screenshot of the Letta ADE (Agent Development Environment)" src="assets/letta_ade_screenshot.png" width="1600">

In Letta, all agents are stored/persisted in the same database, so the agents you create in the CLI are accessible via the API and ADE, and vice versa. Check out the [quickstart guide on our docs](https://docs.letta.com/quickstart) for a tutorial where you create an agent in the Letta CLI and message the same agent via the Letta API.

## 🤗 How to contribute

Letta is an open source project built by over a hundred contributors. There are many ways to get involved in the Letta OSS project!

* **Contribute to the project**: Interested in contributing? Start by reading our [Contribution Guidelines](https://github.com/cpacker/MemGPT/tree/main/CONTRIBUTING.md).
* **Ask a question**: Join our community on [Discord](https://discord.gg/letta) and direct your questions to the `#support` channel.
* **Report issues or suggest features**: Have an issue or a feature request? Please submit them through our [GitHub Issues page](https://github.com/cpacker/MemGPT/issues).
* **Explore the roadmap**: Curious about future developments? View and comment on our [project roadmap](https://github.com/cpacker/MemGPT/issues/1533).
* **Join community events**: Stay updated with the [event calendar](https://lu.ma/berkeley-llm-meetup) or follow our [Twitter account](https://twitter.com/Letta_AI).

---

***Legal notices**: By using Letta and related Letta services (such as the Letta endpoint or hosted service), you are agreeing to our [privacy policy](https://www.letta.com/privacy-policy) and [terms of service](https://www.letta.com/terms-of-service).*
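The README above claims that CLI-created agents are reachable through the API server and ADE because everything shares one database. As a rough illustration of that workflow (using the Python client bundled in this same package; the exact method names and signatures are an assumption based on the 0.5.x client and may differ):

```python
# Sketch: connect to a running `letta server` and message an agent created in the CLI.
# Assumes an agent named "InspiringSpinach" already exists (as in the quickstart above);
# create_client/list_agents/send_message reflect the letta 0.5.x Python client (assumed).
from letta import create_client

client = create_client(base_url="http://localhost:8283")  # REST client against the local server

agent = next(a for a in client.list_agents() if a.name == "InspiringSpinach")
response = client.send_message(agent_id=agent.id, role="user", message="remember that my name is Brad")

for msg in response.messages:
    print(msg)          # inner thoughts, tool calls, and assistant replies
print(response.usage)   # aggregated token usage for the request
```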
{letta_nightly-0.5.0.dev20241015104156 → letta_nightly-0.5.0.dev20241016104103}/letta/agent.py
RENAMED
@@ -11,14 +11,19 @@ from letta.agent_store.storage import StorageConnector
 from letta.constants import (
     CLI_WARNING_PREFIX,
     FIRST_MESSAGE_ATTEMPTS,
+    FUNC_FAILED_HEARTBEAT_MESSAGE,
     IN_CONTEXT_MEMORY_KEYWORD,
     LLM_MAX_TOKENS,
     MESSAGE_SUMMARY_TRUNC_KEEP_N_LAST,
     MESSAGE_SUMMARY_TRUNC_TOKEN_FRAC,
     MESSAGE_SUMMARY_WARNING_FRAC,
+    REQ_HEARTBEAT_MESSAGE,
 )
+from letta.errors import LLMError
 from letta.interface import AgentInterface
+from letta.llm_api.helpers import is_context_overflow_error
 from letta.llm_api.llm_api_tools import create
+from letta.local_llm.utils import num_tokens_from_messages
 from letta.memory import ArchivalMemory, RecallMemory, summarize_messages
 from letta.metadata import MetadataStore
 from letta.persistence_manager import LocalStateManager
@@ -26,17 +31,21 @@ from letta.schemas.agent import AgentState, AgentStepResponse
 from letta.schemas.block import Block
 from letta.schemas.embedding_config import EmbeddingConfig
 from letta.schemas.enums import MessageRole, OptionState
-from letta.schemas.memory import Memory
+from letta.schemas.memory import ContextWindowOverview, Memory
 from letta.schemas.message import Message, UpdateMessage
 from letta.schemas.openai.chat_completion_response import ChatCompletionResponse
 from letta.schemas.openai.chat_completion_response import (
     Message as ChatCompletionMessage,
 )
+from letta.schemas.openai.chat_completion_response import UsageStatistics
 from letta.schemas.passage import Passage
 from letta.schemas.tool import Tool
+from letta.schemas.usage import LettaUsageStatistics
 from letta.system import (
+    get_heartbeat,
     get_initial_boot_messages,
     get_login_event,
+    get_token_limit_warning,
     package_function_response,
     package_summarize_message,
     package_user_message,
@@ -56,9 +65,6 @@ from letta.utils import (
     verify_first_message_correctness,
 )
 
-from .errors import LLMError
-from .llm_api.helpers import is_context_overflow_error
-
 
 def compile_memory_metadata_block(
     memory_edit_timestamp: datetime.datetime,
@@ -202,7 +208,7 @@ class BaseAgent(ABC):
     def step(
         self,
         messages: Union[Message, List[Message]],
-    ) ->
+    ) -> LettaUsageStatistics:
         """
         Top-level event message handler for the agent.
         """
@@ -721,18 +727,105 @@ class Agent(BaseAgent):
         return messages, heartbeat_request, function_failed
 
     def step(
+        self,
+        messages: Union[Message, List[Message]],
+        # additional args
+        chaining: bool = True,
+        max_chaining_steps: Optional[int] = None,
+        ms: Optional[MetadataStore] = None,
+        **kwargs,
+    ) -> LettaUsageStatistics:
+        """Run Agent.step in a loop, handling chaining via heartbeat requests and function failures"""
+        # assert ms is not None, "MetadataStore is required"
+
+        next_input_message = messages if isinstance(messages, list) else [messages]
+        counter = 0
+        total_usage = UsageStatistics()
+        step_count = 0
+        while True:
+            kwargs["ms"] = ms
+            kwargs["first_message"] = False
+            step_response = self.inner_step(
+                messages=next_input_message,
+                **kwargs,
+            )
+            step_response.messages
+            heartbeat_request = step_response.heartbeat_request
+            function_failed = step_response.function_failed
+            token_warning = step_response.in_context_memory_warning
+            usage = step_response.usage
+
+            step_count += 1
+            total_usage += usage
+            counter += 1
+            self.interface.step_complete()
+
+            # logger.debug("Saving agent state")
+            # save updated state
+            if ms:
+                save_agent(self, ms)
+
+            # Chain stops
+            if not chaining:
+                printd("No chaining, stopping after one step")
+                break
+            elif max_chaining_steps is not None and counter > max_chaining_steps:
+                printd(f"Hit max chaining steps, stopping after {counter} steps")
+                break
+            # Chain handlers
+            elif token_warning:
+                assert self.agent_state.user_id is not None
+                next_input_message = Message.dict_to_message(
+                    agent_id=self.agent_state.id,
+                    user_id=self.agent_state.user_id,
+                    model=self.model,
+                    openai_message_dict={
+                        "role": "user",  # TODO: change to system?
+                        "content": get_token_limit_warning(),
+                    },
+                )
+                continue  # always chain
+            elif function_failed:
+                assert self.agent_state.user_id is not None
+                next_input_message = Message.dict_to_message(
+                    agent_id=self.agent_state.id,
+                    user_id=self.agent_state.user_id,
+                    model=self.model,
+                    openai_message_dict={
+                        "role": "user",  # TODO: change to system?
+                        "content": get_heartbeat(FUNC_FAILED_HEARTBEAT_MESSAGE),
+                    },
+                )
+                continue  # always chain
+            elif heartbeat_request:
+                assert self.agent_state.user_id is not None
+                next_input_message = Message.dict_to_message(
+                    agent_id=self.agent_state.id,
+                    user_id=self.agent_state.user_id,
+                    model=self.model,
+                    openai_message_dict={
+                        "role": "user",  # TODO: change to system?
+                        "content": get_heartbeat(REQ_HEARTBEAT_MESSAGE),
+                    },
+                )
+                continue  # always chain
+            # Letta no-op / yield
+            else:
+                break
+
+        return LettaUsageStatistics(**total_usage.model_dump(), step_count=step_count)
+
+    def inner_step(
         self,
         messages: Union[Message, List[Message]],
         first_message: bool = False,
         first_message_retry_limit: int = FIRST_MESSAGE_ATTEMPTS,
         skip_verify: bool = False,
-        return_dicts: bool = True,
-        # recreate_message_timestamp: bool = True, # if True, when input is a Message type, recreated the 'created_at' field
         stream: bool = False,  # TODO move to config?
         inner_thoughts_in_kwargs_option: OptionState = OptionState.DEFAULT,
         ms: Optional[MetadataStore] = None,
     ) -> AgentStepResponse:
-        """
+        """Runs a single step in the agent loop (generates at most one LLM call)"""
 
         try:
 
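The hunk above splits the old single-shot `step()` into `inner_step()` (at most one LLM call) plus a new `step()` wrapper that keeps looping while the agent requests heartbeats, a tool call fails, or a token-limit warning fires, accumulating usage along the way. A rough caller-side sketch of the new loop (agent construction and `MetadataStore` setup omitted; `agent` and `ms` are placeholders, and the usage field names are taken from `LettaUsageStatistics` as used in this release):

```python
# Sketch: driving the new chaining loop from caller code (not part of the diff).
from letta.schemas.message import Message

user_message = Message.dict_to_message(
    agent_id=agent.agent_state.id,
    user_id=agent.agent_state.user_id,
    model=agent.model,
    openai_message_dict={"role": "user", "content": "hello"},
)

# One call may now trigger several inner steps: heartbeat requests and failed
# tool calls are fed back in as synthetic user messages until the agent yields.
usage = agent.step(user_message, chaining=True, max_chaining_steps=5, ms=ms)
print(usage.step_count)    # number of inner_step() iterations taken
print(usage.total_tokens)  # token usage aggregated across all steps
```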
@@ -834,13 +927,12 @@ class Agent(BaseAgent):
         )
 
         self._append_to_messages(all_new_messages)
-        messages_to_return = [msg.to_openai_dict() for msg in all_new_messages] if return_dicts else all_new_messages
 
         # update state after each step
         self.update_state()
 
         return AgentStepResponse(
-            messages=
+            messages=all_new_messages,
             heartbeat_request=heartbeat_request,
             function_failed=function_failed,
             in_context_memory_warning=active_memory_warning,
@@ -856,15 +948,12 @@ class Agent(BaseAgent):
             self.summarize_messages_inplace()
 
             # Try step again
-            return self.
+            return self.inner_step(
                 messages=messages,
                 first_message=first_message,
                 first_message_retry_limit=first_message_retry_limit,
                 skip_verify=skip_verify,
-                return_dicts=return_dicts,
-                # recreate_message_timestamp=recreate_message_timestamp,
                 stream=stream,
-                # timestamp=timestamp,
                 inner_thoughts_in_kwargs_option=inner_thoughts_in_kwargs_option,
                 ms=ms,
             )
@@ -905,7 +994,7 @@ class Agent(BaseAgent):
             # created_at=timestamp,
         )
 
-        return self.
+        return self.inner_step(messages=[user_message], **kwargs)
 
     def summarize_messages_inplace(self, cutoff=None, preserve_last_N_messages=True, disallow_tool_as_first=True):
         assert self.messages[0]["role"] == "system", f"self.messages[0] should be system (instead got {self.messages[0]})"
@@ -1326,13 +1415,78 @@ class Agent(BaseAgent):
         self.pop_until_user()
         user_message = self.pop_message(count=1)[0]
         assert user_message.text is not None, "User message text is None"
-        step_response = self.step_user_message(user_message_str=user_message.text
+        step_response = self.step_user_message(user_message_str=user_message.text)
         messages = step_response.messages
 
         assert messages is not None
         assert all(isinstance(msg, Message) for msg in messages), "step() returned non-Message objects"
         return messages
 
+    def get_context_window(self) -> ContextWindowOverview:
+        """Get the context window of the agent"""
+
+        system_prompt = self.agent_state.system  # TODO is this the current system or the initial system?
+        num_tokens_system = count_tokens(system_prompt)
+        core_memory = self.memory.compile()
+        num_tokens_core_memory = count_tokens(core_memory)
+
+        # conversion of messages to OpenAI dict format, which is passed to the token counter
+        messages_openai_format = self.messages
+
+        # Check if there's a summary message in the message queue
+        if (
+            len(self._messages) > 1
+            and self._messages[1].role == MessageRole.user
+            and isinstance(self._messages[1].text, str)
+            # TODO remove hardcoding
+            and "The following is a summary of the previous " in self._messages[1].text
+        ):
+            # Summary message exists
+            assert self._messages[1].text is not None
+            summary_memory = self._messages[1].text
+            num_tokens_summary_memory = count_tokens(self._messages[1].text)
+            # with a summary message, the real messages start at index 2
+            num_tokens_messages = (
+                num_tokens_from_messages(messages=messages_openai_format[2:], model=self.model) if len(messages_openai_format) > 2 else 0
+            )
+
+        else:
+            summary_memory = None
+            num_tokens_summary_memory = 0
+            # with no summary message, the real messages start at index 1
+            num_tokens_messages = (
+                num_tokens_from_messages(messages=messages_openai_format[1:], model=self.model) if len(messages_openai_format) > 1 else 0
+            )
+
+        num_archival_memory = self.persistence_manager.archival_memory.storage.size()
+        num_recall_memory = self.persistence_manager.recall_memory.storage.size()
+        external_memory_summary = compile_memory_metadata_block(
+            memory_edit_timestamp=get_utc_time(),  # dummy timestamp
+            archival_memory=self.persistence_manager.archival_memory,
+            recall_memory=self.persistence_manager.recall_memory,
+        )
+        num_tokens_external_memory_summary = count_tokens(external_memory_summary)
+
+        return ContextWindowOverview(
+            # context window breakdown (in messages)
+            num_messages=len(self._messages),
+            num_archival_memory=num_archival_memory,
+            num_recall_memory=num_recall_memory,
+            num_tokens_external_memory_summary=num_tokens_external_memory_summary,
+            # top-level information
+            context_window_size_max=self.agent_state.llm_config.context_window,
+            context_window_size_current=num_tokens_system + num_tokens_core_memory + num_tokens_summary_memory + num_tokens_messages,
+            # context window breakdown (in tokens)
+            num_tokens_system=num_tokens_system,
+            system_prompt=system_prompt,
+            num_tokens_core_memory=num_tokens_core_memory,
+            core_memory=core_memory,
+            num_tokens_summary_memory=num_tokens_summary_memory,
+            summary_memory=summary_memory,
+            num_tokens_messages=num_tokens_messages,
+            messages=self._messages,
+        )
+
 
 def save_agent(agent: Agent, ms: MetadataStore):
     """Save agent to metadata store"""