letta-nightly 0.5.2.dev20241118104226__tar.gz → 0.5.2.dev20241119104253__tar.gz

This diff shows the changes between two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of letta-nightly has been flagged as potentially problematic.

Files changed (214)
  1. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/PKG-INFO +1 -1
  2. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/agent.py +2 -1
  3. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/constants.py +1 -1
  4. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/functions/functions.py +2 -1
  5. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/llm_api/llm_api_tools.py +38 -0
  6. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/llm_api/openai.py +0 -1
  7. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/utils.py +12 -2
  8. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/providers.py +141 -3
  9. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/llm_config.py +1 -0
  10. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/openai/chat_completion_response.py +1 -0
  11. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/server.py +13 -1
  12. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/services/tool_manager.py +4 -0
  13. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/settings.py +3 -0
  14. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/pyproject.toml +1 -1
  15. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/LICENSE +0 -0
  16. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/README.md +0 -0
  17. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/__init__.py +0 -0
  18. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/__main__.py +0 -0
  19. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/agent_store/chroma.py +0 -0
  20. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/agent_store/db.py +0 -0
  21. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/agent_store/lancedb.py +0 -0
  22. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/agent_store/milvus.py +0 -0
  23. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/agent_store/qdrant.py +0 -0
  24. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/agent_store/storage.py +0 -0
  25. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/benchmark/benchmark.py +0 -0
  26. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/benchmark/constants.py +0 -0
  27. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/cli/cli.py +0 -0
  28. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/cli/cli_config.py +0 -0
  29. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/cli/cli_load.py +0 -0
  30. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/client/__init__.py +0 -0
  31. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/client/client.py +0 -0
  32. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/client/streaming.py +0 -0
  33. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/client/utils.py +0 -0
  34. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/config.py +0 -0
  35. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/credentials.py +0 -0
  36. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/data_sources/connectors.py +0 -0
  37. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/data_sources/connectors_helper.py +0 -0
  38. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/embeddings.py +0 -0
  39. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/errors.py +0 -0
  40. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/functions/__init__.py +0 -0
  41. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/functions/function_sets/base.py +0 -0
  42. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/functions/function_sets/extras.py +0 -0
  43. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/functions/helpers.py +0 -0
  44. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/functions/schema_generator.py +0 -0
  45. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/helpers/__init__.py +0 -0
  46. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/helpers/tool_rule_solver.py +0 -0
  47. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/humans/__init__.py +0 -0
  48. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/humans/examples/basic.txt +0 -0
  49. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/humans/examples/cs_phd.txt +0 -0
  50. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/interface.py +0 -0
  51. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/llm_api/__init__.py +0 -0
  52. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/llm_api/anthropic.py +0 -0
  53. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/llm_api/azure_openai.py +0 -0
  54. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/llm_api/azure_openai_constants.py +0 -0
  55. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/llm_api/cohere.py +0 -0
  56. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/llm_api/google_ai.py +0 -0
  57. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/llm_api/helpers.py +0 -0
  58. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/llm_api/mistral.py +0 -0
  59. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/README.md +0 -0
  60. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/__init__.py +0 -0
  61. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/chat_completion_proxy.py +0 -0
  62. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/constants.py +0 -0
  63. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/function_parser.py +0 -0
  64. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/grammars/__init__.py +0 -0
  65. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/grammars/gbnf_grammar_generator.py +0 -0
  66. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/grammars/json.gbnf +0 -0
  67. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/grammars/json_func_calls_with_inner_thoughts.gbnf +0 -0
  68. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/json_parser.py +0 -0
  69. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/koboldcpp/api.py +0 -0
  70. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/koboldcpp/settings.py +0 -0
  71. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/llamacpp/api.py +0 -0
  72. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/llamacpp/settings.py +0 -0
  73. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/llm_chat_completion_wrappers/__init__.py +0 -0
  74. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/llm_chat_completion_wrappers/airoboros.py +0 -0
  75. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/llm_chat_completion_wrappers/chatml.py +0 -0
  76. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/llm_chat_completion_wrappers/configurable_wrapper.py +0 -0
  77. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/llm_chat_completion_wrappers/dolphin.py +0 -0
  78. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/llm_chat_completion_wrappers/llama3.py +0 -0
  79. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/llm_chat_completion_wrappers/simple_summary_wrapper.py +0 -0
  80. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/llm_chat_completion_wrappers/wrapper_base.py +0 -0
  81. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/llm_chat_completion_wrappers/zephyr.py +0 -0
  82. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/lmstudio/api.py +0 -0
  83. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/lmstudio/settings.py +0 -0
  84. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/ollama/api.py +0 -0
  85. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/ollama/settings.py +0 -0
  86. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/settings/__init__.py +0 -0
  87. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/settings/deterministic_mirostat.py +0 -0
  88. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/settings/settings.py +0 -0
  89. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/settings/simple.py +0 -0
  90. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/vllm/api.py +0 -0
  91. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/webui/api.py +0 -0
  92. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/webui/legacy_api.py +0 -0
  93. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/webui/legacy_settings.py +0 -0
  94. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/local_llm/webui/settings.py +0 -0
  95. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/log.py +0 -0
  96. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/main.py +0 -0
  97. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/memory.py +0 -0
  98. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/metadata.py +0 -0
  99. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/o1_agent.py +0 -0
  100. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/openai_backcompat/__init__.py +0 -0
  101. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/openai_backcompat/openai_object.py +0 -0
  102. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/orm/__all__.py +0 -0
  103. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/orm/__init__.py +0 -0
  104. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/orm/agents_tags.py +0 -0
  105. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/orm/base.py +0 -0
  106. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/orm/enums.py +0 -0
  107. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/orm/errors.py +0 -0
  108. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/orm/file.py +0 -0
  109. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/orm/mixins.py +0 -0
  110. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/orm/organization.py +0 -0
  111. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/orm/source.py +0 -0
  112. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/orm/sqlalchemy_base.py +0 -0
  113. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/orm/tool.py +0 -0
  114. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/orm/user.py +0 -0
  115. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/persistence_manager.py +0 -0
  116. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/personas/__init__.py +0 -0
  117. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/personas/examples/anna_pa.txt +0 -0
  118. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/personas/examples/google_search_persona.txt +0 -0
  119. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/personas/examples/memgpt_doc.txt +0 -0
  120. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/personas/examples/memgpt_starter.txt +0 -0
  121. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/personas/examples/o1_persona.txt +0 -0
  122. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/personas/examples/sam.txt +0 -0
  123. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/personas/examples/sam_pov.txt +0 -0
  124. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/personas/examples/sam_simple_pov_gpt35.txt +0 -0
  125. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/personas/examples/sqldb/test.db +0 -0
  126. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/prompts/__init__.py +0 -0
  127. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/prompts/gpt_summarize.py +0 -0
  128. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/prompts/gpt_system.py +0 -0
  129. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/prompts/system/memgpt_base.txt +0 -0
  130. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/prompts/system/memgpt_chat.txt +0 -0
  131. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/prompts/system/memgpt_chat_compressed.txt +0 -0
  132. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/prompts/system/memgpt_chat_fstring.txt +0 -0
  133. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/prompts/system/memgpt_doc.txt +0 -0
  134. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/prompts/system/memgpt_gpt35_extralong.txt +0 -0
  135. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/prompts/system/memgpt_intuitive_knowledge.txt +0 -0
  136. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/prompts/system/memgpt_modified_chat.txt +0 -0
  137. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/prompts/system/memgpt_modified_o1.txt +0 -0
  138. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/pytest.ini +0 -0
  139. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/agent.py +0 -0
  140. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/agents_tags.py +0 -0
  141. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/api_key.py +0 -0
  142. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/block.py +0 -0
  143. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/embedding_config.py +0 -0
  144. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/enums.py +0 -0
  145. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/file.py +0 -0
  146. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/health.py +0 -0
  147. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/job.py +0 -0
  148. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/letta_base.py +0 -0
  149. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/letta_message.py +0 -0
  150. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/letta_request.py +0 -0
  151. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/letta_response.py +0 -0
  152. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/memory.py +0 -0
  153. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/message.py +0 -0
  154. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/openai/chat_completion_request.py +0 -0
  155. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/openai/chat_completions.py +0 -0
  156. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/openai/embedding_response.py +0 -0
  157. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/openai/openai.py +0 -0
  158. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/organization.py +0 -0
  159. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/passage.py +0 -0
  160. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/source.py +0 -0
  161. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/tool.py +0 -0
  162. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/tool_rule.py +0 -0
  163. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/usage.py +0 -0
  164. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/schemas/user.py +0 -0
  165. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/__init__.py +0 -0
  166. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/constants.py +0 -0
  167. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/generate_openapi_schema.sh +0 -0
  168. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/__init__.py +0 -0
  169. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/app.py +0 -0
  170. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/auth/__init__.py +0 -0
  171. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/auth/index.py +0 -0
  172. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/auth_token.py +0 -0
  173. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/interface.py +0 -0
  174. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/routers/__init__.py +0 -0
  175. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/routers/openai/__init__.py +0 -0
  176. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/routers/openai/assistants/__init__.py +0 -0
  177. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/routers/openai/assistants/assistants.py +0 -0
  178. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/routers/openai/assistants/schemas.py +0 -0
  179. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/routers/openai/assistants/threads.py +0 -0
  180. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/routers/openai/chat_completions/__init__.py +0 -0
  181. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/routers/openai/chat_completions/chat_completions.py +0 -0
  182. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/routers/v1/__init__.py +0 -0
  183. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/routers/v1/agents.py +0 -0
  184. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/routers/v1/blocks.py +0 -0
  185. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/routers/v1/health.py +0 -0
  186. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/routers/v1/jobs.py +0 -0
  187. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/routers/v1/llms.py +0 -0
  188. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/routers/v1/organizations.py +0 -0
  189. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/routers/v1/sources.py +0 -0
  190. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/routers/v1/tools.py +0 -0
  191. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/routers/v1/users.py +0 -0
  192. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/static_files.py +0 -0
  193. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/rest_api/utils.py +0 -0
  194. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/startup.sh +0 -0
  195. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/static_files/assets/index-3ab03d5b.css +0 -0
  196. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/static_files/assets/index-9fa459a2.js +0 -0
  197. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/static_files/favicon.ico +0 -0
  198. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/static_files/index.html +0 -0
  199. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/static_files/memgpt_logo_transparent.png +0 -0
  200. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/utils.py +0 -0
  201. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/ws_api/__init__.py +0 -0
  202. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/ws_api/example_client.py +0 -0
  203. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/ws_api/interface.py +0 -0
  204. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/ws_api/protocol.py +0 -0
  205. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/server/ws_api/server.py +0 -0
  206. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/services/__init__.py +0 -0
  207. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/services/agents_tags_manager.py +0 -0
  208. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/services/organization_manager.py +0 -0
  209. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/services/source_manager.py +0 -0
  210. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/services/user_manager.py +0 -0
  211. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/streaming_interface.py +0 -0
  212. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/streaming_utils.py +0 -0
  213. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/system.py +0 -0
  214. {letta_nightly-0.5.2.dev20241118104226 → letta_nightly-0.5.2.dev20241119104253}/letta/utils.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: letta-nightly
-Version: 0.5.2.dev20241118104226
+Version: 0.5.2.dev20241119104253
 Summary: Create LLM agents with long-term memory and custom tools
 License: Apache License
 Author: Letta Team
letta/agent.py
@@ -1584,7 +1584,8 @@ class Agent(BaseAgent):
 
     def count_tokens(self) -> int:
         """Count the tokens in the current context window"""
-        return self.get_context_window().context_window_size_current
+        context_window_breakdown = self.get_context_window()
+        return context_window_breakdown.context_window_size_current
 
 
 def save_agent(agent: Agent, ms: MetadataStore):
letta/constants.py
@@ -19,7 +19,7 @@ IN_CONTEXT_MEMORY_KEYWORD = "CORE_MEMORY"
 TOOL_CALL_ID_MAX_LEN = 29
 
 # minimum context window size
-MIN_CONTEXT_WINDOW = 4000
+MIN_CONTEXT_WINDOW = 4096
 
 # embeddings
 MAX_EMBEDDING_DIM = 4096  # maximum supported embeding size - do NOT change or else DBs will need to be reset
letta/functions/functions.py
@@ -3,7 +3,7 @@ import inspect
 import os
 from textwrap import dedent  # remove indentation
 from types import ModuleType
-from typing import Optional
+from typing import Optional, List
 
 from letta.constants import CLI_WARNING_PREFIX
 from letta.functions.schema_generator import generate_schema
@@ -15,6 +15,7 @@ def derive_openai_json_schema(source_code: str, name: Optional[str] = None) -> d
     # Define a custom environment with necessary imports
     env = {
         "Optional": Optional,  # Add any other required imports here
+        "List": List
     }
 
     env.update(globals())
letta/llm_api/llm_api_tools.py
@@ -25,6 +25,7 @@ from letta.local_llm.constants import (
     INNER_THOUGHTS_KWARG,
     INNER_THOUGHTS_KWARG_DESCRIPTION,
 )
+from letta.local_llm.utils import num_tokens_from_functions, num_tokens_from_messages
 from letta.schemas.llm_config import LLMConfig
 from letta.schemas.message import Message
 from letta.schemas.openai.chat_completion_request import (
@@ -33,6 +34,7 @@ from letta.schemas.openai.chat_completion_request import (
     cast_message_to_subtype,
 )
 from letta.schemas.openai.chat_completion_response import ChatCompletionResponse
+from letta.settings import ModelSettings
 from letta.streaming_interface import (
     AgentChunkStreamingInterface,
     AgentRefreshStreamingInterface,
@@ -122,10 +124,19 @@ def create(
     """Return response to chat completion with backoff"""
     from letta.utils import printd
 
+    # Count the tokens first, if there's an overflow exit early by throwing an error up the stack
+    # NOTE: we want to include a specific substring in the error message to trigger summarization
+    messages_oai_format = [m.to_openai_dict() for m in messages]
+    prompt_tokens = num_tokens_from_messages(messages=messages_oai_format, model=llm_config.model)
+    function_tokens = num_tokens_from_functions(functions=functions, model=llm_config.model) if functions else 0
+    if prompt_tokens + function_tokens > llm_config.context_window:
+        raise Exception(f"Request exceeds maximum context length ({prompt_tokens + function_tokens} > {llm_config.context_window} tokens)")
+
     if not model_settings:
         from letta.settings import model_settings
 
         model_settings = model_settings
+    assert isinstance(model_settings, ModelSettings)
 
     printd(f"Using model {llm_config.model_endpoint_type}, endpoint: {llm_config.model_endpoint}")
 
@@ -326,6 +337,33 @@ def create(
 
         return response
 
+    elif llm_config.model_endpoint_type == "together":
+        """TogetherAI endpoint that goes via /completions instead of /chat/completions"""
+
+        if stream:
+            raise NotImplementedError(f"Streaming not yet implemented for TogetherAI (via the /completions endpoint).")
+
+        if model_settings.together_api_key is None and llm_config.model_endpoint == "https://api.together.ai/v1/completions":
+            raise ValueError(f"TogetherAI key is missing from letta config file")
+
+        return get_chat_completion(
+            model=llm_config.model,
+            messages=messages,
+            functions=functions,
+            functions_python=functions_python,
+            function_call=function_call,
+            context_window=llm_config.context_window,
+            endpoint=llm_config.model_endpoint,
+            endpoint_type="vllm",  # NOTE: use the vLLM path through /completions
+            wrapper=llm_config.model_wrapper,
+            user=str(user_id),
+            # hint
+            first_message=first_message,
+            # auth-related
+            auth_type="bearer_token",  # NOTE: Together expects bearer token auth
+            auth_key=model_settings.together_api_key,
+        )
+
     # local model
     else:
         if stream:
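The token pre-check added to `create()` above can be exercised on its own. Below is a minimal sketch of the same guard, assuming `num_tokens_from_messages` and `num_tokens_from_functions` take OpenAI-format dicts and return integer counts as in the hunks above; the wrapper name `ensure_request_fits` is illustrative and not part of this release.

```python
from letta.local_llm.utils import num_tokens_from_functions, num_tokens_from_messages


def ensure_request_fits(messages_oai_format: list, functions: list, model: str, context_window: int) -> int:
    """Hypothetical helper mirroring the guard added to create(): count prompt and
    function-schema tokens and raise before any API call if they overflow the window."""
    prompt_tokens = num_tokens_from_messages(messages=messages_oai_format, model=model)
    function_tokens = num_tokens_from_functions(functions=functions, model=model) if functions else 0
    total = prompt_tokens + function_tokens
    if total > context_window:
        # The error text states the token counts so upstream code can detect overflow and summarize
        raise Exception(f"Request exceeds maximum context length ({total} > {context_window} tokens)")
    return total
```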
letta/llm_api/openai.py
@@ -536,7 +536,6 @@ def openai_chat_completions_request(
             tool["function"] = convert_to_structured_output(tool["function"])
 
     response_json = make_post_request(url, headers, data)
-
     return ChatCompletionResponse(**response_json)
 
 
letta/local_llm/utils.py
@@ -94,7 +94,10 @@ def num_tokens_from_functions(functions: List[dict], model: str = "gpt-4"):
     num_tokens = 0
     for function in functions:
         function_tokens = len(encoding.encode(function["name"]))
-        function_tokens += len(encoding.encode(function["description"]))
+        if function["description"]:
+            function_tokens += len(encoding.encode(function["description"]))
+        else:
+            raise ValueError(f"Function {function['name']} has no description, function: {function}")
 
         if "parameters" in function:
             parameters = function["parameters"]
@@ -184,6 +187,7 @@ def num_tokens_from_messages(messages: List[dict], model: str = "gpt-4") -> int:
     https://community.openai.com/t/how-to-calculate-the-tokens-when-using-function-call/266573/11
     """
     try:
+        # Attempt to search for the encoding based on the model string
         encoding = tiktoken.encoding_for_model(model)
     except KeyError:
         # print("Warning: model not found. Using cl100k_base encoding.")
@@ -228,7 +232,13 @@ def num_tokens_from_messages(messages: List[dict], model: str = "gpt-4") -> int:
                 # num_tokens += len(encoding.encode(value["arguments"]))
 
             else:
-                num_tokens += len(encoding.encode(value))
+                if value is None:
+                    # raise ValueError(f"Message has null value: {key} with value: {value} - message={message}")
+                    warnings.warn(f"Message has null value: {key} with value: {value} - message={message}")
+                else:
+                    if not isinstance(value, str):
+                        raise ValueError(f"Message has non-string value: {key} with value: {value} - message={message}")
+                    num_tokens += len(encoding.encode(value))
 
             if key == "name":
                 num_tokens += tokens_per_name
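A rough sketch of how the stricter counting behaves after these changes, assuming the two helpers keep the signatures shown above; the sample message and function dicts are illustrative only.

```python
import warnings

from letta.local_llm.utils import num_tokens_from_functions, num_tokens_from_messages

# A message whose "content" is None (e.g. some assistant tool-call messages) should now
# produce a warning instead of crashing inside tiktoken's encode()
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    num_tokens_from_messages(messages=[{"role": "assistant", "content": None}], model="gpt-4")
    print([str(w.message) for w in caught])  # expect a "Message has null value" warning

# A function schema without a description is now rejected up front with a clear error
try:
    num_tokens_from_functions(functions=[{"name": "send_message", "description": None}], model="gpt-4")
except ValueError as err:
    print(err)  # "Function send_message has no description, ..."
```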
letta/providers.py
@@ -2,7 +2,7 @@ from typing import List, Optional
 
 from pydantic import BaseModel, Field, model_validator
 
-from letta.constants import LLM_MAX_TOKENS
+from letta.constants import LLM_MAX_TOKENS, MIN_CONTEXT_WINDOW
 from letta.llm_api.azure_openai import (
     get_azure_chat_completions_endpoint,
     get_azure_embeddings_endpoint,
@@ -67,10 +67,15 @@ class OpenAIProvider(Provider):
         extra_params = {"supported_parameters": "tools"} if "openrouter.ai" in self.base_url else None
         response = openai_get_model_list(self.base_url, api_key=self.api_key, extra_params=extra_params)
 
-        assert "data" in response, f"OpenAI model query response missing 'data' field: {response}"
+        # TogetherAI's response is missing the 'data' field
+        # assert "data" in response, f"OpenAI model query response missing 'data' field: {response}"
+        if "data" in response:
+            data = response["data"]
+        else:
+            data = response
 
         configs = []
-        for model in response["data"]:
+        for model in data:
             assert "id" in model, f"OpenAI model missing 'id' field: {model}"
             model_name = model["id"]
 
@@ -82,6 +87,32 @@ class OpenAIProvider(Provider):
 
             if not context_window_size:
                 continue
+
+            # TogetherAI includes the type, which we can use to filter out embedding models
+            if self.base_url == "https://api.together.ai/v1":
+                if "type" in model and model["type"] != "chat":
+                    continue
+
+                # for TogetherAI, we need to skip the models that don't support JSON mode / function calling
+                # requests.exceptions.HTTPError: HTTP error occurred: 400 Client Error: Bad Request for url: https://api.together.ai/v1/chat/completions | Status code: 400, Message: {
+                #   "error": {
+                #     "message": "mistralai/Mixtral-8x7B-v0.1 is not supported for JSON mode/function calling",
+                #     "type": "invalid_request_error",
+                #     "param": null,
+                #     "code": "constraints_model"
+                #   }
+                # }
+                if "config" not in model:
+                    continue
+                if "chat_template" not in model["config"]:
+                    continue
+                if model["config"]["chat_template"] is None:
+                    continue
+                if "tools" not in model["config"]["chat_template"]:
+                    continue
+                # if "config" in data and "chat_template" in data["config"] and "tools" not in data["config"]["chat_template"]:
+                #     continue
+
             configs.append(
                 LLMConfig(model=model_name, model_endpoint_type="openai", model_endpoint=self.base_url, context_window=context_window_size)
             )
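For reference, a standalone sketch of the new TogetherAI filter applied in `OpenAIProvider.list_llm_models`, assuming model entries shaped like TogetherAI's `/models` response; the sample dicts (ids, types, templates) are hypothetical.

```python
def supports_function_calling(model: dict) -> bool:
    """Hypothetical predicate mirroring the filter above: only chat models whose
    chat_template mentions tools survive, since the rest reject JSON mode / function calling."""
    if "type" in model and model["type"] != "chat":
        return False
    config = model.get("config") or {}
    chat_template = config.get("chat_template")
    return chat_template is not None and "tools" in chat_template


models = [
    {"id": "mistralai/Mixtral-8x7B-v0.1", "type": "language", "config": {"chat_template": None}},
    {"id": "meta-llama/Llama-3-70b-chat-hf", "type": "chat",
     "config": {"chat_template": "{% if tools %}...{% endif %}"}},
]
print([m["id"] for m in models if supports_function_calling(m)])  # only the tools-aware chat model remains
```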
letta/providers.py (continued)
@@ -325,6 +356,113 @@ class GroqProvider(OpenAIProvider):
         raise NotImplementedError
 
 
+class TogetherProvider(OpenAIProvider):
+    """TogetherAI provider that uses the /completions API
+
+    TogetherAI can also be used via the /chat/completions API
+    by settings OPENAI_API_KEY and OPENAI_API_BASE to the TogetherAI API key
+    and API URL, however /completions is preferred because their /chat/completions
+    function calling support is limited.
+    """
+
+    name: str = "together"
+    base_url: str = "https://api.together.ai/v1"
+    api_key: str = Field(..., description="API key for the TogetherAI API.")
+    default_prompt_formatter: str = Field(..., description="Default prompt formatter (aka model wrapper) to use on vLLM /completions API.")
+
+    def list_llm_models(self) -> List[LLMConfig]:
+        from letta.llm_api.openai import openai_get_model_list
+
+        response = openai_get_model_list(self.base_url, api_key=self.api_key)
+
+        # TogetherAI's response is missing the 'data' field
+        # assert "data" in response, f"OpenAI model query response missing 'data' field: {response}"
+        if "data" in response:
+            data = response["data"]
+        else:
+            data = response
+
+        configs = []
+        for model in data:
+            assert "id" in model, f"TogetherAI model missing 'id' field: {model}"
+            model_name = model["id"]
+
+            if "context_length" in model:
+                # Context length is returned in OpenRouter as "context_length"
+                context_window_size = model["context_length"]
+            else:
+                context_window_size = self.get_model_context_window_size(model_name)
+
+            # We need the context length for embeddings too
+            if not context_window_size:
+                continue
+
+            # Skip models that are too small for Letta
+            if context_window_size <= MIN_CONTEXT_WINDOW:
+                continue
+
+            # TogetherAI includes the type, which we can use to filter for embedding models
+            if "type" in model and model["type"] not in ["chat", "language"]:
+                continue
+
+            configs.append(
+                LLMConfig(
+                    model=model_name,
+                    model_endpoint_type="together",
+                    model_endpoint=self.base_url,
+                    model_wrapper=self.default_prompt_formatter,
+                    context_window=context_window_size,
+                )
+            )
+
+        return configs
+
+    def list_embedding_models(self) -> List[EmbeddingConfig]:
+        # TODO renable once we figure out how to pass API keys through properly
+        return []
+
+        # from letta.llm_api.openai import openai_get_model_list
+
+        # response = openai_get_model_list(self.base_url, api_key=self.api_key)
+
+        # # TogetherAI's response is missing the 'data' field
+        # # assert "data" in response, f"OpenAI model query response missing 'data' field: {response}"
+        # if "data" in response:
+        #     data = response["data"]
+        # else:
+        #     data = response
+
+        # configs = []
+        # for model in data:
+        #     assert "id" in model, f"TogetherAI model missing 'id' field: {model}"
+        #     model_name = model["id"]
+
+        #     if "context_length" in model:
+        #         # Context length is returned in OpenRouter as "context_length"
+        #         context_window_size = model["context_length"]
+        #     else:
+        #         context_window_size = self.get_model_context_window_size(model_name)
+
+        #     if not context_window_size:
+        #         continue
+
+        #     # TogetherAI includes the type, which we can use to filter out embedding models
+        #     if "type" in model and model["type"] not in ["embedding"]:
+        #         continue
+
+        #     configs.append(
+        #         EmbeddingConfig(
+        #             embedding_model=model_name,
+        #             embedding_endpoint_type="openai",
+        #             embedding_endpoint=self.base_url,
+        #             embedding_dim=context_window_size,
+        #             embedding_chunk_size=300,  # TODO: change?
+        #         )
+        #     )
+
+        # return configs
+
+
 class GoogleAIProvider(Provider):
     # gemini
     api_key: str = Field(..., description="API key for the Google AI API.")
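A minimal sketch of how the new provider might be driven directly, assuming a valid TogetherAI key; the key placeholder and the "chatml" prompt formatter are illustrative values, not defaults asserted by this release.

```python
from letta.providers import TogetherProvider

provider = TogetherProvider(
    api_key="<TOGETHER_API_KEY>",        # placeholder, supply a real key
    default_prompt_formatter="chatml",   # model wrapper to use on the /completions path
)

# Queries https://api.together.ai/v1/models and keeps chat/language models whose context
# window exceeds MIN_CONTEXT_WINDOW, tagging each config with model_endpoint_type="together"
for config in provider.list_llm_models():
    print(config.model, config.context_window)
```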
letta/schemas/llm_config.py
@@ -35,6 +35,7 @@ class LLMConfig(BaseModel):
         "vllm",
         "hugging-face",
         "mistral",
+        "together",  # completions endpoint
     ] = Field(..., description="The endpoint type for the model.")
     model_endpoint: Optional[str] = Field(None, description="The endpoint for the model.")
     model_wrapper: Optional[str] = Field(None, description="The wrapper for the model.")
letta/schemas/openai/chat_completion_response.py
@@ -46,6 +46,7 @@ class Choice(BaseModel):
     index: int
     message: Message
     logprobs: Optional[Dict[str, Union[List[MessageContentLogProb], None]]] = None
+    seed: Optional[int] = None  # found in TogetherAI
 
 
 class UsageStatistics(BaseModel):
letta/server/server.py
@@ -49,6 +49,7 @@ from letta.providers import (
     OllamaProvider,
     OpenAIProvider,
     Provider,
+    TogetherProvider,
     VLLMChatCompletionsProvider,
     VLLMCompletionsProvider,
 )
@@ -303,7 +304,18 @@ class SyncServer(Server):
                 )
             )
         if model_settings.groq_api_key:
-            self._enabled_providers.append(GroqProvider(api_key=model_settings.groq_api_key))
+            self._enabled_providers.append(
+                GroqProvider(
+                    api_key=model_settings.groq_api_key,
+                )
+            )
+        if model_settings.together_api_key:
+            self._enabled_providers.append(
+                TogetherProvider(
+                    api_key=model_settings.together_api_key,
+                    default_prompt_formatter=model_settings.default_prompt_formatter,
+                )
+            )
         if model_settings.vllm_api_base:
             # vLLM exposes both a /chat/completions and a /completions endpoint
             self._enabled_providers.append(
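With the new setting wired through, the Together provider is registered at server startup whenever a key is configured. A sketch, assuming pydantic-settings' default mapping of the `together_api_key` field to a `TOGETHER_API_KEY` environment variable (the key value is a placeholder):

```python
import os

# Hypothetical: set the key before the server reads its settings (e.g. via the shell or a .env file)
os.environ["TOGETHER_API_KEY"] = "<your-together-key>"

from letta.settings import ModelSettings

settings = ModelSettings()
# A non-empty key is the trigger for SyncServer to append a TogetherProvider above
print(settings.together_api_key is not None)  # True
```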
letta/services/tool_manager.py
@@ -70,6 +70,10 @@ class ToolManager:
             pydantic_tool.organization_id = actor.organization_id
             tool_data = pydantic_tool.model_dump()
             tool = ToolModel(**tool_data)
+            # The description is most likely auto-generated via the json_schema,
+            # so copy it over into the top-level description field
+            if tool.description is None:
+                tool.description = tool.json_schema.get("description", None)
             tool.create(session, actor=actor)
 
             return tool.to_pydantic()
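The fallback added here is small enough to show in isolation: a sketch of the behaviour with a hypothetical schema dict (in practice `json_schema` is generated from the tool's source code). It pairs with the stricter check in `num_tokens_from_functions` above, which now raises when a function schema lacks a description.

```python
# Hypothetical tool whose top-level description was never set explicitly
json_schema = {
    "name": "roll_dice",
    "description": "Roll a six-sided die and return the result.",
    "parameters": {"type": "object", "properties": {}},
}
description = None

# Mirrors the new fallback in ToolManager: prefer an explicit description,
# otherwise reuse the auto-generated one from the JSON schema
if description is None:
    description = json_schema.get("description", None)

print(description)  # "Roll a six-sided die and return the result."
```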
letta/settings.py
@@ -43,6 +43,9 @@ class ModelSettings(BaseSettings):
     # google ai
     gemini_api_key: Optional[str] = None
 
+    # together
+    together_api_key: Optional[str] = None
+
     # vLLM
     vllm_api_base: Optional[str] = None
 
pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "letta-nightly"
-version = "0.5.2.dev20241118104226"
+version = "0.5.2.dev20241119104253"
 packages = [
     {include = "letta"}
 ]