letta-nightly 0.7.12.dev20250509104216__tar.gz → 0.7.13.dev20250510172445__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (347)
  1. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/PKG-INFO +1 -1
  2. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/__init__.py +1 -1
  3. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/cli/cli.py +2 -1
  4. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/llm_api/google_vertex_client.py +1 -0
  5. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/llm_api/helpers.py +4 -0
  6. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/llm_api/llm_api_tools.py +12 -1
  7. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/llm_api/openai.py +43 -17
  8. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/llm_api/openai_client.py +38 -6
  9. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/source.py +1 -0
  10. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/llm_config.py +0 -1
  11. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/openai/chat_completion_response.py +24 -4
  12. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/providers.py +124 -46
  13. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/source.py +3 -0
  14. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/app.py +3 -2
  15. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/interface.py +4 -0
  16. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/routers/v1/agents.py +13 -20
  17. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/utils.py +1 -1
  18. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/server.py +3 -0
  19. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/agent_manager.py +2 -13
  20. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/summarizer/summarizer.py +3 -7
  21. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/settings.py +8 -2
  22. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/pyproject.toml +1 -1
  23. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/LICENSE +0 -0
  24. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/README.md +0 -0
  25. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/__main__.py +0 -0
  26. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/agent.py +0 -0
  27. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/agents/__init__.py +0 -0
  28. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/agents/base_agent.py +0 -0
  29. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/agents/ephemeral_agent.py +0 -0
  30. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/agents/exceptions.py +0 -0
  31. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/agents/helpers.py +0 -0
  32. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/agents/letta_agent.py +0 -0
  33. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/agents/letta_agent_batch.py +0 -0
  34. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/agents/voice_agent.py +0 -0
  35. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/agents/voice_sleeptime_agent.py +0 -0
  36. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/benchmark/benchmark.py +0 -0
  37. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/benchmark/constants.py +0 -0
  38. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/cli/cli_config.py +0 -0
  39. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/cli/cli_load.py +0 -0
  40. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/client/__init__.py +0 -0
  41. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/client/client.py +0 -0
  42. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/client/streaming.py +0 -0
  43. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/client/utils.py +0 -0
  44. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/config.py +0 -0
  45. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/constants.py +0 -0
  46. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/data_sources/connectors.py +0 -0
  47. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/data_sources/connectors_helper.py +0 -0
  48. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/embeddings.py +0 -0
  49. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/errors.py +0 -0
  50. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/functions/__init__.py +0 -0
  51. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/functions/ast_parsers.py +0 -0
  52. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/functions/async_composio_toolset.py +0 -0
  53. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/functions/composio_helpers.py +0 -0
  54. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/functions/function_sets/base.py +0 -0
  55. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/functions/function_sets/extras.py +0 -0
  56. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/functions/function_sets/multi_agent.py +0 -0
  57. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/functions/function_sets/voice.py +0 -0
  58. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/functions/functions.py +0 -0
  59. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/functions/helpers.py +0 -0
  60. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/functions/interface.py +0 -0
  61. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/functions/mcp_client/__init__.py +0 -0
  62. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/functions/mcp_client/base_client.py +0 -0
  63. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/functions/mcp_client/exceptions.py +0 -0
  64. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/functions/mcp_client/sse_client.py +0 -0
  65. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/functions/mcp_client/stdio_client.py +0 -0
  66. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/functions/mcp_client/types.py +0 -0
  67. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/functions/schema_generator.py +0 -0
  68. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/groups/dynamic_multi_agent.py +0 -0
  69. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/groups/helpers.py +0 -0
  70. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/groups/round_robin_multi_agent.py +0 -0
  71. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/groups/sleeptime_multi_agent.py +0 -0
  72. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/groups/sleeptime_multi_agent_v2.py +0 -0
  73. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/groups/supervisor_multi_agent.py +0 -0
  74. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/helpers/__init__.py +0 -0
  75. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/helpers/composio_helpers.py +0 -0
  76. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/helpers/converters.py +0 -0
  77. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/helpers/datetime_helpers.py +0 -0
  78. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/helpers/json_helpers.py +0 -0
  79. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/helpers/message_helper.py +0 -0
  80. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/helpers/tool_execution_helper.py +0 -0
  81. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/helpers/tool_rule_solver.py +0 -0
  82. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/humans/__init__.py +0 -0
  83. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/humans/examples/basic.txt +0 -0
  84. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/humans/examples/cs_phd.txt +0 -0
  85. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/interface.py +0 -0
  86. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/interfaces/__init__.py +0 -0
  87. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/interfaces/anthropic_streaming_interface.py +0 -0
  88. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/interfaces/openai_chat_completions_streaming_interface.py +0 -0
  89. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/interfaces/utils.py +0 -0
  90. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/jobs/__init__.py +0 -0
  91. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/jobs/helpers.py +0 -0
  92. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/jobs/llm_batch_job_polling.py +0 -0
  93. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/jobs/scheduler.py +0 -0
  94. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/jobs/types.py +0 -0
  95. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/llm_api/__init__.py +0 -0
  96. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/llm_api/anthropic.py +0 -0
  97. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/llm_api/anthropic_client.py +0 -0
  98. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/llm_api/aws_bedrock.py +0 -0
  99. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/llm_api/azure_openai.py +0 -0
  100. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/llm_api/azure_openai_constants.py +0 -0
  101. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/llm_api/cohere.py +0 -0
  102. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/llm_api/deepseek.py +0 -0
  103. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/llm_api/google_ai_client.py +0 -0
  104. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/llm_api/google_constants.py +0 -0
  105. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/llm_api/llm_client.py +0 -0
  106. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/llm_api/llm_client_base.py +0 -0
  107. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/llm_api/mistral.py +0 -0
  108. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/README.md +0 -0
  109. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/__init__.py +0 -0
  110. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/chat_completion_proxy.py +0 -0
  111. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/constants.py +0 -0
  112. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/function_parser.py +0 -0
  113. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/grammars/__init__.py +0 -0
  114. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/grammars/gbnf_grammar_generator.py +0 -0
  115. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/grammars/json.gbnf +0 -0
  116. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/grammars/json_func_calls_with_inner_thoughts.gbnf +0 -0
  117. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/json_parser.py +0 -0
  118. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/koboldcpp/api.py +0 -0
  119. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/koboldcpp/settings.py +0 -0
  120. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/llamacpp/api.py +0 -0
  121. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/llamacpp/settings.py +0 -0
  122. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/llm_chat_completion_wrappers/__init__.py +0 -0
  123. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/llm_chat_completion_wrappers/airoboros.py +0 -0
  124. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/llm_chat_completion_wrappers/chatml.py +0 -0
  125. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/llm_chat_completion_wrappers/configurable_wrapper.py +0 -0
  126. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/llm_chat_completion_wrappers/dolphin.py +0 -0
  127. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/llm_chat_completion_wrappers/llama3.py +0 -0
  128. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/llm_chat_completion_wrappers/simple_summary_wrapper.py +0 -0
  129. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/llm_chat_completion_wrappers/wrapper_base.py +0 -0
  130. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/llm_chat_completion_wrappers/zephyr.py +0 -0
  131. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/lmstudio/api.py +0 -0
  132. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/lmstudio/settings.py +0 -0
  133. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/ollama/api.py +0 -0
  134. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/ollama/settings.py +0 -0
  135. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/settings/__init__.py +0 -0
  136. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/settings/deterministic_mirostat.py +0 -0
  137. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/settings/settings.py +0 -0
  138. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/settings/simple.py +0 -0
  139. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/utils.py +0 -0
  140. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/vllm/api.py +0 -0
  141. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/webui/api.py +0 -0
  142. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/webui/legacy_api.py +0 -0
  143. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/webui/legacy_settings.py +0 -0
  144. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/local_llm/webui/settings.py +0 -0
  145. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/log.py +0 -0
  146. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/main.py +0 -0
  147. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/memory.py +0 -0
  148. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/openai_backcompat/__init__.py +0 -0
  149. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/openai_backcompat/openai_object.py +0 -0
  150. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/__all__.py +0 -0
  151. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/__init__.py +0 -0
  152. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/agent.py +0 -0
  153. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/agents_tags.py +0 -0
  154. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/base.py +0 -0
  155. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/block.py +0 -0
  156. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/block_history.py +0 -0
  157. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/blocks_agents.py +0 -0
  158. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/custom_columns.py +0 -0
  159. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/enums.py +0 -0
  160. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/errors.py +0 -0
  161. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/file.py +0 -0
  162. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/group.py +0 -0
  163. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/groups_agents.py +0 -0
  164. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/groups_blocks.py +0 -0
  165. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/identities_agents.py +0 -0
  166. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/identities_blocks.py +0 -0
  167. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/identity.py +0 -0
  168. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/job.py +0 -0
  169. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/job_messages.py +0 -0
  170. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/llm_batch_items.py +0 -0
  171. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/llm_batch_job.py +0 -0
  172. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/message.py +0 -0
  173. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/mixins.py +0 -0
  174. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/organization.py +0 -0
  175. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/passage.py +0 -0
  176. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/provider.py +0 -0
  177. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/sandbox_config.py +0 -0
  178. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/sources_agents.py +0 -0
  179. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/sqlalchemy_base.py +0 -0
  180. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/sqlite_functions.py +0 -0
  181. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/step.py +0 -0
  182. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/tool.py +0 -0
  183. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/tools_agents.py +0 -0
  184. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/orm/user.py +0 -0
  185. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/personas/__init__.py +0 -0
  186. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/personas/examples/anna_pa.txt +0 -0
  187. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/personas/examples/google_search_persona.txt +0 -0
  188. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/personas/examples/memgpt_doc.txt +0 -0
  189. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/personas/examples/memgpt_starter.txt +0 -0
  190. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/personas/examples/o1_persona.txt +0 -0
  191. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/personas/examples/sam.txt +0 -0
  192. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/personas/examples/sam_pov.txt +0 -0
  193. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/personas/examples/sam_simple_pov_gpt35.txt +0 -0
  194. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/personas/examples/sleeptime_doc_persona.txt +0 -0
  195. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/personas/examples/sleeptime_memory_persona.txt +0 -0
  196. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/personas/examples/sqldb/test.db +0 -0
  197. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/personas/examples/voice_memory_persona.txt +0 -0
  198. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/prompts/__init__.py +0 -0
  199. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/prompts/gpt_summarize.py +0 -0
  200. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/prompts/gpt_system.py +0 -0
  201. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/prompts/system/memgpt_base.txt +0 -0
  202. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/prompts/system/memgpt_chat.txt +0 -0
  203. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/prompts/system/memgpt_chat_compressed.txt +0 -0
  204. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/prompts/system/memgpt_chat_fstring.txt +0 -0
  205. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/prompts/system/memgpt_convo_only.txt +0 -0
  206. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/prompts/system/memgpt_doc.txt +0 -0
  207. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/prompts/system/memgpt_gpt35_extralong.txt +0 -0
  208. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/prompts/system/memgpt_intuitive_knowledge.txt +0 -0
  209. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/prompts/system/memgpt_memory_only.txt +0 -0
  210. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/prompts/system/memgpt_modified_chat.txt +0 -0
  211. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/prompts/system/memgpt_modified_o1.txt +0 -0
  212. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/prompts/system/memgpt_offline_memory.txt +0 -0
  213. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/prompts/system/memgpt_offline_memory_chat.txt +0 -0
  214. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/prompts/system/memgpt_sleeptime_chat.txt +0 -0
  215. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/prompts/system/sleeptime.txt +0 -0
  216. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/prompts/system/sleeptime_doc_ingest.txt +0 -0
  217. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/prompts/system/voice_chat.txt +0 -0
  218. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/prompts/system/voice_sleeptime.txt +0 -0
  219. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/pytest.ini +0 -0
  220. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/agent.py +0 -0
  221. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/block.py +0 -0
  222. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/embedding_config.py +0 -0
  223. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/embedding_config_overrides.py +0 -0
  224. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/enums.py +0 -0
  225. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/environment_variables.py +0 -0
  226. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/file.py +0 -0
  227. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/group.py +0 -0
  228. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/health.py +0 -0
  229. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/identity.py +0 -0
  230. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/job.py +0 -0
  231. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/letta_base.py +0 -0
  232. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/letta_message.py +0 -0
  233. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/letta_message_content.py +0 -0
  234. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/letta_request.py +0 -0
  235. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/letta_response.py +0 -0
  236. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/llm_batch_job.py +0 -0
  237. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/llm_config_overrides.py +0 -0
  238. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/memory.py +0 -0
  239. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/message.py +0 -0
  240. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/openai/chat_completion_request.py +0 -0
  241. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/openai/chat_completions.py +0 -0
  242. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/openai/embedding_response.py +0 -0
  243. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/openai/openai.py +0 -0
  244. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/organization.py +0 -0
  245. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/passage.py +0 -0
  246. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/response_format.py +0 -0
  247. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/run.py +0 -0
  248. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/sandbox_config.py +0 -0
  249. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/step.py +0 -0
  250. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/tool.py +0 -0
  251. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/tool_execution_result.py +0 -0
  252. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/tool_rule.py +0 -0
  253. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/usage.py +0 -0
  254. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/schemas/user.py +0 -0
  255. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/serialize_schemas/__init__.py +0 -0
  256. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/serialize_schemas/marshmallow_agent.py +0 -0
  257. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/serialize_schemas/marshmallow_agent_environment_variable.py +0 -0
  258. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/serialize_schemas/marshmallow_base.py +0 -0
  259. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/serialize_schemas/marshmallow_block.py +0 -0
  260. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/serialize_schemas/marshmallow_custom_fields.py +0 -0
  261. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/serialize_schemas/marshmallow_message.py +0 -0
  262. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/serialize_schemas/marshmallow_tag.py +0 -0
  263. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/serialize_schemas/marshmallow_tool.py +0 -0
  264. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/serialize_schemas/pydantic_agent_schema.py +0 -0
  265. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/__init__.py +0 -0
  266. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/constants.py +0 -0
  267. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/db.py +0 -0
  268. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/generate_openapi_schema.sh +0 -0
  269. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/__init__.py +0 -0
  270. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/auth/__init__.py +0 -0
  271. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/auth/index.py +0 -0
  272. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/auth_token.py +0 -0
  273. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/chat_completions_interface.py +0 -0
  274. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/json_parser.py +0 -0
  275. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/routers/__init__.py +0 -0
  276. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/routers/openai/chat_completions/__init__.py +0 -0
  277. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/routers/openai/chat_completions/chat_completions.py +0 -0
  278. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/routers/v1/__init__.py +0 -0
  279. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/routers/v1/blocks.py +0 -0
  280. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/routers/v1/embeddings.py +0 -0
  281. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/routers/v1/groups.py +0 -0
  282. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/routers/v1/health.py +0 -0
  283. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/routers/v1/identities.py +0 -0
  284. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/routers/v1/jobs.py +0 -0
  285. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/routers/v1/llms.py +0 -0
  286. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/routers/v1/messages.py +0 -0
  287. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/routers/v1/organizations.py +0 -0
  288. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/routers/v1/providers.py +0 -0
  289. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/routers/v1/runs.py +0 -0
  290. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/routers/v1/sandbox_configs.py +0 -0
  291. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/routers/v1/sources.py +0 -0
  292. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/routers/v1/steps.py +0 -0
  293. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/routers/v1/tags.py +0 -0
  294. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/routers/v1/tools.py +0 -0
  295. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/routers/v1/users.py +0 -0
  296. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/routers/v1/voice.py +0 -0
  297. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/rest_api/static_files.py +0 -0
  298. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/startup.sh +0 -0
  299. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/static_files/assets/index-048c9598.js +0 -0
  300. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/static_files/assets/index-0e31b727.css +0 -0
  301. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/static_files/favicon.ico +0 -0
  302. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/static_files/index.html +0 -0
  303. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/static_files/memgpt_logo_transparent.png +0 -0
  304. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/utils.py +0 -0
  305. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/ws_api/__init__.py +0 -0
  306. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/ws_api/example_client.py +0 -0
  307. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/ws_api/interface.py +0 -0
  308. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/ws_api/protocol.py +0 -0
  309. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/server/ws_api/server.py +0 -0
  310. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/__init__.py +0 -0
  311. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/block_manager.py +0 -0
  312. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/group_manager.py +0 -0
  313. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/helpers/agent_manager_helper.py +0 -0
  314. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/helpers/tool_execution_helper.py +0 -0
  315. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/identity_manager.py +0 -0
  316. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/job_manager.py +0 -0
  317. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/llm_batch_manager.py +0 -0
  318. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/mcp/__init__.py +0 -0
  319. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/mcp/base_client.py +0 -0
  320. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/mcp/sse_client.py +0 -0
  321. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/mcp/stdio_client.py +0 -0
  322. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/mcp/types.py +0 -0
  323. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/message_manager.py +0 -0
  324. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/organization_manager.py +0 -0
  325. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/passage_manager.py +0 -0
  326. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/per_agent_lock_manager.py +0 -0
  327. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/provider_manager.py +0 -0
  328. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/sandbox_config_manager.py +0 -0
  329. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/source_manager.py +0 -0
  330. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/step_manager.py +0 -0
  331. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/summarizer/__init__.py +0 -0
  332. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/summarizer/enums.py +0 -0
  333. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/tool_executor/__init__.py +0 -0
  334. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/tool_executor/tool_execution_manager.py +0 -0
  335. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/tool_executor/tool_execution_sandbox.py +0 -0
  336. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/tool_executor/tool_executor.py +0 -0
  337. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/tool_manager.py +0 -0
  338. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/tool_sandbox/__init__.py +0 -0
  339. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/tool_sandbox/base.py +0 -0
  340. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/tool_sandbox/e2b_sandbox.py +0 -0
  341. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/tool_sandbox/local_sandbox.py +0 -0
  342. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/services/user_manager.py +0 -0
  343. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/streaming_interface.py +0 -0
  344. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/streaming_utils.py +0 -0
  345. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/system.py +0 -0
  346. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/tracing.py +0 -0
  347. {letta_nightly-0.7.12.dev20250509104216 → letta_nightly-0.7.13.dev20250510172445}/letta/utils.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: letta-nightly
- Version: 0.7.12.dev20250509104216
+ Version: 0.7.13.dev20250510172445
  Summary: Create LLM agents with long-term memory and custom tools
  License: Apache License
  Author: Letta Team
letta/__init__.py
@@ -1,4 +1,4 @@
- __version__ = "0.7.12"
+ __version__ = "0.7.13"

  # import clients
  from letta.client.client import LocalClient, RESTClient, create_client
letta/cli/cli.py
@@ -42,6 +42,7 @@ def server(
  port: Annotated[Optional[int], typer.Option(help="Port to run the server on")] = None,
  host: Annotated[Optional[str], typer.Option(help="Host to run the server on (default to localhost)")] = None,
  debug: Annotated[bool, typer.Option(help="Turn debugging output on")] = False,
+ reload: Annotated[bool, typer.Option(help="Enable hot-reload")] = False,
  ade: Annotated[bool, typer.Option(help="Allows remote access")] = False, # NOTE: deprecated
  secure: Annotated[bool, typer.Option(help="Adds simple security access")] = False,
  localhttps: Annotated[bool, typer.Option(help="Setup local https")] = False,
@@ -61,7 +62,7 @@ def server(
  try:
  from letta.server.rest_api.app import start_server

- start_server(port=port, host=host, debug=debug)
+ start_server(port=port, host=host, debug=debug, reload=reload)

  except KeyboardInterrupt:
  # Handle CTRL-C
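The +3 -2 change to letta/server/rest_api/app.py listed above (not shown in this excerpt) is presumably where the new flag reaches the ASGI server. A minimal sketch of that wiring, assuming start_server forwards reload to uvicorn.run; the import string and default port are illustrative, not the verbatim Letta code:

    # Hedged sketch: how a --reload CLI flag typically reaches uvicorn.
    # The function name matches the diff; the body is an assumption, not Letta's actual app.py.
    import uvicorn

    def start_server(port=None, host=None, debug=False, reload=False):
        # uvicorn needs an import string (not an app object) so it can re-import the app on reload
        uvicorn.run(
            "letta.server.rest_api.app:app",  # illustrative import path
            host=host or "localhost",
            port=port or 8283,
            log_level="debug" if debug else "info",
            reload=reload,
        )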
letta/llm_api/google_vertex_client.py
@@ -1,3 +1,4 @@
+ import json
  import uuid
  from typing import List, Optional

letta/llm_api/helpers.py
@@ -337,6 +337,10 @@ def calculate_summarizer_cutoff(in_context_messages: List[Message], token_counts
  )
  break

+ # includes the tool response to be summarized after a tool call so we don't have any hanging tool calls after trimming.
+ if i + 1 < len(in_context_messages_openai) and in_context_messages_openai[i + 1]["role"] == "tool":
+ cutoff += 1
+
  logger.info(f"Evicting {cutoff}/{len(in_context_messages)} messages...")
  return cutoff + 1

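For context, a self-contained sketch of the eviction rule this hunk adds: when the summarizer cutoff lands right after an assistant message that issued a tool call, pull the following tool response into the evicted range so the kept context never starts with an orphaned tool result. The helper name and message shapes below are illustrative, not Letta's internals:

    from typing import Dict, List

    def extend_cutoff_past_tool_response(messages: List[Dict], cutoff: int) -> int:
        """Illustrative version of the new rule: if the message just past the
        cutoff is a tool response, evict it along with the tool call."""
        if cutoff + 1 < len(messages) and messages[cutoff + 1]["role"] == "tool":
            return cutoff + 1
        return cutoff

    # Toy history: trimming at index 1 (the tool call) would otherwise leave the
    # tool result at index 2 dangling at the top of the kept context.
    history = [
        {"role": "user", "content": "What's the weather?"},
        {"role": "assistant", "content": None, "tool_calls": [{"name": "get_weather"}]},
        {"role": "tool", "content": '{"temp_f": 70}'},
        {"role": "user", "content": "Thanks!"},
    ]
    print(extend_cutoff_past_tool_response(history, cutoff=1))  # -> 2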
letta/llm_api/llm_api_tools.py
@@ -215,6 +215,9 @@ def create(
  chat_completion_request=data,
  stream_interface=stream_interface,
  name=name,
+ # NOTE: needs to be true for OpenAI proxies that use the `reasoning_content` field
+ # For example, DeepSeek, or LM Studio
+ expect_reasoning_content=False,
  )
  else: # Client did not request token streaming (expect a blocking backend response)
  data.stream = False
@@ -272,6 +275,9 @@ def create(
  chat_completion_request=data,
  stream_interface=stream_interface,
  name=name,
+ # TODO turn on to support reasoning content from xAI reasoners:
+ # https://docs.x.ai/docs/guides/reasoning#reasoning
+ expect_reasoning_content=False,
  )
  else: # Client did not request token streaming (expect a blocking backend response)
  data.stream = False
@@ -486,7 +492,10 @@ def create(
  if stream:
  raise NotImplementedError(f"Streaming not yet implemented for TogetherAI (via the /completions endpoint).")

- if model_settings.together_api_key is None and llm_config.model_endpoint == "https://api.together.ai/v1/completions":
+ if model_settings.together_api_key is None and (
+ llm_config.model_endpoint == "https://api.together.ai/v1/completions"
+ or llm_config.model_endpoint == "https://api.together.xyz/v1/completions"
+ ):
  raise LettaConfigurationError(message="TogetherAI key is missing from letta config file", missing_fields=["together_api_key"])

  return get_chat_completion(
@@ -560,6 +569,8 @@ def create(
  chat_completion_request=data,
  stream_interface=stream_interface,
  name=name,
+ # TODO should we toggle for R1 vs V3?
+ expect_reasoning_content=True,
  )
  else: # Client did not request token streaming (expect a blocking backend response)
  data.stream = False
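The expect_reasoning_content flag being threaded through these call sites tells the stream parser whether to look for a non-standard reasoning_content field on each delta, which DeepSeek-style OpenAI-compatible backends emit alongside the normal content. A rough sketch of what that toggle implies downstream; the field name comes from DeepSeek's documented response shape, everything else is illustrative rather than Letta's actual parser:

    def split_stream_delta(delta: dict, expect_reasoning_content: bool) -> tuple:
        """Illustrative: route a streamed delta into (visible_text, reasoning_text)."""
        visible = delta.get("content") or ""
        reasoning = ""
        if expect_reasoning_content:
            # DeepSeek-style proxies put chain-of-thought tokens here instead of `content`
            reasoning = delta.get("reasoning_content") or ""
        return visible, reasoning

    # Example delta as a DeepSeek-style reasoner might stream it:
    print(split_stream_delta({"reasoning_content": "Checking the forecast...", "content": ""}, True))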
letta/llm_api/openai.py
@@ -8,7 +8,13 @@ from letta.constants import LETTA_MODEL_ENDPOINT
  from letta.errors import ErrorCode, LLMAuthenticationError, LLMError
  from letta.helpers.datetime_helpers import timestamp_to_datetime
  from letta.llm_api.helpers import add_inner_thoughts_to_functions, convert_to_structured_output, make_post_request
- from letta.llm_api.openai_client import accepts_developer_role, supports_parallel_tool_calling, supports_temperature_param
+ from letta.llm_api.openai_client import (
+ accepts_developer_role,
+ requires_auto_tool_choice,
+ supports_parallel_tool_calling,
+ supports_structured_output,
+ supports_temperature_param,
+ )
  from letta.local_llm.constants import INNER_THOUGHTS_KWARG, INNER_THOUGHTS_KWARG_DESCRIPTION, INNER_THOUGHTS_KWARG_DESCRIPTION_GO_FIRST
  from letta.local_llm.utils import num_tokens_from_functions, num_tokens_from_messages
  from letta.log import get_logger
@@ -49,10 +55,7 @@ def openai_check_valid_api_key(base_url: str, api_key: Union[str, None]) -> None
  else:
  raise ValueError("No API key provided")

-
- def openai_get_model_list(
- url: str, api_key: Optional[str] = None, fix_url: Optional[bool] = False, extra_params: Optional[dict] = None
- ) -> dict:
+ def openai_get_model_list(url: str, api_key: Optional[str] = None, fix_url: bool = False, extra_params: Optional[dict] = None) -> dict:
  """https://platform.openai.com/docs/api-reference/models/list"""
  from letta.utils import printd

@@ -154,7 +157,10 @@ def build_openai_chat_completions_request(
  elif function_call not in ["none", "auto", "required"]:
  tool_choice = ToolFunctionChoice(type="function", function=ToolFunctionChoiceFunctionCall(name=function_call))
  else:
- tool_choice = function_call
+ if requires_auto_tool_choice(llm_config):
+ tool_choice = "auto"
+ else:
+ tool_choice = function_call
  data = ChatCompletionRequest(
  model=model,
  messages=openai_message_list,
@@ -197,12 +203,13 @@ build_openai_chat_completions_request(
  if use_structured_output and data.tools is not None and len(data.tools) > 0:
  # Convert to structured output style (which has 'strict' and no optionals)
  for tool in data.tools:
- try:
- # tool["function"] = convert_to_structured_output(tool["function"])
- structured_output_version = convert_to_structured_output(tool.function.model_dump())
- tool.function = FunctionSchema(**structured_output_version)
- except ValueError as e:
- warnings.warn(f"Failed to convert tool function to structured output, tool={tool}, error={e}")
+ if supports_structured_output(llm_config):
+ try:
+ # tool["function"] = convert_to_structured_output(tool["function"])
+ structured_output_version = convert_to_structured_output(tool.function.model_dump())
+ tool.function = FunctionSchema(**structured_output_version)
+ except ValueError as e:
+ warnings.warn(f"Failed to convert tool function to structured output, tool={tool}, error={e}")
  return data


@@ -221,7 +228,7 @@ openai_chat_completions_process_stream(
  expect_reasoning_content: bool = True,
  name: Optional[str] = None,
  ) -> ChatCompletionResponse:
- """Process a streaming completion response, and return a ChatCompletionRequest at the end.
+ """Process a streaming completion response, and return a ChatCompletionResponse at the end.

  To "stream" the response in Letta, we want to call a streaming-compatible interface function
  on the chunks received from the OpenAI-compatible server POST SSE response.
@@ -293,6 +300,9 @@ openai_chat_completions_process_stream(
  url=url, api_key=api_key, chat_completion_request=chat_completion_request
  ):
  assert isinstance(chat_completion_chunk, ChatCompletionChunkResponse), type(chat_completion_chunk)
+ if chat_completion_chunk.choices is None or len(chat_completion_chunk.choices) == 0:
+ warnings.warn(f"No choices in chunk: {chat_completion_chunk}")
+ continue

  # NOTE: this assumes that the tool call ID will only appear in one of the chunks during the stream
  if override_tool_call_id:
@@ -429,6 +439,9 @@ openai_chat_completions_process_stream(
  except Exception as e:
  if stream_interface:
  stream_interface.stream_end()
+ import traceback
+
+ traceback.print_exc()
  logger.error(f"Parsing ChatCompletion stream failed with error:\n{str(e)}")
  raise e
  finally:
@@ -463,14 +476,27 @@ openai_chat_completions_request_stream(
  url: str,
  api_key: str,
  chat_completion_request: ChatCompletionRequest,
+ fix_url: bool = False,
  ) -> Generator[ChatCompletionChunkResponse, None, None]:
+
+ # In some cases we may want to double-check the URL and do basic correction, eg:
+ # In Letta config the address for vLLM is w/o a /v1 suffix for simplicity
+ # However if we're treating the server as an OpenAI proxy we want the /v1 suffix on our model hit
+ if fix_url:
+ if not url.endswith("/v1"):
+ url = smart_urljoin(url, "v1")
+
  data = prepare_openai_payload(chat_completion_request)
  data["stream"] = True
  client = OpenAI(api_key=api_key, base_url=url, max_retries=0)
- stream = client.chat.completions.create(**data)
- for chunk in stream:
- # TODO: Use the native OpenAI objects here?
- yield ChatCompletionChunkResponse(**chunk.model_dump(exclude_none=True))
+ try:
+ stream = client.chat.completions.create(**data)
+ for chunk in stream:
+ # TODO: Use the native OpenAI objects here?
+ yield ChatCompletionChunkResponse(**chunk.model_dump(exclude_none=True))
+ except Exception as e:
+ print(f"Error request stream from /v1/chat/completions, url={url}, data={data}:\n{e}")
+ raise e


  def openai_chat_completions_request(
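The new fix_url branch normalizes base URLs that are stored without the /v1 suffix (Letta keeps the bare vLLM address in its config) before handing them to the OpenAI client. A standalone sketch of that normalization, assuming smart_urljoin behaves like a trailing-slash-safe join; the helper below is illustrative, not Letta's smart_urljoin:

    def ensure_v1_suffix(url: str) -> str:
        """Illustrative: append '/v1' to an OpenAI-compatible base URL if it's missing."""
        url = url.rstrip("/")
        if not url.endswith("/v1"):
            url = f"{url}/v1"
        return url

    assert ensure_v1_suffix("http://localhost:8000") == "http://localhost:8000/v1"
    assert ensure_v1_suffix("http://localhost:8000/v1/") == "http://localhost:8000/v1"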
@@ -75,6 +75,37 @@ def supports_parallel_tool_calling(model: str) -> bool:
75
75
  return True
76
76
 
77
77
 
78
+ # TODO move into LLMConfig as a field?
79
+ def supports_structured_output(llm_config: LLMConfig) -> bool:
80
+ """Certain providers don't support structured output."""
81
+
82
+ # FIXME pretty hacky - turn off for providers we know users will use,
83
+ # but also don't support structured output
84
+ if "nebius.com" in llm_config.model_endpoint:
85
+ return False
86
+ else:
87
+ return True
88
+
89
+
90
+ # TODO move into LLMConfig as a field?
91
+ def requires_auto_tool_choice(llm_config: LLMConfig) -> bool:
92
+ """Certain providers require the tool choice to be set to 'auto'."""
93
+
94
+ if "nebius.com" in llm_config.model_endpoint:
95
+ return True
96
+ if "together.ai" in llm_config.model_endpoint or "together.xyz" in llm_config.model_endpoint:
97
+ return True
98
+ # proxy also has this issue (FIXME check)
99
+ elif llm_config.model_endpoint == LETTA_MODEL_ENDPOINT:
100
+ return True
101
+ # same with vLLM (FIXME check)
102
+ elif llm_config.handle and "vllm" in llm_config.handle:
103
+ return True
104
+ else:
105
+ # will use "required" instead of "auto"
106
+ return False
107
+
108
+
78
109
  class OpenAIClient(LLMClientBase):
79
110
  def _prepare_client_kwargs(self, llm_config: LLMConfig) -> dict:
80
111
  api_key = None
@@ -136,7 +167,7 @@ class OpenAIClient(LLMClientBase):
  # TODO(matt) move into LLMConfig
  # TODO: This vllm checking is very brittle and is a patch at most
  tool_choice = None
- if llm_config.model_endpoint == LETTA_MODEL_ENDPOINT or (llm_config.handle and "vllm" in llm_config.handle):
+ if requires_auto_tool_choice(llm_config):
  tool_choice = "auto" # TODO change to "required" once proxy supports it
  elif tools:
  # only set if tools is non-Null
@@ -171,11 +202,12 @@ class OpenAIClient(LLMClientBase):
  if data.tools is not None and len(data.tools) > 0:
  # Convert to structured output style (which has 'strict' and no optionals)
  for tool in data.tools:
- try:
- structured_output_version = convert_to_structured_output(tool.function.model_dump())
- tool.function = FunctionSchema(**structured_output_version)
- except ValueError as e:
- logger.warning(f"Failed to convert tool function to structured output, tool={tool}, error={e}")
+ if supports_structured_output(llm_config):
+ try:
+ structured_output_version = convert_to_structured_output(tool.function.model_dump())
+ tool.function = FunctionSchema(**structured_output_version)
+ except ValueError as e:
+ logger.warning(f"Failed to convert tool function to structured output, tool={tool}, error={e}")

  return data.model_dump(exclude_unset=True)

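For providers that do support it, the structured output conversion marks the function schema strict and removes optional parameters. A rough sketch of what that transformation looks like on a plain JSON-schema dict (an illustration of the OpenAI strict-tools format in general, not of letta's convert_to_structured_output specifically):

    # Illustration: force a tool schema into a "strict" structured-output shape.
    def to_strict_schema(function_schema: dict) -> dict:
        schema = dict(function_schema)
        params = dict(schema.get("parameters", {"type": "object", "properties": {}}))
        params["additionalProperties"] = False
        params["required"] = list(params.get("properties", {}).keys())  # every property becomes required
        schema["parameters"] = params
        schema["strict"] = True
        return schema

    print(to_strict_schema({"name": "get_weather", "parameters": {"type": "object", "properties": {"city": {"type": "string"}}}}))
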
@@ -30,6 +30,7 @@ class Source(SqlalchemyBase, OrganizationMixin):

  name: Mapped[str] = mapped_column(doc="the name of the source, must be unique within the org", nullable=False)
  description: Mapped[str] = mapped_column(nullable=True, doc="a human-readable description of the source")
+ instructions: Mapped[str] = mapped_column(nullable=True, doc="instructions for how to use the source")
  embedding_config: Mapped[EmbeddingConfig] = mapped_column(EmbeddingConfigColumn, doc="Configuration settings for embedding.")
  metadata_: Mapped[Optional[dict]] = mapped_column(JSON, nullable=True, doc="metadata for the source.")

@@ -24,7 +24,6 @@ class LLMConfig(BaseModel):
  max_tokens (int): The maximum number of tokens to generate.
  """

- # TODO: 🤮 don't default to a vendor! bug city!
  model: str = Field(..., description="LLM model name. ")
  model_endpoint_type: Literal[
  "openai",
@@ -1,5 +1,5 @@
  import datetime
- from typing import Dict, List, Literal, Optional, Union
+ from typing import List, Literal, Optional, Union

  from pydantic import BaseModel

@@ -27,6 +27,7 @@ class LogProbToken(BaseModel):
  bytes: Optional[List[int]]


+ # Legacy?
  class MessageContentLogProb(BaseModel):
  token: str
  logprob: float
@@ -34,6 +35,25 @@ class MessageContentLogProb(BaseModel):
  top_logprobs: Optional[List[LogProbToken]]


+ class TopLogprob(BaseModel):
+ token: str
+ bytes: Optional[List[int]] = None
+ logprob: float
+
+
+ class ChatCompletionTokenLogprob(BaseModel):
+ token: str
+ bytes: Optional[List[int]] = None
+ logprob: float
+ top_logprobs: List[TopLogprob]
+
+
+ class ChoiceLogprobs(BaseModel):
+ content: Optional[List[ChatCompletionTokenLogprob]] = None
+
+ refusal: Optional[List[ChatCompletionTokenLogprob]] = None
+
+
  class Message(BaseModel):
  content: Optional[str] = None
  tool_calls: Optional[List[ToolCall]] = None
@@ -49,7 +69,7 @@ class Choice(BaseModel):
  finish_reason: str
  index: int
  message: Message
- logprobs: Optional[Dict[str, Union[List[MessageContentLogProb], None]]] = None
+ logprobs: Optional[ChoiceLogprobs] = None
  seed: Optional[int] = None # found in TogetherAI


@@ -134,7 +154,7 @@ class ChatCompletionResponse(BaseModel):
  class FunctionCallDelta(BaseModel):
  # arguments: Optional[str] = None
  name: Optional[str] = None
- arguments: str
+ arguments: Optional[str] = None
  # name: str


@@ -179,7 +199,7 @@ class ChunkChoice(BaseModel):
  finish_reason: Optional[str] = None # NOTE: when streaming will be null
  index: int
  delta: MessageDelta
- logprobs: Optional[Dict[str, Union[List[MessageContentLogProb], None]]] = None
+ logprobs: Optional[ChoiceLogprobs] = None


  class ChatCompletionChunkResponse(BaseModel):
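
The typed ChoiceLogprobs model mirrors the shape OpenAI returns for per-token log probabilities, so a raw logprobs payload can be validated directly instead of being held in an untyped dict. A small sketch, assuming the new models are exposed from letta.schemas.openai.chat_completion_response:

    # Hypothetical usage of the new logprobs models on an OpenAI-style payload.
    from letta.schemas.openai.chat_completion_response import ChoiceLogprobs  # assumed import path

    payload = {
        "content": [
            {
                "token": "Hello",
                "logprob": -0.012,
                "bytes": [72, 101, 108, 108, 111],
                "top_logprobs": [{"token": "Hello", "logprob": -0.012}],
            }
        ],
        "refusal": None,
    }
    logprobs = ChoiceLogprobs.model_validate(payload)
    print(logprobs.content[0].token, logprobs.content[0].logprob)
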
@@ -4,7 +4,7 @@ from typing import List, Literal, Optional

  from pydantic import BaseModel, Field, model_validator

- from letta.constants import LETTA_MODEL_ENDPOINT, LLM_MAX_TOKENS, MIN_CONTEXT_WINDOW
+ from letta.constants import DEFAULT_EMBEDDING_CHUNK_SIZE, LETTA_MODEL_ENDPOINT, LLM_MAX_TOKENS, MIN_CONTEXT_WINDOW
  from letta.llm_api.azure_openai import get_azure_chat_completions_endpoint, get_azure_embeddings_endpoint
  from letta.llm_api.azure_openai_constants import AZURE_MODEL_TO_CONTEXT_LENGTH
  from letta.schemas.embedding_config import EmbeddingConfig
@@ -57,7 +57,7 @@ class Provider(ProviderBase):
  """String representation of the provider for display purposes"""
  raise NotImplementedError

- def get_handle(self, model_name: str, is_embedding: bool = False) -> str:
+ def get_handle(self, model_name: str, is_embedding: bool = False, base_name: Optional[str] = None) -> str:
  """
  Get the handle for a model, with support for custom overrides.

@@ -68,11 +68,13 @@ class Provider(ProviderBase):
  Returns:
  str: The handle for the model.
  """
+ base_name = base_name if base_name else self.name
+
  overrides = EMBEDDING_HANDLE_OVERRIDES if is_embedding else LLM_HANDLE_OVERRIDES
- if self.name in overrides and model_name in overrides[self.name]:
- model_name = overrides[self.name][model_name]
+ if base_name in overrides and model_name in overrides[base_name]:
+ model_name = overrides[base_name][model_name]

- return f"{self.name}/{model_name}"
+ return f"{base_name}/{model_name}"


  def cast_to_subtype(self):
  match (self.provider_type):
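
With the optional base_name parameter, handles can be filed under a label other than the provider's own name; the OpenAI provider uses this to group proxy endpoints under openai-proxy. A standalone sketch of the lookup with a made-up overrides table:

    # Illustrative mirror of get_handle: overrides are keyed by base_name rather than self.name.
    LLM_HANDLE_OVERRIDES = {"openai": {"gpt-4o-2024-08-06": "gpt-4o"}}  # hypothetical override table

    def get_handle(provider_name: str, model_name: str, base_name: str = "") -> str:
        base_name = base_name or provider_name
        if base_name in LLM_HANDLE_OVERRIDES and model_name in LLM_HANDLE_OVERRIDES[base_name]:
            model_name = LLM_HANDLE_OVERRIDES[base_name][model_name]
        return f"{base_name}/{model_name}"

    assert get_handle("openai", "gpt-4o-2024-08-06") == "openai/gpt-4o"
    assert get_handle("openai", "some-proxy-model", base_name="openai-proxy") == "openai-proxy/some-proxy-model"
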
@@ -162,21 +164,34 @@ class OpenAIProvider(Provider):

  openai_check_valid_api_key(self.base_url, self.api_key)

- def list_llm_models(self) -> List[LLMConfig]:
+ def _get_models(self) -> List[dict]:
  from letta.llm_api.openai import openai_get_model_list

  # Some hardcoded support for OpenRouter (so that we only get models with tool calling support)...
  # See: https://openrouter.ai/docs/requests
  extra_params = {"supported_parameters": "tools"} if "openrouter.ai" in self.base_url else None
- response = openai_get_model_list(self.base_url, api_key=self.api_key, extra_params=extra_params)

- # TogetherAI's response is missing the 'data' field
- # assert "data" in response, f"OpenAI model query response missing 'data' field: {response}"
+ # Similar to Nebius
+ extra_params = {"verbose": True} if "nebius.com" in self.base_url else None
+
+ response = openai_get_model_list(
+ self.base_url,
+ api_key=self.api_key,
+ extra_params=extra_params,
+ # fix_url=True, # NOTE: make sure together ends with /v1
+ )
+
  if "data" in response:
  data = response["data"]
  else:
+ # TogetherAI's response is missing the 'data' field
  data = response

+ return data
+
+ def list_llm_models(self) -> List[LLMConfig]:
+ data = self._get_models()
+
  configs = []
  for model in data:
  assert "id" in model, f"OpenAI model missing 'id' field: {model}"
@@ -192,8 +207,8 @@ class OpenAIProvider(Provider):
  continue

  # TogetherAI includes the type, which we can use to filter out embedding models
- if self.base_url == "https://api.together.ai/v1":
- if "type" in model and model["type"] != "chat":
+ if "api.together.ai" in self.base_url or "api.together.xyz" in self.base_url:
+ if "type" in model and model["type"] not in ["chat", "language"]:
  continue

  # for TogetherAI, we need to skip the models that don't support JSON mode / function calling
@@ -207,14 +222,17 @@ class OpenAIProvider(Provider):
  # }
  if "config" not in model:
  continue
- if "chat_template" not in model["config"]:
- continue
- if model["config"]["chat_template"] is None:
- continue
- if "tools" not in model["config"]["chat_template"]:
+
+ if "nebius.com" in self.base_url:
+ # Nebius includes the type, which we can use to filter for text models
+ try:
+ model_type = model["architecture"]["modality"]
+ if model_type not in ["text->text", "text+image->text"]:
+ # print(f"Skipping model w/ modality {model_type}:\n{model}")
+ continue
+ except KeyError:
+ print(f"Couldn't access architecture type field, skipping model:\n{model}")
  continue
- # if "config" in data and "chat_template" in data["config"] and "tools" not in data["config"]["chat_template"]:
- # continue

  # for openai, filter models
  if self.base_url == "https://api.openai.com/v1":
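
Nebius model entries expose an architecture.modality field, which is what the new filter uses to keep chat-capable models and drop everything else. A small sketch of the same check against sample entries:

    # Illustration: keep only entries whose modality maps text (optionally with images) to text.
    TEXT_MODALITIES = ["text->text", "text+image->text"]

    def is_chat_model(model: dict) -> bool:
        try:
            return model["architecture"]["modality"] in TEXT_MODALITIES
        except KeyError:
            return False  # no architecture info, so skip the model

    assert is_chat_model({"id": "llama-70b", "architecture": {"modality": "text->text"}}) is True
    assert is_chat_model({"id": "bge-embed", "architecture": {"modality": "text->embedding"}}) is False
    assert is_chat_model({"id": "unknown"}) is False
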
@@ -235,13 +253,19 @@ class OpenAIProvider(Provider):
  if skip:
  continue

+ # set the handle to openai-proxy if the base URL isn't OpenAI
+ if self.base_url != "https://api.openai.com/v1":
+ handle = self.get_handle(model_name, base_name="openai-proxy")
+ else:
+ handle = self.get_handle(model_name)
+
  configs.append(
  LLMConfig(
  model=model_name,
  model_endpoint_type="openai",
  model_endpoint=self.base_url,
  context_window=context_window_size,
- handle=self.get_handle(model_name),
+ handle=handle,
  provider_name=self.name,
  provider_category=self.provider_category,
  )
@@ -256,33 +280,87 @@ class OpenAIProvider(Provider):

  def list_embedding_models(self) -> List[EmbeddingConfig]:

- # TODO: actually automatically list models
- return [
- EmbeddingConfig(
- embedding_model="text-embedding-ada-002",
- embedding_endpoint_type="openai",
- embedding_endpoint=self.base_url,
- embedding_dim=1536,
- embedding_chunk_size=300,
- handle=self.get_handle("text-embedding-ada-002", is_embedding=True),
- ),
- EmbeddingConfig(
- embedding_model="text-embedding-3-small",
- embedding_endpoint_type="openai",
- embedding_endpoint=self.base_url,
- embedding_dim=2000,
- embedding_chunk_size=300,
- handle=self.get_handle("text-embedding-3-small", is_embedding=True),
- ),
- EmbeddingConfig(
- embedding_model="text-embedding-3-large",
- embedding_endpoint_type="openai",
- embedding_endpoint=self.base_url,
- embedding_dim=2000,
- embedding_chunk_size=300,
- handle=self.get_handle("text-embedding-3-large", is_embedding=True),
- ),
- ]
+ if self.base_url == "https://api.openai.com/v1":
+ # TODO: actually automatically list models for OpenAI
+ return [
+ EmbeddingConfig(
+ embedding_model="text-embedding-ada-002",
+ embedding_endpoint_type="openai",
+ embedding_endpoint=self.base_url,
+ embedding_dim=1536,
+ embedding_chunk_size=300,
+ handle=self.get_handle("text-embedding-ada-002", is_embedding=True),
+ ),
+ EmbeddingConfig(
+ embedding_model="text-embedding-3-small",
+ embedding_endpoint_type="openai",
+ embedding_endpoint=self.base_url,
+ embedding_dim=2000,
+ embedding_chunk_size=300,
+ handle=self.get_handle("text-embedding-3-small", is_embedding=True),
+ ),
+ EmbeddingConfig(
+ embedding_model="text-embedding-3-large",
+ embedding_endpoint_type="openai",
+ embedding_endpoint=self.base_url,
+ embedding_dim=2000,
+ embedding_chunk_size=300,
+ handle=self.get_handle("text-embedding-3-large", is_embedding=True),
+ ),
+ ]
+
+ else:
+ # Actually attempt to list
+ data = self._get_models()
+
+ configs = []
+ for model in data:
+ assert "id" in model, f"Model missing 'id' field: {model}"
+ model_name = model["id"]
+
+ if "context_length" in model:
+ # Context length is returned in Nebius as "context_length"
+ context_window_size = model["context_length"]
+ else:
+ context_window_size = self.get_model_context_window_size(model_name)
+
+ # We need the context length for embeddings too
+ if not context_window_size:
+ continue
+
+ if "nebius.com" in self.base_url:
+ # Nebius includes the type, which we can use to filter for embedidng models
+ try:
+ model_type = model["architecture"]["modality"]
+ if model_type not in ["text->embedding"]:
+ # print(f"Skipping model w/ modality {model_type}:\n{model}")
+ continue
+ except KeyError:
+ print(f"Couldn't access architecture type field, skipping model:\n{model}")
+ continue
+
+ elif "together.ai" in self.base_url or "together.xyz" in self.base_url:
+ # TogetherAI includes the type, which we can use to filter for embedding models
+ if "type" in model and model["type"] not in ["embedding"]:
+ # print(f"Skipping model w/ modality {model_type}:\n{model}")
+ continue
+
+ else:
+ # For other providers we should skip by default, since we don't want to assume embeddings are supported
+ continue
+
+ configs.append(
+ EmbeddingConfig(
+ embedding_model=model_name,
+ embedding_endpoint_type=self.provider_type,
+ embedding_endpoint=self.base_url,
+ embedding_dim=context_window_size,
+ embedding_chunk_size=DEFAULT_EMBEDDING_CHUNK_SIZE,
+ handle=self.get_handle(model, is_embedding=True),
+ )
+ )
+
+ return configs

  def get_model_context_window_size(self, model_name: str):
  if model_name in LLM_MAX_TOKENS:
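
For non-OpenAI endpoints the embedding list is now derived from the same /models listing, with Together entries kept only when their type is 'embedding'. A short sketch of how one such entry maps onto the EmbeddingConfig fields used above (the model id and base URL are examples, and DEFAULT_EMBEDDING_CHUNK_SIZE is a letta constant whose value is not shown here):

    # Illustration: map a TogetherAI-style /models entry onto the EmbeddingConfig fields.
    entry = {"id": "togethercomputer/m2-bert-80M-8k-retrieval", "type": "embedding", "context_length": 8192}

    if entry.get("type") == "embedding":
        embedding_fields = {
            "embedding_model": entry["id"],
            "embedding_endpoint_type": "openai",                  # provider_type of an OpenAI-compatible endpoint
            "embedding_endpoint": "https://api.together.xyz/v1",  # example base URL
            "embedding_dim": entry["context_length"],             # note: the code above reuses the context length here
            # embedding_chunk_size would come from DEFAULT_EMBEDDING_CHUNK_SIZE in letta.constants
        }
        print(embedding_fields)
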
@@ -31,6 +31,7 @@ class Source(BaseSource):
  id: str = BaseSource.generate_id_field()
  name: str = Field(..., description="The name of the source.")
  description: Optional[str] = Field(None, description="The description of the source.")
+ instructions: Optional[str] = Field(None, description="Instructions for how to use the source.")
  embedding_config: EmbeddingConfig = Field(..., description="The embedding configuration used by the source.")
  organization_id: Optional[str] = Field(None, description="The ID of the organization that created the source.")
  metadata: Optional[dict] = Field(None, validation_alias="metadata_", description="Metadata associated with the source.")
@@ -59,6 +60,7 @@ class SourceCreate(BaseSource):

  # optional
  description: Optional[str] = Field(None, description="The description of the source.")
+ instructions: Optional[str] = Field(None, description="Instructions for how to use the source.")
  metadata: Optional[dict] = Field(None, description="Metadata associated with the source.")


@@ -69,5 +71,6 @@ class SourceUpdate(BaseSource):

  name: Optional[str] = Field(None, description="The name of the source.")
  description: Optional[str] = Field(None, description="The description of the source.")
+ instructions: Optional[str] = Field(None, description="Instructions for how to use the source.")
  metadata: Optional[dict] = Field(None, description="Metadata associated with the source.")
  embedding_config: Optional[EmbeddingConfig] = Field(None, description="The embedding configuration used by the source.")
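
The new optional instructions field flows through the create and update schemas, so it can be set like any other optional attribute. A minimal sketch, assuming the schemas live in letta.schemas.source and that all other SourceUpdate fields really are optional:

    # Hypothetical usage: attach usage instructions to an existing source via the update schema.
    from letta.schemas.source import SourceUpdate  # assumed import path

    update = SourceUpdate(instructions="Prefer this source for API reference questions; cite section headings.")
    print(update.model_dump(exclude_none=True))
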
@@ -333,6 +333,7 @@ def start_server(
  port: Optional[int] = None,
  host: Optional[str] = None,
  debug: bool = False,
+ reload: bool = False,
  ):
  """Convenience method to start the server from within Python"""
  if debug:
@@ -356,7 +357,7 @@ def start_server(
  host=host or "localhost",
  port=port or REST_DEFAULT_PORT,
  workers=settings.uvicorn_workers,
- reload=settings.uvicorn_reload,
+ reload=reload or settings.uvicorn_reload,
  timeout_keep_alive=settings.uvicorn_timeout_keep_alive,
  ssl_keyfile="certs/localhost-key.pem",
  ssl_certfile="certs/localhost.pem",
@@ -375,6 +376,6 @@ def start_server(
  host=host or "localhost",
  port=port or REST_DEFAULT_PORT,
  workers=settings.uvicorn_workers,
- reload=settings.uvicorn_reload,
+ reload=reload or settings.uvicorn_reload,
  timeout_keep_alive=settings.uvicorn_timeout_keep_alive,
  )
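
With the new reload argument, uvicorn auto-reload can be requested per call instead of only through the uvicorn_reload setting. A minimal sketch of the convenience entry point being used from Python (the import path and port are assumptions):

    # Hypothetical usage: start the REST server with auto-reload forced on.
    from letta.server.rest_api.app import start_server  # assumed import path

    start_server(host="localhost", port=8283, debug=True, reload=True)
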