letta-nightly 0.4.1.dev20241010104112__py3-none-any.whl → 0.4.1.dev20241011104054__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of letta-nightly might be problematic. Click here for more details.

letta/cli/cli.py CHANGED
@@ -14,7 +14,9 @@ from letta.constants import CLI_WARNING_PREFIX, LETTA_DIR
14
14
  from letta.local_llm.constants import ASSISTANT_MESSAGE_CLI_SYMBOL
15
15
  from letta.log import get_logger
16
16
  from letta.metadata import MetadataStore
17
+ from letta.schemas.embedding_config import EmbeddingConfig
17
18
  from letta.schemas.enums import OptionState
19
+ from letta.schemas.llm_config import LLMConfig
18
20
  from letta.schemas.memory import ChatMemory, Memory
19
21
  from letta.server.server import logger as server_logger
20
22
 
@@ -233,25 +235,46 @@ def run(
233
235
  # choose from list of llm_configs
234
236
  llm_configs = client.list_llm_configs()
235
237
  llm_options = [llm_config.model for llm_config in llm_configs]
238
+
239
+ # TODO move into LLMConfig as a class method?
240
+ def prettify_llm_config(llm_config: LLMConfig) -> str:
241
+ return f"{llm_config.model}" + f" ({llm_config.model_endpoint})" if llm_config.model_endpoint else ""
242
+
243
+ llm_choices = [questionary.Choice(title=prettify_llm_config(llm_config), value=llm_config) for llm_config in llm_configs]
244
+
236
245
  # select model
237
246
  if len(llm_options) == 0:
238
247
  raise ValueError("No LLM models found. Please enable a provider.")
239
248
  elif len(llm_options) == 1:
240
249
  llm_model_name = llm_options[0]
241
250
  else:
242
- llm_model_name = questionary.select("Select LLM model:", choices=llm_options).ask()
251
+ llm_model_name = questionary.select("Select LLM model:", choices=llm_choices).ask().model
243
252
  llm_config = [llm_config for llm_config in llm_configs if llm_config.model == llm_model_name][0]
244
253
 
245
254
  # choose from list of embedding configs
246
255
  embedding_configs = client.list_embedding_configs()
247
256
  embedding_options = [embedding_config.embedding_model for embedding_config in embedding_configs]
257
+
258
+ # TODO move into EmbeddingConfig as a class method?
259
+ def prettify_embed_config(embedding_config: EmbeddingConfig) -> str:
260
+ return (
261
+ f"{embedding_config.embedding_model}" + f" ({embedding_config.embedding_endpoint})"
262
+ if embedding_config.embedding_endpoint
263
+ else ""
264
+ )
265
+
266
+ embedding_choices = [
267
+ questionary.Choice(title=prettify_embed_config(embedding_config), value=embedding_config)
268
+ for embedding_config in embedding_configs
269
+ ]
270
+
248
271
  # select model
249
272
  if len(embedding_options) == 0:
250
273
  raise ValueError("No embedding models found. Please enable a provider.")
251
274
  elif len(embedding_options) == 1:
252
275
  embedding_model_name = embedding_options[0]
253
276
  else:
254
- embedding_model_name = questionary.select("Select embedding model:", choices=embedding_options).ask()
277
+ embedding_model_name = questionary.select("Select embedding model:", choices=embedding_choices).ask().embedding_model
255
278
  embedding_config = [
256
279
  embedding_config for embedding_config in embedding_configs if embedding_config.embedding_model == embedding_model_name
257
280
  ][0]
letta/constants.py CHANGED
@@ -75,6 +75,27 @@ NON_USER_MSG_PREFIX = "[This is an automated system message hidden from the user
75
75
  LLM_MAX_TOKENS = {
76
76
  "DEFAULT": 8192,
77
77
  ## OpenAI models: https://platform.openai.com/docs/models/overview
78
+ # "o1-preview
79
+ "chatgpt-4o-latest": 128000,
80
+ # "o1-preview-2024-09-12
81
+ "gpt-4o-2024-08-06": 128000,
82
+ "gpt-4-turbo-preview": 128000,
83
+ "gpt-4o": 128000,
84
+ "gpt-3.5-turbo-instruct": 16385,
85
+ "gpt-4-0125-preview": 128000,
86
+ "gpt-3.5-turbo-0125": 16385,
87
+ # "babbage-002": 128000,
88
+ # "davinci-002": 128000,
89
+ "gpt-4-turbo-2024-04-09": 128000,
90
+ # "gpt-4o-realtime-preview-2024-10-01
91
+ "gpt-4-turbo": 8192,
92
+ "gpt-4o-2024-05-13": 128000,
93
+ # "o1-mini
94
+ # "o1-mini-2024-09-12
95
+ # "gpt-3.5-turbo-instruct-0914
96
+ "gpt-4o-mini": 128000,
97
+ # "gpt-4o-realtime-preview
98
+ "gpt-4o-mini-2024-07-18": 128000,
78
99
  # gpt-4
79
100
  "gpt-4-1106-preview": 128000,
80
101
  "gpt-4": 8192,
letta/llm_api/openai.py CHANGED
@@ -41,7 +41,9 @@ from letta.utils import smart_urljoin
41
41
  OPENAI_SSE_DONE = "[DONE]"
42
42
 
43
43
 
44
- def openai_get_model_list(url: str, api_key: Union[str, None], fix_url: Optional[bool] = False) -> dict:
44
+ def openai_get_model_list(
45
+ url: str, api_key: Union[str, None], fix_url: Optional[bool] = False, extra_params: Optional[dict] = None
46
+ ) -> dict:
45
47
  """https://platform.openai.com/docs/api-reference/models/list"""
46
48
  from letta.utils import printd
47
49
 
@@ -60,7 +62,8 @@ def openai_get_model_list(url: str, api_key: Union[str, None], fix_url: Optional
60
62
 
61
63
  printd(f"Sending request to {url}")
62
64
  try:
63
- response = requests.get(url, headers=headers)
65
+ # TODO add query param "tool" to be true
66
+ response = requests.get(url, headers=headers, params=extra_params)
64
67
  response.raise_for_status() # Raises HTTPError for 4XX/5XX status
65
68
  response = response.json() # convert to dict from string
66
69
  printd(f"response = {response}")
letta/providers.py CHANGED
@@ -53,23 +53,40 @@ class LettaProvider(Provider):
53
53
  class OpenAIProvider(Provider):
54
54
  name: str = "openai"
55
55
  api_key: str = Field(..., description="API key for the OpenAI API.")
56
- base_url: str = "https://api.openai.com/v1"
56
+ base_url: str = Field(..., description="Base URL for the OpenAI API.")
57
57
 
58
58
  def list_llm_models(self) -> List[LLMConfig]:
59
59
  from letta.llm_api.openai import openai_get_model_list
60
60
 
61
- response = openai_get_model_list(self.base_url, api_key=self.api_key)
62
- model_options = [obj["id"] for obj in response["data"]]
61
+ # Some hardcoded support for OpenRouter (so that we only get models with tool calling support)...
62
+ # See: https://openrouter.ai/docs/requests
63
+ extra_params = {"supported_parameters": "tools"} if "openrouter.ai" in self.base_url else None
64
+ response = openai_get_model_list(self.base_url, api_key=self.api_key, extra_params=extra_params)
65
+
66
+ assert "data" in response, f"OpenAI model query response missing 'data' field: {response}"
63
67
 
64
68
  configs = []
65
- for model_name in model_options:
66
- context_window_size = self.get_model_context_window_size(model_name)
69
+ for model in response["data"]:
70
+ assert "id" in model, f"OpenAI model missing 'id' field: {model}"
71
+ model_name = model["id"]
72
+
73
+ if "context_length" in model:
74
+ # Context length is returned in OpenRouter as "context_length"
75
+ context_window_size = model["context_length"]
76
+ else:
77
+ context_window_size = self.get_model_context_window_size(model_name)
67
78
 
68
79
  if not context_window_size:
69
80
  continue
70
81
  configs.append(
71
82
  LLMConfig(model=model_name, model_endpoint_type="openai", model_endpoint=self.base_url, context_window=context_window_size)
72
83
  )
84
+
85
+ # for OpenAI, sort in reverse order
86
+ if self.base_url == "https://api.openai.com/v1":
87
+ # alphanumeric sort
88
+ configs.sort(key=lambda x: x.model, reverse=True)
89
+
73
90
  return configs
74
91
 
75
92
  def list_embedding_models(self) -> List[EmbeddingConfig]:
letta/server/server.py CHANGED
@@ -50,6 +50,7 @@ from letta.providers import (
50
50
  LettaProvider,
51
51
  OllamaProvider,
52
52
  OpenAIProvider,
53
+ Provider,
53
54
  VLLMProvider,
54
55
  )
55
56
  from letta.schemas.agent import AgentState, AgentType, CreateAgent, UpdateAgentState
@@ -261,9 +262,9 @@ class SyncServer(Server):
261
262
  self.add_default_tools(module_name="base")
262
263
 
263
264
  # collect providers (always has Letta as a default)
264
- self._enabled_providers = [LettaProvider()]
265
+ self._enabled_providers: List[Provider] = [LettaProvider()]
265
266
  if model_settings.openai_api_key:
266
- self._enabled_providers.append(OpenAIProvider(api_key=model_settings.openai_api_key))
267
+ self._enabled_providers.append(OpenAIProvider(api_key=model_settings.openai_api_key, base_url=model_settings.openai_api_base))
267
268
  if model_settings.anthropic_api_key:
268
269
  self._enabled_providers.append(AnthropicProvider(api_key=model_settings.anthropic_api_key))
269
270
  if model_settings.ollama_base_url:
letta/settings.py CHANGED
@@ -11,7 +11,7 @@ class ModelSettings(BaseSettings):
11
11
 
12
12
  # openai
13
13
  openai_api_key: Optional[str] = None
14
- # TODO: provide overriding BASE_URL?
14
+ openai_api_base: Optional[str] = "https://api.openai.com/v1"
15
15
 
16
16
  # groq
17
17
  groq_api_key: Optional[str] = None
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: letta-nightly
3
- Version: 0.4.1.dev20241010104112
3
+ Version: 0.4.1.dev20241011104054
4
4
  Summary: Create LLM agents with long-term memory and custom tools
5
5
  License: Apache License
6
6
  Author: Letta Team
@@ -9,7 +9,7 @@ letta/agent_store/qdrant.py,sha256=qIEJhXJb6GzcT4wp8iV5Ox5W1CFMvcPViTI4HLSh59E,7
9
9
  letta/agent_store/storage.py,sha256=QWrPdIEJCnsPg1xnPrG1xbOXmbjpz37ZNhvuH52M7A8,6642
10
10
  letta/benchmark/benchmark.py,sha256=ebvnwfp3yezaXOQyGXkYCDYpsmre-b9hvNtnyx4xkG0,3701
11
11
  letta/benchmark/constants.py,sha256=aXc5gdpMGJT327VuxsT5FngbCK2J41PQYeICBO7g_RE,536
12
- letta/cli/cli.py,sha256=-gJtASac1OJyboFzXXZtY2MH5DPylYYk3ZO8zV519Tc,15870
12
+ letta/cli/cli.py,sha256=1r_UlOonkrBXd6SW3KMob6uy4iUnxVs1jQmfKK-ylZw,16912
13
13
  letta/cli/cli_config.py,sha256=G7QqPNTtlQ4TdrXZrrFFGblZEhnkyrqN1Cl5z415C-g,8689
14
14
  letta/cli/cli_load.py,sha256=aVlGWiNEUs_eG793HLl7cES-dEIuA1CJfZpT1Cm8Uo4,4591
15
15
  letta/client/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -18,7 +18,7 @@ letta/client/client.py,sha256=bPvSQrbym4xXZu9EfEbX02fpkNVxFBpKoyzK9PFwykE,84515
18
18
  letta/client/streaming.py,sha256=bfWlUu7z7EoPfKxBqIarYxGKyrL7Pj79BlliToqcCgI,4592
19
19
  letta/client/utils.py,sha256=AQWl2q11AzSjd1Y7slIENoZ6fO1YW1JSv1qw0fPt57k,2419
20
20
  letta/config.py,sha256=j2I90fOh9d9__kOYObwTDLbvVwYR50rIql5nzrvREKg,19161
21
- letta/constants.py,sha256=VV6T8O4w4ju8q5CrPCbvPwHlUTltkeFji-r7hz8LTIw,5930
21
+ letta/constants.py,sha256=PAvnF4l9hDuWSrPSKVoXhwjWb6V282dI43EOOpKZaFE,6572
22
22
  letta/credentials.py,sha256=D9mlcPsdDWlIIXQQD8wSPE9M_QvsRrb0p3LB5i9OF5Q,5806
23
23
  letta/data_sources/connectors.py,sha256=E2rJNqVT4WEvxBqOQl0YgNKa_JQXkG0h1luw_XLcTis,10232
24
24
  letta/embeddings.py,sha256=ayAMxW6RUK1RUpLsDiJCG1oY2H6fgricaoqMa4GBjRE,8170
@@ -41,7 +41,7 @@ letta/llm_api/cohere.py,sha256=vDRd-SUGp1t_JUIdwC3RkIhwMl0OY7n-tAU9uPORYkY,14826
41
41
  letta/llm_api/google_ai.py,sha256=3xZ074nSOCC22c15yerA5ngWzh0ex4wxeI-6faNbHPE,17708
42
42
  letta/llm_api/helpers.py,sha256=Qe1YC36QjjOKE-Xh1Ss3dhMNcWergOK_MpG9xdtN9CM,9519
43
43
  letta/llm_api/llm_api_tools.py,sha256=8HndYHAH6ENL5vFEYn2px6CjZdx3ttCGxbEtRfK2RAY,15237
44
- letta/llm_api/openai.py,sha256=3C_1PjyBcLDraQur6jBzEONqANRYmL78Ym1SaE6LgqE,21469
44
+ letta/llm_api/openai.py,sha256=gqFIyy7nc254RJddbTs6CXfB_bmABote18KZOePuOdI,21582
45
45
  letta/local_llm/README.md,sha256=hFJyw5B0TU2jrh9nb0zGZMgdH-Ei1dSRfhvPQG_NSoU,168
46
46
  letta/local_llm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
47
47
  letta/local_llm/chat_completion_proxy.py,sha256=PXgNveahts5DbZ7GVcPShxmrDKropL81PY2JHc31yAA,13091
@@ -106,7 +106,7 @@ letta/prompts/system/memgpt_doc.txt,sha256=AsT55NOORoH-K-p0fxklrDRZ3qHs4MIKMuR-M
106
106
  letta/prompts/system/memgpt_gpt35_extralong.txt,sha256=FheNhYoIzNz6qnJKhVquZVSMj3HduC48reFaX7Pf7ig,5046
107
107
  letta/prompts/system/memgpt_intuitive_knowledge.txt,sha256=sA7c3urYqREVnSBI81nTGImXAekqC0Fxc7RojFqud1g,2966
108
108
  letta/prompts/system/memgpt_modified_chat.txt,sha256=HOaPVurEftD8KsuwsclDgE2afIfklMjxhuSO96q1-6I,4656
109
- letta/providers.py,sha256=nC0xGWEDIpnZz_tiFMswrko9HFSUQQJO8o9jRxGStb0,13579
109
+ letta/providers.py,sha256=UTgD9VhH0CEszW7sTMvfTnHOpDCd1m1RmgoI6Pr3Vy0,14444
110
110
  letta/pytest.ini,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
111
111
  letta/schemas/agent.py,sha256=ztnUqdhY9V3g0jsbTjF1ypKPC1tZx4QVFaRuLAOXNSA,6230
112
112
  letta/schemas/api_key.py,sha256=u07yzzMn-hBAHZIIKbWY16KsgiFjSNR8lAghpMUo3_4,682
@@ -166,7 +166,7 @@ letta/server/rest_api/routers/v1/tools.py,sha256=MEhxu-zMS2ff_wwcRpMuQyWA71w_3BJ
166
166
  letta/server/rest_api/routers/v1/users.py,sha256=Y2rDvHOG1B5FLSOjutY3R22vt48IngbZ-9h8CohG5rc,3378
167
167
  letta/server/rest_api/static_files.py,sha256=NG8sN4Z5EJ8JVQdj19tkFa9iQ1kBPTab9f_CUxd_u4Q,3143
168
168
  letta/server/rest_api/utils.py,sha256=Fc2ZGKzLaBa2sEtSTVjJ8D5M0xIwsWC0CVAOIJaD3rY,2176
169
- letta/server/server.py,sha256=Vac_o-C1l5wOybARMhmuU4Q02rl7YJPB51rjLA0cL9I,81970
169
+ letta/server/server.py,sha256=Vjc6-GdvU5n-VmvFm9rxvD_ahBsz1BYFSbDxOQ9g1JI,82041
170
170
  letta/server/startup.sh,sha256=jeGV7B_PS0hS-tT6o6GpACrUbV9WV1NI2L9aLoUDDtc,311
171
171
  letta/server/static_files/assets/index-3ab03d5b.css,sha256=OrA9W4iKJ5h2Wlr7GwdAT4wow0CM8hVit1yOxEL49Qw,54295
172
172
  letta/server/static_files/assets/index-9a9c449b.js,sha256=qoWUq6_kuLhE9NFkNeCBptgq-oERW46r0tB3JlWe_qc,1818951
@@ -179,12 +179,12 @@ letta/server/ws_api/example_client.py,sha256=95AA5UFgTlNJ0FUQkLxli8dKNx48MNm3eWG
179
179
  letta/server/ws_api/interface.py,sha256=TWl9vkcMCnLsUtgsuENZ-ku2oMDA-OUTzLh_yNRoMa4,4120
180
180
  letta/server/ws_api/protocol.py,sha256=M_-gM5iuDBwa1cuN2IGNCG5GxMJwU2d3XW93XALv9s8,1821
181
181
  letta/server/ws_api/server.py,sha256=C2Kv48PCwl46DQFb0ZP30s86KJLQ6dZk2AhWQEZn9pY,6004
182
- letta/settings.py,sha256=a1dN-ntNXM46IuF-ITG9u881aLdESfNGWl8_uBYSH20,2677
182
+ letta/settings.py,sha256=nRtdDiCV-ffIPobpTQMiM50fHRWy0DoX3D1VveaSo_0,2701
183
183
  letta/streaming_interface.py,sha256=_FPUWy58j50evHcpXyd7zB1wWqeCc71NCFeWh_TBvnw,15736
184
184
  letta/system.py,sha256=buKYPqG5n2x41hVmWpu6JUpyd7vTWED9Km2_M7dLrvk,6960
185
185
  letta/utils.py,sha256=neUs7mxNfndzRL5XUxerr8Lic6w7qnyyvf8FBwMnyWw,30852
186
- letta_nightly-0.4.1.dev20241010104112.dist-info/LICENSE,sha256=mExtuZ_GYJgDEI38GWdiEYZizZS4KkVt2SF1g_GPNhI,10759
187
- letta_nightly-0.4.1.dev20241010104112.dist-info/METADATA,sha256=9yfZIXzwV7Roc1ajgsVQT6PkEN1q2TBKODtjJrMyVeo,5967
188
- letta_nightly-0.4.1.dev20241010104112.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
189
- letta_nightly-0.4.1.dev20241010104112.dist-info/entry_points.txt,sha256=2zdiyGNEZGV5oYBuS-y2nAAgjDgcC9yM_mHJBFSRt5U,40
190
- letta_nightly-0.4.1.dev20241010104112.dist-info/RECORD,,
186
+ letta_nightly-0.4.1.dev20241011104054.dist-info/LICENSE,sha256=mExtuZ_GYJgDEI38GWdiEYZizZS4KkVt2SF1g_GPNhI,10759
187
+ letta_nightly-0.4.1.dev20241011104054.dist-info/METADATA,sha256=fqSS3tY0N8sjslAt2u0MFQv8DPLhVKymVktdjs__DL0,5967
188
+ letta_nightly-0.4.1.dev20241011104054.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
189
+ letta_nightly-0.4.1.dev20241011104054.dist-info/entry_points.txt,sha256=2zdiyGNEZGV5oYBuS-y2nAAgjDgcC9yM_mHJBFSRt5U,40
190
+ letta_nightly-0.4.1.dev20241011104054.dist-info/RECORD,,