letta-nightly 0.6.26.dev20250214013434__py3-none-any.whl → 0.6.27.dev20250215023154__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.

Note: this release of letta-nightly has been flagged as potentially problematic.

letta/__init__.py CHANGED
@@ -1,4 +1,4 @@
-__version__ = "0.6.26"
+__version__ = "0.6.27"
 
 # import clients
 from letta.client.client import LocalClient, RESTClient, create_client

letta/llm_api/google_vertex.py CHANGED
@@ -307,15 +307,27 @@ def google_vertex_chat_completions_request(
     """
 
     from google import genai
+    from google.genai.types import FunctionCallingConfig, FunctionCallingConfigMode, ToolConfig
 
     client = genai.Client(vertexai=True, project=project_id, location=region, http_options={"api_version": "v1"})
     # add dummy model messages to the end of the input
     if add_postfunc_model_messages:
         contents = add_dummy_model_messages(contents)
 
+    tool_config = ToolConfig(
+        function_calling_config=FunctionCallingConfig(
+            # ANY mode forces the model to predict only function calls
+            mode=FunctionCallingConfigMode.ANY,
+        )
+    )
+    config["tool_config"] = tool_config.model_dump()
+
     # make request to client
-    response = client.models.generate_content(model=model, contents=contents, config=config)
-    print(response)
+    response = client.models.generate_content(
+        model=model,
+        contents=contents,
+        config=config,
+    )
 
     # convert back response
     try:
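
For context on the tool_config addition above: in the google-genai SDK, FunctionCallingConfigMode.ANY restricts Gemini to emitting function calls instead of free-form text. The sketch below shows the same pattern in isolation; the project ID, region, model name, and the get_time tool are illustrative assumptions, not values taken from this release, and it uses the typed GenerateContentConfig rather than the dict-plus-model_dump() approach in the diff (the SDK accepts either).

# Sketch only (assumed usage, not code from this release): force function
# calling with the google-genai SDK the same way the diff above does.
from google import genai
from google.genai.types import (
    FunctionCallingConfig,
    FunctionCallingConfigMode,
    FunctionDeclaration,
    GenerateContentConfig,
    Tool,
    ToolConfig,
)

client = genai.Client(vertexai=True, project="my-gcp-project", location="us-central1")

# A minimal tool declaration; name and description are placeholders.
get_time = FunctionDeclaration(name="get_time", description="Return the current UTC time.")

config = GenerateContentConfig(
    tools=[Tool(function_declarations=[get_time])],
    tool_config=ToolConfig(
        # ANY mode makes the model respond with a function call rather than plain text.
        function_calling_config=FunctionCallingConfig(mode=FunctionCallingConfigMode.ANY),
    ),
)

response = client.models.generate_content(
    model="gemini-1.5-pro-002",
    contents="What time is it?",
    config=config,
)
print(response.function_calls)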

letta/llm_api/llm_api_tools.py CHANGED
@@ -151,7 +151,8 @@ def create(
     if function_call is None and functions is not None and len(functions) > 0:
         # force function calling for reliability, see https://platform.openai.com/docs/api-reference/chat/create#chat-create-tool_choice
         # TODO(matt) move into LLMConfig
-        if llm_config.model_endpoint == "https://inference.memgpt.ai":
+        # TODO: This vllm checking is very brittle and is a patch at most
+        if llm_config.model_endpoint == "https://inference.memgpt.ai" or (llm_config.handle and "vllm" in llm_config.handle):
             function_call = "auto"  # TODO change to "required" once proxy supports it
         else:
             function_call = "required"
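
The function_call value chosen here corresponds to the tool_choice parameter documented at the link in the diff, which is why vLLM-backed handles and the memgpt.ai proxy fall back to "auto" while everything else gets "required". A minimal sketch of what the forced request looks like with the openai client; the base_url, model name, and tool schema are assumptions for illustration only.

# Sketch only: a forced tool_choice on an OpenAI-compatible request.
# The endpoint, model, and tool schema below are placeholders.
from openai import OpenAI

client = OpenAI(base_url="http://localhost:8000/v1", api_key="not-needed")

tools = [
    {
        "type": "function",
        "function": {
            "name": "send_message",
            "description": "Send a message to the user.",
            "parameters": {
                "type": "object",
                "properties": {"message": {"type": "string"}},
                "required": ["message"],
            },
        },
    }
]

response = client.chat.completions.create(
    model="my-vllm-model",
    messages=[{"role": "user", "content": "Say hello."}],
    tools=tools,
    # "required" forces a tool call; the proxy/vLLM branch above uses "auto"
    # because, per the TODO in the diff, those backends do not support "required" yet.
    tool_choice="required",
)
print(response.choices[0].message.tool_calls)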

letta_nightly-0.6.26.dev20250214013434.dist-info/METADATA → letta_nightly-0.6.27.dev20250215023154.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: letta-nightly
-Version: 0.6.26.dev20250214013434
+Version: 0.6.27.dev20250215023154
 Summary: Create LLM agents with long-term memory and custom tools
 License: Apache License
 Author: Letta Team

letta_nightly-0.6.26.dev20250214013434.dist-info/RECORD → letta_nightly-0.6.27.dev20250215023154.dist-info/RECORD RENAMED
@@ -1,4 +1,4 @@
-letta/__init__.py,sha256=N01gwYd0I1HifmEloysc6D-it3aLYbloi3F676QkJ54,918
+letta/__init__.py,sha256=GFX4ppYbghJdXx6Fu9PxSfN1WaCjMRNiFKoz97vINbc,918
 letta/__main__.py,sha256=6Hs2PV7EYc5Tid4g4OtcLXhqVHiNYTGzSBdoOnW2HXA,29
 letta/agent.py,sha256=qdBP_rO-35W_m9K-HMl_ZZm803MgARAHaNEG4kjn4gU,59768
 letta/benchmark/benchmark.py,sha256=ebvnwfp3yezaXOQyGXkYCDYpsmre-b9hvNtnyx4xkG0,3701
@@ -43,9 +43,9 @@ letta/llm_api/azure_openai_constants.py,sha256=_f7NKjKBPxGPFQPfP1e0umHk4Jmf56qNj
 letta/llm_api/cohere.py,sha256=A5uUoxoRsXsaMDCIIWiVXeyCbiQ7gUfiASb8Ovv2I_o,14852
 letta/llm_api/google_ai.py,sha256=VnoxG6QYcwgFEbH8iJ8MHaMQrW4ROekZy6ZV5ZdHxzI,18000
 letta/llm_api/google_constants.py,sha256=ZdABT9l9l-qKcV2QCkVsv9kQbttx6JyIJoOWS8IMS5o,448
-letta/llm_api/google_vertex.py,sha256=7CGbJdpNyQIwTfKkeUdYr4qbZVa83qwd65-o-nLwJ14,13768
+letta/llm_api/google_vertex.py,sha256=Cqr73-jZJJvii1M_0QEmasNajOIJ5TDs5GabsCJjI04,14149
 letta/llm_api/helpers.py,sha256=8rM_grbeCIQN27Uh-adhmAFcg2NPFSg6SA9WmogoZzY,16231
-letta/llm_api/llm_api_tools.py,sha256=70lXa-5J8fAmkkXPoGOR0jmZb-vH6GENNJGU_Oit4NQ,21935
+letta/llm_api/llm_api_tools.py,sha256=Ss6RLmrNpqplHYVcl7QkK4xXeHZBAnzG1m0KTIqMW-c,22068
 letta/llm_api/mistral.py,sha256=fHdfD9ug-rQIk2qn8tRKay1U6w9maF11ryhKi91FfXM,1593
 letta/llm_api/openai.py,sha256=T69e4oveJw1IdzuONIaK4t1aRqXggTdfQ6n6eW0Uh8Q,20371
 letta/local_llm/README.md,sha256=hFJyw5B0TU2jrh9nb0zGZMgdH-Ei1dSRfhvPQG_NSoU,168
@@ -244,8 +244,8 @@ letta/streaming_interface.py,sha256=lo2VAQRUJOdWTijwnXuKOC9uejqr2siUAEmZiQUXkj8,
 letta/streaming_utils.py,sha256=jLqFTVhUL76FeOuYk8TaRQHmPTf3HSRc2EoJwxJNK6U,11946
 letta/system.py,sha256=dnOrS2FlRMwijQnOvfrky0Lg8wEw-FUq2zzfAJOUSKA,8477
 letta/utils.py,sha256=E1Kfbezh_yOivlr9RniZid_uWdZkedRPrH5Vd76-Fu4,31805
-letta_nightly-0.6.26.dev20250214013434.dist-info/LICENSE,sha256=mExtuZ_GYJgDEI38GWdiEYZizZS4KkVt2SF1g_GPNhI,10759
-letta_nightly-0.6.26.dev20250214013434.dist-info/METADATA,sha256=CF5zNI8oN7WXsOdm_lrJl519BO0eLSMuqPttQt1marY,22203
-letta_nightly-0.6.26.dev20250214013434.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
-letta_nightly-0.6.26.dev20250214013434.dist-info/entry_points.txt,sha256=2zdiyGNEZGV5oYBuS-y2nAAgjDgcC9yM_mHJBFSRt5U,40
-letta_nightly-0.6.26.dev20250214013434.dist-info/RECORD,,
+letta_nightly-0.6.27.dev20250215023154.dist-info/LICENSE,sha256=mExtuZ_GYJgDEI38GWdiEYZizZS4KkVt2SF1g_GPNhI,10759
+letta_nightly-0.6.27.dev20250215023154.dist-info/METADATA,sha256=-_GzfKK1-u753pau65KvNiX0gJx8faLFfctHsRmAFww,22203
+letta_nightly-0.6.27.dev20250215023154.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+letta_nightly-0.6.27.dev20250215023154.dist-info/entry_points.txt,sha256=2zdiyGNEZGV5oYBuS-y2nAAgjDgcC9yM_mHJBFSRt5U,40
+letta_nightly-0.6.27.dev20250215023154.dist-info/RECORD,,