letta-nightly 0.6.12.dev20250122104013__py3-none-any.whl → 0.6.14.dev20250123041709__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of letta-nightly has been flagged as potentially problematic; consult the package's registry listing for the advisory details.

Files changed (61)
  1. letta/__init__.py +2 -2
  2. letta/agent.py +69 -100
  3. letta/chat_only_agent.py +1 -1
  4. letta/client/client.py +169 -149
  5. letta/constants.py +1 -8
  6. letta/data_sources/connectors.py +1 -1
  7. letta/functions/helpers.py +29 -4
  8. letta/functions/schema_generator.py +55 -0
  9. letta/llm_api/helpers.py +51 -1
  10. letta/memory.py +9 -7
  11. letta/orm/agent.py +2 -2
  12. letta/orm/block.py +3 -1
  13. letta/orm/custom_columns.py +5 -4
  14. letta/orm/enums.py +1 -0
  15. letta/orm/message.py +2 -2
  16. letta/orm/sqlalchemy_base.py +5 -0
  17. letta/schemas/agent.py +13 -13
  18. letta/schemas/block.py +2 -2
  19. letta/schemas/environment_variables.py +1 -1
  20. letta/schemas/job.py +1 -1
  21. letta/schemas/letta_base.py +6 -0
  22. letta/schemas/letta_message.py +6 -6
  23. letta/schemas/memory.py +3 -2
  24. letta/schemas/message.py +21 -13
  25. letta/schemas/passage.py +1 -1
  26. letta/schemas/source.py +4 -4
  27. letta/schemas/tool.py +38 -43
  28. letta/server/rest_api/app.py +1 -16
  29. letta/server/rest_api/routers/v1/agents.py +95 -118
  30. letta/server/rest_api/routers/v1/blocks.py +8 -46
  31. letta/server/rest_api/routers/v1/jobs.py +4 -4
  32. letta/server/rest_api/routers/v1/providers.py +2 -2
  33. letta/server/rest_api/routers/v1/runs.py +6 -6
  34. letta/server/rest_api/routers/v1/sources.py +8 -38
  35. letta/server/rest_api/routers/v1/tags.py +1 -1
  36. letta/server/rest_api/routers/v1/tools.py +6 -24
  37. letta/server/server.py +6 -6
  38. letta/services/agent_manager.py +43 -9
  39. letta/services/block_manager.py +3 -3
  40. letta/services/job_manager.py +5 -3
  41. letta/services/organization_manager.py +1 -1
  42. letta/services/passage_manager.py +3 -3
  43. letta/services/provider_manager.py +2 -2
  44. letta/services/sandbox_config_manager.py +2 -2
  45. letta/services/source_manager.py +3 -3
  46. letta/services/tool_execution_sandbox.py +3 -1
  47. letta/services/tool_manager.py +8 -3
  48. letta/services/user_manager.py +2 -2
  49. letta/settings.py +29 -0
  50. letta/system.py +2 -2
  51. {letta_nightly-0.6.12.dev20250122104013.dist-info → letta_nightly-0.6.14.dev20250123041709.dist-info}/METADATA +1 -1
  52. {letta_nightly-0.6.12.dev20250122104013.dist-info → letta_nightly-0.6.14.dev20250123041709.dist-info}/RECORD +55 -61
  53. letta/server/rest_api/routers/openai/__init__.py +0 -0
  54. letta/server/rest_api/routers/openai/assistants/__init__.py +0 -0
  55. letta/server/rest_api/routers/openai/assistants/assistants.py +0 -115
  56. letta/server/rest_api/routers/openai/assistants/schemas.py +0 -115
  57. letta/server/rest_api/routers/openai/chat_completions/__init__.py +0 -0
  58. letta/server/rest_api/routers/openai/chat_completions/chat_completions.py +0 -120
  59. {letta_nightly-0.6.12.dev20250122104013.dist-info → letta_nightly-0.6.14.dev20250123041709.dist-info}/LICENSE +0 -0
  60. {letta_nightly-0.6.12.dev20250122104013.dist-info → letta_nightly-0.6.14.dev20250123041709.dist-info}/WHEEL +0 -0
  61. {letta_nightly-0.6.12.dev20250122104013.dist-info → letta_nightly-0.6.14.dev20250123041709.dist-info}/entry_points.txt +0 -0
Deleted file: letta/server/rest_api/routers/openai/chat_completions/chat_completions.py
@@ -1,120 +0,0 @@
1
- import json
2
- from typing import TYPE_CHECKING, Optional
3
-
4
- from fastapi import APIRouter, Body, Depends, Header, HTTPException
5
-
6
- from letta.schemas.letta_message import LettaMessage, ToolCall
7
- from letta.schemas.openai.chat_completion_request import ChatCompletionRequest
8
- from letta.schemas.openai.chat_completion_response import ChatCompletionResponse, Choice, Message, UsageStatistics
9
-
10
- # TODO this belongs in a controller!
11
- from letta.server.rest_api.utils import get_letta_server
12
-
13
- if TYPE_CHECKING:
14
- pass
15
-
16
- from letta.server.server import SyncServer
17
- from letta.utils import get_utc_time
18
-
19
- router = APIRouter(prefix="/v1/chat/completions", tags=["chat_completions"])
20
-
21
-
22
- @router.post("/", response_model=ChatCompletionResponse)
23
- async def create_chat_completion(
24
- completion_request: ChatCompletionRequest = Body(...),
25
- server: "SyncServer" = Depends(get_letta_server),
26
- user_id: Optional[str] = Header(None, alias="user_id"), # Extract user_id from header, default to None if not present
27
- ):
28
- """Send a message to a Letta agent via a /chat/completions completion_request
29
- The bearer token will be used to identify the user.
30
- The 'user' field in the completion_request should be set to the agent ID.
31
- """
32
- actor = server.user_manager.get_user_or_default(user_id=user_id)
33
-
34
- agent_id = completion_request.user
35
- if agent_id is None:
36
- raise HTTPException(status_code=400, detail="Must pass agent_id in the 'user' field")
37
-
38
- messages = completion_request.messages
39
- if messages is None:
40
- raise HTTPException(status_code=400, detail="'messages' field must not be empty")
41
- if len(messages) > 1:
42
- raise HTTPException(status_code=400, detail="'messages' field must be a list of length 1")
43
- if messages[0].role != "user":
44
- raise HTTPException(status_code=400, detail="'messages[0].role' must be a 'user'")
45
-
46
- input_message = completion_request.messages[0]
47
- if completion_request.stream:
48
- print("Starting streaming OpenAI proxy response")
49
-
50
- # TODO(charles) support multimodal parts
51
- assert isinstance(input_message.content, str)
52
-
53
- return await server.send_message_to_agent(
54
- agent_id=agent_id,
55
- actor=actor,
56
- message=input_message.content, # TODO: This is broken
57
- # Turn streaming ON
58
- stream_steps=True,
59
- stream_tokens=True,
60
- # Turn on ChatCompletion mode (eg remaps send_message to content)
61
- chat_completion_mode=True,
62
- )
63
-
64
- else:
65
- print("Starting non-streaming OpenAI proxy response")
66
-
67
- # TODO(charles) support multimodal parts
68
- assert isinstance(input_message.content, str)
69
-
70
- response_messages = await server.send_message_to_agent(
71
- agent_id=agent_id,
72
- actor=actor,
73
- message=input_message.content, # TODO: This is broken
74
- # Turn streaming OFF
75
- stream_steps=False,
76
- stream_tokens=False,
77
- )
78
- # print(response_messages)
79
-
80
- # Concatenate all send_message outputs together
81
- id = ""
82
- visible_message_str = ""
83
- created_at = None
84
- for letta_msg in response_messages.messages:
85
- assert isinstance(letta_msg, LettaMessage)
86
- if isinstance(letta_msg, ToolCall):
87
- if letta_msg.name and letta_msg.name == "send_message":
88
- try:
89
- letta_function_call_args = json.loads(letta_msg.arguments)
90
- visible_message_str += letta_function_call_args["message"]
91
- id = letta_msg.id
92
- created_at = letta_msg.date
93
- except:
94
- print(f"Failed to parse Letta message: {str(letta_msg)}")
95
- else:
96
- print(f"Skipping function_call: {str(letta_msg)}")
97
- else:
98
- print(f"Skipping message: {str(letta_msg)}")
99
-
100
- response = ChatCompletionResponse(
101
- id=id,
102
- created=created_at if created_at else get_utc_time(),
103
- choices=[
104
- Choice(
105
- finish_reason="stop",
106
- index=0,
107
- message=Message(
108
- role="assistant",
109
- content=visible_message_str,
110
- ),
111
- )
112
- ],
113
- # TODO add real usage
114
- usage=UsageStatistics(
115
- completion_tokens=0,
116
- prompt_tokens=0,
117
- total_tokens=0,
118
- ),
119
- )
120
- return response