letta-nightly 0.6.46.dev20250330104049__py3-none-any.whl → 0.6.47.dev20250331104251__py3-none-any.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
Potentially problematic release: this version of letta-nightly might be problematic.
- letta/__init__.py +1 -1
- letta/errors.py +12 -0
- letta/llm_api/anthropic_client.py +14 -3
- letta/llm_api/llm_client_base.py +1 -0
- letta/schemas/message.py +1 -1
- {letta_nightly-0.6.46.dev20250330104049.dist-info → letta_nightly-0.6.47.dev20250331104251.dist-info}/METADATA +1 -1
- {letta_nightly-0.6.46.dev20250330104049.dist-info → letta_nightly-0.6.47.dev20250331104251.dist-info}/RECORD +10 -10
- {letta_nightly-0.6.46.dev20250330104049.dist-info → letta_nightly-0.6.47.dev20250331104251.dist-info}/LICENSE +0 -0
- {letta_nightly-0.6.46.dev20250330104049.dist-info → letta_nightly-0.6.47.dev20250331104251.dist-info}/WHEEL +0 -0
- {letta_nightly-0.6.46.dev20250330104049.dist-info → letta_nightly-0.6.47.dev20250331104251.dist-info}/entry_points.txt +0 -0
letta/__init__.py
CHANGED
letta/errors.py
CHANGED
@@ -70,6 +70,14 @@ class LLMRateLimitError(LLMError):
     """Error when rate limited by LLM service"""
 
 
+class LLMBadRequestError(LLMError):
+    """Error when LLM service cannot process request"""
+
+
+class LLMAuthenticationError(LLMError):
+    """Error when authentication fails with LLM service"""
+
+
 class LLMPermissionDeniedError(LLMError):
     """Error when permission is denied by LLM service"""
 
@@ -82,6 +90,10 @@ class LLMUnprocessableEntityError(LLMError):
     """Error when request is well-formed but semantically invalid"""
 
 
+class LLMServerError(LLMError):
+    """Error when LLM service encounters an internal error"""
+
+
 class BedrockPermissionError(LettaError):
     """Exception raised for errors in the Bedrock permission process."""
 
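For orientation, a minimal sketch of how the new exception hierarchy could be used by calling code; the retry helper below is hypothetical and not part of this release:

```python
import time

from letta.errors import (
    LLMAuthenticationError,
    LLMBadRequestError,
    LLMRateLimitError,
    LLMServerError,
)


def call_with_retries(send_request, max_attempts: int = 3):
    """Hypothetical helper: retry only failures that are plausibly transient."""
    for attempt in range(1, max_attempts + 1):
        try:
            return send_request()
        except (LLMRateLimitError, LLMServerError):
            # Rate limits and provider-side errors may clear on a later attempt.
            if attempt == max_attempts:
                raise
            time.sleep(2**attempt)
        except (LLMBadRequestError, LLMAuthenticationError):
            # Malformed requests and bad credentials will not fix themselves; fail fast.
            raise
```

The apparent point of splitting out LLMBadRequestError, LLMAuthenticationError, and LLMServerError is that callers can distinguish retryable from non-retryable failures without string-matching provider messages.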
letta/llm_api/anthropic_client.py
CHANGED
@@ -5,6 +5,17 @@ from typing import List, Optional, Union
 import anthropic
 from anthropic.types import Message as AnthropicMessage
 
+from letta.errors import (
+    ErrorCode,
+    LLMAuthenticationError,
+    LLMBadRequestError,
+    LLMConnectionError,
+    LLMNotFoundError,
+    LLMPermissionDeniedError,
+    LLMRateLimitError,
+    LLMServerError,
+    LLMUnprocessableEntityError,
+)
 from letta.helpers.datetime_helpers import get_utc_time
 from letta.llm_api.helpers import add_inner_thoughts_to_functions, unpack_all_inner_thoughts_from_kwargs
 from letta.llm_api.llm_client_base import LLMClientBase
@@ -104,14 +115,14 @@ class AnthropicClient(LLMClientBase):
        data["messages"] = [
            m.to_anthropic_dict(
                inner_thoughts_xml_tag=inner_thoughts_xml_tag,
-               put_inner_thoughts_in_kwargs=self.llm_config.put_inner_thoughts_in_kwargs,
+               put_inner_thoughts_in_kwargs=bool(self.llm_config.put_inner_thoughts_in_kwargs),
            )
            for m in messages
        ]
 
        # Move 'system' to the top level
        if data["messages"][0]["role"] != "system":
-           raise RuntimeError(f
+           raise RuntimeError(f'First message is not a system message, instead has role {data["messages"][0]["role"]}')
 
        data["system"] = data["messages"][0]["content"]
        data["messages"] = data["messages"][1:]
@@ -241,7 +252,7 @@ class AnthropicClient(LLMClientBase):
        response = AnthropicMessage(**response_data)
        prompt_tokens = response.usage.input_tokens
        completion_tokens = response.usage.output_tokens
-       finish_reason = remap_finish_reason(response.stop_reason)
+       finish_reason = remap_finish_reason(str(response.stop_reason))
 
        content = None
        reasoning_content = None
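The hunks above only show the new imports from letta.errors; the code that uses them falls outside the visible context. A plausible use, sketched here as an assumption rather than copied from the package, is translating anthropic SDK exceptions into Letta's typed errors (the mapping, the helper name, and the error-constructor call are all assumed):

```python
import anthropic

from letta.errors import (
    LLMAuthenticationError,
    LLMBadRequestError,
    LLMConnectionError,
    LLMNotFoundError,
    LLMPermissionDeniedError,
    LLMRateLimitError,
    LLMServerError,
    LLMUnprocessableEntityError,
)

# Hypothetical mapping from anthropic SDK exceptions to Letta's error types.
_ANTHROPIC_TO_LETTA = [
    (anthropic.APIConnectionError, LLMConnectionError),
    (anthropic.BadRequestError, LLMBadRequestError),
    (anthropic.AuthenticationError, LLMAuthenticationError),
    (anthropic.PermissionDeniedError, LLMPermissionDeniedError),
    (anthropic.NotFoundError, LLMNotFoundError),
    (anthropic.UnprocessableEntityError, LLMUnprocessableEntityError),
    (anthropic.RateLimitError, LLMRateLimitError),
    (anthropic.InternalServerError, LLMServerError),
]


def translate_anthropic_error(e: Exception) -> Exception:
    """Return a matching Letta error, or the original exception if none fits."""
    for sdk_error, letta_error in _ANTHROPIC_TO_LETTA:
        if isinstance(e, sdk_error):
            # Assumes the Letta error constructors accept a message string.
            return letta_error(str(e))
    return e
```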
letta/llm_api/llm_client_base.py
CHANGED
@@ -4,6 +4,7 @@ from typing import List, Optional, Union
 from openai import AsyncStream, Stream
 from openai.types.chat.chat_completion_chunk import ChatCompletionChunk
 
+from letta.errors import LLMError
 from letta.schemas.llm_config import LLMConfig
 from letta.schemas.message import Message
 from letta.schemas.openai.chat_completion_response import ChatCompletionResponse
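Only the LLMError import is added here. One illustrative reason for importing it in the base class, offered purely as an assumption, is to let a shared request path re-raise provider-specific failures as the common typed error so callers only have to handle LLMError; the helper below is a sketch, not code from the file:

```python
from typing import Callable, TypeVar

from letta.errors import LLMError

T = TypeVar("T")


def send_with_translation(request_fn: Callable[[], T], translate: Callable[[Exception], Exception]) -> T:
    """Illustrative shared request path; `translate` maps SDK exceptions to Letta errors."""
    try:
        return request_fn()
    except Exception as e:
        translated = translate(e)
        if isinstance(translated, LLMError):
            raise translated from e
        raise
```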
letta/schemas/message.py
CHANGED
@@ -681,7 +681,7 @@ class Message(BaseMessage):
            user_system_event = add_xml_tag(string=f"SYSTEM ALERT: {text_content}", xml_tag="event")
            anthropic_message = {
                "content": user_system_event,
-               "role": "
+               "role": "system",
            }
 
        elif self.role == "user":
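Read together with the anthropic_client.py hunks above, the intent appears to be that the first converted message now carries role "system" (the replaced value is truncated in this diff view), and the client then hoists it into Anthropic's top-level system field. A small self-contained sketch of that hoisting step, with invented message contents:

```python
# Illustrative payload shaped like the output of Message.to_anthropic_dict();
# the message contents are made up for the example.
data = {
    "messages": [
        {"role": "system", "content": "<event>SYSTEM ALERT: heartbeat</event>"},
        {"role": "user", "content": "What time is it?"},
    ]
}

# Anthropic's Messages API takes the system prompt as a top-level field,
# so the leading system message is moved out of the message list.
if data["messages"][0]["role"] != "system":
    raise RuntimeError(f'First message is not a system message, instead has role {data["messages"][0]["role"]}')

data["system"] = data["messages"][0]["content"]
data["messages"] = data["messages"][1:]
print(data)
```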
{letta_nightly-0.6.46.dev20250330104049.dist-info → letta_nightly-0.6.47.dev20250331104251.dist-info}/RECORD
CHANGED
@@ -1,4 +1,4 @@
-letta/__init__.py,sha256=
+letta/__init__.py,sha256=0SeBIgkvP2V3VVcs24897GJNr-gn-hZFpWzXLWfS0uY,918
 letta/__main__.py,sha256=6Hs2PV7EYc5Tid4g4OtcLXhqVHiNYTGzSBdoOnW2HXA,29
 letta/agent.py,sha256=swKmDf1bR1hD8VAIEl74-VgiQym0IAPxL4u9F7pvtBQ,69450
 letta/agents/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -22,7 +22,7 @@ letta/data_sources/connectors.py,sha256=R2AssXpqS7wN6VI8AfxvqaZs5S1ZACc4E_FewmR9
 letta/data_sources/connectors_helper.py,sha256=oQpVlc-BjSz9sTZ7sp4PsJSXJbBKpZPi3Dam03CURTQ,3376
 letta/dynamic_multi_agent.py,sha256=DlYaSK5roLZ8M1HGUNeTh9YcVnDpPHKGnBv1jHdWkC4,12066
 letta/embeddings.py,sha256=KvC2bl5tARpVY9xcFmw4Cwu1vN0DoH266v2mSUZqwkY,10528
-letta/errors.py,sha256=
+letta/errors.py,sha256=szJhSRXndgjMCqpN_rSa-o3AGhMo7AMqd_OmP8-wM28,6333
 letta/functions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 letta/functions/ast_parsers.py,sha256=CQI0rUoIXcLKAev_GYrXcldRIGN5ZQtk5u4FLoHe5sE,5728
 letta/functions/function_sets/base.py,sha256=sxtxbIRfPvDnOQ1heQzKf3PbVPPoFUjNWJgFIilWGWM,6136
@@ -55,7 +55,7 @@ letta/interfaces/openai_chat_completions_streaming_interface.py,sha256=SfqVp7V7A
 letta/interfaces/utils.py,sha256=c6jvO0dBYHh8DQnlN-B0qeNC64d3CSunhfqlFA4pJTY,278
 letta/llm_api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 letta/llm_api/anthropic.py,sha256=PRppmT-aZq3i2s-iLg6O4AgxfPcHU5TNVgzppY0DaUY,43652
-letta/llm_api/anthropic_client.py,sha256=
+letta/llm_api/anthropic_client.py,sha256=HGQjVs7mEz1fyq3ztJ1YTiVessOFx3tOZegjCNi0KoU,19205
 letta/llm_api/aws_bedrock.py,sha256=kAPpKPRe4ZUa6fkxFbo8xwQgq4fJf3QoZEAP1LOCfaw,4168
 letta/llm_api/azure_openai.py,sha256=Y8R9vDmruJnCr_EfmPj_oangX8gJPpiyhTppD9T8SHE,5168
 letta/llm_api/azure_openai_constants.py,sha256=ZaR2IasJThijG0uhLKJThrixdAxLPD2IojfeaJ-KQMQ,294
@@ -67,7 +67,7 @@ letta/llm_api/google_vertex_client.py,sha256=Cu1AVKxLCSvip1h8lleJ8voatSH2d6XnGmQ
 letta/llm_api/helpers.py,sha256=sLYv30UnKBRVPuhU_KDXfKFdbkUONiDAyVEwGr86l3A,16780
 letta/llm_api/llm_api_tools.py,sha256=ABNSPIJgbGmOBcCTjoMIB5g2bOYbXP9HpTKVjMmDlv0,26993
 letta/llm_api/llm_client.py,sha256=bnS5vU5NW-rybZVECSwLYv8AvHMbPP6VMRfMqYVZ-CY,1846
-letta/llm_api/llm_client_base.py,sha256=
+letta/llm_api/llm_client_base.py,sha256=3umeVqo-IbSUUN1Uzq6wvoDeTHqvhktCCzNM0nJecq8,5619
 letta/llm_api/mistral.py,sha256=fHdfD9ug-rQIk2qn8tRKay1U6w9maF11ryhKi91FfXM,1593
 letta/llm_api/openai.py,sha256=pdaZmFTTOvxJ-Od91rGwZ0ucvpdyKMCyGI5yDZgKIUM,22468
 letta/local_llm/README.md,sha256=hFJyw5B0TU2jrh9nb0zGZMgdH-Ei1dSRfhvPQG_NSoU,168
@@ -193,7 +193,7 @@ letta/schemas/letta_response.py,sha256=pq-SxXQy5yZo1-DiAwV2mMURlUvz1Uu7HHR_tB1hM
 letta/schemas/llm_config.py,sha256=xEAfwigMc5MDY6hUtLRhsWW6iZPlPUezo-ZOZzWB8cs,5953
 letta/schemas/llm_config_overrides.py,sha256=-oRglCTcajF6UAK3RAa0FLWVuKODPI1v403fDIWMAtA,1815
 letta/schemas/memory.py,sha256=GOYDfPKzbWftUWO9Hv4KW7xAi1EIQmC8zpP7qvEkVHw,10245
-letta/schemas/message.py,sha256=
+letta/schemas/message.py,sha256=C8eogmQdqsPM9o1o6LqUBG-iuhMNV4TxuWJ3-tdHsFM,45343
 letta/schemas/openai/chat_completion_request.py,sha256=MtqUG7YsgsbJ7Rdauw-NOSpwNBgYjpx00xxrY8jOvmQ,4092
 letta/schemas/openai/chat_completion_response.py,sha256=yoepGZkg5PIobGqvATJruPdV4odpIUDHWniodSQo3PY,4433
 letta/schemas/openai/chat_completions.py,sha256=l0e9sT9boTD5VBU5YtJ0s7qUtCfFGB2K-gQLeEZ2LHU,3599
@@ -301,8 +301,8 @@ letta/supervisor_multi_agent.py,sha256=EwdfgznrwKkJZpxU-HO6iofR4PINCnGuWEbKamO3A
 letta/system.py,sha256=dnOrS2FlRMwijQnOvfrky0Lg8wEw-FUq2zzfAJOUSKA,8477
 letta/tracing.py,sha256=RstWXpfWVF77nmb_ISORVWd9IQw2Ky3de40k_S70yKI,8258
 letta/utils.py,sha256=AdHrQ2OQ3V4XhJ1LtYwbLUO71j2IJY37cIUxXPgaaRY,32125
-letta_nightly-0.6.
-letta_nightly-0.6.
-letta_nightly-0.6.
-letta_nightly-0.6.
-letta_nightly-0.6.
+letta_nightly-0.6.47.dev20250331104251.dist-info/LICENSE,sha256=mExtuZ_GYJgDEI38GWdiEYZizZS4KkVt2SF1g_GPNhI,10759
+letta_nightly-0.6.47.dev20250331104251.dist-info/METADATA,sha256=pz_efaOER7df70ejuEmiyLQuxkUS0KNKn8ngkbbEfyw,22937
+letta_nightly-0.6.47.dev20250331104251.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+letta_nightly-0.6.47.dev20250331104251.dist-info/entry_points.txt,sha256=2zdiyGNEZGV5oYBuS-y2nAAgjDgcC9yM_mHJBFSRt5U,40
+letta_nightly-0.6.47.dev20250331104251.dist-info/RECORD,,