letta-client 0.1.0__py3-none-any.whl → 0.1.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of letta-client might be problematic; see the advisory details on the package registry page.
- letta/__init__.py +277 -0
- letta/agents/__init__.py +44 -0
- letta/agents/archival_memory/__init__.py +2 -0
- letta/agents/archival_memory/client.py +591 -0
- letta/agents/client.py +2604 -0
- letta/agents/context/__init__.py +2 -0
- letta/agents/context/client.py +145 -0
- letta/agents/memory/__init__.py +5 -0
- letta/agents/memory/client.py +149 -0
- letta/agents/memory/messages/__init__.py +2 -0
- letta/agents/memory/messages/client.py +147 -0
- letta/agents/memory_blocks/__init__.py +2 -0
- letta/agents/memory_blocks/client.py +364 -0
- letta/agents/messages/__init__.py +5 -0
- letta/agents/messages/client.py +787 -0
- letta/agents/messages/types/__init__.py +7 -0
- letta/agents/messages/types/letta_streaming_response.py +20 -0
- letta/agents/messages/types/messages_list_response.py +7 -0
- letta/agents/messages/types/messages_list_response_item.py +13 -0
- letta/agents/recall_memory/__init__.py +2 -0
- letta/agents/recall_memory/client.py +147 -0
- letta/agents/sources/__init__.py +2 -0
- letta/agents/sources/client.py +145 -0
- letta/agents/tools/__init__.py +2 -0
- letta/agents/tools/client.py +408 -0
- letta/agents/types/__init__.py +39 -0
- letta/agents/types/agents_get_agent_variables_response.py +19 -0
- letta/agents/types/agents_migrate_response.py +19 -0
- letta/agents/types/agents_search_deployed_agents_request_combinator.py +5 -0
- letta/agents/types/agents_search_deployed_agents_request_search_item.py +16 -0
- letta/agents/types/agents_search_deployed_agents_request_search_item_direction.py +27 -0
- letta/agents/types/agents_search_deployed_agents_request_search_item_direction_direction.py +5 -0
- letta/agents/types/agents_search_deployed_agents_request_search_item_direction_value.py +7 -0
- letta/agents/types/agents_search_deployed_agents_request_search_item_operator.py +24 -0
- letta/agents/types/agents_search_deployed_agents_request_search_item_operator_operator.py +7 -0
- letta/agents/types/agents_search_deployed_agents_request_search_item_zero.py +20 -0
- letta/agents/types/create_agent_request_tool_rules_item.py +9 -0
- letta/agents/types/update_agent_tool_rules_item.py +9 -0
- letta/blocks/__init__.py +2 -0
- letta/blocks/client.py +1054 -0
- letta/client.py +164 -0
- letta/core/__init__.py +47 -0
- letta/core/api_error.py +15 -0
- letta/core/client_wrapper.py +76 -0
- letta/core/datetime_utils.py +28 -0
- letta/core/file.py +67 -0
- letta/core/http_client.py +499 -0
- letta/core/jsonable_encoder.py +101 -0
- letta/core/pydantic_utilities.py +296 -0
- letta/core/query_encoder.py +58 -0
- letta/core/remove_none_from_dict.py +11 -0
- letta/core/request_options.py +35 -0
- letta/core/serialization.py +272 -0
- letta/environment.py +8 -0
- letta/errors/__init__.py +8 -0
- letta/errors/conflict_error.py +9 -0
- letta/errors/internal_server_error.py +9 -0
- letta/errors/not_found_error.py +9 -0
- letta/errors/unprocessable_entity_error.py +9 -0
- letta/health/__init__.py +2 -0
- letta/health/client.py +108 -0
- letta/jobs/__init__.py +2 -0
- letta/jobs/client.py +503 -0
- letta/models/__init__.py +2 -0
- letta/models/client.py +201 -0
- letta/sources/__init__.py +5 -0
- letta/sources/client.py +1154 -0
- letta/sources/files/__init__.py +2 -0
- letta/sources/files/client.py +436 -0
- letta/sources/passages/__init__.py +2 -0
- letta/sources/passages/client.py +145 -0
- letta/tools/__init__.py +2 -0
- letta/tools/client.py +1823 -0
- letta/types/__init__.py +231 -0
- letta/types/action_model.py +36 -0
- letta/types/action_parameters_model.py +26 -0
- letta/types/action_response_model.py +26 -0
- letta/types/agent_state.py +139 -0
- letta/types/agent_state_tool_rules_item.py +9 -0
- letta/types/agent_type.py +8 -0
- letta/types/app_auth_scheme.py +34 -0
- letta/types/app_auth_scheme_auth_mode.py +7 -0
- letta/types/app_model.py +44 -0
- letta/types/archival_memory_summary.py +22 -0
- letta/types/assistant_file.py +33 -0
- letta/types/assistant_message_input.py +23 -0
- letta/types/assistant_message_output.py +23 -0
- letta/types/auth_request.py +22 -0
- letta/types/auth_response.py +29 -0
- letta/types/auth_scheme_field.py +30 -0
- letta/types/block.py +91 -0
- letta/types/block_update.py +60 -0
- letta/types/chat_completion_request.py +49 -0
- letta/types/chat_completion_request_function_call.py +6 -0
- letta/types/chat_completion_request_messages_item.py +11 -0
- letta/types/chat_completion_request_stop.py +5 -0
- letta/types/chat_completion_request_tool_choice.py +8 -0
- letta/types/chat_completion_response.py +32 -0
- letta/types/child_tool_rule.py +33 -0
- letta/types/choice.py +25 -0
- letta/types/conditional_tool_rule.py +43 -0
- letta/types/conflict_error_body.py +21 -0
- letta/types/context_window_overview.py +105 -0
- letta/types/create_assistant_file_request.py +22 -0
- letta/types/create_assistant_request.py +57 -0
- letta/types/create_block.py +56 -0
- letta/types/delete_assistant_file_response.py +28 -0
- letta/types/delete_assistant_response.py +28 -0
- letta/types/e_2_b_sandbox_config.py +32 -0
- letta/types/embedding_config.py +77 -0
- letta/types/embedding_config_embedding_endpoint_type.py +26 -0
- letta/types/file_metadata.py +82 -0
- letta/types/function_call_input.py +19 -0
- letta/types/function_call_output.py +20 -0
- letta/types/function_schema.py +21 -0
- letta/types/health.py +24 -0
- letta/types/http_validation_error.py +20 -0
- letta/types/init_tool_rule.py +29 -0
- letta/types/internal_server_error_body.py +19 -0
- letta/types/job.py +79 -0
- letta/types/job_status.py +5 -0
- letta/types/letta_request.py +33 -0
- letta/types/letta_response.py +37 -0
- letta/types/letta_schemas_letta_message_tool_call.py +21 -0
- letta/types/letta_schemas_message_message.py +103 -0
- letta/types/letta_schemas_openai_chat_completion_request_tool.py +21 -0
- letta/types/letta_schemas_openai_chat_completion_request_tool_call.py +24 -0
- letta/types/letta_schemas_openai_chat_completion_request_tool_call_function.py +20 -0
- letta/types/letta_schemas_openai_chat_completion_response_message.py +24 -0
- letta/types/letta_schemas_openai_chat_completion_response_tool_call.py +22 -0
- letta/types/letta_schemas_openai_chat_completions_tool_call_function.py +27 -0
- letta/types/letta_schemas_openai_chat_completions_tool_call_input.py +29 -0
- letta/types/letta_schemas_openai_chat_completions_tool_call_output.py +29 -0
- letta/types/letta_schemas_tool_tool.py +88 -0
- letta/types/letta_usage_statistics.py +48 -0
- letta/types/llm_config.py +65 -0
- letta/types/llm_config_model_endpoint_type.py +26 -0
- letta/types/local_sandbox_config.py +32 -0
- letta/types/log_prob_token.py +21 -0
- letta/types/memory.py +32 -0
- letta/types/message_content_log_prob.py +23 -0
- letta/types/message_create.py +37 -0
- letta/types/message_create_role.py +5 -0
- letta/types/message_role.py +5 -0
- letta/types/not_found_error_body.py +19 -0
- letta/types/not_found_error_body_message.py +11 -0
- letta/types/open_ai_assistant.py +67 -0
- letta/types/organization.py +33 -0
- letta/types/organization_create.py +22 -0
- letta/types/passage.py +107 -0
- letta/types/reasoning_message.py +32 -0
- letta/types/recall_memory_summary.py +22 -0
- letta/types/response_format.py +19 -0
- letta/types/sandbox_config.py +59 -0
- letta/types/sandbox_config_create.py +23 -0
- letta/types/sandbox_config_create_config.py +7 -0
- letta/types/sandbox_config_update.py +27 -0
- letta/types/sandbox_config_update_config.py +7 -0
- letta/types/sandbox_environment_variable.py +68 -0
- letta/types/sandbox_environment_variable_create.py +32 -0
- letta/types/sandbox_environment_variable_update.py +36 -0
- letta/types/sandbox_type.py +5 -0
- letta/types/source.py +85 -0
- letta/types/system_message_input.py +21 -0
- letta/types/system_message_output.py +32 -0
- letta/types/terminal_tool_rule.py +29 -0
- letta/types/tool_call_delta.py +21 -0
- letta/types/tool_call_function_output.py +27 -0
- letta/types/tool_call_message.py +33 -0
- letta/types/tool_call_message_tool_call.py +7 -0
- letta/types/tool_create.py +57 -0
- letta/types/tool_function_choice.py +21 -0
- letta/types/tool_input.py +21 -0
- letta/types/tool_message.py +21 -0
- letta/types/tool_return_message.py +41 -0
- letta/types/tool_return_message_status.py +5 -0
- letta/types/tool_rule_type.py +10 -0
- letta/types/usage_statistics.py +21 -0
- letta/types/user.py +57 -0
- letta/types/user_create.py +27 -0
- letta/types/user_message_input.py +22 -0
- letta/types/user_message_input_content.py +5 -0
- letta/types/user_message_output.py +32 -0
- letta/types/user_update.py +32 -0
- letta/types/validation_error.py +22 -0
- letta/types/validation_error_loc_item.py +5 -0
- letta/version.py +3 -0
- letta_client-0.1.4.dist-info/METADATA +189 -0
- letta_client-0.1.4.dist-info/RECORD +191 -0
- {letta_client-0.1.0.dist-info → letta_client-0.1.4.dist-info}/WHEEL +1 -1
- letta_client-0.1.0.dist-info/METADATA +0 -15
- letta_client-0.1.0.dist-info/RECORD +0 -4
- /letta_client/__init__.py → /letta/py.typed +0 -0
|
@@ -0,0 +1,787 @@
|
|
|
1
|
+
# This file was auto-generated by Fern from our API Definition.
|
|
2
|
+
|
|
3
|
+
import typing
|
|
4
|
+
from ...core.client_wrapper import SyncClientWrapper
|
|
5
|
+
from ...core.request_options import RequestOptions
|
|
6
|
+
from .types.messages_list_response import MessagesListResponse
|
|
7
|
+
from ...core.jsonable_encoder import jsonable_encoder
|
|
8
|
+
from ...core.pydantic_utilities import parse_obj_as
|
|
9
|
+
from ...errors.unprocessable_entity_error import UnprocessableEntityError
|
|
10
|
+
from ...types.http_validation_error import HttpValidationError
|
|
11
|
+
from json.decoder import JSONDecodeError
|
|
12
|
+
from ...core.api_error import ApiError
|
|
13
|
+
from ...types.message_create import MessageCreate
|
|
14
|
+
from ...types.letta_response import LettaResponse
|
|
15
|
+
from ...core.serialization import convert_and_respect_annotation_metadata
|
|
16
|
+
from ...types.message_role import MessageRole
|
|
17
|
+
from ...types.letta_schemas_openai_chat_completions_tool_call_input import (
|
|
18
|
+
LettaSchemasOpenaiChatCompletionsToolCallInput,
|
|
19
|
+
)
|
|
20
|
+
from ...types.letta_schemas_message_message import LettaSchemasMessageMessage
|
|
21
|
+
from .types.letta_streaming_response import LettaStreamingResponse
|
|
22
|
+
import httpx_sse
|
|
23
|
+
import json
|
|
24
|
+
from ...core.client_wrapper import AsyncClientWrapper
|
|
25
|
+
|
|
26
|
+
# Sentinel default for optional request parameters: the Ellipsis object cast to Any.
# Using `...` (rather than None) lets the client tell "caller omitted the argument"
# apart from an explicit None, so omitted fields can be stripped from the request
# body (see the `omit=OMIT` argument passed to the httpx client's `request`/`stream`).
OMIT = typing.cast(typing.Any, ...)
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class MessagesClient:
    """Synchronous client for an agent's message endpoints.

    Exposes list/create/update of messages and server-sent-event streaming
    against the ``v1/agents/{agent_id}/messages`` routes.
    """

    def __init__(self, *, client_wrapper: SyncClientWrapper):
        # Shared HTTP plumbing (base URL, auth headers, underlying httpx client).
        self._client_wrapper = client_wrapper
|
33
|
+
|
|
34
|
+
def list(
    self,
    agent_id: str,
    *,
    before: typing.Optional[str] = None,
    limit: typing.Optional[int] = None,
    msg_object: typing.Optional[bool] = None,
    assistant_message_tool_name: typing.Optional[str] = None,
    assistant_message_tool_kwarg: typing.Optional[str] = None,
    request_options: typing.Optional[RequestOptions] = None,
) -> MessagesListResponse:
    """Retrieve message history for an agent.

    Issues ``GET v1/agents/{agent_id}/messages`` with the given pagination and
    formatting query parameters.

    Parameters
    ----------
    agent_id : str

    before : typing.Optional[str]
        Message before which to retrieve the returned messages.

    limit : typing.Optional[int]
        Maximum number of messages to retrieve.

    msg_object : typing.Optional[bool]
        If true, returns Message objects. If false, return LettaMessage objects.

    assistant_message_tool_name : typing.Optional[str]
        The name of the designated message tool.

    assistant_message_tool_kwarg : typing.Optional[str]
        The name of the message argument in the designated message tool.

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    MessagesListResponse
        Successful Response

    Raises
    ------
    UnprocessableEntityError
        On a 422 response carrying an HttpValidationError body.
    ApiError
        On any other non-2xx response, or a non-JSON response body.
    """
    # None-valued query params are filtered out by the shared http client.
    _query = {
        "before": before,
        "limit": limit,
        "msg_object": msg_object,
        "assistant_message_tool_name": assistant_message_tool_name,
        "assistant_message_tool_kwarg": assistant_message_tool_kwarg,
    }
    _http_response = self._client_wrapper.httpx_client.request(
        f"v1/agents/{jsonable_encoder(agent_id)}/messages",
        method="GET",
        params=_query,
        request_options=request_options,
    )
    try:
        _status = _http_response.status_code
        if 200 <= _status < 300:
            _parsed = parse_obj_as(
                type_=MessagesListResponse,  # type: ignore
                object_=_http_response.json(),
            )
            return typing.cast(MessagesListResponse, _parsed)
        if _status == 422:
            _detail = parse_obj_as(
                type_=HttpValidationError,  # type: ignore
                object_=_http_response.json(),
            )
            raise UnprocessableEntityError(typing.cast(HttpValidationError, _detail))
        _error_body = _http_response.json()
    except JSONDecodeError:
        # Body was not JSON at all: surface the raw text instead.
        raise ApiError(status_code=_http_response.status_code, body=_http_response.text)
    raise ApiError(status_code=_http_response.status_code, body=_error_body)
|
|
121
|
+
|
|
122
|
+
def create(
    self,
    agent_id: str,
    *,
    messages: typing.Sequence[MessageCreate],
    assistant_message_tool_name: typing.Optional[str] = OMIT,
    assistant_message_tool_kwarg: typing.Optional[str] = OMIT,
    request_options: typing.Optional[RequestOptions] = None,
) -> LettaResponse:
    """Process a user message and return the agent's response.

    This endpoint accepts a message from a user and processes it through the agent.
    Issues ``POST v1/agents/{agent_id}/messages``; fields left at OMIT are dropped
    from the JSON body.

    Parameters
    ----------
    agent_id : str

    messages : typing.Sequence[MessageCreate]
        The messages to be sent to the agent.

    assistant_message_tool_name : typing.Optional[str]
        The name of the designated message tool.

    assistant_message_tool_kwarg : typing.Optional[str]
        The name of the message argument in the designated message tool.

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    LettaResponse
        Successful Response

    Raises
    ------
    UnprocessableEntityError
        On a 422 response carrying an HttpValidationError body.
    ApiError
        On any other non-2xx response, or a non-JSON response body.
    """
    _payload = {
        # Serialize request models honoring any field aliases declared via
        # typing annotations on MessageCreate.
        "messages": convert_and_respect_annotation_metadata(
            object_=messages, annotation=typing.Sequence[MessageCreate], direction="write"
        ),
        "assistant_message_tool_name": assistant_message_tool_name,
        "assistant_message_tool_kwarg": assistant_message_tool_kwarg,
    }
    _http_response = self._client_wrapper.httpx_client.request(
        f"v1/agents/{jsonable_encoder(agent_id)}/messages",
        method="POST",
        json=_payload,
        request_options=request_options,
        omit=OMIT,
    )
    try:
        _status = _http_response.status_code
        if 200 <= _status < 300:
            _parsed = parse_obj_as(
                type_=LettaResponse,  # type: ignore
                object_=_http_response.json(),
            )
            return typing.cast(LettaResponse, _parsed)
        if _status == 422:
            _detail = parse_obj_as(
                type_=HttpValidationError,  # type: ignore
                object_=_http_response.json(),
            )
            raise UnprocessableEntityError(typing.cast(HttpValidationError, _detail))
        _error_body = _http_response.json()
    except JSONDecodeError:
        # Body was not JSON at all: surface the raw text instead.
        raise ApiError(status_code=_http_response.status_code, body=_http_response.text)
    raise ApiError(status_code=_http_response.status_code, body=_error_body)
|
|
209
|
+
|
|
210
|
+
def update(
    self,
    agent_id: str,
    message_id: str,
    *,
    role: typing.Optional[MessageRole] = OMIT,
    text: typing.Optional[str] = OMIT,
    name: typing.Optional[str] = OMIT,
    tool_calls: typing.Optional[typing.Sequence[LettaSchemasOpenaiChatCompletionsToolCallInput]] = OMIT,
    tool_call_id: typing.Optional[str] = OMIT,
    request_options: typing.Optional[RequestOptions] = None,
) -> LettaSchemasMessageMessage:
    """
    Update the details of a message associated with an agent.

    Sends ``PATCH v1/agents/{agent_id}/messages/{message_id}``. Fields left at
    the OMIT sentinel are stripped from the JSON body, so only explicitly
    provided fields are patched.

    Parameters
    ----------
    agent_id : str

    message_id : str

    role : typing.Optional[MessageRole]
        The role of the participant.

    text : typing.Optional[str]
        The text of the message.

    name : typing.Optional[str]
        The name of the participant.

    tool_calls : typing.Optional[typing.Sequence[LettaSchemasOpenaiChatCompletionsToolCallInput]]
        The list of tool calls requested.

    tool_call_id : typing.Optional[str]
        The id of the tool call.

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    LettaSchemasMessageMessage
        Successful Response

    Examples
    --------
    from letta import Letta

    client = Letta(
        token="YOUR_TOKEN",
    )
    client.agents.messages.update(
        agent_id="agent_id",
        message_id="message_id",
    )
    """
    _response = self._client_wrapper.httpx_client.request(
        f"v1/agents/{jsonable_encoder(agent_id)}/messages/{jsonable_encoder(message_id)}",
        method="PATCH",
        json={
            "role": role,
            "text": text,
            "name": name,
            # Serialize honoring field aliases declared on the tool-call model.
            "tool_calls": convert_and_respect_annotation_metadata(
                object_=tool_calls,
                annotation=typing.Sequence[LettaSchemasOpenaiChatCompletionsToolCallInput],
                direction="write",
            ),
            "tool_call_id": tool_call_id,
        },
        headers={
            "content-type": "application/json",
        },
        request_options=request_options,
        omit=OMIT,
    )
    try:
        # 2xx: deserialize the updated message.
        if 200 <= _response.status_code < 300:
            return typing.cast(
                LettaSchemasMessageMessage,
                parse_obj_as(
                    type_=LettaSchemasMessageMessage,  # type: ignore
                    object_=_response.json(),
                ),
            )
        # 422: server-side request validation failure.
        if _response.status_code == 422:
            raise UnprocessableEntityError(
                typing.cast(
                    HttpValidationError,
                    parse_obj_as(
                        type_=HttpValidationError,  # type: ignore
                        object_=_response.json(),
                    ),
                )
            )
        _response_json = _response.json()
    except JSONDecodeError:
        # Non-JSON body: surface raw text in the generic error.
        raise ApiError(status_code=_response.status_code, body=_response.text)
    raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
309
|
+
|
|
310
|
+
def stream(
    self,
    agent_id: str,
    *,
    messages: typing.Sequence[MessageCreate],
    assistant_message_tool_name: typing.Optional[str] = OMIT,
    assistant_message_tool_kwarg: typing.Optional[str] = OMIT,
    stream_tokens: typing.Optional[bool] = OMIT,
    request_options: typing.Optional[RequestOptions] = None,
) -> typing.Iterator[LettaStreamingResponse]:
    """
    Process a user message and return the agent's response.
    This endpoint accepts a message from a user and processes it through the agent.
    It will stream the steps of the response always, and stream the tokens if 'stream_tokens' is set to True.

    Parameters
    ----------
    agent_id : str

    messages : typing.Sequence[MessageCreate]
        The messages to be sent to the agent.

    assistant_message_tool_name : typing.Optional[str]
        The name of the designated message tool.

    assistant_message_tool_kwarg : typing.Optional[str]
        The name of the message argument in the designated message tool.

    stream_tokens : typing.Optional[bool]
        Flag to determine if individual tokens should be streamed. Set to True for token streaming (requires stream_steps = True).

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Yields
    ------
    typing.Iterator[LettaStreamingResponse]
        Successful response
    """
    with self._client_wrapper.httpx_client.stream(
        f"v1/agents/{jsonable_encoder(agent_id)}/messages/stream",
        method="POST",
        json={
            "messages": convert_and_respect_annotation_metadata(
                object_=messages, annotation=typing.Sequence[MessageCreate], direction="write"
            ),
            "assistant_message_tool_name": assistant_message_tool_name,
            "assistant_message_tool_kwarg": assistant_message_tool_kwarg,
            "stream_tokens": stream_tokens,
        },
        headers={
            "content-type": "application/json",
        },
        request_options=request_options,
        omit=OMIT,
    ) as _response:
        try:
            if 200 <= _response.status_code < 300:
                _event_source = httpx_sse.EventSource(_response)
                for _sse in _event_source.iter_sse():
                    try:
                        yield typing.cast(
                            LettaStreamingResponse,
                            parse_obj_as(
                                type_=LettaStreamingResponse,  # type: ignore
                                object_=json.loads(_sse.data),
                            ),
                        )
                    except Exception:
                        # BUGFIX: was a bare `except:`. A bare except also
                        # catches GeneratorExit raised at the `yield` when the
                        # consumer closes this iterator early; swallowing it
                        # and continuing the loop triggers
                        # "generator ignored GeneratorExit" RuntimeErrors.
                        # Only malformed/unparsable SSE payloads are skipped.
                        pass
                return
            # Error path: materialize the body before inspecting it, since
            # the response is in streaming mode.
            _response.read()
            # 422: server-side request validation failure.
            if _response.status_code == 422:
                raise UnprocessableEntityError(
                    typing.cast(
                        HttpValidationError,
                        parse_obj_as(
                            type_=HttpValidationError,  # type: ignore
                            object_=_response.json(),
                        ),
                    )
                )
            _response_json = _response.json()
        except JSONDecodeError:
            # Non-JSON body: surface raw text in the generic error.
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
396
|
+
|
|
397
|
+
|
|
398
|
+
class AsyncMessagesClient:
    """Asynchronous counterpart of MessagesClient for the agent message endpoints."""

    def __init__(self, *, client_wrapper: AsyncClientWrapper):
        # Shared HTTP plumbing (base URL, auth headers, underlying async httpx client).
        self._client_wrapper = client_wrapper
|
|
401
|
+
|
|
402
|
+
async def list(
    self,
    agent_id: str,
    *,
    before: typing.Optional[str] = None,
    limit: typing.Optional[int] = None,
    msg_object: typing.Optional[bool] = None,
    assistant_message_tool_name: typing.Optional[str] = None,
    assistant_message_tool_kwarg: typing.Optional[str] = None,
    request_options: typing.Optional[RequestOptions] = None,
) -> MessagesListResponse:
    """Retrieve message history for an agent (async).

    Issues ``GET v1/agents/{agent_id}/messages`` with the given pagination and
    formatting query parameters.

    Parameters
    ----------
    agent_id : str

    before : typing.Optional[str]
        Message before which to retrieve the returned messages.

    limit : typing.Optional[int]
        Maximum number of messages to retrieve.

    msg_object : typing.Optional[bool]
        If true, returns Message objects. If false, return LettaMessage objects.

    assistant_message_tool_name : typing.Optional[str]
        The name of the designated message tool.

    assistant_message_tool_kwarg : typing.Optional[str]
        The name of the message argument in the designated message tool.

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    MessagesListResponse
        Successful Response

    Raises
    ------
    UnprocessableEntityError
        On a 422 response carrying an HttpValidationError body.
    ApiError
        On any other non-2xx response, or a non-JSON response body.
    """
    # None-valued query params are filtered out by the shared http client.
    _query = {
        "before": before,
        "limit": limit,
        "msg_object": msg_object,
        "assistant_message_tool_name": assistant_message_tool_name,
        "assistant_message_tool_kwarg": assistant_message_tool_kwarg,
    }
    _http_response = await self._client_wrapper.httpx_client.request(
        f"v1/agents/{jsonable_encoder(agent_id)}/messages",
        method="GET",
        params=_query,
        request_options=request_options,
    )
    try:
        _status = _http_response.status_code
        if 200 <= _status < 300:
            _parsed = parse_obj_as(
                type_=MessagesListResponse,  # type: ignore
                object_=_http_response.json(),
            )
            return typing.cast(MessagesListResponse, _parsed)
        if _status == 422:
            _detail = parse_obj_as(
                type_=HttpValidationError,  # type: ignore
                object_=_http_response.json(),
            )
            raise UnprocessableEntityError(typing.cast(HttpValidationError, _detail))
        _error_body = _http_response.json()
    except JSONDecodeError:
        # Body was not JSON at all: surface the raw text instead.
        raise ApiError(status_code=_http_response.status_code, body=_http_response.text)
    raise ApiError(status_code=_http_response.status_code, body=_error_body)
|
|
497
|
+
|
|
498
|
+
async def create(
    self,
    agent_id: str,
    *,
    messages: typing.Sequence[MessageCreate],
    assistant_message_tool_name: typing.Optional[str] = OMIT,
    assistant_message_tool_kwarg: typing.Optional[str] = OMIT,
    request_options: typing.Optional[RequestOptions] = None,
) -> LettaResponse:
    """Process a user message and return the agent's response (async).

    This endpoint accepts a message from a user and processes it through the agent.
    Issues ``POST v1/agents/{agent_id}/messages``; fields left at OMIT are dropped
    from the JSON body.

    Parameters
    ----------
    agent_id : str

    messages : typing.Sequence[MessageCreate]
        The messages to be sent to the agent.

    assistant_message_tool_name : typing.Optional[str]
        The name of the designated message tool.

    assistant_message_tool_kwarg : typing.Optional[str]
        The name of the message argument in the designated message tool.

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    LettaResponse
        Successful Response

    Raises
    ------
    UnprocessableEntityError
        On a 422 response carrying an HttpValidationError body.
    ApiError
        On any other non-2xx response, or a non-JSON response body.
    """
    _payload = {
        # Serialize request models honoring any field aliases declared via
        # typing annotations on MessageCreate.
        "messages": convert_and_respect_annotation_metadata(
            object_=messages, annotation=typing.Sequence[MessageCreate], direction="write"
        ),
        "assistant_message_tool_name": assistant_message_tool_name,
        "assistant_message_tool_kwarg": assistant_message_tool_kwarg,
    }
    _http_response = await self._client_wrapper.httpx_client.request(
        f"v1/agents/{jsonable_encoder(agent_id)}/messages",
        method="POST",
        json=_payload,
        request_options=request_options,
        omit=OMIT,
    )
    try:
        _status = _http_response.status_code
        if 200 <= _status < 300:
            _parsed = parse_obj_as(
                type_=LettaResponse,  # type: ignore
                object_=_http_response.json(),
            )
            return typing.cast(LettaResponse, _parsed)
        if _status == 422:
            _detail = parse_obj_as(
                type_=HttpValidationError,  # type: ignore
                object_=_http_response.json(),
            )
            raise UnprocessableEntityError(typing.cast(HttpValidationError, _detail))
        _error_body = _http_response.json()
    except JSONDecodeError:
        # Body was not JSON at all: surface the raw text instead.
        raise ApiError(status_code=_http_response.status_code, body=_http_response.text)
    raise ApiError(status_code=_http_response.status_code, body=_error_body)
|
|
593
|
+
|
|
594
|
+
async def update(
    self,
    agent_id: str,
    message_id: str,
    *,
    role: typing.Optional[MessageRole] = OMIT,
    text: typing.Optional[str] = OMIT,
    name: typing.Optional[str] = OMIT,
    tool_calls: typing.Optional[typing.Sequence[LettaSchemasOpenaiChatCompletionsToolCallInput]] = OMIT,
    tool_call_id: typing.Optional[str] = OMIT,
    request_options: typing.Optional[RequestOptions] = None,
) -> LettaSchemasMessageMessage:
    """
    Update the details of a message associated with an agent.

    Parameters
    ----------
    agent_id : str

    message_id : str

    role : typing.Optional[MessageRole]
        The role of the participant.

    text : typing.Optional[str]
        The text of the message.

    name : typing.Optional[str]
        The name of the participant.

    tool_calls : typing.Optional[typing.Sequence[LettaSchemasOpenaiChatCompletionsToolCallInput]]
        The list of tool calls requested.

    tool_call_id : typing.Optional[str]
        The id of the tool call.

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    LettaSchemasMessageMessage
        Successful Response

    Examples
    --------
    import asyncio

    from letta import AsyncLetta

    client = AsyncLetta(
        token="YOUR_TOKEN",
    )


    async def main() -> None:
        await client.agents.messages.update(
            agent_id="agent_id",
            message_id="message_id",
        )


    asyncio.run(main())
    """
    # Serialize the tool calls up front, honoring any field-level
    # annotation metadata declared on the request model.
    _serialized_tool_calls = convert_and_respect_annotation_metadata(
        object_=tool_calls,
        annotation=typing.Sequence[LettaSchemasOpenaiChatCompletionsToolCallInput],
        direction="write",
    )
    _body = {
        "role": role,
        "text": text,
        "name": name,
        "tool_calls": _serialized_tool_calls,
        "tool_call_id": tool_call_id,
    }
    _response = await self._client_wrapper.httpx_client.request(
        f"v1/agents/{jsonable_encoder(agent_id)}/messages/{jsonable_encoder(message_id)}",
        method="PATCH",
        json=_body,
        headers={
            "content-type": "application/json",
        },
        request_options=request_options,
        omit=OMIT,
    )
    try:
        _status = _response.status_code
        if 200 <= _status < 300:
            # Success: deserialize the payload into the message model.
            _parsed = parse_obj_as(
                type_=LettaSchemasMessageMessage,  # type: ignore
                object_=_response.json(),
            )
            return typing.cast(LettaSchemasMessageMessage, _parsed)
        if _status == 422:
            # Validation failure: surface the server's error detail.
            _validation_error = typing.cast(
                HttpValidationError,
                parse_obj_as(
                    type_=HttpValidationError,  # type: ignore
                    object_=_response.json(),
                ),
            )
            raise UnprocessableEntityError(_validation_error)
        _response_json = _response.json()
    except JSONDecodeError:
        # Non-JSON error body: report the raw text instead.
        raise ApiError(status_code=_response.status_code, body=_response.text)
    raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
701
|
+
|
|
702
|
+
async def stream(
    self,
    agent_id: str,
    *,
    messages: typing.Sequence[MessageCreate],
    assistant_message_tool_name: typing.Optional[str] = OMIT,
    assistant_message_tool_kwarg: typing.Optional[str] = OMIT,
    stream_tokens: typing.Optional[bool] = OMIT,
    request_options: typing.Optional[RequestOptions] = None,
) -> typing.AsyncIterator[LettaStreamingResponse]:
    """
    Process a user message and return the agent's response.
    This endpoint accepts a message from a user and processes it through the agent.
    It will stream the steps of the response always, and stream the tokens if 'stream_tokens' is set to True.

    Parameters
    ----------
    agent_id : str

    messages : typing.Sequence[MessageCreate]
        The messages to be sent to the agent.

    assistant_message_tool_name : typing.Optional[str]
        The name of the designated message tool.

    assistant_message_tool_kwarg : typing.Optional[str]
        The name of the message argument in the designated message tool.

    stream_tokens : typing.Optional[bool]
        Flag to determine if individual tokens should be streamed. Set to True for token streaming (requires stream_steps = True).

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Yields
    ------
    typing.AsyncIterator[LettaStreamingResponse]
        Successful response
    """
    async with self._client_wrapper.httpx_client.stream(
        f"v1/agents/{jsonable_encoder(agent_id)}/messages/stream",
        method="POST",
        json={
            "messages": convert_and_respect_annotation_metadata(
                object_=messages, annotation=typing.Sequence[MessageCreate], direction="write"
            ),
            "assistant_message_tool_name": assistant_message_tool_name,
            "assistant_message_tool_kwarg": assistant_message_tool_kwarg,
            "stream_tokens": stream_tokens,
        },
        headers={
            "content-type": "application/json",
        },
        request_options=request_options,
        omit=OMIT,
    ) as _response:
        try:
            if 200 <= _response.status_code < 300:
                _event_source = httpx_sse.EventSource(_response)
                async for _sse in _event_source.aiter_sse():
                    try:
                        yield typing.cast(
                            LettaStreamingResponse,
                            parse_obj_as(
                                type_=LettaStreamingResponse,  # type: ignore
                                object_=json.loads(_sse.data),
                            ),
                        )
                    # Best-effort: skip events whose payload fails to parse.
                    # Must be `except Exception`, not a bare `except:` — a bare
                    # except also swallows asyncio.CancelledError (BaseException
                    # on 3.8+) and KeyboardInterrupt, which would silently break
                    # task cancellation while the stream is being consumed.
                    except Exception:
                        pass
                return
            # Error path: the body has not been consumed yet during streaming,
            # so read it fully before attempting to decode it.
            await _response.aread()
            if _response.status_code == 422:
                raise UnprocessableEntityError(
                    typing.cast(
                        HttpValidationError,
                        parse_obj_as(
                            type_=HttpValidationError,  # type: ignore
                            object_=_response.json(),
                        ),
                    )
                )
            _response_json = _response.json()
        except JSONDecodeError:
            # Non-JSON error body: report the raw text instead.
            raise ApiError(status_code=_response.status_code, body=_response.text)
        raise ApiError(status_code=_response.status_code, body=_response_json)