letta-client 0.1.0__py3-none-any.whl → 0.1.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of letta-client might be problematic.

Files changed (193)
  1. letta/__init__.py +277 -0
  2. letta/agents/__init__.py +44 -0
  3. letta/agents/archival_memory/__init__.py +2 -0
  4. letta/agents/archival_memory/client.py +591 -0
  5. letta/agents/client.py +2604 -0
  6. letta/agents/context/__init__.py +2 -0
  7. letta/agents/context/client.py +145 -0
  8. letta/agents/memory/__init__.py +5 -0
  9. letta/agents/memory/client.py +149 -0
  10. letta/agents/memory/messages/__init__.py +2 -0
  11. letta/agents/memory/messages/client.py +147 -0
  12. letta/agents/memory_blocks/__init__.py +2 -0
  13. letta/agents/memory_blocks/client.py +364 -0
  14. letta/agents/messages/__init__.py +5 -0
  15. letta/agents/messages/client.py +787 -0
  16. letta/agents/messages/types/__init__.py +7 -0
  17. letta/agents/messages/types/letta_streaming_response.py +20 -0
  18. letta/agents/messages/types/messages_list_response.py +7 -0
  19. letta/agents/messages/types/messages_list_response_item.py +13 -0
  20. letta/agents/recall_memory/__init__.py +2 -0
  21. letta/agents/recall_memory/client.py +147 -0
  22. letta/agents/sources/__init__.py +2 -0
  23. letta/agents/sources/client.py +145 -0
  24. letta/agents/tools/__init__.py +2 -0
  25. letta/agents/tools/client.py +408 -0
  26. letta/agents/types/__init__.py +39 -0
  27. letta/agents/types/agents_get_agent_variables_response.py +19 -0
  28. letta/agents/types/agents_migrate_response.py +19 -0
  29. letta/agents/types/agents_search_deployed_agents_request_combinator.py +5 -0
  30. letta/agents/types/agents_search_deployed_agents_request_search_item.py +16 -0
  31. letta/agents/types/agents_search_deployed_agents_request_search_item_direction.py +27 -0
  32. letta/agents/types/agents_search_deployed_agents_request_search_item_direction_direction.py +5 -0
  33. letta/agents/types/agents_search_deployed_agents_request_search_item_direction_value.py +7 -0
  34. letta/agents/types/agents_search_deployed_agents_request_search_item_operator.py +24 -0
  35. letta/agents/types/agents_search_deployed_agents_request_search_item_operator_operator.py +7 -0
  36. letta/agents/types/agents_search_deployed_agents_request_search_item_zero.py +20 -0
  37. letta/agents/types/create_agent_request_tool_rules_item.py +9 -0
  38. letta/agents/types/update_agent_tool_rules_item.py +9 -0
  39. letta/blocks/__init__.py +2 -0
  40. letta/blocks/client.py +1054 -0
  41. letta/client.py +164 -0
  42. letta/core/__init__.py +47 -0
  43. letta/core/api_error.py +15 -0
  44. letta/core/client_wrapper.py +76 -0
  45. letta/core/datetime_utils.py +28 -0
  46. letta/core/file.py +67 -0
  47. letta/core/http_client.py +499 -0
  48. letta/core/jsonable_encoder.py +101 -0
  49. letta/core/pydantic_utilities.py +296 -0
  50. letta/core/query_encoder.py +58 -0
  51. letta/core/remove_none_from_dict.py +11 -0
  52. letta/core/request_options.py +35 -0
  53. letta/core/serialization.py +272 -0
  54. letta/environment.py +8 -0
  55. letta/errors/__init__.py +8 -0
  56. letta/errors/conflict_error.py +9 -0
  57. letta/errors/internal_server_error.py +9 -0
  58. letta/errors/not_found_error.py +9 -0
  59. letta/errors/unprocessable_entity_error.py +9 -0
  60. letta/health/__init__.py +2 -0
  61. letta/health/client.py +108 -0
  62. letta/jobs/__init__.py +2 -0
  63. letta/jobs/client.py +503 -0
  64. letta/models/__init__.py +2 -0
  65. letta/models/client.py +201 -0
  66. letta/sources/__init__.py +5 -0
  67. letta/sources/client.py +1154 -0
  68. letta/sources/files/__init__.py +2 -0
  69. letta/sources/files/client.py +436 -0
  70. letta/sources/passages/__init__.py +2 -0
  71. letta/sources/passages/client.py +145 -0
  72. letta/tools/__init__.py +2 -0
  73. letta/tools/client.py +1823 -0
  74. letta/types/__init__.py +231 -0
  75. letta/types/action_model.py +36 -0
  76. letta/types/action_parameters_model.py +26 -0
  77. letta/types/action_response_model.py +26 -0
  78. letta/types/agent_state.py +139 -0
  79. letta/types/agent_state_tool_rules_item.py +9 -0
  80. letta/types/agent_type.py +8 -0
  81. letta/types/app_auth_scheme.py +34 -0
  82. letta/types/app_auth_scheme_auth_mode.py +7 -0
  83. letta/types/app_model.py +44 -0
  84. letta/types/archival_memory_summary.py +22 -0
  85. letta/types/assistant_file.py +33 -0
  86. letta/types/assistant_message_input.py +23 -0
  87. letta/types/assistant_message_output.py +23 -0
  88. letta/types/auth_request.py +22 -0
  89. letta/types/auth_response.py +29 -0
  90. letta/types/auth_scheme_field.py +30 -0
  91. letta/types/block.py +91 -0
  92. letta/types/block_update.py +60 -0
  93. letta/types/chat_completion_request.py +49 -0
  94. letta/types/chat_completion_request_function_call.py +6 -0
  95. letta/types/chat_completion_request_messages_item.py +11 -0
  96. letta/types/chat_completion_request_stop.py +5 -0
  97. letta/types/chat_completion_request_tool_choice.py +8 -0
  98. letta/types/chat_completion_response.py +32 -0
  99. letta/types/child_tool_rule.py +33 -0
  100. letta/types/choice.py +25 -0
  101. letta/types/conditional_tool_rule.py +43 -0
  102. letta/types/conflict_error_body.py +21 -0
  103. letta/types/context_window_overview.py +105 -0
  104. letta/types/create_assistant_file_request.py +22 -0
  105. letta/types/create_assistant_request.py +57 -0
  106. letta/types/create_block.py +56 -0
  107. letta/types/delete_assistant_file_response.py +28 -0
  108. letta/types/delete_assistant_response.py +28 -0
  109. letta/types/e_2_b_sandbox_config.py +32 -0
  110. letta/types/embedding_config.py +77 -0
  111. letta/types/embedding_config_embedding_endpoint_type.py +26 -0
  112. letta/types/file_metadata.py +82 -0
  113. letta/types/function_call_input.py +19 -0
  114. letta/types/function_call_output.py +20 -0
  115. letta/types/function_schema.py +21 -0
  116. letta/types/health.py +24 -0
  117. letta/types/http_validation_error.py +20 -0
  118. letta/types/init_tool_rule.py +29 -0
  119. letta/types/internal_server_error_body.py +19 -0
  120. letta/types/job.py +79 -0
  121. letta/types/job_status.py +5 -0
  122. letta/types/letta_request.py +33 -0
  123. letta/types/letta_response.py +37 -0
  124. letta/types/letta_schemas_letta_message_tool_call.py +21 -0
  125. letta/types/letta_schemas_message_message.py +103 -0
  126. letta/types/letta_schemas_openai_chat_completion_request_tool.py +21 -0
  127. letta/types/letta_schemas_openai_chat_completion_request_tool_call.py +24 -0
  128. letta/types/letta_schemas_openai_chat_completion_request_tool_call_function.py +20 -0
  129. letta/types/letta_schemas_openai_chat_completion_response_message.py +24 -0
  130. letta/types/letta_schemas_openai_chat_completion_response_tool_call.py +22 -0
  131. letta/types/letta_schemas_openai_chat_completions_tool_call_function.py +27 -0
  132. letta/types/letta_schemas_openai_chat_completions_tool_call_input.py +29 -0
  133. letta/types/letta_schemas_openai_chat_completions_tool_call_output.py +29 -0
  134. letta/types/letta_schemas_tool_tool.py +88 -0
  135. letta/types/letta_usage_statistics.py +48 -0
  136. letta/types/llm_config.py +65 -0
  137. letta/types/llm_config_model_endpoint_type.py +26 -0
  138. letta/types/local_sandbox_config.py +32 -0
  139. letta/types/log_prob_token.py +21 -0
  140. letta/types/memory.py +32 -0
  141. letta/types/message_content_log_prob.py +23 -0
  142. letta/types/message_create.py +37 -0
  143. letta/types/message_create_role.py +5 -0
  144. letta/types/message_role.py +5 -0
  145. letta/types/not_found_error_body.py +19 -0
  146. letta/types/not_found_error_body_message.py +11 -0
  147. letta/types/open_ai_assistant.py +67 -0
  148. letta/types/organization.py +33 -0
  149. letta/types/organization_create.py +22 -0
  150. letta/types/passage.py +107 -0
  151. letta/types/reasoning_message.py +32 -0
  152. letta/types/recall_memory_summary.py +22 -0
  153. letta/types/response_format.py +19 -0
  154. letta/types/sandbox_config.py +59 -0
  155. letta/types/sandbox_config_create.py +23 -0
  156. letta/types/sandbox_config_create_config.py +7 -0
  157. letta/types/sandbox_config_update.py +27 -0
  158. letta/types/sandbox_config_update_config.py +7 -0
  159. letta/types/sandbox_environment_variable.py +68 -0
  160. letta/types/sandbox_environment_variable_create.py +32 -0
  161. letta/types/sandbox_environment_variable_update.py +36 -0
  162. letta/types/sandbox_type.py +5 -0
  163. letta/types/source.py +85 -0
  164. letta/types/system_message_input.py +21 -0
  165. letta/types/system_message_output.py +32 -0
  166. letta/types/terminal_tool_rule.py +29 -0
  167. letta/types/tool_call_delta.py +21 -0
  168. letta/types/tool_call_function_output.py +27 -0
  169. letta/types/tool_call_message.py +33 -0
  170. letta/types/tool_call_message_tool_call.py +7 -0
  171. letta/types/tool_create.py +57 -0
  172. letta/types/tool_function_choice.py +21 -0
  173. letta/types/tool_input.py +21 -0
  174. letta/types/tool_message.py +21 -0
  175. letta/types/tool_return_message.py +41 -0
  176. letta/types/tool_return_message_status.py +5 -0
  177. letta/types/tool_rule_type.py +10 -0
  178. letta/types/usage_statistics.py +21 -0
  179. letta/types/user.py +57 -0
  180. letta/types/user_create.py +27 -0
  181. letta/types/user_message_input.py +22 -0
  182. letta/types/user_message_input_content.py +5 -0
  183. letta/types/user_message_output.py +32 -0
  184. letta/types/user_update.py +32 -0
  185. letta/types/validation_error.py +22 -0
  186. letta/types/validation_error_loc_item.py +5 -0
  187. letta/version.py +3 -0
  188. letta_client-0.1.4.dist-info/METADATA +189 -0
  189. letta_client-0.1.4.dist-info/RECORD +191 -0
  190. {letta_client-0.1.0.dist-info → letta_client-0.1.4.dist-info}/WHEEL +1 -1
  191. letta_client-0.1.0.dist-info/METADATA +0 -15
  192. letta_client-0.1.0.dist-info/RECORD +0 -4
  193. /letta_client/__init__.py → /letta/py.typed +0 -0
letta/agents/messages/client.py
@@ -0,0 +1,787 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+from ...core.client_wrapper import SyncClientWrapper
+from ...core.request_options import RequestOptions
+from .types.messages_list_response import MessagesListResponse
+from ...core.jsonable_encoder import jsonable_encoder
+from ...core.pydantic_utilities import parse_obj_as
+from ...errors.unprocessable_entity_error import UnprocessableEntityError
+from ...types.http_validation_error import HttpValidationError
+from json.decoder import JSONDecodeError
+from ...core.api_error import ApiError
+from ...types.message_create import MessageCreate
+from ...types.letta_response import LettaResponse
+from ...core.serialization import convert_and_respect_annotation_metadata
+from ...types.message_role import MessageRole
+from ...types.letta_schemas_openai_chat_completions_tool_call_input import (
+    LettaSchemasOpenaiChatCompletionsToolCallInput,
+)
+from ...types.letta_schemas_message_message import LettaSchemasMessageMessage
+from .types.letta_streaming_response import LettaStreamingResponse
+import httpx_sse
+import json
+from ...core.client_wrapper import AsyncClientWrapper
+
+# this is used as the default value for optional parameters
+OMIT = typing.cast(typing.Any, ...)
+
+
+class MessagesClient:
+    def __init__(self, *, client_wrapper: SyncClientWrapper):
+        self._client_wrapper = client_wrapper
+
+    def list(
+        self,
+        agent_id: str,
+        *,
+        before: typing.Optional[str] = None,
+        limit: typing.Optional[int] = None,
+        msg_object: typing.Optional[bool] = None,
+        assistant_message_tool_name: typing.Optional[str] = None,
+        assistant_message_tool_kwarg: typing.Optional[str] = None,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> MessagesListResponse:
+        """
+        Retrieve message history for an agent.
+
+        Parameters
+        ----------
+        agent_id : str
+
+        before : typing.Optional[str]
+            Message before which to retrieve the returned messages.
+
+        limit : typing.Optional[int]
+            Maximum number of messages to retrieve.
+
+        msg_object : typing.Optional[bool]
+            If true, returns Message objects. If false, return LettaMessage objects.
+
+        assistant_message_tool_name : typing.Optional[str]
+            The name of the designated message tool.
+
+        assistant_message_tool_kwarg : typing.Optional[str]
+            The name of the message argument in the designated message tool.
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        MessagesListResponse
+            Successful Response
+
+        Examples
+        --------
+        from letta import Letta
+
+        client = Letta(
+            token="YOUR_TOKEN",
+        )
+        client.agents.messages.list(
+            agent_id="agent_id",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            f"v1/agents/{jsonable_encoder(agent_id)}/messages",
+            method="GET",
+            params={
+                "before": before,
+                "limit": limit,
+                "msg_object": msg_object,
+                "assistant_message_tool_name": assistant_message_tool_name,
+                "assistant_message_tool_kwarg": assistant_message_tool_kwarg,
+            },
+            request_options=request_options,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return typing.cast(
+                    MessagesListResponse,
+                    parse_obj_as(
+                        type_=MessagesListResponse,  # type: ignore
+                        object_=_response.json(),
+                    ),
+                )
+            if _response.status_code == 422:
+                raise UnprocessableEntityError(
+                    typing.cast(
+                        HttpValidationError,
+                        parse_obj_as(
+                            type_=HttpValidationError,  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def create(
+        self,
+        agent_id: str,
+        *,
+        messages: typing.Sequence[MessageCreate],
+        assistant_message_tool_name: typing.Optional[str] = OMIT,
+        assistant_message_tool_kwarg: typing.Optional[str] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> LettaResponse:
+        """
+        Process a user message and return the agent's response.
+        This endpoint accepts a message from a user and processes it through the agent.
+
+        Parameters
+        ----------
+        agent_id : str
+
+        messages : typing.Sequence[MessageCreate]
+            The messages to be sent to the agent.
+
+        assistant_message_tool_name : typing.Optional[str]
+            The name of the designated message tool.
+
+        assistant_message_tool_kwarg : typing.Optional[str]
+            The name of the message argument in the designated message tool.
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        LettaResponse
+            Successful Response
+
+        Examples
+        --------
+        from letta import Letta, MessageCreate
+
+        client = Letta(
+            token="YOUR_TOKEN",
+        )
+        client.agents.messages.create(
+            agent_id="agent_id",
+            messages=[
+                MessageCreate(
+                    role="user",
+                    text="text",
+                )
+            ],
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            f"v1/agents/{jsonable_encoder(agent_id)}/messages",
+            method="POST",
+            json={
+                "messages": convert_and_respect_annotation_metadata(
+                    object_=messages, annotation=typing.Sequence[MessageCreate], direction="write"
+                ),
+                "assistant_message_tool_name": assistant_message_tool_name,
+                "assistant_message_tool_kwarg": assistant_message_tool_kwarg,
+            },
+            request_options=request_options,
+            omit=OMIT,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return typing.cast(
+                    LettaResponse,
+                    parse_obj_as(
+                        type_=LettaResponse,  # type: ignore
+                        object_=_response.json(),
+                    ),
+                )
+            if _response.status_code == 422:
+                raise UnprocessableEntityError(
+                    typing.cast(
+                        HttpValidationError,
+                        parse_obj_as(
+                            type_=HttpValidationError,  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def update(
+        self,
+        agent_id: str,
+        message_id: str,
+        *,
+        role: typing.Optional[MessageRole] = OMIT,
+        text: typing.Optional[str] = OMIT,
+        name: typing.Optional[str] = OMIT,
+        tool_calls: typing.Optional[typing.Sequence[LettaSchemasOpenaiChatCompletionsToolCallInput]] = OMIT,
+        tool_call_id: typing.Optional[str] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> LettaSchemasMessageMessage:
+        """
+        Update the details of a message associated with an agent.
+
+        Parameters
+        ----------
+        agent_id : str
+
+        message_id : str
+
+        role : typing.Optional[MessageRole]
+            The role of the participant.
+
+        text : typing.Optional[str]
+            The text of the message.
+
+        name : typing.Optional[str]
+            The name of the participant.
+
+        tool_calls : typing.Optional[typing.Sequence[LettaSchemasOpenaiChatCompletionsToolCallInput]]
+            The list of tool calls requested.
+
+        tool_call_id : typing.Optional[str]
+            The id of the tool call.
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        LettaSchemasMessageMessage
+            Successful Response
+
+        Examples
+        --------
+        from letta import Letta
+
+        client = Letta(
+            token="YOUR_TOKEN",
+        )
+        client.agents.messages.update(
+            agent_id="agent_id",
+            message_id="message_id",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            f"v1/agents/{jsonable_encoder(agent_id)}/messages/{jsonable_encoder(message_id)}",
+            method="PATCH",
+            json={
+                "role": role,
+                "text": text,
+                "name": name,
+                "tool_calls": convert_and_respect_annotation_metadata(
+                    object_=tool_calls,
+                    annotation=typing.Sequence[LettaSchemasOpenaiChatCompletionsToolCallInput],
+                    direction="write",
+                ),
+                "tool_call_id": tool_call_id,
+            },
+            headers={
+                "content-type": "application/json",
+            },
+            request_options=request_options,
+            omit=OMIT,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return typing.cast(
+                    LettaSchemasMessageMessage,
+                    parse_obj_as(
+                        type_=LettaSchemasMessageMessage,  # type: ignore
+                        object_=_response.json(),
+                    ),
+                )
+            if _response.status_code == 422:
+                raise UnprocessableEntityError(
+                    typing.cast(
+                        HttpValidationError,
+                        parse_obj_as(
+                            type_=HttpValidationError,  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    def stream(
+        self,
+        agent_id: str,
+        *,
+        messages: typing.Sequence[MessageCreate],
+        assistant_message_tool_name: typing.Optional[str] = OMIT,
+        assistant_message_tool_kwarg: typing.Optional[str] = OMIT,
+        stream_tokens: typing.Optional[bool] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> typing.Iterator[LettaStreamingResponse]:
+        """
+        Process a user message and return the agent's response.
+        This endpoint accepts a message from a user and processes it through the agent.
+        It will stream the steps of the response always, and stream the tokens if 'stream_tokens' is set to True.
+
+        Parameters
+        ----------
+        agent_id : str
+
+        messages : typing.Sequence[MessageCreate]
+            The messages to be sent to the agent.
+
+        assistant_message_tool_name : typing.Optional[str]
+            The name of the designated message tool.
+
+        assistant_message_tool_kwarg : typing.Optional[str]
+            The name of the message argument in the designated message tool.
+
+        stream_tokens : typing.Optional[bool]
+            Flag to determine if individual tokens should be streamed. Set to True for token streaming (requires stream_steps = True).
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Yields
+        ------
+        typing.Iterator[LettaStreamingResponse]
+            Successful response
+        """
+        with self._client_wrapper.httpx_client.stream(
+            f"v1/agents/{jsonable_encoder(agent_id)}/messages/stream",
+            method="POST",
+            json={
+                "messages": convert_and_respect_annotation_metadata(
+                    object_=messages, annotation=typing.Sequence[MessageCreate], direction="write"
+                ),
+                "assistant_message_tool_name": assistant_message_tool_name,
+                "assistant_message_tool_kwarg": assistant_message_tool_kwarg,
+                "stream_tokens": stream_tokens,
+            },
+            headers={
+                "content-type": "application/json",
+            },
+            request_options=request_options,
+            omit=OMIT,
+        ) as _response:
+            try:
+                if 200 <= _response.status_code < 300:
+                    _event_source = httpx_sse.EventSource(_response)
+                    for _sse in _event_source.iter_sse():
+                        try:
+                            yield typing.cast(
+                                LettaStreamingResponse,
+                                parse_obj_as(
+                                    type_=LettaStreamingResponse,  # type: ignore
+                                    object_=json.loads(_sse.data),
+                                ),
+                            )
+                        except:
+                            pass
+                    return
+                _response.read()
+                if _response.status_code == 422:
+                    raise UnprocessableEntityError(
+                        typing.cast(
+                            HttpValidationError,
+                            parse_obj_as(
+                                type_=HttpValidationError,  # type: ignore
+                                object_=_response.json(),
+                            ),
+                        )
+                    )
+                _response_json = _response.json()
+            except JSONDecodeError:
+                raise ApiError(status_code=_response.status_code, body=_response.text)
+            raise ApiError(status_code=_response.status_code, body=_response_json)
+
+
+class AsyncMessagesClient:
+    def __init__(self, *, client_wrapper: AsyncClientWrapper):
+        self._client_wrapper = client_wrapper
+
+    async def list(
+        self,
+        agent_id: str,
+        *,
+        before: typing.Optional[str] = None,
+        limit: typing.Optional[int] = None,
+        msg_object: typing.Optional[bool] = None,
+        assistant_message_tool_name: typing.Optional[str] = None,
+        assistant_message_tool_kwarg: typing.Optional[str] = None,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> MessagesListResponse:
+        """
+        Retrieve message history for an agent.
+
+        Parameters
+        ----------
+        agent_id : str
+
+        before : typing.Optional[str]
+            Message before which to retrieve the returned messages.
+
+        limit : typing.Optional[int]
+            Maximum number of messages to retrieve.
+
+        msg_object : typing.Optional[bool]
+            If true, returns Message objects. If false, return LettaMessage objects.
+
+        assistant_message_tool_name : typing.Optional[str]
+            The name of the designated message tool.
+
+        assistant_message_tool_kwarg : typing.Optional[str]
+            The name of the message argument in the designated message tool.
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        MessagesListResponse
+            Successful Response
+
+        Examples
+        --------
+        import asyncio
+
+        from letta import AsyncLetta
+
+        client = AsyncLetta(
+            token="YOUR_TOKEN",
+        )
+
+
+        async def main() -> None:
+            await client.agents.messages.list(
+                agent_id="agent_id",
+            )
+
+
+        asyncio.run(main())
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            f"v1/agents/{jsonable_encoder(agent_id)}/messages",
+            method="GET",
+            params={
+                "before": before,
+                "limit": limit,
+                "msg_object": msg_object,
+                "assistant_message_tool_name": assistant_message_tool_name,
+                "assistant_message_tool_kwarg": assistant_message_tool_kwarg,
+            },
+            request_options=request_options,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return typing.cast(
+                    MessagesListResponse,
+                    parse_obj_as(
+                        type_=MessagesListResponse,  # type: ignore
+                        object_=_response.json(),
+                    ),
+                )
+            if _response.status_code == 422:
+                raise UnprocessableEntityError(
+                    typing.cast(
+                        HttpValidationError,
+                        parse_obj_as(
+                            type_=HttpValidationError,  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def create(
+        self,
+        agent_id: str,
+        *,
+        messages: typing.Sequence[MessageCreate],
+        assistant_message_tool_name: typing.Optional[str] = OMIT,
+        assistant_message_tool_kwarg: typing.Optional[str] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> LettaResponse:
+        """
+        Process a user message and return the agent's response.
+        This endpoint accepts a message from a user and processes it through the agent.
+
+        Parameters
+        ----------
+        agent_id : str
+
+        messages : typing.Sequence[MessageCreate]
+            The messages to be sent to the agent.
+
+        assistant_message_tool_name : typing.Optional[str]
+            The name of the designated message tool.
+
+        assistant_message_tool_kwarg : typing.Optional[str]
+            The name of the message argument in the designated message tool.
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        LettaResponse
+            Successful Response
+
+        Examples
+        --------
+        import asyncio
+
+        from letta import AsyncLetta, MessageCreate
+
+        client = AsyncLetta(
+            token="YOUR_TOKEN",
+        )
+
+
+        async def main() -> None:
+            await client.agents.messages.create(
+                agent_id="agent_id",
+                messages=[
+                    MessageCreate(
+                        role="user",
+                        text="text",
+                    )
+                ],
+            )
+
+
+        asyncio.run(main())
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            f"v1/agents/{jsonable_encoder(agent_id)}/messages",
+            method="POST",
+            json={
+                "messages": convert_and_respect_annotation_metadata(
+                    object_=messages, annotation=typing.Sequence[MessageCreate], direction="write"
+                ),
+                "assistant_message_tool_name": assistant_message_tool_name,
+                "assistant_message_tool_kwarg": assistant_message_tool_kwarg,
+            },
+            request_options=request_options,
+            omit=OMIT,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return typing.cast(
+                    LettaResponse,
+                    parse_obj_as(
+                        type_=LettaResponse,  # type: ignore
+                        object_=_response.json(),
+                    ),
+                )
+            if _response.status_code == 422:
+                raise UnprocessableEntityError(
+                    typing.cast(
+                        HttpValidationError,
+                        parse_obj_as(
+                            type_=HttpValidationError,  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def update(
+        self,
+        agent_id: str,
+        message_id: str,
+        *,
+        role: typing.Optional[MessageRole] = OMIT,
+        text: typing.Optional[str] = OMIT,
+        name: typing.Optional[str] = OMIT,
+        tool_calls: typing.Optional[typing.Sequence[LettaSchemasOpenaiChatCompletionsToolCallInput]] = OMIT,
+        tool_call_id: typing.Optional[str] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> LettaSchemasMessageMessage:
+        """
+        Update the details of a message associated with an agent.
+
+        Parameters
+        ----------
+        agent_id : str
+
+        message_id : str
+
+        role : typing.Optional[MessageRole]
+            The role of the participant.
+
+        text : typing.Optional[str]
+            The text of the message.
+
+        name : typing.Optional[str]
+            The name of the participant.
+
+        tool_calls : typing.Optional[typing.Sequence[LettaSchemasOpenaiChatCompletionsToolCallInput]]
+            The list of tool calls requested.
+
+        tool_call_id : typing.Optional[str]
+            The id of the tool call.
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        LettaSchemasMessageMessage
+            Successful Response
+
+        Examples
+        --------
+        import asyncio
+
+        from letta import AsyncLetta
+
+        client = AsyncLetta(
+            token="YOUR_TOKEN",
+        )
+
+
+        async def main() -> None:
+            await client.agents.messages.update(
+                agent_id="agent_id",
+                message_id="message_id",
+            )
+
+
+        asyncio.run(main())
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            f"v1/agents/{jsonable_encoder(agent_id)}/messages/{jsonable_encoder(message_id)}",
+            method="PATCH",
+            json={
+                "role": role,
+                "text": text,
+                "name": name,
+                "tool_calls": convert_and_respect_annotation_metadata(
+                    object_=tool_calls,
+                    annotation=typing.Sequence[LettaSchemasOpenaiChatCompletionsToolCallInput],
+                    direction="write",
+                ),
+                "tool_call_id": tool_call_id,
+            },
+            headers={
+                "content-type": "application/json",
+            },
+            request_options=request_options,
+            omit=OMIT,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return typing.cast(
+                    LettaSchemasMessageMessage,
+                    parse_obj_as(
+                        type_=LettaSchemasMessageMessage,  # type: ignore
+                        object_=_response.json(),
+                    ),
+                )
+            if _response.status_code == 422:
+                raise UnprocessableEntityError(
+                    typing.cast(
+                        HttpValidationError,
+                        parse_obj_as(
+                            type_=HttpValidationError,  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def stream(
+        self,
+        agent_id: str,
+        *,
+        messages: typing.Sequence[MessageCreate],
+        assistant_message_tool_name: typing.Optional[str] = OMIT,
+        assistant_message_tool_kwarg: typing.Optional[str] = OMIT,
+        stream_tokens: typing.Optional[bool] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> typing.AsyncIterator[LettaStreamingResponse]:
+        """
+        Process a user message and return the agent's response.
+        This endpoint accepts a message from a user and processes it through the agent.
+        It will stream the steps of the response always, and stream the tokens if 'stream_tokens' is set to True.
+
+        Parameters
+        ----------
+        agent_id : str
+
+        messages : typing.Sequence[MessageCreate]
+            The messages to be sent to the agent.
+
+        assistant_message_tool_name : typing.Optional[str]
+            The name of the designated message tool.
+
+        assistant_message_tool_kwarg : typing.Optional[str]
+            The name of the message argument in the designated message tool.
+
+        stream_tokens : typing.Optional[bool]
+            Flag to determine if individual tokens should be streamed. Set to True for token streaming (requires stream_steps = True).
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Yields
+        ------
+        typing.AsyncIterator[LettaStreamingResponse]
+            Successful response
+        """
+        async with self._client_wrapper.httpx_client.stream(
+            f"v1/agents/{jsonable_encoder(agent_id)}/messages/stream",
+            method="POST",
+            json={
+                "messages": convert_and_respect_annotation_metadata(
+                    object_=messages, annotation=typing.Sequence[MessageCreate], direction="write"
+                ),
+                "assistant_message_tool_name": assistant_message_tool_name,
+                "assistant_message_tool_kwarg": assistant_message_tool_kwarg,
+                "stream_tokens": stream_tokens,
+            },
+            headers={
+                "content-type": "application/json",
+            },
+            request_options=request_options,
+            omit=OMIT,
+        ) as _response:
+            try:
+                if 200 <= _response.status_code < 300:
+                    _event_source = httpx_sse.EventSource(_response)
+                    async for _sse in _event_source.aiter_sse():
+                        try:
+                            yield typing.cast(
+                                LettaStreamingResponse,
+                                parse_obj_as(
+                                    type_=LettaStreamingResponse,  # type: ignore
+                                    object_=json.loads(_sse.data),
+                                ),
+                            )
+                        except:
+                            pass
+                    return
+                await _response.aread()
+                if _response.status_code == 422:
+                    raise UnprocessableEntityError(
+                        typing.cast(
+                            HttpValidationError,
+                            parse_obj_as(
+                                type_=HttpValidationError,  # type: ignore
+                                object_=_response.json(),
+                            ),
+                        )
+                    )
+                _response_json = _response.json()
+            except JSONDecodeError:
+                raise ApiError(status_code=_response.status_code, body=_response.text)
+            raise ApiError(status_code=_response.status_code, body=_response_json)
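
For orientation, here is a minimal sketch (not part of the diff) of how the newly added streaming endpoint might be consumed, based on the stream() signature and the docstring examples above; the token, agent id, and message text are placeholders:

from letta import Letta, MessageCreate

# Placeholder credentials; substitute a real token and agent id.
client = Letta(
    token="YOUR_TOKEN",
)

# stream() yields parsed LettaStreamingResponse objects as server-sent events arrive.
for chunk in client.agents.messages.stream(
    agent_id="agent_id",
    messages=[
        MessageCreate(
            role="user",
            text="text",
        )
    ],
    stream_tokens=True,
):
    print(chunk)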