letta-client 0.1.16__py3-none-any.whl → 0.1.19__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of letta-client might be problematic.
- letta_client/__init__.py +34 -120
- letta_client/agents/__init__.py +18 -54
- letta_client/agents/archival_memory/client.py +25 -343
- letta_client/agents/client.py +1640 -347
- letta_client/agents/context/client.py +6 -4
- letta_client/agents/core_memory/client.py +95 -624
- letta_client/agents/memory_variables/__init__.py +2 -2
- letta_client/agents/memory_variables/client.py +15 -15
- letta_client/agents/memory_variables/types/__init__.py +2 -2
- letta_client/agents/memory_variables/types/{memory_variables_get_response.py → memory_variables_list_response.py} +1 -1
- letta_client/agents/messages/__init__.py +2 -22
- letta_client/agents/messages/client.py +32 -38
- letta_client/agents/messages/types/__init__.py +2 -21
- letta_client/agents/messages/types/letta_streaming_response.py +16 -139
- letta_client/agents/messages/types/messages_list_response.py +2 -2
- letta_client/agents/sources/client.py +266 -5
- letta_client/agents/tools/client.py +25 -27
- letta_client/agents/types/__init__.py +15 -25
- letta_client/agents/types/agents_search_request_search_item.py +10 -78
- letta_client/agents/types/{agents_search_request_search_item_order_by.py → agents_search_request_search_item_direction.py} +7 -6
- letta_client/agents/types/agents_search_request_search_item_direction_direction.py +5 -0
- letta_client/agents/types/agents_search_request_search_item_direction_value.py +5 -0
- letta_client/agents/types/{agents_search_request_search_item_name.py → agents_search_request_search_item_one.py} +5 -4
- letta_client/agents/types/agents_search_request_search_item_one_operator.py +5 -0
- letta_client/agents/types/{agents_search_request_search_item_tags.py → agents_search_request_search_item_two.py} +2 -1
- letta_client/agents/types/{agents_search_request_search_item_version.py → agents_search_request_search_item_zero.py} +3 -2
- letta_client/blocks/client.py +12 -260
- letta_client/client.py +3 -3
- letta_client/core/client_wrapper.py +1 -1
- letta_client/jobs/client.py +4 -4
- letta_client/providers/client.py +74 -74
- letta_client/runs/client.py +14 -12
- letta_client/sources/client.py +12 -288
- letta_client/tools/client.py +63 -189
- letta_client/types/__init__.py +21 -103
- letta_client/types/agent_state.py +3 -7
- letta_client/types/{assistant_message_output.py → assistant_message.py} +3 -2
- letta_client/types/block.py +2 -6
- letta_client/types/block_update.py +1 -5
- letta_client/types/{archival_memory_summary.py → chat_completion_message_tool_call.py} +7 -7
- letta_client/types/context_window_overview.py +4 -6
- letta_client/types/create_block.py +1 -5
- letta_client/types/embedding_config_embedding_endpoint_type.py +1 -0
- letta_client/types/{function_call_output.py → function.py} +1 -1
- letta_client/types/{function_schema.py → function_definition.py} +2 -1
- letta_client/types/{create_assistant_file_request.py → function_tool.py} +6 -7
- letta_client/types/job.py +1 -5
- letta_client/types/letta_message_union.py +9 -121
- letta_client/types/letta_usage_statistics.py +1 -0
- letta_client/types/llm_config_model_endpoint_type.py +1 -0
- letta_client/types/{letta_schemas_message_message.py → message.py} +9 -6
- letta_client/types/passage.py +1 -5
- letta_client/types/reasoning_message.py +2 -1
- letta_client/types/run.py +1 -5
- letta_client/types/source.py +2 -6
- letta_client/types/{system_message_output.py → system_message.py} +3 -2
- letta_client/types/{letta_schemas_tool_tool.py → tool.py} +1 -1
- letta_client/types/{letta_schemas_letta_message_tool_call.py → tool_call.py} +1 -1
- letta_client/types/tool_call_message.py +2 -1
- letta_client/types/tool_call_message_tool_call.py +2 -2
- letta_client/types/tool_return_message.py +2 -1
- letta_client/types/tool_type.py +2 -1
- letta_client/types/{user_message_output.py → user_message.py} +3 -2
- {letta_client-0.1.16.dist-info → letta_client-0.1.19.dist-info}/METADATA +2 -2
- {letta_client-0.1.16.dist-info → letta_client-0.1.19.dist-info}/RECORD +66 -101
- letta_client/agents/recall_memory/__init__.py +0 -2
- letta_client/agents/recall_memory/client.py +0 -147
- letta_client/agents/types/agents_search_request_search_item_name_operator.py +0 -5
- letta_client/agents/types/agents_search_request_search_item_order_by_direction.py +0 -5
- letta_client/agents/types/agents_search_request_search_item_order_by_value.py +0 -5
- letta_client/types/assistant_file.py +0 -33
- letta_client/types/assistant_message_input.py +0 -23
- letta_client/types/chat_completion_request.py +0 -49
- letta_client/types/chat_completion_request_function_call.py +0 -6
- letta_client/types/chat_completion_request_messages_item.py +0 -11
- letta_client/types/chat_completion_request_stop.py +0 -5
- letta_client/types/chat_completion_request_tool_choice.py +0 -8
- letta_client/types/chat_completion_response.py +0 -32
- letta_client/types/choice.py +0 -25
- letta_client/types/create_assistant_request.py +0 -57
- letta_client/types/delete_assistant_file_response.py +0 -28
- letta_client/types/delete_assistant_response.py +0 -28
- letta_client/types/function_call_input.py +0 -19
- letta_client/types/letta_schemas_openai_chat_completion_request_tool.py +0 -21
- letta_client/types/letta_schemas_openai_chat_completion_request_tool_call.py +0 -24
- letta_client/types/letta_schemas_openai_chat_completion_request_tool_call_function.py +0 -20
- letta_client/types/letta_schemas_openai_chat_completion_response_message.py +0 -24
- letta_client/types/letta_schemas_openai_chat_completion_response_tool_call.py +0 -22
- letta_client/types/letta_schemas_openai_chat_completions_tool_call_function.py +0 -27
- letta_client/types/letta_schemas_openai_chat_completions_tool_call_input.py +0 -29
- letta_client/types/letta_schemas_openai_chat_completions_tool_call_output.py +0 -29
- letta_client/types/log_prob_token.py +0 -21
- letta_client/types/message_content_log_prob.py +0 -23
- letta_client/types/open_ai_assistant.py +0 -67
- letta_client/types/recall_memory_summary.py +0 -22
- letta_client/types/response_format.py +0 -19
- letta_client/types/system_message_input.py +0 -21
- letta_client/types/tool_call_function_output.py +0 -27
- letta_client/types/tool_function_choice.py +0 -21
- letta_client/types/tool_input.py +0 -21
- letta_client/types/tool_message.py +0 -21
- letta_client/types/user_message_input.py +0 -22
- letta_client/types/user_message_input_content.py +0 -5
- {letta_client-0.1.16.dist-info → letta_client-0.1.19.dist-info}/WHEEL +0 -0
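Read together, the renames above strip the letta_schemas_* and *_output/*_input prefixes from the generated models. A sketch of the imports those renames imply in 0.1.19; the class names appear in the diffs below, but the top-level re-export from letta_client.types is an assumption based on how Fern normally generates the types package:

# Imports implied by the module renames above; re-export from letta_client.types is assumed.
from letta_client.types import (
    AssistantMessage,               # assistant_message_output.py -> assistant_message.py
    ChatCompletionMessageToolCall,  # new chat_completion_message_tool_call.py module
    Message,                        # letta_schemas_message_message.py -> message.py
    SystemMessage,                  # system_message_output.py -> system_message.py
    UserMessage,                    # user_message_output.py -> user_message.py
)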
letta_client/agents/memory_variables/client.py

@@ -3,7 +3,7 @@
 from ...core.client_wrapper import SyncClientWrapper
 import typing
 from ...core.request_options import RequestOptions
-from .types.
+from .types.memory_variables_list_response import MemoryVariablesListResponse
 from ...core.jsonable_encoder import jsonable_encoder
 from ...core.unchecked_base_model import construct_type
 from ...errors.not_found_error import NotFoundError
@@ -16,9 +16,9 @@ class MemoryVariablesClient:
     def __init__(self, *, client_wrapper: SyncClientWrapper):
         self._client_wrapper = client_wrapper

-    def
+    def list(
         self, agent_id: str, *, request_options: typing.Optional[RequestOptions] = None
-    ) ->
+    ) -> MemoryVariablesListResponse:
         """
         <Note>This endpoint is only available on Letta Cloud.</Note>

@@ -33,7 +33,7 @@ class MemoryVariablesClient:

         Returns
         -------
-
+        MemoryVariablesListResponse
             200

         Examples
@@ -43,21 +43,21 @@ class MemoryVariablesClient:
         client = Letta(
             token="YOUR_TOKEN",
         )
-        client.agents.memory_variables.
+        client.agents.memory_variables.list(
             agent_id="agent_id",
         )
         """
         _response = self._client_wrapper.httpx_client.request(
-            f"v1/agents/{jsonable_encoder(agent_id)}/variables",
+            f"v1/agents/{jsonable_encoder(agent_id)}/core_memory/variables",
             method="GET",
             request_options=request_options,
         )
         try:
             if 200 <= _response.status_code < 300:
                 return typing.cast(
-
+                    MemoryVariablesListResponse,
                     construct_type(
-                        type_=
+                        type_=MemoryVariablesListResponse, # type: ignore
                         object_=_response.json(),
                     ),
                 )
@@ -81,9 +81,9 @@ class AsyncMemoryVariablesClient:
     def __init__(self, *, client_wrapper: AsyncClientWrapper):
         self._client_wrapper = client_wrapper

-    async def
+    async def list(
         self, agent_id: str, *, request_options: typing.Optional[RequestOptions] = None
-    ) ->
+    ) -> MemoryVariablesListResponse:
         """
         <Note>This endpoint is only available on Letta Cloud.</Note>

@@ -98,7 +98,7 @@ class AsyncMemoryVariablesClient:

         Returns
         -------
-
+        MemoryVariablesListResponse
             200

         Examples
@@ -113,7 +113,7 @@ class AsyncMemoryVariablesClient:


         async def main() -> None:
-            await client.agents.memory_variables.
+            await client.agents.memory_variables.list(
                 agent_id="agent_id",
             )

@@ -121,16 +121,16 @@ class AsyncMemoryVariablesClient:
         asyncio.run(main())
         """
         _response = await self._client_wrapper.httpx_client.request(
-            f"v1/agents/{jsonable_encoder(agent_id)}/variables",
+            f"v1/agents/{jsonable_encoder(agent_id)}/core_memory/variables",
             method="GET",
             request_options=request_options,
         )
         try:
             if 200 <= _response.status_code < 300:
                 return typing.cast(
-
+                    MemoryVariablesListResponse,
                     construct_type(
-                        type_=
+                        type_=MemoryVariablesListResponse, # type: ignore
                         object_=_response.json(),
                     ),
                 )
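A minimal usage sketch of the renamed memory-variables call, matching the generated docstring examples above. The top-level Letta import is assumed to be re-exported from the package root; the /core_memory/variables path change is internal to the SDK, so callers only see the new method name:

from letta_client import Letta  # top-level re-export assumed

client = Letta(token="YOUR_TOKEN")

# list() now returns MemoryVariablesListResponse and calls /core_memory/variables under the hood
variables = client.agents.memory_variables.list(agent_id="agent_id")
print(variables)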
letta_client/agents/memory_variables/types/__init__.py

@@ -1,5 +1,5 @@
 # This file was auto-generated by Fern from our API Definition.

-from .
+from .memory_variables_list_response import MemoryVariablesListResponse

-__all__ = ["
+__all__ = ["MemoryVariablesListResponse"]
letta_client/agents/messages/__init__.py

@@ -1,25 +1,5 @@
 # This file was auto-generated by Fern from our API Definition.

-from .types import (
-    LettaStreamingResponse,
-    LettaStreamingResponse_AssistantMessage,
-    LettaStreamingResponse_ReasoningMessage,
-    LettaStreamingResponse_SystemMessage,
-    LettaStreamingResponse_ToolCallMessage,
-    LettaStreamingResponse_ToolReturnMessage,
-    LettaStreamingResponse_UsageStatistics,
-    LettaStreamingResponse_UserMessage,
-    MessagesListResponse,
-)
+from .types import LettaStreamingResponse, MessagesListResponse

-__all__ = [
-    "LettaStreamingResponse",
-    "LettaStreamingResponse_AssistantMessage",
-    "LettaStreamingResponse_ReasoningMessage",
-    "LettaStreamingResponse_SystemMessage",
-    "LettaStreamingResponse_ToolCallMessage",
-    "LettaStreamingResponse_ToolReturnMessage",
-    "LettaStreamingResponse_UsageStatistics",
-    "LettaStreamingResponse_UserMessage",
-    "MessagesListResponse",
-]
+__all__ = ["LettaStreamingResponse", "MessagesListResponse"]
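After this change only two names are re-exported from the messages subpackage; the LettaStreamingResponse_* variant aliases are gone. The imports that remain valid in 0.1.19, as shown in the diff above:

# Only these two names are still exported from letta_client.agents.messages.
from letta_client.agents.messages import LettaStreamingResponse, MessagesListResponse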
letta_client/agents/messages/client.py

@@ -15,10 +15,8 @@ from ...types.letta_request_config import LettaRequestConfig
 from ...types.letta_response import LettaResponse
 from ...core.serialization import convert_and_respect_annotation_metadata
 from ...types.message_role import MessageRole
-from ...types.
-
-)
-from ...types.letta_schemas_message_message import LettaSchemasMessageMessage
+from ...types.chat_completion_message_tool_call import ChatCompletionMessageToolCall
+from ...types.message import Message
 from .types.letta_streaming_response import LettaStreamingResponse
 import httpx_sse
 import json
@@ -121,7 +119,7 @@ class MessagesClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    def
+    def create(
         self,
         agent_id: str,
         *,
@@ -158,7 +156,7 @@ class MessagesClient:
         client = Letta(
             token="YOUR_TOKEN",
         )
-        client.agents.messages.
+        client.agents.messages.create(
             agent_id="agent_id",
             messages=[
                 MessageCreate(
@@ -206,7 +204,7 @@ class MessagesClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    def
+    def modify(
         self,
         agent_id: str,
         message_id: str,
@@ -214,10 +212,10 @@ class MessagesClient:
         role: typing.Optional[MessageRole] = OMIT,
         text: typing.Optional[str] = OMIT,
         name: typing.Optional[str] = OMIT,
-        tool_calls: typing.Optional[typing.Sequence[
+        tool_calls: typing.Optional[typing.Sequence[ChatCompletionMessageToolCall]] = OMIT,
         tool_call_id: typing.Optional[str] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
-    ) ->
+    ) -> Message:
         """
         Update the details of a message associated with an agent.

@@ -236,7 +234,7 @@ class MessagesClient:
         name : typing.Optional[str]
             The name of the participant.

-        tool_calls : typing.Optional[typing.Sequence[
+        tool_calls : typing.Optional[typing.Sequence[ChatCompletionMessageToolCall]]
             The list of tool calls requested.

         tool_call_id : typing.Optional[str]
@@ -247,7 +245,7 @@ class MessagesClient:

         Returns
         -------
-
+        Message
             Successful Response

         Examples
@@ -257,7 +255,7 @@ class MessagesClient:
         client = Letta(
             token="YOUR_TOKEN",
         )
-        client.agents.messages.
+        client.agents.messages.modify(
             agent_id="agent_id",
             message_id="message_id",
         )
@@ -270,9 +268,7 @@ class MessagesClient:
                 "text": text,
                 "name": name,
                 "tool_calls": convert_and_respect_annotation_metadata(
-                    object_=tool_calls,
-                    annotation=typing.Sequence[LettaSchemasOpenaiChatCompletionsToolCallInput],
-                    direction="write",
+                    object_=tool_calls, annotation=typing.Sequence[ChatCompletionMessageToolCall], direction="write"
                 ),
                 "tool_call_id": tool_call_id,
             },
@@ -285,9 +281,9 @@ class MessagesClient:
         try:
             if 200 <= _response.status_code < 300:
                 return typing.cast(
-
+                    Message,
                     construct_type(
-                        type_=
+                        type_=Message, # type: ignore
                         object_=_response.json(),
                     ),
                 )
@@ -306,7 +302,7 @@ class MessagesClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    def
+    def create_stream(
         self,
         agent_id: str,
         *,
@@ -348,7 +344,7 @@ class MessagesClient:
         client = Letta(
             token="YOUR_TOKEN",
         )
-        response = client.agents.messages.
+        response = client.agents.messages.create_stream(
             agent_id="agent_id",
             messages=[
                 MessageCreate(
@@ -409,7 +405,7 @@ class MessagesClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    def
+    def create_async(
         self,
         agent_id: str,
         *,
@@ -446,7 +442,7 @@ class MessagesClient:
         client = Letta(
             token="YOUR_TOKEN",
         )
-        client.agents.messages.
+        client.agents.messages.create_async(
             agent_id="agent_id",
             messages=[
                 MessageCreate(
@@ -595,7 +591,7 @@ class AsyncMessagesClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    async def
+    async def create(
         self,
         agent_id: str,
         *,
@@ -637,7 +633,7 @@ class AsyncMessagesClient:


         async def main() -> None:
-            await client.agents.messages.
+            await client.agents.messages.create(
                 agent_id="agent_id",
                 messages=[
                     MessageCreate(
@@ -688,7 +684,7 @@ class AsyncMessagesClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    async def
+    async def modify(
         self,
         agent_id: str,
         message_id: str,
@@ -696,10 +692,10 @@ class AsyncMessagesClient:
         role: typing.Optional[MessageRole] = OMIT,
         text: typing.Optional[str] = OMIT,
         name: typing.Optional[str] = OMIT,
-        tool_calls: typing.Optional[typing.Sequence[
+        tool_calls: typing.Optional[typing.Sequence[ChatCompletionMessageToolCall]] = OMIT,
         tool_call_id: typing.Optional[str] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
-    ) ->
+    ) -> Message:
         """
         Update the details of a message associated with an agent.

@@ -718,7 +714,7 @@ class AsyncMessagesClient:
         name : typing.Optional[str]
             The name of the participant.

-        tool_calls : typing.Optional[typing.Sequence[
+        tool_calls : typing.Optional[typing.Sequence[ChatCompletionMessageToolCall]]
             The list of tool calls requested.

         tool_call_id : typing.Optional[str]
@@ -729,7 +725,7 @@ class AsyncMessagesClient:

         Returns
         -------
-
+        Message
             Successful Response

         Examples
@@ -744,7 +740,7 @@ class AsyncMessagesClient:


         async def main() -> None:
-            await client.agents.messages.
+            await client.agents.messages.modify(
                 agent_id="agent_id",
                 message_id="message_id",
             )
@@ -760,9 +756,7 @@ class AsyncMessagesClient:
                 "text": text,
                 "name": name,
                 "tool_calls": convert_and_respect_annotation_metadata(
-                    object_=tool_calls,
-                    annotation=typing.Sequence[LettaSchemasOpenaiChatCompletionsToolCallInput],
-                    direction="write",
+                    object_=tool_calls, annotation=typing.Sequence[ChatCompletionMessageToolCall], direction="write"
                 ),
                 "tool_call_id": tool_call_id,
             },
@@ -775,9 +769,9 @@ class AsyncMessagesClient:
         try:
             if 200 <= _response.status_code < 300:
                 return typing.cast(
-
+                    Message,
                     construct_type(
-                        type_=
+                        type_=Message, # type: ignore
                         object_=_response.json(),
                     ),
                 )
@@ -796,7 +790,7 @@ class AsyncMessagesClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    async def
+    async def create_stream(
         self,
         agent_id: str,
         *,
@@ -843,7 +837,7 @@ class AsyncMessagesClient:


         async def main() -> None:
-            response = await client.agents.messages.
+            response = await client.agents.messages.create_stream(
                 agent_id="agent_id",
                 messages=[
                     MessageCreate(
@@ -907,7 +901,7 @@ class AsyncMessagesClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    async def
+    async def create_async(
         self,
         agent_id: str,
         *,
@@ -949,7 +943,7 @@ class AsyncMessagesClient:


         async def main() -> None:
-            await client.agents.messages.
+            await client.agents.messages.create_async(
                 agent_id="agent_id",
                 messages=[
                     MessageCreate(
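A sketch of the renamed message operations on MessagesClient, assuming MessageCreate still takes role/text keyword arguments (the generated docstring examples above are truncated at that point) and that both classes are re-exported from the package root:

from letta_client import Letta, MessageCreate  # top-level re-exports assumed

client = Letta(token="YOUR_TOKEN")

# create() sends new user messages to the agent (LettaResponse return type assumed from the imports above)
response = client.agents.messages.create(
    agent_id="agent_id",
    messages=[MessageCreate(role="user", text="Hello")],  # field names assumed for this generation
)

# modify() now returns Message, and its tool_calls parameter takes ChatCompletionMessageToolCall
updated = client.agents.messages.modify(
    agent_id="agent_id",
    message_id="message_id",
    text="edited message text",
)
print(updated)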
letta_client/agents/messages/types/__init__.py

@@ -1,25 +1,6 @@
 # This file was auto-generated by Fern from our API Definition.

-from .letta_streaming_response import (
-    LettaStreamingResponse,
-    LettaStreamingResponse_AssistantMessage,
-    LettaStreamingResponse_ReasoningMessage,
-    LettaStreamingResponse_SystemMessage,
-    LettaStreamingResponse_ToolCallMessage,
-    LettaStreamingResponse_ToolReturnMessage,
-    LettaStreamingResponse_UsageStatistics,
-    LettaStreamingResponse_UserMessage,
-)
+from .letta_streaming_response import LettaStreamingResponse
 from .messages_list_response import MessagesListResponse

-__all__ = [
-    "LettaStreamingResponse",
-    "LettaStreamingResponse_AssistantMessage",
-    "LettaStreamingResponse_ReasoningMessage",
-    "LettaStreamingResponse_SystemMessage",
-    "LettaStreamingResponse_ToolCallMessage",
-    "LettaStreamingResponse_ToolReturnMessage",
-    "LettaStreamingResponse_UsageStatistics",
-    "LettaStreamingResponse_UserMessage",
-    "MessagesListResponse",
-]
+__all__ = ["LettaStreamingResponse", "MessagesListResponse"]
letta_client/agents/messages/types/letta_streaming_response.py

@@ -1,143 +1,20 @@
 # This file was auto-generated by Fern from our API Definition.

-from __future__ import annotations
-from ....core.unchecked_base_model import UncheckedBaseModel
 import typing
-
-from ....
-import
-from ....types.
-from ....types.
-import
-from ....
-
-
-
-
-
-
-
-
-
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
-
-
-class LettaStreamingResponse_UserMessage(UncheckedBaseModel):
-    message_type: typing.Literal["user_message"] = "user_message"
-    id: str
-    date: dt.datetime
-    message: str
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
-
-
-class LettaStreamingResponse_ReasoningMessage(UncheckedBaseModel):
-    message_type: typing.Literal["reasoning_message"] = "reasoning_message"
-    id: str
-    date: dt.datetime
-    reasoning: str
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
-
-
-class LettaStreamingResponse_ToolCallMessage(UncheckedBaseModel):
-    message_type: typing.Literal["tool_call_message"] = "tool_call_message"
-    id: str
-    date: dt.datetime
-    tool_call: ToolCallMessageToolCall
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
-
-
-class LettaStreamingResponse_ToolReturnMessage(UncheckedBaseModel):
-    message_type: typing.Literal["tool_return_message"] = "tool_return_message"
-    id: str
-    date: dt.datetime
-    tool_return: str
-    status: ToolReturnMessageStatus
-    tool_call_id: str
-    stdout: typing.Optional[typing.List[str]] = None
-    stderr: typing.Optional[typing.List[str]] = None
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
-
-
-class LettaStreamingResponse_AssistantMessage(UncheckedBaseModel):
-    message_type: typing.Literal["assistant_message"] = "assistant_message"
-    id: str
-    date: dt.datetime
-    assistant_message: str
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
-
-
-class LettaStreamingResponse_UsageStatistics(UncheckedBaseModel):
-    message_type: typing.Literal["usage_statistics"] = "usage_statistics"
-    completion_tokens: typing.Optional[int] = None
-    prompt_tokens: typing.Optional[int] = None
-    total_tokens: typing.Optional[int] = None
-    step_count: typing.Optional[int] = None
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
-
-
-LettaStreamingResponse = typing_extensions.Annotated[
-    typing.Union[
-        LettaStreamingResponse_SystemMessage,
-        LettaStreamingResponse_UserMessage,
-        LettaStreamingResponse_ReasoningMessage,
-        LettaStreamingResponse_ToolCallMessage,
-        LettaStreamingResponse_ToolReturnMessage,
-        LettaStreamingResponse_AssistantMessage,
-        LettaStreamingResponse_UsageStatistics,
-    ],
-    UnionMetadata(discriminant="message_type"),
+from ....types.system_message import SystemMessage
+from ....types.user_message import UserMessage
+from ....types.reasoning_message import ReasoningMessage
+from ....types.tool_call_message import ToolCallMessage
+from ....types.tool_return_message import ToolReturnMessage
+from ....types.assistant_message import AssistantMessage
+from ....types.letta_usage_statistics import LettaUsageStatistics
+
+LettaStreamingResponse = typing.Union[
+    SystemMessage,
+    UserMessage,
+    ReasoningMessage,
+    ToolCallMessage,
+    ToolReturnMessage,
+    AssistantMessage,
+    LettaUsageStatistics,
 ]
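With the discriminated-union wrappers removed, LettaStreamingResponse is now a plain typing.Union of the standalone message models. A sketch of consuming create_stream() by branching on each chunk's message_type field; the field names are taken from the removed variant classes above and are assumed to carry over to the standalone models, and the MessageCreate fields are assumed as before:

from letta_client import Letta, MessageCreate  # top-level re-exports assumed

client = Letta(token="YOUR_TOKEN")

stream = client.agents.messages.create_stream(
    agent_id="agent_id",
    messages=[MessageCreate(role="user", text="Hello")],  # field names assumed
)

# Each chunk is one member of the LettaStreamingResponse union; message_type is assumed to
# remain the discriminating field, as it was on the removed LettaStreamingResponse_* classes.
for chunk in stream:
    message_type = getattr(chunk, "message_type", None)
    if message_type == "reasoning_message":
        print("reasoning:", chunk.reasoning)
    elif message_type == "assistant_message":
        print("assistant:", chunk.assistant_message)
    elif message_type == "usage_statistics":
        print("total tokens:", chunk.total_tokens)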
letta_client/agents/messages/types/messages_list_response.py

@@ -1,7 +1,7 @@
 # This file was auto-generated by Fern from our API Definition.

 import typing
-from ....types.
+from ....types.message import Message
 from ....types.letta_message_union import LettaMessageUnion

-MessagesListResponse = typing.Union[typing.List[
+MessagesListResponse = typing.Union[typing.List[Message], typing.List[LettaMessageUnion]]