letta-client 0.1.295__py3-none-any.whl → 0.1.299__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of letta-client might be problematic.
- letta_client/__init__.py +12 -0
- letta_client/agents/__init__.py +2 -0
- letta_client/agents/client.py +100 -0
- letta_client/agents/messages/__init__.py +2 -0
- letta_client/agents/messages/client.py +17 -15
- letta_client/agents/messages/raw_client.py +23 -21
- letta_client/agents/messages/types/__init__.py +2 -0
- letta_client/agents/messages/types/letta_async_request_messages_item.py +8 -0
- letta_client/agents/messages/types/messages_modify_response.py +4 -0
- letta_client/agents/raw_client.py +122 -0
- letta_client/agents/tools/client.py +74 -0
- letta_client/agents/tools/raw_client.py +60 -0
- letta_client/core/client_wrapper.py +2 -2
- letta_client/groups/messages/client.py +12 -11
- letta_client/groups/messages/raw_client.py +16 -15
- letta_client/groups/messages/types/messages_modify_response.py +4 -0
- letta_client/templates/client.py +20 -0
- letta_client/templates/raw_client.py +20 -0
- letta_client/tools/client.py +30 -0
- letta_client/tools/raw_client.py +30 -0
- letta_client/types/__init__.py +12 -0
- letta_client/types/approval_create.py +42 -0
- letta_client/types/approval_request_message.py +45 -0
- letta_client/types/approval_response_message.py +56 -0
- letta_client/types/letta_batch_request.py +2 -2
- letta_client/types/letta_batch_request_messages_item.py +8 -0
- letta_client/types/letta_message_union.py +4 -0
- letta_client/types/letta_request.py +2 -2
- letta_client/types/letta_request_messages_item.py +8 -0
- letta_client/types/letta_schemas_agent_file_message_schema.py +5 -0
- letta_client/types/letta_schemas_agent_file_tool_schema.py +5 -0
- letta_client/types/letta_streaming_request.py +3 -3
- letta_client/types/letta_streaming_request_messages_item.py +8 -0
- letta_client/types/message_create.py +5 -0
- letta_client/types/message_role.py +1 -1
- letta_client/types/message_type.py +2 -0
- letta_client/types/passage.py +5 -0
- letta_client/types/stop_reason_type.py +1 -0
- letta_client/types/tool.py +5 -0
- letta_client/types/tool_create.py +5 -0
- {letta_client-0.1.295.dist-info → letta_client-0.1.299.dist-info}/METADATA +1 -1
- {letta_client-0.1.295.dist-info → letta_client-0.1.299.dist-info}/RECORD +43 -36
- {letta_client-0.1.295.dist-info → letta_client-0.1.299.dist-info}/WHEEL +0 -0
letta_client/__init__.py
CHANGED
@@ -16,6 +16,9 @@ from .types import (
     AppAuthScheme,
     AppAuthSchemeAuthMode,
     AppModel,
+    ApprovalCreate,
+    ApprovalRequestMessage,
+    ApprovalResponseMessage,
     AssistantMessage,
     AssistantMessageContent,
     Audio,
@@ -156,12 +159,14 @@ from .types import (
     JsonSchemaResponseFormat,
     LettaBatchMessages,
     LettaBatchRequest,
+    LettaBatchRequestMessagesItem,
     LettaImage,
     LettaMessageContentUnion,
     LettaMessageUnion,
     LettaPing,
     LettaRequest,
     LettaRequestConfig,
+    LettaRequestMessagesItem,
     LettaResponse,
     LettaSchemasAgentFileAgentSchema,
     LettaSchemasAgentFileAgentSchemaResponseFormat,
@@ -175,6 +180,7 @@ from .types import (
     LettaSerializeSchemasPydanticAgentSchemaToolSchema,
     LettaStopReason,
     LettaStreamingRequest,
+    LettaStreamingRequestMessagesItem,
     LettaUsageStatistics,
     LettaUserMessageContentUnion,
     LlmConfig,
@@ -443,6 +449,9 @@ __all__ = [
     "AppAuthScheme",
     "AppAuthSchemeAuthMode",
     "AppModel",
+    "ApprovalCreate",
+    "ApprovalRequestMessage",
+    "ApprovalResponseMessage",
     "AssistantMessage",
     "AssistantMessageContent",
     "AsyncLetta",
@@ -605,6 +614,7 @@ __all__ = [
     "Letta",
     "LettaBatchMessages",
     "LettaBatchRequest",
+    "LettaBatchRequestMessagesItem",
     "LettaEnvironment",
     "LettaImage",
     "LettaMessageContentUnion",
@@ -612,6 +622,7 @@ __all__ = [
     "LettaPing",
     "LettaRequest",
     "LettaRequestConfig",
+    "LettaRequestMessagesItem",
     "LettaResponse",
     "LettaSchemasAgentFileAgentSchema",
     "LettaSchemasAgentFileAgentSchemaResponseFormat",
@@ -625,6 +636,7 @@ __all__ = [
     "LettaSerializeSchemasPydanticAgentSchemaToolSchema",
     "LettaStopReason",
     "LettaStreamingRequest",
+    "LettaStreamingRequestMessagesItem",
     "LettaStreamingResponse",
     "LettaUsageStatistics",
     "LettaUserMessageContentUnion",
letta_client/agents/__init__.py
CHANGED
@@ -33,6 +33,7 @@ from . import (
 )
 from .memory_variables import MemoryVariablesListResponse
 from .messages import (
+    LettaAsyncRequestMessagesItem,
     LettaStreamingResponse,
     MessagesModifyRequest,
     MessagesModifyResponse,
@@ -52,6 +53,7 @@ __all__ = [
     "AgentsSearchResponse",
     "CreateAgentRequestResponseFormat",
     "CreateAgentRequestToolRulesItem",
+    "LettaAsyncRequestMessagesItem",
     "LettaStreamingResponse",
     "MemoryVariablesListResponse",
     "MessagesModifyRequest",
letta_client/agents/client.py
CHANGED
@@ -844,6 +844,52 @@ class AgentsClient:
         )
         return _response.data

+    def modify_approval(
+        self,
+        agent_id: str,
+        tool_name: str,
+        *,
+        requires_approval: bool,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> AgentState:
+        """
+        Attach a tool to an agent.
+
+        Parameters
+        ----------
+        agent_id : str
+
+        tool_name : str
+
+        requires_approval : bool
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        AgentState
+            Successful Response
+
+        Examples
+        --------
+        from letta_client import Letta
+
+        client = Letta(
+            project="YOUR_PROJECT",
+            token="YOUR_TOKEN",
+        )
+        client.agents.modify_approval(
+            agent_id="agent_id",
+            tool_name="tool_name",
+            requires_approval=True,
+        )
+        """
+        _response = self._raw_client.modify_approval(
+            agent_id, tool_name, requires_approval=requires_approval, request_options=request_options
+        )
+        return _response.data
+
     def list_agent_files(
         self,
         agent_id: str,
@@ -1868,6 +1914,60 @@ class AsyncAgentsClient:
         )
         return _response.data

+    async def modify_approval(
+        self,
+        agent_id: str,
+        tool_name: str,
+        *,
+        requires_approval: bool,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> AgentState:
+        """
+        Attach a tool to an agent.
+
+        Parameters
+        ----------
+        agent_id : str
+
+        tool_name : str
+
+        requires_approval : bool
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        AgentState
+            Successful Response
+
+        Examples
+        --------
+        import asyncio
+
+        from letta_client import AsyncLetta
+
+        client = AsyncLetta(
+            project="YOUR_PROJECT",
+            token="YOUR_TOKEN",
+        )
+
+
+        async def main() -> None:
+            await client.agents.modify_approval(
+                agent_id="agent_id",
+                tool_name="tool_name",
+                requires_approval=True,
+            )
+
+
+        asyncio.run(main())
+        """
+        _response = await self._raw_client.modify_approval(
+            agent_id, tool_name, requires_approval=requires_approval, request_options=request_options
+        )
+        return _response.data
+
     async def list_agent_files(
         self,
         agent_id: str,
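Both hunks above add the same wrapper in sync and async form. The generated docstring text ("Attach a tool to an agent.") looks reused from the existing attach-tool method; judging only by the name and parameters, the endpoint toggles whether a given tool on a given agent requires approval before it runs. A minimal sketch of the sync variant, using the placeholder values from the generated docstring:

from letta_client import Letta

client = Letta(
    project="YOUR_PROJECT",
    token="YOUR_TOKEN",
)

# Require approval before the agent may run this tool...
client.agents.modify_approval(agent_id="agent_id", tool_name="tool_name", requires_approval=True)
# ...and later lift the requirement again.
client.agents.modify_approval(agent_id="agent_id", tool_name="tool_name", requires_approval=False)

Each call returns an AgentState, per the Returns section of the new docstrings.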
letta_client/agents/messages/__init__.py
CHANGED
@@ -3,6 +3,7 @@
 # isort: skip_file

 from .types import (
+    LettaAsyncRequestMessagesItem,
     LettaStreamingResponse,
     MessagesModifyRequest,
     MessagesModifyResponse,
@@ -10,6 +11,7 @@ from .types import (
 )

 __all__ = [
+    "LettaAsyncRequestMessagesItem",
     "LettaStreamingResponse",
     "MessagesModifyRequest",
     "MessagesModifyResponse",
letta_client/agents/messages/client.py
CHANGED
@@ -6,11 +6,13 @@ from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
 from ...core.request_options import RequestOptions
 from ...types.agent_state import AgentState
 from ...types.letta_message_union import LettaMessageUnion
+from ...types.letta_request_messages_item import LettaRequestMessagesItem
 from ...types.letta_response import LettaResponse
-from ...types.
+from ...types.letta_streaming_request_messages_item import LettaStreamingRequestMessagesItem
 from ...types.message_type import MessageType
 from ...types.run import Run
 from .raw_client import AsyncRawMessagesClient, RawMessagesClient
+from .types.letta_async_request_messages_item import LettaAsyncRequestMessagesItem
 from .types.letta_streaming_response import LettaStreamingResponse
 from .types.messages_modify_request import MessagesModifyRequest
 from .types.messages_modify_response import MessagesModifyResponse
@@ -118,7 +120,7 @@ class MessagesClient:
         self,
         agent_id: str,
         *,
-        messages: typing.Sequence[
+        messages: typing.Sequence[LettaRequestMessagesItem],
         max_steps: typing.Optional[int] = OMIT,
         use_assistant_message: typing.Optional[bool] = OMIT,
         assistant_message_tool_name: typing.Optional[str] = OMIT,
@@ -135,7 +137,7 @@ class MessagesClient:
         ----------
         agent_id : str

-        messages : typing.Sequence[
+        messages : typing.Sequence[LettaRequestMessagesItem]
             The messages to be sent to the agent.

         max_steps : typing.Optional[int]
@@ -249,7 +251,7 @@ class MessagesClient:
         self,
         agent_id: str,
         *,
-        messages: typing.Sequence[
+        messages: typing.Sequence[LettaStreamingRequestMessagesItem],
         max_steps: typing.Optional[int] = OMIT,
         use_assistant_message: typing.Optional[bool] = OMIT,
         assistant_message_tool_name: typing.Optional[str] = OMIT,
@@ -270,7 +272,7 @@ class MessagesClient:
         ----------
         agent_id : str

-        messages : typing.Sequence[
+        messages : typing.Sequence[LettaStreamingRequestMessagesItem]
             The messages to be sent to the agent.

         max_steps : typing.Optional[int]
@@ -292,7 +294,7 @@ class MessagesClient:
             If set to True, enables reasoning before responses or tool calls from the agent.

         stream_tokens : typing.Optional[bool]
-            Flag to determine if individual tokens should be streamed
+            Flag to determine if individual tokens should be streamed, rather than streaming per step.

         include_pings : typing.Optional[bool]
             Whether to include periodic keepalive ping messages in the stream to prevent connection timeouts.
@@ -394,7 +396,7 @@ class MessagesClient:
         self,
         agent_id: str,
         *,
-        messages: typing.Sequence[
+        messages: typing.Sequence[LettaAsyncRequestMessagesItem],
         max_steps: typing.Optional[int] = OMIT,
         use_assistant_message: typing.Optional[bool] = OMIT,
         assistant_message_tool_name: typing.Optional[str] = OMIT,
@@ -415,7 +417,7 @@ class MessagesClient:
         ----------
         agent_id : str

-        messages : typing.Sequence[
+        messages : typing.Sequence[LettaAsyncRequestMessagesItem]
             The messages to be sent to the agent.

         max_steps : typing.Optional[int]
@@ -687,7 +689,7 @@ class AsyncMessagesClient:
         self,
         agent_id: str,
         *,
-        messages: typing.Sequence[
+        messages: typing.Sequence[LettaRequestMessagesItem],
         max_steps: typing.Optional[int] = OMIT,
         use_assistant_message: typing.Optional[bool] = OMIT,
         assistant_message_tool_name: typing.Optional[str] = OMIT,
@@ -704,7 +706,7 @@ class AsyncMessagesClient:
         ----------
         agent_id : str

-        messages : typing.Sequence[
+        messages : typing.Sequence[LettaRequestMessagesItem]
             The messages to be sent to the agent.

         max_steps : typing.Optional[int]
@@ -836,7 +838,7 @@ class AsyncMessagesClient:
         self,
         agent_id: str,
         *,
-        messages: typing.Sequence[
+        messages: typing.Sequence[LettaStreamingRequestMessagesItem],
         max_steps: typing.Optional[int] = OMIT,
         use_assistant_message: typing.Optional[bool] = OMIT,
         assistant_message_tool_name: typing.Optional[str] = OMIT,
@@ -857,7 +859,7 @@ class AsyncMessagesClient:
         ----------
         agent_id : str

-        messages : typing.Sequence[
+        messages : typing.Sequence[LettaStreamingRequestMessagesItem]
             The messages to be sent to the agent.

         max_steps : typing.Optional[int]
@@ -879,7 +881,7 @@ class AsyncMessagesClient:
             If set to True, enables reasoning before responses or tool calls from the agent.

         stream_tokens : typing.Optional[bool]
-            Flag to determine if individual tokens should be streamed
+            Flag to determine if individual tokens should be streamed, rather than streaming per step.

         include_pings : typing.Optional[bool]
             Whether to include periodic keepalive ping messages in the stream to prevent connection timeouts.
@@ -998,7 +1000,7 @@ class AsyncMessagesClient:
         self,
         agent_id: str,
         *,
-        messages: typing.Sequence[
+        messages: typing.Sequence[LettaAsyncRequestMessagesItem],
         max_steps: typing.Optional[int] = OMIT,
         use_assistant_message: typing.Optional[bool] = OMIT,
         assistant_message_tool_name: typing.Optional[str] = OMIT,
@@ -1019,7 +1021,7 @@ class AsyncMessagesClient:
         ----------
         agent_id : str

-        messages : typing.Sequence[
+        messages : typing.Sequence[LettaAsyncRequestMessagesItem]
             The messages to be sent to the agent.

         max_steps : typing.Optional[int]
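The only wording change in these docstrings clarifies stream_tokens: when true, the stream yields individual tokens rather than one chunk per agent step. A hedged sketch of passing the flag; the method name create_stream and the MessageCreate fields are assumptions taken from the wider letta_client API, not from the hunks above:

from letta_client import Letta, MessageCreate

client = Letta(
    project="YOUR_PROJECT",
    token="YOUR_TOKEN",
)

# create_stream and MessageCreate(role=..., content=...) are assumed here; only the
# stream_tokens / include_pings parameters are visible in this diff.
stream = client.agents.messages.create_stream(
    agent_id="agent_id",
    messages=[MessageCreate(role="user", content="hello")],
    stream_tokens=True,   # stream individual tokens instead of one chunk per step
    include_pings=True,   # periodic keepalive pings to avoid connection timeouts
)
for chunk in stream:
    print(chunk)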
letta_client/agents/messages/raw_client.py
CHANGED
@@ -17,10 +17,12 @@ from ...errors.unprocessable_entity_error import UnprocessableEntityError
 from ...types.agent_state import AgentState
 from ...types.http_validation_error import HttpValidationError
 from ...types.letta_message_union import LettaMessageUnion
+from ...types.letta_request_messages_item import LettaRequestMessagesItem
 from ...types.letta_response import LettaResponse
-from ...types.
+from ...types.letta_streaming_request_messages_item import LettaStreamingRequestMessagesItem
 from ...types.message_type import MessageType
 from ...types.run import Run
+from .types.letta_async_request_messages_item import LettaAsyncRequestMessagesItem
 from .types.letta_streaming_response import LettaStreamingResponse
 from .types.messages_modify_request import MessagesModifyRequest
 from .types.messages_modify_response import MessagesModifyResponse
@@ -132,7 +134,7 @@ class RawMessagesClient:
         self,
         agent_id: str,
         *,
-        messages: typing.Sequence[
+        messages: typing.Sequence[LettaRequestMessagesItem],
         max_steps: typing.Optional[int] = OMIT,
         use_assistant_message: typing.Optional[bool] = OMIT,
         assistant_message_tool_name: typing.Optional[str] = OMIT,
@@ -149,7 +151,7 @@ class RawMessagesClient:
         ----------
         agent_id : str

-        messages : typing.Sequence[
+        messages : typing.Sequence[LettaRequestMessagesItem]
             The messages to be sent to the agent.

         max_steps : typing.Optional[int]
@@ -183,7 +185,7 @@ class RawMessagesClient:
             method="POST",
             json={
                 "messages": convert_and_respect_annotation_metadata(
-                    object_=messages, annotation=typing.Sequence[
+                    object_=messages, annotation=typing.Sequence[LettaRequestMessagesItem], direction="write"
                 ),
                 "max_steps": max_steps,
                 "use_assistant_message": use_assistant_message,
@@ -294,7 +296,7 @@ class RawMessagesClient:
         self,
         agent_id: str,
         *,
-        messages: typing.Sequence[
+        messages: typing.Sequence[LettaStreamingRequestMessagesItem],
         max_steps: typing.Optional[int] = OMIT,
         use_assistant_message: typing.Optional[bool] = OMIT,
         assistant_message_tool_name: typing.Optional[str] = OMIT,
@@ -315,7 +317,7 @@ class RawMessagesClient:
         ----------
         agent_id : str

-        messages : typing.Sequence[
+        messages : typing.Sequence[LettaStreamingRequestMessagesItem]
             The messages to be sent to the agent.

         max_steps : typing.Optional[int]
@@ -337,7 +339,7 @@ class RawMessagesClient:
             If set to True, enables reasoning before responses or tool calls from the agent.

         stream_tokens : typing.Optional[bool]
-            Flag to determine if individual tokens should be streamed
+            Flag to determine if individual tokens should be streamed, rather than streaming per step.

         include_pings : typing.Optional[bool]
             Whether to include periodic keepalive ping messages in the stream to prevent connection timeouts.
@@ -358,7 +360,7 @@ class RawMessagesClient:
             method="POST",
             json={
                 "messages": convert_and_respect_annotation_metadata(
-                    object_=messages, annotation=typing.Sequence[
+                    object_=messages, annotation=typing.Sequence[LettaStreamingRequestMessagesItem], direction="write"
                 ),
                 "max_steps": max_steps,
                 "use_assistant_message": use_assistant_message,
@@ -489,7 +491,7 @@ class RawMessagesClient:
         self,
         agent_id: str,
         *,
-        messages: typing.Sequence[
+        messages: typing.Sequence[LettaAsyncRequestMessagesItem],
         max_steps: typing.Optional[int] = OMIT,
         use_assistant_message: typing.Optional[bool] = OMIT,
         assistant_message_tool_name: typing.Optional[str] = OMIT,
@@ -510,7 +512,7 @@ class RawMessagesClient:
         ----------
         agent_id : str

-        messages : typing.Sequence[
+        messages : typing.Sequence[LettaAsyncRequestMessagesItem]
             The messages to be sent to the agent.

         max_steps : typing.Optional[int]
@@ -547,7 +549,7 @@ class RawMessagesClient:
             method="POST",
             json={
                 "messages": convert_and_respect_annotation_metadata(
-                    object_=messages, annotation=typing.Sequence[
+                    object_=messages, annotation=typing.Sequence[LettaAsyncRequestMessagesItem], direction="write"
                 ),
                 "max_steps": max_steps,
                 "use_assistant_message": use_assistant_message,
@@ -817,7 +819,7 @@ class AsyncRawMessagesClient:
         self,
         agent_id: str,
         *,
-        messages: typing.Sequence[
+        messages: typing.Sequence[LettaRequestMessagesItem],
         max_steps: typing.Optional[int] = OMIT,
         use_assistant_message: typing.Optional[bool] = OMIT,
         assistant_message_tool_name: typing.Optional[str] = OMIT,
@@ -834,7 +836,7 @@ class AsyncRawMessagesClient:
         ----------
         agent_id : str

-        messages : typing.Sequence[
+        messages : typing.Sequence[LettaRequestMessagesItem]
             The messages to be sent to the agent.

         max_steps : typing.Optional[int]
@@ -868,7 +870,7 @@ class AsyncRawMessagesClient:
             method="POST",
             json={
                 "messages": convert_and_respect_annotation_metadata(
-                    object_=messages, annotation=typing.Sequence[
+                    object_=messages, annotation=typing.Sequence[LettaRequestMessagesItem], direction="write"
                 ),
                 "max_steps": max_steps,
                 "use_assistant_message": use_assistant_message,
@@ -979,7 +981,7 @@ class AsyncRawMessagesClient:
         self,
         agent_id: str,
         *,
-        messages: typing.Sequence[
+        messages: typing.Sequence[LettaStreamingRequestMessagesItem],
         max_steps: typing.Optional[int] = OMIT,
         use_assistant_message: typing.Optional[bool] = OMIT,
         assistant_message_tool_name: typing.Optional[str] = OMIT,
@@ -1000,7 +1002,7 @@ class AsyncRawMessagesClient:
         ----------
         agent_id : str

-        messages : typing.Sequence[
+        messages : typing.Sequence[LettaStreamingRequestMessagesItem]
             The messages to be sent to the agent.

         max_steps : typing.Optional[int]
@@ -1022,7 +1024,7 @@ class AsyncRawMessagesClient:
             If set to True, enables reasoning before responses or tool calls from the agent.

         stream_tokens : typing.Optional[bool]
-            Flag to determine if individual tokens should be streamed
+            Flag to determine if individual tokens should be streamed, rather than streaming per step.

         include_pings : typing.Optional[bool]
             Whether to include periodic keepalive ping messages in the stream to prevent connection timeouts.
@@ -1043,7 +1045,7 @@ class AsyncRawMessagesClient:
             method="POST",
             json={
                 "messages": convert_and_respect_annotation_metadata(
-                    object_=messages, annotation=typing.Sequence[
+                    object_=messages, annotation=typing.Sequence[LettaStreamingRequestMessagesItem], direction="write"
                 ),
                 "max_steps": max_steps,
                 "use_assistant_message": use_assistant_message,
@@ -1174,7 +1176,7 @@ class AsyncRawMessagesClient:
         self,
         agent_id: str,
         *,
-        messages: typing.Sequence[
+        messages: typing.Sequence[LettaAsyncRequestMessagesItem],
         max_steps: typing.Optional[int] = OMIT,
         use_assistant_message: typing.Optional[bool] = OMIT,
         assistant_message_tool_name: typing.Optional[str] = OMIT,
@@ -1195,7 +1197,7 @@ class AsyncRawMessagesClient:
         ----------
         agent_id : str

-        messages : typing.Sequence[
+        messages : typing.Sequence[LettaAsyncRequestMessagesItem]
             The messages to be sent to the agent.

         max_steps : typing.Optional[int]
@@ -1232,7 +1234,7 @@ class AsyncRawMessagesClient:
             method="POST",
             json={
                 "messages": convert_and_respect_annotation_metadata(
-                    object_=messages, annotation=typing.Sequence[
+                    object_=messages, annotation=typing.Sequence[LettaAsyncRequestMessagesItem], direction="write"
                 ),
                 "max_steps": max_steps,
                 "use_assistant_message": use_assistant_message,
letta_client/agents/messages/types/__init__.py
CHANGED
@@ -2,12 +2,14 @@

 # isort: skip_file

+from .letta_async_request_messages_item import LettaAsyncRequestMessagesItem
 from .letta_streaming_response import LettaStreamingResponse
 from .messages_modify_request import MessagesModifyRequest
 from .messages_modify_response import MessagesModifyResponse
 from .messages_preview_raw_payload_request import MessagesPreviewRawPayloadRequest

 __all__ = [
+    "LettaAsyncRequestMessagesItem",
     "LettaStreamingResponse",
     "MessagesModifyRequest",
     "MessagesModifyResponse",
letta_client/agents/messages/types/letta_async_request_messages_item.py
ADDED
@@ -0,0 +1,8 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+from ....types.approval_create import ApprovalCreate
+from ....types.message_create import MessageCreate
+
+LettaAsyncRequestMessagesItem = typing.Union[MessageCreate, ApprovalCreate]
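This new module, together with letta_request_messages_item.py and letta_streaming_request_messages_item.py under letta_client/types, reduces every request-side messages item to MessageCreate or ApprovalCreate. A small sketch of what that means for callers; ApprovalCreate's fields are not shown in this diff, so items are only passed through unchanged, and the top-level imports assume the usual flat re-exports from letta_client:

import typing

from letta_client import ApprovalCreate, MessageCreate

# Mirrors the generated unions (LettaRequestMessagesItem and friends).
OutgoingItem = typing.Union[MessageCreate, ApprovalCreate]

def queue_item(pending: typing.List[OutgoingItem], item: OutgoingItem) -> None:
    # Existing MessageCreate call sites keep type-checking unchanged, and approval
    # payloads can be appended to the same list before it is sent to the agent.
    pending.append(item)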
letta_client/agents/messages/types/messages_modify_response.py
CHANGED
@@ -2,6 +2,8 @@

 import typing

+from ....types.approval_request_message import ApprovalRequestMessage
+from ....types.approval_response_message import ApprovalResponseMessage
 from ....types.assistant_message import AssistantMessage
 from ....types.hidden_reasoning_message import HiddenReasoningMessage
 from ....types.reasoning_message import ReasoningMessage
@@ -18,4 +20,6 @@ MessagesModifyResponse = typing.Union[
     ToolCallMessage,
     ToolReturnMessage,
     AssistantMessage,
+    ApprovalRequestMessage,
+    ApprovalResponseMessage,
 ]
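With ApprovalRequestMessage and ApprovalResponseMessage now part of MessagesModifyResponse (and of LettaMessageUnion, per letta_message_union.py in the file list above), code that branches on message types needs two extra cases. A minimal sketch, assuming only the class names added in this release:

from letta_client import ApprovalRequestMessage, ApprovalResponseMessage

def describe(message: object) -> str:
    # Branch on the two message classes introduced in 0.1.299; everything else
    # falls through to its class name.
    if isinstance(message, ApprovalRequestMessage):
        return "agent is waiting for approval before running a tool"
    if isinstance(message, ApprovalResponseMessage):
        return "an earlier approval request has been answered"
    return type(message).__name__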