letta-client 0.1.17__py3-none-any.whl → 0.1.19__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of letta-client has been flagged as possibly problematic.
- letta_client/__init__.py +34 -116
- letta_client/agents/__init__.py +17 -42
- letta_client/agents/archival_memory/client.py +19 -212
- letta_client/agents/client.py +1633 -336
- letta_client/agents/context/client.py +6 -4
- letta_client/agents/core_memory/client.py +97 -499
- letta_client/agents/memory_variables/__init__.py +2 -2
- letta_client/agents/memory_variables/client.py +13 -13
- letta_client/agents/memory_variables/types/__init__.py +2 -2
- letta_client/agents/memory_variables/types/{memory_variables_get_response.py → memory_variables_list_response.py} +1 -1
- letta_client/agents/messages/__init__.py +2 -22
- letta_client/agents/messages/client.py +20 -26
- letta_client/agents/messages/types/__init__.py +2 -21
- letta_client/agents/messages/types/letta_streaming_response.py +16 -139
- letta_client/agents/messages/types/messages_list_response.py +2 -2
- letta_client/agents/sources/client.py +266 -5
- letta_client/agents/tools/client.py +25 -27
- letta_client/agents/types/__init__.py +15 -25
- letta_client/agents/types/agents_search_request_search_item.py +10 -78
- letta_client/agents/types/{agents_search_request_search_item_order_by.py → agents_search_request_search_item_direction.py} +7 -6
- letta_client/agents/types/agents_search_request_search_item_direction_direction.py +5 -0
- letta_client/agents/types/agents_search_request_search_item_direction_value.py +5 -0
- letta_client/agents/types/{agents_search_request_search_item_name.py → agents_search_request_search_item_one.py} +5 -4
- letta_client/agents/types/agents_search_request_search_item_one_operator.py +5 -0
- letta_client/agents/types/{agents_search_request_search_item_tags.py → agents_search_request_search_item_two.py} +2 -1
- letta_client/agents/types/{agents_search_request_search_item_version.py → agents_search_request_search_item_zero.py} +3 -2
- letta_client/blocks/client.py +12 -260
- letta_client/client.py +3 -3
- letta_client/core/client_wrapper.py +1 -1
- letta_client/jobs/client.py +4 -4
- letta_client/providers/client.py +4 -4
- letta_client/runs/client.py +14 -12
- letta_client/sources/client.py +12 -288
- letta_client/tools/client.py +63 -69
- letta_client/types/__init__.py +21 -99
- letta_client/types/agent_state.py +3 -7
- letta_client/types/{assistant_message_output.py → assistant_message.py} +3 -2
- letta_client/types/block.py +2 -6
- letta_client/types/block_update.py +1 -5
- letta_client/types/{create_assistant_file_request.py → chat_completion_message_tool_call.py} +7 -7
- letta_client/types/context_window_overview.py +4 -6
- letta_client/types/create_block.py +1 -5
- letta_client/types/{function_call_output.py → function.py} +1 -1
- letta_client/types/{function_schema.py → function_definition.py} +2 -1
- letta_client/types/{function_call_input.py → function_tool.py} +5 -3
- letta_client/types/job.py +1 -5
- letta_client/types/letta_message_union.py +9 -121
- letta_client/types/letta_usage_statistics.py +1 -0
- letta_client/types/{letta_schemas_message_message.py → message.py} +4 -6
- letta_client/types/passage.py +1 -5
- letta_client/types/reasoning_message.py +2 -1
- letta_client/types/run.py +1 -5
- letta_client/types/source.py +2 -6
- letta_client/types/{system_message_output.py → system_message.py} +3 -2
- letta_client/types/{letta_schemas_tool_tool.py → tool.py} +1 -1
- letta_client/types/{letta_schemas_letta_message_tool_call.py → tool_call.py} +1 -1
- letta_client/types/tool_call_message.py +2 -1
- letta_client/types/tool_call_message_tool_call.py +2 -2
- letta_client/types/tool_return_message.py +2 -1
- letta_client/types/tool_type.py +2 -1
- letta_client/types/{user_message_output.py → user_message.py} +3 -2
- {letta_client-0.1.17.dist-info → letta_client-0.1.19.dist-info}/METADATA +1 -1
- {letta_client-0.1.17.dist-info → letta_client-0.1.19.dist-info}/RECORD +64 -95
- letta_client/agents/types/agents_search_request_search_item_name_operator.py +0 -5
- letta_client/agents/types/agents_search_request_search_item_order_by_direction.py +0 -5
- letta_client/agents/types/agents_search_request_search_item_order_by_value.py +0 -5
- letta_client/types/assistant_file.py +0 -33
- letta_client/types/assistant_message_input.py +0 -23
- letta_client/types/chat_completion_request.py +0 -49
- letta_client/types/chat_completion_request_function_call.py +0 -6
- letta_client/types/chat_completion_request_messages_item.py +0 -11
- letta_client/types/chat_completion_request_stop.py +0 -5
- letta_client/types/chat_completion_request_tool_choice.py +0 -8
- letta_client/types/chat_completion_response.py +0 -32
- letta_client/types/choice.py +0 -25
- letta_client/types/create_assistant_request.py +0 -57
- letta_client/types/delete_assistant_file_response.py +0 -28
- letta_client/types/delete_assistant_response.py +0 -28
- letta_client/types/letta_schemas_openai_chat_completion_request_tool.py +0 -21
- letta_client/types/letta_schemas_openai_chat_completion_request_tool_call.py +0 -24
- letta_client/types/letta_schemas_openai_chat_completion_request_tool_call_function.py +0 -20
- letta_client/types/letta_schemas_openai_chat_completion_response_message.py +0 -24
- letta_client/types/letta_schemas_openai_chat_completion_response_tool_call.py +0 -22
- letta_client/types/letta_schemas_openai_chat_completions_tool_call_function.py +0 -27
- letta_client/types/letta_schemas_openai_chat_completions_tool_call_input.py +0 -29
- letta_client/types/letta_schemas_openai_chat_completions_tool_call_output.py +0 -29
- letta_client/types/log_prob_token.py +0 -21
- letta_client/types/message_content_log_prob.py +0 -23
- letta_client/types/open_ai_assistant.py +0 -67
- letta_client/types/response_format.py +0 -19
- letta_client/types/system_message_input.py +0 -21
- letta_client/types/tool_call_function_output.py +0 -27
- letta_client/types/tool_function_choice.py +0 -21
- letta_client/types/tool_input.py +0 -21
- letta_client/types/tool_message.py +0 -21
- letta_client/types/user_message_input.py +0 -22
- letta_client/types/user_message_input_content.py +0 -5
- {letta_client-0.1.17.dist-info → letta_client-0.1.19.dist-info}/WHEEL +0 -0
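Judging from the renamed files above and the docstring examples in the client.py diff below, 0.1.19 appears to fold the core-memory and archival-memory operations directly into client.agents on the synchronous client, alongside retrieve and modify entry points for agent state. A minimal usage sketch based only on those docstring examples (placeholder IDs, not verified against the released wheel):

from letta_client import Letta

client = Letta(
    token="YOUR_TOKEN",
)
agent = client.agents.retrieve(agent_id="agent_id")                # get the agent's state
memory = client.agents.retrieve_agent_memory(agent_id="agent_id")  # core memory state
block = client.agents.retrieve_core_memory_block(
    agent_id="agent_id",
    block_label="block_label",
)
blocks = client.agents.list_core_memory_blocks(agent_id="agent_id")
passages = client.agents.list_archival_memory(agent_id="agent_id")
client.agents.create_archival_memory(agent_id="agent_id", text="text")
client.agents.delete_archival_memory(agent_id="agent_id", memory_id="memory_id")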
letta_client/agents/client.py
CHANGED
@@ -5,11 +5,11 @@ from ..core.client_wrapper import SyncClientWrapper
 from .context.client import ContextClient
 from .tools.client import ToolsClient
 from .sources.client import SourcesClient
-from .core_memory.client import CoreMemoryClient
-from .archival_memory.client import ArchivalMemoryClient
 from .messages.client import MessagesClient
 from .templates.client import TemplatesClient
 from .memory_variables.client import MemoryVariablesClient
+from .core_memory.client import CoreMemoryClient
+from .archival_memory.client import ArchivalMemoryClient
 from ..core.request_options import RequestOptions
 from ..types.agent_state import AgentState
 from ..core.unchecked_base_model import construct_type
@@ -26,17 +26,20 @@ from ..types.message_create import MessageCreate
 from ..core.serialization import convert_and_respect_annotation_metadata
 from ..core.jsonable_encoder import jsonable_encoder
 from .types.update_agent_tool_rules_item import UpdateAgentToolRulesItem
+from ..types.memory import Memory
+from ..types.block import Block
+from ..types.passage import Passage
 from .types.agents_search_request_search_item import AgentsSearchRequestSearchItem
 from .types.agents_search_request_combinator import AgentsSearchRequestCombinator
 from ..core.client_wrapper import AsyncClientWrapper
 from .context.client import AsyncContextClient
 from .tools.client import AsyncToolsClient
 from .sources.client import AsyncSourcesClient
-from .core_memory.client import AsyncCoreMemoryClient
-from .archival_memory.client import AsyncArchivalMemoryClient
 from .messages.client import AsyncMessagesClient
 from .templates.client import AsyncTemplatesClient
 from .memory_variables.client import AsyncMemoryVariablesClient
+from .core_memory.client import AsyncCoreMemoryClient
+from .archival_memory.client import AsyncArchivalMemoryClient

 # this is used as the default value for optional parameters
 OMIT = typing.cast(typing.Any, ...)
@@ -48,11 +51,11 @@ class AgentsClient:
         self.context = ContextClient(client_wrapper=self._client_wrapper)
         self.tools = ToolsClient(client_wrapper=self._client_wrapper)
         self.sources = SourcesClient(client_wrapper=self._client_wrapper)
-        self.core_memory = CoreMemoryClient(client_wrapper=self._client_wrapper)
-        self.archival_memory = ArchivalMemoryClient(client_wrapper=self._client_wrapper)
         self.messages = MessagesClient(client_wrapper=self._client_wrapper)
         self.templates = TemplatesClient(client_wrapper=self._client_wrapper)
         self.memory_variables = MemoryVariablesClient(client_wrapper=self._client_wrapper)
+        self.core_memory = CoreMemoryClient(client_wrapper=self._client_wrapper)
+        self.archival_memory = ArchivalMemoryClient(client_wrapper=self._client_wrapper)

     def list(
         self,
@@ -304,7 +307,7 @@ class AgentsClient:
                 "include_base_tools": include_base_tools,
                 "include_multi_agent_tools": include_multi_agent_tools,
                 "description": description,
-                "
+                "metadata": metadata,
                 "model": model,
                 "embedding": embedding,
                 "context_window_limit": context_window_limit,
@@ -345,7 +348,7 @@ class AgentsClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    def
+    def retrieve(self, agent_id: str, *, request_options: typing.Optional[RequestOptions] = None) -> AgentState:
         """
         Get the state of the agent.

@@ -368,7 +371,7 @@ class AgentsClient:
         client = Letta(
             token="YOUR_TOKEN",
         )
-        client.agents.
+        client.agents.retrieve(
             agent_id="agent_id",
         )
         """
@@ -459,7 +462,7 @@ class AgentsClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    def
+    def modify(
         self,
         agent_id: str,
         *,
@@ -539,7 +542,7 @@ class AgentsClient:
         client = Letta(
             token="YOUR_TOKEN",
         )
-        client.agents.
+        client.agents.modify(
            agent_id="agent_id",
         )
         """
@@ -564,7 +567,7 @@ class AgentsClient:
                 ),
                 "message_ids": message_ids,
                 "description": description,
-                "
+                "metadata": metadata,
                 "tool_exec_environment_variables": tool_exec_environment_variables,
             },
             headers={
@@ -597,29 +600,23 @@ class AgentsClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    def
-        self,
-
-        *,
-        add_default_initial_messages: typing.Optional[bool] = None,
-        request_options: typing.Optional[RequestOptions] = None,
-    ) -> AgentState:
+    def retrieve_agent_memory(
+        self, agent_id: str, *, request_options: typing.Optional[RequestOptions] = None
+    ) -> Memory:
         """
-
+        Retrieve the memory state of a specific agent.
+        This endpoint fetches the current memory state of the agent identified by the user ID and agent ID.

         Parameters
         ----------
         agent_id : str

-        add_default_initial_messages : typing.Optional[bool]
-            If true, adds the default initial messages after resetting.
-
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

         Returns
         -------
-
+        Memory
             Successful Response

         Examples
@@ -629,24 +626,21 @@ class AgentsClient:
         client = Letta(
             token="YOUR_TOKEN",
         )
-        client.agents.
+        client.agents.retrieve_agent_memory(
            agent_id="agent_id",
         )
         """
         _response = self._client_wrapper.httpx_client.request(
-            f"v1/agents/{jsonable_encoder(agent_id)}/
-            method="
-            params={
-                "add_default_initial_messages": add_default_initial_messages,
-            },
+            f"v1/agents/{jsonable_encoder(agent_id)}/core-memory",
+            method="GET",
             request_options=request_options,
         )
         try:
             if 200 <= _response.status_code < 300:
                 return typing.cast(
-
+                    Memory,
                     construct_type(
-                        type_=
+                        type_=Memory,  # type: ignore
                         object_=_response.json(),
                     ),
                 )
@@ -665,39 +659,25 @@ class AgentsClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-    def
-        self,
-
-        search: typing.Optional[typing.Sequence[AgentsSearchRequestSearchItem]] = OMIT,
-        project_id: typing.Optional[str] = OMIT,
-        combinator: typing.Optional[AgentsSearchRequestCombinator] = OMIT,
-        limit: typing.Optional[float] = OMIT,
-        offset: typing.Optional[float] = OMIT,
-        request_options: typing.Optional[RequestOptions] = None,
-    ) -> None:
+    def retrieve_core_memory_block(
+        self, agent_id: str, block_label: str, *, request_options: typing.Optional[RequestOptions] = None
+    ) -> Block:
         """
-
-
-        Search deployed agents.
+        Retrieve a memory block from an agent.

         Parameters
         ----------
-
-
-        project_id : typing.Optional[str]
-
-        combinator : typing.Optional[AgentsSearchRequestCombinator]
-
-        limit : typing.Optional[float]
+        agent_id : str

-
+        block_label : str

         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

         Returns
         -------
-
+        Block
+            Successful Response

         Examples
         --------
@@ -706,126 +686,183 @@ class AgentsClient:
         client = Letta(
             token="YOUR_TOKEN",
         )
-        client.agents.
+        client.agents.retrieve_core_memory_block(
+            agent_id="agent_id",
+            block_label="block_label",
+        )
         """
         _response = self._client_wrapper.httpx_client.request(
-            "v1/agents/
-            method="
-            json={
-                "search": convert_and_respect_annotation_metadata(
-                    object_=search, annotation=typing.Sequence[AgentsSearchRequestSearchItem], direction="write"
-                ),
-                "project_id": project_id,
-                "combinator": combinator,
-                "limit": limit,
-                "offset": offset,
-            },
-            headers={
-                "content-type": "application/json",
-            },
+            f"v1/agents/{jsonable_encoder(agent_id)}/core-memory/blocks/{jsonable_encoder(block_label)}",
+            method="GET",
             request_options=request_options,
-            omit=OMIT,
         )
         try:
             if 200 <= _response.status_code < 300:
-                return
+                return typing.cast(
+                    Block,
+                    construct_type(
+                        type_=Block,  # type: ignore
+                        object_=_response.json(),
+                    ),
+                )
+            if _response.status_code == 422:
+                raise UnprocessableEntityError(
+                    typing.cast(
+                        HttpValidationError,
+                        construct_type(
+                            type_=HttpValidationError,  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
             _response_json = _response.json()
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

-
-class AsyncAgentsClient:
-    def __init__(self, *, client_wrapper: AsyncClientWrapper):
-        self._client_wrapper = client_wrapper
-        self.context = AsyncContextClient(client_wrapper=self._client_wrapper)
-        self.tools = AsyncToolsClient(client_wrapper=self._client_wrapper)
-        self.sources = AsyncSourcesClient(client_wrapper=self._client_wrapper)
-        self.core_memory = AsyncCoreMemoryClient(client_wrapper=self._client_wrapper)
-        self.archival_memory = AsyncArchivalMemoryClient(client_wrapper=self._client_wrapper)
-        self.messages = AsyncMessagesClient(client_wrapper=self._client_wrapper)
-        self.templates = AsyncTemplatesClient(client_wrapper=self._client_wrapper)
-        self.memory_variables = AsyncMemoryVariablesClient(client_wrapper=self._client_wrapper)
-
-    async def list(
+    def modify_core_memory_block(
         self,
+        agent_id: str,
+        block_label: str,
         *,
-
-
-
-
-
-
+        value: typing.Optional[str] = OMIT,
+        limit: typing.Optional[int] = OMIT,
+        name: typing.Optional[str] = OMIT,
+        is_template: typing.Optional[bool] = OMIT,
+        label: typing.Optional[str] = OMIT,
+        description: typing.Optional[str] = OMIT,
+        metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
-    ) ->
+    ) -> Block:
         """
-
-        This endpoint retrieves a list of all agents and their configurations associated with the specified user ID.
+        Updates a memory block of an agent.

         Parameters
         ----------
-
-            Name of the agent
-
-        tags : typing.Optional[typing.Union[str, typing.Sequence[str]]]
-            List of tags to filter agents by
+        agent_id : str

-
-            If True, only returns agents that match ALL given tags. Otherwise, return agents that have ANY of the passed in tags.
+        block_label : str

-
-
+        value : typing.Optional[str]
+            Value of the block.

         limit : typing.Optional[int]
-
+            Character limit of the block.

-
-
+        name : typing.Optional[str]
+            Name of the block if it is a template.
+
+        is_template : typing.Optional[bool]
+            Whether the block is a template (e.g. saved human/persona options).
+
+        label : typing.Optional[str]
+            Label of the block (e.g. 'human', 'persona') in the context window.
+
+        description : typing.Optional[str]
+            Description of the block.
+
+        metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
+            Metadata of the block.

         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

         Returns
         -------
-
+        Block
             Successful Response

         Examples
         --------
-        import
-
-        from letta_client import AsyncLetta
+        from letta_client import Letta

-        client =
+        client = Letta(
             token="YOUR_TOKEN",
         )
+        client.agents.modify_core_memory_block(
+            agent_id="agent_id",
+            block_label="block_label",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            f"v1/agents/{jsonable_encoder(agent_id)}/core-memory/blocks/{jsonable_encoder(block_label)}",
+            method="PATCH",
+            json={
+                "value": value,
+                "limit": limit,
+                "name": name,
+                "is_template": is_template,
+                "label": label,
+                "description": description,
+                "metadata": metadata,
+            },
+            request_options=request_options,
+            omit=OMIT,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return typing.cast(
+                    Block,
+                    construct_type(
+                        type_=Block,  # type: ignore
+                        object_=_response.json(),
+                    ),
+                )
+            if _response.status_code == 422:
+                raise UnprocessableEntityError(
+                    typing.cast(
+                        HttpValidationError,
+                        construct_type(
+                            type_=HttpValidationError,  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)

+    def list_core_memory_blocks(
+        self, agent_id: str, *, request_options: typing.Optional[RequestOptions] = None
+    ) -> typing.List[Block]:
+        """
+        Retrieve the memory blocks of a specific agent.

-
-
+        Parameters
+        ----------
+        agent_id : str

+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.

-
+        Returns
+        -------
+        typing.List[Block]
+            Successful Response
+
+        Examples
+        --------
+        from letta_client import Letta
+
+        client = Letta(
+            token="YOUR_TOKEN",
+        )
+        client.agents.list_core_memory_blocks(
+            agent_id="agent_id",
+        )
         """
-        _response =
-            "v1/agents/",
+        _response = self._client_wrapper.httpx_client.request(
+            f"v1/agents/{jsonable_encoder(agent_id)}/core-memory/blocks",
             method="GET",
-            params={
-                "name": name,
-                "tags": tags,
-                "match_all_tags": match_all_tags,
-                "cursor": cursor,
-                "limit": limit,
-                "query_text": query_text,
-            },
             request_options=request_options,
         )
         try:
             if 200 <= _response.status_code < 300:
                 return typing.cast(
-                    typing.List[
+                    typing.List[Block],
                     construct_type(
-                        type_=typing.List[
+                        type_=typing.List[Block],  # type: ignore
                         object_=_response.json(),
                     ),
                 )
@@ -844,126 +881,1332 @@ class AsyncAgentsClient:
|
|
|
844
881
|
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
845
882
|
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
846
883
|
|
|
847
|
-
|
|
848
|
-
self,
|
|
849
|
-
*,
|
|
850
|
-
name: typing.Optional[str] = OMIT,
|
|
851
|
-
memory_blocks: typing.Optional[typing.Sequence[CreateBlock]] = OMIT,
|
|
852
|
-
tools: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
853
|
-
tool_ids: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
854
|
-
source_ids: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
855
|
-
block_ids: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
856
|
-
tool_rules: typing.Optional[typing.Sequence[CreateAgentRequestToolRulesItem]] = OMIT,
|
|
857
|
-
tags: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
858
|
-
system: typing.Optional[str] = OMIT,
|
|
859
|
-
agent_type: typing.Optional[AgentType] = OMIT,
|
|
860
|
-
llm_config: typing.Optional[LlmConfig] = OMIT,
|
|
861
|
-
embedding_config: typing.Optional[EmbeddingConfig] = OMIT,
|
|
862
|
-
initial_message_sequence: typing.Optional[typing.Sequence[MessageCreate]] = OMIT,
|
|
863
|
-
include_base_tools: typing.Optional[bool] = OMIT,
|
|
864
|
-
include_multi_agent_tools: typing.Optional[bool] = OMIT,
|
|
865
|
-
description: typing.Optional[str] = OMIT,
|
|
866
|
-
metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
|
|
867
|
-
model: typing.Optional[str] = OMIT,
|
|
868
|
-
embedding: typing.Optional[str] = OMIT,
|
|
869
|
-
context_window_limit: typing.Optional[int] = OMIT,
|
|
870
|
-
embedding_chunk_size: typing.Optional[int] = OMIT,
|
|
871
|
-
from_template: typing.Optional[str] = OMIT,
|
|
872
|
-
template: typing.Optional[bool] = OMIT,
|
|
873
|
-
project: typing.Optional[str] = OMIT,
|
|
874
|
-
tool_exec_environment_variables: typing.Optional[typing.Dict[str, typing.Optional[str]]] = OMIT,
|
|
875
|
-
memory_variables: typing.Optional[typing.Dict[str, typing.Optional[str]]] = OMIT,
|
|
876
|
-
request_options: typing.Optional[RequestOptions] = None,
|
|
884
|
+
def attach_core_memory_block(
|
|
885
|
+
self, agent_id: str, block_id: str, *, request_options: typing.Optional[RequestOptions] = None
|
|
877
886
|
) -> AgentState:
|
|
878
887
|
"""
|
|
879
|
-
|
|
888
|
+
Attach a block to an agent.
|
|
880
889
|
|
|
881
890
|
Parameters
|
|
882
891
|
----------
|
|
883
|
-
|
|
884
|
-
The name of the agent.
|
|
885
|
-
|
|
886
|
-
memory_blocks : typing.Optional[typing.Sequence[CreateBlock]]
|
|
887
|
-
The blocks to create in the agent's in-context memory.
|
|
892
|
+
agent_id : str
|
|
888
893
|
|
|
889
|
-
|
|
890
|
-
|
|
894
|
+
block_id : str
|
|
895
|
+
|
|
896
|
+
request_options : typing.Optional[RequestOptions]
|
|
897
|
+
Request-specific configuration.
|
|
898
|
+
|
|
899
|
+
Returns
|
|
900
|
+
-------
|
|
901
|
+
AgentState
|
|
902
|
+
Successful Response
|
|
903
|
+
|
|
904
|
+
Examples
|
|
905
|
+
--------
|
|
906
|
+
from letta_client import Letta
|
|
907
|
+
|
|
908
|
+
client = Letta(
|
|
909
|
+
token="YOUR_TOKEN",
|
|
910
|
+
)
|
|
911
|
+
client.agents.attach_core_memory_block(
|
|
912
|
+
agent_id="agent_id",
|
|
913
|
+
block_id="block_id",
|
|
914
|
+
)
|
|
915
|
+
"""
|
|
916
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
917
|
+
f"v1/agents/{jsonable_encoder(agent_id)}/core-memory/blocks/attach/{jsonable_encoder(block_id)}",
|
|
918
|
+
method="PATCH",
|
|
919
|
+
request_options=request_options,
|
|
920
|
+
)
|
|
921
|
+
try:
|
|
922
|
+
if 200 <= _response.status_code < 300:
|
|
923
|
+
return typing.cast(
|
|
924
|
+
AgentState,
|
|
925
|
+
construct_type(
|
|
926
|
+
type_=AgentState, # type: ignore
|
|
927
|
+
object_=_response.json(),
|
|
928
|
+
),
|
|
929
|
+
)
|
|
930
|
+
if _response.status_code == 422:
|
|
931
|
+
raise UnprocessableEntityError(
|
|
932
|
+
typing.cast(
|
|
933
|
+
HttpValidationError,
|
|
934
|
+
construct_type(
|
|
935
|
+
type_=HttpValidationError, # type: ignore
|
|
936
|
+
object_=_response.json(),
|
|
937
|
+
),
|
|
938
|
+
)
|
|
939
|
+
)
|
|
940
|
+
_response_json = _response.json()
|
|
941
|
+
except JSONDecodeError:
|
|
942
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
943
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
944
|
+
|
|
945
|
+
def detach_core_memory_block(
|
|
946
|
+
self, agent_id: str, block_id: str, *, request_options: typing.Optional[RequestOptions] = None
|
|
947
|
+
) -> AgentState:
|
|
948
|
+
"""
|
|
949
|
+
Detach a block from an agent.
|
|
950
|
+
|
|
951
|
+
Parameters
|
|
952
|
+
----------
|
|
953
|
+
agent_id : str
|
|
954
|
+
|
|
955
|
+
block_id : str
|
|
956
|
+
|
|
957
|
+
request_options : typing.Optional[RequestOptions]
|
|
958
|
+
Request-specific configuration.
|
|
959
|
+
|
|
960
|
+
Returns
|
|
961
|
+
-------
|
|
962
|
+
AgentState
|
|
963
|
+
Successful Response
|
|
964
|
+
|
|
965
|
+
Examples
|
|
966
|
+
--------
|
|
967
|
+
from letta_client import Letta
|
|
968
|
+
|
|
969
|
+
client = Letta(
|
|
970
|
+
token="YOUR_TOKEN",
|
|
971
|
+
)
|
|
972
|
+
client.agents.detach_core_memory_block(
|
|
973
|
+
agent_id="agent_id",
|
|
974
|
+
block_id="block_id",
|
|
975
|
+
)
|
|
976
|
+
"""
|
|
977
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
978
|
+
f"v1/agents/{jsonable_encoder(agent_id)}/core-memory/blocks/detach/{jsonable_encoder(block_id)}",
|
|
979
|
+
method="PATCH",
|
|
980
|
+
request_options=request_options,
|
|
981
|
+
)
|
|
982
|
+
try:
|
|
983
|
+
if 200 <= _response.status_code < 300:
|
|
984
|
+
return typing.cast(
|
|
985
|
+
AgentState,
|
|
986
|
+
construct_type(
|
|
987
|
+
type_=AgentState, # type: ignore
|
|
988
|
+
object_=_response.json(),
|
|
989
|
+
),
|
|
990
|
+
)
|
|
991
|
+
if _response.status_code == 422:
|
|
992
|
+
raise UnprocessableEntityError(
|
|
993
|
+
typing.cast(
|
|
994
|
+
HttpValidationError,
|
|
995
|
+
construct_type(
|
|
996
|
+
type_=HttpValidationError, # type: ignore
|
|
997
|
+
object_=_response.json(),
|
|
998
|
+
),
|
|
999
|
+
)
|
|
1000
|
+
)
|
|
1001
|
+
_response_json = _response.json()
|
|
1002
|
+
except JSONDecodeError:
|
|
1003
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
1004
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
1005
|
+
|
|
1006
|
+
def list_archival_memory(
|
|
1007
|
+
self,
|
|
1008
|
+
agent_id: str,
|
|
1009
|
+
*,
|
|
1010
|
+
after: typing.Optional[int] = None,
|
|
1011
|
+
before: typing.Optional[int] = None,
|
|
1012
|
+
limit: typing.Optional[int] = None,
|
|
1013
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
1014
|
+
) -> typing.List[Passage]:
|
|
1015
|
+
"""
|
|
1016
|
+
Retrieve the memories in an agent's archival memory store (paginated query).
|
|
1017
|
+
|
|
1018
|
+
Parameters
|
|
1019
|
+
----------
|
|
1020
|
+
agent_id : str
|
|
1021
|
+
|
|
1022
|
+
after : typing.Optional[int]
|
|
1023
|
+
Unique ID of the memory to start the query range at.
|
|
1024
|
+
|
|
1025
|
+
before : typing.Optional[int]
|
|
1026
|
+
Unique ID of the memory to end the query range at.
|
|
1027
|
+
|
|
1028
|
+
limit : typing.Optional[int]
|
|
1029
|
+
How many results to include in the response.
|
|
1030
|
+
|
|
1031
|
+
request_options : typing.Optional[RequestOptions]
|
|
1032
|
+
Request-specific configuration.
|
|
1033
|
+
|
|
1034
|
+
Returns
|
|
1035
|
+
-------
|
|
1036
|
+
typing.List[Passage]
|
|
1037
|
+
Successful Response
|
|
1038
|
+
|
|
1039
|
+
Examples
|
|
1040
|
+
--------
|
|
1041
|
+
from letta_client import Letta
|
|
1042
|
+
|
|
1043
|
+
client = Letta(
|
|
1044
|
+
token="YOUR_TOKEN",
|
|
1045
|
+
)
|
|
1046
|
+
client.agents.list_archival_memory(
|
|
1047
|
+
agent_id="agent_id",
|
|
1048
|
+
)
|
|
1049
|
+
"""
|
|
1050
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
1051
|
+
f"v1/agents/{jsonable_encoder(agent_id)}/archival-memory",
|
|
1052
|
+
method="GET",
|
|
1053
|
+
params={
|
|
1054
|
+
"after": after,
|
|
1055
|
+
"before": before,
|
|
1056
|
+
"limit": limit,
|
|
1057
|
+
},
|
|
1058
|
+
request_options=request_options,
|
|
1059
|
+
)
|
|
1060
|
+
try:
|
|
1061
|
+
if 200 <= _response.status_code < 300:
|
|
1062
|
+
return typing.cast(
|
|
1063
|
+
typing.List[Passage],
|
|
1064
|
+
construct_type(
|
|
1065
|
+
type_=typing.List[Passage], # type: ignore
|
|
1066
|
+
object_=_response.json(),
|
|
1067
|
+
),
|
|
1068
|
+
)
|
|
1069
|
+
if _response.status_code == 422:
|
|
1070
|
+
raise UnprocessableEntityError(
|
|
1071
|
+
typing.cast(
|
|
1072
|
+
HttpValidationError,
|
|
1073
|
+
construct_type(
|
|
1074
|
+
type_=HttpValidationError, # type: ignore
|
|
1075
|
+
object_=_response.json(),
|
|
1076
|
+
),
|
|
1077
|
+
)
|
|
1078
|
+
)
|
|
1079
|
+
_response_json = _response.json()
|
|
1080
|
+
except JSONDecodeError:
|
|
1081
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
1082
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
1083
|
+
|
|
1084
|
+
def create_archival_memory(
|
|
1085
|
+
self, agent_id: str, *, text: str, request_options: typing.Optional[RequestOptions] = None
|
|
1086
|
+
) -> typing.List[Passage]:
|
|
1087
|
+
"""
|
|
1088
|
+
Insert a memory into an agent's archival memory store.
|
|
1089
|
+
|
|
1090
|
+
Parameters
|
|
1091
|
+
----------
|
|
1092
|
+
agent_id : str
|
|
1093
|
+
|
|
1094
|
+
text : str
|
|
1095
|
+
Text to write to archival memory.
|
|
1096
|
+
|
|
1097
|
+
request_options : typing.Optional[RequestOptions]
|
|
1098
|
+
Request-specific configuration.
|
|
1099
|
+
|
|
1100
|
+
Returns
|
|
1101
|
+
-------
|
|
1102
|
+
typing.List[Passage]
|
|
1103
|
+
Successful Response
|
|
1104
|
+
|
|
1105
|
+
Examples
|
|
1106
|
+
--------
|
|
1107
|
+
from letta_client import Letta
|
|
1108
|
+
|
|
1109
|
+
client = Letta(
|
|
1110
|
+
token="YOUR_TOKEN",
|
|
1111
|
+
)
|
|
1112
|
+
client.agents.create_archival_memory(
|
|
1113
|
+
agent_id="agent_id",
|
|
1114
|
+
text="text",
|
|
1115
|
+
)
|
|
1116
|
+
"""
|
|
1117
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
1118
|
+
f"v1/agents/{jsonable_encoder(agent_id)}/archival-memory",
|
|
1119
|
+
method="POST",
|
|
1120
|
+
json={
|
|
1121
|
+
"text": text,
|
|
1122
|
+
},
|
|
1123
|
+
headers={
|
|
1124
|
+
"content-type": "application/json",
|
|
1125
|
+
},
|
|
1126
|
+
request_options=request_options,
|
|
1127
|
+
omit=OMIT,
|
|
1128
|
+
)
|
|
1129
|
+
try:
|
|
1130
|
+
if 200 <= _response.status_code < 300:
|
|
1131
|
+
return typing.cast(
|
|
1132
|
+
typing.List[Passage],
|
|
1133
|
+
construct_type(
|
|
1134
|
+
type_=typing.List[Passage], # type: ignore
|
|
1135
|
+
object_=_response.json(),
|
|
1136
|
+
),
|
|
1137
|
+
)
|
|
1138
|
+
if _response.status_code == 422:
|
|
1139
|
+
raise UnprocessableEntityError(
|
|
1140
|
+
typing.cast(
|
|
1141
|
+
HttpValidationError,
|
|
1142
|
+
construct_type(
|
|
1143
|
+
type_=HttpValidationError, # type: ignore
|
|
1144
|
+
object_=_response.json(),
|
|
1145
|
+
),
|
|
1146
|
+
)
|
|
1147
|
+
)
|
|
1148
|
+
_response_json = _response.json()
|
|
1149
|
+
except JSONDecodeError:
|
|
1150
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
1151
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
1152
|
+
|
|
1153
|
+
def delete_archival_memory(
|
|
1154
|
+
self, agent_id: str, memory_id: str, *, request_options: typing.Optional[RequestOptions] = None
|
|
1155
|
+
) -> typing.Optional[typing.Any]:
|
|
1156
|
+
"""
|
|
1157
|
+
Delete a memory from an agent's archival memory store.
|
|
1158
|
+
|
|
1159
|
+
Parameters
|
|
1160
|
+
----------
|
|
1161
|
+
agent_id : str
|
|
1162
|
+
|
|
1163
|
+
memory_id : str
|
|
1164
|
+
|
|
1165
|
+
request_options : typing.Optional[RequestOptions]
|
|
1166
|
+
Request-specific configuration.
|
|
1167
|
+
|
|
1168
|
+
Returns
|
|
1169
|
+
-------
|
|
1170
|
+
typing.Optional[typing.Any]
|
|
1171
|
+
Successful Response
|
|
1172
|
+
|
|
1173
|
+
Examples
|
|
1174
|
+
--------
|
|
1175
|
+
from letta_client import Letta
|
|
1176
|
+
|
|
1177
|
+
client = Letta(
|
|
1178
|
+
token="YOUR_TOKEN",
|
|
1179
|
+
)
|
|
1180
|
+
client.agents.delete_archival_memory(
|
|
1181
|
+
agent_id="agent_id",
|
|
1182
|
+
memory_id="memory_id",
|
|
1183
|
+
)
|
|
1184
|
+
"""
|
|
1185
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
1186
|
+
f"v1/agents/{jsonable_encoder(agent_id)}/archival-memory/{jsonable_encoder(memory_id)}",
|
|
1187
|
+
method="DELETE",
|
|
1188
|
+
request_options=request_options,
|
|
1189
|
+
)
|
|
1190
|
+
try:
|
|
1191
|
+
if 200 <= _response.status_code < 300:
|
|
1192
|
+
return typing.cast(
|
|
1193
|
+
typing.Optional[typing.Any],
|
|
1194
|
+
construct_type(
|
|
1195
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1196
|
+
object_=_response.json(),
|
|
1197
|
+
),
|
|
1198
|
+
)
|
|
1199
|
+
if _response.status_code == 422:
|
|
1200
|
+
raise UnprocessableEntityError(
|
|
1201
|
+
typing.cast(
|
|
1202
|
+
HttpValidationError,
|
|
1203
|
+
construct_type(
|
|
1204
|
+
type_=HttpValidationError, # type: ignore
|
|
1205
|
+
object_=_response.json(),
|
|
1206
|
+
),
|
|
1207
|
+
)
|
|
1208
|
+
)
|
|
1209
|
+
_response_json = _response.json()
|
|
1210
|
+
except JSONDecodeError:
|
|
1211
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
1212
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
1213
|
+
|
|
1214
|
+
def reset_messages(
|
|
1215
|
+
self,
|
|
1216
|
+
agent_id: str,
|
|
1217
|
+
*,
|
|
1218
|
+
add_default_initial_messages: typing.Optional[bool] = None,
|
|
1219
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
1220
|
+
) -> AgentState:
|
|
1221
|
+
"""
|
|
1222
|
+
Resets the messages for an agent
|
|
1223
|
+
|
|
1224
|
+
Parameters
|
|
1225
|
+
----------
|
|
1226
|
+
agent_id : str
|
|
1227
|
+
|
|
1228
|
+
add_default_initial_messages : typing.Optional[bool]
|
|
1229
|
+
If true, adds the default initial messages after resetting.
|
|
1230
|
+
|
|
1231
|
+
request_options : typing.Optional[RequestOptions]
|
|
1232
|
+
Request-specific configuration.
|
|
1233
|
+
|
|
1234
|
+
Returns
|
|
1235
|
+
-------
|
|
1236
|
+
AgentState
|
|
1237
|
+
Successful Response
|
|
1238
|
+
|
|
1239
|
+
Examples
|
|
1240
|
+
--------
|
|
1241
|
+
from letta_client import Letta
|
|
1242
|
+
|
|
1243
|
+
client = Letta(
|
|
1244
|
+
token="YOUR_TOKEN",
|
|
1245
|
+
)
|
|
1246
|
+
client.agents.reset_messages(
|
|
1247
|
+
agent_id="agent_id",
|
|
1248
|
+
)
|
|
1249
|
+
"""
|
|
1250
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
1251
|
+
f"v1/agents/{jsonable_encoder(agent_id)}/reset-messages",
|
|
1252
|
+
method="PATCH",
|
|
1253
|
+
params={
|
|
1254
|
+
"add_default_initial_messages": add_default_initial_messages,
|
|
1255
|
+
},
|
|
1256
|
+
request_options=request_options,
|
|
1257
|
+
)
|
|
1258
|
+
try:
|
|
1259
|
+
if 200 <= _response.status_code < 300:
|
|
1260
|
+
return typing.cast(
|
|
1261
|
+
AgentState,
|
|
1262
|
+
construct_type(
|
|
1263
|
+
type_=AgentState, # type: ignore
|
|
1264
|
+
object_=_response.json(),
|
|
1265
|
+
),
|
|
1266
|
+
)
|
|
1267
|
+
if _response.status_code == 422:
|
|
1268
|
+
raise UnprocessableEntityError(
|
|
1269
|
+
typing.cast(
|
|
1270
|
+
HttpValidationError,
|
|
1271
|
+
construct_type(
|
|
1272
|
+
type_=HttpValidationError, # type: ignore
|
|
1273
|
+
object_=_response.json(),
|
|
1274
|
+
),
|
|
1275
|
+
)
|
|
1276
|
+
)
|
|
1277
|
+
_response_json = _response.json()
|
|
1278
|
+
except JSONDecodeError:
|
|
1279
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
1280
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
1281
|
+
|
|
1282
|
+
def search(
|
|
1283
|
+
self,
|
|
1284
|
+
*,
|
|
1285
|
+
search: typing.Optional[typing.Sequence[AgentsSearchRequestSearchItem]] = OMIT,
|
|
1286
|
+
project_id: typing.Optional[str] = OMIT,
|
|
1287
|
+
combinator: typing.Optional[AgentsSearchRequestCombinator] = OMIT,
|
|
1288
|
+
limit: typing.Optional[float] = OMIT,
|
|
1289
|
+
offset: typing.Optional[float] = OMIT,
|
|
1290
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
1291
|
+
) -> None:
|
|
1292
|
+
"""
|
|
1293
|
+
<Note>This endpoint is only available on Letta Cloud.</Note>
|
|
1294
|
+
|
|
1295
|
+
Search deployed agents.
|
|
1296
|
+
|
|
1297
|
+
Parameters
|
|
1298
|
+
----------
|
|
1299
|
+
search : typing.Optional[typing.Sequence[AgentsSearchRequestSearchItem]]
|
|
1300
|
+
|
|
1301
|
+
project_id : typing.Optional[str]
|
|
1302
|
+
|
|
1303
|
+
combinator : typing.Optional[AgentsSearchRequestCombinator]
|
|
1304
|
+
|
|
1305
|
+
limit : typing.Optional[float]
|
|
1306
|
+
|
|
1307
|
+
offset : typing.Optional[float]
|
|
1308
|
+
|
|
1309
|
+
request_options : typing.Optional[RequestOptions]
|
|
1310
|
+
Request-specific configuration.
|
|
1311
|
+
|
|
1312
|
+
Returns
|
|
1313
|
+
-------
|
|
1314
|
+
None
|
|
1315
|
+
|
|
1316
|
+
Examples
|
|
1317
|
+
--------
|
|
1318
|
+
from letta_client import Letta
|
|
1319
|
+
|
|
1320
|
+
client = Letta(
|
|
1321
|
+
token="YOUR_TOKEN",
|
|
1322
|
+
)
|
|
1323
|
+
client.agents.search()
|
|
1324
|
+
"""
|
|
1325
|
+
_response = self._client_wrapper.httpx_client.request(
|
|
1326
|
+
"v1/agents/search",
|
|
1327
|
+
method="POST",
|
|
1328
|
+
json={
|
|
1329
|
+
"search": convert_and_respect_annotation_metadata(
|
|
1330
|
+
object_=search, annotation=typing.Sequence[AgentsSearchRequestSearchItem], direction="write"
|
|
1331
|
+
),
|
|
1332
|
+
"project_id": project_id,
|
|
1333
|
+
"combinator": combinator,
|
|
1334
|
+
"limit": limit,
|
|
1335
|
+
"offset": offset,
|
|
1336
|
+
},
|
|
1337
|
+
headers={
|
|
1338
|
+
"content-type": "application/json",
|
|
1339
|
+
},
|
|
1340
|
+
request_options=request_options,
|
|
1341
|
+
omit=OMIT,
|
|
1342
|
+
)
|
|
1343
|
+
try:
|
|
1344
|
+
if 200 <= _response.status_code < 300:
|
|
1345
|
+
return
|
|
1346
|
+
_response_json = _response.json()
|
|
1347
|
+
except JSONDecodeError:
|
|
1348
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
1349
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
1350
|
+
|
|
1351
|
+
|
|
1352
|
+
class AsyncAgentsClient:
|
|
1353
|
+
def __init__(self, *, client_wrapper: AsyncClientWrapper):
|
|
1354
|
+
self._client_wrapper = client_wrapper
|
|
1355
|
+
self.context = AsyncContextClient(client_wrapper=self._client_wrapper)
|
|
1356
|
+
self.tools = AsyncToolsClient(client_wrapper=self._client_wrapper)
|
|
1357
|
+
self.sources = AsyncSourcesClient(client_wrapper=self._client_wrapper)
|
|
1358
|
+
self.messages = AsyncMessagesClient(client_wrapper=self._client_wrapper)
|
|
1359
|
+
self.templates = AsyncTemplatesClient(client_wrapper=self._client_wrapper)
|
|
1360
|
+
self.memory_variables = AsyncMemoryVariablesClient(client_wrapper=self._client_wrapper)
|
|
1361
|
+
self.core_memory = AsyncCoreMemoryClient(client_wrapper=self._client_wrapper)
|
|
1362
|
+
self.archival_memory = AsyncArchivalMemoryClient(client_wrapper=self._client_wrapper)
|
|
1363
|
+
|
|
1364
|
+
async def list(
|
|
1365
|
+
self,
|
|
1366
|
+
*,
|
|
1367
|
+
name: typing.Optional[str] = None,
|
|
1368
|
+
tags: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None,
|
|
1369
|
+
match_all_tags: typing.Optional[bool] = None,
|
|
1370
|
+
cursor: typing.Optional[str] = None,
|
|
1371
|
+
limit: typing.Optional[int] = None,
|
|
1372
|
+
query_text: typing.Optional[str] = None,
|
|
1373
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
1374
|
+
) -> typing.List[AgentState]:
|
|
1375
|
+
"""
|
|
1376
|
+
List all agents associated with a given user.
|
|
1377
|
+
This endpoint retrieves a list of all agents and their configurations associated with the specified user ID.
|
|
1378
|
+
|
|
1379
|
+
Parameters
|
|
1380
|
+
----------
|
|
1381
|
+
name : typing.Optional[str]
|
|
1382
|
+
Name of the agent
|
|
1383
|
+
|
|
1384
|
+
tags : typing.Optional[typing.Union[str, typing.Sequence[str]]]
|
|
1385
|
+
List of tags to filter agents by
|
|
1386
|
+
|
|
1387
|
+
match_all_tags : typing.Optional[bool]
|
|
1388
|
+
If True, only returns agents that match ALL given tags. Otherwise, return agents that have ANY of the passed in tags.
|
|
1389
|
+
|
|
1390
|
+
cursor : typing.Optional[str]
|
|
1391
|
+
Cursor for pagination
|
|
1392
|
+
|
|
1393
|
+
limit : typing.Optional[int]
|
|
1394
|
+
Limit for pagination
|
|
1395
|
+
|
|
1396
|
+
query_text : typing.Optional[str]
|
|
1397
|
+
Search agents by name
|
|
1398
|
+
|
|
1399
|
+
request_options : typing.Optional[RequestOptions]
|
|
1400
|
+
Request-specific configuration.
|
|
1401
|
+
|
|
1402
|
+
Returns
|
|
1403
|
+
-------
|
|
1404
|
+
typing.List[AgentState]
|
|
1405
|
+
Successful Response
|
|
1406
|
+
|
|
1407
|
+
Examples
|
|
1408
|
+
--------
|
|
1409
|
+
import asyncio
|
|
1410
|
+
|
|
1411
|
+
from letta_client import AsyncLetta
|
|
1412
|
+
|
|
1413
|
+
client = AsyncLetta(
|
|
1414
|
+
token="YOUR_TOKEN",
|
|
1415
|
+
)
|
|
1416
|
+
|
|
1417
|
+
|
|
1418
|
+
async def main() -> None:
|
|
1419
|
+
await client.agents.list()
|
|
1420
|
+
|
|
1421
|
+
|
|
1422
|
+
asyncio.run(main())
|
|
1423
|
+
"""
|
|
1424
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
1425
|
+
"v1/agents/",
|
|
1426
|
+
method="GET",
|
|
1427
|
+
params={
|
|
1428
|
+
"name": name,
|
|
1429
|
+
"tags": tags,
|
|
1430
|
+
"match_all_tags": match_all_tags,
|
|
1431
|
+
"cursor": cursor,
|
|
1432
|
+
"limit": limit,
|
|
1433
|
+
"query_text": query_text,
|
|
1434
|
+
},
|
|
1435
|
+
request_options=request_options,
|
|
1436
|
+
)
|
|
1437
|
+
try:
|
|
1438
|
+
if 200 <= _response.status_code < 300:
|
|
1439
|
+
return typing.cast(
|
|
1440
|
+
typing.List[AgentState],
|
|
1441
|
+
construct_type(
|
|
1442
|
+
type_=typing.List[AgentState], # type: ignore
|
|
1443
|
+
object_=_response.json(),
|
|
1444
|
+
),
|
|
1445
|
+
)
|
|
1446
|
+
if _response.status_code == 422:
|
|
1447
|
+
raise UnprocessableEntityError(
|
|
1448
|
+
typing.cast(
|
|
1449
|
+
HttpValidationError,
|
|
1450
|
+
construct_type(
|
|
1451
|
+
type_=HttpValidationError, # type: ignore
|
|
1452
|
+
object_=_response.json(),
|
|
1453
|
+
),
|
|
1454
|
+
)
|
|
1455
|
+
)
|
|
1456
|
+
_response_json = _response.json()
|
|
1457
|
+
except JSONDecodeError:
|
|
1458
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
1459
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
1460
|
+
|
|
1461
|
+
async def create(
|
|
1462
|
+
self,
|
|
1463
|
+
*,
|
|
1464
|
+
name: typing.Optional[str] = OMIT,
|
|
1465
|
+
memory_blocks: typing.Optional[typing.Sequence[CreateBlock]] = OMIT,
|
|
1466
|
+
tools: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
1467
|
+
tool_ids: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
1468
|
+
source_ids: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
1469
|
+
block_ids: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
1470
|
+
tool_rules: typing.Optional[typing.Sequence[CreateAgentRequestToolRulesItem]] = OMIT,
|
|
1471
|
+
tags: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
1472
|
+
system: typing.Optional[str] = OMIT,
|
|
1473
|
+
agent_type: typing.Optional[AgentType] = OMIT,
|
|
1474
|
+
llm_config: typing.Optional[LlmConfig] = OMIT,
|
|
1475
|
+
embedding_config: typing.Optional[EmbeddingConfig] = OMIT,
|
|
1476
|
+
initial_message_sequence: typing.Optional[typing.Sequence[MessageCreate]] = OMIT,
|
|
1477
|
+
include_base_tools: typing.Optional[bool] = OMIT,
|
|
1478
|
+
include_multi_agent_tools: typing.Optional[bool] = OMIT,
|
|
1479
|
+
description: typing.Optional[str] = OMIT,
|
|
1480
|
+
metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
|
|
1481
|
+
model: typing.Optional[str] = OMIT,
|
|
1482
|
+
embedding: typing.Optional[str] = OMIT,
|
|
1483
|
+
context_window_limit: typing.Optional[int] = OMIT,
|
|
1484
|
+
embedding_chunk_size: typing.Optional[int] = OMIT,
|
|
1485
|
+
from_template: typing.Optional[str] = OMIT,
|
|
1486
|
+
template: typing.Optional[bool] = OMIT,
|
|
1487
|
+
project: typing.Optional[str] = OMIT,
|
|
1488
|
+
tool_exec_environment_variables: typing.Optional[typing.Dict[str, typing.Optional[str]]] = OMIT,
|
|
1489
|
+
memory_variables: typing.Optional[typing.Dict[str, typing.Optional[str]]] = OMIT,
|
|
1490
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
1491
|
+
) -> AgentState:
|
|
1492
|
+
"""
|
|
1493
|
+
Create a new agent with the specified configuration.
|
|
1494
|
+
|
|
1495
|
+
Parameters
|
|
1496
|
+
----------
|
|
1497
|
+
name : typing.Optional[str]
|
|
1498
|
+
The name of the agent.
|
|
1499
|
+
|
|
1500
|
+
memory_blocks : typing.Optional[typing.Sequence[CreateBlock]]
|
|
1501
|
+
The blocks to create in the agent's in-context memory.
|
|
1502
|
+
|
|
1503
|
+
tools : typing.Optional[typing.Sequence[str]]
|
|
1504
|
+
The tools used by the agent.
|
|
1505
|
+
|
|
1506
|
+
tool_ids : typing.Optional[typing.Sequence[str]]
|
|
1507
|
+
The ids of the tools used by the agent.
|
|
1508
|
+
|
|
1509
|
+
source_ids : typing.Optional[typing.Sequence[str]]
|
|
1510
|
+
The ids of the sources used by the agent.
|
|
1511
|
+
|
|
1512
|
+
block_ids : typing.Optional[typing.Sequence[str]]
|
|
1513
|
+
The ids of the blocks used by the agent.
|
|
1514
|
+
|
|
1515
|
+
tool_rules : typing.Optional[typing.Sequence[CreateAgentRequestToolRulesItem]]
|
|
1516
|
+
The tool rules governing the agent.
|
|
1517
|
+
|
|
1518
|
+
tags : typing.Optional[typing.Sequence[str]]
|
|
1519
|
+
The tags associated with the agent.
|
|
1520
|
+
|
|
1521
|
+
system : typing.Optional[str]
|
|
1522
|
+
The system prompt used by the agent.
|
|
1523
|
+
|
|
1524
|
+
agent_type : typing.Optional[AgentType]
|
|
1525
|
+
The type of agent.
|
|
1526
|
+
|
|
1527
|
+
llm_config : typing.Optional[LlmConfig]
|
|
1528
|
+
The LLM configuration used by the agent.
|
|
1529
|
+
|
|
1530
|
+
embedding_config : typing.Optional[EmbeddingConfig]
|
|
1531
|
+
The embedding configuration used by the agent.
|
|
1532
|
+
|
|
1533
|
+
initial_message_sequence : typing.Optional[typing.Sequence[MessageCreate]]
|
|
1534
|
+
The initial set of messages to put in the agent's in-context memory.
|
|
1535
|
+
|
|
1536
|
+
include_base_tools : typing.Optional[bool]
|
|
1537
|
+
If true, attaches the Letta core tools (e.g. archival_memory and core_memory related functions).
|
|
1538
|
+
|
|
1539
|
+
include_multi_agent_tools : typing.Optional[bool]
|
|
1540
|
+
If true, attaches the Letta multi-agent tools (e.g. sending a message to another agent).
|
|
1541
|
+
|
|
1542
|
+
description : typing.Optional[str]
|
|
1543
|
+
The description of the agent.
|
|
1544
|
+
|
|
1545
|
+
metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
|
|
1546
|
+
The metadata of the agent.
|
|
1547
|
+
|
|
1548
|
+
model : typing.Optional[str]
|
|
1549
|
+
The LLM configuration handle used by the agent, specified in the format provider/model-name, as an alternative to specifying llm_config.
|
|
1550
|
+
|
|
1551
|
+
embedding : typing.Optional[str]
|
|
1552
|
+
The embedding configuration handle used by the agent, specified in the format provider/model-name.
|
|
1553
|
+
|
|
1554
|
+
context_window_limit : typing.Optional[int]
|
|
1555
|
+
The context window limit used by the agent.
|
|
1556
|
+
|
|
1557
|
+
embedding_chunk_size : typing.Optional[int]
|
|
1558
|
+
The embedding chunk size used by the agent.
|
|
1559
|
+
|
|
1560
|
+
from_template : typing.Optional[str]
|
|
1561
|
+
The template id used to configure the agent
|
|
1562
|
+
|
|
1563
|
+
template : typing.Optional[bool]
|
|
1564
|
+
Whether the agent is a template
|
|
1565
|
+
|
|
1566
|
+
project : typing.Optional[str]
|
|
1567
|
+
The project slug that the agent will be associated with.
|
|
1568
|
+
|
|
1569
|
+
tool_exec_environment_variables : typing.Optional[typing.Dict[str, typing.Optional[str]]]
|
|
1570
|
+
The environment variables for tool execution specific to this agent.
|
|
1571
|
+
|
|
1572
|
+
memory_variables : typing.Optional[typing.Dict[str, typing.Optional[str]]]
|
|
1573
|
+
The variables that should be set for the agent.
|
|
1574
|
+
|
|
1575
|
+
request_options : typing.Optional[RequestOptions]
|
|
1576
|
+
Request-specific configuration.
|
|
1577
|
+
|
|
1578
|
+
Returns
|
|
1579
|
+
-------
|
|
1580
|
+
AgentState
|
|
1581
|
+
Successful Response
|
|
1582
|
+
|
|
1583
|
+
Examples
|
|
1584
|
+
--------
|
|
1585
|
+
import asyncio
|
|
1586
|
+
|
|
1587
|
+
from letta_client import AsyncLetta
|
|
1588
|
+
|
|
1589
|
+
client = AsyncLetta(
|
|
1590
|
+
token="YOUR_TOKEN",
|
|
1591
|
+
)
|
|
1592
|
+
|
|
1593
|
+
|
|
1594
|
+
async def main() -> None:
|
|
1595
|
+
await client.agents.create()
|
|
1596
|
+
|
|
1597
|
+
|
|
1598
|
+
asyncio.run(main())
|
|
1599
|
+
"""
|
|
1600
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
1601
|
+
"v1/agents/",
|
|
1602
|
+
method="POST",
|
|
1603
|
+
json={
|
|
1604
|
+
"name": name,
|
|
1605
|
+
"memory_blocks": convert_and_respect_annotation_metadata(
|
|
1606
|
+
object_=memory_blocks, annotation=typing.Sequence[CreateBlock], direction="write"
|
|
1607
|
+
),
|
|
1608
|
+
"tools": tools,
|
|
1609
|
+
"tool_ids": tool_ids,
|
|
1610
|
+
"source_ids": source_ids,
|
|
1611
|
+
"block_ids": block_ids,
|
|
1612
|
+
"tool_rules": convert_and_respect_annotation_metadata(
|
|
1613
|
+
object_=tool_rules, annotation=typing.Sequence[CreateAgentRequestToolRulesItem], direction="write"
|
|
1614
|
+
),
|
|
1615
|
+
"tags": tags,
|
|
1616
|
+
"system": system,
|
|
1617
|
+
"agent_type": agent_type,
|
|
1618
|
+
"llm_config": convert_and_respect_annotation_metadata(
|
|
1619
|
+
object_=llm_config, annotation=LlmConfig, direction="write"
|
|
1620
|
+
),
|
|
1621
|
+
"embedding_config": convert_and_respect_annotation_metadata(
|
|
1622
|
+
object_=embedding_config, annotation=EmbeddingConfig, direction="write"
|
|
1623
|
+
),
|
|
1624
|
+
"initial_message_sequence": convert_and_respect_annotation_metadata(
|
|
1625
|
+
object_=initial_message_sequence, annotation=typing.Sequence[MessageCreate], direction="write"
|
|
1626
|
+
),
|
|
1627
|
+
"include_base_tools": include_base_tools,
|
|
1628
|
+
"include_multi_agent_tools": include_multi_agent_tools,
|
|
1629
|
+
"description": description,
|
|
1630
|
+
"metadata": metadata,
|
|
1631
|
+
"model": model,
|
|
1632
|
+
"embedding": embedding,
|
|
1633
|
+
"context_window_limit": context_window_limit,
|
|
1634
|
+
"embedding_chunk_size": embedding_chunk_size,
|
|
1635
|
+
"from_template": from_template,
|
|
1636
|
+
"template": template,
|
|
1637
|
+
"project": project,
|
|
1638
|
+
"tool_exec_environment_variables": tool_exec_environment_variables,
|
|
1639
|
+
"memory_variables": memory_variables,
|
|
1640
|
+
},
|
|
1641
|
+
headers={
|
|
1642
|
+
"content-type": "application/json",
|
|
1643
|
+
},
|
|
1644
|
+
request_options=request_options,
|
|
1645
|
+
omit=OMIT,
|
|
1646
|
+
)
|
|
1647
|
+
try:
|
|
1648
|
+
if 200 <= _response.status_code < 300:
|
|
1649
|
+
return typing.cast(
|
|
1650
|
+
AgentState,
|
|
1651
|
+
construct_type(
|
|
1652
|
+
type_=AgentState, # type: ignore
|
|
1653
|
+
object_=_response.json(),
|
|
1654
|
+
),
|
|
1655
|
+
)
|
|
1656
|
+
if _response.status_code == 422:
|
|
1657
|
+
raise UnprocessableEntityError(
|
|
1658
|
+
typing.cast(
|
|
1659
|
+
HttpValidationError,
|
|
1660
|
+
construct_type(
|
|
1661
|
+
type_=HttpValidationError, # type: ignore
|
|
1662
|
+
object_=_response.json(),
|
|
1663
|
+
),
|
|
1664
|
+
)
|
|
1665
|
+
)
|
|
1666
|
+
_response_json = _response.json()
|
|
1667
|
+
except JSONDecodeError:
|
|
1668
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
1669
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
1670
|
+
|
+    async def retrieve(self, agent_id: str, *, request_options: typing.Optional[RequestOptions] = None) -> AgentState:
+        """
+        Get the state of the agent.
+
+        Parameters
+        ----------
+        agent_id : str
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        AgentState
+            Successful Response
+
+        Examples
+        --------
+        import asyncio
+
+        from letta_client import AsyncLetta
+
+        client = AsyncLetta(
+            token="YOUR_TOKEN",
+        )
+
+
+        async def main() -> None:
+            await client.agents.retrieve(
+                agent_id="agent_id",
+            )
+
+
+        asyncio.run(main())
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            f"v1/agents/{jsonable_encoder(agent_id)}",
+            method="GET",
+            request_options=request_options,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return typing.cast(
+                    AgentState,
+                    construct_type(
+                        type_=AgentState,  # type: ignore
+                        object_=_response.json(),
+                    ),
+                )
+            if _response.status_code == 422:
+                raise UnprocessableEntityError(
+                    typing.cast(
+                        HttpValidationError,
+                        construct_type(
+                            type_=HttpValidationError,  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
1735
|
+
async def delete(
|
|
1736
|
+
self, agent_id: str, *, request_options: typing.Optional[RequestOptions] = None
|
|
1737
|
+
) -> typing.Optional[typing.Any]:
|
|
1738
|
+
"""
|
|
1739
|
+
Delete an agent.
|
|
1740
|
+
|
|
1741
|
+
Parameters
|
|
1742
|
+
----------
|
|
1743
|
+
agent_id : str
|
|
1744
|
+
|
|
1745
|
+
request_options : typing.Optional[RequestOptions]
|
|
1746
|
+
Request-specific configuration.
|
|
1747
|
+
|
|
1748
|
+
Returns
|
|
1749
|
+
-------
|
|
1750
|
+
typing.Optional[typing.Any]
|
|
1751
|
+
Successful Response
|
|
1752
|
+
|
|
1753
|
+
Examples
|
|
1754
|
+
--------
|
|
1755
|
+
import asyncio
|
|
1756
|
+
|
|
1757
|
+
from letta_client import AsyncLetta
|
|
1758
|
+
|
|
1759
|
+
client = AsyncLetta(
|
|
1760
|
+
token="YOUR_TOKEN",
|
|
1761
|
+
)
|
|
1762
|
+
|
|
1763
|
+
|
|
1764
|
+
async def main() -> None:
|
|
1765
|
+
await client.agents.delete(
|
|
1766
|
+
agent_id="agent_id",
|
|
1767
|
+
)
|
|
1768
|
+
|
|
1769
|
+
|
|
1770
|
+
asyncio.run(main())
|
|
1771
|
+
"""
|
|
1772
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
1773
|
+
f"v1/agents/{jsonable_encoder(agent_id)}",
|
|
1774
|
+
method="DELETE",
|
|
1775
|
+
request_options=request_options,
|
|
1776
|
+
)
|
|
1777
|
+
try:
|
|
1778
|
+
if 200 <= _response.status_code < 300:
|
|
1779
|
+
return typing.cast(
|
|
1780
|
+
typing.Optional[typing.Any],
|
|
1781
|
+
construct_type(
|
|
1782
|
+
type_=typing.Optional[typing.Any], # type: ignore
|
|
1783
|
+
object_=_response.json(),
|
|
1784
|
+
),
|
|
1785
|
+
)
|
|
1786
|
+
if _response.status_code == 422:
|
|
1787
|
+
raise UnprocessableEntityError(
|
|
1788
|
+
typing.cast(
|
|
1789
|
+
HttpValidationError,
|
|
1790
|
+
construct_type(
|
|
1791
|
+
type_=HttpValidationError, # type: ignore
|
|
1792
|
+
object_=_response.json(),
|
|
1793
|
+
),
|
|
1794
|
+
)
|
|
1795
|
+
)
|
|
1796
|
+
_response_json = _response.json()
|
|
1797
|
+
except JSONDecodeError:
|
|
1798
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
1799
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
1800
|
+
|
|
1801
|
+
async def modify(
|
|
1802
|
+
self,
|
|
1803
|
+
agent_id: str,
|
|
1804
|
+
*,
|
|
1805
|
+
name: typing.Optional[str] = OMIT,
|
|
1806
|
+
tool_ids: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
1807
|
+
source_ids: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
1808
|
+
block_ids: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
1809
|
+
tags: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
1810
|
+
system: typing.Optional[str] = OMIT,
|
|
1811
|
+
tool_rules: typing.Optional[typing.Sequence[UpdateAgentToolRulesItem]] = OMIT,
|
|
1812
|
+
llm_config: typing.Optional[LlmConfig] = OMIT,
|
|
1813
|
+
embedding_config: typing.Optional[EmbeddingConfig] = OMIT,
|
|
1814
|
+
message_ids: typing.Optional[typing.Sequence[str]] = OMIT,
|
|
1815
|
+
description: typing.Optional[str] = OMIT,
|
|
1816
|
+
metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
|
|
1817
|
+
tool_exec_environment_variables: typing.Optional[typing.Dict[str, typing.Optional[str]]] = OMIT,
|
|
1818
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
1819
|
+
) -> AgentState:
|
|
1820
|
+
"""
|
|
1821
|
+
Update an existing agent
|
|
1822
|
+
|
|
1823
|
+
Parameters
|
|
1824
|
+
----------
|
|
1825
|
+
agent_id : str
|
|
1826
|
+
|
|
1827
|
+
name : typing.Optional[str]
|
|
1828
|
+
The name of the agent.
|
|
891
1829
|
|
|
892
1830
|
tool_ids : typing.Optional[typing.Sequence[str]]
|
|
893
1831
|
The ids of the tools used by the agent.
|
|
894
1832
|
|
|
895
|
-
source_ids : typing.Optional[typing.Sequence[str]]
|
|
896
|
-
The ids of the sources used by the agent.
|
|
1833
|
+
source_ids : typing.Optional[typing.Sequence[str]]
|
|
1834
|
+
The ids of the sources used by the agent.
|
|
1835
|
+
|
|
1836
|
+
block_ids : typing.Optional[typing.Sequence[str]]
|
|
1837
|
+
The ids of the blocks used by the agent.
|
|
1838
|
+
|
|
1839
|
+
tags : typing.Optional[typing.Sequence[str]]
|
|
1840
|
+
The tags associated with the agent.
|
|
1841
|
+
|
|
1842
|
+
system : typing.Optional[str]
|
|
1843
|
+
The system prompt used by the agent.
|
|
1844
|
+
|
|
1845
|
+
tool_rules : typing.Optional[typing.Sequence[UpdateAgentToolRulesItem]]
|
|
1846
|
+
The tool rules governing the agent.
|
|
1847
|
+
|
|
1848
|
+
llm_config : typing.Optional[LlmConfig]
|
|
1849
|
+
The LLM configuration used by the agent.
|
|
1850
|
+
|
|
1851
|
+
embedding_config : typing.Optional[EmbeddingConfig]
|
|
1852
|
+
The embedding configuration used by the agent.
|
|
1853
|
+
|
|
1854
|
+
message_ids : typing.Optional[typing.Sequence[str]]
|
|
1855
|
+
The ids of the messages in the agent's in-context memory.
|
|
1856
|
+
|
|
1857
|
+
description : typing.Optional[str]
|
|
1858
|
+
The description of the agent.
|
|
1859
|
+
|
|
1860
|
+
metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
|
|
1861
|
+
The metadata of the agent.
|
|
1862
|
+
|
|
1863
|
+
tool_exec_environment_variables : typing.Optional[typing.Dict[str, typing.Optional[str]]]
|
|
1864
|
+
The environment variables for tool execution specific to this agent.
|
|
1865
|
+
|
|
1866
|
+
request_options : typing.Optional[RequestOptions]
|
|
1867
|
+
Request-specific configuration.
|
|
1868
|
+
|
|
1869
|
+
Returns
|
|
1870
|
+
-------
|
|
1871
|
+
AgentState
|
|
1872
|
+
Successful Response
|
|
1873
|
+
|
|
1874
|
+
Examples
|
|
1875
|
+
--------
|
|
1876
|
+
import asyncio
|
|
1877
|
+
|
|
1878
|
+
from letta_client import AsyncLetta
|
|
1879
|
+
|
|
1880
|
+
client = AsyncLetta(
|
|
1881
|
+
token="YOUR_TOKEN",
|
|
1882
|
+
)
|
|
1883
|
+
|
|
1884
|
+
|
|
1885
|
+
async def main() -> None:
|
|
1886
|
+
await client.agents.modify(
|
|
1887
|
+
agent_id="agent_id",
|
|
1888
|
+
)
|
|
1889
|
+
|
|
1890
|
+
|
|
1891
|
+
asyncio.run(main())
|
|
1892
|
+
"""
|
|
1893
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
1894
|
+
f"v1/agents/{jsonable_encoder(agent_id)}",
|
|
1895
|
+
method="PATCH",
|
|
1896
|
+
json={
|
|
1897
|
+
"name": name,
|
|
1898
|
+
"tool_ids": tool_ids,
|
|
1899
|
+
"source_ids": source_ids,
|
|
1900
|
+
"block_ids": block_ids,
|
|
1901
|
+
"tags": tags,
|
|
1902
|
+
"system": system,
|
|
1903
|
+
"tool_rules": convert_and_respect_annotation_metadata(
|
|
1904
|
+
object_=tool_rules, annotation=typing.Sequence[UpdateAgentToolRulesItem], direction="write"
|
|
1905
|
+
),
|
|
1906
|
+
"llm_config": convert_and_respect_annotation_metadata(
|
|
1907
|
+
object_=llm_config, annotation=LlmConfig, direction="write"
|
|
1908
|
+
),
|
|
1909
|
+
"embedding_config": convert_and_respect_annotation_metadata(
|
|
1910
|
+
object_=embedding_config, annotation=EmbeddingConfig, direction="write"
|
|
1911
|
+
),
|
|
1912
|
+
"message_ids": message_ids,
|
|
1913
|
+
"description": description,
|
|
1914
|
+
"metadata": metadata,
|
|
1915
|
+
"tool_exec_environment_variables": tool_exec_environment_variables,
|
|
1916
|
+
},
|
|
1917
|
+
headers={
|
|
1918
|
+
"content-type": "application/json",
|
|
1919
|
+
},
|
|
1920
|
+
request_options=request_options,
|
|
1921
|
+
omit=OMIT,
|
|
1922
|
+
)
|
|
1923
|
+
try:
|
|
1924
|
+
if 200 <= _response.status_code < 300:
|
|
1925
|
+
return typing.cast(
|
|
1926
|
+
AgentState,
|
|
1927
|
+
construct_type(
|
|
1928
|
+
type_=AgentState, # type: ignore
|
|
1929
|
+
object_=_response.json(),
|
|
1930
|
+
),
|
|
1931
|
+
)
|
|
1932
|
+
if _response.status_code == 422:
|
|
1933
|
+
raise UnprocessableEntityError(
|
|
1934
|
+
typing.cast(
|
|
1935
|
+
HttpValidationError,
|
|
1936
|
+
construct_type(
|
|
1937
|
+
type_=HttpValidationError, # type: ignore
|
|
1938
|
+
object_=_response.json(),
|
|
1939
|
+
),
|
|
1940
|
+
)
|
|
1941
|
+
)
|
|
1942
|
+
_response_json = _response.json()
|
|
1943
|
+
except JSONDecodeError:
|
|
1944
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
1945
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
1946
|
+
|
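
Taken together, the new async create/retrieve/modify/delete endpoints above support a simple agent lifecycle. A minimal sketch, assuming the returned AgentState exposes id and name fields as elsewhere in this package; the combined flow is illustrative, not part of the generated SDK:

import asyncio

from letta_client import AsyncLetta

client = AsyncLetta(token="YOUR_TOKEN")


async def main() -> None:
    # Create, then rename and tag the agent via PATCH /v1/agents/{agent_id}
    agent = await client.agents.create(name="demo-agent")
    agent = await client.agents.modify(agent_id=agent.id, name="renamed-agent", tags=["demo"])
    # Read the full state back, then remove the agent
    state = await client.agents.retrieve(agent_id=agent.id)
    print(state.name)
    await client.agents.delete(agent_id=agent.id)


asyncio.run(main())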
+    async def retrieve_agent_memory(
+        self, agent_id: str, *, request_options: typing.Optional[RequestOptions] = None
+    ) -> Memory:
+        """
+        Retrieve the memory state of a specific agent.
+        This endpoint fetches the current memory state of the agent identified by the user ID and agent ID.
 
-
-
+        Parameters
+        ----------
+        agent_id : str
 
-
-
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
 
-
-
+        Returns
+        -------
+        Memory
+            Successful Response
 
-
-
+        Examples
+        --------
+        import asyncio
 
-
-            The type of agent.
+        from letta_client import AsyncLetta
 
-
-
+        client = AsyncLetta(
+            token="YOUR_TOKEN",
+        )
 
-        embedding_config : typing.Optional[EmbeddingConfig]
-            The embedding configuration used by the agent.
 
-
-
+        async def main() -> None:
+            await client.agents.retrieve_agent_memory(
+                agent_id="agent_id",
+            )
 
-        include_base_tools : typing.Optional[bool]
-            If true, attaches the Letta core tools (e.g. archival_memory and core_memory related functions).
 
-
-
+        asyncio.run(main())
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            f"v1/agents/{jsonable_encoder(agent_id)}/core-memory",
+            method="GET",
+            request_options=request_options,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return typing.cast(
+                    Memory,
+                    construct_type(
+                        type_=Memory,  # type: ignore
+                        object_=_response.json(),
+                    ),
+                )
+            if _response.status_code == 422:
+                raise UnprocessableEntityError(
+                    typing.cast(
+                        HttpValidationError,
+                        construct_type(
+                            type_=HttpValidationError,  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
2014
|
+
async def retrieve_core_memory_block(
|
|
2015
|
+
self, agent_id: str, block_label: str, *, request_options: typing.Optional[RequestOptions] = None
|
|
2016
|
+
) -> Block:
|
|
2017
|
+
"""
|
|
2018
|
+
Retrieve a memory block from an agent.
|
|
2019
|
+
|
|
2020
|
+
Parameters
|
|
2021
|
+
----------
|
|
2022
|
+
agent_id : str
|
|
2023
|
+
|
|
2024
|
+
block_label : str
|
|
2025
|
+
|
|
2026
|
+
request_options : typing.Optional[RequestOptions]
|
|
2027
|
+
Request-specific configuration.
|
|
2028
|
+
|
|
2029
|
+
Returns
|
|
2030
|
+
-------
|
|
2031
|
+
Block
|
|
2032
|
+
Successful Response
|
|
2033
|
+
|
|
2034
|
+
Examples
|
|
2035
|
+
--------
|
|
2036
|
+
import asyncio
|
|
2037
|
+
|
|
2038
|
+
from letta_client import AsyncLetta
|
|
2039
|
+
|
|
2040
|
+
client = AsyncLetta(
|
|
2041
|
+
token="YOUR_TOKEN",
|
|
2042
|
+
)
|
|
2043
|
+
|
|
2044
|
+
|
|
2045
|
+
async def main() -> None:
|
|
2046
|
+
await client.agents.retrieve_core_memory_block(
|
|
2047
|
+
agent_id="agent_id",
|
|
2048
|
+
block_label="block_label",
|
|
2049
|
+
)
|
|
2050
|
+
|
|
2051
|
+
|
|
2052
|
+
asyncio.run(main())
|
|
2053
|
+
"""
|
|
2054
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
2055
|
+
f"v1/agents/{jsonable_encoder(agent_id)}/core-memory/blocks/{jsonable_encoder(block_label)}",
|
|
2056
|
+
method="GET",
|
|
2057
|
+
request_options=request_options,
|
|
2058
|
+
)
|
|
2059
|
+
try:
|
|
2060
|
+
if 200 <= _response.status_code < 300:
|
|
2061
|
+
return typing.cast(
|
|
2062
|
+
Block,
|
|
2063
|
+
construct_type(
|
|
2064
|
+
type_=Block, # type: ignore
|
|
2065
|
+
object_=_response.json(),
|
|
2066
|
+
),
|
|
2067
|
+
)
|
|
2068
|
+
if _response.status_code == 422:
|
|
2069
|
+
raise UnprocessableEntityError(
|
|
2070
|
+
typing.cast(
|
|
2071
|
+
HttpValidationError,
|
|
2072
|
+
construct_type(
|
|
2073
|
+
type_=HttpValidationError, # type: ignore
|
|
2074
|
+
object_=_response.json(),
|
|
2075
|
+
),
|
|
2076
|
+
)
|
|
2077
|
+
)
|
|
2078
|
+
_response_json = _response.json()
|
|
2079
|
+
except JSONDecodeError:
|
|
2080
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
2081
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
2082
|
+
|
|
2083
|
+
async def modify_core_memory_block(
|
|
2084
|
+
self,
|
|
2085
|
+
agent_id: str,
|
|
2086
|
+
block_label: str,
|
|
2087
|
+
*,
|
|
2088
|
+
value: typing.Optional[str] = OMIT,
|
|
2089
|
+
limit: typing.Optional[int] = OMIT,
|
|
2090
|
+
name: typing.Optional[str] = OMIT,
|
|
2091
|
+
is_template: typing.Optional[bool] = OMIT,
|
|
2092
|
+
label: typing.Optional[str] = OMIT,
|
|
2093
|
+
description: typing.Optional[str] = OMIT,
|
|
2094
|
+
metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
|
|
2095
|
+
request_options: typing.Optional[RequestOptions] = None,
|
|
2096
|
+
) -> Block:
|
|
2097
|
+
"""
|
|
2098
|
+
Updates a memory block of an agent.
|
|
2099
|
+
|
|
2100
|
+
Parameters
|
|
2101
|
+
----------
|
|
2102
|
+
agent_id : str
|
|
2103
|
+
|
|
2104
|
+
block_label : str
|
|
2105
|
+
|
|
2106
|
+
value : typing.Optional[str]
|
|
2107
|
+
Value of the block.
|
|
2108
|
+
|
|
2109
|
+
limit : typing.Optional[int]
|
|
2110
|
+
Character limit of the block.
|
|
2111
|
+
|
|
2112
|
+
name : typing.Optional[str]
|
|
2113
|
+
Name of the block if it is a template.
|
|
2114
|
+
|
|
2115
|
+
is_template : typing.Optional[bool]
|
|
2116
|
+
Whether the block is a template (e.g. saved human/persona options).
|
|
2117
|
+
|
|
2118
|
+
label : typing.Optional[str]
|
|
2119
|
+
Label of the block (e.g. 'human', 'persona') in the context window.
|
|
927
2120
|
|
|
928
2121
|
description : typing.Optional[str]
|
|
929
|
-
|
|
2122
|
+
Description of the block.
|
|
930
2123
|
|
|
931
2124
|
metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
|
|
932
|
-
|
|
2125
|
+
Metadata of the block.
|
|
933
2126
|
|
|
934
|
-
|
|
935
|
-
|
|
2127
|
+
request_options : typing.Optional[RequestOptions]
|
|
2128
|
+
Request-specific configuration.
|
|
936
2129
|
|
|
937
|
-
|
|
938
|
-
|
|
2130
|
+
Returns
|
|
2131
|
+
-------
|
|
2132
|
+
Block
|
|
2133
|
+
Successful Response
|
|
939
2134
|
|
|
940
|
-
|
|
941
|
-
|
|
2135
|
+
Examples
|
|
2136
|
+
--------
|
|
2137
|
+
import asyncio
|
|
942
2138
|
|
|
943
|
-
|
|
944
|
-
The embedding chunk size used by the agent.
|
|
2139
|
+
from letta_client import AsyncLetta
|
|
945
2140
|
|
|
946
|
-
|
|
947
|
-
|
|
2141
|
+
client = AsyncLetta(
|
|
2142
|
+
token="YOUR_TOKEN",
|
|
2143
|
+
)
|
|
948
2144
|
|
|
949
|
-
template : typing.Optional[bool]
|
|
950
|
-
Whether the agent is a template
|
|
951
2145
|
|
|
952
|
-
|
|
953
|
-
|
|
2146
|
+
async def main() -> None:
|
|
2147
|
+
await client.agents.modify_core_memory_block(
|
|
2148
|
+
agent_id="agent_id",
|
|
2149
|
+
block_label="block_label",
|
|
2150
|
+
)
|
|
954
2151
|
|
|
955
|
-
tool_exec_environment_variables : typing.Optional[typing.Dict[str, typing.Optional[str]]]
|
|
956
|
-
The environment variables for tool execution specific to this agent.
|
|
957
2152
|
|
|
958
|
-
|
|
959
|
-
|
|
2153
|
+
asyncio.run(main())
|
|
2154
|
+
"""
|
|
2155
|
+
_response = await self._client_wrapper.httpx_client.request(
|
|
2156
|
+
f"v1/agents/{jsonable_encoder(agent_id)}/core-memory/blocks/{jsonable_encoder(block_label)}",
|
|
2157
|
+
method="PATCH",
|
|
2158
|
+
json={
|
|
2159
|
+
"value": value,
|
|
2160
|
+
"limit": limit,
|
|
2161
|
+
"name": name,
|
|
2162
|
+
"is_template": is_template,
|
|
2163
|
+
"label": label,
|
|
2164
|
+
"description": description,
|
|
2165
|
+
"metadata": metadata,
|
|
2166
|
+
},
|
|
2167
|
+
request_options=request_options,
|
|
2168
|
+
omit=OMIT,
|
|
2169
|
+
)
|
|
2170
|
+
try:
|
|
2171
|
+
if 200 <= _response.status_code < 300:
|
|
2172
|
+
return typing.cast(
|
|
2173
|
+
Block,
|
|
2174
|
+
construct_type(
|
|
2175
|
+
type_=Block, # type: ignore
|
|
2176
|
+
object_=_response.json(),
|
|
2177
|
+
),
|
|
2178
|
+
)
|
|
2179
|
+
if _response.status_code == 422:
|
|
2180
|
+
raise UnprocessableEntityError(
|
|
2181
|
+
typing.cast(
|
|
2182
|
+
HttpValidationError,
|
|
2183
|
+
construct_type(
|
|
2184
|
+
type_=HttpValidationError, # type: ignore
|
|
2185
|
+
object_=_response.json(),
|
|
2186
|
+
),
|
|
2187
|
+
)
|
|
2188
|
+
)
|
|
2189
|
+
_response_json = _response.json()
|
|
2190
|
+
except JSONDecodeError:
|
|
2191
|
+
raise ApiError(status_code=_response.status_code, body=_response.text)
|
|
2192
|
+
raise ApiError(status_code=_response.status_code, body=_response_json)
|
|
2193
|
+
|
+    async def list_core_memory_blocks(
+        self, agent_id: str, *, request_options: typing.Optional[RequestOptions] = None
+    ) -> typing.List[Block]:
+        """
+        Retrieve the memory blocks of a specific agent.
+
+        Parameters
+        ----------
+        agent_id : str
 
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
         Returns
         -------
-
+        typing.List[Block]
             Successful Response
 
         Examples
@@ -978,64 +2221,24 @@ class AsyncAgentsClient:
 
 
         async def main() -> None:
-            await client.agents.
+            await client.agents.list_core_memory_blocks(
+                agent_id="agent_id",
+            )
 
 
         asyncio.run(main())
         """
         _response = await self._client_wrapper.httpx_client.request(
-            "v1/agents/",
-            method="
-            json={
-                "name": name,
-                "memory_blocks": convert_and_respect_annotation_metadata(
-                    object_=memory_blocks, annotation=typing.Sequence[CreateBlock], direction="write"
-                ),
-                "tools": tools,
-                "tool_ids": tool_ids,
-                "source_ids": source_ids,
-                "block_ids": block_ids,
-                "tool_rules": convert_and_respect_annotation_metadata(
-                    object_=tool_rules, annotation=typing.Sequence[CreateAgentRequestToolRulesItem], direction="write"
-                ),
-                "tags": tags,
-                "system": system,
-                "agent_type": agent_type,
-                "llm_config": convert_and_respect_annotation_metadata(
-                    object_=llm_config, annotation=LlmConfig, direction="write"
-                ),
-                "embedding_config": convert_and_respect_annotation_metadata(
-                    object_=embedding_config, annotation=EmbeddingConfig, direction="write"
-                ),
-                "initial_message_sequence": convert_and_respect_annotation_metadata(
-                    object_=initial_message_sequence, annotation=typing.Sequence[MessageCreate], direction="write"
-                ),
-                "include_base_tools": include_base_tools,
-                "include_multi_agent_tools": include_multi_agent_tools,
-                "description": description,
-                "metadata_": metadata,
-                "model": model,
-                "embedding": embedding,
-                "context_window_limit": context_window_limit,
-                "embedding_chunk_size": embedding_chunk_size,
-                "from_template": from_template,
-                "template": template,
-                "project": project,
-                "tool_exec_environment_variables": tool_exec_environment_variables,
-                "memory_variables": memory_variables,
-            },
-            headers={
-                "content-type": "application/json",
-            },
+            f"v1/agents/{jsonable_encoder(agent_id)}/core-memory/blocks",
+            method="GET",
             request_options=request_options,
-            omit=OMIT,
         )
         try:
             if 200 <= _response.status_code < 300:
                 return typing.cast(
-
+                    typing.List[Block],
                     construct_type(
-                        type_=
+                        type_=typing.List[Block],  # type: ignore
                         object_=_response.json(),
                     ),
                 )
@@ -1054,14 +2257,18 @@ class AsyncAgentsClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
-    async def
+    async def attach_core_memory_block(
+        self, agent_id: str, block_id: str, *, request_options: typing.Optional[RequestOptions] = None
+    ) -> AgentState:
         """
-
+        Attach a block to an agent.
 
         Parameters
         ----------
         agent_id : str
 
+        block_id : str
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
@@ -1082,16 +2289,17 @@ class AsyncAgentsClient:
 
 
         async def main() -> None:
-            await client.agents.
+            await client.agents.attach_core_memory_block(
                 agent_id="agent_id",
+                block_id="block_id",
             )
 
 
         asyncio.run(main())
         """
         _response = await self._client_wrapper.httpx_client.request(
-            f"v1/agents/{jsonable_encoder(agent_id)}",
-            method="
+            f"v1/agents/{jsonable_encoder(agent_id)}/core-memory/blocks/attach/{jsonable_encoder(block_id)}",
+            method="PATCH",
             request_options=request_options,
         )
         try:
@@ -1118,22 +2326,24 @@ class AsyncAgentsClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
-    async def
-        self, agent_id: str, *, request_options: typing.Optional[RequestOptions] = None
-    ) ->
+    async def detach_core_memory_block(
+        self, agent_id: str, block_id: str, *, request_options: typing.Optional[RequestOptions] = None
+    ) -> AgentState:
         """
-
+        Detach a block from an agent.
 
        Parameters
         ----------
         agent_id : str
 
+        block_id : str
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
         Returns
         -------
-
+        AgentState
             Successful Response
 
         Examples
@@ -1148,24 +2358,25 @@ class AsyncAgentsClient:
 
 
         async def main() -> None:
-            await client.agents.
+            await client.agents.detach_core_memory_block(
                 agent_id="agent_id",
+                block_id="block_id",
            )
 
 
         asyncio.run(main())
         """
         _response = await self._client_wrapper.httpx_client.request(
-            f"v1/agents/{jsonable_encoder(agent_id)}",
-            method="
+            f"v1/agents/{jsonable_encoder(agent_id)}/core-memory/blocks/detach/{jsonable_encoder(block_id)}",
+            method="PATCH",
             request_options=request_options,
         )
         try:
             if 200 <= _response.status_code < 300:
                 return typing.cast(
-
+                    AgentState,
                     construct_type(
-                        type_=
+                        type_=AgentState,  # type: ignore
                         object_=_response.json(),
                     ),
                 )
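
A minimal sketch tying the new core-memory block endpoints above together. The agent and block IDs are placeholders, and the assumption that Block exposes label and value fields follows the package's types rather than this diff:

import asyncio

from letta_client import AsyncLetta

client = AsyncLetta(token="YOUR_TOKEN")


async def main() -> None:
    # List the blocks currently attached to the agent's core memory
    blocks = await client.agents.list_core_memory_blocks(agent_id="agent_id")
    for block in blocks:
        print(block.label, block.value)

    # Edit one block in place, then detach it and re-attach it by id
    await client.agents.modify_core_memory_block(
        agent_id="agent_id", block_label="persona", value="New persona text."
    )
    await client.agents.detach_core_memory_block(agent_id="agent_id", block_id="block_id")
    await client.agents.attach_core_memory_block(agent_id="agent_id", block_id="block_id")


asyncio.run(main())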
@@ -1184,77 +2395,111 @@ class AsyncAgentsClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
-    async def
+    async def list_archival_memory(
         self,
         agent_id: str,
         *,
-
-
-
-        block_ids: typing.Optional[typing.Sequence[str]] = OMIT,
-        tags: typing.Optional[typing.Sequence[str]] = OMIT,
-        system: typing.Optional[str] = OMIT,
-        tool_rules: typing.Optional[typing.Sequence[UpdateAgentToolRulesItem]] = OMIT,
-        llm_config: typing.Optional[LlmConfig] = OMIT,
-        embedding_config: typing.Optional[EmbeddingConfig] = OMIT,
-        message_ids: typing.Optional[typing.Sequence[str]] = OMIT,
-        description: typing.Optional[str] = OMIT,
-        metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
-        tool_exec_environment_variables: typing.Optional[typing.Dict[str, typing.Optional[str]]] = OMIT,
+        after: typing.Optional[int] = None,
+        before: typing.Optional[int] = None,
+        limit: typing.Optional[int] = None,
         request_options: typing.Optional[RequestOptions] = None,
-    ) ->
+    ) -> typing.List[Passage]:
         """
-
+        Retrieve the memories in an agent's archival memory store (paginated query).
 
         Parameters
         ----------
         agent_id : str
 
-
-
+        after : typing.Optional[int]
+            Unique ID of the memory to start the query range at.
 
-
-
+        before : typing.Optional[int]
+            Unique ID of the memory to end the query range at.
 
-
-
+        limit : typing.Optional[int]
+            How many results to include in the response.
 
-
-
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
 
-
-
+        Returns
+        -------
+        typing.List[Passage]
+            Successful Response
 
-
-
+        Examples
+        --------
+        import asyncio
 
-
-            The tool rules governing the agent.
+        from letta_client import AsyncLetta
 
-
-
+        client = AsyncLetta(
+            token="YOUR_TOKEN",
+        )
 
-        embedding_config : typing.Optional[EmbeddingConfig]
-            The embedding configuration used by the agent.
 
-
-
+        async def main() -> None:
+            await client.agents.list_archival_memory(
+                agent_id="agent_id",
+            )
 
-        description : typing.Optional[str]
-            The description of the agent.
 
-
-
+        asyncio.run(main())
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            f"v1/agents/{jsonable_encoder(agent_id)}/archival-memory",
+            method="GET",
+            params={
+                "after": after,
+                "before": before,
+                "limit": limit,
+            },
+            request_options=request_options,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return typing.cast(
+                    typing.List[Passage],
+                    construct_type(
+                        type_=typing.List[Passage],  # type: ignore
+                        object_=_response.json(),
+                    ),
+                )
+            if _response.status_code == 422:
+                raise UnprocessableEntityError(
+                    typing.cast(
+                        HttpValidationError,
+                        construct_type(
+                            type_=HttpValidationError,  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
 
-
-
+    async def create_archival_memory(
+        self, agent_id: str, *, text: str, request_options: typing.Optional[RequestOptions] = None
+    ) -> typing.List[Passage]:
+        """
+        Insert a memory into an agent's archival memory store.
+
+        Parameters
+        ----------
+        agent_id : str
+
+        text : str
+            Text to write to archival memory.
 
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
         Returns
         -------
-
+        typing.List[Passage]
             Successful Response
 
         Examples
@@ -1269,36 +2514,19 @@ class AsyncAgentsClient:
 
 
         async def main() -> None:
-            await client.agents.
+            await client.agents.create_archival_memory(
                 agent_id="agent_id",
+                text="text",
             )
 
 
         asyncio.run(main())
         """
         _response = await self._client_wrapper.httpx_client.request(
-            f"v1/agents/{jsonable_encoder(agent_id)}",
-            method="
+            f"v1/agents/{jsonable_encoder(agent_id)}/archival-memory",
+            method="POST",
             json={
-                "
-                "tool_ids": tool_ids,
-                "source_ids": source_ids,
-                "block_ids": block_ids,
-                "tags": tags,
-                "system": system,
-                "tool_rules": convert_and_respect_annotation_metadata(
-                    object_=tool_rules, annotation=typing.Sequence[UpdateAgentToolRulesItem], direction="write"
-                ),
-                "llm_config": convert_and_respect_annotation_metadata(
-                    object_=llm_config, annotation=LlmConfig, direction="write"
-                ),
-                "embedding_config": convert_and_respect_annotation_metadata(
-                    object_=embedding_config, annotation=EmbeddingConfig, direction="write"
-                ),
-                "message_ids": message_ids,
-                "description": description,
-                "metadata_": metadata,
-                "tool_exec_environment_variables": tool_exec_environment_variables,
+                "text": text,
             },
             headers={
                 "content-type": "application/json",
@@ -1309,9 +2537,78 @@ class AsyncAgentsClient:
         try:
             if 200 <= _response.status_code < 300:
                 return typing.cast(
-
+                    typing.List[Passage],
                     construct_type(
-                        type_=
+                        type_=typing.List[Passage],  # type: ignore
+                        object_=_response.json(),
+                    ),
+                )
+            if _response.status_code == 422:
+                raise UnprocessableEntityError(
+                    typing.cast(
+                        HttpValidationError,
+                        construct_type(
+                            type_=HttpValidationError,  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def delete_archival_memory(
+        self, agent_id: str, memory_id: str, *, request_options: typing.Optional[RequestOptions] = None
+    ) -> typing.Optional[typing.Any]:
+        """
+        Delete a memory from an agent's archival memory store.
+
+        Parameters
+        ----------
+        agent_id : str
+
+        memory_id : str
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        typing.Optional[typing.Any]
+            Successful Response
+
+        Examples
+        --------
+        import asyncio
+
+        from letta_client import AsyncLetta
+
+        client = AsyncLetta(
+            token="YOUR_TOKEN",
+        )
+
+
+        async def main() -> None:
+            await client.agents.delete_archival_memory(
+                agent_id="agent_id",
+                memory_id="memory_id",
+            )
+
+
+        asyncio.run(main())
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            f"v1/agents/{jsonable_encoder(agent_id)}/archival-memory/{jsonable_encoder(memory_id)}",
+            method="DELETE",
+            request_options=request_options,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return typing.cast(
+                    typing.Optional[typing.Any],
+                    construct_type(
+                        type_=typing.Optional[typing.Any],  # type: ignore
                         object_=_response.json(),
                     ),
                 )
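
Finally, a minimal sketch of the archival-memory round trip exposed by the endpoints above. The assumption that Passage objects expose id and text fields follows the package's types; the agent ID and text are placeholders:

import asyncio

from letta_client import AsyncLetta

client = AsyncLetta(token="YOUR_TOKEN")


async def main() -> None:
    # Write one passage, page through the store, then delete the passage again
    created = await client.agents.create_archival_memory(agent_id="agent_id", text="User prefers dark mode.")
    passages = await client.agents.list_archival_memory(agent_id="agent_id", limit=10)
    for passage in passages:
        print(passage.text)
    await client.agents.delete_archival_memory(agent_id="agent_id", memory_id=created[0].id)


asyncio.run(main())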