letta-nightly 0.5.0.dev20241015104156__py3-none-any.whl → 0.5.0.dev20241017104103__py3-none-any.whl
This diff compares the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of letta-nightly might be problematic.
- letta/agent.py +170 -16
- letta/client/client.py +186 -42
- letta/client/utils.py +15 -0
- letta/constants.py +1 -1
- letta/functions/functions.py +1 -1
- letta/functions/schema_generator.py +3 -2
- letta/main.py +6 -4
- letta/metadata.py +27 -3
- letta/schemas/agent.py +7 -3
- letta/schemas/memory.py +37 -0
- letta/schemas/tool.py +4 -0
- letta/server/rest_api/routers/openai/assistants/threads.py +1 -1
- letta/server/rest_api/routers/v1/agents.py +43 -0
- letta/server/rest_api/routers/v1/sources.py +28 -1
- letta/server/rest_api/routers/v1/tools.py +1 -1
- letta/server/server.py +157 -94
- letta/server/static_files/assets/{index-dc228d4a.js → index-d6b3669a.js} +59 -59
- letta/server/static_files/index.html +1 -1
- letta_nightly-0.5.0.dev20241017104103.dist-info/METADATA +203 -0
- {letta_nightly-0.5.0.dev20241015104156.dist-info → letta_nightly-0.5.0.dev20241017104103.dist-info}/RECORD +23 -23
- letta_nightly-0.5.0.dev20241015104156.dist-info/METADATA +0 -105
- {letta_nightly-0.5.0.dev20241015104156.dist-info → letta_nightly-0.5.0.dev20241017104103.dist-info}/LICENSE +0 -0
- {letta_nightly-0.5.0.dev20241015104156.dist-info → letta_nightly-0.5.0.dev20241017104103.dist-info}/WHEEL +0 -0
- {letta_nightly-0.5.0.dev20241015104156.dist-info → letta_nightly-0.5.0.dev20241017104103.dist-info}/entry_points.txt +0 -0
letta/agent.py
CHANGED
@@ -11,14 +11,19 @@ from letta.agent_store.storage import StorageConnector
 from letta.constants import (
     CLI_WARNING_PREFIX,
     FIRST_MESSAGE_ATTEMPTS,
+    FUNC_FAILED_HEARTBEAT_MESSAGE,
     IN_CONTEXT_MEMORY_KEYWORD,
     LLM_MAX_TOKENS,
     MESSAGE_SUMMARY_TRUNC_KEEP_N_LAST,
     MESSAGE_SUMMARY_TRUNC_TOKEN_FRAC,
     MESSAGE_SUMMARY_WARNING_FRAC,
+    REQ_HEARTBEAT_MESSAGE,
 )
+from letta.errors import LLMError
 from letta.interface import AgentInterface
+from letta.llm_api.helpers import is_context_overflow_error
 from letta.llm_api.llm_api_tools import create
+from letta.local_llm.utils import num_tokens_from_messages
 from letta.memory import ArchivalMemory, RecallMemory, summarize_messages
 from letta.metadata import MetadataStore
 from letta.persistence_manager import LocalStateManager
@@ -26,17 +31,21 @@ from letta.schemas.agent import AgentState, AgentStepResponse
 from letta.schemas.block import Block
 from letta.schemas.embedding_config import EmbeddingConfig
 from letta.schemas.enums import MessageRole, OptionState
-from letta.schemas.memory import Memory
+from letta.schemas.memory import ContextWindowOverview, Memory
 from letta.schemas.message import Message, UpdateMessage
 from letta.schemas.openai.chat_completion_response import ChatCompletionResponse
 from letta.schemas.openai.chat_completion_response import (
     Message as ChatCompletionMessage,
 )
+from letta.schemas.openai.chat_completion_response import UsageStatistics
 from letta.schemas.passage import Passage
 from letta.schemas.tool import Tool
+from letta.schemas.usage import LettaUsageStatistics
 from letta.system import (
+    get_heartbeat,
     get_initial_boot_messages,
     get_login_event,
+    get_token_limit_warning,
     package_function_response,
     package_summarize_message,
     package_user_message,
@@ -56,9 +65,6 @@ from letta.utils import (
     verify_first_message_correctness,
 )
 
-from .errors import LLMError
-from .llm_api.helpers import is_context_overflow_error
-
 
 def compile_memory_metadata_block(
     memory_edit_timestamp: datetime.datetime,
@@ -202,7 +208,7 @@ class BaseAgent(ABC):
     def step(
         self,
         messages: Union[Message, List[Message]],
-    ) ->
+    ) -> LettaUsageStatistics:
         """
         Top-level event message handler for the agent.
         """
@@ -721,18 +727,105 @@ class Agent(BaseAgent):
         return messages, heartbeat_request, function_failed
 
     def step(
+        self,
+        messages: Union[Message, List[Message]],
+        # additional args
+        chaining: bool = True,
+        max_chaining_steps: Optional[int] = None,
+        ms: Optional[MetadataStore] = None,
+        **kwargs,
+    ) -> LettaUsageStatistics:
+        """Run Agent.step in a loop, handling chaining via heartbeat requests and function failures"""
+        # assert ms is not None, "MetadataStore is required"
+
+        next_input_message = messages if isinstance(messages, list) else [messages]
+        counter = 0
+        total_usage = UsageStatistics()
+        step_count = 0
+        while True:
+            kwargs["ms"] = ms
+            kwargs["first_message"] = False
+            step_response = self.inner_step(
+                messages=next_input_message,
+                **kwargs,
+            )
+            step_response.messages
+            heartbeat_request = step_response.heartbeat_request
+            function_failed = step_response.function_failed
+            token_warning = step_response.in_context_memory_warning
+            usage = step_response.usage
+
+            step_count += 1
+            total_usage += usage
+            counter += 1
+            self.interface.step_complete()
+
+            # logger.debug("Saving agent state")
+            # save updated state
+            if ms:
+                save_agent(self, ms)
+
+            # Chain stops
+            if not chaining:
+                printd("No chaining, stopping after one step")
+                break
+            elif max_chaining_steps is not None and counter > max_chaining_steps:
+                printd(f"Hit max chaining steps, stopping after {counter} steps")
+                break
+            # Chain handlers
+            elif token_warning:
+                assert self.agent_state.user_id is not None
+                next_input_message = Message.dict_to_message(
+                    agent_id=self.agent_state.id,
+                    user_id=self.agent_state.user_id,
+                    model=self.model,
+                    openai_message_dict={
+                        "role": "user",  # TODO: change to system?
+                        "content": get_token_limit_warning(),
+                    },
+                )
+                continue  # always chain
+            elif function_failed:
+                assert self.agent_state.user_id is not None
+                next_input_message = Message.dict_to_message(
+                    agent_id=self.agent_state.id,
+                    user_id=self.agent_state.user_id,
+                    model=self.model,
+                    openai_message_dict={
+                        "role": "user",  # TODO: change to system?
+                        "content": get_heartbeat(FUNC_FAILED_HEARTBEAT_MESSAGE),
+                    },
+                )
+                continue  # always chain
+            elif heartbeat_request:
+                assert self.agent_state.user_id is not None
+                next_input_message = Message.dict_to_message(
+                    agent_id=self.agent_state.id,
+                    user_id=self.agent_state.user_id,
+                    model=self.model,
+                    openai_message_dict={
+                        "role": "user",  # TODO: change to system?
+                        "content": get_heartbeat(REQ_HEARTBEAT_MESSAGE),
+                    },
+                )
+                continue  # always chain
+            # Letta no-op / yield
+            else:
+                break
+
+        return LettaUsageStatistics(**total_usage.model_dump(), step_count=step_count)
+
+    def inner_step(
         self,
         messages: Union[Message, List[Message]],
         first_message: bool = False,
         first_message_retry_limit: int = FIRST_MESSAGE_ATTEMPTS,
         skip_verify: bool = False,
-        return_dicts: bool = True,
-        # recreate_message_timestamp: bool = True, # if True, when input is a Message type, recreated the 'created_at' field
         stream: bool = False,  # TODO move to config?
         inner_thoughts_in_kwargs_option: OptionState = OptionState.DEFAULT,
         ms: Optional[MetadataStore] = None,
     ) -> AgentStepResponse:
-        """
+        """Runs a single step in the agent loop (generates at most one LLM call)"""
 
         try:
 
@@ -834,13 +927,12 @@ class Agent(BaseAgent):
             )
 
             self._append_to_messages(all_new_messages)
-            messages_to_return = [msg.to_openai_dict() for msg in all_new_messages] if return_dicts else all_new_messages
 
             # update state after each step
             self.update_state()
 
             return AgentStepResponse(
-                messages=
+                messages=all_new_messages,
                 heartbeat_request=heartbeat_request,
                 function_failed=function_failed,
                 in_context_memory_warning=active_memory_warning,
@@ -856,15 +948,12 @@ class Agent(BaseAgent):
                 self.summarize_messages_inplace()
 
                 # Try step again
-                return self.
+                return self.inner_step(
                     messages=messages,
                     first_message=first_message,
                     first_message_retry_limit=first_message_retry_limit,
                     skip_verify=skip_verify,
-                    return_dicts=return_dicts,
-                    # recreate_message_timestamp=recreate_message_timestamp,
                     stream=stream,
-                    # timestamp=timestamp,
                     inner_thoughts_in_kwargs_option=inner_thoughts_in_kwargs_option,
                     ms=ms,
                 )
@@ -905,7 +994,7 @@ class Agent(BaseAgent):
            # created_at=timestamp,
         )
 
-        return self.
+        return self.inner_step(messages=[user_message], **kwargs)
 
     def summarize_messages_inplace(self, cutoff=None, preserve_last_N_messages=True, disallow_tool_as_first=True):
         assert self.messages[0]["role"] == "system", f"self.messages[0] should be system (instead got {self.messages[0]})"
@@ -1326,13 +1415,78 @@ class Agent(BaseAgent):
         self.pop_until_user()
         user_message = self.pop_message(count=1)[0]
         assert user_message.text is not None, "User message text is None"
-        step_response = self.step_user_message(user_message_str=user_message.text
+        step_response = self.step_user_message(user_message_str=user_message.text)
         messages = step_response.messages
 
         assert messages is not None
         assert all(isinstance(msg, Message) for msg in messages), "step() returned non-Message objects"
         return messages
 
+    def get_context_window(self) -> ContextWindowOverview:
+        """Get the context window of the agent"""
+
+        system_prompt = self.agent_state.system  # TODO is this the current system or the initial system?
+        num_tokens_system = count_tokens(system_prompt)
+        core_memory = self.memory.compile()
+        num_tokens_core_memory = count_tokens(core_memory)
+
+        # conversion of messages to OpenAI dict format, which is passed to the token counter
+        messages_openai_format = self.messages
+
+        # Check if there's a summary message in the message queue
+        if (
+            len(self._messages) > 1
+            and self._messages[1].role == MessageRole.user
+            and isinstance(self._messages[1].text, str)
+            # TODO remove hardcoding
+            and "The following is a summary of the previous " in self._messages[1].text
+        ):
+            # Summary message exists
+            assert self._messages[1].text is not None
+            summary_memory = self._messages[1].text
+            num_tokens_summary_memory = count_tokens(self._messages[1].text)
+            # with a summary message, the real messages start at index 2
+            num_tokens_messages = (
+                num_tokens_from_messages(messages=messages_openai_format[2:], model=self.model) if len(messages_openai_format) > 2 else 0
+            )
+
+        else:
+            summary_memory = None
+            num_tokens_summary_memory = 0
+            # with no summary message, the real messages start at index 1
+            num_tokens_messages = (
+                num_tokens_from_messages(messages=messages_openai_format[1:], model=self.model) if len(messages_openai_format) > 1 else 0
+            )
+
+        num_archival_memory = self.persistence_manager.archival_memory.storage.size()
+        num_recall_memory = self.persistence_manager.recall_memory.storage.size()
+        external_memory_summary = compile_memory_metadata_block(
+            memory_edit_timestamp=get_utc_time(),  # dummy timestamp
+            archival_memory=self.persistence_manager.archival_memory,
+            recall_memory=self.persistence_manager.recall_memory,
+        )
+        num_tokens_external_memory_summary = count_tokens(external_memory_summary)
+
+        return ContextWindowOverview(
+            # context window breakdown (in messages)
+            num_messages=len(self._messages),
+            num_archival_memory=num_archival_memory,
+            num_recall_memory=num_recall_memory,
+            num_tokens_external_memory_summary=num_tokens_external_memory_summary,
+            # top-level information
+            context_window_size_max=self.agent_state.llm_config.context_window,
+            context_window_size_current=num_tokens_system + num_tokens_core_memory + num_tokens_summary_memory + num_tokens_messages,
+            # context window breakdown (in tokens)
+            num_tokens_system=num_tokens_system,
+            system_prompt=system_prompt,
+            num_tokens_core_memory=num_tokens_core_memory,
+            core_memory=core_memory,
+            num_tokens_summary_memory=num_tokens_summary_memory,
+            summary_memory=summary_memory,
+            num_tokens_messages=num_tokens_messages,
+            messages=self._messages,
+        )
+
 
 def save_agent(agent: Agent, ms: MetadataStore):
     """Save agent to metadata store"""
letta/client/client.py
CHANGED
@@ -96,6 +96,12 @@ class AbstractClient(object):
     ):
         raise NotImplementedError
 
+    def add_tool_to_agent(self, agent_id: str, tool_id: str):
+        raise NotImplementedError
+
+    def remove_tool_from_agent(self, agent_id: str, tool_id: str):
+        raise NotImplementedError
+
     def rename_agent(self, agent_id: str, new_name: str):
         raise NotImplementedError
 
@@ -206,7 +212,10 @@ class AbstractClient(object):
     def load_data(self, connector: DataConnector, source_name: str):
         raise NotImplementedError
 
-    def
+    def load_file_to_source(self, filename: str, source_id: str, blocking=True) -> Job:
+        raise NotImplementedError
+
+    def delete_file_from_source(self, source_id: str, file_id: str) -> None:
         raise NotImplementedError
 
     def create_source(self, name: str) -> Source:
@@ -471,6 +480,39 @@ class RESTClient(AbstractClient):
             raise ValueError(f"Failed to update agent: {response.text}")
         return AgentState(**response.json())
 
+    def add_tool_to_agent(self, agent_id: str, tool_id: str):
+        """
+        Add tool to an existing agent
+
+        Args:
+            agent_id (str): ID of the agent
+            tool_id (str): A tool id
+
+        Returns:
+            agent_state (AgentState): State of the updated agent
+        """
+        response = requests.patch(f"{self.base_url}/{self.api_prefix}/agents/{agent_id}/add-tool/{tool_id}", headers=self.headers)
+        if response.status_code != 200:
+            raise ValueError(f"Failed to update agent: {response.text}")
+        return AgentState(**response.json())
+
+    def remove_tool_from_agent(self, agent_id: str, tool_id: str):
+        """
+        Removes tools from an existing agent
+
+        Args:
+            agent_id (str): ID of the agent
+            tool_id (str): The tool id
+
+        Returns:
+            agent_state (AgentState): State of the updated agent
+        """
+
+        response = requests.patch(f"{self.base_url}/{self.api_prefix}/agents/{agent_id}/remove-tool/{tool_id}", headers=self.headers)
+        if response.status_code != 200:
+            raise ValueError(f"Failed to update agent: {response.text}")
+        return AgentState(**response.json())
+
     def rename_agent(self, agent_id: str, new_name: str):
         """
         Rename an agent
@@ -747,8 +789,9 @@ class RESTClient(AbstractClient):
         # simplify messages
         if not include_full_message:
             messages = []
-            for
-
+            for m in response.messages:
+                assert isinstance(m, Message)
+                messages += m.to_letta_message()
             response.messages = messages
 
         return response
@@ -1037,7 +1080,7 @@ class RESTClient(AbstractClient):
     def load_data(self, connector: DataConnector, source_name: str):
         raise NotImplementedError
 
-    def
+    def load_file_to_source(self, filename: str, source_id: str, blocking=True):
         """
         Load a file into a source
 
@@ -1068,6 +1111,11 @@ class RESTClient(AbstractClient):
             time.sleep(1)
         return job
 
+    def delete_file_from_source(self, source_id: str, file_id: str) -> None:
+        response = requests.delete(f"{self.base_url}/{self.api_prefix}/sources/{source_id}/{file_id}", headers=self.headers)
+        if response.status_code not in [200, 204]:
+            raise ValueError(f"Failed to delete tool: {response.text}")
+
     def create_source(self, name: str) -> Source:
         """
         Create a source
@@ -1644,6 +1692,36 @@ class LocalClient(AbstractClient):
         )
         return agent_state
 
+    def add_tool_to_agent(self, agent_id: str, tool_id: str):
+        """
+        Add tool to an existing agent
+
+        Args:
+            agent_id (str): ID of the agent
+            tool_id (str): A tool id
+
+        Returns:
+            agent_state (AgentState): State of the updated agent
+        """
+        self.interface.clear()
+        agent_state = self.server.add_tool_to_agent(agent_id=agent_id, tool_id=tool_id, user_id=self.user_id)
+        return agent_state
+
+    def remove_tool_from_agent(self, agent_id: str, tool_id: str):
+        """
+        Removes tools from an existing agent
+
+        Args:
+            agent_id (str): ID of the agent
+            tool_id (str): The tool id
+
+        Returns:
+            agent_state (AgentState): State of the updated agent
+        """
+        self.interface.clear()
+        agent_state = self.server.remove_tool_from_agent(agent_id=agent_id, tool_id=tool_id, user_id=self.user_id)
+        return agent_state
+
     def rename_agent(self, agent_id: str, new_name: str):
         """
         Rename an agent
@@ -1677,7 +1755,7 @@ class LocalClient(AbstractClient):
         self.interface.clear()
         return self.server.get_agent_state(user_id=self.user_id, agent_id=agent_id)
 
-    def get_agent_id(self, agent_name: str) ->
+    def get_agent_id(self, agent_name: str) -> Optional[str]:
         """
         Get the ID of an agent by name (names are unique per user)
 
@@ -1763,10 +1841,45 @@ class LocalClient(AbstractClient):
 
     # agent interactions
 
+    def send_messages(
+        self,
+        agent_id: str,
+        messages: List[Union[Message | MessageCreate]],
+        include_full_message: Optional[bool] = False,
+    ):
+        """
+        Send pre-packed messages to an agent.
+
+        Args:
+            agent_id (str): ID of the agent
+            messages (List[Union[Message | MessageCreate]]): List of messages to send
+
+        Returns:
+            response (LettaResponse): Response from the agent
+        """
+        self.interface.clear()
+        usage = self.server.send_messages(user_id=self.user_id, agent_id=agent_id, messages=messages)
+
+        # auto-save
+        if self.auto_save:
+            self.save()
+
+        # format messages
+        messages = self.interface.to_list()
+        if include_full_message:
+            letta_messages = messages
+        else:
+            letta_messages = []
+            for m in messages:
+                letta_messages += m.to_letta_message()
+
+        return LettaResponse(messages=letta_messages, usage=usage)
+
     def send_message(
         self,
         message: str,
         role: str,
+        name: Optional[str] = None,
         agent_id: Optional[str] = None,
         agent_name: Optional[str] = None,
         stream_steps: bool = False,
@@ -1790,36 +1903,36 @@ class LocalClient(AbstractClient):
             # lookup agent by name
            assert agent_name, f"Either agent_id or agent_name must be provided"
            agent_id = self.get_agent_id(agent_name=agent_name)
-
-            agent_state = self.get_agent(agent_id=agent_id)
+            assert agent_id, f"Agent with name {agent_name} not found"
 
         if stream_steps or stream_tokens:
             # TODO: implement streaming with stream=True/False
             raise NotImplementedError
         self.interface.clear()
-
-
-
-
-
-
+
+        usage = self.server.send_messages(
+            user_id=self.user_id,
+            agent_id=agent_id,
+            messages=[MessageCreate(role=MessageRole(role), text=message, name=name)],
+        )
 
         # auto-save
         if self.auto_save:
             self.save()
 
-
-
-
-        messages = self.interface.to_list()
-        for m in messages:
-
-        letta_messages = []
-        for m in messages:
-
-        return LettaResponse(messages=letta_messages, usage=usage)
+        ## TODO: need to make sure date/timestamp is propely passed
+        ## TODO: update self.interface.to_list() to return actual Message objects
+        ## here, the message objects will have faulty created_by timestamps
+        # messages = self.interface.to_list()
+        # for m in messages:
+        #     assert isinstance(m, Message), f"Expected Message object, got {type(m)}"
+        # letta_messages = []
+        # for m in messages:
+        #     letta_messages += m.to_letta_message()
+        # return LettaResponse(messages=letta_messages, usage=usage)
 
         # format messages
+        messages = self.interface.to_list()
         if include_full_message:
             letta_messages = messages
         else:
@@ -1872,6 +1985,13 @@ class LocalClient(AbstractClient):
 
     # humans / personas
 
+    def get_block_id(self, name: str, label: str) -> str:
+
+        block = self.server.get_blocks(name=name, label=label, user_id=self.user_id, template=True)
+        if not block:
+            return None
+        return block[0].id
+
     def create_human(self, name: str, text: str):
         """
         Create a human block template (saved human string to pre-fill `ChatMemory`)
@@ -2030,30 +2150,37 @@ class LocalClient(AbstractClient):
        Returns:
            None
        """
-
-        if existing_tool_id:
+        if self.tool_with_name_and_user_id_exists(tool):
             if update:
-                self.server.update_tool(
+                return self.server.update_tool(
                     ToolUpdate(
-                        id=
+                        id=tool.id,
+                        description=tool.description,
                         source_type=tool.source_type,
                         source_code=tool.source_code,
                         tags=tool.tags,
                         json_schema=tool.json_schema,
                         name=tool.name,
-                    )
+                    ),
+                    self.user_id,
                 )
             else:
-                raise ValueError(f"Tool with name
-
-
-
-
-
-
-
-
+                raise ValueError(f"Tool with id={tool.id} and name={tool.name}already exists")
+        else:
+            # call server function
+            return self.server.create_tool(
+                ToolCreate(
+                    id=tool.id,
+                    description=tool.description,
+                    source_type=tool.source_type,
+                    source_code=tool.source_code,
+                    name=tool.name,
+                    json_schema=tool.json_schema,
+                    tags=tool.tags,
+                ),
+                user_id=self.user_id,
+                update=update,
+            )
 
     # TODO: Use the above function `add_tool` here as there is duplicate logic
     def create_tool(
@@ -2062,6 +2189,7 @@ class LocalClient(AbstractClient):
         name: Optional[str] = None,
         update: Optional[bool] = True,  # TODO: actually use this
         tags: Optional[List[str]] = None,
+        terminal: Optional[bool] = False,
     ) -> Tool:
         """
         Create a tool. This stores the source code of function on the server, so that the server can execute the function and generate an OpenAI JSON schemas for it when using with an agent.
@@ -2071,6 +2199,7 @@ class LocalClient(AbstractClient):
             name: (str): Name of the tool (must be unique per-user.)
             tags (Optional[List[str]], optional): Tags for the tool. Defaults to None.
             update (bool, optional): Update the tool if it already exists. Defaults to True.
+            terminal (bool, optional): Whether the tool is a terminal tool (no more agent steps). Defaults to False.
 
         Returns:
             tool (Tool): The created tool.
@@ -2086,7 +2215,7 @@ class LocalClient(AbstractClient):
         # call server function
         return self.server.create_tool(
             # ToolCreate(source_type=source_type, source_code=source_code, name=tool_name, json_schema=json_schema, tags=tags),
-            ToolCreate(source_type=source_type, source_code=source_code, name=name, tags=tags),
+            ToolCreate(source_type=source_type, source_code=source_code, name=name, tags=tags, terminal=terminal),
             user_id=self.user_id,
             update=update,
         )
@@ -2117,7 +2246,9 @@ class LocalClient(AbstractClient):
 
         source_type = "python"
 
-        return self.server.update_tool(
+        return self.server.update_tool(
+            ToolUpdate(id=id, source_type=source_type, source_code=source_code, tags=tags, name=name), self.user_id
+        )
 
     def list_tools(self):
         """
@@ -2162,7 +2293,17 @@ class LocalClient(AbstractClient):
         """
         return self.server.get_tool_id(name, self.user_id)
 
-
+    def tool_with_name_and_user_id_exists(self, tool: Tool) -> bool:
+        """
+        Check if the tool with name and user_id exists
+
+        Args:
+            tool (Tool): the tool
+
+        Returns:
+            (bool): True if the id exists, False otherwise.
+        """
+        return self.server.tool_with_name_and_user_id_exists(tool, self.user_id)
 
     def load_data(self, connector: DataConnector, source_name: str):
         """
@@ -2174,7 +2315,7 @@ class LocalClient(AbstractClient):
         """
         self.server.load_data(user_id=self.user_id, connector=connector, source_name=source_name)
 
-    def
+    def load_file_to_source(self, filename: str, source_id: str, blocking=True):
         """
         Load a file into a source
 
@@ -2193,6 +2334,9 @@ class LocalClient(AbstractClient):
         self.server.load_file_to_source(source_id=source_id, file_path=filename, job_id=job.id)
         return job
 
+    def delete_file_from_source(self, source_id: str, file_id: str):
+        self.server.delete_file_from_source(source_id, file_id, user_id=self.user_id)
+
     def get_job(self, job_id: str):
         return self.server.get_job(job_id=job_id)
 
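
A rough usage sketch of the client surface added in this release. The method names, parameters, and routes (PATCH .../agents/{agent_id}/add-tool/{tool_id}, DELETE .../sources/{source_id}/{file_id}) come from the diff above; the RESTClient constructor arguments (base_url, token), the default port, and the concrete IDs are assumptions for illustration, and the calls require a running Letta server at this nightly version.

# Sketch, not a verified example: exercises the client methods added in this diff.
# base_url/token and the agent/tool/source/file IDs below are placeholder assumptions.
from letta.client.client import RESTClient

client = RESTClient(base_url="http://localhost:8283", token="YOUR_TOKEN")  # constructor args assumed

# attach / detach a tool on an existing agent
agent_state = client.add_tool_to_agent(agent_id="agent-123", tool_id="tool-456")
agent_state = client.remove_tool_from_agent(agent_id="agent-123", tool_id="tool-456")

# load a file into a source, then delete it again
job = client.load_file_to_source(filename="notes.txt", source_id="source-789", blocking=True)
client.delete_file_from_source(source_id="source-789", file_id="file-001")
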