letta-nightly 0.1.7.dev20240924104148__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of letta-nightly might be problematic. Click here for more details.
- letta/__init__.py +24 -0
- letta/__main__.py +3 -0
- letta/agent.py +1427 -0
- letta/agent_store/chroma.py +295 -0
- letta/agent_store/db.py +546 -0
- letta/agent_store/lancedb.py +177 -0
- letta/agent_store/milvus.py +198 -0
- letta/agent_store/qdrant.py +201 -0
- letta/agent_store/storage.py +188 -0
- letta/benchmark/benchmark.py +96 -0
- letta/benchmark/constants.py +14 -0
- letta/cli/cli.py +689 -0
- letta/cli/cli_config.py +1282 -0
- letta/cli/cli_load.py +166 -0
- letta/client/__init__.py +0 -0
- letta/client/admin.py +171 -0
- letta/client/client.py +2360 -0
- letta/client/streaming.py +90 -0
- letta/client/utils.py +61 -0
- letta/config.py +484 -0
- letta/configs/anthropic.json +13 -0
- letta/configs/letta_hosted.json +11 -0
- letta/configs/openai.json +12 -0
- letta/constants.py +134 -0
- letta/credentials.py +140 -0
- letta/data_sources/connectors.py +247 -0
- letta/embeddings.py +218 -0
- letta/errors.py +26 -0
- letta/functions/__init__.py +0 -0
- letta/functions/function_sets/base.py +174 -0
- letta/functions/function_sets/extras.py +132 -0
- letta/functions/functions.py +105 -0
- letta/functions/schema_generator.py +205 -0
- letta/humans/__init__.py +0 -0
- letta/humans/examples/basic.txt +1 -0
- letta/humans/examples/cs_phd.txt +9 -0
- letta/interface.py +314 -0
- letta/llm_api/__init__.py +0 -0
- letta/llm_api/anthropic.py +383 -0
- letta/llm_api/azure_openai.py +155 -0
- letta/llm_api/cohere.py +396 -0
- letta/llm_api/google_ai.py +468 -0
- letta/llm_api/llm_api_tools.py +485 -0
- letta/llm_api/openai.py +470 -0
- letta/local_llm/README.md +3 -0
- letta/local_llm/__init__.py +0 -0
- letta/local_llm/chat_completion_proxy.py +279 -0
- letta/local_llm/constants.py +31 -0
- letta/local_llm/function_parser.py +68 -0
- letta/local_llm/grammars/__init__.py +0 -0
- letta/local_llm/grammars/gbnf_grammar_generator.py +1324 -0
- letta/local_llm/grammars/json.gbnf +26 -0
- letta/local_llm/grammars/json_func_calls_with_inner_thoughts.gbnf +32 -0
- letta/local_llm/groq/api.py +97 -0
- letta/local_llm/json_parser.py +202 -0
- letta/local_llm/koboldcpp/api.py +62 -0
- letta/local_llm/koboldcpp/settings.py +23 -0
- letta/local_llm/llamacpp/api.py +58 -0
- letta/local_llm/llamacpp/settings.py +22 -0
- letta/local_llm/llm_chat_completion_wrappers/__init__.py +0 -0
- letta/local_llm/llm_chat_completion_wrappers/airoboros.py +452 -0
- letta/local_llm/llm_chat_completion_wrappers/chatml.py +470 -0
- letta/local_llm/llm_chat_completion_wrappers/configurable_wrapper.py +387 -0
- letta/local_llm/llm_chat_completion_wrappers/dolphin.py +246 -0
- letta/local_llm/llm_chat_completion_wrappers/llama3.py +345 -0
- letta/local_llm/llm_chat_completion_wrappers/simple_summary_wrapper.py +156 -0
- letta/local_llm/llm_chat_completion_wrappers/wrapper_base.py +11 -0
- letta/local_llm/llm_chat_completion_wrappers/zephyr.py +345 -0
- letta/local_llm/lmstudio/api.py +100 -0
- letta/local_llm/lmstudio/settings.py +29 -0
- letta/local_llm/ollama/api.py +88 -0
- letta/local_llm/ollama/settings.py +32 -0
- letta/local_llm/settings/__init__.py +0 -0
- letta/local_llm/settings/deterministic_mirostat.py +45 -0
- letta/local_llm/settings/settings.py +72 -0
- letta/local_llm/settings/simple.py +28 -0
- letta/local_llm/utils.py +265 -0
- letta/local_llm/vllm/api.py +63 -0
- letta/local_llm/webui/api.py +60 -0
- letta/local_llm/webui/legacy_api.py +58 -0
- letta/local_llm/webui/legacy_settings.py +23 -0
- letta/local_llm/webui/settings.py +24 -0
- letta/log.py +76 -0
- letta/main.py +437 -0
- letta/memory.py +440 -0
- letta/metadata.py +884 -0
- letta/openai_backcompat/__init__.py +0 -0
- letta/openai_backcompat/openai_object.py +437 -0
- letta/persistence_manager.py +148 -0
- letta/personas/__init__.py +0 -0
- letta/personas/examples/anna_pa.txt +13 -0
- letta/personas/examples/google_search_persona.txt +15 -0
- letta/personas/examples/memgpt_doc.txt +6 -0
- letta/personas/examples/memgpt_starter.txt +4 -0
- letta/personas/examples/sam.txt +14 -0
- letta/personas/examples/sam_pov.txt +14 -0
- letta/personas/examples/sam_simple_pov_gpt35.txt +13 -0
- letta/personas/examples/sqldb/test.db +0 -0
- letta/prompts/__init__.py +0 -0
- letta/prompts/gpt_summarize.py +14 -0
- letta/prompts/gpt_system.py +26 -0
- letta/prompts/system/memgpt_base.txt +49 -0
- letta/prompts/system/memgpt_chat.txt +58 -0
- letta/prompts/system/memgpt_chat_compressed.txt +13 -0
- letta/prompts/system/memgpt_chat_fstring.txt +51 -0
- letta/prompts/system/memgpt_doc.txt +50 -0
- letta/prompts/system/memgpt_gpt35_extralong.txt +53 -0
- letta/prompts/system/memgpt_intuitive_knowledge.txt +31 -0
- letta/prompts/system/memgpt_modified_chat.txt +23 -0
- letta/pytest.ini +0 -0
- letta/schemas/agent.py +117 -0
- letta/schemas/api_key.py +21 -0
- letta/schemas/block.py +135 -0
- letta/schemas/document.py +21 -0
- letta/schemas/embedding_config.py +54 -0
- letta/schemas/enums.py +35 -0
- letta/schemas/job.py +38 -0
- letta/schemas/letta_base.py +80 -0
- letta/schemas/letta_message.py +175 -0
- letta/schemas/letta_request.py +23 -0
- letta/schemas/letta_response.py +28 -0
- letta/schemas/llm_config.py +54 -0
- letta/schemas/memory.py +224 -0
- letta/schemas/message.py +727 -0
- letta/schemas/openai/chat_completion_request.py +123 -0
- letta/schemas/openai/chat_completion_response.py +136 -0
- letta/schemas/openai/chat_completions.py +123 -0
- letta/schemas/openai/embedding_response.py +11 -0
- letta/schemas/openai/openai.py +157 -0
- letta/schemas/organization.py +20 -0
- letta/schemas/passage.py +80 -0
- letta/schemas/source.py +62 -0
- letta/schemas/tool.py +143 -0
- letta/schemas/usage.py +18 -0
- letta/schemas/user.py +33 -0
- letta/server/__init__.py +0 -0
- letta/server/constants.py +6 -0
- letta/server/rest_api/__init__.py +0 -0
- letta/server/rest_api/admin/__init__.py +0 -0
- letta/server/rest_api/admin/agents.py +21 -0
- letta/server/rest_api/admin/tools.py +83 -0
- letta/server/rest_api/admin/users.py +98 -0
- letta/server/rest_api/app.py +193 -0
- letta/server/rest_api/auth/__init__.py +0 -0
- letta/server/rest_api/auth/index.py +43 -0
- letta/server/rest_api/auth_token.py +22 -0
- letta/server/rest_api/interface.py +726 -0
- letta/server/rest_api/routers/__init__.py +0 -0
- letta/server/rest_api/routers/openai/__init__.py +0 -0
- letta/server/rest_api/routers/openai/assistants/__init__.py +0 -0
- letta/server/rest_api/routers/openai/assistants/assistants.py +115 -0
- letta/server/rest_api/routers/openai/assistants/schemas.py +121 -0
- letta/server/rest_api/routers/openai/assistants/threads.py +336 -0
- letta/server/rest_api/routers/openai/chat_completions/__init__.py +0 -0
- letta/server/rest_api/routers/openai/chat_completions/chat_completions.py +131 -0
- letta/server/rest_api/routers/v1/__init__.py +15 -0
- letta/server/rest_api/routers/v1/agents.py +543 -0
- letta/server/rest_api/routers/v1/blocks.py +73 -0
- letta/server/rest_api/routers/v1/jobs.py +46 -0
- letta/server/rest_api/routers/v1/llms.py +28 -0
- letta/server/rest_api/routers/v1/organizations.py +61 -0
- letta/server/rest_api/routers/v1/sources.py +199 -0
- letta/server/rest_api/routers/v1/tools.py +103 -0
- letta/server/rest_api/routers/v1/users.py +109 -0
- letta/server/rest_api/static_files.py +74 -0
- letta/server/rest_api/utils.py +69 -0
- letta/server/server.py +1995 -0
- letta/server/startup.sh +8 -0
- letta/server/static_files/assets/index-0cbf7ad5.js +274 -0
- letta/server/static_files/assets/index-156816da.css +1 -0
- letta/server/static_files/assets/index-486e3228.js +274 -0
- letta/server/static_files/favicon.ico +0 -0
- letta/server/static_files/index.html +39 -0
- letta/server/static_files/memgpt_logo_transparent.png +0 -0
- letta/server/utils.py +46 -0
- letta/server/ws_api/__init__.py +0 -0
- letta/server/ws_api/example_client.py +104 -0
- letta/server/ws_api/interface.py +108 -0
- letta/server/ws_api/protocol.py +100 -0
- letta/server/ws_api/server.py +145 -0
- letta/settings.py +165 -0
- letta/streaming_interface.py +396 -0
- letta/system.py +207 -0
- letta/utils.py +1065 -0
- letta_nightly-0.1.7.dev20240924104148.dist-info/LICENSE +190 -0
- letta_nightly-0.1.7.dev20240924104148.dist-info/METADATA +98 -0
- letta_nightly-0.1.7.dev20240924104148.dist-info/RECORD +189 -0
- letta_nightly-0.1.7.dev20240924104148.dist-info/WHEEL +4 -0
- letta_nightly-0.1.7.dev20240924104148.dist-info/entry_points.txt +3 -0
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
@@ -0,0 +1,115 @@
|
|
|
1
|
+
from typing import List
|
|
2
|
+
|
|
3
|
+
from fastapi import APIRouter, Body, HTTPException, Path, Query
|
|
4
|
+
|
|
5
|
+
from letta.constants import DEFAULT_PRESET
|
|
6
|
+
from letta.schemas.openai.openai import AssistantFile, OpenAIAssistant
|
|
7
|
+
from letta.server.rest_api.routers.openai.assistants.schemas import (
|
|
8
|
+
CreateAssistantFileRequest,
|
|
9
|
+
CreateAssistantRequest,
|
|
10
|
+
DeleteAssistantFileResponse,
|
|
11
|
+
DeleteAssistantResponse,
|
|
12
|
+
)
|
|
13
|
+
from letta.utils import get_utc_time
|
|
14
|
+
|
|
15
|
+
# TODO: implement mechanism for creating/authenticating users associated with a bearer token
# NOTE: a bare `router = APIRouter()` used to be assigned first and was immediately
# shadowed by the assignment below; the dead assignment has been removed.
router = APIRouter(prefix="/v1/assistants", tags=["assistants"])
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
# create assistant (Letta agent)
|
|
23
|
+
@router.post("/", response_model=OpenAIAssistant)
def create_assistant(request: CreateAssistantRequest = Body(...)):
    """Create an assistant (Letta agent).

    Currently a stub: no preset is persisted; the request is echoed back under
    the default preset's ID.
    """
    # TODO: create preset
    timestamp = int(get_utc_time().timestamp())
    return OpenAIAssistant(
        id=DEFAULT_PRESET,
        name="default_preset",
        created_at=timestamp,
        description=request.description,
        model=request.model,
        instructions=request.instructions,
        tools=request.tools,
        file_ids=request.file_ids,
        metadata=request.metadata,
    )
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
@router.post("/{assistant_id}/files", response_model=AssistantFile)
def create_assistant_file(
    assistant_id: str = Path(..., description="The unique identifier of the assistant."),
    request: CreateAssistantFileRequest = Body(...),
):
    """Attach a file to an assistant.

    Currently a stub: nothing is stored; the request is echoed back as an
    AssistantFile record.
    """
    # TODO: add file to assistant
    now = int(get_utc_time().timestamp())
    return AssistantFile(
        id=request.file_id,
        created_at=now,
        assistant_id=assistant_id,
    )
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
@router.get("/", response_model=List[OpenAIAssistant])
def list_assistants(
    limit: int = Query(1000, description="How many assistants to retrieve."),
    order: str = Query("asc", description="Order of assistants to retrieve (either 'asc' or 'desc')."),
    after: str = Query(None, description="A cursor for use in pagination. `after` is an object ID that defines your place in the list."),
    # FIX: the `before` description was a copy-paste of `after`'s.
    before: str = Query(None, description="A cursor for use in pagination. `before` is an object ID that defines your place in the list."),
):
    """List available assistants (i.e. Letta presets).

    Not implemented yet; always raises 404.
    """
    # TODO: implement list assistants (i.e. list available Letta presets)
    raise HTTPException(status_code=404, detail="Not yet implemented (coming soon)")
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
@router.get("/{assistant_id}/files", response_model=List[AssistantFile])
def list_assistant_files(
    assistant_id: str = Path(..., description="The unique identifier of the assistant."),
    limit: int = Query(1000, description="How many files to retrieve."),
    order: str = Query("asc", description="Order of files to retrieve (either 'asc' or 'desc')."),
    after: str = Query(None, description="A cursor for use in pagination. `after` is an object ID that defines your place in the list."),
    # FIX: the `before` description was a copy-paste of `after`'s.
    before: str = Query(None, description="A cursor for use in pagination. `before` is an object ID that defines your place in the list."),
):
    """List files attached to an assistant (i.e. data sources on the preset).

    Not implemented yet; always raises 404.
    """
    # TODO: list attached data sources to preset
    raise HTTPException(status_code=404, detail="Not yet implemented (coming soon)")
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
@router.get("/{assistant_id}", response_model=OpenAIAssistant)
def retrieve_assistant(
    assistant_id: str = Path(..., description="The unique identifier of the assistant."),
):
    """Fetch a single assistant (Letta preset) by ID. Not implemented yet."""
    # TODO: get and return preset
    detail = "Not yet implemented (coming soon)"
    raise HTTPException(status_code=404, detail=detail)
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
@router.get("/{assistant_id}/files/{file_id}", response_model=AssistantFile)
def retrieve_assistant_file(
    assistant_id: str = Path(..., description="The unique identifier of the assistant."),
    file_id: str = Path(..., description="The unique identifier of the file."),
):
    """Fetch a file attached to an assistant. Not implemented yet."""
    # TODO: return data source attached to preset
    detail = "Not yet implemented (coming soon)"
    raise HTTPException(status_code=404, detail=detail)
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
@router.post("/{assistant_id}", response_model=OpenAIAssistant)
def modify_assistant(
    assistant_id: str = Path(..., description="The unique identifier of the assistant."),
    request: CreateAssistantRequest = Body(...),
):
    """Update an existing assistant (Letta preset). Not implemented yet."""
    # TODO: modify preset
    detail = "Not yet implemented (coming soon)"
    raise HTTPException(status_code=404, detail=detail)
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
@router.delete("/{assistant_id}", response_model=DeleteAssistantResponse)
def delete_assistant(
    assistant_id: str = Path(..., description="The unique identifier of the assistant."),
):
    """Delete an assistant (Letta preset). Not implemented yet."""
    # TODO: delete preset
    detail = "Not yet implemented (coming soon)"
    raise HTTPException(status_code=404, detail=detail)
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
@router.delete("/{assistant_id}/files/{file_id}", response_model=DeleteAssistantFileResponse)
def delete_assistant_file(
    assistant_id: str = Path(..., description="The unique identifier of the assistant."),
    file_id: str = Path(..., description="The unique identifier of the file."),
):
    """Detach a file (data source) from an assistant. Not implemented yet."""
    # TODO: delete source on preset
    detail = "Not yet implemented (coming soon)"
    raise HTTPException(status_code=404, detail=detail)
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
from typing import List, Optional
|
|
2
|
+
|
|
3
|
+
from pydantic import BaseModel, Field
|
|
4
|
+
|
|
5
|
+
from letta.schemas.openai.openai import (
|
|
6
|
+
MessageRoleType,
|
|
7
|
+
OpenAIMessage,
|
|
8
|
+
OpenAIThread,
|
|
9
|
+
ToolCall,
|
|
10
|
+
ToolCallOutput,
|
|
11
|
+
)
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class CreateAssistantRequest(BaseModel):
    """Request body for creating an assistant (OpenAI-compatible, plus Letta extras)."""

    model: str = Field(..., description="The model to use for the assistant.")
    name: str = Field(..., description="The name of the assistant.")
    # FIX: fields defaulting to None were annotated as non-Optional, inconsistent
    # with CreateThreadRequest/CreateMessageRequest in this module; widened to
    # Optional so the declared types match the defaults.
    description: Optional[str] = Field(None, description="The description of the assistant.")
    instructions: str = Field(..., description="The instructions for the assistant.")
    tools: Optional[List[str]] = Field(None, description="The tools used by the assistant.")
    file_ids: Optional[List[str]] = Field(None, description="List of file IDs associated with the assistant.")
    metadata: Optional[dict] = Field(None, description="Metadata associated with the assistant.")

    # letta-only (not openai)
    embedding_model: Optional[str] = Field(None, description="The model to use for the assistant.")

    ## TODO: remove
    # user_id: str = Field(..., description="The unique identifier of the user.")
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class CreateThreadRequest(BaseModel):
    """Request body for creating a thread (OpenAI-compatible, plus Letta extras)."""

    messages: Optional[List[str]] = Field(None, description="List of message IDs associated with the thread.")
    metadata: Optional[dict] = Field(None, description="Metadata associated with the thread.")

    # letta-only
    assistant_name: Optional[str] = Field(None, description="The name of the assistant (i.e. Letta preset)")
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
class ModifyThreadRequest(BaseModel):
    """Request body for modifying a thread's metadata."""

    # FIX: defaults to None, so annotate as Optional (matches the Optional[dict]
    # convention used elsewhere in this module).
    metadata: Optional[dict] = Field(None, description="Metadata associated with the thread.")
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
class ModifyMessageRequest(BaseModel):
    """Request body for modifying a message's metadata."""

    # FIX: defaults to None, so annotate as Optional (matches the Optional[dict]
    # convention used elsewhere in this module).
    metadata: Optional[dict] = Field(None, description="Metadata associated with the message.")
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
class ModifyRunRequest(BaseModel):
    """Request body for modifying a run's metadata."""

    # FIX: defaults to None, so annotate as Optional (matches the Optional[dict]
    # convention used elsewhere in this module).
    metadata: Optional[dict] = Field(None, description="Metadata associated with the run.")
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
class CreateMessageRequest(BaseModel):
    """Request body for adding a message to a thread (OpenAI-compatible)."""

    role: str = Field(..., description="Role of the message sender (either 'user' or 'system')")
    content: str = Field(..., description="The message content to be processed by the agent.")
    file_ids: Optional[List[str]] = Field(None, description="List of file IDs associated with the message.")
    metadata: Optional[dict] = Field(None, description="Metadata associated with the message.")
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
class UserMessageRequest(BaseModel):
    """Request to send a user (or system) message to a specific agent."""

    user_id: str = Field(..., description="The unique identifier of the user.")
    agent_id: str = Field(..., description="The unique identifier of the agent.")
    message: str = Field(..., description="The message content to be processed by the agent.")
    stream: bool = Field(default=False, description="Flag to determine if the response should be streamed. Set to True for streaming.")
    role: MessageRoleType = Field(default=MessageRoleType.user, description="Role of the message sender (either 'user' or 'system')")
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
class UserMessageResponse(BaseModel):
    """Agent output produced in response to a user message."""

    messages: List[dict] = Field(..., description="List of messages generated by the agent in response to the received message.")
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
class GetAgentMessagesRequest(BaseModel):
    """Request for a page of an agent's messages, indexed in reverse chronological order."""

    user_id: str = Field(..., description="The unique identifier of the user.")
    agent_id: str = Field(..., description="The unique identifier of the agent.")
    start: int = Field(..., description="Message index to start on (reverse chronological).")
    count: int = Field(..., description="How many messages to retrieve.")
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
class ListMessagesResponse(BaseModel):
    """Page of OpenAI-format messages returned by the list-messages endpoint."""

    messages: List[OpenAIMessage] = Field(..., description="List of message objects.")
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
class CreateAssistantFileRequest(BaseModel):
    """Request body for attaching an existing file to an assistant."""

    file_id: str = Field(..., description="The unique identifier of the file.")
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
class CreateRunRequest(BaseModel):
    """Request body for starting a run on an existing thread (OpenAI-compatible)."""

    assistant_id: str = Field(..., description="The unique identifier of the assistant.")
    model: Optional[str] = Field(None, description="The model used by the run.")
    instructions: str = Field(..., description="The instructions for the run.")
    additional_instructions: Optional[str] = Field(None, description="Additional instructions for the run.")
    tools: Optional[List[ToolCall]] = Field(None, description="The tools used by the run (overrides assistant).")
    metadata: Optional[dict] = Field(None, description="Metadata associated with the run.")
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
class CreateThreadRunRequest(BaseModel):
    """Request body for creating a thread and immediately running it (OpenAI-compatible)."""

    assistant_id: str = Field(..., description="The unique identifier of the assistant.")
    thread: OpenAIThread = Field(..., description="The thread to run.")
    model: str = Field(..., description="The model used by the run.")
    instructions: str = Field(..., description="The instructions for the run.")
    tools: Optional[List[ToolCall]] = Field(None, description="The tools used by the run (overrides assistant).")
    metadata: Optional[dict] = Field(None, description="Metadata associated with the run.")
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
class DeleteAssistantResponse(BaseModel):
    """Response returned after deleting an assistant (OpenAI `assistant.deleted` object).

    In Letta an assistant maps onto an agent, hence the wording below.
    """

    id: str = Field(..., description="The unique identifier of the agent.")
    # fixed OpenAI object-type discriminator
    object: str = "assistant.deleted"
    deleted: bool = Field(..., description="Whether the agent was deleted.")
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
class DeleteAssistantFileResponse(BaseModel):
    """Response returned after detaching a file from an assistant."""

    id: str = Field(..., description="The unique identifier of the file.")
    # fixed OpenAI object-type discriminator
    object: str = "assistant.file.deleted"
    deleted: bool = Field(..., description="Whether the file was deleted.")
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
class DeleteThreadResponse(BaseModel):
    """Response returned after deleting a thread (OpenAI `thread.deleted` object)."""

    # FIX: descriptions said "agent" (copy-pasted from DeleteAssistantResponse)
    # even though this object describes a deleted thread.
    id: str = Field(..., description="The unique identifier of the thread.")
    # fixed OpenAI object-type discriminator
    object: str = "thread.deleted"
    deleted: bool = Field(..., description="Whether the thread was deleted.")
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
class SubmitToolOutputsToRunRequest(BaseModel):
    """Request body for submitting tool-call results back to a run."""

    tools_outputs: List[ToolCallOutput] = Field(..., description="The tool outputs to submit.")
|
|
@@ -0,0 +1,336 @@
|
|
|
1
|
+
import uuid
|
|
2
|
+
from typing import TYPE_CHECKING, List
|
|
3
|
+
|
|
4
|
+
from fastapi import APIRouter, Body, Depends, HTTPException, Path, Query
|
|
5
|
+
|
|
6
|
+
from letta.constants import DEFAULT_PRESET
|
|
7
|
+
from letta.schemas.agent import CreateAgent
|
|
8
|
+
from letta.schemas.enums import MessageRole
|
|
9
|
+
from letta.schemas.message import Message
|
|
10
|
+
from letta.schemas.openai.openai import (
|
|
11
|
+
MessageFile,
|
|
12
|
+
OpenAIMessage,
|
|
13
|
+
OpenAIRun,
|
|
14
|
+
OpenAIRunStep,
|
|
15
|
+
OpenAIThread,
|
|
16
|
+
Text,
|
|
17
|
+
)
|
|
18
|
+
from letta.server.rest_api.routers.openai.assistants.schemas import (
|
|
19
|
+
CreateMessageRequest,
|
|
20
|
+
CreateRunRequest,
|
|
21
|
+
CreateThreadRequest,
|
|
22
|
+
CreateThreadRunRequest,
|
|
23
|
+
DeleteThreadResponse,
|
|
24
|
+
ListMessagesResponse,
|
|
25
|
+
ModifyMessageRequest,
|
|
26
|
+
ModifyRunRequest,
|
|
27
|
+
ModifyThreadRequest,
|
|
28
|
+
OpenAIThread,
|
|
29
|
+
SubmitToolOutputsToRunRequest,
|
|
30
|
+
)
|
|
31
|
+
from letta.server.rest_api.utils import get_letta_server
|
|
32
|
+
from letta.server.server import SyncServer
|
|
33
|
+
|
|
34
|
+
# FIX: get_utc_time is called at runtime (see create_run), so importing it only
# under TYPE_CHECKING raised NameError when the endpoint executed. Import it
# unconditionally instead.
from letta.utils import get_utc_time
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
# TODO: implement mechanism for creating/authenticating users associated with a bearer token
|
|
39
|
+
router = APIRouter(prefix="/v1/threads", tags=["threads"])
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
@router.post("/", response_model=OpenAIThread)
def create_thread(
    request: CreateThreadRequest = Body(...),
    server: SyncServer = Depends(get_letta_server),
):
    """Create a thread by creating a fresh Letta agent for the current user.

    The agent's ID doubles as the thread ID. `request.messages`, metadata, and
    the assistant_name field are currently ignored (see TODOs).
    """
    # TODO: use requests.description and requests.metadata fields
    # TODO: handle requests.file_ids and requests.tools
    # TODO: eventually allow request to override embedding/llm model
    actor = server.get_current_user()

    # NOTE(review): debug print left in; consider replacing with logging
    print("Create thread/agent", request)
    # create a letta agent
    agent_state = server.create_agent(
        request=CreateAgent(),
        user_id=actor.id,
    )
    # TODO: insert messages into recall memory
    return OpenAIThread(
        id=str(agent_state.id),
        created_at=int(agent_state.created_at.timestamp()),
        metadata={},  # TODO add metadata?
    )
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
@router.get("/{thread_id}", response_model=OpenAIThread)
def retrieve_thread(
    thread_id: str = Path(..., description="The unique identifier of the thread."),
    server: SyncServer = Depends(get_letta_server),
):
    """Fetch a thread (backed by a Letta agent) by its ID.

    Raises:
        HTTPException(404): if no agent exists for the given thread ID.
    """
    actor = server.get_current_user()
    agent = server.get_agent(user_id=actor.id, agent_id=thread_id)
    # FIX: was `assert agent is not None` — asserts are stripped under `-O` and
    # surface as a 500; a missing thread should be a 404.
    if agent is None:
        raise HTTPException(status_code=404, detail=f"Thread {thread_id} not found")
    return OpenAIThread(
        id=str(agent.id),
        created_at=int(agent.created_at.timestamp()),
        metadata={},  # TODO add metadata?
    )
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
# FIX: this endpoint was registered with @router.get, which duplicated
# retrieve_thread's `GET /{thread_id}` route and does not match the OpenAI API,
# where modifying a thread is a POST.
@router.post("/{thread_id}", response_model=OpenAIThread)
def modify_thread(
    thread_id: str = Path(..., description="The unique identifier of the thread."),
    request: ModifyThreadRequest = Body(...),
):
    """Modify a thread's metadata. Not implemented yet."""
    # TODO: add agent metadata so this can be modified
    raise HTTPException(status_code=404, detail="Not yet implemented (coming soon)")
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
@router.delete("/{thread_id}", response_model=DeleteThreadResponse)
def delete_thread(
    thread_id: str = Path(..., description="The unique identifier of the thread."),
):
    """Delete a thread (i.e. the backing Letta agent). Not implemented yet."""
    # TODO: delete agent
    detail = "Not yet implemented (coming soon)"
    raise HTTPException(status_code=404, detail=detail)
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
@router.post("/{thread_id}/messages", tags=["messages"], response_model=OpenAIMessage)
def create_message(
    thread_id: str = Path(..., description="The unique identifier of the thread."),
    request: CreateMessageRequest = Body(...),
    server: SyncServer = Depends(get_letta_server),
):
    """Append a message to a thread's agent and return it in OpenAI format.

    The message is injected directly into the agent's in-memory message list;
    no agent step is triggered here (that happens in create_run).
    """
    actor = server.get_current_user()
    agent_id = thread_id
    # create message object
    message = Message(
        user_id=actor.id,
        agent_id=agent_id,
        role=MessageRole(request.role),
        text=request.content,
        model=None,
        tool_calls=None,
        tool_call_id=None,
        name=None,
    )
    # NOTE(review): relies on the server's private agent cache/loader
    agent = server._get_or_load_agent(agent_id=agent_id)
    # add message to agent
    agent._append_to_messages([message])

    # translate the internal Message into the OpenAI wire format
    openai_message = OpenAIMessage(
        id=str(message.id),
        created_at=int(message.created_at.timestamp()),
        content=[Text(text=(message.text if message.text else ""))],
        role=message.role,
        thread_id=str(message.agent_id),
        assistant_id=DEFAULT_PRESET,  # TODO: update this
        # TODO(sarah) fill in?
        run_id=None,
        file_ids=None,
        metadata=None,
        # file_ids=message.file_ids,
        # metadata=message.metadata,
    )
    return openai_message
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
@router.get("/{thread_id}/messages", tags=["messages"], response_model=ListMessagesResponse)
def list_messages(
    thread_id: str = Path(..., description="The unique identifier of the thread."),
    limit: int = Query(1000, description="How many messages to retrieve."),
    order: str = Query("asc", description="Order of messages to retrieve (either 'asc' or 'desc')."),
    after: str = Query(None, description="A cursor for use in pagination. `after` is an object ID that defines your place in the list."),
    # FIX: the `before` description was a copy-paste of `after`'s.
    before: str = Query(None, description="A cursor for use in pagination. `before` is an object ID that defines your place in the list."),
    server: SyncServer = Depends(get_letta_server),
):
    """List a thread's messages in OpenAI message format.

    Pagination follows the OpenAI cursor convention (`after` / `before`);
    `order` controls chronological direction ('asc' or 'desc').
    """
    actor = server.get_current_user()
    # FIX: `after_uuid` was previously computed as `after if before else None`
    # (copy-paste bug), silently dropping the `after` cursor whenever `before`
    # was unset.
    after_uuid = after if after else None
    before_uuid = before if before else None
    agent_id = thread_id
    reverse = order == "desc"
    json_messages = server.get_agent_recall_cursor(
        user_id=actor.id,
        agent_id=agent_id,
        limit=limit,
        after=after_uuid,
        before=before_uuid,
        order_by="created_at",
        reverse=reverse,
        return_message_object=True,
    )
    assert isinstance(json_messages, List)
    assert all([isinstance(message, Message) for message in json_messages])
    # FIX: removed debug prints and an unconditional `json_messages[0]` access
    # that raised IndexError on an empty thread.
    # convert to openai style messages
    openai_messages = []
    for message in json_messages:
        assert isinstance(message, Message)
        openai_messages.append(
            OpenAIMessage(
                id=str(message.id),
                created_at=int(message.created_at.timestamp()),
                content=[Text(text=(message.text if message.text else ""))],
                role=str(message.role),
                thread_id=str(message.agent_id),
                assistant_id=DEFAULT_PRESET,  # TODO: update this
                # TODO(sarah) fill in?
                run_id=None,
                file_ids=None,
                metadata=None,
                # file_ids=message.file_ids,
                # metadata=message.metadata,
            )
        )
    # TODO: cast back to message objects
    return ListMessagesResponse(messages=openai_messages)
|
|
189
|
+
|
|
190
|
+
|
|
191
|
+
@router.get("/{thread_id}/messages/{message_id}", tags=["messages"], response_model=OpenAIMessage)
def retrieve_message(
    thread_id: str = Path(..., description="The unique identifier of the thread."),
    message_id: str = Path(..., description="The unique identifier of the message."),
    server: SyncServer = Depends(get_letta_server),
):
    """Fetch a single message from a thread in OpenAI format.

    Raises:
        HTTPException(404): if the message does not exist in the thread.
    """
    agent_id = thread_id
    message = server.get_agent_message(agent_id=agent_id, message_id=message_id)
    # FIX: was `assert message is not None` — stripped under `-O` and surfaces
    # as a 500; a missing message should be a 404.
    if message is None:
        raise HTTPException(status_code=404, detail=f"Message {message_id} not found")
    return OpenAIMessage(
        id=message_id,
        created_at=int(message.created_at.timestamp()),
        content=[Text(text=(message.text if message.text else ""))],
        role=message.role,
        thread_id=str(message.agent_id),
        assistant_id=DEFAULT_PRESET,  # TODO: update this
        # TODO(sarah) fill in?
        run_id=None,
        file_ids=None,
        metadata=None,
        # file_ids=message.file_ids,
        # metadata=message.metadata,
    )
|
|
214
|
+
|
|
215
|
+
|
|
216
|
+
@router.get("/{thread_id}/messages/{message_id}/files/{file_id}", tags=["messages"], response_model=MessageFile)
def retrieve_message_file(
    thread_id: str = Path(..., description="The unique identifier of the thread."),
    message_id: str = Path(..., description="The unique identifier of the message."),
    file_id: str = Path(..., description="The unique identifier of the file."),
):
    """Fetch a file attached to a message. Not implemented yet."""
    # TODO: implement?
    detail = "Not yet implemented (coming soon)"
    raise HTTPException(status_code=404, detail=detail)
|
|
224
|
+
|
|
225
|
+
|
|
226
|
+
@router.post("/{thread_id}/messages/{message_id}", tags=["messages"], response_model=OpenAIMessage)
def modify_message(
    thread_id: str = Path(..., description="The unique identifier of the thread."),
    message_id: str = Path(..., description="The unique identifier of the message."),
    request: ModifyMessageRequest = Body(...),
):
    """Modify a message's metadata. Not implemented yet."""
    # TODO: add a metadata field to Message so this can be modified
    detail = "Not yet implemented (coming soon)"
    raise HTTPException(status_code=404, detail=detail)
|
|
234
|
+
|
|
235
|
+
|
|
236
|
+
@router.post("/{thread_id}/runs", tags=["runs"], response_model=OpenAIRun)
def create_run(
    thread_id: str = Path(..., description="The unique identifier of the thread."),
    request: CreateRunRequest = Body(...),
    server: SyncServer = Depends(get_letta_server),
):
    """Execute a synchronous run on a thread's agent and return a completed Run.

    The run executes inline (one agent.step over the already-appended
    messages); the returned Run is always in "completed" status with a fresh
    synthetic run ID.
    """
    server.get_current_user()

    # TODO: add request.instructions as a message?
    agent_id = thread_id
    # TODO: override preset of agent with request.assistant_id
    agent = server._get_or_load_agent(agent_id=agent_id)
    agent.step(user_message=None)  # already has messages added
    run_id = str(uuid.uuid4())
    # NOTE(review): get_utc_time is imported only under TYPE_CHECKING at the
    # top of this module, so this call raises NameError at runtime — the import
    # needs to be moved out of the guard.
    create_time = int(get_utc_time().timestamp())
    return OpenAIRun(
        id=run_id,
        created_at=create_time,
        thread_id=str(agent_id),
        assistant_id=DEFAULT_PRESET,  # TODO: update this
        status="completed",  # TODO: eventually allow offline execution
        expires_at=create_time,
        model=agent.agent_state.llm_config.model,
        instructions=request.instructions,
    )
|
|
261
|
+
|
|
262
|
+
|
|
263
|
+
@router.post("/runs", tags=["runs"], response_model=OpenAIRun)
|
|
264
|
+
def create_thread_and_run(
|
|
265
|
+
request: CreateThreadRunRequest = Body(...),
|
|
266
|
+
):
|
|
267
|
+
# TODO: add a bunch of messages and execute
|
|
268
|
+
raise HTTPException(status_code=404, detail="Not yet implemented (coming soon)")
|
|
269
|
+
|
|
270
|
+
|
|
271
|
+
@router.get("/{thread_id}/runs", tags=["runs"], response_model=List[OpenAIRun])
|
|
272
|
+
def list_runs(
|
|
273
|
+
thread_id: str = Path(..., description="The unique identifier of the thread."),
|
|
274
|
+
limit: int = Query(1000, description="How many runs to retrieve."),
|
|
275
|
+
order: str = Query("asc", description="Order of runs to retrieve (either 'asc' or 'desc')."),
|
|
276
|
+
after: str = Query(None, description="A cursor for use in pagination. `after` is an object ID that defines your place in the list."),
|
|
277
|
+
before: str = Query(None, description="A cursor for use in pagination. `after` is an object ID that defines your place in the list."),
|
|
278
|
+
):
|
|
279
|
+
# TODO: store run information in a DB so it can be returned here
|
|
280
|
+
raise HTTPException(status_code=404, detail="Not yet implemented (coming soon)")
|
|
281
|
+
|
|
282
|
+
|
|
283
|
+
@router.get("/{thread_id}/runs/{run_id}/steps", tags=["runs"], response_model=List[OpenAIRunStep])
|
|
284
|
+
def list_run_steps(
|
|
285
|
+
thread_id: str = Path(..., description="The unique identifier of the thread."),
|
|
286
|
+
run_id: str = Path(..., description="The unique identifier of the run."),
|
|
287
|
+
limit: int = Query(1000, description="How many run steps to retrieve."),
|
|
288
|
+
order: str = Query("asc", description="Order of run steps to retrieve (either 'asc' or 'desc')."),
|
|
289
|
+
after: str = Query(None, description="A cursor for use in pagination. `after` is an object ID that defines your place in the list."),
|
|
290
|
+
before: str = Query(None, description="A cursor for use in pagination. `after` is an object ID that defines your place in the list."),
|
|
291
|
+
):
|
|
292
|
+
# TODO: store run information in a DB so it can be returned here
|
|
293
|
+
raise HTTPException(status_code=404, detail="Not yet implemented (coming soon)")
|
|
294
|
+
|
|
295
|
+
|
|
296
|
+
@router.get("/{thread_id}/runs/{run_id}", tags=["runs"], response_model=OpenAIRun)
|
|
297
|
+
def retrieve_run(
|
|
298
|
+
thread_id: str = Path(..., description="The unique identifier of the thread."),
|
|
299
|
+
run_id: str = Path(..., description="The unique identifier of the run."),
|
|
300
|
+
):
|
|
301
|
+
raise HTTPException(status_code=404, detail="Not yet implemented (coming soon)")
|
|
302
|
+
|
|
303
|
+
|
|
304
|
+
@router.get("/{thread_id}/runs/{run_id}/steps/{step_id}", tags=["runs"], response_model=OpenAIRunStep)
|
|
305
|
+
def retrieve_run_step(
|
|
306
|
+
thread_id: str = Path(..., description="The unique identifier of the thread."),
|
|
307
|
+
run_id: str = Path(..., description="The unique identifier of the run."),
|
|
308
|
+
step_id: str = Path(..., description="The unique identifier of the run step."),
|
|
309
|
+
):
|
|
310
|
+
raise HTTPException(status_code=404, detail="Not yet implemented (coming soon)")
|
|
311
|
+
|
|
312
|
+
|
|
313
|
+
@router.post("/{thread_id}/runs/{run_id}", tags=["runs"], response_model=OpenAIRun)
|
|
314
|
+
def modify_run(
|
|
315
|
+
thread_id: str = Path(..., description="The unique identifier of the thread."),
|
|
316
|
+
run_id: str = Path(..., description="The unique identifier of the run."),
|
|
317
|
+
request: ModifyRunRequest = Body(...),
|
|
318
|
+
):
|
|
319
|
+
raise HTTPException(status_code=404, detail="Not yet implemented (coming soon)")
|
|
320
|
+
|
|
321
|
+
|
|
322
|
+
@router.post("/{thread_id}/runs/{run_id}/submit_tool_outputs", tags=["runs"], response_model=OpenAIRun)
|
|
323
|
+
def submit_tool_outputs_to_run(
|
|
324
|
+
thread_id: str = Path(..., description="The unique identifier of the thread."),
|
|
325
|
+
run_id: str = Path(..., description="The unique identifier of the run."),
|
|
326
|
+
request: SubmitToolOutputsToRunRequest = Body(...),
|
|
327
|
+
):
|
|
328
|
+
raise HTTPException(status_code=404, detail="Not yet implemented (coming soon)")
|
|
329
|
+
|
|
330
|
+
|
|
331
|
+
@router.post("/{thread_id}/runs/{run_id}/cancel", tags=["runs"], response_model=OpenAIRun)
|
|
332
|
+
def cancel_run(
|
|
333
|
+
thread_id: str = Path(..., description="The unique identifier of the thread."),
|
|
334
|
+
run_id: str = Path(..., description="The unique identifier of the run."),
|
|
335
|
+
):
|
|
336
|
+
raise HTTPException(status_code=404, detail="Not yet implemented (coming soon)")
|
|
File without changes
|