letta-client 0.1.16__py3-none-any.whl → 0.1.19__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- letta_client/__init__.py +34 -120
- letta_client/agents/__init__.py +18 -54
- letta_client/agents/archival_memory/client.py +25 -343
- letta_client/agents/client.py +1640 -347
- letta_client/agents/context/client.py +6 -4
- letta_client/agents/core_memory/client.py +95 -624
- letta_client/agents/memory_variables/__init__.py +2 -2
- letta_client/agents/memory_variables/client.py +15 -15
- letta_client/agents/memory_variables/types/__init__.py +2 -2
- letta_client/agents/memory_variables/types/{memory_variables_get_response.py → memory_variables_list_response.py} +1 -1
- letta_client/agents/messages/__init__.py +2 -22
- letta_client/agents/messages/client.py +32 -38
- letta_client/agents/messages/types/__init__.py +2 -21
- letta_client/agents/messages/types/letta_streaming_response.py +16 -139
- letta_client/agents/messages/types/messages_list_response.py +2 -2
- letta_client/agents/sources/client.py +266 -5
- letta_client/agents/tools/client.py +25 -27
- letta_client/agents/types/__init__.py +15 -25
- letta_client/agents/types/agents_search_request_search_item.py +10 -78
- letta_client/agents/types/{agents_search_request_search_item_order_by.py → agents_search_request_search_item_direction.py} +7 -6
- letta_client/agents/types/agents_search_request_search_item_direction_direction.py +5 -0
- letta_client/agents/types/agents_search_request_search_item_direction_value.py +5 -0
- letta_client/agents/types/{agents_search_request_search_item_name.py → agents_search_request_search_item_one.py} +5 -4
- letta_client/agents/types/agents_search_request_search_item_one_operator.py +5 -0
- letta_client/agents/types/{agents_search_request_search_item_tags.py → agents_search_request_search_item_two.py} +2 -1
- letta_client/agents/types/{agents_search_request_search_item_version.py → agents_search_request_search_item_zero.py} +3 -2
- letta_client/blocks/client.py +12 -260
- letta_client/client.py +3 -3
- letta_client/core/client_wrapper.py +1 -1
- letta_client/jobs/client.py +4 -4
- letta_client/providers/client.py +74 -74
- letta_client/runs/client.py +14 -12
- letta_client/sources/client.py +12 -288
- letta_client/tools/client.py +63 -189
- letta_client/types/__init__.py +21 -103
- letta_client/types/agent_state.py +3 -7
- letta_client/types/{assistant_message_output.py → assistant_message.py} +3 -2
- letta_client/types/block.py +2 -6
- letta_client/types/block_update.py +1 -5
- letta_client/types/{archival_memory_summary.py → chat_completion_message_tool_call.py} +7 -7
- letta_client/types/context_window_overview.py +4 -6
- letta_client/types/create_block.py +1 -5
- letta_client/types/embedding_config_embedding_endpoint_type.py +1 -0
- letta_client/types/{function_call_output.py → function.py} +1 -1
- letta_client/types/{function_schema.py → function_definition.py} +2 -1
- letta_client/types/{create_assistant_file_request.py → function_tool.py} +6 -7
- letta_client/types/job.py +1 -5
- letta_client/types/letta_message_union.py +9 -121
- letta_client/types/letta_usage_statistics.py +1 -0
- letta_client/types/llm_config_model_endpoint_type.py +1 -0
- letta_client/types/{letta_schemas_message_message.py → message.py} +9 -6
- letta_client/types/passage.py +1 -5
- letta_client/types/reasoning_message.py +2 -1
- letta_client/types/run.py +1 -5
- letta_client/types/source.py +2 -6
- letta_client/types/{system_message_output.py → system_message.py} +3 -2
- letta_client/types/{letta_schemas_tool_tool.py → tool.py} +1 -1
- letta_client/types/{letta_schemas_letta_message_tool_call.py → tool_call.py} +1 -1
- letta_client/types/tool_call_message.py +2 -1
- letta_client/types/tool_call_message_tool_call.py +2 -2
- letta_client/types/tool_return_message.py +2 -1
- letta_client/types/tool_type.py +2 -1
- letta_client/types/{user_message_output.py → user_message.py} +3 -2
- {letta_client-0.1.16.dist-info → letta_client-0.1.19.dist-info}/METADATA +2 -2
- {letta_client-0.1.16.dist-info → letta_client-0.1.19.dist-info}/RECORD +66 -101
- letta_client/agents/recall_memory/__init__.py +0 -2
- letta_client/agents/recall_memory/client.py +0 -147
- letta_client/agents/types/agents_search_request_search_item_name_operator.py +0 -5
- letta_client/agents/types/agents_search_request_search_item_order_by_direction.py +0 -5
- letta_client/agents/types/agents_search_request_search_item_order_by_value.py +0 -5
- letta_client/types/assistant_file.py +0 -33
- letta_client/types/assistant_message_input.py +0 -23
- letta_client/types/chat_completion_request.py +0 -49
- letta_client/types/chat_completion_request_function_call.py +0 -6
- letta_client/types/chat_completion_request_messages_item.py +0 -11
- letta_client/types/chat_completion_request_stop.py +0 -5
- letta_client/types/chat_completion_request_tool_choice.py +0 -8
- letta_client/types/chat_completion_response.py +0 -32
- letta_client/types/choice.py +0 -25
- letta_client/types/create_assistant_request.py +0 -57
- letta_client/types/delete_assistant_file_response.py +0 -28
- letta_client/types/delete_assistant_response.py +0 -28
- letta_client/types/function_call_input.py +0 -19
- letta_client/types/letta_schemas_openai_chat_completion_request_tool.py +0 -21
- letta_client/types/letta_schemas_openai_chat_completion_request_tool_call.py +0 -24
- letta_client/types/letta_schemas_openai_chat_completion_request_tool_call_function.py +0 -20
- letta_client/types/letta_schemas_openai_chat_completion_response_message.py +0 -24
- letta_client/types/letta_schemas_openai_chat_completion_response_tool_call.py +0 -22
- letta_client/types/letta_schemas_openai_chat_completions_tool_call_function.py +0 -27
- letta_client/types/letta_schemas_openai_chat_completions_tool_call_input.py +0 -29
- letta_client/types/letta_schemas_openai_chat_completions_tool_call_output.py +0 -29
- letta_client/types/log_prob_token.py +0 -21
- letta_client/types/message_content_log_prob.py +0 -23
- letta_client/types/open_ai_assistant.py +0 -67
- letta_client/types/recall_memory_summary.py +0 -22
- letta_client/types/response_format.py +0 -19
- letta_client/types/system_message_input.py +0 -21
- letta_client/types/tool_call_function_output.py +0 -27
- letta_client/types/tool_function_choice.py +0 -21
- letta_client/types/tool_input.py +0 -21
- letta_client/types/tool_message.py +0 -21
- letta_client/types/user_message_input.py +0 -22
- letta_client/types/user_message_input_content.py +0 -5
- {letta_client-0.1.16.dist-info → letta_client-0.1.19.dist-info}/WHEEL +0 -0
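Most of the churn in this release is removal: the `recall_memory` sub-client, the OpenAI-compatible `chat_completion_*` and assistant request types, and several `letta_schemas_openai_*` models are deleted outright, while a number of modules are renamed (for example `assistant_message_output.py` → `assistant_message.py`). Before moving a project from 0.1.16 to 0.1.19 it can help to scan for imports of the deleted modules. The sketch below is illustrative only: the module list is a subset taken from the removals above, and the scanning helper itself is not part of letta-client.

import pathlib
import re

# Module paths deleted between 0.1.16 and 0.1.19 (subset of the file list above).
REMOVED_MODULES = [
    "letta_client.agents.recall_memory",
    "letta_client.types.recall_memory_summary",
    "letta_client.types.assistant_file",
    "letta_client.types.create_assistant_request",
    "letta_client.types.chat_completion_request",
    "letta_client.types.chat_completion_response",
]

# Match "import letta_client.types.x" as well as "from letta_client.types.x import ...".
PATTERN = re.compile(
    r"^\s*(?:from|import)\s+(" + "|".join(re.escape(m) for m in REMOVED_MODULES) + r")\b",
    re.MULTILINE,
)


def find_removed_imports(project_root: str) -> None:
    """Print every line in *.py files under project_root that imports a removed module."""
    for path in pathlib.Path(project_root).rglob("*.py"):
        text = path.read_text(encoding="utf-8", errors="ignore")
        for match in PATTERN.finditer(text):
            line_no = text.count("\n", 0, match.start()) + 1
            print(f"{path}:{line_no}: imports removed module {match.group(1)}")


if __name__ == "__main__":
    find_removed_imports(".")

Running find_removed_imports(".") from a project root prints each file and line that still references a removed module; anything it flags will break against 0.1.19 as packaged here. The deleted file contents follow.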
letta_client/agents/recall_memory/client.py
DELETED
@@ -1,147 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-from ...core.client_wrapper import SyncClientWrapper
-import typing
-from ...core.request_options import RequestOptions
-from ...types.recall_memory_summary import RecallMemorySummary
-from ...core.jsonable_encoder import jsonable_encoder
-from ...core.unchecked_base_model import construct_type
-from ...errors.unprocessable_entity_error import UnprocessableEntityError
-from ...types.http_validation_error import HttpValidationError
-from json.decoder import JSONDecodeError
-from ...core.api_error import ApiError
-from ...core.client_wrapper import AsyncClientWrapper
-
-
-class RecallMemoryClient:
-    def __init__(self, *, client_wrapper: SyncClientWrapper):
-        self._client_wrapper = client_wrapper
-
-    def get_summary(
-        self, agent_id: str, *, request_options: typing.Optional[RequestOptions] = None
-    ) -> RecallMemorySummary:
-        """
-        Retrieve the summary of the recall memory of a specific agent.
-
-        Parameters
-        ----------
-        agent_id : str
-
-        request_options : typing.Optional[RequestOptions]
-            Request-specific configuration.
-
-        Returns
-        -------
-        RecallMemorySummary
-            Successful Response
-
-        Examples
-        --------
-        from letta_client import Letta
-
-        client = Letta(
-            token="YOUR_TOKEN",
-        )
-        client.agents.recall_memory.get_summary(
-            agent_id="agent_id",
-        )
-        """
-        _response = self._client_wrapper.httpx_client.request(
-            f"v1/agents/{jsonable_encoder(agent_id)}/memory/recall",
-            method="GET",
-            request_options=request_options,
-        )
-        try:
-            if 200 <= _response.status_code < 300:
-                return typing.cast(
-                    RecallMemorySummary,
-                    construct_type(
-                        type_=RecallMemorySummary,  # type: ignore
-                        object_=_response.json(),
-                    ),
-                )
-            if _response.status_code == 422:
-                raise UnprocessableEntityError(
-                    typing.cast(
-                        HttpValidationError,
-                        construct_type(
-                            type_=HttpValidationError,  # type: ignore
-                            object_=_response.json(),
-                        ),
-                    )
-                )
-            _response_json = _response.json()
-        except JSONDecodeError:
-            raise ApiError(status_code=_response.status_code, body=_response.text)
-        raise ApiError(status_code=_response.status_code, body=_response_json)
-
-
-class AsyncRecallMemoryClient:
-    def __init__(self, *, client_wrapper: AsyncClientWrapper):
-        self._client_wrapper = client_wrapper
-
-    async def get_summary(
-        self, agent_id: str, *, request_options: typing.Optional[RequestOptions] = None
-    ) -> RecallMemorySummary:
-        """
-        Retrieve the summary of the recall memory of a specific agent.
-
-        Parameters
-        ----------
-        agent_id : str
-
-        request_options : typing.Optional[RequestOptions]
-            Request-specific configuration.
-
-        Returns
-        -------
-        RecallMemorySummary
-            Successful Response
-
-        Examples
-        --------
-        import asyncio
-
-        from letta_client import AsyncLetta
-
-        client = AsyncLetta(
-            token="YOUR_TOKEN",
-        )
-
-
-        async def main() -> None:
-            await client.agents.recall_memory.get_summary(
-                agent_id="agent_id",
-            )
-
-
-        asyncio.run(main())
-        """
-        _response = await self._client_wrapper.httpx_client.request(
-            f"v1/agents/{jsonable_encoder(agent_id)}/memory/recall",
-            method="GET",
-            request_options=request_options,
-        )
-        try:
-            if 200 <= _response.status_code < 300:
-                return typing.cast(
-                    RecallMemorySummary,
-                    construct_type(
-                        type_=RecallMemorySummary,  # type: ignore
-                        object_=_response.json(),
-                    ),
-                )
-            if _response.status_code == 422:
-                raise UnprocessableEntityError(
-                    typing.cast(
-                        HttpValidationError,
-                        construct_type(
-                            type_=HttpValidationError,  # type: ignore
-                            object_=_response.json(),
-                        ),
-                    )
-                )
-            _response_json = _response.json()
-        except JSONDecodeError:
-            raise ApiError(status_code=_response.status_code, body=_response.text)
-        raise ApiError(status_code=_response.status_code, body=_response_json)
letta_client/types/assistant_file.py
DELETED
@@ -1,33 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-from ..core.unchecked_base_model import UncheckedBaseModel
-import pydantic
-import typing
-from ..core.pydantic_utilities import IS_PYDANTIC_V2
-
-
-class AssistantFile(UncheckedBaseModel):
-    id: str = pydantic.Field()
-    """
-    The unique identifier of the file.
-    """
-
-    object: typing.Optional[str] = None
-    created_at: int = pydantic.Field()
-    """
-    The unix timestamp of when the file was created.
-    """
-
-    assistant_id: str = pydantic.Field()
-    """
-    The unique identifier of the assistant.
-    """
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
letta_client/types/assistant_message_input.py
DELETED
@@ -1,23 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-from ..core.unchecked_base_model import UncheckedBaseModel
-import typing
-from .letta_schemas_openai_chat_completion_request_tool_call import LettaSchemasOpenaiChatCompletionRequestToolCall
-from ..core.pydantic_utilities import IS_PYDANTIC_V2
-import pydantic
-
-
-class AssistantMessageInput(UncheckedBaseModel):
-    content: typing.Optional[str] = None
-    role: typing.Optional[str] = None
-    name: typing.Optional[str] = None
-    tool_calls: typing.Optional[typing.List[LettaSchemasOpenaiChatCompletionRequestToolCall]] = None
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
letta_client/types/chat_completion_request.py
DELETED
@@ -1,49 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-from ..core.unchecked_base_model import UncheckedBaseModel
-import typing
-from .chat_completion_request_messages_item import ChatCompletionRequestMessagesItem
-from .response_format import ResponseFormat
-from .chat_completion_request_stop import ChatCompletionRequestStop
-from .tool_input import ToolInput
-from .chat_completion_request_tool_choice import ChatCompletionRequestToolChoice
-from .function_schema import FunctionSchema
-from .chat_completion_request_function_call import ChatCompletionRequestFunctionCall
-from ..core.pydantic_utilities import IS_PYDANTIC_V2
-import pydantic
-
-
-class ChatCompletionRequest(UncheckedBaseModel):
-    """
-    https://platform.openai.com/docs/api-reference/chat/create
-    """
-
-    model: str
-    messages: typing.List[ChatCompletionRequestMessagesItem]
-    frequency_penalty: typing.Optional[float] = None
-    logit_bias: typing.Optional[typing.Dict[str, typing.Optional[int]]] = None
-    logprobs: typing.Optional[bool] = None
-    top_logprobs: typing.Optional[int] = None
-    max_tokens: typing.Optional[int] = None
-    n: typing.Optional[int] = None
-    presence_penalty: typing.Optional[float] = None
-    response_format: typing.Optional[ResponseFormat] = None
-    seed: typing.Optional[int] = None
-    stop: typing.Optional[ChatCompletionRequestStop] = None
-    stream: typing.Optional[bool] = None
-    temperature: typing.Optional[float] = None
-    top_p: typing.Optional[float] = None
-    user: typing.Optional[str] = None
-    tools: typing.Optional[typing.List[ToolInput]] = None
-    tool_choice: typing.Optional[ChatCompletionRequestToolChoice] = None
-    functions: typing.Optional[typing.List[FunctionSchema]] = None
-    function_call: typing.Optional[ChatCompletionRequestFunctionCall] = None
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
letta_client/types/chat_completion_request_messages_item.py
DELETED
@@ -1,11 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import typing
-from .system_message_input import SystemMessageInput
-from .user_message_input import UserMessageInput
-from .assistant_message_input import AssistantMessageInput
-from .tool_message import ToolMessage
-
-ChatCompletionRequestMessagesItem = typing.Union[
-    SystemMessageInput, UserMessageInput, AssistantMessageInput, ToolMessage
-]
letta_client/types/chat_completion_request_tool_choice.py
DELETED
@@ -1,8 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import typing
-from .tool_function_choice import ToolFunctionChoice
-
-ChatCompletionRequestToolChoice = typing.Union[
-    typing.Literal["none"], typing.Literal["auto"], typing.Literal["required"], ToolFunctionChoice
-]
letta_client/types/chat_completion_response.py
DELETED
@@ -1,32 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-from ..core.unchecked_base_model import UncheckedBaseModel
-import typing
-from .choice import Choice
-import datetime as dt
-from .usage_statistics import UsageStatistics
-from ..core.pydantic_utilities import IS_PYDANTIC_V2
-import pydantic
-
-
-class ChatCompletionResponse(UncheckedBaseModel):
-    """
-    https://platform.openai.com/docs/api-reference/chat/object
-    """
-
-    id: str
-    choices: typing.List[Choice]
-    created: dt.datetime
-    model: typing.Optional[str] = None
-    system_fingerprint: typing.Optional[str] = None
-    object: typing.Optional[typing.Literal["chat.completion"]] = None
-    usage: UsageStatistics
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
letta_client/types/choice.py
DELETED
@@ -1,25 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-from ..core.unchecked_base_model import UncheckedBaseModel
-from .letta_schemas_openai_chat_completion_response_message import LettaSchemasOpenaiChatCompletionResponseMessage
-import typing
-from .message_content_log_prob import MessageContentLogProb
-from ..core.pydantic_utilities import IS_PYDANTIC_V2
-import pydantic
-
-
-class Choice(UncheckedBaseModel):
-    finish_reason: str
-    index: int
-    message: LettaSchemasOpenaiChatCompletionResponseMessage
-    logprobs: typing.Optional[typing.Dict[str, typing.Optional[typing.List[MessageContentLogProb]]]] = None
-    seed: typing.Optional[int] = None
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
letta_client/types/create_assistant_request.py
DELETED
@@ -1,57 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-from ..core.unchecked_base_model import UncheckedBaseModel
-import pydantic
-import typing
-from ..core.pydantic_utilities import IS_PYDANTIC_V2
-
-
-class CreateAssistantRequest(UncheckedBaseModel):
-    model: str = pydantic.Field()
-    """
-    The model to use for the assistant.
-    """
-
-    name: str = pydantic.Field()
-    """
-    The name of the assistant.
-    """
-
-    description: typing.Optional[str] = pydantic.Field(default=None)
-    """
-    The description of the assistant.
-    """
-
-    instructions: str = pydantic.Field()
-    """
-    The instructions for the assistant.
-    """
-
-    tools: typing.Optional[typing.List[str]] = pydantic.Field(default=None)
-    """
-    The tools used by the assistant.
-    """
-
-    file_ids: typing.Optional[typing.List[str]] = pydantic.Field(default=None)
-    """
-    List of file IDs associated with the assistant.
-    """
-
-    metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None)
-    """
-    Metadata associated with the assistant.
-    """
-
-    embedding_model: typing.Optional[str] = pydantic.Field(default=None)
-    """
-    The model to use for the assistant.
-    """
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
letta_client/types/delete_assistant_file_response.py
DELETED
@@ -1,28 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-from ..core.unchecked_base_model import UncheckedBaseModel
-import pydantic
-import typing
-from ..core.pydantic_utilities import IS_PYDANTIC_V2
-
-
-class DeleteAssistantFileResponse(UncheckedBaseModel):
-    id: str = pydantic.Field()
-    """
-    The unique identifier of the file.
-    """
-
-    object: typing.Optional[str] = None
-    deleted: bool = pydantic.Field()
-    """
-    Whether the file was deleted.
-    """
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
letta_client/types/delete_assistant_response.py
DELETED
@@ -1,28 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-from ..core.unchecked_base_model import UncheckedBaseModel
-import pydantic
-import typing
-from ..core.pydantic_utilities import IS_PYDANTIC_V2
-
-
-class DeleteAssistantResponse(UncheckedBaseModel):
-    id: str = pydantic.Field()
-    """
-    The unique identifier of the agent.
-    """
-
-    object: typing.Optional[str] = None
-    deleted: bool = pydantic.Field()
-    """
-    Whether the agent was deleted.
-    """
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
letta_client/types/function_call_input.py
DELETED
@@ -1,19 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-from ..core.unchecked_base_model import UncheckedBaseModel
-from ..core.pydantic_utilities import IS_PYDANTIC_V2
-import typing
-import pydantic
-
-
-class FunctionCallInput(UncheckedBaseModel):
-    name: str
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
letta_client/types/letta_schemas_openai_chat_completion_request_tool.py
DELETED
@@ -1,21 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-from ..core.unchecked_base_model import UncheckedBaseModel
-import typing
-from .function_schema import FunctionSchema
-from ..core.pydantic_utilities import IS_PYDANTIC_V2
-import pydantic
-
-
-class LettaSchemasOpenaiChatCompletionRequestTool(UncheckedBaseModel):
-    type: typing.Optional[typing.Literal["function"]] = None
-    function: FunctionSchema
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
letta_client/types/letta_schemas_openai_chat_completion_request_tool_call.py
DELETED
@@ -1,24 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-from ..core.unchecked_base_model import UncheckedBaseModel
-import typing
-from .letta_schemas_openai_chat_completion_request_tool_call_function import (
-    LettaSchemasOpenaiChatCompletionRequestToolCallFunction,
-)
-from ..core.pydantic_utilities import IS_PYDANTIC_V2
-import pydantic
-
-
-class LettaSchemasOpenaiChatCompletionRequestToolCall(UncheckedBaseModel):
-    id: str
-    type: typing.Optional[typing.Literal["function"]] = None
-    function: LettaSchemasOpenaiChatCompletionRequestToolCallFunction
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
letta_client/types/letta_schemas_openai_chat_completion_request_tool_call_function.py
DELETED
@@ -1,20 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-from ..core.unchecked_base_model import UncheckedBaseModel
-from ..core.pydantic_utilities import IS_PYDANTIC_V2
-import typing
-import pydantic
-
-
-class LettaSchemasOpenaiChatCompletionRequestToolCallFunction(UncheckedBaseModel):
-    name: str
-    arguments: str
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
letta_client/types/letta_schemas_openai_chat_completion_response_message.py
DELETED
@@ -1,24 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-from ..core.unchecked_base_model import UncheckedBaseModel
-import typing
-from .letta_schemas_openai_chat_completion_response_tool_call import LettaSchemasOpenaiChatCompletionResponseToolCall
-from .function_call_output import FunctionCallOutput
-from ..core.pydantic_utilities import IS_PYDANTIC_V2
-import pydantic
-
-
-class LettaSchemasOpenaiChatCompletionResponseMessage(UncheckedBaseModel):
-    content: typing.Optional[str] = None
-    tool_calls: typing.Optional[typing.List[LettaSchemasOpenaiChatCompletionResponseToolCall]] = None
-    role: str
-    function_call: typing.Optional[FunctionCallOutput] = None
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
letta_client/types/letta_schemas_openai_chat_completion_response_tool_call.py
DELETED
@@ -1,22 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-from ..core.unchecked_base_model import UncheckedBaseModel
-import typing
-from .function_call_output import FunctionCallOutput
-from ..core.pydantic_utilities import IS_PYDANTIC_V2
-import pydantic
-
-
-class LettaSchemasOpenaiChatCompletionResponseToolCall(UncheckedBaseModel):
-    id: str
-    type: typing.Optional[typing.Literal["function"]] = None
-    function: FunctionCallOutput
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
letta_client/types/letta_schemas_openai_chat_completions_tool_call_function.py
DELETED
@@ -1,27 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-from ..core.unchecked_base_model import UncheckedBaseModel
-import pydantic
-from ..core.pydantic_utilities import IS_PYDANTIC_V2
-import typing
-
-
-class LettaSchemasOpenaiChatCompletionsToolCallFunction(UncheckedBaseModel):
-    name: str = pydantic.Field()
-    """
-    The name of the function to call
-    """
-
-    arguments: str = pydantic.Field()
-    """
-    The arguments to pass to the function (JSON dump)
-    """
-
-    if IS_PYDANTIC_V2:
-        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-    else:
-
-        class Config:
-            frozen = True
-            smart_union = True
-            extra = pydantic.Extra.allow
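All of the deleted model files above share the same Fern-generated Pydantic compatibility shim: under Pydantic v2 the class sets `model_config`, and under v1 it falls back to an inner `Config` class. A minimal standalone sketch of that pattern follows; it is an illustration under assumptions, using plain `pydantic.BaseModel` in place of letta's `UncheckedBaseModel` and deriving the version flag from `pydantic.VERSION` rather than from `letta_client.core.pydantic_utilities`.

import typing

import pydantic

# Stand-in for letta_client.core.pydantic_utilities.IS_PYDANTIC_V2 (assumption:
# derive the flag directly from the installed pydantic version string).
IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2")


class DeletedStyleModel(pydantic.BaseModel):
    """Illustrative model mirroring the config pattern of the deleted Fern-generated types."""

    id: str
    deleted: bool = False

    if IS_PYDANTIC_V2:
        # Pydantic v2 path: configuration lives in model_config.
        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore
    else:
        # Pydantic v1 path: configuration lives in an inner Config class,
        # exactly as in the generated files above.
        class Config:
            frozen = True
            smart_union = True
            extra = pydantic.Extra.allow

Only the branch matching the installed Pydantic major version is executed, so the v2-only `ConfigDict` reference never runs under v1 and vice versa; either way the model should accept extra fields and be immutable after construction, which is the behaviour the generated response types rely on.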