letta-client 0.1.271__py3-none-any.whl → 0.1.272__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of letta-client might be problematic.
- letta_client/__init__.py +10 -2
- letta_client/agents/client.py +109 -0
- letta_client/agents/raw_client.py +139 -0
- letta_client/core/client_wrapper.py +2 -2
- letta_client/templates/types/templates_get_template_snapshot_response_agents_item_properties.py +2 -0
- letta_client/types/__init__.py +12 -2
- letta_client/types/agent_file_attachment.py +73 -0
- letta_client/types/chat_completion_message_function_tool_call_input.py +25 -0
- letta_client/types/{chat_completion_message_function_tool_call.py → chat_completion_message_function_tool_call_output.py} +1 -1
- letta_client/types/letta_schemas_agent_file_message_schema.py +23 -0
- letta_client/types/message.py +2 -2
- letta_client/types/openai_types_chat_chat_completion_message_function_tool_call_function.py +21 -0
- letta_client/types/paginated_agent_files.py +38 -0
- {letta_client-0.1.271.dist-info → letta_client-0.1.272.dist-info}/METADATA +1 -1
- {letta_client-0.1.271.dist-info → letta_client-0.1.272.dist-info}/RECORD +16 -12
- {letta_client-0.1.271.dist-info → letta_client-0.1.272.dist-info}/WHEEL +0 -0
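The functional centerpiece of 0.1.272 is a paginated file listing on the agents client (GET v1/agents/{agent_id}/files), returned through the new PaginatedAgentFiles and AgentFileAttachment types; the release also splits the chat-completion function tool-call model into separate Input and Output variants and adds tool-call metadata to the agent-file message schema. A minimal sketch of the new call, assuming a reachable Letta deployment and placeholder credentials and IDs:

from letta_client import Letta

# Placeholder project, token, and agent values for illustration only.
client = Letta(project="YOUR_PROJECT", token="YOUR_TOKEN")

# New in 0.1.272: list the files attached to an agent, optionally filtered
# to those currently open in the agent's context window.
page = client.agents.list_agent_files(agent_id="agent_id", is_open=True, limit=50)
for attachment in page.files:
    print(attachment.file_name, attachment.folder_name, attachment.is_open)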
letta_client/__init__.py
CHANGED
@@ -7,6 +7,7 @@ from .types import (
     ActionParametersModel,
     ActionResponseModel,
     AgentEnvironmentVariable,
+    AgentFileAttachment,
     AgentFileSchema,
     AgentState,
     AgentStateResponseFormat,
@@ -50,7 +51,8 @@ from .types import (
     ChatCompletionFunctionMessageParam,
     ChatCompletionFunctionToolParam,
     ChatCompletionMessageCustomToolCallParam,
-    ChatCompletionMessageFunctionToolCall,
+    ChatCompletionMessageFunctionToolCallInput,
+    ChatCompletionMessageFunctionToolCallOutput,
     ChatCompletionMessageFunctionToolCallParam,
     ChatCompletionNamedToolChoiceCustomParam,
     ChatCompletionNamedToolChoiceParam,
@@ -205,6 +207,7 @@ from .types import (
     OpenaiTypesChatChatCompletionCustomToolParamCustom,
     OpenaiTypesChatChatCompletionCustomToolParamCustomFormat,
     OpenaiTypesChatChatCompletionMessageCustomToolCallParamCustom,
+    OpenaiTypesChatChatCompletionMessageFunctionToolCallFunction,
     OpenaiTypesChatChatCompletionMessageFunctionToolCallParamFunction,
     OpenaiTypesChatChatCompletionNamedToolChoiceCustomParamCustom,
     OpenaiTypesChatChatCompletionNamedToolChoiceParamFunction,
@@ -213,6 +216,7 @@ from .types import (
     OrganizationCreate,
     OrganizationSourcesStats,
     OrganizationUpdate,
+    PaginatedAgentFiles,
     ParameterProperties,
     ParametersSchema,
     ParentToolRule,
@@ -418,6 +422,7 @@ __all__ = [
     "AddMcpServerRequest",
     "AddMcpServerResponseItem",
     "AgentEnvironmentVariable",
+    "AgentFileAttachment",
     "AgentFileSchema",
     "AgentState",
     "AgentStateResponseFormat",
@@ -473,7 +478,8 @@ __all__ = [
     "ChatCompletionFunctionMessageParam",
     "ChatCompletionFunctionToolParam",
     "ChatCompletionMessageCustomToolCallParam",
-    "ChatCompletionMessageFunctionToolCall",
+    "ChatCompletionMessageFunctionToolCallInput",
+    "ChatCompletionMessageFunctionToolCallOutput",
     "ChatCompletionMessageFunctionToolCallParam",
     "ChatCompletionNamedToolChoiceCustomParam",
     "ChatCompletionNamedToolChoiceParam",
@@ -651,6 +657,7 @@ __all__ = [
     "OpenaiTypesChatChatCompletionCustomToolParamCustom",
     "OpenaiTypesChatChatCompletionCustomToolParamCustomFormat",
     "OpenaiTypesChatChatCompletionMessageCustomToolCallParamCustom",
+    "OpenaiTypesChatChatCompletionMessageFunctionToolCallFunction",
     "OpenaiTypesChatChatCompletionMessageFunctionToolCallParamFunction",
     "OpenaiTypesChatChatCompletionNamedToolChoiceCustomParamCustom",
     "OpenaiTypesChatChatCompletionNamedToolChoiceParamFunction",
@@ -659,6 +666,7 @@ __all__ = [
     "OrganizationCreate",
     "OrganizationSourcesStats",
     "OrganizationUpdate",
+    "PaginatedAgentFiles",
     "ParameterProperties",
     "ParametersSchema",
     "ParentToolRule",
letta_client/agents/client.py
CHANGED
@@ -13,6 +13,7 @@ from ..types.embedding_config import EmbeddingConfig
 from ..types.imported_agents_response import ImportedAgentsResponse
 from ..types.llm_config import LlmConfig
 from ..types.message_create import MessageCreate
+from ..types.paginated_agent_files import PaginatedAgentFiles
 from .blocks.client import AsyncBlocksClient, BlocksClient
 from .context.client import AsyncContextClient, ContextClient
 from .core_memory.client import AsyncCoreMemoryClient, CoreMemoryClient
@@ -834,6 +835,56 @@ class AgentsClient:
         )
         return _response.data
 
+    def list_agent_files(
+        self,
+        agent_id: str,
+        *,
+        cursor: typing.Optional[str] = None,
+        limit: typing.Optional[int] = None,
+        is_open: typing.Optional[bool] = None,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> PaginatedAgentFiles:
+        """
+        Get the files attached to an agent with their open/closed status (paginated).
+
+        Parameters
+        ----------
+        agent_id : str
+
+        cursor : typing.Optional[str]
+            Pagination cursor from previous response
+
+        limit : typing.Optional[int]
+            Number of items to return (1-100)
+
+        is_open : typing.Optional[bool]
+            Filter by open status (true for open files, false for closed files)
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        PaginatedAgentFiles
+            Successful Response
+
+        Examples
+        --------
+        from letta_client import Letta
+
+        client = Letta(
+            project="YOUR_PROJECT",
+            token="YOUR_TOKEN",
+        )
+        client.agents.list_agent_files(
+            agent_id="agent_id",
+        )
+        """
+        _response = self._raw_client.list_agent_files(
+            agent_id, cursor=cursor, limit=limit, is_open=is_open, request_options=request_options
+        )
+        return _response.data
+
     def summarize_agent_conversation(
         self, agent_id: str, *, max_message_length: int, request_options: typing.Optional[RequestOptions] = None
     ) -> None:
@@ -1799,6 +1850,64 @@ class AsyncAgentsClient:
         )
         return _response.data
 
+    async def list_agent_files(
+        self,
+        agent_id: str,
+        *,
+        cursor: typing.Optional[str] = None,
+        limit: typing.Optional[int] = None,
+        is_open: typing.Optional[bool] = None,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> PaginatedAgentFiles:
+        """
+        Get the files attached to an agent with their open/closed status (paginated).
+
+        Parameters
+        ----------
+        agent_id : str
+
+        cursor : typing.Optional[str]
+            Pagination cursor from previous response
+
+        limit : typing.Optional[int]
+            Number of items to return (1-100)
+
+        is_open : typing.Optional[bool]
+            Filter by open status (true for open files, false for closed files)
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        PaginatedAgentFiles
+            Successful Response
+
+        Examples
+        --------
+        import asyncio
+
+        from letta_client import AsyncLetta
+
+        client = AsyncLetta(
+            project="YOUR_PROJECT",
+            token="YOUR_TOKEN",
+        )
+
+
+        async def main() -> None:
+            await client.agents.list_agent_files(
+                agent_id="agent_id",
+            )
+
+
+        asyncio.run(main())
+        """
+        _response = await self._raw_client.list_agent_files(
+            agent_id, cursor=cursor, limit=limit, is_open=is_open, request_options=request_options
+        )
+        return _response.data
+
     async def summarize_agent_conversation(
         self, agent_id: str, *, max_message_length: int, request_options: typing.Optional[RequestOptions] = None
     ) -> None:
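Because list_agent_files returns a PaginatedAgentFiles (next_cursor, has_more) rather than a plain list, callers pass the cursor from one response into the next request. A sketch of draining all pages, assuming client and agent_id are set up as in the docstring example above:

cursor = None
attachments = []
while True:
    page = client.agents.list_agent_files(agent_id="agent_id", cursor=cursor, limit=100)
    attachments.extend(page.files)
    if not page.has_more:
        break
    # next_cursor is the file-agent relationship ID to resume from.
    cursor = page.next_cursor

print(f"{len(attachments)} files attached")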
letta_client/agents/raw_client.py
CHANGED
@@ -21,6 +21,7 @@ from ..types.http_validation_error import HttpValidationError
 from ..types.imported_agents_response import ImportedAgentsResponse
 from ..types.llm_config import LlmConfig
 from ..types.message_create import MessageCreate
+from ..types.paginated_agent_files import PaginatedAgentFiles
 from .types.agents_search_request_search_item import AgentsSearchRequestSearchItem
 from .types.agents_search_request_sort_by import AgentsSearchRequestSortBy
 from .types.agents_search_response import AgentsSearchResponse
@@ -967,6 +968,75 @@ class RawAgentsClient:
             raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
         raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
 
+    def list_agent_files(
+        self,
+        agent_id: str,
+        *,
+        cursor: typing.Optional[str] = None,
+        limit: typing.Optional[int] = None,
+        is_open: typing.Optional[bool] = None,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> HttpResponse[PaginatedAgentFiles]:
+        """
+        Get the files attached to an agent with their open/closed status (paginated).
+
+        Parameters
+        ----------
+        agent_id : str
+
+        cursor : typing.Optional[str]
+            Pagination cursor from previous response
+
+        limit : typing.Optional[int]
+            Number of items to return (1-100)
+
+        is_open : typing.Optional[bool]
+            Filter by open status (true for open files, false for closed files)
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        HttpResponse[PaginatedAgentFiles]
+            Successful Response
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            f"v1/agents/{jsonable_encoder(agent_id)}/files",
+            method="GET",
+            params={
+                "cursor": cursor,
+                "limit": limit,
+                "is_open": is_open,
+            },
+            request_options=request_options,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                _data = typing.cast(
+                    PaginatedAgentFiles,
+                    construct_type(
+                        type_=PaginatedAgentFiles,  # type: ignore
+                        object_=_response.json(),
+                    ),
+                )
+                return HttpResponse(response=_response, data=_data)
+            if _response.status_code == 422:
+                raise UnprocessableEntityError(
+                    headers=dict(_response.headers),
+                    body=typing.cast(
+                        HttpValidationError,
+                        construct_type(
+                            type_=HttpValidationError,  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    ),
+                )
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
+        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
+
     def summarize_agent_conversation(
         self, agent_id: str, *, max_message_length: int, request_options: typing.Optional[RequestOptions] = None
     ) -> HttpResponse[None]:
@@ -2028,6 +2098,75 @@ class AsyncRawAgentsClient:
             raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
         raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
 
+    async def list_agent_files(
+        self,
+        agent_id: str,
+        *,
+        cursor: typing.Optional[str] = None,
+        limit: typing.Optional[int] = None,
+        is_open: typing.Optional[bool] = None,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> AsyncHttpResponse[PaginatedAgentFiles]:
+        """
+        Get the files attached to an agent with their open/closed status (paginated).
+
+        Parameters
+        ----------
+        agent_id : str
+
+        cursor : typing.Optional[str]
+            Pagination cursor from previous response
+
+        limit : typing.Optional[int]
+            Number of items to return (1-100)
+
+        is_open : typing.Optional[bool]
+            Filter by open status (true for open files, false for closed files)
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        AsyncHttpResponse[PaginatedAgentFiles]
+            Successful Response
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            f"v1/agents/{jsonable_encoder(agent_id)}/files",
+            method="GET",
+            params={
+                "cursor": cursor,
+                "limit": limit,
+                "is_open": is_open,
+            },
+            request_options=request_options,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                _data = typing.cast(
+                    PaginatedAgentFiles,
+                    construct_type(
+                        type_=PaginatedAgentFiles,  # type: ignore
+                        object_=_response.json(),
+                    ),
+                )
+                return AsyncHttpResponse(response=_response, data=_data)
+            if _response.status_code == 422:
+                raise UnprocessableEntityError(
+                    headers=dict(_response.headers),
+                    body=typing.cast(
+                        HttpValidationError,
+                        construct_type(
+                            type_=HttpValidationError,  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    ),
+                )
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
+        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
+
     async def summarize_agent_conversation(
         self, agent_id: str, *, max_message_length: int, request_options: typing.Optional[RequestOptions] = None
     ) -> AsyncHttpResponse[None]:
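The raw client raises UnprocessableEntityError for 422 responses and falls back to ApiError for anything else, and the high-level client re-raises whatever the raw client raises. A hedged sketch of handling that, assuming ApiError is importable from letta_client.core.api_error (the module listed in the RECORD) and keeps status_code and body as attributes:

from letta_client.core.api_error import ApiError  # import path assumed from the RECORD listing

try:
    page = client.agents.list_agent_files(agent_id="agent_id", limit=100)
except ApiError as err:
    # status_code and body are assumed to mirror the failed HTTP response,
    # matching the raise sites in raw_client.py above.
    print("listing agent files failed:", err.status_code, err.body)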
letta_client/core/client_wrapper.py
CHANGED
@@ -24,10 +24,10 @@ class BaseClientWrapper:
 
     def get_headers(self) -> typing.Dict[str, str]:
         headers: typing.Dict[str, str] = {
-            "User-Agent": "letta-client/0.1.271",
+            "User-Agent": "letta-client/0.1.272",
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "letta-client",
-            "X-Fern-SDK-Version": "0.1.271",
+            "X-Fern-SDK-Version": "0.1.272",
             **(self.get_custom_headers() or {}),
         }
         if self._project is not None:
letta_client/templates/types/templates_get_template_snapshot_response_agents_item_properties.py
CHANGED
@@ -8,6 +8,8 @@ from ...core.unchecked_base_model import UncheckedBaseModel
 
 
 class TemplatesGetTemplateSnapshotResponseAgentsItemProperties(UncheckedBaseModel):
+    enable_reasoner: typing.Optional[bool] = None
+    put_inner_thoughts_in_kwargs: typing.Optional[bool] = None
     context_window_limit: typing.Optional[float] = None
     max_tokens: typing.Optional[float] = None
     max_reasoning_tokens: typing.Optional[float] = None
letta_client/types/__init__.py
CHANGED
@@ -6,6 +6,7 @@ from .action_model import ActionModel
 from .action_parameters_model import ActionParametersModel
 from .action_response_model import ActionResponseModel
 from .agent_environment_variable import AgentEnvironmentVariable
+from .agent_file_attachment import AgentFileAttachment
 from .agent_file_schema import AgentFileSchema
 from .agent_state import AgentState
 from .agent_state_response_format import AgentStateResponseFormat
@@ -49,7 +50,8 @@ from .chat_completion_function_call_option_param import ChatCompletionFunctionCallOptionParam
 from .chat_completion_function_message_param import ChatCompletionFunctionMessageParam
 from .chat_completion_function_tool_param import ChatCompletionFunctionToolParam
 from .chat_completion_message_custom_tool_call_param import ChatCompletionMessageCustomToolCallParam
-from .chat_completion_message_function_tool_call import ChatCompletionMessageFunctionToolCall
+from .chat_completion_message_function_tool_call_input import ChatCompletionMessageFunctionToolCallInput
+from .chat_completion_message_function_tool_call_output import ChatCompletionMessageFunctionToolCallOutput
 from .chat_completion_message_function_tool_call_param import ChatCompletionMessageFunctionToolCallParam
 from .chat_completion_named_tool_choice_custom_param import ChatCompletionNamedToolChoiceCustomParam
 from .chat_completion_named_tool_choice_param import ChatCompletionNamedToolChoiceParam
@@ -218,6 +220,9 @@ from .openai_types_chat_chat_completion_custom_tool_param_custom_format import (
 from .openai_types_chat_chat_completion_message_custom_tool_call_param_custom import (
     OpenaiTypesChatChatCompletionMessageCustomToolCallParamCustom,
 )
+from .openai_types_chat_chat_completion_message_function_tool_call_function import (
+    OpenaiTypesChatChatCompletionMessageFunctionToolCallFunction,
+)
 from .openai_types_chat_chat_completion_message_function_tool_call_param_function import (
     OpenaiTypesChatChatCompletionMessageFunctionToolCallParamFunction,
 )
@@ -232,6 +237,7 @@ from .organization import Organization
 from .organization_create import OrganizationCreate
 from .organization_sources_stats import OrganizationSourcesStats
 from .organization_update import OrganizationUpdate
+from .paginated_agent_files import PaginatedAgentFiles
 from .parameter_properties import ParameterProperties
 from .parameters_schema import ParametersSchema
 from .parent_tool_rule import ParentToolRule
@@ -332,6 +338,7 @@ __all__ = [
     "ActionParametersModel",
     "ActionResponseModel",
     "AgentEnvironmentVariable",
+    "AgentFileAttachment",
     "AgentFileSchema",
     "AgentState",
     "AgentStateResponseFormat",
@@ -375,7 +382,8 @@ __all__ = [
     "ChatCompletionFunctionMessageParam",
     "ChatCompletionFunctionToolParam",
     "ChatCompletionMessageCustomToolCallParam",
-    "ChatCompletionMessageFunctionToolCall",
+    "ChatCompletionMessageFunctionToolCallInput",
+    "ChatCompletionMessageFunctionToolCallOutput",
     "ChatCompletionMessageFunctionToolCallParam",
     "ChatCompletionNamedToolChoiceCustomParam",
     "ChatCompletionNamedToolChoiceParam",
@@ -530,6 +538,7 @@ __all__ = [
     "OpenaiTypesChatChatCompletionCustomToolParamCustom",
     "OpenaiTypesChatChatCompletionCustomToolParamCustomFormat",
     "OpenaiTypesChatChatCompletionMessageCustomToolCallParamCustom",
+    "OpenaiTypesChatChatCompletionMessageFunctionToolCallFunction",
     "OpenaiTypesChatChatCompletionMessageFunctionToolCallParamFunction",
     "OpenaiTypesChatChatCompletionNamedToolChoiceCustomParamCustom",
     "OpenaiTypesChatChatCompletionNamedToolChoiceParamFunction",
@@ -538,6 +547,7 @@ __all__ = [
     "OrganizationCreate",
     "OrganizationSourcesStats",
     "OrganizationUpdate",
+    "PaginatedAgentFiles",
     "ParameterProperties",
     "ParametersSchema",
     "ParentToolRule",
letta_client/types/agent_file_attachment.py
ADDED
@@ -0,0 +1,73 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+from ..core.pydantic_utilities import IS_PYDANTIC_V2
+from ..core.unchecked_base_model import UncheckedBaseModel
+
+
+class AgentFileAttachment(UncheckedBaseModel):
+    """
+    Response model for agent file attachments showing file status in agent context
+    """
+
+    id: str = pydantic.Field()
+    """
+    Unique identifier of the file-agent relationship
+    """
+
+    file_id: str = pydantic.Field()
+    """
+    Unique identifier of the file
+    """
+
+    file_name: str = pydantic.Field()
+    """
+    Name of the file
+    """
+
+    folder_id: str = pydantic.Field()
+    """
+    Unique identifier of the folder/source
+    """
+
+    folder_name: str = pydantic.Field()
+    """
+    Name of the folder/source
+    """
+
+    is_open: bool = pydantic.Field()
+    """
+    Whether the file is currently open in the agent's context
+    """
+
+    last_accessed_at: typing.Optional[dt.datetime] = pydantic.Field(default=None)
+    """
+    Timestamp of last access by the agent
+    """
+
+    visible_content: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Portion of the file visible to the agent if open
+    """
+
+    start_line: typing.Optional[int] = pydantic.Field(default=None)
+    """
+    Starting line number if file was opened with line range
+    """
+
+    end_line: typing.Optional[int] = pydantic.Field(default=None)
+    """
+    Ending line number if file was opened with line range
+    """
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
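AgentFileAttachment pairs the file's identity (file_id, folder_id) with the view the agent currently has of it. A small sketch of summarising one page of attachments, assuming page is a PaginatedAgentFiles returned by list_agent_files:

for a in page.files:
    if a.is_open:
        # start_line/end_line are only set when the file was opened with a line range.
        window = f"lines {a.start_line}-{a.end_line}" if a.start_line is not None else "whole file"
        print(f"{a.folder_name}/{a.file_name}: open ({window}), last accessed {a.last_accessed_at}")
    else:
        print(f"{a.folder_name}/{a.file_name}: closed")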
letta_client/types/chat_completion_message_function_tool_call_input.py
ADDED
@@ -0,0 +1,25 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ..core.pydantic_utilities import IS_PYDANTIC_V2
+from ..core.unchecked_base_model import UncheckedBaseModel
+from .openai_types_chat_chat_completion_message_function_tool_call_function import (
+    OpenaiTypesChatChatCompletionMessageFunctionToolCallFunction,
+)
+
+
+class ChatCompletionMessageFunctionToolCallInput(UncheckedBaseModel):
+    id: str
+    function: OpenaiTypesChatChatCompletionMessageFunctionToolCallFunction
+    type: typing.Literal["function"] = "function"
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
letta_client/types/{chat_completion_message_function_tool_call.py → chat_completion_message_function_tool_call_output.py}
RENAMED
@@ -8,7 +8,7 @@ from ..core.unchecked_base_model import UncheckedBaseModel
 from .function_output import FunctionOutput
 
 
-class ChatCompletionMessageFunctionToolCall(UncheckedBaseModel):
+class ChatCompletionMessageFunctionToolCallOutput(UncheckedBaseModel):
     id: str
     function: FunctionOutput
     type: typing.Literal["function"] = "function"
letta_client/types/letta_schemas_agent_file_message_schema.py
CHANGED
@@ -1,12 +1,15 @@
 # This file was auto-generated by Fern from our API Definition.
 
+import datetime as dt
 import typing
 
 import pydantic
 from ..core.pydantic_utilities import IS_PYDANTIC_V2
 from ..core.unchecked_base_model import UncheckedBaseModel
+from .chat_completion_message_function_tool_call_input import ChatCompletionMessageFunctionToolCallInput
 from .letta_schemas_agent_file_message_schema_content import LettaSchemasAgentFileMessageSchemaContent
 from .message_role import MessageRole
+from .tool_return import ToolReturn
 
 
 class LettaSchemasAgentFileMessageSchema(UncheckedBaseModel):
@@ -64,6 +67,26 @@ class LettaSchemasAgentFileMessageSchema(UncheckedBaseModel):
     The unique identifier of the agent
     """
 
+    tool_calls: typing.Optional[typing.List[ChatCompletionMessageFunctionToolCallInput]] = pydantic.Field(default=None)
+    """
+    The list of tool calls requested. Only applicable for role assistant.
+    """
+
+    tool_call_id: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    The ID of the tool call. Only applicable for role tool.
+    """
+
+    tool_returns: typing.Optional[typing.List[ToolReturn]] = pydantic.Field(default=None)
+    """
+    Tool execution return information for prior tool calls
+    """
+
+    created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None)
+    """
+    The timestamp when the object was created.
+    """
+
     if IS_PYDANTIC_V2:
         model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
     else:
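The new tool_calls field on LettaSchemasAgentFileMessageSchema carries the Input variant of the tool-call model, whose function payload is the new OpenaiTypesChatChatCompletionMessageFunctionToolCallFunction (a plain name plus a JSON-string arguments field). A sketch of building one such entry; the tool name, call ID, and arguments below are hypothetical, illustrative values only:

import json

from letta_client import (
    ChatCompletionMessageFunctionToolCallInput,
    OpenaiTypesChatChatCompletionMessageFunctionToolCallFunction,
)

tool_call = ChatCompletionMessageFunctionToolCallInput(
    id="call_123",  # hypothetical tool-call ID
    function=OpenaiTypesChatChatCompletionMessageFunctionToolCallFunction(
        name="open_file",
        arguments=json.dumps({"file_name": "notes.md"}),
    ),
)
# `type` defaults to "function", so it does not need to be passed explicitly.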
letta_client/types/message.py
CHANGED
@@ -6,7 +6,7 @@ import typing
 import pydantic
 from ..core.pydantic_utilities import IS_PYDANTIC_V2
 from ..core.unchecked_base_model import UncheckedBaseModel
-from .chat_completion_message_function_tool_call import ChatCompletionMessageFunctionToolCall
+from .chat_completion_message_function_tool_call_output import ChatCompletionMessageFunctionToolCallOutput
 from .message_content_item import MessageContentItem
 from .message_role import MessageRole
 from .tool_return import ToolReturn
@@ -85,7 +85,7 @@ class Message(UncheckedBaseModel):
     For role user/assistant: the (optional) name of the participant. For role tool/function: the name of the function called.
     """
 
-    tool_calls: typing.Optional[typing.List[ChatCompletionMessageFunctionToolCall]] = pydantic.Field(default=None)
+    tool_calls: typing.Optional[typing.List[ChatCompletionMessageFunctionToolCallOutput]] = pydantic.Field(default=None)
     """
     The list of tool calls requested. Only applicable for role assistant.
     """
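Message.tool_calls now yields ChatCompletionMessageFunctionToolCallOutput items, whose function field is a FunctionOutput rather than the raw OpenAI-style payload. A sketch of inspecting them on an assistant message returned by the API; FunctionOutput's fields are not shown in this diff, so the name/arguments access below is an assumption based on the OpenAI schema it mirrors:

# `message` is assumed to be a letta_client.types.Message with role "assistant".
for tc in message.tool_calls or []:
    print("tool call", tc.id, "type:", tc.type)
    # Assumed fields: FunctionOutput is not part of this diff.
    print("  ->", tc.function.name, tc.function.arguments)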
letta_client/types/openai_types_chat_chat_completion_message_function_tool_call_function.py
ADDED
@@ -0,0 +1,21 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ..core.pydantic_utilities import IS_PYDANTIC_V2
+from ..core.unchecked_base_model import UncheckedBaseModel
+
+
+class OpenaiTypesChatChatCompletionMessageFunctionToolCallFunction(UncheckedBaseModel):
+    arguments: str
+    name: str
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
letta_client/types/paginated_agent_files.py
ADDED
@@ -0,0 +1,38 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ..core.pydantic_utilities import IS_PYDANTIC_V2
+from ..core.unchecked_base_model import UncheckedBaseModel
+from .agent_file_attachment import AgentFileAttachment
+
+
+class PaginatedAgentFiles(UncheckedBaseModel):
+    """
+    Paginated response for agent files
+    """
+
+    files: typing.List[AgentFileAttachment] = pydantic.Field()
+    """
+    List of file attachments for the agent
+    """
+
+    next_cursor: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Cursor for fetching the next page (file-agent relationship ID)
+    """
+
+    has_more: bool = pydantic.Field()
+    """
+    Whether more results exist after this page
+    """
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
{letta_client-0.1.271.dist-info → letta_client-0.1.272.dist-info}/RECORD
CHANGED
@@ -1,9 +1,9 @@
-letta_client/__init__.py,sha256=
+letta_client/__init__.py,sha256=lqKjfPSuq_YX3zT6hJr-dhiio2gsyVjh6y3QuWSfe44,26442
 letta_client/agents/__init__.py,sha256=yl1d02BPp-nGZLaUdH9mWcYvHu-1RhRyZUgpZQKOMGo,2010
 letta_client/agents/blocks/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
 letta_client/agents/blocks/client.py,sha256=Akx-1SYEXkmdtLtytPtdFNhVts8JkjC2aMQnnWgd8Ug,14735
 letta_client/agents/blocks/raw_client.py,sha256=7tdlieWtGyMe1G5Ne9Rcujvr43DbD4K3hVJ7eiJNuFo,24454
-letta_client/agents/client.py,sha256=
+letta_client/agents/client.py,sha256=-iD2qFwuuFQkYqtnpKYqq24mtL7tUj1OhG9cbMa_HYA,73342
 letta_client/agents/context/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
 letta_client/agents/context/client.py,sha256=fhpJFWRs6INGreRyEw9gsFnlUWR48vIHbN_jVIHIBrw,3052
 letta_client/agents/context/raw_client.py,sha256=j2gko-oEFWuCgPkcX9jCv31OWvR6sTOtAYcSWllXYDs,4747
@@ -35,7 +35,7 @@ letta_client/agents/messages/types/messages_preview_raw_payload_request.py,sha25
 letta_client/agents/passages/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
 letta_client/agents/passages/client.py,sha256=XHPpqOH2BDjHkegTRM9MRdDVxW5VH40ERSFvWchWT48,16785
 letta_client/agents/passages/raw_client.py,sha256=TnNrFsnrexrPVmemkFbRIBfFMcq1Iap2qk23L7mr1Z0,25710
-letta_client/agents/raw_client.py,sha256=
+letta_client/agents/raw_client.py,sha256=y0JXg33ytgCTmI-5CQuMKB2mOn5je55VWtTGRsHk61I,96888
 letta_client/agents/sources/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
 letta_client/agents/sources/client.py,sha256=lCqB6FF9svrwf0oZSFs41WKlMXc-YRhUeb4FZkHbicM,6868
 letta_client/agents/sources/raw_client.py,sha256=ts4c5UBuXzrHU-lFWWrYniQqrMEc8SN0rfiqNXJLP5Y,12399
@@ -90,7 +90,7 @@ letta_client/client_side_access_tokens/types/client_side_access_tokens_list_clie
 letta_client/client_side_access_tokens/types/client_side_access_tokens_list_client_side_access_tokens_response_tokens_item_policy_data_item_access_item.py,sha256=kNHfEWFl7u71Pu8NPqutod0a2NXfvq8il05Hqm0iBB4,284
 letta_client/core/__init__.py,sha256=tpn7rjb6C2UIkYZYIqdrNpI7Yax2jw88sXh2baxaxAI,1715
 letta_client/core/api_error.py,sha256=44vPoTyWN59gonCIZMdzw7M1uspygiLnr3GNFOoVL2Q,614
-letta_client/core/client_wrapper.py,sha256=
+letta_client/core/client_wrapper.py,sha256=SVJmJ9wDEzoHaOGXde7NGu44Ob2L7o_GfNycm-Gp8UY,2776
 letta_client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 letta_client/core/file.py,sha256=d4NNbX8XvXP32z8KpK2Xovv33nFfruIrpz0QWxlgpZk,2663
 letta_client/core/force_multipart.py,sha256=awxh5MtcRYe74ehY8U76jzv6fYM_w_D3Rur7KQQzSDk,429
@@ -216,7 +216,7 @@ letta_client/templates/types/templates_get_template_snapshot_response.py,sha256=
 letta_client/templates/types/templates_get_template_snapshot_response_agents_item.py,sha256=ciczqvGIPMcuZCu3ObpVAZh8u_cDWbY6ImApwBOK6lc,2567
 letta_client/templates/types/templates_get_template_snapshot_response_agents_item_memory_variables.py,sha256=POh1PTstz0UC_rOnkpEyIQI0yHrANeM6Y5vuJlJAruU,877
 letta_client/templates/types/templates_get_template_snapshot_response_agents_item_memory_variables_data_item.py,sha256=TNgE_92wCm2MEGERb_q24_GKzvbh1z1I3pchuwowowA,816
-letta_client/templates/types/templates_get_template_snapshot_response_agents_item_properties.py,sha256=
+letta_client/templates/types/templates_get_template_snapshot_response_agents_item_properties.py,sha256=kIyAiTnIdJ5M5tM96lq4zJE7EGVMzZKnJzi8UtruKgw,1037
 letta_client/templates/types/templates_get_template_snapshot_response_agents_item_tool_rules_item.py,sha256=qoZ-EdDcNRYAQ2bADpvPLAzTKURXZR7ubz4o8yIu3LA,2061
 letta_client/templates/types/templates_get_template_snapshot_response_agents_item_tool_rules_item_child_output_mapping.py,sha256=LLnaNqnXFnoLRTZo2O9nCFlkLTkdj2Re1h6ItsM-_RQ,895
 letta_client/templates/types/templates_get_template_snapshot_response_agents_item_tool_rules_item_five.py,sha256=6yyJTWEoTy6UVFHqqzbvfY_i1VmWLuZVaRBWU8EbKx8,738
@@ -251,11 +251,12 @@ letta_client/tools/types/streaming_response.py,sha256=V1qT-XAqm-z7zffJ7W1JKPCaxZ
 letta_client/tools/types/test_mcp_server_request.py,sha256=3SqjEL3EYi7iV57TjTIzuBSKv8O3Y7qSUFrCiXEvSRk,373
 letta_client/tools/types/update_mcp_server_request.py,sha256=MHouV3iyZCTROguOQP5rOYvnmvDbBeXe5VtEejRvrEs,403
 letta_client/tools/types/update_mcp_server_response.py,sha256=BJTPHWkb8hwgd4FvftQ8eZjl2QzCQT-vZAUVnLft9hw,376
-letta_client/types/__init__.py,sha256=
+letta_client/types/__init__.py,sha256=9G-qISuN0CXedGcxxSNIzDCzQBuVMFni4BsQZuPudQc,29284
 letta_client/types/action_model.py,sha256=VTXavHB6J2d4MjjTMEpkuEyVaiTHyj1FGfa4j8kN6hQ,1241
 letta_client/types/action_parameters_model.py,sha256=s1mJ4tycms8UmCFsxyjKr6RbghSuqv35xpa9mK42sjg,829
 letta_client/types/action_response_model.py,sha256=LcML150OvsKimVV3sP4jSFh8pVxQXn_r_ff8DADOr3c,825
 letta_client/types/agent_environment_variable.py,sha256=z9nGJdnyrLGxYaIqdD0a73K3Euemvb87Bpe9YmvXPO8,1699
+letta_client/types/agent_file_attachment.py,sha256=J8zj7RkNkKr2ikgY1nbtcMNiBzZchwo_fh4j6iySqF4,1807
 letta_client/types/agent_file_schema.py,sha256=yD3QqRVWQr7T9M7OXL9KWNCP9BiR9wZC48xKnGzqsxQ,2212
 letta_client/types/agent_state.py,sha256=_8ulaUsdrU3ONYFP8chKcKbgnkmwroQVnle99j0CxlY,6675
 letta_client/types/agent_state_response_format.py,sha256=EsaiTnlxRPkGPBc67DCsVAARsGubFQM7Xbs12jtsvWw,378
@@ -299,7 +300,8 @@ letta_client/types/chat_completion_function_call_option_param.py,sha256=5vTg5QPq
 letta_client/types/chat_completion_function_message_param.py,sha256=AD5W28jAE-7MjHQpxExIBYm4xZNZf8ElGT-GB4sVw5M,671
 letta_client/types/chat_completion_function_tool_param.py,sha256=mfeTpehqf3lfBEpe--Cdikgu3ekhwtYFDYXYSVkYnrM,714
 letta_client/types/chat_completion_message_custom_tool_call_param.py,sha256=osXBxq4ke0zP1clZfWFFq_Q5UskH0VoSRBRxm4a_s3I,860
-letta_client/types/chat_completion_message_function_tool_call.py,sha256=
+letta_client/types/chat_completion_message_function_tool_call_input.py,sha256=k2o-enfLtQqaUfEoy3Nsm5d8gQ44PFWliFhVBrHbSYc,864
+letta_client/types/chat_completion_message_function_tool_call_output.py,sha256=emTFSDAGqlPaPIL2S70T59mAruewNlfVfc7-6warCJk,710
 letta_client/types/chat_completion_message_function_tool_call_param.py,sha256=Evo8XxMbnWtB7Ok0z5pWWqC6kxxEGueWv7T9Gp00LeQ,880
 letta_client/types/chat_completion_named_tool_choice_custom_param.py,sha256=gIi87aveu0HVitoTIid6DnWGIu7BylZwqS4it9KxnwQ,848
 letta_client/types/chat_completion_named_tool_choice_param.py,sha256=6pdtCulwRn0FeYbFHlklRRaFInofhzQFDALZlw9cSco,835
@@ -413,7 +415,7 @@ letta_client/types/letta_response.py,sha256=XsdGEunygyUZc-jegfZ0dqmERToeFQShvJY5
 letta_client/types/letta_schemas_agent_file_agent_schema.py,sha256=VGnQ-6FJAV0c4UGs-FgKoHPjwdpt83qN10W1aiUJ1Ts,9129
 letta_client/types/letta_schemas_agent_file_agent_schema_response_format.py,sha256=ZXsdcfac2W9-vcdCAmdCT6YX1n59LKTbdNHfWkykGm8,406
 letta_client/types/letta_schemas_agent_file_agent_schema_tool_rules_item.py,sha256=EMSqw6V53oBiunKwK-S0_seIbipVYKy2LETvEGkMbhw,751
-letta_client/types/letta_schemas_agent_file_message_schema.py,sha256=
+letta_client/types/letta_schemas_agent_file_message_schema.py,sha256=1oBl0jYC-6KcOeTBSHe3625Wv_wKh46uI1QN7w2FVIQ,2870
 letta_client/types/letta_schemas_agent_file_message_schema_content.py,sha256=7Co1Px87C9iREMZ3882Pt2NJCufjU5bOp0JgDnf46DQ,248
 letta_client/types/letta_schemas_agent_file_tool_schema.py,sha256=FlArylR1PkQDmk50zvxX_1DN3jmANkJKfD_OHs-AvV0,2973
 letta_client/types/letta_serialize_schemas_pydantic_agent_schema_agent_schema.py,sha256=lfuVF1g3x3MtgW6Ed1glJJ0OBRZtMuuYPDbt6sm7hFc,2230
@@ -439,7 +441,7 @@ letta_client/types/mcp_tool.py,sha256=o-0Z8eDpkKR8oUb_OhwxPWMFTwN7S0_6k-EvQgW0ik
 letta_client/types/mcp_tool_execute_request.py,sha256=SVH2RlYN6Lb95-8eSB_PZYpHK_lOqeEELIZTmw7jBVY,711
 letta_client/types/mcp_tool_health.py,sha256=ToG3Ut-9nvPkdFNL4gzu1LMv4bNmD7yFZ-bkMO16mzg,866
 letta_client/types/memory.py,sha256=VduNAXj6OQ9lbPKcC5mLUZmnM2yUCiWLTISbvcfs93U,1257
-letta_client/types/message.py,sha256=
+letta_client/types/message.py,sha256=ElThmDE3A1b8akYUCsIzOSW8T1ZJ_DvdZ5u5WENZpBQ,4779
 letta_client/types/message_content_item.py,sha256=mu9j_dp-XAHQjbE0bv5wxMH3fTRW_dCtiIzbhdCMauw,630
 letta_client/types/message_create.py,sha256=4R5kWzAzlK-95W_sI64LESyxS6cqs-_mvaYKpMI2pE0,1602
 letta_client/types/message_create_content.py,sha256=pKppl-N8IdqR-ShWCZ0HN-3b13plPE2vSi6YcRM3w8o,227
@@ -454,6 +456,7 @@ letta_client/types/omitted_reasoning_content.py,sha256=SW3FdgrmkcGwF4CH2cFx3KoYk
 letta_client/types/openai_types_chat_chat_completion_custom_tool_param_custom.py,sha256=qpE18SGU6NorQ3lziqS4Hy51f8HNgwi1DFhPAl9yclc,879
 letta_client/types/openai_types_chat_chat_completion_custom_tool_param_custom_format.py,sha256=tu-G3vlXYK1pYbW86y1bXdbdh_70IF7l459GYZ9RgWY,296
 letta_client/types/openai_types_chat_chat_completion_message_custom_tool_call_param_custom.py,sha256=1J_4Po51hTHIBG5MyKwH_gHZjRLvbL7Adx4QqbvWjXE,622
+letta_client/types/openai_types_chat_chat_completion_message_function_tool_call_function.py,sha256=xG-TDGyDxYP6aoNRm61_G6Af7EteDvqqROq4YOjU5G0,625
 letta_client/types/openai_types_chat_chat_completion_message_function_tool_call_param_function.py,sha256=hijQEemNQENudbTqjgpgoVYH9LUTM24TEeBPodUCdEA,630
 letta_client/types/openai_types_chat_chat_completion_named_tool_choice_custom_param_custom.py,sha256=JUyK64k43ovsIQeMtGcnmYePDMKXmPuim4LTwiiSwzc,607
 letta_client/types/openai_types_chat_chat_completion_named_tool_choice_param_function.py,sha256=exiuqKxoYd0HqYkNhlit0rFkdehpWW-7lWXN84bqZkw,603
@@ -462,6 +465,7 @@ letta_client/types/organization.py,sha256=RV0shhT3Lg3WlJG6vcIFRGb7lpf6nWCZGBofD6
 letta_client/types/organization_create.py,sha256=ixvuJTv8UDefV9fS7gHCTjr-a32eTDhjaJMay5fE4Ak,815
 letta_client/types/organization_sources_stats.py,sha256=58f-ZnVS0rrRXJHBa4y47Oa_fymEKnNiwbc8r_L7Dmo,1166
 letta_client/types/organization_update.py,sha256=ysej4hr_dVeCt-oYTWSZOpPuhp-GRUhMbk1LKuEz05w,815
+letta_client/types/paginated_agent_files.py,sha256=olgJMJ_MxsIstpqMino2oIto9a15AbGqgcPMJg9HEEI,1039
 letta_client/types/parameter_properties.py,sha256=bDVR3EsnqLvKidHyYpDYJnD66p5jueA-5_v_Ckc1apo,610
 letta_client/types/parameters_schema.py,sha256=GVeAO7gTpvMsOWoIELwbC2M-j8k11hpsxxJz_yx36kg,749
 letta_client/types/parent_tool_rule.py,sha256=UKTLrRUeNI8TwTmwUsvBurbpLZKsoqF-7ZIOag_OiZM,1134
@@ -560,6 +564,6 @@ letta_client/version.py,sha256=bttKLbIhO3UonCYQlqs600zzbQgfhCCMjeXR9WRzid4,79
 letta_client/voice/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
 letta_client/voice/client.py,sha256=EbIVOQh4HXqU9McATxwga08STk-HUwPEAUr_UHqyKHg,3748
 letta_client/voice/raw_client.py,sha256=KvM_3GXuSf51bubM0RVBnxvlf20qZTFMnaA_BzhXzjQ,5938
-letta_client-0.1.271.dist-info/METADATA,sha256=
-letta_client-0.1.271.dist-info/WHEEL,sha256=
-letta_client-0.1.271.dist-info/RECORD,,
+letta_client-0.1.272.dist-info/METADATA,sha256=jdeV4CCNv3sxzOKn1QTIPP5e-BuhiDacEnrQH5Q4jU4,5781
+letta_client-0.1.272.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+letta_client-0.1.272.dist-info/RECORD,,
{letta_client-0.1.271.dist-info → letta_client-0.1.272.dist-info}/WHEEL
RENAMED
File without changes