letta-client 0.1.217__py3-none-any.whl → 0.1.219__py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.

Potentially problematic release: this version of letta-client has been flagged as potentially problematic.

@@ -16,7 +16,20 @@ from .types import (
     UpdateAgentResponseFormat,
     UpdateAgentToolRulesItem,
 )
-from . import blocks, context, core_memory, groups, memory_variables, messages, passages, sources, templates, tools
+from . import (
+    blocks,
+    context,
+    core_memory,
+    files,
+    folders,
+    groups,
+    memory_variables,
+    messages,
+    passages,
+    sources,
+    templates,
+    tools,
+)
 from .memory_variables import MemoryVariablesListResponse
 from .messages import (
     LettaStreamingResponse,
@@ -52,6 +65,8 @@ __all__ = [
     "blocks",
     "context",
     "core_memory",
+    "files",
+    "folders",
     "groups",
     "memory_variables",
     "messages",
@@ -5,6 +5,8 @@ from ..core.client_wrapper import SyncClientWrapper
 from .context.client import ContextClient
 from .tools.client import ToolsClient
 from .sources.client import SourcesClient
+from .folders.client import FoldersClient
+from .files.client import FilesClient
 from .core_memory.client import CoreMemoryClient
 from .blocks.client import BlocksClient
 from .passages.client import PassagesClient
@@ -39,6 +41,8 @@ from ..core.client_wrapper import AsyncClientWrapper
 from .context.client import AsyncContextClient
 from .tools.client import AsyncToolsClient
 from .sources.client import AsyncSourcesClient
+from .folders.client import AsyncFoldersClient
+from .files.client import AsyncFilesClient
 from .core_memory.client import AsyncCoreMemoryClient
 from .blocks.client import AsyncBlocksClient
 from .passages.client import AsyncPassagesClient
@@ -57,6 +61,8 @@ class AgentsClient:
         self.context = ContextClient(client_wrapper=self._client_wrapper)
         self.tools = ToolsClient(client_wrapper=self._client_wrapper)
         self.sources = SourcesClient(client_wrapper=self._client_wrapper)
+        self.folders = FoldersClient(client_wrapper=self._client_wrapper)
+        self.files = FilesClient(client_wrapper=self._client_wrapper)
         self.core_memory = CoreMemoryClient(client_wrapper=self._client_wrapper)
         self.blocks = BlocksClient(client_wrapper=self._client_wrapper)
         self.passages = PassagesClient(client_wrapper=self._client_wrapper)
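
For sync callers, the practical effect of this hunk is that agent-scoped file and folder operations now hang off dedicated sub-clients rather than AgentsClient itself. A minimal sketch of reaching them; the attribute names come straight from the constructor lines above, while the project/token placeholders follow the docstring examples elsewhere in this diff:

from letta_client import Letta

client = Letta(
    project="YOUR_PROJECT",
    token="YOUR_TOKEN",
)

# New in 0.1.219, per the constructor lines above:
files_client = client.agents.files      # FilesClient
folders_client = client.agents.folders  # FoldersClient
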
@@ -1019,199 +1025,6 @@ class AgentsClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
-    def close_all_open_files(
-        self, agent_id: str, *, request_options: typing.Optional[RequestOptions] = None
-    ) -> typing.List[str]:
-        """
-        Closes all currently open files for a given agent.
-
-        This endpoint updates the file state for the agent so that no files are marked as open.
-        Typically used to reset the working memory view for the agent.
-
-        Parameters
-        ----------
-        agent_id : str
-
-        request_options : typing.Optional[RequestOptions]
-            Request-specific configuration.
-
-        Returns
-        -------
-        typing.List[str]
-            Successful Response
-
-        Examples
-        --------
-        from letta_client import Letta
-
-        client = Letta(
-            project="YOUR_PROJECT",
-            token="YOUR_TOKEN",
-        )
-        client.agents.close_all_open_files(
-            agent_id="agent_id",
-        )
-        """
-        _response = self._client_wrapper.httpx_client.request(
-            f"v1/agents/{jsonable_encoder(agent_id)}/files/close-all",
-            method="PATCH",
-            request_options=request_options,
-        )
-        try:
-            if 200 <= _response.status_code < 300:
-                return typing.cast(
-                    typing.List[str],
-                    construct_type(
-                        type_=typing.List[str],  # type: ignore
-                        object_=_response.json(),
-                    ),
-                )
-            if _response.status_code == 422:
-                raise UnprocessableEntityError(
-                    typing.cast(
-                        HttpValidationError,
-                        construct_type(
-                            type_=HttpValidationError,  # type: ignore
-                            object_=_response.json(),
-                        ),
-                    )
-                )
-            _response_json = _response.json()
-        except JSONDecodeError:
-            raise ApiError(status_code=_response.status_code, body=_response.text)
-        raise ApiError(status_code=_response.status_code, body=_response_json)
-
-    def open_file(
-        self, agent_id: str, file_id: str, *, request_options: typing.Optional[RequestOptions] = None
-    ) -> typing.List[str]:
-        """
-        Opens a specific file for a given agent.
-
-        This endpoint marks a specific file as open in the agent's file state.
-        The file will be included in the agent's working memory view.
-        Returns a list of file names that were closed due to LRU eviction.
-
-        Parameters
-        ----------
-        agent_id : str
-
-        file_id : str
-
-        request_options : typing.Optional[RequestOptions]
-            Request-specific configuration.
-
-        Returns
-        -------
-        typing.List[str]
-            Successful Response
-
-        Examples
-        --------
-        from letta_client import Letta
-
-        client = Letta(
-            project="YOUR_PROJECT",
-            token="YOUR_TOKEN",
-        )
-        client.agents.open_file(
-            agent_id="agent_id",
-            file_id="file_id",
-        )
-        """
-        _response = self._client_wrapper.httpx_client.request(
-            f"v1/agents/{jsonable_encoder(agent_id)}/files/{jsonable_encoder(file_id)}/open",
-            method="PATCH",
-            request_options=request_options,
-        )
-        try:
-            if 200 <= _response.status_code < 300:
-                return typing.cast(
-                    typing.List[str],
-                    construct_type(
-                        type_=typing.List[str],  # type: ignore
-                        object_=_response.json(),
-                    ),
-                )
-            if _response.status_code == 422:
-                raise UnprocessableEntityError(
-                    typing.cast(
-                        HttpValidationError,
-                        construct_type(
-                            type_=HttpValidationError,  # type: ignore
-                            object_=_response.json(),
-                        ),
-                    )
-                )
-            _response_json = _response.json()
-        except JSONDecodeError:
-            raise ApiError(status_code=_response.status_code, body=_response.text)
-        raise ApiError(status_code=_response.status_code, body=_response_json)
-
-    def close_file(
-        self, agent_id: str, file_id: str, *, request_options: typing.Optional[RequestOptions] = None
-    ) -> typing.Optional[typing.Any]:
-        """
-        Closes a specific file for a given agent.
-
-        This endpoint marks a specific file as closed in the agent's file state.
-        The file will be removed from the agent's working memory view.
-
-        Parameters
-        ----------
-        agent_id : str
-
-        file_id : str
-
-        request_options : typing.Optional[RequestOptions]
-            Request-specific configuration.
-
-        Returns
-        -------
-        typing.Optional[typing.Any]
-            Successful Response
-
-        Examples
-        --------
-        from letta_client import Letta
-
-        client = Letta(
-            project="YOUR_PROJECT",
-            token="YOUR_TOKEN",
-        )
-        client.agents.close_file(
-            agent_id="agent_id",
-            file_id="file_id",
-        )
-        """
-        _response = self._client_wrapper.httpx_client.request(
-            f"v1/agents/{jsonable_encoder(agent_id)}/files/{jsonable_encoder(file_id)}/close",
-            method="PATCH",
-            request_options=request_options,
-        )
-        try:
-            if 200 <= _response.status_code < 300:
-                return typing.cast(
-                    typing.Optional[typing.Any],
-                    construct_type(
-                        type_=typing.Optional[typing.Any],  # type: ignore
-                        object_=_response.json(),
-                    ),
-                )
-            if _response.status_code == 422:
-                raise UnprocessableEntityError(
-                    typing.cast(
-                        HttpValidationError,
-                        construct_type(
-                            type_=HttpValidationError,  # type: ignore
-                            object_=_response.json(),
-                        ),
-                    )
-                )
-            _response_json = _response.json()
-        except JSONDecodeError:
-            raise ApiError(status_code=_response.status_code, body=_response.text)
-        raise ApiError(status_code=_response.status_code, body=_response_json)
-
     def summarize_agent_conversation(
         self, agent_id: str, *, max_message_length: int, request_options: typing.Optional[RequestOptions] = None
     ) -> AgentState:
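
Net effect of this hunk: the sync AgentsClient loses close_all_open_files, open_file, and close_file in 0.1.219. A short before/after sketch; the "before" calls are copied from the removed docstrings, while the "after" lines are only an assumption that the same PATCH endpoints are now reached through the new files sub-client (the method names are not confirmed by this diff):

from letta_client import Letta

client = Letta(project="YOUR_PROJECT", token="YOUR_TOKEN")

# 0.1.217 and earlier (removed in this hunk):
#   client.agents.open_file(agent_id="agent_id", file_id="file_id")
#   client.agents.close_file(agent_id="agent_id", file_id="file_id")
#   client.agents.close_all_open_files(agent_id="agent_id")

# 0.1.219 (assumption, based on the FilesClient wiring shown earlier in this diff):
#   client.agents.files.open(agent_id="agent_id", file_id="file_id")
#   client.agents.files.close(agent_id="agent_id", file_id="file_id")
#   client.agents.files.close_all(agent_id="agent_id")
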
@@ -1373,6 +1186,8 @@ class AsyncAgentsClient:
         self.context = AsyncContextClient(client_wrapper=self._client_wrapper)
         self.tools = AsyncToolsClient(client_wrapper=self._client_wrapper)
         self.sources = AsyncSourcesClient(client_wrapper=self._client_wrapper)
+        self.folders = AsyncFoldersClient(client_wrapper=self._client_wrapper)
+        self.files = AsyncFilesClient(client_wrapper=self._client_wrapper)
         self.core_memory = AsyncCoreMemoryClient(client_wrapper=self._client_wrapper)
         self.blocks = AsyncBlocksClient(client_wrapper=self._client_wrapper)
         self.passages = AsyncPassagesClient(client_wrapper=self._client_wrapper)
@@ -2399,223 +2214,6 @@ class AsyncAgentsClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
-    async def close_all_open_files(
-        self, agent_id: str, *, request_options: typing.Optional[RequestOptions] = None
-    ) -> typing.List[str]:
-        """
-        Closes all currently open files for a given agent.
-
-        This endpoint updates the file state for the agent so that no files are marked as open.
-        Typically used to reset the working memory view for the agent.
-
-        Parameters
-        ----------
-        agent_id : str
-
-        request_options : typing.Optional[RequestOptions]
-            Request-specific configuration.
-
-        Returns
-        -------
-        typing.List[str]
-            Successful Response
-
-        Examples
-        --------
-        import asyncio
-
-        from letta_client import AsyncLetta
-
-        client = AsyncLetta(
-            project="YOUR_PROJECT",
-            token="YOUR_TOKEN",
-        )
-
-
-        async def main() -> None:
-            await client.agents.close_all_open_files(
-                agent_id="agent_id",
-            )
-
-
-        asyncio.run(main())
-        """
-        _response = await self._client_wrapper.httpx_client.request(
-            f"v1/agents/{jsonable_encoder(agent_id)}/files/close-all",
-            method="PATCH",
-            request_options=request_options,
-        )
-        try:
-            if 200 <= _response.status_code < 300:
-                return typing.cast(
-                    typing.List[str],
-                    construct_type(
-                        type_=typing.List[str],  # type: ignore
-                        object_=_response.json(),
-                    ),
-                )
-            if _response.status_code == 422:
-                raise UnprocessableEntityError(
-                    typing.cast(
-                        HttpValidationError,
-                        construct_type(
-                            type_=HttpValidationError,  # type: ignore
-                            object_=_response.json(),
-                        ),
-                    )
-                )
-            _response_json = _response.json()
-        except JSONDecodeError:
-            raise ApiError(status_code=_response.status_code, body=_response.text)
-        raise ApiError(status_code=_response.status_code, body=_response_json)
-
-    async def open_file(
-        self, agent_id: str, file_id: str, *, request_options: typing.Optional[RequestOptions] = None
-    ) -> typing.List[str]:
-        """
-        Opens a specific file for a given agent.
-
-        This endpoint marks a specific file as open in the agent's file state.
-        The file will be included in the agent's working memory view.
-        Returns a list of file names that were closed due to LRU eviction.
-
-        Parameters
-        ----------
-        agent_id : str
-
-        file_id : str
-
-        request_options : typing.Optional[RequestOptions]
-            Request-specific configuration.
-
-        Returns
-        -------
-        typing.List[str]
-            Successful Response
-
-        Examples
-        --------
-        import asyncio
-
-        from letta_client import AsyncLetta
-
-        client = AsyncLetta(
-            project="YOUR_PROJECT",
-            token="YOUR_TOKEN",
-        )
-
-
-        async def main() -> None:
-            await client.agents.open_file(
-                agent_id="agent_id",
-                file_id="file_id",
-            )
-
-
-        asyncio.run(main())
-        """
-        _response = await self._client_wrapper.httpx_client.request(
-            f"v1/agents/{jsonable_encoder(agent_id)}/files/{jsonable_encoder(file_id)}/open",
-            method="PATCH",
-            request_options=request_options,
-        )
-        try:
-            if 200 <= _response.status_code < 300:
-                return typing.cast(
-                    typing.List[str],
-                    construct_type(
-                        type_=typing.List[str],  # type: ignore
-                        object_=_response.json(),
-                    ),
-                )
-            if _response.status_code == 422:
-                raise UnprocessableEntityError(
-                    typing.cast(
-                        HttpValidationError,
-                        construct_type(
-                            type_=HttpValidationError,  # type: ignore
-                            object_=_response.json(),
-                        ),
-                    )
-                )
-            _response_json = _response.json()
-        except JSONDecodeError:
-            raise ApiError(status_code=_response.status_code, body=_response.text)
-        raise ApiError(status_code=_response.status_code, body=_response_json)
-
-    async def close_file(
-        self, agent_id: str, file_id: str, *, request_options: typing.Optional[RequestOptions] = None
-    ) -> typing.Optional[typing.Any]:
-        """
-        Closes a specific file for a given agent.
-
-        This endpoint marks a specific file as closed in the agent's file state.
-        The file will be removed from the agent's working memory view.
-
-        Parameters
-        ----------
-        agent_id : str
-
-        file_id : str
-
-        request_options : typing.Optional[RequestOptions]
-            Request-specific configuration.
-
-        Returns
-        -------
-        typing.Optional[typing.Any]
-            Successful Response
-
-        Examples
-        --------
-        import asyncio
-
-        from letta_client import AsyncLetta
-
-        client = AsyncLetta(
-            project="YOUR_PROJECT",
-            token="YOUR_TOKEN",
-        )
-
-
-        async def main() -> None:
-            await client.agents.close_file(
-                agent_id="agent_id",
-                file_id="file_id",
-            )
-
-
-        asyncio.run(main())
-        """
-        _response = await self._client_wrapper.httpx_client.request(
-            f"v1/agents/{jsonable_encoder(agent_id)}/files/{jsonable_encoder(file_id)}/close",
-            method="PATCH",
-            request_options=request_options,
-        )
-        try:
-            if 200 <= _response.status_code < 300:
-                return typing.cast(
-                    typing.Optional[typing.Any],
-                    construct_type(
-                        type_=typing.Optional[typing.Any],  # type: ignore
-                        object_=_response.json(),
-                    ),
-                )
-            if _response.status_code == 422:
-                raise UnprocessableEntityError(
-                    typing.cast(
-                        HttpValidationError,
-                        construct_type(
-                            type_=HttpValidationError,  # type: ignore
-                            object_=_response.json(),
-                        ),
-                    )
-                )
-            _response_json = _response.json()
-        except JSONDecodeError:
-            raise ApiError(status_code=_response.status_code, body=_response.text)
-        raise ApiError(status_code=_response.status_code, body=_response_json)
-
     async def summarize_agent_conversation(
         self, agent_id: str, *, max_message_length: int, request_options: typing.Optional[RequestOptions] = None
     ) -> AgentState:
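
The async surface changes the same way: AsyncAgentsClient drops these three coroutines, while its constructor hunk above attaches AsyncFoldersClient and AsyncFilesClient. A minimal async sketch in the style of the removed docstring examples; the sub-client attribute is confirmed by this diff, but the awaited method name in the comment is an assumption:

import asyncio

from letta_client import AsyncLetta

client = AsyncLetta(
    project="YOUR_PROJECT",
    token="YOUR_TOKEN",
)


async def main() -> None:
    files_client = client.agents.files  # AsyncFilesClient, per the constructor hunk above
    # Hypothetical replacement for the removed coroutine (name not confirmed by this diff):
    # await files_client.close_all(agent_id="agent_id")


asyncio.run(main())
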
@@ -0,0 +1,2 @@
+# This file was auto-generated by Fern from our API Definition.
+