letta-client 0.1.276__py3-none-any.whl → 0.1.279__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of letta-client might be problematic.
- letta_client/__init__.py +2 -0
- letta_client/core/client_wrapper.py +2 -2
- letta_client/runs/__init__.py +2 -1
- letta_client/runs/client.py +157 -4
- letta_client/runs/raw_client.py +217 -0
- letta_client/runs/types/__init__.py +7 -0
- letta_client/runs/types/letta_streaming_response.py +25 -0
- letta_client/sources/files/client.py +20 -2
- letta_client/sources/files/raw_client.py +10 -0
- letta_client/templates/types/templates_get_template_snapshot_response_agents_item_properties.py +1 -0
- {letta_client-0.1.276.dist-info → letta_client-0.1.279.dist-info}/METADATA +2 -2
- {letta_client-0.1.276.dist-info → letta_client-0.1.279.dist-info}/RECORD +13 -11
- {letta_client-0.1.276.dist-info → letta_client-0.1.279.dist-info}/WHEEL +0 -0
letta_client/__init__.py
CHANGED
@@ -370,6 +370,7 @@ from .client_side_access_tokens import (
 from .environment import LettaEnvironment
 from .groups import GroupCreateManagerConfig, GroupUpdateManagerConfig
 from .projects import ProjectsListResponse, ProjectsListResponseProjectsItem
+from .runs import LettaStreamingResponse
 from .steps import StepsListRequestFeedback
 from .templates import (
     TemplatesCreateTemplateResponse,
@@ -623,6 +624,7 @@ __all__ = [
     "LettaSerializeSchemasPydanticAgentSchemaToolSchema",
     "LettaStopReason",
     "LettaStreamingRequest",
+    "LettaStreamingResponse",
     "LettaUsageStatistics",
     "LettaUserMessageContentUnion",
     "ListMcpServersResponseValue",
letta_client/core/client_wrapper.py
CHANGED
@@ -24,10 +24,10 @@ class BaseClientWrapper:

     def get_headers(self) -> typing.Dict[str, str]:
         headers: typing.Dict[str, str] = {
-            "User-Agent": "letta-client/0.1.276",
+            "User-Agent": "letta-client/0.1.279",
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "letta-client",
-            "X-Fern-SDK-Version": "0.1.276",
+            "X-Fern-SDK-Version": "0.1.279",
             **(self.get_custom_headers() or {}),
         }
         if self._project is not None:
letta_client/runs/__init__.py
CHANGED
letta_client/runs/client.py
CHANGED
@@ -8,8 +8,12 @@ from ..types.run import Run
 from .messages.client import AsyncMessagesClient, MessagesClient
 from .raw_client import AsyncRawRunsClient, RawRunsClient
 from .steps.client import AsyncStepsClient, StepsClient
+from .types.letta_streaming_response import LettaStreamingResponse
 from .usage.client import AsyncUsageClient, UsageClient

+# this is used as the default value for optional parameters
+OMIT = typing.cast(typing.Any, ...)
+

 class RunsClient:
     def __init__(self, *, client_wrapper: SyncClientWrapper):
@@ -35,6 +39,7 @@ class RunsClient:
         self,
         *,
         agent_ids: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None,
+        background: typing.Optional[bool] = None,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> typing.List[Run]:
         """
@@ -45,6 +50,9 @@ class RunsClient:
         agent_ids : typing.Optional[typing.Union[str, typing.Sequence[str]]]
             The unique identifier of the agent associated with the run.

+        background : typing.Optional[bool]
+            If True, filters for runs that were created in background mode.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

@@ -63,13 +71,14 @@ class RunsClient:
         )
         client.runs.list()
         """
-        _response = self._raw_client.list(agent_ids=agent_ids, request_options=request_options)
+        _response = self._raw_client.list(agent_ids=agent_ids, background=background, request_options=request_options)
         return _response.data

     def list_active(
         self,
         *,
         agent_ids: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None,
+        background: typing.Optional[bool] = None,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> typing.List[Run]:
         """
@@ -80,6 +89,9 @@ class RunsClient:
         agent_ids : typing.Optional[typing.Union[str, typing.Sequence[str]]]
             The unique identifier of the agent associated with the run.

+        background : typing.Optional[bool]
+            If True, filters for runs that were created in background mode.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

@@ -98,7 +110,9 @@ class RunsClient:
         )
         client.runs.list_active()
         """
-        _response = self._raw_client.list_active(agent_ids=agent_ids, request_options=request_options)
+        _response = self._raw_client.list_active(
+            agent_ids=agent_ids, background=background, request_options=request_options
+        )
         return _response.data

     def retrieve(self, run_id: str, *, request_options: typing.Optional[RequestOptions] = None) -> Run:
@@ -163,6 +177,65 @@ class RunsClient:
         _response = self._raw_client.delete(run_id, request_options=request_options)
         return _response.data

+    def stream(
+        self,
+        run_id: str,
+        *,
+        starting_after: typing.Optional[int] = OMIT,
+        include_pings: typing.Optional[bool] = OMIT,
+        poll_interval: typing.Optional[float] = OMIT,
+        batch_size: typing.Optional[int] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> typing.Iterator[LettaStreamingResponse]:
+        """
+        Parameters
+        ----------
+        run_id : str
+
+        starting_after : typing.Optional[int]
+            Sequence id to use as a cursor for pagination. Response will start streaming after this chunk sequence id
+
+        include_pings : typing.Optional[bool]
+            Whether to include periodic keepalive ping messages in the stream to prevent connection timeouts.
+
+        poll_interval : typing.Optional[float]
+            Seconds to wait between polls when no new data.
+
+        batch_size : typing.Optional[int]
+            Number of entries to read per batch.
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Yields
+        ------
+        typing.Iterator[LettaStreamingResponse]
+            Successful response
+
+        Examples
+        --------
+        from letta_client import Letta
+
+        client = Letta(
+            project="YOUR_PROJECT",
+            token="YOUR_TOKEN",
+        )
+        response = client.runs.stream(
+            run_id="run_id",
+        )
+        for chunk in response:
+            yield chunk
+        """
+        with self._raw_client.stream(
+            run_id,
+            starting_after=starting_after,
+            include_pings=include_pings,
+            poll_interval=poll_interval,
+            batch_size=batch_size,
+            request_options=request_options,
+        ) as r:
+            yield from r.data
+

 class AsyncRunsClient:
     def __init__(self, *, client_wrapper: AsyncClientWrapper):
@@ -188,6 +261,7 @@ class AsyncRunsClient:
         self,
         *,
         agent_ids: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None,
+        background: typing.Optional[bool] = None,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> typing.List[Run]:
         """
@@ -198,6 +272,9 @@ class AsyncRunsClient:
         agent_ids : typing.Optional[typing.Union[str, typing.Sequence[str]]]
             The unique identifier of the agent associated with the run.

+        background : typing.Optional[bool]
+            If True, filters for runs that were created in background mode.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

@@ -224,13 +301,16 @@ class AsyncRunsClient:

         asyncio.run(main())
         """
-        _response = await self._raw_client.list(agent_ids=agent_ids, request_options=request_options)
+        _response = await self._raw_client.list(
+            agent_ids=agent_ids, background=background, request_options=request_options
+        )
         return _response.data

     async def list_active(
         self,
         *,
         agent_ids: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None,
+        background: typing.Optional[bool] = None,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> typing.List[Run]:
         """
@@ -241,6 +321,9 @@ class AsyncRunsClient:
         agent_ids : typing.Optional[typing.Union[str, typing.Sequence[str]]]
             The unique identifier of the agent associated with the run.

+        background : typing.Optional[bool]
+            If True, filters for runs that were created in background mode.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

@@ -267,7 +350,9 @@ class AsyncRunsClient:

         asyncio.run(main())
         """
-        _response = await self._raw_client.list_active(agent_ids=agent_ids, request_options=request_options)
+        _response = await self._raw_client.list_active(
+            agent_ids=agent_ids, background=background, request_options=request_options
+        )
         return _response.data

     async def retrieve(self, run_id: str, *, request_options: typing.Optional[RequestOptions] = None) -> Run:
@@ -347,3 +432,71 @@ class AsyncRunsClient:
         """
         _response = await self._raw_client.delete(run_id, request_options=request_options)
         return _response.data
+
+    async def stream(
+        self,
+        run_id: str,
+        *,
+        starting_after: typing.Optional[int] = OMIT,
+        include_pings: typing.Optional[bool] = OMIT,
+        poll_interval: typing.Optional[float] = OMIT,
+        batch_size: typing.Optional[int] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> typing.AsyncIterator[LettaStreamingResponse]:
+        """
+        Parameters
+        ----------
+        run_id : str
+
+        starting_after : typing.Optional[int]
+            Sequence id to use as a cursor for pagination. Response will start streaming after this chunk sequence id
+
+        include_pings : typing.Optional[bool]
+            Whether to include periodic keepalive ping messages in the stream to prevent connection timeouts.
+
+        poll_interval : typing.Optional[float]
+            Seconds to wait between polls when no new data.
+
+        batch_size : typing.Optional[int]
+            Number of entries to read per batch.
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Yields
+        ------
+        typing.AsyncIterator[LettaStreamingResponse]
+            Successful response
+
+        Examples
+        --------
+        import asyncio
+
+        from letta_client import AsyncLetta
+
+        client = AsyncLetta(
+            project="YOUR_PROJECT",
+            token="YOUR_TOKEN",
+        )
+
+
+        async def main() -> None:
+            response = await client.runs.stream(
+                run_id="run_id",
+            )
+            async for chunk in response:
+                yield chunk
+
+
+        asyncio.run(main())
+        """
+        async with self._raw_client.stream(
+            run_id,
+            starting_after=starting_after,
+            include_pings=include_pings,
+            poll_interval=poll_interval,
+            batch_size=batch_size,
+            request_options=request_options,
+        ) as r:
+            async for _chunk in r.data:
+                yield _chunk
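For orientation, the docstring examples above combine into a short usage sketch of what this release adds to the runs client. It is illustrative only: the project, token, and run id are placeholders, and the parameter values are arbitrary.

from letta_client import Letta

client = Letta(
    project="YOUR_PROJECT",
    token="YOUR_TOKEN",
)

# New in 0.1.279: restrict run listings to runs created in background mode.
background_runs = client.runs.list(background=True)

# New in 0.1.279: consume the server-sent-event stream of an existing run.
for chunk in client.runs.stream(
    run_id="run_id",
    starting_after=0,    # cursor: resume after this chunk sequence id
    include_pings=True,  # keep the connection alive with periodic ping messages
):
    print(chunk)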
letta_client/runs/raw_client.py
CHANGED
@@ -1,8 +1,11 @@
 # This file was auto-generated by Fern from our API Definition.

+import contextlib
+import json
 import typing
 from json.decoder import JSONDecodeError

+import httpx_sse
 from ..core.api_error import ApiError
 from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
 from ..core.http_response import AsyncHttpResponse, HttpResponse
@@ -12,6 +15,10 @@ from ..core.unchecked_base_model import construct_type
 from ..errors.unprocessable_entity_error import UnprocessableEntityError
 from ..types.http_validation_error import HttpValidationError
 from ..types.run import Run
+from .types.letta_streaming_response import LettaStreamingResponse
+
+# this is used as the default value for optional parameters
+OMIT = typing.cast(typing.Any, ...)


 class RawRunsClient:
@@ -22,6 +29,7 @@ class RawRunsClient:
         self,
         *,
         agent_ids: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None,
+        background: typing.Optional[bool] = None,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> HttpResponse[typing.List[Run]]:
         """
@@ -32,6 +40,9 @@ class RawRunsClient:
         agent_ids : typing.Optional[typing.Union[str, typing.Sequence[str]]]
             The unique identifier of the agent associated with the run.

+        background : typing.Optional[bool]
+            If True, filters for runs that were created in background mode.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

@@ -45,6 +56,7 @@ class RawRunsClient:
             method="GET",
             params={
                 "agent_ids": agent_ids,
+                "background": background,
             },
             request_options=request_options,
         )
@@ -78,6 +90,7 @@ class RawRunsClient:
         self,
         *,
         agent_ids: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None,
+        background: typing.Optional[bool] = None,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> HttpResponse[typing.List[Run]]:
         """
@@ -88,6 +101,9 @@ class RawRunsClient:
         agent_ids : typing.Optional[typing.Union[str, typing.Sequence[str]]]
             The unique identifier of the agent associated with the run.

+        background : typing.Optional[bool]
+            If True, filters for runs that were created in background mode.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

@@ -101,6 +117,7 @@ class RawRunsClient:
             method="GET",
             params={
                 "agent_ids": agent_ids,
+                "background": background,
             },
             request_options=request_options,
         )
@@ -224,6 +241,101 @@ class RawRunsClient:
             raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
         raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

+    @contextlib.contextmanager
+    def stream(
+        self,
+        run_id: str,
+        *,
+        starting_after: typing.Optional[int] = OMIT,
+        include_pings: typing.Optional[bool] = OMIT,
+        poll_interval: typing.Optional[float] = OMIT,
+        batch_size: typing.Optional[int] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> typing.Iterator[HttpResponse[typing.Iterator[LettaStreamingResponse]]]:
+        """
+        Parameters
+        ----------
+        run_id : str
+
+        starting_after : typing.Optional[int]
+            Sequence id to use as a cursor for pagination. Response will start streaming after this chunk sequence id
+
+        include_pings : typing.Optional[bool]
+            Whether to include periodic keepalive ping messages in the stream to prevent connection timeouts.
+
+        poll_interval : typing.Optional[float]
+            Seconds to wait between polls when no new data.
+
+        batch_size : typing.Optional[int]
+            Number of entries to read per batch.
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Yields
+        ------
+        typing.Iterator[HttpResponse[typing.Iterator[LettaStreamingResponse]]]
+            Successful response
+        """
+        with self._client_wrapper.httpx_client.stream(
+            f"v1/runs/{jsonable_encoder(run_id)}/stream",
+            method="POST",
+            json={
+                "starting_after": starting_after,
+                "include_pings": include_pings,
+                "poll_interval": poll_interval,
+                "batch_size": batch_size,
+            },
+            headers={
+                "content-type": "application/json",
+            },
+            request_options=request_options,
+            omit=OMIT,
+        ) as _response:
+
+            def _stream() -> HttpResponse[typing.Iterator[LettaStreamingResponse]]:
+                try:
+                    if 200 <= _response.status_code < 300:
+
+                        def _iter():
+                            _event_source = httpx_sse.EventSource(_response)
+                            for _sse in _event_source.iter_sse():
+                                if _sse.data == None:
+                                    return
+                                try:
+                                    yield typing.cast(
+                                        LettaStreamingResponse,
+                                        construct_type(
+                                            type_=LettaStreamingResponse,  # type: ignore
+                                            object_=json.loads(_sse.data),
+                                        ),
+                                    )
+                                except Exception:
+                                    pass
+                            return
+
+                        return HttpResponse(response=_response, data=_iter())
+                    _response.read()
+                    if _response.status_code == 422:
+                        raise UnprocessableEntityError(
+                            headers=dict(_response.headers),
+                            body=typing.cast(
+                                HttpValidationError,
+                                construct_type(
+                                    type_=HttpValidationError,  # type: ignore
+                                    object_=_response.json(),
+                                ),
+                            ),
+                        )
+                    _response_json = _response.json()
+                except JSONDecodeError:
+                    raise ApiError(
+                        status_code=_response.status_code, headers=dict(_response.headers), body=_response.text
+                    )
+                raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
+
+            yield _stream()
+

 class AsyncRawRunsClient:
     def __init__(self, *, client_wrapper: AsyncClientWrapper):
@@ -233,6 +345,7 @@ class AsyncRawRunsClient:
         self,
         *,
         agent_ids: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None,
+        background: typing.Optional[bool] = None,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> AsyncHttpResponse[typing.List[Run]]:
         """
@@ -243,6 +356,9 @@ class AsyncRawRunsClient:
         agent_ids : typing.Optional[typing.Union[str, typing.Sequence[str]]]
             The unique identifier of the agent associated with the run.

+        background : typing.Optional[bool]
+            If True, filters for runs that were created in background mode.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

@@ -256,6 +372,7 @@ class AsyncRawRunsClient:
             method="GET",
             params={
                 "agent_ids": agent_ids,
+                "background": background,
             },
             request_options=request_options,
         )
@@ -289,6 +406,7 @@ class AsyncRawRunsClient:
         self,
         *,
         agent_ids: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None,
+        background: typing.Optional[bool] = None,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> AsyncHttpResponse[typing.List[Run]]:
         """
@@ -299,6 +417,9 @@ class AsyncRawRunsClient:
         agent_ids : typing.Optional[typing.Union[str, typing.Sequence[str]]]
             The unique identifier of the agent associated with the run.

+        background : typing.Optional[bool]
+            If True, filters for runs that were created in background mode.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

@@ -312,6 +433,7 @@ class AsyncRawRunsClient:
             method="GET",
             params={
                 "agent_ids": agent_ids,
+                "background": background,
             },
             request_options=request_options,
         )
@@ -438,3 +560,98 @@ class AsyncRawRunsClient:
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
         raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
+
+    @contextlib.asynccontextmanager
+    async def stream(
+        self,
+        run_id: str,
+        *,
+        starting_after: typing.Optional[int] = OMIT,
+        include_pings: typing.Optional[bool] = OMIT,
+        poll_interval: typing.Optional[float] = OMIT,
+        batch_size: typing.Optional[int] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[LettaStreamingResponse]]]:
+        """
+        Parameters
+        ----------
+        run_id : str
+
+        starting_after : typing.Optional[int]
+            Sequence id to use as a cursor for pagination. Response will start streaming after this chunk sequence id
+
+        include_pings : typing.Optional[bool]
+            Whether to include periodic keepalive ping messages in the stream to prevent connection timeouts.
+
+        poll_interval : typing.Optional[float]
+            Seconds to wait between polls when no new data.
+
+        batch_size : typing.Optional[int]
+            Number of entries to read per batch.
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Yields
+        ------
+        typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[LettaStreamingResponse]]]
+            Successful response
+        """
+        async with self._client_wrapper.httpx_client.stream(
+            f"v1/runs/{jsonable_encoder(run_id)}/stream",
+            method="POST",
+            json={
+                "starting_after": starting_after,
+                "include_pings": include_pings,
+                "poll_interval": poll_interval,
+                "batch_size": batch_size,
+            },
+            headers={
+                "content-type": "application/json",
+            },
+            request_options=request_options,
+            omit=OMIT,
+        ) as _response:
+
+            async def _stream() -> AsyncHttpResponse[typing.AsyncIterator[LettaStreamingResponse]]:
+                try:
+                    if 200 <= _response.status_code < 300:
+
+                        async def _iter():
+                            _event_source = httpx_sse.EventSource(_response)
+                            async for _sse in _event_source.aiter_sse():
+                                if _sse.data == None:
+                                    return
+                                try:
+                                    yield typing.cast(
+                                        LettaStreamingResponse,
+                                        construct_type(
+                                            type_=LettaStreamingResponse,  # type: ignore
+                                            object_=json.loads(_sse.data),
+                                        ),
+                                    )
+                                except Exception:
+                                    pass
+                            return
+
+                        return AsyncHttpResponse(response=_response, data=_iter())
+                    await _response.aread()
+                    if _response.status_code == 422:
+                        raise UnprocessableEntityError(
+                            headers=dict(_response.headers),
+                            body=typing.cast(
+                                HttpValidationError,
+                                construct_type(
+                                    type_=HttpValidationError,  # type: ignore
+                                    object_=_response.json(),
+                                ),
+                            ),
+                        )
+                    _response_json = _response.json()
+                except JSONDecodeError:
+                    raise ApiError(
+                        status_code=_response.status_code, headers=dict(_response.headers), body=_response.text
+                    )
+                raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
+
+            yield await _stream()
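For readers who want to see what the generated raw client is doing, the stream endpoint reduces to a POST against v1/runs/{run_id}/stream whose response is parsed as server-sent events. Below is a rough, hand-written equivalent using httpx and httpx_sse directly; the base URL and bearer-token header are assumptions, while the path, method, and request-body keys come from the code above.

import json

import httpx
import httpx_sse

# Assumed base URL and auth scheme; only the endpoint shape is taken from the generated client.
with httpx.Client(
    base_url="https://api.letta.com",
    headers={"Authorization": "Bearer YOUR_TOKEN"},
    timeout=None,  # streaming responses can stay open well past the default timeout
) as http:
    with httpx_sse.connect_sse(
        http,
        "POST",
        "/v1/runs/run_id/stream",
        json={"starting_after": 0, "include_pings": True, "poll_interval": 1.0, "batch_size": 100},
    ) as event_source:
        for sse in event_source.iter_sse():
            # Each SSE payload is a JSON-encoded LettaStreamingResponse variant.
            print(json.loads(sse.data))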
letta_client/runs/types/letta_streaming_response.py
ADDED
@@ -0,0 +1,25 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+from ...types.assistant_message import AssistantMessage
+from ...types.letta_ping import LettaPing
+from ...types.letta_stop_reason import LettaStopReason
+from ...types.letta_usage_statistics import LettaUsageStatistics
+from ...types.reasoning_message import ReasoningMessage
+from ...types.system_message import SystemMessage
+from ...types.tool_call_message import ToolCallMessage
+from ...types.tool_return_message import ToolReturnMessage
+from ...types.user_message import UserMessage
+
+LettaStreamingResponse = typing.Union[
+    SystemMessage,
+    UserMessage,
+    ReasoningMessage,
+    ToolCallMessage,
+    ToolReturnMessage,
+    AssistantMessage,
+    LettaPing,
+    LettaStopReason,
+    LettaUsageStatistics,
+]
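Because LettaStreamingResponse is a plain typing.Union of Pydantic models, stream consumers can narrow each chunk with isinstance checks against the member types imported above. A minimal sketch (the handling logic and run id are illustrative):

from letta_client import Letta
from letta_client.types.assistant_message import AssistantMessage
from letta_client.types.letta_ping import LettaPing
from letta_client.types.letta_stop_reason import LettaStopReason

client = Letta(
    project="YOUR_PROJECT",
    token="YOUR_TOKEN",
)

for chunk in client.runs.stream(run_id="run_id", include_pings=True):
    if isinstance(chunk, LettaPing):
        continue  # keepalive only, nothing to render
    if isinstance(chunk, LettaStopReason):
        break  # the run has finished streaming
    if isinstance(chunk, AssistantMessage):
        print(chunk)  # render however the application prefers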
letta_client/sources/files/client.py
CHANGED
@@ -85,6 +85,7 @@ class FilesClient:
         limit: typing.Optional[int] = None,
         after: typing.Optional[str] = None,
         include_content: typing.Optional[bool] = None,
+        check_status_updates: typing.Optional[bool] = None,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> typing.List[FileMetadata]:
         """
@@ -103,6 +104,9 @@
         include_content : typing.Optional[bool]
             Whether to include full file content

+        check_status_updates : typing.Optional[bool]
+            Whether to check and update file processing status (from the vector db service). If False, will not fetch and update the status, which may lead to performance gains.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

@@ -124,7 +128,12 @@
         )
         """
         _response = self._raw_client.list(
-            source_id, limit=limit, after=after, include_content=include_content, request_options=request_options
+            source_id,
+            limit=limit,
+            after=after,
+            include_content=include_content,
+            check_status_updates=check_status_updates,
+            request_options=request_options,
         )
         return _response.data

@@ -242,6 +251,7 @@ class AsyncFilesClient:
         limit: typing.Optional[int] = None,
         after: typing.Optional[str] = None,
         include_content: typing.Optional[bool] = None,
+        check_status_updates: typing.Optional[bool] = None,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> typing.List[FileMetadata]:
         """
@@ -260,6 +270,9 @@ class AsyncFilesClient:
         include_content : typing.Optional[bool]
             Whether to include full file content

+        check_status_updates : typing.Optional[bool]
+            Whether to check and update file processing status (from the vector db service). If False, will not fetch and update the status, which may lead to performance gains.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

@@ -289,7 +302,12 @@ class AsyncFilesClient:
         asyncio.run(main())
         """
         _response = await self._raw_client.list(
-            source_id, limit=limit, after=after, include_content=include_content, request_options=request_options
+            source_id,
+            limit=limit,
+            after=after,
+            include_content=include_content,
+            check_status_updates=check_status_updates,
+            request_options=request_options,
         )
         return _response.data
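A quick sketch of the new flag on source file listings; the source id is a placeholder. Per the docstring, passing check_status_updates=False skips the per-file processing-status refresh against the vector db service in exchange for a faster response.

from letta_client import Letta

client = Letta(
    project="YOUR_PROJECT",
    token="YOUR_TOKEN",
)

# Faster listing: skip the processing-status refresh, at the cost of possibly stale status fields.
files = client.sources.files.list(
    source_id="source_id",
    limit=50,
    include_content=False,
    check_status_updates=False,
)
for file_metadata in files:
    print(file_metadata)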
letta_client/sources/files/raw_client.py
CHANGED
@@ -104,6 +104,7 @@ class RawFilesClient:
         limit: typing.Optional[int] = None,
         after: typing.Optional[str] = None,
         include_content: typing.Optional[bool] = None,
+        check_status_updates: typing.Optional[bool] = None,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> HttpResponse[typing.List[FileMetadata]]:
         """
@@ -122,6 +123,9 @@ class RawFilesClient:
         include_content : typing.Optional[bool]
             Whether to include full file content

+        check_status_updates : typing.Optional[bool]
+            Whether to check and update file processing status (from the vector db service). If False, will not fetch and update the status, which may lead to performance gains.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

@@ -137,6 +141,7 @@ class RawFilesClient:
                 "limit": limit,
                 "after": after,
                 "include_content": include_content,
+                "check_status_updates": check_status_updates,
             },
             request_options=request_options,
         )
@@ -295,6 +300,7 @@ class AsyncRawFilesClient:
         limit: typing.Optional[int] = None,
         after: typing.Optional[str] = None,
         include_content: typing.Optional[bool] = None,
+        check_status_updates: typing.Optional[bool] = None,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> AsyncHttpResponse[typing.List[FileMetadata]]:
         """
@@ -313,6 +319,9 @@ class AsyncRawFilesClient:
         include_content : typing.Optional[bool]
             Whether to include full file content

+        check_status_updates : typing.Optional[bool]
+            Whether to check and update file processing status (from the vector db service). If False, will not fetch and update the status, which may lead to performance gains.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

@@ -328,6 +337,7 @@ class AsyncRawFilesClient:
                 "limit": limit,
                 "after": after,
                 "include_content": include_content,
+                "check_status_updates": check_status_updates,
             },
             request_options=request_options,
         )
letta_client/templates/types/templates_get_template_snapshot_response_agents_item_properties.py
CHANGED
@@ -16,6 +16,7 @@ class TemplatesGetTemplateSnapshotResponseAgentsItemProperties(UncheckedBaseMode
     max_files_open: typing.Optional[float] = None
     message_buffer_autoclear: typing.Optional[bool] = None
     per_file_view_window_char_limit: typing.Optional[float] = None
+    temperature: typing.Optional[float] = None

     if IS_PYDANTIC_V2:
         model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore  # Pydantic v2
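The new field is an ordinary optional attribute on the snapshot properties model, so it can be exercised without calling the templates API; a minimal sketch constructing the model by hand:

from letta_client.templates.types.templates_get_template_snapshot_response_agents_item_properties import (
    TemplatesGetTemplateSnapshotResponseAgentsItemProperties,
)

# temperature joins the other optional agent properties carried by template snapshots.
props = TemplatesGetTemplateSnapshotResponseAgentsItemProperties(temperature=0.7)
print(props.temperature)     # 0.7
print(props.max_files_open)  # None; the existing fields stay optional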
{letta_client-0.1.276.dist-info → letta_client-0.1.279.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: letta-client
-Version: 0.1.276
+Version: 0.1.279
 Summary:
 Requires-Python: >=3.8,<4.0
 Classifier: Intended Audience :: Developers
@@ -30,7 +30,7 @@ Description-Content-Type: text/markdown
 [](https://buildwithfern.com?utm_source=github&utm_medium=github&utm_campaign=readme&utm_source=https%3A%2F%2Fgithub.com%2Fletta-ai%2Fletta-python)
 [](https://pypi.python.org/pypi/letta-client)

-The Letta Python library provides convenient access to the Letta
+The Letta Python library provides convenient access to the Letta APIs from Python.

 ## Installation
{letta_client-0.1.276.dist-info → letta_client-0.1.279.dist-info}/RECORD
CHANGED
@@ -1,4 +1,4 @@
-letta_client/__init__.py,sha256=
+letta_client/__init__.py,sha256=DpYv21zmjmXHKtq_-pCoVE63jYtsWeAqby9F2y6makk,26513
 letta_client/agents/__init__.py,sha256=yl1d02BPp-nGZLaUdH9mWcYvHu-1RhRyZUgpZQKOMGo,2010
 letta_client/agents/blocks/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
 letta_client/agents/blocks/client.py,sha256=Akx-1SYEXkmdtLtytPtdFNhVts8JkjC2aMQnnWgd8Ug,14735
@@ -90,7 +90,7 @@ letta_client/client_side_access_tokens/types/client_side_access_tokens_list_clie
 letta_client/client_side_access_tokens/types/client_side_access_tokens_list_client_side_access_tokens_response_tokens_item_policy_data_item_access_item.py,sha256=kNHfEWFl7u71Pu8NPqutod0a2NXfvq8il05Hqm0iBB4,284
 letta_client/core/__init__.py,sha256=tpn7rjb6C2UIkYZYIqdrNpI7Yax2jw88sXh2baxaxAI,1715
 letta_client/core/api_error.py,sha256=44vPoTyWN59gonCIZMdzw7M1uspygiLnr3GNFOoVL2Q,614
-letta_client/core/client_wrapper.py,sha256=
+letta_client/core/client_wrapper.py,sha256=TRLBxm8O2-zoarCi6gHYF23PNBX56FaYtCAex0bcRhw,2776
 letta_client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 letta_client/core/file.py,sha256=d4NNbX8XvXP32z8KpK2Xovv33nFfruIrpz0QWxlgpZk,2663
 letta_client/core/force_multipart.py,sha256=awxh5MtcRYe74ehY8U76jzv6fYM_w_D3Rur7KQQzSDk,429
@@ -163,23 +163,25 @@ letta_client/providers/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzq
 letta_client/providers/client.py,sha256=999OcO9GFtwmgx9PxA3lF-dEOp4ZEADsWDckeIKcKI0,18717
 letta_client/providers/raw_client.py,sha256=vg3z7P7UOjtLraW6GYb2YS5q496GYoyWN1s7u127q8E,30135
 letta_client/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-letta_client/runs/__init__.py,sha256=
-letta_client/runs/client.py,sha256=
+letta_client/runs/__init__.py,sha256=rTMzYM1OUKbBt0EOGKEhjNDseImuXG9gUdMCjy2mEbQ,232
+letta_client/runs/client.py,sha256=b2gKIc2l9KMao09cGRnmOrbjKmhUVUhKUR-cfutDnnA,14080
 letta_client/runs/messages/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
 letta_client/runs/messages/client.py,sha256=Ir9uA6W7kSpMLaOgmJeUKAokT4FgAH3l3sMucH-yqds,6998
 letta_client/runs/messages/raw_client.py,sha256=PbbkMSucuK-AmhcUyAdMtdtbnDSZGHb0nvw0vJqlb3s,8963
-letta_client/runs/raw_client.py,sha256=
+letta_client/runs/raw_client.py,sha256=m4teQ7fO-2zPzTz7uRl05S787aogA5zeKeNGlXMNNlQ,25506
 letta_client/runs/steps/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
 letta_client/runs/steps/client.py,sha256=4MiWOwmSiIX-0U_ic5a-x72Svjh2N7q6etXA5VJ9V30,6074
 letta_client/runs/steps/raw_client.py,sha256=dbM7QYusDn_u4UYZl0I_jK-hCOek_m525we6boGo8jA,7973
+letta_client/runs/types/__init__.py,sha256=_T3fEaCnP6BEwVQKFUrBv8iIjFqUrd-DF3hQFwjSb6Y,184
+letta_client/runs/types/letta_streaming_response.py,sha256=_UJOBKMBZ6S6naOyltSnQwAhDj3MFvAXh-K107dsKKU,792
 letta_client/runs/usage/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
 letta_client/runs/usage/client.py,sha256=pTs7XvHjETe7lbSWjgy1GdLzS03j9VWHrY3TULhlcVM,2919
 letta_client/runs/usage/raw_client.py,sha256=cNOsh9fT6MifL9Nh7sEpuNo6LmU0C0AfRv-K2HWPLFo,4628
 letta_client/sources/__init__.py,sha256=Y43-19f7EppL3LmM4hO5LtqT2CRaDJb0LAfnHurqYxU,148
 letta_client/sources/client.py,sha256=A_wBPqj8kkQ5RPeeJljvj8lQ9-HuDogNG6t3DJUnHWk,25755
 letta_client/sources/files/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
-letta_client/sources/files/client.py,sha256=
-letta_client/sources/files/raw_client.py,sha256=
+letta_client/sources/files/client.py,sha256=vicwwYIuRJeCzjNUeYGx9Qs3OsrSIWs8TU3iCGyQYIw,9716
+letta_client/sources/files/raw_client.py,sha256=7_ZU-U2rf9I0lwc0MLCV-K8zXNGbfBdYbnCvCUvd9sw,15205
 letta_client/sources/passages/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
 letta_client/sources/passages/client.py,sha256=6wo3iTfQcDLvmS-TaoX6wUXZ31t1l3VAdoIoJwEVVFY,4064
 letta_client/sources/passages/raw_client.py,sha256=iPIHJs_pmSsbyu5vBFuQdopymxlfQDkEKN4cs4YKVB8,5901
@@ -216,7 +218,7 @@ letta_client/templates/types/templates_get_template_snapshot_response.py,sha256=
 letta_client/templates/types/templates_get_template_snapshot_response_agents_item.py,sha256=ciczqvGIPMcuZCu3ObpVAZh8u_cDWbY6ImApwBOK6lc,2567
 letta_client/templates/types/templates_get_template_snapshot_response_agents_item_memory_variables.py,sha256=POh1PTstz0UC_rOnkpEyIQI0yHrANeM6Y5vuJlJAruU,877
 letta_client/templates/types/templates_get_template_snapshot_response_agents_item_memory_variables_data_item.py,sha256=TNgE_92wCm2MEGERb_q24_GKzvbh1z1I3pchuwowowA,816
-letta_client/templates/types/templates_get_template_snapshot_response_agents_item_properties.py,sha256=
+letta_client/templates/types/templates_get_template_snapshot_response_agents_item_properties.py,sha256=9s-Abzd4QfGjeOyzrdwrPLad4mZx7z4cvX7aOKbeyHs,1084
 letta_client/templates/types/templates_get_template_snapshot_response_agents_item_tool_rules_item.py,sha256=qoZ-EdDcNRYAQ2bADpvPLAzTKURXZR7ubz4o8yIu3LA,2061
 letta_client/templates/types/templates_get_template_snapshot_response_agents_item_tool_rules_item_child_output_mapping.py,sha256=LLnaNqnXFnoLRTZo2O9nCFlkLTkdj2Re1h6ItsM-_RQ,895
 letta_client/templates/types/templates_get_template_snapshot_response_agents_item_tool_rules_item_five.py,sha256=6yyJTWEoTy6UVFHqqzbvfY_i1VmWLuZVaRBWU8EbKx8,738
@@ -564,6 +566,6 @@ letta_client/version.py,sha256=bttKLbIhO3UonCYQlqs600zzbQgfhCCMjeXR9WRzid4,79
 letta_client/voice/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
 letta_client/voice/client.py,sha256=EbIVOQh4HXqU9McATxwga08STk-HUwPEAUr_UHqyKHg,3748
 letta_client/voice/raw_client.py,sha256=KvM_3GXuSf51bubM0RVBnxvlf20qZTFMnaA_BzhXzjQ,5938
-letta_client-0.1.
-letta_client-0.1.
-letta_client-0.1.
+letta_client-0.1.279.dist-info/METADATA,sha256=Qi7i-C24HlApUWaBDl6Ml4sygCASQMMx6hmkTP7wkIs,5782
+letta_client-0.1.279.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+letta_client-0.1.279.dist-info/RECORD,,
{letta_client-0.1.276.dist-info → letta_client-0.1.279.dist-info}/WHEEL
File without changes