letta-client 0.1.275__py3-none-any.whl → 0.1.277__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- letta_client/__init__.py +2 -0
- letta_client/core/client_wrapper.py +2 -2
- letta_client/runs/__init__.py +2 -1
- letta_client/runs/client.py +131 -0
- letta_client/runs/raw_client.py +197 -0
- letta_client/runs/types/__init__.py +7 -0
- letta_client/runs/types/letta_streaming_response.py +25 -0
- letta_client/types/assistant_message.py +1 -0
- letta_client/types/hidden_reasoning_message.py +1 -0
- letta_client/types/reasoning_message.py +1 -0
- letta_client/types/system_message.py +1 -0
- letta_client/types/tool_call_message.py +1 -0
- letta_client/types/tool_return_message.py +1 -0
- letta_client/types/user_message.py +1 -0
- {letta_client-0.1.275.dist-info → letta_client-0.1.277.dist-info}/METADATA +1 -1
- {letta_client-0.1.275.dist-info → letta_client-0.1.277.dist-info}/RECORD +17 -15
- {letta_client-0.1.275.dist-info → letta_client-0.1.277.dist-info}/WHEEL +0 -0
letta_client/__init__.py
CHANGED
@@ -370,6 +370,7 @@ from .client_side_access_tokens import (
from .environment import LettaEnvironment
from .groups import GroupCreateManagerConfig, GroupUpdateManagerConfig
from .projects import ProjectsListResponse, ProjectsListResponseProjectsItem
+from .runs import LettaStreamingResponse
from .steps import StepsListRequestFeedback
from .templates import (
    TemplatesCreateTemplateResponse,
@@ -623,6 +624,7 @@ __all__ = [
    "LettaSerializeSchemasPydanticAgentSchemaToolSchema",
    "LettaStopReason",
    "LettaStreamingRequest",
+    "LettaStreamingResponse",
    "LettaUsageStatistics",
    "LettaUserMessageContentUnion",
    "ListMcpServersResponseValue",
letta_client/core/client_wrapper.py
CHANGED
@@ -24,10 +24,10 @@ class BaseClientWrapper:

    def get_headers(self) -> typing.Dict[str, str]:
        headers: typing.Dict[str, str] = {
-            "User-Agent": "letta-client/0.1.275",
+            "User-Agent": "letta-client/0.1.277",
            "X-Fern-Language": "Python",
            "X-Fern-SDK-Name": "letta-client",
-            "X-Fern-SDK-Version": "0.1.275",
+            "X-Fern-SDK-Version": "0.1.277",
            **(self.get_custom_headers() or {}),
        }
        if self._project is not None:
letta_client/runs/__init__.py
CHANGED
letta_client/runs/client.py
CHANGED
@@ -8,8 +8,12 @@ from ..types.run import Run
from .messages.client import AsyncMessagesClient, MessagesClient
from .raw_client import AsyncRawRunsClient, RawRunsClient
from .steps.client import AsyncStepsClient, StepsClient
+from .types.letta_streaming_response import LettaStreamingResponse
from .usage.client import AsyncUsageClient, UsageClient

+# this is used as the default value for optional parameters
+OMIT = typing.cast(typing.Any, ...)
+

class RunsClient:
    def __init__(self, *, client_wrapper: SyncClientWrapper):
@@ -163,6 +167,65 @@ class RunsClient:
        _response = self._raw_client.delete(run_id, request_options=request_options)
        return _response.data

+    def stream(
+        self,
+        run_id: str,
+        *,
+        starting_after: typing.Optional[int] = OMIT,
+        include_pings: typing.Optional[bool] = OMIT,
+        poll_interval: typing.Optional[float] = OMIT,
+        batch_size: typing.Optional[int] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> typing.Iterator[LettaStreamingResponse]:
+        """
+        Parameters
+        ----------
+        run_id : str
+
+        starting_after : typing.Optional[int]
+            Sequence id to use as a cursor for pagination. Response will start streaming after this chunk sequence id
+
+        include_pings : typing.Optional[bool]
+            Whether to include periodic keepalive ping messages in the stream to prevent connection timeouts.
+
+        poll_interval : typing.Optional[float]
+            Seconds to wait between polls when no new data.
+
+        batch_size : typing.Optional[int]
+            Number of entries to read per batch.
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Yields
+        ------
+        typing.Iterator[LettaStreamingResponse]
+            Successful response
+
+        Examples
+        --------
+        from letta_client import Letta
+
+        client = Letta(
+            project="YOUR_PROJECT",
+            token="YOUR_TOKEN",
+        )
+        response = client.runs.stream(
+            run_id="run_id",
+        )
+        for chunk in response:
+            yield chunk
+        """
+        with self._raw_client.stream(
+            run_id,
+            starting_after=starting_after,
+            include_pings=include_pings,
+            poll_interval=poll_interval,
+            batch_size=batch_size,
+            request_options=request_options,
+        ) as r:
+            yield from r.data
+

class AsyncRunsClient:
    def __init__(self, *, client_wrapper: AsyncClientWrapper):
@@ -347,3 +410,71 @@ class AsyncRunsClient:
        """
        _response = await self._raw_client.delete(run_id, request_options=request_options)
        return _response.data
+
+    async def stream(
+        self,
+        run_id: str,
+        *,
+        starting_after: typing.Optional[int] = OMIT,
+        include_pings: typing.Optional[bool] = OMIT,
+        poll_interval: typing.Optional[float] = OMIT,
+        batch_size: typing.Optional[int] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> typing.AsyncIterator[LettaStreamingResponse]:
+        """
+        Parameters
+        ----------
+        run_id : str
+
+        starting_after : typing.Optional[int]
+            Sequence id to use as a cursor for pagination. Response will start streaming after this chunk sequence id
+
+        include_pings : typing.Optional[bool]
+            Whether to include periodic keepalive ping messages in the stream to prevent connection timeouts.
+
+        poll_interval : typing.Optional[float]
+            Seconds to wait between polls when no new data.
+
+        batch_size : typing.Optional[int]
+            Number of entries to read per batch.
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Yields
+        ------
+        typing.AsyncIterator[LettaStreamingResponse]
+            Successful response
+
+        Examples
+        --------
+        import asyncio
+
+        from letta_client import AsyncLetta
+
+        client = AsyncLetta(
+            project="YOUR_PROJECT",
+            token="YOUR_TOKEN",
+        )
+
+
+        async def main() -> None:
+            response = await client.runs.stream(
+                run_id="run_id",
+            )
+            async for chunk in response:
+                yield chunk
+
+
+        asyncio.run(main())
+        """
+        async with self._raw_client.stream(
+            run_id,
+            starting_after=starting_after,
+            include_pings=include_pings,
+            poll_interval=poll_interval,
+            batch_size=batch_size,
+            request_options=request_options,
+        ) as r:
+            async for _chunk in r.data:
+                yield _chunk
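For orientation, a minimal consumption sketch of the new RunsClient.stream method (the run id, project, and token are placeholders; resuming via starting_after mirrors the cursor semantics documented above, and a plain loop replaces the docstring's illustrative yield):

from letta_client import Letta

client = Letta(project="YOUR_PROJECT", token="YOUR_TOKEN")

last_seq = None
for chunk in client.runs.stream(run_id="run-123", include_pings=False):
    # Each chunk is one member of the LettaStreamingResponse union.
    seq = getattr(chunk, "seq_id", None)
    if seq is not None:
        last_seq = seq
    print(type(chunk).__name__, chunk)

# After a dropped connection, the last seq_id cursor can be replayed:
# client.runs.stream(run_id="run-123", starting_after=last_seq)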
letta_client/runs/raw_client.py
CHANGED
@@ -1,8 +1,11 @@
# This file was auto-generated by Fern from our API Definition.

+import contextlib
+import json
import typing
from json.decoder import JSONDecodeError

+import httpx_sse
from ..core.api_error import ApiError
from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
from ..core.http_response import AsyncHttpResponse, HttpResponse
@@ -12,6 +15,10 @@ from ..core.unchecked_base_model import construct_type
from ..errors.unprocessable_entity_error import UnprocessableEntityError
from ..types.http_validation_error import HttpValidationError
from ..types.run import Run
+from .types.letta_streaming_response import LettaStreamingResponse
+
+# this is used as the default value for optional parameters
+OMIT = typing.cast(typing.Any, ...)


class RawRunsClient:
@@ -224,6 +231,101 @@ class RawRunsClient:
            raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

+    @contextlib.contextmanager
+    def stream(
+        self,
+        run_id: str,
+        *,
+        starting_after: typing.Optional[int] = OMIT,
+        include_pings: typing.Optional[bool] = OMIT,
+        poll_interval: typing.Optional[float] = OMIT,
+        batch_size: typing.Optional[int] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> typing.Iterator[HttpResponse[typing.Iterator[LettaStreamingResponse]]]:
+        """
+        Parameters
+        ----------
+        run_id : str
+
+        starting_after : typing.Optional[int]
+            Sequence id to use as a cursor for pagination. Response will start streaming after this chunk sequence id
+
+        include_pings : typing.Optional[bool]
+            Whether to include periodic keepalive ping messages in the stream to prevent connection timeouts.
+
+        poll_interval : typing.Optional[float]
+            Seconds to wait between polls when no new data.
+
+        batch_size : typing.Optional[int]
+            Number of entries to read per batch.
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Yields
+        ------
+        typing.Iterator[HttpResponse[typing.Iterator[LettaStreamingResponse]]]
+            Successful response
+        """
+        with self._client_wrapper.httpx_client.stream(
+            f"v1/runs/{jsonable_encoder(run_id)}/stream",
+            method="POST",
+            json={
+                "starting_after": starting_after,
+                "include_pings": include_pings,
+                "poll_interval": poll_interval,
+                "batch_size": batch_size,
+            },
+            headers={
+                "content-type": "application/json",
+            },
+            request_options=request_options,
+            omit=OMIT,
+        ) as _response:
+
+            def _stream() -> HttpResponse[typing.Iterator[LettaStreamingResponse]]:
+                try:
+                    if 200 <= _response.status_code < 300:
+
+                        def _iter():
+                            _event_source = httpx_sse.EventSource(_response)
+                            for _sse in _event_source.iter_sse():
+                                if _sse.data == None:
+                                    return
+                                try:
+                                    yield typing.cast(
+                                        LettaStreamingResponse,
+                                        construct_type(
+                                            type_=LettaStreamingResponse,  # type: ignore
+                                            object_=json.loads(_sse.data),
+                                        ),
+                                    )
+                                except Exception:
+                                    pass
+                            return
+
+                        return HttpResponse(response=_response, data=_iter())
+                    _response.read()
+                    if _response.status_code == 422:
+                        raise UnprocessableEntityError(
+                            headers=dict(_response.headers),
+                            body=typing.cast(
+                                HttpValidationError,
+                                construct_type(
+                                    type_=HttpValidationError,  # type: ignore
+                                    object_=_response.json(),
+                                ),
+                            ),
+                        )
+                    _response_json = _response.json()
+                except JSONDecodeError:
+                    raise ApiError(
+                        status_code=_response.status_code, headers=dict(_response.headers), body=_response.text
+                    )
+                raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
+
+            yield _stream()
+

class AsyncRawRunsClient:
    def __init__(self, *, client_wrapper: AsyncClientWrapper):
@@ -438,3 +540,98 @@ class AsyncRawRunsClient:
        except JSONDecodeError:
            raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
+
+    @contextlib.asynccontextmanager
+    async def stream(
+        self,
+        run_id: str,
+        *,
+        starting_after: typing.Optional[int] = OMIT,
+        include_pings: typing.Optional[bool] = OMIT,
+        poll_interval: typing.Optional[float] = OMIT,
+        batch_size: typing.Optional[int] = OMIT,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[LettaStreamingResponse]]]:
+        """
+        Parameters
+        ----------
+        run_id : str
+
+        starting_after : typing.Optional[int]
+            Sequence id to use as a cursor for pagination. Response will start streaming after this chunk sequence id
+
+        include_pings : typing.Optional[bool]
+            Whether to include periodic keepalive ping messages in the stream to prevent connection timeouts.
+
+        poll_interval : typing.Optional[float]
+            Seconds to wait between polls when no new data.
+
+        batch_size : typing.Optional[int]
+            Number of entries to read per batch.
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Yields
+        ------
+        typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[LettaStreamingResponse]]]
+            Successful response
+        """
+        async with self._client_wrapper.httpx_client.stream(
+            f"v1/runs/{jsonable_encoder(run_id)}/stream",
+            method="POST",
+            json={
+                "starting_after": starting_after,
+                "include_pings": include_pings,
+                "poll_interval": poll_interval,
+                "batch_size": batch_size,
+            },
+            headers={
+                "content-type": "application/json",
+            },
+            request_options=request_options,
+            omit=OMIT,
+        ) as _response:
+
+            async def _stream() -> AsyncHttpResponse[typing.AsyncIterator[LettaStreamingResponse]]:
+                try:
+                    if 200 <= _response.status_code < 300:
+
+                        async def _iter():
+                            _event_source = httpx_sse.EventSource(_response)
+                            async for _sse in _event_source.aiter_sse():
+                                if _sse.data == None:
+                                    return
+                                try:
+                                    yield typing.cast(
+                                        LettaStreamingResponse,
+                                        construct_type(
+                                            type_=LettaStreamingResponse,  # type: ignore
+                                            object_=json.loads(_sse.data),
+                                        ),
+                                    )
+                                except Exception:
+                                    pass
+                            return
+
+                        return AsyncHttpResponse(response=_response, data=_iter())
+                    await _response.aread()
+                    if _response.status_code == 422:
+                        raise UnprocessableEntityError(
+                            headers=dict(_response.headers),
+                            body=typing.cast(
+                                HttpValidationError,
+                                construct_type(
+                                    type_=HttpValidationError,  # type: ignore
+                                    object_=_response.json(),
+                                ),
+                            ),
+                        )
+                    _response_json = _response.json()
+                except JSONDecodeError:
+                    raise ApiError(
+                        status_code=_response.status_code, headers=dict(_response.headers), body=_response.text
+                    )
+                raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
+
+            yield await _stream()
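The generated stream methods above decode server-sent events with httpx_sse and construct_type; the same decode loop, reduced to a self-contained sketch against a hypothetical endpoint (the base URL, path, and payload here are placeholders, not the SDK's own wiring):

import json

import httpx
import httpx_sse

# Placeholder host; the SDK builds the real path as v1/runs/{run_id}/stream.
with httpx.Client(base_url="https://example.invalid") as http_client:
    with http_client.stream("POST", "/v1/runs/run-123/stream", json={"include_pings": True}) as response:
        for sse in httpx_sse.EventSource(response).iter_sse():
            if sse.data is None:  # mirrors the generated end-of-stream guard
                break
            print(json.loads(sse.data))  # each SSE payload is one JSON-encoded chunk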
letta_client/runs/types/letta_streaming_response.py
ADDED
@@ -0,0 +1,25 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+from ...types.assistant_message import AssistantMessage
+from ...types.letta_ping import LettaPing
+from ...types.letta_stop_reason import LettaStopReason
+from ...types.letta_usage_statistics import LettaUsageStatistics
+from ...types.reasoning_message import ReasoningMessage
+from ...types.system_message import SystemMessage
+from ...types.tool_call_message import ToolCallMessage
+from ...types.tool_return_message import ToolReturnMessage
+from ...types.user_message import UserMessage
+
+LettaStreamingResponse = typing.Union[
+    SystemMessage,
+    UserMessage,
+    ReasoningMessage,
+    ToolCallMessage,
+    ToolReturnMessage,
+    AssistantMessage,
+    LettaPing,
+    LettaStopReason,
+    LettaUsageStatistics,
+]
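Because LettaStreamingResponse is a plain typing.Union rather than a wrapper class, callers narrow each chunk with isinstance checks; a brief sketch using the module paths from this wheel (the handle function itself is illustrative):

from letta_client.types.assistant_message import AssistantMessage
from letta_client.types.letta_ping import LettaPing
from letta_client.types.reasoning_message import ReasoningMessage

def handle(chunk) -> None:
    # Dispatch on the concrete union member yielded by runs.stream().
    if isinstance(chunk, LettaPing):
        return  # keepalive only
    if isinstance(chunk, ReasoningMessage):
        print("reasoning:", chunk.reasoning)
    elif isinstance(chunk, AssistantMessage):
        print("assistant:", chunk.content)
    else:
        print(chunk)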
letta_client/types/assistant_message.py
CHANGED
@@ -29,6 +29,7 @@ class AssistantMessage(UncheckedBaseModel):
    step_id: typing.Optional[str] = None
    is_err: typing.Optional[bool] = None
    seq_id: typing.Optional[int] = None
+    run_id: typing.Optional[str] = None
    content: AssistantMessageContent = pydantic.Field()
    """
    The message content sent by the agent (can be a string or an array of content parts)
letta_client/types/hidden_reasoning_message.py
CHANGED
@@ -32,6 +32,7 @@ class HiddenReasoningMessage(UncheckedBaseModel):
    step_id: typing.Optional[str] = None
    is_err: typing.Optional[bool] = None
    seq_id: typing.Optional[int] = None
+    run_id: typing.Optional[str] = None
    state: HiddenReasoningMessageState
    hidden_reasoning: typing.Optional[str] = None

letta_client/types/reasoning_message.py
CHANGED
@@ -32,6 +32,7 @@ class ReasoningMessage(UncheckedBaseModel):
    step_id: typing.Optional[str] = None
    is_err: typing.Optional[bool] = None
    seq_id: typing.Optional[int] = None
+    run_id: typing.Optional[str] = None
    source: typing.Optional[ReasoningMessageSource] = None
    reasoning: str
    signature: typing.Optional[str] = None
letta_client/types/system_message.py
CHANGED
@@ -28,6 +28,7 @@ class SystemMessage(UncheckedBaseModel):
    step_id: typing.Optional[str] = None
    is_err: typing.Optional[bool] = None
    seq_id: typing.Optional[int] = None
+    run_id: typing.Optional[str] = None
    content: str = pydantic.Field()
    """
    The message content sent by the system
letta_client/types/tool_call_message.py
CHANGED
@@ -29,6 +29,7 @@ class ToolCallMessage(UncheckedBaseModel):
    step_id: typing.Optional[str] = None
    is_err: typing.Optional[bool] = None
    seq_id: typing.Optional[int] = None
+    run_id: typing.Optional[str] = None
    tool_call: ToolCallMessageToolCall

    if IS_PYDANTIC_V2:
letta_client/types/tool_return_message.py
CHANGED
@@ -33,6 +33,7 @@ class ToolReturnMessage(UncheckedBaseModel):
    step_id: typing.Optional[str] = None
    is_err: typing.Optional[bool] = None
    seq_id: typing.Optional[int] = None
+    run_id: typing.Optional[str] = None
    tool_return: str
    status: ToolReturnMessageStatus
    tool_call_id: str
letta_client/types/user_message.py
CHANGED
@@ -29,6 +29,7 @@ class UserMessage(UncheckedBaseModel):
    step_id: typing.Optional[str] = None
    is_err: typing.Optional[bool] = None
    seq_id: typing.Optional[int] = None
+    run_id: typing.Optional[str] = None
    content: UserMessageContent = pydantic.Field()
    """
    The message content sent by the user (can be a string or an array of multi-modal content parts)
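All of these message models gain an optional run_id, so streamed chunks can be attributed to the run that produced them; a tiny correlation sketch (client is the Letta client from the earlier sketch, the run id is a placeholder, and the defensive filter is illustrative):

run_id = "run-123"
for chunk in client.runs.stream(run_id=run_id):
    # run_id is Optional[str] on the message models; pings and stop reasons may omit it.
    if getattr(chunk, "run_id", None) not in (None, run_id):
        continue
    print(chunk)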
{letta_client-0.1.275.dist-info → letta_client-0.1.277.dist-info}/RECORD
CHANGED
@@ -1,4 +1,4 @@
-letta_client/__init__.py,sha256=
+letta_client/__init__.py,sha256=DpYv21zmjmXHKtq_-pCoVE63jYtsWeAqby9F2y6makk,26513
letta_client/agents/__init__.py,sha256=yl1d02BPp-nGZLaUdH9mWcYvHu-1RhRyZUgpZQKOMGo,2010
letta_client/agents/blocks/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
letta_client/agents/blocks/client.py,sha256=Akx-1SYEXkmdtLtytPtdFNhVts8JkjC2aMQnnWgd8Ug,14735
@@ -90,7 +90,7 @@ letta_client/client_side_access_tokens/types/client_side_access_tokens_list_clie
letta_client/client_side_access_tokens/types/client_side_access_tokens_list_client_side_access_tokens_response_tokens_item_policy_data_item_access_item.py,sha256=kNHfEWFl7u71Pu8NPqutod0a2NXfvq8il05Hqm0iBB4,284
letta_client/core/__init__.py,sha256=tpn7rjb6C2UIkYZYIqdrNpI7Yax2jw88sXh2baxaxAI,1715
letta_client/core/api_error.py,sha256=44vPoTyWN59gonCIZMdzw7M1uspygiLnr3GNFOoVL2Q,614
-letta_client/core/client_wrapper.py,sha256=
+letta_client/core/client_wrapper.py,sha256=qM2jq77f0f7-rxfEu2Bs-StIY2KCCf7374oISiMizqw,2776
letta_client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
letta_client/core/file.py,sha256=d4NNbX8XvXP32z8KpK2Xovv33nFfruIrpz0QWxlgpZk,2663
letta_client/core/force_multipart.py,sha256=awxh5MtcRYe74ehY8U76jzv6fYM_w_D3Rur7KQQzSDk,429
@@ -163,15 +163,17 @@ letta_client/providers/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzq
letta_client/providers/client.py,sha256=999OcO9GFtwmgx9PxA3lF-dEOp4ZEADsWDckeIKcKI0,18717
letta_client/providers/raw_client.py,sha256=vg3z7P7UOjtLraW6GYb2YS5q496GYoyWN1s7u127q8E,30135
letta_client/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-letta_client/runs/__init__.py,sha256=
-letta_client/runs/client.py,sha256=
+letta_client/runs/__init__.py,sha256=rTMzYM1OUKbBt0EOGKEhjNDseImuXG9gUdMCjy2mEbQ,232
+letta_client/runs/client.py,sha256=SOg_BxqMyHafUJnS7Gf-J5pbMsx87RZ-s7D198neiHs,13242
letta_client/runs/messages/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
letta_client/runs/messages/client.py,sha256=Ir9uA6W7kSpMLaOgmJeUKAokT4FgAH3l3sMucH-yqds,6998
letta_client/runs/messages/raw_client.py,sha256=PbbkMSucuK-AmhcUyAdMtdtbnDSZGHb0nvw0vJqlb3s,8963
-letta_client/runs/raw_client.py,sha256=
+letta_client/runs/raw_client.py,sha256=oQFNOL2zY46DiaM3qsBuEXm_j0vPZC2EC6NhxNLQ8N8,24658
letta_client/runs/steps/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
letta_client/runs/steps/client.py,sha256=4MiWOwmSiIX-0U_ic5a-x72Svjh2N7q6etXA5VJ9V30,6074
letta_client/runs/steps/raw_client.py,sha256=dbM7QYusDn_u4UYZl0I_jK-hCOek_m525we6boGo8jA,7973
+letta_client/runs/types/__init__.py,sha256=_T3fEaCnP6BEwVQKFUrBv8iIjFqUrd-DF3hQFwjSb6Y,184
+letta_client/runs/types/letta_streaming_response.py,sha256=_UJOBKMBZ6S6naOyltSnQwAhDj3MFvAXh-K107dsKKU,792
letta_client/runs/usage/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
letta_client/runs/usage/client.py,sha256=pTs7XvHjETe7lbSWjgy1GdLzS03j9VWHrY3TULhlcVM,2919
letta_client/runs/usage/raw_client.py,sha256=cNOsh9fT6MifL9Nh7sEpuNo6LmU0C0AfRv-K2HWPLFo,4628
@@ -265,7 +267,7 @@ letta_client/types/agent_type.py,sha256=4_JSgy0zfiqtUcKh5XMfRDYaablo4RuzUmIstRZ2
letta_client/types/app_auth_scheme.py,sha256=A1ha3TmjgFw3-mW8r44LPB_Go3pdbGxJ_LHrDChSG14,1243
letta_client/types/app_auth_scheme_auth_mode.py,sha256=cEj9XAxLgFcang_Irw6h3koWac9A0tpNeBG05NUeGlw,387
letta_client/types/app_model.py,sha256=6QlEj1uFSnUMDEkmM1XF1umO-X6AHq5oBGzVTZeZeEo,1520
-letta_client/types/assistant_message.py,sha256=
+letta_client/types/assistant_message.py,sha256=iFCIXRIndpEfBpdv5jX1IMBOWKJsciQF12lfB4I9bHI,1631
letta_client/types/assistant_message_content.py,sha256=rJZePqcZN74tqx-FbArUF1FaqvATOYAn87mvdpqyINA,242
letta_client/types/audio.py,sha256=1HlHjJ1FEv4J5G4jQY24G0H91T5LT3K6e-dnQ1Di7i8,549
letta_client/types/auth_request.py,sha256=kIiQTHpCLr0UPAt6SVL60cPnFjRW1x4tpazupclHSkk,684
@@ -381,7 +383,7 @@ letta_client/types/group.py,sha256=tjBeaPlVtR6m6VfImqh506QfTR1fvnLwRXGj1BnRDXw,2
letta_client/types/group_schema.py,sha256=xNW28jXZ1npZZ3B7yUpRGx1uetcj80EQZxHpgUSoEGQ,1253
letta_client/types/group_schema_manager_config.py,sha256=3kIh_FoLwKOA5JDcZbHmpF8j1Ld0w7-kk9pRugVdWvw,474
letta_client/types/health.py,sha256=M5qOOKBb6PET3M4VrdHZ4_6643I0GNKq6WldhzkilJI,619
-letta_client/types/hidden_reasoning_message.py,sha256=
+letta_client/types/hidden_reasoning_message.py,sha256=qbzaXbCp19cqaCRQaw2EWaqdxWSrRwohiLFfpY298JA,1691
letta_client/types/hidden_reasoning_message_state.py,sha256=qotAgF_P4T7OEHzbhGDVFaLZYOs1ULMPVHmiFvoRIfM,174
letta_client/types/http_validation_error.py,sha256=LTr0zWUWfG2-2lTgDwRtrEo0CYdOvF5jkH4h40YGP1g,662
letta_client/types/identity.py,sha256=h7Yn795wOyvXBNzBBUJToC6GlDife4h7tKRx2mOBKXs,1592
@@ -477,7 +479,7 @@ letta_client/types/provider_category.py,sha256=St4tSc_Wc5huF79kb088-L-tRz9Cj2_b5
letta_client/types/provider_trace.py,sha256=d7_IpoEgLeqnPaElWjOp6iAL8SbeI4DZsBaaaFtkorM,2201
letta_client/types/provider_type.py,sha256=NwBjRytkZr9uOxRgXATflyDcxchus2xjBb5kKtJ9aQ8,463
letta_client/types/reasoning_content.py,sha256=YPmNwwSH_toPAThpE5gq7gaxBlvvjh33csKBRdFI_iY,996
-letta_client/types/reasoning_message.py,sha256=
+letta_client/types/reasoning_message.py,sha256=-eZpuDwFkRjNcfrebI3u_7HIP62sxXnZl59bx21oTR8,1728
letta_client/types/reasoning_message_source.py,sha256=GYOWGm2mje1yYbR8E2kbAeQS--VDrGlpsobEBQHE2cU,186
letta_client/types/redacted_reasoning_content.py,sha256=d3L2OoswodrCBaeCsP9VSDd7VUmVFEMN9ORPCUGsz6c,736
letta_client/types/required_before_exit_tool_rule.py,sha256=QAoqKyCXkGyIRuHeIE3WW86XQYAn1U6_5QMcUiCCrZY,1058
@@ -513,7 +515,7 @@ letta_client/types/stop_reason_type.py,sha256=BgrPBP-v9YBOpGmpusQvVQqCLPNOQFl57k
letta_client/types/streamable_http_server_config.py,sha256=GNbt2KO9VepHUtiMoK6htJLpFHcGoFJaoH1Az6zkt-Q,1776
letta_client/types/supervisor_manager.py,sha256=tq3WDIxiyJJe4zf1Pf7UkMqTXcMF7NETLgFr-DCTNIc,677
letta_client/types/supervisor_manager_update.py,sha256=srC_cYti3h_DdWRmcrLMgWnMS07Rpwo_U0tjCD19cb0,712
-letta_client/types/system_message.py,sha256=
+letta_client/types/system_message.py,sha256=-jWKwRBrVleOGAE9n8VXCRk1sI6cFMw4KWB_3CsCeZw,1424
letta_client/types/tag_schema.py,sha256=TtqUHpKMxiMD-k-4vQgCOxiwzKRFBjEGhaq8cd0hhtQ,554
letta_client/types/terminal_tool_rule.py,sha256=WgJQdVd6sTGqcWBDL3Dow09CsjohhM96gO6m3bv6p6A,1032
letta_client/types/text_content.py,sha256=ivADxvqhZUBBr8XGv3ODB1Ll8GYdQ74c0wlIp31xQHk,671
@@ -523,14 +525,14 @@ letta_client/types/tool_annotations.py,sha256=gOUNd0gxzHvSz57G5c8gxK45VTxzk2yoB1
letta_client/types/tool_call.py,sha256=nIC6Tew85oekAwM7TWuD1nh-wMMSbwnHkDRIPh9znDk,595
letta_client/types/tool_call_content.py,sha256=ahSG2qf-hy3kM0L0OC5Yz9Xco_2B5fnPA6Wrb1FD9hw,996
letta_client/types/tool_call_delta.py,sha256=BDXIgt8R_kNy6A2j-O5e-6LUD1CoRVQ78xZpdfy9G_I,672
-letta_client/types/tool_call_message.py,sha256=
+letta_client/types/tool_call_message.py,sha256=9WzXDFsusL2lDt9LFmeXcBhCfHliiMc6gUAtglTKUQ8,1440
letta_client/types/tool_call_message_tool_call.py,sha256=d9RPFSB8CXdANdNi7EoHYvRYpK-ETkTUSI5EWm71UlQ,220
letta_client/types/tool_create.py,sha256=jR1HKTkwbHacfUW5S2OMlTLOxRT_5feR9q9s94316gw,1955
letta_client/types/tool_env_var_schema.py,sha256=0LXRU-zRlBvMq9OZBnRHeZZvre3U3u3OF0zlBfgEXTo,661
letta_client/types/tool_json_schema.py,sha256=HFQvRiMLY1wWtAnnxgWA0hLKntgfPr-JLUdHoeKarj0,755
letta_client/types/tool_return.py,sha256=P0hvUOPtIYs0y6yzDRPtY86JOZn-W75Hi4efUyc7CDI,985
letta_client/types/tool_return_content.py,sha256=Z18tVJa_FZoDHw2UL04XbTz3eEeUR-Qs0iydBFv3dog,957
-letta_client/types/tool_return_message.py,sha256=
+letta_client/types/tool_return_message.py,sha256=aOUlf7UJ2jVYxT7JL5pHj3ZN3WT_yxzzCcYaaVHaRpI,1935
letta_client/types/tool_return_message_status.py,sha256=FvFOMaG9mnmgnHi2UBQVQQMtHFabbWnQnHTxGUDgVl0,167
letta_client/types/tool_return_status.py,sha256=TQjwYprn5F_jU9kIbrtiyk7Gw2SjcmFFZLjFbGDpBM0,160
letta_client/types/tool_type.py,sha256=Lrced4b0gDW3IWOhyCPC_dZX6dRUReI8VsutrgRTCzM,459
@@ -549,7 +551,7 @@ letta_client/types/usage_statistics_completion_token_details.py,sha256=LSyXm35vq
letta_client/types/usage_statistics_prompt_token_details.py,sha256=Nevoj7tMsA6N8IkEykyk3RSDocbcXb3ILdqcaJAs3b0,612
letta_client/types/user.py,sha256=jWL92p1yE91PpAmr8GSYpC9RECBq8pfJNOelCgEQm74,1350
letta_client/types/user_create.py,sha256=f0Tpag3Hhopr4UC0G0tvhkD-2fCkG-Cojf-3IZ8MKmA,617
-letta_client/types/user_message.py,sha256=
+letta_client/types/user_message.py,sha256=48ZdZfYN55Tj3XFqLAV4H1NWxYo8jJOYup1rzjyr8kQ,1636
letta_client/types/user_message_content.py,sha256=2sDDlXGGOrwJEFDmU0CziH8SE4JG9z-wV24PEDYlC4s,238
letta_client/types/user_update.py,sha256=s3QyERsCqx8QlcimGuoBJWIKH5b7AZ1-35btlSQqxr8,732
letta_client/types/validation_error.py,sha256=jftGp9JtEt1bO2z9pFqhfq02Vu-0gGX9vz4AGM_pabg,681
@@ -564,6 +566,6 @@ letta_client/version.py,sha256=bttKLbIhO3UonCYQlqs600zzbQgfhCCMjeXR9WRzid4,79
letta_client/voice/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
letta_client/voice/client.py,sha256=EbIVOQh4HXqU9McATxwga08STk-HUwPEAUr_UHqyKHg,3748
letta_client/voice/raw_client.py,sha256=KvM_3GXuSf51bubM0RVBnxvlf20qZTFMnaA_BzhXzjQ,5938
-letta_client-0.1.275.dist-info/METADATA,sha256=
-letta_client-0.1.275.dist-info/WHEEL,sha256=
-letta_client-0.1.275.dist-info/RECORD,,
+letta_client-0.1.277.dist-info/METADATA,sha256=jYPmqfBlNJx64sEmeU4N7WhE20n-rwrAULTToFf_2_I,5781
+letta_client-0.1.277.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+letta_client-0.1.277.dist-info/RECORD,,
File without changes
|