letta-client 0.1.116__py3-none-any.whl → 0.1.118__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of letta-client might be problematic.
- letta_client/__init__.py +4 -2
- letta_client/agents/client.py +0 -293
- letta_client/base_client.py +4 -0
- letta_client/core/client_wrapper.py +1 -1
- letta_client/messages/__init__.py +5 -0
- letta_client/messages/batches/__init__.py +2 -0
- letta_client/messages/batches/client.py +537 -0
- letta_client/messages/client.py +150 -0
- letta_client/types/__init__.py +2 -2
- letta_client/types/batch_job.py +76 -0
- letta_client/types/chat_completion_audio_param_format.py +1 -1
- letta_client/types/completion_create_params_non_streaming_model.py +10 -0
- letta_client/types/completion_create_params_non_streaming_service_tier.py +1 -1
- letta_client/types/completion_create_params_streaming_model.py +10 -0
- letta_client/types/completion_create_params_streaming_service_tier.py +1 -1
- letta_client/types/job.py +15 -0
- letta_client/types/job_type.py +1 -1
- letta_client/types/run.py +15 -0
- {letta_client-0.1.116.dist-info → letta_client-0.1.118.dist-info}/METADATA +1 -1
- {letta_client-0.1.116.dist-info → letta_client-0.1.118.dist-info}/RECORD +21 -17
- letta_client/types/letta_batch_response.py +0 -44
- {letta_client-0.1.116.dist-info → letta_client-0.1.118.dist-info}/WHEEL +0 -0
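The headline change in this release pair is the relocation of batch messaging: `create_batch_message_request` and `retrieve_batch_message_request` are deleted from `letta_client/agents/client.py`, the `LettaBatchResponse` type is removed, and a new top-level `messages` sub-client (with a `batches` module and a `BatchJob` type) is added. For orientation, the sketch below reproduces the 0.1.116 call pattern that no longer exists in 0.1.118; it is lifted from the removed docstring example and only runs against the older version, and the comment about the new location is an inference from the file list above, not an API shown in this diff.

# Works with letta-client 0.1.116 only; both methods are removed in 0.1.118.
from letta_client import Letta, LettaBatchRequest, MessageCreate, TextContent

client = Letta(token="YOUR_TOKEN")

# Submit a batch of agent messages for asynchronous processing (old agents-level API).
client.agents.create_batch_message_request(
    request=[
        LettaBatchRequest(
            messages=[MessageCreate(role="user", content=[TextContent(text="text")])],
            agent_id="agent_id",
        )
    ],
)

# Poll a previously submitted batch by id (old agents-level API). In 0.1.118 the
# equivalent endpoints are exposed through the new client.messages.batches
# sub-client instead; its method names are not visible in this diff.
client.agents.retrieve_batch_message_request(batch_id="batch_id")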
letta_client/__init__.py
CHANGED
@@ -21,6 +21,7 @@ from .types import (
     AuthSchemeField,
     BadRequestErrorBody,
     BaseToolRuleSchema,
+    BatchJob,
     Block,
     BlockUpdate,
     ChatCompletionAssistantMessageParam,
@@ -115,7 +116,6 @@ from .types import (
     JobType,
     JsonSchema,
     LettaBatchRequest,
-    LettaBatchResponse,
     LettaMessageContentUnion,
     LettaMessageUnion,
     LettaRequest,
@@ -230,6 +230,7 @@ from . import (
     health,
     identities,
     jobs,
+    messages,
     models,
     projects,
     providers,
@@ -310,6 +311,7 @@ __all__ = [
     "BadRequestError",
     "BadRequestErrorBody",
     "BaseToolRuleSchema",
+    "BatchJob",
     "Block",
     "BlockUpdate",
     "ChatCompletionAssistantMessageParam",
@@ -418,7 +420,6 @@ __all__ = [
     "JsonSchema",
     "Letta",
     "LettaBatchRequest",
-    "LettaBatchResponse",
    "LettaEnvironment",
     "LettaMessageContentUnion",
     "LettaMessageUnion",
@@ -541,6 +542,7 @@ __all__ = [
     "health",
     "identities",
     "jobs",
+    "messages",
     "models",
     "projects",
     "providers",
letta_client/agents/client.py
CHANGED
@@ -31,8 +31,6 @@ from .types.update_agent_tool_rules_item import UpdateAgentToolRulesItem
 import datetime as dt
 from ..types.passage import Passage
 from ..types.group import Group
-from ..types.letta_batch_request import LettaBatchRequest
-from ..types.letta_batch_response import LettaBatchResponse
 from .types.agents_search_request_search_item import AgentsSearchRequestSearchItem
 from .types.agents_search_response import AgentsSearchResponse
 from ..core.client_wrapper import AsyncClientWrapper
@@ -1160,141 +1158,6 @@ class AgentsClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
-    def create_batch_message_request(
-        self, *, request: typing.Sequence[LettaBatchRequest], request_options: typing.Optional[RequestOptions] = None
-    ) -> LettaBatchResponse:
-        """
-        Submit a batch of agent messages for asynchronous processing.
-        Creates a job that will fan out messages to all listed agents and process them in parallel.
-
-        Parameters
-        ----------
-        request : typing.Sequence[LettaBatchRequest]
-
-        request_options : typing.Optional[RequestOptions]
-            Request-specific configuration.
-
-        Returns
-        -------
-        LettaBatchResponse
-            Successful Response
-
-        Examples
-        --------
-        from letta_client import Letta, LettaBatchRequest, MessageCreate, TextContent
-
-        client = Letta(
-            token="YOUR_TOKEN",
-        )
-        client.agents.create_batch_message_request(
-            request=[
-                LettaBatchRequest(
-                    messages=[
-                        MessageCreate(
-                            role="user",
-                            content=[
-                                TextContent(
-                                    text="text",
-                                )
-                            ],
-                        )
-                    ],
-                    agent_id="agent_id",
-                )
-            ],
-        )
-        """
-        _response = self._client_wrapper.httpx_client.request(
-            "v1/agents/messages/batches",
-            method="POST",
-            json=convert_and_respect_annotation_metadata(
-                object_=request, annotation=typing.Sequence[LettaBatchRequest], direction="write"
-            ),
-            request_options=request_options,
-            omit=OMIT,
-        )
-        try:
-            if 200 <= _response.status_code < 300:
-                return typing.cast(
-                    LettaBatchResponse,
-                    construct_type(
-                        type_=LettaBatchResponse,  # type: ignore
-                        object_=_response.json(),
-                    ),
-                )
-            if _response.status_code == 422:
-                raise UnprocessableEntityError(
-                    typing.cast(
-                        HttpValidationError,
-                        construct_type(
-                            type_=HttpValidationError,  # type: ignore
-                            object_=_response.json(),
-                        ),
-                    )
-                )
-            _response_json = _response.json()
-        except JSONDecodeError:
-            raise ApiError(status_code=_response.status_code, body=_response.text)
-        raise ApiError(status_code=_response.status_code, body=_response_json)
-
-    def retrieve_batch_message_request(
-        self, batch_id: str, *, request_options: typing.Optional[RequestOptions] = None
-    ) -> LettaBatchResponse:
-        """
-        Retrieve the result or current status of a previously submitted batch message request.
-
-        Parameters
-        ----------
-        batch_id : str
-
-        request_options : typing.Optional[RequestOptions]
-            Request-specific configuration.
-
-        Returns
-        -------
-        LettaBatchResponse
-            Successful Response
-
-        Examples
-        --------
-        from letta_client import Letta
-
-        client = Letta(
-            token="YOUR_TOKEN",
-        )
-        client.agents.retrieve_batch_message_request(
-            batch_id="batch_id",
-        )
-        """
-        _response = self._client_wrapper.httpx_client.request(
-            f"v1/agents/messages/batches/{jsonable_encoder(batch_id)}",
-            method="GET",
-            request_options=request_options,
-        )
-        try:
-            if 200 <= _response.status_code < 300:
-                return typing.cast(
-                    LettaBatchResponse,
-                    construct_type(
-                        type_=LettaBatchResponse,  # type: ignore
-                        object_=_response.json(),
-                    ),
-                )
-            if _response.status_code == 422:
-                raise UnprocessableEntityError(
-                    typing.cast(
-                        HttpValidationError,
-                        construct_type(
-                            type_=HttpValidationError,  # type: ignore
-                            object_=_response.json(),
-                        ),
-                    )
-                )
-            _response_json = _response.json()
-        except JSONDecodeError:
-            raise ApiError(status_code=_response.status_code, body=_response.text)
-        raise ApiError(status_code=_response.status_code, body=_response_json)
-
     def search(
         self,
         *,
@@ -2564,162 +2427,6 @@ class AsyncAgentsClient:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
-    async def create_batch_message_request(
-        self, *, request: typing.Sequence[LettaBatchRequest], request_options: typing.Optional[RequestOptions] = None
-    ) -> LettaBatchResponse:
-        """
-        Submit a batch of agent messages for asynchronous processing.
-        Creates a job that will fan out messages to all listed agents and process them in parallel.
-
-        Parameters
-        ----------
-        request : typing.Sequence[LettaBatchRequest]
-
-        request_options : typing.Optional[RequestOptions]
-            Request-specific configuration.
-
-        Returns
-        -------
-        LettaBatchResponse
-            Successful Response
-
-        Examples
-        --------
-        import asyncio
-
-        from letta_client import (
-            AsyncLetta,
-            LettaBatchRequest,
-            MessageCreate,
-            TextContent,
-        )
-
-        client = AsyncLetta(
-            token="YOUR_TOKEN",
-        )
-
-
-        async def main() -> None:
-            await client.agents.create_batch_message_request(
-                request=[
-                    LettaBatchRequest(
-                        messages=[
-                            MessageCreate(
-                                role="user",
-                                content=[
-                                    TextContent(
-                                        text="text",
-                                    )
-                                ],
-                            )
-                        ],
-                        agent_id="agent_id",
-                    )
-                ],
-            )
-
-
-        asyncio.run(main())
-        """
-        _response = await self._client_wrapper.httpx_client.request(
-            "v1/agents/messages/batches",
-            method="POST",
-            json=convert_and_respect_annotation_metadata(
-                object_=request, annotation=typing.Sequence[LettaBatchRequest], direction="write"
-            ),
-            request_options=request_options,
-            omit=OMIT,
-        )
-        try:
-            if 200 <= _response.status_code < 300:
-                return typing.cast(
-                    LettaBatchResponse,
-                    construct_type(
-                        type_=LettaBatchResponse,  # type: ignore
-                        object_=_response.json(),
-                    ),
-                )
-            if _response.status_code == 422:
-                raise UnprocessableEntityError(
-                    typing.cast(
-                        HttpValidationError,
-                        construct_type(
-                            type_=HttpValidationError,  # type: ignore
-                            object_=_response.json(),
-                        ),
-                    )
-                )
-            _response_json = _response.json()
-        except JSONDecodeError:
-            raise ApiError(status_code=_response.status_code, body=_response.text)
-        raise ApiError(status_code=_response.status_code, body=_response_json)
-
-    async def retrieve_batch_message_request(
-        self, batch_id: str, *, request_options: typing.Optional[RequestOptions] = None
-    ) -> LettaBatchResponse:
-        """
-        Retrieve the result or current status of a previously submitted batch message request.
-
-        Parameters
-        ----------
-        batch_id : str
-
-        request_options : typing.Optional[RequestOptions]
-            Request-specific configuration.
-
-        Returns
-        -------
-        LettaBatchResponse
-            Successful Response
-
-        Examples
-        --------
-        import asyncio
-
-        from letta_client import AsyncLetta
-
-        client = AsyncLetta(
-            token="YOUR_TOKEN",
-        )
-
-
-        async def main() -> None:
-            await client.agents.retrieve_batch_message_request(
-                batch_id="batch_id",
-            )
-
-
-        asyncio.run(main())
-        """
-        _response = await self._client_wrapper.httpx_client.request(
-            f"v1/agents/messages/batches/{jsonable_encoder(batch_id)}",
-            method="GET",
-            request_options=request_options,
-        )
-        try:
-            if 200 <= _response.status_code < 300:
-                return typing.cast(
-                    LettaBatchResponse,
-                    construct_type(
-                        type_=LettaBatchResponse,  # type: ignore
-                        object_=_response.json(),
-                    ),
-                )
-            if _response.status_code == 422:
-                raise UnprocessableEntityError(
-                    typing.cast(
-                        HttpValidationError,
-                        construct_type(
-                            type_=HttpValidationError,  # type: ignore
-                            object_=_response.json(),
-                        ),
-                    )
-                )
-            _response_json = _response.json()
-        except JSONDecodeError:
-            raise ApiError(status_code=_response.status_code, body=_response.text)
-        raise ApiError(status_code=_response.status_code, body=_response_json)
-
     async def search(
         self,
         *,
letta_client/base_client.py
CHANGED
@@ -17,6 +17,7 @@ from .providers.client import ProvidersClient
 from .runs.client import RunsClient
 from .steps.client import StepsClient
 from .tag.client import TagClient
+from .messages.client import MessagesClient
 from .voice.client import VoiceClient
 from .templates.client import TemplatesClient
 from .client_side_access_tokens.client import ClientSideAccessTokensClient
@@ -35,6 +36,7 @@ from .providers.client import AsyncProvidersClient
 from .runs.client import AsyncRunsClient
 from .steps.client import AsyncStepsClient
 from .tag.client import AsyncTagClient
+from .messages.client import AsyncMessagesClient
 from .voice.client import AsyncVoiceClient
 from .templates.client import AsyncTemplatesClient
 from .client_side_access_tokens.client import AsyncClientSideAccessTokensClient
@@ -112,6 +114,7 @@ class LettaBase:
         self.runs = RunsClient(client_wrapper=self._client_wrapper)
         self.steps = StepsClient(client_wrapper=self._client_wrapper)
         self.tag = TagClient(client_wrapper=self._client_wrapper)
+        self.messages = MessagesClient(client_wrapper=self._client_wrapper)
         self.voice = VoiceClient(client_wrapper=self._client_wrapper)
         self.templates = TemplatesClient(client_wrapper=self._client_wrapper)
         self.client_side_access_tokens = ClientSideAccessTokensClient(client_wrapper=self._client_wrapper)
@@ -189,6 +192,7 @@ class AsyncLettaBase:
         self.runs = AsyncRunsClient(client_wrapper=self._client_wrapper)
         self.steps = AsyncStepsClient(client_wrapper=self._client_wrapper)
         self.tag = AsyncTagClient(client_wrapper=self._client_wrapper)
+        self.messages = AsyncMessagesClient(client_wrapper=self._client_wrapper)
         self.voice = AsyncVoiceClient(client_wrapper=self._client_wrapper)
         self.templates = AsyncTemplatesClient(client_wrapper=self._client_wrapper)
         self.client_side_access_tokens = AsyncClientSideAccessTokensClient(client_wrapper=self._client_wrapper)
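The base-client change is purely additive: the generated `LettaBase` and `AsyncLettaBase` constructors attach one more sub-client, so `client.messages` now exists alongside `client.agents`, `client.runs`, and the rest. A small sanity check, assuming only the attribute assignments shown above:

# Both the sync and async clients expose the new messages sub-client after this change.
from letta_client import AsyncLetta, Letta

sync_client = Letta(token="YOUR_TOKEN")
async_client = AsyncLetta(token="YOUR_TOKEN")

assert hasattr(sync_client, "messages")   # MessagesClient
assert hasattr(async_client, "messages")  # AsyncMessagesClient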
letta_client/core/client_wrapper.py
CHANGED
@@ -16,7 +16,7 @@ class BaseClientWrapper:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "letta-client",
-            "X-Fern-SDK-Version": "0.1.116",
+            "X-Fern-SDK-Version": "0.1.118",
         }
         if self.token is not None:
             headers["Authorization"] = f"Bearer {self.token}"