letta-client 0.1.132__py3-none-any.whl → 0.1.134__py3-none-any.whl
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- letta_client/__init__.py +2 -0
- letta_client/core/client_wrapper.py +1 -1
- letta_client/models/client.py +38 -2
- letta_client/providers/client.py +41 -2
- letta_client/steps/client.py +6 -80
- letta_client/types/__init__.py +2 -0
- letta_client/types/group.py +10 -0
- letta_client/types/llm_config.py +5 -0
- letta_client/types/provider.py +11 -0
- letta_client/types/provider_type.py +24 -0
- letta_client/types/voice_sleeptime_manager.py +10 -0
- letta_client/types/voice_sleeptime_manager_update.py +10 -0
- {letta_client-0.1.132.dist-info → letta_client-0.1.134.dist-info}/METADATA +1 -1
- {letta_client-0.1.132.dist-info → letta_client-0.1.134.dist-info}/RECORD +15 -14
- {letta_client-0.1.132.dist-info → letta_client-0.1.134.dist-info}/WHEEL +0 -0
letta_client/__init__.py
CHANGED
@@ -158,6 +158,7 @@ from .types import (
     Passage,
     PipRequirement,
     Provider,
+    ProviderType,
     ReasoningContent,
     ReasoningMessage,
     ReasoningMessageSource,
@@ -476,6 +477,7 @@ __all__ = [
     "ProjectsListResponse",
     "ProjectsListResponseProjectsItem",
     "Provider",
+    "ProviderType",
     "ReasoningContent",
     "ReasoningMessage",
     "ReasoningMessageSource",

letta_client/core/client_wrapper.py
CHANGED
@@ -16,7 +16,7 @@ class BaseClientWrapper:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "letta-client",
-            "X-Fern-SDK-Version": "0.1.
+            "X-Fern-SDK-Version": "0.1.134",
         }
         if self.token is not None:
             headers["Authorization"] = f"Bearer {self.token}"
letta_client/models/client.py
CHANGED
@@ -5,6 +5,8 @@ import typing
 from ..core.request_options import RequestOptions
 from ..types.llm_config import LlmConfig
 from ..core.unchecked_base_model import construct_type
+from ..errors.unprocessable_entity_error import UnprocessableEntityError
+from ..types.http_validation_error import HttpValidationError
 from json.decoder import JSONDecodeError
 from ..core.api_error import ApiError
 from ..core.client_wrapper import AsyncClientWrapper
@@ -14,10 +16,14 @@ class ModelsClient:
     def __init__(self, *, client_wrapper: SyncClientWrapper):
         self._client_wrapper = client_wrapper
 
-    def list(
+    def list(
+        self, *, byok_only: typing.Optional[bool] = None, request_options: typing.Optional[RequestOptions] = None
+    ) -> typing.List[LlmConfig]:
         """
         Parameters
         ----------
+        byok_only : typing.Optional[bool]
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
@@ -38,6 +44,9 @@
         _response = self._client_wrapper.httpx_client.request(
             "v1/models/",
             method="GET",
+            params={
+                "byok_only": byok_only,
+            },
             request_options=request_options,
         )
         try:
@@ -49,6 +58,16 @@
                         object_=_response.json(),
                     ),
                 )
+            if _response.status_code == 422:
+                raise UnprocessableEntityError(
+                    typing.cast(
+                        HttpValidationError,
+                        construct_type(
+                            type_=HttpValidationError,  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
             _response_json = _response.json()
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
@@ -59,10 +78,14 @@ class AsyncModelsClient:
     def __init__(self, *, client_wrapper: AsyncClientWrapper):
         self._client_wrapper = client_wrapper
 
-    async def list(
+    async def list(
+        self, *, byok_only: typing.Optional[bool] = None, request_options: typing.Optional[RequestOptions] = None
+    ) -> typing.List[LlmConfig]:
         """
         Parameters
         ----------
+        byok_only : typing.Optional[bool]
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
 
@@ -91,6 +114,9 @@
         _response = await self._client_wrapper.httpx_client.request(
             "v1/models/",
             method="GET",
+            params={
+                "byok_only": byok_only,
+            },
             request_options=request_options,
         )
         try:
@@ -102,6 +128,16 @@
                         object_=_response.json(),
                     ),
                 )
+            if _response.status_code == 422:
+                raise UnprocessableEntityError(
+                    typing.cast(
+                        HttpValidationError,
+                        construct_type(
+                            type_=HttpValidationError,  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
             _response_json = _response.json()
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
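Net effect of the models/client.py changes: both the sync and async `models.list()` calls now accept an optional `byok_only` query parameter and surface HTTP 422 responses as `UnprocessableEntityError`. A minimal usage sketch; the token is a placeholder, and reading `byok_only` as "only models from bring-your-own-key providers" is an assumption based on the parameter name, not something the diff states:

from letta_client import Letta

client = Letta(token="YOUR_TOKEN")  # placeholder token

# Pass byok_only as a query parameter; omit it (or pass None) for the previous behavior.
configs = client.models.list(byok_only=True)
for llm_config in configs:
    # provider_name is the new optional field added to LlmConfig in this release.
    print(llm_config.provider_name)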
letta_client/providers/client.py
CHANGED
@@ -2,6 +2,7 @@
 
 import typing
 from ..core.client_wrapper import SyncClientWrapper
+from ..types.provider_type import ProviderType
 from ..core.request_options import RequestOptions
 from ..types.provider import Provider
 from ..core.unchecked_base_model import construct_type
@@ -23,6 +24,8 @@ class ProvidersClient:
     def list(
         self,
         *,
+        name: typing.Optional[str] = None,
+        provider_type: typing.Optional[ProviderType] = None,
         after: typing.Optional[str] = None,
         limit: typing.Optional[int] = None,
         request_options: typing.Optional[RequestOptions] = None,
@@ -32,6 +35,10 @@
 
         Parameters
         ----------
+        name : typing.Optional[str]
+
+        provider_type : typing.Optional[ProviderType]
+
         after : typing.Optional[str]
 
         limit : typing.Optional[int]
@@ -57,6 +64,8 @@
             "v1/providers/",
             method="GET",
             params={
+                "name": name,
+                "provider_type": provider_type,
                 "after": after,
                 "limit": limit,
             },
@@ -86,7 +95,14 @@
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
-    def create(
+    def create(
+        self,
+        *,
+        name: str,
+        provider_type: ProviderType,
+        api_key: str,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> Provider:
         """
         Create a new custom provider
 
@@ -95,6 +111,9 @@
         name : str
             The name of the provider.
 
+        provider_type : ProviderType
+            The type of the provider.
+
         api_key : str
             API key used for requests to the provider.
 
@@ -115,6 +134,7 @@
         )
         client.providers.create(
             name="name",
+            provider_type="anthropic",
             api_key="api_key",
         )
         """
@@ -123,6 +143,7 @@
             method="POST",
             json={
                 "name": name,
+                "provider_type": provider_type,
                 "api_key": api_key,
             },
             headers={
@@ -290,6 +311,8 @@ class AsyncProvidersClient:
     async def list(
         self,
         *,
+        name: typing.Optional[str] = None,
+        provider_type: typing.Optional[ProviderType] = None,
         after: typing.Optional[str] = None,
         limit: typing.Optional[int] = None,
         request_options: typing.Optional[RequestOptions] = None,
@@ -299,6 +322,10 @@ class AsyncProvidersClient:
 
         Parameters
         ----------
+        name : typing.Optional[str]
+
+        provider_type : typing.Optional[ProviderType]
+
         after : typing.Optional[str]
 
         limit : typing.Optional[int]
@@ -332,6 +359,8 @@ class AsyncProvidersClient:
             "v1/providers/",
             method="GET",
             params={
+                "name": name,
+                "provider_type": provider_type,
                 "after": after,
                 "limit": limit,
             },
@@ -362,7 +391,12 @@
             raise ApiError(status_code=_response.status_code, body=_response_json)
 
     async def create(
-        self,
+        self,
+        *,
+        name: str,
+        provider_type: ProviderType,
+        api_key: str,
+        request_options: typing.Optional[RequestOptions] = None,
     ) -> Provider:
         """
         Create a new custom provider
@@ -372,6 +406,9 @@ class AsyncProvidersClient:
         name : str
             The name of the provider.
 
+        provider_type : ProviderType
+            The type of the provider.
+
         api_key : str
             API key used for requests to the provider.
 
@@ -397,6 +434,7 @@
         async def main() -> None:
             await client.providers.create(
                 name="name",
+                provider_type="anthropic",
                 api_key="api_key",
             )
 
@@ -408,6 +446,7 @@
             method="POST",
             json={
                 "name": name,
+                "provider_type": provider_type,
                 "api_key": api_key,
             },
             headers={
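Taken together, the providers/client.py changes add optional `name` and `provider_type` filters to `providers.list()` and make `provider_type` part of the create payload. A short sketch based on the signatures above; the token, provider name, and API key are placeholders:

from letta_client import Letta

client = Letta(token="YOUR_TOKEN")  # placeholder token

# Filter the provider listing by type (and optionally by name); both filters default to None.
anthropic_providers = client.providers.list(provider_type="anthropic")

# provider_type is now passed in the create call alongside name and api_key.
provider = client.providers.create(
    name="my-anthropic",      # placeholder name
    provider_type="anthropic",
    api_key="sk-ant-...",     # placeholder key
)
print(provider.provider_type, provider.base_url)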
letta_client/steps/client.py
CHANGED
@@ -17,7 +17,7 @@ class StepsClient:
     def __init__(self, *, client_wrapper: SyncClientWrapper):
         self._client_wrapper = client_wrapper
 
-    def
+    def list(
         self,
         *,
         before: typing.Optional[str] = None,
@@ -75,10 +75,10 @@
         client = Letta(
             token="YOUR_TOKEN",
         )
-        client.steps.
+        client.steps.list()
         """
         _response = self._client_wrapper.httpx_client.request(
-            "v1/steps",
+            "v1/steps/",
             method="GET",
             params={
                 "before": before,
@@ -172,45 +172,12 @@
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
 
-    def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> None:
-        """
-        Parameters
-        ----------
-        request_options : typing.Optional[RequestOptions]
-            Request-specific configuration.
-
-        Returns
-        -------
-        None
-
-        Examples
-        --------
-        from letta_client import Letta
-
-        client = Letta(
-            token="YOUR_TOKEN",
-        )
-        client.steps.list()
-        """
-        _response = self._client_wrapper.httpx_client.request(
-            "v1/steps/",
-            method="GET",
-            request_options=request_options,
-        )
-        try:
-            if 200 <= _response.status_code < 300:
-                return
-            _response_json = _response.json()
-        except JSONDecodeError:
-            raise ApiError(status_code=_response.status_code, body=_response.text)
-        raise ApiError(status_code=_response.status_code, body=_response_json)
-
 
 class AsyncStepsClient:
     def __init__(self, *, client_wrapper: AsyncClientWrapper):
         self._client_wrapper = client_wrapper
 
-    async def
+    async def list(
         self,
         *,
         before: typing.Optional[str] = None,
@@ -273,13 +240,13 @@
 
 
         async def main() -> None:
-            await client.steps.
+            await client.steps.list()
 
 
         asyncio.run(main())
         """
         _response = await self._client_wrapper.httpx_client.request(
-            "v1/steps",
+            "v1/steps/",
             method="GET",
             params={
                 "before": before,
@@ -380,44 +347,3 @@
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
-
-    async def list(self, *, request_options: typing.Optional[RequestOptions] = None) -> None:
-        """
-        Parameters
-        ----------
-        request_options : typing.Optional[RequestOptions]
-            Request-specific configuration.
-
-        Returns
-        -------
-        None
-
-        Examples
-        --------
-        import asyncio
-
-        from letta_client import AsyncLetta
-
-        client = AsyncLetta(
-            token="YOUR_TOKEN",
-        )
-
-
-        async def main() -> None:
-            await client.steps.list()
-
-
-        asyncio.run(main())
-        """
-        _response = await self._client_wrapper.httpx_client.request(
-            "v1/steps/",
-            method="GET",
-            request_options=request_options,
-        )
-        try:
-            if 200 <= _response.status_code < 300:
-                return
-            _response_json = _response.json()
-        except JSONDecodeError:
-            raise ApiError(status_code=_response.status_code, body=_response.text)
-        raise ApiError(status_code=_response.status_code, body=_response_json)
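After this change the steps client exposes a single `list()` method on each client class, pointed at `v1/steps/`; the old no-op `list()` overload was removed. A sketch mirroring the docstring examples above (the token is a placeholder, and treating `before` as a pagination cursor is an assumption):

import asyncio

from letta_client import AsyncLetta, Letta

# Sync client: list steps, optionally passing before="step_id" to page backwards (assumed semantics).
client = Letta(token="YOUR_TOKEN")  # placeholder token
steps = client.steps.list()

# Async client: same call, awaited.
async_client = AsyncLetta(token="YOUR_TOKEN")


async def main() -> None:
    await async_client.steps.list()


asyncio.run(main())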
letta_client/types/__init__.py
CHANGED
@@ -161,6 +161,7 @@ from .parent_tool_rule import ParentToolRule
 from .passage import Passage
 from .pip_requirement import PipRequirement
 from .provider import Provider
+from .provider_type import ProviderType
 from .reasoning_content import ReasoningContent
 from .reasoning_message import ReasoningMessage
 from .reasoning_message_source import ReasoningMessageSource
@@ -390,6 +391,7 @@ __all__ = [
     "Passage",
     "PipRequirement",
     "Provider",
+    "ProviderType",
     "ReasoningContent",
     "ReasoningMessage",
     "ReasoningMessageSource",
letta_client/types/group.py
CHANGED
@@ -63,6 +63,16 @@ class Group(UncheckedBaseModel):
 
     """
 
+    max_message_buffer_length: typing.Optional[int] = pydantic.Field(default=None)
+    """
+    The desired maximum length of messages in the context window of the convo agent. This is a best effort, and may be off slightly due to user/assistant interleaving.
+    """
+
+    min_message_buffer_length: typing.Optional[int] = pydantic.Field(default=None)
+    """
+    The desired minimum length of messages in the context window of the convo agent. This is a best effort, and may be off-by-one due to user/assistant interleaving.
+    """
+
     if IS_PYDANTIC_V2:
         model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
     else:
letta_client/types/llm_config.py
CHANGED
@@ -38,6 +38,11 @@ class LlmConfig(UncheckedBaseModel):
     The endpoint for the model.
     """
 
+    provider_name: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    The provider name for the model.
+    """
+
     model_wrapper: typing.Optional[str] = pydantic.Field(default=None)
     """
     The wrapper for the model.
letta_client/types/provider.py
CHANGED
@@ -3,6 +3,7 @@
 from ..core.unchecked_base_model import UncheckedBaseModel
 import typing
 import pydantic
+from .provider_type import ProviderType
 import datetime as dt
 from ..core.pydantic_utilities import IS_PYDANTIC_V2
 
@@ -18,11 +19,21 @@ class Provider(UncheckedBaseModel):
     The name of the provider
     """
 
+    provider_type: ProviderType = pydantic.Field()
+    """
+    The type of the provider
+    """
+
     api_key: typing.Optional[str] = pydantic.Field(default=None)
     """
     API key used for requests to the provider.
     """
 
+    base_url: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Base URL for the provider.
+    """
+
     updated_at: typing.Optional[dt.datetime] = pydantic.Field(default=None)
     """
     The last update timestamp of the provider.

letta_client/types/provider_type.py
ADDED
@@ -0,0 +1,24 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+ProviderType = typing.Union[
+    typing.Literal[
+        "anthropic",
+        "google_ai",
+        "google_vertex",
+        "openai",
+        "letta",
+        "deepseek",
+        "lmstudio_openai",
+        "xai",
+        "mistral",
+        "ollama",
+        "groq",
+        "together",
+        "azure",
+        "vllm",
+        "bedrock",
+    ],
+    typing.Any,
+]
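ProviderType follows the usual Fern pattern of a Literal union joined with typing.Any, so plain strings such as "openai" satisfy the type checker while unrecognized values are still tolerated. A small illustrative snippet; the constant and comparison below are not part of the SDK:

import typing

from letta_client import ProviderType

# The Literal[...] member is the first argument of the union; its args are the documented values.
DOCUMENTED_PROVIDER_TYPES = typing.get_args(typing.get_args(ProviderType)[0])

provider_type: ProviderType = "openai"  # accepted by the type checker via the Literal member
print(provider_type in DOCUMENTED_PROVIDER_TYPES)  # True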

letta_client/types/voice_sleeptime_manager.py
CHANGED
@@ -13,6 +13,16 @@ class VoiceSleeptimeManager(UncheckedBaseModel):
 
     """
 
+    max_message_buffer_length: typing.Optional[int] = pydantic.Field(default=None)
+    """
+    The desired maximum length of messages in the context window of the convo agent. This is a best effort, and may be off slightly due to user/assistant interleaving.
+    """
+
+    min_message_buffer_length: typing.Optional[int] = pydantic.Field(default=None)
+    """
+    The desired minimum length of messages in the context window of the convo agent. This is a best effort, and may be off-by-one due to user/assistant interleaving.
+    """
+
     if IS_PYDANTIC_V2:
         model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
     else:

letta_client/types/voice_sleeptime_manager_update.py
CHANGED
@@ -13,6 +13,16 @@ class VoiceSleeptimeManagerUpdate(UncheckedBaseModel):
 
     """
 
+    max_message_buffer_length: typing.Optional[int] = pydantic.Field(default=None)
+    """
+    The desired maximum length of messages in the context window of the convo agent. This is a best effort, and may be off slightly due to user/assistant interleaving.
+    """
+
+    min_message_buffer_length: typing.Optional[int] = pydantic.Field(default=None)
+    """
+    The desired minimum length of messages in the context window of the convo agent. This is a best effort, and may be off-by-one due to user/assistant interleaving.
+    """
+
     if IS_PYDANTIC_V2:
         model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
     else:
{letta_client-0.1.132.dist-info → letta_client-0.1.134.dist-info}/RECORD
CHANGED
@@ -1,4 +1,4 @@
-letta_client/__init__.py,sha256=
+letta_client/__init__.py,sha256=ZEBs2nbufUCjHDLLgzzmNzlzecHsRonSc7ZoWT9E5aM,16557
 letta_client/agents/__init__.py,sha256=3oFWVxaaxkphkjGJVk31Llb9ll9dKoCGx3B_r3qqtes,1716
 letta_client/agents/blocks/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 letta_client/agents/blocks/client.py,sha256=u5zvutxoH_DqfSLWhRtNSRBC9_ezQDx682cxkxDz3JA,23822
@@ -62,7 +62,7 @@ letta_client/client_side_access_tokens/types/client_side_access_tokens_create_re
 letta_client/client_side_access_tokens/types/client_side_access_tokens_create_response_policy_data_item_access_item.py,sha256=R-H25IpNp9feSrW8Yj3h9O3UTMVvFniQJElogKxLuoE,254
 letta_client/core/__init__.py,sha256=OKbX2aCZXgHCDUsCouqv-OiX32xA6eFFCKIUH9M5Vzk,1591
 letta_client/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
-letta_client/core/client_wrapper.py,sha256=
+letta_client/core/client_wrapper.py,sha256=kvwwizFYZY8vw9sDV5YgEMVAjcC2YeCtNUBNRILdgh4,1998
 letta_client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 letta_client/core/file.py,sha256=d4NNbX8XvXP32z8KpK2Xovv33nFfruIrpz0QWxlgpZk,2663
 letta_client/core/http_client.py,sha256=Z77OIxIbL4OAB2IDqjRq_sYa5yNYAWfmdhdCSSvh6Y4,19552
@@ -104,14 +104,14 @@ letta_client/identities/properties/client.py,sha256=Nv7jOi5O8TmeZ1g0-TqnqiJ0hLcH
 letta_client/jobs/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 letta_client/jobs/client.py,sha256=z1Zq6dGs2xbf3EAFuD3-m-qbpbUeqpCBYqtIFKkGoMk,15622
 letta_client/models/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
-letta_client/models/client.py,sha256=
+letta_client/models/client.py,sha256=_G57OI1uQBJ5S4lFROCKhFVemKD7KK5nrOVlzCuas7U,4662
 letta_client/projects/__init__.py,sha256=Mg9xvTJ4N4xDkj521w3jvmCgrbW3CYx9LxG7kkdoyzs,211
 letta_client/projects/client.py,sha256=VNJyt5QyAQoZwPDL4PQSVrwBK6jb0vOxleTBuMBJSC4,4229
 letta_client/projects/types/__init__.py,sha256=1nE8QFsR2GukiQxkaRFQfBuk1u_yuO-emykjWq8pXRs,277
 letta_client/projects/types/projects_list_response.py,sha256=LdWVSnP8fqrVTcRfkd73N4wIa5_VkxrAUS-GFftkqHo,858
 letta_client/projects/types/projects_list_response_projects_item.py,sha256=7mFQdVQCNqvl2zBzVWzClENfF9N35T1Wpv3lgYbbAz0,605
 letta_client/providers/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
-letta_client/providers/client.py,sha256
+letta_client/providers/client.py,sha256=-9weA21WYKIcY0YOoE-UOWSPGHFWtuzH9yWZ3kwm78c,19000
 letta_client/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 letta_client/runs/__init__.py,sha256=0Mn3wMqzm7ppXeiwu9zfY_KlyzBbWSM1wt_rsx0NmM0,144
 letta_client/runs/client.py,sha256=6A0i8-fWzRgK1U5P4jeKKav-cRSjaaN5ttMh66ihwe8,17234
@@ -128,7 +128,7 @@ letta_client/sources/files/client.py,sha256=R-9zHK_wWtvW-2K7erQVVh9rR7a5JC4zxmTK
 letta_client/sources/passages/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 letta_client/sources/passages/client.py,sha256=n0QVtLC0W1X6_SjhiEGSl9oZexocnsLZYeYRAqV2BCk,4767
 letta_client/steps/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
-letta_client/steps/client.py,sha256=
+letta_client/steps/client.py,sha256=Vqw3coPITSFK8skl5fBa6YWqL_0UuAkYAFFeKipL0NU,11242
 letta_client/tags/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 letta_client/tags/client.py,sha256=1xIPtMWJ6ssAhPEFgl5CyJHyvND9MHCLIbEzQWxntZ0,5167
 letta_client/templates/__init__.py,sha256=6kqaRnkWVngMoV08wPrkA6urr_lCnE6FRIVq4jj4z1M,313
@@ -147,7 +147,7 @@ letta_client/tools/types/add_mcp_server_request.py,sha256=EieZjfOT95sjkpxXdqy7gl
 letta_client/tools/types/add_mcp_server_response_item.py,sha256=TWdsKqGb1INhYtpGnAckz0Pw4nZShumSp4pfocRfxCA,270
 letta_client/tools/types/delete_mcp_server_response_item.py,sha256=MeZObU-7tMSCd-S5yuUjNDse6A1hUz1LLjbko0pXaro,273
 letta_client/tools/types/list_mcp_servers_response_value.py,sha256=AIoXu4bO8QNSU7zjL1jj0Rg4313wVtPaTt13W0aevLQ,273
-letta_client/types/__init__.py,sha256=
+letta_client/types/__init__.py,sha256=f_1yLCH3hc8QJH0lv1NOGM0S1NR-gjoCoazCCT2DxQM,20719
 letta_client/types/action_model.py,sha256=y1e2XMv3skFaNJIBdYoBKgiORzGh05aOVvu-qVR9uHg,1240
 letta_client/types/action_parameters_model.py,sha256=LgKf5aPZG3-OHGxFdXiSokIDgce8c02xPYIAY05VgW8,828
 letta_client/types/action_response_model.py,sha256=yq2Fd9UU8j7vvtE3VqXUoRRvDzWcfJPj_95ynGdeHCs,824
@@ -243,7 +243,7 @@ letta_client/types/function_definition_input.py,sha256=UpoD7ftRpHquJ5zhy28TjXPBV
 letta_client/types/function_definition_output.py,sha256=Id0SzyiMHF5l25iKQhCN4sWJwBJ7AkYK-I5RDZy3_rc,741
 letta_client/types/function_output.py,sha256=7b8550BllXxtZQ3T3jfvZjcCU_ZGWNBvjlrMB8S2xas,578
 letta_client/types/function_tool.py,sha256=TOETpZdqgPIgd4g9JFo3yvDBpTx4lDFzJNZH8PxAjpI,697
-letta_client/types/group.py,sha256=
+letta_client/types/group.py,sha256=6Cv30-JOOfRsOG_T9d1rifW40-eYj9g6OR9BvdP7Ppc,2106
 letta_client/types/health.py,sha256=nQwx5ysn_cJMKUoqsfaPcGNSRSjfwX5S272UiSQJ03w,618
 letta_client/types/hidden_reasoning_message.py,sha256=yXIm8xuWhmCGo5v-s9rjFNW2hffv7K1KAuvkL8P9J3s,1528
 letta_client/types/hidden_reasoning_message_state.py,sha256=qotAgF_P4T7OEHzbhGDVFaLZYOs1ULMPVHmiFvoRIfM,174
@@ -273,7 +273,7 @@ letta_client/types/letta_request_config.py,sha256=b6K4QtDdHjcZKfBb1fugUuoPrT2N4d
 letta_client/types/letta_response.py,sha256=i5gAUTgWzIst_RP8I_zSh0GSnLIS3z--1BmK6EF1mkQ,1315
 letta_client/types/letta_streaming_request.py,sha256=jm0HLzfzWzIRs8uwtX33V5f5Ljw_hFOKOhPjdIZX9cA,1465
 letta_client/types/letta_usage_statistics.py,sha256=k6V72J2TEPd-RQBuUQxF3oylrAMcuSKBskd2nnZmGOw,1886
-letta_client/types/llm_config.py,sha256=
+letta_client/types/llm_config.py,sha256=i6mxKNAn2TdzAyOPcOGiBZwnpW12G9MJRUTxyq06Sd8,3814
 letta_client/types/llm_config_model_endpoint_type.py,sha256=HOSM5kIZDCNAVCWmASvAk52K819plqGlD66yKQ1xFkI,620
 letta_client/types/llm_config_reasoning_effort.py,sha256=AHL2nI5aeTfPhijnhaL3aiP8EoJhy_Wdupi2pyMm4sA,173
 letta_client/types/local_sandbox_config.py,sha256=jfe7akG_YrJJ8csLaLdev04Zg1x-PTN0XCAL4KifaZI,1387
@@ -304,7 +304,8 @@ letta_client/types/parameters_schema.py,sha256=ptXcwjuaCwqRhfizeiWAsu3pqT87Jcj_P
 letta_client/types/parent_tool_rule.py,sha256=zPTfn5epS8spEIw71HUbbSX2KYxlIPB-cGJ52UQmQ_M,964
 letta_client/types/passage.py,sha256=1OM19TyVCQEL1P3BC58hmzWfawZM4vejiKr0P11dOUk,3034
 letta_client/types/pip_requirement.py,sha256=Hmh7VpJhdSfFkafh6QwAehCp0MQUBXv1YAoYP-2wV2M,773
-letta_client/types/provider.py,sha256=
+letta_client/types/provider.py,sha256=qPLMJ-9oDl94YGP_5DaRTWblHHjKFc1PkS5OPp1lUwo,1304
+letta_client/types/provider_type.py,sha256=uEUNL2qcGizQfQp5kr9-egi6uBY-JJ4ASJA5J33utXw,443
 letta_client/types/reasoning_content.py,sha256=aId-87QjQ4sm_fuCmzIdZZghr-9DFeVV-Lv9x5iVw3I,995
 letta_client/types/reasoning_message.py,sha256=qZ7YVUqFHjZiEf1sUJPOqqZPnX_zmxYOdXPHwgSFdrg,1565
 letta_client/types/reasoning_message_source.py,sha256=GYOWGm2mje1yYbR8E2kbAeQS--VDrGlpsobEBQHE2cU,186
@@ -369,8 +370,8 @@ letta_client/types/user_message_content.py,sha256=JHOtxDEVm7FKDb6Ac2Hw7tAl5HCTDD
 letta_client/types/user_update.py,sha256=0Bl1OjO7bfmlpsGQ36dSh6DH1UB_wJOTNewS0wDLkP4,731
 letta_client/types/validation_error.py,sha256=ACDS7wL5nQbS8ymFhWljwbBJmbugNa8bs2O5xEZC3u4,680
 letta_client/types/validation_error_loc_item.py,sha256=LAtjCHIllWRBFXvAZ5QZpp7CPXjdtN9EB7HrLVo6EP0,128
-letta_client/types/voice_sleeptime_manager.py,sha256=
-letta_client/types/voice_sleeptime_manager_update.py,sha256=
+letta_client/types/voice_sleeptime_manager.py,sha256=DGXoHsOVes0HA3xU_qX8JQUzTSkk6HmKF5qW1-xlGlQ,1224
+letta_client/types/voice_sleeptime_manager_update.py,sha256=3fdAA7gSiPRr1sufzTTRdhovW1jn7kPU8m6oMvvOONU,1259
 letta_client/types/web_search_options.py,sha256=ENx_YMOh8Dxj6q57LvuM7Qmq_j2h5WJh9D91lbBnj90,863
 letta_client/types/web_search_options_search_context_size.py,sha256=RgJGV4rkuaCTcaS4zsw_MWzRlTYpyNT9QqzNvpd1Gno,182
 letta_client/types/web_search_options_user_location.py,sha256=4aXfFcwUBu7YNA5XBjfhmD6tgRb0e8LTFexmn-rkDfw,770
@@ -380,6 +381,6 @@ letta_client/voice/__init__.py,sha256=7hX85553PiRMtIMM12a0DSoFzsglNiUziYR2ekS84Q
 letta_client/voice/client.py,sha256=STjswa5oOLoP59QwTJvQwi73kgn0UzKOaXc2CsTRI4k,6912
 letta_client/voice/types/__init__.py,sha256=FRc3iKRTONE4N8Lf1IqvnqWZ2kXdrFFvkL7PxVcR8Ew,212
 letta_client/voice/types/create_voice_chat_completions_request_body.py,sha256=ZLfKgNK1T6IAwLEvaBVFfy7jEAoPUXP28n-nfmHkklc,391
-letta_client-0.1.
-letta_client-0.1.
-letta_client-0.1.
+letta_client-0.1.134.dist-info/METADATA,sha256=tvoWiquOwGccg2--YopZPWPlOe-QP8yEueQFPESMMYg,5042
+letta_client-0.1.134.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+letta_client-0.1.134.dist-info/RECORD,,

{letta_client-0.1.132.dist-info → letta_client-0.1.134.dist-info}/WHEEL
File without changes