letta-client 0.1.242__py3-none-any.whl → 0.1.243__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- letta_client/agents/client.py +20 -0
- letta_client/agents/raw_client.py +20 -0
- letta_client/core/client_wrapper.py +2 -2
- {letta_client-0.1.242.dist-info → letta_client-0.1.243.dist-info}/METADATA +1 -1
- {letta_client-0.1.242.dist-info → letta_client-0.1.243.dist-info}/RECORD +6 -6
- {letta_client-0.1.242.dist-info → letta_client-0.1.243.dist-info}/WHEEL +0 -0
letta_client/agents/client.py
CHANGED
@@ -215,6 +215,7 @@ class AgentsClient:
         max_tokens: typing.Optional[int] = OMIT,
         max_reasoning_tokens: typing.Optional[int] = OMIT,
         enable_reasoner: typing.Optional[bool] = OMIT,
+        reasoning: typing.Optional[bool] = OMIT,
         from_template: typing.Optional[str] = OMIT,
         template: typing.Optional[bool] = OMIT,
         project: typing.Optional[str] = OMIT,
@@ -316,6 +317,9 @@ class AgentsClient:
         enable_reasoner : typing.Optional[bool]
             Whether to enable internal extended thinking step for a reasoner model.
 
+        reasoning : typing.Optional[bool]
+            Whether to enable reasoning for this agent.
+
         from_template : typing.Optional[str]
             The template id used to configure the agent
 
@@ -409,6 +413,7 @@ class AgentsClient:
             max_tokens=max_tokens,
             max_reasoning_tokens=max_reasoning_tokens,
             enable_reasoner=enable_reasoner,
+            reasoning=reasoning,
             from_template=from_template,
             template=template,
             project=project,
@@ -649,6 +654,7 @@ class AgentsClient:
         message_buffer_autoclear: typing.Optional[bool] = OMIT,
         model: typing.Optional[str] = OMIT,
         embedding: typing.Optional[str] = OMIT,
+        reasoning: typing.Optional[bool] = OMIT,
         enable_sleeptime: typing.Optional[bool] = OMIT,
         response_format: typing.Optional[UpdateAgentResponseFormat] = OMIT,
         last_run_completion: typing.Optional[dt.datetime] = OMIT,
@@ -726,6 +732,9 @@ class AgentsClient:
         embedding : typing.Optional[str]
             The embedding configuration handle used by the agent, specified in the format provider/model-name.
 
+        reasoning : typing.Optional[bool]
+            Whether to enable reasoning for this agent.
+
         enable_sleeptime : typing.Optional[bool]
             If set to True, memory management will move to a background agent thread.
 
@@ -792,6 +801,7 @@ class AgentsClient:
             message_buffer_autoclear=message_buffer_autoclear,
             model=model,
             embedding=embedding,
+            reasoning=reasoning,
             enable_sleeptime=enable_sleeptime,
             response_format=response_format,
             last_run_completion=last_run_completion,
@@ -1095,6 +1105,7 @@ class AsyncAgentsClient:
         max_tokens: typing.Optional[int] = OMIT,
         max_reasoning_tokens: typing.Optional[int] = OMIT,
         enable_reasoner: typing.Optional[bool] = OMIT,
+        reasoning: typing.Optional[bool] = OMIT,
         from_template: typing.Optional[str] = OMIT,
         template: typing.Optional[bool] = OMIT,
         project: typing.Optional[str] = OMIT,
@@ -1196,6 +1207,9 @@ class AsyncAgentsClient:
         enable_reasoner : typing.Optional[bool]
             Whether to enable internal extended thinking step for a reasoner model.
 
+        reasoning : typing.Optional[bool]
+            Whether to enable reasoning for this agent.
+
         from_template : typing.Optional[str]
             The template id used to configure the agent
 
@@ -1297,6 +1311,7 @@ class AsyncAgentsClient:
             max_tokens=max_tokens,
             max_reasoning_tokens=max_reasoning_tokens,
             enable_reasoner=enable_reasoner,
+            reasoning=reasoning,
             from_template=from_template,
             template=template,
             project=project,
@@ -1577,6 +1592,7 @@ class AsyncAgentsClient:
         message_buffer_autoclear: typing.Optional[bool] = OMIT,
         model: typing.Optional[str] = OMIT,
         embedding: typing.Optional[str] = OMIT,
+        reasoning: typing.Optional[bool] = OMIT,
         enable_sleeptime: typing.Optional[bool] = OMIT,
         response_format: typing.Optional[UpdateAgentResponseFormat] = OMIT,
         last_run_completion: typing.Optional[dt.datetime] = OMIT,
@@ -1654,6 +1670,9 @@ class AsyncAgentsClient:
         embedding : typing.Optional[str]
             The embedding configuration handle used by the agent, specified in the format provider/model-name.
 
+        reasoning : typing.Optional[bool]
+            Whether to enable reasoning for this agent.
+
         enable_sleeptime : typing.Optional[bool]
             If set to True, memory management will move to a background agent thread.
 
@@ -1728,6 +1747,7 @@ class AsyncAgentsClient:
             message_buffer_autoclear=message_buffer_autoclear,
             model=model,
             embedding=embedding,
+            reasoning=reasoning,
             enable_sleeptime=enable_sleeptime,
             response_format=response_format,
             last_run_completion=last_run_completion,
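Taken together, these hunks add one optional reasoning flag to the create and modify signatures of AgentsClient and AsyncAgentsClient, defaulting to OMIT so existing callers are unaffected. A minimal sketch of how a caller might set it, assuming the package's usual entry point (Letta exported from letta_client, with the agents sub-client on client.agents) and a hypothetical local server URL; the model/embedding handles are illustrative only:

from letta_client import Letta

# Hypothetical local deployment; a hosted server would also need token-based auth.
client = Letta(base_url="http://localhost:8283")

# reasoning is the flag introduced in 0.1.243; leaving it out keeps the old
# behaviour because every new parameter defaults to OMIT.
agent = client.agents.create(
    model="openai/gpt-4o-mini",                 # handle format: provider/model-name
    embedding="openai/text-embedding-3-small",  # likewise provider/model-name
    reasoning=True,                             # "Whether to enable reasoning for this agent."
)

# modify() carries the same flag, so it can be toggled on an existing agent.
client.agents.modify(agent_id=agent.id, reasoning=False)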
letta_client/agents/raw_client.py
CHANGED
@@ -194,6 +194,7 @@ class RawAgentsClient:
         max_tokens: typing.Optional[int] = OMIT,
         max_reasoning_tokens: typing.Optional[int] = OMIT,
         enable_reasoner: typing.Optional[bool] = OMIT,
+        reasoning: typing.Optional[bool] = OMIT,
         from_template: typing.Optional[str] = OMIT,
         template: typing.Optional[bool] = OMIT,
         project: typing.Optional[str] = OMIT,
@@ -295,6 +296,9 @@ class RawAgentsClient:
         enable_reasoner : typing.Optional[bool]
             Whether to enable internal extended thinking step for a reasoner model.
 
+        reasoning : typing.Optional[bool]
+            Whether to enable reasoning for this agent.
+
         from_template : typing.Optional[str]
             The template id used to configure the agent
 
@@ -391,6 +395,7 @@ class RawAgentsClient:
                 "max_tokens": max_tokens,
                 "max_reasoning_tokens": max_reasoning_tokens,
                 "enable_reasoner": enable_reasoner,
+                "reasoning": reasoning,
                 "from_template": from_template,
                 "template": template,
                 "project": project,
@@ -755,6 +760,7 @@ class RawAgentsClient:
         message_buffer_autoclear: typing.Optional[bool] = OMIT,
         model: typing.Optional[str] = OMIT,
         embedding: typing.Optional[str] = OMIT,
+        reasoning: typing.Optional[bool] = OMIT,
         enable_sleeptime: typing.Optional[bool] = OMIT,
         response_format: typing.Optional[UpdateAgentResponseFormat] = OMIT,
         last_run_completion: typing.Optional[dt.datetime] = OMIT,
@@ -832,6 +838,9 @@ class RawAgentsClient:
         embedding : typing.Optional[str]
             The embedding configuration handle used by the agent, specified in the format provider/model-name.
 
+        reasoning : typing.Optional[bool]
+            Whether to enable reasoning for this agent.
+
         enable_sleeptime : typing.Optional[bool]
             If set to True, memory management will move to a background agent thread.
 
@@ -894,6 +903,7 @@ class RawAgentsClient:
                 "message_buffer_autoclear": message_buffer_autoclear,
                 "model": model,
                 "embedding": embedding,
+                "reasoning": reasoning,
                 "enable_sleeptime": enable_sleeptime,
                 "response_format": convert_and_respect_annotation_metadata(
                     object_=response_format, annotation=UpdateAgentResponseFormat, direction="write"
@@ -1226,6 +1236,7 @@ class AsyncRawAgentsClient:
         max_tokens: typing.Optional[int] = OMIT,
         max_reasoning_tokens: typing.Optional[int] = OMIT,
         enable_reasoner: typing.Optional[bool] = OMIT,
+        reasoning: typing.Optional[bool] = OMIT,
         from_template: typing.Optional[str] = OMIT,
         template: typing.Optional[bool] = OMIT,
         project: typing.Optional[str] = OMIT,
@@ -1327,6 +1338,9 @@ class AsyncRawAgentsClient:
         enable_reasoner : typing.Optional[bool]
             Whether to enable internal extended thinking step for a reasoner model.
 
+        reasoning : typing.Optional[bool]
+            Whether to enable reasoning for this agent.
+
         from_template : typing.Optional[str]
             The template id used to configure the agent
 
@@ -1423,6 +1437,7 @@ class AsyncRawAgentsClient:
                 "max_tokens": max_tokens,
                 "max_reasoning_tokens": max_reasoning_tokens,
                 "enable_reasoner": enable_reasoner,
+                "reasoning": reasoning,
                 "from_template": from_template,
                 "template": template,
                 "project": project,
@@ -1787,6 +1802,7 @@ class AsyncRawAgentsClient:
         message_buffer_autoclear: typing.Optional[bool] = OMIT,
         model: typing.Optional[str] = OMIT,
         embedding: typing.Optional[str] = OMIT,
+        reasoning: typing.Optional[bool] = OMIT,
         enable_sleeptime: typing.Optional[bool] = OMIT,
         response_format: typing.Optional[UpdateAgentResponseFormat] = OMIT,
         last_run_completion: typing.Optional[dt.datetime] = OMIT,
@@ -1864,6 +1880,9 @@ class AsyncRawAgentsClient:
         embedding : typing.Optional[str]
             The embedding configuration handle used by the agent, specified in the format provider/model-name.
 
+        reasoning : typing.Optional[bool]
+            Whether to enable reasoning for this agent.
+
         enable_sleeptime : typing.Optional[bool]
             If set to True, memory management will move to a background agent thread.
 
@@ -1926,6 +1945,7 @@ class AsyncRawAgentsClient:
                 "message_buffer_autoclear": message_buffer_autoclear,
                 "model": model,
                 "embedding": embedding,
+                "reasoning": reasoning,
                 "enable_sleeptime": enable_sleeptime,
                 "response_format": convert_and_respect_annotation_metadata(
                     object_=response_format, annotation=UpdateAgentResponseFormat, direction="write"
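raw_client.py makes the matching change at the request layer: the new reasoning value is serialized into the "reasoning" key of the JSON body for both create and modify, in the sync and async raw clients. A sketch of the async path, under the same assumptions as the synchronous example above plus an AsyncLetta export:

import asyncio

from letta_client import AsyncLetta


async def main() -> None:
    # Same hypothetical local server as the synchronous sketch.
    client = AsyncLetta(base_url="http://localhost:8283")

    # AsyncAgentsClient.create forwards reasoning= to AsyncRawAgentsClient,
    # which places it in the request body as "reasoning".
    agent = await client.agents.create(
        model="openai/gpt-4o-mini",
        embedding="openai/text-embedding-3-small",
        reasoning=True,
    )
    print(agent.id)


asyncio.run(main())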
letta_client/core/client_wrapper.py
CHANGED
@@ -24,10 +24,10 @@ class BaseClientWrapper:
 
     def get_headers(self) -> typing.Dict[str, str]:
         headers: typing.Dict[str, str] = {
-            "User-Agent": "letta-client/0.1.242",
+            "User-Agent": "letta-client/0.1.243",
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "letta-client",
-            "X-Fern-SDK-Version": "0.1.242",
+            "X-Fern-SDK-Version": "0.1.243",
             **(self.get_custom_headers() or {}),
         }
         if self._project is not None:
{letta_client-0.1.242.dist-info → letta_client-0.1.243.dist-info}/RECORD
CHANGED
@@ -3,7 +3,7 @@ letta_client/agents/__init__.py,sha256=JkuWGGNJsCfnMr2DFzQ1SiqEB1tcFZnafdidODi0_
 letta_client/agents/blocks/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
 letta_client/agents/blocks/client.py,sha256=Akx-1SYEXkmdtLtytPtdFNhVts8JkjC2aMQnnWgd8Ug,14735
 letta_client/agents/blocks/raw_client.py,sha256=7tdlieWtGyMe1G5Ne9Rcujvr43DbD4K3hVJ7eiJNuFo,24454
-letta_client/agents/client.py,sha256=
+letta_client/agents/client.py,sha256=1WZjtnWAdDV_suXJ1xnCte-cMKVEiw0bTWvQ3KvEhcw,68754
 letta_client/agents/context/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
 letta_client/agents/context/client.py,sha256=fhpJFWRs6INGreRyEw9gsFnlUWR48vIHbN_jVIHIBrw,3052
 letta_client/agents/context/raw_client.py,sha256=j2gko-oEFWuCgPkcX9jCv31OWvR6sTOtAYcSWllXYDs,4747
@@ -35,7 +35,7 @@ letta_client/agents/messages/types/messages_preview_raw_payload_request.py,sha25
 letta_client/agents/passages/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
 letta_client/agents/passages/client.py,sha256=XHPpqOH2BDjHkegTRM9MRdDVxW5VH40ERSFvWchWT48,16785
 letta_client/agents/passages/raw_client.py,sha256=TnNrFsnrexrPVmemkFbRIBfFMcq1Iap2qk23L7mr1Z0,25710
-letta_client/agents/raw_client.py,sha256=
+letta_client/agents/raw_client.py,sha256=0bK7N9RIE9sic4_Miy_w4JYsXbBlPs-UYGmJRiIKF4g,90101
 letta_client/agents/sources/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
 letta_client/agents/sources/client.py,sha256=lCqB6FF9svrwf0oZSFs41WKlMXc-YRhUeb4FZkHbicM,6868
 letta_client/agents/sources/raw_client.py,sha256=ts4c5UBuXzrHU-lFWWrYniQqrMEc8SN0rfiqNXJLP5Y,12399
@@ -92,7 +92,7 @@ letta_client/client_side_access_tokens/types/client_side_access_tokens_list_clie
 letta_client/client_side_access_tokens/types/client_side_access_tokens_list_client_side_access_tokens_response_tokens_item_policy_data_item_access_item.py,sha256=kNHfEWFl7u71Pu8NPqutod0a2NXfvq8il05Hqm0iBB4,284
 letta_client/core/__init__.py,sha256=tpn7rjb6C2UIkYZYIqdrNpI7Yax2jw88sXh2baxaxAI,1715
 letta_client/core/api_error.py,sha256=44vPoTyWN59gonCIZMdzw7M1uspygiLnr3GNFOoVL2Q,614
-letta_client/core/client_wrapper.py,sha256=
+letta_client/core/client_wrapper.py,sha256=sl3wXpkRFyOAKOcfgN7Li-ZqpTXhf1ziSVdV-uNwytc,2776
 letta_client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 letta_client/core/file.py,sha256=d4NNbX8XvXP32z8KpK2Xovv33nFfruIrpz0QWxlgpZk,2663
 letta_client/core/force_multipart.py,sha256=awxh5MtcRYe74ehY8U76jzv6fYM_w_D3Rur7KQQzSDk,429
@@ -500,6 +500,6 @@ letta_client/version.py,sha256=bttKLbIhO3UonCYQlqs600zzbQgfhCCMjeXR9WRzid4,79
 letta_client/voice/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
 letta_client/voice/client.py,sha256=EbIVOQh4HXqU9McATxwga08STk-HUwPEAUr_UHqyKHg,3748
 letta_client/voice/raw_client.py,sha256=KvM_3GXuSf51bubM0RVBnxvlf20qZTFMnaA_BzhXzjQ,5938
-letta_client-0.1.
-letta_client-0.1.
-letta_client-0.1.
+letta_client-0.1.243.dist-info/METADATA,sha256=7TXSPJYxRfIgj5IWxf5nn363z2ZpsmUK8zLIGfcjOi0,5781
+letta_client-0.1.243.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+letta_client-0.1.243.dist-info/RECORD,,
{letta_client-0.1.242.dist-info → letta_client-0.1.243.dist-info}/WHEEL
File without changes