letta-client 0.1.84__py3-none-any.whl → 0.1.86__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of letta-client might be problematic.

Files changed (35)
  1. letta_client/__init__.py +14 -2
  2. letta_client/agents/__init__.py +4 -0
  3. letta_client/agents/client.py +30 -0
  4. letta_client/agents/messages/types/messages_modify_response.py +8 -1
  5. letta_client/agents/types/__init__.py +8 -0
  6. letta_client/agents/types/agents_search_response_agents_item_llm_config.py +8 -0
  7. letta_client/agents/types/agents_search_response_agents_item_llm_config_enable_reasoner.py +5 -0
  8. letta_client/agents/types/agents_search_response_agents_item_llm_config_max_reasoning_tokens.py +5 -0
  9. letta_client/core/client_wrapper.py +1 -1
  10. letta_client/templates/__init__.py +4 -0
  11. letta_client/templates/types/__init__.py +8 -0
  12. letta_client/templates/types/templates_create_agents_response_agents_item_llm_config.py +8 -0
  13. letta_client/templates/types/templates_create_agents_response_agents_item_llm_config_enable_reasoner.py +5 -0
  14. letta_client/templates/types/templates_create_agents_response_agents_item_llm_config_max_reasoning_tokens.py +5 -0
  15. letta_client/tools/client.py +4 -26
  16. letta_client/types/__init__.py +4 -0
  17. letta_client/types/agent_schema.py +0 -3
  18. letta_client/types/completion_create_params_non_streaming_model.py +5 -4
  19. letta_client/types/completion_create_params_streaming_model.py +5 -4
  20. letta_client/types/core_memory_block_schema.py +0 -2
  21. letta_client/types/file_file.py +1 -1
  22. letta_client/types/hidden_reasoning_message.py +39 -0
  23. letta_client/types/hidden_reasoning_message_state.py +5 -0
  24. letta_client/types/letta_message_union.py +8 -1
  25. letta_client/types/llm_config.py +10 -0
  26. letta_client/types/reasoning_message.py +2 -0
  27. letta_client/types/tool_env_var_schema.py +0 -1
  28. letta_client/types/tool_schema.py +0 -1
  29. letta_client/voice/__init__.py +2 -2
  30. letta_client/voice/client.py +7 -19
  31. letta_client/voice/types/__init__.py +2 -2
  32. letta_client/voice/types/{create_voice_chat_completions_request_body.py → create_voice_chat_completions_request.py} +1 -3
  33. {letta_client-0.1.84.dist-info → letta_client-0.1.86.dist-info}/METADATA +1 -1
  34. {letta_client-0.1.84.dist-info → letta_client-0.1.86.dist-info}/RECORD +35 -29
  35. {letta_client-0.1.84.dist-info → letta_client-0.1.86.dist-info}/WHEEL +0 -0
letta_client/__init__.py CHANGED
@@ -92,6 +92,8 @@ from .types import (
  GroupCreate,
  GroupCreateManagerConfig,
  Health,
+ HiddenReasoningMessage,
+ HiddenReasoningMessageState,
  HttpValidationError,
  Identity,
  IdentityCreate,
@@ -261,8 +263,10 @@ from .agents import (
  AgentsSearchResponseAgentsItemLastUpdatedById,
  AgentsSearchResponseAgentsItemLastUpdatedByIdItem,
  AgentsSearchResponseAgentsItemLlmConfig,
+ AgentsSearchResponseAgentsItemLlmConfigEnableReasoner,
  AgentsSearchResponseAgentsItemLlmConfigHandle,
  AgentsSearchResponseAgentsItemLlmConfigHandleItem,
+ AgentsSearchResponseAgentsItemLlmConfigMaxReasoningTokens,
  AgentsSearchResponseAgentsItemLlmConfigMaxTokens,
  AgentsSearchResponseAgentsItemLlmConfigMaxTokensItem,
  AgentsSearchResponseAgentsItemLlmConfigModelEndpoint,
@@ -459,8 +463,10 @@ from .templates import (
  TemplatesCreateAgentsResponseAgentsItemLastUpdatedById,
  TemplatesCreateAgentsResponseAgentsItemLastUpdatedByIdItem,
  TemplatesCreateAgentsResponseAgentsItemLlmConfig,
+ TemplatesCreateAgentsResponseAgentsItemLlmConfigEnableReasoner,
  TemplatesCreateAgentsResponseAgentsItemLlmConfigHandle,
  TemplatesCreateAgentsResponseAgentsItemLlmConfigHandleItem,
+ TemplatesCreateAgentsResponseAgentsItemLlmConfigMaxReasoningTokens,
  TemplatesCreateAgentsResponseAgentsItemLlmConfigMaxTokens,
  TemplatesCreateAgentsResponseAgentsItemLlmConfigMaxTokensItem,
  TemplatesCreateAgentsResponseAgentsItemLlmConfigModelEndpoint,
@@ -630,7 +636,7 @@ from .tools import (
  ListMcpServersResponseValue,
  )
  from .version import __version__
- from .voice import CreateVoiceChatCompletionsRequestBody
+ from .voice import CreateVoiceChatCompletionsRequest

  __all__ = [
  "ActionModel",
@@ -677,8 +683,10 @@ __all__ = [
  "AgentsSearchResponseAgentsItemLastUpdatedById",
  "AgentsSearchResponseAgentsItemLastUpdatedByIdItem",
  "AgentsSearchResponseAgentsItemLlmConfig",
+ "AgentsSearchResponseAgentsItemLlmConfigEnableReasoner",
  "AgentsSearchResponseAgentsItemLlmConfigHandle",
  "AgentsSearchResponseAgentsItemLlmConfigHandleItem",
+ "AgentsSearchResponseAgentsItemLlmConfigMaxReasoningTokens",
  "AgentsSearchResponseAgentsItemLlmConfigMaxTokens",
  "AgentsSearchResponseAgentsItemLlmConfigMaxTokensItem",
  "AgentsSearchResponseAgentsItemLlmConfigModelEndpoint",
@@ -910,7 +918,7 @@ __all__ = [
  "CoreMemoryBlockSchema",
  "CreateAgentRequestToolRulesItem",
  "CreateBlock",
- "CreateVoiceChatCompletionsRequestBody",
+ "CreateVoiceChatCompletionsRequest",
  "DeleteMcpServerResponseItem",
  "DynamicManager",
  "E2BSandboxConfig",
@@ -928,6 +936,8 @@ __all__ = [
  "GroupCreate",
  "GroupCreateManagerConfig",
  "Health",
+ "HiddenReasoningMessage",
+ "HiddenReasoningMessageState",
  "HttpValidationError",
  "Identity",
  "IdentityCreate",
@@ -1040,8 +1050,10 @@ __all__ = [
  "TemplatesCreateAgentsResponseAgentsItemLastUpdatedById",
  "TemplatesCreateAgentsResponseAgentsItemLastUpdatedByIdItem",
  "TemplatesCreateAgentsResponseAgentsItemLlmConfig",
+ "TemplatesCreateAgentsResponseAgentsItemLlmConfigEnableReasoner",
  "TemplatesCreateAgentsResponseAgentsItemLlmConfigHandle",
  "TemplatesCreateAgentsResponseAgentsItemLlmConfigHandleItem",
+ "TemplatesCreateAgentsResponseAgentsItemLlmConfigMaxReasoningTokens",
  "TemplatesCreateAgentsResponseAgentsItemLlmConfigMaxTokens",
  "TemplatesCreateAgentsResponseAgentsItemLlmConfigMaxTokensItem",
  "TemplatesCreateAgentsResponseAgentsItemLlmConfigModelEndpoint",
letta_client/agents/__init__.py CHANGED
@@ -35,8 +35,10 @@ from .types import (
  AgentsSearchResponseAgentsItemLastUpdatedById,
  AgentsSearchResponseAgentsItemLastUpdatedByIdItem,
  AgentsSearchResponseAgentsItemLlmConfig,
+ AgentsSearchResponseAgentsItemLlmConfigEnableReasoner,
  AgentsSearchResponseAgentsItemLlmConfigHandle,
  AgentsSearchResponseAgentsItemLlmConfigHandleItem,
+ AgentsSearchResponseAgentsItemLlmConfigMaxReasoningTokens,
  AgentsSearchResponseAgentsItemLlmConfigMaxTokens,
  AgentsSearchResponseAgentsItemLlmConfigMaxTokensItem,
  AgentsSearchResponseAgentsItemLlmConfigModelEndpoint,
@@ -241,8 +243,10 @@ __all__ = [
  "AgentsSearchResponseAgentsItemLastUpdatedById",
  "AgentsSearchResponseAgentsItemLastUpdatedByIdItem",
  "AgentsSearchResponseAgentsItemLlmConfig",
+ "AgentsSearchResponseAgentsItemLlmConfigEnableReasoner",
  "AgentsSearchResponseAgentsItemLlmConfigHandle",
  "AgentsSearchResponseAgentsItemLlmConfigHandleItem",
+ "AgentsSearchResponseAgentsItemLlmConfigMaxReasoningTokens",
  "AgentsSearchResponseAgentsItemLlmConfigMaxTokens",
  "AgentsSearchResponseAgentsItemLlmConfigMaxTokensItem",
  "AgentsSearchResponseAgentsItemLlmConfigModelEndpoint",
letta_client/agents/client.py CHANGED
@@ -218,6 +218,9 @@ class AgentsClient:
  embedding: typing.Optional[str] = OMIT,
  context_window_limit: typing.Optional[int] = OMIT,
  embedding_chunk_size: typing.Optional[int] = OMIT,
+ max_tokens: typing.Optional[int] = OMIT,
+ max_reasoning_tokens: typing.Optional[int] = OMIT,
+ enable_reasoner: typing.Optional[bool] = OMIT,
  from_template: typing.Optional[str] = OMIT,
  template: typing.Optional[bool] = OMIT,
  create_agent_request_project: typing.Optional[str] = OMIT,
@@ -303,6 +306,15 @@
  embedding_chunk_size : typing.Optional[int]
  The embedding chunk size used by the agent.

+ max_tokens : typing.Optional[int]
+ The maximum number of tokens to generate, including reasoning step. If not set, the model will use its default value.
+
+ max_reasoning_tokens : typing.Optional[int]
+ The maximum number of tokens to generate for reasoning step. If not set, the model will use its default value.
+
+ enable_reasoner : typing.Optional[bool]
+ Whether to enable internal extended thinking step for a reasoner model.
+
  from_template : typing.Optional[str]
  The template id used to configure the agent

@@ -386,6 +398,9 @@
  "embedding": embedding,
  "context_window_limit": context_window_limit,
  "embedding_chunk_size": embedding_chunk_size,
+ "max_tokens": max_tokens,
+ "max_reasoning_tokens": max_reasoning_tokens,
+ "enable_reasoner": enable_reasoner,
  "from_template": from_template,
  "template": template,
  "project": create_agent_request_project,
@@ -1307,6 +1322,9 @@ class AsyncAgentsClient:
  embedding: typing.Optional[str] = OMIT,
  context_window_limit: typing.Optional[int] = OMIT,
  embedding_chunk_size: typing.Optional[int] = OMIT,
+ max_tokens: typing.Optional[int] = OMIT,
+ max_reasoning_tokens: typing.Optional[int] = OMIT,
+ enable_reasoner: typing.Optional[bool] = OMIT,
  from_template: typing.Optional[str] = OMIT,
  template: typing.Optional[bool] = OMIT,
  create_agent_request_project: typing.Optional[str] = OMIT,
@@ -1392,6 +1410,15 @@
  embedding_chunk_size : typing.Optional[int]
  The embedding chunk size used by the agent.

+ max_tokens : typing.Optional[int]
+ The maximum number of tokens to generate, including reasoning step. If not set, the model will use its default value.
+
+ max_reasoning_tokens : typing.Optional[int]
+ The maximum number of tokens to generate for reasoning step. If not set, the model will use its default value.
+
+ enable_reasoner : typing.Optional[bool]
+ Whether to enable internal extended thinking step for a reasoner model.
+
  from_template : typing.Optional[str]
  The template id used to configure the agent

@@ -1483,6 +1510,9 @@
  "embedding": embedding,
  "context_window_limit": context_window_limit,
  "embedding_chunk_size": embedding_chunk_size,
+ "max_tokens": max_tokens,
+ "max_reasoning_tokens": max_reasoning_tokens,
+ "enable_reasoner": enable_reasoner,
  "from_template": from_template,
  "template": template,
  "project": create_agent_request_project,
letta_client/agents/messages/types/messages_modify_response.py CHANGED
@@ -4,10 +4,17 @@ import typing
  from ....types.system_message import SystemMessage
  from ....types.user_message import UserMessage
  from ....types.reasoning_message import ReasoningMessage
+ from ....types.hidden_reasoning_message import HiddenReasoningMessage
  from ....types.tool_call_message import ToolCallMessage
  from ....types.tool_return_message import ToolReturnMessage
  from ....types.assistant_message import AssistantMessage

  MessagesModifyResponse = typing.Union[
- SystemMessage, UserMessage, ReasoningMessage, ToolCallMessage, ToolReturnMessage, AssistantMessage
+ SystemMessage,
+ UserMessage,
+ ReasoningMessage,
+ HiddenReasoningMessage,
+ ToolCallMessage,
+ ToolReturnMessage,
+ AssistantMessage,
  ]
letta_client/agents/types/__init__.py CHANGED
@@ -62,8 +62,14 @@ from .agents_search_response_agents_item_last_updated_by_id_item import (
  AgentsSearchResponseAgentsItemLastUpdatedByIdItem,
  )
  from .agents_search_response_agents_item_llm_config import AgentsSearchResponseAgentsItemLlmConfig
+ from .agents_search_response_agents_item_llm_config_enable_reasoner import (
+ AgentsSearchResponseAgentsItemLlmConfigEnableReasoner,
+ )
  from .agents_search_response_agents_item_llm_config_handle import AgentsSearchResponseAgentsItemLlmConfigHandle
  from .agents_search_response_agents_item_llm_config_handle_item import AgentsSearchResponseAgentsItemLlmConfigHandleItem
+ from .agents_search_response_agents_item_llm_config_max_reasoning_tokens import (
+ AgentsSearchResponseAgentsItemLlmConfigMaxReasoningTokens,
+ )
  from .agents_search_response_agents_item_llm_config_max_tokens import AgentsSearchResponseAgentsItemLlmConfigMaxTokens
  from .agents_search_response_agents_item_llm_config_max_tokens_item import (
  AgentsSearchResponseAgentsItemLlmConfigMaxTokensItem,
@@ -515,8 +521,10 @@ __all__ = [
  "AgentsSearchResponseAgentsItemLastUpdatedById",
  "AgentsSearchResponseAgentsItemLastUpdatedByIdItem",
  "AgentsSearchResponseAgentsItemLlmConfig",
+ "AgentsSearchResponseAgentsItemLlmConfigEnableReasoner",
  "AgentsSearchResponseAgentsItemLlmConfigHandle",
  "AgentsSearchResponseAgentsItemLlmConfigHandleItem",
+ "AgentsSearchResponseAgentsItemLlmConfigMaxReasoningTokens",
  "AgentsSearchResponseAgentsItemLlmConfigMaxTokens",
  "AgentsSearchResponseAgentsItemLlmConfigMaxTokensItem",
  "AgentsSearchResponseAgentsItemLlmConfigModelEndpoint",
letta_client/agents/types/agents_search_response_agents_item_llm_config.py CHANGED
@@ -19,6 +19,12 @@ from .agents_search_response_agents_item_llm_config_temperature import (
  AgentsSearchResponseAgentsItemLlmConfigTemperature,
  )
  from .agents_search_response_agents_item_llm_config_max_tokens import AgentsSearchResponseAgentsItemLlmConfigMaxTokens
+ from .agents_search_response_agents_item_llm_config_enable_reasoner import (
+ AgentsSearchResponseAgentsItemLlmConfigEnableReasoner,
+ )
+ from .agents_search_response_agents_item_llm_config_max_reasoning_tokens import (
+ AgentsSearchResponseAgentsItemLlmConfigMaxReasoningTokens,
+ )
  from ...core.pydantic_utilities import IS_PYDANTIC_V2
  import pydantic

@@ -35,6 +41,8 @@ class AgentsSearchResponseAgentsItemLlmConfig(UncheckedBaseModel):
  handle: typing.Optional[AgentsSearchResponseAgentsItemLlmConfigHandle] = None
  temperature: typing.Optional[AgentsSearchResponseAgentsItemLlmConfigTemperature] = None
  max_tokens: typing.Optional[AgentsSearchResponseAgentsItemLlmConfigMaxTokens] = None
+ enable_reasoner: typing.Optional[AgentsSearchResponseAgentsItemLlmConfigEnableReasoner] = None
+ max_reasoning_tokens: typing.Optional[AgentsSearchResponseAgentsItemLlmConfigMaxReasoningTokens] = None

  if IS_PYDANTIC_V2:
  model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
letta_client/agents/types/agents_search_response_agents_item_llm_config_enable_reasoner.py ADDED
@@ -0,0 +1,5 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import typing
+
+ AgentsSearchResponseAgentsItemLlmConfigEnableReasoner = typing.Union[bool, typing.Optional[typing.Any]]
letta_client/agents/types/agents_search_response_agents_item_llm_config_max_reasoning_tokens.py ADDED
@@ -0,0 +1,5 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import typing
+
+ AgentsSearchResponseAgentsItemLlmConfigMaxReasoningTokens = typing.Union[float, typing.Optional[typing.Any]]
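The agent create methods in letta_client/agents/client.py above now accept max_tokens, max_reasoning_tokens and enable_reasoner and forward them in the request body, and the search/LLM-config response types gain the matching optional fields. A minimal sketch of passing the new arguments, assuming the package's top-level client class is Letta, that it takes a token keyword, and that it exposes the agents client as .agents (none of which is shown in this diff); every other create field keeps its default:

    from letta_client import Letta  # assumed top-level client class, not part of this diff

    client = Letta(token="...")  # hypothetical credentials

    # Create an agent with the reasoning controls added in 0.1.86.
    agent = client.agents.create(
        max_tokens=4096,            # total generation budget, including the reasoning step
        max_reasoning_tokens=1024,  # budget for the reasoning step; LlmConfig documents a 1024 minimum
        enable_reasoner=True,       # opt in to the extended thinking step on reasoner-style models
    )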
letta_client/core/client_wrapper.py CHANGED
@@ -16,7 +16,7 @@ class BaseClientWrapper:
  headers: typing.Dict[str, str] = {
  "X-Fern-Language": "Python",
  "X-Fern-SDK-Name": "letta-client",
- "X-Fern-SDK-Version": "0.1.84",
+ "X-Fern-SDK-Version": "0.1.86",
  }
  if self.token is not None:
  headers["Authorization"] = f"Bearer {self.token}"
letta_client/templates/__init__.py CHANGED
@@ -30,8 +30,10 @@ from .types import (
  TemplatesCreateAgentsResponseAgentsItemLastUpdatedById,
  TemplatesCreateAgentsResponseAgentsItemLastUpdatedByIdItem,
  TemplatesCreateAgentsResponseAgentsItemLlmConfig,
+ TemplatesCreateAgentsResponseAgentsItemLlmConfigEnableReasoner,
  TemplatesCreateAgentsResponseAgentsItemLlmConfigHandle,
  TemplatesCreateAgentsResponseAgentsItemLlmConfigHandleItem,
+ TemplatesCreateAgentsResponseAgentsItemLlmConfigMaxReasoningTokens,
  TemplatesCreateAgentsResponseAgentsItemLlmConfigMaxTokens,
  TemplatesCreateAgentsResponseAgentsItemLlmConfigMaxTokensItem,
  TemplatesCreateAgentsResponseAgentsItemLlmConfigModelEndpoint,
@@ -225,8 +227,10 @@ __all__ = [
  "TemplatesCreateAgentsResponseAgentsItemLastUpdatedById",
  "TemplatesCreateAgentsResponseAgentsItemLastUpdatedByIdItem",
  "TemplatesCreateAgentsResponseAgentsItemLlmConfig",
+ "TemplatesCreateAgentsResponseAgentsItemLlmConfigEnableReasoner",
  "TemplatesCreateAgentsResponseAgentsItemLlmConfigHandle",
  "TemplatesCreateAgentsResponseAgentsItemLlmConfigHandleItem",
+ "TemplatesCreateAgentsResponseAgentsItemLlmConfigMaxReasoningTokens",
  "TemplatesCreateAgentsResponseAgentsItemLlmConfigMaxTokens",
  "TemplatesCreateAgentsResponseAgentsItemLlmConfigMaxTokensItem",
  "TemplatesCreateAgentsResponseAgentsItemLlmConfigModelEndpoint",
letta_client/templates/types/__init__.py CHANGED
@@ -75,12 +75,18 @@ from .templates_create_agents_response_agents_item_last_updated_by_id_item impor
  TemplatesCreateAgentsResponseAgentsItemLastUpdatedByIdItem,
  )
  from .templates_create_agents_response_agents_item_llm_config import TemplatesCreateAgentsResponseAgentsItemLlmConfig
+ from .templates_create_agents_response_agents_item_llm_config_enable_reasoner import (
+ TemplatesCreateAgentsResponseAgentsItemLlmConfigEnableReasoner,
+ )
  from .templates_create_agents_response_agents_item_llm_config_handle import (
  TemplatesCreateAgentsResponseAgentsItemLlmConfigHandle,
  )
  from .templates_create_agents_response_agents_item_llm_config_handle_item import (
  TemplatesCreateAgentsResponseAgentsItemLlmConfigHandleItem,
  )
+ from .templates_create_agents_response_agents_item_llm_config_max_reasoning_tokens import (
+ TemplatesCreateAgentsResponseAgentsItemLlmConfigMaxReasoningTokens,
+ )
  from .templates_create_agents_response_agents_item_llm_config_max_tokens import (
  TemplatesCreateAgentsResponseAgentsItemLlmConfigMaxTokens,
  )
@@ -579,8 +585,10 @@ __all__ = [
  "TemplatesCreateAgentsResponseAgentsItemLastUpdatedById",
  "TemplatesCreateAgentsResponseAgentsItemLastUpdatedByIdItem",
  "TemplatesCreateAgentsResponseAgentsItemLlmConfig",
+ "TemplatesCreateAgentsResponseAgentsItemLlmConfigEnableReasoner",
  "TemplatesCreateAgentsResponseAgentsItemLlmConfigHandle",
  "TemplatesCreateAgentsResponseAgentsItemLlmConfigHandleItem",
+ "TemplatesCreateAgentsResponseAgentsItemLlmConfigMaxReasoningTokens",
  "TemplatesCreateAgentsResponseAgentsItemLlmConfigMaxTokens",
  "TemplatesCreateAgentsResponseAgentsItemLlmConfigMaxTokensItem",
  "TemplatesCreateAgentsResponseAgentsItemLlmConfigModelEndpoint",
letta_client/templates/types/templates_create_agents_response_agents_item_llm_config.py CHANGED
@@ -23,6 +23,12 @@ from .templates_create_agents_response_agents_item_llm_config_temperature import
  from .templates_create_agents_response_agents_item_llm_config_max_tokens import (
  TemplatesCreateAgentsResponseAgentsItemLlmConfigMaxTokens,
  )
+ from .templates_create_agents_response_agents_item_llm_config_enable_reasoner import (
+ TemplatesCreateAgentsResponseAgentsItemLlmConfigEnableReasoner,
+ )
+ from .templates_create_agents_response_agents_item_llm_config_max_reasoning_tokens import (
+ TemplatesCreateAgentsResponseAgentsItemLlmConfigMaxReasoningTokens,
+ )
  from ...core.pydantic_utilities import IS_PYDANTIC_V2
  import pydantic

@@ -39,6 +45,8 @@ class TemplatesCreateAgentsResponseAgentsItemLlmConfig(UncheckedBaseModel):
  handle: typing.Optional[TemplatesCreateAgentsResponseAgentsItemLlmConfigHandle] = None
  temperature: typing.Optional[TemplatesCreateAgentsResponseAgentsItemLlmConfigTemperature] = None
  max_tokens: typing.Optional[TemplatesCreateAgentsResponseAgentsItemLlmConfigMaxTokens] = None
+ enable_reasoner: typing.Optional[TemplatesCreateAgentsResponseAgentsItemLlmConfigEnableReasoner] = None
+ max_reasoning_tokens: typing.Optional[TemplatesCreateAgentsResponseAgentsItemLlmConfigMaxReasoningTokens] = None

  if IS_PYDANTIC_V2:
  model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
letta_client/templates/types/templates_create_agents_response_agents_item_llm_config_enable_reasoner.py ADDED
@@ -0,0 +1,5 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import typing
+
+ TemplatesCreateAgentsResponseAgentsItemLlmConfigEnableReasoner = typing.Union[bool, typing.Optional[typing.Any]]
letta_client/templates/types/templates_create_agents_response_agents_item_llm_config_max_reasoning_tokens.py ADDED
@@ -0,0 +1,5 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import typing
+
+ TemplatesCreateAgentsResponseAgentsItemLlmConfigMaxReasoningTokens = typing.Union[float, typing.Optional[typing.Any]]
letta_client/tools/client.py CHANGED
@@ -659,16 +659,12 @@
  raise ApiError(status_code=_response.status_code, body=_response.text)
  raise ApiError(status_code=_response.status_code, body=_response_json)

- def list_composio_apps(
- self, *, user_id: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None
- ) -> typing.List[AppModel]:
+ def list_composio_apps(self, *, request_options: typing.Optional[RequestOptions] = None) -> typing.List[AppModel]:
  """
  Get a list of all Composio apps

  Parameters
  ----------
- user_id : typing.Optional[str]
-
  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.

@@ -689,9 +685,6 @@
  _response = self._client_wrapper.httpx_client.request(
  "v1/tools/composio/apps",
  method="GET",
- headers={
- "user-id": str(user_id) if user_id is not None else None,
- },
  request_options=request_options,
  )
  try:
@@ -835,15 +828,13 @@
  raise ApiError(status_code=_response.status_code, body=_response_json)

  def list_mcp_servers(
- self, *, user_id: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None
+ self, *, request_options: typing.Optional[RequestOptions] = None
  ) -> typing.Dict[str, ListMcpServersResponseValue]:
  """
  Get a list of all configured MCP servers

  Parameters
  ----------
- user_id : typing.Optional[str]
-
  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.

@@ -864,9 +855,6 @@
  _response = self._client_wrapper.httpx_client.request(
  "v1/tools/mcp/servers",
  method="GET",
- headers={
- "user-id": str(user_id) if user_id is not None else None,
- },
  request_options=request_options,
  )
  try:
@@ -1836,15 +1824,13 @@
  raise ApiError(status_code=_response.status_code, body=_response_json)

  async def list_composio_apps(
- self, *, user_id: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None
+ self, *, request_options: typing.Optional[RequestOptions] = None
  ) -> typing.List[AppModel]:
  """
  Get a list of all Composio apps

  Parameters
  ----------
- user_id : typing.Optional[str]
-
  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.

@@ -1873,9 +1859,6 @@
  _response = await self._client_wrapper.httpx_client.request(
  "v1/tools/composio/apps",
  method="GET",
- headers={
- "user-id": str(user_id) if user_id is not None else None,
- },
  request_options=request_options,
  )
  try:
@@ -2035,15 +2018,13 @@
  raise ApiError(status_code=_response.status_code, body=_response_json)

  async def list_mcp_servers(
- self, *, user_id: typing.Optional[str] = None, request_options: typing.Optional[RequestOptions] = None
+ self, *, request_options: typing.Optional[RequestOptions] = None
  ) -> typing.Dict[str, ListMcpServersResponseValue]:
  """
  Get a list of all configured MCP servers

  Parameters
  ----------
- user_id : typing.Optional[str]
-
  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.

@@ -2072,9 +2053,6 @@
  _response = await self._client_wrapper.httpx_client.request(
  "v1/tools/mcp/servers",
  method="GET",
- headers={
- "user-id": str(user_id) if user_id is not None else None,
- },
  request_options=request_options,
  )
  try:
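Both list helpers (sync and async) drop their optional user_id parameter, and the user-id header is no longer sent with these requests. A sketch of the updated call sites, assuming the tools client is exposed as .tools on the same client object as above:

    # user_id is no longer accepted; only request_options remains.
    apps = client.tools.list_composio_apps()
    servers = client.tools.list_mcp_servers()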
letta_client/types/__init__.py CHANGED
@@ -91,6 +91,8 @@ from .group import Group
  from .group_create import GroupCreate
  from .group_create_manager_config import GroupCreateManagerConfig
  from .health import Health
+ from .hidden_reasoning_message import HiddenReasoningMessage
+ from .hidden_reasoning_message_state import HiddenReasoningMessageState
  from .http_validation_error import HttpValidationError
  from .identity import Identity
  from .identity_create import IdentityCreate
@@ -303,6 +305,8 @@ __all__ = [
  "GroupCreate",
  "GroupCreateManagerConfig",
  "Health",
+ "HiddenReasoningMessage",
+ "HiddenReasoningMessageState",
  "HttpValidationError",
  "Identity",
  "IdentityCreate",
letta_client/types/agent_schema.py CHANGED
@@ -22,9 +22,6 @@ class AgentSchema(UncheckedBaseModel):
  created_at: str
  description: typing.Optional[str] = None
  embedding_config: EmbeddingConfig
- groups: typing.List[typing.Optional[typing.Any]]
- identities: typing.List[typing.Optional[typing.Any]]
- is_deleted: bool
  llm_config: LlmConfig
  message_buffer_autoclear: bool
  messages: typing.List[MessageSchema]
letta_client/types/completion_create_params_non_streaming_model.py CHANGED
@@ -12,6 +12,11 @@ CompletionCreateParamsNonStreamingModel = typing.Union[
  typing.Literal["o1-preview-2024-09-12"],
  typing.Literal["o1-mini"],
  typing.Literal["o1-mini-2024-09-12"],
+ typing.Literal["computer-use-preview"],
+ typing.Literal["computer-use-preview-2025-02-04"],
+ typing.Literal["computer-use-preview-2025-03-11"],
+ typing.Literal["gpt-4.5-preview"],
+ typing.Literal["gpt-4.5-preview-2025-02-27"],
  typing.Literal["gpt-4o"],
  typing.Literal["gpt-4o-2024-11-20"],
  typing.Literal["gpt-4o-2024-08-06"],
@@ -21,10 +26,6 @@ CompletionCreateParamsNonStreamingModel = typing.Union[
  typing.Literal["gpt-4o-audio-preview-2024-12-17"],
  typing.Literal["gpt-4o-mini-audio-preview"],
  typing.Literal["gpt-4o-mini-audio-preview-2024-12-17"],
- typing.Literal["gpt-4o-search-preview"],
- typing.Literal["gpt-4o-mini-search-preview"],
- typing.Literal["gpt-4o-search-preview-2025-03-11"],
- typing.Literal["gpt-4o-mini-search-preview-2025-03-11"],
  typing.Literal["chatgpt-4o-latest"],
  typing.Literal["gpt-4o-mini"],
  typing.Literal["gpt-4o-mini-2024-07-18"],
letta_client/types/completion_create_params_streaming_model.py CHANGED
@@ -12,6 +12,11 @@ CompletionCreateParamsStreamingModel = typing.Union[
  typing.Literal["o1-preview-2024-09-12"],
  typing.Literal["o1-mini"],
  typing.Literal["o1-mini-2024-09-12"],
+ typing.Literal["computer-use-preview"],
+ typing.Literal["computer-use-preview-2025-02-04"],
+ typing.Literal["computer-use-preview-2025-03-11"],
+ typing.Literal["gpt-4.5-preview"],
+ typing.Literal["gpt-4.5-preview-2025-02-27"],
  typing.Literal["gpt-4o"],
  typing.Literal["gpt-4o-2024-11-20"],
  typing.Literal["gpt-4o-2024-08-06"],
@@ -21,10 +26,6 @@ CompletionCreateParamsStreamingModel = typing.Union[
  typing.Literal["gpt-4o-audio-preview-2024-12-17"],
  typing.Literal["gpt-4o-mini-audio-preview"],
  typing.Literal["gpt-4o-mini-audio-preview-2024-12-17"],
- typing.Literal["gpt-4o-search-preview"],
- typing.Literal["gpt-4o-mini-search-preview"],
- typing.Literal["gpt-4o-search-preview-2025-03-11"],
- typing.Literal["gpt-4o-mini-search-preview-2025-03-11"],
  typing.Literal["chatgpt-4o-latest"],
  typing.Literal["gpt-4o-mini"],
  typing.Literal["gpt-4o-mini-2024-07-18"],
letta_client/types/core_memory_block_schema.py CHANGED
@@ -11,8 +11,6 @@ import pydantic
  class CoreMemoryBlockSchema(UncheckedBaseModel):
  created_at: str
  description: typing.Optional[str] = None
- identities: typing.List[typing.Optional[typing.Any]]
- is_deleted: bool
  is_template: bool
  label: str
  limit: int
letta_client/types/file_file.py CHANGED
@@ -9,7 +9,7 @@ import pydantic
  class FileFile(UncheckedBaseModel):
  file_data: typing.Optional[str] = None
  file_id: typing.Optional[str] = None
- filename: typing.Optional[str] = None
+ file_name: typing.Optional[str] = None

  if IS_PYDANTIC_V2:
  model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
letta_client/types/hidden_reasoning_message.py ADDED
@@ -0,0 +1,39 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ from ..core.unchecked_base_model import UncheckedBaseModel
+ import datetime as dt
+ import typing
+ from .hidden_reasoning_message_state import HiddenReasoningMessageState
+ from ..core.pydantic_utilities import IS_PYDANTIC_V2
+ import pydantic
+
+
+ class HiddenReasoningMessage(UncheckedBaseModel):
+ """
+ Representation of an agent's internal reasoning where reasoning content
+ has been hidden from the response.
+
+ Args:
+ id (str): The ID of the message
+ date (datetime): The date the message was created in ISO format
+ name (Optional[str]): The name of the sender of the message
+ state (Literal["redacted", "omitted"]): Whether the reasoning
+ content was redacted by the provider or simply omitted by the API
+ hidden_reasoning (Optional[str]): The internal reasoning of the agent
+ """
+
+ id: str
+ date: dt.datetime
+ name: typing.Optional[str] = None
+ message_type: typing.Literal["hidden_reasoning_message"] = "hidden_reasoning_message"
+ state: HiddenReasoningMessageState
+ hidden_reasoning: typing.Optional[str] = None
+
+ if IS_PYDANTIC_V2:
+ model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
+ else:
+
+ class Config:
+ frozen = True
+ smart_union = True
+ extra = pydantic.Extra.allow
letta_client/types/hidden_reasoning_message_state.py ADDED
@@ -0,0 +1,5 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ import typing
+
+ HiddenReasoningMessageState = typing.Union[typing.Literal["redacted", "omitted"], typing.Any]
letta_client/types/letta_message_union.py CHANGED
@@ -4,10 +4,17 @@ import typing
  from .system_message import SystemMessage
  from .user_message import UserMessage
  from .reasoning_message import ReasoningMessage
+ from .hidden_reasoning_message import HiddenReasoningMessage
  from .tool_call_message import ToolCallMessage
  from .tool_return_message import ToolReturnMessage
  from .assistant_message import AssistantMessage

  LettaMessageUnion = typing.Union[
- SystemMessage, UserMessage, ReasoningMessage, ToolCallMessage, ToolReturnMessage, AssistantMessage
+ SystemMessage,
+ UserMessage,
+ ReasoningMessage,
+ HiddenReasoningMessage,
+ ToolCallMessage,
+ ToolReturnMessage,
+ AssistantMessage,
  ]
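With HiddenReasoningMessage added to LettaMessageUnion (and to MessagesModifyResponse earlier in this diff), code that branches on message_type needs one more case. A minimal sketch using only fields shown in this diff; the describe helper is hypothetical:

    from letta_client.types.letta_message_union import LettaMessageUnion

    def describe(message: LettaMessageUnion) -> str:
        # message_type is a literal discriminator on every member of the union
        if message.message_type == "reasoning_message":
            return f"reasoning: {message.reasoning}"
        if message.message_type == "hidden_reasoning_message":
            # state is "redacted" or "omitted"; hidden_reasoning may be None
            return f"hidden reasoning ({message.state})"
        return message.message_type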
letta_client/types/llm_config.py CHANGED
@@ -67,6 +67,16 @@ class LlmConfig(UncheckedBaseModel):
  The maximum number of tokens to generate. If not set, the model will use its default value.
  """

+ enable_reasoner: typing.Optional[bool] = pydantic.Field(default=None)
+ """
+ Whether or not the model should use extended thinking if it is a 'reasoning' style model
+ """
+
+ max_reasoning_tokens: typing.Optional[int] = pydantic.Field(default=None)
+ """
+ Configurable thinking budget for extended thinking, only used if enable_reasoner is True. Minimum value is 1024.
+ """
+
  if IS_PYDANTIC_V2:
  model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
  else:
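The two new LlmConfig fields can be read back from any object that embeds this model. A sketch, assuming a returned agent state exposes it as an llm_config attribute (attribute name not confirmed by this diff):

    cfg = agent.llm_config  # assumed attribute on a returned agent object
    if cfg.enable_reasoner:
        budget = cfg.max_reasoning_tokens or 1024  # field docstring notes a 1024 minimum
        print(f"extended thinking enabled, reasoning budget: {budget} tokens")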
letta_client/types/reasoning_message.py CHANGED
@@ -19,6 +19,7 @@ class ReasoningMessage(UncheckedBaseModel):
  source (Literal["reasoner_model", "non_reasoner_model"]): Whether the reasoning
  content was generated natively by a reasoner model or derived via prompting
  reasoning (str): The internal reasoning of the agent
+ signature (Optional[str]): The model-generated signature of the reasoning step
  """

  id: str
@@ -27,6 +28,7 @@
  message_type: typing.Literal["reasoning_message"] = "reasoning_message"
  source: typing.Optional[ReasoningMessageSource] = None
  reasoning: str
+ signature: typing.Optional[str] = None

  if IS_PYDANTIC_V2:
  model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
letta_client/types/tool_env_var_schema.py CHANGED
@@ -9,7 +9,6 @@ import pydantic
  class ToolEnvVarSchema(UncheckedBaseModel):
  created_at: str
  description: typing.Optional[str] = None
- is_deleted: bool
  key: str
  updated_at: str
  value: str
letta_client/types/tool_schema.py CHANGED
@@ -13,7 +13,6 @@ class ToolSchema(UncheckedBaseModel):
  args_json_schema: typing.Optional[typing.Any] = None
  created_at: str
  description: str
- is_deleted: bool
  json_schema: ToolJsonSchema
  name: str
  return_char_limit: int
letta_client/voice/__init__.py CHANGED
@@ -1,5 +1,5 @@
  # This file was auto-generated by Fern from our API Definition.

- from .types import CreateVoiceChatCompletionsRequestBody
+ from .types import CreateVoiceChatCompletionsRequest

- __all__ = ["CreateVoiceChatCompletionsRequestBody"]
+ __all__ = ["CreateVoiceChatCompletionsRequest"]
letta_client/voice/client.py CHANGED
@@ -2,7 +2,7 @@

  import typing
  from ..core.client_wrapper import SyncClientWrapper
- from .types.create_voice_chat_completions_request_body import CreateVoiceChatCompletionsRequestBody
+ from .types.create_voice_chat_completions_request import CreateVoiceChatCompletionsRequest
  from ..core.request_options import RequestOptions
  from ..core.jsonable_encoder import jsonable_encoder
  from ..core.serialization import convert_and_respect_annotation_metadata
@@ -25,8 +25,7 @@ class VoiceClient:
  self,
  agent_id: str,
  *,
- request: CreateVoiceChatCompletionsRequestBody,
- user_id: typing.Optional[str] = None,
+ request: CreateVoiceChatCompletionsRequest,
  request_options: typing.Optional[RequestOptions] = None,
  ) -> typing.Optional[typing.Any]:
  """
@@ -34,9 +33,7 @@
  ----------
  agent_id : str

- request : CreateVoiceChatCompletionsRequestBody
-
- user_id : typing.Optional[str]
+ request : CreateVoiceChatCompletionsRequest

  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.
@@ -73,11 +70,8 @@
  f"v1/voice-beta/{jsonable_encoder(agent_id)}/chat/completions",
  method="POST",
  json=convert_and_respect_annotation_metadata(
- object_=request, annotation=CreateVoiceChatCompletionsRequestBody, direction="write"
+ object_=request, annotation=CreateVoiceChatCompletionsRequest, direction="write"
  ),
- headers={
- "user-id": str(user_id) if user_id is not None else None,
- },
  request_options=request_options,
  omit=OMIT,
  )
@@ -114,8 +108,7 @@ class AsyncVoiceClient:
  self,
  agent_id: str,
  *,
- request: CreateVoiceChatCompletionsRequestBody,
- user_id: typing.Optional[str] = None,
+ request: CreateVoiceChatCompletionsRequest,
  request_options: typing.Optional[RequestOptions] = None,
  ) -> typing.Optional[typing.Any]:
  """
@@ -123,9 +116,7 @@
  ----------
  agent_id : str

- request : CreateVoiceChatCompletionsRequestBody
-
- user_id : typing.Optional[str]
+ request : CreateVoiceChatCompletionsRequest

  request_options : typing.Optional[RequestOptions]
  Request-specific configuration.
@@ -170,11 +161,8 @@
  f"v1/voice-beta/{jsonable_encoder(agent_id)}/chat/completions",
  method="POST",
  json=convert_and_respect_annotation_metadata(
- object_=request, annotation=CreateVoiceChatCompletionsRequestBody, direction="write"
+ object_=request, annotation=CreateVoiceChatCompletionsRequest, direction="write"
  ),
- headers={
- "user-id": str(user_id) if user_id is not None else None,
- },
  request_options=request_options,
  omit=OMIT,
  )
letta_client/voice/types/__init__.py CHANGED
@@ -1,5 +1,5 @@
  # This file was auto-generated by Fern from our API Definition.

- from .create_voice_chat_completions_request_body import CreateVoiceChatCompletionsRequestBody
+ from .create_voice_chat_completions_request import CreateVoiceChatCompletionsRequest

- __all__ = ["CreateVoiceChatCompletionsRequestBody"]
+ __all__ = ["CreateVoiceChatCompletionsRequest"]
letta_client/voice/types/{create_voice_chat_completions_request_body.py → create_voice_chat_completions_request.py} RENAMED
@@ -4,6 +4,4 @@ import typing
  from ...types.completion_create_params_non_streaming import CompletionCreateParamsNonStreaming
  from ...types.completion_create_params_streaming import CompletionCreateParamsStreaming

- CreateVoiceChatCompletionsRequestBody = typing.Union[
- CompletionCreateParamsNonStreaming, CompletionCreateParamsStreaming
- ]
+ CreateVoiceChatCompletionsRequest = typing.Union[CompletionCreateParamsNonStreaming, CompletionCreateParamsStreaming]
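The voice request union keeps the same two members; only the type name and module change, and the user_id parameter disappears from the voice chat-completions call, mirroring the tools client changes above. Callers that imported the old name directly need the new path (old path shown for comparison):

    # 0.1.84:
    # from letta_client.voice.types.create_voice_chat_completions_request_body import CreateVoiceChatCompletionsRequestBody
    # 0.1.86:
    from letta_client.voice.types.create_voice_chat_completions_request import CreateVoiceChatCompletionsRequest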
{letta_client-0.1.84.dist-info → letta_client-0.1.86.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: letta-client
- Version: 0.1.84
+ Version: 0.1.86
  Summary:
  Requires-Python: >=3.8,<4.0
  Classifier: Intended Audience :: Developers
{letta_client-0.1.84.dist-info → letta_client-0.1.86.dist-info}/RECORD CHANGED
@@ -1,8 +1,8 @@
- letta_client/__init__.py,sha256=bMJ2QVwB5uaFZFCplyqr74M69gu7egtuZwIs7LOoQUA,65959
- letta_client/agents/__init__.py,sha256=5zJALonfv-KgVXgPFZjAlTSo-Fm7Fe3S7i3F8vccAvg,25764
+ letta_client/__init__.py,sha256=zYswZck2pupa9xTLm1OUJP0X6nLcEQVZcackwL6bEfs,66609
+ letta_client/agents/__init__.py,sha256=EZeH7kHAWnifaPd0MwY_sD3BCchrB29ZprJzqwKeTMM,26012
  letta_client/agents/blocks/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
  letta_client/agents/blocks/client.py,sha256=u5zvutxoH_DqfSLWhRtNSRBC9_ezQDx682cxkxDz3JA,23822
- letta_client/agents/client.py,sha256=ZUwxURBPy4jW4YgeZR_VijXQB2iUPadzQaSoG2-ZcX0,85152
+ letta_client/agents/client.py,sha256=noGlyMJyKLp7HvMogDK3QtxXl0KWic_X6pNsMy06apY,86754
  letta_client/agents/context/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
  letta_client/agents/context/client.py,sha256=GKKvoG4N_K8Biz9yDjeIHpFG0C8Cwc7tHmEX3pTL_9U,4815
  letta_client/agents/core_memory/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
@@ -16,7 +16,7 @@ letta_client/agents/messages/client.py,sha256=eFYRQUTubKq6N1CTebdLL_oT7LIkOyVEXi
  letta_client/agents/messages/types/__init__.py,sha256=Oc2j0oGOs96IEFf9xsJIkjBjoq3OMtse64YwWv3F9Io,335
  letta_client/agents/messages/types/letta_streaming_response.py,sha256=MdE2PxQ1x1AviakHXsWVcFv97a3RchzzzIiD77w4EC8,665
  letta_client/agents/messages/types/messages_modify_request.py,sha256=7C2X3BKye-YDSXOkdEmxxt34seI4jkLK0-govtc4nhg,475
- letta_client/agents/messages/types/messages_modify_response.py,sha256=f2eITUx-zQ4qzcYd1JPS_mFSqJw7xVsxX7GR7d2RYRI,552
+ letta_client/agents/messages/types/messages_modify_response.py,sha256=THyiUMxZyzVSp0kk1s0XOLW1LUass7mXcfFER1PTLyw,671
  letta_client/agents/passages/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
  letta_client/agents/passages/client.py,sha256=hWC-WHKU-0kwkn5ncPhxALL_wGLCu1JmLlmfDaAOVww,15586
  letta_client/agents/sources/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
@@ -28,7 +28,7 @@ letta_client/agents/templates/types/templates_create_response.py,sha256=kKjkyjv3
  letta_client/agents/templates/types/templates_migrate_response.py,sha256=7N4JtAaiao-LrNdi72K7XB01uXJVkczaKYIJIMf0QYs,577
  letta_client/agents/tools/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
  letta_client/agents/tools/client.py,sha256=xZMRZhG8mI_h8_QqgI4lXh3FieRCLeoPwdtB56GB-XU,12685
- letta_client/agents/types/__init__.py,sha256=vWZEiEPxWIefq0fkaRJUcdzu6hJOoGR_K_stM0v-iig,41009
+ letta_client/agents/types/__init__.py,sha256=hVfHOKotPmiacVAvJDd6zq1qjpXG6VPaI5OnBHYYljE,41420
  letta_client/agents/types/agents_search_request_search_item.py,sha256=9wZPvTP5ESFOhdF9YqdYwv4h_fEFF9TWbGtDO9xkrzA,494
  letta_client/agents/types/agents_search_request_search_item_field.py,sha256=06cbjgIRD2GL7Ck7ZYxLVNbrKP9HLHNOCi9DSPspATQ,692
  letta_client/agents/types/agents_search_request_search_item_one.py,sha256=ECWv-hDZen6AomM01zmRsOz0PlXVEwIwLHjid9yko9o,779
@@ -62,9 +62,11 @@ letta_client/agents/types/agents_search_response_agents_item_embedding_config_ha
  letta_client/agents/types/agents_search_response_agents_item_identity_ids.py,sha256=Me2QPiXqsJiSWcpmnAu-n6runXGk0OCTv2z-FSHx-iA,184
  letta_client/agents/types/agents_search_response_agents_item_last_updated_by_id.py,sha256=WpPW5-0-xegQTh_-7igpbuFr5wFeFM359kuBqbQDYRE,428
  letta_client/agents/types/agents_search_response_agents_item_last_updated_by_id_item.py,sha256=ewpvZ8ScpPBI1Vi7cWjTPQ1eeYBqU8BcsdmFwXR3fsM,172
- letta_client/agents/types/agents_search_response_agents_item_llm_config.py,sha256=QLL21-fKxgjgHcpAZR1VwyRj8G1i1j7mTx_U6qZ25d8,2226
+ letta_client/agents/types/agents_search_response_agents_item_llm_config.py,sha256=JOJ8U4d66KpbgFI68U0x2ylHv53hFFxyu4DitxBDtuE,2718
+ letta_client/agents/types/agents_search_response_agents_item_llm_config_enable_reasoner.py,sha256=6q8s7q-UnvHFiGvZp7TdXdw2x2Wgo5dBMKBG502J1dA,184
  letta_client/agents/types/agents_search_response_agents_item_llm_config_handle.py,sha256=OkFtdvvJ0GCcYZr8p7nHNbC9JTgUky1W8nYmo30WRpo,418
  letta_client/agents/types/agents_search_response_agents_item_llm_config_handle_item.py,sha256=Ykt36D4KDmb1X-t3R7EaECAHbDq8xQqTBU--lSo2g7g,172
+ letta_client/agents/types/agents_search_response_agents_item_llm_config_max_reasoning_tokens.py,sha256=VtcctcNE5cvTe08u9cZYOP6emQUKWDPcJKUhD0IRbQk,189
  letta_client/agents/types/agents_search_response_agents_item_llm_config_max_tokens.py,sha256=oS9ztkcyf_1lABiND482ROlh7ghOLhhNBLCe69tRkwc,442
  letta_client/agents/types/agents_search_response_agents_item_llm_config_max_tokens_item.py,sha256=mfB0NNoNIQJPbAv1VLjSKKc2F4UaeOuNkUMzkqay7xY,177
  letta_client/agents/types/agents_search_response_agents_item_llm_config_model_endpoint.py,sha256=ejEayxna8G74hNJxeVXNRfDnDaQsl81Fi_KoqNsfeCk,456
@@ -234,7 +236,7 @@ letta_client/blocks/client.py,sha256=LE9dsHaBxFLC3G035f0VpNDG7XKWRK8y9OXpeFCMvUw
  letta_client/client.py,sha256=k2mZqqEWciVmEQHgipjCK4kQILk74hpSqzcdNwdql9A,21212
  letta_client/core/__init__.py,sha256=OKbX2aCZXgHCDUsCouqv-OiX32xA6eFFCKIUH9M5Vzk,1591
  letta_client/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
- letta_client/core/client_wrapper.py,sha256=zDW4PmX_uTU1Hjf-_FeeEad1PsKrSZMe6_xtiVes198,1997
+ letta_client/core/client_wrapper.py,sha256=ayLcQ8UCLbGLalgukljffmnrGVi8cItr4bBLhg9L8Do,1997
  letta_client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
  letta_client/core/file.py,sha256=d4NNbX8XvXP32z8KpK2Xovv33nFfruIrpz0QWxlgpZk,2663
  letta_client/core/http_client.py,sha256=Z77OIxIbL4OAB2IDqjRq_sYa5yNYAWfmdhdCSSvh6Y4,19552
@@ -276,9 +278,9 @@ letta_client/steps/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_p
  letta_client/steps/client.py,sha256=g4XUUtdKzkSiRkxJW6ACrYe8ySvJ_tUMGK4ag6QRZT4,11284
  letta_client/tag/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
  letta_client/tag/client.py,sha256=TBAotdb0e2_x2pANF4dOE1qmWY3GIgb7nOhvN7iZ3_4,5183
- letta_client/templates/__init__.py,sha256=mKuy259yyTkJiU4Vlrhcivwf5yGTdkeNBKj8W4doJZ8,28009
+ letta_client/templates/__init__.py,sha256=Bq3bRs6E-kRxhQK-mcuW_9Hg_WIH-Xus0N4bSv6WLec,28293
  letta_client/templates/client.py,sha256=k1eya9jpfKw8CwKP8U7qIhKVmpk1IBqPG-oeK8C6fOM,7188
- letta_client/templates/types/__init__.py,sha256=l5Q0UIzuIgvWxV5-f-BNE5JefHD8NrZt910a54qEERk,45863
+ letta_client/templates/types/__init__.py,sha256=y4mGN4JSA-aDZVXfCQRmnakS0LtrE67OVj8TRkODc8I,46330
  letta_client/templates/types/templates_create_agents_response.py,sha256=UNMZSUckqoug1sq-gqC7luO392eItxxy0NNdUL0CRfQ,725
  letta_client/templates/types/templates_create_agents_response_agents_item.py,sha256=yp0fKlo9CTbpETu9x5ahDEBDVV8eCSRGW47eFFZRfYc,5445
  letta_client/templates/types/templates_create_agents_response_agents_item_agent_type.py,sha256=667uXDfYpS48UZIiSpjdgY4xDcApSpW5viyc3T4eKjo,240
@@ -307,9 +309,11 @@ letta_client/templates/types/templates_create_agents_response_agents_item_embedd
  letta_client/templates/types/templates_create_agents_response_agents_item_identity_ids.py,sha256=-ss5Ifi4HdGrP0g5X8yPPv-V3EGltzmPz2oVec4HGCc,193
  letta_client/templates/types/templates_create_agents_response_agents_item_last_updated_by_id.py,sha256=h0ZU1lx4hDem3EkXpQfcZe0T6llCxudbWL4D0wzs8Gw,465
  letta_client/templates/types/templates_create_agents_response_agents_item_last_updated_by_id_item.py,sha256=Gm8rlP1HXMx0kf6yVTiHvn3ak1YfUQ-MrRs4njYg6ds,181
- letta_client/templates/types/templates_create_agents_response_agents_item_llm_config.py,sha256=yN1LYm0YhsSNEZvy8IaxMlTDQifhWCHELfyVipg2pUw,2447
+ letta_client/templates/types/templates_create_agents_response_agents_item_llm_config.py,sha256=T2-zkGdLW3Ae74F-VlE6rTp6oDCqYY6qFoBqnOCPvo4,2995
+ letta_client/templates/types/templates_create_agents_response_agents_item_llm_config_enable_reasoner.py,sha256=VUCyPaiqPp9Qlw4GkgfuA8gLLqjkEWmLsJ2UL4WYHVA,193
  letta_client/templates/types/templates_create_agents_response_agents_item_llm_config_handle.py,sha256=6iViMHSzqSIYklNasY3z5KxZUunTmdLGDBRHhdveq98,464
  letta_client/templates/types/templates_create_agents_response_agents_item_llm_config_handle_item.py,sha256=SX6bP8RWlynXRdOjoqg3NLNZNOnpVIn4ZTtEQZmFFD0,181
+ letta_client/templates/types/templates_create_agents_response_agents_item_llm_config_max_reasoning_tokens.py,sha256=9zXTM6Mh0P5UrqHKQ-GP30ahClk9GD_vGEiTeTyXf_U,198
  letta_client/templates/types/templates_create_agents_response_agents_item_llm_config_max_tokens.py,sha256=mk3l02cY3_xWUtUYr0y9i82QwwsODgidF4mNCv0tW6I,479
  letta_client/templates/types/templates_create_agents_response_agents_item_llm_config_max_tokens_item.py,sha256=7FWlQX8vYiYuiZlFz74EQTPGUKr9m0IWbu1eKPE95Vw,186
  letta_client/templates/types/templates_create_agents_response_agents_item_llm_config_model_endpoint.py,sha256=ksCAK3zWAtSUDcuk6QZeb4cX824hnvaS5koJzOFliQU,493
@@ -472,18 +476,18 @@ letta_client/templates/types/templates_create_agents_response_agents_item_tools_
  letta_client/templates/types/templates_create_agents_response_agents_item_updated_at.py,sha256=Md7WfCTT1_AGvyd24EeWzUibPvnrun9rhyxqCLeAURg,439
  letta_client/templates/types/templates_create_agents_response_agents_item_updated_at_item.py,sha256=T3rYnv5m_cBAEPBnEjUkkHJLYtFZfXNMbb7a9FrIwKY,175
  letta_client/tools/__init__.py,sha256=XsuAkxHDA-Z98gLNNW_fiEwFP3fP4XQipflrK2bHl8k,353
- letta_client/tools/client.py,sha256=aJqW1sNecrsjBAs6eFubMo2Up0u3lJxpafo1mkj2fnQ,78344
+ letta_client/tools/client.py,sha256=y5WTB5Wzh5gv1IWaSraU6pHYck7RBZlCnQW157OFjuw,77574
  letta_client/tools/types/__init__.py,sha256=R11LYBi6lxkud_DRyaHFUHtlnbfnEI93-SEo7FL4tzs,478
  letta_client/tools/types/add_mcp_server_request.py,sha256=EieZjfOT95sjkpxXdqy7glpxF4J4J3fm6tlaHFnYk84,265
  letta_client/tools/types/add_mcp_server_response_item.py,sha256=TWdsKqGb1INhYtpGnAckz0Pw4nZShumSp4pfocRfxCA,270
  letta_client/tools/types/delete_mcp_server_response_item.py,sha256=MeZObU-7tMSCd-S5yuUjNDse6A1hUz1LLjbko0pXaro,273
  letta_client/tools/types/list_mcp_servers_response_value.py,sha256=AIoXu4bO8QNSU7zjL1jj0Rg4313wVtPaTt13W0aevLQ,273
- letta_client/types/__init__.py,sha256=jPtbBLUzunOPB2nW-wVHYIi-PiYBOvb1EeCO5i7ulUE,18629
+ letta_client/types/__init__.py,sha256=7gQk1HcKIdNShFr6J2RTVb0zzuBYFAHc_T0yTdgSZ74,18827
  letta_client/types/action_model.py,sha256=y1e2XMv3skFaNJIBdYoBKgiORzGh05aOVvu-qVR9uHg,1240
  letta_client/types/action_parameters_model.py,sha256=LgKf5aPZG3-OHGxFdXiSokIDgce8c02xPYIAY05VgW8,828
  letta_client/types/action_response_model.py,sha256=yq2Fd9UU8j7vvtE3VqXUoRRvDzWcfJPj_95ynGdeHCs,824
  letta_client/types/agent_environment_variable.py,sha256=vutZLcR0yETltgOZ7E_o9kR4vOdBxybVL9lzXSux75w,1698
- letta_client/types/agent_schema.py,sha256=u3szbE5tw0VZ9lXVzZxj1DX-3GVvr-FZSSYvLKVJ5I8,1834
+ letta_client/types/agent_schema.py,sha256=LFHhHc-aUY1-5tEBrkvkbU1GZIoGkbUor3t3jxpTdzw,1703
  letta_client/types/agent_state.py,sha256=hogE7T9E56HzOycCGuv-jmPVsmcA3w7sGsoMnWMW3IY,5196
  letta_client/types/agent_state_tool_rules_item.py,sha256=jrcYmhULwLq704i85rCxW2GJLdz8XnBK0HxBLSkgs6k,539
  letta_client/types/agent_type.py,sha256=BvztKbFTW_Acvc3QPIvzK7JGwSLie1V407byu-VZHz0,195
@@ -531,7 +535,7 @@ letta_client/types/completion_create_params_non_streaming.py,sha256=hjEJ-wJWuFuT
  letta_client/types/completion_create_params_non_streaming_function_call.py,sha256=6iCjgXwsXnflllhfDDKtHRyxzKqtLcX6-HVr7AXlyUM,329
  letta_client/types/completion_create_params_non_streaming_messages_item.py,sha256=pKMxLh1XFgMl7LqcjKJmdeKYTCwlr3FLFPTuvaLf3D0,883
  letta_client/types/completion_create_params_non_streaming_modalities_item.py,sha256=BuyCf2nTCWVhishXFk3CsQphnPwNXj-kBdPMjkb8X10,189
- letta_client/types/completion_create_params_non_streaming_model.py,sha256=RhKFYjt4pgTgHfoihS1VdfIDjIlR1KCvPVMDIBY6UbY,1935
+ letta_client/types/completion_create_params_non_streaming_model.py,sha256=bWeGbuTRstRZEW3AiHwZIeHFnsLWGRI5wfrpWxv91ko,1966
  letta_client/types/completion_create_params_non_streaming_reasoning_effort.py,sha256=f1hBX3qksGoGC6O2W5qHblCQXtoZiEhiN8LUy1Rv9Ig,198
  letta_client/types/completion_create_params_non_streaming_response_format.py,sha256=c16kBch59yhxAgMeFTxGNrEBNl4Vu3fPmZ2RqqS6bkU,407
  letta_client/types/completion_create_params_non_streaming_service_tier.py,sha256=Tfw62WLF3WSHWZy8VOVXal1INDQNtZhoB8DSA0btJ0g,188
@@ -541,7 +545,7 @@ letta_client/types/completion_create_params_streaming.py,sha256=sgazDkBKpQTk2Ntr
  letta_client/types/completion_create_params_streaming_function_call.py,sha256=cxsVe0wAIKPAsndL5vB_BCTy6oSxFph7qB1c1LWmeDw,326
  letta_client/types/completion_create_params_streaming_messages_item.py,sha256=S4E0fe3LgVyetb2PEqhGNxqMj5kgQx4q6Qk2bvvu2Ok,880
  letta_client/types/completion_create_params_streaming_modalities_item.py,sha256=o9ZU7r22WrE6z-BSJ72LJXHtVRIpK499WArVgY-ODgI,186
- letta_client/types/completion_create_params_streaming_model.py,sha256=f80smBsCDdtc7oGKFz4sx8h_wnj_Ls4tyvjZeHdrkwc,1932
+ letta_client/types/completion_create_params_streaming_model.py,sha256=ESDrTKo0396c3uH7-bxu9Xu7--e0RnBLmuVUN_0R3ho,1963
  letta_client/types/completion_create_params_streaming_reasoning_effort.py,sha256=4-JFyaD92zia-kN7bPyCWwf_AMDnG2xUXWx8GQU1EFE,195
  letta_client/types/completion_create_params_streaming_response_format.py,sha256=31sy6fKZ4r50zvjVTnoOpwNX81Bx7kFM75Mn7-obbYI,404
  letta_client/types/completion_create_params_streaming_service_tier.py,sha256=chHakgbKOYCMtxdtGmP85rcjGkyOqt2S_JJ9SabSd-o,185
@@ -552,14 +556,14 @@ letta_client/types/conditional_tool_rule.py,sha256=R0nzgjpqedSPUWdBQuv-doiIgYTAZ
  letta_client/types/conflict_error_body.py,sha256=Mena-q1jti6nv_7-xrp6sDb_5MXNKPGobHrhpnz9kpY,787
  letta_client/types/context_window_overview.py,sha256=9pwiObSxu-SFyQ1pxSTlQiRatVAyFgqa6t0_qrrsGfU,2815
  letta_client/types/continue_tool_rule.py,sha256=AIKTGsQrJdSNsMCqdSqMqjKS7s610vDO8taVEbSJ6Yc,867
- letta_client/types/core_memory_block_schema.py,sha256=FAKcmH4B8EiJ6vltX6-dVstVzgznYUU4sOPUVnlFBOY,1062
+ letta_client/types/core_memory_block_schema.py,sha256=DGHyLAcFhHBm7oXkhkGIkkckcl9S2bCaU9b3qrUeNtc,984
  letta_client/types/create_block.py,sha256=V57mbqUkh5c-HcDxmIiFVr3tNfoqx-WJ1GRQZPobbxI,1277
  letta_client/types/dynamic_manager.py,sha256=5DRNqtUnjeTwOe5mkNB-SXItqLOfEX0avSrwsrJt1Aw,853
  letta_client/types/e_2_b_sandbox_config.py,sha256=w3R4QpPjeie5aKw8sb_eKhl78J0k5vLCcATNS3Qaeyw,957
  letta_client/types/embedding_config.py,sha256=ubGDLn8_H1qOoZUUj6de0MVrQnM2umVR2vdnOolPyr4,2539
  letta_client/types/embedding_config_embedding_endpoint_type.py,sha256=Ho1HSODi21PkzsZR58g7FlIMReFU2yf0hAS5OyUsW6Q,559
  letta_client/types/file.py,sha256=ZLCEYJqIJ1pzAJn4Pke6gVdKivKU9FrIg98P4GmFY8M,628
- letta_client/types/file_file.py,sha256=jbWcPKn-fSUlq9kl8n2us9fPU6x-Z20IKScHD_pJruw,665
+ letta_client/types/file_file.py,sha256=5hunDKL7BFz4jvXp9X2oF_YH50Veg1G19fBOQBVcuCQ,666
  letta_client/types/file_metadata.py,sha256=vORZH5WZO8AwAuKq0h0W9TTuydjmDlkZC6YyZMy2jbc,1973
  letta_client/types/function_call.py,sha256=eE6VYWK3A-2xRrIV-QKqrofvaVFcPNqSzl6lrWnopZA,576
  letta_client/types/function_definition_input.py,sha256=UpoD7ftRpHquJ5zhy28TjXPBVzxj7rOHKv3gX84Nfj8,740
@@ -570,6 +574,8 @@ letta_client/types/group.py,sha256=Y8iaGI08uSt99AE0GkiGV95YB5ywiAZOWd0jXojgwqU,1
  letta_client/types/group_create.py,sha256=TPYqXPVtriwhTVwHm_MRDNrKyl2mlice7Q22vVbOHwg,857
  letta_client/types/group_create_manager_config.py,sha256=4NhJEsVCuLBfHD1lbT7xKAgfKWycrwXV7W_u7ifjh1E,319
  letta_client/types/health.py,sha256=nQwx5ysn_cJMKUoqsfaPcGNSRSjfwX5S272UiSQJ03w,618
+ letta_client/types/hidden_reasoning_message.py,sha256=2ExD6XKtWsMQQQCiZcyAGr-Tzgk-i3L663lT3p778pc,1447
+ letta_client/types/hidden_reasoning_message_state.py,sha256=qotAgF_P4T7OEHzbhGDVFaLZYOs1ULMPVHmiFvoRIfM,174
  letta_client/types/http_validation_error.py,sha256=yHa4_NHIMB-VKNZpk7agjLTwWIg7mv7ml3d7I-Bqiog,661
  letta_client/types/identity.py,sha256=ODegiJaCyiFFfYd177v-hRdJBnIwbCQImB9U_fk4s4E,1591
  letta_client/types/identity_create.py,sha256=QuYCfc7dL5aHQqRIt6SlOV00bWyeMouxpKiY3Wx10o0,1538
@@ -588,13 +594,13 @@ letta_client/types/job_status.py,sha256=0Gu5Tku79SDVzCxnjVXQyDPNCizGWUP1ppohAck6
  letta_client/types/job_type.py,sha256=Roa04Ry0I-8YMYcDHiHSQwqBavZyPonzkZtjf098e-Q,145
  letta_client/types/json_schema.py,sha256=EHcLKBSGRsSzCKTpujKFHylcLJG6ODQIBrjQkU4lWDQ,870
  letta_client/types/letta_message_content_union.py,sha256=YxzyXKxUMeqbqWOlDs9LC8HUiqEhgkNCV9a76GS3spg,486
- letta_client/types/letta_message_union.py,sha256=FM4Zippr5fJ05AZ2aZRFlqp348xNgLbzVOcrnyNfytI,493
+ letta_client/types/letta_message_union.py,sha256=TTQwlur2CZNdZ466Nb_2TFcSFXrgoMliaNzD33t7Ktw,603
  letta_client/types/letta_request.py,sha256=bCPDRJhSJSo5eILJp0mTw_k26O3dZL1vChfAcaZ0rE8,1240
  letta_client/types/letta_request_config.py,sha256=b6K4QtDdHjcZKfBb1fugUuoPrT2N4d5TTB0PIRNI2SU,1085
  letta_client/types/letta_response.py,sha256=i5gAUTgWzIst_RP8I_zSh0GSnLIS3z--1BmK6EF1mkQ,1315
  letta_client/types/letta_streaming_request.py,sha256=jm0HLzfzWzIRs8uwtX33V5f5Ljw_hFOKOhPjdIZX9cA,1465
  letta_client/types/letta_usage_statistics.py,sha256=pdlEk_GYVTiDUgW0ZePOdyrJZ6zoSCGEgm_gM3B1wr8,1721
- letta_client/types/llm_config.py,sha256=B-LJpzPB5RNSPG-cag65yTIWc0mbD7iKg77N6ejPL64,3045
+ letta_client/types/llm_config.py,sha256=cycdnu-lgQsLsFmFQrc9S_O20snEdxRLcvwWwLMFnik,3441
  letta_client/types/llm_config_model_endpoint_type.py,sha256=HOSM5kIZDCNAVCWmASvAk52K819plqGlD66yKQ1xFkI,620
  letta_client/types/local_sandbox_config.py,sha256=jfe7akG_YrJJ8csLaLdev04Zg1x-PTN0XCAL4KifaZI,1387
  letta_client/types/manager_type.py,sha256=hV271989JpEhJQH02MzLpJ34EsbGnyMlckbz2TXBc-E,184
@@ -624,7 +630,7 @@ letta_client/types/passage.py,sha256=1OM19TyVCQEL1P3BC58hmzWfawZM4vejiKr0P11dOUk
  letta_client/types/pip_requirement.py,sha256=Hmh7VpJhdSfFkafh6QwAehCp0MQUBXv1YAoYP-2wV2M,773
  letta_client/types/provider.py,sha256=RvdE9dzGFJ4hcmyvk2xeO7RNpxQvXhB_S9DNy8t_z-E,1053
  letta_client/types/reasoning_content.py,sha256=aId-87QjQ4sm_fuCmzIdZZghr-9DFeVV-Lv9x5iVw3I,995
- letta_client/types/reasoning_message.py,sha256=HbSYz0TbnGsFb1MELz0oCDMVC2dg5mY9jdmn3KCeFm0,1354
+ letta_client/types/reasoning_message.py,sha256=hlD4UCaCIJjSmhgJTUpHzO_WAkK9B6ilFaN1Xbhh-ok,1484
  letta_client/types/reasoning_message_source.py,sha256=GYOWGm2mje1yYbR8E2kbAeQS--VDrGlpsobEBQHE2cU,186
  letta_client/types/redacted_reasoning_content.py,sha256=ROAcdqOjM-kaw23HrVJrh0a49TRYuijanHDaCqcMErM,735
  letta_client/types/response_format_json_object.py,sha256=ZSWmwdN8itFr5q77mxuBhEWRBh2CubAonJUCi88UjbA,611
@@ -657,7 +663,7 @@ letta_client/types/tool_call_delta.py,sha256=wGeZwJ9pwYHD5-f4Unf5-vJqefK40eHw9i0
  letta_client/types/tool_call_message.py,sha256=AQxpHtGGrPSAm3wnkoaoKO-vx_fFJQWb-YRK940mrGI,1196
  letta_client/types/tool_call_message_tool_call.py,sha256=twtq5-vZIeh1nShqm8iTCN9YFtY7LUIL-bFYuUfhF1o,219
  letta_client/types/tool_create.py,sha256=VSMd23Kkd77SPbLv2oRHEzXqR2Eexc0ervjxXYLHiqc,1522
- letta_client/types/tool_env_var_schema.py,sha256=TgH1mM9rkfY3ci__at0tZDJpn7G6G92tzussSoviZ7Y,681
+ letta_client/types/tool_env_var_schema.py,sha256=-YFJaXkjIO4BoPgyEQpaOGMNK5zBWlLkIuUjGX9DqHU,660
  letta_client/types/tool_json_schema.py,sha256=EgCxNOxeoF4y_-BDLAp6z_qcxTc87w_uSuZdjZpn3Gk,754
  letta_client/types/tool_return.py,sha256=f-6zaRo8Bwl0i0Q0rHl8vKOfzymFHN_tVRoC2lMWksI,984
  letta_client/types/tool_return_content.py,sha256=0CdaO0-oM9iwGQoDX0MmzcT9liNgOOuItvDUY0QNYWA,956
@@ -665,7 +671,7 @@ letta_client/types/tool_return_message.py,sha256=tC-YbPGSIlfezC0gcNv33qWBDyTAd41
  letta_client/types/tool_return_message_status.py,sha256=FvFOMaG9mnmgnHi2UBQVQQMtHFabbWnQnHTxGUDgVl0,167
  letta_client/types/tool_return_status.py,sha256=TQjwYprn5F_jU9kIbrtiyk7Gw2SjcmFFZLjFbGDpBM0,160
  letta_client/types/tool_rule_schema.py,sha256=cuOWIHHG63nG-EVYz4qV9psQ8MH0ujmLGjHiPVV-3Kk,578
- letta_client/types/tool_schema.py,sha256=vR257mZ8xsrcoXrkAUmEQoUFLoqotM2eQrdnOkmSJDc,1143
+ letta_client/types/tool_schema.py,sha256=q5iRbpiIqWpNvXeDCi7BUyDbQzBKUnTIXEIAujn1bxw,1122
  letta_client/types/tool_type.py,sha256=v6DX7qGAbg9t4HZTa9GBuzehNDCW3NkD6Zi3Z1teEKI,336
  letta_client/types/update_assistant_message.py,sha256=D-51o8uXk3X_2Fb2zJ4KoMeRxPiDWaCb3ugRfjBMCTI,878
  letta_client/types/update_assistant_message_content.py,sha256=rh3DP_SpxyBNnf0EDtoaKmPIPV-cXRSFju33NbHgeF0,247
@@ -686,10 +692,10 @@ letta_client/types/web_search_options_search_context_size.py,sha256=RgJGV4rkuaCT
  letta_client/types/web_search_options_user_location.py,sha256=4aXfFcwUBu7YNA5XBjfhmD6tgRb0e8LTFexmn-rkDfw,770
  letta_client/types/web_search_options_user_location_approximate.py,sha256=Ywk01J9H67L6_498E5E6ceJ2VbJUfcLiIJWD_s92_M0,731
  letta_client/version.py,sha256=bttKLbIhO3UonCYQlqs600zzbQgfhCCMjeXR9WRzid4,79
- letta_client/voice/__init__.py,sha256=7hX85553PiRMtIMM12a0DSoFzsglNiUziYR2ekS84Qw,175
- letta_client/voice/client.py,sha256=STjswa5oOLoP59QwTJvQwi73kgn0UzKOaXc2CsTRI4k,6912
- letta_client/voice/types/__init__.py,sha256=FRc3iKRTONE4N8Lf1IqvnqWZ2kXdrFFvkL7PxVcR8Ew,212
- letta_client/voice/types/create_voice_chat_completions_request_body.py,sha256=ZLfKgNK1T6IAwLEvaBVFfy7jEAoPUXP28n-nfmHkklc,391
- letta_client-0.1.84.dist-info/METADATA,sha256=2oxhbs4fOa6Mc7303-xWZKJ7s_LHZK19DKT9PkLBTK4,5041
- letta_client-0.1.84.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
- letta_client-0.1.84.dist-info/RECORD,,
+ letta_client/voice/__init__.py,sha256=ZrZEuXIukVGhsfM-i0dIFfqjeSOBMPeEgDva7VvnipE,167
+ letta_client/voice/client.py,sha256=2KKJiteGk5HQM79ne1jOPl_ZyUTfZM_gXNdZZ_ndPU8,6485
+ letta_client/voice/types/__init__.py,sha256=hBLJcrom99DkDxxsVRU2ni8kPx6SsCy8gtAJvNOz26w,199
+ letta_client/voice/types/create_voice_chat_completions_request.py,sha256=K4__83rXRCshfdobyAmH-5fUDJQ_PeSQetTUeC4Abk0,381
+ letta_client-0.1.86.dist-info/METADATA,sha256=aaJcBeufHb0NMGNnG2ZKR8wdQUgF4Akzn3OVtHXv_-8,5041
+ letta_client-0.1.86.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+ letta_client-0.1.86.dist-info/RECORD,,