letta-client 0.1.199__py3-none-any.whl → 0.1.201__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.

Potentially problematic release.


This version of letta-client might be problematic.

@@ -24,7 +24,7 @@ class BaseClientWrapper:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "letta-client",
-            "X-Fern-SDK-Version": "0.1.199",
+            "X-Fern-SDK-Version": "0.1.201",
         }
         if self._project is not None:
             headers["X-Project"] = self._project
@@ -469,12 +469,12 @@ class SourcesClient:
         self,
         *,
         name: str,
-        embedding: typing.Optional[str] = OMIT,
-        embedding_chunk_size: typing.Optional[int] = OMIT,
-        embedding_config: typing.Optional[EmbeddingConfig] = OMIT,
         description: typing.Optional[str] = OMIT,
         instructions: typing.Optional[str] = OMIT,
         metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
+        embedding: typing.Optional[str] = OMIT,
+        embedding_chunk_size: typing.Optional[int] = OMIT,
+        embedding_config: typing.Optional[EmbeddingConfig] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> Source:
         """
@@ -485,15 +485,6 @@ class SourcesClient:
         name : str
             The name of the source.

-        embedding : typing.Optional[str]
-            The hande for the embedding config used by the source.
-
-        embedding_chunk_size : typing.Optional[int]
-            The chunk size of the embedding.
-
-        embedding_config : typing.Optional[EmbeddingConfig]
-            (Legacy) The embedding configuration used by the source.
-
         description : typing.Optional[str]
             The description of the source.

@@ -503,6 +494,15 @@ class SourcesClient:
         metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
             Metadata associated with the source.

+        embedding : typing.Optional[str]
+            The handle for the embedding config used by the source.
+
+        embedding_chunk_size : typing.Optional[int]
+            The chunk size of the embedding.
+
+        embedding_config : typing.Optional[EmbeddingConfig]
+            (Legacy) The embedding configuration used by the source.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

@@ -528,14 +528,14 @@ class SourcesClient:
             method="POST",
             json={
                 "name": name,
+                "description": description,
+                "instructions": instructions,
+                "metadata": metadata,
                 "embedding": embedding,
                 "embedding_chunk_size": embedding_chunk_size,
                 "embedding_config": convert_and_respect_annotation_metadata(
                     object_=embedding_config, annotation=EmbeddingConfig, direction="write"
                 ),
-                "description": description,
-                "instructions": instructions,
-                "metadata": metadata,
             },
             headers={
                 "content-type": "application/json",
@@ -1144,12 +1144,12 @@ class AsyncSourcesClient:
         self,
         *,
         name: str,
-        embedding: typing.Optional[str] = OMIT,
-        embedding_chunk_size: typing.Optional[int] = OMIT,
-        embedding_config: typing.Optional[EmbeddingConfig] = OMIT,
         description: typing.Optional[str] = OMIT,
         instructions: typing.Optional[str] = OMIT,
         metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
+        embedding: typing.Optional[str] = OMIT,
+        embedding_chunk_size: typing.Optional[int] = OMIT,
+        embedding_config: typing.Optional[EmbeddingConfig] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> Source:
         """
@@ -1160,15 +1160,6 @@ class AsyncSourcesClient:
         name : str
             The name of the source.

-        embedding : typing.Optional[str]
-            The hande for the embedding config used by the source.
-
-        embedding_chunk_size : typing.Optional[int]
-            The chunk size of the embedding.
-
-        embedding_config : typing.Optional[EmbeddingConfig]
-            (Legacy) The embedding configuration used by the source.
-
         description : typing.Optional[str]
             The description of the source.

@@ -1178,6 +1169,15 @@ class AsyncSourcesClient:
         metadata : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
             Metadata associated with the source.

+        embedding : typing.Optional[str]
+            The handle for the embedding config used by the source.
+
+        embedding_chunk_size : typing.Optional[int]
+            The chunk size of the embedding.
+
+        embedding_config : typing.Optional[EmbeddingConfig]
+            (Legacy) The embedding configuration used by the source.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

@@ -1211,14 +1211,14 @@ class AsyncSourcesClient:
             method="POST",
             json={
                 "name": name,
+                "description": description,
+                "instructions": instructions,
+                "metadata": metadata,
                 "embedding": embedding,
                 "embedding_chunk_size": embedding_chunk_size,
                 "embedding_config": convert_and_respect_annotation_metadata(
                     object_=embedding_config, annotation=EmbeddingConfig, direction="write"
                 ),
-                "description": description,
-                "instructions": instructions,
-                "metadata": metadata,
             },
             headers={
                 "content-type": "application/json",
@@ -2,6 +2,17 @@

 from .types import TemplatesListResponse, TemplatesListResponseTemplatesItem
 from . import agents
-from .agents import AgentsCreateResponse
+from .agents import (
+    AgentsCreateRequestInitialMessageSequenceItem,
+    AgentsCreateRequestInitialMessageSequenceItemRole,
+    AgentsCreateResponse,
+)

-__all__ = ["AgentsCreateResponse", "TemplatesListResponse", "TemplatesListResponseTemplatesItem", "agents"]
+__all__ = [
+    "AgentsCreateRequestInitialMessageSequenceItem",
+    "AgentsCreateRequestInitialMessageSequenceItemRole",
+    "AgentsCreateResponse",
+    "TemplatesListResponse",
+    "TemplatesListResponseTemplatesItem",
+    "agents",
+]
@@ -1,5 +1,13 @@
 # This file was auto-generated by Fern from our API Definition.

-from .types import AgentsCreateResponse
+from .types import (
+    AgentsCreateRequestInitialMessageSequenceItem,
+    AgentsCreateRequestInitialMessageSequenceItemRole,
+    AgentsCreateResponse,
+)

-__all__ = ["AgentsCreateResponse"]
+__all__ = [
+    "AgentsCreateRequestInitialMessageSequenceItem",
+    "AgentsCreateRequestInitialMessageSequenceItemRole",
+    "AgentsCreateResponse",
+]
@@ -2,9 +2,11 @@

 import typing
 from ...core.client_wrapper import SyncClientWrapper
+from .types.agents_create_request_initial_message_sequence_item import AgentsCreateRequestInitialMessageSequenceItem
 from ...core.request_options import RequestOptions
 from .types.agents_create_response import AgentsCreateResponse
 from ...core.jsonable_encoder import jsonable_encoder
+from ...core.serialization import convert_and_respect_annotation_metadata
 from ...core.unchecked_base_model import construct_type
 from ...errors.payment_required_error import PaymentRequiredError
 from ...types.payment_required_error_body import PaymentRequiredErrorBody
@@ -27,6 +29,9 @@ class AgentsClient:
         *,
         tags: typing.Optional[typing.Sequence[str]] = OMIT,
         agent_name: typing.Optional[str] = OMIT,
+        initial_message_sequence: typing.Optional[
+            typing.Sequence[AgentsCreateRequestInitialMessageSequenceItem]
+        ] = OMIT,
         memory_variables: typing.Optional[typing.Dict[str, str]] = OMIT,
         tool_variables: typing.Optional[typing.Dict[str, str]] = OMIT,
         identity_ids: typing.Optional[typing.Sequence[str]] = OMIT,
@@ -49,6 +54,9 @@ class AgentsClient:
         agent_name : typing.Optional[str]
            The name of the agent, optional otherwise a random one will be assigned

+        initial_message_sequence : typing.Optional[typing.Sequence[AgentsCreateRequestInitialMessageSequenceItem]]
+            Set an initial sequence of messages, if not provided, the agent will start with the default message sequence, if an empty array is provided, the agent will start with no messages
+
         memory_variables : typing.Optional[typing.Dict[str, str]]
             The memory variables to assign to the agent

@@ -85,6 +93,11 @@ class AgentsClient:
             json={
                 "tags": tags,
                 "agent_name": agent_name,
+                "initial_message_sequence": convert_and_respect_annotation_metadata(
+                    object_=initial_message_sequence,
+                    annotation=typing.Sequence[AgentsCreateRequestInitialMessageSequenceItem],
+                    direction="write",
+                ),
                 "memory_variables": memory_variables,
                 "tool_variables": tool_variables,
                 "identity_ids": identity_ids,
@@ -131,6 +144,9 @@ class AsyncAgentsClient:
         *,
         tags: typing.Optional[typing.Sequence[str]] = OMIT,
         agent_name: typing.Optional[str] = OMIT,
+        initial_message_sequence: typing.Optional[
+            typing.Sequence[AgentsCreateRequestInitialMessageSequenceItem]
+        ] = OMIT,
         memory_variables: typing.Optional[typing.Dict[str, str]] = OMIT,
         tool_variables: typing.Optional[typing.Dict[str, str]] = OMIT,
         identity_ids: typing.Optional[typing.Sequence[str]] = OMIT,
@@ -153,6 +169,9 @@ class AsyncAgentsClient:
         agent_name : typing.Optional[str]
            The name of the agent, optional otherwise a random one will be assigned

+        initial_message_sequence : typing.Optional[typing.Sequence[AgentsCreateRequestInitialMessageSequenceItem]]
+            Set an initial sequence of messages, if not provided, the agent will start with the default message sequence, if an empty array is provided, the agent will start with no messages
+
         memory_variables : typing.Optional[typing.Dict[str, str]]
             The memory variables to assign to the agent

@@ -197,6 +216,11 @@ class AsyncAgentsClient:
             json={
                 "tags": tags,
                 "agent_name": agent_name,
+                "initial_message_sequence": convert_and_respect_annotation_metadata(
+                    object_=initial_message_sequence,
+                    annotation=typing.Sequence[AgentsCreateRequestInitialMessageSequenceItem],
+                    direction="write",
+                ),
                 "memory_variables": memory_variables,
                 "tool_variables": tool_variables,
                 "identity_ids": identity_ids,
@@ -1,5 +1,11 @@
 # This file was auto-generated by Fern from our API Definition.

+from .agents_create_request_initial_message_sequence_item import AgentsCreateRequestInitialMessageSequenceItem
+from .agents_create_request_initial_message_sequence_item_role import AgentsCreateRequestInitialMessageSequenceItemRole
 from .agents_create_response import AgentsCreateResponse

-__all__ = ["AgentsCreateResponse"]
+__all__ = [
+    "AgentsCreateRequestInitialMessageSequenceItem",
+    "AgentsCreateRequestInitialMessageSequenceItemRole",
+    "AgentsCreateResponse",
+]
@@ -0,0 +1,26 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from ....core.unchecked_base_model import UncheckedBaseModel
+from .agents_create_request_initial_message_sequence_item_role import AgentsCreateRequestInitialMessageSequenceItemRole
+import typing
+from ....core.pydantic_utilities import IS_PYDANTIC_V2
+import pydantic
+
+
+class AgentsCreateRequestInitialMessageSequenceItem(UncheckedBaseModel):
+    role: AgentsCreateRequestInitialMessageSequenceItemRole
+    content: str
+    name: typing.Optional[str] = None
+    otid: typing.Optional[str] = None
+    sender_id: typing.Optional[str] = None
+    batch_item_id: typing.Optional[str] = None
+    group_id: typing.Optional[str] = None
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
@@ -0,0 +1,7 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+AgentsCreateRequestInitialMessageSequenceItemRole = typing.Union[
+    typing.Literal["user", "system", "assistant"], typing.Any
+]
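
The two new modules define the request item as an UncheckedBaseModel plus a permissive role alias (a Union of the role literals and typing.Any), so unexpected role strings are tolerated rather than rejected. A small sketch exercising them directly, with placeholder message text:

from letta_client.templates.agents.types import (
    AgentsCreateRequestInitialMessageSequenceItem,
)

item = AgentsCreateRequestInitialMessageSequenceItem(
    role="system",  # "user", "system", or "assistant"; other values also satisfy the Union[..., Any] alias
    content="You are a terse assistant.",  # placeholder content
)
print(item.role, item.content)
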
@@ -1,8 +1,8 @@
 # This file was auto-generated by Fern from our API Definition.

 from ..core.unchecked_base_model import UncheckedBaseModel
-import typing
 import pydantic
+import typing
 from .file_processing_status import FileProcessingStatus
 import datetime as dt
 from ..core.pydantic_utilities import IS_PYDANTIC_V2
@@ -13,11 +13,6 @@ class FileMetadata(UncheckedBaseModel):
     Representation of a single FileMetadata
     """

-    id: typing.Optional[str] = pydantic.Field(default=None)
-    """
-    The human-friendly ID of the File
-    """
-
     source_id: str = pydantic.Field()
     """
     The unique identifier of the source associated with the document.
@@ -78,6 +73,16 @@ class FileMetadata(UncheckedBaseModel):
     Number of chunks that have been embedded.
     """

+    content: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Optional full-text content of the file; only populated on demand due to its size.
+    """
+
+    id: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    The human-friendly ID of the File
+    """
+
     created_at: typing.Optional[dt.datetime] = pydantic.Field(default=None)
     """
     The creation date of the file.
@@ -93,11 +98,6 @@ class FileMetadata(UncheckedBaseModel):
     Whether this file is deleted or not.
     """

-    content: typing.Optional[str] = pydantic.Field(default=None)
-    """
-    Optional full-text content of the file; only populated on demand due to its size.
-    """
-
     if IS_PYDANTIC_V2:
         model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
     else:
@@ -13,24 +13,25 @@ from ..core.pydantic_utilities import IS_PYDANTIC_V2

 class Message(UncheckedBaseModel):
     """
-    Letta's internal representation of a message. Includes methods to convert to/from LLM provider formats.
-
-    Attributes:
-        id (str): The unique identifier of the message.
-        role (MessageRole): The role of the participant.
-        text (str): The text of the message.
-        user_id (str): The unique identifier of the user.
-        agent_id (str): The unique identifier of the agent.
-        model (str): The model used to make the function call.
-        name (str): The name of the participant.
-        created_at (datetime): The time the message was created.
-        tool_calls (List[OpenAIToolCall,]): The list of tool calls requested.
-        tool_call_id (str): The id of the tool call.
-        step_id (str): The id of the step that this message was created in.
-        otid (str): The offline threading id associated with this message.
-        tool_returns (List[ToolReturn]): The list of tool returns requested.
-        group_id (str): The multi-agent group that the message was sent in.
-        sender_id (str): The id of the sender of the message, can be an identity id or agent id.
+        Letta's internal representation of a message. Includes methods to convert to/from LLM provider formats.
+
+        Attributes:
+            id (str): The unique identifier of the message.
+            role (MessageRole): The role of the participant.
+            text (str): The text of the message.
+            user_id (str): The unique identifier of the user.
+            agent_id (str): The unique identifier of the agent.
+            model (str): The model used to make the function call.
+            name (str): The name of the participant.
+            created_at (datetime): The time the message was created.
+            tool_calls (List[OpenAIToolCall,]): The list of tool calls requested.
+            tool_call_id (str): The id of the tool call.
+            step_id (str): The id of the step that this message was created in.
+            otid (str): The offline threading id associated with this message.
+            tool_returns (List[ToolReturn]): The list of tool returns requested.
+            group_id (str): The multi-agent group that the message was sent in.
+            sender_id (str): The id of the sender of the message, can be an identity id or agent id.
+        t
     """

     created_by_id: typing.Optional[str] = pydantic.Field(default=None)
@@ -1,8 +1,8 @@
 # This file was auto-generated by Fern from our API Definition.

 from ..core.unchecked_base_model import UncheckedBaseModel
-import typing
 import pydantic
+import typing
 from .embedding_config import EmbeddingConfig
 import datetime as dt
 from ..core.pydantic_utilities import IS_PYDANTIC_V2
@@ -21,11 +21,6 @@ class Source(UncheckedBaseModel):
         description (str): The description of the source.
     """

-    id: typing.Optional[str] = pydantic.Field(default=None)
-    """
-    The human-friendly ID of the Source
-    """
-
     name: str = pydantic.Field()
     """
     The name of the source.
@@ -41,14 +36,19 @@ class Source(UncheckedBaseModel):
     Instructions for how to use the source.
     """

-    embedding_config: EmbeddingConfig = pydantic.Field()
+    metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None)
     """
-    The embedding configuration used by the source.
+    Metadata associated with the source.
     """

-    metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None)
+    id: typing.Optional[str] = pydantic.Field(default=None)
     """
-    Metadata associated with the source.
+    The human-friendly ID of the Source
+    """
+
+    embedding_config: EmbeddingConfig = pydantic.Field()
+    """
+    The embedding configuration used by the source.
     """

     created_by_id: typing.Optional[str] = pydantic.Field(default=None)
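
The FileMetadata and Source hunks only reorder field declarations (id, content, metadata, embedding_config); pydantic matches fields by name, so serialization and attribute access are unchanged. A standalone illustration of that point (generic pydantic models, not letta-client code; assumes pydantic v2 for model_dump):

import typing

import pydantic


class Before(pydantic.BaseModel):
    id: typing.Optional[str] = None
    name: str


class After(pydantic.BaseModel):
    name: str
    id: typing.Optional[str] = None


payload = {"name": "my-source", "id": "source-123"}
# Declaration order differs, but the parsed values are identical.
assert Before(**payload).model_dump() == After(**payload).model_dump()
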
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: letta-client
-Version: 0.1.199
+Version: 0.1.201
 Summary:
 Requires-Python: >=3.8,<4.0
 Classifier: Intended Audience :: Developers
@@ -66,7 +66,7 @@ letta_client/client_side_access_tokens/types/client_side_access_tokens_create_re
 letta_client/client_side_access_tokens/types/client_side_access_tokens_create_response_policy_data_item_access_item.py,sha256=R-H25IpNp9feSrW8Yj3h9O3UTMVvFniQJElogKxLuoE,254
 letta_client/core/__init__.py,sha256=OKbX2aCZXgHCDUsCouqv-OiX32xA6eFFCKIUH9M5Vzk,1591
 letta_client/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
-letta_client/core/client_wrapper.py,sha256=uSlKkU7kY2e83JC7q-mER5TG32sjZcD2A65FYjUSNlU,2336
+letta_client/core/client_wrapper.py,sha256=Z6UfuErG1hhfHFuznbYGwnAakCyZHuKjqBwSPsYTwzM,2336
 letta_client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 letta_client/core/file.py,sha256=d4NNbX8XvXP32z8KpK2Xovv33nFfruIrpz0QWxlgpZk,2663
 letta_client/core/http_client.py,sha256=Z77OIxIbL4OAB2IDqjRq_sYa5yNYAWfmdhdCSSvh6Y4,19552
@@ -127,7 +127,7 @@ letta_client/runs/steps/client.py,sha256=KgpKM6tLn7CgnkUlUihLvxucw4PW4bb_8XPVaEb
 letta_client/runs/usage/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 letta_client/runs/usage/client.py,sha256=LGJL8cPGaVfTG5OBi85KRbwvv3P_jQNehFq2Kg0xrC4,4738
 letta_client/sources/__init__.py,sha256=kswgCv4UdkSVk1Y4tsMM1HadOwvhh_Fr96VTSMV4Umc,128
-letta_client/sources/client.py,sha256=GVZ8KaFIpWs2gbCJBeYNTJVKB9p3iLcU91u5TZ3fJeg,43006
+letta_client/sources/client.py,sha256=bN1cht7KPYgGhZFQCuPducOl78gMU154Vx4aqY21zT0,43008
 letta_client/sources/files/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 letta_client/sources/files/client.py,sha256=6RgAo1778b1o_BLUZKDbdrSvhsLCvK_TnwFXBEUISpM,14659
 letta_client/sources/passages/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
@@ -142,10 +142,12 @@ letta_client/tags/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw
 letta_client/tags/client.py,sha256=41ey1rZT5ff1zLQpV4D2pR-HAdvmmqRrtregv7xm-G4,5239
 letta_client/telemetry/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 letta_client/telemetry/client.py,sha256=43lFVUNsA9kx9Di8fcJ4S3xU2ybu5KvoF-UGE6N_5ts,4715
-letta_client/templates/__init__.py,sha256=6kqaRnkWVngMoV08wPrkA6urr_lCnE6FRIVq4jj4z1M,313
-letta_client/templates/agents/__init__.py,sha256=1tdurb6H9iIIc-Lq68A1xpuidIej7LU1Lz9zCSO4seM,141
-letta_client/templates/agents/client.py,sha256=D43K2NVvaEknfxCM32r7b0CxRYzaPIpEmhL4a9KLbps,8113
-letta_client/templates/agents/types/__init__.py,sha256=oYK-SXvccx0ZCfsjUdDUYAQ5jZi2UQBkV3nx_DIJJC8,158
+letta_client/templates/__init__.py,sha256=QuQSr63nihN-i_iDN8nRYye73EbutB_4Yfj9QAdWL-8,557
+letta_client/templates/agents/__init__.py,sha256=92KhYgnrVvc0EHXXCqfx2bkD6ru1NISN2haiyOmUsgk,373
+letta_client/templates/agents/client.py,sha256=mnSNe2VYjoLaIENKQf1-pe5Sg0lym8Vxa2AHqXMbdXs,9790
+letta_client/templates/agents/types/__init__.py,sha256=gKrRq3eWX6edFu0WwOli98KEKzhfPGzDz7wyL234dJc,506
+letta_client/templates/agents/types/agents_create_request_initial_message_sequence_item.py,sha256=KDBNZiySf6UGm_r0omZgYq5i5_4V7rDhTHaIwKo1Mug,985
+letta_client/templates/agents/types/agents_create_request_initial_message_sequence_item_role.py,sha256=Xp6uU0_CfWtIBtHdwicF9b4yAcrCYXQyYvxtNKyq-K4,210
 letta_client/templates/agents/types/agents_create_response.py,sha256=FHjCM6NlichekqQ73bTuGEoYe8xyUle0hcNFv6gocJU,636
 letta_client/templates/client.py,sha256=wcidkaF0eRgKEYRrDTgKAS_A57MO7H2S_muzznEVmEg,4819
 letta_client/templates/types/__init__.py,sha256=dAr_dEh0BdwUxAcV1sJ9RM07Z8nCv4dCK6fmTltqQ6c,286
@@ -255,7 +257,7 @@ letta_client/types/embedding_config_embedding_endpoint_type.py,sha256=Ho1HSODi21
 letta_client/types/feedback_type.py,sha256=sDfsniSnnpSwzZqfIkRL7vYPxYqdwURpI6LMI7eDkoQ,160
 letta_client/types/file.py,sha256=ZLCEYJqIJ1pzAJn4Pke6gVdKivKU9FrIg98P4GmFY8M,628
 letta_client/types/file_file.py,sha256=jbWcPKn-fSUlq9kl8n2us9fPU6x-Z20IKScHD_pJruw,665
-letta_client/types/file_metadata.py,sha256=Fhn8ptJSmczuUoXv-N6vI-yWbVTonAqdg7jcG4nZurs,2957
+letta_client/types/file_metadata.py,sha256=51abJ_M4dmpRJetdWcMf_P39l3EaJ1R2kjuexzEWwMI,2957
 letta_client/types/file_processing_status.py,sha256=8W8VAx9-jCaUx6q6mvyCMyLoa2peLTE_sgIaGloOWo4,201
 letta_client/types/file_stats.py,sha256=gEaG0m4vulK21EoIuYlOcdy0IK4qWkjBTDoMzXw3GEQ,875
 letta_client/types/function_call.py,sha256=eE6VYWK3A-2xRrIV-QKqrofvaVFcPNqSzl6lrWnopZA,576
@@ -309,7 +311,7 @@ letta_client/types/max_count_per_step_tool_rule_schema.py,sha256=1Zq4vblRTqFycqk
 letta_client/types/mcp_server_type.py,sha256=cEiRY8zJw3YdV0RV6tt4JUYd0AHT_UNeLgxaouU-_4A,172
 letta_client/types/mcp_tool.py,sha256=1Bdh9eDfLWxEB_5spzGXImmcoEQ2XRo8BTaeolaMA1M,1015
 letta_client/types/memory.py,sha256=Fa07vLHBsc4eNK65Yla2zOuzYhtgFGlnPzAGo9GvJ-c,1210
-letta_client/types/message.py,sha256=xLOrSRBL3GHlEN_aZAVR_ruftSqqDMu3CVnRnB01ZD0,4493
+letta_client/types/message.py,sha256=Y5wbp-VTYNmTQE0RtHARVB55vPJ5SpIFZvUfNbjQ4X8,4567
 letta_client/types/message_content_item.py,sha256=FrwERKfU5MpV4Y8LC5ejKFkoqqSV_Ooww-r32VGBbME,629
 letta_client/types/message_create.py,sha256=jgtA2pi59E7Pv37oyGO51wjZyRtfxVpgENXad8fxQqM,1601
 letta_client/types/message_create_content.py,sha256=KL3XAVKVrdsh4DZwdxKofUyehS-vnOT_VJNVzZDpE20,226
@@ -360,7 +362,7 @@ letta_client/types/sandbox_environment_variable_update.py,sha256=JMkX6nzvcBNEemj
 letta_client/types/sandbox_type.py,sha256=XSWmX3JIFFrDPQ4i89E8LauXY8kjmJEtaz6e_JheGm4,151
 letta_client/types/sleeptime_manager.py,sha256=oKI3CCoA4guwktWs1bbPdCmv9jg94EeMvbXQWvzbt6M,778
 letta_client/types/sleeptime_manager_update.py,sha256=JMzgtvGMDI5VBzlTuzm4FpuFAL7uwPbQgN9TYxip93s,813
-letta_client/types/source.py,sha256=BsfE9yrefXREQtskGZnR-TFGqmHkFKIGHC5udtHUi14,2370
+letta_client/types/source.py,sha256=EELyis2796C75CPBVh5Yu9s7qw2nCszu7QgtpR3wPr4,2370
 letta_client/types/source_stats.py,sha256=QNp0U24Y6gCLgQp3VDMjiQSgqLnk7CjzRfMobxgOslk,1168
 letta_client/types/sse_server_config.py,sha256=IN-FdECflYF-XiIM_fvVOwyDu215Csoixepv44PAVvQ,1738
 letta_client/types/stdio_server_config.py,sha256=dEQ7bguiLikGemLxYZJ3JCmmEQgAMsSPO_P52oHZSl0,1091
@@ -421,6 +423,6 @@ letta_client/types/web_search_options_user_location_approximate.py,sha256=Ywk01J
 letta_client/version.py,sha256=bttKLbIhO3UonCYQlqs600zzbQgfhCCMjeXR9WRzid4,79
 letta_client/voice/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 letta_client/voice/client.py,sha256=47iQYCuW_qpKI4hM3pYVxn3hw7kgQj3emU1_oRpkRMA,5811
-letta_client-0.1.199.dist-info/METADATA,sha256=gc4aPmJV620OywQem79nkf9mO4qOzuJIe0FI7eEcAAc,5177
-letta_client-0.1.199.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
-letta_client-0.1.199.dist-info/RECORD,,
+letta_client-0.1.201.dist-info/METADATA,sha256=ELCpM6bt9EPXoym46JpnbYW_pd_zYG07QI7xiLZmLRU,5177
+letta_client-0.1.201.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+letta_client-0.1.201.dist-info/RECORD,,