letta-client 0.1.174__py3-none-any.whl → 0.1.175__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

letta_client/__init__.py CHANGED
@@ -198,6 +198,7 @@ from .types import (
     SseServerConfig,
     StdioServerConfig,
     Step,
+    StepFeedback,
     StopReasonType,
     StreamableHttpServerConfig,
     SupervisorManager,
@@ -307,6 +308,7 @@ from .client_side_access_tokens import (
 from .environment import LettaEnvironment
 from .groups import GroupCreateManagerConfig, GroupUpdateManagerConfig
 from .projects import ProjectsListResponse, ProjectsListResponseProjectsItem
+from .steps import AddFeedbackRequestFeedback, StepsListRequestFeedback
 from .templates import TemplatesListResponse, TemplatesListResponseTemplatesItem
 from .tools import (
     AddMcpServerRequest,
@@ -322,6 +324,7 @@ __all__ = [
     "ActionModel",
     "ActionParametersModel",
     "ActionResponseModel",
+    "AddFeedbackRequestFeedback",
     "AddMcpServerRequest",
     "AddMcpServerResponseItem",
     "AgentEnvironmentVariable",
@@ -548,6 +551,8 @@ __all__ = [
     "SseServerConfig",
     "StdioServerConfig",
     "Step",
+    "StepFeedback",
+    "StepsListRequestFeedback",
    "StopReasonType",
     "StreamableHttpServerConfig",
     "SupervisorManager",
letta_client/agents/client.py CHANGED
@@ -221,6 +221,7 @@ class AgentsClient:
         include_base_tools: typing.Optional[bool] = OMIT,
         include_multi_agent_tools: typing.Optional[bool] = OMIT,
         include_base_tool_rules: typing.Optional[bool] = OMIT,
+        include_default_source: typing.Optional[bool] = OMIT,
         description: typing.Optional[str] = OMIT,
         metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
         model: typing.Optional[str] = OMIT,
@@ -300,6 +301,9 @@
         include_base_tool_rules : typing.Optional[bool]
             If true, attaches the Letta base tool rules (e.g. deny all tools not explicitly allowed).

+        include_default_source : typing.Optional[bool]
+            If true, automatically creates and attaches a default data source for this agent.
+
         description : typing.Optional[str]
             The description of the agent.

@@ -413,6 +417,7 @@
                 "include_base_tools": include_base_tools,
                 "include_multi_agent_tools": include_multi_agent_tools,
                 "include_base_tool_rules": include_base_tool_rules,
+                "include_default_source": include_default_source,
                 "description": description,
                 "metadata": metadata,
                 "model": model,
@@ -1321,6 +1326,7 @@ class AsyncAgentsClient:
         include_base_tools: typing.Optional[bool] = OMIT,
         include_multi_agent_tools: typing.Optional[bool] = OMIT,
         include_base_tool_rules: typing.Optional[bool] = OMIT,
+        include_default_source: typing.Optional[bool] = OMIT,
         description: typing.Optional[str] = OMIT,
         metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
         model: typing.Optional[str] = OMIT,
@@ -1400,6 +1406,9 @@
         include_base_tool_rules : typing.Optional[bool]
             If true, attaches the Letta base tool rules (e.g. deny all tools not explicitly allowed).

+        include_default_source : typing.Optional[bool]
+            If true, automatically creates and attaches a default data source for this agent.
+
         description : typing.Optional[str]
             The description of the agent.

@@ -1521,6 +1530,7 @@
                 "include_base_tools": include_base_tools,
                 "include_multi_agent_tools": include_multi_agent_tools,
                 "include_base_tool_rules": include_base_tool_rules,
+                "include_default_source": include_default_source,
                 "description": description,
                 "metadata": metadata,
                 "model": model,
letta_client/core/client_wrapper.py CHANGED
@@ -16,7 +16,7 @@ class BaseClientWrapper:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "letta-client",
-            "X-Fern-SDK-Version": "0.1.174",
+            "X-Fern-SDK-Version": "0.1.175",
         }
         if self.token is not None:
             headers["Authorization"] = f"Bearer {self.token}"
letta_client/steps/__init__.py CHANGED
@@ -1,2 +1,6 @@
 # This file was auto-generated by Fern from our API Definition.

+from .types import AddFeedbackRequestFeedback, StepsListRequestFeedback
+from . import feedback
+
+__all__ = ["AddFeedbackRequestFeedback", "StepsListRequestFeedback", "feedback"]
letta_client/steps/client.py CHANGED
@@ -1,7 +1,9 @@
 # This file was auto-generated by Fern from our API Definition.

 from ..core.client_wrapper import SyncClientWrapper
+from .feedback.client import FeedbackClient
 import typing
+from .types.steps_list_request_feedback import StepsListRequestFeedback
 from ..core.request_options import RequestOptions
 from ..types.step import Step
 from ..core.unchecked_base_model import construct_type
@@ -10,12 +12,15 @@ from ..types.http_validation_error import HttpValidationError
 from json.decoder import JSONDecodeError
 from ..core.api_error import ApiError
 from ..core.jsonable_encoder import jsonable_encoder
+from .types.add_feedback_request_feedback import AddFeedbackRequestFeedback
 from ..core.client_wrapper import AsyncClientWrapper
+from .feedback.client import AsyncFeedbackClient


 class StepsClient:
     def __init__(self, *, client_wrapper: SyncClientWrapper):
         self._client_wrapper = client_wrapper
+        self.feedback = FeedbackClient(client_wrapper=self._client_wrapper)

     def list(
         self,
@@ -29,6 +34,8 @@ class StepsClient:
         model: typing.Optional[str] = None,
         agent_id: typing.Optional[str] = None,
         trace_ids: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None,
+        feedback: typing.Optional[StepsListRequestFeedback] = None,
+        tags: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> typing.List[Step]:
         """
@@ -64,6 +71,12 @@
         trace_ids : typing.Optional[typing.Union[str, typing.Sequence[str]]]
             Filter by trace ids returned by the server

+        feedback : typing.Optional[StepsListRequestFeedback]
+            Filter by feedback
+
+        tags : typing.Optional[typing.Union[str, typing.Sequence[str]]]
+            Filter by tags
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

@@ -94,6 +107,8 @@
                 "model": model,
                 "agent_id": agent_id,
                 "trace_ids": trace_ids,
+                "feedback": feedback,
+                "tags": tags,
             },
             request_options=request_options,
         )
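
The list endpoint now accepts the two extra query filters alongside the existing ones. A minimal sketch combining them (the tag value is a placeholder):

from letta_client import Letta

client = Letta(token="YOUR_TOKEN")

# feedback accepts "positive" or "negative"; tags takes a string or a sequence.
steps = client.steps.list(
    feedback="positive",
    tags=["production"],
)
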
@@ -177,10 +192,78 @@
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)

+    def add_feedback(
+        self,
+        step_id: str,
+        *,
+        feedback: typing.Optional[AddFeedbackRequestFeedback] = None,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> Step:
+        """
+        Add feedback to a step.
+
+        Parameters
+        ----------
+        step_id : str
+
+        feedback : typing.Optional[AddFeedbackRequestFeedback]
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        Step
+            Successful Response
+
+        Examples
+        --------
+        from letta_client import Letta
+
+        client = Letta(
+            token="YOUR_TOKEN",
+        )
+        client.steps.add_feedback(
+            step_id="step_id",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            f"v1/steps/{jsonable_encoder(step_id)}/feedback",
+            method="PATCH",
+            params={
+                "feedback": feedback,
+            },
+            request_options=request_options,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return typing.cast(
+                    Step,
+                    construct_type(
+                        type_=Step,  # type: ignore
+                        object_=_response.json(),
+                    ),
+                )
+            if _response.status_code == 422:
+                raise UnprocessableEntityError(
+                    typing.cast(
+                        HttpValidationError,
+                        construct_type(
+                            type_=HttpValidationError,  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+

 class AsyncStepsClient:
     def __init__(self, *, client_wrapper: AsyncClientWrapper):
         self._client_wrapper = client_wrapper
+        self.feedback = AsyncFeedbackClient(client_wrapper=self._client_wrapper)

     async def list(
         self,
@@ -194,6 +277,8 @@ class AsyncStepsClient:
         model: typing.Optional[str] = None,
         agent_id: typing.Optional[str] = None,
         trace_ids: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None,
+        feedback: typing.Optional[StepsListRequestFeedback] = None,
+        tags: typing.Optional[typing.Union[str, typing.Sequence[str]]] = None,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> typing.List[Step]:
         """
@@ -229,6 +314,12 @@
         trace_ids : typing.Optional[typing.Union[str, typing.Sequence[str]]]
             Filter by trace ids returned by the server

+        feedback : typing.Optional[StepsListRequestFeedback]
+            Filter by feedback
+
+        tags : typing.Optional[typing.Union[str, typing.Sequence[str]]]
+            Filter by tags
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

@@ -267,6 +358,8 @@
                 "model": model,
                 "agent_id": agent_id,
                 "trace_ids": trace_ids,
+                "feedback": feedback,
+                "tags": tags,
             },
             request_options=request_options,
         )
@@ -357,3 +450,78 @@
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
         raise ApiError(status_code=_response.status_code, body=_response_json)
+
+    async def add_feedback(
+        self,
+        step_id: str,
+        *,
+        feedback: typing.Optional[AddFeedbackRequestFeedback] = None,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> Step:
+        """
+        Add feedback to a step.
+
+        Parameters
+        ----------
+        step_id : str
+
+        feedback : typing.Optional[AddFeedbackRequestFeedback]
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        Step
+            Successful Response
+
+        Examples
+        --------
+        import asyncio
+
+        from letta_client import AsyncLetta
+
+        client = AsyncLetta(
+            token="YOUR_TOKEN",
+        )
+
+
+        async def main() -> None:
+            await client.steps.add_feedback(
+                step_id="step_id",
+            )
+
+
+        asyncio.run(main())
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            f"v1/steps/{jsonable_encoder(step_id)}/feedback",
+            method="PATCH",
+            params={
+                "feedback": feedback,
+            },
+            request_options=request_options,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return typing.cast(
+                    Step,
+                    construct_type(
+                        type_=Step,  # type: ignore
+                        object_=_response.json(),
+                    ),
+                )
+            if _response.status_code == 422:
+                raise UnprocessableEntityError(
+                    typing.cast(
+                        HttpValidationError,
+                        construct_type(
+                            type_=HttpValidationError,  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
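
The add_feedback helper takes the rating as an argument, sends it as a query parameter on a PATCH request, and returns the updated Step; the separate steps.feedback.add endpoint added below POSTs to the same path without a value. A minimal sketch passing an explicit rating (the step id is a placeholder):

from letta_client import Letta

client = Letta(token="YOUR_TOKEN")

step = client.steps.add_feedback(
    step_id="step_id",
    feedback="positive",
)
# The returned Step carries the new optional feedback field.
print(step.feedback)
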
letta_client/steps/feedback/__init__.py ADDED
@@ -0,0 +1,2 @@
+# This file was auto-generated by Fern from our API Definition.
+
letta_client/steps/feedback/client.py ADDED
@@ -0,0 +1,101 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from ...core.client_wrapper import SyncClientWrapper
+import typing
+from ...core.request_options import RequestOptions
+from ...core.jsonable_encoder import jsonable_encoder
+from json.decoder import JSONDecodeError
+from ...core.api_error import ApiError
+from ...core.client_wrapper import AsyncClientWrapper
+
+
+class FeedbackClient:
+    def __init__(self, *, client_wrapper: SyncClientWrapper):
+        self._client_wrapper = client_wrapper
+
+    def add(self, step_id: str, *, request_options: typing.Optional[RequestOptions] = None) -> None:
+        """
+        Parameters
+        ----------
+        step_id : str
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        None
+
+        Examples
+        --------
+        from letta_client import Letta
+
+        client = Letta(
+            token="YOUR_TOKEN",
+        )
+        client.steps.feedback.add(
+            step_id="step_id",
+        )
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            f"v1/steps/{jsonable_encoder(step_id)}/feedback",
+            method="POST",
+            request_options=request_options,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
+
+
+class AsyncFeedbackClient:
+    def __init__(self, *, client_wrapper: AsyncClientWrapper):
+        self._client_wrapper = client_wrapper
+
+    async def add(self, step_id: str, *, request_options: typing.Optional[RequestOptions] = None) -> None:
+        """
+        Parameters
+        ----------
+        step_id : str
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        None
+
+        Examples
+        --------
+        import asyncio
+
+        from letta_client import AsyncLetta
+
+        client = AsyncLetta(
+            token="YOUR_TOKEN",
+        )
+
+
+        async def main() -> None:
+            await client.steps.feedback.add(
+                step_id="step_id",
+            )
+
+
+        asyncio.run(main())
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            f"v1/steps/{jsonable_encoder(step_id)}/feedback",
+            method="POST",
+            request_options=request_options,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                return
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(status_code=_response.status_code, body=_response.text)
+        raise ApiError(status_code=_response.status_code, body=_response_json)
letta_client/steps/types/__init__.py ADDED
@@ -0,0 +1,6 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from .add_feedback_request_feedback import AddFeedbackRequestFeedback
+from .steps_list_request_feedback import StepsListRequestFeedback
+
+__all__ = ["AddFeedbackRequestFeedback", "StepsListRequestFeedback"]
letta_client/steps/types/add_feedback_request_feedback.py ADDED
@@ -0,0 +1,5 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+AddFeedbackRequestFeedback = typing.Union[typing.Literal["positive", "negative"], typing.Any]
letta_client/steps/types/steps_list_request_feedback.py ADDED
@@ -0,0 +1,5 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+StepsListRequestFeedback = typing.Union[typing.Literal["positive", "negative"], typing.Any]
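
Both request aliases are unions of the two literals with typing.Any, so static checkers accept the documented values while leaving room for values the API may add later. An illustrative sketch:

from letta_client.steps import AddFeedbackRequestFeedback, StepsListRequestFeedback

positive: AddFeedbackRequestFeedback = "positive"
negative: StepsListRequestFeedback = "negative"
# Because of the typing.Any branch, an unknown value such as "neutral" also
# type-checks; whether the server accepts it is not covered by this diff.
unknown: AddFeedbackRequestFeedback = "neutral"
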
letta_client/types/__init__.py CHANGED
@@ -201,6 +201,7 @@ from .source import Source
 from .sse_server_config import SseServerConfig
 from .stdio_server_config import StdioServerConfig
 from .step import Step
+from .step_feedback import StepFeedback
 from .stop_reason_type import StopReasonType
 from .streamable_http_server_config import StreamableHttpServerConfig
 from .supervisor_manager import SupervisorManager
@@ -451,6 +452,7 @@ __all__ = [
     "SseServerConfig",
     "StdioServerConfig",
     "Step",
+    "StepFeedback",
     "StopReasonType",
     "StreamableHttpServerConfig",
     "SupervisorManager",
letta_client/types/step.py CHANGED
@@ -4,6 +4,7 @@ from ..core.unchecked_base_model import UncheckedBaseModel
 import pydantic
 import typing
 from .message import Message
+from .step_feedback import StepFeedback
 from ..core.pydantic_utilities import IS_PYDANTIC_V2


@@ -100,6 +101,11 @@ class Step(UncheckedBaseModel):
     The messages generated during this step.
     """

+    feedback: typing.Optional[StepFeedback] = pydantic.Field(default=None)
+    """
+    The feedback for this step. Must be either 'positive' or 'negative'.
+    """
+
     if IS_PYDANTIC_V2:
         model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
     else:
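
Step.feedback is optional, so payloads from servers that do not set it still parse. A hedged async sketch reading the field back through the new list filter (the tag value is a placeholder):

import asyncio

from letta_client import AsyncLetta

client = AsyncLetta(token="YOUR_TOKEN")


async def main() -> None:
    # Steps matched by this filter should all report "negative" feedback.
    steps = await client.steps.list(feedback="negative", tags="production")
    for step in steps:
        print(step.feedback)


asyncio.run(main())
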
letta_client/types/step_feedback.py ADDED
@@ -0,0 +1,5 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+StepFeedback = typing.Union[typing.Literal["positive", "negative"], typing.Any]
letta_client-0.1.175.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: letta-client
-Version: 0.1.174
+Version: 0.1.175
 Summary:
 Requires-Python: >=3.8,<4.0
 Classifier: Intended Audience :: Developers
letta_client-0.1.175.dist-info/RECORD CHANGED
@@ -1,8 +1,8 @@
-letta_client/__init__.py,sha256=Uv2lKOc9DGdpjZ2KUjxHAWQtEpN24bA5eWpeziiknFI,17714
+letta_client/__init__.py,sha256=ivW66MILcihii1Mco30utv086S2dwmeu1Os75Z9qckQ,17890
 letta_client/agents/__init__.py,sha256=c_9OiE6ofyiPcq9BP37qvo7h0SKmw34PKN3KMxuRja0,1780
 letta_client/agents/blocks/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 letta_client/agents/blocks/client.py,sha256=ecE03lE5tP1AtCMFLT9FzdYyQMx_D7NI5m42b41pV40,24684
-letta_client/agents/client.py,sha256=OBKxImstibXHF0YrEiBFsOfzVkoawmuAtvlM8rNVqYY,88627
+letta_client/agents/client.py,sha256=Vqaf5qcNDWsox-4WqX9Uw_4Au5oBe9Hn_TVaBnpSYdw,89183
 letta_client/agents/context/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 letta_client/agents/context/client.py,sha256=GKKvoG4N_K8Biz9yDjeIHpFG0C8Cwc7tHmEX3pTL_9U,4815
 letta_client/agents/core_memory/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
@@ -63,7 +63,7 @@ letta_client/client_side_access_tokens/types/client_side_access_tokens_create_re
 letta_client/client_side_access_tokens/types/client_side_access_tokens_create_response_policy_data_item_access_item.py,sha256=R-H25IpNp9feSrW8Yj3h9O3UTMVvFniQJElogKxLuoE,254
 letta_client/core/__init__.py,sha256=OKbX2aCZXgHCDUsCouqv-OiX32xA6eFFCKIUH9M5Vzk,1591
 letta_client/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
-letta_client/core/client_wrapper.py,sha256=a_C-q6XTZ0ZUUAr03ZwIO9KWRxpSe4RmrPuDKZjLylY,1998
+letta_client/core/client_wrapper.py,sha256=KNJ2nnmTu38KnBb5re4q4fLIunMQPqXVg0Ypxr_tvSQ,1998
 letta_client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 letta_client/core/file.py,sha256=d4NNbX8XvXP32z8KpK2Xovv33nFfruIrpz0QWxlgpZk,2663
 letta_client/core/http_client.py,sha256=Z77OIxIbL4OAB2IDqjRq_sYa5yNYAWfmdhdCSSvh6Y4,19552
@@ -129,8 +129,13 @@ letta_client/sources/files/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roe
 letta_client/sources/files/client.py,sha256=VwOnQEZpY0j2LqRAPO1EbtfykAYbBwPHcI7DC19L91w,13742
 letta_client/sources/passages/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 letta_client/sources/passages/client.py,sha256=XxpITU_fq9MKiSd8Qu720Dprnxp5wlDEf6yjXaEfwSQ,5969
-letta_client/steps/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
-letta_client/steps/client.py,sha256=_Q9lvzACQlvLRzMnJB6VeNtPKv-PX-YiFeVmkC7_wAs,11756
+letta_client/steps/__init__.py,sha256=MSKzlK6mG9jhcc3-4u97qWL74k3pVT-MhDtGniONybI,242
+letta_client/steps/client.py,sha256=H2TKSio7eFkSrfjGbmaP1Kgmrdgspdto3UgdPKmbjhk,17158
+letta_client/steps/feedback/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
+letta_client/steps/feedback/client.py,sha256=GXsXnrsf-wmuRLvsj8F2wjVcOOg4fbUvseIMjhFm7Xg,2943
+letta_client/steps/types/__init__.py,sha256=OVfnyWcJwDi-inN3YSyOqD0eQ2IQW4UfwpA6fnRKboc,271
+letta_client/steps/types/add_feedback_request_feedback.py,sha256=tCrw9Pmeu0tiJhEhp900iw1-nmDf0R--ZB7H7uplUOI,174
+letta_client/steps/types/steps_list_request_feedback.py,sha256=Au1YSn3UYRc_b4yxUT6hFqru4iJ-SX-_Ndb3PwCYGp8,172
 letta_client/tags/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 letta_client/tags/client.py,sha256=1xIPtMWJ6ssAhPEFgl5CyJHyvND9MHCLIbEzQWxntZ0,5167
 letta_client/telemetry/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
@@ -153,7 +158,7 @@ letta_client/tools/types/delete_mcp_server_response_item.py,sha256=YLIBE7OD535NJ
 letta_client/tools/types/list_mcp_servers_response_value.py,sha256=Eyji5qB7FhowiogsAbpcU_aMyH9zClv9lUMmHOmNPYk,379
 letta_client/tools/types/update_mcp_server_request.py,sha256=SEMNYHB_mwJNSMHKO7keU0C_CMBktV7lfZUnACPe_fU,314
 letta_client/tools/types/update_mcp_server_response.py,sha256=muwHagaQBMwQI0of9EBCBtG9lD-jELFAevgTB2MjpFQ,375
-letta_client/types/__init__.py,sha256=dL4BviMX9cBydo1RevGw8yjAgEuR2lwbJl50_d2qldg,22253
+letta_client/types/__init__.py,sha256=gijUXzC79eFOCNbc2zQkM3GlhHTsXxcixcuNskoFIMw,22313
 letta_client/types/action_model.py,sha256=y1e2XMv3skFaNJIBdYoBKgiORzGh05aOVvu-qVR9uHg,1240
 letta_client/types/action_parameters_model.py,sha256=LgKf5aPZG3-OHGxFdXiSokIDgce8c02xPYIAY05VgW8,828
 letta_client/types/action_response_model.py,sha256=yq2Fd9UU8j7vvtE3VqXUoRRvDzWcfJPj_95ynGdeHCs,824
@@ -350,7 +355,8 @@ letta_client/types/sleeptime_manager_update.py,sha256=JMzgtvGMDI5VBzlTuzm4FpuFAL
 letta_client/types/source.py,sha256=BsfE9yrefXREQtskGZnR-TFGqmHkFKIGHC5udtHUi14,2370
 letta_client/types/sse_server_config.py,sha256=IN-FdECflYF-XiIM_fvVOwyDu215Csoixepv44PAVvQ,1738
 letta_client/types/stdio_server_config.py,sha256=dEQ7bguiLikGemLxYZJ3JCmmEQgAMsSPO_P52oHZSl0,1091
-letta_client/types/step.py,sha256=-5KHfBc6NZnYGLXHJMK6Bdyw2ae0G1zPFzsURjPiN3c,3133
+letta_client/types/step.py,sha256=iMc18pPufFs9Bs-QLYWWChZ0-Dy2WHhp9aJ_Dszb0uE,3338
+letta_client/types/step_feedback.py,sha256=JXUkclvJ6C-6ZTgd2lteOxqEyO5KRDNQ8ronBPYMdbo,160
 letta_client/types/stop_reason_type.py,sha256=PyYTS9bIvCSDfzyG4wJyk6bi9fCdDBNsoleLd7nMJYI,228
 letta_client/types/streamable_http_server_config.py,sha256=ya5IZi_bHa3IW0SIbWuQfKXUqSAPlZ_qGZYXiwl4sh0,1775
 letta_client/types/supervisor_manager.py,sha256=VdR1ySp4k43apxM8Bb5uNoBvADsvz8oMEEtDy2F5K6M,676
@@ -405,6 +411,6 @@ letta_client/types/web_search_options_user_location_approximate.py,sha256=Ywk01J
 letta_client/version.py,sha256=bttKLbIhO3UonCYQlqs600zzbQgfhCCMjeXR9WRzid4,79
 letta_client/voice/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 letta_client/voice/client.py,sha256=EX79F2D5bieXNP8g1ZPw8xwAzqE1A3hshCHUSlTV1kw,5739
-letta_client-0.1.174.dist-info/METADATA,sha256=mklVjfyyI2Fl6cPcKGL560igIKEYC0B_bQhod5LHRX8,5093
-letta_client-0.1.174.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
-letta_client-0.1.174.dist-info/RECORD,,
+letta_client-0.1.175.dist-info/METADATA,sha256=nH9P0Ggf4OZnTxWlvFR2hr9fto9Qod1gjbc3u8AvV5I,5093
+letta_client-0.1.175.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+letta_client-0.1.175.dist-info/RECORD,,