letta-client 0.1.274__py3-none-any.whl → 0.1.275__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of letta-client was flagged as potentially problematic.

@@ -258,6 +258,7 @@ class MessagesClient:
         enable_thinking: typing.Optional[str] = OMIT,
         stream_tokens: typing.Optional[bool] = OMIT,
         include_pings: typing.Optional[bool] = OMIT,
+        background: typing.Optional[bool] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> typing.Iterator[LettaStreamingResponse]:
         """
@@ -296,6 +297,9 @@ class MessagesClient:
         include_pings : typing.Optional[bool]
             Whether to include periodic keepalive ping messages in the stream to prevent connection timeouts.

+        background : typing.Optional[bool]
+            Whether to process the request in the background.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

@@ -339,6 +343,7 @@ class MessagesClient:
             enable_thinking=enable_thinking,
             stream_tokens=stream_tokens,
             include_pings=include_pings,
+            background=background,
             request_options=request_options,
         ) as r:
             yield from r.data
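Functionally, this is the whole change for the synchronous client: one extra optional keyword that is documented and then forwarded to the raw client. A minimal usage sketch, assuming the streaming method is exposed as client.agents.messages.create_stream and that Letta and MessageCreate are the exported client and message types (none of those names appear in this hunk, so treat them as illustrative):

    # Illustrative only: Letta, MessageCreate, create_stream and the placeholder
    # IDs are assumptions; the diff confirms only the new `background` parameter.
    from letta_client import Letta, MessageCreate

    client = Letta(token="YOUR_API_KEY")

    stream = client.agents.messages.create_stream(
        agent_id="agent-xxxxxxxx",  # placeholder agent ID
        messages=[MessageCreate(role="user", content="Hello")],
        stream_tokens=True,
        include_pings=True,
        background=True,  # new optional flag in 0.1.275
    )
    for chunk in stream:  # typing.Iterator[LettaStreamingResponse]
        print(chunk)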
@@ -840,6 +845,7 @@ class AsyncMessagesClient:
         enable_thinking: typing.Optional[str] = OMIT,
         stream_tokens: typing.Optional[bool] = OMIT,
         include_pings: typing.Optional[bool] = OMIT,
+        background: typing.Optional[bool] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> typing.AsyncIterator[LettaStreamingResponse]:
         """
@@ -878,6 +884,9 @@ class AsyncMessagesClient:
         include_pings : typing.Optional[bool]
             Whether to include periodic keepalive ping messages in the stream to prevent connection timeouts.

+        background : typing.Optional[bool]
+            Whether to process the request in the background.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

@@ -929,6 +938,7 @@ class AsyncMessagesClient:
             enable_thinking=enable_thinking,
             stream_tokens=stream_tokens,
             include_pings=include_pings,
+            background=background,
             request_options=request_options,
         ) as r:
             async for _chunk in r.data:
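The asynchronous client mirrors this exactly. A hedged sketch, under the assumption that the async entry point is AsyncLetta with the same create_stream method name (again, not shown in this diff):

    # Sketch only: AsyncLetta and the method/ID names are assumptions; the diff
    # confirms only that the async stream accepts `background` as well.
    import asyncio

    from letta_client import AsyncLetta, MessageCreate

    async def main() -> None:
        client = AsyncLetta(token="YOUR_API_KEY")
        stream = client.agents.messages.create_stream(
            agent_id="agent-xxxxxxxx",
            messages=[MessageCreate(role="user", content="Hello")],
            background=True,  # same new flag as the sync client
        )
        async for chunk in stream:  # typing.AsyncIterator[LettaStreamingResponse]
            print(chunk)

    asyncio.run(main())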
@@ -303,6 +303,7 @@ class RawMessagesClient:
         enable_thinking: typing.Optional[str] = OMIT,
         stream_tokens: typing.Optional[bool] = OMIT,
         include_pings: typing.Optional[bool] = OMIT,
+        background: typing.Optional[bool] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> typing.Iterator[HttpResponse[typing.Iterator[LettaStreamingResponse]]]:
         """
@@ -341,6 +342,9 @@ class RawMessagesClient:
         include_pings : typing.Optional[bool]
             Whether to include periodic keepalive ping messages in the stream to prevent connection timeouts.

+        background : typing.Optional[bool]
+            Whether to process the request in the background.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

@@ -364,6 +368,7 @@ class RawMessagesClient:
                 "enable_thinking": enable_thinking,
                 "stream_tokens": stream_tokens,
                 "include_pings": include_pings,
+                "background": background,
             },
             headers={
                 "content-type": "application/json",
@@ -983,6 +988,7 @@ class AsyncRawMessagesClient:
         enable_thinking: typing.Optional[str] = OMIT,
         stream_tokens: typing.Optional[bool] = OMIT,
         include_pings: typing.Optional[bool] = OMIT,
+        background: typing.Optional[bool] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[LettaStreamingResponse]]]:
         """
@@ -1021,6 +1027,9 @@ class AsyncRawMessagesClient:
         include_pings : typing.Optional[bool]
             Whether to include periodic keepalive ping messages in the stream to prevent connection timeouts.

+        background : typing.Optional[bool]
+            Whether to process the request in the background.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

@@ -1044,6 +1053,7 @@ class AsyncRawMessagesClient:
                 "enable_thinking": enable_thinking,
                 "stream_tokens": stream_tokens,
                 "include_pings": include_pings,
+                "background": background,
             },
             headers={
                 "content-type": "application/json",
@@ -24,10 +24,10 @@ class BaseClientWrapper:
 
     def get_headers(self) -> typing.Dict[str, str]:
         headers: typing.Dict[str, str] = {
-            "User-Agent": "letta-client/0.1.274",
+            "User-Agent": "letta-client/0.1.275",
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "letta-client",
-            "X-Fern-SDK-Version": "0.1.274",
+            "X-Fern-SDK-Version": "0.1.275",
             **(self.get_custom_headers() or {}),
         }
         if self._project is not None:
@@ -199,6 +199,7 @@ class MessagesClient:
         enable_thinking: typing.Optional[str] = OMIT,
         stream_tokens: typing.Optional[bool] = OMIT,
         include_pings: typing.Optional[bool] = OMIT,
+        background: typing.Optional[bool] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> typing.Iterator[LettaStreamingResponse]:
         """
@@ -237,6 +238,9 @@ class MessagesClient:
         include_pings : typing.Optional[bool]
             Whether to include periodic keepalive ping messages in the stream to prevent connection timeouts.

+        background : typing.Optional[bool]
+            Whether to process the request in the background.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

@@ -280,6 +284,7 @@ class MessagesClient:
             enable_thinking=enable_thinking,
             stream_tokens=stream_tokens,
             include_pings=include_pings,
+            background=background,
             request_options=request_options,
         ) as r:
             yield from r.data
@@ -562,6 +567,7 @@ class AsyncMessagesClient:
         enable_thinking: typing.Optional[str] = OMIT,
         stream_tokens: typing.Optional[bool] = OMIT,
         include_pings: typing.Optional[bool] = OMIT,
+        background: typing.Optional[bool] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> typing.AsyncIterator[LettaStreamingResponse]:
         """
@@ -600,6 +606,9 @@ class AsyncMessagesClient:
         include_pings : typing.Optional[bool]
             Whether to include periodic keepalive ping messages in the stream to prevent connection timeouts.

+        background : typing.Optional[bool]
+            Whether to process the request in the background.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

@@ -651,6 +660,7 @@ class AsyncMessagesClient:
             enable_thinking=enable_thinking,
             stream_tokens=stream_tokens,
             include_pings=include_pings,
+            background=background,
             request_options=request_options,
         ) as r:
             async for _chunk in r.data:
@@ -225,6 +225,7 @@ class RawMessagesClient:
         enable_thinking: typing.Optional[str] = OMIT,
         stream_tokens: typing.Optional[bool] = OMIT,
         include_pings: typing.Optional[bool] = OMIT,
+        background: typing.Optional[bool] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> typing.Iterator[HttpResponse[typing.Iterator[LettaStreamingResponse]]]:
         """
@@ -263,6 +264,9 @@ class RawMessagesClient:
         include_pings : typing.Optional[bool]
             Whether to include periodic keepalive ping messages in the stream to prevent connection timeouts.

+        background : typing.Optional[bool]
+            Whether to process the request in the background.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

@@ -286,6 +290,7 @@ class RawMessagesClient:
                 "enable_thinking": enable_thinking,
                 "stream_tokens": stream_tokens,
                 "include_pings": include_pings,
+                "background": background,
             },
             headers={
                 "content-type": "application/json",
@@ -652,6 +657,7 @@ class AsyncRawMessagesClient:
         enable_thinking: typing.Optional[str] = OMIT,
         stream_tokens: typing.Optional[bool] = OMIT,
         include_pings: typing.Optional[bool] = OMIT,
+        background: typing.Optional[bool] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[LettaStreamingResponse]]]:
         """
@@ -690,6 +696,9 @@ class AsyncRawMessagesClient:
         include_pings : typing.Optional[bool]
             Whether to include periodic keepalive ping messages in the stream to prevent connection timeouts.

+        background : typing.Optional[bool]
+            Whether to process the request in the background.
+
         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.

@@ -713,6 +722,7 @@ class AsyncRawMessagesClient:
                 "enable_thinking": enable_thinking,
                 "stream_tokens": stream_tokens,
                 "include_pings": include_pings,
+                "background": background,
             },
             headers={
                 "content-type": "application/json",
@@ -55,6 +55,11 @@ class LettaStreamingRequest(UncheckedBaseModel):
     Whether to include periodic keepalive ping messages in the stream to prevent connection timeouts.
     """

+    background: typing.Optional[bool] = pydantic.Field(default=None)
+    """
+    Whether to process the request in the background.
+    """
+
     if IS_PYDANTIC_V2:
         model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
     else:
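A quick way to observe the schema change from user code, without guessing at the model's other required fields (which this hunk does not show), is to introspect the pydantic field table; a hedged sketch:

    # Works on both pydantic v1 (__fields__) and v2 (model_fields); constructing
    # the model is avoided because its required fields are not visible in this diff.
    from letta_client.types.letta_streaming_request import LettaStreamingRequest

    fields = getattr(LettaStreamingRequest, "model_fields", None) or LettaStreamingRequest.__fields__
    print("background" in fields)        # True in 0.1.275, absent in 0.1.274
    print(fields["background"].default)  # None, matching pydantic.Field(default=None)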
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: letta-client
-Version: 0.1.274
+Version: 0.1.275
 Summary: 
 Requires-Python: >=3.8,<4.0
 Classifier: Intended Audience :: Developers
@@ -25,8 +25,8 @@ letta_client/agents/memory_variables/raw_client.py,sha256=lwWJQlKh4InQgrCH8iHPBw
 letta_client/agents/memory_variables/types/__init__.py,sha256=r_Wc0Jjyp1_Y2qC_eWsabWx1sTwoxAaV1s24y8Ep_Zg,200
 letta_client/agents/memory_variables/types/memory_variables_list_response.py,sha256=iAXAqp-J0fnyUK4MMa3PMrYwqQfbui3tiaWaWR9_O5M,600
 letta_client/agents/messages/__init__.py,sha256=e8v77Rj3x-BqXX_NKI1ON66_rDZwbb7ub2nGivPuxnM,373
-letta_client/agents/messages/client.py,sha256=iCYSrSpnLIBpgMWP0pOQ3fAwuJVefPfOLTZO4Qo9gBU,39737
-letta_client/agents/messages/raw_client.py,sha256=FI2JXCjv8-xK1Rr16Z6mNxei0lbW7GbySurh_uJmZAY,57620
+letta_client/agents/messages/client.py,sha256=cpEUzOGduqnxWm85iqGuemdHTwNpEBVoLBxLiHZS4E8,40119
+letta_client/agents/messages/raw_client.py,sha256=z5MvQqOINMm5mk8mKvg5v71x1OKwQjISeS32_7y_s38,58016
 letta_client/agents/messages/types/__init__.py,sha256=UVYgxnuengkOnW2BzXKiWodjQxdBEuwtJd7U83-TIbk,493
 letta_client/agents/messages/types/letta_streaming_response.py,sha256=TEqXH71L621ToSXJpmJk8QUJE71QhXuZOVk4AMeWgFc,801
 letta_client/agents/messages/types/messages_modify_request.py,sha256=0NT3pgbqQItc_p5cjBl4MaJ6bIMAlMhvdBJWm9zilpQ,476
@@ -90,7 +90,7 @@ letta_client/client_side_access_tokens/types/client_side_access_tokens_list_clie
 letta_client/client_side_access_tokens/types/client_side_access_tokens_list_client_side_access_tokens_response_tokens_item_policy_data_item_access_item.py,sha256=kNHfEWFl7u71Pu8NPqutod0a2NXfvq8il05Hqm0iBB4,284
 letta_client/core/__init__.py,sha256=tpn7rjb6C2UIkYZYIqdrNpI7Yax2jw88sXh2baxaxAI,1715
 letta_client/core/api_error.py,sha256=44vPoTyWN59gonCIZMdzw7M1uspygiLnr3GNFOoVL2Q,614
-letta_client/core/client_wrapper.py,sha256=9VWXhGOxKCpDFPhccF47loEd963snjkMYCwMfJ_QgWA,2776
+letta_client/core/client_wrapper.py,sha256=iRiDk7L8K2lk9Ie0YES_9TLAJpnIvbwf7zWe4-gs2bg,2776
 letta_client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 letta_client/core/file.py,sha256=d4NNbX8XvXP32z8KpK2Xovv33nFfruIrpz0QWxlgpZk,2663
 letta_client/core/force_multipart.py,sha256=awxh5MtcRYe74ehY8U76jzv6fYM_w_D3Rur7KQQzSDk,429
@@ -125,8 +125,8 @@ letta_client/folders/raw_client.py,sha256=bd5th49qJBEdmDMB5ugVEzP_mXUeZsCo3Kxn6K
 letta_client/groups/__init__.py,sha256=qrGge7rHzMf1V10ZpxL3rbxmpx9esxUCoQ29gV3Nwhk,454
 letta_client/groups/client.py,sha256=TRiW1CNfMzrxJyHLAiryVLEuPGdWKQ76xb8f_u_4cRc,16857
 letta_client/groups/messages/__init__.py,sha256=sX6KPOYCrwTCyzKGNBxejgOvDNFopOJkWd037ei6Uik,263
-letta_client/groups/messages/client.py,sha256=7yZtmrH30TvqdeYDNnhjHe-rdo7fD9yVGADgSIizyEw,25111
-letta_client/groups/messages/raw_client.py,sha256=JArU4UGMOROV3zDwMoDyxQaPLF1dRRFpdxXVoyGOWdc,37258
+letta_client/groups/messages/client.py,sha256=ju1VUWbIQzqd4DqcX7Ksbu-iFk_mA-iA_3_GkPcmL4E,25493
+letta_client/groups/messages/raw_client.py,sha256=3rgQhKlxtWBjBX489PQrT4DaqIJJc4sDfnibTqpjTsI,37654
 letta_client/groups/messages/types/__init__.py,sha256=8WRhpu-Xkvw1oqY-n0vptOO46BddTEt68L1kVVClvNM,355
 letta_client/groups/messages/types/letta_streaming_response.py,sha256=IFJSwCiedThHyMYhK4CqLV01HqS8JZ1KZDs4V13Yc0w,666
 letta_client/groups/messages/types/messages_modify_request.py,sha256=0NT3pgbqQItc_p5cjBl4MaJ6bIMAlMhvdBJWm9zilpQ,476
@@ -423,7 +423,7 @@ letta_client/types/letta_serialize_schemas_pydantic_agent_schema_agent_schema_to
 letta_client/types/letta_serialize_schemas_pydantic_agent_schema_message_schema.py,sha256=RPwPiGzs8TU1JwkQIRm1wFHLt8a5KU4WFFvuDvh_NII,1037
 letta_client/types/letta_serialize_schemas_pydantic_agent_schema_tool_schema.py,sha256=qBuwEw6yao9zihtGIdruSG6SFiVYDwyzumW8V4YJJmc,1163
 letta_client/types/letta_stop_reason.py,sha256=jYkXBnAKsdPS8tmdv_xumyVVQk9OoHKFypAgNZejCyo,932
-letta_client/types/letta_streaming_request.py,sha256=pEq4WEzKTtKA5XdyR544D6kEQztJQxg0PSeMSHteDfg,2251
+letta_client/types/letta_streaming_request.py,sha256=NX8WMPMUWiWJyM3bNBBpdy-cw61VIZQMm6iJcUjhr38,2391
 letta_client/types/letta_usage_statistics.py,sha256=uZZq2lVOGHK6N-VhA0oknQfUjE9Zb0sMYh0mHDvl-lc,1887
 letta_client/types/letta_user_message_content_union.py,sha256=2SrcmMjvsQzCvfIUYG7PkaE4brMZcL6H437GSCLK4zg,230
 letta_client/types/llm_config.py,sha256=QxVLfO04egVhKg1J3nB-FkrrMpyK124HpgAyVD-_zk0,3864
@@ -564,6 +564,6 @@ letta_client/version.py,sha256=bttKLbIhO3UonCYQlqs600zzbQgfhCCMjeXR9WRzid4,79
 letta_client/voice/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
 letta_client/voice/client.py,sha256=EbIVOQh4HXqU9McATxwga08STk-HUwPEAUr_UHqyKHg,3748
 letta_client/voice/raw_client.py,sha256=KvM_3GXuSf51bubM0RVBnxvlf20qZTFMnaA_BzhXzjQ,5938
-letta_client-0.1.274.dist-info/METADATA,sha256=YJpwnmguzTVVTiXc_C5URNkzU35bU7NNfiKkUiKF1pI,5781
-letta_client-0.1.274.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
-letta_client-0.1.274.dist-info/RECORD,,
+letta_client-0.1.275.dist-info/METADATA,sha256=53Hu69viZUy-yEO2jGr_MvWdQNt0OIRWvrIuRjls2j4,5781
+letta_client-0.1.275.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+letta_client-0.1.275.dist-info/RECORD,,