letta-client 0.1.290__py3-none-any.whl → 0.1.292__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


@@ -516,6 +516,7 @@ class AgentsClient:
  self,
  *,
  file: core.File,
+ override_embedding_model: typing.Optional[str] = None,
  append_copy_suffix: typing.Optional[bool] = OMIT,
  override_existing_tools: typing.Optional[bool] = OMIT,
  override_embedding_handle: typing.Optional[str] = OMIT,
@@ -533,6 +534,8 @@ class AgentsClient:
  file : core.File
  See core.File for more documentation

+ override_embedding_model : typing.Optional[str]
+
  append_copy_suffix : typing.Optional[bool]
  If set to True, appends "_copy" to the end of the agent name.

@@ -571,6 +574,7 @@ class AgentsClient:
  """
  _response = self._raw_client.import_file(
  file=file,
+ override_embedding_model=override_embedding_model,
  append_copy_suffix=append_copy_suffix,
  override_existing_tools=override_existing_tools,
  override_embedding_handle=override_embedding_handle,
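
For context, a minimal usage sketch of the updated sync call. The agent file path and the embedding model handle below are placeholders, not taken from this diff; the client construction mirrors the SDK's own docstring examples.

from letta_client import Letta

client = Letta(
    project="YOUR_PROJECT",
    token="YOUR_TOKEN",
)

# Import an agent file and ask the server to use a specific embedding model;
# the value is forwarded as the "x-override-embedding-model" request header.
with open("my_agent.af", "rb") as f:  # placeholder path
    client.agents.import_file(
        file=f,
        override_embedding_model="text-embedding-3-small",  # placeholder handle
    )

The same keyword argument is mirrored on the async and raw clients in the hunks that follow.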
@@ -1504,6 +1508,7 @@ class AsyncAgentsClient:
  self,
  *,
  file: core.File,
+ override_embedding_model: typing.Optional[str] = None,
  append_copy_suffix: typing.Optional[bool] = OMIT,
  override_existing_tools: typing.Optional[bool] = OMIT,
  override_embedding_handle: typing.Optional[str] = OMIT,
@@ -1521,6 +1526,8 @@ class AsyncAgentsClient:
  file : core.File
  See core.File for more documentation

+ override_embedding_model : typing.Optional[str]
+
  append_copy_suffix : typing.Optional[bool]
  If set to True, appends "_copy" to the end of the agent name.

@@ -1567,6 +1574,7 @@ class AsyncAgentsClient:
  """
  _response = await self._raw_client.import_file(
  file=file,
+ override_embedding_model=override_embedding_model,
  append_copy_suffix=append_copy_suffix,
  override_existing_tools=override_existing_tools,
  override_embedding_handle=override_embedding_handle,
@@ -565,6 +565,7 @@ class RawAgentsClient:
  self,
  *,
  file: core.File,
+ override_embedding_model: typing.Optional[str] = None,
  append_copy_suffix: typing.Optional[bool] = OMIT,
  override_existing_tools: typing.Optional[bool] = OMIT,
  override_embedding_handle: typing.Optional[str] = OMIT,
@@ -582,6 +583,8 @@ class RawAgentsClient:
  file : core.File
  See core.File for more documentation

+ override_embedding_model : typing.Optional[str]
+
  append_copy_suffix : typing.Optional[bool]
  If set to True, appends "_copy" to the end of the agent name.

@@ -622,6 +625,11 @@ class RawAgentsClient:
  files={
  "file": file,
  },
+ headers={
+ "x-override-embedding-model": str(override_embedding_model)
+ if override_embedding_model is not None
+ else None,
+ },
  request_options=request_options,
  omit=OMIT,
  force_multipart=True,
@@ -1700,6 +1708,7 @@ class AsyncRawAgentsClient:
  self,
  *,
  file: core.File,
+ override_embedding_model: typing.Optional[str] = None,
  append_copy_suffix: typing.Optional[bool] = OMIT,
  override_existing_tools: typing.Optional[bool] = OMIT,
  override_embedding_handle: typing.Optional[str] = OMIT,
@@ -1717,6 +1726,8 @@ class AsyncRawAgentsClient:
  file : core.File
  See core.File for more documentation

+ override_embedding_model : typing.Optional[str]
+
  append_copy_suffix : typing.Optional[bool]
  If set to True, appends "_copy" to the end of the agent name.

@@ -1757,6 +1768,11 @@ class AsyncRawAgentsClient:
  files={
  "file": file,
  },
+ headers={
+ "x-override-embedding-model": str(override_embedding_model)
+ if override_embedding_model is not None
+ else None,
+ },
  request_options=request_options,
  omit=OMIT,
  force_multipart=True,
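
The raw clients attach the new header only when the argument is provided. Below is a standalone sketch of that conditional-header idiom, under the assumption that None-valued entries are dropped before the request goes on the wire; the helper name is illustrative, not part of the SDK.

import typing

def build_override_headers(override_embedding_model: typing.Optional[str]) -> typing.Dict[str, str]:
    # Mirror the generated pattern: the key maps to None when no override is given...
    raw = {
        "x-override-embedding-model": str(override_embedding_model)
        if override_embedding_model is not None
        else None,
    }
    # ...and None entries are filtered out so the header is simply omitted.
    return {k: v for k, v in raw.items() if v is not None}

print(build_override_headers(None))                      # {}
print(build_override_headers("text-embedding-3-small"))  # {'x-override-embedding-model': 'text-embedding-3-small'}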
@@ -24,10 +24,10 @@ class BaseClientWrapper:

  def get_headers(self) -> typing.Dict[str, str]:
  headers: typing.Dict[str, str] = {
- "User-Agent": "letta-client/0.1.290",
+ "User-Agent": "letta-client/0.1.292",
  "X-Fern-Language": "Python",
  "X-Fern-SDK-Name": "letta-client",
- "X-Fern-SDK-Version": "0.1.290",
+ "X-Fern-SDK-Version": "0.1.292",
  **(self.get_custom_headers() or {}),
  }
  if self._project is not None:
@@ -70,6 +70,30 @@ class ModelsClient:
  )
  return _response.data

+ def listembeddingmodels(self, *, request_options: typing.Optional[RequestOptions] = None) -> None:
+ """
+ Parameters
+ ----------
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ None
+
+ Examples
+ --------
+ from letta_client import Letta
+
+ client = Letta(
+ project="YOUR_PROJECT",
+ token="YOUR_TOKEN",
+ )
+ client.models.listembeddingmodels()
+ """
+ _response = self._raw_client.listembeddingmodels(request_options=request_options)
+ return _response.data
+

  class AsyncModelsClient:
  def __init__(self, *, client_wrapper: AsyncClientWrapper):
@@ -138,3 +162,35 @@ class AsyncModelsClient:
  request_options=request_options,
  )
  return _response.data
+
+ async def listembeddingmodels(self, *, request_options: typing.Optional[RequestOptions] = None) -> None:
+ """
+ Parameters
+ ----------
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ None
+
+ Examples
+ --------
+ import asyncio
+
+ from letta_client import AsyncLetta
+
+ client = AsyncLetta(
+ project="YOUR_PROJECT",
+ token="YOUR_TOKEN",
+ )
+
+
+ async def main() -> None:
+ await client.models.listembeddingmodels()
+
+
+ asyncio.run(main())
+ """
+ _response = await self._raw_client.listembeddingmodels(request_options=request_options)
+ return _response.data
@@ -82,6 +82,30 @@ class RawModelsClient:
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

+ def listembeddingmodels(self, *, request_options: typing.Optional[RequestOptions] = None) -> HttpResponse[None]:
+ """
+ Parameters
+ ----------
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ HttpResponse[None]
+ """
+ _response = self._client_wrapper.httpx_client.request(
+ "v1/models/embeddings",
+ method="GET",
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return HttpResponse(response=_response, data=None)
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
+

  class AsyncRawModelsClient:
  def __init__(self, *, client_wrapper: AsyncClientWrapper):
@@ -149,3 +173,29 @@ class AsyncRawModelsClient:
  except JSONDecodeError:
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
  raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
+
+ async def listembeddingmodels(
+ self, *, request_options: typing.Optional[RequestOptions] = None
+ ) -> AsyncHttpResponse[None]:
+ """
+ Parameters
+ ----------
+ request_options : typing.Optional[RequestOptions]
+ Request-specific configuration.
+
+ Returns
+ -------
+ AsyncHttpResponse[None]
+ """
+ _response = await self._client_wrapper.httpx_client.request(
+ "v1/models/embeddings",
+ method="GET",
+ request_options=request_options,
+ )
+ try:
+ if 200 <= _response.status_code < 300:
+ return AsyncHttpResponse(response=_response, data=None)
+ _response_json = _response.json()
+ except JSONDecodeError:
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
+ raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
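
The raw clients show that the new method is a plain GET on v1/models/embeddings with no parsed response body (the SDK types the data as None). An equivalent direct request with httpx, assuming a placeholder base URL and bearer-token auth:

import httpx

# Placeholder server address and token; substitute your own deployment's values.
BASE_URL = "https://your-letta-server.example"
TOKEN = "YOUR_TOKEN"

response = httpx.get(
    f"{BASE_URL}/v1/models/embeddings",
    headers={"Authorization": f"Bearer {TOKEN}"},
)
response.raise_for_status()
# The generated SDK discards the body (return type None); inspect it directly if needed.
print(response.json())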
@@ -102,6 +102,11 @@ class LlmConfig(UncheckedBaseModel):
  Soft control for how verbose model output should be, used for GPT-5 models.
  """

+ tier: typing.Optional[str] = pydantic.Field(default=None)
+ """
+ The cost tier for the model (cloud only).
+ """
+
  if IS_PYDANTIC_V2:
  model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
  else:
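
A short sketch of reading the new field from an agent's configuration. The retrieval call and the placeholder agent ID are assumptions about the surrounding SDK surface; only the optional `tier` attribute itself comes from this diff.

from letta_client import Letta

client = Letta(
    project="YOUR_PROJECT",
    token="YOUR_TOKEN",
)

# "agent-123" is a placeholder ID. On the cloud service llm_config may now carry a
# cost tier string; self-hosted servers are expected to leave it as None.
agent = client.agents.retrieve(agent_id="agent-123")
tier = agent.llm_config.tier
print(tier if tier is not None else "no tier reported")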
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: letta-client
- Version: 0.1.290
+ Version: 0.1.292
  Summary:
  Requires-Python: >=3.8,<4.0
  Classifier: Intended Audience :: Developers
@@ -3,7 +3,7 @@ letta_client/agents/__init__.py,sha256=mrreK8Hk5PitAA7gomrc2g9EW9idazMGLDTHBAPrR
  letta_client/agents/blocks/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
  letta_client/agents/blocks/client.py,sha256=CUwVh5FHgD0YP3VNhUrWdkedMWk49yH3IiDD589AWEM,15809
  letta_client/agents/blocks/raw_client.py,sha256=Cx_85c78oqIOPZIPfCOsIa8WOL2EUNRwXJRGbOqn2AA,25570
- letta_client/agents/client.py,sha256=txXN5rtyN6ZArNusnIwuTvLpZkGtD1QAcmmFQQrFD0E,73762
+ letta_client/agents/client.py,sha256=Vbp6YR8VGA47BZdPx-sB3TP8ziFqCcR97_NBK54jiF0,74128
  letta_client/agents/context/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
  letta_client/agents/context/client.py,sha256=fhpJFWRs6INGreRyEw9gsFnlUWR48vIHbN_jVIHIBrw,3052
  letta_client/agents/context/raw_client.py,sha256=j2gko-oEFWuCgPkcX9jCv31OWvR6sTOtAYcSWllXYDs,4747
@@ -35,7 +35,7 @@ letta_client/agents/messages/types/messages_preview_raw_payload_request.py,sha25
  letta_client/agents/passages/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
  letta_client/agents/passages/client.py,sha256=XHPpqOH2BDjHkegTRM9MRdDVxW5VH40ERSFvWchWT48,16785
  letta_client/agents/passages/raw_client.py,sha256=TnNrFsnrexrPVmemkFbRIBfFMcq1Iap2qk23L7mr1Z0,25710
- letta_client/agents/raw_client.py,sha256=s8Ry5sNnTqwuKdYu3WM5xL_rCGQ0oUseRP0BzIwmM9I,97322
+ letta_client/agents/raw_client.py,sha256=h3USsoctFijFNYUQDi6OdgvpJBtj7a8ekrL58EtsV7A,97954
  letta_client/agents/sources/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
  letta_client/agents/sources/client.py,sha256=lCqB6FF9svrwf0oZSFs41WKlMXc-YRhUeb4FZkHbicM,6868
  letta_client/agents/sources/raw_client.py,sha256=ts4c5UBuXzrHU-lFWWrYniQqrMEc8SN0rfiqNXJLP5Y,12399
@@ -89,7 +89,7 @@ letta_client/client_side_access_tokens/types/client_side_access_tokens_list_clie
  letta_client/client_side_access_tokens/types/client_side_access_tokens_list_client_side_access_tokens_response_tokens_item_policy_data_item_access_item.py,sha256=kNHfEWFl7u71Pu8NPqutod0a2NXfvq8il05Hqm0iBB4,284
  letta_client/core/__init__.py,sha256=tpn7rjb6C2UIkYZYIqdrNpI7Yax2jw88sXh2baxaxAI,1715
  letta_client/core/api_error.py,sha256=44vPoTyWN59gonCIZMdzw7M1uspygiLnr3GNFOoVL2Q,614
- letta_client/core/client_wrapper.py,sha256=4-soOpl4hZ-FQwvNjrIwGgUZOS2IDo-zuSLyA9uFJl8,2776
+ letta_client/core/client_wrapper.py,sha256=wvGuWzw-3GOwgE5JYNao1qxd5UKv4DIzWvcDtoZvHr8,2776
  letta_client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
  letta_client/core/file.py,sha256=d4NNbX8XvXP32z8KpK2Xovv33nFfruIrpz0QWxlgpZk,2663
  letta_client/core/force_multipart.py,sha256=awxh5MtcRYe74ehY8U76jzv6fYM_w_D3Rur7KQQzSDk,429
@@ -150,8 +150,8 @@ letta_client/messages/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqh
  letta_client/messages/client.py,sha256=XkRwRPmkGURZY9cmwqi3TABOaYO2cFL27FmgZB0vPvo,5397
  letta_client/messages/raw_client.py,sha256=lARe5xhB0ntMkrSwWyi0SruzuEQ_tCcN7hiE3isiYqU,7114
  letta_client/models/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
- letta_client/models/client.py,sha256=9Z7gnJmZs7yAihEMQ6YxH8WtEt-uvxSAYFfX5idtMWg,4200
- letta_client/models/raw_client.py,sha256=oPsxG4Vym9Kpz21DOadFstw9lb-8ViGh3K6K8YQsV1I,5986
+ letta_client/models/client.py,sha256=B5QsYjmml0Cxlol8tlbtA8bX-dAu0cXgde4QXb2eVmc,5632
+ letta_client/models/raw_client.py,sha256=0AryEdljqcaSwNYQbZ8Q7ieDLuu2Cuk8Z1bdXaKA-VA,7945
  letta_client/projects/__init__.py,sha256=IC61JeyeW7qLtLPtPDy_5kemZCKNeiVEz2TndPEz4GU,231
  letta_client/projects/client.py,sha256=2VojFHMlsyQgDxQer6RndPw5W0l-yXX7WBy7TMhO5AA,3336
  letta_client/projects/raw_client.py,sha256=HhRNyG5FT8810HOaW9lQI2qbG2ZmPyaC7hMMNjYXmC4,4155
@@ -428,7 +428,7 @@ letta_client/types/letta_stop_reason.py,sha256=jYkXBnAKsdPS8tmdv_xumyVVQk9OoHKFy
  letta_client/types/letta_streaming_request.py,sha256=NX8WMPMUWiWJyM3bNBBpdy-cw61VIZQMm6iJcUjhr38,2391
  letta_client/types/letta_usage_statistics.py,sha256=uZZq2lVOGHK6N-VhA0oknQfUjE9Zb0sMYh0mHDvl-lc,1887
  letta_client/types/letta_user_message_content_union.py,sha256=2SrcmMjvsQzCvfIUYG7PkaE4brMZcL6H437GSCLK4zg,230
- letta_client/types/llm_config.py,sha256=QxVLfO04egVhKg1J3nB-FkrrMpyK124HpgAyVD-_zk0,3864
+ letta_client/types/llm_config.py,sha256=-fyzSmnQpkQ1vqqQOSyuIvMDfgSMMdrDtj-0Tbsw42I,3989
  letta_client/types/llm_config_compatibility_type.py,sha256=m6E90W-R9-Oi3EGSV_GdPIuVC2rmAH7TsUKbl79EiAQ,165
  letta_client/types/llm_config_model_endpoint_type.py,sha256=o59NDg3-3ud2mqAPYze40G7kyVD7pkRRbdT_vdTqL24,602
  letta_client/types/llm_config_reasoning_effort.py,sha256=r4I3i2c7RxkBe-xXOE_XCXwjp9Y0QoaF2SVY7WYPdg4,184
@@ -566,6 +566,6 @@ letta_client/version.py,sha256=bttKLbIhO3UonCYQlqs600zzbQgfhCCMjeXR9WRzid4,79
  letta_client/voice/__init__.py,sha256=_VhToAyIt_5axN6CLJwtxg3-CO7THa_23pbUzqhXJa4,85
  letta_client/voice/client.py,sha256=EbIVOQh4HXqU9McATxwga08STk-HUwPEAUr_UHqyKHg,3748
  letta_client/voice/raw_client.py,sha256=KvM_3GXuSf51bubM0RVBnxvlf20qZTFMnaA_BzhXzjQ,5938
- letta_client-0.1.290.dist-info/METADATA,sha256=9HeSfSublK1yplFpz76F0oQvq6wwl345wV49j8niYeY,5782
- letta_client-0.1.290.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
- letta_client-0.1.290.dist-info/RECORD,,
+ letta_client-0.1.292.dist-info/METADATA,sha256=4Fcx61qxwTn0AumFq3nHG4WX0r-g-6_knPKvK5jDGuw,5782
+ letta_client-0.1.292.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+ letta_client-0.1.292.dist-info/RECORD,,