meilisearch-python-sdk 2.8.0__py3-none-any.whl → 2.10.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of meilisearch-python-sdk might be problematic; consult the package registry's advisory page for more details.

@@ -194,6 +194,7 @@ class AsyncClient(BaseClient):
194
194
  *,
195
195
  settings: MeilisearchSettings | None = None,
196
196
  wait: bool = True,
197
+ timeout_in_ms: int | None = None,
197
198
  plugins: AsyncIndexPlugins | None = None,
198
199
  ) -> AsyncIndex:
199
200
  """Creates a new index.
@@ -210,6 +211,9 @@ class AsyncClient(BaseClient):
210
211
  wait: If set to True and settings are being updated, the index will be returned after
211
212
  the settings update has completed. If False it will not wait for settings to complete.
212
213
  Default: True
214
+ timeout_in_ms: Amount of time in milliseconds to wait before raising a
215
+ MeilisearchTimeoutError. `None` can also be passed to wait indefinitely. Be aware that
216
+ if the `None` option is used the wait time could be very long. Defaults to None.
213
217
  plugins: Optional plugins can be provided to extend functionality.
214
218
 
215
219
  Returns:
@@ -228,7 +232,13 @@ class AsyncClient(BaseClient):
228
232
  >>> index = await client.create_index("movies")
229
233
  """
230
234
  return await AsyncIndex.create(
231
- self.http_client, uid, primary_key, settings=settings, wait=wait, plugins=plugins
235
+ self.http_client,
236
+ uid,
237
+ primary_key,
238
+ settings=settings,
239
+ wait=wait,
240
+ timeout_in_ms=timeout_in_ms,
241
+ plugins=plugins,
232
242
  )
233
243
 
234
244
  async def create_snapshot(self) -> TaskInfo:
@@ -470,6 +480,7 @@ class AsyncClient(BaseClient):
470
480
  warn(
471
481
  "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
472
482
  DeprecationWarning,
483
+ stacklevel=2,
473
484
  )
474
485
  response = await self._http_requests.post("keys", json.loads(key.json(by_alias=True))) # type: ignore[attr-defined]
475
486
 
@@ -625,6 +636,7 @@ class AsyncClient(BaseClient):
625
636
  warn(
626
637
  "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
627
638
  DeprecationWarning,
639
+ stacklevel=2,
628
640
  )
629
641
  response = await self._http_requests.post(
630
642
  url,
@@ -870,14 +882,14 @@ class AsyncClient(BaseClient):
870
882
 
871
883
  Args:
872
884
 
873
- uids: A list of task UIDs to cancel.
874
- index_uids: A list of index UIDs for which to cancel tasks.
875
- statuses: A list of statuses to cancel.
876
- types: A list of types to cancel.
877
- before_enqueued_at: Cancel tasks that were enqueued before the specified date time.
878
- after_enqueued_at: Cancel tasks that were enqueued after the specified date time.
879
- before_started_at: Cancel tasks that were started before the specified date time.
880
- after_finished_at: Cancel tasks that were finished after the specified date time.
885
+ uids: A list of task UIDs to delete.
886
+ index_uids: A list of index UIDs for which to delete tasks.
887
+ statuses: A list of statuses to delete.
888
+ types: A list of types to delete.
889
+ before_enqueued_at: Delete tasks that were enqueued before the specified date time.
890
+ after_enqueued_at: Delete tasks that were enqueued after the specified date time.
891
+ before_started_at: Delete tasks that were started before the specified date time.
892
+ after_finished_at: Delete tasks that were finished after the specified date time.
881
893
 
882
894
  Returns:
883
895
 
@@ -1053,6 +1065,7 @@ class Client(BaseClient):
1053
1065
  *,
1054
1066
  settings: MeilisearchSettings | None = None,
1055
1067
  wait: bool = True,
1068
+ timeout_in_ms: int | None = None,
1056
1069
  plugins: IndexPlugins | None = None,
1057
1070
  ) -> Index:
1058
1071
  """Creates a new index.
@@ -1069,6 +1082,9 @@ class Client(BaseClient):
1069
1082
  wait: If set to True and settings are being updated, the index will be returned after
1070
1083
  the settings update has completed. If False it will not wait for settings to complete.
1071
1084
  Default: True
1085
+ timeout_in_ms: Amount of time in milliseconds to wait before raising a
1086
+ MeilisearchTimeoutError. `None` can also be passed to wait indefinitely. Be aware that
1087
+ if the `None` option is used the wait time could be very long. Defaults to None.
1072
1088
  plugins: Optional plugins can be provided to extend functionality.
1073
1089
 
1074
1090
  Returns:
@@ -1087,7 +1103,13 @@ class Client(BaseClient):
1087
1103
  >>> index = client.create_index("movies")
1088
1104
  """
1089
1105
  return Index.create(
1090
- self.http_client, uid, primary_key, settings=settings, wait=wait, plugins=plugins
1106
+ self.http_client,
1107
+ uid,
1108
+ primary_key,
1109
+ settings=settings,
1110
+ wait=wait,
1111
+ timeout_in_ms=timeout_in_ms,
1112
+ plugins=plugins,
1091
1113
  )
1092
1114
 
1093
1115
  def create_snapshot(self) -> TaskInfo:
@@ -1327,6 +1349,7 @@ class Client(BaseClient):
1327
1349
  warn(
1328
1350
  "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
1329
1351
  DeprecationWarning,
1352
+ stacklevel=2,
1330
1353
  )
1331
1354
  response = self._http_requests.post("keys", json.loads(key.json(by_alias=True))) # type: ignore[attr-defined]
1332
1355
 
@@ -1482,6 +1505,7 @@ class Client(BaseClient):
1482
1505
  warn(
1483
1506
  "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
1484
1507
  DeprecationWarning,
1508
+ stacklevel=2,
1485
1509
  )
1486
1510
  response = self._http_requests.post(
1487
1511
  url,
@@ -1701,14 +1725,14 @@ class Client(BaseClient):
1701
1725
 
1702
1726
  Args:
1703
1727
 
1704
- uids: A list of task UIDs to cancel.
1705
- index_uids: A list of index UIDs for which to cancel tasks.
1706
- statuses: A list of statuses to cancel.
1707
- types: A list of types to cancel.
1708
- before_enqueued_at: Cancel tasks that were enqueued before the specified date time.
1709
- after_enqueued_at: Cancel tasks that were enqueued after the specified date time.
1710
- before_started_at: Cancel tasks that were started before the specified date time.
1711
- after_finished_at: Cancel tasks that were finished after the specified date time.
1728
+ uids: A list of task UIDs to delete.
1729
+ index_uids: A list of index UIDs for which to delete tasks.
1730
+ statuses: A list of statuses to delete.
1731
+ types: A list of types to delete.
1732
+ before_enqueued_at: Delete tasks that were enqueued before the specified date time.
1733
+ after_enqueued_at: Delete tasks that were enqueued after the specified date time.
1734
+ before_started_at: Delete tasks that were started before the specified date time.
1735
+ after_finished_at: Delete tasks that were finished after the specified date time.
1712
1736
 
1713
1737
  Returns:
1714
1738
 
@@ -1875,6 +1899,7 @@ def _build_update_key_payload(key: KeyUpdate) -> JsonDict:
1875
1899
  warn(
1876
1900
  "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
1877
1901
  DeprecationWarning,
1902
+ stacklevel=2,
1878
1903
  )
1879
1904
  return { # type: ignore[attr-defined]
1880
1905
  k: v
@@ -34,6 +34,8 @@ class AsyncHttpRequests:
34
34
  body: Any | None = None,
35
35
  content_type: str = "application/json",
36
36
  compress: bool = False,
37
+ *,
38
+ serializer: type[json.JSONEncoder] | None = None,
37
39
  ) -> Response:
38
40
  headers = build_headers(content_type, compress)
39
41
 
@@ -41,11 +43,13 @@ class AsyncHttpRequests:
41
43
  if body is None:
42
44
  response = await http_method(path)
43
45
  elif content_type == "application/json" and not compress:
44
- response = await http_method(path, json=body, headers=headers)
46
+ response = await http_method(
47
+ path, data=json.dumps(body, cls=serializer), headers=headers
48
+ )
45
49
  else:
46
50
  if body and compress:
47
51
  if content_type == "application/json":
48
- body = gzip.compress(json.dumps(body).encode("utf-8"))
52
+ body = gzip.compress(json.dumps(body, cls=serializer).encode("utf-8"))
49
53
  else:
50
54
  body = gzip.compress((body).encode("utf-8"))
51
55
  response = await http_method(path, content=body, headers=headers)
@@ -60,7 +64,7 @@ class AsyncHttpRequests:
60
64
  raise MeilisearchApiError(str(err), response) from err
61
65
  else:
62
66
  # Fail safe just in case error happens before response is created
63
- raise MeilisearchError(str(err)) # pragma: no cover
67
+ raise MeilisearchError(str(err)) from err # pragma: no cover
64
68
 
65
69
  async def get(self, path: str) -> Response:
66
70
  return await self._send_request(self.http_client.get, path)
@@ -80,8 +84,12 @@ class AsyncHttpRequests:
80
84
  body: Any | None = None,
81
85
  content_type: str = "application/json",
82
86
  compress: bool = False,
87
+ *,
88
+ serializer: type[json.JSONEncoder] | None = None,
83
89
  ) -> Response:
84
- return await self._send_request(self.http_client.post, path, body, content_type, compress)
90
+ return await self._send_request(
91
+ self.http_client.post, path, body, content_type, compress, serializer=serializer
92
+ )
85
93
 
86
94
  async def put(
87
95
  self,
@@ -89,8 +97,12 @@ class AsyncHttpRequests:
89
97
  body: Any | None = None,
90
98
  content_type: str = "application/json",
91
99
  compress: bool = False,
100
+ *,
101
+ serializer: type[json.JSONEncoder] | None = None,
92
102
  ) -> Response:
93
- return await self._send_request(self.http_client.put, path, body, content_type, compress)
103
+ return await self._send_request(
104
+ self.http_client.put, path, body, content_type, compress, serializer=serializer
105
+ )
94
106
 
95
107
  async def delete(self, path: str, body: dict | None = None) -> Response:
96
108
  return await self._send_request(self.http_client.delete, path, body)
@@ -107,17 +119,19 @@ class HttpRequests:
107
119
  body: Any | None = None,
108
120
  content_type: str = "applicaton/json",
109
121
  compress: bool = False,
122
+ *,
123
+ serializer: type[json.JSONEncoder] | None = None,
110
124
  ) -> Response:
111
125
  headers = build_headers(content_type, compress)
112
126
  try:
113
127
  if not body:
114
128
  response = http_method(path)
115
129
  elif content_type == "application/json" and not compress:
116
- response = http_method(path, json=body, headers=headers)
130
+ response = http_method(path, data=json.dumps(body, cls=serializer), headers=headers)
117
131
  else:
118
132
  if body and compress:
119
133
  if content_type == "application/json":
120
- body = gzip.compress(json.dumps(body).encode("utf-8"))
134
+ body = gzip.compress(json.dumps(body, cls=serializer).encode("utf-8"))
121
135
  else:
122
136
  body = gzip.compress((body).encode("utf-8"))
123
137
  response = http_method(path, content=body, headers=headers)
@@ -132,7 +146,7 @@ class HttpRequests:
132
146
  raise MeilisearchApiError(str(err), response) from err
133
147
  else:
134
148
  # Fail safe just in case error happens before response is created
135
- raise MeilisearchError(str(err)) # pragma: no cover
149
+ raise MeilisearchError(str(err)) from err # pragma: no cover
136
150
 
137
151
  def get(self, path: str) -> Response:
138
152
  return self._send_request(self.http_client.get, path)
@@ -152,8 +166,12 @@ class HttpRequests:
152
166
  body: Any | None = None,
153
167
  content_type: str = "application/json",
154
168
  compress: bool = False,
169
+ *,
170
+ serializer: type[json.JSONEncoder] | None = None,
155
171
  ) -> Response:
156
- return self._send_request(self.http_client.post, path, body, content_type, compress)
172
+ return self._send_request(
173
+ self.http_client.post, path, body, content_type, compress, serializer=serializer
174
+ )
157
175
 
158
176
  def put(
159
177
  self,
@@ -161,8 +179,12 @@ class HttpRequests:
161
179
  body: Any | None = None,
162
180
  content_type: str = "application/json",
163
181
  compress: bool = False,
182
+ *,
183
+ serializer: type[json.JSONEncoder] | None = None,
164
184
  ) -> Response:
165
- return self._send_request(self.http_client.put, path, body, content_type, compress)
185
+ return self._send_request(
186
+ self.http_client.put, path, body, content_type, compress, serializer=serializer
187
+ )
166
188
 
167
189
  def delete(self, path: str, body: dict | None = None) -> Response:
168
190
  return self._send_request(self.http_client.delete, path, body)
@@ -1 +1 @@
1
- VERSION = "2.8.0"
1
+ VERSION = "2.10.0"
@@ -68,7 +68,7 @@ def async_add_documents(
68
68
  >>> # with `ConnectionInfo`
69
69
  >>> @async_add_documents(
70
70
  index_name="movies",
71
- connection_info=ConnectionInfo(url="http://localhost:7700", api_key="masterKey",
71
+ connection_info=ConnectionInfo(url="http://localhost:7700", api_key="masterKey"),
72
72
  )
73
73
  >>> async def my_function() -> list[dict[str, Any]]:
74
74
  >>> return [{"id": 1, "title": "Test 1"}, {"id": 2, "title": "Test 2"}]