meilisearch-python-sdk 3.6.2 (py3-none-any.whl) → 4.1.0 (py3-none-any.whl)

This diff shows the changes between the two publicly released package versions as they appear in their public registry, and is provided for informational purposes only.


meilisearch_python_sdk/_batch.py (new file)

@@ -0,0 +1,166 @@
+ from __future__ import annotations
+
+ from datetime import datetime
+ from typing import TYPE_CHECKING
+
+ from meilisearch_python_sdk._utils import get_async_client, get_client
+ from meilisearch_python_sdk.errors import BatchNotFoundError
+ from meilisearch_python_sdk.models.batch import BatchResult, BatchStatus
+
+ if TYPE_CHECKING:
+     from httpx import AsyncClient as HttpxAsyncClient  # pragma: no cover
+     from httpx import Client as HttpxClient  # pragma: no cover
+
+     from meilisearch_python_sdk._client import (  # pragma: no cover
+         AsyncClient,
+         Client,
+     )
+
+
+ async def async_get_batch(
+     client: HttpxAsyncClient | AsyncClient, batch_uid: int
+ ) -> BatchResult | None:
+     client_ = get_async_client(client)
+     response = await client_.get(f"batches/{batch_uid}")
+
+     if response.status_code == 404:
+         raise BatchNotFoundError(f"Batch {batch_uid} not found")
+
+     return BatchResult(**response.json())
+
+
+ async def async_get_batches(
+     client: HttpxAsyncClient | AsyncClient,
+     *,
+     uids: list[int] | None = None,
+     batch_uids: list[int] | None = None,
+     index_uids: list[int] | None = None,
+     statuses: list[str] | None = None,
+     types: list[str] | None = None,
+     limit: int = 20,
+     from_: str | None = None,
+     reverse: bool = False,
+     before_enqueued_at: datetime | None = None,
+     after_enqueued_at: datetime | None = None,
+     before_started_at: datetime | None = None,
+     after_finished_at: datetime | None = None,
+ ) -> BatchStatus:
+     client_ = get_async_client(client)
+     params = _build_parameters(
+         uids=uids,
+         batch_uids=batch_uids,
+         index_uids=index_uids,
+         statuses=statuses,
+         types=types,
+         limit=limit,
+         from_=from_,
+         reverse=reverse,
+         before_enqueued_at=before_enqueued_at,
+         after_enqueued_at=after_enqueued_at,
+         before_started_at=before_started_at,
+         after_finished_at=after_finished_at,
+     )
+     response = await client_.get("batches", params=params)
+
+     return BatchStatus(**response.json())
+
+
+ def get_batch(client: HttpxClient | Client, batch_uid: int) -> BatchResult | None:
+     client_ = get_client(client)
+     response = client_.get(f"batches/{batch_uid}")
+
+     if response.status_code == 404:
+         raise BatchNotFoundError(f"Batch {batch_uid} not found")
+
+     return BatchResult(**response.json())
+
+
+ def get_batches(
+     client: HttpxClient | Client,
+     *,
+     uids: list[int] | None = None,
+     batch_uids: list[int] | None = None,
+     index_uids: list[int] | None = None,
+     statuses: list[str] | None = None,
+     types: list[str] | None = None,
+     limit: int = 20,
+     from_: str | None = None,
+     reverse: bool = False,
+     before_enqueued_at: datetime | None = None,
+     after_enqueued_at: datetime | None = None,
+     before_started_at: datetime | None = None,
+     after_finished_at: datetime | None = None,
+ ) -> BatchStatus:
+     client_ = get_client(client)
+     params = _build_parameters(
+         uids=uids,
+         batch_uids=batch_uids,
+         index_uids=index_uids,
+         statuses=statuses,
+         types=types,
+         limit=limit,
+         from_=from_,
+         reverse=reverse,
+         before_enqueued_at=before_enqueued_at,
+         after_enqueued_at=after_enqueued_at,
+         before_started_at=before_started_at,
+         after_finished_at=after_finished_at,
+     )
+
+     response = client_.get("batches", params=params)
+
+     return BatchStatus(**response.json())
+
+
+ def _build_parameters(
+     *,
+     uids: list[int] | None = None,
+     batch_uids: list[int] | None = None,
+     index_uids: list[int] | None = None,
+     statuses: list[str] | None = None,
+     types: list[str] | None = None,
+     limit: int = 20,
+     from_: str | None = None,
+     reverse: bool = False,
+     before_enqueued_at: datetime | None = None,
+     after_enqueued_at: datetime | None = None,
+     before_started_at: datetime | None = None,
+     after_finished_at: datetime | None = None,
+ ) -> dict[str, str]:
+     params = {}
+
+     if uids:
+         params["uids"] = ",".join([str(uid) for uid in uids])
+
+     if batch_uids:  # pragma: no cover
+         params["batchUids"] = ",".join([str(uid) for uid in batch_uids])
+
+     if index_uids:  # pragma: no cover
+         params["indexUids"] = ",".join([str(uid) for uid in index_uids])
+
+     if statuses:  # pragma: no cover
+         params["statuses"] = ",".join(statuses)
+
+     if types:  # pragma: no cover
+         params["types"] = ",".join(types)
+
+     params["limit"] = str(limit)
+
+     if from_:  # pragma: no cover
+         params["from"] = from_
+
+     params["reverse"] = "true" if reverse else "false"
+
+     if before_enqueued_at:  # pragma: no cover
+         params["beforeEnqueuedAt"] = before_enqueued_at.isoformat()
+
+     if after_enqueued_at:  # pragma: no cover
+         params["afterEnqueuedAt"] = after_enqueued_at.isoformat()
+
+     if before_started_at:  # pragma: no cover
+         params["beforeStartedAt"] = before_started_at.isoformat()
+
+     if after_finished_at:  # pragma: no cover
+         params["afterFinishedAt"] = after_finished_at.isoformat()
+
+     return params
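
These helpers are surfaced as `Client.get_batch`/`get_batches` and their async counterparts (wired up in the `_client.py` hunks below). A minimal usage sketch, assuming a local Meilisearch instance with at least one processed batch; the URL, key, and batch uid are placeholders:

```py
from meilisearch_python_sdk import Client
from meilisearch_python_sdk.errors import BatchNotFoundError

client = Client("http://127.0.0.1:7700", "masterKey")

# List the five most recent batches.
batches = client.get_batches(limit=5, reverse=True)
for batch in batches.results:
    print(batch.uid, batch.stats.total_nb_tasks)

# Fetching a single batch raises BatchNotFoundError for an unknown uid.
try:
    client.get_batch(123456)
except BatchNotFoundError:
    print("no such batch")
```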
meilisearch_python_sdk/_client.py

@@ -9,6 +9,9 @@ from httpx import AsyncClient as HttpxAsyncClient
  from httpx import Client as HttpxClient

  from meilisearch_python_sdk import _task
+ from meilisearch_python_sdk._batch import async_get_batch, async_get_batches
+ from meilisearch_python_sdk._batch import get_batch as _get_batch
+ from meilisearch_python_sdk._batch import get_batches as _get_batches
  from meilisearch_python_sdk._http_requests import AsyncHttpRequests, HttpRequests
  from meilisearch_python_sdk.errors import InvalidRestriction, MeilisearchApiError
  from meilisearch_python_sdk.index import AsyncIndex, Index
@@ -39,6 +42,7 @@ if TYPE_CHECKING: # pragma: no cover
      import sys
      from types import TracebackType

+     from meilisearch_python_sdk.models.batch import BatchResult, BatchStatus
      from meilisearch_python_sdk.types import JsonMapping

      if sys.version_info >= (3, 11):
@@ -144,7 +148,7 @@ class AsyncClient(BaseClient):
          api_key: str | None = None,
          *,
          timeout: int | None = None,
-         verify: str | bool | SSLContext = True,
+         verify: bool | SSLContext = True,
          custom_headers: dict[str, str] | None = None,
          json_handler: BuiltinHandler | OrjsonHandler | UjsonHandler | None = None,
          http2: bool = False,
@@ -770,11 +774,46 @@ class AsyncClient(BaseClient):

          return TaskInfo(**response.json())

+     async def get_batch(self, batch_uid: int) -> BatchResult | None:
+         return await async_get_batch(self, batch_uid)
+
+     async def get_batches(
+         self,
+         *,
+         uids: list[int] | None = None,
+         batch_uids: list[int] | None = None,
+         index_uids: list[int] | None = None,
+         statuses: list[str] | None = None,
+         types: list[str] | None = None,
+         limit: int = 20,
+         from_: str | None = None,
+         reverse: bool = False,
+         before_enqueued_at: datetime | None = None,
+         after_enqueued_at: datetime | None = None,
+         before_started_at: datetime | None = None,
+         after_finished_at: datetime | None = None,
+     ) -> BatchStatus:
+         return await async_get_batches(
+             self,
+             uids=uids,
+             batch_uids=batch_uids,
+             index_uids=index_uids,
+             statuses=statuses,
+             types=types,
+             limit=limit,
+             from_=from_,
+             reverse=reverse,
+             before_enqueued_at=before_enqueued_at,
+             after_enqueued_at=after_enqueued_at,
+             before_started_at=before_started_at,
+             after_finished_at=after_finished_at,
+         )
+
      async def cancel_tasks(
          self,
          *,
-         uids: list[str] | None = None,
-         index_uids: list[str] | None = None,
+         uids: list[int] | None = None,
+         index_uids: list[int] | None = None,
          statuses: list[str] | None = None,
          types: list[str] | None = None,
          before_enqueued_at: datetime | None = None,
@@ -848,8 +887,8 @@ class AsyncClient(BaseClient):
      async def delete_tasks(
          self,
          *,
-         uids: list[str] | None = None,
-         index_uids: list[str] | None = None,
+         uids: list[int] | None = None,
+         index_uids: list[int] | None = None,
          statuses: list[str] | None = None,
          types: list[str] | None = None,
          before_enqueued_at: datetime | None = None,
@@ -903,6 +942,7 @@ class AsyncClient(BaseClient):
          *,
          index_ids: list[str] | None = None,
          types: str | list[str] | None = None,
+         reverse: bool | None = None,
      ) -> TaskStatus:
          """Get multiple tasks.

@@ -910,6 +950,7 @@ class AsyncClient(BaseClient):
              index_ids: A list of index UIDs for which to get the tasks. If provided this will get the
                  tasks only for the specified indexes, if not all tasks will be returned. Default = None
              types: Specify specific task types to retrieve. Default = None
+             reverse: If True the tasks will be returned in reverse order. Default = None

          Returns:
              Task statuses.
@@ -925,7 +966,9 @@ class AsyncClient(BaseClient):
              >>> async with AsyncClient("http://localhost.com", "masterKey") as client:
              >>> await client.get_tasks()
          """
-         return await _task.async_get_tasks(self.http_client, index_ids=index_ids, types=types)
+         return await _task.async_get_tasks(
+             self.http_client, index_ids=index_ids, types=types, reverse=reverse
+         )

      async def wait_for_task(
          self,
@@ -984,7 +1027,7 @@ class Client(BaseClient):
          api_key: str | None = None,
          *,
          timeout: int | None = None,
-         verify: str | bool | SSLContext = True,
+         verify: bool | SSLContext = True,
          custom_headers: dict[str, str] | None = None,
          json_handler: BuiltinHandler | OrjsonHandler | UjsonHandler | None = None,
          http2: bool = False,
@@ -1586,11 +1629,46 @@ class Client(BaseClient):

          return TaskInfo(**response.json())

+     def get_batch(self, batch_uid: int) -> BatchResult | None:
+         return _get_batch(self, batch_uid)
+
+     def get_batches(
+         self,
+         *,
+         uids: list[int] | None = None,
+         batch_uids: list[int] | None = None,
+         index_uids: list[int] | None = None,
+         statuses: list[str] | None = None,
+         types: list[str] | None = None,
+         limit: int = 20,
+         from_: str | None = None,
+         reverse: bool = False,
+         before_enqueued_at: datetime | None = None,
+         after_enqueued_at: datetime | None = None,
+         before_started_at: datetime | None = None,
+         after_finished_at: datetime | None = None,
+     ) -> BatchStatus:
+         return _get_batches(
+             self,
+             uids=uids,
+             batch_uids=batch_uids,
+             index_uids=index_uids,
+             statuses=statuses,
+             types=types,
+             limit=limit,
+             from_=from_,
+             reverse=reverse,
+             before_enqueued_at=before_enqueued_at,
+             after_enqueued_at=after_enqueued_at,
+             before_started_at=before_started_at,
+             after_finished_at=after_finished_at,
+         )
+
      def cancel_tasks(
          self,
          *,
-         uids: list[str] | None = None,
-         index_uids: list[str] | None = None,
+         uids: list[int] | None = None,
+         index_uids: list[int] | None = None,
          statuses: list[str] | None = None,
          types: list[str] | None = None,
          before_enqueued_at: datetime | None = None,
@@ -1642,8 +1720,8 @@ class Client(BaseClient):
      def delete_tasks(
          self,
          *,
-         uids: list[str] | None = None,
-         index_uids: list[str] | None = None,
+         uids: list[int] | None = None,
+         index_uids: list[int] | None = None,
          statuses: list[str] | None = None,
          types: list[str] | None = None,
          before_enqueued_at: datetime | None = None,
@@ -1718,6 +1796,7 @@ class Client(BaseClient):
          *,
          index_ids: list[str] | None = None,
          types: str | list[str] | None = None,
+         reverse: bool | None = None,
      ) -> TaskStatus:
          """Get multiple tasks.

@@ -1725,6 +1804,7 @@ class Client(BaseClient):
              index_ids: A list of index UIDs for which to get the tasks. If provided this will get the
                  tasks only for the specified indexes, if not all tasks will be returned. Default = None
              types: Specify specific task types to retrieve. Default = None
+             reverse: If True the tasks will be returned in reverse order. Default = None

          Returns:
              Task statuses.
@@ -1740,7 +1820,7 @@ class Client(BaseClient):
              >>> client = Client("http://localhost.com", "masterKey")
              >>> client.get_tasks(client)
          """
-         return _task.get_tasks(self.http_client, index_ids=index_ids, types=types)
+         return _task.get_tasks(self.http_client, index_ids=index_ids, types=types, reverse=reverse)

      def wait_for_task(
          self,
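
A short sketch of the two client-level changes in the hunks above, namely the new `reverse` flag on `get_tasks` and the `uids` parameter now typed as a list of ints; the connection details are placeholders:

```py
from meilisearch_python_sdk import Client

client = Client("http://127.0.0.1:7700", "masterKey")

# Newest tasks first, restricted to the "movies" index.
tasks = client.get_tasks(index_ids=["movies"], reverse=True)

# Task uids are now passed as ints rather than strings.
client.cancel_tasks(uids=[1, 2, 3])
```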

meilisearch_python_sdk/_http_requests.py

@@ -116,7 +116,7 @@ class HttpRequests:
          http_method: Callable,
          path: str,
          body: Any | None = None,
-         content_type: str = "applicaton/json",
+         content_type: str = "application/json",
          compress: bool = False,
      ) -> Response:
          headers = build_headers(content_type, compress)

meilisearch_python_sdk/_task.py

@@ -10,6 +10,7 @@ from httpx import AsyncClient as HttpxAsyncClient
  from httpx import Client as HttpxClient

  from meilisearch_python_sdk._http_requests import AsyncHttpRequests, HttpRequests
+ from meilisearch_python_sdk._utils import get_async_client, get_client
  from meilisearch_python_sdk.errors import MeilisearchTaskFailedError, MeilisearchTimeoutError
  from meilisearch_python_sdk.json_handler import BuiltinHandler, OrjsonHandler, UjsonHandler
  from meilisearch_python_sdk.models.task import TaskInfo, TaskResult, TaskStatus
@@ -21,8 +22,8 @@ if TYPE_CHECKING:
  async def async_cancel_tasks(
      client: HttpxAsyncClient | AsyncClient,
      *,
-     uids: list[str] | None = None,
-     index_uids: list[str] | None = None,
+     uids: list[int] | None = None,
+     index_uids: list[int] | None = None,
      statuses: list[str] | None = None,
      types: list[str] | None = None,
      before_enqueued_at: datetime | None = None,
@@ -76,7 +77,7 @@ async def async_cancel_tasks(
          parameters["statuses"] = "enqueued,processing"

      url = f"tasks/cancel?{urlencode(parameters)}"
-     client_ = _get_async_client(client)
+     client_ = get_async_client(client)
      response = await client_.post(url)

      return TaskInfo(**response.json())
@@ -85,8 +86,8 @@ async def async_cancel_tasks(
  async def async_delete_tasks(
      client: HttpxAsyncClient | AsyncClient,
      *,
-     uids: list[str] | None = None,
-     index_uids: list[str] | None = None,
+     uids: list[int] | None = None,
+     index_uids: list[int] | None = None,
      statuses: list[str] | None = None,
      types: list[str] | None = None,
      before_enqueued_at: datetime | None = None,
@@ -110,14 +111,17 @@ async def async_delete_tasks(
          parameters["statuses"] = "canceled,enqueued,failed,processing,succeeded"

      url = f"tasks?{urlencode(parameters)}"
-     client_ = _get_async_client(client)
+     client_ = get_async_client(client)
      response = await client_.delete(url)

      return TaskInfo(**response.json())


- async def async_get_task(client: HttpxAsyncClient | AsyncClient, task_id: int) -> TaskResult:
-     client_ = _get_async_client(client)
+ async def async_get_task(
+     client: HttpxAsyncClient | AsyncClient,
+     task_id: int,
+ ) -> TaskResult:
+     client_ = get_async_client(client)
      response = await client_.get(f"tasks/{task_id}")

      return TaskResult(**response.json())
@@ -128,12 +132,19 @@ async def async_get_tasks(
      *,
      index_ids: list[str] | None = None,
      types: str | list[str] | None = None,
+     reverse: bool | None = None,
  ) -> TaskStatus:
      url = f"tasks?indexUids={','.join(index_ids)}" if index_ids else "tasks"
      if types:
          formatted_types = ",".join(types) if isinstance(types, list) else types
          url = f"{url}&types={formatted_types}" if "?" in url else f"{url}?types={formatted_types}"
-     client_ = _get_async_client(client)
+     if reverse:
+         url = (
+             f"{url}&reverse={str(reverse).lower()}"
+             if "?" in url
+             else f"{url}?reverse={str(reverse).lower()}"
+         )
+     client_ = get_async_client(client)
      response = await client_.get(url)

      return TaskStatus(**response.json())
@@ -147,7 +158,7 @@ async def async_wait_for_task(
      interval_in_ms: int = 50,
      raise_for_status: bool = False,
  ) -> TaskResult:
-     client_ = _get_async_client(client)
+     client_ = get_async_client(client)
      handler = _get_json_handler(client)
      url = f"tasks/{task_id}"
      http_requests = AsyncHttpRequests(client_, handler)
@@ -182,8 +193,8 @@ async def async_wait_for_task(
  def cancel_tasks(
      client: HttpxClient | Client,
      *,
-     uids: list[str] | None = None,
-     index_uids: list[str] | None = None,
+     uids: list[int] | None = None,
+     index_uids: list[int] | None = None,
      statuses: list[str] | None = None,
      types: list[str] | None = None,
      before_enqueued_at: datetime | None = None,
@@ -207,7 +218,7 @@ def cancel_tasks(
          parameters["statuses"] = "enqueued,processing"

      url = f"tasks/cancel?{urlencode(parameters)}"
-     client_ = _get_client(client)
+     client_ = get_client(client)
      response = client_.post(url)

      return TaskInfo(**response.json())
@@ -216,8 +227,8 @@ def cancel_tasks(
  def delete_tasks(
      client: HttpxClient | Client,
      *,
-     uids: list[str] | None = None,
-     index_uids: list[str] | None = None,
+     uids: list[int] | None = None,
+     index_uids: list[int] | None = None,
      statuses: list[str] | None = None,
      types: list[str] | None = None,
      before_enqueued_at: datetime | None = None,
@@ -241,14 +252,14 @@ def delete_tasks(
          parameters["statuses"] = "canceled,enqueued,failed,processing,succeeded"

      url = f"tasks?{urlencode(parameters)}"
-     client_ = _get_client(client)
+     client_ = get_client(client)
      response = client_.delete(url)

      return TaskInfo(**response.json())


  def get_task(client: HttpxClient | Client, task_id: int) -> TaskResult:
-     client_ = _get_client(client)
+     client_ = get_client(client)
      response = client_.get(f"tasks/{task_id}")

      return TaskResult(**response.json())
@@ -259,12 +270,19 @@ def get_tasks(
      *,
      index_ids: list[str] | None = None,
      types: str | list[str] | None = None,
+     reverse: bool | None = None,
  ) -> TaskStatus:
      url = f"tasks?indexUids={','.join(index_ids)}" if index_ids else "tasks"
      if types:
          formatted_types = ",".join(types) if isinstance(types, list) else types
          url = f"{url}&types={formatted_types}" if "?" in url else f"{url}?types={formatted_types}"
-     client_ = _get_client(client)
+     if reverse:
+         url = (
+             f"{url}&reverse={str(reverse).lower()}"
+             if "?" in url
+             else f"{url}?reverse={str(reverse).lower()}"
+         )
+     client_ = get_client(client)
      response = client_.get(url)

      return TaskStatus(**response.json())
@@ -278,7 +296,7 @@ def wait_for_task(
      interval_in_ms: int = 50,
      raise_for_status: bool = False,
  ) -> TaskResult:
-     client_ = _get_client(client)
+     client_ = get_client(client)
      handler = _get_json_handler(client)
      url = f"tasks/{task_id}"
      http_requests = HttpRequests(client_, json_handler=handler)
@@ -310,24 +328,6 @@ def wait_for_task(
          time.sleep(interval_in_ms / 1000)


- def _get_async_client(
-     client: AsyncClient | HttpxAsyncClient,
- ) -> HttpxAsyncClient:
-     if isinstance(client, HttpxAsyncClient):
-         return client
-
-     return client.http_client
-
-
- def _get_client(
-     client: Client | HttpxClient,
- ) -> HttpxClient:
-     if isinstance(client, HttpxClient):
-         return client
-
-     return client.http_client
-
-
  def _get_json_handler(
      client: AsyncClient | Client | HttpxAsyncClient | HttpxClient,
  ) -> BuiltinHandler | OrjsonHandler | UjsonHandler:
@@ -338,8 +338,8 @@ def _get_json_handler(


  def _process_params(
-     uids: list[str] | None = None,
-     index_uids: list[str] | None = None,
+     uids: list[int] | None = None,
+     index_uids: list[int] | None = None,
      statuses: list[str] | None = None,
      types: list[str] | None = None,
      before_enqueued_at: datetime | None = None,

meilisearch_python_sdk/_utils.py

@@ -3,6 +3,31 @@ from __future__ import annotations
  import sys
  from datetime import datetime
  from functools import lru_cache
+ from typing import TYPE_CHECKING
+
+ from httpx import AsyncClient as HttpxAsyncClient
+ from httpx import Client as HttpxClient
+
+ if TYPE_CHECKING:
+     from meilisearch_python_sdk._client import AsyncClient, Client  # pragma: no cover
+
+
+ def get_async_client(
+     client: AsyncClient | HttpxAsyncClient,
+ ) -> HttpxAsyncClient:
+     if isinstance(client, HttpxAsyncClient):
+         return client
+
+     return client.http_client
+
+
+ def get_client(
+     client: Client | HttpxClient,
+ ) -> HttpxClient:
+     if isinstance(client, HttpxClient):
+         return client
+
+     return client.http_client


  def iso_to_date_time(iso_date: datetime | str | None) -> datetime | None:

meilisearch_python_sdk/_version.py

@@ -1 +1 @@
- VERSION = "3.6.2"
+ VERSION = "4.1.0"

meilisearch_python_sdk/errors.py

@@ -1,6 +1,10 @@
  from httpx import Response


+ class BatchNotFoundError(Exception):
+     pass
+
+
  class InvalidDocumentError(Exception):
      """Error for documents that are not in a valid format for Meilisearch."""


meilisearch_python_sdk/index.py

@@ -4383,6 +4383,147 @@ class AsyncIndex(_BaseIndex):

          return TaskInfo(**response.json())

+     async def get_facet_search(self) -> bool | None:
+         """Get setting for facet search opt-out.
+
+         Returns:
+             True if facet search is enabled or False if not.
+
+         Raises:
+             MeilisearchCommunicationError: If there was an error communicating with the server.
+             MeilisearchApiError: If the Meilisearch API returned an error.
+
+         Examples
+             >>> from meilisearch_async_client import AsyncClient
+             >>> async with AsyncClient("http://localhost.com", "masterKey") as client:
+             >>> index = client.index("movies")
+             >>> facet_search = await index.get_facet_search()
+         """
+         response = await self._http_requests.get(f"{self._settings_url}/facet-search")
+
+         return response.json()
+
+     async def update_facet_search(self, facet_search: bool, *, compress: bool = False) -> TaskInfo:
+         """Update setting for facet search opt-out.
+
+         Args:
+             facet_search: Boolean indicating if facet search should be disabled.
+             compress: If set to True the data will be sent in gzip format. Defaults to False.
+
+         Returns:
+             The details of the task status.
+
+         Raises:
+             MeilisearchCommunicationError: If there was an error communicating with the server.
+             MeilisearchApiError: If the Meilisearch API returned an error.
+
+         Examples
+             >>> from meilisearch_python_sdk import AsyncClient
+             >>> async with AsyncClient("http://localhost.com", "masterKey") as client:
+             >>> index = client.index("movies")
+             >>> await index.update_facet_search(True)
+         """
+         response = await self._http_requests.put(
+             f"{self._settings_url}/facet-search",
+             facet_search,
+             compress=compress,
+         )
+
+         return TaskInfo(**response.json())
+
+     async def reset_facet_search(self) -> TaskInfo:
+         """Reset the facet search opt-out settings.
+
+         Returns:
+             The details of the task status.
+
+         Raises:
+             MeilisearchCommunicationError: If there was an error communicating with the server.
+             MeilisearchApiError: If the Meilisearch API returned an error.
+
+         Examples
+             >>> from meilisearch_async_client import AsyncClient
+             >>> async with AsyncClient("http://localhost.com", "masterKey") as client:
+             >>> index = client.index("movies")
+             >>> await index.reset_facet_search()
+         """
+         response = await self._http_requests.delete(f"{self._settings_url}/facet-search")
+
+         return TaskInfo(**response.json())
+
+     async def get_prefix_search(self) -> str:
+         """Get setting for prefix search opt-out.
+
+         Returns:
+             True if prefix search is enabled or False if not.
+
+         Raises:
+             MeilisearchCommunicationError: If there was an error communicating with the server.
+             MeilisearchApiError: If the Meilisearch API returned an error.
+
+         Examples
+             >>> from meilisearch_async_client import AsyncClient
+             >>> async with AsyncClient("http://localhost.com", "masterKey") as client:
+             >>> index = await client.index("movies")
+             >>> prefix_search = await index.get_prefix_search()
+         """
+         response = await self._http_requests.get(f"{self._settings_url}/prefix-search")
+
+         return response.json()
+
+     async def update_prefix_search(
+         self,
+         prefix_search: Literal["disabled", "indexingTime", "searchTime"],
+         *,
+         compress: bool = False,
+     ) -> TaskInfo:
+         """Update setting for prefix search opt-out.
+
+         Args:
+             prefix_search: Value indicating prefix search setting.
+             compress: If set to True the data will be sent in gzip format. Defaults to False.
+
+         Returns:
+             The details of the task status.
+
+         Raises:
+             MeilisearchCommunicationError: If there was an error communicating with the server.
+             MeilisearchApiError: If the Meilisearch API returned an error.
+
+         Examples
+             >>> from meilisearch_python_sdk import AsyncClient
+             >>> async with AsyncClient("http://localhost.com", "masterKey") as client:
+             >>> index = await client.index("movies")
+             >>> await index.update_prefix_search("disabled")
+         """
+         response = await self._http_requests.put(
+             f"{self._settings_url}/prefix-search",
+             prefix_search,
+             compress=compress,
+         )
+
+         return TaskInfo(**response.json())
+
+     async def reset_prefix_search(self) -> TaskInfo:
+         """Reset the prefix search opt-out settings.
+
+         Returns:
+             The details of the task status.
+
+         Raises:
+             MeilisearchCommunicationError: If there was an error communicating with the server.
+             MeilisearchApiError: If the Meilisearch API returned an error.
+
+         Examples
+             >>> from meilisearch_async_client import AsyncClient
+             >>> async with AsyncClient("http://localhost.com", "masterKey") as client:
+             >>> index = await client.index("movies")
+             >>> await index.reset_prefix_search()
+         """
+         response = await self._http_requests.delete(f"{self._settings_url}/prefix-search")
+
+         return TaskInfo(**response.json())
+
      @staticmethod
      async def _run_plugins(
          plugins: Sequence[AsyncPlugin | AsyncDocumentPlugin | AsyncPostSearchPlugin],
@@ -7792,6 +7933,147 @@ class Index(_BaseIndex):

          return TaskInfo(**response.json())

+     def get_facet_search(self) -> bool:
+         """Get setting for facet search opt-out.
+
+         Returns:
+             True if facet search is enabled or False if not.
+
+         Raises:
+             MeilisearchCommunicationError: If there was an error communicating with the server.
+             MeilisearchApiError: If the Meilisearch API returned an error.
+
+         Examples
+             >>> from meilisearch_async_client import Client
+             >>> client = Client("http://localhost.com", "masterKey")
+             >>> index = client.index("movies")
+             >>> facet_search = await index.get_facet_search()
+         """
+         response = self._http_requests.get(f"{self._settings_url}/facet-search")
+
+         return response.json()
+
+     def update_facet_search(self, facet_search: bool, *, compress: bool = False) -> TaskInfo:
+         """Update setting for facet search opt-out.
+
+         Args:
+             facet_search: Boolean indicating if facet search should be disabled.
+             compress: If set to True the data will be sent in gzip format. Defaults to False.
+
+         Returns:
+             The details of the task status.
+
+         Raises:
+             MeilisearchCommunicationError: If there was an error communicating with the server.
+             MeilisearchApiError: If the Meilisearch API returned an error.
+
+         Examples
+             >>> from meilisearch_python_sdk import Client
+             >>> client = Client("http://localhost.com", "masterKey")
+             >>> index = client.index("movies")
+             >>> index.update_facet_search(True)
+         """
+         response = self._http_requests.put(
+             f"{self._settings_url}/facet-search",
+             facet_search,
+             compress=compress,
+         )
+
+         return TaskInfo(**response.json())
+
+     def reset_facet_search(self) -> TaskInfo:
+         """Reset the facet search opt-out settings.
+
+         Returns:
+             The details of the task status.
+
+         Raises:
+             MeilisearchCommunicationError: If there was an error communicating with the server.
+             MeilisearchApiError: If the Meilisearch API returned an error.
+
+         Examples
+             >>> from meilisearch_async_client import Client
+             >>> client = Client("http://localhost.com", "masterKey")
+             >>> index = client.index("movies")
+             >>> await index.reset_facet_search()
+         """
+         response = self._http_requests.delete(f"{self._settings_url}/facet-search")
+
+         return TaskInfo(**response.json())
+
+     def get_prefix_search(self) -> bool:
+         """Get setting for prefix search opt-out.
+
+         Returns:
+             True if prefix search is enabled or False if not.
+
+         Raises:
+             MeilisearchCommunicationError: If there was an error communicating with the server.
+             MeilisearchApiError: If the Meilisearch API returned an error.
+
+         Examples
+             >>> from meilisearch_async_client import Client
+             >>> client = Client("http://localhost.com", "masterKey")
+             >>> index = client.index("movies")
+             >>> prefix_search = index.get_prefix_search()
+         """
+         response = self._http_requests.get(f"{self._settings_url}/prefix-search")
+
+         return response.json()
+
+     def update_prefix_search(
+         self,
+         prefix_search: Literal["disabled", "indexingTime", "searchTime"],
+         *,
+         compress: bool = False,
+     ) -> TaskInfo:
+         """Update setting for prefix search opt-out.
+
+         Args:
+             prefix_search: Value indicating prefix search setting.
+             compress: If set to True the data will be sent in gzip format. Defaults to False.
+
+         Returns:
+             The details of the task status.
+
+         Raises:
+             MeilisearchCommunicationError: If there was an error communicating with the server.
+             MeilisearchApiError: If the Meilisearch API returned an error.
+
+         Examples
+             >>> from meilisearch_python_sdk import Client
+             >>> client = Client("http://localhost.com", "masterKey")
+             >>> index = client.index("movies")
+             >>> index.update_prefix_search("disabled")
+         """
+         response = self._http_requests.put(
+             f"{self._settings_url}/prefix-search",
+             prefix_search,
+             compress=compress,
+         )
+
+         return TaskInfo(**response.json())
+
+     def reset_prefix_search(self) -> TaskInfo:
+         """Reset the prefix search opt-out settings.
+
+         Returns:
+             The details of the task status.
+
+         Raises:
+             MeilisearchCommunicationError: If there was an error communicating with the server.
+             MeilisearchApiError: If the Meilisearch API returned an error.
+
+         Examples
+             >>> from meilisearch_async_client import Client
+             >>> client = Client("http://localhost.com", "masterKey")
+             >>> index = client.index("movies")
+             >>> index.reset_prefix_search()
+         """
+         response = self._http_requests.delete(f"{self._settings_url}/prefix-search")
+
+         return TaskInfo(**response.json())
+
      @staticmethod
      def _run_plugins(
          plugins: Sequence[Plugin | DocumentPlugin | PostSearchPlugin],
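
A hedged usage sketch of the new facet-search and prefix-search settings endpoints added above, shown with the sync `Index`; it assumes a "movies" index already exists and uses placeholder connection details:

```py
from meilisearch_python_sdk import Client

client = Client("http://127.0.0.1:7700", "masterKey")
index = client.index("movies")

# Turn facet search off and wait for the settings task to finish.
task = index.update_facet_search(False)
client.wait_for_task(task.task_uid)

# Disable prefix search, then read both settings back.
index.update_prefix_search("disabled")
print(index.get_facet_search(), index.get_prefix_search())
```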

meilisearch_python_sdk/json_handler.py

@@ -2,10 +2,7 @@ from __future__ import annotations

  import json
  from abc import ABC, abstractmethod
- from typing import TYPE_CHECKING, Any
-
- if TYPE_CHECKING:  # pragma: no cover
-     pass
+ from typing import Any

  try:
      import orjson

meilisearch_python_sdk/models/batch.py (new file)

@@ -0,0 +1,55 @@
+ from __future__ import annotations
+
+ from datetime import datetime
+
+ from camel_converter.pydantic_base import CamelBase
+ from pydantic import Field, field_validator
+
+ from meilisearch_python_sdk._utils import iso_to_date_time
+ from meilisearch_python_sdk.types import JsonDict
+
+
+ class BatchId(CamelBase):
+     uid: int
+
+
+ class Status(CamelBase):
+     succeeded: int | None = None
+     failed: int | None = None
+     cancelled: int | None = None
+     processing: int | None = None
+     enqueued: int | None = None
+
+
+ class Stats(CamelBase):
+     total_nb_tasks: int
+     status: Status
+     batch_types: JsonDict | None = Field(None, alias="types")
+     index_uids: JsonDict | None = None
+
+
+ class BatchResult(BatchId):
+     details: JsonDict | None = None
+     progress: JsonDict | None = None
+     stats: Stats
+     duration: str | None = None
+     started_at: datetime | None = None
+     finished_at: datetime | None = None
+
+     @field_validator("started_at", mode="before")  # type: ignore[attr-defined]
+     @classmethod
+     def validate_started_at(cls, v: str) -> datetime | None:
+         return iso_to_date_time(v)
+
+     @field_validator("finished_at", mode="before")  # type: ignore[attr-defined]
+     @classmethod
+     def validate_finished_at(cls, v: str) -> datetime | None:
+         return iso_to_date_time(v)
+
+
+ class BatchStatus(CamelBase):
+     results: list[BatchResult]
+     total: int
+     limit: int
+     from_: int | None = Field(None, alias="from")
+     next: int | None = None
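
A small sketch of how these models parse an API-shaped payload; the dict below is made-up sample data (not taken from the diff), with camelCase keys handled by the `CamelBase` aliases and the timestamp strings converted by the `iso_to_date_time` validators:

```py
from meilisearch_python_sdk.models.batch import BatchResult

payload = {
    "uid": 0,
    "details": {"receivedDocuments": 1},
    "stats": {
        "totalNbTasks": 1,
        "status": {"succeeded": 1},
        "types": {"documentAdditionOrUpdate": 1},
        "indexUids": {"movies": 1},
    },
    "duration": "PT0.1S",
    "startedAt": "2024-12-01T10:00:00.123456Z",
    "finishedAt": "2024-12-01T10:00:01.123456Z",
}

batch = BatchResult(**payload)
print(batch.uid, batch.stats.total_nb_tasks, batch.finished_at)
```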

meilisearch_python_sdk/models/settings.py

@@ -1,6 +1,7 @@
  from __future__ import annotations

  from enum import Enum
+ from typing import Literal

  import pydantic
  from camel_converter.pydantic_base import CamelBase
@@ -142,3 +143,5 @@ class MeilisearchSettings(CamelBase):
          | None
      ) = None  # Optional[Embedders] = None
      localized_attributes: list[LocalizedAttributes] | None = None
+     facet_search: bool | None = None
+     prefix_search: Literal["disabled", "indexingTime", "searchTime"] | None = None

meilisearch_python_sdk/models/task.py

@@ -25,6 +25,7 @@ class TaskResult(TaskId):
      enqueued_at: datetime
      started_at: datetime | None = None
      finished_at: datetime | None = None
+     batch_uid: int | None = None

      @pydantic.field_validator("enqueued_at", mode="before")  # type: ignore[attr-defined]
      @classmethod
@@ -61,6 +62,7 @@ class TaskInfo(CamelBase):
      status: str
      task_type: str | JsonDict = Field(..., alias="type")
      enqueued_at: datetime
+     batch_uid: int | None = None

      @pydantic.field_validator("enqueued_at", mode="before")  # type: ignore[attr-defined]
      @classmethod

meilisearch_python_sdk/types.py

@@ -1,13 +1,15 @@
  from __future__ import annotations

- import sys
  from collections.abc import MutableMapping
- from typing import Any, Union
+ from typing import TYPE_CHECKING, Any, Union

- if sys.version_info >= (3, 10):  # pragma: no cover
-     from typing import TypeAlias
- else:
-     from typing_extensions import TypeAlias
+ if TYPE_CHECKING:  # pragma: no cover
+     import sys
+
+     if sys.version_info >= (3, 10):
+         from typing import TypeAlias
+     else:
+         from typing_extensions import TypeAlias

  Filter: TypeAlias = Union[str, list[Union[str, list[str]]]]
  JsonDict: TypeAlias = dict[str, Any]

meilisearch_python_sdk-*.dist-info/METADATA

@@ -1,6 +1,6 @@
- Metadata-Version: 2.3
+ Metadata-Version: 2.4
  Name: meilisearch-python-sdk
- Version: 3.6.2
+ Version: 4.1.0
  Summary: A Python client providing both async and sync support for the Meilisearch API
  Project-URL: repository, https://github.com/sanders41/meilisearch-python-sdk
  Project-URL: homepage, https://github.com/sanders41/meilisearch-python-sdk
@@ -27,6 +27,7 @@ License: MIT License
  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  SOFTWARE.
+ License-File: LICENSE
  Keywords: async,client,meilisearch,python,sdk
  Classifier: Development Status :: 5 - Production/Stable
  Classifier: Intended Audience :: Developers
@@ -56,7 +57,7 @@ Description-Content-Type: text/markdown

  # Meilisearch Python SDK

- [![Tests Status](https://github.com/sanders41/meilisearch-python-sdk/workflows/Testing/badge.svg?branch=main&event=push)](https://github.com/sanders41/meilisearch-python-sdk/actions?query=workflow%3ATesting+branch%3Amain+event%3Apush)
+ [![Tests Status](https://github.com/sanders41/meilisearch-python-sdk/actions/workflows/testing.yml/badge.svg?branch=main&event=push)](https://github.com/sanders41/meilisearch-python-sdk/actions?query=workflow%3ATesting+branch%3Amain+event%3Apush)
  [![pre-commit.ci status](https://results.pre-commit.ci/badge/github/sanders41/meilisearch-python-sdk/main.svg)](https://results.pre-commit.ci/latest/github/sanders41/meilisearch-python-sdk/main)
  [![Coverage](https://codecov.io/github/sanders41/meilisearch-python-sdk/coverage.svg?branch=main)](https://codecov.io/gh/sanders41/meilisearch-python-sdk)
  [![PyPI version](https://badge.fury.io/py/meilisearch-python-sdk.svg)](https://badge.fury.io/py/meilisearch-python-sdk)
@@ -141,15 +142,15 @@ variable, this will be an `UpdateId` object, and use it to check the status of t
  #### AsyncClient

  ```py
- update = await index.add_documents(documents)
- status = await client.index('books').get_update_status(update.update_id)
+ task = await index.add_documents([{"id": 1, "title": "test"}])
+ status = await client.get_task(task.task_uid)
  ```

  #### Client

  ```py
- update = index.add_documents(documents)
- status = client.index('books').get_update_status(update.update_id)
+ task = index.add_documents([{"id": 1, "title": "test"}])
+ status = client.get_task(task.task_uid)
  ```

  ### Basic Searching

meilisearch_python_sdk-4.1.0.dist-info/RECORD (new file)

@@ -0,0 +1,28 @@
+ meilisearch_python_sdk/__init__.py,sha256=SB0Jlm6FwT13J9xasZKseZzTWBk0hkfe1CWyWmIIZnE,258
+ meilisearch_python_sdk/_batch.py,sha256=Hbt-M8Lt8ZDZqcKToUMzUd5zvT-gku709er4pRlvXWk,5065
+ meilisearch_python_sdk/_client.py,sha256=LJ_MXxdIMfZ-Rva3feZRS7pzfY7rgr0lYUxY8hD9be4,73282
+ meilisearch_python_sdk/_http_requests.py,sha256=O3M3n-t1jAKwccWowPbk-HPD3ExtHq8a3XhnZT5facs,6746
+ meilisearch_python_sdk/_task.py,sha256=QgVcqMlZdURRS_oYpB_bTBa5dvT3Sp_-O0-s6TqAxHk,12485
+ meilisearch_python_sdk/_utils.py,sha256=NoCDxJPhjABeuSxFTNCih585UDWdXEUBD_FvdgtScQw,1539
+ meilisearch_python_sdk/_version.py,sha256=5hHes-uiUQVCKxIz0rFy7XVtaMCXeP6lPXPOk_JZPmU,18
+ meilisearch_python_sdk/decorators.py,sha256=njMn40P-qOzKGGQLCDpsBKWyj2ai10s4XG4IUBSHoD4,8674
+ meilisearch_python_sdk/errors.py,sha256=RNNHXtXLBiCVZaLM2MeKKs9RbRuE-SLRttiPeVAEXgA,2133
+ meilisearch_python_sdk/index.py,sha256=QIZk0O8cZ3qR2osPb03gSatR72dGaFEF_MC0PqTLmdU,341518
+ meilisearch_python_sdk/json_handler.py,sha256=c1rGKzYlE0dGfLygQjPqVUNfQkN1JvafBGmIx31JW8g,2044
+ meilisearch_python_sdk/plugins.py,sha256=YySzTuVr4IrogTgrP8q-gZPsew8TwedopjWnTj5eV48,3607
+ meilisearch_python_sdk/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ meilisearch_python_sdk/types.py,sha256=WxbQBPfy5S_j9hRKJ74ktuIxkH9Oifn7GYStQjs49Ik,458
+ meilisearch_python_sdk/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ meilisearch_python_sdk/models/batch.py,sha256=w0R0tINqm5DkkdX-9RXDqyS8rxDGCjEySSzzyVZ_gGs,1465
+ meilisearch_python_sdk/models/client.py,sha256=ntecx3ya_5EwgnxZfqYsBg9UlyQe_i3jgaTlRWFUVTE,2451
+ meilisearch_python_sdk/models/documents.py,sha256=eT3FHrPND-g2IzNRyOHQApTTJ1WbFcGlqgxZ6aKrRgI,247
+ meilisearch_python_sdk/models/health.py,sha256=hvruti7ylsk7bAh8RPOhTPcRrjx6MPgdkDFX9vZ5Qks,95
+ meilisearch_python_sdk/models/index.py,sha256=GGwuhx5Wsn5iyj1ov3f4eWjfw6ttM8WzvyrnSsC4vRg,1132
+ meilisearch_python_sdk/models/search.py,sha256=Wmv8LmhwMVGoZnndkhw16RuoxxC203jlcOO2Q_VZIU4,3439
+ meilisearch_python_sdk/models/settings.py,sha256=uCm-F4PDeCR5e2d8WrGT8hwq0VhrCv19MatQVpS8ocU,4154
+ meilisearch_python_sdk/models/task.py,sha256=_PyuH5tSlHCKMwFsx1nMKjFgc8bDBZxscKYZOdf-4pg,2166
+ meilisearch_python_sdk/models/version.py,sha256=YDu-aj5H-d6nSaWRTXzlwWghmZAoiknaw250UyEd48I,215
+ meilisearch_python_sdk-4.1.0.dist-info/METADATA,sha256=ZTC9P3sbdC7sxtMnXpHAa0EOagZ1zXkWWOPrnSf84cs,9763
+ meilisearch_python_sdk-4.1.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ meilisearch_python_sdk-4.1.0.dist-info/licenses/LICENSE,sha256=xVzevI1TrlKfM0plmJ7vfK1Muu0V9n-dGE8RnDrOFlM,1069
+ meilisearch_python_sdk-4.1.0.dist-info/RECORD,,

meilisearch_python_sdk-*.dist-info/WHEEL

@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: hatchling 1.26.3
+ Generator: hatchling 1.27.0
  Root-Is-Purelib: true
  Tag: py3-none-any

meilisearch_python_sdk-3.6.2.dist-info/RECORD (removed)

@@ -1,26 +0,0 @@
- meilisearch_python_sdk/__init__.py,sha256=SB0Jlm6FwT13J9xasZKseZzTWBk0hkfe1CWyWmIIZnE,258
- meilisearch_python_sdk/_client.py,sha256=HvZ74xfKtOfxwujsj0INdlBycW2vMakQfWw5IbTrpX8,70330
- meilisearch_python_sdk/_http_requests.py,sha256=baXB-tBhNrE9C5PQmyR-KEKxSNINgK02sqf3MFDSEL4,6745
- meilisearch_python_sdk/_task.py,sha256=WpUwj-CuFtMbbljNOrFUGzp41k5LuZ_riw08IWLwp4M,12347
- meilisearch_python_sdk/_utils.py,sha256=k6SYMJSiVjfF-vlhQRMaE1ziJsVf5FrL94mFwrMfdLY,957
- meilisearch_python_sdk/_version.py,sha256=S_1FdFnjMgmMn9CDgNZ3etYT2R6OERmYf-F2a4tDMdE,18
- meilisearch_python_sdk/decorators.py,sha256=njMn40P-qOzKGGQLCDpsBKWyj2ai10s4XG4IUBSHoD4,8674
- meilisearch_python_sdk/errors.py,sha256=0sAKYt47-zFpKsEU6W8Qnvf4uHBynKtlGPpPl-5laSA,2085
- meilisearch_python_sdk/index.py,sha256=RWbJAKajw_wI96Zifjc4crmn0HDvwxfUiPYobIXx7p8,330996
- meilisearch_python_sdk/json_handler.py,sha256=q_87zSnJfDNuVEI9cEvuOQOGBC7AGWJMEqCh2kGAAqA,2107
- meilisearch_python_sdk/plugins.py,sha256=YySzTuVr4IrogTgrP8q-gZPsew8TwedopjWnTj5eV48,3607
- meilisearch_python_sdk/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- meilisearch_python_sdk/types.py,sha256=VBzt-JF6w1f5V_aTAM3NetDQxs9fscnRy8t-Y1HWZXM,404
- meilisearch_python_sdk/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- meilisearch_python_sdk/models/client.py,sha256=ntecx3ya_5EwgnxZfqYsBg9UlyQe_i3jgaTlRWFUVTE,2451
- meilisearch_python_sdk/models/documents.py,sha256=eT3FHrPND-g2IzNRyOHQApTTJ1WbFcGlqgxZ6aKrRgI,247
- meilisearch_python_sdk/models/health.py,sha256=hvruti7ylsk7bAh8RPOhTPcRrjx6MPgdkDFX9vZ5Qks,95
- meilisearch_python_sdk/models/index.py,sha256=GGwuhx5Wsn5iyj1ov3f4eWjfw6ttM8WzvyrnSsC4vRg,1132
- meilisearch_python_sdk/models/search.py,sha256=Wmv8LmhwMVGoZnndkhw16RuoxxC203jlcOO2Q_VZIU4,3439
- meilisearch_python_sdk/models/settings.py,sha256=wg9nmmZd9cP2WrgtHqIgLXdJH4BlCmAuS793QRK6cjc,4007
- meilisearch_python_sdk/models/task.py,sha256=P3NLaZhrY8H02Q9lDEkoq-3Z6_qGESglOxs4dNRyMWg,2100
- meilisearch_python_sdk/models/version.py,sha256=YDu-aj5H-d6nSaWRTXzlwWghmZAoiknaw250UyEd48I,215
- meilisearch_python_sdk-3.6.2.dist-info/METADATA,sha256=4vIJhJWyHoloj27cBlsL_LN5xleHTuKwVKEps2n5VXg,9749
- meilisearch_python_sdk-3.6.2.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87
- meilisearch_python_sdk-3.6.2.dist-info/licenses/LICENSE,sha256=xVzevI1TrlKfM0plmJ7vfK1Muu0V9n-dGE8RnDrOFlM,1069
- meilisearch_python_sdk-3.6.2.dist-info/RECORD,,