meilisearch-python-sdk 2.12.1__tar.gz → 3.0.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- {meilisearch_python_sdk-2.12.1 → meilisearch_python_sdk-3.0.1}/PKG-INFO +9 -4
- {meilisearch_python_sdk-2.12.1 → meilisearch_python_sdk-3.0.1}/meilisearch_python_sdk/_client.py +52 -84
- {meilisearch_python_sdk-2.12.1 → meilisearch_python_sdk-3.0.1}/meilisearch_python_sdk/_http_requests.py +17 -33
- {meilisearch_python_sdk-2.12.1 → meilisearch_python_sdk-3.0.1}/meilisearch_python_sdk/_task.py +17 -3
- {meilisearch_python_sdk-2.12.1 → meilisearch_python_sdk-3.0.1}/meilisearch_python_sdk/_utils.py +0 -17
- meilisearch_python_sdk-3.0.1/meilisearch_python_sdk/_version.py +1 -0
- {meilisearch_python_sdk-2.12.1 → meilisearch_python_sdk-3.0.1}/meilisearch_python_sdk/index.py +212 -390
- meilisearch_python_sdk-3.0.1/meilisearch_python_sdk/json_handler.py +77 -0
- meilisearch_python_sdk-3.0.1/meilisearch_python_sdk/models/client.py +85 -0
- {meilisearch_python_sdk-2.12.1 → meilisearch_python_sdk-3.0.1}/meilisearch_python_sdk/models/documents.py +2 -2
- meilisearch_python_sdk-3.0.1/meilisearch_python_sdk/models/index.py +44 -0
- meilisearch_python_sdk-3.0.1/meilisearch_python_sdk/models/search.py +88 -0
- meilisearch_python_sdk-3.0.1/meilisearch_python_sdk/models/settings.py +134 -0
- meilisearch_python_sdk-3.0.1/meilisearch_python_sdk/models/task.py +73 -0
- {meilisearch_python_sdk-2.12.1 → meilisearch_python_sdk-3.0.1}/meilisearch_python_sdk/plugins.py +2 -1
- meilisearch_python_sdk-3.0.1/meilisearch_python_sdk/types.py +14 -0
- {meilisearch_python_sdk-2.12.1 → meilisearch_python_sdk-3.0.1}/pyproject.toml +26 -18
- meilisearch_python_sdk-2.12.1/meilisearch_python_sdk/_version.py +0 -1
- meilisearch_python_sdk-2.12.1/meilisearch_python_sdk/models/client.py +0 -185
- meilisearch_python_sdk-2.12.1/meilisearch_python_sdk/models/index.py +0 -73
- meilisearch_python_sdk-2.12.1/meilisearch_python_sdk/models/search.py +0 -105
- meilisearch_python_sdk-2.12.1/meilisearch_python_sdk/models/settings.py +0 -159
- meilisearch_python_sdk-2.12.1/meilisearch_python_sdk/models/task.py +0 -121
- meilisearch_python_sdk-2.12.1/meilisearch_python_sdk/types.py +0 -11
- {meilisearch_python_sdk-2.12.1 → meilisearch_python_sdk-3.0.1}/LICENSE +0 -0
- {meilisearch_python_sdk-2.12.1 → meilisearch_python_sdk-3.0.1}/README.md +0 -0
- {meilisearch_python_sdk-2.12.1 → meilisearch_python_sdk-3.0.1}/meilisearch_python_sdk/__init__.py +0 -0
- {meilisearch_python_sdk-2.12.1 → meilisearch_python_sdk-3.0.1}/meilisearch_python_sdk/decorators.py +0 -0
- {meilisearch_python_sdk-2.12.1 → meilisearch_python_sdk-3.0.1}/meilisearch_python_sdk/errors.py +0 -0
- {meilisearch_python_sdk-2.12.1 → meilisearch_python_sdk-3.0.1}/meilisearch_python_sdk/models/__init__.py +0 -0
- {meilisearch_python_sdk-2.12.1 → meilisearch_python_sdk-3.0.1}/meilisearch_python_sdk/models/health.py +0 -0
- {meilisearch_python_sdk-2.12.1 → meilisearch_python_sdk-3.0.1}/meilisearch_python_sdk/models/version.py +0 -0
- {meilisearch_python_sdk-2.12.1 → meilisearch_python_sdk-3.0.1}/meilisearch_python_sdk/py.typed +0 -0
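The centerpiece of the 3.0 release visible in this diff is the pluggable JSON handler: a new meilisearch_python_sdk/json_handler.py module plus a json_handler parameter threaded through the client, index, HTTP, and task layers in the hunks below. A minimal usage sketch based only on the signatures shown in those hunks; the URL and key are placeholders, and it assumes OrjsonHandler takes no constructor arguments and that the matching extra is installed:

```python
# pip install "meilisearch-python-sdk[orjson]"  (one of the extras added in 3.0)
from meilisearch_python_sdk import Client
from meilisearch_python_sdk.json_handler import OrjsonHandler

# Default behaviour: BuiltinHandler (the standard-library json module) is used
# when no handler is passed.
client = Client("http://127.0.0.1:7700", "masterKey")

# Opt in to orjson for serializing request bodies and parsing key payloads.
fast_client = Client(
    "http://127.0.0.1:7700",
    "masterKey",
    json_handler=OrjsonHandler(),
)
```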
{meilisearch_python_sdk-2.12.1 → meilisearch_python_sdk-3.0.1}/PKG-INFO RENAMED
@@ -1,29 +1,34 @@
 Metadata-Version: 2.1
 Name: meilisearch-python-sdk
-Version: 2.12.1
+Version: 3.0.1
 Summary: A Python client providing both async and sync support for the Meilisearch API
 Home-page: https://github.com/sanders41/meilisearch-python-sdk
 License: MIT
 Keywords: meilisearch,async,python,client,sdk
 Author: Paul Sanders
 Author-email: psanders1@gmail.com
-Requires-Python: >=3.
+Requires-Python: >=3.9,<4.0
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Typing :: Typed
+Provides-Extra: all
+Provides-Extra: orjson
+Provides-Extra: ujson
 Requires-Dist: PyJWT (>=2.3.0)
 Requires-Dist: aiofiles (>=0.7)
 Requires-Dist: camel-converter (>=1.0.0)
+Requires-Dist: eval-type-backport (>=0.2.0) ; python_version < "3.10"
 Requires-Dist: httpx (>=0.17)
-Requires-Dist:
+Requires-Dist: orjson (>=3.10.6) ; extra == "orjson" or extra == "all"
+Requires-Dist: pydantic (>=2.0.0)
+Requires-Dist: ujson (>=5.10.0) ; extra == "ujson" or extra == "all"
 Project-URL: Documentation, https://meilisearch-python-sdk.paulsanders.dev
 Project-URL: Repository, https://github.com/sanders41/meilisearch-python-sdk
 Description-Content-Type: text/markdown
{meilisearch_python_sdk-2.12.1 → meilisearch_python_sdk-3.0.1}/meilisearch_python_sdk/_client.py RENAMED
@@ -1,10 +1,8 @@
 from __future__ import annotations
 
-import json
 from datetime import datetime, timezone
 from ssl import SSLContext
 from typing import TYPE_CHECKING
-from warnings import warn
 
 import jwt
 from httpx import AsyncClient as HttpxAsyncClient
@@ -12,9 +10,9 @@ from httpx import Client as HttpxClient
 
 from meilisearch_python_sdk import _task
 from meilisearch_python_sdk._http_requests import AsyncHttpRequests, HttpRequests
-from meilisearch_python_sdk._utils import is_pydantic_2
 from meilisearch_python_sdk.errors import InvalidRestriction, MeilisearchApiError
 from meilisearch_python_sdk.index import AsyncIndex, Index
+from meilisearch_python_sdk.json_handler import BuiltinHandler, OrjsonHandler, UjsonHandler
 from meilisearch_python_sdk.models.client import (
     ClientStats,
     Key,
@@ -47,7 +45,9 @@ class BaseClient:
         self,
         api_key: str | None = None,
         custom_headers: dict[str, str] | None = None,
+        json_handler: BuiltinHandler | OrjsonHandler | UjsonHandler | None = None,
     ) -> None:
+        self.json_handler = json_handler if json_handler else BuiltinHandler()
         self._headers: dict[str, str] | None = None
         if api_key:
             self._headers = {"Authorization": f"Bearer {api_key}"}
@@ -143,6 +143,7 @@ class AsyncClient(BaseClient):
         timeout: int | None = None,
         verify: str | bool | SSLContext = True,
         custom_headers: dict[str, str] | None = None,
+        json_handler: BuiltinHandler | OrjsonHandler | UjsonHandler | None = None,
     ) -> None:
         """Class initializer.
 
@@ -157,13 +158,17 @@
                 a path to an SSL certificate file, or `False` (disable verification)
             custom_headers: Custom headers to add when sending data to Meilisearch. Defaults to
                 None.
+            json_handler: The module to use for json operations. The options are BuiltinHandler
+                (uses the json module from the standard library), OrjsonHandler (uses orjson), or
+                UjsonHandler (uses ujson). Note that in order use orjson or ujson the corresponding
+                extra needs to be included. Default: BuiltinHandler.
         """
-        super().__init__(api_key, custom_headers)
+        super().__init__(api_key, custom_headers, json_handler)
 
         self.http_client = HttpxAsyncClient(
             base_url=url, timeout=timeout, headers=self._headers, verify=verify
         )
-        self._http_requests = AsyncHttpRequests(self.http_client)
+        self._http_requests = AsyncHttpRequests(self.http_client, json_handler=self.json_handler)
 
     async def __aenter__(self) -> Self:
         return self
@@ -257,6 +262,7 @@ class AsyncClient(BaseClient):
             wait=wait,
            timeout_in_ms=timeout_in_ms,
            plugins=plugins,
+            json_handler=self.json_handler,
         )
 
     async def create_snapshot(self) -> TaskInfo:
@@ -348,6 +354,7 @@ class AsyncClient(BaseClient):
                 primary_key=x["primaryKey"],
                 created_at=x["createdAt"],
                 updated_at=x["updatedAt"],
+                json_handler=self.json_handler,
             )
             for x in response.json()["results"]
         ]
@@ -374,7 +381,7 @@ class AsyncClient(BaseClient):
             >>> async with AsyncClient("http://localhost.com", "masterKey") as client:
             >>> index = await client.get_index()
         """
-        return await AsyncIndex(self.http_client, uid).fetch_info()
+        return await AsyncIndex(self.http_client, uid, json_handler=self.json_handler).fetch_info()
 
     def index(self, uid: str, *, plugins: AsyncIndexPlugins | None = None) -> AsyncIndex:
         """Create a local reference to an index identified by UID, without making an HTTP call.
@@ -401,7 +408,9 @@ class AsyncClient(BaseClient):
             >>> async with AsyncClient("http://localhost.com", "masterKey") as client:
             >>> index = client.index("movies")
         """
-        return AsyncIndex(
+        return AsyncIndex(
+            self.http_client, uid=uid, plugins=plugins, json_handler=self.json_handler
+        )
 
     async def get_all_stats(self) -> ClientStats:
         """Get stats for all indexes.
@@ -490,17 +499,9 @@ class AsyncClient(BaseClient):
             >>> )
             >>> keys = await client.create_key(key_info)
         """
-
-
-
-            ) # type: ignore[attr-defined]
-        else: # pragma: no cover
-            warn(
-                "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
-                DeprecationWarning,
-                stacklevel=2,
-            )
-            response = await self._http_requests.post("keys", json.loads(key.json(by_alias=True))) # type: ignore[attr-defined]
+        response = await self._http_requests.post(
+            "keys", self.json_handler.loads(key.model_dump_json(by_alias=True))
+        ) # type: ignore[attr-defined]
 
         return Key(**response.json())
 
@@ -612,7 +613,7 @@ class AsyncClient(BaseClient):
             >>> )
             >>> keys = await client.update_key(key_info)
         """
-        payload = _build_update_key_payload(key)
+        payload = _build_update_key_payload(key, self.json_handler)
         response = await self._http_requests.patch(f"keys/{key.key}", payload)
 
         return Key(**response.json())
@@ -645,21 +646,10 @@ class AsyncClient(BaseClient):
             >>> search_results = await client.search(queries)
         """
         url = "multi-search"
-
-
-
-
-            )
-        else: # pragma: no cover
-            warn(
-                "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
-                DeprecationWarning,
-                stacklevel=2,
-            )
-            response = await self._http_requests.post(
-                url,
-                body={"queries": [x.dict(by_alias=True) for x in queries]}, # type: ignore[attr-defined]
-            )
+        response = await self._http_requests.post(
+            url,
+            body={"queries": [x.model_dump(by_alias=True) for x in queries]}, # type: ignore[attr-defined]
+        )
 
         return [SearchResultsWithUID(**x) for x in response.json()["results"]]
 
@@ -1035,6 +1025,7 @@ class Client(BaseClient):
         timeout: int | None = None,
         verify: str | bool | SSLContext = True,
         custom_headers: dict[str, str] | None = None,
+        json_handler: BuiltinHandler | OrjsonHandler | UjsonHandler | None = None,
     ) -> None:
         """Class initializer.
 
@@ -1049,13 +1040,17 @@ class Client(BaseClient):
                 a path to an SSL certificate file, or `False` (disable verification)
             custom_headers: Custom headers to add when sending data to Meilisearch. Defaults to
                 None.
+            json_handler: The module to use for json operations. The options are BuiltinHandler
+                (uses the json module from the standard library), OrjsonHandler (uses orjson), or
+                UjsonHandler (uses ujson). Note that in order use orjson or ujson the corresponding
+                extra needs to be included. Default: BuiltinHandler.
         """
-        super().__init__(api_key, custom_headers)
+        super().__init__(api_key, custom_headers, json_handler)
 
         self.http_client = HttpxClient(
             base_url=url, timeout=timeout, headers=self._headers, verify=verify
         )
-        self._http_requests = HttpRequests(self.http_client)
+        self._http_requests = HttpRequests(self.http_client, json_handler=self.json_handler)
 
     def create_dump(self) -> TaskInfo:
         """Trigger the creation of a Meilisearch dump.
@@ -1131,6 +1126,7 @@ class Client(BaseClient):
             wait=wait,
             timeout_in_ms=timeout_in_ms,
             plugins=plugins,
+            json_handler=self.json_handler,
         )
 
     def create_snapshot(self) -> TaskInfo:
@@ -1222,6 +1218,7 @@ class Client(BaseClient):
                 primary_key=x["primaryKey"],
                 created_at=x["createdAt"],
                 updated_at=x["updatedAt"],
+                json_handler=self.json_handler,
             )
             for x in response.json()["results"]
         ]
@@ -1248,7 +1245,7 @@ class Client(BaseClient):
             >>> client = Client("http://localhost.com", "masterKey")
             >>> index = client.get_index()
         """
-        return Index(self.http_client, uid).fetch_info()
+        return Index(self.http_client, uid, json_handler=self.json_handler).fetch_info()
 
     def index(self, uid: str, *, plugins: IndexPlugins | None = None) -> Index:
         """Create a local reference to an index identified by UID, without making an HTTP call.
@@ -1273,7 +1270,7 @@ class Client(BaseClient):
             >>> client = Client("http://localhost.com", "masterKey")
             >>> index = client.index("movies")
         """
-        return Index(self.http_client, uid=uid, plugins=plugins)
+        return Index(self.http_client, uid=uid, plugins=plugins, json_handler=self.json_handler)
 
     def get_all_stats(self) -> ClientStats:
         """Get stats for all indexes.
@@ -1362,17 +1359,9 @@ class Client(BaseClient):
             >>> )
             >>> keys = client.create_key(key_info)
         """
-
-
-
-            ) # type: ignore[attr-defined]
-        else: # pragma: no cover
-            warn(
-                "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
-                DeprecationWarning,
-                stacklevel=2,
-            )
-            response = self._http_requests.post("keys", json.loads(key.json(by_alias=True))) # type: ignore[attr-defined]
+        response = self._http_requests.post(
+            "keys", self.json_handler.loads(key.model_dump_json(by_alias=True))
+        ) # type: ignore[attr-defined]
 
         return Key(**response.json())
 
@@ -1484,7 +1473,7 @@ class Client(BaseClient):
             >>> )
             >>> keys = client.update_key(key_info)
         """
-        payload = _build_update_key_payload(key)
+        payload = _build_update_key_payload(key, self.json_handler)
         response = self._http_requests.patch(f"keys/{key.key}", payload)
 
         return Key(**response.json())
@@ -1517,21 +1506,10 @@ class Client(BaseClient):
             >>> search_results = client.search(queries)
         """
         url = "multi-search"
-
-
-
-
-            )
-        else: # pragma: no cover
-            warn(
-                "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
-                DeprecationWarning,
-                stacklevel=2,
-            )
-            response = self._http_requests.post(
-                url,
-                body={"queries": [x.dict(by_alias=True) for x in queries]}, # type: ignore[attr-defined]
-            )
+        response = self._http_requests.post(
+            url,
+            body={"queries": [x.model_dump(by_alias=True) for x in queries]}, # type: ignore[attr-defined]
+        )
 
         return [SearchResultsWithUID(**x) for x in response.json()["results"]]
 
@@ -1907,23 +1885,13 @@ def _build_offset_limit_url(base: str, offset: int | None, limit: int | None) ->
     return base
 
 
-def _build_update_key_payload(
-
+def _build_update_key_payload(
+    key: KeyUpdate, json_handler: BuiltinHandler | OrjsonHandler | UjsonHandler
+) -> JsonDict:
+    # The json_handler.loads(key.json()) is because Pydantic can't serialize a date in a Python dict,
     # but can when converting to a json string.
-
-
-
-
-
-        }
-    else: # pragma: no cover
-        warn(
-            "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
-            DeprecationWarning,
-            stacklevel=2,
-        )
-        return { # type: ignore[attr-defined]
-            k: v
-            for k, v in json.loads(key.json(by_alias=True)).items()
-            if v is not None and k != "key"
-        }
+    return { # type: ignore[attr-defined]
+        k: v
+        for k, v in json_handler.loads(key.model_dump_json(by_alias=True)).items()
+        if v is not None and k != "key"
+    }
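The `_build_update_key_payload` rewrite above explains why key updates never resend `None` fields or the key itself: the model is dumped to a JSON string (so Pydantic can serialize dates), parsed back with the configured handler, and then filtered. A standalone illustration of that filtering step, using hypothetical field values in place of the parsed `model_dump_json()` output:

```python
# Mirrors the dict comprehension in _build_update_key_payload above; the field
# names and values here are illustrative only.
serialized = {
    "key": "abc123",                      # never sent in the PATCH body
    "name": "search-only",                # kept
    "description": None,                  # None values are stripped
    "expiresAt": "2030-01-01T00:00:00Z",  # dates survive because they were JSON-encoded first
}

payload = {k: v for k, v in serialized.items() if v is not None and k != "key"}
assert payload == {"name": "search-only", "expiresAt": "2030-01-01T00:00:00Z"}
```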
{meilisearch_python_sdk-2.12.1 → meilisearch_python_sdk-3.0.1}/meilisearch_python_sdk/_http_requests.py RENAMED
@@ -1,7 +1,6 @@
 from __future__ import annotations
 
 import gzip
-import json
 from functools import lru_cache
 from typing import Any, Callable
 
@@ -21,11 +20,15 @@ from meilisearch_python_sdk.errors import (
     MeilisearchCommunicationError,
     MeilisearchError,
 )
+from meilisearch_python_sdk.json_handler import BuiltinHandler, OrjsonHandler, UjsonHandler
 
 
 class AsyncHttpRequests:
-    def __init__(
+    def __init__(
+        self, http_client: AsyncClient, json_handler: BuiltinHandler | OrjsonHandler | UjsonHandler
+    ) -> None:
         self.http_client = http_client
+        self.json_handler = json_handler
 
     async def _send_request(
         self,
@@ -34,8 +37,6 @@ class AsyncHttpRequests:
         body: Any | None = None,
         content_type: str = "application/json",
         compress: bool = False,
-        *,
-        serializer: type[json.JSONEncoder] | None = None,
     ) -> Response:
         headers = build_headers(content_type, compress)
 
@@ -44,12 +45,12 @@ class AsyncHttpRequests:
                 response = await http_method(path)
             elif content_type == "application/json" and not compress:
                 response = await http_method(
-                    path, content=
+                    path, content=self.json_handler.dumps(body), headers=headers
                 )
             else:
                 if body and compress:
                     if content_type == "application/json":
-                        body = gzip.compress(
+                        body = gzip.compress(self.json_handler.dumps(body).encode("utf-8"))
                     else:
                         body = gzip.compress((body).encode("utf-8"))
                 response = await http_method(path, content=body, headers=headers)
@@ -87,12 +88,8 @@ class AsyncHttpRequests:
         body: Any | None = None,
         content_type: str = "application/json",
         compress: bool = False,
-        *,
-        serializer: type[json.JSONEncoder] | None = None,
     ) -> Response:
-        return await self._send_request(
-            self.http_client.post, path, body, content_type, compress, serializer=serializer
-        )
+        return await self._send_request(self.http_client.post, path, body, content_type, compress)
 
     async def put(
         self,
@@ -100,20 +97,19 @@ class AsyncHttpRequests:
         body: Any | None = None,
         content_type: str = "application/json",
         compress: bool = False,
-        *,
-        serializer: type[json.JSONEncoder] | None = None,
     ) -> Response:
-        return await self._send_request(
-            self.http_client.put, path, body, content_type, compress, serializer=serializer
-        )
+        return await self._send_request(self.http_client.put, path, body, content_type, compress)
 
     async def delete(self, path: str, body: dict | None = None) -> Response:
         return await self._send_request(self.http_client.delete, path, body)
 
 
 class HttpRequests:
-    def __init__(
+    def __init__(
+        self, http_client: Client, json_handler: BuiltinHandler | OrjsonHandler | UjsonHandler
+    ) -> None:
         self.http_client = http_client
+        self.json_handler = json_handler
 
     def _send_request(
         self,
@@ -122,21 +118,17 @@ class HttpRequests:
         body: Any | None = None,
         content_type: str = "applicaton/json",
         compress: bool = False,
-        *,
-        serializer: type[json.JSONEncoder] | None = None,
     ) -> Response:
         headers = build_headers(content_type, compress)
         try:
             if not body:
                 response = http_method(path)
             elif content_type == "application/json" and not compress:
-                response = http_method(
-                    path, content=json.dumps(body, cls=serializer), headers=headers
-                )
+                response = http_method(path, content=self.json_handler.dumps(body), headers=headers)
             else:
                 if body and compress:
                     if content_type == "application/json":
-                        body = gzip.compress(
+                        body = gzip.compress(self.json_handler.dumps(body).encode("utf-8"))
                     else:
                         body = gzip.compress((body).encode("utf-8"))
                 response = http_method(path, content=body, headers=headers)
@@ -174,12 +166,8 @@ class HttpRequests:
         body: Any | None = None,
         content_type: str = "application/json",
         compress: bool = False,
-        *,
-        serializer: type[json.JSONEncoder] | None = None,
     ) -> Response:
-        return self._send_request(
-            self.http_client.post, path, body, content_type, compress, serializer=serializer
-        )
+        return self._send_request(self.http_client.post, path, body, content_type, compress)
 
     def put(
         self,
@@ -187,12 +175,8 @@ class HttpRequests:
         body: Any | None = None,
         content_type: str = "application/json",
         compress: bool = False,
-        *,
-        serializer: type[json.JSONEncoder] | None = None,
     ) -> Response:
-        return self._send_request(
-            self.http_client.put, path, body, content_type, compress, serializer=serializer
-        )
+        return self._send_request(self.http_client.put, path, body, content_type, compress)
 
     def delete(self, path: str, body: dict | None = None) -> Response:
         return self._send_request(self.http_client.delete, path, body)
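The `_send_request` changes above route all serialization through the configured handler: uncompressed JSON bodies become `json_handler.dumps(body)`, and compressed ones are gzipped after encoding to UTF-8. A standalone sketch of that encoding path using the standard library directly, on the assumption that BuiltinHandler simply wraps `json.dumps`/`json.loads`:

```python
import gzip
import json

body = {"queries": [{"indexUid": "movies", "q": "ghost"}]}

# compress=False path: the body is sent as a JSON string.
content = json.dumps(body)

# compress=True path: serialize first, then gzip the UTF-8 bytes.
compressed = gzip.compress(json.dumps(body).encode("utf-8"))

# Round-trip check.
assert json.loads(gzip.decompress(compressed).decode("utf-8")) == body
```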
{meilisearch_python_sdk-2.12.1 → meilisearch_python_sdk-3.0.1}/meilisearch_python_sdk/_task.py RENAMED
@@ -11,6 +11,7 @@ from httpx import Client as HttpxClient
 
 from meilisearch_python_sdk._http_requests import AsyncHttpRequests, HttpRequests
 from meilisearch_python_sdk.errors import MeilisearchTaskFailedError, MeilisearchTimeoutError
+from meilisearch_python_sdk.json_handler import BuiltinHandler, OrjsonHandler, UjsonHandler
 from meilisearch_python_sdk.models.task import TaskInfo, TaskResult, TaskStatus
 
 if TYPE_CHECKING:
@@ -151,8 +152,9 @@ async def async_wait_for_task(
     raise_for_status: bool = False,
 ) -> TaskResult:
     client_ = _get_async_client(client)
+    handler = _get_json_handler(client)
     url = f"tasks/{task_id}"
-    http_requests = AsyncHttpRequests(client_)
+    http_requests = AsyncHttpRequests(client_, handler)
     start_time = datetime.now()
     elapsed_time = 0.0
 
@@ -281,8 +283,9 @@ def wait_for_task(
     raise_for_status: bool = False,
 ) -> TaskResult:
     client_ = _get_client(client)
+    handler = _get_json_handler(client)
     url = f"tasks/{task_id}"
-    http_requests = HttpRequests(client_)
+    http_requests = HttpRequests(client_, json_handler=handler)
     start_time = datetime.now()
     elapsed_time = 0.0
 
@@ -311,7 +314,9 @@ def wait_for_task(
         time.sleep(interval_in_ms / 1000)
 
 
-def _get_async_client(
+def _get_async_client(
+    client: AsyncClient | HttpxAsyncClient,
+) -> HttpxAsyncClient:
     if isinstance(client, HttpxAsyncClient):
         return client
 
@@ -327,6 +332,15 @@ def _get_client(
     return client.http_client
 
 
+def _get_json_handler(
+    client: AsyncClient | Client | HttpxAsyncClient | HttpxClient,
+) -> BuiltinHandler | OrjsonHandler | UjsonHandler:
+    if isinstance(client, (HttpxAsyncClient, HttpxClient)):
+        return BuiltinHandler()
+
+    return client.json_handler
+
+
 def _process_params(
     uids: list[str] | None = None,
     index_uids: list[str] | None = None,
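The new `_get_json_handler` helper lets the task-polling functions accept either a configured client or a bare httpx client, falling back to the builtin handler in the latter case. A hedged usage sketch; the `wait_for_task` parameters beyond those visible above are assumed, and the URL, key, and task id are placeholders:

```python
from httpx import Client as HttpxClient

from meilisearch_python_sdk import Client
from meilisearch_python_sdk._task import wait_for_task

client = Client("http://127.0.0.1:7700", "masterKey")
result = wait_for_task(client, task_id=42)  # uses client.json_handler

# With a raw httpx client there is no configured handler, so the helper
# falls back to BuiltinHandler().
raw = HttpxClient(
    base_url="http://127.0.0.1:7700",
    headers={"Authorization": "Bearer masterKey"},
)
result = wait_for_task(raw, task_id=42)
```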
{meilisearch_python_sdk-2.12.1 → meilisearch_python_sdk-3.0.1}/meilisearch_python_sdk/_utils.py RENAMED
@@ -4,23 +4,6 @@ import sys
 from datetime import datetime
 from functools import lru_cache
 
-import pydantic
-
-
-@lru_cache(maxsize=1)
-def is_pydantic_2() -> bool:
-    try:
-        # __version__ was added with Pydantic 2 so we know if this errors the version is < 2.
-        # Still check the version as a fail safe incase __version__ gets added to verion 1.
-        if int(pydantic.__version__[:1]) >= 2: # type: ignore[attr-defined]
-            return True
-        else: # pragma: no cover
-            # Raise an AttributeError to match the AttributeError on __version__ because in either
-            # case we need to get to the same place.
-            raise AttributeError
-    except AttributeError: # pragma: no cover
-        return False
-
 
 def iso_to_date_time(iso_date: datetime | str | None) -> datetime | None:
     """Handle conversion of iso string to datetime.
meilisearch_python_sdk-3.0.1/meilisearch_python_sdk/_version.py
@@ -0,0 +1 @@
+VERSION = "3.0.1"