meilisearch-python-sdk 2.8.0__tar.gz → 2.9.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/PKG-INFO +1 -4
- {meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/README.md +0 -3
- {meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/_client.py +43 -18
- {meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/_http_requests.py +2 -2
- meilisearch_python_sdk-2.9.0/meilisearch_python_sdk/_version.py +1 -0
- {meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/decorators.py +1 -1
- {meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/index.py +238 -23
- {meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/models/client.py +5 -0
- {meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/models/index.py +1 -0
- {meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/models/search.py +1 -1
- {meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/models/settings.py +49 -2
- {meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/models/task.py +2 -0
- {meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/plugins.py +5 -4
- {meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/pyproject.toml +2 -2
- meilisearch_python_sdk-2.8.0/meilisearch_python_sdk/_version.py +0 -1
- {meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/LICENSE +0 -0
- {meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/__init__.py +0 -0
- {meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/_task.py +0 -0
- {meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/_utils.py +0 -0
- {meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/errors.py +0 -0
- {meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/models/__init__.py +0 -0
- {meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/models/documents.py +0 -0
- {meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/models/health.py +0 -0
- {meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/models/version.py +0 -0
- {meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/py.typed +0 -0
- {meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/types.py +0 -0
{meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/PKG-INFO
RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: meilisearch-python-sdk
-Version: 2.8.0
+Version: 2.9.0
 Summary: A Python client providing both async and sync support for the Meilisearch API
 Home-page: https://github.com/sanders41/meilisearch-python-sdk
 License: MIT

@@ -36,9 +36,6 @@ Description-Content-Type: text/markdown
 [](https://badge.fury.io/py/meilisearch-python-sdk)
 [](https://github.com/sanders41/meilisearch-python-sdk)

-NOTE: This project was previously named `meilisearch-python-async`. Development on
-that project continues here under the new name.
-
 Meilisearch Python SDK provides both an async and sync client for the
 [Meilisearch](https://github.com/meilisearch/meilisearch) API.

{meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/README.md
RENAMED

@@ -6,9 +6,6 @@
 [](https://badge.fury.io/py/meilisearch-python-sdk)
 [](https://github.com/sanders41/meilisearch-python-sdk)

-NOTE: This project was previously named `meilisearch-python-async`. Development on
-that project continues here under the new name.
-
 Meilisearch Python SDK provides both an async and sync client for the
 [Meilisearch](https://github.com/meilisearch/meilisearch) API.

{meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/_client.py
RENAMED
@@ -194,6 +194,7 @@ class AsyncClient(BaseClient):
         *,
         settings: MeilisearchSettings | None = None,
         wait: bool = True,
+        timeout_in_ms: int | None = None,
         plugins: AsyncIndexPlugins | None = None,
     ) -> AsyncIndex:
         """Creates a new index.

@@ -210,6 +211,9 @@ class AsyncClient(BaseClient):
             wait: If set to True and settings are being updated, the index will be returned after
                 the settings update has completed. If False it will not wait for settings to complete.
                 Default: True
+            timeout_in_ms: Amount of time in milliseconds to wait before raising a
+                MeilisearchTimeoutError. `None` can also be passed to wait indefinitely. Be aware that
+                if the `None` option is used the wait time could be very long. Defaults to None.
             plugins: Optional plugins can be provided to extend functionality.

         Returns:

@@ -228,7 +232,13 @@ class AsyncClient(BaseClient):
             >>> index = await client.create_index("movies")
         """
         return await AsyncIndex.create(
-            self.http_client,
+            self.http_client,
+            uid,
+            primary_key,
+            settings=settings,
+            wait=wait,
+            timeout_in_ms=timeout_in_ms,
+            plugins=plugins,
         )

     async def create_snapshot(self) -> TaskInfo:

@@ -470,6 +480,7 @@ class AsyncClient(BaseClient):
             warn(
                 "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
                 DeprecationWarning,
+                stacklevel=2,
             )
         response = await self._http_requests.post("keys", json.loads(key.json(by_alias=True)))  # type: ignore[attr-defined]

@@ -625,6 +636,7 @@ class AsyncClient(BaseClient):
             warn(
                 "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
                 DeprecationWarning,
+                stacklevel=2,
             )
         response = await self._http_requests.post(
             url,

@@ -870,14 +882,14 @@ class AsyncClient(BaseClient):

         Args:

-            uids: A list of task UIDs to
-            index_uids: A list of index UIDs for which to
-            statuses: A list of statuses to
-            types: A list of types to
-            before_enqueued_at:
-            after_enqueued_at:
-            before_started_at:
-            after_finished_at:
+            uids: A list of task UIDs to delete.
+            index_uids: A list of index UIDs for which to delete tasks.
+            statuses: A list of statuses to delete.
+            types: A list of types to delete.
+            before_enqueued_at: Delete tasks that were enqueued before the specified date time.
+            after_enqueued_at: Delete tasks that were enqueued after the specified date time.
+            before_started_at: Delete tasks that were started before the specified date time.
+            after_finished_at: Delete tasks that were finished after the specified date time.

         Returns:

@@ -1053,6 +1065,7 @@ class Client(BaseClient):
         *,
         settings: MeilisearchSettings | None = None,
         wait: bool = True,
+        timeout_in_ms: int | None = None,
         plugins: IndexPlugins | None = None,
     ) -> Index:
         """Creates a new index.

@@ -1069,6 +1082,9 @@ class Client(BaseClient):
             wait: If set to True and settings are being updated, the index will be returned after
                 the settings update has completed. If False it will not wait for settings to complete.
                 Default: True
+            timeout_in_ms: Amount of time in milliseconds to wait before raising a
+                MeilisearchTimeoutError. `None` can also be passed to wait indefinitely. Be aware that
+                if the `None` option is used the wait time could be very long. Defaults to None.
             plugins: Optional plugins can be provided to extend functionality.

         Returns:

@@ -1087,7 +1103,13 @@ class Client(BaseClient):
             >>> index = client.create_index("movies")
         """
         return Index.create(
-            self.http_client,
+            self.http_client,
+            uid,
+            primary_key,
+            settings=settings,
+            wait=wait,
+            timeout_in_ms=timeout_in_ms,
+            plugins=plugins,
         )

     def create_snapshot(self) -> TaskInfo:

@@ -1327,6 +1349,7 @@ class Client(BaseClient):
             warn(
                 "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
                 DeprecationWarning,
+                stacklevel=2,
             )
         response = self._http_requests.post("keys", json.loads(key.json(by_alias=True)))  # type: ignore[attr-defined]

@@ -1482,6 +1505,7 @@ class Client(BaseClient):
             warn(
                 "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
                 DeprecationWarning,
+                stacklevel=2,
             )
         response = self._http_requests.post(
             url,

@@ -1701,14 +1725,14 @@ class Client(BaseClient):

         Args:

-            uids: A list of task UIDs to
-            index_uids: A list of index UIDs for which to
-            statuses: A list of statuses to
-            types: A list of types to
-            before_enqueued_at:
-            after_enqueued_at:
-            before_started_at:
-            after_finished_at:
+            uids: A list of task UIDs to delete.
+            index_uids: A list of index UIDs for which to delete tasks.
+            statuses: A list of statuses to delete.
+            types: A list of types to delete.
+            before_enqueued_at: Delete tasks that were enqueued before the specified date time.
+            after_enqueued_at: Delete tasks that were enqueued after the specified date time.
+            before_started_at: Delete tasks that were started before the specified date time.
+            after_finished_at: Delete tasks that were finished after the specified date time.

         Returns:

@@ -1875,6 +1899,7 @@ def _build_update_key_payload(key: KeyUpdate) -> JsonDict:
         warn(
             "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
             DeprecationWarning,
+            stacklevel=2,
         )
     return {  # type: ignore[attr-defined]
         k: v
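The new `timeout_in_ms` argument added to `create_index` above bounds how long the client waits for the index-creation (and optional settings-update) tasks. A minimal usage sketch, not part of the diff, assuming a local Meilisearch instance and that `MeilisearchTimeoutError` is exported from `meilisearch_python_sdk.errors`:

from meilisearch_python_sdk import Client
from meilisearch_python_sdk.errors import MeilisearchTimeoutError

client = Client("http://localhost:7700", "masterKey")

try:
    # Wait at most 10 seconds for the index creation task to finish.
    index = client.create_index("movies", timeout_in_ms=10_000)
except MeilisearchTimeoutError:
    # Only the client-side wait timed out; the task may still complete on the server.
    print("Index creation did not finish within 10 seconds")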
{meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/_http_requests.py
RENAMED

@@ -60,7 +60,7 @@ class AsyncHttpRequests:
                 raise MeilisearchApiError(str(err), response) from err
             else:
                 # Fail safe just in case error happens before response is created
-                raise MeilisearchError(str(err))  # pragma: no cover
+                raise MeilisearchError(str(err)) from err  # pragma: no cover

     async def get(self, path: str) -> Response:
         return await self._send_request(self.http_client.get, path)

@@ -132,7 +132,7 @@ class HttpRequests:
                 raise MeilisearchApiError(str(err), response) from err
             else:
                 # Fail safe just in case error happens before response is created
-                raise MeilisearchError(str(err))  # pragma: no cover
+                raise MeilisearchError(str(err)) from err  # pragma: no cover

     def get(self, path: str) -> Response:
         return self._send_request(self.http_client.get, path)
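The only change in _http_requests.py is adding `from err` to the fail-safe raise, which chains the original exception as `__cause__` so tracebacks show the underlying error instead of "During handling of the above exception, another exception occurred". A generic illustration of the pattern, not SDK code:

class MeilisearchError(Exception):
    ...  # stand-in for the SDK's error type

def send_request() -> None:
    try:
        raise ConnectionError("connection refused")
    except ConnectionError as err:
        # Explicit chaining: the ConnectionError becomes __cause__ of the new error.
        raise MeilisearchError(str(err)) from err

try:
    send_request()
except MeilisearchError as exc:
    assert isinstance(exc.__cause__, ConnectionError)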
meilisearch_python_sdk-2.9.0/meilisearch_python_sdk/_version.py

@@ -0,0 +1 @@
+VERSION = "2.9.0"
{meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/decorators.py
RENAMED
@@ -68,7 +68,7 @@ def async_add_documents(
         >>> # with `ConnectionInfo`
         >>> @async_add_documents(
                 index_name="movies",
-                connection_info=ConnectionInfo(url="http://localhost:7700", api_key="masterKey",
+                connection_info=ConnectionInfo(url="http://localhost:7700", api_key="masterKey"),
             )
         >>> async def my_function() -> list[dict[str, Any]]:
         >>>     return [{"id": 1, "title": "Test 1"}, {"id": 2, "title": "Test 2"}]
{meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/index.py
RENAMED
@@ -6,7 +6,7 @@ from csv import DictReader
 from datetime import datetime
 from functools import cached_property, partial
 from pathlib import Path
-from typing import Any, Generator, MutableMapping, Sequence
+from typing import TYPE_CHECKING, Any, Generator, MutableMapping, Sequence
 from urllib.parse import urlencode
 from warnings import warn

@@ -26,9 +26,11 @@ from meilisearch_python_sdk.models.settings import (
     Faceting,
     HuggingFaceEmbedder,
     MeilisearchSettings,
+    OllamaEmbedder,
     OpenAiEmbedder,
     Pagination,
     ProximityPrecision,
+    RestEmbedder,
     TypoTolerance,
     UserProvidedEmbedder,
 )

@@ -45,7 +47,9 @@ from meilisearch_python_sdk.plugins import (
     Plugin,
     PostSearchPlugin,
 )
-
+
+if TYPE_CHECKING:  # pragma: no cover
+    from meilisearch_python_sdk.types import Filter, JsonDict, JsonMapping


 class _BaseIndex:

@@ -613,6 +617,7 @@ class AsyncIndex(_BaseIndex):
         *,
         settings: MeilisearchSettings | None = None,
         wait: bool = True,
+        timeout_in_ms: int | None = None,
         plugins: AsyncIndexPlugins | None = None,
     ) -> AsyncIndex:
         """Creates a new index.

@@ -634,6 +639,9 @@ class AsyncIndex(_BaseIndex):
             wait: If set to True and settings are being updated, the index will be returned after
                 the settings update has completed. If False it will not wait for settings to complete.
                 Default: True
+            timeout_in_ms: Amount of time in milliseconds to wait before raising a
+                MeilisearchTimeoutError. `None` can also be passed to wait indefinitely. Be aware that
+                if the `None` option is used the wait time could be very long. Defaults to None.
             plugins: Optional plugins can be provided to extend functionality.

         Returns:

@@ -659,7 +667,9 @@ class AsyncIndex(_BaseIndex):
         url = "indexes"
         http_request = AsyncHttpRequests(http_client)
         response = await http_request.post(url, payload)
-        await async_wait_for_task(
+        await async_wait_for_task(
+            http_client, response.json()["taskUid"], timeout_in_ms=timeout_in_ms
+        )

         index_response = await http_request.get(f"{url}/{uid}")
         index_dict = index_response.json()

@@ -675,7 +685,9 @@ class AsyncIndex(_BaseIndex):
         if settings:
             settings_task = await index.update_settings(settings)
             if wait:
-                await async_wait_for_task(
+                await async_wait_for_task(
+                    http_client, settings_task.task_uid, timeout_in_ms=timeout_in_ms
+                )

         return index

@@ -710,7 +722,7 @@ class AsyncIndex(_BaseIndex):
         limit: int = 20,
         filter: Filter | None = None,
         facets: list[str] | None = None,
-        attributes_to_retrieve: list[str] =
+        attributes_to_retrieve: list[str] | None = None,
         attributes_to_crop: list[str] | None = None,
         crop_length: int = 200,
         attributes_to_highlight: list[str] | None = None,

@@ -796,7 +808,6 @@ class AsyncIndex(_BaseIndex):
             >>> index = client.index("movies")
             >>> search_results = await index.search("Tron")
         """
-
         body = _process_search_parameters(
             q=query,
             offset=offset,

@@ -961,7 +972,7 @@ class AsyncIndex(_BaseIndex):
         limit: int = 20,
         filter: Filter | None = None,
         facets: list[str] | None = None,
-        attributes_to_retrieve: list[str] =
+        attributes_to_retrieve: list[str] | None = None,
         attributes_to_crop: list[str] | None = None,
         crop_length: int = 200,
         attributes_to_highlight: list[str] | None = None,

@@ -2809,7 +2820,10 @@ class AsyncIndex(_BaseIndex):
         settings = MeilisearchSettings(**response_json)

         if response_json.get("embedders"):
-
+            # TODO: Add back after embedder setting issue fixed https://github.com/meilisearch/meilisearch/issues/4585
+            settings.embedders = _embedder_json_to_settings_model(  # pragma: no cover
+                response_json["embedders"]
+            )

         return settings

@@ -2865,6 +2879,7 @@ class AsyncIndex(_BaseIndex):
             warn(
                 "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
                 DeprecationWarning,
+                stacklevel=2,
             )
             body_dict = {k: v for k, v in body.dict(by_alias=True).items() if v is not None}  # type: ignore[attr-defined]

@@ -3006,7 +3021,7 @@ class AsyncIndex(_BaseIndex):
         response = await self._http_requests.get(f"{self._settings_url}/distinct-attribute")

         if not response.json():
-            None
+            return None

         return response.json()

@@ -3602,6 +3617,7 @@ class AsyncIndex(_BaseIndex):
             warn(
                 "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
                 DeprecationWarning,
+                stacklevel=2,
             )
         response = await self._http_requests.patch(
             f"{self._settings_url}/typo-tolerance",

@@ -3691,6 +3707,7 @@ class AsyncIndex(_BaseIndex):
             warn(
                 "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
                 DeprecationWarning,
+                stacklevel=2,
             )
         response = await self._http_requests.patch(
             f"{self._settings_url}/faceting", faceting.dict(by_alias=True), compress=compress

@@ -3779,6 +3796,7 @@ class AsyncIndex(_BaseIndex):
             warn(
                 "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
                 DeprecationWarning,
+                stacklevel=2,
             )
         response = await self._http_requests.patch(
             f"{self._settings_url}/pagination", settings.dict(by_alias=True), compress=compress

@@ -3965,6 +3983,84 @@ class AsyncIndex(_BaseIndex):

         return TaskInfo(**response.json())

+    async def get_search_cutoff_ms(self) -> int | None:
+        """Get search cutoff time in ms.
+
+        Returns:
+
+            Integer representing the search cutoff time in ms, or None.
+
+        Raises:
+
+            MeilisearchCommunicationError: If there was an error communicating with the server.
+            MeilisearchApiError: If the Meilisearch API returned an error.
+
+        Examples:
+
+            >>> from meilisearch_async_client import AsyncClient
+            >>> async with AsyncClient("http://localhost.com", "masterKey") as client:
+            >>>     index = client.index("movies")
+            >>>     search_cutoff_ms_settings = await index.get_search_cutoff_ms()
+        """
+        response = await self._http_requests.get(f"{self._settings_url}/search-cutoff-ms")
+
+        return response.json()
+
+    async def update_search_cutoff_ms(
+        self, search_cutoff_ms: int, *, compress: bool = False
+    ) -> TaskInfo:
+        """Update the search cutoff for an index.
+
+        Args:
+
+            search_cutoff_ms: Integer value of the search cutoff time in ms.
+            compress: If set to True the data will be sent in gzip format. Defaults to False.
+
+        Returns:
+
+            The details of the task status.
+
+        Raises:
+
+            MeilisearchCommunicationError: If there was an error communicating with the server.
+            MeilisearchApiError: If the Meilisearch API returned an error.
+
+        Examples:
+
+            >>> from meilisearch_python_sdk import AsyncClient
+            >>> async with AsyncClient("http://localhost.com", "masterKey") as client:
+            >>>     index = client.index("movies")
+            >>>     await index.update_search_cutoff_ms(100)
+        """
+        response = await self._http_requests.put(
+            f"{self._settings_url}/search-cutoff-ms", search_cutoff_ms, compress=compress
+        )
+
+        return TaskInfo(**response.json())
+
+    async def reset_search_cutoff_ms(self) -> TaskInfo:
+        """Reset the search cutoff time to the default value.
+
+        Returns:
+
+            The details of the task status.
+
+        Raises:
+
+            MeilisearchCommunicationError: If there was an error communicating with the server.
+            MeilisearchApiError: If the Meilisearch API returned an error.
+
+        Examples:
+
+            >>> from meilisearch_async_client import AsyncClient
+            >>> async with AsyncClient("http://localhost.com", "masterKey") as client:
+            >>>     index = client.index("movies")
+            >>>     await index.reset_search_cutoff_ms()
+        """
+        response = await self._http_requests.delete(f"{self._settings_url}/search-cutoff-ms")
+
+        return TaskInfo(**response.json())
+
     async def get_word_dictionary(self) -> list[str]:
         """Get word dictionary settings for the index.

@@ -4186,6 +4282,7 @@ class AsyncIndex(_BaseIndex):
             warn(
                 "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
                 DeprecationWarning,
+                stacklevel=2,
             )
             payload[key] = {
                 k: v for k, v in embedder.dict(by_alias=True).items() if v is not None

@@ -4197,7 +4294,8 @@ class AsyncIndex(_BaseIndex):

         return TaskInfo(**response.json())

-
+    # TODO: Add back after embedder setting issue fixed https://github.com/meilisearch/meilisearch/issues/4585
+    async def reset_embedders(self) -> TaskInfo:  # pragma: no cover
         """Reset an index's embedders settings to the default value.

         Returns:

@@ -4696,6 +4794,7 @@ class Index(_BaseIndex):
         *,
         settings: MeilisearchSettings | None = None,
         wait: bool = True,
+        timeout_in_ms: int | None = None,
         plugins: IndexPlugins | None = None,
     ) -> Index:
         """Creates a new index.

@@ -4717,6 +4816,9 @@ class Index(_BaseIndex):
             wait: If set to True and settings are being updated, the index will be returned after
                 the settings update has completed. If False it will not wait for settings to complete.
                 Default: True
+            timeout_in_ms: Amount of time in milliseconds to wait before raising a
+                MeilisearchTimeoutError. `None` can also be passed to wait indefinitely. Be aware that
+                if the `None` option is used the wait time could be very long. Defaults to None.
             plugins: Optional plugins can be provided to extend functionality.

         Returns:

@@ -4742,7 +4844,7 @@ class Index(_BaseIndex):
         url = "indexes"
         http_request = HttpRequests(http_client)
         response = http_request.post(url, payload)
-        wait_for_task(http_client, response.json()["taskUid"], timeout_in_ms=
+        wait_for_task(http_client, response.json()["taskUid"], timeout_in_ms=timeout_in_ms)
         index_response = http_request.get(f"{url}/{uid}")
         index_dict = index_response.json()
         index = cls(

@@ -4757,7 +4859,7 @@ class Index(_BaseIndex):
         if settings:
             settings_task = index.update_settings(settings)
             if wait:
-                wait_for_task(http_client, settings_task.task_uid, timeout_in_ms=
+                wait_for_task(http_client, settings_task.task_uid, timeout_in_ms=timeout_in_ms)

         return index

@@ -4792,7 +4894,7 @@ class Index(_BaseIndex):
         limit: int = 20,
         filter: Filter | None = None,
         facets: list[str] | None = None,
-        attributes_to_retrieve: list[str] =
+        attributes_to_retrieve: list[str] | None = None,
         attributes_to_crop: list[str] | None = None,
         crop_length: int = 200,
         attributes_to_highlight: list[str] | None = None,

@@ -4950,7 +5052,7 @@ class Index(_BaseIndex):
         limit: int = 20,
         filter: Filter | None = None,
         facets: list[str] | None = None,
-        attributes_to_retrieve: list[str] =
+        attributes_to_retrieve: list[str] | None = None,
         attributes_to_crop: list[str] | None = None,
         crop_length: int = 200,
         attributes_to_highlight: list[str] | None = None,

@@ -6239,7 +6341,10 @@ class Index(_BaseIndex):
         settings = MeilisearchSettings(**response_json)

         if response_json.get("embedders"):
-
+            # TODO: Add back after embedder setting issue fixed https://github.com/meilisearch/meilisearch/issues/4585
+            settings.embedders = _embedder_json_to_settings_model(  # pragma: no cover
+                response_json["embedders"]
+            )

         return settings

@@ -6293,6 +6398,7 @@ class Index(_BaseIndex):
             warn(
                 "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
                 DeprecationWarning,
+                stacklevel=2,
             )
             body_dict = {k: v for k, v in body.dict(by_alias=True).items() if v is not None}  # type: ignore[attr-defined]

@@ -6432,7 +6538,7 @@ class Index(_BaseIndex):
         response = self._http_requests.get(f"{self._settings_url}/distinct-attribute")

         if not response.json():
-            None
+            return None

         return response.json()

@@ -7019,6 +7125,7 @@ class Index(_BaseIndex):
             warn(
                 "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
                 DeprecationWarning,
+                stacklevel=2,
             )
         response = self._http_requests.patch(
             f"{self._settings_url}/typo-tolerance",

@@ -7108,6 +7215,7 @@ class Index(_BaseIndex):
             warn(
                 "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
                 DeprecationWarning,
+                stacklevel=2,
             )
         response = self._http_requests.patch(
             f"{self._settings_url}/faceting", faceting.dict(by_alias=True), compress=compress

@@ -7196,6 +7304,7 @@ class Index(_BaseIndex):
             warn(
                 "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
                 DeprecationWarning,
+                stacklevel=2,
             )
         response = self._http_requests.patch(
             f"{self._settings_url}/pagination", settings.dict(by_alias=True), compress=compress

@@ -7382,6 +7491,82 @@ class Index(_BaseIndex):

         return TaskInfo(**response.json())

+    def get_search_cutoff_ms(self) -> int | None:
+        """Get search cutoff time in ms.
+
+        Returns:
+
+            Integer representing the search cutoff time in ms, or None.
+
+        Raises:
+
+            MeilisearchCommunicationError: If there was an error communicating with the server.
+            MeilisearchApiError: If the Meilisearch API returned an error.
+
+        Examples:
+
+            >>> from meilisearch_async_client import Client
+            >>> client = Client("http://localhost.com", "masterKey")
+            >>> index = client.index("movies")
+            >>> search_cutoff_ms_settings = index.get_search_cutoff_ms()
+        """
+        response = self._http_requests.get(f"{self._settings_url}/search-cutoff-ms")
+
+        return response.json()
+
+    def update_search_cutoff_ms(self, search_cutoff_ms: int, *, compress: bool = False) -> TaskInfo:
+        """Update the search cutoff for an index.
+
+        Args:
+
+            search_cutoff_ms: Integer value of the search cutoff time in ms.
+            compress: If set to True the data will be sent in gzip format. Defaults to False.
+
+        Returns:
+
+            Task to track the action.
+
+        Raises:
+
+            MeilisearchCommunicationError: If there was an error communicating with the server.
+            MeilisearchApiError: If the Meilisearch API returned an error.
+
+        Examples:
+
+            >>> from meilisearch_python_sdk import Client
+            >>> client = Client("http://localhost.com", "masterKey")
+            >>> index = client.index("movies")
+            >>> index.update_search_cutoff_ms(100)
+        """
+        response = self._http_requests.put(
+            f"{self._settings_url}/search-cutoff-ms", search_cutoff_ms, compress=compress
+        )
+
+        return TaskInfo(**response.json())
+
+    def reset_search_cutoff_ms(self) -> TaskInfo:
+        """Reset the search cutoff time to the default value.
+
+        Returns:
+
+            The details of the task status.
+
+        Raises:
+
+            MeilisearchCommunicationError: If there was an error communicating with the server.
+            MeilisearchApiError: If the Meilisearch API returned an error.
+
+        Examples:
+
+            >>> from meilisearch_async_client import Client
+            >>> client = Client("http://localhost.com", "masterKey")
+            >>> index = client.index("movies")
+            >>> index.reset_search_cutoff_ms()
+        """
+        response = self._http_requests.delete(f"{self._settings_url}/search-cutoff-ms")
+
+        return TaskInfo(**response.json())
+
     def get_word_dictionary(self) -> list[str]:
         """Get word dictionary settings for the index.

@@ -7599,6 +7784,7 @@ class Index(_BaseIndex):
             warn(
                 "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
                 DeprecationWarning,
+                stacklevel=2,
             )
             payload[key] = {
                 k: v for k, v in embedder.dict(by_alias=True).items() if v is not None

@@ -7610,7 +7796,8 @@ class Index(_BaseIndex):

         return TaskInfo(**response.json())

-
+    # TODO: Add back after embedder setting issue fixed https://github.com/meilisearch/meilisearch/issues/4585
+    def reset_embedders(self) -> TaskInfo:  # pragma: no cover
         """Reset an index's embedders settings to the default value.

         Returns:

@@ -7797,7 +7984,7 @@ def _process_search_parameters(
     limit: int = 20,
     filter: Filter | None = None,
     facets: list[str] | None = None,
-    attributes_to_retrieve: list[str] =
+    attributes_to_retrieve: list[str] | None = None,
     attributes_to_crop: list[str] | None = None,
     crop_length: int = 200,
     attributes_to_highlight: list[str] | None = None,

@@ -7815,6 +8002,9 @@ def _process_search_parameters(
     vector: list[float] | None = None,
     hybrid: Hybrid | None = None,
 ) -> JsonDict:
+    if attributes_to_retrieve is None:
+        attributes_to_retrieve = ["*"]
+
     body: JsonDict = {
         "q": q,
         "offset": offset,

@@ -7856,6 +8046,7 @@ def _process_search_parameters(
         warn(
             "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
             DeprecationWarning,
+            stacklevel=2,
         )
         body["hybrid"] = hybrid.dict(by_alias=True)  # type: ignore[attr-defined]

@@ -7866,34 +8057,58 @@ def _build_encoded_url(base_url: str, params: JsonMapping) -> str:
     return f"{base_url}?{urlencode(params)}"


-
+# TODO: Add back after embedder setting issue fixed https://github.com/meilisearch/meilisearch/issues/4585
+def _embedder_json_to_embedders_model(  # pragma: no cover
+    embedder_json: JsonDict | None,
+) -> Embedders | None:
     if not embedder_json:  # pragma: no cover
         return None

-    embedders: dict[
+    embedders: dict[
+        str,
+        OpenAiEmbedder | HuggingFaceEmbedder | OllamaEmbedder | RestEmbedder | UserProvidedEmbedder,
+    ] = {}
     for k, v in embedder_json.items():
         if v.get("source") == "openAi":
             embedders[k] = OpenAiEmbedder(**v)
         elif v.get("source") == "huggingFace":
             embedders[k] = HuggingFaceEmbedder(**v)
+        elif v.get("source") == "ollama":
+            embedders[k] = OllamaEmbedder(**v)
+        elif v.get("source") == "rest":
+            embedders[k] = RestEmbedder(**v)
         else:
             embedders[k] = UserProvidedEmbedder(**v)

     return Embedders(embedders=embedders)


-
+# TODO: Add back after embedder setting issue fixed https://github.com/meilisearch/meilisearch/issues/4585
+def _embedder_json_to_settings_model(  # pragma: no cover
     embedder_json: JsonDict | None,
-) ->
+) -> (
+    dict[
+        str,
+        OpenAiEmbedder | HuggingFaceEmbedder | OllamaEmbedder | RestEmbedder | UserProvidedEmbedder,
+    ]
+    | None
+):
     if not embedder_json:  # pragma: no cover
         return None

-    embedders: dict[
+    embedders: dict[
+        str,
+        OpenAiEmbedder | HuggingFaceEmbedder | OllamaEmbedder | RestEmbedder | UserProvidedEmbedder,
+    ] = {}
     for k, v in embedder_json.items():
         if v.get("source") == "openAi":
             embedders[k] = OpenAiEmbedder(**v)
         elif v.get("source") == "huggingFace":
             embedders[k] = HuggingFaceEmbedder(**v)
+        elif v.get("source") == "ollama":
+            embedders[k] = OllamaEmbedder(**v)
+        elif v.get("source") == "rest":
+            embedders[k] = RestEmbedder(**v)
         else:
             embedders[k] = UserProvidedEmbedder(**v)

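Among the index.py changes above, the largest additions are the `get_search_cutoff_ms`, `update_search_cutoff_ms`, and `reset_search_cutoff_ms` methods wrapping the `/settings/search-cutoff-ms` sub-route. A short sketch of the sync variant, not part of the diff, assuming a local Meilisearch instance:

from meilisearch_python_sdk import Client

client = Client("http://localhost:7700", "masterKey")
index = client.index("movies")

task = index.update_search_cutoff_ms(150)  # cap query processing at 150 ms
# The new value is visible once the settings task has been processed.
print(index.get_search_cutoff_ms())
index.reset_search_cutoff_ms()  # back to the server default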
{meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/models/client.py
RENAMED

@@ -25,6 +25,7 @@ class ClientStats(CamelBase):
             warn(
                 "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
                 DeprecationWarning,
+                stacklevel=2,
             )

     @pydantic.validator("last_update", pre=True)

@@ -53,6 +54,7 @@ class _KeyBase(CamelBase):
             warn(
                 "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
                 DeprecationWarning,
+                stacklevel=2,
             )

     @pydantic.validator("expires_at", pre=True)

@@ -98,6 +100,7 @@ class Key(_KeyBase):
             warn(
                 "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
                 DeprecationWarning,
+                stacklevel=2,
             )

     @pydantic.validator("created_at", pre=True)

@@ -130,6 +133,7 @@ class KeyCreate(CamelBase):
             warn(
                 "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
                 DeprecationWarning,
+                stacklevel=2,
             )

     class Config:

@@ -159,6 +163,7 @@ class KeyUpdate(CamelBase):
             warn(
                 "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
                 DeprecationWarning,
+                stacklevel=2,
             )

     class Config:
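The repeated `stacklevel=2` additions in this release make each `DeprecationWarning` point at the caller's line rather than at the SDK's internal `warn(...)` call. A generic illustration, not SDK code:

import warnings

def old_api() -> None:
    # stacklevel=2 attributes the warning to whoever called old_api().
    warnings.warn("old_api is deprecated", DeprecationWarning, stacklevel=2)

old_api()  # the warning is reported against this line, not the warn() call inside old_api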
{meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/models/search.py
RENAMED

@@ -60,7 +60,7 @@ class SearchResults(CamelBase):
     total_hits: Optional[int] = None
     page: Optional[int] = None
     hits_per_page: Optional[int] = None
-
+    semantic_hit_count: Optional[int] = None


 class SearchResultsWithUID(SearchResults):
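The new `semantic_hit_count` field on `SearchResults` is populated for hybrid (keyword plus semantic) searches. A hedged sketch; the `Hybrid` import path and its `semantic_ratio`/`embedder` fields are assumptions based on the Meilisearch hybrid-search API and are not shown in this diff:

from meilisearch_python_sdk import Client
from meilisearch_python_sdk.models.search import Hybrid  # assumed location

client = Client("http://localhost:7700", "masterKey")
index = client.index("movies")

results = index.search(
    "space opera",
    hybrid=Hybrid(semantic_ratio=0.5, embedder="default"),  # field names assumed
)
print(results.semantic_hit_count)  # how many hits came from the semantic side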
{meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/models/settings.py
RENAMED

@@ -43,6 +43,7 @@ class Faceting(CamelBase):
             warn(
                 "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
                 DeprecationWarning,
+                stacklevel=2,
             )

     @pydantic.validator("sort_facet_values_by")  # type: ignore[attr-defined]

@@ -62,12 +63,18 @@ class Pagination(CamelBase):
     max_total_hits: int


+class Distribution(CamelBase):
+    mean: float
+    sigma: float
+
+
 class OpenAiEmbedder(CamelBase):
     source: str = "openAi"
     model: Optional[str] = None  # Defaults to text-embedding-ada-002
     dimensions: Optional[int] = None  # Uses the model default
     api_key: Optional[str] = None  # Can be provided through a CLI option or environment variable
     document_template: Optional[str] = None
+    distribution: Optional[Distribution] = None


 class HuggingFaceEmbedder(CamelBase):

@@ -75,15 +82,45 @@ class HuggingFaceEmbedder(CamelBase):
     model: Optional[str] = None  # Defaults to BAAI/bge-base-en-v1.5
     revision: Optional[str] = None
     document_template: Optional[str] = None
+    distribution: Optional[Distribution] = None
+
+
+class OllamaEmbedder(CamelBase):
+    source: str = "ollama"
+    url: Optional[str] = None
+    api_key: Optional[str] = None
+    model: str
+    document_template: Optional[str] = None
+    distribution: Optional[Distribution] = None
+
+
+class RestEmbedder(CamelBase):
+    source: str = "rest"
+    url: str
+    api_key: Optional[str] = None
+    dimensions: int
+    document_template: Optional[str] = None
+    input_field: Optional[List[str]] = None
+    input_type: str = "text"
+    query: JsonDict = {}
+    path_to_embeddings: Optional[List[str]] = None
+    embedding_object: Optional[List[str]] = None
+    distribution: Optional[Distribution] = None


 class UserProvidedEmbedder(CamelBase):
     source: str = "userProvided"
     dimensions: int
+    distribution: Optional[Distribution] = None


 class Embedders(CamelBase):
-    embedders: Dict[
+    embedders: Dict[
+        str,
+        Union[
+            OpenAiEmbedder, HuggingFaceEmbedder, OllamaEmbedder, RestEmbedder, UserProvidedEmbedder
+        ],
+    ]


 class ProximityPrecision(str, Enum):

@@ -106,7 +143,17 @@ class MeilisearchSettings(CamelBase):
     proximity_precision: Optional[ProximityPrecision] = None
     separator_tokens: Optional[List[str]] = None
     non_separator_tokens: Optional[List[str]] = None
+    search_cutoff_ms: Optional[int] = None
     dictionary: Optional[List[str]] = None
     embedders: Optional[
-        Dict[
+        Dict[
+            str,
+            Union[
+                OpenAiEmbedder,
+                HuggingFaceEmbedder,
+                OllamaEmbedder,
+                RestEmbedder,
+                UserProvidedEmbedder,
+            ],
+        ]
     ] = None  # Optional[Embedders] = None
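models/settings.py gains `OllamaEmbedder`, `RestEmbedder`, an optional `Distribution` on every embedder, and a `search_cutoff_ms` field on `MeilisearchSettings`. A configuration sketch, not part of the diff; the URL, model name, template string, and distribution values are illustrative only:

from meilisearch_python_sdk.models.settings import (
    Distribution,
    MeilisearchSettings,
    OllamaEmbedder,
    UserProvidedEmbedder,
)

settings = MeilisearchSettings(
    search_cutoff_ms=150,
    embedders={
        "ollama": OllamaEmbedder(
            url="http://localhost:11434/api/embeddings",
            model="nomic-embed-text",
            document_template="{{doc.title}}",
            distribution=Distribution(mean=0.7, sigma=0.3),
        ),
        "manual": UserProvidedEmbedder(dimensions=512),
    },
)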
{meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/models/task.py
RENAMED
@@ -52,6 +52,7 @@ class TaskResult(TaskId):
             warn(
                 "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
                 DeprecationWarning,
+                stacklevel=2,
             )

     @pydantic.validator("enqueued_at", pre=True)

@@ -106,6 +107,7 @@ class TaskInfo(CamelBase):
             warn(
                 "The use of Pydantic less than version 2 is depreciated and will be removed in a future release",
                 DeprecationWarning,
+                stacklevel=2,
             )

     @pydantic.validator("enqueued_at", pre=True)
{meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/meilisearch_python_sdk/plugins.py
RENAMED
@@ -1,11 +1,12 @@
 from __future__ import annotations

 from enum import Enum
-from typing import Any, NamedTuple, Protocol, Sequence
+from typing import TYPE_CHECKING, Any, NamedTuple, Protocol, Sequence

-
-from meilisearch_python_sdk.models.
-from meilisearch_python_sdk.
+if TYPE_CHECKING:  # pragma: no cover
+    from meilisearch_python_sdk.models.search import FacetSearchResults, SearchResults
+    from meilisearch_python_sdk.models.task import TaskInfo
+    from meilisearch_python_sdk.types import JsonDict, JsonMapping


 class AsyncEvent(Enum):
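Moving the model and type imports in plugins.py (and index.py) under `TYPE_CHECKING` keeps them available to type checkers without importing them at runtime, which avoids import cycles and import-time cost. The pattern in generic form, reusing an import the diff actually shows:

from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:  # pragma: no cover
    # Evaluated only by static type checkers, never at runtime.
    from meilisearch_python_sdk.models.task import TaskInfo

def handle(task: TaskInfo) -> None:  # the annotation stays a string at runtime
    ...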
{meilisearch_python_sdk-2.8.0 → meilisearch_python_sdk-2.9.0}/pyproject.toml
RENAMED

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "meilisearch-python-sdk"
-version = "2.8.0"
+version = "2.9.0"
 description = "A Python client providing both async and sync support for the Meilisearch API"
 authors = ["Paul Sanders <psanders1@gmail.com>"]
 license = "MIT"

@@ -76,7 +76,7 @@ target-version = "py38"
 fix = true

 [tool.ruff.lint]
-select=["E", "F", "UP", "I001", "T201", "T203"]
+select=["E", "B", "F", "UP", "I001", "T201", "T203"]
 ignore=[
   # Recommened ignores by ruff when using formatter
   "E501",
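Adding "B" (flake8-bugbear) to the ruff `select` list is likely what prompted the `stacklevel=2` additions throughout this release: bugbear's B028 rule flags `warnings.warn` calls that omit an explicit `stacklevel`.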
meilisearch_python_sdk-2.8.0/meilisearch_python_sdk/_version.py

@@ -1 +0,0 @@
-VERSION = "2.8.0"
Files without changes (renamed from the 2.8.0 to the 2.9.0 path only):

- LICENSE
- meilisearch_python_sdk/__init__.py
- meilisearch_python_sdk/_task.py
- meilisearch_python_sdk/_utils.py
- meilisearch_python_sdk/errors.py
- meilisearch_python_sdk/models/__init__.py
- meilisearch_python_sdk/models/documents.py
- meilisearch_python_sdk/models/health.py
- meilisearch_python_sdk/models/version.py
- meilisearch_python_sdk/py.typed
- meilisearch_python_sdk/types.py