meilisearch_python_sdk-5.5.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- meilisearch_python_sdk/__init__.py +8 -0
- meilisearch_python_sdk/_batch.py +166 -0
- meilisearch_python_sdk/_client.py +2468 -0
- meilisearch_python_sdk/_http_requests.py +197 -0
- meilisearch_python_sdk/_task.py +368 -0
- meilisearch_python_sdk/_utils.py +58 -0
- meilisearch_python_sdk/_version.py +1 -0
- meilisearch_python_sdk/decorators.py +242 -0
- meilisearch_python_sdk/errors.py +75 -0
- meilisearch_python_sdk/index/__init__.py +4 -0
- meilisearch_python_sdk/index/_common.py +296 -0
- meilisearch_python_sdk/index/async_index.py +4891 -0
- meilisearch_python_sdk/index/index.py +3839 -0
- meilisearch_python_sdk/json_handler.py +74 -0
- meilisearch_python_sdk/models/__init__.py +0 -0
- meilisearch_python_sdk/models/batch.py +58 -0
- meilisearch_python_sdk/models/client.py +97 -0
- meilisearch_python_sdk/models/documents.py +12 -0
- meilisearch_python_sdk/models/health.py +5 -0
- meilisearch_python_sdk/models/index.py +46 -0
- meilisearch_python_sdk/models/search.py +126 -0
- meilisearch_python_sdk/models/settings.py +197 -0
- meilisearch_python_sdk/models/task.py +77 -0
- meilisearch_python_sdk/models/version.py +9 -0
- meilisearch_python_sdk/models/webhook.py +24 -0
- meilisearch_python_sdk/plugins.py +124 -0
- meilisearch_python_sdk/py.typed +0 -0
- meilisearch_python_sdk/types.py +8 -0
- meilisearch_python_sdk-5.5.0.dist-info/METADATA +279 -0
- meilisearch_python_sdk-5.5.0.dist-info/RECORD +32 -0
- meilisearch_python_sdk-5.5.0.dist-info/WHEEL +4 -0
- meilisearch_python_sdk-5.5.0.dist-info/licenses/LICENSE +21 -0
```diff
--- /dev/null
+++ b/meilisearch_python_sdk/_http_requests.py
@@ -0,0 +1,197 @@
+from __future__ import annotations
+
+import gzip
+from collections.abc import Callable
+from functools import lru_cache
+from typing import Any
+
+from httpx import (
+    AsyncClient,
+    Client,
+    ConnectError,
+    ConnectTimeout,
+    HTTPError,
+    RemoteProtocolError,
+    Response,
+)
+
+from meilisearch_python_sdk._version import VERSION
+from meilisearch_python_sdk.errors import (
+    MeilisearchApiError,
+    MeilisearchCommunicationError,
+    MeilisearchError,
+)
+from meilisearch_python_sdk.json_handler import BuiltinHandler, OrjsonHandler, UjsonHandler
+
+
+class AsyncHttpRequests:
+    def __init__(
+        self, http_client: AsyncClient, json_handler: BuiltinHandler | OrjsonHandler | UjsonHandler
+    ) -> None:
+        self.http_client = http_client
+        self.json_handler = json_handler
+
+    async def _send_request(
+        self,
+        http_method: Callable,
+        path: str,
+        body: Any | None = None,
+        content_type: str = "application/json",
+        compress: bool = False,
+    ) -> Response:
+        headers = build_headers(content_type, compress)
+
+        try:
+            if body is None:
+                response = await http_method(path)
+            elif content_type == "application/json" and not compress:
+                response = await http_method(
+                    path, content=self.json_handler.dumps(body), headers=headers
+                )
+            else:
+                if body and compress:
+                    if content_type == "application/json":
+                        body = gzip.compress(self.json_handler.dumps(body).encode("utf-8"))
+                    else:
+                        body = gzip.compress((body).encode("utf-8"))
+                response = await http_method(path, content=body, headers=headers)
+
+            response.raise_for_status()
+            return response
+
+        except (ConnectError, ConnectTimeout, RemoteProtocolError) as err:
+            raise MeilisearchCommunicationError(str(err)) from err
+        except HTTPError as err:
+            if "response" in locals():
+                if "application/json" in response.headers.get("content-type", ""):
+                    raise MeilisearchApiError(str(err), response) from err
+                else:
+                    raise
+            else:
+                # Fail safe just in case error happens before response is created
+                raise MeilisearchError(str(err)) from err  # pragma: no cover
+
+    async def get(self, path: str) -> Response:
+        return await self._send_request(self.http_client.get, path)
+
+    async def patch(
+        self,
+        path: str,
+        body: Any | None = None,
+        content_type: str = "application/json",
+        compress: bool = False,
+    ) -> Response:
+        return await self._send_request(self.http_client.patch, path, body, content_type, compress)
+
+    async def post(
+        self,
+        path: str,
+        body: Any | None = None,
+        content_type: str = "application/json",
+        compress: bool = False,
+    ) -> Response:
+        return await self._send_request(self.http_client.post, path, body, content_type, compress)
+
+    async def put(
+        self,
+        path: str,
+        body: Any | None = None,
+        content_type: str = "application/json",
+        compress: bool = False,
+    ) -> Response:
+        return await self._send_request(self.http_client.put, path, body, content_type, compress)
+
+    async def delete(self, path: str, body: dict | None = None) -> Response:
+        return await self._send_request(self.http_client.delete, path, body)
+
+
+class HttpRequests:
+    def __init__(
+        self, http_client: Client, json_handler: BuiltinHandler | OrjsonHandler | UjsonHandler
+    ) -> None:
+        self.http_client = http_client
+        self.json_handler = json_handler
+
+    def _send_request(
+        self,
+        http_method: Callable,
+        path: str,
+        body: Any | None = None,
+        content_type: str = "application/json",
+        compress: bool = False,
+    ) -> Response:
+        headers = build_headers(content_type, compress)
+        try:
+            if body is None:
+                response = http_method(path)
+            elif content_type == "application/json" and not compress:
+                response = http_method(path, content=self.json_handler.dumps(body), headers=headers)
+            else:
+                if body and compress:
+                    if content_type == "application/json":
+                        body = gzip.compress(self.json_handler.dumps(body).encode("utf-8"))
+                    else:
+                        body = gzip.compress((body).encode("utf-8"))
+                response = http_method(path, content=body, headers=headers)
+
+            response.raise_for_status()
+            return response
+
+        except (ConnectError, ConnectTimeout, RemoteProtocolError) as err:
+            raise MeilisearchCommunicationError(str(err)) from err
+        except HTTPError as err:
+            if "response" in locals():
+                if "application/json" in response.headers.get("content-type", ""):
+                    raise MeilisearchApiError(str(err), response) from err
+                else:
+                    raise
+            else:
+                # Fail safe just in case error happens before response is created
+                raise MeilisearchError(str(err)) from err  # pragma: no cover
+
+    def get(self, path: str) -> Response:
+        return self._send_request(self.http_client.get, path)
+
+    def patch(
+        self,
+        path: str,
+        body: Any | None = None,
+        content_type: str = "application/json",
+        compress: bool = False,
+    ) -> Response:
+        return self._send_request(self.http_client.patch, path, body, content_type, compress)
+
+    def post(
+        self,
+        path: str,
+        body: Any | None = None,
+        content_type: str = "application/json",
+        compress: bool = False,
+    ) -> Response:
+        return self._send_request(self.http_client.post, path, body, content_type, compress)
+
+    def put(
+        self,
+        path: str,
+        body: Any | None = None,
+        content_type: str = "application/json",
+        compress: bool = False,
+    ) -> Response:
+        return self._send_request(self.http_client.put, path, body, content_type, compress)
+
+    def delete(self, path: str, body: dict | None = None) -> Response:
+        return self._send_request(self.http_client.delete, path, body)
+
+
+def build_headers(content_type: str, compress: bool) -> dict[str, str]:
+    headers = {"user-agent": user_agent(), "Content-Type": content_type}
+
+    if compress:
+        headers["Content-Encoding"] = "gzip"
+
+    return headers
+
+
+@lru_cache(maxsize=1)
+def user_agent() -> str:
+    return f"Meilisearch Python SDK (v{VERSION})"
```
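Both wrapper classes funnel every verb through `_send_request`, which serializes JSON bodies with the configured handler, optionally gzips them, and converts httpx transport errors into the SDK's own exception types. Below is a minimal sketch of driving the synchronous wrapper directly; the base URL, API key, and index name are illustrative assumptions, and in normal use the SDK's higher-level `Client` wires this up for you:

```python
from httpx import Client

from meilisearch_python_sdk._http_requests import HttpRequests
from meilisearch_python_sdk.json_handler import BuiltinHandler

# Assumed local Meilisearch instance and master key; adjust for your setup.
with Client(
    base_url="http://localhost:7700", headers={"Authorization": "Bearer masterKey"}
) as http_client:
    requests = HttpRequests(http_client, json_handler=BuiltinHandler())

    # JSON body, no compression: serialized by the handler and sent as-is.
    response = requests.post("indexes", body={"uid": "movies"})

    # compress=True routes the serialized body through gzip.compress(), and
    # build_headers() adds the matching Content-Encoding: gzip header.
    requests.put("indexes/movies/documents", body=[{"id": 1}], compress=True)
    print(response.json())
```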
```diff
--- /dev/null
+++ b/meilisearch_python_sdk/_task.py
@@ -0,0 +1,368 @@
+from __future__ import annotations
+
+import asyncio
+import time
+from datetime import datetime
+from typing import TYPE_CHECKING
+from urllib.parse import urlencode
+
+from httpx import AsyncClient as HttpxAsyncClient
+from httpx import Client as HttpxClient
+
+from meilisearch_python_sdk._http_requests import AsyncHttpRequests, HttpRequests
+from meilisearch_python_sdk._utils import get_async_client, get_client
+from meilisearch_python_sdk.errors import MeilisearchTaskFailedError, MeilisearchTimeoutError
+from meilisearch_python_sdk.json_handler import BuiltinHandler, OrjsonHandler, UjsonHandler
+from meilisearch_python_sdk.models.task import TaskInfo, TaskResult, TaskStatus
+
+if TYPE_CHECKING:
+    from meilisearch_python_sdk._client import AsyncClient, Client  # pragma: no cover
+
+
+async def async_cancel_tasks(
+    client: HttpxAsyncClient | AsyncClient,
+    *,
+    uids: list[int] | None = None,
+    index_uids: list[int] | None = None,
+    statuses: list[str] | None = None,
+    types: list[str] | None = None,
+    before_enqueued_at: datetime | None = None,
+    after_enqueued_at: datetime | None = None,
+    before_started_at: datetime | None = None,
+    after_finished_at: datetime | None = None,
+) -> TaskInfo:
+    """Cancel a list of enqueued or processing tasks.
+
+    Defaults to cancelling all tasks.
+
+    Args:
+        client: An httpx HttpxAsyncClient or meilisearch_python_sdk AsyncClient instance.
+        uids: A list of task UIDs to cancel.
+        index_uids: A list of index UIDs for which to cancel tasks.
+        statuses: A list of statuses to cancel.
+        types: A list of types to cancel.
+        before_enqueued_at: Cancel tasks that were enqueued before the specified date time.
+        after_enqueued_at: Cancel tasks that were enqueued after the specified date time.
+        before_started_at: Cancel tasks that were started before the specified date time.
+        after_finished_at: Cancel tasks that were finished after the specified date time.
+
+    Returns:
+        The details of the task.
+
+    Raises:
+        MeilisearchCommunicationError: If there was an error communicating with the server.
+        MeilisearchApiError: If the Meilisearch API returned an error.
+        MeilisearchTimeoutError: If the connection times out.
+
+    Examples:
+        >>> from meilisearch_python_sdk import AsyncClient
+        >>> from meilisearch_python_sdk.task import cancel_tasks
+        >>>
+        >>> async with AsyncClient("http://localhost.com", "masterKey") as client:
+        >>>     await cancel_tasks(client, uids=[1, 2])
+    """
+    parameters = _process_params(
+        uids,
+        index_uids,
+        statuses,
+        types,
+        before_enqueued_at,
+        after_enqueued_at,
+        before_started_at,
+        after_finished_at,
+    )
+
+    if not parameters:
+        # Cancel all tasks if no parameters provided
+        parameters["statuses"] = "enqueued,processing"
+
+    url = f"tasks/cancel?{urlencode(parameters)}"
+    client_ = get_async_client(client)
+    response = await client_.post(url)
+
+    return TaskInfo(**response.json())
+
+
+async def async_delete_tasks(
+    client: HttpxAsyncClient | AsyncClient,
+    *,
+    uids: list[int] | None = None,
+    index_uids: list[int] | None = None,
+    statuses: list[str] | None = None,
+    types: list[str] | None = None,
+    before_enqueued_at: datetime | None = None,
+    after_enqueued_at: datetime | None = None,
+    before_started_at: datetime | None = None,
+    after_finished_at: datetime | None = None,
+) -> TaskInfo:
+    parameters = _process_params(
+        uids,
+        index_uids,
+        statuses,
+        types,
+        before_enqueued_at,
+        after_enqueued_at,
+        before_started_at,
+        after_finished_at,
+    )
+
+    if not parameters:
+        # delete all tasks if no parameters provided
+        parameters["statuses"] = "canceled,enqueued,failed,processing,succeeded"
+
+    url = f"tasks?{urlencode(parameters)}"
+    client_ = get_async_client(client)
+    response = await client_.delete(url)
+
+    return TaskInfo(**response.json())
+
+
+async def async_get_task(
+    client: HttpxAsyncClient | AsyncClient,
+    task_id: int,
+) -> TaskResult:
+    client_ = get_async_client(client)
+    response = await client_.get(f"tasks/{task_id}")
+
+    return TaskResult(**response.json())
+
+
+async def async_get_tasks(
+    client: HttpxAsyncClient | AsyncClient,
+    *,
+    index_ids: list[str] | None = None,
+    types: str | list[str] | None = None,
+    reverse: bool | None = None,
+) -> TaskStatus:
+    url = f"tasks?indexUids={','.join(index_ids)}" if index_ids else "tasks"
+    if types:
+        formatted_types = ",".join(types) if isinstance(types, list) else types
+        url = f"{url}&types={formatted_types}" if "?" in url else f"{url}?types={formatted_types}"
+    if reverse:
+        url = (
+            f"{url}&reverse={str(reverse).lower()}"
+            if "?" in url
+            else f"{url}?reverse={str(reverse).lower()}"
+        )
+    client_ = get_async_client(client)
+    response = await client_.get(url)
+
+    return TaskStatus(**response.json())
+
+
+async def async_wait_for_task(
+    client: HttpxAsyncClient | AsyncClient,
+    task_id: int,
+    *,
+    timeout_in_ms: int | None = 5000,
+    interval_in_ms: int = 50,
+    raise_for_status: bool = False,
+) -> TaskResult:
+    client_ = get_async_client(client)
+    handler = _get_json_handler(client)
+    url = f"tasks/{task_id}"
+    http_requests = AsyncHttpRequests(client_, handler)
+    start_time = datetime.now()
+    elapsed_time = 0.0
+
+    if timeout_in_ms:
+        while elapsed_time < timeout_in_ms:
+            response = await http_requests.get(url)
+            status = TaskResult(**response.json())
+            if status.status in ("succeeded", "failed"):
+                if raise_for_status and status.status == "failed":
+                    raise MeilisearchTaskFailedError(f"Task {task_id} failed")
+                return status
+            await asyncio.sleep(interval_in_ms / 1000)
+            time_delta = datetime.now() - start_time
+            elapsed_time = time_delta.seconds * 1000 + time_delta.microseconds / 1000
+        raise MeilisearchTimeoutError(
+            f"timeout of {timeout_in_ms}ms has exceeded on process {task_id} when waiting for pending update to resolve."
+        )
+    else:
+        while True:
+            response = await http_requests.get(url)
+            status = TaskResult(**response.json())
+            if status.status in ("succeeded", "failed"):
+                if raise_for_status and status.status == "failed":
+                    raise MeilisearchTaskFailedError(f"Task {task_id} failed")
+                return status
+            await asyncio.sleep(interval_in_ms / 1000)
+
+
+def cancel_tasks(
+    client: HttpxClient | Client,
+    *,
+    uids: list[int] | None = None,
+    index_uids: list[int] | None = None,
+    statuses: list[str] | None = None,
+    types: list[str] | None = None,
+    before_enqueued_at: datetime | None = None,
+    after_enqueued_at: datetime | None = None,
+    before_started_at: datetime | None = None,
+    after_finished_at: datetime | None = None,
+) -> TaskInfo:
+    parameters = _process_params(
+        uids,
+        index_uids,
+        statuses,
+        types,
+        before_enqueued_at,
+        after_enqueued_at,
+        before_started_at,
+        after_finished_at,
+    )
+
+    if not parameters:
+        # Cancel all tasks if no parameters provided
+        parameters["statuses"] = "enqueued,processing"
+
+    url = f"tasks/cancel?{urlencode(parameters)}"
+    client_ = get_client(client)
+    response = client_.post(url)
+
+    return TaskInfo(**response.json())
+
+
+def delete_tasks(
+    client: HttpxClient | Client,
+    *,
+    uids: list[int] | None = None,
+    index_uids: list[int] | None = None,
+    statuses: list[str] | None = None,
+    types: list[str] | None = None,
+    before_enqueued_at: datetime | None = None,
+    after_enqueued_at: datetime | None = None,
+    before_started_at: datetime | None = None,
+    after_finished_at: datetime | None = None,
+) -> TaskInfo:
+    parameters = _process_params(
+        uids,
+        index_uids,
+        statuses,
+        types,
+        before_enqueued_at,
+        after_enqueued_at,
+        before_started_at,
+        after_finished_at,
+    )
+
+    if not parameters:
+        # delete all tasks if no parameters provided
+        parameters["statuses"] = "canceled,enqueued,failed,processing,succeeded"
+
+    url = f"tasks?{urlencode(parameters)}"
+    client_ = get_client(client)
+    response = client_.delete(url)
+
+    return TaskInfo(**response.json())
+
+
+def get_task(client: HttpxClient | Client, task_id: int) -> TaskResult:
+    client_ = get_client(client)
+    response = client_.get(f"tasks/{task_id}")
+
+    return TaskResult(**response.json())
+
+
+def get_tasks(
+    client: HttpxClient | Client,
+    *,
+    index_ids: list[str] | None = None,
+    types: str | list[str] | None = None,
+    reverse: bool | None = None,
+) -> TaskStatus:
+    url = f"tasks?indexUids={','.join(index_ids)}" if index_ids else "tasks"
+    if types:
+        formatted_types = ",".join(types) if isinstance(types, list) else types
+        url = f"{url}&types={formatted_types}" if "?" in url else f"{url}?types={formatted_types}"
+    if reverse:
+        url = (
+            f"{url}&reverse={str(reverse).lower()}"
+            if "?" in url
+            else f"{url}?reverse={str(reverse).lower()}"
+        )
+    client_ = get_client(client)
+    response = client_.get(url)
+
+    return TaskStatus(**response.json())
+
+
+def wait_for_task(
+    client: HttpxClient | Client,
+    task_id: int,
+    *,
+    timeout_in_ms: int | None = 5000,
+    interval_in_ms: int = 50,
+    raise_for_status: bool = False,
+) -> TaskResult:
+    client_ = get_client(client)
+    handler = _get_json_handler(client)
+    url = f"tasks/{task_id}"
+    http_requests = HttpRequests(client_, json_handler=handler)
+    start_time = datetime.now()
+    elapsed_time = 0.0
+
+    if timeout_in_ms:
+        while elapsed_time < timeout_in_ms:
+            response = http_requests.get(url)
+            status = TaskResult(**response.json())
+            if status.status in ("succeeded", "failed"):
+                if raise_for_status and status.status == "failed":
+                    raise MeilisearchTaskFailedError(f"Task {task_id} failed")
+                return status
+            time.sleep(interval_in_ms / 1000)
+            time_delta = datetime.now() - start_time
+            elapsed_time = time_delta.seconds * 1000 + time_delta.microseconds / 1000
+        raise MeilisearchTimeoutError(
+            f"timeout of {timeout_in_ms}ms has exceeded on process {task_id} when waiting for pending update to resolve."
+        )
+    else:
+        while True:
+            response = http_requests.get(url)
+            status = TaskResult(**response.json())
+            if status.status in ("succeeded", "failed"):
+                if raise_for_status and status.status == "failed":
+                    raise MeilisearchTaskFailedError(f"Task {task_id} failed")
+                return status
+            time.sleep(interval_in_ms / 1000)
+
+
+def _get_json_handler(
+    client: AsyncClient | Client | HttpxAsyncClient | HttpxClient,
+) -> BuiltinHandler | OrjsonHandler | UjsonHandler:
+    if isinstance(client, (HttpxAsyncClient, HttpxClient)):
+        return BuiltinHandler()
+
+    return client.json_handler
+
+
+def _process_params(
+    uids: list[int] | None = None,
+    index_uids: list[int] | None = None,
+    statuses: list[str] | None = None,
+    types: list[str] | None = None,
+    before_enqueued_at: datetime | None = None,
+    after_enqueued_at: datetime | None = None,
+    before_started_at: datetime | None = None,
+    after_finished_at: datetime | None = None,
+) -> dict[str, str]:
+    parameters = {}
+    if uids:
+        parameters["uids"] = ",".join([str(x) for x in uids])
+    if index_uids:
+        parameters["indexUids"] = ",".join([str(x) for x in index_uids])
+    if statuses:
+        parameters["statuses"] = ",".join(statuses)
+    if types:
+        parameters["types"] = ",".join(types)
+    if before_enqueued_at:
+        parameters["beforeEnqueuedAt"] = f"{before_enqueued_at.isoformat()}Z"
+    if after_enqueued_at:
+        parameters["afterEnqueuedAt"] = f"{after_enqueued_at.isoformat()}Z"
+    if before_started_at:
+        parameters["beforeStartedAt"] = f"{before_started_at.isoformat()}Z"
+    if after_finished_at:
+        parameters["afterFinishedAt"] = f"{after_finished_at.isoformat()}Z"
+
+    return parameters
```
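The module pairs each async helper with a synchronous twin, and `wait_for_task`/`async_wait_for_task` poll `tasks/{task_id}` until the task settles or the timeout elapses. A hedged sketch of the synchronous helpers against a raw httpx client follows; the server address, key, index name, and task uid are illustrative, and the `_task` import path is an assumption based on the private module shown in this diff:

```python
from httpx import Client

from meilisearch_python_sdk._task import get_tasks, wait_for_task

with Client(
    base_url="http://localhost:7700", headers={"Authorization": "Bearer masterKey"}
) as client:
    # List tasks for one index; types may be a string or a list of strings.
    # The TaskStatus fields come from models/task.py, which is not shown here.
    tasks = get_tasks(client, index_ids=["movies"], types="documentAdditionOrUpdate")

    # Poll task 0 every 50ms for up to 10s; raise_for_status=True converts a
    # "failed" result into MeilisearchTaskFailedError instead of returning it.
    result = wait_for_task(client, 0, timeout_in_ms=10_000, raise_for_status=True)
    print(result.status)  # "succeeded"
```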
```diff
--- /dev/null
+++ b/meilisearch_python_sdk/_utils.py
@@ -0,0 +1,58 @@
+from __future__ import annotations
+
+import sys
+from datetime import datetime
+from functools import lru_cache
+from typing import TYPE_CHECKING
+
+from httpx import AsyncClient as HttpxAsyncClient
+from httpx import Client as HttpxClient
+
+if TYPE_CHECKING:
+    from meilisearch_python_sdk._client import AsyncClient, Client  # pragma: no cover
+
+
+def get_async_client(
+    client: AsyncClient | HttpxAsyncClient,
+) -> HttpxAsyncClient:
+    if isinstance(client, HttpxAsyncClient):
+        return client
+
+    return client.http_client
+
+
+def get_client(
+    client: Client | HttpxClient,
+) -> HttpxClient:
+    if isinstance(client, HttpxClient):
+        return client
+
+    return client.http_client
+
+
+def iso_to_date_time(iso_date: datetime | str | None) -> datetime | None:
+    """Handle conversion of iso string to datetime.
+
+    The microseconds from Meilisearch are sometimes too long for python to convert so this
+    strips off the last digits to shorten it when that happens.
+    """
+    if not iso_date:
+        return None
+
+    if isinstance(iso_date, datetime):
+        return iso_date
+
+    try:
+        return datetime.strptime(iso_date, "%Y-%m-%dT%H:%M:%S.%fZ")
+    except ValueError:
+        split = iso_date.split(".")
+        if len(split) < 2:
+            raise
+        reduce = len(split[1]) - 6
+        reduced = f"{split[0]}.{split[1][:-reduce]}Z"
+        return datetime.strptime(reduced, "%Y-%m-%dT%H:%M:%S.%fZ")
+
+
+@lru_cache(maxsize=1)
+def use_task_groups() -> bool:
+    return True if sys.version_info >= (3, 11) else False
```
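The `iso_to_date_time` fallback exists because Meilisearch can emit more fractional-second digits than `%f` (which accepts at most six) will parse; the `except ValueError` branch trims the excess and retries. For illustration (the timestamps below are made up):

```python
from meilisearch_python_sdk._utils import iso_to_date_time

# Six fractional digits parse on the first strptime attempt.
print(iso_to_date_time("2024-01-15T10:30:00.123456Z"))     # 2024-01-15 10:30:00.123456

# Nine digits fail the first strptime; the fallback strips "789Z",
# re-appends "Z", and parses the shortened string.
print(iso_to_date_time("2024-01-15T10:30:00.123456789Z"))  # 2024-01-15 10:30:00.123456

print(iso_to_date_time(None))  # None (falsy input short-circuits)
```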
```diff
--- /dev/null
+++ b/meilisearch_python_sdk/_version.py
@@ -0,0 +1 @@
+VERSION = "5.5.0"
```
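This lone constant is what `user_agent()` in `_http_requests.py` interpolates, so every request issued through these wrappers identifies the SDK release:

```python
from meilisearch_python_sdk._http_requests import user_agent

print(user_agent())  # Meilisearch Python SDK (v5.5.0)
```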