elasticsearch 8.17.2__py3-none-any.whl → 8.18.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- elasticsearch/_async/client/__init__.py +174 -79
- elasticsearch/_async/client/_base.py +0 -1
- elasticsearch/_async/client/async_search.py +12 -8
- elasticsearch/_async/client/autoscaling.py +4 -4
- elasticsearch/_async/client/cat.py +26 -26
- elasticsearch/_async/client/ccr.py +186 -72
- elasticsearch/_async/client/cluster.py +38 -19
- elasticsearch/_async/client/connector.py +30 -30
- elasticsearch/_async/client/dangling_indices.py +3 -3
- elasticsearch/_async/client/enrich.py +26 -5
- elasticsearch/_async/client/eql.py +32 -4
- elasticsearch/_async/client/esql.py +62 -6
- elasticsearch/_async/client/features.py +12 -2
- elasticsearch/_async/client/fleet.py +8 -2
- elasticsearch/_async/client/graph.py +1 -1
- elasticsearch/_async/client/ilm.py +23 -22
- elasticsearch/_async/client/indices.py +424 -132
- elasticsearch/_async/client/inference.py +1853 -115
- elasticsearch/_async/client/ingest.py +32 -38
- elasticsearch/_async/client/license.py +51 -16
- elasticsearch/_async/client/logstash.py +3 -3
- elasticsearch/_async/client/migration.py +3 -3
- elasticsearch/_async/client/ml.py +141 -112
- elasticsearch/_async/client/monitoring.py +1 -1
- elasticsearch/_async/client/nodes.py +9 -27
- elasticsearch/_async/client/query_rules.py +8 -8
- elasticsearch/_async/client/rollup.py +8 -8
- elasticsearch/_async/client/search_application.py +13 -13
- elasticsearch/_async/client/searchable_snapshots.py +4 -4
- elasticsearch/_async/client/security.py +71 -71
- elasticsearch/_async/client/shutdown.py +3 -10
- elasticsearch/_async/client/simulate.py +6 -6
- elasticsearch/_async/client/slm.py +9 -9
- elasticsearch/_async/client/snapshot.py +13 -17
- elasticsearch/_async/client/sql.py +6 -6
- elasticsearch/_async/client/ssl.py +1 -1
- elasticsearch/_async/client/synonyms.py +7 -7
- elasticsearch/_async/client/tasks.py +3 -9
- elasticsearch/_async/client/text_structure.py +4 -4
- elasticsearch/_async/client/transform.py +30 -28
- elasticsearch/_async/client/watcher.py +22 -14
- elasticsearch/_async/client/xpack.py +2 -2
- elasticsearch/_async/helpers.py +0 -1
- elasticsearch/_sync/client/__init__.py +174 -79
- elasticsearch/_sync/client/_base.py +0 -1
- elasticsearch/_sync/client/async_search.py +12 -8
- elasticsearch/_sync/client/autoscaling.py +4 -4
- elasticsearch/_sync/client/cat.py +26 -26
- elasticsearch/_sync/client/ccr.py +186 -72
- elasticsearch/_sync/client/cluster.py +38 -19
- elasticsearch/_sync/client/connector.py +30 -30
- elasticsearch/_sync/client/dangling_indices.py +3 -3
- elasticsearch/_sync/client/enrich.py +26 -5
- elasticsearch/_sync/client/eql.py +32 -4
- elasticsearch/_sync/client/esql.py +62 -6
- elasticsearch/_sync/client/features.py +12 -2
- elasticsearch/_sync/client/fleet.py +8 -2
- elasticsearch/_sync/client/graph.py +1 -1
- elasticsearch/_sync/client/ilm.py +23 -22
- elasticsearch/_sync/client/indices.py +424 -132
- elasticsearch/_sync/client/inference.py +1853 -115
- elasticsearch/_sync/client/ingest.py +32 -38
- elasticsearch/_sync/client/license.py +51 -16
- elasticsearch/_sync/client/logstash.py +3 -3
- elasticsearch/_sync/client/migration.py +3 -3
- elasticsearch/_sync/client/ml.py +141 -112
- elasticsearch/_sync/client/monitoring.py +1 -1
- elasticsearch/_sync/client/nodes.py +9 -27
- elasticsearch/_sync/client/query_rules.py +8 -8
- elasticsearch/_sync/client/rollup.py +8 -8
- elasticsearch/_sync/client/search_application.py +13 -13
- elasticsearch/_sync/client/searchable_snapshots.py +4 -4
- elasticsearch/_sync/client/security.py +71 -71
- elasticsearch/_sync/client/shutdown.py +3 -10
- elasticsearch/_sync/client/simulate.py +6 -6
- elasticsearch/_sync/client/slm.py +9 -9
- elasticsearch/_sync/client/snapshot.py +13 -17
- elasticsearch/_sync/client/sql.py +6 -6
- elasticsearch/_sync/client/ssl.py +1 -1
- elasticsearch/_sync/client/synonyms.py +7 -7
- elasticsearch/_sync/client/tasks.py +3 -9
- elasticsearch/_sync/client/text_structure.py +4 -4
- elasticsearch/_sync/client/transform.py +30 -28
- elasticsearch/_sync/client/utils.py +0 -3
- elasticsearch/_sync/client/watcher.py +22 -14
- elasticsearch/_sync/client/xpack.py +2 -2
- elasticsearch/_version.py +1 -1
- elasticsearch/dsl/__init__.py +203 -0
- elasticsearch/dsl/_async/__init__.py +16 -0
- elasticsearch/dsl/_async/document.py +522 -0
- elasticsearch/dsl/_async/faceted_search.py +50 -0
- elasticsearch/dsl/_async/index.py +639 -0
- elasticsearch/dsl/_async/mapping.py +49 -0
- elasticsearch/dsl/_async/search.py +233 -0
- elasticsearch/dsl/_async/update_by_query.py +47 -0
- elasticsearch/dsl/_sync/__init__.py +16 -0
- elasticsearch/dsl/_sync/document.py +514 -0
- elasticsearch/dsl/_sync/faceted_search.py +50 -0
- elasticsearch/dsl/_sync/index.py +597 -0
- elasticsearch/dsl/_sync/mapping.py +49 -0
- elasticsearch/dsl/_sync/search.py +226 -0
- elasticsearch/dsl/_sync/update_by_query.py +45 -0
- elasticsearch/dsl/aggs.py +3730 -0
- elasticsearch/dsl/analysis.py +341 -0
- elasticsearch/dsl/async_connections.py +37 -0
- elasticsearch/dsl/connections.py +142 -0
- elasticsearch/dsl/document.py +20 -0
- elasticsearch/dsl/document_base.py +444 -0
- elasticsearch/dsl/exceptions.py +32 -0
- elasticsearch/dsl/faceted_search.py +28 -0
- elasticsearch/dsl/faceted_search_base.py +489 -0
- elasticsearch/dsl/field.py +4254 -0
- elasticsearch/dsl/function.py +180 -0
- elasticsearch/dsl/index.py +23 -0
- elasticsearch/dsl/index_base.py +178 -0
- elasticsearch/dsl/mapping.py +19 -0
- elasticsearch/dsl/mapping_base.py +219 -0
- elasticsearch/dsl/query.py +2816 -0
- elasticsearch/dsl/response/__init__.py +388 -0
- elasticsearch/dsl/response/aggs.py +100 -0
- elasticsearch/dsl/response/hit.py +53 -0
- elasticsearch/dsl/search.py +20 -0
- elasticsearch/dsl/search_base.py +1040 -0
- elasticsearch/dsl/serializer.py +34 -0
- elasticsearch/dsl/types.py +6471 -0
- elasticsearch/dsl/update_by_query.py +19 -0
- elasticsearch/dsl/update_by_query_base.py +149 -0
- elasticsearch/dsl/utils.py +687 -0
- elasticsearch/dsl/wrappers.py +119 -0
- {elasticsearch-8.17.2.dist-info → elasticsearch-8.18.0.dist-info}/METADATA +12 -2
- elasticsearch-8.18.0.dist-info/RECORD +161 -0
- elasticsearch-8.17.2.dist-info/RECORD +0 -119
- {elasticsearch-8.17.2.dist-info → elasticsearch-8.18.0.dist-info}/WHEEL +0 -0
- {elasticsearch-8.17.2.dist-info → elasticsearch-8.18.0.dist-info}/licenses/LICENSE +0 -0
- {elasticsearch-8.17.2.dist-info → elasticsearch-8.18.0.dist-info}/licenses/NOTICE +0 -0
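
Most of the per-file churn above is in the generated API client modules; the largest structural addition is the new elasticsearch.dsl package, which bundles the previously separate elasticsearch-dsl library into the client. As a rough orientation only, here is a minimal sketch of the kind of code this enables, assuming the top-level re-exports mirror the standalone elasticsearch-dsl package; the Article class, its fields, the index name, and the host URL are invented for illustration and do not appear in the diff:

# Hypothetical sketch: declarative mappings and search with the bundled DSL module.
from elasticsearch.dsl import Document, Keyword, Text, connections

# Register a client under the "default" alias used by Document and Search.
connections.create_connection(hosts="http://localhost:9200")


class Article(Document):
    title = Text(fields={"raw": Keyword()})
    body = Text()

    class Index:
        name = "articles"


Article.init()  # create the index and mappings
Article(meta={"id": "1"}, title="Hello", body="DSL is now bundled").save()
for hit in Article.search().query("match", title="hello").execute():
    print(hit.meta.id, hit.title)
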
elasticsearch/dsl/_async/document.py (new file)
@@ -0,0 +1,522 @@
+# Licensed to Elasticsearch B.V. under one or more contributor
+# license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright
+# ownership. Elasticsearch B.V. licenses this file to you under
+# the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import collections.abc
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    AsyncIterable,
+    Dict,
+    List,
+    Optional,
+    Tuple,
+    Union,
+    cast,
+)
+
+from typing_extensions import Self, dataclass_transform
+
+from elasticsearch.exceptions import NotFoundError, RequestError
+from elasticsearch.helpers import async_bulk
+
+from .._async.index import AsyncIndex
+from ..async_connections import get_connection
+from ..document_base import DocumentBase, DocumentMeta, mapped_field
+from ..exceptions import IllegalOperation
+from ..utils import DOC_META_FIELDS, META_FIELDS, AsyncUsingType, merge
+from .search import AsyncSearch
+
+if TYPE_CHECKING:
+    from elasticsearch import AsyncElasticsearch
+
+
+class AsyncIndexMeta(DocumentMeta):
+    _index: AsyncIndex
+
+    # global flag to guard us from associating an Index with the base Document
+    # class, only user defined subclasses should have an _index attr
+    _document_initialized = False
+
+    def __new__(
+        cls, name: str, bases: Tuple[type, ...], attrs: Dict[str, Any]
+    ) -> "AsyncIndexMeta":
+        new_cls = super().__new__(cls, name, bases, attrs)
+        if cls._document_initialized:
+            index_opts = attrs.pop("Index", None)
+            index = cls.construct_index(index_opts, bases)
+            new_cls._index = index
+            index.document(new_cls)
+        cls._document_initialized = True
+        return cast(AsyncIndexMeta, new_cls)
+
+    @classmethod
+    def construct_index(
+        cls, opts: Dict[str, Any], bases: Tuple[type, ...]
+    ) -> AsyncIndex:
+        if opts is None:
+            for b in bases:
+                if hasattr(b, "_index"):
+                    return b._index
+
+            # Set None as Index name so it will set _all while making the query
+            return AsyncIndex(name=None)
+
+        i = AsyncIndex(
+            getattr(opts, "name", "*"), using=getattr(opts, "using", "default")
+        )
+        i.settings(**getattr(opts, "settings", {}))
+        i.aliases(**getattr(opts, "aliases", {}))
+        for a in getattr(opts, "analyzers", ()):
+            i.analyzer(a)
+        return i
+
+
+@dataclass_transform(field_specifiers=(mapped_field,))
+class AsyncDocument(DocumentBase, metaclass=AsyncIndexMeta):
+    """
+    Model-like class for persisting documents in elasticsearch.
+    """
+
+    if TYPE_CHECKING:
+        _index: AsyncIndex
+
+    @classmethod
+    def _get_using(cls, using: Optional[AsyncUsingType] = None) -> AsyncUsingType:
+        return cast(AsyncUsingType, using or cls._index._using)
+
+    @classmethod
+    def _get_connection(
+        cls, using: Optional[AsyncUsingType] = None
+    ) -> "AsyncElasticsearch":
+        return get_connection(cls._get_using(using))
+
+    @classmethod
+    async def init(
+        cls, index: Optional[str] = None, using: Optional[AsyncUsingType] = None
+    ) -> None:
+        """
+        Create the index and populate the mappings in elasticsearch.
+        """
+        i = cls._index
+        if index:
+            i = i.clone(name=index)
+        await i.save(using=using)
+
+    @classmethod
+    def search(
+        cls, using: Optional[AsyncUsingType] = None, index: Optional[str] = None
+    ) -> AsyncSearch[Self]:
+        """
+        Create an :class:`~elasticsearch.dsl.Search` instance that will search
+        over this ``Document``.
+        """
+        return AsyncSearch(
+            using=cls._get_using(using), index=cls._default_index(index), doc_type=[cls]
+        )
+
+    @classmethod
+    async def get(
+        cls,
+        id: str,
+        using: Optional[AsyncUsingType] = None,
+        index: Optional[str] = None,
+        **kwargs: Any,
+    ) -> Optional[Self]:
+        """
+        Retrieve a single document from elasticsearch using its ``id``.
+
+        :arg id: ``id`` of the document to be retrieved
+        :arg index: elasticsearch index to use, if the ``Document`` is
+            associated with an index this can be omitted.
+        :arg using: connection alias to use, defaults to ``'default'``
+
+        Any additional keyword arguments will be passed to
+        ``Elasticsearch.get`` unchanged.
+        """
+        es = cls._get_connection(using)
+        doc = await es.get(index=cls._default_index(index), id=id, **kwargs)
+        if not doc.get("found", False):
+            return None
+        return cls.from_es(doc)
+
+    @classmethod
+    async def exists(
+        cls,
+        id: str,
+        using: Optional[AsyncUsingType] = None,
+        index: Optional[str] = None,
+        **kwargs: Any,
+    ) -> bool:
+        """
+        check if exists a single document from elasticsearch using its ``id``.
+
+        :arg id: ``id`` of the document to check if exists
+        :arg index: elasticsearch index to use, if the ``Document`` is
+            associated with an index this can be omitted.
+        :arg using: connection alias to use, defaults to ``'default'``
+
+        Any additional keyword arguments will be passed to
+        ``Elasticsearch.exists`` unchanged.
+        """
+        es = cls._get_connection(using)
+        return bool(await es.exists(index=cls._default_index(index), id=id, **kwargs))
+
+    @classmethod
+    async def mget(
+        cls,
+        docs: List[Dict[str, Any]],
+        using: Optional[AsyncUsingType] = None,
+        index: Optional[str] = None,
+        raise_on_error: bool = True,
+        missing: str = "none",
+        **kwargs: Any,
+    ) -> List[Optional[Self]]:
+        r"""
+        Retrieve multiple document by their ``id``\s. Returns a list of instances
+        in the same order as requested.
+
+        :arg docs: list of ``id``\s of the documents to be retrieved or a list
+            of document specifications as per
+            https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-multi-get.html
+        :arg index: elasticsearch index to use, if the ``Document`` is
+            associated with an index this can be omitted.
+        :arg using: connection alias to use, defaults to ``'default'``
+        :arg missing: what to do when one of the documents requested is not
+            found. Valid options are ``'none'`` (use ``None``), ``'raise'`` (raise
+            ``NotFoundError``) or ``'skip'`` (ignore the missing document).
+
+        Any additional keyword arguments will be passed to
+        ``Elasticsearch.mget`` unchanged.
+        """
+        if missing not in ("raise", "skip", "none"):
+            raise ValueError("'missing' must be 'raise', 'skip', or 'none'.")
+        es = cls._get_connection(using)
+        body = {
+            "docs": [
+                doc if isinstance(doc, collections.abc.Mapping) else {"_id": doc}
+                for doc in docs
+            ]
+        }
+        results = await es.mget(index=cls._default_index(index), body=body, **kwargs)
+
+        objs: List[Optional[Self]] = []
+        error_docs: List[Self] = []
+        missing_docs: List[Self] = []
+        for doc in results["docs"]:
+            if doc.get("found"):
+                if error_docs or missing_docs:
+                    # We're going to raise an exception anyway, so avoid an
+                    # expensive call to cls.from_es().
+                    continue
+
+                objs.append(cls.from_es(doc))
+
+            elif doc.get("error"):
+                if raise_on_error:
+                    error_docs.append(doc)
+                if missing == "none":
+                    objs.append(None)
+
+            # The doc didn't cause an error, but the doc also wasn't found.
+            elif missing == "raise":
+                missing_docs.append(doc)
+            elif missing == "none":
+                objs.append(None)
+
+        if error_docs:
+            error_ids = [doc["_id"] for doc in error_docs]
+            message = "Required routing not provided for documents %s."
+            message %= ", ".join(error_ids)
+            raise RequestError(400, message, error_docs)  # type: ignore[arg-type]
+        if missing_docs:
+            missing_ids = [doc["_id"] for doc in missing_docs]
+            message = f"Documents {', '.join(missing_ids)} not found."
+            raise NotFoundError(404, message, {"docs": missing_docs})  # type: ignore[arg-type]
+        return objs
+
+    async def delete(
+        self,
+        using: Optional[AsyncUsingType] = None,
+        index: Optional[str] = None,
+        **kwargs: Any,
+    ) -> None:
+        """
+        Delete the instance in elasticsearch.
+
+        :arg index: elasticsearch index to use, if the ``Document`` is
+            associated with an index this can be omitted.
+        :arg using: connection alias to use, defaults to ``'default'``
+
+        Any additional keyword arguments will be passed to
+        ``Elasticsearch.delete`` unchanged.
+        """
+        es = self._get_connection(using)
+        # extract routing etc from meta
+        doc_meta = {k: self.meta[k] for k in DOC_META_FIELDS if k in self.meta}
+
+        # Optimistic concurrency control
+        if "seq_no" in self.meta and "primary_term" in self.meta:
+            doc_meta["if_seq_no"] = self.meta["seq_no"]
+            doc_meta["if_primary_term"] = self.meta["primary_term"]
+
+        doc_meta.update(kwargs)
+        i = self._get_index(index)
+        assert i is not None
+
+        await es.delete(index=i, **doc_meta)
+
+    async def update(
+        self,
+        using: Optional[AsyncUsingType] = None,
+        index: Optional[str] = None,
+        detect_noop: bool = True,
+        doc_as_upsert: bool = False,
+        refresh: bool = False,
+        retry_on_conflict: Optional[int] = None,
+        script: Optional[Union[str, Dict[str, Any]]] = None,
+        script_id: Optional[str] = None,
+        scripted_upsert: bool = False,
+        upsert: Optional[Dict[str, Any]] = None,
+        return_doc_meta: bool = False,
+        **fields: Any,
+    ) -> Any:
+        """
+        Partial update of the document, specify fields you wish to update and
+        both the instance and the document in elasticsearch will be updated::
+
+            doc = MyDocument(title='Document Title!')
+            doc.save()
+            doc.update(title='New Document Title!')
+
+        :arg index: elasticsearch index to use, if the ``Document`` is
+            associated with an index this can be omitted.
+        :arg using: connection alias to use, defaults to ``'default'``
+        :arg detect_noop: Set to ``False`` to disable noop detection.
+        :arg refresh: Control when the changes made by this request are visible
+            to search. Set to ``True`` for immediate effect.
+        :arg retry_on_conflict: In between the get and indexing phases of the
+            update, it is possible that another process might have already
+            updated the same document. By default, the update will fail with a
+            version conflict exception. The retry_on_conflict parameter
+            controls how many times to retry the update before finally throwing
+            an exception.
+        :arg doc_as_upsert: Instead of sending a partial doc plus an upsert
+            doc, setting doc_as_upsert to true will use the contents of doc as
+            the upsert value
+        :arg script: the source code of the script as a string, or a dictionary
+            with script attributes to update.
+        :arg return_doc_meta: set to ``True`` to return all metadata from the
+            index API call instead of only the operation result
+
+        :return: operation result noop/updated
+        """
+        body: Dict[str, Any] = {
+            "doc_as_upsert": doc_as_upsert,
+            "detect_noop": detect_noop,
+        }
+
+        # scripted update
+        if script or script_id:
+            if upsert is not None:
+                body["upsert"] = upsert
+
+            if script:
+                if isinstance(script, str):
+                    script = {"source": script}
+            else:
+                script = {"id": script_id}
+
+            if "params" not in script:
+                script["params"] = fields
+            else:
+                script["params"].update(fields)
+
+            body["script"] = script
+            body["scripted_upsert"] = scripted_upsert
+
+        # partial document update
+        else:
+            if not fields:
+                raise IllegalOperation(
+                    "You cannot call update() without updating individual fields or a script. "
+                    "If you wish to update the entire object use save()."
+                )
+
+            # update given fields locally
+            merge(self, fields)
+
+            # prepare data for ES
+            values = self.to_dict(skip_empty=False)
+
+            # if fields were given: partial update
+            body["doc"] = {k: values.get(k) for k in fields.keys()}
+
+        # extract routing etc from meta
+        doc_meta = {k: self.meta[k] for k in DOC_META_FIELDS if k in self.meta}
+
+        if retry_on_conflict is not None:
+            doc_meta["retry_on_conflict"] = retry_on_conflict
+
+        # Optimistic concurrency control
+        if (
+            retry_on_conflict in (None, 0)
+            and "seq_no" in self.meta
+            and "primary_term" in self.meta
+        ):
+            doc_meta["if_seq_no"] = self.meta["seq_no"]
+            doc_meta["if_primary_term"] = self.meta["primary_term"]
+
+        i = self._get_index(index)
+        assert i is not None
+
+        meta = await self._get_connection(using).update(
+            index=i, body=body, refresh=refresh, **doc_meta
+        )
+
+        # update meta information from ES
+        for k in META_FIELDS:
+            if "_" + k in meta:
+                setattr(self.meta, k, meta["_" + k])
+
+        return meta if return_doc_meta else meta["result"]
+
+    async def save(
+        self,
+        using: Optional[AsyncUsingType] = None,
+        index: Optional[str] = None,
+        validate: bool = True,
+        skip_empty: bool = True,
+        return_doc_meta: bool = False,
+        **kwargs: Any,
+    ) -> Any:
+        """
+        Save the document into elasticsearch. If the document doesn't exist it
+        is created, it is overwritten otherwise. Returns ``True`` if this
+        operations resulted in new document being created.
+
+        :arg index: elasticsearch index to use, if the ``Document`` is
+            associated with an index this can be omitted.
+        :arg using: connection alias to use, defaults to ``'default'``
+        :arg validate: set to ``False`` to skip validating the document
+        :arg skip_empty: if set to ``False`` will cause empty values (``None``,
+            ``[]``, ``{}``) to be left on the document. Those values will be
+            stripped out otherwise as they make no difference in elasticsearch.
+        :arg return_doc_meta: set to ``True`` to return all metadata from the
+            update API call instead of only the operation result
+
+        Any additional keyword arguments will be passed to
+        ``Elasticsearch.index`` unchanged.
+
+        :return: operation result created/updated
+        """
+        if validate:
+            self.full_clean()
+
+        es = self._get_connection(using)
+        # extract routing etc from meta
+        doc_meta = {k: self.meta[k] for k in DOC_META_FIELDS if k in self.meta}
+
+        # Optimistic concurrency control
+        if "seq_no" in self.meta and "primary_term" in self.meta:
+            doc_meta["if_seq_no"] = self.meta["seq_no"]
+            doc_meta["if_primary_term"] = self.meta["primary_term"]
+
+        doc_meta.update(kwargs)
+        i = self._get_index(index)
+        assert i is not None
+
+        meta = await es.index(
+            index=i,
+            body=self.to_dict(skip_empty=skip_empty),
+            **doc_meta,
+        )
+        # update meta information from ES
+        for k in META_FIELDS:
+            if "_" + k in meta:
+                setattr(self.meta, k, meta["_" + k])
+
+        return meta if return_doc_meta else meta["result"]
+
+    @classmethod
+    async def bulk(
+        cls,
+        actions: AsyncIterable[Union[Self, Dict[str, Any]]],
+        using: Optional[AsyncUsingType] = None,
+        index: Optional[str] = None,
+        validate: bool = True,
+        skip_empty: bool = True,
+        **kwargs: Any,
+    ) -> Tuple[int, Union[int, List[Any]]]:
+        """
+        Allows to perform multiple indexing operations in a single request.
+
+        :arg actions: a generator that returns document instances to be indexed,
+            bulk operation dictionaries.
+        :arg using: connection alias to use, defaults to ``'default'``
+        :arg index: Elasticsearch index to use, if the ``Document`` is
+            associated with an index this can be omitted.
+        :arg validate: set to ``False`` to skip validating the documents
+        :arg skip_empty: if set to ``False`` will cause empty values (``None``,
+            ``[]``, ``{}``) to be left on the document. Those values will be
+            stripped out otherwise as they make no difference in Elasticsearch.
+
+        Any additional keyword arguments will be passed to
+        ``Elasticsearch.bulk`` unchanged.
+
+        :return: bulk operation results
+        """
+        es = cls._get_connection(using)
+
+        i = cls._default_index(index)
+        assert i is not None
+
+        class Generate:
+            def __init__(
+                self,
+                doc_iterator: AsyncIterable[Union[AsyncDocument, Dict[str, Any]]],
+            ):
+                self.doc_iterator = doc_iterator.__aiter__()
+
+            def __aiter__(self) -> Self:
+                return self
+
+            async def __anext__(self) -> Dict[str, Any]:
+                doc: Optional[Union[AsyncDocument, Dict[str, Any]]] = (
+                    await self.doc_iterator.__anext__()
+                )
+
+                if isinstance(doc, dict):
+                    action = doc
+                    doc = None
+                    if "_source" in action and isinstance(
+                        action["_source"], AsyncDocument
+                    ):
+                        doc = action["_source"]
+                        if validate:  # pragma: no cover
+                            doc.full_clean()
+                        action["_source"] = doc.to_dict(
+                            include_meta=False, skip_empty=skip_empty
+                        )
+                elif doc is not None:
+                    if validate:  # pragma: no cover
+                        doc.full_clean()
+                    action = doc.to_dict(include_meta=True, skip_empty=skip_empty)
+                if "_index" not in action:
+                    action["_index"] = i
+                return action
+
+        return await async_bulk(es, Generate(actions), **kwargs)
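
For orientation, here is a hedged sketch of how the AsyncDocument class added above might be used end to end, assuming the top-level re-exports mirror the standalone elasticsearch-dsl package; the Post class, its fields, the index name, the document id, and the host URL are invented for illustration, and the connection helpers come from the async_connections module listed earlier in this diff:

# Hypothetical sketch of the AsyncDocument lifecycle (init/save/get/update).
import asyncio

from elasticsearch.dsl import AsyncDocument, Keyword, Text, async_connections


class Post(AsyncDocument):
    title = Text()
    status = Keyword()

    class Index:
        name = "posts"


async def main() -> None:
    # AsyncDocument resolves its client from the "default" async connection alias.
    async_connections.create_connection(hosts="http://localhost:9200")

    await Post.init()  # create the index and mappings
    post = Post(meta={"id": "42"}, title="hello", status="draft")
    await post.save()  # index the document
    fetched = await Post.get(id="42")  # returns None if the id is not found
    if fetched is not None:
        # partial update: merges the field locally and sends an update request
        await fetched.update(status="published")

    await async_connections.get_connection().close()


asyncio.run(main())
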
elasticsearch/dsl/_async/faceted_search.py (new file)
@@ -0,0 +1,50 @@
+# Licensed to Elasticsearch B.V. under one or more contributor
+# license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright
+# ownership. Elasticsearch B.V. licenses this file to you under
+# the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from typing import TYPE_CHECKING
+
+from ..faceted_search_base import FacetedResponse, FacetedSearchBase
+from ..utils import _R
+from .search import AsyncSearch
+
+if TYPE_CHECKING:
+    from ..response import Response
+
+
+class AsyncFacetedSearch(FacetedSearchBase[_R]):
+    _s: AsyncSearch[_R]
+
+    async def count(self) -> int:
+        return await self._s.count()
+
+    def search(self) -> AsyncSearch[_R]:
+        """
+        Returns the base Search object to which the facets are added.
+
+        You can customize the query by overriding this method and returning a
+        modified search object.
+        """
+        s = AsyncSearch[_R](doc_type=self.doc_types, index=self.index, using=self.using)
+        return s.response_class(FacetedResponse)
+
+    async def execute(self) -> "Response[_R]":
+        """
+        Execute the search and return the response.
+        """
+        r = await self._s.execute()
+        r._faceted_search = self
+        return r
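
Similarly, a small hypothetical sketch of the AsyncFacetedSearch class from the second hunk; the BlogSearch subclass, the index, field, and facet names, and the query string are illustrative, and an async connection is assumed to be registered as in the previous sketch:

# Hypothetical faceted search over the same "posts" index as above.
from elasticsearch.dsl import AsyncFacetedSearch, TermsFacet


class BlogSearch(AsyncFacetedSearch):
    index = "posts"
    fields = ["title"]
    facets = {"status": TermsFacet(field="status")}


async def show(query: str) -> None:
    response = await BlogSearch(query).execute()
    for hit in response:
        print(hit.meta.id, hit.title)
    # Each facet yields (value, count, selected) tuples.
    for value, count, selected in response.facets.status:
        print(value, count, selected)
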