elasticsearch 8.17.2__py3-none-any.whl → 9.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- elasticsearch/_async/client/__init__.py +192 -312
- elasticsearch/_async/client/_base.py +1 -2
- elasticsearch/_async/client/async_search.py +14 -14
- elasticsearch/_async/client/autoscaling.py +4 -4
- elasticsearch/_async/client/cat.py +26 -33
- elasticsearch/_async/client/ccr.py +186 -72
- elasticsearch/_async/client/cluster.py +42 -23
- elasticsearch/_async/client/connector.py +44 -30
- elasticsearch/_async/client/dangling_indices.py +3 -3
- elasticsearch/_async/client/enrich.py +26 -5
- elasticsearch/_async/client/eql.py +32 -4
- elasticsearch/_async/client/esql.py +64 -12
- elasticsearch/_async/client/features.py +12 -2
- elasticsearch/_async/client/fleet.py +23 -19
- elasticsearch/_async/client/graph.py +1 -1
- elasticsearch/_async/client/ilm.py +30 -22
- elasticsearch/_async/client/indices.py +435 -231
- elasticsearch/_async/client/inference.py +1906 -61
- elasticsearch/_async/client/ingest.py +32 -38
- elasticsearch/_async/client/license.py +51 -16
- elasticsearch/_async/client/logstash.py +3 -3
- elasticsearch/_async/client/migration.py +3 -3
- elasticsearch/_async/client/ml.py +145 -121
- elasticsearch/_async/client/monitoring.py +1 -1
- elasticsearch/_async/client/nodes.py +10 -28
- elasticsearch/_async/client/query_rules.py +8 -8
- elasticsearch/_async/client/rollup.py +8 -8
- elasticsearch/_async/client/search_application.py +13 -13
- elasticsearch/_async/client/searchable_snapshots.py +4 -4
- elasticsearch/_async/client/security.py +78 -75
- elasticsearch/_async/client/shutdown.py +3 -10
- elasticsearch/_async/client/simulate.py +6 -6
- elasticsearch/_async/client/slm.py +9 -9
- elasticsearch/_async/client/snapshot.py +280 -134
- elasticsearch/_async/client/sql.py +6 -6
- elasticsearch/_async/client/ssl.py +1 -1
- elasticsearch/_async/client/synonyms.py +7 -7
- elasticsearch/_async/client/tasks.py +3 -9
- elasticsearch/_async/client/text_structure.py +4 -4
- elasticsearch/_async/client/transform.py +30 -28
- elasticsearch/_async/client/watcher.py +23 -15
- elasticsearch/_async/client/xpack.py +2 -2
- elasticsearch/_async/helpers.py +0 -1
- elasticsearch/_sync/client/__init__.py +192 -312
- elasticsearch/_sync/client/_base.py +1 -2
- elasticsearch/_sync/client/async_search.py +14 -14
- elasticsearch/_sync/client/autoscaling.py +4 -4
- elasticsearch/_sync/client/cat.py +26 -33
- elasticsearch/_sync/client/ccr.py +186 -72
- elasticsearch/_sync/client/cluster.py +42 -23
- elasticsearch/_sync/client/connector.py +44 -30
- elasticsearch/_sync/client/dangling_indices.py +3 -3
- elasticsearch/_sync/client/enrich.py +26 -5
- elasticsearch/_sync/client/eql.py +32 -4
- elasticsearch/_sync/client/esql.py +64 -12
- elasticsearch/_sync/client/features.py +12 -2
- elasticsearch/_sync/client/fleet.py +23 -19
- elasticsearch/_sync/client/graph.py +1 -1
- elasticsearch/_sync/client/ilm.py +30 -22
- elasticsearch/_sync/client/indices.py +435 -231
- elasticsearch/_sync/client/inference.py +1906 -61
- elasticsearch/_sync/client/ingest.py +32 -38
- elasticsearch/_sync/client/license.py +51 -16
- elasticsearch/_sync/client/logstash.py +3 -3
- elasticsearch/_sync/client/migration.py +3 -3
- elasticsearch/_sync/client/ml.py +145 -121
- elasticsearch/_sync/client/monitoring.py +1 -1
- elasticsearch/_sync/client/nodes.py +10 -28
- elasticsearch/_sync/client/query_rules.py +8 -8
- elasticsearch/_sync/client/rollup.py +8 -8
- elasticsearch/_sync/client/search_application.py +13 -13
- elasticsearch/_sync/client/searchable_snapshots.py +4 -4
- elasticsearch/_sync/client/security.py +78 -75
- elasticsearch/_sync/client/shutdown.py +3 -10
- elasticsearch/_sync/client/simulate.py +6 -6
- elasticsearch/_sync/client/slm.py +9 -9
- elasticsearch/_sync/client/snapshot.py +280 -134
- elasticsearch/_sync/client/sql.py +6 -6
- elasticsearch/_sync/client/ssl.py +1 -1
- elasticsearch/_sync/client/synonyms.py +7 -7
- elasticsearch/_sync/client/tasks.py +3 -9
- elasticsearch/_sync/client/text_structure.py +4 -4
- elasticsearch/_sync/client/transform.py +30 -28
- elasticsearch/_sync/client/utils.py +0 -40
- elasticsearch/_sync/client/watcher.py +23 -15
- elasticsearch/_sync/client/xpack.py +2 -2
- elasticsearch/_version.py +1 -1
- elasticsearch/dsl/__init__.py +203 -0
- elasticsearch/dsl/_async/__init__.py +16 -0
- elasticsearch/dsl/_async/document.py +522 -0
- elasticsearch/dsl/_async/faceted_search.py +50 -0
- elasticsearch/dsl/_async/index.py +639 -0
- elasticsearch/dsl/_async/mapping.py +49 -0
- elasticsearch/dsl/_async/search.py +237 -0
- elasticsearch/dsl/_async/update_by_query.py +47 -0
- elasticsearch/dsl/_sync/__init__.py +16 -0
- elasticsearch/dsl/_sync/document.py +514 -0
- elasticsearch/dsl/_sync/faceted_search.py +50 -0
- elasticsearch/dsl/_sync/index.py +597 -0
- elasticsearch/dsl/_sync/mapping.py +49 -0
- elasticsearch/dsl/_sync/search.py +230 -0
- elasticsearch/dsl/_sync/update_by_query.py +45 -0
- elasticsearch/dsl/aggs.py +3734 -0
- elasticsearch/dsl/analysis.py +341 -0
- elasticsearch/dsl/async_connections.py +37 -0
- elasticsearch/dsl/connections.py +142 -0
- elasticsearch/dsl/document.py +20 -0
- elasticsearch/dsl/document_base.py +444 -0
- elasticsearch/dsl/exceptions.py +32 -0
- elasticsearch/dsl/faceted_search.py +28 -0
- elasticsearch/dsl/faceted_search_base.py +489 -0
- elasticsearch/dsl/field.py +4392 -0
- elasticsearch/dsl/function.py +180 -0
- elasticsearch/dsl/index.py +23 -0
- elasticsearch/dsl/index_base.py +178 -0
- elasticsearch/dsl/mapping.py +19 -0
- elasticsearch/dsl/mapping_base.py +219 -0
- elasticsearch/dsl/query.py +2822 -0
- elasticsearch/dsl/response/__init__.py +388 -0
- elasticsearch/dsl/response/aggs.py +100 -0
- elasticsearch/dsl/response/hit.py +53 -0
- elasticsearch/dsl/search.py +20 -0
- elasticsearch/dsl/search_base.py +1053 -0
- elasticsearch/dsl/serializer.py +34 -0
- elasticsearch/dsl/types.py +6453 -0
- elasticsearch/dsl/update_by_query.py +19 -0
- elasticsearch/dsl/update_by_query_base.py +149 -0
- elasticsearch/dsl/utils.py +687 -0
- elasticsearch/dsl/wrappers.py +144 -0
- elasticsearch/helpers/vectorstore/_async/strategies.py +12 -12
- elasticsearch/helpers/vectorstore/_sync/strategies.py +12 -12
- {elasticsearch-8.17.2.dist-info → elasticsearch-9.0.0.dist-info}/METADATA +12 -15
- elasticsearch-9.0.0.dist-info/RECORD +160 -0
- elasticsearch/transport.py +0 -57
- elasticsearch-8.17.2.dist-info/RECORD +0 -119
- {elasticsearch-8.17.2.dist-info → elasticsearch-9.0.0.dist-info}/WHEEL +0 -0
- {elasticsearch-8.17.2.dist-info → elasticsearch-9.0.0.dist-info}/licenses/LICENSE +0 -0
- {elasticsearch-8.17.2.dist-info → elasticsearch-9.0.0.dist-info}/licenses/NOTICE +0 -0
|
@@ -0,0 +1,388 @@
|
|
|
1
|
+
# Licensed to Elasticsearch B.V. under one or more contributor
|
|
2
|
+
# license agreements. See the NOTICE file distributed with
|
|
3
|
+
# this work for additional information regarding copyright
|
|
4
|
+
# ownership. Elasticsearch B.V. licenses this file to you under
|
|
5
|
+
# the Apache License, Version 2.0 (the "License"); you may
|
|
6
|
+
# not use this file except in compliance with the License.
|
|
7
|
+
# You may obtain a copy of the License at
|
|
8
|
+
#
|
|
9
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
10
|
+
#
|
|
11
|
+
# Unless required by applicable law or agreed to in writing,
|
|
12
|
+
# software distributed under the License is distributed on an
|
|
13
|
+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
14
|
+
# KIND, either express or implied. See the License for the
|
|
15
|
+
# specific language governing permissions and limitations
|
|
16
|
+
# under the License.
|
|
17
|
+
|
|
18
|
+
from typing import (
|
|
19
|
+
TYPE_CHECKING,
|
|
20
|
+
Any,
|
|
21
|
+
Dict,
|
|
22
|
+
Generic,
|
|
23
|
+
Iterator,
|
|
24
|
+
List,
|
|
25
|
+
Mapping,
|
|
26
|
+
Optional,
|
|
27
|
+
Sequence,
|
|
28
|
+
Tuple,
|
|
29
|
+
Union,
|
|
30
|
+
cast,
|
|
31
|
+
)
|
|
32
|
+
|
|
33
|
+
from ..utils import _R, AttrDict, AttrList, _wrap
|
|
34
|
+
from .hit import Hit, HitMeta
|
|
35
|
+
|
|
36
|
+
if TYPE_CHECKING:
|
|
37
|
+
from .. import types
|
|
38
|
+
from ..aggs import Agg
|
|
39
|
+
from ..faceted_search_base import FacetedSearchBase
|
|
40
|
+
from ..search_base import Request, SearchBase
|
|
41
|
+
from ..update_by_query_base import UpdateByQueryBase
|
|
42
|
+
|
|
43
|
+
__all__ = [
|
|
44
|
+
"Response",
|
|
45
|
+
"AggResponse",
|
|
46
|
+
"UpdateByQueryResponse",
|
|
47
|
+
"Hit",
|
|
48
|
+
"HitMeta",
|
|
49
|
+
"AggregateResponseType",
|
|
50
|
+
]
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
class Response(AttrDict[Any], Generic[_R]):
    """An Elasticsearch search response.

    Wraps the raw response body, exposing hits as wrapped documents and
    aggregations as :class:`AggResponse` objects.

    :arg took: (required) Milliseconds Elasticsearch spent running the
        request, measured between receipt on the coordinating node and the
        moment the node is ready to respond. Includes coordinating/data
        node communication, search thread pool queue time and the actual
        run time; excludes time to send the request, serialize the JSON
        response, or deliver the response to the client.
    :arg timed_out: (required) If `true`, the request timed out before
        completion; returned results may be partial or empty.
    :arg _shards: (required) A count of shards used for the request.
    :arg hits: search results
    :arg aggregations: aggregation results
    :arg _clusters:
    :arg fields:
    :arg max_score:
    :arg num_reduce_phases:
    :arg profile:
    :arg pit_id:
    :arg _scroll_id: The identifier for the search and its search context.
        Usable with the scroll API to retrieve the next batch of results.
        Returned only when the `scroll` query parameter is specified in
        the request.
    :arg suggest:
    :arg terminated_early:
    """

    _search: "SearchBase[_R]"
    _faceted_search: "FacetedSearchBase[_R]"
    _doc_class: Optional[_R]
    _hits: List[_R]

    took: int
    timed_out: bool
    _shards: "types.ShardStatistics"
    _clusters: "types.ClusterStatistics"
    fields: Mapping[str, Any]
    max_score: float
    num_reduce_phases: int
    profile: "types.Profile"
    pit_id: str
    _scroll_id: str
    suggest: Mapping[
        str,
        Sequence[
            Union["types.CompletionSuggest", "types.PhraseSuggest", "types.TermSuggest"]
        ],
    ]
    terminated_early: bool

    def __init__(
        self,
        search: "Request[_R]",
        response: Dict[str, Any],
        doc_class: Optional[_R] = None,
    ):
        # Route through object.__setattr__ (via super(AttrDict, ...)) so
        # these bookkeeping attributes are kept out of the response dict.
        super(AttrDict, self).__setattr__("_search", search)
        super(AttrDict, self).__setattr__("_doc_class", doc_class)
        super().__init__(response)

    def __iter__(self) -> Iterator[_R]:  # type: ignore[override]
        # Iterating a response iterates its hits.
        return iter(self.hits)

    def __getitem__(self, key: Union[slice, int, str]) -> Any:
        # Integer and slice keys address the hit list; string keys fall
        # back to the underlying response mapping.
        if isinstance(key, (slice, int)):
            return self.hits[key]
        return super().__getitem__(key)

    def __nonzero__(self) -> bool:
        return bool(self.hits)

    __bool__ = __nonzero__

    def __repr__(self) -> str:
        return f"<Response: {(self.hits or self.aggregations)!r}>"

    def __len__(self) -> int:
        return len(self.hits)

    def __getstate__(self) -> Tuple[Dict[str, Any], "Request[_R]", Optional[_R]]:  # type: ignore[override]
        # _search and _doc_class live outside _d_, so pickle them explicitly.
        return self._d_, self._search, self._doc_class

    def __setstate__(
        self, state: Tuple[Dict[str, Any], "Request[_R]", Optional[_R]]  # type: ignore[override]
    ) -> None:
        super(AttrDict, self).__setattr__("_d_", state[0])
        super(AttrDict, self).__setattr__("_search", state[1])
        super(AttrDict, self).__setattr__("_doc_class", state[2])

    def success(self) -> bool:
        # Success means every shard responded and the request did not time out.
        return self._shards.total == self._shards.successful and not self.timed_out

    @property
    def hits(self) -> List[_R]:
        if not hasattr(self, "_hits"):
            raw = cast(AttrDict[Any], self._d_["hits"])

            try:
                wrapped = AttrList(
                    [self._search._get_result(doc) for doc in raw["hits"]]
                )
            except AttributeError as e:
                # avoid raising AttributeError since it will be hidden by the property
                raise TypeError("Could not parse hits.", e)

            # avoid assigning _hits into self._d_
            super(AttrDict, self).__setattr__("_hits", wrapped)
            for key in raw:
                setattr(self._hits, key, _wrap(raw[key]))
        return self._hits

    @property
    def aggregations(self) -> "AggResponse[_R]":
        # Alias for :attr:`aggs`.
        return self.aggs

    @property
    def aggs(self) -> "AggResponse[_R]":
        if not hasattr(self, "_aggs"):
            agg_response = AggResponse[_R](
                cast("Agg[_R]", self._search.aggs),
                self._search,
                cast(Dict[str, Any], self._d_.get("aggregations", {})),
            )

            # avoid assigning _aggs into self._d_
            super(AttrDict, self).__setattr__("_aggs", agg_response)
        return cast("AggResponse[_R]", self._aggs)

    def search_after(self) -> "SearchBase[_R]":
        """
        Return a ``Search`` instance that retrieves the next page of results.

        Convenient pagination via the ``search_after`` option. Example::

            page_size = 20
            s = Search()[:page_size].sort("date")

            while True:
                # get a page of results
                r = await s.execute()

                # do something with this page of results

                # exit the loop if we reached the end
                if len(r.hits) < page_size:
                    break

                # get a search object with the next page of results
                s = r.search_after()

        Note that the ``search_after`` option requires the search to have an
        explicit ``sort`` order.
        """
        if len(self.hits) == 0:
            raise ValueError("Cannot use search_after when there are no search results")
        if not hasattr(self.hits[-1].meta, "sort"):  # type: ignore[attr-defined]
            raise ValueError("Cannot use search_after when results are not sorted")
        return self._search.extra(search_after=self.hits[-1].meta.sort)  # type: ignore[attr-defined]
|
219
|
+
# Closed union of every concrete aggregation result type that
# ``AggResponse.__getitem__`` may return; mirrors the aggregate variants
# declared in ``elasticsearch.dsl.types``.
AggregateResponseType = Union[
    "types.CardinalityAggregate",
    "types.HdrPercentilesAggregate",
    "types.HdrPercentileRanksAggregate",
    "types.TDigestPercentilesAggregate",
    "types.TDigestPercentileRanksAggregate",
    "types.PercentilesBucketAggregate",
    "types.MedianAbsoluteDeviationAggregate",
    "types.MinAggregate",
    "types.MaxAggregate",
    "types.SumAggregate",
    "types.AvgAggregate",
    "types.WeightedAvgAggregate",
    "types.ValueCountAggregate",
    "types.SimpleValueAggregate",
    "types.DerivativeAggregate",
    "types.BucketMetricValueAggregate",
    "types.StatsAggregate",
    "types.StatsBucketAggregate",
    "types.ExtendedStatsAggregate",
    "types.ExtendedStatsBucketAggregate",
    "types.GeoBoundsAggregate",
    "types.GeoCentroidAggregate",
    "types.HistogramAggregate",
    "types.DateHistogramAggregate",
    "types.AutoDateHistogramAggregate",
    "types.VariableWidthHistogramAggregate",
    "types.StringTermsAggregate",
    "types.LongTermsAggregate",
    "types.DoubleTermsAggregate",
    "types.UnmappedTermsAggregate",
    "types.LongRareTermsAggregate",
    "types.StringRareTermsAggregate",
    "types.UnmappedRareTermsAggregate",
    "types.MultiTermsAggregate",
    "types.MissingAggregate",
    "types.NestedAggregate",
    "types.ReverseNestedAggregate",
    "types.GlobalAggregate",
    "types.FilterAggregate",
    "types.ChildrenAggregate",
    "types.ParentAggregate",
    "types.SamplerAggregate",
    "types.UnmappedSamplerAggregate",
    "types.GeoHashGridAggregate",
    "types.GeoTileGridAggregate",
    "types.GeoHexGridAggregate",
    "types.RangeAggregate",
    "types.DateRangeAggregate",
    "types.GeoDistanceAggregate",
    "types.IpRangeAggregate",
    "types.IpPrefixAggregate",
    "types.FiltersAggregate",
    "types.AdjacencyMatrixAggregate",
    "types.SignificantLongTermsAggregate",
    "types.SignificantStringTermsAggregate",
    "types.UnmappedSignificantTermsAggregate",
    "types.CompositeAggregate",
    "types.FrequentItemSetsAggregate",
    "types.TimeSeriesAggregate",
    "types.ScriptedMetricAggregate",
    "types.TopHitsAggregate",
    "types.InferenceAggregate",
    "types.StringStatsAggregate",
    "types.BoxPlotAggregate",
    "types.TopMetricsAggregate",
    "types.TTestAggregate",
    "types.RateAggregate",
    "types.CumulativeCardinalityAggregate",
    "types.MatrixStatsAggregate",
    "types.GeoLineAggregate",
]
|
|
292
|
+
|
|
293
|
+
class AggResponse(AttrDict[Any], Generic[_R]):
    """An Elasticsearch aggregation response.

    Provides attribute/item access to individual aggregation results,
    lazily wrapping each one in its typed result class.
    """

    # Bookkeeping (source search and aggregation definitions), kept out of
    # the wrapped data dict.
    _meta: Dict[str, Any]

    def __init__(self, aggs: "Agg[_R]", search: "Request[_R]", data: Dict[str, Any]):
        # Use object.__setattr__ (via super(AttrDict, ...)) so _meta is not
        # stored as part of the aggregation data itself.
        super(AttrDict, self).__setattr__("_meta", {"search": search, "aggs": aggs})
        super().__init__(data)

    def __getitem__(self, attr_name: str) -> AggregateResponseType:
        if attr_name not in self._meta["aggs"]:
            return super().__getitem__(attr_name)  # type: ignore[no-any-return]
        # don't do self._meta['aggs'][attr_name] to avoid copying
        agg_def = self._meta["aggs"].aggs[attr_name]
        wrapped = agg_def.result(self._meta["search"], self._d_[attr_name])
        return cast(AggregateResponseType, wrapped)

    def __iter__(self) -> Iterator[AggregateResponseType]:  # type: ignore[override]
        # Yield each named aggregation's wrapped result, in definition order.
        for name in self._meta["aggs"]:
            yield self[name]
|
|
316
|
+
|
|
317
|
+
class UpdateByQueryResponse(AttrDict[Any], Generic[_R]):
    """An Elasticsearch update by query response.

    :arg batches: The number of scroll responses pulled back by the update
        by query.
    :arg failures: Array of failures if there were any unrecoverable
        errors during the process. If this is non-empty then the request
        ended because of those failures. Update by query is implemented
        using batches. Any failure causes the entire process to end, but
        all failures in the current batch are collected into the array.
        You can use the `conflicts` option to prevent reindex from ending
        when version conflicts occur.
    :arg noops: The number of documents that were ignored because the
        script used for the update by query returned a noop value for
        `ctx.op`.
    :arg deleted: The number of documents that were successfully deleted.
    :arg requests_per_second: The number of requests per second
        effectively run during the update by query.
    :arg retries: The number of retries attempted by update by query.
        `bulk` is the number of bulk actions retried. `search` is the
        number of search actions retried.
    :arg task:
    :arg timed_out: If true, some requests timed out during the update by
        query.
    :arg took: The number of milliseconds from start to end of the whole
        operation.
    :arg total: The number of documents that were successfully processed.
    :arg updated: The number of documents that were successfully updated.
    :arg version_conflicts: The number of version conflicts that the
        update by query hit.
    :arg throttled:
    :arg throttled_millis: The number of milliseconds the request slept to
        conform to `requests_per_second`.
    :arg throttled_until:
    :arg throttled_until_millis: This field should always be equal to zero
        in an _update_by_query response. It only has meaning when using
        the task API, where it indicates the next time (in milliseconds
        since epoch) a throttled request will be run again in order to
        conform to `requests_per_second`.
    """

    _search: "UpdateByQueryBase[_R]"

    batches: int
    failures: Sequence["types.BulkIndexByScrollFailure"]
    noops: int
    deleted: int
    requests_per_second: float
    retries: "types.Retries"
    task: Union[str, int]
    timed_out: bool
    took: Any
    total: int
    updated: int
    version_conflicts: int
    throttled: Any
    throttled_millis: Any
    throttled_until: Any
    throttled_until_millis: Any

    def __init__(
        self,
        search: "Request[_R]",
        response: Dict[str, Any],
        doc_class: Optional[_R] = None,
    ):
        # Keep bookkeeping attributes out of the wrapped response dict by
        # bypassing AttrDict's __setattr__.
        super(AttrDict, self).__setattr__("_search", search)
        super(AttrDict, self).__setattr__("_doc_class", doc_class)
        super().__init__(response)

    def success(self) -> bool:
        # Successful only when nothing timed out and no batch failed.
        return not (self.timed_out or self.failures)
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
# Licensed to Elasticsearch B.V. under one or more contributor
|
|
2
|
+
# license agreements. See the NOTICE file distributed with
|
|
3
|
+
# this work for additional information regarding copyright
|
|
4
|
+
# ownership. Elasticsearch B.V. licenses this file to you under
|
|
5
|
+
# the Apache License, Version 2.0 (the "License"); you may
|
|
6
|
+
# not use this file except in compliance with the License.
|
|
7
|
+
# You may obtain a copy of the License at
|
|
8
|
+
#
|
|
9
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
10
|
+
#
|
|
11
|
+
# Unless required by applicable law or agreed to in writing,
|
|
12
|
+
# software distributed under the License is distributed on an
|
|
13
|
+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
14
|
+
# KIND, either express or implied. See the License for the
|
|
15
|
+
# specific language governing permissions and limitations
|
|
16
|
+
# under the License.
|
|
17
|
+
|
|
18
|
+
from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Optional, Union, cast
|
|
19
|
+
|
|
20
|
+
from ..utils import _R, AttrDict, AttrList
|
|
21
|
+
from . import AggResponse, Response
|
|
22
|
+
|
|
23
|
+
if TYPE_CHECKING:
|
|
24
|
+
from ..aggs import Agg
|
|
25
|
+
from ..field import Field
|
|
26
|
+
from ..search_base import SearchBase
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class Bucket(AggResponse[_R]):
    # A single aggregation bucket. ``field`` is accepted for signature
    # compatibility with FieldBucket but is unused here.
    def __init__(
        self,
        aggs: "Agg[_R]",
        search: "SearchBase[_R]",
        data: Dict[str, Any],
        field: Optional["Field"] = None,
    ):
        super().__init__(aggs, search, data)
|
|
39
|
+
|
|
40
|
+
class FieldBucket(Bucket[_R]):
    """Bucket whose ``key`` is deserialized through the aggregation's field."""

    def __init__(
        self,
        aggs: "Agg[_R]",
        search: "SearchBase[_R]",
        data: Dict[str, Any],
        field: Optional["Field"] = None,
    ):
        # When the field is known, convert the raw bucket key into its
        # Python representation before wrapping.
        if field:
            data["key"] = field.deserialize(data["key"])
        super().__init__(aggs, search, data, field)
|
|
52
|
+
|
|
53
|
+
class BucketData(AggResponse[_R]):
    """Aggregation response holding a list or mapping of buckets."""

    # Subclasses override this to change how each raw bucket is wrapped.
    _bucket_class = Bucket
    _buckets: Union[AttrDict[Any], AttrList[Any]]

    def _wrap_bucket(self, data: Dict[str, Any]) -> Bucket[_R]:
        # Wrap one raw bucket dict in the configured bucket class.
        return self._bucket_class(
            self._meta["aggs"],
            self._meta["search"],
            data,
            field=self._meta.get("field"),
        )

    def __iter__(self) -> Iterator["Agg"]:  # type: ignore[override]
        return iter(self.buckets)  # type: ignore[arg-type]

    def __len__(self) -> int:
        return len(self.buckets)

    def __getitem__(self, key: Any) -> Any:
        # Integer/slice keys index the bucket list; other keys fall back
        # to the underlying response data.
        if isinstance(key, (int, slice)):
            return cast(AttrList[Any], self.buckets)[key]
        return super().__getitem__(key)

    @property
    def buckets(self) -> Union[AttrDict[Any], AttrList[Any]]:
        if not hasattr(self, "_buckets"):
            field_name = getattr(self._meta["aggs"], "field", None)
            if field_name:
                self._meta["field"] = self._meta["search"]._resolve_field(field_name)
            raw = cast(Union[Dict[str, Any], List[Any]], self._d_["buckets"])
            # Buckets arrive either as a list (keyed by position) or as a
            # dict (keyed by name); wrap each entry lazily/eagerly as fits.
            if isinstance(raw, list):
                wrapped = AttrList(raw, obj_wrapper=self._wrap_bucket)
            else:
                wrapped = AttrDict[Any](  # type: ignore[assignment]
                    {name: self._wrap_bucket(value) for name, value in raw.items()}
                )
            # Cache outside self._d_ so the raw data is left untouched.
            super(AttrDict, self).__setattr__("_buckets", wrapped)
        return self._buckets
|
|
90
|
+
|
|
91
|
+
class FieldBucketData(BucketData[_R]):
    # BucketData variant whose buckets deserialize their keys through the
    # aggregation's resolved field (see FieldBucket).
    _bucket_class = FieldBucket
|
|
94
|
+
|
|
95
|
+
class TopHitsData(Response[_R]):
    """Response wrapper for a ``top_hits`` aggregation result."""

    def __init__(self, agg: "Agg[_R]", search: "SearchBase[_R]", data: Any):
        # Store agg/search bookkeeping as a real attribute (not a dict key)
        # by bypassing AttrDict's __setattr__.
        meta = AttrDict({"agg": agg, "search": search})
        super(AttrDict, self).__setattr__("meta", meta)
        super().__init__(search, data)
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
# Licensed to Elasticsearch B.V. under one or more contributor
|
|
2
|
+
# license agreements. See the NOTICE file distributed with
|
|
3
|
+
# this work for additional information regarding copyright
|
|
4
|
+
# ownership. Elasticsearch B.V. licenses this file to you under
|
|
5
|
+
# the Apache License, Version 2.0 (the "License"); you may
|
|
6
|
+
# not use this file except in compliance with the License.
|
|
7
|
+
# You may obtain a copy of the License at
|
|
8
|
+
#
|
|
9
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
10
|
+
#
|
|
11
|
+
# Unless required by applicable law or agreed to in writing,
|
|
12
|
+
# software distributed under the License is distributed on an
|
|
13
|
+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
14
|
+
# KIND, either express or implied. See the License for the
|
|
15
|
+
# specific language governing permissions and limitations
|
|
16
|
+
# under the License.
|
|
17
|
+
|
|
18
|
+
from typing import Any, Dict, List, Tuple, cast
|
|
19
|
+
|
|
20
|
+
from ..utils import AttrDict, HitMeta
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class Hit(AttrDict[Any]):
    """A single search hit, exposing ``_source`` (and ``fields``) as attributes."""

    def __init__(self, document: Dict[str, Any]):
        # Start from the document source (if any) and overlay any
        # requested runtime/stored fields on top of it.
        data: Dict[str, Any] = {}
        if "_source" in document:
            data = cast(Dict[str, Any], document["_source"])
        if "fields" in document:
            data.update(cast(Dict[str, Any], document["fields"]))

        super().__init__(data)
        # assign meta as attribute and not as key in self._d_
        super(AttrDict, self).__setattr__("meta", HitMeta(document))

    def __getstate__(self) -> Tuple[Dict[str, Any], HitMeta]:  # type: ignore[override]
        # add self.meta since it is not in self.__dict__
        return super().__getstate__() + (self.meta,)

    def __setstate__(self, state: Tuple[Dict[str, Any], HitMeta]) -> None:  # type: ignore[override]
        super(AttrDict, self).__setattr__("meta", state[-1])
        super().__setstate__(state[:-1])

    def __dir__(self) -> List[str]:
        # be sure to expose meta in dir(self)
        return super().__dir__() + ["meta"]

    def __repr__(self) -> str:
        location = "/".join(
            getattr(self.meta, key) for key in ("index", "id") if key in self.meta
        )
        return f"<Hit({location}): {super().__repr__()}>"
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
# Licensed to Elasticsearch B.V. under one or more contributor
|
|
2
|
+
# license agreements. See the NOTICE file distributed with
|
|
3
|
+
# this work for additional information regarding copyright
|
|
4
|
+
# ownership. Elasticsearch B.V. licenses this file to you under
|
|
5
|
+
# the Apache License, Version 2.0 (the "License"); you may
|
|
6
|
+
# not use this file except in compliance with the License.
|
|
7
|
+
# You may obtain a copy of the License at
|
|
8
|
+
#
|
|
9
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
10
|
+
#
|
|
11
|
+
# Unless required by applicable law or agreed to in writing,
|
|
12
|
+
# software distributed under the License is distributed on an
|
|
13
|
+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
14
|
+
# KIND, either express or implied. See the License for the
|
|
15
|
+
# specific language governing permissions and limitations
|
|
16
|
+
# under the License.
|
|
17
|
+
|
|
18
|
+
from ._async.search import AsyncEmptySearch, AsyncMultiSearch, AsyncSearch # noqa: F401
|
|
19
|
+
from ._sync.search import EmptySearch, MultiSearch, Search # noqa: F401
|
|
20
|
+
from .search_base import Q # noqa: F401
|