elasticsearch 8.17.2__py3-none-any.whl → 8.18.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- elasticsearch/_async/client/__init__.py +174 -79
- elasticsearch/_async/client/_base.py +0 -1
- elasticsearch/_async/client/async_search.py +12 -8
- elasticsearch/_async/client/autoscaling.py +4 -4
- elasticsearch/_async/client/cat.py +26 -26
- elasticsearch/_async/client/ccr.py +186 -72
- elasticsearch/_async/client/cluster.py +38 -19
- elasticsearch/_async/client/connector.py +30 -30
- elasticsearch/_async/client/dangling_indices.py +3 -3
- elasticsearch/_async/client/enrich.py +26 -5
- elasticsearch/_async/client/eql.py +32 -4
- elasticsearch/_async/client/esql.py +62 -6
- elasticsearch/_async/client/features.py +12 -2
- elasticsearch/_async/client/fleet.py +8 -2
- elasticsearch/_async/client/graph.py +1 -1
- elasticsearch/_async/client/ilm.py +23 -22
- elasticsearch/_async/client/indices.py +424 -132
- elasticsearch/_async/client/inference.py +1853 -115
- elasticsearch/_async/client/ingest.py +32 -38
- elasticsearch/_async/client/license.py +51 -16
- elasticsearch/_async/client/logstash.py +3 -3
- elasticsearch/_async/client/migration.py +3 -3
- elasticsearch/_async/client/ml.py +141 -112
- elasticsearch/_async/client/monitoring.py +1 -1
- elasticsearch/_async/client/nodes.py +9 -27
- elasticsearch/_async/client/query_rules.py +8 -8
- elasticsearch/_async/client/rollup.py +8 -8
- elasticsearch/_async/client/search_application.py +13 -13
- elasticsearch/_async/client/searchable_snapshots.py +4 -4
- elasticsearch/_async/client/security.py +71 -71
- elasticsearch/_async/client/shutdown.py +3 -10
- elasticsearch/_async/client/simulate.py +6 -6
- elasticsearch/_async/client/slm.py +9 -9
- elasticsearch/_async/client/snapshot.py +13 -17
- elasticsearch/_async/client/sql.py +6 -6
- elasticsearch/_async/client/ssl.py +1 -1
- elasticsearch/_async/client/synonyms.py +7 -7
- elasticsearch/_async/client/tasks.py +3 -9
- elasticsearch/_async/client/text_structure.py +4 -4
- elasticsearch/_async/client/transform.py +30 -28
- elasticsearch/_async/client/watcher.py +22 -14
- elasticsearch/_async/client/xpack.py +2 -2
- elasticsearch/_async/helpers.py +0 -1
- elasticsearch/_sync/client/__init__.py +174 -79
- elasticsearch/_sync/client/_base.py +0 -1
- elasticsearch/_sync/client/async_search.py +12 -8
- elasticsearch/_sync/client/autoscaling.py +4 -4
- elasticsearch/_sync/client/cat.py +26 -26
- elasticsearch/_sync/client/ccr.py +186 -72
- elasticsearch/_sync/client/cluster.py +38 -19
- elasticsearch/_sync/client/connector.py +30 -30
- elasticsearch/_sync/client/dangling_indices.py +3 -3
- elasticsearch/_sync/client/enrich.py +26 -5
- elasticsearch/_sync/client/eql.py +32 -4
- elasticsearch/_sync/client/esql.py +62 -6
- elasticsearch/_sync/client/features.py +12 -2
- elasticsearch/_sync/client/fleet.py +8 -2
- elasticsearch/_sync/client/graph.py +1 -1
- elasticsearch/_sync/client/ilm.py +23 -22
- elasticsearch/_sync/client/indices.py +424 -132
- elasticsearch/_sync/client/inference.py +1853 -115
- elasticsearch/_sync/client/ingest.py +32 -38
- elasticsearch/_sync/client/license.py +51 -16
- elasticsearch/_sync/client/logstash.py +3 -3
- elasticsearch/_sync/client/migration.py +3 -3
- elasticsearch/_sync/client/ml.py +141 -112
- elasticsearch/_sync/client/monitoring.py +1 -1
- elasticsearch/_sync/client/nodes.py +9 -27
- elasticsearch/_sync/client/query_rules.py +8 -8
- elasticsearch/_sync/client/rollup.py +8 -8
- elasticsearch/_sync/client/search_application.py +13 -13
- elasticsearch/_sync/client/searchable_snapshots.py +4 -4
- elasticsearch/_sync/client/security.py +71 -71
- elasticsearch/_sync/client/shutdown.py +3 -10
- elasticsearch/_sync/client/simulate.py +6 -6
- elasticsearch/_sync/client/slm.py +9 -9
- elasticsearch/_sync/client/snapshot.py +13 -17
- elasticsearch/_sync/client/sql.py +6 -6
- elasticsearch/_sync/client/ssl.py +1 -1
- elasticsearch/_sync/client/synonyms.py +7 -7
- elasticsearch/_sync/client/tasks.py +3 -9
- elasticsearch/_sync/client/text_structure.py +4 -4
- elasticsearch/_sync/client/transform.py +30 -28
- elasticsearch/_sync/client/utils.py +0 -3
- elasticsearch/_sync/client/watcher.py +22 -14
- elasticsearch/_sync/client/xpack.py +2 -2
- elasticsearch/_version.py +1 -1
- elasticsearch/dsl/__init__.py +203 -0
- elasticsearch/dsl/_async/__init__.py +16 -0
- elasticsearch/dsl/_async/document.py +522 -0
- elasticsearch/dsl/_async/faceted_search.py +50 -0
- elasticsearch/dsl/_async/index.py +639 -0
- elasticsearch/dsl/_async/mapping.py +49 -0
- elasticsearch/dsl/_async/search.py +233 -0
- elasticsearch/dsl/_async/update_by_query.py +47 -0
- elasticsearch/dsl/_sync/__init__.py +16 -0
- elasticsearch/dsl/_sync/document.py +514 -0
- elasticsearch/dsl/_sync/faceted_search.py +50 -0
- elasticsearch/dsl/_sync/index.py +597 -0
- elasticsearch/dsl/_sync/mapping.py +49 -0
- elasticsearch/dsl/_sync/search.py +226 -0
- elasticsearch/dsl/_sync/update_by_query.py +45 -0
- elasticsearch/dsl/aggs.py +3730 -0
- elasticsearch/dsl/analysis.py +341 -0
- elasticsearch/dsl/async_connections.py +37 -0
- elasticsearch/dsl/connections.py +142 -0
- elasticsearch/dsl/document.py +20 -0
- elasticsearch/dsl/document_base.py +444 -0
- elasticsearch/dsl/exceptions.py +32 -0
- elasticsearch/dsl/faceted_search.py +28 -0
- elasticsearch/dsl/faceted_search_base.py +489 -0
- elasticsearch/dsl/field.py +4254 -0
- elasticsearch/dsl/function.py +180 -0
- elasticsearch/dsl/index.py +23 -0
- elasticsearch/dsl/index_base.py +178 -0
- elasticsearch/dsl/mapping.py +19 -0
- elasticsearch/dsl/mapping_base.py +219 -0
- elasticsearch/dsl/query.py +2816 -0
- elasticsearch/dsl/response/__init__.py +388 -0
- elasticsearch/dsl/response/aggs.py +100 -0
- elasticsearch/dsl/response/hit.py +53 -0
- elasticsearch/dsl/search.py +20 -0
- elasticsearch/dsl/search_base.py +1040 -0
- elasticsearch/dsl/serializer.py +34 -0
- elasticsearch/dsl/types.py +6471 -0
- elasticsearch/dsl/update_by_query.py +19 -0
- elasticsearch/dsl/update_by_query_base.py +149 -0
- elasticsearch/dsl/utils.py +687 -0
- elasticsearch/dsl/wrappers.py +119 -0
- {elasticsearch-8.17.2.dist-info → elasticsearch-8.18.0.dist-info}/METADATA +12 -2
- elasticsearch-8.18.0.dist-info/RECORD +161 -0
- elasticsearch-8.17.2.dist-info/RECORD +0 -119
- {elasticsearch-8.17.2.dist-info → elasticsearch-8.18.0.dist-info}/WHEEL +0 -0
- {elasticsearch-8.17.2.dist-info → elasticsearch-8.18.0.dist-info}/licenses/LICENSE +0 -0
- {elasticsearch-8.17.2.dist-info → elasticsearch-8.18.0.dist-info}/licenses/NOTICE +0 -0
|
@@ -0,0 +1,341 @@
|
|
|
1
|
+
# Licensed to Elasticsearch B.V. under one or more contributor
|
|
2
|
+
# license agreements. See the NOTICE file distributed with
|
|
3
|
+
# this work for additional information regarding copyright
|
|
4
|
+
# ownership. Elasticsearch B.V. licenses this file to you under
|
|
5
|
+
# the Apache License, Version 2.0 (the "License"); you may
|
|
6
|
+
# not use this file except in compliance with the License.
|
|
7
|
+
# You may obtain a copy of the License at
|
|
8
|
+
#
|
|
9
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
10
|
+
#
|
|
11
|
+
# Unless required by applicable law or agreed to in writing,
|
|
12
|
+
# software distributed under the License is distributed on an
|
|
13
|
+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
14
|
+
# KIND, either express or implied. See the License for the
|
|
15
|
+
# specific language governing permissions and limitations
|
|
16
|
+
# under the License.
|
|
17
|
+
|
|
18
|
+
from typing import Any, ClassVar, Dict, List, Optional, Union, cast
|
|
19
|
+
|
|
20
|
+
from . import async_connections, connections
|
|
21
|
+
from .utils import AsyncUsingType, AttrDict, DslBase, UsingType, merge
|
|
22
|
+
|
|
23
|
+
__all__ = ["tokenizer", "analyzer", "char_filter", "token_filter", "normalizer"]
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class AnalysisBase:
    """Shared shortcut logic for all analysis components (analyzers,
    tokenizers, token filters, char filters, normalizers)."""

    @classmethod
    def _type_shortcut(
        cls,
        name_or_instance: Union[str, "AnalysisBase"],
        type: Optional[str] = None,
        **kwargs: Any,
    ) -> DslBase:
        # Already a component instance: pass it through unchanged, but extra
        # parameters are an error because they cannot be applied to it.
        if isinstance(name_or_instance, cls):
            if type or kwargs:
                raise ValueError(f"{cls.__name__}() cannot accept parameters.")
            return name_or_instance  # type: ignore[return-value]

        # A bare name with no customization refers to a builtin component.
        if not type and not kwargs:
            return cls.get_dsl_class("builtin")(name_or_instance)  # type: ignore[no-any-return, attr-defined]

        # Otherwise construct a custom component of the requested type.
        return cls.get_dsl_class(type, "custom")(  # type: ignore[no-any-return, attr-defined]
            name_or_instance, type or "custom", **kwargs
        )
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
class CustomAnalysis:
    """Mixin for user-defined analysis components that carry a name plus a
    builtin type they are based on."""

    name = "custom"

    def __init__(self, filter_name: str, builtin_type: str = "custom", **kwargs: Any):
        self._name = filter_name
        self._builtin_type = builtin_type
        super().__init__(**kwargs)

    def to_dict(self) -> Dict[str, Any]:
        # when embedded in a list, only the component's name is serialized
        return self._name  # type: ignore[return-value]

    def get_definition(self) -> Dict[str, Any]:
        # full serialization comes from the DSL base class further up the MRO
        definition = super().to_dict()  # type: ignore[misc]
        definition = definition.pop(self.name)
        definition["type"] = self._builtin_type
        return definition  # type: ignore[no-any-return]
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
class CustomAnalysisDefinition(CustomAnalysis):
    """Custom component whose index-settings definition also includes the
    definitions of its custom sub-components (tokenizer, filters, ...)."""

    _type_name: str
    _param_defs: ClassVar[Dict[str, Any]]
    filter: List[Any]
    char_filter: List[Any]

    def get_analysis_definition(self) -> Dict[str, Any]:
        definition = {self._type_name: {self._name: self.get_definition()}}

        tok = cast("Tokenizer", getattr(self, "tokenizer", None))
        if "tokenizer" in self._param_defs and hasattr(tok, "get_definition"):
            definition["tokenizer"] = {tok._name: tok.get_definition()}

        # custom token filters contribute their own definitions by name
        token_filters: Dict[str, Any] = {}
        for f in self.filter:
            if hasattr(f, "get_definition"):
                token_filters[f._name] = f.get_definition()
        if token_filters:
            definition["filter"] = token_filters

        # any sub filter definitions like multiplexers etc?
        for f in self.filter:
            if hasattr(f, "get_analysis_definition"):
                nested = f.get_analysis_definition()
                if nested:
                    merge(definition, nested, True)

        char_filters: Dict[str, Any] = {}
        for f in self.char_filter:
            if hasattr(f, "get_definition"):
                char_filters[f._name] = f.get_definition()
        if char_filters:
            definition["char_filter"] = char_filters

        return definition
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
class BuiltinAnalysis:
    """Mixin for analysis components that are built into Elasticsearch and
    therefore referenced only by name."""

    name = "builtin"

    def __init__(self, name: str):
        self._name = name
        super().__init__()

    def to_dict(self) -> Dict[str, Any]:
        # inside a list only the component's name is emitted
        return self._name  # type: ignore[return-value]
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
class Analyzer(AnalysisBase, DslBase):
    # Base class for analyzer components; concrete names are set by subclasses.
    _type_name = "analyzer"
    name = ""
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
class BuiltinAnalyzer(BuiltinAnalysis, Analyzer):
    def get_analysis_definition(self) -> Dict[str, Any]:
        # builtin analyzers require no definition in index settings
        return {}
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
class CustomAnalyzer(CustomAnalysisDefinition, Analyzer):
    # Declares which attributes are DSL sub-components; presumably consumed by
    # the DslBase machinery imported from .utils — confirm against DslBase.
    _param_defs = {
        "filter": {"type": "token_filter", "multi": True},
        "char_filter": {"type": "char_filter", "multi": True},
        "tokenizer": {"type": "tokenizer"},
    }

    def _get_body(
        self, text: str, explain: bool, attributes: Optional[Dict[str, Any]]
    ) -> Dict[str, Any]:
        # Build the request body for the Analyze API: start from the plain
        # text/explain options, then inline the definitions of any custom
        # sub-components so the API can resolve them without index settings.
        body = {"text": text, "explain": explain}
        if attributes:
            body["attributes"] = attributes

        definition = self.get_analysis_definition()
        analyzer_def = self.get_definition()

        for section in ("tokenizer", "char_filter", "filter"):
            if section not in analyzer_def:
                continue
            sec_def = definition.get(section, {})
            sec_names = analyzer_def[section]

            if isinstance(sec_names, str):
                # single component: substitute its definition if custom,
                # otherwise pass the builtin name through unchanged
                body[section] = sec_def.get(sec_names, sec_names)
            else:
                body[section] = [
                    sec_def.get(sec_name, sec_name) for sec_name in sec_names
                ]

        # a non-custom builtin type is addressed via the "analyzer" key
        if self._builtin_type != "custom":
            body["analyzer"] = self._builtin_type

        return body

    def simulate(
        self,
        text: str,
        using: UsingType = "default",
        explain: bool = False,
        attributes: Optional[Dict[str, Any]] = None,
    ) -> AttrDict[Any]:
        """
        Use the Analyze API of elasticsearch to test the outcome of this analyzer.

        :arg text: Text to be analyzed
        :arg using: connection alias to use, defaults to ``'default'``
        :arg explain: will output all token attributes for each token. You can
            filter token attributes you want to output by setting ``attributes``
            option.
        :arg attributes: if ``explain`` is specified, filter the token
            attributes to return.
        """
        es = connections.get_connection(using)
        return AttrDict(
            cast(
                Dict[str, Any],
                es.indices.analyze(body=self._get_body(text, explain, attributes)),
            )
        )

    async def async_simulate(
        self,
        text: str,
        using: AsyncUsingType = "default",
        explain: bool = False,
        attributes: Optional[Dict[str, Any]] = None,
    ) -> AttrDict[Any]:
        """
        Use the Analyze API of elasticsearch to test the outcome of this analyzer.

        :arg text: Text to be analyzed
        :arg using: connection alias to use, defaults to ``'default'``
        :arg explain: will output all token attributes for each token. You can
            filter token attributes you want to output by setting ``attributes``
            option.
        :arg attributes: if ``explain`` is specified, filter the token
            attributes to return.
        """
        # async variant of simulate(); resolves the client from the async
        # connection registry instead of the sync one
        es = async_connections.get_connection(using)
        return AttrDict(
            cast(
                Dict[str, Any],
                await es.indices.analyze(
                    body=self._get_body(text, explain, attributes)
                ),
            )
        )
|
|
215
|
+
|
|
216
|
+
|
|
217
|
+
class Normalizer(AnalysisBase, DslBase):
    # Base class for normalizer components; concrete names set by subclasses.
    _type_name = "normalizer"
    name = ""
|
|
220
|
+
|
|
221
|
+
|
|
222
|
+
class BuiltinNormalizer(BuiltinAnalysis, Normalizer):
    def get_analysis_definition(self) -> Dict[str, Any]:
        # builtin normalizers require no definition in index settings
        return {}
|
|
225
|
+
|
|
226
|
+
|
|
227
|
+
class CustomNormalizer(CustomAnalysisDefinition, Normalizer):
    # normalizers accept filters and char filters but no tokenizer
    _param_defs = {
        "filter": {"type": "token_filter", "multi": True},
        "char_filter": {"type": "char_filter", "multi": True},
    }
|
|
232
|
+
|
|
233
|
+
|
|
234
|
+
class Tokenizer(AnalysisBase, DslBase):
    # Base class for tokenizer components; concrete names set by subclasses.
    _type_name = "tokenizer"
    name = ""
|
|
237
|
+
|
|
238
|
+
|
|
239
|
+
class BuiltinTokenizer(BuiltinAnalysis, Tokenizer):
    # builtin tokenizer referenced by name only
    pass
|
|
241
|
+
|
|
242
|
+
|
|
243
|
+
class CustomTokenizer(CustomAnalysis, Tokenizer):
    # user-defined tokenizer; serialization inherited from CustomAnalysis
    pass
|
|
245
|
+
|
|
246
|
+
|
|
247
|
+
class TokenFilter(AnalysisBase, DslBase):
    # Base class for token filter components; concrete names set by subclasses.
    _type_name = "token_filter"
    name = ""
|
|
250
|
+
|
|
251
|
+
|
|
252
|
+
class BuiltinTokenFilter(BuiltinAnalysis, TokenFilter):
    # builtin token filter referenced by name only
    pass
|
|
254
|
+
|
|
255
|
+
|
|
256
|
+
class CustomTokenFilter(CustomAnalysis, TokenFilter):
    # user-defined token filter; serialization inherited from CustomAnalysis
    pass
|
|
258
|
+
|
|
259
|
+
|
|
260
|
+
class MultiplexerTokenFilter(CustomTokenFilter):
    """Token filter that runs each token through several filter chains."""

    name = "multiplexer"

    def get_definition(self) -> Dict[str, Any]:
        # resolve get_definition starting past CustomTokenFilter in the MRO
        definition = super(CustomTokenFilter, self).get_definition()

        if "filters" in definition:
            serialized = []
            for chain in self.filters:
                if isinstance(chain, str):
                    # comma delimited string given by user
                    serialized.append(chain)
                else:
                    # list of strings or TokenFilter objects
                    serialized.append(
                        ", ".join(
                            f.to_dict() if hasattr(f, "to_dict") else f for f in chain
                        )
                    )
            definition["filters"] = serialized
        return definition

    def get_analysis_definition(self) -> Dict[str, Any]:
        if not hasattr(self, "filters"):
            return {}

        # collect definitions of any custom filters used inside the chains
        inner: Dict[str, Any] = {}
        for chain in self.filters:
            if isinstance(chain, str):
                continue
            for f in chain:
                if hasattr(f, "get_definition"):
                    inner[f._name] = f.get_definition()
        return {"filter": inner}
|
|
297
|
+
|
|
298
|
+
|
|
299
|
+
class ConditionalTokenFilter(CustomTokenFilter):
    """Token filter that applies inner filters only when a script matches."""

    name = "condition"

    def get_definition(self) -> Dict[str, Any]:
        # resolve get_definition starting past CustomTokenFilter in the MRO
        definition = super(CustomTokenFilter, self).get_definition()
        if "filter" in definition:
            serialized = []
            for f in self.filter:
                serialized.append(f.to_dict() if hasattr(f, "to_dict") else f)
            definition["filter"] = serialized
        return definition

    def get_analysis_definition(self) -> Dict[str, Any]:
        if not hasattr(self, "filter"):
            return {}

        # expose definitions of any custom inner filters
        inner: Dict[str, Any] = {}
        for f in self.filter:
            if hasattr(f, "get_definition"):
                inner[f._name] = f.get_definition()
        return {"filter": inner}
|
|
321
|
+
|
|
322
|
+
|
|
323
|
+
class CharFilter(AnalysisBase, DslBase):
    # Base class for char filter components; concrete names set by subclasses.
    _type_name = "char_filter"
    name = ""
|
|
326
|
+
|
|
327
|
+
|
|
328
|
+
class BuiltinCharFilter(BuiltinAnalysis, CharFilter):
    # builtin char filter referenced by name only
    pass
|
|
330
|
+
|
|
331
|
+
|
|
332
|
+
class CustomCharFilter(CustomAnalysis, CharFilter):
    # user-defined char filter; serialization inherited from CustomAnalysis
    pass
|
|
334
|
+
|
|
335
|
+
|
|
336
|
+
# shortcuts for direct use
# Each accepts either a builtin component name or a custom definition and
# returns the matching component instance (see AnalysisBase._type_shortcut).
analyzer = Analyzer._type_shortcut
tokenizer = Tokenizer._type_shortcut
token_filter = TokenFilter._type_shortcut
char_filter = CharFilter._type_shortcut
normalizer = Normalizer._type_shortcut
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
# Licensed to Elasticsearch B.V. under one or more contributor
|
|
2
|
+
# license agreements. See the NOTICE file distributed with
|
|
3
|
+
# this work for additional information regarding copyright
|
|
4
|
+
# ownership. Elasticsearch B.V. licenses this file to you under
|
|
5
|
+
# the Apache License, Version 2.0 (the "License"); you may
|
|
6
|
+
# not use this file except in compliance with the License.
|
|
7
|
+
# You may obtain a copy of the License at
|
|
8
|
+
#
|
|
9
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
10
|
+
#
|
|
11
|
+
# Unless required by applicable law or agreed to in writing,
|
|
12
|
+
# software distributed under the License is distributed on an
|
|
13
|
+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
14
|
+
# KIND, either express or implied. See the License for the
|
|
15
|
+
# specific language governing permissions and limitations
|
|
16
|
+
# under the License.
|
|
17
|
+
|
|
18
|
+
from typing import Type
|
|
19
|
+
|
|
20
|
+
from elasticsearch import AsyncElasticsearch
|
|
21
|
+
|
|
22
|
+
from .connections import Connections
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class AsyncElasticsearchConnections(Connections[AsyncElasticsearch]):
    """Connection registry specialized for ``AsyncElasticsearch`` clients."""

    def __init__(
        self, *, elasticsearch_class: Type[AsyncElasticsearch] = AsyncElasticsearch
    ):
        super().__init__(elasticsearch_class=elasticsearch_class)
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
# Module-level singleton registry plus convenience aliases bound to it, so
# callers can use e.g. async_connections.get_connection(...) directly.
connections = AsyncElasticsearchConnections(elasticsearch_class=AsyncElasticsearch)
configure = connections.configure
add_connection = connections.add_connection
remove_connection = connections.remove_connection
create_connection = connections.create_connection
get_connection = connections.get_connection
|
|
@@ -0,0 +1,142 @@
|
|
|
1
|
+
# Licensed to Elasticsearch B.V. under one or more contributor
|
|
2
|
+
# license agreements. See the NOTICE file distributed with
|
|
3
|
+
# this work for additional information regarding copyright
|
|
4
|
+
# ownership. Elasticsearch B.V. licenses this file to you under
|
|
5
|
+
# the Apache License, Version 2.0 (the "License"); you may
|
|
6
|
+
# not use this file except in compliance with the License.
|
|
7
|
+
# You may obtain a copy of the License at
|
|
8
|
+
#
|
|
9
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
10
|
+
#
|
|
11
|
+
# Unless required by applicable law or agreed to in writing,
|
|
12
|
+
# software distributed under the License is distributed on an
|
|
13
|
+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
14
|
+
# KIND, either express or implied. See the License for the
|
|
15
|
+
# specific language governing permissions and limitations
|
|
16
|
+
# under the License.
|
|
17
|
+
|
|
18
|
+
from typing import Any, Dict, Generic, Type, TypeVar, Union
|
|
19
|
+
|
|
20
|
+
from elasticsearch import Elasticsearch, __versionstr__
|
|
21
|
+
|
|
22
|
+
from .serializer import serializer
|
|
23
|
+
|
|
24
|
+
_T = TypeVar("_T")


class Connections(Generic[_T]):
    """
    Class responsible for holding connections to different clusters. Used as a
    singleton in this module.

    Clients can be registered directly (``add_connection``), or described by
    keyword arguments (``configure``) and constructed lazily on first use.
    """

    def __init__(self, *, elasticsearch_class: Type[_T]):
        self._kwargs: Dict[str, Any] = {}
        self._conns: Dict[str, _T] = {}
        self.elasticsearch_class: Type[_T] = elasticsearch_class

    def configure(self, **kwargs: Any) -> None:
        """
        Configure multiple connections at once, useful for passing in config
        dictionaries obtained from other sources, like Django's settings or a
        configuration management tool.

        Example::

            connections.configure(
                default={'hosts': 'localhost'},
                dev={'hosts': ['esdev1.example.com:9200'], 'sniff_on_start': True},
            )

        Connections will only be constructed lazily when requested through
        ``get_connection``.
        """
        # Drop cached clients whose configuration changed; an alias whose new
        # config equals the old one keeps its client so persistent
        # connections stay alive.
        stale = [
            alias
            for alias in self._conns
            if not (
                alias in self._kwargs
                and kwargs.get(alias, None) == self._kwargs[alias]
            )
        ]
        for alias in stale:
            del self._conns[alias]
        self._kwargs = kwargs

    def add_connection(self, alias: str, conn: _T) -> None:
        """
        Add a connection object, it will be passed through as-is.
        """
        self._conns[alias] = self._with_user_agent(conn)

    def remove_connection(self, alias: str) -> None:
        """
        Remove connection from the registry. Raises ``KeyError`` if connection
        wasn't found.
        """
        removed = False
        for registry in (self._conns, self._kwargs):
            if alias in registry:
                del registry[alias]
                removed = True
        # raise only when the alias was in neither the clients nor the configs
        if not removed:
            raise KeyError(f"There is no connection with alias {alias!r}.")

    def create_connection(self, alias: str = "default", **kwargs: Any) -> _T:
        """
        Construct an instance of ``elasticsearch.Elasticsearch`` and register
        it under given alias.
        """
        kwargs.setdefault("serializer", serializer)
        conn = self.elasticsearch_class(**kwargs)
        self._conns[alias] = conn
        return self._with_user_agent(conn)

    def get_connection(self, alias: Union[str, _T] = "default") -> _T:
        """
        Retrieve a connection, construct it if necessary (only configuration
        was passed to us). If a non-string alias has been passed through we
        assume it's already a client instance and will just return it as-is.

        Raises ``KeyError`` if no client (or its definition) is registered
        under the alias.
        """
        # do not check isinstance(Elasticsearch) so that people can wrap their
        # clients
        if not isinstance(alias, str):
            return self._with_user_agent(alias)

        # connection already established
        if alias in self._conns:
            return self._conns[alias]

        # if not, try to create it from stored kwargs
        try:
            return self.create_connection(alias, **self._kwargs[alias])
        except KeyError:
            # no connection and no kwargs to set one up
            raise KeyError(f"There is no connection with alias {alias!r}.")

    def _with_user_agent(self, conn: _T) -> _T:
        # try to inject our user agent; clients without a _headers attribute
        # are passed through untouched
        if not hasattr(conn, "_headers"):
            return conn
        was_frozen = conn._headers.frozen
        if was_frozen:
            conn._headers = conn._headers.copy()
        conn._headers.update(
            {"user-agent": f"elasticsearch-dsl-py/{__versionstr__}"}
        )
        if was_frozen:
            conn._headers.freeze()
        return conn
|
|
130
|
+
|
|
131
|
+
|
|
132
|
+
class ElasticsearchConnections(Connections[Elasticsearch]):
    """Connection registry specialized for synchronous ``Elasticsearch`` clients."""

    def __init__(self, *, elasticsearch_class: Type[Elasticsearch] = Elasticsearch):
        super().__init__(elasticsearch_class=elasticsearch_class)
|
|
135
|
+
|
|
136
|
+
|
|
137
|
+
# Module-level singleton registry plus convenience aliases bound to it, so
# callers can use e.g. connections.get_connection(...) directly.
connections = ElasticsearchConnections()
configure = connections.configure
add_connection = connections.add_connection
remove_connection = connections.remove_connection
create_connection = connections.create_connection
get_connection = connections.get_connection
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
# Licensed to Elasticsearch B.V. under one or more contributor
|
|
2
|
+
# license agreements. See the NOTICE file distributed with
|
|
3
|
+
# this work for additional information regarding copyright
|
|
4
|
+
# ownership. Elasticsearch B.V. licenses this file to you under
|
|
5
|
+
# the Apache License, Version 2.0 (the "License"); you may
|
|
6
|
+
# not use this file except in compliance with the License.
|
|
7
|
+
# You may obtain a copy of the License at
|
|
8
|
+
#
|
|
9
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
10
|
+
#
|
|
11
|
+
# Unless required by applicable law or agreed to in writing,
|
|
12
|
+
# software distributed under the License is distributed on an
|
|
13
|
+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
14
|
+
# KIND, either express or implied. See the License for the
|
|
15
|
+
# specific language governing permissions and limitations
|
|
16
|
+
# under the License.
|
|
17
|
+
|
|
18
|
+
from ._async.document import AsyncDocument # noqa: F401
|
|
19
|
+
from ._sync.document import Document # noqa: F401
|
|
20
|
+
from .document_base import InnerDoc, MetaField # noqa: F401
|