elasticsearch 8.17.1__py3-none-any.whl → 9.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- elasticsearch/__init__.py +2 -2
- elasticsearch/_async/client/__init__.py +2125 -1053
- elasticsearch/_async/client/_base.py +1 -2
- elasticsearch/_async/client/async_search.py +46 -35
- elasticsearch/_async/client/autoscaling.py +32 -26
- elasticsearch/_async/client/cat.py +244 -176
- elasticsearch/_async/client/ccr.py +268 -128
- elasticsearch/_async/client/cluster.py +191 -164
- elasticsearch/_async/client/connector.py +226 -116
- elasticsearch/_async/client/dangling_indices.py +22 -16
- elasticsearch/_async/client/enrich.py +51 -11
- elasticsearch/_async/client/eql.py +54 -13
- elasticsearch/_async/client/esql.py +351 -7
- elasticsearch/_async/client/features.py +37 -27
- elasticsearch/_async/client/fleet.py +32 -22
- elasticsearch/_async/client/graph.py +10 -9
- elasticsearch/_async/client/ilm.py +115 -77
- elasticsearch/_async/client/indices.py +1119 -772
- elasticsearch/_async/client/inference.py +1933 -84
- elasticsearch/_async/client/ingest.py +83 -50
- elasticsearch/_async/client/license.py +90 -38
- elasticsearch/_async/client/logstash.py +20 -9
- elasticsearch/_async/client/migration.py +26 -17
- elasticsearch/_async/client/ml.py +646 -374
- elasticsearch/_async/client/monitoring.py +6 -3
- elasticsearch/_async/client/nodes.py +52 -54
- elasticsearch/_async/client/query_rules.py +59 -33
- elasticsearch/_async/client/rollup.py +124 -86
- elasticsearch/_async/client/search_application.py +60 -32
- elasticsearch/_async/client/searchable_snapshots.py +25 -12
- elasticsearch/_async/client/security.py +903 -562
- elasticsearch/_async/client/shutdown.py +34 -36
- elasticsearch/_async/client/simulate.py +22 -28
- elasticsearch/_async/client/slm.py +65 -40
- elasticsearch/_async/client/snapshot.py +454 -327
- elasticsearch/_async/client/sql.py +43 -22
- elasticsearch/_async/client/ssl.py +17 -18
- elasticsearch/_async/client/synonyms.py +58 -37
- elasticsearch/_async/client/tasks.py +77 -48
- elasticsearch/_async/client/text_structure.py +65 -56
- elasticsearch/_async/client/transform.py +124 -93
- elasticsearch/_async/client/watcher.py +117 -73
- elasticsearch/_async/client/xpack.py +18 -9
- elasticsearch/_async/helpers.py +1 -2
- elasticsearch/_sync/client/__init__.py +2125 -1053
- elasticsearch/_sync/client/_base.py +1 -2
- elasticsearch/_sync/client/async_search.py +46 -35
- elasticsearch/_sync/client/autoscaling.py +32 -26
- elasticsearch/_sync/client/cat.py +244 -176
- elasticsearch/_sync/client/ccr.py +268 -128
- elasticsearch/_sync/client/cluster.py +191 -164
- elasticsearch/_sync/client/connector.py +226 -116
- elasticsearch/_sync/client/dangling_indices.py +22 -16
- elasticsearch/_sync/client/enrich.py +51 -11
- elasticsearch/_sync/client/eql.py +54 -13
- elasticsearch/_sync/client/esql.py +351 -7
- elasticsearch/_sync/client/features.py +37 -27
- elasticsearch/_sync/client/fleet.py +32 -22
- elasticsearch/_sync/client/graph.py +10 -9
- elasticsearch/_sync/client/ilm.py +115 -77
- elasticsearch/_sync/client/indices.py +1119 -772
- elasticsearch/_sync/client/inference.py +1933 -84
- elasticsearch/_sync/client/ingest.py +83 -50
- elasticsearch/_sync/client/license.py +90 -38
- elasticsearch/_sync/client/logstash.py +20 -9
- elasticsearch/_sync/client/migration.py +26 -17
- elasticsearch/_sync/client/ml.py +646 -374
- elasticsearch/_sync/client/monitoring.py +6 -3
- elasticsearch/_sync/client/nodes.py +52 -54
- elasticsearch/_sync/client/query_rules.py +59 -33
- elasticsearch/_sync/client/rollup.py +124 -86
- elasticsearch/_sync/client/search_application.py +60 -32
- elasticsearch/_sync/client/searchable_snapshots.py +25 -12
- elasticsearch/_sync/client/security.py +903 -562
- elasticsearch/_sync/client/shutdown.py +34 -36
- elasticsearch/_sync/client/simulate.py +22 -28
- elasticsearch/_sync/client/slm.py +65 -40
- elasticsearch/_sync/client/snapshot.py +454 -327
- elasticsearch/_sync/client/sql.py +43 -22
- elasticsearch/_sync/client/ssl.py +17 -18
- elasticsearch/_sync/client/synonyms.py +58 -37
- elasticsearch/_sync/client/tasks.py +77 -48
- elasticsearch/_sync/client/text_structure.py +65 -56
- elasticsearch/_sync/client/transform.py +124 -93
- elasticsearch/_sync/client/utils.py +1 -41
- elasticsearch/_sync/client/watcher.py +117 -73
- elasticsearch/_sync/client/xpack.py +18 -9
- elasticsearch/_version.py +1 -1
- elasticsearch/client.py +2 -0
- elasticsearch/dsl/__init__.py +203 -0
- elasticsearch/dsl/_async/__init__.py +16 -0
- elasticsearch/dsl/_async/document.py +522 -0
- elasticsearch/dsl/_async/faceted_search.py +50 -0
- elasticsearch/dsl/_async/index.py +639 -0
- elasticsearch/dsl/_async/mapping.py +49 -0
- elasticsearch/dsl/_async/search.py +237 -0
- elasticsearch/dsl/_async/update_by_query.py +47 -0
- elasticsearch/dsl/_sync/__init__.py +16 -0
- elasticsearch/dsl/_sync/document.py +514 -0
- elasticsearch/dsl/_sync/faceted_search.py +50 -0
- elasticsearch/dsl/_sync/index.py +597 -0
- elasticsearch/dsl/_sync/mapping.py +49 -0
- elasticsearch/dsl/_sync/search.py +230 -0
- elasticsearch/dsl/_sync/update_by_query.py +45 -0
- elasticsearch/dsl/aggs.py +3734 -0
- elasticsearch/dsl/analysis.py +341 -0
- elasticsearch/dsl/async_connections.py +37 -0
- elasticsearch/dsl/connections.py +142 -0
- elasticsearch/dsl/document.py +20 -0
- elasticsearch/dsl/document_base.py +444 -0
- elasticsearch/dsl/exceptions.py +32 -0
- elasticsearch/dsl/faceted_search.py +28 -0
- elasticsearch/dsl/faceted_search_base.py +489 -0
- elasticsearch/dsl/field.py +4392 -0
- elasticsearch/dsl/function.py +180 -0
- elasticsearch/dsl/index.py +23 -0
- elasticsearch/dsl/index_base.py +178 -0
- elasticsearch/dsl/mapping.py +19 -0
- elasticsearch/dsl/mapping_base.py +219 -0
- elasticsearch/dsl/query.py +2822 -0
- elasticsearch/dsl/response/__init__.py +388 -0
- elasticsearch/dsl/response/aggs.py +100 -0
- elasticsearch/dsl/response/hit.py +53 -0
- elasticsearch/dsl/search.py +20 -0
- elasticsearch/dsl/search_base.py +1053 -0
- elasticsearch/dsl/serializer.py +34 -0
- elasticsearch/dsl/types.py +6453 -0
- elasticsearch/dsl/update_by_query.py +19 -0
- elasticsearch/dsl/update_by_query_base.py +149 -0
- elasticsearch/dsl/utils.py +687 -0
- elasticsearch/dsl/wrappers.py +144 -0
- elasticsearch/helpers/actions.py +1 -1
- elasticsearch/helpers/vectorstore/_async/strategies.py +12 -12
- elasticsearch/helpers/vectorstore/_sync/strategies.py +12 -12
- elasticsearch/helpers/vectorstore/_sync/vectorstore.py +4 -1
- {elasticsearch-8.17.1.dist-info → elasticsearch-9.0.0.dist-info}/METADATA +12 -15
- elasticsearch-9.0.0.dist-info/RECORD +160 -0
- elasticsearch/transport.py +0 -57
- elasticsearch-8.17.1.dist-info/RECORD +0 -119
- {elasticsearch-8.17.1.dist-info → elasticsearch-9.0.0.dist-info}/WHEEL +0 -0
- {elasticsearch-8.17.1.dist-info → elasticsearch-9.0.0.dist-info}/licenses/LICENSE +0 -0
- {elasticsearch-8.17.1.dist-info → elasticsearch-9.0.0.dist-info}/licenses/NOTICE +0 -0
|
@@ -0,0 +1,180 @@
|
|
|
1
|
+
# Licensed to Elasticsearch B.V. under one or more contributor
|
|
2
|
+
# license agreements. See the NOTICE file distributed with
|
|
3
|
+
# this work for additional information regarding copyright
|
|
4
|
+
# ownership. Elasticsearch B.V. licenses this file to you under
|
|
5
|
+
# the Apache License, Version 2.0 (the "License"); you may
|
|
6
|
+
# not use this file except in compliance with the License.
|
|
7
|
+
# You may obtain a copy of the License at
|
|
8
|
+
#
|
|
9
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
10
|
+
#
|
|
11
|
+
# Unless required by applicable law or agreed to in writing,
|
|
12
|
+
# software distributed under the License is distributed on an
|
|
13
|
+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
14
|
+
# KIND, either express or implied. See the License for the
|
|
15
|
+
# specific language governing permissions and limitations
|
|
16
|
+
# under the License.
|
|
17
|
+
|
|
18
|
+
import collections.abc
|
|
19
|
+
from copy import deepcopy
|
|
20
|
+
from typing import (
|
|
21
|
+
Any,
|
|
22
|
+
ClassVar,
|
|
23
|
+
Dict,
|
|
24
|
+
Literal,
|
|
25
|
+
MutableMapping,
|
|
26
|
+
Optional,
|
|
27
|
+
Union,
|
|
28
|
+
overload,
|
|
29
|
+
)
|
|
30
|
+
|
|
31
|
+
from elastic_transport.client_utils import DEFAULT, DefaultType
|
|
32
|
+
|
|
33
|
+
from .utils import AttrDict, DslBase
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
@overload
def SF(name_or_sf: MutableMapping[str, Any]) -> "ScoreFunction": ...


@overload
def SF(name_or_sf: "ScoreFunction") -> "ScoreFunction": ...


@overload
def SF(name_or_sf: str, **params: Any) -> "ScoreFunction": ...


def SF(
    name_or_sf: Union[str, "ScoreFunction", MutableMapping[str, Any]],
    **params: Any,
) -> "ScoreFunction":
    """Coerce the argument into a :class:`ScoreFunction` instance.

    Accepts three forms:

    * a raw dict as sent to Elasticsearch (``{"script_score": {...}, "filter": {...}}``),
    * an existing ``ScoreFunction`` instance (returned unchanged),
    * a function name plus keyword parameters.

    :raises ValueError: if extra keyword params are combined with a dict or
        a ``ScoreFunction`` instance, or if the dict contains more than one
        function definition.
    """
    # {"script_score": {"script": "_score"}, "filter": {}}
    if isinstance(name_or_sf, collections.abc.MutableMapping):
        if params:
            raise ValueError("SF() cannot accept parameters when passing in a dict.")

        kwargs: Dict[str, Any] = {}
        sf = deepcopy(name_or_sf)
        # known top-level params (filter/query/weight) live next to the
        # function definition, not inside it
        for k in ScoreFunction._param_defs:
            if k in name_or_sf:
                kwargs[k] = sf.pop(k)

        # not sf, so just filter+weight, which used to be boost factor
        sf_params = params
        if not sf:
            name = "boost_factor"
        # {'FUNCTION': {...}}
        elif len(sf) == 1:
            name, sf_params = sf.popitem()
        else:
            raise ValueError(f"SF() got unexpected fields in the dictionary: {sf!r}")

        # boost factor special case, see elasticsearch #6343
        if not isinstance(sf_params, collections.abc.Mapping):
            sf_params = {"value": sf_params}

        # mix known params (from _param_defs) and from inside the function
        kwargs.update(sf_params)
        return ScoreFunction.get_dsl_class(name)(**kwargs)

    # ScriptScore(script="_score", filter=Q())
    if isinstance(name_or_sf, ScoreFunction):
        if params:
            raise ValueError(
                "SF() cannot accept parameters when passing in a ScoreFunction object."
            )
        return name_or_sf

    # "script_score", script="_score", filter=Q()
    return ScoreFunction.get_dsl_class(name_or_sf)(**params)
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
class ScoreFunction(DslBase):
    """Base class for all function-score functions.

    Subclasses set ``name`` to the key used in the serialized query body.
    The ``filter``, ``query`` and ``weight`` parameters are emitted as
    siblings of the function definition rather than nested inside it.
    """

    _type_name = "score_function"
    _type_shortcut = staticmethod(SF)
    _param_defs = {
        "query": {"type": "query"},
        "filter": {"type": "query"},
        "weight": {},
    }
    name: ClassVar[Optional[str]] = None

    def to_dict(self) -> Dict[str, Any]:
        serialized = super().to_dict()
        if self.name is None:
            return serialized
        # hoist filter/query/weight out of the function body so they sit
        # at the same level as the function itself
        body = serialized[self.name]
        for key in self._param_defs:
            if isinstance(body, dict) and key in body:
                serialized[key] = body.pop(key)
        return serialized
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
class ScriptScore(ScoreFunction):
    """``script_score`` function: compute the score with a custom script."""

    name = "script_score"
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
class BoostFactor(ScoreFunction):
    """``boost_factor`` function.

    Serialized as a bare number when only a ``value`` is present; an empty
    definition is dropped from the output entirely.
    """

    name = "boost_factor"

    def to_dict(self) -> Dict[str, Any]:
        serialized = super().to_dict()
        if self.name is None:
            return serialized
        body = serialized[self.name]
        if not isinstance(body, dict):
            return serialized
        if "value" in body:
            # collapse {"boost_factor": {"value": N}} into {"boost_factor": N}
            serialized[self.name] = body.pop("value")
        else:
            # nothing but hoisted params remained; drop the empty body
            del serialized[self.name]
        return serialized
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
class RandomScore(ScoreFunction):
    """``random_score`` function: assign a reproducible random score."""

    name = "random_score"
|
|
135
|
+
|
|
136
|
+
|
|
137
|
+
class FieldValueFactorScore(ScoreFunction):
    """``field_value_factor`` function: derive the score from a field value."""

    name = "field_value_factor"
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
class FieldValueFactor(FieldValueFactorScore):  # alias of the above
    """Backwards-compatible alias of :class:`FieldValueFactorScore`."""

    pass
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
class Linear(ScoreFunction):
    """``linear`` decay function."""

    name = "linear"
|
|
147
|
+
|
|
148
|
+
|
|
149
|
+
class Gauss(ScoreFunction):
    """``gauss`` decay function."""

    name = "gauss"
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
class Exp(ScoreFunction):
    """``exp`` decay function."""

    name = "exp"
|
|
155
|
+
|
|
156
|
+
|
|
157
|
+
class DecayFunction(AttrDict[Any]):
    """Parameters for the decay functions (``linear``/``gauss``/``exp``).

    Only arguments that were explicitly given (i.e. not left at the
    ``DEFAULT`` sentinel) are stored in the underlying attribute dict.
    """

    def __init__(
        self,
        *,
        decay: Union[float, "DefaultType"] = DEFAULT,
        offset: Any = DEFAULT,
        scale: Any = DEFAULT,
        origin: Any = DEFAULT,
        multi_value_mode: Union[
            Literal["min", "max", "avg", "sum"], "DefaultType"
        ] = DEFAULT,
        **kwargs: Any,
    ):
        # fold every explicitly supplied argument into kwargs, in the
        # same order the parameters are declared
        for key, value in (
            ("decay", decay),
            ("offset", offset),
            ("scale", scale),
            ("origin", origin),
            ("multi_value_mode", multi_value_mode),
        ):
            if value != DEFAULT:
                kwargs[key] = value
        super().__init__(kwargs)
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
# Licensed to Elasticsearch B.V. under one or more contributor
|
|
2
|
+
# license agreements. See the NOTICE file distributed with
|
|
3
|
+
# this work for additional information regarding copyright
|
|
4
|
+
# ownership. Elasticsearch B.V. licenses this file to you under
|
|
5
|
+
# the Apache License, Version 2.0 (the "License"); you may
|
|
6
|
+
# not use this file except in compliance with the License.
|
|
7
|
+
# You may obtain a copy of the License at
|
|
8
|
+
#
|
|
9
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
10
|
+
#
|
|
11
|
+
# Unless required by applicable law or agreed to in writing,
|
|
12
|
+
# software distributed under the License is distributed on an
|
|
13
|
+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
14
|
+
# KIND, either express or implied. See the License for the
|
|
15
|
+
# specific language governing permissions and limitations
|
|
16
|
+
# under the License.
|
|
17
|
+
|
|
18
|
+
from ._async.index import ( # noqa: F401
|
|
19
|
+
AsyncComposableIndexTemplate,
|
|
20
|
+
AsyncIndex,
|
|
21
|
+
AsyncIndexTemplate,
|
|
22
|
+
)
|
|
23
|
+
from ._sync.index import ComposableIndexTemplate, Index, IndexTemplate # noqa: F401
|
|
@@ -0,0 +1,178 @@
|
|
|
1
|
+
# Licensed to Elasticsearch B.V. under one or more contributor
|
|
2
|
+
# license agreements. See the NOTICE file distributed with
|
|
3
|
+
# this work for additional information regarding copyright
|
|
4
|
+
# ownership. Elasticsearch B.V. licenses this file to you under
|
|
5
|
+
# the Apache License, Version 2.0 (the "License"); you may
|
|
6
|
+
# not use this file except in compliance with the License.
|
|
7
|
+
# You may obtain a copy of the License at
|
|
8
|
+
#
|
|
9
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
10
|
+
#
|
|
11
|
+
# Unless required by applicable law or agreed to in writing,
|
|
12
|
+
# software distributed under the License is distributed on an
|
|
13
|
+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
14
|
+
# KIND, either express or implied. See the License for the
|
|
15
|
+
# specific language governing permissions and limitations
|
|
16
|
+
# under the License.
|
|
17
|
+
|
|
18
|
+
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple
|
|
19
|
+
|
|
20
|
+
from typing_extensions import Self
|
|
21
|
+
|
|
22
|
+
from . import analysis
|
|
23
|
+
from .utils import AnyUsingType, merge
|
|
24
|
+
|
|
25
|
+
if TYPE_CHECKING:
|
|
26
|
+
from .document_base import DocumentMeta
|
|
27
|
+
from .field import Field
|
|
28
|
+
from .mapping_base import MappingBase
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class IndexBase:
    """Shared core of the sync and async ``Index`` classes.

    Accumulates settings, aliases, explicit analyzers, registered
    ``Document`` classes and an optional mapping, and serializes the
    combined index definition via :meth:`to_dict`.
    """

    def __init__(self, name: str, mapping_class: type, using: AnyUsingType = "default"):
        """
        :arg name: name of the index
        :arg using: connection alias to use, defaults to ``'default'``
        """
        self._name = name
        # Document classes registered through .document(); their mappings
        # are merged into the index definition in to_dict().
        self._doc_types: List["DocumentMeta"] = []
        self._using = using
        self._settings: Dict[str, Any] = {}
        self._aliases: Dict[str, Any] = {}
        # analysis added explicitly through .analyzer(), merged last in to_dict()
        self._analysis: Dict[str, Any] = {}
        self._mapping_class = mapping_class
        # created lazily by get_or_create_mapping()
        self._mapping: Optional["MappingBase"] = None

    def resolve_nested(
        self, field_path: str
    ) -> Tuple[List[str], Optional["MappingBase"]]:
        # Try the mappings of registered documents first, then our own.
        for doc in self._doc_types:
            nested, field = doc._doc_type.mapping.resolve_nested(field_path)
            if field is not None:
                return nested, field
        if self._mapping:
            return self._mapping.resolve_nested(field_path)
        return [], None

    def resolve_field(self, field_path: str) -> Optional["Field"]:
        # Same lookup order as resolve_nested: documents, then own mapping.
        for doc in self._doc_types:
            field = doc._doc_type.mapping.resolve_field(field_path)
            if field is not None:
                return field
        if self._mapping:
            return self._mapping.resolve_field(field_path)
        return None

    def get_or_create_mapping(self) -> "MappingBase":
        # Lazily instantiate the mapping on first use.
        if self._mapping is None:
            self._mapping = self._mapping_class()
        return self._mapping

    def mapping(self, mapping: "MappingBase") -> None:
        """
        Associate a mapping (an instance of
        :class:`~elasticsearch.dsl.Mapping`) with this index.
        This means that, when this index is created, it will contain the
        mappings for the document type defined by those mappings.
        """
        self.get_or_create_mapping().update(mapping)

    def document(self, document: "DocumentMeta") -> "DocumentMeta":
        """
        Associate a :class:`~elasticsearch.dsl.Document` subclass with an index.
        This means that, when this index is created, it will contain the
        mappings for the ``Document``. If the ``Document`` class doesn't have a
        default index yet (by defining ``class Index``), this instance will be
        used. Can be used as a decorator::

            i = Index('blog')

            @i.document
            class Post(Document):
                title = Text()

            # create the index, including Post mappings
            i.create()

            # .search() will now return a Search object that will return
            # properly deserialized Post instances
            s = i.search()
        """
        self._doc_types.append(document)

        # If the document index does not have any name, that means the user
        # did not set any index already to the document.
        # So set this index as document index
        if document._index._name is None:
            document._index = self

        return document

    def settings(self, **kwargs: Any) -> Self:
        """
        Add settings to the index::

            i = Index('i')
            i.settings(number_of_shards=1, number_of_replicas=0)

        Multiple calls to ``settings`` will merge the keys, later overriding
        the earlier.
        """
        self._settings.update(kwargs)
        return self

    def aliases(self, **kwargs: Any) -> Self:
        """
        Add aliases to the index definition::

            i = Index('blog-v2')
            i.aliases(blog={}, published={'filter': Q('term', published=True)})
        """
        self._aliases.update(kwargs)
        return self

    def analyzer(self, *args: Any, **kwargs: Any) -> None:
        """
        Explicitly add an analyzer to an index. Note that all custom analyzers
        defined in mappings will also be created. This is useful for search analyzers.

        Example::

            from elasticsearch.dsl import analyzer, tokenizer

            my_analyzer = analyzer('my_analyzer',
                tokenizer=tokenizer('trigram', 'nGram', min_gram=3, max_gram=3),
                filter=['lowercase']
            )

            i = Index('blog')
            i.analyzer(my_analyzer)

        """
        analyzer = analysis.analyzer(*args, **kwargs)
        d = analyzer.get_analysis_definition()
        # empty custom analyzer, probably already defined out of our control
        if not d:
            return

        # merge the definition
        merge(self._analysis, d, True)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the full index definition (settings, aliases, mappings)."""
        out = {}
        if self._settings:
            out["settings"] = self._settings
        if self._aliases:
            out["aliases"] = self._aliases
        mappings = self._mapping.to_dict() if self._mapping else {}
        analysis = self._mapping._collect_analysis() if self._mapping else {}
        # fold in the mappings/analysis of every registered document class
        for d in self._doc_types:
            mapping = d._doc_type.mapping
            merge(mappings, mapping.to_dict(), True)
            merge(analysis, mapping._collect_analysis(), True)
        if mappings:
            out["mappings"] = mappings
        if analysis or self._analysis:
            merge(analysis, self._analysis)
            # NOTE(review): out["settings"] may alias self._settings (assigned
            # above without a copy), so this also stores the merged analysis
            # on self._settings — confirm this side effect is intended.
            out.setdefault("settings", {})["analysis"] = analysis
        return out
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
# Licensed to Elasticsearch B.V. under one or more contributor
|
|
2
|
+
# license agreements. See the NOTICE file distributed with
|
|
3
|
+
# this work for additional information regarding copyright
|
|
4
|
+
# ownership. Elasticsearch B.V. licenses this file to you under
|
|
5
|
+
# the Apache License, Version 2.0 (the "License"); you may
|
|
6
|
+
# not use this file except in compliance with the License.
|
|
7
|
+
# You may obtain a copy of the License at
|
|
8
|
+
#
|
|
9
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
10
|
+
#
|
|
11
|
+
# Unless required by applicable law or agreed to in writing,
|
|
12
|
+
# software distributed under the License is distributed on an
|
|
13
|
+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
14
|
+
# KIND, either express or implied. See the License for the
|
|
15
|
+
# specific language governing permissions and limitations
|
|
16
|
+
# under the License.
|
|
17
|
+
|
|
18
|
+
from ._async.mapping import AsyncMapping # noqa: F401
|
|
19
|
+
from ._sync.mapping import Mapping # noqa: F401
|
|
@@ -0,0 +1,219 @@
|
|
|
1
|
+
# Licensed to Elasticsearch B.V. under one or more contributor
|
|
2
|
+
# license agreements. See the NOTICE file distributed with
|
|
3
|
+
# this work for additional information regarding copyright
|
|
4
|
+
# ownership. Elasticsearch B.V. licenses this file to you under
|
|
5
|
+
# the Apache License, Version 2.0 (the "License"); you may
|
|
6
|
+
# not use this file except in compliance with the License.
|
|
7
|
+
# You may obtain a copy of the License at
|
|
8
|
+
#
|
|
9
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
10
|
+
#
|
|
11
|
+
# Unless required by applicable law or agreed to in writing,
|
|
12
|
+
# software distributed under the License is distributed on an
|
|
13
|
+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
14
|
+
# KIND, either express or implied. See the License for the
|
|
15
|
+
# specific language governing permissions and limitations
|
|
16
|
+
# under the License.
|
|
17
|
+
|
|
18
|
+
import collections.abc
|
|
19
|
+
from itertools import chain
|
|
20
|
+
from typing import Any, Dict, Iterator, List, Optional, Tuple, cast
|
|
21
|
+
|
|
22
|
+
from typing_extensions import Self
|
|
23
|
+
|
|
24
|
+
from .field import Field, Nested, Text, construct_field
|
|
25
|
+
from .utils import DslBase
|
|
26
|
+
|
|
27
|
+
META_FIELDS = frozenset(
|
|
28
|
+
(
|
|
29
|
+
"dynamic",
|
|
30
|
+
"transform",
|
|
31
|
+
"dynamic_date_formats",
|
|
32
|
+
"date_detection",
|
|
33
|
+
"numeric_detection",
|
|
34
|
+
"dynamic_templates",
|
|
35
|
+
"enabled",
|
|
36
|
+
)
|
|
37
|
+
)
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
class Properties(DslBase):
    """Container for the fields of a mapping, keyed by field name."""

    name = "properties"
    _param_defs = {"properties": {"type": "field", "hash": True}}

    properties: Dict[str, Field]

    def __init__(self) -> None:
        super().__init__()

    def __repr__(self) -> str:
        return "Properties()"

    def __getitem__(self, name: str) -> Field:
        return self.properties[name]

    def __contains__(self, name: str) -> bool:
        return name in self.properties

    def to_dict(self) -> Dict[str, Any]:
        # DslBase serializes as {"properties": {...}}; unwrap the outer key
        return cast(Dict[str, Field], super().to_dict()["properties"])

    def field(self, name: str, *args: Any, **kwargs: Any) -> Self:
        """Register a field under *name*; returns self for chaining."""
        self.properties[name] = construct_field(*args, **kwargs)
        return self

    def _collect_fields(self) -> Iterator[Field]:
        """Iterate over all Field objects within, including multi fields."""
        all_fields = cast(Dict[str, Field], self.properties.to_dict())  # type: ignore[attr-defined]
        for fld in all_fields.values():
            yield fld
            # multi fields
            if hasattr(fld, "fields"):
                yield from fld.fields.to_dict().values()
            # nested and inner objects
            if hasattr(fld, "_collect_fields"):
                yield from fld._collect_fields()

    def update(self, other_object: Any) -> None:
        """Merge the fields of another inner/nested object into this one."""
        if not hasattr(other_object, "properties"):
            # not an inner/nested object, no merge possible
            return

        ours, theirs = self.properties, other_object.properties
        for field_name in theirs:
            if field_name not in ours:
                ours[field_name] = theirs[field_name]
            elif hasattr(ours[field_name], "update"):
                # recurse into mergeable (inner/nested) fields; plain
                # fields we already have are left untouched
                ours[field_name].update(theirs[field_name])
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
class MappingBase:
    """Shared core of the sync and async ``Mapping`` classes.

    Holds a :class:`Properties` container with the fields plus a ``_meta``
    dict with mapping-level metadata (``dynamic``, ``_all``, ...).
    """

    def __init__(self) -> None:
        self.properties = Properties()
        self._meta: Dict[str, Any] = {}

    def __repr__(self) -> str:
        return "Mapping()"

    def _clone(self) -> Self:
        # Shallow-copy the field params into a fresh mapping instance.
        m = self.__class__()
        m.properties._params = self.properties._params.copy()
        return m

    def resolve_nested(
        self, field_path: str
    ) -> Tuple[List[str], Optional["MappingBase"]]:
        # Walk the dotted path, recording the path of every Nested field
        # crossed along the way.
        # NOTE(review): the resolved object is a Field after the first step,
        # although the annotation says MappingBase — confirm against callers.
        field = self
        nested = []
        parts = field_path.split(".")
        for i, step in enumerate(parts):
            try:
                field = field[step]  # type: ignore[assignment]
            except KeyError:
                return [], None
            if isinstance(field, Nested):
                nested.append(".".join(parts[: i + 1]))
        return nested, field

    def resolve_field(self, field_path: str) -> Optional[Field]:
        # Walk the dotted path; None if any step is missing.
        field = self
        for step in field_path.split("."):
            try:
                field = field[step]  # type: ignore[assignment]
            except KeyError:
                return None
        return cast(Field, field)

    def _collect_analysis(self) -> Dict[str, Any]:
        """Gather analyzer/normalizer definitions from all fields."""
        analysis: Dict[str, Any] = {}
        fields = []
        # legacy _all meta carries analyzer settings like a Text field
        if "_all" in self._meta:
            fields.append(Text(**self._meta["_all"]))

        for f in chain(fields, self.properties._collect_fields()):
            for analyzer_name in (
                "analyzer",
                "normalizer",
                "search_analyzer",
                "search_quote_analyzer",
            ):
                if not hasattr(f, analyzer_name):
                    continue
                analyzer = getattr(f, analyzer_name)
                d = analyzer.get_analysis_definition()
                # empty custom analyzer, probably already defined out of our control
                if not d:
                    continue

                # merge the definition
                # TODO: conflict detection/resolution
                for key in d:
                    analysis.setdefault(key, {}).update(d[key])

        return analysis

    def _update_from_dict(self, raw: Dict[str, Any]) -> None:
        # Build fields from a raw mapping dict (e.g. fetched from the cluster).
        for name, definition in raw.get("properties", {}).items():
            self.field(name, definition)

        # metadata like _all etc
        for name, value in raw.items():
            if name != "properties":
                if isinstance(value, collections.abc.Mapping):
                    self.meta(name, **value)
                else:
                    self.meta(name, value)

    def update(self, mapping: "MappingBase", update_only: bool = False) -> None:
        """Merge another mapping into this one.

        With ``update_only=True``, existing fields and meta keys take
        precedence over the incoming ones.
        """
        for name in mapping:
            if update_only and name in self:
                # nested and inner objects, merge recursively
                if hasattr(self[name], "update"):
                    # FIXME only merge subfields, not the settings
                    self[name].update(mapping[name], update_only)
                continue
            self.field(name, mapping[name])

        if update_only:
            for name in mapping._meta:
                if name not in self._meta:
                    self._meta[name] = mapping._meta[name]
        else:
            self._meta.update(mapping._meta)

    def __contains__(self, name: str) -> bool:
        return name in self.properties.properties

    def __getitem__(self, name: str) -> Field:
        return self.properties.properties[name]

    def __iter__(self) -> Iterator[str]:
        return iter(self.properties.properties)

    def field(self, *args: Any, **kwargs: Any) -> Self:
        # Delegate to Properties.field; returns self for chaining.
        self.properties.field(*args, **kwargs)
        return self

    def meta(self, name: str, params: Any = None, **kwargs: Any) -> Self:
        """Set a mapping meta entry, either as a scalar or keyword params."""
        # normalize e.g. "all" -> "_all", but leave known meta fields alone
        if not name.startswith("_") and name not in META_FIELDS:
            name = "_" + name

        if params and kwargs:
            raise ValueError("Meta configs cannot have both value and a dictionary.")

        self._meta[name] = kwargs if params is None else params
        return self

    def to_dict(self) -> Dict[str, Any]:
        meta = self._meta

        # hard coded serialization of analyzers in _all
        if "_all" in meta:
            meta = meta.copy()
            _all = meta["_all"] = meta["_all"].copy()
            for f in ("analyzer", "search_analyzer", "search_quote_analyzer"):
                if hasattr(_all.get(f, None), "to_dict"):
                    _all[f] = _all[f].to_dict()
        # NOTE(review): when "_all" is absent, ``meta`` still aliases
        # self._meta, so the update below also stores the serialized
        # "properties" on self._meta — confirm this side effect is intended.
        meta.update(self.properties.to_dict())
        return meta
|