elasticsearch 9.1.0__py3-none-any.whl → 9.1.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- elasticsearch/_async/client/__init__.py +21 -6
- elasticsearch/_async/client/cat.py +1091 -51
- elasticsearch/_async/client/cluster.py +7 -2
- elasticsearch/_async/client/connector.py +3 -3
- elasticsearch/_async/client/esql.py +20 -6
- elasticsearch/_async/client/indices.py +27 -13
- elasticsearch/_async/client/inference.py +16 -5
- elasticsearch/_async/client/logstash.py +3 -1
- elasticsearch/_async/client/nodes.py +2 -2
- elasticsearch/_async/client/shutdown.py +5 -15
- elasticsearch/_async/client/sql.py +1 -1
- elasticsearch/_async/client/streams.py +186 -0
- elasticsearch/_async/client/transform.py +60 -0
- elasticsearch/_async/client/watcher.py +1 -5
- elasticsearch/_async/helpers.py +58 -9
- elasticsearch/_sync/client/__init__.py +21 -6
- elasticsearch/_sync/client/cat.py +1091 -51
- elasticsearch/_sync/client/cluster.py +7 -2
- elasticsearch/_sync/client/connector.py +3 -3
- elasticsearch/_sync/client/esql.py +20 -6
- elasticsearch/_sync/client/indices.py +27 -13
- elasticsearch/_sync/client/inference.py +16 -5
- elasticsearch/_sync/client/logstash.py +3 -1
- elasticsearch/_sync/client/nodes.py +2 -2
- elasticsearch/_sync/client/shutdown.py +5 -15
- elasticsearch/_sync/client/sql.py +1 -1
- elasticsearch/_sync/client/streams.py +186 -0
- elasticsearch/_sync/client/transform.py +60 -0
- elasticsearch/_sync/client/watcher.py +1 -5
- elasticsearch/_version.py +2 -1
- elasticsearch/client.py +2 -0
- elasticsearch/compat.py +43 -1
- elasticsearch/dsl/__init__.py +28 -0
- elasticsearch/dsl/_async/document.py +84 -0
- elasticsearch/dsl/_sync/document.py +84 -0
- elasticsearch/dsl/aggs.py +97 -0
- elasticsearch/dsl/document_base.py +57 -0
- elasticsearch/dsl/field.py +43 -11
- elasticsearch/dsl/query.py +5 -1
- elasticsearch/dsl/response/__init__.py +3 -0
- elasticsearch/dsl/response/aggs.py +1 -1
- elasticsearch/dsl/types.py +273 -24
- elasticsearch/dsl/utils.py +1 -1
- elasticsearch/esql/__init__.py +2 -1
- elasticsearch/esql/esql.py +85 -34
- elasticsearch/esql/functions.py +37 -25
- elasticsearch/helpers/__init__.py +10 -1
- elasticsearch/helpers/actions.py +106 -33
- {elasticsearch-9.1.0.dist-info → elasticsearch-9.1.2.dist-info}/METADATA +2 -4
- {elasticsearch-9.1.0.dist-info → elasticsearch-9.1.2.dist-info}/RECORD +53 -52
- elasticsearch/esql/esql1.py1 +0 -307
- {elasticsearch-9.1.0.dist-info → elasticsearch-9.1.2.dist-info}/WHEEL +0 -0
- {elasticsearch-9.1.0.dist-info → elasticsearch-9.1.2.dist-info}/licenses/LICENSE +0 -0
- {elasticsearch-9.1.0.dist-info → elasticsearch-9.1.2.dist-info}/licenses/NOTICE +0 -0
elasticsearch/dsl/field.py
CHANGED
@@ -119,9 +119,16 @@ class Field(DslBase):
     def __getitem__(self, subfield: str) -> "Field":
         return cast(Field, self._params.get("fields", {})[subfield])
 
-    def _serialize(self, data: Any) -> Any:
+    def _serialize(self, data: Any, skip_empty: bool) -> Any:
         return data
 
+    def _safe_serialize(self, data: Any, skip_empty: bool) -> Any:
+        try:
+            return self._serialize(data, skip_empty)
+        except TypeError:
+            # older method signature, without skip_empty
+            return self._serialize(data)  # type: ignore[call-arg]
+
     def _deserialize(self, data: Any) -> Any:
         return data
 
@@ -133,10 +140,16 @@ class Field(DslBase):
             return AttrList([])
         return self._empty()
 
-    def serialize(self, data: Any) -> Any:
+    def serialize(self, data: Any, skip_empty: bool = True) -> Any:
         if isinstance(data, (list, AttrList, tuple)):
-            return list(map(self._serialize, cast(Iterable[Any], data)))
-        return self._serialize(data)
+            return list(
+                map(
+                    self._safe_serialize,
+                    cast(Iterable[Any], data),
+                    [skip_empty] * len(data),
+                )
+            )
+        return self._safe_serialize(data, skip_empty)
 
     def deserialize(self, data: Any) -> Any:
         if isinstance(data, (list, AttrList, tuple)):
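
The net effect of these two hunks is that `Field.serialize()` now threads a `skip_empty` flag down to `_serialize()`, while the new `_safe_serialize()` keeps custom fields written against the old one-argument `_serialize()` working. A minimal sketch of that fallback (the `UpperKeyword` field is a made-up custom field, not part of the package):

```python
from elasticsearch.dsl import Keyword


class UpperKeyword(Keyword):
    """Hypothetical custom field still using the pre-9.1.2 signature."""

    def _serialize(self, data):  # old signature: no skip_empty parameter
        return data.upper() if data is not None else None


field = UpperKeyword()

# _safe_serialize() first tries _serialize(data, skip_empty); the TypeError
# raised by the old signature triggers the fallback to _serialize(data).
print(field.serialize("foo"))                         # -> "FOO"
print(field.serialize(["a", "b"], skip_empty=False))  # -> ["A", "B"]
```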
@@ -186,7 +199,7 @@ class RangeField(Field):
         data = {k: self._core_field.deserialize(v) for k, v in data.items()}  # type: ignore[union-attr]
         return Range(data)
 
-    def _serialize(self, data: Any) -> Optional[Dict[str, Any]]:
+    def _serialize(self, data: Any, skip_empty: bool) -> Optional[Dict[str, Any]]:
         if data is None:
             return None
         if not isinstance(data, collections.abc.Mapping):
@@ -550,7 +563,7 @@ class Object(Field):
         return self._wrap(data)
 
     def _serialize(
-        self, data: Optional[Union[Dict[str, Any], "InnerDoc"]]
+        self, data: Optional[Union[Dict[str, Any], "InnerDoc"]], skip_empty: bool
     ) -> Optional[Dict[str, Any]]:
         if data is None:
             return None
@@ -559,7 +572,11 @@ class Object(Field):
         if isinstance(data, collections.abc.Mapping):
             return data
 
-        return data.to_dict()
+        try:
+            return data.to_dict(skip_empty=skip_empty)
+        except TypeError:
+            # this would only happen if an AttrDict was given instead of an InnerDoc
+            return data.to_dict()
 
     def clean(self, data: Any) -> Any:
         data = super().clean(data)
@@ -768,7 +785,7 @@ class Binary(Field):
     def _deserialize(self, data: Any) -> bytes:
         return base64.b64decode(data)
 
-    def _serialize(self, data: Any) -> Optional[str]:
+    def _serialize(self, data: Any, skip_empty: bool) -> Optional[str]:
         if data is None:
             return None
         return base64.b64encode(data).decode()
@@ -2619,7 +2636,7 @@ class Ip(Field):
         # the ipaddress library for pypy only accepts unicode.
         return ipaddress.ip_address(unicode(data))
 
-    def _serialize(self, data: Any) -> Optional[str]:
+    def _serialize(self, data: Any, skip_empty: bool) -> Optional[str]:
         if data is None:
             return None
         return str(data)
@@ -3367,7 +3384,7 @@ class Percolator(Field):
     def _deserialize(self, data: Any) -> "Query":
        return Q(data)  # type: ignore[no-any-return]
 
-    def _serialize(self, data: Any) -> Optional[Dict[str, Any]]:
+    def _serialize(self, data: Any, skip_empty: bool) -> Optional[Dict[str, Any]]:
         if data is None:
             return None
         return data.to_dict()  # type: ignore[no-any-return]
@@ -3849,14 +3866,21 @@ class SemanticText(Field):
         by using the Update mapping API. Use the Create inference API to
         create the endpoint. If not specified, the inference endpoint
         defined by inference_id will be used at both index and query time.
+    :arg index_options: Settings for index_options that override any
+        defaults used by semantic_text, for example specific quantization
+        settings.
     :arg chunking_settings: Settings for chunking text into smaller
         passages. If specified, these will override the chunking settings
        sent in the inference endpoint associated with inference_id. If
        chunking settings are updated, they will not be applied to
        existing documents until they are reindexed.
+    :arg fields:
     """
 
     name = "semantic_text"
+    _param_defs = {
+        "fields": {"type": "field", "hash": True},
+    }
 
     def __init__(
         self,
@@ -3864,9 +3888,13 @@ class SemanticText(Field):
         meta: Union[Mapping[str, str], "DefaultType"] = DEFAULT,
         inference_id: Union[str, "DefaultType"] = DEFAULT,
         search_inference_id: Union[str, "DefaultType"] = DEFAULT,
+        index_options: Union[
+            "types.SemanticTextIndexOptions", Dict[str, Any], "DefaultType"
+        ] = DEFAULT,
         chunking_settings: Union[
-            "types.ChunkingSettings", Dict[str, Any], "DefaultType"
+            "types.ChunkingSettings", None, Dict[str, Any], "DefaultType"
         ] = DEFAULT,
+        fields: Union[Mapping[str, Field], "DefaultType"] = DEFAULT,
         **kwargs: Any,
     ):
         if meta is not DEFAULT:
@@ -3875,8 +3903,12 @@ class SemanticText(Field):
             kwargs["inference_id"] = inference_id
         if search_inference_id is not DEFAULT:
             kwargs["search_inference_id"] = search_inference_id
+        if index_options is not DEFAULT:
+            kwargs["index_options"] = index_options
         if chunking_settings is not DEFAULT:
             kwargs["chunking_settings"] = chunking_settings
+        if fields is not DEFAULT:
+            kwargs["fields"] = fields
         super().__init__(*args, **kwargs)
 
 
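For context, the `SemanticText` additions surface the server-side `index_options` and multi-`fields` mapping options and allow `chunking_settings` to be set to `None`. A minimal mapping sketch (the index name, inference endpoint id, and the exact `index_options` payload are illustrative assumptions, not taken from this diff):

```python
from elasticsearch.dsl import Document, Keyword, SemanticText


class Article(Document):
    title = Keyword()
    body = SemanticText(
        inference_id=".elser-2-elasticsearch",  # assumed inference endpoint id
        # quantization settings are passed through as-is; this shape is illustrative
        index_options={"dense_vector": {"type": "bbq_hnsw"}},
        chunking_settings=None,  # None is now accepted by the type signature
    )

    class Index:
        name = "articles"  # hypothetical index name
```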
elasticsearch/dsl/query.py
CHANGED
@@ -1079,6 +1079,8 @@ class Knn(Query):
         a query_vector_builder or query_vector, but not both.
     :arg num_candidates: The number of nearest neighbor candidates to
         consider per shard
+    :arg visit_percentage: The percentage of vectors to explore per shard
+        while doing knn search with bbq_disk
     :arg k: The final number of nearest neighbors to return as top hits
     :arg filter: Filters for the kNN search query
     :arg similarity: The minimum similarity for a vector to be considered
@@ -1107,6 +1109,7 @@ class Knn(Query):
             "types.QueryVectorBuilder", Dict[str, Any], "DefaultType"
         ] = DEFAULT,
         num_candidates: Union[int, "DefaultType"] = DEFAULT,
+        visit_percentage: Union[float, "DefaultType"] = DEFAULT,
         k: Union[int, "DefaultType"] = DEFAULT,
         filter: Union[Query, Sequence[Query], "DefaultType"] = DEFAULT,
         similarity: Union[float, "DefaultType"] = DEFAULT,
@@ -1122,6 +1125,7 @@ class Knn(Query):
             query_vector=query_vector,
             query_vector_builder=query_vector_builder,
             num_candidates=num_candidates,
+            visit_percentage=visit_percentage,
             k=k,
             filter=filter,
             similarity=similarity,
@@ -1433,7 +1437,7 @@ class MoreLikeThis(Query):
         ] = DEFAULT,
         version: Union[int, "DefaultType"] = DEFAULT,
         version_type: Union[
-            Literal["internal", "external", "external_gte", "force"], "DefaultType"
+            Literal["internal", "external", "external_gte"], "DefaultType"
         ] = DEFAULT,
         boost: Union[float, "DefaultType"] = DEFAULT,
         _name: Union[str, "DefaultType"] = DEFAULT,
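The `Knn` query picks up the new `visit_percentage` option (used with `bbq_disk` indices) alongside the existing `num_candidates`. A minimal sketch of building such a query with the DSL (field name, vector, and numeric values are illustrative):

```python
from elasticsearch.dsl import Search
from elasticsearch.dsl.query import Knn

s = Search(index="articles").query(
    Knn(
        field="embedding",               # hypothetical dense_vector field
        query_vector=[0.12, 0.03, 0.91],
        k=10,
        num_candidates=100,
        visit_percentage=1.5,            # % of vectors to explore per shard
    )
)
print(s.to_dict())
```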
elasticsearch/dsl/response/__init__.py
CHANGED

@@ -233,10 +233,13 @@ AggregateResponseType = Union[
     "types.SimpleValueAggregate",
     "types.DerivativeAggregate",
     "types.BucketMetricValueAggregate",
+    "types.ChangePointAggregate",
     "types.StatsAggregate",
     "types.StatsBucketAggregate",
     "types.ExtendedStatsAggregate",
     "types.ExtendedStatsBucketAggregate",
+    "types.CartesianBoundsAggregate",
+    "types.CartesianCentroidAggregate",
     "types.GeoBoundsAggregate",
     "types.GeoCentroidAggregate",
     "types.HistogramAggregate",