elasticsearch 9.0.3__py3-none-any.whl → 9.0.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. elasticsearch/_async/client/__init__.py +19 -6
  2. elasticsearch/_async/client/cat.py +610 -26
  3. elasticsearch/_async/client/esql.py +16 -6
  4. elasticsearch/_async/client/indices.py +2 -2
  5. elasticsearch/_async/client/logstash.py +3 -1
  6. elasticsearch/_async/client/sql.py +1 -1
  7. elasticsearch/_async/client/transform.py +60 -0
  8. elasticsearch/_sync/client/__init__.py +19 -6
  9. elasticsearch/_sync/client/cat.py +610 -26
  10. elasticsearch/_sync/client/esql.py +16 -6
  11. elasticsearch/_sync/client/indices.py +2 -2
  12. elasticsearch/_sync/client/logstash.py +3 -1
  13. elasticsearch/_sync/client/sql.py +1 -1
  14. elasticsearch/_sync/client/transform.py +60 -0
  15. elasticsearch/_version.py +1 -1
  16. elasticsearch/dsl/__init__.py +28 -0
  17. elasticsearch/dsl/_async/document.py +84 -0
  18. elasticsearch/dsl/_sync/document.py +84 -0
  19. elasticsearch/dsl/aggs.py +97 -0
  20. elasticsearch/dsl/document_base.py +43 -0
  21. elasticsearch/dsl/field.py +27 -10
  22. elasticsearch/dsl/response/__init__.py +3 -0
  23. elasticsearch/dsl/response/aggs.py +1 -1
  24. elasticsearch/dsl/types.py +203 -13
  25. elasticsearch/dsl/utils.py +1 -1
  26. elasticsearch/esql/__init__.py +2 -1
  27. elasticsearch/esql/esql.py +85 -34
  28. elasticsearch/esql/functions.py +37 -25
  29. {elasticsearch-9.0.3.dist-info → elasticsearch-9.0.5.dist-info}/METADATA +1 -3
  30. {elasticsearch-9.0.3.dist-info → elasticsearch-9.0.5.dist-info}/RECORD +33 -33
  31. {elasticsearch-9.0.3.dist-info → elasticsearch-9.0.5.dist-info}/WHEEL +1 -1
  32. {elasticsearch-9.0.3.dist-info → elasticsearch-9.0.5.dist-info}/licenses/LICENSE +0 -0
  33. {elasticsearch-9.0.3.dist-info → elasticsearch-9.0.5.dist-info}/licenses/NOTICE +0 -0
elasticsearch/dsl/document_base.py

@@ -28,6 +28,7 @@ from typing import (
     List,
     Optional,
     Tuple,
+    Type,
     TypeVar,
     Union,
     get_args,
@@ -49,6 +50,7 @@ from .utils import DOC_META_FIELDS, ObjectBase
 if TYPE_CHECKING:
     from elastic_transport import ObjectApiResponse

+    from ..esql.esql import ESQLBase
     from .index_base import IndexBase


@@ -602,3 +604,44 @@ class DocumentBase(ObjectBase):

         meta["_source"] = d
         return meta
+
+    @classmethod
+    def _get_field_names(
+        cls, for_esql: bool = False, nested_class: Optional[Type[InnerDoc]] = None
+    ) -> List[str]:
+        """Return the list of field names used by this document.
+        If the document has nested objects, their fields are reported using dot
+        notation. If the ``for_esql`` argument is set to ``True``, the list omits
+        nested fields, which are currently unsupported in ES|QL.
+        """
+        fields = []
+        class_ = nested_class or cls
+        for field_name in class_._doc_type.mapping:
+            field = class_._doc_type.mapping[field_name]
+            if isinstance(field, Object):
+                if for_esql and isinstance(field, Nested):
+                    # ES|QL does not recognize Nested fields at this time
+                    continue
+                sub_fields = cls._get_field_names(
+                    for_esql=for_esql, nested_class=field._doc_class
+                )
+                for sub_field in sub_fields:
+                    fields.append(f"{field_name}.{sub_field}")
+            else:
+                fields.append(field_name)
+        return fields
+
+    @classmethod
+    def esql_from(cls) -> "ESQLBase":
+        """Return a base ES|QL query for instances of this document class.
+
+        The returned query is initialized with ``FROM`` and ``KEEP`` statements,
+        and can be completed as desired.
+        """
+        from ..esql import ESQL  # here to avoid circular imports
+
+        return (
+            ESQL.from_(cls)
+            .metadata("_id")
+            .keep("_id", *tuple(cls._get_field_names(for_esql=True)))
+        )
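The new `esql_from()` class method above builds a starting ES|QL query (`FROM ... METADATA _id | KEEP ...`) from a mapped document class. A minimal usage sketch, assuming a hypothetical `Employee` document mapped to an `employees` index (rendering and execution of the query are omitted):

    from elasticsearch.dsl import Document, Integer, Text

    class Employee(Document):
        name = Text()
        salary = Integer()

        class Index:
            name = "employees"

    # Roughly: FROM employees METADATA _id | KEEP _id, name, salary
    query = Employee.esql_from()
    # The returned ESQLBase object can be extended with further pipe commands,
    # e.g. query.limit(10), before being sent to the ES|QL endpoint.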
elasticsearch/dsl/field.py

@@ -119,9 +119,16 @@ class Field(DslBase):
     def __getitem__(self, subfield: str) -> "Field":
         return cast(Field, self._params.get("fields", {})[subfield])

-    def _serialize(self, data: Any) -> Any:
+    def _serialize(self, data: Any, skip_empty: bool) -> Any:
         return data

+    def _safe_serialize(self, data: Any, skip_empty: bool) -> Any:
+        try:
+            return self._serialize(data, skip_empty)
+        except TypeError:
+            # older method signature, without skip_empty
+            return self._serialize(data)  # type: ignore[call-arg]
+
     def _deserialize(self, data: Any) -> Any:
         return data

@@ -133,10 +140,16 @@ class Field(DslBase):
             return AttrList([])
         return self._empty()

-    def serialize(self, data: Any) -> Any:
+    def serialize(self, data: Any, skip_empty: bool = True) -> Any:
         if isinstance(data, (list, AttrList, tuple)):
-            return list(map(self._serialize, cast(Iterable[Any], data)))
-        return self._serialize(data)
+            return list(
+                map(
+                    self._safe_serialize,
+                    cast(Iterable[Any], data),
+                    [skip_empty] * len(data),
+                )
+            )
+        return self._safe_serialize(data, skip_empty)

     def deserialize(self, data: Any) -> Any:
         if isinstance(data, (list, AttrList, tuple)):
@@ -186,7 +199,7 @@ class RangeField(Field):
         data = {k: self._core_field.deserialize(v) for k, v in data.items()}  # type: ignore[union-attr]
         return Range(data)

-    def _serialize(self, data: Any) -> Optional[Dict[str, Any]]:
+    def _serialize(self, data: Any, skip_empty: bool) -> Optional[Dict[str, Any]]:
         if data is None:
             return None
         if not isinstance(data, collections.abc.Mapping):
@@ -550,7 +563,7 @@ class Object(Field):
         return self._wrap(data)

     def _serialize(
-        self, data: Optional[Union[Dict[str, Any], "InnerDoc"]]
+        self, data: Optional[Union[Dict[str, Any], "InnerDoc"]], skip_empty: bool
     ) -> Optional[Dict[str, Any]]:
         if data is None:
             return None
@@ -559,7 +572,11 @@
         if isinstance(data, collections.abc.Mapping):
             return data

-        return data.to_dict()
+        try:
+            return data.to_dict(skip_empty=skip_empty)
+        except TypeError:
+            # this would only happen if an AttrDict was given instead of an InnerDoc
+            return data.to_dict()

     def clean(self, data: Any) -> Any:
         data = super().clean(data)
@@ -768,7 +785,7 @@ class Binary(Field):
     def _deserialize(self, data: Any) -> bytes:
         return base64.b64decode(data)

-    def _serialize(self, data: Any) -> Optional[str]:
+    def _serialize(self, data: Any, skip_empty: bool) -> Optional[str]:
         if data is None:
             return None
         return base64.b64encode(data).decode()
@@ -2619,7 +2636,7 @@ class Ip(Field):
         # the ipaddress library for pypy only accepts unicode.
         return ipaddress.ip_address(unicode(data))

-    def _serialize(self, data: Any) -> Optional[str]:
+    def _serialize(self, data: Any, skip_empty: bool) -> Optional[str]:
         if data is None:
             return None
         return str(data)
@@ -3367,7 +3384,7 @@ class Percolator(Field):
     def _deserialize(self, data: Any) -> "Query":
         return Q(data)  # type: ignore[no-any-return]

-    def _serialize(self, data: Any) -> Optional[Dict[str, Any]]:
+    def _serialize(self, data: Any, skip_empty: bool) -> Optional[Dict[str, Any]]:
         if data is None:
             return None
         return data.to_dict()  # type: ignore[no-any-return]
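All of the `_serialize()` overrides above gained a `skip_empty` parameter, while `Field._safe_serialize()` keeps third-party fields written against the old one-argument signature working by retrying without `skip_empty` on `TypeError`. A small sketch, assuming a hypothetical `LowercaseKeyword` custom field:

    from elasticsearch.dsl import Keyword

    class LowercaseKeyword(Keyword):
        # Pre-9.0.5 style override without the skip_empty parameter.
        def _serialize(self, data):
            return data.lower() if isinstance(data, str) else data

    field = LowercaseKeyword()
    field.serialize("HELLO")                       # falls back via TypeError, returns "hello"
    field.serialize(["A", "B"], skip_empty=False)  # list items use the same fallback, returns ["a", "b"]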
elasticsearch/dsl/response/__init__.py

@@ -233,10 +233,13 @@ AggregateResponseType = Union[
     "types.SimpleValueAggregate",
     "types.DerivativeAggregate",
     "types.BucketMetricValueAggregate",
+    "types.ChangePointAggregate",
     "types.StatsAggregate",
     "types.StatsBucketAggregate",
     "types.ExtendedStatsAggregate",
     "types.ExtendedStatsBucketAggregate",
+    "types.CartesianBoundsAggregate",
+    "types.CartesianCentroidAggregate",
     "types.GeoBoundsAggregate",
     "types.GeoCentroidAggregate",
     "types.HistogramAggregate",
elasticsearch/dsl/response/aggs.py

@@ -63,7 +63,7 @@ class BucketData(AggResponse[_R]):
         )

     def __iter__(self) -> Iterator["Agg"]:  # type: ignore[override]
-        return iter(self.buckets)  # type: ignore[arg-type]
+        return iter(self.buckets)

     def __len__(self) -> int:
         return len(self.buckets)
elasticsearch/dsl/types.py

@@ -2673,6 +2673,31 @@ class NumericFielddata(AttrDict[Any]):
         super().__init__(kwargs)


+class PValueHeuristic(AttrDict[Any]):
+    """
+    :arg background_is_superset:
+    :arg normalize_above: Should the results be normalized when above the
+        given value. Allows for consistent significance results at various
+        scales. Note: `0` is a special value which means no normalization
+    """
+
+    background_is_superset: Union[bool, DefaultType]
+    normalize_above: Union[int, DefaultType]
+
+    def __init__(
+        self,
+        *,
+        background_is_superset: Union[bool, DefaultType] = DEFAULT,
+        normalize_above: Union[int, DefaultType] = DEFAULT,
+        **kwargs: Any,
+    ):
+        if background_is_superset is not DEFAULT:
+            kwargs["background_is_superset"] = background_is_superset
+        if normalize_above is not DEFAULT:
+            kwargs["normalize_above"] = normalize_above
+        super().__init__(kwargs)
+
+
 class PercentageScoreHeuristic(AttrDict[Any]):
     pass

@@ -3853,24 +3878,25 @@ class TestPopulation(AttrDict[Any]):

 class TextEmbedding(AttrDict[Any]):
     """
-    :arg model_id: (required)
     :arg model_text: (required)
+    :arg model_id: Model ID is required for all dense_vector fields but
+        may be inferred for semantic_text fields
     """

-    model_id: Union[str, DefaultType]
     model_text: Union[str, DefaultType]
+    model_id: Union[str, DefaultType]

     def __init__(
         self,
         *,
-        model_id: Union[str, DefaultType] = DEFAULT,
         model_text: Union[str, DefaultType] = DEFAULT,
+        model_id: Union[str, DefaultType] = DEFAULT,
         **kwargs: Any,
     ):
-        if model_id is not DEFAULT:
-            kwargs["model_id"] = model_id
         if model_text is not DEFAULT:
             kwargs["model_text"] = model_text
+        if model_id is not DEFAULT:
+            kwargs["model_id"] = model_id
         super().__init__(kwargs)


@@ -4390,7 +4416,7 @@ class ArrayPercentilesItem(AttrDict[Any]):
     :arg value_as_string:
     """

-    key: str
+    key: float
     value: Union[float, None]
     value_as_string: str

@@ -4501,6 +4527,82 @@ class CardinalityAggregate(AttrDict[Any]):
     meta: Mapping[str, Any]


+class CartesianBoundsAggregate(AttrDict[Any]):
+    """
+    :arg bounds:
+    :arg meta:
+    """
+
+    bounds: "TopLeftBottomRightGeoBounds"
+    meta: Mapping[str, Any]
+
+
+class CartesianCentroidAggregate(AttrDict[Any]):
+    """
+    :arg count: (required)
+    :arg location:
+    :arg meta:
+    """
+
+    count: int
+    location: "CartesianPoint"
+    meta: Mapping[str, Any]
+
+
+class CartesianPoint(AttrDict[Any]):
+    """
+    :arg x: (required)
+    :arg y: (required)
+    """
+
+    x: float
+    y: float
+
+
+class ChangePointAggregate(AttrDict[Any]):
+    """
+    :arg type: (required)
+    :arg bucket:
+    :arg meta:
+    """
+
+    type: "ChangeType"
+    bucket: "ChangePointBucket"
+    meta: Mapping[str, Any]
+
+
+class ChangePointBucket(AttrDict[Any]):
+    """
+    :arg key: (required)
+    :arg doc_count: (required)
+    """
+
+    key: Union[int, float, str, bool, None]
+    doc_count: int
+
+
+class ChangeType(AttrDict[Any]):
+    """
+    :arg dip:
+    :arg distribution_change:
+    :arg indeterminable:
+    :arg non_stationary:
+    :arg spike:
+    :arg stationary:
+    :arg step_change:
+    :arg trend_change:
+    """
+
+    dip: "Dip"
+    distribution_change: "DistributionChange"
+    indeterminable: "Indeterminable"
+    non_stationary: "NonStationary"
+    spike: "Spike"
+    stationary: "Stationary"
+    step_change: "StepChange"
+    trend_change: "TrendChange"
+
+
 class ChildrenAggregate(AttrDict[Any]):
     """
     :arg doc_count: (required)
@@ -4778,6 +4880,26 @@ class DfsStatisticsProfile(AttrDict[Any]):
     children: Sequence["DfsStatisticsProfile"]


+class Dip(AttrDict[Any]):
+    """
+    :arg p_value: (required)
+    :arg change_point: (required)
+    """
+
+    p_value: float
+    change_point: int
+
+
+class DistributionChange(AttrDict[Any]):
+    """
+    :arg p_value: (required)
+    :arg change_point: (required)
+    """
+
+    p_value: float
+    change_point: int
+
+
 class DoubleTermsAggregate(AttrDict[Any]):
     """
     Result of a `terms` aggregation when the field is some kind of decimal
@@ -5236,7 +5358,9 @@ class HdrPercentileRanksAggregate(AttrDict[Any]):
     :arg meta:
     """

-    values: Union[Mapping[str, Union[str, int, None]], Sequence["ArrayPercentilesItem"]]
+    values: Union[
+        Mapping[str, Union[str, float, None]], Sequence["ArrayPercentilesItem"]
+    ]
     meta: Mapping[str, Any]


@@ -5246,7 +5370,9 @@ class HdrPercentilesAggregate(AttrDict[Any]):
     :arg meta:
     """

-    values: Union[Mapping[str, Union[str, int, None]], Sequence["ArrayPercentilesItem"]]
+    values: Union[
+        Mapping[str, Union[str, float, None]], Sequence["ArrayPercentilesItem"]
+    ]
     meta: Mapping[str, Any]


@@ -5335,6 +5461,14 @@ class HitsMetadata(AttrDict[Any]):
     max_score: Union[float, None]


+class Indeterminable(AttrDict[Any]):
+    """
+    :arg reason: (required)
+    """
+
+    reason: str
+
+
 class InferenceAggregate(AttrDict[Any]):
     """
     :arg value:
@@ -5737,6 +5871,18 @@ class NestedIdentity(AttrDict[Any]):
     _nested: "NestedIdentity"


+class NonStationary(AttrDict[Any]):
+    """
+    :arg p_value: (required)
+    :arg r_value: (required)
+    :arg trend: (required)
+    """
+
+    p_value: float
+    r_value: float
+    trend: str
+
+
 class ParentAggregate(AttrDict[Any]):
     """
     :arg doc_count: (required)
@@ -5753,7 +5899,9 @@ class PercentilesBucketAggregate(AttrDict[Any]):
     :arg meta:
     """

-    values: Union[Mapping[str, Union[str, int, None]], Sequence["ArrayPercentilesItem"]]
+    values: Union[
+        Mapping[str, Union[str, float, None]], Sequence["ArrayPercentilesItem"]
+    ]
     meta: Mapping[str, Any]

@@ -5954,17 +6102,19 @@ class SearchProfile(AttrDict[Any]):
 class ShardFailure(AttrDict[Any]):
     """
     :arg reason: (required)
-    :arg shard: (required)
     :arg index:
     :arg node:
+    :arg shard:
     :arg status:
+    :arg primary:
     """

     reason: "ErrorCause"
-    shard: int
     index: str
     node: str
+    shard: int
     status: str
+    primary: bool


 class ShardProfile(AttrDict[Any]):
@@ -6090,6 +6240,16 @@ class SimpleValueAggregate(AttrDict[Any]):
     meta: Mapping[str, Any]


+class Spike(AttrDict[Any]):
+    """
+    :arg p_value: (required)
+    :arg change_point: (required)
+    """
+
+    p_value: float
+    change_point: int
+
+
 class StandardDeviationBounds(AttrDict[Any]):
     """
     :arg upper: (required)
@@ -6126,6 +6286,10 @@ class StandardDeviationBoundsAsString(AttrDict[Any]):
     lower_sampling: str


+class Stationary(AttrDict[Any]):
+    pass
+
+
 class StatsAggregate(AttrDict[Any]):
     """
     Statistics aggregation result. `min`, `max` and `avg` are missing if
@@ -6181,6 +6345,16 @@ class StatsBucketAggregate(AttrDict[Any]):
     meta: Mapping[str, Any]


+class StepChange(AttrDict[Any]):
+    """
+    :arg p_value: (required)
+    :arg change_point: (required)
+    """
+
+    p_value: float
+    change_point: int
+
+
 class StringRareTermsAggregate(AttrDict[Any]):
     """
     Result of the `rare_terms` aggregation when the field is a string.
@@ -6288,7 +6462,9 @@ class TDigestPercentileRanksAggregate(AttrDict[Any]):
     :arg meta:
     """

-    values: Union[Mapping[str, Union[str, int, None]], Sequence["ArrayPercentilesItem"]]
+    values: Union[
+        Mapping[str, Union[str, float, None]], Sequence["ArrayPercentilesItem"]
+    ]
     meta: Mapping[str, Any]


@@ -6298,7 +6474,9 @@ class TDigestPercentilesAggregate(AttrDict[Any]):
     :arg meta:
     """

-    values: Union[Mapping[str, Union[str, int, None]], Sequence["ArrayPercentilesItem"]]
+    values: Union[
+        Mapping[str, Union[str, float, None]], Sequence["ArrayPercentilesItem"]
+    ]
     meta: Mapping[str, Any]


@@ -6408,6 +6586,18 @@ class TotalHits(AttrDict[Any]):
     value: int


+class TrendChange(AttrDict[Any]):
+    """
+    :arg p_value: (required)
+    :arg r_value: (required)
+    :arg change_point: (required)
+    """
+
+    p_value: float
+    r_value: float
+    change_point: int
+
+
 class UnmappedRareTermsAggregate(AttrDict[Any]):
     """
     Result of a `rare_terms` aggregation when the field is unmapped.
elasticsearch/dsl/utils.py

@@ -603,7 +603,7 @@ class ObjectBase(AttrDict[Any]):
             # if this is a mapped field,
             f = self.__get_field(k)
             if f and f._coerce:
-                v = f.serialize(v)
+                v = f.serialize(v, skip_empty=skip_empty)

             # if someone assigned AttrList, unwrap it
             if isinstance(v, AttrList):
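Together with the `Object._serialize()` change above, this one-line fix propagates `skip_empty` from `ObjectBase.to_dict()` into object fields. A hedged illustration with made-up `Person`/`Address` classes:

    from elasticsearch.dsl import Document, InnerDoc, Object, Text

    class Address(InnerDoc):
        street = Text()
        city = Text()

    class Person(Document):
        name = Text()
        address = Object(Address)

    p = Person(name="Alice", address=Address(street="Main St", city=None))
    p.to_dict()                  # default skip_empty=True: empty inner values are dropped
    p.to_dict(skip_empty=False)  # expected to keep "city": None inside "address" as of 9.0.5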
elasticsearch/esql/__init__.py

@@ -15,4 +15,5 @@
 # specific language governing permissions and limitations
 # under the License.

-from .esql import ESQL, and_, not_, or_  # noqa: F401
+from ..dsl import E  # noqa: F401
+from .esql import ESQL, ESQLBase, and_, not_, or_  # noqa: F401
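As of 9.0.5 the `elasticsearch.esql` package root also exposes `E` and `ESQLBase`. A rough sketch of the import surface (the `employees` index is made up, and `E` is imported but not exercised here):

    # E, and_, or_ and not_ are importable from the same place.
    from elasticsearch.esql import E, ESQL, ESQLBase  # noqa: F401

    def first_page(index: str, size: int = 10) -> ESQLBase:
        # ESQLBase, now exported, can be used to type helpers that build up queries.
        return ESQL.from_(index).limit(size)

    query = first_page("employees")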