elasticsearch 8.19.1__py3-none-any.whl → 8.19.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (112)
  1. elasticsearch/_async/client/__init__.py +66 -67
  2. elasticsearch/_async/client/async_search.py +3 -3
  3. elasticsearch/_async/client/autoscaling.py +8 -4
  4. elasticsearch/_async/client/cat.py +521 -27
  5. elasticsearch/_async/client/ccr.py +2 -2
  6. elasticsearch/_async/client/cluster.py +10 -9
  7. elasticsearch/_async/client/connector.py +37 -36
  8. elasticsearch/_async/client/dangling_indices.py +6 -10
  9. elasticsearch/_async/client/eql.py +2 -2
  10. elasticsearch/_async/client/esql.py +1 -1
  11. elasticsearch/_async/client/features.py +2 -2
  12. elasticsearch/_async/client/fleet.py +4 -8
  13. elasticsearch/_async/client/graph.py +1 -5
  14. elasticsearch/_async/client/ilm.py +2 -10
  15. elasticsearch/_async/client/indices.py +393 -56
  16. elasticsearch/_async/client/inference.py +35 -121
  17. elasticsearch/_async/client/ingest.py +1 -1
  18. elasticsearch/_async/client/license.py +3 -5
  19. elasticsearch/_async/client/ml.py +61 -21
  20. elasticsearch/_async/client/monitoring.py +2 -1
  21. elasticsearch/_async/client/nodes.py +9 -9
  22. elasticsearch/_async/client/rollup.py +9 -9
  23. elasticsearch/_async/client/search_application.py +11 -11
  24. elasticsearch/_async/client/searchable_snapshots.py +4 -4
  25. elasticsearch/_async/client/security.py +4 -4
  26. elasticsearch/_async/client/shutdown.py +12 -17
  27. elasticsearch/_async/client/simulate.py +2 -2
  28. elasticsearch/_async/client/slm.py +2 -6
  29. elasticsearch/_async/client/snapshot.py +3 -2
  30. elasticsearch/_async/client/streams.py +185 -0
  31. elasticsearch/_async/client/tasks.py +4 -4
  32. elasticsearch/_async/client/text_structure.py +5 -1
  33. elasticsearch/_async/client/transform.py +37 -0
  34. elasticsearch/_async/client/utils.py +4 -2
  35. elasticsearch/_async/client/watcher.py +3 -7
  36. elasticsearch/_async/client/xpack.py +2 -1
  37. elasticsearch/_async/helpers.py +58 -9
  38. elasticsearch/_sync/client/__init__.py +68 -67
  39. elasticsearch/_sync/client/async_search.py +3 -3
  40. elasticsearch/_sync/client/autoscaling.py +8 -4
  41. elasticsearch/_sync/client/cat.py +521 -27
  42. elasticsearch/_sync/client/ccr.py +2 -2
  43. elasticsearch/_sync/client/cluster.py +10 -9
  44. elasticsearch/_sync/client/connector.py +37 -36
  45. elasticsearch/_sync/client/dangling_indices.py +6 -10
  46. elasticsearch/_sync/client/eql.py +2 -2
  47. elasticsearch/_sync/client/esql.py +1 -1
  48. elasticsearch/_sync/client/features.py +2 -2
  49. elasticsearch/_sync/client/fleet.py +4 -8
  50. elasticsearch/_sync/client/graph.py +1 -5
  51. elasticsearch/_sync/client/ilm.py +2 -10
  52. elasticsearch/_sync/client/indices.py +393 -56
  53. elasticsearch/_sync/client/inference.py +35 -121
  54. elasticsearch/_sync/client/ingest.py +1 -1
  55. elasticsearch/_sync/client/license.py +3 -5
  56. elasticsearch/_sync/client/ml.py +61 -21
  57. elasticsearch/_sync/client/monitoring.py +2 -1
  58. elasticsearch/_sync/client/nodes.py +9 -9
  59. elasticsearch/_sync/client/rollup.py +9 -9
  60. elasticsearch/_sync/client/search_application.py +11 -11
  61. elasticsearch/_sync/client/searchable_snapshots.py +4 -4
  62. elasticsearch/_sync/client/security.py +4 -4
  63. elasticsearch/_sync/client/shutdown.py +12 -17
  64. elasticsearch/_sync/client/simulate.py +2 -2
  65. elasticsearch/_sync/client/slm.py +2 -6
  66. elasticsearch/_sync/client/snapshot.py +3 -2
  67. elasticsearch/_sync/client/streams.py +185 -0
  68. elasticsearch/_sync/client/tasks.py +4 -4
  69. elasticsearch/_sync/client/text_structure.py +5 -1
  70. elasticsearch/_sync/client/transform.py +37 -0
  71. elasticsearch/_sync/client/utils.py +16 -2
  72. elasticsearch/_sync/client/watcher.py +3 -7
  73. elasticsearch/_sync/client/xpack.py +2 -1
  74. elasticsearch/_version.py +2 -1
  75. elasticsearch/client.py +2 -0
  76. elasticsearch/compat.py +45 -1
  77. elasticsearch/dsl/__init__.py +28 -0
  78. elasticsearch/dsl/_async/document.py +4 -5
  79. elasticsearch/dsl/_async/index.py +1 -1
  80. elasticsearch/dsl/_async/search.py +2 -3
  81. elasticsearch/dsl/_sync/document.py +4 -5
  82. elasticsearch/dsl/_sync/index.py +1 -1
  83. elasticsearch/dsl/_sync/search.py +2 -3
  84. elasticsearch/dsl/aggs.py +104 -7
  85. elasticsearch/dsl/async_connections.py +1 -2
  86. elasticsearch/dsl/connections.py +1 -2
  87. elasticsearch/dsl/document_base.py +16 -1
  88. elasticsearch/dsl/field.py +12 -1
  89. elasticsearch/dsl/query.py +24 -1
  90. elasticsearch/dsl/response/__init__.py +3 -0
  91. elasticsearch/dsl/serializer.py +1 -2
  92. elasticsearch/dsl/types.py +187 -9
  93. elasticsearch/dsl/utils.py +1 -2
  94. elasticsearch/esql/esql.py +1 -1
  95. elasticsearch/esql/functions.py +2 -2
  96. elasticsearch/helpers/__init__.py +10 -1
  97. elasticsearch/helpers/actions.py +106 -33
  98. elasticsearch/helpers/vectorstore/__init__.py +7 -7
  99. elasticsearch/helpers/vectorstore/_async/_utils.py +1 -1
  100. elasticsearch/helpers/vectorstore/_async/embedding_service.py +2 -2
  101. elasticsearch/helpers/vectorstore/_async/strategies.py +3 -3
  102. elasticsearch/helpers/vectorstore/_async/vectorstore.py +5 -5
  103. elasticsearch/helpers/vectorstore/_sync/_utils.py +1 -1
  104. elasticsearch/helpers/vectorstore/_sync/embedding_service.py +2 -2
  105. elasticsearch/helpers/vectorstore/_sync/strategies.py +3 -3
  106. elasticsearch/helpers/vectorstore/_sync/vectorstore.py +5 -5
  107. {elasticsearch-8.19.1.dist-info → elasticsearch-8.19.3.dist-info}/METADATA +2 -2
  108. elasticsearch-8.19.3.dist-info/RECORD +166 -0
  109. {elasticsearch-8.19.1.dist-info → elasticsearch-8.19.3.dist-info}/WHEEL +1 -1
  110. elasticsearch-8.19.1.dist-info/RECORD +0 -164
  111. {elasticsearch-8.19.1.dist-info → elasticsearch-8.19.3.dist-info}/licenses/LICENSE +0 -0
  112. {elasticsearch-8.19.1.dist-info → elasticsearch-8.19.3.dist-info}/licenses/NOTICE +0 -0
elasticsearch/_sync/client/transform.py CHANGED
@@ -85,6 +85,43 @@ class TransformClient(NamespacedClient):
             path_parts=__path_parts,
         )
 
+    @_rewrite_parameters()
+    def get_node_stats(
+        self,
+        *,
+        error_trace: t.Optional[bool] = None,
+        filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
+        human: t.Optional[bool] = None,
+        pretty: t.Optional[bool] = None,
+    ) -> ObjectApiResponse[t.Any]:
+        """
+        .. raw:: html
+
+          <p>Get node stats.</p>
+          <p>Get per-node information about transform usage.</p>
+
+        """
+        __path_parts: t.Dict[str, str] = {}
+        __path = "/_transform/_node_stats"
+        __query: t.Dict[str, t.Any] = {}
+        if error_trace is not None:
+            __query["error_trace"] = error_trace
+        if filter_path is not None:
+            __query["filter_path"] = filter_path
+        if human is not None:
+            __query["human"] = human
+        if pretty is not None:
+            __query["pretty"] = pretty
+        __headers = {"accept": "application/json"}
+        return self.perform_request(  # type: ignore[return-value]
+            "GET",
+            __path,
+            params=__query,
+            headers=__headers,
+            endpoint_id="transform.get_node_stats",
+            path_parts=__path_parts,
+        )
+
     @_rewrite_parameters(
         parameter_aliases={"from": "from_"},
     )
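The new method wraps `GET /_transform/_node_stats`. A minimal usage sketch against the synchronous client (the host URL is a placeholder):

```python
from elasticsearch import Elasticsearch

# Placeholder connection details; point this at your own cluster.
client = Elasticsearch("http://localhost:9200")

# Issues GET /_transform/_node_stats and returns per-node transform usage.
resp = client.transform.get_node_stats()
print(resp.body)
```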
elasticsearch/_sync/client/utils.py CHANGED
@@ -79,6 +79,12 @@ class Stability(Enum):
     EXPERIMENTAL = auto()
 
 
+class Visibility(Enum):
+    PUBLIC = auto()
+    FEATURE_FLAG = auto()
+    PRIVATE = auto()
+
+
 _TYPE_HOSTS = Union[
     str, Sequence[Union[str, Mapping[str, Union[str, int]], NodeConfig]]
 ]
@@ -456,15 +462,23 @@ def _rewrite_parameters(
     return wrapper
 
 
-def _stability_warning(
+def _availability_warning(
     stability: Stability,
+    visibility: Visibility = Visibility.PUBLIC,
     version: Optional[str] = None,
     message: Optional[str] = None,
 ) -> Callable[[F], F]:
     def wrapper(api: F) -> F:
         @wraps(api)
         def wrapped(*args: Any, **kwargs: Any) -> Any:
-            if stability == Stability.BETA:
+            if visibility == Visibility.PRIVATE:
+                warnings.warn(
+                    "This API is private. "
+                    "Private APIs are not subject to the support SLA of official GA features.",
+                    category=GeneralAvailabilityWarning,
+                    stacklevel=warn_stacklevel(),
+                )
+            elif stability == Stability.BETA:
                 warnings.warn(
                     "This API is in beta and is subject to change. "
                     "The design and code is less mature than official GA features and is being provided as-is with no warranties. "
elasticsearch/_sync/client/watcher.py CHANGED
@@ -492,9 +492,9 @@ class WatcherClient(NamespacedClient):
         :param active: The initial state of the watch. The default value is `true`, which
             means the watch is active by default.
         :param condition: The condition that defines if the actions should be run.
-        :param if_primary_term: only update the watch if the last operation that has
+        :param if_primary_term: Only update the watch if the last operation that has
             changed the watch has the specified primary term
-        :param if_seq_no: only update the watch if the last operation that has changed
+        :param if_seq_no: Only update the watch if the last operation that has changed
             the watch has the specified sequence number
         :param input: The input that defines the input that loads the data for the watch.
         :param metadata: Metadata JSON that will be copied into the history entries.
@@ -550,11 +550,7 @@ class WatcherClient(NamespacedClient):
             __body["transform"] = transform
         if trigger is not None:
             __body["trigger"] = trigger
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return self.perform_request(  # type: ignore[return-value]
             "PUT",
             __path,
@@ -56,7 +56,8 @@ class XPackClient(NamespacedClient):
 
         `<https://www.elastic.co/guide/en/elasticsearch/reference/8.19/info-api.html>`_
 
-        :param accept_enterprise: If this param is used it must be set to true
+        :param accept_enterprise: If used, this otherwise ignored parameter must be set
+            to true
         :param categories: A comma-separated list of the information categories to include
             in the response. For example, `build,license,features`.
         """
elasticsearch/_version.py CHANGED
@@ -15,4 +15,5 @@
 # specific language governing permissions and limitations
 # under the License.
 
-__versionstr__ = "8.19.1"
+__versionstr__ = "8.19.3"
+__es_specification_commit__ = "1c755b6fc50623dce6993efb7bfd3bdaef68fb90"
elasticsearch/client.py CHANGED
@@ -62,6 +62,7 @@ from ._sync.client.slm import SlmClient as SlmClient  # noqa: F401
 from ._sync.client.snapshot import SnapshotClient as SnapshotClient  # noqa: F401
 from ._sync.client.sql import SqlClient as SqlClient  # noqa: F401
 from ._sync.client.ssl import SslClient as SslClient  # noqa: F401
+from ._sync.client.streams import StreamsClient as StreamsClient  # noqa: F401
 from ._sync.client.synonyms import SynonymsClient as SynonymsClient  # noqa: F401
 from ._sync.client.tasks import TasksClient as TasksClient  # noqa: F401
 from ._sync.client.text_structure import (  # noqa: F401
@@ -115,6 +116,7 @@ __all__ = [
     "SnapshotClient",
     "SqlClient",
     "SslClient",
+    "StreamsClient",
     "TasksClient",
     "TextStructureClient",
     "TransformClient",
elasticsearch/compat.py CHANGED
@@ -15,11 +15,14 @@
 # specific language governing permissions and limitations
 # under the License.
 
+import asyncio
 import inspect
 import os
 import sys
+from contextlib import asynccontextmanager, contextmanager
 from pathlib import Path
-from typing import Tuple, Type, Union
+from threading import Thread
+from typing import Any, AsyncIterator, Callable, Coroutine, Iterator, Tuple, Type, Union
 
 string_types: Tuple[Type[str], Type[bytes]] = (str, bytes)
 
@@ -76,9 +79,50 @@ def warn_stacklevel() -> int:
     return 0
 
 
+@contextmanager
+def safe_thread(
+    target: Callable[..., Any], *args: Any, **kwargs: Any
+) -> Iterator[Thread]:
+    """Run a thread within a context manager block.
+
+    The thread is automatically joined when the block ends. If the thread raised
+    an exception, it is raised in the caller's context.
+    """
+    captured_exception = None
+
+    def run() -> None:
+        try:
+            target(*args, **kwargs)
+        except BaseException as exc:
+            nonlocal captured_exception
+            captured_exception = exc
+
+    thread = Thread(target=run)
+    thread.start()
+    yield thread
+    thread.join()
+    if captured_exception:
+        raise captured_exception
+
+
+@asynccontextmanager
+async def safe_task(
+    coro: Coroutine[Any, Any, Any],
+) -> "AsyncIterator[asyncio.Task[Any]]":
+    """Run a background task within a context manager block.
+
+    The task is awaited when the block ends.
+    """
+    task = asyncio.create_task(coro)
+    yield task
+    await task
+
+
 __all__ = [
     "string_types",
     "to_str",
     "to_bytes",
     "warn_stacklevel",
+    "safe_thread",
+    "safe_task",
 ]
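A short usage sketch of the two new helpers; the worker functions are invented for illustration. `safe_thread` re-raises any exception from the worker when the block exits, and `safe_task` awaits the task at the end of the block.

```python
import asyncio

from elasticsearch.compat import safe_task, safe_thread


def sync_worker(count: int) -> None:
    # Invented worker; an exception raised here would be re-raised
    # in the caller when the `with` block ends.
    print(f"processed {count} items")


with safe_thread(sync_worker, 10):
    print("main thread continues while the worker runs")


async def main() -> None:
    async def async_worker() -> None:
        await asyncio.sleep(0.1)

    # The task is awaited when the block exits.
    async with safe_task(async_worker()):
        print("do other async work here")


asyncio.run(main())
```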
elasticsearch/dsl/__init__.py CHANGED
@@ -38,23 +38,30 @@ from .faceted_search import (
     TermsFacet,
 )
 from .field import (
+    AggregateMetricDouble,
+    Alias,
     Binary,
     Boolean,
     Byte,
     Completion,
     ConstantKeyword,
+    CountedKeyword,
     CustomField,
     Date,
+    DateNanos,
     DateRange,
     DenseVector,
     Double,
     DoubleRange,
     Field,
+    Flattened,
     Float,
     FloatRange,
     GeoPoint,
     GeoShape,
     HalfFloat,
+    Histogram,
+    IcuCollationKeyword,
     Integer,
     IntegerRange,
     Ip,
@@ -63,21 +70,28 @@ from .field import (
     Keyword,
     Long,
     LongRange,
+    MatchOnlyText,
     Murmur3,
     Nested,
     Object,
+    Passthrough,
     Percolator,
     Point,
     RangeField,
     RankFeature,
     RankFeatures,
+    RankVectors,
     ScaledFloat,
     SearchAsYouType,
+    SemanticText,
     Shape,
     Short,
     SparseVector,
     Text,
     TokenCount,
+    UnsignedLong,
+    Version,
+    Wildcard,
     construct_field,
 )
 from .function import SF
@@ -108,6 +122,8 @@ __all__ = [
     "A",
     "Agg",
     "AggResponse",
+    "AggregateMetricDouble",
+    "Alias",
     "AsyncComposableIndexTemplate",
     "AsyncDocument",
     "AsyncEmptySearch",
@@ -126,9 +142,11 @@ __all__ = [
     "Completion",
     "ComposableIndexTemplate",
     "ConstantKeyword",
+    "CountedKeyword",
     "CustomField",
     "Date",
     "DateHistogramFacet",
+    "DateNanos",
     "DateRange",
     "DenseVector",
     "Document",
@@ -142,12 +160,15 @@ __all__ = [
     "FacetedResponse",
     "FacetedSearch",
     "Field",
+    "Flattened",
     "Float",
     "FloatRange",
     "GeoPoint",
     "GeoShape",
     "HalfFloat",
+    "Histogram",
     "HistogramFacet",
+    "IcuCollationKeyword",
     "IllegalOperation",
     "Index",
     "IndexTemplate",
@@ -162,12 +183,14 @@ __all__ = [
     "LongRange",
     "M",
     "Mapping",
+    "MatchOnlyText",
     "MetaField",
     "MultiSearch",
     "Murmur3",
     "Nested",
     "NestedFacet",
     "Object",
+    "Passthrough",
     "Percolator",
     "Point",
     "Q",
@@ -177,11 +200,13 @@ __all__ = [
     "RangeField",
     "RankFeature",
     "RankFeatures",
+    "RankVectors",
     "Response",
     "SF",
     "ScaledFloat",
     "Search",
     "SearchAsYouType",
+    "SemanticText",
     "Shape",
     "Short",
     "SparseVector",
@@ -189,9 +214,12 @@ __all__ = [
     "Text",
     "TokenCount",
     "UnknownDslObject",
+    "UnsignedLong",
    "UpdateByQuery",
    "UpdateByQueryResponse",
    "ValidationException",
+    "Version",
+    "Wildcard",
     "analyzer",
     "async_connections",
     "char_filter",
elasticsearch/dsl/_async/document.py CHANGED
@@ -31,9 +31,8 @@ from typing import (
 
 from typing_extensions import Self, dataclass_transform
 
-from elasticsearch.exceptions import NotFoundError, RequestError
-from elasticsearch.helpers import async_bulk
-
+from ...exceptions import NotFoundError, RequestError
+from ...helpers import async_bulk
 from .._async.index import AsyncIndex
 from ..async_connections import get_connection
 from ..document_base import DocumentBase, DocumentMeta, mapped_field
@@ -42,8 +41,8 @@ from ..utils import DOC_META_FIELDS, META_FIELDS, AsyncUsingType, merge
 from .search import AsyncSearch
 
 if TYPE_CHECKING:
-    from elasticsearch import AsyncElasticsearch
-    from elasticsearch.esql.esql import ESQLBase
+    from ... import AsyncElasticsearch
+    from ...esql.esql import ESQLBase
 
 
 class AsyncIndexMeta(DocumentMeta):
elasticsearch/dsl/_async/index.py CHANGED
@@ -30,7 +30,7 @@ from .update_by_query import AsyncUpdateByQuery
 if TYPE_CHECKING:
     from elastic_transport import ObjectApiResponse
 
-    from elasticsearch import AsyncElasticsearch
+    from ... import AsyncElasticsearch
 
 
 class AsyncIndexTemplate:
elasticsearch/dsl/_async/search.py CHANGED
@@ -29,9 +29,8 @@ from typing import (
 
 from typing_extensions import Self
 
-from elasticsearch.exceptions import ApiError
-from elasticsearch.helpers import async_scan
-
+from ...exceptions import ApiError
+from ...helpers import async_scan
 from ..async_connections import get_connection
 from ..response import Response
 from ..search_base import MultiSearchBase, SearchBase
elasticsearch/dsl/_sync/document.py CHANGED
@@ -31,9 +31,8 @@ from typing import (
 
 from typing_extensions import Self, dataclass_transform
 
-from elasticsearch.exceptions import NotFoundError, RequestError
-from elasticsearch.helpers import bulk
-
+from ...exceptions import NotFoundError, RequestError
+from ...helpers import bulk
 from .._sync.index import Index
 from ..connections import get_connection
 from ..document_base import DocumentBase, DocumentMeta, mapped_field
@@ -42,8 +41,8 @@ from ..utils import DOC_META_FIELDS, META_FIELDS, UsingType, merge
 from .search import Search
 
 if TYPE_CHECKING:
-    from elasticsearch import Elasticsearch
-    from elasticsearch.esql.esql import ESQLBase
+    from ... import Elasticsearch
+    from ...esql.esql import ESQLBase
 
 
 class IndexMeta(DocumentMeta):
elasticsearch/dsl/_sync/index.py CHANGED
@@ -30,7 +30,7 @@ from .update_by_query import UpdateByQuery
 if TYPE_CHECKING:
     from elastic_transport import ObjectApiResponse
 
-    from elasticsearch import Elasticsearch
+    from ... import Elasticsearch
 
 
 class IndexTemplate:
elasticsearch/dsl/_sync/search.py CHANGED
@@ -28,9 +28,8 @@ from typing import (
 
 from typing_extensions import Self
 
-from elasticsearch.exceptions import ApiError
-from elasticsearch.helpers import scan
-
+from ...exceptions import ApiError
+from ...helpers import scan
 from ..connections import get_connection
 from ..response import Response
 from ..search_base import MultiSearchBase, SearchBase
elasticsearch/dsl/aggs.py CHANGED
@@ -652,6 +652,54 @@ class Cardinality(Agg[_R]):
     )
 
 
+class CartesianBounds(Agg[_R]):
+    """
+    A metric aggregation that computes the spatial bounding box containing
+    all values for a Point or Shape field.
+
+    :arg field: The field on which to run the aggregation.
+    :arg missing: The value to apply to documents that do not have a
+        value. By default, documents without a value are ignored.
+    :arg script:
+    """
+
+    name = "cartesian_bounds"
+
+    def __init__(
+        self,
+        *,
+        field: Union[str, "InstrumentedField", "DefaultType"] = DEFAULT,
+        missing: Union[str, int, float, bool, "DefaultType"] = DEFAULT,
+        script: Union["types.Script", Dict[str, Any], "DefaultType"] = DEFAULT,
+        **kwargs: Any,
+    ):
+        super().__init__(field=field, missing=missing, script=script, **kwargs)
+
+
+class CartesianCentroid(Agg[_R]):
+    """
+    A metric aggregation that computes the weighted centroid from all
+    coordinate values for point and shape fields.
+
+    :arg field: The field on which to run the aggregation.
+    :arg missing: The value to apply to documents that do not have a
+        value. By default, documents without a value are ignored.
+    :arg script:
+    """
+
+    name = "cartesian_centroid"
+
+    def __init__(
+        self,
+        *,
+        field: Union[str, "InstrumentedField", "DefaultType"] = DEFAULT,
+        missing: Union[str, int, float, bool, "DefaultType"] = DEFAULT,
+        script: Union["types.Script", Dict[str, Any], "DefaultType"] = DEFAULT,
+        **kwargs: Any,
+    ):
+        super().__init__(field=field, missing=missing, script=script, **kwargs)
+
+
 class CategorizeText(Bucket[_R]):
     """
     A multi-bucket aggregation that groups semi-structured text into
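A hedged sketch of attaching the new metric aggregations through the DSL; the index and field names are invented.

```python
from elasticsearch.dsl import Search
from elasticsearch.dsl.aggs import CartesianBounds, CartesianCentroid

s = Search(index="example-shapes")  # placeholder index with a `location` point field
s.aggs.metric("viewport", CartesianBounds(field="location"))
s.aggs.metric("center", CartesianCentroid(field="location"))
print(s.to_dict())
```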
@@ -734,6 +782,43 @@ class CategorizeText(Bucket[_R]):
     )
 
 
+class ChangePoint(Pipeline[_R]):
+    """
+    A sibling pipeline that detects, spikes, dips, and change points in a
+    metric. Given a distribution of values provided by the sibling multi-
+    bucket aggregation, this aggregation indicates the bucket of any spike
+    or dip and/or the bucket at which the largest change in the
+    distribution of values, if they are statistically significant. There
+    must be at least 22 bucketed values. Fewer than 1,000 is preferred.
+
+    :arg format: `DecimalFormat` pattern for the output value. If
+        specified, the formatted value is returned in the aggregation’s
+        `value_as_string` property.
+    :arg gap_policy: Policy to apply when gaps are found in the data.
+        Defaults to `skip` if omitted.
+    :arg buckets_path: Path to the buckets that contain one set of values
+        to correlate.
+    """
+
+    name = "change_point"
+
+    def __init__(
+        self,
+        *,
+        format: Union[str, "DefaultType"] = DEFAULT,
+        gap_policy: Union[
+            Literal["skip", "insert_zeros", "keep_values"], "DefaultType"
+        ] = DEFAULT,
+        buckets_path: Union[
+            str, Sequence[str], Mapping[str, str], "DefaultType"
+        ] = DEFAULT,
+        **kwargs: Any,
+    ):
+        super().__init__(
+            format=format, gap_policy=gap_policy, buckets_path=buckets_path, **kwargs
+        )
+
+
 class Children(Bucket[_R]):
     """
     A single bucket aggregation that selects child documents that have the
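A hedged sketch of wiring the new pipeline aggregation as a sibling of a multi-bucket aggregation; the index, field, and bucket names are invented.

```python
from elasticsearch.dsl import Search
from elasticsearch.dsl.aggs import ChangePoint

s = Search(index="example-metrics")  # placeholder index
by_day = s.aggs.bucket(
    "by_day", "date_histogram", field="@timestamp", calendar_interval="day"
)
by_day.metric("avg_latency", "avg", field="latency_ms")

# Sibling pipeline pointing at the per-bucket metric it should analyze.
s.aggs.pipeline("latency_change", ChangePoint(buckets_path="by_day>avg_latency"))
print(s.to_dict())
```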
@@ -1428,9 +1513,9 @@ class GeoLine(Agg[_R]):
     ordered by the chosen sort field.
 
     :arg point: (required) The name of the geo_point field.
-    :arg sort: (required) The name of the numeric field to use as the sort
-        key for ordering the points. When the `geo_line` aggregation is
-        nested inside a `time_series` aggregation, this field defaults to
+    :arg sort: The name of the numeric field to use as the sort key for
+        ordering the points. When the `geo_line` aggregation is nested
+        inside a `time_series` aggregation, this field defaults to
         `@timestamp`, and any other value will result in error.
     :arg include_sort: When `true`, returns an additional array of the
         sort values in the feature properties.
@@ -1767,9 +1852,9 @@ class Inference(Pipeline[_R]):
 class Line(Agg[_R]):
     """
     :arg point: (required) The name of the geo_point field.
-    :arg sort: (required) The name of the numeric field to use as the sort
-        key for ordering the points. When the `geo_line` aggregation is
-        nested inside a `time_series` aggregation, this field defaults to
+    :arg sort: The name of the numeric field to use as the sort key for
+        ordering the points. When the `geo_line` aggregation is nested
+        inside a `time_series` aggregation, this field defaults to
         `@timestamp`, and any other value will result in error.
     :arg include_sort: When `true`, returns an additional array of the
         sort values in the feature properties.
@@ -2592,7 +2677,7 @@ class Percentiles(Agg[_R]):
         self,
         *,
         keyed: Union[bool, "DefaultType"] = DEFAULT,
-        percents: Union[Sequence[float], "DefaultType"] = DEFAULT,
+        percents: Union[float, Sequence[float], "DefaultType"] = DEFAULT,
         hdr: Union["types.HdrMethod", Dict[str, Any], "DefaultType"] = DEFAULT,
         tdigest: Union["types.TDigest", Dict[str, Any], "DefaultType"] = DEFAULT,
         format: Union[str, "DefaultType"] = DEFAULT,
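With the widened annotation a single percentile no longer needs to be wrapped in a list to satisfy type checkers; the field name below is invented and the list form remains a safe choice.

```python
from elasticsearch.dsl.aggs import Percentiles

p95_only = Percentiles(field="load_time_ms", percents=95)
p95_p99 = Percentiles(field="load_time_ms", percents=[95, 99])
print(p95_only.to_dict(), p95_p99.to_dict())
```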
@@ -2975,6 +3060,14 @@ class SignificantTerms(Bucket[_R]):
         the foreground sample with a term divided by the number of
         documents in the background with the term.
     :arg script_heuristic: Customized score, implemented via a script.
+    :arg p_value: Significant terms heuristic that calculates the p-value
+        between the term existing in foreground and background sets. The
+        p-value is the probability of obtaining test results at least as
+        extreme as the results actually observed, under the assumption
+        that the null hypothesis is correct. The p-value is calculated
+        assuming that the foreground set and the background set are
+        independent https://en.wikipedia.org/wiki/Bernoulli_trial, with
+        the null hypothesis that the probabilities are the same.
     :arg shard_min_doc_count: Regulates the certainty a shard has if the
         term should actually be added to the candidate list or not with
         respect to the `min_doc_count`. Terms will only be considered if
@@ -3028,6 +3121,9 @@ class SignificantTerms(Bucket[_R]):
         script_heuristic: Union[
             "types.ScriptedHeuristic", Dict[str, Any], "DefaultType"
         ] = DEFAULT,
+        p_value: Union[
+            "types.PValueHeuristic", Dict[str, Any], "DefaultType"
+        ] = DEFAULT,
         shard_min_doc_count: Union[int, "DefaultType"] = DEFAULT,
         shard_size: Union[int, "DefaultType"] = DEFAULT,
         size: Union[int, "DefaultType"] = DEFAULT,
@@ -3046,6 +3142,7 @@ class SignificantTerms(Bucket[_R]):
             mutual_information=mutual_information,
             percentage=percentage,
             script_heuristic=script_heuristic,
+            p_value=p_value,
             shard_min_doc_count=shard_min_doc_count,
             shard_size=shard_size,
             size=size,
elasticsearch/dsl/async_connections.py CHANGED
@@ -17,8 +17,7 @@
 
 from typing import Type
 
-from elasticsearch import AsyncElasticsearch
-
+from .. import AsyncElasticsearch
 from .connections import Connections
 
 
elasticsearch/dsl/connections.py CHANGED
@@ -17,8 +17,7 @@
 
 from typing import Any, Dict, Generic, Type, TypeVar, Union
 
-from elasticsearch import Elasticsearch, __versionstr__
-
+from .. import Elasticsearch, __versionstr__
 from .serializer import serializer
 
 _T = TypeVar("_T")
elasticsearch/dsl/document_base.py CHANGED
@@ -35,6 +35,11 @@ from typing import (
     overload,
 )
 
+try:
+    import annotationlib
+except ImportError:
+    annotationlib = None  # type: ignore[assignment]
+
 try:
     from types import UnionType
 except ImportError:
@@ -333,7 +338,17 @@ class DocumentOptions:
         # # ignore attributes
         # field10: ClassVar[string] = "a regular class variable"
         annotations = attrs.get("__annotations__", {})
-        fields = set([n for n in attrs if isinstance(attrs[n], Field)])
+        if not annotations and annotationlib:
+            # Python 3.14+ uses annotationlib
+            annotate = annotationlib.get_annotate_from_class_namespace(attrs)
+            if annotate:
+                annotations = (
+                    annotationlib.call_annotate_function(
+                        annotate, format=annotationlib.Format.VALUE
+                    )
+                    or {}
+                )
+        fields = {n for n in attrs if isinstance(attrs[n], Field)}
         fields.update(annotations.keys())
         field_defaults = {}
         for name in fields:
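The fallback only runs when `__annotations__` is missing from the class namespace, which is the case under the lazy annotations of Python 3.14 (PEP 649/749). Document classes keep the same declaration style on every supported version; a minimal sketch with invented names:

```python
from elasticsearch.dsl import Document, Keyword, mapped_field


class Article(Document):
    # On Python 3.13 and earlier these annotations are read from
    # `__annotations__`; on 3.14+ they are recovered via annotationlib
    # as shown in the hunk above.
    title: str
    category: str = mapped_field(Keyword())

    class Index:
        name = "example-articles"  # placeholder index name
```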