elasticsearch 8.19.0__py3-none-any.whl → 8.19.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. elasticsearch/_async/client/__init__.py +39 -55
  2. elasticsearch/_async/client/cat.py +605 -35
  3. elasticsearch/_async/client/cluster.py +7 -2
  4. elasticsearch/_async/client/connector.py +3 -3
  5. elasticsearch/_async/client/esql.py +16 -6
  6. elasticsearch/_async/client/fleet.py +1 -5
  7. elasticsearch/_async/client/graph.py +1 -5
  8. elasticsearch/_async/client/ilm.py +2 -10
  9. elasticsearch/_async/client/indices.py +159 -32
  10. elasticsearch/_async/client/inference.py +142 -120
  11. elasticsearch/_async/client/nodes.py +2 -2
  12. elasticsearch/_async/client/shutdown.py +5 -15
  13. elasticsearch/_async/client/slm.py +1 -5
  14. elasticsearch/_async/client/snapshot.py +262 -112
  15. elasticsearch/_async/client/sql.py +1 -1
  16. elasticsearch/_async/client/streams.py +185 -0
  17. elasticsearch/_async/client/transform.py +60 -0
  18. elasticsearch/_async/client/watcher.py +1 -5
  19. elasticsearch/_async/helpers.py +58 -9
  20. elasticsearch/_sync/client/__init__.py +39 -55
  21. elasticsearch/_sync/client/cat.py +605 -35
  22. elasticsearch/_sync/client/cluster.py +7 -2
  23. elasticsearch/_sync/client/connector.py +3 -3
  24. elasticsearch/_sync/client/esql.py +16 -6
  25. elasticsearch/_sync/client/fleet.py +1 -5
  26. elasticsearch/_sync/client/graph.py +1 -5
  27. elasticsearch/_sync/client/ilm.py +2 -10
  28. elasticsearch/_sync/client/indices.py +159 -32
  29. elasticsearch/_sync/client/inference.py +142 -120
  30. elasticsearch/_sync/client/nodes.py +2 -2
  31. elasticsearch/_sync/client/shutdown.py +5 -15
  32. elasticsearch/_sync/client/slm.py +1 -5
  33. elasticsearch/_sync/client/snapshot.py +262 -112
  34. elasticsearch/_sync/client/sql.py +1 -1
  35. elasticsearch/_sync/client/streams.py +185 -0
  36. elasticsearch/_sync/client/transform.py +60 -0
  37. elasticsearch/_sync/client/watcher.py +1 -5
  38. elasticsearch/_version.py +2 -1
  39. elasticsearch/client.py +2 -0
  40. elasticsearch/compat.py +45 -1
  41. elasticsearch/dsl/__init__.py +28 -0
  42. elasticsearch/dsl/_async/document.py +84 -0
  43. elasticsearch/dsl/_sync/document.py +84 -0
  44. elasticsearch/dsl/aggs.py +117 -0
  45. elasticsearch/dsl/document_base.py +59 -1
  46. elasticsearch/dsl/field.py +60 -10
  47. elasticsearch/dsl/query.py +1 -1
  48. elasticsearch/dsl/response/__init__.py +3 -0
  49. elasticsearch/dsl/response/aggs.py +1 -1
  50. elasticsearch/dsl/types.py +325 -20
  51. elasticsearch/dsl/utils.py +1 -1
  52. elasticsearch/esql/__init__.py +2 -1
  53. elasticsearch/esql/esql.py +85 -34
  54. elasticsearch/esql/functions.py +37 -25
  55. elasticsearch/helpers/__init__.py +10 -1
  56. elasticsearch/helpers/actions.py +106 -33
  57. {elasticsearch-8.19.0.dist-info → elasticsearch-8.19.2.dist-info}/METADATA +2 -4
  58. {elasticsearch-8.19.0.dist-info → elasticsearch-8.19.2.dist-info}/RECORD +61 -59
  59. {elasticsearch-8.19.0.dist-info → elasticsearch-8.19.2.dist-info}/WHEEL +0 -0
  60. {elasticsearch-8.19.0.dist-info → elasticsearch-8.19.2.dist-info}/licenses/LICENSE +0 -0
  61. {elasticsearch-8.19.0.dist-info → elasticsearch-8.19.2.dist-info}/licenses/NOTICE +0 -0
elasticsearch/_sync/client/streams.py ADDED
@@ -0,0 +1,185 @@
+# Licensed to Elasticsearch B.V. under one or more contributor
+# license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright
+# ownership. Elasticsearch B.V. licenses this file to you under
+# the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+
+import typing as t
+
+from elastic_transport import ObjectApiResponse, TextApiResponse
+
+from ._base import NamespacedClient
+from .utils import (
+    Stability,
+    _rewrite_parameters,
+    _stability_warning,
+)
+
+
+class StreamsClient(NamespacedClient):
+
+    @_rewrite_parameters()
+    @_stability_warning(Stability.EXPERIMENTAL)
+    def logs_disable(
+        self,
+        *,
+        error_trace: t.Optional[bool] = None,
+        filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
+        human: t.Optional[bool] = None,
+        master_timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
+        pretty: t.Optional[bool] = None,
+        timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
+    ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]:
+        """
+        .. raw:: html
+
+          <p>Disable logs stream.</p>
+          <p>Turn off the logs stream feature for this cluster.</p>
+
+
+        `<https://www.elastic.co/docs/api/doc/elasticsearch#TODO>`_
+
+        :param master_timeout: The period to wait for a connection to the master node.
+            If no response is received before the timeout expires, the request fails
+            and returns an error.
+        :param timeout: The period to wait for a response. If no response is received
+            before the timeout expires, the request fails and returns an error.
+        """
+        __path_parts: t.Dict[str, str] = {}
+        __path = "/_streams/logs/_disable"
+        __query: t.Dict[str, t.Any] = {}
+        if error_trace is not None:
+            __query["error_trace"] = error_trace
+        if filter_path is not None:
+            __query["filter_path"] = filter_path
+        if human is not None:
+            __query["human"] = human
+        if master_timeout is not None:
+            __query["master_timeout"] = master_timeout
+        if pretty is not None:
+            __query["pretty"] = pretty
+        if timeout is not None:
+            __query["timeout"] = timeout
+        __headers = {"accept": "application/json,text/plain"}
+        return self.perform_request(  # type: ignore[return-value]
+            "POST",
+            __path,
+            params=__query,
+            headers=__headers,
+            endpoint_id="streams.logs_disable",
+            path_parts=__path_parts,
+        )
+
+    @_rewrite_parameters()
+    @_stability_warning(Stability.EXPERIMENTAL)
+    def logs_enable(
+        self,
+        *,
+        error_trace: t.Optional[bool] = None,
+        filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
+        human: t.Optional[bool] = None,
+        master_timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
+        pretty: t.Optional[bool] = None,
+        timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
+    ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]:
+        """
+        .. raw:: html
+
+          <p>Enable logs stream.</p>
+          <p>Turn on the logs stream feature for this cluster.</p>
+          <p>NOTE: To protect existing data, this feature can be turned on only if the
+          cluster does not have existing indices or data streams that match the pattern <code>logs|logs.*</code>.
+          If those indices or data streams exist, a <code>409 - Conflict</code> response and error is returned.</p>
+
+
+        `<https://www.elastic.co/docs/api/doc/elasticsearch#TODO>`_
+
+        :param master_timeout: The period to wait for a connection to the master node.
+            If no response is received before the timeout expires, the request fails
+            and returns an error.
+        :param timeout: The period to wait for a response. If no response is received
+            before the timeout expires, the request fails and returns an error.
+        """
+        __path_parts: t.Dict[str, str] = {}
+        __path = "/_streams/logs/_enable"
+        __query: t.Dict[str, t.Any] = {}
+        if error_trace is not None:
+            __query["error_trace"] = error_trace
+        if filter_path is not None:
+            __query["filter_path"] = filter_path
+        if human is not None:
+            __query["human"] = human
+        if master_timeout is not None:
+            __query["master_timeout"] = master_timeout
+        if pretty is not None:
+            __query["pretty"] = pretty
+        if timeout is not None:
+            __query["timeout"] = timeout
+        __headers = {"accept": "application/json,text/plain"}
+        return self.perform_request(  # type: ignore[return-value]
+            "POST",
+            __path,
+            params=__query,
+            headers=__headers,
+            endpoint_id="streams.logs_enable",
+            path_parts=__path_parts,
+        )
+
+    @_rewrite_parameters()
+    @_stability_warning(Stability.EXPERIMENTAL)
+    def status(
+        self,
+        *,
+        error_trace: t.Optional[bool] = None,
+        filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
+        human: t.Optional[bool] = None,
+        master_timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
+        pretty: t.Optional[bool] = None,
+    ) -> ObjectApiResponse[t.Any]:
+        """
+        .. raw:: html
+
+          <p>Get the status of streams.</p>
+          <p>Get the current status for all types of streams.</p>
+
+
+        `<https://www.elastic.co/docs/api/doc/elasticsearch#TODO>`_
+
+        :param master_timeout: Period to wait for a connection to the master node. If
+            no response is received before the timeout expires, the request fails and
+            returns an error.
+        """
+        __path_parts: t.Dict[str, str] = {}
+        __path = "/_streams/status"
+        __query: t.Dict[str, t.Any] = {}
+        if error_trace is not None:
+            __query["error_trace"] = error_trace
+        if filter_path is not None:
+            __query["filter_path"] = filter_path
+        if human is not None:
+            __query["human"] = human
+        if master_timeout is not None:
+            __query["master_timeout"] = master_timeout
+        if pretty is not None:
+            __query["pretty"] = pretty
+        __headers = {"accept": "application/json"}
+        return self.perform_request(  # type: ignore[return-value]
+            "GET",
+            __path,
+            params=__query,
+            headers=__headers,
+            endpoint_id="streams.status",
+            path_parts=__path_parts,
+        )
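The three experimental endpoints above are registered as a new namespaced client and are expected to be reachable as `client.streams` on an 8.19.2 client. A minimal usage sketch; the cluster URL and API key are placeholders, and each call emits an experimental-stability warning:

```python
# Sketch of calling the new streams endpoints; connection details are placeholders.
from elasticsearch import Elasticsearch

client = Elasticsearch("https://localhost:9200", api_key="...")

print(client.streams.status().body)         # current status of all stream types
client.streams.logs_enable(timeout="30s")   # turn the logs stream feature on
client.streams.logs_disable(timeout="30s")  # and off again
```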
elasticsearch/_sync/client/transform.py CHANGED
@@ -602,6 +602,66 @@ class TransformClient(NamespacedClient):
             path_parts=__path_parts,
         )
 
+    @_rewrite_parameters()
+    def set_upgrade_mode(
+        self,
+        *,
+        enabled: t.Optional[bool] = None,
+        error_trace: t.Optional[bool] = None,
+        filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
+        human: t.Optional[bool] = None,
+        pretty: t.Optional[bool] = None,
+        timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
+    ) -> ObjectApiResponse[t.Any]:
+        """
+        .. raw:: html
+
+          <p>Set upgrade_mode for transform indices.
+          Sets a cluster wide upgrade_mode setting that prepares transform
+          indices for an upgrade.
+          When upgrading your cluster, in some circumstances you must restart your
+          nodes and reindex your transform indices. In those circumstances,
+          there must be no transforms running. You can close the transforms,
+          do the upgrade, then open all the transforms again. Alternatively,
+          you can use this API to temporarily halt tasks associated with the transforms
+          and prevent new transforms from opening. You can also use this API
+          during upgrades that do not require you to reindex your transform
+          indices, though stopping transforms is not a requirement in that case.
+          You can see the current value for the upgrade_mode setting by using the get
+          transform info API.</p>
+
+
+        `<https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-transform-set-upgrade-mode>`_
+
+        :param enabled: When `true`, it enables `upgrade_mode` which temporarily halts
+            all transform tasks and prohibits new transform tasks from starting.
+        :param timeout: The time to wait for the request to be completed.
+        """
+        __path_parts: t.Dict[str, str] = {}
+        __path = "/_transform/set_upgrade_mode"
+        __query: t.Dict[str, t.Any] = {}
+        if enabled is not None:
+            __query["enabled"] = enabled
+        if error_trace is not None:
+            __query["error_trace"] = error_trace
+        if filter_path is not None:
+            __query["filter_path"] = filter_path
+        if human is not None:
+            __query["human"] = human
+        if pretty is not None:
+            __query["pretty"] = pretty
+        if timeout is not None:
+            __query["timeout"] = timeout
+        __headers = {"accept": "application/json"}
+        return self.perform_request(  # type: ignore[return-value]
+            "POST",
+            __path,
+            params=__query,
+            headers=__headers,
+            endpoint_id="transform.set_upgrade_mode",
+            path_parts=__path_parts,
+        )
+
     @_rewrite_parameters(
         parameter_aliases={"from": "from_"},
     )
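A usage sketch for the new API, bracketing an upgrade with `set_upgrade_mode` calls; the connection details are placeholders, not part of the diff above:

```python
# Sketch: halt transform tasks before a node restart/reindex, then resume.
from elasticsearch import Elasticsearch

client = Elasticsearch("https://localhost:9200", api_key="...")

client.transform.set_upgrade_mode(enabled=True, timeout="30s")
# ... restart nodes / reindex transform indices ...
client.transform.set_upgrade_mode(enabled=False, timeout="30s")
```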
elasticsearch/_sync/client/watcher.py CHANGED
@@ -550,11 +550,7 @@ class WatcherClient(NamespacedClient):
             __body["transform"] = transform
         if trigger is not None:
             __body["trigger"] = trigger
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return self.perform_request(  # type: ignore[return-value]
             "PUT",
             __path,
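The practical effect of this change is that `put_watch` now always sends a JSON body (an empty `{}` when no body fields are given) together with a `content-type: application/json` header, rather than omitting both. A small sketch; the connection details and watch id are placeholders:

```python
# Previously a call with no body fields was sent without a body or content-type
# header; with the change above it sends "{}" as the request body.
from elasticsearch import Elasticsearch

client = Elasticsearch("https://localhost:9200", api_key="...")
client.watcher.put_watch(id="my-watch", active=False)
```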
elasticsearch/_version.py CHANGED
@@ -15,4 +15,5 @@
 # specific language governing permissions and limitations
 # under the License.
 
-__versionstr__ = "8.19.0"
+__versionstr__ = "8.19.2"
+__es_specification_commit__ = "f4816bb41c52c1bfda93c48191f0a0e4e2d575be"
elasticsearch/client.py CHANGED
@@ -62,6 +62,7 @@ from ._sync.client.slm import SlmClient as SlmClient # noqa: F401
 from ._sync.client.snapshot import SnapshotClient as SnapshotClient  # noqa: F401
 from ._sync.client.sql import SqlClient as SqlClient  # noqa: F401
 from ._sync.client.ssl import SslClient as SslClient  # noqa: F401
+from ._sync.client.streams import StreamsClient as StreamsClient  # noqa: F401
 from ._sync.client.synonyms import SynonymsClient as SynonymsClient  # noqa: F401
 from ._sync.client.tasks import TasksClient as TasksClient  # noqa: F401
 from ._sync.client.text_structure import (  # noqa: F401
@@ -115,6 +116,7 @@ __all__ = [
     "SnapshotClient",
     "SqlClient",
     "SslClient",
+    "StreamsClient",
     "TasksClient",
     "TextStructureClient",
     "TransformClient",
elasticsearch/compat.py CHANGED
@@ -15,11 +15,14 @@
 # specific language governing permissions and limitations
 # under the License.
 
+import asyncio
 import inspect
 import os
 import sys
+from contextlib import asynccontextmanager, contextmanager
 from pathlib import Path
-from typing import Tuple, Type, Union
+from threading import Thread
+from typing import Any, AsyncIterator, Callable, Coroutine, Iterator, Tuple, Type, Union
 
 string_types: Tuple[Type[str], Type[bytes]] = (str, bytes)
 
@@ -76,9 +79,50 @@ def warn_stacklevel() -> int:
     return 0
 
 
+@contextmanager
+def safe_thread(
+    target: Callable[..., Any], *args: Any, **kwargs: Any
+) -> Iterator[Thread]:
+    """Run a thread within a context manager block.
+
+    The thread is automatically joined when the block ends. If the thread raised
+    an exception, it is raised in the caller's context.
+    """
+    captured_exception = None
+
+    def run() -> None:
+        try:
+            target(*args, **kwargs)
+        except BaseException as exc:
+            nonlocal captured_exception
+            captured_exception = exc
+
+    thread = Thread(target=run)
+    thread.start()
+    yield thread
+    thread.join()
+    if captured_exception:
+        raise captured_exception
+
+
+@asynccontextmanager
+async def safe_task(
+    coro: Coroutine[Any, Any, Any],
+) -> "AsyncIterator[asyncio.Task[Any]]":
+    """Run a background task within a context manager block.
+
+    The task is awaited when the block ends.
+    """
+    task = asyncio.create_task(coro)
+    yield task
+    await task
+
+
 __all__ = [
     "string_types",
     "to_str",
     "to_bytes",
     "warn_stacklevel",
+    "safe_thread",
+    "safe_task",
 ]
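These helpers are internal utilities (used by the bulk helpers and their tests), but their behaviour is easy to see in isolation: `safe_thread` joins the thread on block exit and re-raises any exception in the caller, while `safe_task` awaits the wrapped coroutine on block exit. A small sketch; the work functions are invented:

```python
# Sketch of the new compat helpers added in the diff above.
import asyncio

from elasticsearch.compat import safe_task, safe_thread


def work(n: int) -> None:
    if n < 0:
        raise ValueError("negative input")  # would be re-raised in the caller on block exit


with safe_thread(work, 3) as thread:
    pass  # the thread runs concurrently; it is joined when the block ends


async def main() -> None:
    async with safe_task(asyncio.sleep(0.1)) as task:
        pass  # the task runs concurrently; it is awaited when the block ends


asyncio.run(main())
```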
elasticsearch/dsl/__init__.py CHANGED
@@ -38,23 +38,30 @@ from .faceted_search import (
     TermsFacet,
 )
 from .field import (
+    AggregateMetricDouble,
+    Alias,
     Binary,
     Boolean,
     Byte,
     Completion,
     ConstantKeyword,
+    CountedKeyword,
     CustomField,
     Date,
+    DateNanos,
     DateRange,
     DenseVector,
     Double,
     DoubleRange,
     Field,
+    Flattened,
     Float,
     FloatRange,
     GeoPoint,
     GeoShape,
     HalfFloat,
+    Histogram,
+    IcuCollationKeyword,
     Integer,
     IntegerRange,
     Ip,
@@ -63,21 +70,28 @@ from .field import (
     Keyword,
     Long,
     LongRange,
+    MatchOnlyText,
     Murmur3,
     Nested,
     Object,
+    Passthrough,
     Percolator,
     Point,
     RangeField,
     RankFeature,
     RankFeatures,
+    RankVectors,
     ScaledFloat,
     SearchAsYouType,
+    SemanticText,
     Shape,
     Short,
     SparseVector,
     Text,
     TokenCount,
+    UnsignedLong,
+    Version,
+    Wildcard,
     construct_field,
 )
 from .function import SF
@@ -108,6 +122,8 @@ __all__ = [
     "A",
     "Agg",
     "AggResponse",
+    "AggregateMetricDouble",
+    "Alias",
     "AsyncComposableIndexTemplate",
     "AsyncDocument",
     "AsyncEmptySearch",
@@ -126,9 +142,11 @@ __all__ = [
     "Completion",
     "ComposableIndexTemplate",
     "ConstantKeyword",
+    "CountedKeyword",
     "CustomField",
     "Date",
     "DateHistogramFacet",
+    "DateNanos",
     "DateRange",
     "DenseVector",
     "Document",
@@ -142,12 +160,15 @@ __all__ = [
     "FacetedResponse",
     "FacetedSearch",
     "Field",
+    "Flattened",
     "Float",
     "FloatRange",
     "GeoPoint",
     "GeoShape",
     "HalfFloat",
+    "Histogram",
     "HistogramFacet",
+    "IcuCollationKeyword",
     "IllegalOperation",
     "Index",
     "IndexTemplate",
@@ -162,12 +183,14 @@ __all__ = [
     "LongRange",
     "M",
     "Mapping",
+    "MatchOnlyText",
     "MetaField",
     "MultiSearch",
     "Murmur3",
     "Nested",
     "NestedFacet",
     "Object",
+    "Passthrough",
     "Percolator",
     "Point",
     "Q",
@@ -177,11 +200,13 @@ __all__ = [
     "RangeField",
     "RankFeature",
     "RankFeatures",
+    "RankVectors",
     "Response",
     "SF",
     "ScaledFloat",
     "Search",
     "SearchAsYouType",
+    "SemanticText",
     "Shape",
     "Short",
     "SparseVector",
@@ -189,9 +214,12 @@ __all__ = [
     "Text",
     "TokenCount",
     "UnknownDslObject",
+    "UnsignedLong",
     "UpdateByQuery",
     "UpdateByQueryResponse",
     "ValidationException",
+    "Version",
+    "Wildcard",
     "analyzer",
     "async_connections",
     "char_filter",
elasticsearch/dsl/_async/document.py CHANGED
@@ -20,6 +20,7 @@ from typing import (
     TYPE_CHECKING,
     Any,
     AsyncIterable,
+    AsyncIterator,
     Dict,
     List,
     Optional,
@@ -42,6 +43,7 @@ from .search import AsyncSearch
 
 if TYPE_CHECKING:
     from elasticsearch import AsyncElasticsearch
+    from elasticsearch.esql.esql import ESQLBase
 
 
 class AsyncIndexMeta(DocumentMeta):
@@ -520,3 +522,85 @@ class AsyncDocument(DocumentBase, metaclass=AsyncIndexMeta):
                 return action
 
         return await async_bulk(es, Generate(actions), **kwargs)
+
+    @classmethod
+    async def esql_execute(
+        cls,
+        query: "ESQLBase",
+        return_additional: bool = False,
+        ignore_missing_fields: bool = False,
+        using: Optional[AsyncUsingType] = None,
+        **kwargs: Any,
+    ) -> AsyncIterator[Union[Self, Tuple[Self, Dict[str, Any]]]]:
+        """
+        Execute the given ES|QL query and return an iterator of 2-element tuples,
+        where the first element is an instance of this ``Document`` and the
+        second a dictionary with any remaining columns requested in the query.
+
+        :arg query: an ES|QL query object created with the ``esql_from()`` method.
+        :arg return_additional: if ``False`` (the default), this method returns
+            document objects. If set to ``True``, the method returns tuples with
+            a document in the first element and a dictionary with any additional
+            columns returned by the query in the second element.
+        :arg ignore_missing_fields: if ``False`` (the default), all the fields of
+            the document must be present in the query, or else an exception is
+            raised. Set to ``True`` to allow missing fields, which will result in
+            partially initialized document objects.
+        :arg using: connection alias to use, defaults to ``'default'``
+        :arg kwargs: additional options for the ``client.esql.query()`` function.
+        """
+        es = cls._get_connection(using)
+        response = await es.esql.query(query=str(query), **kwargs)
+        query_columns = [col["name"] for col in response.body.get("columns", [])]
+
+        # Here we get the list of columns defined in the document, which are the
+        # columns that we will take from each result to assemble the document
+        # object.
+        # When `for_esql=False` is passed below by default, the list will include
+        # nested fields, which ES|QL does not return, causing an error. When passing
+        # `ignore_missing_fields=True` the list will be generated with
+        # `for_esql=True`, so the error will not occur, but the documents will
+        # not have any Nested objects in them.
+        doc_fields = set(cls._get_field_names(for_esql=ignore_missing_fields))
+        if not ignore_missing_fields and not doc_fields.issubset(set(query_columns)):
+            raise ValueError(
+                f"Not all fields of {cls.__name__} were returned by the query. "
+                "Make sure your document does not use Nested fields, which are "
+                "currently not supported in ES|QL. To force the query to be "
+                "evaluated in spite of the missing fields, pass set the "
+                "ignore_missing_fields=True option in the esql_execute() call."
+            )
+        non_doc_fields: set[str] = set(query_columns) - doc_fields - {"_id"}
+        index_id = query_columns.index("_id")
+
+        results = response.body.get("values", [])
+        for column_values in results:
+            # create a dictionary with all the document fields, expanding the
+            # dot notation returned by ES|QL into the recursive dictionaries
+            # used by Document.from_dict()
+            doc_dict: Dict[str, Any] = {}
+            for col, val in zip(query_columns, column_values):
+                if col in doc_fields:
+                    cols = col.split(".")
+                    d = doc_dict
+                    for c in cols[:-1]:
+                        if c not in d:
+                            d[c] = {}
+                        d = d[c]
+                    d[cols[-1]] = val
+
+            # create the document instance
+            obj = cls(meta={"_id": column_values[index_id]})
+            obj._from_dict(doc_dict)
+
+            if return_additional:
+                # build a dict with any other values included in the response
+                other = {
+                    col: val
+                    for col, val in zip(query_columns, column_values)
+                    if col in non_doc_fields
+                }
+
+                yield obj, other
+            else:
+                yield obj
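A hedged usage sketch for the new `esql_execute()` helper. The docstring above refers to an `esql_from()` builder on the document class; the index, fields, and connection details below are invented, and the builder is assumed to support the usual ES|QL method chaining:

```python
# Sketch only: iterate over documents produced by an ES|QL query.
import asyncio

from elasticsearch.dsl import AsyncDocument, Keyword, Text, async_connections

async_connections.create_connection(hosts=["https://localhost:9200"], api_key="...")


class Article(AsyncDocument):
    title = Text()
    category = Keyword()

    class Index:
        name = "articles"


async def main() -> None:
    # esql_from() is expected to select this document's fields; extra clauses
    # narrow the result before execution.
    query = Article.esql_from().where('category == "news"').limit(10)
    async for article in Article.esql_execute(query):
        print(article.title)


asyncio.run(main())
```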