elasticsearch-9.1.1-py3-none-any.whl → elasticsearch-9.2.0-py3-none-any.whl

This diff compares the contents of two publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the package versions exactly as they appear in their respective public registries.
Files changed (76)
  1. elasticsearch/_async/client/__init__.py +96 -44
  2. elasticsearch/_async/client/async_search.py +7 -0
  3. elasticsearch/_async/client/cat.py +489 -26
  4. elasticsearch/_async/client/cluster.py +9 -8
  5. elasticsearch/_async/client/connector.py +3 -3
  6. elasticsearch/_async/client/eql.py +7 -0
  7. elasticsearch/_async/client/esql.py +26 -3
  8. elasticsearch/_async/client/fleet.py +1 -5
  9. elasticsearch/_async/client/graph.py +1 -5
  10. elasticsearch/_async/client/ilm.py +2 -10
  11. elasticsearch/_async/client/indices.py +181 -37
  12. elasticsearch/_async/client/inference.py +291 -124
  13. elasticsearch/_async/client/ingest.py +8 -0
  14. elasticsearch/_async/client/license.py +4 -2
  15. elasticsearch/_async/client/logstash.py +3 -1
  16. elasticsearch/_async/client/ml.py +2 -2
  17. elasticsearch/_async/client/nodes.py +3 -5
  18. elasticsearch/_async/client/project.py +67 -0
  19. elasticsearch/_async/client/security.py +39 -0
  20. elasticsearch/_async/client/shutdown.py +5 -15
  21. elasticsearch/_async/client/simulate.py +8 -0
  22. elasticsearch/_async/client/slm.py +1 -5
  23. elasticsearch/_async/client/snapshot.py +20 -10
  24. elasticsearch/_async/client/sql.py +7 -0
  25. elasticsearch/_async/client/streams.py +185 -0
  26. elasticsearch/_async/client/watcher.py +1 -5
  27. elasticsearch/_async/helpers.py +74 -12
  28. elasticsearch/_sync/client/__init__.py +96 -44
  29. elasticsearch/_sync/client/async_search.py +7 -0
  30. elasticsearch/_sync/client/cat.py +489 -26
  31. elasticsearch/_sync/client/cluster.py +9 -8
  32. elasticsearch/_sync/client/connector.py +3 -3
  33. elasticsearch/_sync/client/eql.py +7 -0
  34. elasticsearch/_sync/client/esql.py +26 -3
  35. elasticsearch/_sync/client/fleet.py +1 -5
  36. elasticsearch/_sync/client/graph.py +1 -5
  37. elasticsearch/_sync/client/ilm.py +2 -10
  38. elasticsearch/_sync/client/indices.py +181 -37
  39. elasticsearch/_sync/client/inference.py +291 -124
  40. elasticsearch/_sync/client/ingest.py +8 -0
  41. elasticsearch/_sync/client/license.py +4 -2
  42. elasticsearch/_sync/client/logstash.py +3 -1
  43. elasticsearch/_sync/client/ml.py +2 -2
  44. elasticsearch/_sync/client/nodes.py +3 -5
  45. elasticsearch/_sync/client/project.py +67 -0
  46. elasticsearch/_sync/client/security.py +39 -0
  47. elasticsearch/_sync/client/shutdown.py +5 -15
  48. elasticsearch/_sync/client/simulate.py +8 -0
  49. elasticsearch/_sync/client/slm.py +1 -5
  50. elasticsearch/_sync/client/snapshot.py +20 -10
  51. elasticsearch/_sync/client/sql.py +7 -0
  52. elasticsearch/_sync/client/streams.py +185 -0
  53. elasticsearch/_sync/client/watcher.py +1 -5
  54. elasticsearch/_version.py +2 -1
  55. elasticsearch/client.py +4 -0
  56. elasticsearch/compat.py +30 -1
  57. elasticsearch/dsl/__init__.py +28 -0
  58. elasticsearch/dsl/_async/document.py +2 -1
  59. elasticsearch/dsl/_sync/document.py +2 -1
  60. elasticsearch/dsl/aggs.py +97 -0
  61. elasticsearch/dsl/document_base.py +53 -13
  62. elasticsearch/dsl/field.py +21 -2
  63. elasticsearch/dsl/pydantic.py +152 -0
  64. elasticsearch/dsl/query.py +5 -1
  65. elasticsearch/dsl/response/__init__.py +3 -0
  66. elasticsearch/dsl/search_base.py +5 -1
  67. elasticsearch/dsl/types.py +226 -14
  68. elasticsearch/esql/esql.py +331 -41
  69. elasticsearch/esql/functions.py +88 -0
  70. elasticsearch/helpers/__init__.py +10 -1
  71. elasticsearch/helpers/actions.py +106 -33
  72. {elasticsearch-9.1.1.dist-info → elasticsearch-9.2.0.dist-info}/METADATA +27 -5
  73. {elasticsearch-9.1.1.dist-info → elasticsearch-9.2.0.dist-info}/RECORD +76 -71
  74. {elasticsearch-9.1.1.dist-info → elasticsearch-9.2.0.dist-info}/WHEEL +0 -0
  75. {elasticsearch-9.1.1.dist-info → elasticsearch-9.2.0.dist-info}/licenses/LICENSE +0 -0
  76. {elasticsearch-9.1.1.dist-info → elasticsearch-9.2.0.dist-info}/licenses/NOTICE +0 -0
--- a/elasticsearch/esql/functions.py
+++ b/elasticsearch/esql/functions.py
@@ -38,6 +38,20 @@ def abs(number: ExpressionType) -> InstrumentedExpression:
     return InstrumentedExpression(f"ABS({_render(number)})")
 
 
+def absent(field: ExpressionType) -> InstrumentedExpression:
+    """Returns true if the input expression yields no non-null values within the
+    current aggregation context.
+
+    :param field: Expression that outputs values to be checked for absence.
+    """
+    return InstrumentedExpression(f"ABSENT({_render(field)})")
+
+
+def absent_over_time(field: ExpressionType) -> InstrumentedExpression:
+    """Calculates the absence of a field in the output result over time range."""
+    return InstrumentedExpression(f"ABSENT_OVER_TIME({_render(field)})")
+
+
 def acos(number: ExpressionType) -> InstrumentedExpression:
     """Returns the arccosine of `n` as an angle, expressed in radians.
 
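The new helpers follow the same pattern as the existing ones: they render an ES|QL function call to text. A minimal sketch, assuming `E` is the field-reference helper exported by `elasticsearch.esql` and that printing an `InstrumentedExpression` yields its ES|QL text (the field name is illustrative):

```python
from elasticsearch.esql import E, functions

print(functions.absent(E("error.code")))            # ABSENT(error.code)
print(functions.absent_over_time(E("error.code")))  # ABSENT_OVER_TIME(error.code)
```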
@@ -364,6 +378,11 @@ def exp(number: ExpressionType) -> InstrumentedExpression:
     return InstrumentedExpression(f"EXP({_render(number)})")
 
 
+def first(value: ExpressionType, sort: ExpressionType) -> InstrumentedExpression:
+    """Calculates the earliest value of a field."""
+    return InstrumentedExpression(f"FIRST({_render(value)}, {_render(sort)})")
+
+
 def first_over_time(field: ExpressionType) -> InstrumentedExpression:
     """The earliest value of a field, where recency determined by the
     `@timestamp` field.
@@ -463,6 +482,11 @@ def kql(query: ExpressionType) -> InstrumentedExpression:
     return InstrumentedExpression(f"KQL({_render(query)})")
 
 
+def last(value: ExpressionType, sort: ExpressionType) -> InstrumentedExpression:
+    """Calculates the latest value of a field."""
+    return InstrumentedExpression(f"LAST({_render(value)}, {_render(sort)})")
+
+
 def last_over_time(field: ExpressionType) -> InstrumentedExpression:
     """The latest value of a field, where recency determined by the
     `@timestamp` field.
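The new `FIRST`/`LAST` pair takes a value expression plus a sort expression that defines which row counts as earliest or latest. A hedged rendering sketch with illustrative field names:

```python
from elasticsearch.esql import E, functions

print(functions.first(E("message"), E("@timestamp")))  # FIRST(message, @timestamp)
print(functions.last(E("message"), E("@timestamp")))   # LAST(message, @timestamp)
```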
@@ -697,6 +721,18 @@ def mv_concat(string: ExpressionType, delim: ExpressionType) -> InstrumentedExpr
     return InstrumentedExpression(f"MV_CONCAT({_render(string)}, {_render(delim)})")
 
 
+def mv_contains(
+    superset: ExpressionType, subset: ExpressionType
+) -> InstrumentedExpression:
+    """Checks if all values yielded by the second multivalue expression are present in the
+    values yielded by the first multivalue expression. Returns a boolean. Null values are
+    treated as an empty set.
+    """
+    return InstrumentedExpression(
+        f"MV_CONTAINS({_render(superset)}, {_render(subset)})"
+    )
+
+
 def mv_count(field: ExpressionType) -> InstrumentedExpression:
     """Converts a multivalued expression into a single valued column containing
     a count of the number of values.
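A sketch of `MV_CONTAINS` inside the query builder, assuming the documented `ESQL.from_(...).where(...)` chain; index and column names are illustrative:

```python
from elasticsearch.esql import ESQL, E, functions

# Keep rows whose multivalued `tags` column contains every value of
# `required_tags` (both columns are illustrative).
query = ESQL.from_("articles").where(
    functions.mv_contains(E("tags"), E("required_tags"))
)
# renders roughly: FROM articles | WHERE MV_CONTAINS(tags, required_tags)
```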
@@ -894,6 +930,18 @@ def pow(base: ExpressionType, exponent: ExpressionType) -> InstrumentedExpressio
     return InstrumentedExpression(f"POW({_render(base)}, {_render(exponent)})")
 
 
+def present(field: ExpressionType) -> InstrumentedExpression:
+    """Returns true if the input expression yields any non-null values within the current
+    aggregation context. Otherwise it returns false.
+    """
+    return InstrumentedExpression(f"PRESENT({_render(field)})")
+
+
+def present_over_time(field: ExpressionType) -> InstrumentedExpression:
+    """Calculates the presence of a field in the output result over time range."""
+    return InstrumentedExpression(f"PRESENT_OVER_TIME({_render(field)})")
+
+
 def qstr(
     query: ExpressionType, options: ExpressionType = None
 ) -> InstrumentedExpression:
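`PRESENT` mirrors `ABSENT` above; the same hedged rendering sketch applies:

```python
from elasticsearch.esql import E, functions

print(functions.present(E("error.code")))            # PRESENT(error.code)
print(functions.present_over_time(E("error.code")))  # PRESENT_OVER_TIME(error.code)
```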
@@ -1452,6 +1500,11 @@ def sum(number: ExpressionType) -> InstrumentedExpression:
     return InstrumentedExpression(f"SUM({_render(number)})")
 
 
+def sum_over_time(field: ExpressionType) -> InstrumentedExpression:
+    """Calculates the sum over time value of a field."""
+    return InstrumentedExpression(f"SUM({_render(field)})")
+
+
 def tan(angle: ExpressionType) -> InstrumentedExpression:
     """Returns the tangent of an angle.
 
@@ -1483,6 +1536,17 @@ def term(field: ExpressionType, query: ExpressionType) -> InstrumentedExpression
     return InstrumentedExpression(f"TERM({_render(field)}, {_render(query)})")
 
 
+def text_embedding(
+    text: ExpressionType, inference_id: ExpressionType
+) -> InstrumentedExpression:
+    """Generates dense vector embeddings from text input using a specified inference endpoint.
+    Use this function to generate query vectors for KNN searches against your vectorized data
+    or others dense vector based operations."""
+    return InstrumentedExpression(
+        f"TEXT_EMBEDDING({_render(text)}, {_render(inference_id)})"
+    )
+
+
 def top(
     field: ExpressionType, limit: ExpressionType, order: ExpressionType
 ) -> InstrumentedExpression:
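A hedged sketch of `TEXT_EMBEDDING`: the inference endpoint id is illustrative, and plain Python strings are assumed to render as quoted ES|QL string literals:

```python
from elasticsearch.esql import functions

# Render a server-side embedding call (endpoint id illustrative).
expr = functions.text_embedding("what is elasticsearch?", ".multilingual-e5-small")
print(expr)  # TEXT_EMBEDDING("what is elasticsearch?", ".multilingual-e5-small")
```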
@@ -1596,6 +1660,22 @@ def to_double(field: ExpressionType) -> InstrumentedExpression:
     return InstrumentedExpression(f"TO_DOUBLE({_render(field)})")
 
 
+def to_geohash(field: ExpressionType) -> InstrumentedExpression:
+    """Converts an input value to a geohash value. A string will only be successfully
+    converted if it respects the geohash format, as described for the geohash grid
+    aggregation.
+    """
+    return InstrumentedExpression(f"TO_GEOHASH({_render(field)})")
+
+
+def to_geohex(field: ExpressionType) -> InstrumentedExpression:
+    """Converts an input value to a geohex value. A string will only be successfully
+    converted if it respects the geohex format, as described for the geohex grid
+    aggregation.
+    """
+    return InstrumentedExpression(f"TO_GEOHEX({_render(field)})")
+
+
 def to_geopoint(field: ExpressionType) -> InstrumentedExpression:
     """Converts an input value to a `geo_point` value. A string will only be
     successfully converted if it respects the WKT Point format.
@@ -1616,6 +1696,14 @@ def to_geoshape(field: ExpressionType) -> InstrumentedExpression:
     return InstrumentedExpression(f"TO_GEOSHAPE({_render(field)})")
 
 
+def to_geotile(field: ExpressionType) -> InstrumentedExpression:
+    """Converts an input value to a geotile value. A string will only be successfully
+    converted if it respects the geotile format, as described for the geotile grid
+    aggregation.
+    """
+    return InstrumentedExpression(f"TO_GEOTILE({_render(field)})")
+
+
 def to_integer(field: ExpressionType) -> InstrumentedExpression:
     """Converts an input value to an integer value. If the input parameter is
     of a date type, its value will be interpreted as milliseconds since the
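The three new grid conversions follow the existing `TO_GEOPOINT` pattern; a rendering sketch with an illustrative column name:

```python
from elasticsearch.esql import E, functions

print(functions.to_geohash(E("grid_id")))  # TO_GEOHASH(grid_id)
print(functions.to_geohex(E("grid_id")))   # TO_GEOHEX(grid_id)
print(functions.to_geotile(E("grid_id")))  # TO_GEOTILE(grid_id)
```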
--- a/elasticsearch/helpers/__init__.py
+++ b/elasticsearch/helpers/__init__.py
@@ -19,12 +19,21 @@ from .._async.helpers import async_bulk, async_reindex, async_scan, async_stream
 from .._utils import fixup_module_metadata
 from .actions import _chunk_actions  # noqa: F401
 from .actions import _process_bulk_chunk  # noqa: F401
-from .actions import bulk, expand_action, parallel_bulk, reindex, scan, streaming_bulk
+from .actions import (
+    BULK_FLUSH,
+    bulk,
+    expand_action,
+    parallel_bulk,
+    reindex,
+    scan,
+    streaming_bulk,
+)
 from .errors import BulkIndexError, ScanError
 
 __all__ = [
     "BulkIndexError",
     "ScanError",
+    "BULK_FLUSH",
     "expand_action",
     "streaming_bulk",
     "bulk",
--- a/elasticsearch/helpers/actions.py
+++ b/elasticsearch/helpers/actions.py
@@ -16,9 +16,10 @@
 # under the License.
 
 import logging
+import queue
 import time
+from enum import Enum
 from operator import methodcaller
-from queue import Queue
 from typing import (
     Any,
     Callable,
@@ -37,13 +38,21 @@ from typing import (
 from elastic_transport import OpenTelemetrySpan
 
 from .. import Elasticsearch
-from ..compat import to_bytes
+from ..compat import safe_thread, to_bytes
 from ..exceptions import ApiError, NotFoundError, TransportError
 from ..serializer import Serializer
 from .errors import BulkIndexError, ScanError
 
 logger = logging.getLogger("elasticsearch.helpers")
 
+
+class BulkMeta(Enum):
+    flush = 1
+    done = 2
+
+
+BULK_FLUSH = BulkMeta.flush
+
 _TYPE_BULK_ACTION = Union[bytes, str, Dict[str, Any]]
 _TYPE_BULK_ACTION_HEADER = Dict[str, Any]
 _TYPE_BULK_ACTION_BODY = Union[None, bytes, Dict[str, Any]]
@@ -51,6 +60,13 @@ _TYPE_BULK_ACTION_HEADER_AND_BODY = Tuple[
     _TYPE_BULK_ACTION_HEADER, _TYPE_BULK_ACTION_BODY
 ]
 
+_TYPE_BULK_ACTION_WITH_META = Union[bytes, str, Dict[str, Any], BulkMeta]
+_TYPE_BULK_ACTION_HEADER_WITH_META = Union[Dict[str, Any], BulkMeta]
+_TYPE_BULK_ACTION_HEADER_WITH_META_AND_BODY = Union[
+    Tuple[_TYPE_BULK_ACTION_HEADER, _TYPE_BULK_ACTION_BODY],
+    Tuple[BulkMeta, Any],
+]
+
 
 def expand_action(data: _TYPE_BULK_ACTION) -> _TYPE_BULK_ACTION_HEADER_AND_BODY:
     """
@@ -139,7 +155,9 @@ class _ActionChunker:
     ] = []
 
     def feed(
-        self, action: _TYPE_BULK_ACTION_HEADER, data: _TYPE_BULK_ACTION_BODY
+        self,
+        action: _TYPE_BULK_ACTION_HEADER_WITH_META,
+        data: _TYPE_BULK_ACTION_BODY,
     ) -> Optional[
         Tuple[
             List[
@@ -152,23 +170,25 @@ class _ActionChunker:
         ]
     ]:
         ret = None
-        raw_action = action
-        raw_data = data
-        action_bytes = to_bytes(self.serializer.dumps(action), "utf-8")
-        # +1 to account for the trailing new line character
-        cur_size = len(action_bytes) + 1
-
-        data_bytes: Optional[bytes]
-        if data is not None:
-            data_bytes = to_bytes(self.serializer.dumps(data), "utf-8")
-            cur_size += len(data_bytes) + 1
-        else:
-            data_bytes = None
+        action_bytes = b""
+        data_bytes: Optional[bytes] = None
+        cur_size = 0
+        if not isinstance(action, BulkMeta):
+            action_bytes = to_bytes(self.serializer.dumps(action), "utf-8")
+            # +1 to account for the trailing new line character
+            cur_size = len(action_bytes) + 1
+
+            if data is not None:
+                data_bytes = to_bytes(self.serializer.dumps(data), "utf-8")
+                cur_size += len(data_bytes) + 1
+            else:
+                data_bytes = None
 
         # full chunk, send it and start a new one
         if self.bulk_actions and (
             self.size + cur_size > self.max_chunk_bytes
             or self.action_count == self.chunk_size
+            or (action == BulkMeta.flush and self.bulk_actions)
        ):
            ret = (self.bulk_data, self.bulk_actions)
            self.bulk_actions = []
@@ -176,15 +196,16 @@
             self.size = 0
             self.action_count = 0
 
-        self.bulk_actions.append(action_bytes)
-        if data_bytes is not None:
-            self.bulk_actions.append(data_bytes)
-            self.bulk_data.append((raw_action, raw_data))
-        else:
-            self.bulk_data.append((raw_action,))
+        if not isinstance(action, BulkMeta):
+            self.bulk_actions.append(action_bytes)
+            if data_bytes is not None:
+                self.bulk_actions.append(data_bytes)
+                self.bulk_data.append((action, data))
+            else:
+                self.bulk_data.append((action,))
 
-        self.size += cur_size
-        self.action_count += 1
+            self.size += cur_size
+            self.action_count += 1
         return ret
 
     def flush(
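Taken together, the two `feed()` hunks mean a `BulkMeta` sentinel contributes no bytes and is never appended, but can trip the flush condition. A hedged sketch against this private API (subject to change; endpoint and index names illustrative):

```python
from elasticsearch import Elasticsearch
from elasticsearch.helpers.actions import BulkMeta, _ActionChunker

client = Elasticsearch("http://localhost:9200")  # endpoint illustrative
serializer = client.transport.serializers.get_serializer("application/json")

chunker = _ActionChunker(
    chunk_size=500, max_chunk_bytes=100 * 1024 * 1024, serializer=serializer
)
# A normal action is only buffered, so nothing is returned yet.
assert chunker.feed({"index": {"_index": "demo"}}, {"field": 1}) is None
# The flush sentinel adds no bytes but forces the buffered chunk out.
bulk_data, bulk_actions = chunker.feed(BulkMeta.flush, None)
```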
@@ -209,9 +230,10 @@
 
 
 def _chunk_actions(
-    actions: Iterable[_TYPE_BULK_ACTION_HEADER_AND_BODY],
+    actions: Iterable[_TYPE_BULK_ACTION_HEADER_WITH_META_AND_BODY],
     chunk_size: int,
     max_chunk_bytes: int,
+    flush_after_seconds: Optional[float],
     serializer: Serializer,
 ) -> Iterable[
     Tuple[
@@ -231,10 +253,41 @@ def _chunk_actions(
     chunker = _ActionChunker(
         chunk_size=chunk_size, max_chunk_bytes=max_chunk_bytes, serializer=serializer
     )
-    for action, data in actions:
-        ret = chunker.feed(action, data)
-        if ret:
-            yield ret
+
+    if not flush_after_seconds:
+        for action, data in actions:
+            ret = chunker.feed(action, data)
+            if ret:
+                yield ret
+    else:
+        item_queue: queue.Queue[_TYPE_BULK_ACTION_HEADER_WITH_META_AND_BODY] = (
+            queue.Queue(maxsize=1)
+        )
+
+        def get_items() -> None:
+            try:
+                for item in actions:
+                    item_queue.put(item)
+            finally:
+                # make sure we signal the end even if there is an exception
+                item_queue.put((BulkMeta.done, None))
+
+        with safe_thread(get_items):
+            timeout: Optional[float] = flush_after_seconds
+            while True:
+                try:
+                    action, data = item_queue.get(timeout=timeout)
+                    timeout = flush_after_seconds
+                except queue.Empty:
+                    action, data = BulkMeta.flush, None
+                    timeout = None
+
+                if action is BulkMeta.done:
+                    break
+                ret = chunker.feed(action, data)
+                if ret:
+                    yield ret
+
     ret = chunker.flush()
     if ret:
         yield ret
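The timer branch is a producer/consumer pattern: a helper thread drains the caller's iterator into a bounded queue, and the consumer treats a `queue.Empty` timeout as a flush signal. A simplified standalone sketch of the same pattern (plain `threading` stands in for the new `safe_thread` context manager from `elasticsearch.compat`):

```python
import queue
import threading
from typing import Any, Iterable, Iterator, List, Optional

def timed_batches(
    items: Iterable[Any], idle_seconds: float, batch_size: int = 500
) -> Iterator[List[Any]]:
    """Yield batches, flushing a partial batch when input stays idle."""
    q: "queue.Queue[Any]" = queue.Queue(maxsize=1)
    done = object()  # end-of-input sentinel

    def producer() -> None:
        try:
            for item in items:
                q.put(item)
        finally:
            q.put(done)  # signal the end even if the iterable raises

    threading.Thread(target=producer, daemon=True).start()
    batch: List[Any] = []
    timeout: Optional[float] = idle_seconds
    while True:
        try:
            item = q.get(timeout=timeout)
            timeout = idle_seconds
        except queue.Empty:
            if batch:  # idle: flush the partial batch
                yield batch
                batch = []
            timeout = None  # then block until more input arrives
            continue
        if item is done:
            break
        batch.append(item)
        if len(batch) >= batch_size:
            yield batch
            batch = []
    if batch:
        yield batch
```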
@@ -361,9 +414,10 @@ def _process_bulk_chunk(
 
 def streaming_bulk(
     client: Elasticsearch,
-    actions: Iterable[_TYPE_BULK_ACTION],
+    actions: Iterable[_TYPE_BULK_ACTION_WITH_META],
     chunk_size: int = 500,
     max_chunk_bytes: int = 100 * 1024 * 1024,
+    flush_after_seconds: Optional[float] = None,
     raise_on_error: bool = True,
     expand_action_callback: Callable[
         [_TYPE_BULK_ACTION], _TYPE_BULK_ACTION_HEADER_AND_BODY
@@ -397,6 +451,9 @@
     :arg actions: iterable containing the actions to be executed
     :arg chunk_size: number of docs in one chunk sent to es (default: 500)
     :arg max_chunk_bytes: the maximum size of the request in bytes (default: 100MB)
+    :arg flush_after_seconds: time in seconds after which a chunk is written even
+        if hasn't reached `chunk_size` or `max_chunk_bytes`. Set to 0 to not use a
+        timeout-based flush. (default: 0)
     :arg raise_on_error: raise ``BulkIndexError`` containing errors (as `.errors`)
         from the execution of the last chunk when some occur. By default we raise.
     :arg raise_on_exception: if ``False`` then don't propagate exceptions from
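A hedged usage sketch of the new argument (the generator is hypothetical; it stands in for any slow or bursty source of actions):

```python
from elasticsearch import Elasticsearch
from elasticsearch.helpers import streaming_bulk

client = Elasticsearch("http://localhost:9200")  # endpoint illustrative

def tail_log_events():
    yield {"_index": "events", "message": "..."}  # hypothetical slow source

for ok, item in streaming_bulk(
    client,
    tail_log_events(),
    chunk_size=500,
    flush_after_seconds=5.0,  # send partial chunks after ~5 idle seconds
):
    if not ok:
        print("failed:", item)
```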
@@ -425,6 +482,13 @@
 
     serializer = client.transport.serializers.get_serializer("application/json")
 
+    def expand_action_with_meta(
+        data: _TYPE_BULK_ACTION_WITH_META,
+    ) -> _TYPE_BULK_ACTION_HEADER_WITH_META_AND_BODY:
+        if isinstance(data, BulkMeta):
+            return data, None
+        return expand_action_callback(data)
+
     bulk_data: List[
         Union[
             Tuple[_TYPE_BULK_ACTION_HEADER],
@@ -433,9 +497,10 @@
     ]
     bulk_actions: List[bytes]
     for bulk_data, bulk_actions in _chunk_actions(
-        map(expand_action_callback, actions),
+        map(expand_action_with_meta, actions),
         chunk_size,
         max_chunk_bytes,
+        flush_after_seconds,
         serializer,
     ):
         for attempt in range(max_retries + 1):
@@ -557,6 +622,7 @@ def parallel_bulk(
     thread_count: int = 4,
     chunk_size: int = 500,
     max_chunk_bytes: int = 100 * 1024 * 1024,
+    flush_after_seconds: Optional[float] = None,
     queue_size: int = 4,
     expand_action_callback: Callable[
         [_TYPE_BULK_ACTION], _TYPE_BULK_ACTION_HEADER_AND_BODY
@@ -573,6 +639,9 @@
     :arg thread_count: size of the threadpool to use for the bulk requests
     :arg chunk_size: number of docs in one chunk sent to es (default: 500)
     :arg max_chunk_bytes: the maximum size of the request in bytes (default: 100MB)
+    :arg flush_after_seconds: time in seconds after which a chunk is written even
+        if hasn't reached `chunk_size` or `max_chunk_bytes`. Set to 0 to not use a
+        timeout-based flush. (default: 0)
     :arg raise_on_error: raise ``BulkIndexError`` containing errors (as `.errors`)
         from the execution of the last chunk when some occur. By default we raise.
     :arg raise_on_exception: if ``False`` then don't propagate exceptions from
@@ -596,7 +665,7 @@
             super()._setup_queues()  # type: ignore[misc]
             # The queue must be at least the size of the number of threads to
             # prevent hanging when inserting sentinel values during teardown.
-            self._inqueue: Queue[
+            self._inqueue: queue.Queue[
                 Tuple[
                     List[
                         Union[
@@ -605,7 +674,7 @@
                     ],
                     List[bytes],
                 ]
-            ] = Queue(max(queue_size, thread_count))
+            ] = queue.Queue(max(queue_size, thread_count))
             self._quick_put = self._inqueue.put
 
     with client._otel.helpers_span("helpers.parallel_bulk") as otel_span:
@@ -625,7 +694,11 @@
                 )
             ),
             _chunk_actions(
-                expanded_actions, chunk_size, max_chunk_bytes, serializer
+                expanded_actions,
+                chunk_size,
+                max_chunk_bytes,
+                flush_after_seconds,
+                serializer,
             ),
         ):
             yield from result
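The same knob now reaches `parallel_bulk`. A hedged usage sketch (endpoint and index names illustrative):

```python
from elasticsearch import Elasticsearch
from elasticsearch.helpers import parallel_bulk

client = Elasticsearch("http://localhost:9200")  # endpoint illustrative
docs = ({"_index": "events", "n": i} for i in range(10_000))

for ok, item in parallel_bulk(
    client, docs, thread_count=4, flush_after_seconds=2.0
):
    if not ok:
        print("failed:", item)
```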
--- a/elasticsearch-9.1.1.dist-info/METADATA
+++ b/elasticsearch-9.2.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: elasticsearch
-Version: 9.1.1
+Version: 9.2.0
 Summary: Python client for Elasticsearch
 Project-URL: Documentation, https://elasticsearch-py.readthedocs.io/
 Project-URL: Homepage, https://github.com/elastic/elasticsearch-py
@@ -18,16 +18,17 @@ Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy
-Requires-Python: >=3.9
-Requires-Dist: elastic-transport<10,>=9.1.0
+Requires-Python: >=3.10
+Requires-Dist: anyio
+Requires-Dist: elastic-transport<10,>=9.2.0
 Requires-Dist: python-dateutil
+Requires-Dist: sniffio
 Requires-Dist: typing-extensions
 Provides-Extra: async
 Requires-Dist: aiohttp<4,>=3; extra == 'async'
@@ -36,6 +37,7 @@ Requires-Dist: aiohttp; extra == 'dev'
 Requires-Dist: black; extra == 'dev'
 Requires-Dist: build; extra == 'dev'
 Requires-Dist: coverage; extra == 'dev'
+Requires-Dist: httpx; extra == 'dev'
 Requires-Dist: isort; extra == 'dev'
 Requires-Dist: jinja2; extra == 'dev'
 Requires-Dist: mapbox-vector-tile; extra == 'dev'
@@ -44,7 +46,8 @@ Requires-Dist: nox; extra == 'dev'
 Requires-Dist: numpy; extra == 'dev'
 Requires-Dist: orjson; extra == 'dev'
 Requires-Dist: pandas; extra == 'dev'
-Requires-Dist: pyarrow; extra == 'dev'
+Requires-Dist: pyarrow; (python_version < '3.14') and extra == 'dev'
+Requires-Dist: pydantic; extra == 'dev'
 Requires-Dist: pyright; extra == 'dev'
 Requires-Dist: pytest; extra == 'dev'
 Requires-Dist: pytest-asyncio; extra == 'dev'
@@ -55,6 +58,7 @@ Requires-Dist: pyyaml>=5.4; extra == 'dev'
 Requires-Dist: requests<3,>=2; extra == 'dev'
 Requires-Dist: simsimd; extra == 'dev'
 Requires-Dist: tqdm; extra == 'dev'
+Requires-Dist: trio; extra == 'dev'
 Requires-Dist: twine; extra == 'dev'
 Requires-Dist: types-python-dateutil; extra == 'dev'
 Requires-Dist: types-tqdm; extra == 'dev'
@@ -166,6 +170,24 @@ Documentation for the client is [available on elastic.co] and [Read the Docs].
 [Read the Docs]: https://elasticsearch-py.readthedocs.io
 
 
+## Try Elasticsearch and Kibana locally
+
+If you want to try Elasticsearch and Kibana locally, you can run the following command:
+
+```bash
+curl -fsSL https://elastic.co/start-local | sh
+```
+
+This will run Elasticsearch at [http://localhost:9200](http://localhost:9200) and Kibana at [http://localhost:5601](http://localhost:5601).
+
+More information is available [here](https://www.elastic.co/guide/en/elasticsearch/reference/current/run-elasticsearch-locally.html).
+
+
+## Contributing
+
+See [CONTRIBUTING.md](./CONTRIBUTING.md)
+
+
 ## License
 
 This software is licensed under the [Apache License 2.0](./LICENSE). See [NOTICE](./NOTICE).