elasticsearch 9.1.0__py3-none-any.whl → 9.1.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. elasticsearch/_async/client/__init__.py +21 -6
  2. elasticsearch/_async/client/cat.py +1091 -51
  3. elasticsearch/_async/client/cluster.py +7 -2
  4. elasticsearch/_async/client/connector.py +3 -3
  5. elasticsearch/_async/client/esql.py +20 -6
  6. elasticsearch/_async/client/indices.py +27 -13
  7. elasticsearch/_async/client/inference.py +16 -5
  8. elasticsearch/_async/client/logstash.py +3 -1
  9. elasticsearch/_async/client/nodes.py +2 -2
  10. elasticsearch/_async/client/shutdown.py +5 -15
  11. elasticsearch/_async/client/sql.py +1 -1
  12. elasticsearch/_async/client/streams.py +186 -0
  13. elasticsearch/_async/client/transform.py +60 -0
  14. elasticsearch/_async/client/watcher.py +1 -5
  15. elasticsearch/_async/helpers.py +58 -9
  16. elasticsearch/_sync/client/__init__.py +21 -6
  17. elasticsearch/_sync/client/cat.py +1091 -51
  18. elasticsearch/_sync/client/cluster.py +7 -2
  19. elasticsearch/_sync/client/connector.py +3 -3
  20. elasticsearch/_sync/client/esql.py +20 -6
  21. elasticsearch/_sync/client/indices.py +27 -13
  22. elasticsearch/_sync/client/inference.py +16 -5
  23. elasticsearch/_sync/client/logstash.py +3 -1
  24. elasticsearch/_sync/client/nodes.py +2 -2
  25. elasticsearch/_sync/client/shutdown.py +5 -15
  26. elasticsearch/_sync/client/sql.py +1 -1
  27. elasticsearch/_sync/client/streams.py +186 -0
  28. elasticsearch/_sync/client/transform.py +60 -0
  29. elasticsearch/_sync/client/watcher.py +1 -5
  30. elasticsearch/_version.py +2 -1
  31. elasticsearch/client.py +2 -0
  32. elasticsearch/compat.py +43 -1
  33. elasticsearch/dsl/__init__.py +28 -0
  34. elasticsearch/dsl/_async/document.py +84 -0
  35. elasticsearch/dsl/_sync/document.py +84 -0
  36. elasticsearch/dsl/aggs.py +97 -0
  37. elasticsearch/dsl/document_base.py +57 -0
  38. elasticsearch/dsl/field.py +43 -11
  39. elasticsearch/dsl/query.py +5 -1
  40. elasticsearch/dsl/response/__init__.py +3 -0
  41. elasticsearch/dsl/response/aggs.py +1 -1
  42. elasticsearch/dsl/types.py +273 -24
  43. elasticsearch/dsl/utils.py +1 -1
  44. elasticsearch/esql/__init__.py +2 -1
  45. elasticsearch/esql/esql.py +85 -34
  46. elasticsearch/esql/functions.py +37 -25
  47. elasticsearch/helpers/__init__.py +10 -1
  48. elasticsearch/helpers/actions.py +106 -33
  49. {elasticsearch-9.1.0.dist-info → elasticsearch-9.1.2.dist-info}/METADATA +2 -4
  50. {elasticsearch-9.1.0.dist-info → elasticsearch-9.1.2.dist-info}/RECORD +53 -52
  51. elasticsearch/esql/esql1.py1 +0 -307
  52. {elasticsearch-9.1.0.dist-info → elasticsearch-9.1.2.dist-info}/WHEEL +0 -0
  53. {elasticsearch-9.1.0.dist-info → elasticsearch-9.1.2.dist-info}/licenses/LICENSE +0 -0
  54. {elasticsearch-9.1.0.dist-info → elasticsearch-9.1.2.dist-info}/licenses/NOTICE +0 -0
elasticsearch/esql/esql.py
@@ -16,6 +16,7 @@
 # under the License.
 
 import json
+import re
 from abc import ABC, abstractmethod
 from typing import Any, Dict, Optional, Tuple, Type, Union
 
@@ -111,6 +112,29 @@ class ESQLBase(ABC):
     def _render_internal(self) -> str:
         pass
 
+    @staticmethod
+    def _format_index(index: IndexType) -> str:
+        return index._index._name if hasattr(index, "_index") else str(index)
+
+    @staticmethod
+    def _format_id(id: FieldType, allow_patterns: bool = False) -> str:
+        s = str(id)  # in case it is an InstrumentedField
+        if allow_patterns and "*" in s:
+            return s  # patterns cannot be escaped
+        if re.fullmatch(r"[a-zA-Z_@][a-zA-Z0-9_\.]*", s):
+            return s
+        # this identifier needs to be escaped
+        s.replace("`", "``")
+        return f"`{s}`"
+
+    @staticmethod
+    def _format_expr(expr: ExpressionType) -> str:
+        return (
+            json.dumps(expr)
+            if not isinstance(expr, (str, InstrumentedExpression))
+            else str(expr)
+        )
+
     def _is_forked(self) -> bool:
         if self.__class__.__name__ == "Fork":
             return True
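Note on the three new helpers: `_format_id()` is what backtick-escapes ES|QL identifiers throughout the query builder, while `_format_index()` and `_format_expr()` normalize index references and expression values. A minimal standalone sketch of the escaping rule (the `format_id` function below is illustrative, not part of the package):

```python
import re


def format_id(name: str, allow_patterns: bool = False) -> str:
    # Sketch of the rule added above: wildcard patterns and plain identifiers
    # pass through unchanged; anything else is wrapped in backticks, with
    # embedded backticks doubled.
    if allow_patterns and "*" in name:
        return name
    if re.fullmatch(r"[a-zA-Z_@][a-zA-Z0-9_\.]*", name):
        return name
    return f"`{name.replace('`', '``')}`"


print(format_id("emp_no"))                       # emp_no
print(format_id("first name"))                   # `first name`
print(format_id("logs-*", allow_patterns=True))  # logs-*
```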
@@ -427,7 +451,7 @@ class ESQLBase(ABC):
         """
         return Sample(self, probability)
 
-    def sort(self, *columns: FieldType) -> "Sort":
+    def sort(self, *columns: ExpressionType) -> "Sort":
         """The ``SORT`` processing command sorts a table on one or more columns.
 
         :param columns: The columns to sort on.
@@ -570,15 +594,12 @@ class From(ESQLBase):
         return self
 
     def _render_internal(self) -> str:
-        indices = [
-            index if isinstance(index, str) else index._index._name
-            for index in self._indices
-        ]
+        indices = [self._format_index(index) for index in self._indices]
         s = f'{self.__class__.__name__.upper()} {", ".join(indices)}'
         if self._metadata_fields:
             s = (
                 s
-                + f' METADATA {", ".join([str(field) for field in self._metadata_fields])}'
+                + f' METADATA {", ".join([self._format_id(field) for field in self._metadata_fields])}'
             )
         return s
 
@@ -594,7 +615,11 @@ class Row(ESQLBase):
     def __init__(self, **params: ExpressionType):
         super().__init__()
         self._params = {
-            k: json.dumps(v) if not isinstance(v, InstrumentedExpression) else v
+            self._format_id(k): (
+                json.dumps(v)
+                if not isinstance(v, InstrumentedExpression)
+                else self._format_expr(v)
+            )
             for k, v in params.items()
         }
 
@@ -615,7 +640,7 @@ class Show(ESQLBase):
         self._item = item
 
     def _render_internal(self) -> str:
-        return f"SHOW {self._item}"
+        return f"SHOW {self._format_id(self._item)}"
 
 
 class Branch(ESQLBase):
@@ -667,11 +692,11 @@ class ChangePoint(ESQLBase):
         return self
 
     def _render_internal(self) -> str:
-        key = "" if not self._key else f" ON {self._key}"
+        key = "" if not self._key else f" ON {self._format_id(self._key)}"
         names = (
             ""
             if not self._type_name and not self._pvalue_name
-            else f' AS {self._type_name or "type"}, {self._pvalue_name or "pvalue"}'
+            else f' AS {self._format_id(self._type_name or "type")}, {self._format_id(self._pvalue_name or "pvalue")}'
         )
         return f"CHANGE_POINT {self._value}{key}{names}"
 
@@ -709,12 +734,13 @@ class Completion(ESQLBase):
     def _render_internal(self) -> str:
         if self._inference_id is None:
             raise ValueError("The completion command requires an inference ID")
+        with_ = {"inference_id": self._inference_id}
         if self._named_prompt:
             column = list(self._named_prompt.keys())[0]
             prompt = list(self._named_prompt.values())[0]
-            return f"COMPLETION {column} = {prompt} WITH {self._inference_id}"
+            return f"COMPLETION {self._format_id(column)} = {self._format_id(prompt)} WITH {json.dumps(with_)}"
         else:
-            return f"COMPLETION {self._prompt[0]} WITH {self._inference_id}"
+            return f"COMPLETION {self._format_id(self._prompt[0])} WITH {json.dumps(with_)}"
 
 
 class Dissect(ESQLBase):
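The `COMPLETION` command's rendering changed from a bare inference-endpoint identifier to a JSON `WITH` map, apparently to match the newer ES|QL syntax. A hedged before/after sketch (the `summary`, `question`, and `my_inference_id` names are placeholders):

```python
# Rendered by elasticsearch 9.1.0 (old syntax):
old = "COMPLETION summary = question WITH my_inference_id"

# Rendered by elasticsearch 9.1.2, per the hunk above (new syntax):
new = 'COMPLETION summary = question WITH {"inference_id": "my_inference_id"}'

print(old)
print(new)
```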
@@ -742,9 +768,13 @@ class Dissect(ESQLBase):
 
     def _render_internal(self) -> str:
         sep = (
-            "" if self._separator is None else f' APPEND_SEPARATOR="{self._separator}"'
+            ""
+            if self._separator is None
+            else f" APPEND_SEPARATOR={json.dumps(self._separator)}"
+        )
+        return (
+            f"DISSECT {self._format_id(self._input)} {json.dumps(self._pattern)}{sep}"
         )
-        return f"DISSECT {self._input} {json.dumps(self._pattern)}{sep}"
 
 
 class Drop(ESQLBase):
@@ -760,7 +790,7 @@ class Drop(ESQLBase):
         self._columns = columns
 
     def _render_internal(self) -> str:
-        return f'DROP {", ".join([str(col) for col in self._columns])}'
+        return f'DROP {", ".join([self._format_id(col, allow_patterns=True) for col in self._columns])}'
 
 
 class Enrich(ESQLBase):
@@ -814,12 +844,18 @@ class Enrich(ESQLBase):
         return self
 
     def _render_internal(self) -> str:
-        on = "" if self._match_field is None else f" ON {self._match_field}"
+        on = (
+            ""
+            if self._match_field is None
+            else f" ON {self._format_id(self._match_field)}"
+        )
         with_ = ""
         if self._named_fields:
-            with_ = f' WITH {", ".join([f"{name} = {field}" for name, field in self._named_fields.items()])}'
+            with_ = f' WITH {", ".join([f"{self._format_id(name)} = {self._format_id(field)}" for name, field in self._named_fields.items()])}'
         elif self._fields is not None:
-            with_ = f' WITH {", ".join([str(field) for field in self._fields])}'
+            with_ = (
+                f' WITH {", ".join([self._format_id(field) for field in self._fields])}'
+            )
         return f"ENRICH {self._policy}{on}{with_}"
 
 
@@ -832,7 +868,10 @@ class Eval(ESQLBase):
     """
 
     def __init__(
-        self, parent: ESQLBase, *columns: FieldType, **named_columns: FieldType
+        self,
+        parent: ESQLBase,
+        *columns: ExpressionType,
+        **named_columns: ExpressionType,
     ):
         if columns and named_columns:
             raise ValueError(
@@ -844,10 +883,13 @@ class Eval(ESQLBase):
     def _render_internal(self) -> str:
         if isinstance(self._columns, dict):
             cols = ", ".join(
-                [f"{name} = {value}" for name, value in self._columns.items()]
+                [
+                    f"{self._format_id(name)} = {self._format_expr(value)}"
+                    for name, value in self._columns.items()
+                ]
             )
         else:
-            cols = ", ".join([f"{col}" for col in self._columns])
+            cols = ", ".join([f"{self._format_expr(col)}" for col in self._columns])
         return f"EVAL {cols}"
 
 
@@ -900,7 +942,7 @@ class Grok(ESQLBase):
         self._pattern = pattern
 
     def _render_internal(self) -> str:
-        return f"GROK {self._input} {json.dumps(self._pattern)}"
+        return f"GROK {self._format_id(self._input)} {json.dumps(self._pattern)}"
 
 
 class Keep(ESQLBase):
@@ -916,7 +958,7 @@ class Keep(ESQLBase):
         self._columns = columns
 
     def _render_internal(self) -> str:
-        return f'KEEP {", ".join([f"{col}" for col in self._columns])}'
+        return f'KEEP {", ".join([f"{self._format_id(col, allow_patterns=True)}" for col in self._columns])}'
 
 
 class Limit(ESQLBase):
@@ -932,7 +974,7 @@ class Limit(ESQLBase):
         self._max_number_of_rows = max_number_of_rows
 
     def _render_internal(self) -> str:
-        return f"LIMIT {self._max_number_of_rows}"
+        return f"LIMIT {json.dumps(self._max_number_of_rows)}"
 
 
 class LookupJoin(ESQLBase):
@@ -967,7 +1009,9 @@ class LookupJoin(ESQLBase):
             if isinstance(self._lookup_index, str)
             else self._lookup_index._index._name
         )
-        return f"LOOKUP JOIN {index} ON {self._field}"
+        return (
+            f"LOOKUP JOIN {self._format_index(index)} ON {self._format_id(self._field)}"
+        )
 
 
 class MvExpand(ESQLBase):
@@ -983,7 +1027,7 @@ class MvExpand(ESQLBase):
         self._column = column
 
     def _render_internal(self) -> str:
-        return f"MV_EXPAND {self._column}"
+        return f"MV_EXPAND {self._format_id(self._column)}"
 
 
 class Rename(ESQLBase):
@@ -999,7 +1043,7 @@ class Rename(ESQLBase):
         self._columns = columns
 
     def _render_internal(self) -> str:
-        return f'RENAME {", ".join([f"{old_name} AS {new_name}" for old_name, new_name in self._columns.items()])}'
+        return f'RENAME {", ".join([f"{self._format_id(old_name)} AS {self._format_id(new_name)}" for old_name, new_name in self._columns.items()])}'
 
 
 class Sample(ESQLBase):
@@ -1015,7 +1059,7 @@ class Sample(ESQLBase):
         self._probability = probability
 
     def _render_internal(self) -> str:
-        return f"SAMPLE {self._probability}"
+        return f"SAMPLE {json.dumps(self._probability)}"
 
 
 class Sort(ESQLBase):
@@ -1026,12 +1070,16 @@ class Sort(ESQLBase):
     in a single expression.
     """
 
-    def __init__(self, parent: ESQLBase, *columns: FieldType):
+    def __init__(self, parent: ESQLBase, *columns: ExpressionType):
        super().__init__(parent)
        self._columns = columns
 
     def _render_internal(self) -> str:
-        return f'SORT {", ".join([f"{col}" for col in self._columns])}'
+        sorts = [
+            " ".join([self._format_id(term) for term in str(col).split(" ")])
+            for col in self._columns
+        ]
+        return f'SORT {", ".join([f"{sort}" for sort in sorts])}'
 
 
 class Stats(ESQLBase):
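`sort()` now accepts full sort expressions rather than bare column names, and each whitespace-separated term is escaped independently, so direction and null-ordering keywords pass through untouched. A hedged usage sketch (it assumes the `ESQL.from_()` entry point and `render()` method that ship with 9.1; names are illustrative):

```python
from elasticsearch.esql import ESQL

query = ESQL.from_("employees").sort("height DESC", "first_name ASC NULLS FIRST")

# Expected to render roughly as:
#   FROM employees
#   | SORT height DESC, first_name ASC NULLS FIRST
print(query.render())
```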
@@ -1062,14 +1110,17 @@ class Stats(ESQLBase):
 
     def _render_internal(self) -> str:
         if isinstance(self._expressions, dict):
-            exprs = [f"{key} = {value}" for key, value in self._expressions.items()]
+            exprs = [
+                f"{self._format_id(key)} = {self._format_expr(value)}"
+                for key, value in self._expressions.items()
+            ]
         else:
-            exprs = [f"{expr}" for expr in self._expressions]
+            exprs = [f"{self._format_expr(expr)}" for expr in self._expressions]
         expression_separator = ",\n "
         by = (
             ""
             if self._grouping_expressions is None
-            else f'\n BY {", ".join([f"{expr}" for expr in self._grouping_expressions])}'
+            else f'\n BY {", ".join([f"{self._format_expr(expr)}" for expr in self._grouping_expressions])}'
         )
         return f'STATS {expression_separator.join([f"{expr}" for expr in exprs])}{by}'
 
@@ -1087,7 +1138,7 @@ class Where(ESQLBase):
         self._expressions = expressions
 
     def _render_internal(self) -> str:
-        return f'WHERE {" AND ".join([f"{expr}" for expr in self._expressions])}'
+        return f'WHERE {" AND ".join([f"{self._format_expr(expr)}" for expr in self._expressions])}'
 
 
 def and_(*expressions: InstrumentedExpression) -> "InstrumentedExpression":
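Taken together, the esql.py changes mean that identifiers are backtick-escaped where needed and literal values are JSON-encoded when a query is rendered. A hedged end-to-end sketch (again assuming the public `ESQL.from_()`/`render()` API from 9.1; index, field, and expression strings are illustrative):

```python
from elasticsearch.esql import ESQL

query = (
    ESQL.from_("employees")
    .where('first_name == "Georgi"')      # string expressions pass through as-is
    .keep("emp_no", "first name")         # "first name" should become `first name`
    .eval(height_feet="height * 3.281")   # column names escaped, values rendered
    .limit(5)                             # numbers are JSON-encoded
)

# Expected to render roughly as:
#   FROM employees
#   | WHERE first_name == "Georgi"
#   | KEEP emp_no, `first name`
#   | EVAL height_feet = height * 3.281
#   | LIMIT 5
print(query.render())
```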
elasticsearch/esql/functions.py
@@ -19,11 +19,15 @@ import json
 from typing import Any
 
 from elasticsearch.dsl.document_base import InstrumentedExpression
-from elasticsearch.esql.esql import ExpressionType
+from elasticsearch.esql.esql import ESQLBase, ExpressionType
 
 
 def _render(v: Any) -> str:
-    return json.dumps(v) if not isinstance(v, InstrumentedExpression) else str(v)
+    return (
+        json.dumps(v)
+        if not isinstance(v, InstrumentedExpression)
+        else ESQLBase._format_expr(v)
+    )
 
 
 def abs(number: ExpressionType) -> InstrumentedExpression:
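`_render()` now delegates instrumented expressions to `ESQLBase._format_expr()`, so every function argument is either JSON-encoded (plain Python literals, including strings) or emitted verbatim (expression objects). A small standalone sketch of that behaviour (the `render` function below is a stand-in, not the package's private helper):

```python
import json


def render(value, is_expression=False):
    # Stand-in: expression objects are emitted verbatim; every other value
    # (numbers, booleans, strings, lists, ...) becomes a JSON literal.
    return str(value) if is_expression else json.dumps(value)


print(render(2.5))                                  # 2.5
print(render("2023-01-23T12:15:00.000Z"))           # "2023-01-23T12:15:00.000Z"
print(render("doc.price * 2", is_expression=True))  # doc.price * 2
```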
@@ -69,7 +73,9 @@ def atan2(
     :param y_coordinate: y coordinate. If `null`, the function returns `null`.
     :param x_coordinate: x coordinate. If `null`, the function returns `null`.
     """
-    return InstrumentedExpression(f"ATAN2({y_coordinate}, {x_coordinate})")
+    return InstrumentedExpression(
+        f"ATAN2({_render(y_coordinate)}, {_render(x_coordinate)})"
+    )
 
 
 def avg(number: ExpressionType) -> InstrumentedExpression:
@@ -114,7 +120,7 @@ def bucket(
     :param to: End of the range. Can be a number, a date or a date expressed as a string.
     """
     return InstrumentedExpression(
-        f"BUCKET({_render(field)}, {_render(buckets)}, {from_}, {_render(to)})"
+        f"BUCKET({_render(field)}, {_render(buckets)}, {_render(from_)}, {_render(to)})"
     )
 
 
@@ -169,7 +175,7 @@ def cidr_match(ip: ExpressionType, block_x: ExpressionType) -> InstrumentedExpre
     :param ip: IP address of type `ip` (both IPv4 and IPv6 are supported).
     :param block_x: CIDR block to test the IP against.
     """
-    return InstrumentedExpression(f"CIDR_MATCH({_render(ip)}, {block_x})")
+    return InstrumentedExpression(f"CIDR_MATCH({_render(ip)}, {_render(block_x)})")
 
 
 def coalesce(first: ExpressionType, rest: ExpressionType) -> InstrumentedExpression:
@@ -264,7 +270,7 @@ def date_diff(
     :param end_timestamp: A string representing an end timestamp
     """
     return InstrumentedExpression(
-        f"DATE_DIFF({_render(unit)}, {start_timestamp}, {end_timestamp})"
+        f"DATE_DIFF({_render(unit)}, {_render(start_timestamp)}, {_render(end_timestamp)})"
     )
 
 
@@ -285,7 +291,9 @@ def date_extract(
         the function returns `null`.
     :param date: Date expression. If `null`, the function returns `null`.
     """
-    return InstrumentedExpression(f"DATE_EXTRACT({date_part}, {_render(date)})")
+    return InstrumentedExpression(
+        f"DATE_EXTRACT({_render(date_part)}, {_render(date)})"
+    )
 
 
 def date_format(
@@ -301,7 +309,7 @@ def date_format(
     """
     if date_format is not None:
         return InstrumentedExpression(
-            f"DATE_FORMAT({json.dumps(date_format)}, {_render(date)})"
+            f"DATE_FORMAT({_render(date_format)}, {_render(date)})"
         )
     else:
         return InstrumentedExpression(f"DATE_FORMAT({_render(date)})")
@@ -317,7 +325,9 @@ def date_parse(
     :param date_string: Date expression as a string. If `null` or an empty
         string, the function returns `null`.
     """
-    return InstrumentedExpression(f"DATE_PARSE({date_pattern}, {date_string})")
+    return InstrumentedExpression(
+        f"DATE_PARSE({_render(date_pattern)}, {_render(date_string)})"
+    )
 
 
 def date_trunc(
@@ -639,7 +649,7 @@ def min_over_time(field: ExpressionType) -> InstrumentedExpression:
 
 
 def multi_match(
-    query: ExpressionType, fields: ExpressionType, options: ExpressionType = None
+    query: ExpressionType, *fields: ExpressionType, options: ExpressionType = None
 ) -> InstrumentedExpression:
     """Use `MULTI_MATCH` to perform a multi-match query on the specified field.
     The multi_match query builds on the match query to allow multi-field queries.
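`multi_match()` now takes each field as its own positional argument; the next hunk renders them individually. A hedged sketch (per `_render()`, plain-string arguments come out JSON-quoted):

```python
from elasticsearch.esql import functions

# 9.1.0: multi_match(query, fields, options=None)   -- a single `fields` value
# 9.1.2: multi_match(query, *fields, options=None)  -- one argument per field
expr = functions.multi_match("Faulkner", "author", "description")

# Expected to print roughly: MULTI_MATCH("Faulkner", "author", "description")
print(expr)
```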
@@ -651,11 +661,11 @@ def multi_match(
     """
     if options is not None:
         return InstrumentedExpression(
-            f"MULTI_MATCH({_render(query)}, {_render(fields)}, {_render(options)})"
+            f'MULTI_MATCH({_render(query)}, {", ".join([_render(c) for c in fields])}, {_render(options)})'
         )
     else:
         return InstrumentedExpression(
-            f"MULTI_MATCH({_render(query)}, {_render(fields)})"
+            f'MULTI_MATCH({_render(query)}, {", ".join([_render(c) for c in fields])})'
         )
 
 
@@ -929,7 +939,7 @@ def replace(
     :param new_string: Replacement string.
     """
     return InstrumentedExpression(
-        f"REPLACE({_render(string)}, {_render(regex)}, {new_string})"
+        f"REPLACE({_render(string)}, {_render(regex)}, {_render(new_string)})"
     )
 
 
@@ -1004,7 +1014,7 @@ def scalb(d: ExpressionType, scale_factor: ExpressionType) -> InstrumentedExpres
     :param scale_factor: Numeric expression for the scale factor. If `null`, the
         function returns `null`.
     """
-    return InstrumentedExpression(f"SCALB({_render(d)}, {scale_factor})")
+    return InstrumentedExpression(f"SCALB({_render(d)}, {_render(scale_factor)})")
 
 
 def sha1(input: ExpressionType) -> InstrumentedExpression:
@@ -1116,7 +1126,7 @@ def st_contains(
         first. This means it is not possible to combine `geo_*` and
         `cartesian_*` parameters.
     """
-    return InstrumentedExpression(f"ST_CONTAINS({geom_a}, {geom_b})")
+    return InstrumentedExpression(f"ST_CONTAINS({_render(geom_a)}, {_render(geom_b)})")
 
 
 def st_disjoint(
@@ -1135,7 +1145,7 @@ def st_disjoint(
         first. This means it is not possible to combine `geo_*` and
         `cartesian_*` parameters.
     """
-    return InstrumentedExpression(f"ST_DISJOINT({geom_a}, {geom_b})")
+    return InstrumentedExpression(f"ST_DISJOINT({_render(geom_a)}, {_render(geom_b)})")
 
 
 def st_distance(
@@ -1153,7 +1163,7 @@ def st_distance(
         also have the same coordinate system as the first. This means it
         is not possible to combine `geo_point` and `cartesian_point` parameters.
     """
-    return InstrumentedExpression(f"ST_DISTANCE({geom_a}, {geom_b})")
+    return InstrumentedExpression(f"ST_DISTANCE({_render(geom_a)}, {_render(geom_b)})")
 
 
 def st_envelope(geometry: ExpressionType) -> InstrumentedExpression:
@@ -1208,7 +1218,7 @@ def st_geohash_to_long(grid_id: ExpressionType) -> InstrumentedExpression:
     :param grid_id: Input geohash grid-id. The input can be a single- or
         multi-valued column or an expression.
     """
-    return InstrumentedExpression(f"ST_GEOHASH_TO_LONG({grid_id})")
+    return InstrumentedExpression(f"ST_GEOHASH_TO_LONG({_render(grid_id)})")
 
 
 def st_geohash_to_string(grid_id: ExpressionType) -> InstrumentedExpression:
@@ -1218,7 +1228,7 @@ def st_geohash_to_string(grid_id: ExpressionType) -> InstrumentedExpression:
     :param grid_id: Input geohash grid-id. The input can be a single- or
         multi-valued column or an expression.
     """
-    return InstrumentedExpression(f"ST_GEOHASH_TO_STRING({grid_id})")
+    return InstrumentedExpression(f"ST_GEOHASH_TO_STRING({_render(grid_id)})")
 
 
 def st_geohex(
@@ -1254,7 +1264,7 @@ def st_geohex_to_long(grid_id: ExpressionType) -> InstrumentedExpression:
     :param grid_id: Input geohex grid-id. The input can be a single- or
         multi-valued column or an expression.
     """
-    return InstrumentedExpression(f"ST_GEOHEX_TO_LONG({grid_id})")
+    return InstrumentedExpression(f"ST_GEOHEX_TO_LONG({_render(grid_id)})")
 
 
 def st_geohex_to_string(grid_id: ExpressionType) -> InstrumentedExpression:
@@ -1264,7 +1274,7 @@ def st_geohex_to_string(grid_id: ExpressionType) -> InstrumentedExpression:
     :param grid_id: Input Geohex grid-id. The input can be a single- or
         multi-valued column or an expression.
     """
-    return InstrumentedExpression(f"ST_GEOHEX_TO_STRING({grid_id})")
+    return InstrumentedExpression(f"ST_GEOHEX_TO_STRING({_render(grid_id)})")
 
 
 def st_geotile(
@@ -1300,7 +1310,7 @@ def st_geotile_to_long(grid_id: ExpressionType) -> InstrumentedExpression:
     :param grid_id: Input geotile grid-id. The input can be a single- or
         multi-valued column or an expression.
     """
-    return InstrumentedExpression(f"ST_GEOTILE_TO_LONG({grid_id})")
+    return InstrumentedExpression(f"ST_GEOTILE_TO_LONG({_render(grid_id)})")
 
 
 def st_geotile_to_string(grid_id: ExpressionType) -> InstrumentedExpression:
@@ -1310,7 +1320,7 @@ def st_geotile_to_string(grid_id: ExpressionType) -> InstrumentedExpression:
     :param grid_id: Input geotile grid-id. The input can be a single- or
         multi-valued column or an expression.
     """
-    return InstrumentedExpression(f"ST_GEOTILE_TO_STRING({grid_id})")
+    return InstrumentedExpression(f"ST_GEOTILE_TO_STRING({_render(grid_id)})")
 
 
 def st_intersects(
@@ -1330,7 +1340,9 @@ def st_intersects(
         first. This means it is not possible to combine `geo_*` and
         `cartesian_*` parameters.
     """
-    return InstrumentedExpression(f"ST_INTERSECTS({geom_a}, {geom_b})")
+    return InstrumentedExpression(
+        f"ST_INTERSECTS({_render(geom_a)}, {_render(geom_b)})"
+    )
 
 
 def st_within(geom_a: ExpressionType, geom_b: ExpressionType) -> InstrumentedExpression:
@@ -1346,7 +1358,7 @@ def st_within(geom_a: ExpressionType, geom_b: ExpressionType) -> InstrumentedExp
         first. This means it is not possible to combine `geo_*` and
         `cartesian_*` parameters.
     """
-    return InstrumentedExpression(f"ST_WITHIN({geom_a}, {geom_b})")
+    return InstrumentedExpression(f"ST_WITHIN({_render(geom_a)}, {_render(geom_b)})")
 
 
 def st_x(point: ExpressionType) -> InstrumentedExpression:
elasticsearch/helpers/__init__.py
@@ -19,12 +19,21 @@ from .._async.helpers import async_bulk, async_reindex, async_scan, async_stream
 from .._utils import fixup_module_metadata
 from .actions import _chunk_actions  # noqa: F401
 from .actions import _process_bulk_chunk  # noqa: F401
-from .actions import bulk, expand_action, parallel_bulk, reindex, scan, streaming_bulk
+from .actions import (
+    BULK_FLUSH,
+    bulk,
+    expand_action,
+    parallel_bulk,
+    reindex,
+    scan,
+    streaming_bulk,
+)
 from .errors import BulkIndexError, ScanError
 
 __all__ = [
     "BulkIndexError",
     "ScanError",
+    "BULK_FLUSH",
     "expand_action",
     "streaming_bulk",
     "bulk",