elasticsearch 8.19.0__py3-none-any.whl → 8.19.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. elasticsearch/_async/client/__init__.py +39 -55
  2. elasticsearch/_async/client/cat.py +605 -35
  3. elasticsearch/_async/client/cluster.py +7 -2
  4. elasticsearch/_async/client/connector.py +3 -3
  5. elasticsearch/_async/client/esql.py +16 -6
  6. elasticsearch/_async/client/fleet.py +1 -5
  7. elasticsearch/_async/client/graph.py +1 -5
  8. elasticsearch/_async/client/ilm.py +2 -10
  9. elasticsearch/_async/client/indices.py +159 -32
  10. elasticsearch/_async/client/inference.py +142 -120
  11. elasticsearch/_async/client/nodes.py +2 -2
  12. elasticsearch/_async/client/shutdown.py +5 -15
  13. elasticsearch/_async/client/slm.py +1 -5
  14. elasticsearch/_async/client/snapshot.py +262 -112
  15. elasticsearch/_async/client/sql.py +1 -1
  16. elasticsearch/_async/client/streams.py +185 -0
  17. elasticsearch/_async/client/transform.py +60 -0
  18. elasticsearch/_async/client/watcher.py +1 -5
  19. elasticsearch/_async/helpers.py +58 -9
  20. elasticsearch/_sync/client/__init__.py +39 -55
  21. elasticsearch/_sync/client/cat.py +605 -35
  22. elasticsearch/_sync/client/cluster.py +7 -2
  23. elasticsearch/_sync/client/connector.py +3 -3
  24. elasticsearch/_sync/client/esql.py +16 -6
  25. elasticsearch/_sync/client/fleet.py +1 -5
  26. elasticsearch/_sync/client/graph.py +1 -5
  27. elasticsearch/_sync/client/ilm.py +2 -10
  28. elasticsearch/_sync/client/indices.py +159 -32
  29. elasticsearch/_sync/client/inference.py +142 -120
  30. elasticsearch/_sync/client/nodes.py +2 -2
  31. elasticsearch/_sync/client/shutdown.py +5 -15
  32. elasticsearch/_sync/client/slm.py +1 -5
  33. elasticsearch/_sync/client/snapshot.py +262 -112
  34. elasticsearch/_sync/client/sql.py +1 -1
  35. elasticsearch/_sync/client/streams.py +185 -0
  36. elasticsearch/_sync/client/transform.py +60 -0
  37. elasticsearch/_sync/client/watcher.py +1 -5
  38. elasticsearch/_version.py +2 -1
  39. elasticsearch/client.py +2 -0
  40. elasticsearch/compat.py +45 -1
  41. elasticsearch/dsl/__init__.py +28 -0
  42. elasticsearch/dsl/_async/document.py +84 -0
  43. elasticsearch/dsl/_sync/document.py +84 -0
  44. elasticsearch/dsl/aggs.py +117 -0
  45. elasticsearch/dsl/document_base.py +59 -1
  46. elasticsearch/dsl/field.py +60 -10
  47. elasticsearch/dsl/query.py +1 -1
  48. elasticsearch/dsl/response/__init__.py +3 -0
  49. elasticsearch/dsl/response/aggs.py +1 -1
  50. elasticsearch/dsl/types.py +325 -20
  51. elasticsearch/dsl/utils.py +1 -1
  52. elasticsearch/esql/__init__.py +2 -1
  53. elasticsearch/esql/esql.py +85 -34
  54. elasticsearch/esql/functions.py +37 -25
  55. elasticsearch/helpers/__init__.py +10 -1
  56. elasticsearch/helpers/actions.py +106 -33
  57. {elasticsearch-8.19.0.dist-info → elasticsearch-8.19.2.dist-info}/METADATA +2 -4
  58. {elasticsearch-8.19.0.dist-info → elasticsearch-8.19.2.dist-info}/RECORD +61 -59
  59. {elasticsearch-8.19.0.dist-info → elasticsearch-8.19.2.dist-info}/WHEEL +0 -0
  60. {elasticsearch-8.19.0.dist-info → elasticsearch-8.19.2.dist-info}/licenses/LICENSE +0 -0
  61. {elasticsearch-8.19.0.dist-info → elasticsearch-8.19.2.dist-info}/licenses/NOTICE +0 -0
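To confirm which release is actually installed after upgrading the wheel, a quick check with the standard library is enough (assuming a normal pip install):

# Verify the installed client version after upgrading.
from importlib.metadata import version

print(version("elasticsearch"))  # expected output: 8.19.2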
@@ -78,11 +78,7 @@ class InferenceClient(NamespacedClient):
                 __body["input"] = input
             if task_settings is not None:
                 __body["task_settings"] = task_settings
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return await self.perform_request(  # type: ignore[return-value]
             "POST",
             __path,
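This hunk, and the many identical ones below, all make the same change: the generated request methods no longer replace an empty body with None and conditionally drop the content-type header; they now always send a JSON content type along with the (possibly empty) dict body. A condensed sketch of the old versus new behaviour, using the same variable names as the generated code:

# Old behaviour (simplified): an empty body suppressed the JSON content type.
__body = {}
if not __body:
    __body = None
__headers = {"accept": "application/json"}
if __body is not None:
    __headers["content-type"] = "application/json"

# New behaviour (simplified): the headers are constant and the dict body is sent as-is.
__body = {}
__headers = {"accept": "application/json", "content-type": "application/json"}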
@@ -338,11 +334,7 @@ class InferenceClient(NamespacedClient):
                 __body["query"] = query
             if task_settings is not None:
                 __body["task_settings"] = task_settings
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return await self.perform_request(  # type: ignore[return-value]
             "POST",
             __path,
@@ -391,21 +383,23 @@ class InferenceClient(NamespacedClient):
           <ul>
           <li>AlibabaCloud AI Search (<code>completion</code>, <code>rerank</code>, <code>sparse_embedding</code>, <code>text_embedding</code>)</li>
           <li>Amazon Bedrock (<code>completion</code>, <code>text_embedding</code>)</li>
+          <li>Amazon SageMaker (<code>chat_completion</code>, <code>completion</code>, <code>rerank</code>, <code>sparse_embedding</code>, <code>text_embedding</code>)</li>
           <li>Anthropic (<code>completion</code>)</li>
           <li>Azure AI Studio (<code>completion</code>, <code>text_embedding</code>)</li>
           <li>Azure OpenAI (<code>completion</code>, <code>text_embedding</code>)</li>
           <li>Cohere (<code>completion</code>, <code>rerank</code>, <code>text_embedding</code>)</li>
-          <li>DeepSeek (<code>completion</code>, <code>chat_completion</code>)</li>
+          <li>DeepSeek (<code>chat_completion</code>, <code>completion</code>)</li>
           <li>Elasticsearch (<code>rerank</code>, <code>sparse_embedding</code>, <code>text_embedding</code> - this service is for built-in models and models uploaded through Eland)</li>
           <li>ELSER (<code>sparse_embedding</code>)</li>
           <li>Google AI Studio (<code>completion</code>, <code>text_embedding</code>)</li>
-          <li>Google Vertex AI (<code>rerank</code>, <code>text_embedding</code>)</li>
+          <li>Google Vertex AI (<code>chat_completion</code>, <code>completion</code>, <code>rerank</code>, <code>text_embedding</code>)</li>
           <li>Hugging Face (<code>chat_completion</code>, <code>completion</code>, <code>rerank</code>, <code>text_embedding</code>)</li>
+          <li>JinaAI (<code>rerank</code>, <code>text_embedding</code>)</li>
+          <li>Llama (<code>chat_completion</code>, <code>completion</code>, <code>text_embedding</code>)</li>
           <li>Mistral (<code>chat_completion</code>, <code>completion</code>, <code>text_embedding</code>)</li>
           <li>OpenAI (<code>chat_completion</code>, <code>completion</code>, <code>text_embedding</code>)</li>
-          <li>VoyageAI (<code>text_embedding</code>, <code>rerank</code>)</li>
+          <li>VoyageAI (<code>rerank</code>, <code>text_embedding</code>)</li>
           <li>Watsonx inference integration (<code>text_embedding</code>)</li>
-          <li>JinaAI (<code>text_embedding</code>, <code>rerank</code>)</li>
           </ul>
 
 
@@ -544,11 +538,7 @@ class InferenceClient(NamespacedClient):
                 __body["chunking_settings"] = chunking_settings
             if task_settings is not None:
                 __body["task_settings"] = task_settings
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return await self.perform_request(  # type: ignore[return-value]
             "PUT",
             __path,
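The service list shown earlier belongs to the docstring of the generic inference.put API. For orientation, a minimal sketch of registering an endpoint with one of the listed services through that API (the endpoint name and settings are illustrative, `client` is assumed to be an AsyncElasticsearch instance inside an async function, and parameter names follow the 8.19 generated client):

# Illustrative only: the inference ID, service and service_settings are placeholders.
resp = await client.inference.put(
    task_type="text_embedding",
    inference_id="my-embedding-endpoint",
    inference_config={
        "service": "elasticsearch",
        "service_settings": {
            "model_id": ".multilingual-e5-small",
            "num_allocations": 1,
            "num_threads": 1,
        },
    },
)
print(resp)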
@@ -644,11 +634,7 @@ class InferenceClient(NamespacedClient):
                 __body["chunking_settings"] = chunking_settings
             if task_settings is not None:
                 __body["task_settings"] = task_settings
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return await self.perform_request(  # type: ignore[return-value]
             "PUT",
             __path,
@@ -659,6 +645,108 @@ class InferenceClient(NamespacedClient):
             path_parts=__path_parts,
         )
 
+    @_rewrite_parameters(
+        body_fields=(
+            "service",
+            "service_settings",
+            "chunking_settings",
+            "task_settings",
+        ),
+    )
+    async def put_amazonsagemaker(
+        self,
+        *,
+        task_type: t.Union[
+            str,
+            t.Literal[
+                "chat_completion",
+                "completion",
+                "rerank",
+                "sparse_embedding",
+                "text_embedding",
+            ],
+        ],
+        amazonsagemaker_inference_id: str,
+        service: t.Optional[t.Union[str, t.Literal["amazon_sagemaker"]]] = None,
+        service_settings: t.Optional[t.Mapping[str, t.Any]] = None,
+        chunking_settings: t.Optional[t.Mapping[str, t.Any]] = None,
+        error_trace: t.Optional[bool] = None,
+        filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
+        human: t.Optional[bool] = None,
+        pretty: t.Optional[bool] = None,
+        task_settings: t.Optional[t.Mapping[str, t.Any]] = None,
+        timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
+        body: t.Optional[t.Dict[str, t.Any]] = None,
+    ) -> ObjectApiResponse[t.Any]:
+        """
+        .. raw:: html
+
+          <p>Create an Amazon SageMaker inference endpoint.</p>
+          <p>Create an inference endpoint to perform an inference task with the <code>amazon_sagemaker</code> service.</p>
+
+
+        `<https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-inference-put-amazonsagemaker>`_
+
+        :param task_type: The type of the inference task that the model will perform.
+        :param amazonsagemaker_inference_id: The unique identifier of the inference endpoint.
+        :param service: The type of service supported for the specified task type. In
+            this case, `amazon_sagemaker`.
+        :param service_settings: Settings used to install the inference model. These
+            settings are specific to the `amazon_sagemaker` service and `service_settings.api`
+            you specified.
+        :param chunking_settings: The chunking configuration object.
+        :param task_settings: Settings to configure the inference task. These settings
+            are specific to the task type and `service_settings.api` you specified.
+        :param timeout: Specifies the amount of time to wait for the inference endpoint
+            to be created.
+        """
+        if task_type in SKIP_IN_PATH:
+            raise ValueError("Empty value passed for parameter 'task_type'")
+        if amazonsagemaker_inference_id in SKIP_IN_PATH:
+            raise ValueError(
+                "Empty value passed for parameter 'amazonsagemaker_inference_id'"
+            )
+        if service is None and body is None:
+            raise ValueError("Empty value passed for parameter 'service'")
+        if service_settings is None and body is None:
+            raise ValueError("Empty value passed for parameter 'service_settings'")
+        __path_parts: t.Dict[str, str] = {
+            "task_type": _quote(task_type),
+            "amazonsagemaker_inference_id": _quote(amazonsagemaker_inference_id),
+        }
+        __path = f'/_inference/{__path_parts["task_type"]}/{__path_parts["amazonsagemaker_inference_id"]}'
+        __query: t.Dict[str, t.Any] = {}
+        __body: t.Dict[str, t.Any] = body if body is not None else {}
+        if error_trace is not None:
+            __query["error_trace"] = error_trace
+        if filter_path is not None:
+            __query["filter_path"] = filter_path
+        if human is not None:
+            __query["human"] = human
+        if pretty is not None:
+            __query["pretty"] = pretty
+        if timeout is not None:
+            __query["timeout"] = timeout
+        if not __body:
+            if service is not None:
+                __body["service"] = service
+            if service_settings is not None:
+                __body["service_settings"] = service_settings
+            if chunking_settings is not None:
+                __body["chunking_settings"] = chunking_settings
+            if task_settings is not None:
+                __body["task_settings"] = task_settings
+        __headers = {"accept": "application/json", "content-type": "application/json"}
+        return await self.perform_request(  # type: ignore[return-value]
+            "PUT",
+            __path,
+            params=__query,
+            headers=__headers,
+            body=__body,
+            endpoint_id="inference.put_amazonsagemaker",
+            path_parts=__path_parts,
+        )
+
     @_rewrite_parameters(
         body_fields=(
             "service",
@@ -741,11 +829,7 @@ class InferenceClient(NamespacedClient):
                 __body["chunking_settings"] = chunking_settings
             if task_settings is not None:
                 __body["task_settings"] = task_settings
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return await self.perform_request(  # type: ignore[return-value]
             "PUT",
             __path,
@@ -837,11 +921,7 @@ class InferenceClient(NamespacedClient):
                 __body["chunking_settings"] = chunking_settings
             if task_settings is not None:
                 __body["task_settings"] = task_settings
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return await self.perform_request(  # type: ignore[return-value]
             "PUT",
             __path,
@@ -941,11 +1021,7 @@ class InferenceClient(NamespacedClient):
                 __body["chunking_settings"] = chunking_settings
             if task_settings is not None:
                 __body["task_settings"] = task_settings
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
        return await self.perform_request(  # type: ignore[return-value]
             "PUT",
             __path,
@@ -1035,11 +1111,7 @@ class InferenceClient(NamespacedClient):
                 __body["chunking_settings"] = chunking_settings
             if task_settings is not None:
                 __body["task_settings"] = task_settings
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return await self.perform_request(  # type: ignore[return-value]
             "PUT",
             __path,
@@ -1166,11 +1238,7 @@ class InferenceClient(NamespacedClient):
                 __body["chunking_settings"] = chunking_settings
             if task_settings is not None:
                 __body["task_settings"] = task_settings
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return await self.perform_request(  # type: ignore[return-value]
             "PUT",
             __path,
@@ -1250,11 +1318,7 @@ class InferenceClient(NamespacedClient):
                 __body["service_settings"] = service_settings
             if chunking_settings is not None:
                 __body["chunking_settings"] = chunking_settings
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return await self.perform_request(  # type: ignore[return-value]
             "PUT",
             __path,
@@ -1362,11 +1426,7 @@ class InferenceClient(NamespacedClient):
                 __body["chunking_settings"] = chunking_settings
             if task_settings is not None:
                 __body["task_settings"] = task_settings
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return await self.perform_request(  # type: ignore[return-value]
             "PUT",
             __path,
@@ -1460,11 +1520,7 @@ class InferenceClient(NamespacedClient):
                 __body["service_settings"] = service_settings
             if chunking_settings is not None:
                 __body["chunking_settings"] = chunking_settings
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return await self.perform_request(  # type: ignore[return-value]
             "PUT",
             __path,
@@ -1546,11 +1602,7 @@ class InferenceClient(NamespacedClient):
                 __body["service_settings"] = service_settings
             if chunking_settings is not None:
                 __body["chunking_settings"] = chunking_settings
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return await self.perform_request(  # type: ignore[return-value]
             "PUT",
             __path,
@@ -1644,11 +1696,7 @@ class InferenceClient(NamespacedClient):
                 __body["chunking_settings"] = chunking_settings
             if task_settings is not None:
                 __body["task_settings"] = task_settings
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return await self.perform_request(  # type: ignore[return-value]
             "PUT",
             __path,
@@ -1776,11 +1824,7 @@ class InferenceClient(NamespacedClient):
                 __body["chunking_settings"] = chunking_settings
             if task_settings is not None:
                 __body["task_settings"] = task_settings
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return await self.perform_request(  # type: ignore[return-value]
             "PUT",
             __path,
@@ -1872,11 +1916,7 @@ class InferenceClient(NamespacedClient):
                 __body["chunking_settings"] = chunking_settings
             if task_settings is not None:
                 __body["task_settings"] = task_settings
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return await self.perform_request(  # type: ignore[return-value]
             "PUT",
             __path,
@@ -1958,11 +1998,7 @@ class InferenceClient(NamespacedClient):
                 __body["service_settings"] = service_settings
             if chunking_settings is not None:
                 __body["chunking_settings"] = chunking_settings
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return await self.perform_request(  # type: ignore[return-value]
             "PUT",
             __path,
@@ -2056,11 +2092,7 @@ class InferenceClient(NamespacedClient):
                 __body["chunking_settings"] = chunking_settings
             if task_settings is not None:
                 __body["task_settings"] = task_settings
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return await self.perform_request(  # type: ignore[return-value]
             "PUT",
             __path,
@@ -2151,11 +2183,7 @@ class InferenceClient(NamespacedClient):
                 __body["chunking_settings"] = chunking_settings
             if task_settings is not None:
                 __body["task_settings"] = task_settings
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return await self.perform_request(  # type: ignore[return-value]
             "PUT",
             __path,
@@ -2234,11 +2262,7 @@ class InferenceClient(NamespacedClient):
                 __body["service"] = service
             if service_settings is not None:
                 __body["service_settings"] = service_settings
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return await self.perform_request(  # type: ignore[return-value]
             "PUT",
             __path,
@@ -2311,11 +2335,7 @@ class InferenceClient(NamespacedClient):
                 __body["query"] = query
             if task_settings is not None:
                 __body["task_settings"] = task_settings
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return await self.perform_request(  # type: ignore[return-value]
             "POST",
             __path,
@@ -2379,11 +2399,7 @@ class InferenceClient(NamespacedClient):
                 __body["input"] = input
             if task_settings is not None:
                 __body["task_settings"] = task_settings
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return await self.perform_request(  # type: ignore[return-value]
             "POST",
             __path,
@@ -2395,7 +2411,7 @@ class InferenceClient(NamespacedClient):
         )
 
     @_rewrite_parameters(
-        body_fields=("input", "task_settings"),
+        body_fields=("input", "input_type", "task_settings"),
     )
     async def text_embedding(
         self,
@@ -2405,6 +2421,7 @@ class InferenceClient(NamespacedClient):
         error_trace: t.Optional[bool] = None,
         filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         human: t.Optional[bool] = None,
+        input_type: t.Optional[str] = None,
         pretty: t.Optional[bool] = None,
         task_settings: t.Optional[t.Any] = None,
         timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
@@ -2420,6 +2437,13 @@ class InferenceClient(NamespacedClient):
 
         :param inference_id: The inference Id
         :param input: Inference input. Either a string or an array of strings.
+        :param input_type: The input data type for the text embedding model. Possible
+            values include: * `SEARCH` * `INGEST` * `CLASSIFICATION` * `CLUSTERING` Not
+            all services support all values. Unsupported values will trigger a validation
+            exception. Accepted values depend on the configured inference service, refer
+            to the relevant service-specific documentation for more info. > info > The
+            `input_type` parameter specified on the root level of the request body will
+            take precedence over the `input_type` parameter specified in `task_settings`.
         :param task_settings: Optional task settings
         :param timeout: Specifies the amount of time to wait for the inference request
             to complete.
@@ -2445,13 +2469,11 @@ class InferenceClient(NamespacedClient):
         if not __body:
             if input is not None:
                 __body["input"] = input
+            if input_type is not None:
+                __body["input_type"] = input_type
             if task_settings is not None:
                 __body["task_settings"] = task_settings
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return await self.perform_request(  # type: ignore[return-value]
             "POST",
             __path,
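The hunks above add an input_type parameter to text_embedding, both in the method signature and as a root-level body field. A minimal sketch (the endpoint name is a placeholder; accepted input_type values depend on the configured service, as the docstring notes):

# Inside an async function, with `client` an AsyncElasticsearch instance.
resp = await client.inference.text_embedding(
    inference_id="my-embedding-endpoint",
    input=["first passage", "second passage"],
    input_type="SEARCH",  # takes precedence over input_type in task_settings
)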
@@ -405,8 +405,8 @@ class NodesClient(NamespacedClient):
             are requested).
         :param include_unloaded_segments: If `true`, the response includes information
             from segments that are not loaded into memory.
-        :param level: Indicates whether statistics are aggregated at the cluster, index,
-            or shard level.
+        :param level: Indicates whether statistics are aggregated at the node, indices,
+            or shards level.
         :param timeout: Period to wait for a response. If no response is received before
             the timeout expires, the request fails and returns an error.
         :param types: A comma-separated list of document types for the indexing index
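The corrected wording belongs to the node stats API, where `level` controls whether index statistics are rolled up per node, per index, or per shard. An illustrative call:

# Inside an async function, with `client` an AsyncElasticsearch instance:
# aggregate index statistics at the indices level for all nodes.
stats = await client.nodes.stats(metric="indices", level="indices")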
@@ -33,13 +33,9 @@ class ShutdownClient(NamespacedClient):
         error_trace: t.Optional[bool] = None,
         filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         human: t.Optional[bool] = None,
-        master_timeout: t.Optional[
-            t.Union[str, t.Literal["d", "h", "m", "micros", "ms", "nanos", "s"]]
-        ] = None,
+        master_timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
         pretty: t.Optional[bool] = None,
-        timeout: t.Optional[
-            t.Union[str, t.Literal["d", "h", "m", "micros", "ms", "nanos", "s"]]
-        ] = None,
+        timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
         .. raw:: html
@@ -97,9 +93,7 @@ class ShutdownClient(NamespacedClient):
         error_trace: t.Optional[bool] = None,
         filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         human: t.Optional[bool] = None,
-        master_timeout: t.Optional[
-            t.Union[str, t.Literal["d", "h", "m", "micros", "ms", "nanos", "s"]]
-        ] = None,
+        master_timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
         pretty: t.Optional[bool] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
@@ -162,14 +156,10 @@ class ShutdownClient(NamespacedClient):
         error_trace: t.Optional[bool] = None,
         filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
         human: t.Optional[bool] = None,
-        master_timeout: t.Optional[
-            t.Union[str, t.Literal["d", "h", "m", "micros", "ms", "nanos", "s"]]
-        ] = None,
+        master_timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
         pretty: t.Optional[bool] = None,
         target_node_name: t.Optional[str] = None,
-        timeout: t.Optional[
-            t.Union[str, t.Literal["d", "h", "m", "micros", "ms", "nanos", "s"]]
-        ] = None,
+        timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
         body: t.Optional[t.Dict[str, t.Any]] = None,
     ) -> ObjectApiResponse[t.Any]:
         """
@@ -431,11 +431,7 @@ class SlmClient(NamespacedClient):
                 __body["retention"] = retention
             if schedule is not None:
                 __body["schedule"] = schedule
-        if not __body:
-            __body = None  # type: ignore[assignment]
-        __headers = {"accept": "application/json"}
-        if __body is not None:
-            __headers["content-type"] = "application/json"
+        __headers = {"accept": "application/json", "content-type": "application/json"}
         return await self.perform_request(  # type: ignore[return-value]
             "PUT",
             __path,
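The final hunk applies the same header simplification to the SLM put-lifecycle helper. A sketch of a call that exercises the retention and schedule body fields touched above (the policy name, schedule, snapshot name and repository are placeholders):

# Inside an async function, with `client` an AsyncElasticsearch instance.
await client.slm.put_lifecycle(
    policy_id="nightly-snapshots",
    schedule="0 30 1 * * ?",
    name="<nightly-snap-{now/d}>",
    repository="my_repository",
    retention={"expire_after": "30d", "min_count": 5, "max_count": 50},
)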