mlrun 1.7.2rc3__py3-none-any.whl → 1.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mlrun might be problematic.

Files changed (275)
  1. mlrun/__init__.py +26 -22
  2. mlrun/__main__.py +15 -16
  3. mlrun/alerts/alert.py +150 -15
  4. mlrun/api/schemas/__init__.py +1 -9
  5. mlrun/artifacts/__init__.py +2 -3
  6. mlrun/artifacts/base.py +62 -19
  7. mlrun/artifacts/dataset.py +17 -17
  8. mlrun/artifacts/document.py +454 -0
  9. mlrun/artifacts/manager.py +28 -18
  10. mlrun/artifacts/model.py +91 -59
  11. mlrun/artifacts/plots.py +2 -2
  12. mlrun/common/constants.py +8 -0
  13. mlrun/common/formatters/__init__.py +1 -0
  14. mlrun/common/formatters/artifact.py +1 -1
  15. mlrun/common/formatters/feature_set.py +2 -0
  16. mlrun/common/formatters/function.py +1 -0
  17. mlrun/{model_monitoring/db/stores/v3io_kv/__init__.py → common/formatters/model_endpoint.py} +17 -0
  18. mlrun/common/formatters/pipeline.py +1 -2
  19. mlrun/common/formatters/project.py +9 -0
  20. mlrun/common/model_monitoring/__init__.py +0 -5
  21. mlrun/common/model_monitoring/helpers.py +12 -62
  22. mlrun/common/runtimes/constants.py +25 -4
  23. mlrun/common/schemas/__init__.py +9 -5
  24. mlrun/common/schemas/alert.py +114 -19
  25. mlrun/common/schemas/api_gateway.py +3 -3
  26. mlrun/common/schemas/artifact.py +22 -9
  27. mlrun/common/schemas/auth.py +8 -4
  28. mlrun/common/schemas/background_task.py +7 -7
  29. mlrun/common/schemas/client_spec.py +4 -4
  30. mlrun/common/schemas/clusterization_spec.py +2 -2
  31. mlrun/common/schemas/common.py +53 -3
  32. mlrun/common/schemas/constants.py +15 -0
  33. mlrun/common/schemas/datastore_profile.py +1 -1
  34. mlrun/common/schemas/feature_store.py +9 -9
  35. mlrun/common/schemas/frontend_spec.py +4 -4
  36. mlrun/common/schemas/function.py +10 -10
  37. mlrun/common/schemas/hub.py +1 -1
  38. mlrun/common/schemas/k8s.py +3 -3
  39. mlrun/common/schemas/memory_reports.py +3 -3
  40. mlrun/common/schemas/model_monitoring/__init__.py +4 -8
  41. mlrun/common/schemas/model_monitoring/constants.py +127 -46
  42. mlrun/common/schemas/model_monitoring/grafana.py +18 -12
  43. mlrun/common/schemas/model_monitoring/model_endpoints.py +154 -160
  44. mlrun/common/schemas/notification.py +24 -3
  45. mlrun/common/schemas/object.py +1 -1
  46. mlrun/common/schemas/pagination.py +4 -4
  47. mlrun/common/schemas/partition.py +142 -0
  48. mlrun/common/schemas/pipeline.py +3 -3
  49. mlrun/common/schemas/project.py +26 -18
  50. mlrun/common/schemas/runs.py +3 -3
  51. mlrun/common/schemas/runtime_resource.py +5 -5
  52. mlrun/common/schemas/schedule.py +1 -1
  53. mlrun/common/schemas/secret.py +1 -1
  54. mlrun/{model_monitoring/db/stores/sqldb/__init__.py → common/schemas/serving.py} +10 -1
  55. mlrun/common/schemas/tag.py +3 -3
  56. mlrun/common/schemas/workflow.py +6 -5
  57. mlrun/common/types.py +1 -0
  58. mlrun/config.py +157 -89
  59. mlrun/data_types/__init__.py +5 -3
  60. mlrun/data_types/infer.py +13 -3
  61. mlrun/data_types/spark.py +2 -1
  62. mlrun/datastore/__init__.py +59 -18
  63. mlrun/datastore/alibaba_oss.py +4 -1
  64. mlrun/datastore/azure_blob.py +4 -1
  65. mlrun/datastore/base.py +19 -24
  66. mlrun/datastore/datastore.py +10 -4
  67. mlrun/datastore/datastore_profile.py +178 -45
  68. mlrun/datastore/dbfs_store.py +4 -1
  69. mlrun/datastore/filestore.py +4 -1
  70. mlrun/datastore/google_cloud_storage.py +4 -1
  71. mlrun/datastore/hdfs.py +4 -1
  72. mlrun/datastore/inmem.py +4 -1
  73. mlrun/datastore/redis.py +4 -1
  74. mlrun/datastore/s3.py +14 -3
  75. mlrun/datastore/sources.py +89 -92
  76. mlrun/datastore/store_resources.py +7 -4
  77. mlrun/datastore/storeytargets.py +51 -16
  78. mlrun/datastore/targets.py +38 -31
  79. mlrun/datastore/utils.py +87 -4
  80. mlrun/datastore/v3io.py +4 -1
  81. mlrun/datastore/vectorstore.py +291 -0
  82. mlrun/datastore/wasbfs/fs.py +13 -12
  83. mlrun/db/base.py +286 -100
  84. mlrun/db/httpdb.py +1562 -490
  85. mlrun/db/nopdb.py +250 -83
  86. mlrun/errors.py +6 -2
  87. mlrun/execution.py +194 -50
  88. mlrun/feature_store/__init__.py +2 -10
  89. mlrun/feature_store/api.py +20 -458
  90. mlrun/feature_store/common.py +9 -9
  91. mlrun/feature_store/feature_set.py +20 -18
  92. mlrun/feature_store/feature_vector.py +105 -479
  93. mlrun/feature_store/feature_vector_utils.py +466 -0
  94. mlrun/feature_store/retrieval/base.py +15 -11
  95. mlrun/feature_store/retrieval/job.py +2 -1
  96. mlrun/feature_store/retrieval/storey_merger.py +1 -1
  97. mlrun/feature_store/steps.py +3 -3
  98. mlrun/features.py +30 -13
  99. mlrun/frameworks/__init__.py +1 -2
  100. mlrun/frameworks/_common/__init__.py +1 -2
  101. mlrun/frameworks/_common/artifacts_library.py +2 -2
  102. mlrun/frameworks/_common/mlrun_interface.py +10 -6
  103. mlrun/frameworks/_common/model_handler.py +31 -31
  104. mlrun/frameworks/_common/producer.py +3 -1
  105. mlrun/frameworks/_dl_common/__init__.py +1 -2
  106. mlrun/frameworks/_dl_common/loggers/__init__.py +1 -2
  107. mlrun/frameworks/_dl_common/loggers/mlrun_logger.py +4 -4
  108. mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +3 -3
  109. mlrun/frameworks/_ml_common/__init__.py +1 -2
  110. mlrun/frameworks/_ml_common/loggers/__init__.py +1 -2
  111. mlrun/frameworks/_ml_common/model_handler.py +21 -21
  112. mlrun/frameworks/_ml_common/plans/__init__.py +1 -2
  113. mlrun/frameworks/_ml_common/plans/confusion_matrix_plan.py +3 -1
  114. mlrun/frameworks/_ml_common/plans/dataset_plan.py +3 -3
  115. mlrun/frameworks/_ml_common/plans/roc_curve_plan.py +4 -4
  116. mlrun/frameworks/auto_mlrun/__init__.py +1 -2
  117. mlrun/frameworks/auto_mlrun/auto_mlrun.py +22 -15
  118. mlrun/frameworks/huggingface/__init__.py +1 -2
  119. mlrun/frameworks/huggingface/model_server.py +9 -9
  120. mlrun/frameworks/lgbm/__init__.py +47 -44
  121. mlrun/frameworks/lgbm/callbacks/__init__.py +1 -2
  122. mlrun/frameworks/lgbm/callbacks/logging_callback.py +4 -2
  123. mlrun/frameworks/lgbm/callbacks/mlrun_logging_callback.py +4 -2
  124. mlrun/frameworks/lgbm/mlrun_interfaces/__init__.py +1 -2
  125. mlrun/frameworks/lgbm/mlrun_interfaces/mlrun_interface.py +5 -5
  126. mlrun/frameworks/lgbm/model_handler.py +15 -11
  127. mlrun/frameworks/lgbm/model_server.py +11 -7
  128. mlrun/frameworks/lgbm/utils.py +2 -2
  129. mlrun/frameworks/onnx/__init__.py +1 -2
  130. mlrun/frameworks/onnx/dataset.py +3 -3
  131. mlrun/frameworks/onnx/mlrun_interface.py +2 -2
  132. mlrun/frameworks/onnx/model_handler.py +7 -5
  133. mlrun/frameworks/onnx/model_server.py +8 -6
  134. mlrun/frameworks/parallel_coordinates.py +11 -11
  135. mlrun/frameworks/pytorch/__init__.py +22 -23
  136. mlrun/frameworks/pytorch/callbacks/__init__.py +1 -2
  137. mlrun/frameworks/pytorch/callbacks/callback.py +2 -1
  138. mlrun/frameworks/pytorch/callbacks/logging_callback.py +15 -8
  139. mlrun/frameworks/pytorch/callbacks/mlrun_logging_callback.py +19 -12
  140. mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +22 -15
  141. mlrun/frameworks/pytorch/callbacks_handler.py +36 -30
  142. mlrun/frameworks/pytorch/mlrun_interface.py +17 -17
  143. mlrun/frameworks/pytorch/model_handler.py +21 -17
  144. mlrun/frameworks/pytorch/model_server.py +13 -9
  145. mlrun/frameworks/sklearn/__init__.py +19 -18
  146. mlrun/frameworks/sklearn/estimator.py +2 -2
  147. mlrun/frameworks/sklearn/metric.py +3 -3
  148. mlrun/frameworks/sklearn/metrics_library.py +8 -6
  149. mlrun/frameworks/sklearn/mlrun_interface.py +3 -2
  150. mlrun/frameworks/sklearn/model_handler.py +4 -3
  151. mlrun/frameworks/tf_keras/__init__.py +11 -12
  152. mlrun/frameworks/tf_keras/callbacks/__init__.py +1 -2
  153. mlrun/frameworks/tf_keras/callbacks/logging_callback.py +17 -14
  154. mlrun/frameworks/tf_keras/callbacks/mlrun_logging_callback.py +15 -12
  155. mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +21 -18
  156. mlrun/frameworks/tf_keras/model_handler.py +17 -13
  157. mlrun/frameworks/tf_keras/model_server.py +12 -8
  158. mlrun/frameworks/xgboost/__init__.py +19 -18
  159. mlrun/frameworks/xgboost/model_handler.py +13 -9
  160. mlrun/k8s_utils.py +2 -5
  161. mlrun/launcher/base.py +3 -4
  162. mlrun/launcher/client.py +2 -2
  163. mlrun/launcher/local.py +6 -2
  164. mlrun/launcher/remote.py +1 -1
  165. mlrun/lists.py +8 -4
  166. mlrun/model.py +132 -46
  167. mlrun/model_monitoring/__init__.py +3 -5
  168. mlrun/model_monitoring/api.py +113 -98
  169. mlrun/model_monitoring/applications/__init__.py +0 -5
  170. mlrun/model_monitoring/applications/_application_steps.py +81 -50
  171. mlrun/model_monitoring/applications/base.py +467 -14
  172. mlrun/model_monitoring/applications/context.py +212 -134
  173. mlrun/model_monitoring/{db/stores/base → applications/evidently}/__init__.py +6 -2
  174. mlrun/model_monitoring/applications/evidently/base.py +146 -0
  175. mlrun/model_monitoring/applications/histogram_data_drift.py +89 -56
  176. mlrun/model_monitoring/applications/results.py +67 -15
  177. mlrun/model_monitoring/controller.py +701 -315
  178. mlrun/model_monitoring/db/__init__.py +0 -2
  179. mlrun/model_monitoring/db/_schedules.py +242 -0
  180. mlrun/model_monitoring/db/_stats.py +189 -0
  181. mlrun/model_monitoring/db/tsdb/__init__.py +33 -22
  182. mlrun/model_monitoring/db/tsdb/base.py +243 -49
  183. mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +76 -36
  184. mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +33 -0
  185. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connection.py +213 -0
  186. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +534 -88
  187. mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +1 -0
  188. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +436 -106
  189. mlrun/model_monitoring/helpers.py +356 -114
  190. mlrun/model_monitoring/stream_processing.py +190 -345
  191. mlrun/model_monitoring/tracking_policy.py +11 -4
  192. mlrun/model_monitoring/writer.py +49 -90
  193. mlrun/package/__init__.py +3 -6
  194. mlrun/package/context_handler.py +2 -2
  195. mlrun/package/packager.py +12 -9
  196. mlrun/package/packagers/__init__.py +0 -2
  197. mlrun/package/packagers/default_packager.py +14 -11
  198. mlrun/package/packagers/numpy_packagers.py +16 -7
  199. mlrun/package/packagers/pandas_packagers.py +18 -18
  200. mlrun/package/packagers/python_standard_library_packagers.py +25 -11
  201. mlrun/package/packagers_manager.py +35 -32
  202. mlrun/package/utils/__init__.py +0 -3
  203. mlrun/package/utils/_pickler.py +6 -6
  204. mlrun/platforms/__init__.py +47 -16
  205. mlrun/platforms/iguazio.py +4 -1
  206. mlrun/projects/operations.py +30 -30
  207. mlrun/projects/pipelines.py +116 -47
  208. mlrun/projects/project.py +1292 -329
  209. mlrun/render.py +5 -9
  210. mlrun/run.py +57 -14
  211. mlrun/runtimes/__init__.py +1 -3
  212. mlrun/runtimes/base.py +30 -22
  213. mlrun/runtimes/daskjob.py +9 -9
  214. mlrun/runtimes/databricks_job/databricks_runtime.py +6 -5
  215. mlrun/runtimes/function_reference.py +5 -2
  216. mlrun/runtimes/generators.py +3 -2
  217. mlrun/runtimes/kubejob.py +6 -7
  218. mlrun/runtimes/mounts.py +574 -0
  219. mlrun/runtimes/mpijob/__init__.py +0 -2
  220. mlrun/runtimes/mpijob/abstract.py +7 -6
  221. mlrun/runtimes/nuclio/api_gateway.py +7 -7
  222. mlrun/runtimes/nuclio/application/application.py +11 -13
  223. mlrun/runtimes/nuclio/application/reverse_proxy.go +66 -64
  224. mlrun/runtimes/nuclio/function.py +127 -70
  225. mlrun/runtimes/nuclio/serving.py +105 -37
  226. mlrun/runtimes/pod.py +159 -54
  227. mlrun/runtimes/remotesparkjob.py +3 -2
  228. mlrun/runtimes/sparkjob/__init__.py +0 -2
  229. mlrun/runtimes/sparkjob/spark3job.py +22 -12
  230. mlrun/runtimes/utils.py +7 -6
  231. mlrun/secrets.py +2 -2
  232. mlrun/serving/__init__.py +8 -0
  233. mlrun/serving/merger.py +7 -5
  234. mlrun/serving/remote.py +35 -22
  235. mlrun/serving/routers.py +186 -240
  236. mlrun/serving/server.py +41 -10
  237. mlrun/serving/states.py +432 -118
  238. mlrun/serving/utils.py +13 -2
  239. mlrun/serving/v1_serving.py +3 -2
  240. mlrun/serving/v2_serving.py +161 -203
  241. mlrun/track/__init__.py +1 -1
  242. mlrun/track/tracker.py +2 -2
  243. mlrun/track/trackers/mlflow_tracker.py +6 -5
  244. mlrun/utils/async_http.py +35 -22
  245. mlrun/utils/clones.py +7 -4
  246. mlrun/utils/helpers.py +511 -58
  247. mlrun/utils/logger.py +119 -13
  248. mlrun/utils/notifications/notification/__init__.py +22 -19
  249. mlrun/utils/notifications/notification/base.py +39 -15
  250. mlrun/utils/notifications/notification/console.py +6 -6
  251. mlrun/utils/notifications/notification/git.py +11 -11
  252. mlrun/utils/notifications/notification/ipython.py +10 -9
  253. mlrun/utils/notifications/notification/mail.py +176 -0
  254. mlrun/utils/notifications/notification/slack.py +16 -8
  255. mlrun/utils/notifications/notification/webhook.py +24 -8
  256. mlrun/utils/notifications/notification_pusher.py +191 -200
  257. mlrun/utils/regex.py +12 -2
  258. mlrun/utils/version/version.json +2 -2
  259. {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0.dist-info}/METADATA +81 -54
  260. mlrun-1.8.0.dist-info/RECORD +351 -0
  261. {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0.dist-info}/WHEEL +1 -1
  262. mlrun/model_monitoring/applications/evidently_base.py +0 -137
  263. mlrun/model_monitoring/db/stores/__init__.py +0 -136
  264. mlrun/model_monitoring/db/stores/base/store.py +0 -213
  265. mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +0 -71
  266. mlrun/model_monitoring/db/stores/sqldb/models/base.py +0 -190
  267. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +0 -103
  268. mlrun/model_monitoring/db/stores/sqldb/models/sqlite.py +0 -40
  269. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +0 -659
  270. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +0 -726
  271. mlrun/model_monitoring/model_endpoint.py +0 -118
  272. mlrun-1.7.2rc3.dist-info/RECORD +0 -351
  273. {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0.dist-info}/entry_points.txt +0 -0
  274. {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0.dist-info/licenses}/LICENSE +0 -0
  275. {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0.dist-info}/top_level.txt +0 -0
mlrun/db/httpdb.py CHANGED
@@ -22,19 +22,22 @@ import warnings
  from copy import deepcopy
  from datetime import datetime, timedelta
  from os import path, remove
- from typing import Optional, Union
+ from typing import Literal, Optional, Union
  from urllib.parse import urlparse

+ import pydantic.v1
  import requests
  import semver
- from mlrun_pipelines.utils import compile_pipeline
+ from pydantic.v1 import parse_obj_as

  import mlrun
+ import mlrun.common.constants
  import mlrun.common.formatters
  import mlrun.common.runtimes
  import mlrun.common.schemas
+ import mlrun.common.schemas.model_monitoring.constants as mm_constants
+ import mlrun.common.schemas.model_monitoring.model_endpoints as mm_endpoints
  import mlrun.common.types
- import mlrun.model_monitoring.model_endpoint
  import mlrun.platforms
  import mlrun.projects
  import mlrun.runtimes.nuclio.api_gateway
@@ -43,8 +46,10 @@ import mlrun.utils
  from mlrun.alerts.alert import AlertConfig
  from mlrun.db.auth_utils import OAuthClientIDTokenProvider, StaticTokenProvider
  from mlrun.errors import MLRunInvalidArgumentError, err_to_str
+ from mlrun_pipelines.utils import compile_pipeline

  from ..artifacts import Artifact
+ from ..common.schemas import AlertActivations
  from ..config import config
  from ..datastore.datastore_profile import DatastoreProfile2Json
  from ..feature_store import FeatureSet, FeatureVector
@@ -169,7 +174,7 @@ class HTTPRunDB(RunDBInterface):
  return f"{cls}({self.base_url!r})"

  @staticmethod
- def get_api_path_prefix(version: str = None) -> str:
+ def get_api_path_prefix(version: Optional[str] = None) -> str:
  """
  :param version: API version to use, None (the default) will mean to use the default value from mlrun.config,
  for un-versioned api set an empty string.
@@ -182,7 +187,7 @@ class HTTPRunDB(RunDBInterface):
  )
  return api_version_path

- def get_base_api_url(self, path: str, version: str = None) -> str:
+ def get_base_api_url(self, path: str, version: Optional[str] = None) -> str:
  path_prefix = self.get_api_path_prefix(version)
  url = f"{self.base_url}/{path_prefix}/{path}"
  return url
@@ -310,9 +315,26 @@ class HTTPRunDB(RunDBInterface):
  headers=None,
  timeout=45,
  version=None,
+ return_all=False,
  ) -> typing.Generator[requests.Response, None, None]:
  """
- Calls the api with pagination, yielding each page of the response
+ Calls the API with pagination and yields each page of the response.
+
+ Depending on the `return_all` parameter:
+ - If `return_all` is `True`, fetches and yields all pages of results.
+ - If `return_all` is False, only a single page of results is fetched and yielded.
+
+ :param method: The HTTP method (GET, POST, etc.).
+ :param path: The API endpoint path.
+ :param error: Error message used for debugging if the request fails.
+ :param params: The parameters to pass for the API request, including filters.
+ :param body: The body of the request.
+ :param json: The JSON payload for the request.
+ :param headers: Custom headers for the request.
+ :param timeout: Timeout for the request.
+ :param version: API version, optional.
+ :param return_all: If `True`, fetches all pages and returns them in one shot. If `False`, returns only
+ the requested page or the next page.
  """

  def _api_call(_params):
@@ -328,38 +350,43 @@ class HTTPRunDB(RunDBInterface):
  version=version,
  )

- first_page_params = deepcopy(params) or {}
- first_page_params["page"] = 1
- first_page_params["page-size"] = config.httpdb.pagination.default_page_size
- response = _api_call(first_page_params)
- page_token = response.json().get("pagination", {}).get("page-token")
- if not page_token:
- yield response
- return
+ page_params = self._resolve_page_params(params)
+ response = _api_call(page_params)

- params_with_page_token = deepcopy(params) or {}
- params_with_page_token["page-token"] = page_token
- while page_token:
- yield response
- try:
- response = _api_call(params_with_page_token)
- except mlrun.errors.MLRunNotFoundError:
- # pagination token expired
- break
+ # yields a single page of results
+ yield response

+ if return_all:
  page_token = response.json().get("pagination", {}).get("page-token", None)

+ while page_token:
+ try:
+ # Use the page token to get the next page.
+ # No need to supply any other parameters as the token informs the pagination cache
+ # which parameters to use.
+ response = _api_call({"page-token": page_token})
+ except mlrun.errors.MLRunNotFoundError:
+ # pagination token expired, we've reached the last page
+ break
+
+ yield response
+ page_token = (
+ response.json().get("pagination", {}).get("page-token", None)
+ )
+
  @staticmethod
  def process_paginated_responses(
  responses: typing.Generator[requests.Response, None, None], key: str = "data"
- ) -> list[typing.Any]:
+ ) -> tuple[list[typing.Any], Optional[str]]:
  """
  Processes the paginated responses and returns the combined data
  """
  data = []
+ page_token = None
  for response in responses:
+ page_token = response.json().get("pagination", {}).get("page-token", None)
  data.extend(response.json().get(key, []))
- return data
+ return data, page_token

  def _init_session(self, retry_on_post: bool = False):
  return mlrun.utils.HTTPSessionWithRetry(
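
With this change, `paginated_api_call` always yields the first page and then, only when `return_all=True`, keeps following the `page-token` until the server stops returning one (or the token expires), while `process_paginated_responses` now returns the combined items together with the last token. A minimal caller-side sketch, assuming a hypothetical "runs" endpoint path and result key (not taken from this diff)::

    # Hedged sketch: stream every page of a listing endpoint and collect the results.
    def fetch_all(db, project="my-project"):
        responses = db.paginated_api_call(
            "GET",
            f"projects/{project}/runs",   # illustrative endpoint path
            "list runs",                  # error message used if a request fails
            return_all=True,
        )
        items, page_token = db.process_paginated_responses(responses, key="runs")
        # page_token is None once there are no further pages to fetch
        return items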
@@ -525,18 +552,6 @@ class HTTPRunDB(RunDBInterface):
  server_cfg.get("external_platform_tracking")
  or config.external_platform_tracking
  )
- config.model_endpoint_monitoring.endpoint_store_connection = (
- server_cfg.get("model_endpoint_monitoring_endpoint_store_connection")
- or config.model_endpoint_monitoring.endpoint_store_connection
- )
- config.model_endpoint_monitoring.tsdb_connection = (
- server_cfg.get("model_monitoring_tsdb_connection")
- or config.model_endpoint_monitoring.tsdb_connection
- )
- config.model_endpoint_monitoring.stream_connection = (
- server_cfg.get("stream_connection")
- or config.model_endpoint_monitoring.stream_connection
- )
  config.packagers = server_cfg.get("packagers") or config.packagers
  server_data_prefixes = server_cfg.get("feature_store_data_prefixes") or {}
  for prefix in ["default", "nosql", "redisnosql"]:
@@ -550,6 +565,18 @@ class HTTPRunDB(RunDBInterface):
  or config.feature_store.default_targets
  )
  config.alerts.mode = server_cfg.get("alerts_mode") or config.alerts.mode
+ config.system_id = server_cfg.get("system_id") or config.system_id
+ model_monitoring_store_prefixes = (
+ server_cfg.get("model_endpoint_monitoring_store_prefixes") or {}
+ )
+ for prefix in ["default", "user_space", "monitoring_application"]:
+ store_prefix_value = model_monitoring_store_prefixes.get(prefix)
+ if store_prefix_value is not None:
+ setattr(
+ config.model_endpoint_monitoring.store_prefixes,
+ prefix,
+ store_prefix_value,
+ )

  except Exception as exc:
  logger.warning(
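
The added block follows the client's usual precedence rule when reconciling configuration pushed by the server: a server-provided value wins, otherwise the locally configured value is kept. A simplified sketch of that merge pattern (the helper name is illustrative, not part of the diff)::

    # Hedged sketch of the "server value or local default" precedence used above.
    def merge(server_cfg: dict, key: str, local_value):
        return server_cfg.get(key) or local_value

    # e.g. config.system_id = merge(server_cfg, "system_id", config.system_id)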
@@ -726,6 +753,108 @@ class HTTPRunDB(RunDBInterface):
  )
  return None

+ def push_run_notifications(
+ self,
+ uid,
+ project="",
+ timeout=45,
+ ):
+ """
+ Push notifications for a run.
+
+ :param uid: Unique ID of the run.
+ :param project: Project that the run belongs to.
+ :returns: :py:class:`~mlrun.common.schemas.BackgroundTask`.
+ """
+ project = project or config.default_project
+ response = self.api_call(
+ "POST",
+ path=f"projects/{project}/runs/{uid}/push-notifications",
+ error="Failed push notifications",
+ timeout=timeout,
+ )
+ if response.status_code == http.HTTPStatus.ACCEPTED:
+ background_task = mlrun.common.schemas.BackgroundTask(**response.json())
+ background_task = self._wait_for_background_task_to_reach_terminal_state(
+ background_task.metadata.name, project=project
+ )
+ if (
+ background_task.status.state
+ == mlrun.common.schemas.BackgroundTaskState.succeeded
+ ):
+ logger.info(
+ "Notifications for the run have been pushed",
+ project=project,
+ run_id=uid,
+ )
+ elif (
+ background_task.status.state
+ == mlrun.common.schemas.BackgroundTaskState.failed
+ ):
+ logger.error(
+ "Failed to push run notifications",
+ project=project,
+ run_id=uid,
+ error=background_task.status.error,
+ )
+ return None
+
+ def push_pipeline_notifications(
+ self,
+ pipeline_id,
+ project="",
+ notifications=None,
+ timeout=45,
+ ):
+ """
+ Push notifications for a pipeline.
+
+ :param pipeline_id: Unique ID of the pipeline(KFP).
+ :param project: Project that the run belongs to.
+ :param notifications: List of notifications to push.
+ :returns: :py:class:`~mlrun.common.schemas.BackgroundTask`.
+ """
+ if notifications is None or type(notifications) is not list:
+ raise MLRunInvalidArgumentError(
+ "The 'notifications' parameter must be a list."
+ )
+
+ project = project or config.default_project
+
+ response = self.api_call(
+ "POST",
+ path=f"projects/{project}/pipelines/{pipeline_id}/push-notifications",
+ error="Failed push notifications",
+ body=_as_json([notification.to_dict() for notification in notifications]),
+ timeout=timeout,
+ )
+ if response.status_code == http.HTTPStatus.ACCEPTED:
+ background_task = mlrun.common.schemas.BackgroundTask(**response.json())
+ background_task = self._wait_for_background_task_to_reach_terminal_state(
+ background_task.metadata.name, project=project
+ )
+ if (
+ background_task.status.state
+ == mlrun.common.schemas.BackgroundTaskState.succeeded
+ ):
+ logger.info(
+ "Pipeline notifications have been pushed",
+ project=project,
+ pipeline_id=pipeline_id,
+ )
+ elif (
+ background_task.status.state
+ == mlrun.common.schemas.BackgroundTaskState.failed
+ ):
+ logger.error(
+ "Failed to push pipeline notifications",
+ project=project,
+ pipeline_id=pipeline_id,
+ error=background_task.status.error,
+ )
+
+ return None
+
  def read_run(
  self,
  uid,
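
Both new helpers return ``None`` and block until the backend's background task reaches a terminal state. A short usage sketch against an ``HTTPRunDB`` handle; the run UID, pipeline id and notification fields below are placeholders, not values from this diff::

    import mlrun

    db = mlrun.get_run_db()

    # Re-send the notifications configured on an existing run (placeholder UID).
    db.push_run_notifications(uid="abc123", project="my-project")

    # Push ad-hoc notifications for a KFP pipeline run (placeholder id and fields).
    notification = mlrun.model.Notification(
        kind="slack",
        name="pipeline-done",
        message="pipeline finished",
        severity="info",
        when=["completed"],
    )
    db.push_pipeline_notifications(
        pipeline_id="pipeline-uid",
        project="my-project",
        notifications=[notification],
    )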
@@ -768,18 +897,19 @@ class HTTPRunDB(RunDBInterface):
  name: Optional[str] = None,
  uid: Optional[Union[str, list[str]]] = None,
  project: Optional[str] = None,
- labels: Optional[Union[str, list[str]]] = None,
+ labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
  state: Optional[
  mlrun.common.runtimes.constants.RunStates
  ] = None, # Backward compatibility
  states: typing.Optional[list[mlrun.common.runtimes.constants.RunStates]] = None,
  sort: bool = True,
- last: int = 0,
  iter: bool = False,
  start_time_from: Optional[datetime] = None,
  start_time_to: Optional[datetime] = None,
  last_update_time_from: Optional[datetime] = None,
  last_update_time_to: Optional[datetime] = None,
+ end_time_from: Optional[datetime] = None,
+ end_time_to: Optional[datetime] = None,
  partition_by: Optional[
  Union[mlrun.common.schemas.RunPartitionByField, str]
  ] = None,
@@ -792,8 +922,9 @@ class HTTPRunDB(RunDBInterface):
  with_notifications: bool = False,
  ) -> RunList:
  """
- Retrieve a list of runs, filtered by various options.
- If no filter is provided, will return runs from the last week.
+ Retrieve a list of runs.
+ The default returns the runs from the last week, partitioned by project/name.
+ To override the default, specify any filter.

  Example::

@@ -806,23 +937,28 @@ class HTTPRunDB(RunDBInterface):

  :param name: Name of the run to retrieve.
  :param uid: Unique ID of the run, or a list of run UIDs.
- :param project: Project that the runs belongs to.
- :param labels: A list of labels to filter by. Label filters work by either filtering a specific value
- of a label (i.e. list("key=value")) or by looking for the existence of a given
- key (i.e. "key").
- :param state: Deprecated - List only runs whose state is specified (will be removed in 1.9.0)
+ :param project: Project that the runs belongs to. If not specified, the default project will be used.
+ :param labels: Filter runs by label key-value pairs or key existence. This can be provided as:
+ - A dictionary in the format `{"label": "value"}` to match specific label key-value pairs,
+ or `{"label": None}` to check for key existence.
+ - A list of strings formatted as `"label=value"` to match specific label key-value pairs,
+ or just `"label"` for key existence.
+ - A comma-separated string formatted as `"label1=value1,label2"` to match entities with
+ the specified key-value pairs or key existence.
+ :param state: Deprecated - List only runs whose state is specified (will be removed in 1.10.0)
  :param states: List only runs whose state is one of the provided states.
  :param sort: Whether to sort the result according to their start time. Otherwise, results will be
  returned by their internal order in the DB (order will not be guaranteed).
- :param last: Deprecated - currently not used (will be removed in 1.8.0).
  :param iter: If ``True`` return runs from all iterations. Otherwise, return only runs whose ``iter`` is 0.
  :param start_time_from: Filter by run start time in ``[start_time_from, start_time_to]``.
  :param start_time_to: Filter by run start time in ``[start_time_from, start_time_to]``.
  :param last_update_time_from: Filter by run last update time in ``(last_update_time_from,
  last_update_time_to)``.
  :param last_update_time_to: Filter by run last update time in ``(last_update_time_from, last_update_time_to)``.
- :param partition_by: Field to group results by. Only allowed value is `name`. When `partition_by` is specified,
- the `partition_sort_by` parameter must be provided as well.
+ :param end_time_from: Filter by run end time in ``[end_time_from, end_time_to]``.
+ :param end_time_to: Filter by run end time in ``[end_time_from, end_time_to]``.
+ :param partition_by: Field to group results by. When `partition_by` is specified, the `partition_sort_by`
+ parameter must be provided as well.
  :param rows_per_partition: How many top rows (per sorting defined by `partition_sort_by` and `partition_order`)
  to return per group. Default value is 1.
  :param partition_sort_by: What field to sort the results by, within each partition defined by `partition_by`.
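
The three accepted ``labels`` forms described in the hunk above express the same filter; for example (project name and label keys are placeholders)::

    # Equivalent label filters for list_runs:
    db.list_runs(project="my-project", labels={"owner": "admin", "gpu": None})
    db.list_runs(project="my-project", labels=["owner=admin", "gpu"])
    db.list_runs(project="my-project", labels="owner=admin,gpu")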
@@ -832,81 +968,97 @@ class HTTPRunDB(RunDBInterface):
  limit.
  :param with_notifications: Return runs with notifications, and join them to the response. Default is `False`.
  """
+ runs, _ = self._list_runs(
+ name=name,
+ uid=uid,
+ project=project,
+ labels=labels,
+ state=state,
+ states=states,
+ sort=sort,
+ iter=iter,
+ start_time_from=start_time_from,
+ start_time_to=start_time_to,
+ last_update_time_from=last_update_time_from,
+ last_update_time_to=last_update_time_to,
+ end_time_from=end_time_from,
+ end_time_to=end_time_to,
+ partition_by=partition_by,
+ rows_per_partition=rows_per_partition,
+ partition_sort_by=partition_sort_by,
+ partition_order=partition_order,
+ max_partitions=max_partitions,
+ with_notifications=with_notifications,
+ return_all=True,
+ )
+ return runs

- project = project or config.default_project
- if with_notifications:
- logger.warning(
- "Local run notifications are not persisted in the DB, therefore local runs will not be returned when "
- "using the `with_notifications` flag."
- )
-
- if last:
- # TODO: Remove this in 1.8.0
- warnings.warn(
- "'last' is deprecated and will be removed in 1.8.0.",
- FutureWarning,
- )
+ def paginated_list_runs(
+ self,
+ *args,
+ page: Optional[int] = None,
+ page_size: Optional[int] = None,
+ page_token: Optional[str] = None,
+ **kwargs,
+ ) -> tuple[RunList, Optional[str]]:
+ """List runs with support for pagination and various filtering options.

- if state:
- # TODO: Remove this in 1.9.0
- warnings.warn(
- "'state' is deprecated and will be removed in 1.9.0. Use 'states' instead.",
- FutureWarning,
- )
+ This method retrieves a paginated list of runs based on the specified filter parameters.
+ Pagination is controlled using the `page`, `page_size`, and `page_token` parameters. The method
+ will return a list of runs that match the filtering criteria provided.

- if (
- not name
- and not uid
- and not labels
- and not state
- and not states
- and not last
- and not start_time_from
- and not start_time_to
- and not last_update_time_from
- and not last_update_time_to
- and not partition_by
- and not partition_sort_by
- and not iter
- ):
- # default to last week on no filter
- start_time_from = datetime.now() - timedelta(days=7)
- partition_by = mlrun.common.schemas.RunPartitionByField.project_and_name
- partition_sort_by = mlrun.common.schemas.SortField.updated
+ For detailed information about the parameters, refer to the list_runs method:
+ See :py:func:`~list_runs` for more details.

- params = {
- "name": name,
- "uid": uid,
- "label": labels or [],
- "state": mlrun.utils.helpers.as_list(state)
- if state is not None
- else states or None,
- "sort": bool2str(sort),
- "iter": bool2str(iter),
- "start_time_from": datetime_to_iso(start_time_from),
- "start_time_to": datetime_to_iso(start_time_to),
- "last_update_time_from": datetime_to_iso(last_update_time_from),
- "last_update_time_to": datetime_to_iso(last_update_time_to),
- "with-notifications": with_notifications,
- }
+ Examples::

- if partition_by:
- params.update(
- self._generate_partition_by_params(
- mlrun.common.schemas.RunPartitionByField,
- partition_by,
- rows_per_partition,
- partition_sort_by,
- partition_order,
- max_partitions,
+ # Fetch first page of runs with page size of 5
+ runs, token = db.paginated_list_runs(project="my-project", page_size=5)
+ # Fetch next page using the pagination token from the previous response
+ runs, token = db.paginated_list_runs(project="my-project", page_token=token)
+ # Fetch runs for a specific page (e.g., page 3)
+ runs, token = db.paginated_list_runs(project="my-project", page=3, page_size=5)
+
+ # Automatically iterate over all pages without explicitly specifying the page number
+ runs = []
+ token = None
+ while True:
+ page_runs, token = db.paginated_list_runs(
+ project="my-project", page_token=token, page_size=5
  )
- )
- error = "list runs"
- _path = self._path_of("runs", project)
- responses = self.paginated_api_call("GET", _path, error, params=params)
- return RunList(self.process_paginated_responses(responses, "runs"))
+ runs.extend(page_runs)
+
+ # If token is None and page_runs is empty, we've reached the end (no more runs).
+ # If token is None and page_runs is not empty, we've fetched the last page of runs.
+ if not token:
+ break
+ print(f"Total runs retrieved: {len(runs)}")
+
+ :param page: The page number to retrieve. If not provided, the next page will be retrieved.
+ :param page_size: The number of items per page to retrieve. Up to `page_size` responses are expected.
+ Defaults to `mlrun.mlconf.httpdb.pagination.default_page_size` if not provided.
+ :param page_token: A pagination token used to retrieve the next page of results. Should not be provided
+ for the first request.
+
+ :returns: A tuple containing the list of runs and an optional `page_token` for pagination.
+ """
+ return self._list_runs(
+ *args,
+ page=page,
+ page_size=page_size,
+ page_token=page_token,
+ return_all=False,
+ **kwargs,
+ )

- def del_runs(self, name=None, project=None, labels=None, state=None, days_ago=0):
+ def del_runs(
+ self,
+ name: Optional[str] = None,
+ project: Optional[str] = None,
+ labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
+ state: Optional[mlrun.common.runtimes.constants.RunStates] = None,
+ days_ago: int = 0,
+ ):
  """Delete a group of runs identified by the parameters of the function.

  Example::
@@ -915,16 +1067,23 @@ class HTTPRunDB(RunDBInterface):

  :param name: Name of the task which the runs belong to.
  :param project: Project to which the runs belong.
- :param labels: Filter runs that are labeled using these specific label values.
+ :param labels: Filter runs by label key-value pairs or key existence. This can be provided as:
+ - A dictionary in the format `{"label": "value"}` to match specific label key-value pairs,
+ or `{"label": None}` to check for key existence.
+ - A list of strings formatted as `"label=value"` to match specific label key-value pairs,
+ or just `"label"` for key existence.
+ - A comma-separated string formatted as `"label1=value1,label2"` to match entities with
+ the specified key-value pairs or key existence.
  :param state: Filter only runs which are in this state.
  :param days_ago: Filter runs whose start time is newer than this parameter.
  """

  project = project or config.default_project
+ labels = self._parse_labels(labels)
  params = {
  "name": name,
  "project": project,
- "label": labels or [],
+ "label": labels,
  "state": state,
  "days_ago": str(days_ago),
  }
@@ -936,34 +1095,22 @@ class HTTPRunDB(RunDBInterface):
  self,
  key,
  artifact,
- # TODO: deprecated, remove in 1.8.0
- uid=None,
  iter=None,
  tag=None,
  project="",
  tree=None,
- ):
+ ) -> dict[str, str]:
  """Store an artifact in the DB.

  :param key: Identifying key of the artifact.
  :param artifact: The :py:class:`~mlrun.artifacts.Artifact` to store.
- :param uid: A unique ID for this specific version of the artifact
- (deprecated, artifact uid is generated in the backend use `tree` instead)
  :param iter: The task iteration which generated this artifact. If ``iter`` is not ``None`` the iteration will
  be added to the key provided to generate a unique key for the artifact of the specific iteration.
  :param tag: Tag of the artifact.
  :param project: Project that the artifact belongs to.
  :param tree: The tree (producer id) which generated this artifact.
+ :returns: The stored artifact dictionary.
  """
- if uid:
- warnings.warn(
- "'uid' is deprecated in 1.6.0 and will be removed in 1.8.0, use 'tree' instead.",
- # TODO: Remove this in 1.8.0
- FutureWarning,
- )
-
- # we do this because previously the 'uid' name was used for the 'tree' parameter
- tree = tree or uid
  project = project or mlrun.mlconf.default_project
  endpoint_path = f"projects/{project}/artifacts/{key}"

@@ -978,9 +1125,10 @@ class HTTPRunDB(RunDBInterface):
  params["tree"] = tree

  body = _as_json(artifact)
- self.api_call(
+ response = self.api_call(
  "PUT", endpoint_path, error, body=body, params=params, version="v2"
  )
+ return response.json()

  def read_artifact(
  self,
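
Since ``store_artifact`` now returns the stored artifact as a dictionary, callers can inspect backend-generated fields directly. A hedged sketch; the artifact key, project and the inspected field are illustrative::

    import mlrun.artifacts

    artifact = mlrun.artifacts.Artifact(key="my-result", body="some data")
    stored = db.store_artifact("my-result", artifact, project="my-project")
    # The returned dict reflects what the backend persisted (e.g. generated metadata).
    print(stored.get("metadata", {}))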
@@ -1028,7 +1176,7 @@ class HTTPRunDB(RunDBInterface):
  deletion_strategy: mlrun.common.schemas.artifact.ArtifactsDeletionStrategies = (
  mlrun.common.schemas.artifact.ArtifactsDeletionStrategies.metadata_only
  ),
- secrets: dict = None,
+ secrets: Optional[dict] = None,
  iter=None,
  ):
  """Delete an artifact.
@@ -1063,29 +1211,39 @@ class HTTPRunDB(RunDBInterface):

  def list_artifacts(
  self,
- name=None,
- project=None,
- tag=None,
- labels: Optional[Union[dict[str, str], list[str]]] = None,
+ name: Optional[str] = None,
+ project: Optional[str] = None,
+ tag: Optional[str] = None,
+ labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
  since: Optional[datetime] = None,
  until: Optional[datetime] = None,
- iter: int = None,
+ iter: Optional[int] = None,
  best_iteration: bool = False,
- kind: str = None,
+ kind: Optional[str] = None,
  category: Union[str, mlrun.common.schemas.ArtifactCategories] = None,
- tree: str = None,
- producer_uri: str = None,
+ tree: Optional[str] = None,
+ producer_uri: Optional[str] = None,
  format_: Optional[
  mlrun.common.formatters.ArtifactFormat
  ] = mlrun.common.formatters.ArtifactFormat.full,
- limit: int = None,
+ limit: Optional[int] = None,
+ partition_by: Optional[
+ Union[mlrun.common.schemas.ArtifactPartitionByField, str]
+ ] = None,
+ rows_per_partition: int = 1,
+ partition_sort_by: Optional[
+ Union[mlrun.common.schemas.SortField, str]
+ ] = mlrun.common.schemas.SortField.updated,
+ partition_order: Union[
+ mlrun.common.schemas.OrderType, str
+ ] = mlrun.common.schemas.OrderType.desc,
  ) -> ArtifactList:
  """List artifacts filtered by various parameters.

  Examples::

  # Show latest version of all artifacts in project
- latest_artifacts = db.list_artifacts("", tag="latest", project="iris")
+ latest_artifacts = db.list_artifacts(tag="latest", project="iris")
  # check different artifact versions for a specific artifact
  result_versions = db.list_artifacts("results", tag="*", project="iris")
  # Show artifacts with label filters - both uploaded and of binary type
@@ -1098,8 +1256,13 @@ class HTTPRunDB(RunDBInterface):
  ``my_Name_1`` or ``surname``.
  :param project: Project name.
  :param tag: Return artifacts assigned this tag.
- :param labels: Return artifacts that have these labels. Labels can either be a dictionary {"label": "value"} or
- a list of "label=value" (match label key and value) or "label" (match just label key) strings.
+ :param labels: Filter artifacts by label key-value pairs or key existence. This can be provided as:
+ - A dictionary in the format `{"label": "value"}` to match specific label key-value pairs,
+ or `{"label": None}` to check for key existence.
+ - A list of strings formatted as `"label=value"` to match specific label key-value pairs,
+ or just `"label"` for key existence.
+ - A comma-separated string formatted as `"label1=value1,label2"` to match entities with
+ the specified key-value pairs or key existence.
  :param since: Return artifacts updated after this date (as datetime object).
  :param until: Return artifacts updated before this date (as datetime object).
  :param iter: Return artifacts from a specific iteration (where ``iter=0`` means the root iteration). If
@@ -1113,40 +1276,113 @@ class HTTPRunDB(RunDBInterface):
  :param producer_uri: Return artifacts produced by the requested producer URI. Producer URI usually
  points to a run and is used to filter artifacts by the run that produced them when the artifact producer id
  is a workflow id (artifact was created as part of a workflow).
- :param format_: The format in which to return the artifacts. Default is 'full'.
- :param limit: Maximum number of artifacts to return.
+ :param format_: The format in which to return the artifacts. Default is 'full'.
+ :param limit: Deprecated - Maximum number of artifacts to return (will be removed in 1.11.0).
+ :param partition_by: Field to group results by. When `partition_by` is specified, the `partition_sort_by`
+ parameter must be provided as well.
+ :param rows_per_partition: How many top rows (per sorting defined by `partition_sort_by` and `partition_order`)
+ to return per group. Default value is 1.
+ :param partition_sort_by: What field to sort the results by, within each partition defined by `partition_by`.
+ Currently, the only allowed values are `created` and `updated`.
+ :param partition_order: Order of sorting within partitions - `asc` or `desc`. Default is `desc`.
  """

- project = project or config.default_project
+ artifacts, _ = self._list_artifacts(
+ name=name,
+ project=project,
+ tag=tag,
+ labels=labels,
+ since=since,
+ until=until,
+ iter=iter,
+ best_iteration=best_iteration,
+ kind=kind,
+ category=category,
+ tree=tree,
+ producer_uri=producer_uri,
+ format_=format_,
+ limit=limit,
+ partition_by=partition_by,
+ rows_per_partition=rows_per_partition,
+ partition_sort_by=partition_sort_by,
+ partition_order=partition_order,
+ return_all=not limit,
+ )
+ return artifacts

- labels = labels or []
- if isinstance(labels, dict):
- labels = [f"{key}={value}" for key, value in labels.items()]
+ def paginated_list_artifacts(
+ self,
+ *args,
+ page: Optional[int] = None,
+ page_size: Optional[int] = None,
+ page_token: Optional[str] = None,
+ **kwargs,
+ ) -> tuple[ArtifactList, Optional[str]]:
+ """List artifacts with support for pagination and various filtering options.

- params = {
- "name": name,
- "tag": tag,
- "label": labels,
- "iter": iter,
- "best-iteration": best_iteration,
- "kind": kind,
- "category": category,
- "tree": tree,
- "format": format_,
- "producer_uri": producer_uri,
- "limit": limit,
- "since": datetime_to_iso(since),
- "until": datetime_to_iso(until),
- }
- error = "list artifacts"
- endpoint_path = f"projects/{project}/artifacts"
- resp = self.api_call("GET", endpoint_path, error, params=params, version="v2")
- values = ArtifactList(resp.json()["artifacts"])
- values.tag = tag
- return values
+ This method retrieves a paginated list of artifacts based on the specified filter parameters.
+ Pagination is controlled using the `page`, `page_size`, and `page_token` parameters. The method
+ will return a list of artifacts that match the filtering criteria provided.
+
+ For detailed information about the parameters, refer to the list_artifacts method:
+ See :py:func:`~list_artifacts` for more details.
+
+ Examples::
+
+ # Fetch first page of artifacts with page size of 5
+ artifacts, token = db.paginated_list_artifacts(
+ project="my-project", page_size=5
+ )
+ # Fetch next page using the pagination token from the previous response
+ artifacts, token = db.paginated_list_artifacts(
+ project="my-project", page_token=token
+ )
+ # Fetch artifacts for a specific page (e.g., page 3)
+ artifacts, token = db.paginated_list_artifacts(
+ project="my-project", page=3, page_size=5
+ )
+
+ # Automatically iterate over all pages without explicitly specifying the page number
+ artifacts = []
+ token = None
+ while True:
+ page_artifacts, token = db.paginated_list_artifacts(
+ project="my-project", page_token=token, page_size=5
+ )
+ artifacts.extend(page_artifacts)
+
+ # If token is None and page_artifacts is empty, we've reached the end (no more artifacts).
+ # If token is None and page_artifacts is not empty, we've fetched the last page of artifacts.
+ if not token:
+ break
+ print(f"Total artifacts retrieved: {len(artifacts)}")
+
+ :param page: The page number to retrieve. If not provided, the next page will be retrieved.
+ :param page_size: The number of items per page to retrieve. Up to `page_size` responses are expected.
+ Defaults to `mlrun.mlconf.httpdb.pagination.default_page_size` if not provided.
+ :param page_token: A pagination token used to retrieve the next page of results. Should not be provided
+ for the first request.
+
+ :returns: A tuple containing the list of artifacts and an optional `page_token` for pagination.
+ """
+
+ return self._list_artifacts(
+ *args,
+ page=page,
+ page_size=page_size,
+ page_token=page_token,
+ return_all=False,
+ **kwargs,
+ )

  def del_artifacts(
- self, name=None, project=None, tag=None, labels=None, days_ago=0, tree=None
+ self,
+ name: Optional[str] = None,
+ project: Optional[str] = None,
+ tag: Optional[str] = None,
+ labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
+ days_ago=0,
+ tree: Optional[str] = None,
  ):
  """Delete artifacts referenced by the parameters.

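With the new partition parameters, ``list_artifacts`` can return, for example, only the most recently updated version per partition. A hedged sketch; the partition field value is an assumption, check ``mlrun.common.schemas.ArtifactPartitionByField`` for the allowed values::

    latest_per_name = db.list_artifacts(
        project="my-project",
        partition_by="project_and_name",  # assumed partition field, see ArtifactPartitionByField
        rows_per_partition=1,
        partition_sort_by="updated",
        partition_order="desc",
    )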
@@ -1154,15 +1390,24 @@ class HTTPRunDB(RunDBInterface):
  :py:func:`~list_artifacts` for more details.
  :param project: Project that artifacts belong to.
  :param tag: Choose artifacts who are assigned this tag.
- :param labels: Choose artifacts which are labeled.
+ :param labels: Filter artifacts by label key-value pairs or key existence. This can be provided as:
+ - A dictionary in the format `{"label": "value"}` to match specific label key-value pairs,
+ or `{"label": None}` to check for key existence.
+ - A list of strings formatted as `"label=value"` to match specific label key-value pairs,
+ or just `"label"` for key existence.
+ - A comma-separated string formatted as `"label1=value1,label2"` to match entities with
+ the specified key-value pairs or key existence.
  :param days_ago: This parameter is deprecated and not used.
+ :param tree: Delete artifacts filtered by tree.
  """
  project = project or config.default_project
+ labels = self._parse_labels(labels)
+
  params = {
  "name": name,
  "tag": tag,
  "tree": tree,
- "label": labels or [],
+ "label": labels,
  "days_ago": str(days_ago),
  }
  error = "del artifacts"
@@ -1254,41 +1499,125 @@ class HTTPRunDB(RunDBInterface):
  )

  def list_functions(
- self, name=None, project=None, tag=None, labels=None, since=None, until=None
+ self,
+ name: Optional[str] = None,
+ project: Optional[str] = None,
+ tag: Optional[str] = None,
+ labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
+ since: Optional[datetime] = None,
+ until: Optional[datetime] = None,
+ kind: Optional[str] = None,
+ format_: mlrun.common.formatters.FunctionFormat = mlrun.common.formatters.FunctionFormat.full,
+ states: typing.Optional[list[mlrun.common.schemas.FunctionState]] = None,
  ):
  """Retrieve a list of functions, filtered by specific criteria.

  :param name: Return only functions with a specific name.
  :param project: Return functions belonging to this project. If not specified, the default project is used.
  :param tag: Return function versions with specific tags. To return only tagged functions, set tag to ``"*"``.
- :param labels: Return functions that have specific labels assigned to them.
+ :param labels: Filter functions by label key-value pairs or key existence. This can be provided as:
+ - A dictionary in the format `{"label": "value"}` to match specific label key-value pairs,
+ or `{"label": None}` to check for key existence.
+ - A list of strings formatted as `"label=value"` to match specific label key-value pairs,
+ or just `"label"` for key existence.
+ - A comma-separated string formatted as `"label1=value1,label2"` to match entities with
+ the specified key-value pairs or key existence.
  :param since: Return functions updated after this date (as datetime object).
  :param until: Return functions updated before this date (as datetime object).
+ :param kind: Return only functions of a specific kind.
+ :param format_: The format in which to return the functions. Default is 'full'.
+ :param states: Return only functions whose state is one of the provided states.
  :returns: List of function objects (as dictionary).
  """
- project = project or config.default_project
- params = {
- "name": name,
- "tag": tag,
- "label": labels or [],
- "since": datetime_to_iso(since),
- "until": datetime_to_iso(until),
- }
- error = "list functions"
- path = f"projects/{project}/functions"
- responses = self.paginated_api_call("GET", path, error, params=params)
- return self.process_paginated_responses(responses, "funcs")
+ functions, _ = self._list_functions(
+ name=name,
+ project=project,
+ tag=tag,
+ kind=kind,
+ labels=labels,
+ format_=format_,
+ since=since,
+ until=until,
+ states=states,
+ return_all=True,
+ )
+ return functions

- def list_runtime_resources(
+ def paginated_list_functions(
  self,
- project: Optional[str] = None,
- label_selector: Optional[str] = None,
- kind: Optional[str] = None,
- object_id: Optional[str] = None,
- group_by: Optional[
- mlrun.common.schemas.ListRuntimeResourcesGroupByField
- ] = None,
- ) -> Union[
+ *args,
+ page: Optional[int] = None,
+ page_size: Optional[int] = None,
+ page_token: Optional[str] = None,
+ **kwargs,
+ ) -> tuple[list[dict], Optional[str]]:
+ """List functions with support for pagination and various filtering options.
+
+ This method retrieves a paginated list of functions based on the specified filter parameters.
+ Pagination is controlled using the `page`, `page_size`, and `page_token` parameters. The method
+ will return a list of functions that match the filtering criteria provided.
+
+ For detailed information about the parameters, refer to the list_functions method:
+ See :py:func:`~list_functions` for more details.
+
+ Examples::
+
+ # Fetch first page of functions with page size of 5
+ functions, token = db.paginated_list_functions(
+ project="my-project", page_size=5
+ )
+ # Fetch next page using the pagination token from the previous response
+ functions, token = db.paginated_list_functions(
+ project="my-project", page_token=token
+ )
+ # Fetch functions for a specific page (e.g., page 3)
+ functions, token = db.paginated_list_functions(
+ project="my-project", page=3, page_size=5
+ )
+
+ # Automatically iterate over all pages without explicitly specifying the page number
+ functions = []
+ token = None
+ while True:
+ page_functions, token = db.paginated_list_functions(
+ project="my-project", page_token=token, page_size=5
+ )
+ functions.extend(page_functions)
+
+ # If token is None and page_functions is empty, we've reached the end (no more functions).
+ # If token is None and page_functions is not empty, we've fetched the last page of functions.
+ if not token:
+ break
+ print(f"Total functions retrieved: {len(functions)}")
+
+ :param page: The page number to retrieve. If not provided, the next page will be retrieved.
+ :param page_size: The number of items per page to retrieve. Up to `page_size` responses are expected.
+ Defaults to `mlrun.mlconf.httpdb.pagination.default_page_size` if not provided.
+ :param page_token: A pagination token used to retrieve the next page of results. Should not be provided
+ for the first request.
+
+ :returns: A tuple containing the list of functions objects (as dictionary) and an optional
+ `page_token` for pagination.
+ """
+ return self._list_functions(
+ *args,
+ page=page,
+ page_size=page_size,
+ page_token=page_token,
+ return_all=False,
+ **kwargs,
+ )
+
+ def list_runtime_resources(
+ self,
+ project: Optional[str] = None,
+ label_selector: Optional[str] = None,
+ kind: Optional[str] = None,
+ object_id: Optional[str] = None,
+ group_by: Optional[
+ mlrun.common.schemas.ListRuntimeResourcesGroupByField
+ ] = None,
+ ) -> Union[
  mlrun.common.schemas.RuntimeResourcesOutput,
  mlrun.common.schemas.GroupedByJobRuntimeResourcesOutput,
  mlrun.common.schemas.GroupedByProjectRuntimeResourcesOutput,
@@ -1352,7 +1681,7 @@ class HTTPRunDB(RunDBInterface):
1352
1681
  kind: Optional[str] = None,
1353
1682
  object_id: Optional[str] = None,
1354
1683
  force: bool = False,
1355
- grace_period: int = None,
1684
+ grace_period: Optional[int] = None,
1356
1685
  ) -> mlrun.common.schemas.GroupedByProjectRuntimeResourcesOutput:
1357
1686
  """Delete all runtime resources which are in terminal state.
1358
1687
 
@@ -1399,36 +1728,10 @@ class HTTPRunDB(RunDBInterface):
1399
1728
  def create_schedule(
1400
1729
  self, project: str, schedule: mlrun.common.schemas.ScheduleInput
1401
1730
  ):
1402
- """Create a new schedule on the given project. The details on the actual object to schedule as well as the
1403
- schedule itself are within the schedule object provided.
1404
- The :py:class:`~ScheduleCronTrigger` follows the guidelines in
1405
- https://apscheduler.readthedocs.io/en/3.x/modules/triggers/cron.html.
1406
- It also supports a :py:func:`~ScheduleCronTrigger.from_crontab` function that accepts a
1407
- crontab-formatted string (see https://en.wikipedia.org/wiki/Cron for more information on the format and
1408
- note that the 0 weekday is always monday).
1409
-
1410
-
1411
- Example::
1412
-
1413
- from mlrun.common import schemas
1414
-
1415
- # Execute the get_data_func function every Tuesday at 15:30
1416
- schedule = schemas.ScheduleInput(
1417
- name="run_func_on_tuesdays",
1418
- kind="job",
1419
- scheduled_object=get_data_func,
1420
- cron_trigger=schemas.ScheduleCronTrigger(
1421
- day_of_week="tue", hour=15, minute=30
1422
- ),
1423
- )
1424
- db.create_schedule(project_name, schedule)
1425
- """
1426
-
1427
- project = project or config.default_project
1428
- path = f"projects/{project}/schedules"
1429
-
1430
- error_message = f"Failed creating schedule {project}/{schedule.name}"
1431
- self.api_call("POST", path, error_message, body=dict_to_json(schedule.dict()))
1731
+ """The create_schedule functionality has been deprecated."""
1732
+ raise mlrun.errors.MLRunBadRequestError(
1733
+ "The create_schedule functionality has been deprecated."
1734
+ )
1432
1735
 
1433
1736
  def update_schedule(
1434
1737
  self, project: str, name: str, schedule: mlrun.common.schemas.ScheduleUpdate
@@ -1464,9 +1767,11 @@ class HTTPRunDB(RunDBInterface):
1464
1767
  def list_schedules(
1465
1768
  self,
1466
1769
  project: str,
1467
- name: str = None,
1770
+ name: Optional[str] = None,
1468
1771
  kind: mlrun.common.schemas.ScheduleKinds = None,
1469
1772
  include_last_run: bool = False,
1773
+ next_run_time_since: Optional[datetime] = None,
1774
+ next_run_time_until: Optional[datetime] = None,
1470
1775
  ) -> mlrun.common.schemas.SchedulesOutput:
1471
1776
  """Retrieve list of schedules of specific name or kind.
1472
1777
 
@@ -1475,10 +1780,18 @@ class HTTPRunDB(RunDBInterface):
1475
1780
  :param kind: Kind of schedule objects to retrieve, can be either ``job`` or ``pipeline``.
1476
1781
  :param include_last_run: Whether to return for each schedule returned also the results of the last run of
1477
1782
  that schedule.
1783
+ :param next_run_time_since: Return only schedules with next run time after this date.
1784
+ :param next_run_time_until: Return only schedules with next run time before this date.
1478
1785
  """
1479
1786
 
1480
1787
  project = project or config.default_project
1481
- params = {"kind": kind, "name": name, "include_last_run": include_last_run}
1788
+ params = {
1789
+ "kind": kind,
1790
+ "name": name,
1791
+ "include_last_run": include_last_run,
1792
+ "next_run_time_since": datetime_to_iso(next_run_time_since),
1793
+ "next_run_time_until": datetime_to_iso(next_run_time_until),
1794
+ }
1482
1795
  path = f"projects/{project}/schedules"
1483
1796
  error_message = f"Failed listing schedules for {project} ? {kind} {name}"
1484
1797
  resp = self.api_call("GET", path, error_message, params=params)
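A minimal usage sketch of the new next-run-time filters, assuming ``db = mlrun.get_run_db()`` returns an ``HTTPRunDB`` client and the project already has schedules (the ``.schedules`` field on the returned object is assumed)::

    from datetime import datetime, timedelta, timezone

    import mlrun

    db = mlrun.get_run_db()
    now = datetime.now(timezone.utc)

    # List schedules whose next run falls within the coming 24 hours.
    output = db.list_schedules(
        project="my-project",
        include_last_run=True,
        next_run_time_since=now,
        next_run_time_until=now + timedelta(hours=24),
    )
    for schedule in output.schedules:  # `.schedules` field assumed on SchedulesOutput
        print(schedule.name, schedule.next_run_time)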
@@ -1636,6 +1949,7 @@ class HTTPRunDB(RunDBInterface):
1636
1949
  logs: bool = True,
1637
1950
  last_log_timestamp: float = 0.0,
1638
1951
  verbose: bool = False,
1952
+ events_offset: int = 0,
1639
1953
  ):
1640
1954
  """Retrieve the status of a build operation currently in progress.
1641
1955
 
@@ -1645,6 +1959,7 @@ class HTTPRunDB(RunDBInterface):
1645
1959
  :param last_log_timestamp: Last timestamp of logs that were already retrieved. Function will return only logs
1646
1960
  later than this parameter.
1647
1961
  :param verbose: Add verbose logs into the output.
1962
+ :param events_offset: Offset into the build events to retrieve events from.
1648
1963
 
1649
1964
  :returns: The following parameters:
1650
1965
 
@@ -1661,6 +1976,7 @@ class HTTPRunDB(RunDBInterface):
1661
1976
  "tag": func.metadata.tag,
1662
1977
  "logs": bool2str(logs),
1663
1978
  "offset": str(offset),
1979
+ "events_offset": str(events_offset),
1664
1980
  "last_log_timestamp": str(last_log_timestamp),
1665
1981
  "verbose": bool2str(verbose),
1666
1982
  }
@@ -1673,6 +1989,7 @@ class HTTPRunDB(RunDBInterface):
1673
1989
  logger.warning(f"failed resp, {resp.text}")
1674
1990
  raise RunDBError("bad function build response")
1675
1991
 
1992
+ deploy_status_text_kind = mlrun.common.constants.DeployStatusTextKind.logs
1676
1993
  if resp.headers:
1677
1994
  func.status.state = resp.headers.get("x-mlrun-function-status", "")
1678
1995
  last_log_timestamp = float(
@@ -1691,13 +2008,20 @@ class HTTPRunDB(RunDBInterface):
1691
2008
  if function_image:
1692
2009
  func.spec.image = function_image
1693
2010
 
2011
+ deploy_status_text_kind = resp.headers.get(
2012
+ "deploy_status_text_kind",
2013
+ mlrun.common.constants.DeployStatusTextKind.logs,
2014
+ )
2015
+
1694
2016
  text = ""
1695
2017
  if resp.content:
1696
2018
  text = resp.content.decode()
1697
- return text, last_log_timestamp
2019
+ return text, last_log_timestamp, deploy_status_text_kind
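With the extra return value, callers now unpack three items instead of two. A sketch, assuming ``db = mlrun.get_run_db()`` and a function object whose build was already submitted::

    import mlrun

    db = mlrun.get_run_db()
    func = mlrun.import_function("db://my-project/my-func")  # hypothetical function reference

    text, last_log_timestamp, text_kind = db.get_builder_status(
        func, logs=True, last_log_timestamp=0.0, events_offset=0
    )
    print(f"state={func.status.state}, text kind={text_kind}")
    if text:
        print(text)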
1698
2020
 
1699
2021
  def start_function(
1700
- self, func_url: str = None, function: "mlrun.runtimes.BaseRuntime" = None
2022
+ self,
2023
+ func_url: Optional[str] = None,
2024
+ function: "mlrun.runtimes.BaseRuntime" = None,
1701
2025
  ) -> mlrun.common.schemas.BackgroundTask:
1702
2026
  """Execute a function remotely, Used for ``dask`` functions.
1703
2027
 
@@ -1897,18 +2221,20 @@ class HTTPRunDB(RunDBInterface):
1897
2221
  elif pipe_file.endswith(".zip"):
1898
2222
  headers = {"content-type": "application/zip"}
1899
2223
  else:
1900
- raise ValueError("pipeline file must be .yaml or .zip")
2224
+ raise ValueError("'pipeline' file must be .yaml or .zip")
1901
2225
  if arguments:
1902
2226
  if not isinstance(arguments, dict):
1903
- raise ValueError("arguments must be dict type")
2227
+ raise ValueError("'arguments' must be dict type")
1904
2228
  headers[mlrun.common.schemas.HeaderNames.pipeline_arguments] = str(
1905
2229
  arguments
1906
2230
  )
1907
2231
 
1908
2232
  if not path.isfile(pipe_file):
1909
- raise OSError(f"file {pipe_file} doesnt exist")
2233
+ raise OSError(f"File {pipe_file} doesnt exist")
1910
2234
  with open(pipe_file, "rb") as fp:
1911
2235
  data = fp.read()
2236
+ if not data:
2237
+ raise ValueError("The compiled pipe file is empty")
1912
2238
  if not isinstance(pipeline, str):
1913
2239
  remove(pipe_file)
1914
2240
 
@@ -1939,14 +2265,14 @@ class HTTPRunDB(RunDBInterface):
1939
2265
  def list_pipelines(
1940
2266
  self,
1941
2267
  project: str,
1942
- namespace: str = None,
2268
+ namespace: Optional[str] = None,
1943
2269
  sort_by: str = "",
1944
2270
  page_token: str = "",
1945
2271
  filter_: str = "",
1946
2272
  format_: Union[
1947
2273
  str, mlrun.common.formatters.PipelineFormat
1948
2274
  ] = mlrun.common.formatters.PipelineFormat.metadata_only,
1949
- page_size: int = None,
2275
+ page_size: Optional[int] = None,
1950
2276
  ) -> mlrun.common.schemas.PipelinesOutput:
1951
2277
  """Retrieve a list of KFP pipelines. This function can be invoked to get all pipelines from all projects,
1952
2278
  by specifying ``project=*``, in which case pagination can be used and the various sorting and pagination
@@ -1988,12 +2314,12 @@ class HTTPRunDB(RunDBInterface):
1988
2314
  def get_pipeline(
1989
2315
  self,
1990
2316
  run_id: str,
1991
- namespace: str = None,
2317
+ namespace: Optional[str] = None,
1992
2318
  timeout: int = 30,
1993
2319
  format_: Union[
1994
2320
  str, mlrun.common.formatters.PipelineFormat
1995
2321
  ] = mlrun.common.formatters.PipelineFormat.summary,
1996
- project: str = None,
2322
+ project: Optional[str] = None,
1997
2323
  ):
1998
2324
  """Retrieve details of a specific pipeline using its run ID (as provided when the pipeline was executed)."""
1999
2325
 
@@ -2015,6 +2341,73 @@ class HTTPRunDB(RunDBInterface):
2015
2341
 
2016
2342
  return resp.json()
2017
2343
 
2344
+ def retry_pipeline(
2345
+ self,
2346
+ run_id: str,
2347
+ project: str,
2348
+ namespace: Optional[str] = None,
2349
+ timeout: int = 30,
2350
+ ):
2351
+ """
2352
+ Retry a specific pipeline run using its run ID. This function sends an API request
2353
+ to retry a pipeline run. The run must belong to the
2354
+ specified project.
2355
+
2356
+ :param run_id: The unique ID of the pipeline run to retry.
2357
+ :param namespace: Kubernetes namespace where the pipeline is running. Optional.
2358
+ :param timeout: Timeout (in seconds) for the API call. Defaults to 30 seconds.
2359
+ :param project: Name of the MLRun project associated with the pipeline.
2360
+
2361
+ :raises mlrun.errors.MLRunHTTPError: Raised if the API response is not successful or contains an
2362
+ error.
2363
+
2364
+ :return: JSON response containing details of the retried pipeline run.
2365
+ """
2366
+
2367
+ params = {}
2368
+ if namespace:
2369
+ params["namespace"] = namespace
2370
+
2371
+ resp_text = ""
2372
+ resp_code = None
2373
+ try:
2374
+ resp = self.api_call(
2375
+ "POST",
2376
+ f"projects/{project}/pipelines/{run_id}/retry",
2377
+ params=params,
2378
+ timeout=timeout,
2379
+ )
2380
+ resp_code = resp.status_code
2381
+ resp_text = resp.text
2382
+ if not resp.ok:
2383
+ raise mlrun.errors.MLRunHTTPError(
2384
+ f"Failed to retry pipeline run '{run_id}'. "
2385
+ f"HTTP {resp_code}: {resp_text}"
2386
+ )
2387
+ except Exception as exc:
2388
+ logger.error(
2389
+ "Retry pipeline API call encountered an error.",
2390
+ run_id=run_id,
2391
+ project=project,
2392
+ namespace=namespace,
2393
+ response_code=resp_code,
2394
+ response_text=resp_text,
2395
+ error=str(exc),
2396
+ )
2397
+ if isinstance(exc, mlrun.errors.MLRunHTTPError):
2398
+ raise exc # Re-raise known HTTP errors
2399
+ raise mlrun.errors.MLRunRuntimeError(
2400
+ f"Unexpected error while retrying pipeline run '{run_id}'."
2401
+ ) from exc
2402
+
2403
+ logger.info(
2404
+ "Successfully retried pipeline run",
2405
+ run_id=run_id,
2406
+ project=project,
2407
+ namespace=namespace,
2408
+ )
2409
+ return resp.json()
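A sketch of retrying a failed pipeline run, assuming ``db = mlrun.get_run_db()`` and that ``run_id`` refers to an existing KFP run in the project::

    import mlrun
    import mlrun.errors

    db = mlrun.get_run_db()
    run_id = "0f2a4c..."  # hypothetical pipeline run id

    try:
        result = db.retry_pipeline(run_id=run_id, project="my-project")
        print("retry submitted:", result)
    except mlrun.errors.MLRunHTTPError as exc:
        print("retry rejected by the API:", exc)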
2410
+
2018
2411
  @staticmethod
2019
2412
  def _resolve_reference(tag, uid):
2020
2413
  if uid and tag:
@@ -2061,7 +2454,11 @@ class HTTPRunDB(RunDBInterface):
2061
2454
  return resp.json()
2062
2455
 
2063
2456
  def get_feature_set(
2064
- self, name: str, project: str = "", tag: str = None, uid: str = None
2457
+ self,
2458
+ name: str,
2459
+ project: str = "",
2460
+ tag: Optional[str] = None,
2461
+ uid: Optional[str] = None,
2065
2462
  ) -> FeatureSet:
2066
2463
  """Retrieve a ~mlrun.feature_store.FeatureSet` object. If both ``tag`` and ``uid`` are not specified, then
2067
2464
  the object tagged ``latest`` will be retrieved.
@@ -2081,11 +2478,11 @@ class HTTPRunDB(RunDBInterface):
2081
2478
 
2082
2479
  def list_features(
2083
2480
  self,
2084
- project: str,
2085
- name: str = None,
2086
- tag: str = None,
2087
- entities: list[str] = None,
2088
- labels: list[str] = None,
2481
+ project: Optional[str] = None,
2482
+ name: Optional[str] = None,
2483
+ tag: Optional[str] = None,
2484
+ entities: Optional[list[str]] = None,
2485
+ labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
2089
2486
  ) -> list[dict]:
2090
2487
  """List feature-sets which contain specific features. This function may return multiple versions of the same
2091
2488
  feature-set if a specific tag is not requested. Note that the various filters of this function actually
@@ -2096,18 +2493,25 @@ class HTTPRunDB(RunDBInterface):
2096
2493
  example, looking for ``feat`` will return features which are named ``MyFeature`` as well as ``defeat``.
2097
2494
  :param tag: Return feature-sets which contain the features looked for, and are tagged with the specific tag.
2098
2495
  :param entities: Return only feature-sets which contain an entity whose name is contained in this list.
2099
- :param labels: Return only feature-sets which are labeled as requested.
2496
+ :param labels: Filter feature-sets by label key-value pairs or key existence. This can be provided as:
2497
+ - A dictionary in the format `{"label": "value"}` to match specific label key-value pairs,
2498
+ or `{"label": None}` to check for key existence.
2499
+ - A list of strings formatted as `"label=value"` to match specific label key-value pairs,
2500
+ or just `"label"` for key existence.
2501
+ - A comma-separated string formatted as `"label1=value1,label2"` to match feature-sets with
2502
+ the specified key-value pairs or key existence.
2100
2503
  :returns: A list of mapping from feature to a digest of the feature-set, which contains the feature-set
2101
2504
  meta-data. Multiple entries may be returned for any specific feature due to multiple tags or versions
2102
2505
  of the feature-set.
2103
2506
  """
2104
2507
 
2105
2508
  project = project or config.default_project
2509
+ labels = self._parse_labels(labels)
2106
2510
  params = {
2107
2511
  "name": name,
2108
2512
  "tag": tag,
2109
2513
  "entity": entities or [],
2110
- "label": labels or [],
2514
+ "label": labels,
2111
2515
  }
2112
2516
 
2113
2517
  path = f"projects/{project}/features"
@@ -2118,11 +2522,11 @@ class HTTPRunDB(RunDBInterface):
2118
2522
 
2119
2523
  def list_features_v2(
2120
2524
  self,
2121
- project: str,
2122
- name: str = None,
2123
- tag: str = None,
2124
- entities: list[str] = None,
2125
- labels: list[str] = None,
2525
+ project: Optional[str] = None,
2526
+ name: Optional[str] = None,
2527
+ tag: Optional[str] = None,
2528
+ entities: Optional[list[str]] = None,
2529
+ labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
2126
2530
  ) -> dict[str, list[dict]]:
2127
2531
  """List feature-sets which contain specific features. This function may return multiple versions of the same
2128
2532
  feature-set if a specific tag is not requested. Note that the various filters of this function actually
@@ -2133,16 +2537,23 @@ class HTTPRunDB(RunDBInterface):
2133
2537
  example, looking for ``feat`` will return features which are named ``MyFeature`` as well as ``defeat``.
2134
2538
  :param tag: Return feature-sets which contain the features looked for, and are tagged with the specific tag.
2135
2539
  :param entities: Return only feature-sets which contain an entity whose name is contained in this list.
2136
- :param labels: Return only feature-sets which are labeled as requested.
2540
+ :param labels: Filter feature-sets by label key-value pairs or key existence. This can be provided as:
2541
+ - A dictionary in the format `{"label": "value"}` to match specific label key-value pairs,
2542
+ or `{"label": None}` to check for key existence.
2543
+ - A list of strings formatted as `"label=value"` to match specific label key-value pairs,
2544
+ or just `"label"` for key existence.
2545
+ - A comma-separated string formatted as `"label1=value1,label2"` to match feature-sets with
2546
+ the specified key-value pairs or key existence.
2137
2547
  :returns: A list of features, and a list of their corresponding feature sets.
2138
2548
  """
2139
2549
 
2140
2550
  project = project or config.default_project
2551
+ labels = self._parse_labels(labels)
2141
2552
  params = {
2142
2553
  "name": name,
2143
2554
  "tag": tag,
2144
2555
  "entity": entities or [],
2145
- "label": labels or [],
2556
+ "label": labels,
2146
2557
  }
2147
2558
 
2148
2559
  path = f"projects/{project}/features"
@@ -2153,21 +2564,34 @@ class HTTPRunDB(RunDBInterface):
2153
2564
 
2154
2565
  def list_entities(
2155
2566
  self,
2156
- project: str,
2157
- name: str = None,
2158
- tag: str = None,
2159
- labels: list[str] = None,
2567
+ project: Optional[str] = None,
2568
+ name: Optional[str] = None,
2569
+ tag: Optional[str] = None,
2570
+ labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
2160
2571
  ) -> list[dict]:
2161
2572
  """Retrieve a list of entities and their mapping to the containing feature-sets. This function is similar
2162
2573
  to the :py:func:`~list_features` function, and uses the same logic. However, the entities are matched
2163
2574
  against the name rather than the features.
2575
+
2576
+ :param project: The project containing the entities.
2577
+ :param name: The name of the entities to retrieve.
2578
+ :param tag: The tag of the specific entity version to retrieve.
2579
+ :param labels: Filter entities by label key-value pairs or key existence. This can be provided as:
2580
+ - A dictionary in the format `{"label": "value"}` to match specific label key-value pairs,
2581
+ or `{"label": None}` to check for key existence.
2582
+ - A list of strings formatted as `"label=value"` to match specific label key-value pairs,
2583
+ or just `"label"` for key existence.
2584
+ - A comma-separated string formatted as `"label1=value1,label2"` to match entities with
2585
+ the specified key-value pairs or key existence.
2586
+ :returns: A list of entities.
2164
2587
  """
2165
2588
 
2166
2589
  project = project or config.default_project
2590
+ labels = self._parse_labels(labels)
2167
2591
  params = {
2168
2592
  "name": name,
2169
2593
  "tag": tag,
2170
- "label": labels or [],
2594
+ "label": labels,
2171
2595
  }
2172
2596
 
2173
2597
  path = f"projects/{project}/entities"
@@ -2178,21 +2602,34 @@ class HTTPRunDB(RunDBInterface):
2178
2602
 
2179
2603
  def list_entities_v2(
2180
2604
  self,
2181
- project: str,
2182
- name: str = None,
2183
- tag: str = None,
2184
- labels: list[str] = None,
2605
+ project: Optional[str] = None,
2606
+ name: Optional[str] = None,
2607
+ tag: Optional[str] = None,
2608
+ labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
2185
2609
  ) -> dict[str, list[dict]]:
2186
2610
  """Retrieve a list of entities and their mapping to the containing feature-sets. This function is similar
2187
2611
  to the :py:func:`~list_features_v2` function, and uses the same logic. However, the entities are matched
2188
2612
  against the name rather than the features.
2613
+
2614
+ :param project: The project containing the entities.
2615
+ :param name: The name of the entities to retrieve.
2616
+ :param tag: The tag of the specific entity version to retrieve.
2617
+ :param labels: Filter entities by label key-value pairs or key existence. This can be provided as:
2618
+ - A dictionary in the format `{"label": "value"}` to match specific label key-value pairs,
2619
+ or `{"label": None}` to check for key existence.
2620
+ - A list of strings formatted as `"label=value"` to match specific label key-value pairs,
2621
+ or just `"label"` for key existence.
2622
+ - A comma-separated string formatted as `"label1=value1,label2"` to match entities with
2623
+ the specified key-value pairs or key existence.
2624
+ :returns: A list of entities.
2189
2625
  """
2190
2626
 
2191
2627
  project = project or config.default_project
2628
+ labels = self._parse_labels(labels)
2192
2629
  params = {
2193
2630
  "name": name,
2194
2631
  "tag": tag,
2195
- "label": labels or [],
2632
+ "label": labels,
2196
2633
  }
2197
2634
 
2198
2635
  path = f"projects/{project}/entities"
@@ -2203,7 +2640,6 @@ class HTTPRunDB(RunDBInterface):
2203
2640
 
2204
2641
  @staticmethod
2205
2642
  def _generate_partition_by_params(
2206
- partition_by_cls,
2207
2643
  partition_by,
2208
2644
  rows_per_partition,
2209
2645
  sort_by,
@@ -2222,13 +2658,13 @@ class HTTPRunDB(RunDBInterface):
2222
2658
 
2223
2659
  def list_feature_sets(
2224
2660
  self,
2225
- project: str = "",
2226
- name: str = None,
2227
- tag: str = None,
2228
- state: str = None,
2229
- entities: list[str] = None,
2230
- features: list[str] = None,
2231
- labels: list[str] = None,
2661
+ project: Optional[str] = None,
2662
+ name: Optional[str] = None,
2663
+ tag: Optional[str] = None,
2664
+ state: Optional[str] = None,
2665
+ entities: Optional[list[str]] = None,
2666
+ features: Optional[list[str]] = None,
2667
+ labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
2232
2668
  partition_by: Union[
2233
2669
  mlrun.common.schemas.FeatureStorePartitionByField, str
2234
2670
  ] = None,
@@ -2249,7 +2685,13 @@ class HTTPRunDB(RunDBInterface):
2249
2685
  :param state: Match feature-sets with a specific state.
2250
2686
  :param entities: Match feature-sets which contain entities whose name is in this list.
2251
2687
  :param features: Match feature-sets which contain features whose name is in this list.
2252
- :param labels: Match feature-sets which have these labels.
2688
+ :param labels: Filter feature-sets by label key-value pairs or key existence. This can be provided as:
2689
+ - A dictionary in the format `{"label": "value"}` to match specific label key-value pairs,
2690
+ or `{"label": None}` to check for key existence.
2691
+ - A list of strings formatted as `"label=value"` to match specific label key-value pairs,
2692
+ or just `"label"` for key existence.
2693
+ - A comma-separated string formatted as `"label1=value1,label2"` to match entities with
2694
+ the specified key-value pairs or key existence.
2253
2695
  :param partition_by: Field to group results by. Only allowed value is `name`. When `partition_by` is specified,
2254
2696
  the `partition_sort_by` parameter must be provided as well.
2255
2697
  :param rows_per_partition: How many top rows (per sorting defined by `partition_sort_by` and `partition_order`)
@@ -2264,20 +2706,19 @@ class HTTPRunDB(RunDBInterface):
2264
2706
  """
2265
2707
 
2266
2708
  project = project or config.default_project
2267
-
2709
+ labels = self._parse_labels(labels)
2268
2710
  params = {
2269
2711
  "name": name,
2270
2712
  "state": state,
2271
2713
  "tag": tag,
2272
2714
  "entity": entities or [],
2273
2715
  "feature": features or [],
2274
- "label": labels or [],
2716
+ "label": labels,
2275
2717
  "format": format_,
2276
2718
  }
2277
2719
  if partition_by:
2278
2720
  params.update(
2279
2721
  self._generate_partition_by_params(
2280
- mlrun.common.schemas.FeatureStorePartitionByField,
2281
2722
  partition_by,
2282
2723
  rows_per_partition,
2283
2724
  partition_sort_by,
@@ -2436,7 +2877,11 @@ class HTTPRunDB(RunDBInterface):
2436
2877
  return resp.json()
2437
2878
 
2438
2879
  def get_feature_vector(
2439
- self, name: str, project: str = "", tag: str = None, uid: str = None
2880
+ self,
2881
+ name: str,
2882
+ project: str = "",
2883
+ tag: Optional[str] = None,
2884
+ uid: Optional[str] = None,
2440
2885
  ) -> FeatureVector:
2441
2886
  """Return a specific feature-vector referenced by its tag or uid. If none are provided, ``latest`` tag will
2442
2887
  be used."""
@@ -2450,11 +2895,11 @@ class HTTPRunDB(RunDBInterface):
2450
2895
 
2451
2896
  def list_feature_vectors(
2452
2897
  self,
2453
- project: str = "",
2454
- name: str = None,
2455
- tag: str = None,
2456
- state: str = None,
2457
- labels: list[str] = None,
2898
+ project: Optional[str] = None,
2899
+ name: Optional[str] = None,
2900
+ tag: Optional[str] = None,
2901
+ state: Optional[str] = None,
2902
+ labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
2458
2903
  partition_by: Union[
2459
2904
  mlrun.common.schemas.FeatureStorePartitionByField, str
2460
2905
  ] = None,
@@ -2470,7 +2915,13 @@ class HTTPRunDB(RunDBInterface):
2470
2915
  :param name: Name of feature-vector to match. This is a like query, and is case-insensitive.
2471
2916
  :param tag: Match feature-vectors with specific tag.
2472
2917
  :param state: Match feature-vectors with a specific state.
2473
- :param labels: Match feature-vectors which have these labels.
2918
+ :param labels: Filter feature-vectors by label key-value pairs or key existence. This can be provided as:
2919
+ - A dictionary in the format `{"label": "value"}` to match specific label key-value pairs,
2920
+ or `{"label": None}` to check for key existence.
2921
+ - A list of strings formatted as `"label=value"` to match specific label key-value pairs,
2922
+ or just `"label"` for key existence.
2923
+ - A comma-separated string formatted as `"label1=value1,label2"` to match feature-vectors with
2924
+ the specified key-value pairs or key existence.
2474
2925
  :param partition_by: Field to group results by. Only allowed value is `name`. When `partition_by` is specified,
2475
2926
  the `partition_sort_by` parameter must be provided as well.
2476
2927
  :param rows_per_partition: How many top rows (per sorting defined by `partition_sort_by` and `partition_order`)
@@ -2482,17 +2933,16 @@ class HTTPRunDB(RunDBInterface):
2482
2933
  """
2483
2934
 
2484
2935
  project = project or config.default_project
2485
-
2936
+ labels = self._parse_labels(labels)
2486
2937
  params = {
2487
2938
  "name": name,
2488
2939
  "state": state,
2489
2940
  "tag": tag,
2490
- "label": labels or [],
2941
+ "label": labels,
2491
2942
  }
2492
2943
  if partition_by:
2493
2944
  params.update(
2494
2945
  self._generate_partition_by_params(
2495
- mlrun.common.schemas.FeatureStorePartitionByField,
2496
2946
  partition_by,
2497
2947
  rows_per_partition,
2498
2948
  partition_sort_by,
@@ -2699,11 +3149,11 @@ class HTTPRunDB(RunDBInterface):
2699
3149
 
2700
3150
  def list_projects(
2701
3151
  self,
2702
- owner: str = None,
3152
+ owner: Optional[str] = None,
2703
3153
  format_: Union[
2704
3154
  str, mlrun.common.formatters.ProjectFormat
2705
3155
  ] = mlrun.common.formatters.ProjectFormat.name_only,
2706
- labels: list[str] = None,
3156
+ labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
2707
3157
  state: Union[str, mlrun.common.schemas.ProjectState] = None,
2708
3158
  ) -> list[Union[mlrun.projects.MlrunProject, str]]:
2709
3159
  """Return a list of the existing projects, potentially filtered by specific criteria.
@@ -2715,15 +3165,22 @@ class HTTPRunDB(RunDBInterface):
2715
3165
  - ``minimal`` - Return minimal project objects (minimization happens in the BE).
2716
3166
  - ``full`` - Return full project objects.
2717
3167
 
2718
- :param labels: Filter by labels attached to the project.
3168
+ :param labels: Filter projects by label key-value pairs or key existence. This can be provided as:
3169
+ - A dictionary in the format `{"label": "value"}` to match specific label key-value pairs,
3170
+ or `{"label": None}` to check for key existence.
3171
+ - A list of strings formatted as `"label=value"` to match specific label key-value pairs,
3172
+ or just `"label"` for key existence.
3173
+ - A comma-separated string formatted as `"label1=value1,label2"` to match projects with
3174
+ the specified key-value pairs or key existence.
2719
3175
  :param state: Filter by project's state. Can be either ``online`` or ``archived``.
2720
3176
  """
3177
+ labels = self._parse_labels(labels)
2721
3178
 
2722
3179
  params = {
2723
3180
  "owner": owner,
2724
3181
  "state": state,
2725
3182
  "format": format_,
2726
- "label": labels or [],
3183
+ "label": labels,
2727
3184
  }
2728
3185
 
2729
3186
  error_message = f"Failed listing projects, query: {params}"
@@ -2739,7 +3196,7 @@ class HTTPRunDB(RunDBInterface):
2739
3196
  for project_dict in response.json()["projects"]
2740
3197
  ]
2741
3198
 
2742
- def get_project(self, name: str) -> mlrun.projects.MlrunProject:
3199
+ def get_project(self, name: str) -> "mlrun.MlrunProject":
2743
3200
  """Get details for a specific project."""
2744
3201
 
2745
3202
  if not name:
@@ -2748,7 +3205,7 @@ class HTTPRunDB(RunDBInterface):
2748
3205
  path = f"projects/{name}"
2749
3206
  error_message = f"Failed retrieving project {name}"
2750
3207
  response = self.api_call("GET", path, error_message)
2751
- return mlrun.projects.MlrunProject.from_dict(response.json())
3208
+ return mlrun.MlrunProject.from_dict(response.json())
2752
3209
 
2753
3210
  def delete_project(
2754
3211
  self,
@@ -2919,7 +3376,7 @@ class HTTPRunDB(RunDBInterface):
2919
3376
  provider: Union[
2920
3377
  str, mlrun.common.schemas.SecretProviderName
2921
3378
  ] = mlrun.common.schemas.SecretProviderName.kubernetes,
2922
- secrets: dict = None,
3379
+ secrets: Optional[dict] = None,
2923
3380
  ):
2924
3381
  """Create project-context secrets using either ``vault`` or ``kubernetes`` provider.
2925
3382
  When using with Vault, this will create needed Vault structures for storing secrets in project-context, and
@@ -2963,11 +3420,11 @@ class HTTPRunDB(RunDBInterface):
2963
3420
  def list_project_secrets(
2964
3421
  self,
2965
3422
  project: str,
2966
- token: str = None,
3423
+ token: Optional[str] = None,
2967
3424
  provider: Union[
2968
3425
  str, mlrun.common.schemas.SecretProviderName
2969
3426
  ] = mlrun.common.schemas.SecretProviderName.kubernetes,
2970
- secrets: list[str] = None,
3427
+ secrets: Optional[list[str]] = None,
2971
3428
  ) -> mlrun.common.schemas.SecretsData:
2972
3429
  """Retrieve project-context secrets from Vault.
2973
3430
 
@@ -3010,7 +3467,7 @@ class HTTPRunDB(RunDBInterface):
3010
3467
  provider: Union[
3011
3468
  str, mlrun.common.schemas.SecretProviderName
3012
3469
  ] = mlrun.common.schemas.SecretProviderName.kubernetes,
3013
- token: str = None,
3470
+ token: Optional[str] = None,
3014
3471
  ) -> mlrun.common.schemas.SecretKeysData:
3015
3472
  """Retrieve project-context secret keys from Vault or Kubernetes.
3016
3473
 
@@ -3056,7 +3513,7 @@ class HTTPRunDB(RunDBInterface):
3056
3513
  provider: Union[
3057
3514
  str, mlrun.common.schemas.SecretProviderName
3058
3515
  ] = mlrun.common.schemas.SecretProviderName.kubernetes,
3059
- secrets: list[str] = None,
3516
+ secrets: Optional[list[str]] = None,
3060
3517
  ):
3061
3518
  """Delete project-context secrets from Kubernetes.
3062
3519
 
@@ -3076,13 +3533,86 @@ class HTTPRunDB(RunDBInterface):
3076
3533
  params=params,
3077
3534
  )
3078
3535
 
3536
+ def get_model_endpoint_monitoring_metrics(
3537
+ self,
3538
+ project: str,
3539
+ endpoint_id: str,
3540
+ type: Literal["results", "metrics", "all"] = "all",
3541
+ ) -> list[mm_endpoints.ModelEndpointMonitoringMetric]:
3542
+ """Get application metrics/results by endpoint id and project.
3543
+
3544
+ :param project: The name of the project.
3545
+ :param endpoint_id: The unique id of the model endpoint.
3546
+ :param type: The type of the metrics to return. "all" means "results" and "metrics".
3547
+
3548
+ :return: A list of the application metrics and/or results for this model endpoint.
3549
+ """
3550
+ path = f"projects/{project}/model-endpoints/{endpoint_id}/metrics"
3551
+ params = {"type": type}
3552
+ error_message = (
3553
+ f"Failed to get model endpoint monitoring metrics,"
3554
+ f" endpoint_id: {endpoint_id}, project: {project}"
3555
+ )
3556
+ response = self.api_call(
3557
+ mlrun.common.types.HTTPMethod.GET,
3558
+ path,
3559
+ error_message,
3560
+ params=params,
3561
+ )
3562
+ monitoring_metrics = response.json()
3563
+ return parse_obj_as(
3564
+ list[mm_endpoints.ModelEndpointMonitoringMetric], monitoring_metrics
3565
+ )
3566
+
3567
+ def get_metrics_by_multiple_endpoints(
3568
+ self,
3569
+ project: str,
3570
+ endpoint_ids: Union[str, list[str]],
3571
+ type: Literal["results", "metrics", "all"] = "all",
3572
+ events_format: mm_constants.GetEventsFormat = mm_constants.GetEventsFormat.SEPARATION,
3573
+ ) -> dict[str, list[mm_endpoints.ModelEndpointMonitoringMetric]]:
3574
+ """Get application metrics/results by endpoint id and project.
3575
+
3576
+ :param project: The name of the project.
3577
+ :param endpoint_ids: The unique id of the model endpoint. Can be a single id or a list of ids.
3578
+ :param type: The type of the metrics to return. "all" means "results" and "metrics".
3579
+ :param events_format: response format:
3580
+
3581
+ separation: {"mep_id1":[...], "mep_id2":[...]}
3582
+ intersection {"intersect_metrics":[], "intersect_results":[]}
3583
+ :return: A dictionary of application metrics and/or results for the model endpoints formatted by events_format.
3584
+ """
3585
+ path = f"projects/{project}/model-endpoints/metrics"
3586
+ params = {
3587
+ "type": type,
3588
+ "endpoint-id": endpoint_ids,
3589
+ "events-format": events_format,
3590
+ }
3591
+ error_message = (
3592
+ f"Failed to get model monitoring metrics,"
3593
+ f" endpoint_ids: {endpoint_ids}, project: {project}"
3594
+ )
3595
+ response = self.api_call(
3596
+ mlrun.common.types.HTTPMethod.GET,
3597
+ path,
3598
+ error_message,
3599
+ params=params,
3600
+ )
3601
+ monitoring_metrics_by_endpoint = response.json()
3602
+ parsed_metrics_by_endpoint = {}
3603
+ for endpoint, metrics in monitoring_metrics_by_endpoint.items():
3604
+ parsed_metrics_by_endpoint[endpoint] = parse_obj_as(
3605
+ list[mm_endpoints.ModelEndpointMonitoringMetric], metrics
3606
+ )
3607
+ return parsed_metrics_by_endpoint
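The multi-endpoint variant returns a mapping keyed by endpoint; a sketch with the default ``SEPARATION`` format, assuming ``db = mlrun.get_run_db()`` and two known endpoint uids::

    import mlrun

    db = mlrun.get_run_db()
    endpoint_ids = ["1234abcd", "5678efgh"]  # hypothetical model endpoint uids

    metrics_by_endpoint = db.get_metrics_by_multiple_endpoints(
        project="my-project", endpoint_ids=endpoint_ids, type="metrics"
    )
    for endpoint, metrics in metrics_by_endpoint.items():
        print(endpoint, [metric.name for metric in metrics])  # `.name` assumed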
3608
+
3079
3609
  def create_user_secrets(
3080
3610
  self,
3081
3611
  user: str,
3082
3612
  provider: Union[
3083
3613
  str, mlrun.common.schemas.SecretProviderName
3084
3614
  ] = mlrun.common.schemas.SecretProviderName.vault,
3085
- secrets: dict = None,
3615
+ secrets: Optional[dict] = None,
3086
3616
  ):
3087
3617
  """Create user-context secret in Vault. Please refer to :py:func:`create_project_secrets` for more details
3088
3618
  and status of this functionality.
@@ -3164,212 +3694,236 @@ class HTTPRunDB(RunDBInterface):
3164
3694
 
3165
3695
  def create_model_endpoint(
3166
3696
  self,
3167
- project: str,
3168
- endpoint_id: str,
3169
- model_endpoint: Union[
3170
- mlrun.model_monitoring.model_endpoint.ModelEndpoint, dict
3171
- ],
3172
- ):
3697
+ model_endpoint: mlrun.common.schemas.ModelEndpoint,
3698
+ creation_strategy: Optional[
3699
+ mm_constants.ModelEndpointCreationStrategy
3700
+ ] = mm_constants.ModelEndpointCreationStrategy.INPLACE,
3701
+ ) -> mlrun.common.schemas.ModelEndpoint:
3173
3702
  """
3174
3703
  Creates a DB record with the given model_endpoint record.
3175
3704
 
3176
- :param project: The name of the project.
3177
- :param endpoint_id: The id of the endpoint.
3178
3705
  :param model_endpoint: An object representing the model endpoint.
3179
- """
3180
-
3181
- if isinstance(
3182
- model_endpoint, mlrun.model_monitoring.model_endpoint.ModelEndpoint
3183
- ):
3184
- model_endpoint = model_endpoint.to_dict()
3185
-
3186
- path = f"projects/{project}/model-endpoints/{endpoint_id}"
3187
- self.api_call(
3188
- method="POST",
3706
+ :param creation_strategy: Strategy for creating or updating the model endpoint:
3707
+ * **overwrite**:
3708
+ 1. If model endpoints with the same name exist, delete the `latest` one.
3709
+ 2. Create a new model endpoint entry and set it as `latest`.
3710
+ * **inplace** (default):
3711
+ 1. If model endpoints with the same name exist, update the `latest` entry.
3712
+ 2. Otherwise, create a new entry.
3713
+ * **archive**:
3714
+ 1. If model endpoints with the same name exist, preserve them.
3715
+ 2. Create a new model endpoint with the same name and set it to `latest`.
3716
+ :return: The created model endpoint object.
3717
+ """
3718
+
3719
+ path = f"projects/{model_endpoint.metadata.project}/model-endpoints"
3720
+ response = self.api_call(
3721
+ method=mlrun.common.types.HTTPMethod.POST,
3189
3722
  path=path,
3190
- body=dict_to_json(model_endpoint),
3723
+ body=model_endpoint.json(),
3724
+ params={
3725
+ "creation-strategy": creation_strategy,
3726
+ },
3191
3727
  )
3728
+ return mlrun.common.schemas.ModelEndpoint(**response.json())
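A sketch of registering a model endpoint with the default in-place strategy, assuming ``db = mlrun.get_run_db()``; the nested metadata/spec field names (and which fields are required) are assumptions made for illustration::

    import mlrun
    from mlrun.common.schemas import ModelEndpoint
    from mlrun.common.schemas.model_monitoring import constants as mm_constants

    db = mlrun.get_run_db()

    # Nested field names below are assumptions; adjust to the actual ModelEndpoint schema.
    endpoint = ModelEndpoint(
        metadata={"project": "my-project", "name": "churn-serving"},
        spec={"function_name": "serving-fn", "function_tag": "latest"},
    )
    created = db.create_model_endpoint(
        endpoint,
        creation_strategy=mm_constants.ModelEndpointCreationStrategy.INPLACE,
    )
    print(created.metadata.uid)  # `.uid` on the returned metadata is assumed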
3192
3729
 
3193
3730
  def delete_model_endpoint(
3194
3731
  self,
3732
+ name: str,
3195
3733
  project: str,
3196
- endpoint_id: str,
3734
+ function_name: Optional[str] = None,
3735
+ function_tag: Optional[str] = None,
3736
+ endpoint_id: Optional[str] = None,
3197
3737
  ):
3198
3738
  """
3199
3739
  Deletes the DB record of a given model endpoint, project and endpoint_id are used for lookup
3200
3740
 
3741
+ :param name: The name of the model endpoint
3201
3742
  :param project: The name of the project
3743
+ :param function_name: The name of the function
3744
+ :param function_tag: The tag of the function
3202
3745
  :param endpoint_id: The id of the endpoint
3203
3746
  """
3204
-
3205
- path = f"projects/{project}/model-endpoints/{endpoint_id}"
3747
+ self._check_model_endpoint_representation(
3748
+ function_name, function_tag, endpoint_id
3749
+ )
3750
+ path = f"projects/{project}/model-endpoints/{name}"
3206
3751
  self.api_call(
3207
- method="DELETE",
3752
+ method=mlrun.common.types.HTTPMethod.DELETE,
3208
3753
  path=path,
3754
+ params={
3755
+ "function-name": function_name,
3756
+ "function-tag": function_tag,
3757
+ "endpoint-id": endpoint_id,
3758
+ },
3209
3759
  )
3210
3760
 
3211
3761
  def list_model_endpoints(
3212
3762
  self,
3213
3763
  project: str,
3214
- model: Optional[str] = None,
3215
- function: Optional[str] = None,
3216
- labels: list[str] = None,
3217
- start: str = "now-1h",
3218
- end: str = "now",
3219
- metrics: Optional[list[str]] = None,
3764
+ names: Optional[Union[str, list[str]]] = None,
3765
+ function_name: Optional[str] = None,
3766
+ function_tag: Optional[str] = None,
3767
+ model_name: Optional[str] = None,
3768
+ model_tag: Optional[str] = None,
3769
+ labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
3770
+ start: Optional[datetime] = None,
3771
+ end: Optional[datetime] = None,
3772
+ tsdb_metrics: bool = False,
3773
+ metric_list: Optional[list[str]] = None,
3220
3774
  top_level: bool = False,
3221
3775
  uids: Optional[list[str]] = None,
3222
- ) -> list[mlrun.model_monitoring.model_endpoint.ModelEndpoint]:
3223
- """
3224
- Returns a list of `ModelEndpoint` objects. Each `ModelEndpoint` object represents the current state of a
3225
- model endpoint. This functions supports filtering by the following parameters:
3226
- 1) model
3227
- 2) function
3228
- 3) labels
3229
- 4) top level
3230
- 5) uids
3231
- By default, when no filters are applied, all available endpoints for the given project will be listed.
3232
-
3233
- In addition, this functions provides a facade for listing endpoint related metrics. This facade is time-based
3234
- and depends on the 'start' and 'end' parameters. By default, when the metrics parameter is None, no metrics are
3235
- added to the output of this function.
3236
-
3237
- :param project: The name of the project
3238
- :param model: The name of the model to filter by
3239
- :param function: The name of the function to filter by
3240
- :param labels: A list of labels to filter by. Label filters work by either filtering a specific value of a
3241
- label (i.e. list("key=value")) or by looking for the existence of a given key (i.e. "key")
3242
- :param metrics: A list of metrics to return for each endpoint, read more in 'TimeMetric'
3243
- :param start: The start time of the metrics. Can be represented by a string containing an RFC 3339 time, a
3244
- Unix timestamp in milliseconds, a relative time (`'now'` or `'now-[0-9]+[mhd]'`, where
3245
- `m` = minutes, `h` = hours, `'d'` = days, and `'s'` = seconds), or 0 for the earliest time.
3246
- :param end: The end time of the metrics. Can be represented by a string containing an RFC 3339 time, a
3247
- Unix timestamp in milliseconds, a relative time (`'now'` or `'now-[0-9]+[mhd]'`, where
3248
- `m` = minutes, `h` = hours, `'d'` = days, and `'s'` = seconds), or 0 for the earliest time.
3249
- :param top_level: if true will return only routers and endpoint that are NOT children of any router
3250
- :param uids: if passed will return a list `ModelEndpoint` object with uid in uids
3776
+ latest_only: bool = False,
3777
+ ) -> mlrun.common.schemas.ModelEndpointList:
3778
+ """
3779
+ List model endpoints with optional filtering by name, function name, model name, labels, and time range.
3780
+
3781
+ :param project: The name of the project
3782
+ :param names: The name of the model endpoint, or list of names of the model endpoints
3783
+ :param function_name: The name of the function
3784
+ :param function_tag: The tag of the function
3785
+ :param model_name: The name of the model
3786
+ :param model_tag: The tag of the model
3787
+ :param labels: A list of labels to filter by. (see mlrun.common.schemas.LabelsModel)
3788
+ :param start: The start time to filter by. Corresponds to the `created` field.
3789
+ :param end: The end time to filter by. Corresponds to the `created` field.
3790
+ :param tsdb_metrics: Whether to include metrics from the time series DB.
3791
+ :param metric_list: List of metrics to include from the time series DB. Defaults to all metrics.
3792
+ If tsdb_metrics=False, this parameter will be ignored and no tsdb metrics
3793
+ will be included.
3794
+ :param top_level: Whether to return only top level model endpoints.
3795
+ :param uids: A list of unique ids to filter by.
3796
+ :param latest_only: Whether to return only the latest model endpoint version.
3797
+ :return: A list of model endpoints.
3251
3798
  """
3252
-
3253
3799
  path = f"projects/{project}/model-endpoints"
3254
-
3255
- if labels and isinstance(labels, dict):
3256
- labels = [f"{key}={value}" for key, value in labels.items()]
3257
-
3800
+ labels = self._parse_labels(labels)
3801
+ if names and isinstance(names, str):
3802
+ names = [names]
3258
3803
  response = self.api_call(
3259
- method="GET",
3804
+ method=mlrun.common.types.HTTPMethod.GET,
3260
3805
  path=path,
3261
3806
  params={
3262
- "model": model,
3263
- "function": function,
3264
- "label": labels or [],
3265
- "start": start,
3266
- "end": end,
3267
- "metric": metrics or [],
3807
+ "name": names,
3808
+ "model-name": model_name,
3809
+ "model-tag": model_tag,
3810
+ "function-name": function_name,
3811
+ "function-tag": function_tag,
3812
+ "label": labels,
3813
+ "start": datetime_to_iso(start),
3814
+ "end": datetime_to_iso(end),
3815
+ "tsdb-metrics": tsdb_metrics,
3816
+ "metric": metric_list,
3268
3817
  "top-level": top_level,
3269
3818
  "uid": uids,
3819
+ "latest-only": latest_only,
3270
3820
  },
3271
3821
  )
3272
3822
 
3273
- # Generate a list of a model endpoint dictionaries
3274
- model_endpoints = response.json()["endpoints"]
3275
- if model_endpoints:
3276
- return [
3277
- mlrun.model_monitoring.model_endpoint.ModelEndpoint.from_dict(obj)
3278
- for obj in model_endpoints
3279
- ]
3280
- return []
3823
+ return mlrun.common.schemas.ModelEndpointList(**response.json())
3281
3824
 
3282
3825
  def get_model_endpoint(
3283
3826
  self,
3827
+ name: str,
3284
3828
  project: str,
3285
- endpoint_id: str,
3286
- start: Optional[str] = None,
3287
- end: Optional[str] = None,
3288
- metrics: Optional[list[str]] = None,
3829
+ function_name: Optional[str] = None,
3830
+ function_tag: Optional[str] = None,
3831
+ endpoint_id: Optional[str] = None,
3832
+ tsdb_metrics: bool = True,
3833
+ metric_list: Optional[list[str]] = None,
3289
3834
  feature_analysis: bool = False,
3290
- ) -> mlrun.model_monitoring.model_endpoint.ModelEndpoint:
3835
+ ) -> mlrun.common.schemas.ModelEndpoint:
3291
3836
  """
3292
3837
  Returns a single `ModelEndpoint` object with additional metrics and feature related data.
3293
3838
 
3839
+ :param name: The name of the model endpoint
3294
3840
  :param project: The name of the project
3295
- :param endpoint_id: The unique id of the model endpoint.
3296
- :param start: The start time of the metrics. Can be represented by a string containing an
3297
- RFC 3339 time, a Unix timestamp in milliseconds, a relative time
3298
- (`'now'` or `'now-[0-9]+[mhd]'`, where `m` = minutes, `h` = hours,
3299
- `'d'` = days, and `'s'` = seconds), or 0 for the earliest time.
3300
- :param end: The end time of the metrics. Can be represented by a string containing an
3301
- RFC 3339 time, a Unix timestamp in milliseconds, a relative time
3302
- (`'now'` or `'now-[0-9]+[mhd]'`, where `m` = minutes, `h` = hours,
3303
- `'d'` = days, and `'s'` = seconds), or 0 for the earliest time.
3304
- :param metrics: A list of metrics to return for the model endpoint. There are pre-defined
3305
- metrics for model endpoints such as predictions_per_second and
3306
- latency_avg_5m but also custom metrics defined by the user. Please note that
3307
- these metrics are stored in the time series DB and the results will be
3308
- appeared under model_endpoint.spec.metrics.
3309
- :param feature_analysis: When True, the base feature statistics and current feature statistics will
3310
- be added to the output of the resulting object.
3311
-
3312
- :returns: A `ModelEndpoint` object.
3313
- """
3314
-
3315
- path = f"projects/{project}/model-endpoints/{endpoint_id}"
3841
+ :param function_name: The name of the function
3842
+ :param function_tag: The tag of the function
3843
+ :param endpoint_id: The id of the endpoint
3844
+ :param tsdb_metrics: Whether to include metrics from the time series DB.
3845
+ :param metric_list: List of metrics to include from the time series DB. Defaults to all metrics.
3846
+ If tsdb_metrics=False, this parameter will be ignored and no tsdb metrics
3847
+ will be included.
3848
+ :param feature_analysis: Whether to include feature analysis data (feature_stats,
3849
+ current_stats & drift_measures).
3850
+
3851
+ :return: A `ModelEndpoint` object.
3852
+ """
3853
+ self._check_model_endpoint_representation(
3854
+ function_name, function_tag, endpoint_id
3855
+ )
3856
+ path = f"projects/{project}/model-endpoints/{name}"
3316
3857
  response = self.api_call(
3317
- method="GET",
3858
+ method=mlrun.common.types.HTTPMethod.GET,
3318
3859
  path=path,
3319
3860
  params={
3320
- "start": start,
3321
- "end": end,
3322
- "metric": metrics or [],
3323
- "feature_analysis": feature_analysis,
3861
+ "function-name": function_name,
3862
+ "function-tag": function_tag,
3863
+ "endpoint-id": endpoint_id,
3864
+ "tsdb-metrics": tsdb_metrics,
3865
+ "metric": metric_list,
3866
+ "feature-analysis": feature_analysis,
3324
3867
  },
3325
3868
  )
3326
3869
 
3327
- return mlrun.model_monitoring.model_endpoint.ModelEndpoint.from_dict(
3328
- response.json()
3329
- )
3870
+ return mlrun.common.schemas.ModelEndpoint(**response.json())
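A sketch of listing the latest endpoints of a project and reading one back with its time-series metrics, assuming ``db = mlrun.get_run_db()``; the ``.endpoints`` field and the metadata attribute names are assumptions::

    import mlrun

    db = mlrun.get_run_db()

    endpoints = db.list_model_endpoints(
        project="my-project",
        model_name="churn-model",
        labels={"env": "prod"},
        latest_only=True,
    )
    for mep in endpoints.endpoints:  # `.endpoints` field assumed on ModelEndpointList
        print(mep.metadata.name, mep.metadata.uid)

    mep = db.get_model_endpoint(
        name="churn-serving",
        project="my-project",
        endpoint_id=endpoints.endpoints[0].metadata.uid,
        tsdb_metrics=True,
    )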
3330
3871
 
3331
3872
  def patch_model_endpoint(
3332
3873
  self,
3874
+ name: str,
3333
3875
  project: str,
3334
- endpoint_id: str,
3335
3876
  attributes: dict,
3336
- ):
3337
- """
3338
- Updates model endpoint with provided attributes.
3339
-
3340
- :param project: The name of the project.
3341
- :param endpoint_id: The id of the endpoint.
3342
- :param attributes: Dictionary of attributes that will be used for update the model endpoint. The keys
3343
- of this dictionary should exist in the target table. Note that the values should be from type string or from
3344
- a valid numerical type such as int or float. More details about the model endpoint available attributes can
3345
- be found under :py:class:`~mlrun.common.schemas.ModelEndpoint`.
3346
-
3347
- Example::
3348
-
3349
- # Generate current stats for two features
3350
- current_stats = {'tvd_sum': 2.2,
3351
- 'tvd_mean': 0.5,
3352
- 'hellinger_sum': 3.6,
3353
- 'hellinger_mean': 0.9,
3354
- 'kld_sum': 24.2,
3355
- 'kld_mean': 6.0,
3356
- 'f1': {'tvd': 0.5, 'hellinger': 1.0, 'kld': 6.4},
3357
- 'f2': {'tvd': 0.5, 'hellinger': 1.0, 'kld': 6.5}}
3358
-
3359
- # Create attributes dictionary according to the required format
3360
- attributes = {`current_stats`: json.dumps(current_stats),
3361
- `drift_status`: "DRIFT_DETECTED"}
3362
-
3877
+ function_name: Optional[str] = None,
3878
+ function_tag: Optional[str] = None,
3879
+ endpoint_id: Optional[str] = None,
3880
+ ) -> None:
3363
3881
  """
3882
+ Updates a model endpoint with the given attributes.
3364
3883
 
3365
- attributes = {"attributes": _as_json(attributes)}
3366
- path = f"projects/{project}/model-endpoints/{endpoint_id}"
3367
- self.api_call(
3368
- method="PATCH",
3884
+ :param name: The name of the model endpoint
3885
+ :param project: The name of the project
3886
+ :param attributes: The attributes to update
3887
+ :param function_name: The name of the function
3888
+ :param function_tag: The tag of the function
3889
+ :param endpoint_id: The id of the endpoint
3890
+ """
3891
+ attributes_keys = list(attributes.keys())
3892
+ attributes["name"] = name
3893
+ attributes["project"] = project
3894
+ attributes["function_name"] = function_name or None
3895
+ attributes["function_tag"] = function_tag or None
3896
+ attributes["uid"] = endpoint_id or None
3897
+ model_endpoint = mlrun.common.schemas.ModelEndpoint.from_flat_dict(attributes)
3898
+ path = f"projects/{project}/model-endpoints"
3899
+ logger.info(
3900
+ "Patching model endpoint",
3901
+ attributes_keys=attributes_keys,
3902
+ model_endpoint=model_endpoint,
3903
+ )
3904
+ response = self.api_call(
3905
+ method=mlrun.common.types.HTTPMethod.PATCH,
3369
3906
  path=path,
3370
- params=attributes,
3907
+ params={
3908
+ "attribute-key": attributes_keys,
3909
+ },
3910
+ body=model_endpoint.json(),
3911
+ )
3912
+ logger.info(
3913
+ "Updating model endpoint done",
3914
+ model_endpoint_uid=response.json(),
3915
+ status_code=response.status_code,
3371
3916
  )
3372
3917
 
3918
+ @staticmethod
3919
+ def _check_model_endpoint_representation(
3920
+ function_name: str, function_tag: str, uid: str
3921
+ ):
3922
+ if not uid and not (function_name and function_tag):
3923
+ raise MLRunInvalidArgumentError(
3924
+ "Either endpoint_uid or function_name and function_tag must be provided"
3925
+ )
3926
+
3373
3927
  def update_model_monitoring_controller(
3374
3928
  self,
3375
3929
  project: str,
@@ -3387,7 +3941,7 @@ class HTTPRunDB(RunDBInterface):
3387
3941
  """
3388
3942
  self.api_call(
3389
3943
  method=mlrun.common.types.HTTPMethod.PATCH,
3390
- path=f"projects/{project}/model-monitoring/model-monitoring-controller",
3944
+ path=f"projects/{project}/model-monitoring/controller",
3391
3945
  params={
3392
3946
  "base_period": base_period,
3393
3947
  "image": image,
@@ -3400,7 +3954,6 @@ class HTTPRunDB(RunDBInterface):
3400
3954
  base_period: int = 10,
3401
3955
  image: str = "mlrun/mlrun",
3402
3956
  deploy_histogram_data_drift_app: bool = True,
3403
- rebuild_images: bool = False,
3404
3957
  fetch_credentials_from_sys_config: bool = False,
3405
3958
  ) -> None:
3406
3959
  """
@@ -3418,18 +3971,16 @@ class HTTPRunDB(RunDBInterface):
3418
3971
  stream functions, which are real time nuclio functions.
3419
3972
  By default, the image is mlrun/mlrun.
3420
3973
  :param deploy_histogram_data_drift_app: If true, deploy the default histogram-based data drift application.
3421
- :param rebuild_images: If true, force rebuild of model monitoring infrastructure images.
3422
3974
  :param fetch_credentials_from_sys_config: If true, fetch the credentials from the system configuration.
3423
3975
 
3424
3976
  """
3425
3977
  self.api_call(
3426
- method=mlrun.common.types.HTTPMethod.POST,
3427
- path=f"projects/{project}/model-monitoring/enable-model-monitoring",
3978
+ method=mlrun.common.types.HTTPMethod.PUT,
3979
+ path=f"projects/{project}/model-monitoring/",
3428
3980
  params={
3429
3981
  "base_period": base_period,
3430
3982
  "image": image,
3431
3983
  "deploy_histogram_data_drift_app": deploy_histogram_data_drift_app,
3432
- "rebuild_images": rebuild_images,
3433
3984
  "fetch_credentials_from_sys_config": fetch_credentials_from_sys_config,
3434
3985
  },
3435
3986
  )
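A sketch of enabling model monitoring with a 10-minute controller period; the enclosing method is assumed to be ``enable_model_monitoring`` (its ``def`` line falls outside this hunk), and ``db = mlrun.get_run_db()``::

    import mlrun

    db = mlrun.get_run_db()

    # Method name assumed to be enable_model_monitoring; parameters follow the signature above.
    db.enable_model_monitoring(
        project="my-project",
        base_period=10,
        image="mlrun/mlrun",
        deploy_histogram_data_drift_app=True,
        fetch_credentials_from_sys_config=False,
    )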
@@ -3441,7 +3992,7 @@ class HTTPRunDB(RunDBInterface):
3441
3992
  delete_stream_function: bool = False,
3442
3993
  delete_histogram_data_drift_app: bool = True,
3443
3994
  delete_user_applications: bool = False,
3444
- user_application_list: list[str] = None,
3995
+ user_application_list: Optional[list[str]] = None,
3445
3996
  ) -> bool:
3446
3997
  """
3447
3998
  Disable model monitoring application controller, writer, stream, histogram data drift application
@@ -3468,7 +4019,7 @@ class HTTPRunDB(RunDBInterface):
3468
4019
  """
3469
4020
  response = self.api_call(
3470
4021
  method=mlrun.common.types.HTTPMethod.DELETE,
3471
- path=f"projects/{project}/model-monitoring/disable-model-monitoring",
4022
+ path=f"projects/{project}/model-monitoring/",
3472
4023
  params={
3473
4024
  "delete_resources": delete_resources,
3474
4025
  "delete_stream_function": delete_stream_function,
@@ -3541,25 +4092,10 @@ class HTTPRunDB(RunDBInterface):
3541
4092
  deletion_failed = True
3542
4093
  return not deletion_failed
3543
4094
 
3544
- def deploy_histogram_data_drift_app(
3545
- self, project: str, image: str = "mlrun/mlrun"
3546
- ) -> None:
3547
- """
3548
- Deploy the histogram data drift application.
3549
-
3550
- :param project: Project name.
3551
- :param image: The image on which the application will run.
3552
- """
3553
- self.api_call(
3554
- method=mlrun.common.types.HTTPMethod.POST,
3555
- path=f"projects/{project}/model-monitoring/deploy-histogram-data-drift-app",
3556
- params={"image": image},
3557
- )
3558
-
3559
4095
  def set_model_monitoring_credentials(
3560
4096
  self,
3561
4097
  project: str,
3562
- credentials: dict[str, str],
4098
+ credentials: dict[str, Optional[str]],
3563
4099
  replace_creds: bool,
3564
4100
  ) -> None:
3565
4101
  """
@@ -3570,8 +4106,8 @@ class HTTPRunDB(RunDBInterface):
3570
4106
  :param replace_creds: If True, will override the existing credentials.
3571
4107
  """
3572
4108
  self.api_call(
3573
- method=mlrun.common.types.HTTPMethod.POST,
3574
- path=f"projects/{project}/model-monitoring/set-model-monitoring-credentials",
4109
+ method=mlrun.common.types.HTTPMethod.PUT,
4110
+ path=f"projects/{project}/model-monitoring/credentials",
3575
4111
  params={**credentials, "replace_creds": replace_creds},
3576
4112
  )
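A sketch of storing monitoring credentials through the new ``PUT .../model-monitoring/credentials`` route, assuming ``db = mlrun.get_run_db()``; the credential key names are placeholders, not the real keys::

    import mlrun

    db = mlrun.get_run_db()

    db.set_model_monitoring_credentials(
        project="my-project",
        credentials={
            "stream_profile_name": "my-stream-profile",  # placeholder key names
            "tsdb_profile_name": "my-tsdb-profile",
        },
        replace_creds=True,
    )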
3577
4113
 
@@ -3714,8 +4250,8 @@ class HTTPRunDB(RunDBInterface):
3714
4250
  def get_hub_catalog(
3715
4251
  self,
3716
4252
  source_name: str,
3717
- version: str = None,
3718
- tag: str = None,
4253
+ version: Optional[str] = None,
4254
+ tag: Optional[str] = None,
3719
4255
  force_refresh: bool = False,
3720
4256
  ):
3721
4257
  """
@@ -3745,7 +4281,7 @@ class HTTPRunDB(RunDBInterface):
3745
4281
  self,
3746
4282
  source_name: str,
3747
4283
  item_name: str,
3748
- version: str = None,
4284
+ version: Optional[str] = None,
3749
4285
  tag: str = "latest",
3750
4286
  force_refresh: bool = False,
3751
4287
  ):
@@ -3775,7 +4311,7 @@ class HTTPRunDB(RunDBInterface):
3775
4311
  source_name: str,
3776
4312
  item_name: str,
3777
4313
  asset_name: str,
3778
- version: str = None,
4314
+ version: Optional[str] = None,
3779
4315
  tag: str = "latest",
3780
4316
  ):
3781
4317
  """
@@ -3897,18 +4433,27 @@ class HTTPRunDB(RunDBInterface):
3897
4433
  "operations/migrations",
3898
4434
  "Failed triggering migrations",
3899
4435
  )
3900
- if response.status_code == http.HTTPStatus.ACCEPTED:
3901
- background_task = mlrun.common.schemas.BackgroundTask(**response.json())
3902
- return self._wait_for_background_task_to_reach_terminal_state(
3903
- background_task.metadata.name
3904
- )
3905
- return None
4436
+ return self._wait_for_background_task_from_response(response)
4437
+
4438
+ def refresh_smtp_configuration(
4439
+ self,
4440
+ ) -> Optional[mlrun.common.schemas.BackgroundTask]:
4441
+ """Refresh smtp configuration and wait for the task to finish
4442
+
4443
+ :returns: :py:class:`~mlrun.common.schemas.BackgroundTask`.
4444
+ """
4445
+ response = self.api_call(
4446
+ "POST",
4447
+ "operations/refresh-smtp-configuration",
4448
+ "Failed refreshing smtp configuration",
4449
+ )
4450
+ return self._wait_for_background_task_from_response(response)
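A sketch of triggering the refresh and waiting on the background task, assuming ``db = mlrun.get_run_db()``::

    import mlrun

    db = mlrun.get_run_db()

    task = db.refresh_smtp_configuration()
    if task is not None:
        print(task.metadata.name, task.status.state)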
3906
4451
 
3907
4452
  def set_run_notifications(
3908
4453
  self,
3909
4454
  project: str,
3910
4455
  run_uid: str,
3911
- notifications: list[mlrun.model.Notification] = None,
4456
+ notifications: Optional[list[mlrun.model.Notification]] = None,
3912
4457
  ):
3913
4458
  """
3914
4459
  Set notifications on a run. This will override any existing notifications on the run.
@@ -3934,7 +4479,7 @@ class HTTPRunDB(RunDBInterface):
3934
4479
  self,
3935
4480
  project: str,
3936
4481
  schedule_name: str,
3937
- notifications: list[mlrun.model.Notification] = None,
4482
+ notifications: Optional[list[mlrun.model.Notification]] = None,
3938
4483
  ):
3939
4484
  """
3940
4485
  Set notifications on a schedule. This will override any existing notifications on the schedule.
@@ -3960,7 +4505,7 @@ class HTTPRunDB(RunDBInterface):
3960
4505
  self,
3961
4506
  notification_objects: list[mlrun.model.Notification],
3962
4507
  run_uid: str,
3963
- project: str = None,
4508
+ project: Optional[str] = None,
3964
4509
  mask_params: bool = True,
3965
4510
  ):
3966
4511
  """
@@ -3994,7 +4539,7 @@ class HTTPRunDB(RunDBInterface):
3994
4539
  source: Optional[str] = None,
3995
4540
  run_name: Optional[str] = None,
3996
4541
  namespace: Optional[str] = None,
3997
- notifications: list[mlrun.model.Notification] = None,
4542
+ notifications: Optional[list[mlrun.model.Notification]] = None,
3998
4543
  ) -> mlrun.common.schemas.WorkflowResponse:
3999
4544
  """
4000
4545
  Submitting a workflow for remote execution.
@@ -4216,6 +4761,7 @@ class HTTPRunDB(RunDBInterface):
4216
4761
  alert_name: str,
4217
4762
  alert_data: Union[dict, AlertConfig],
4218
4763
  project="",
4764
+ force_reset: bool = False,
4219
4765
  ) -> AlertConfig:
4220
4766
  """
4221
4767
  Create/modify an alert.
@@ -4223,6 +4769,7 @@ class HTTPRunDB(RunDBInterface):
4223
4769
  :param alert_name: The name of the alert.
4224
4770
  :param alert_data: The data of the alert.
4225
4771
  :param project: The project that the alert belongs to.
4772
+ :param force_reset: If True and the alert already exists, the alert will be reset.
4226
4773
  :returns: The created/modified alert.
4227
4774
  """
4228
4775
  if not alert_data:
@@ -4247,7 +4794,10 @@ class HTTPRunDB(RunDBInterface):
4247
4794
 
4248
4795
  alert_data = alert_instance.to_dict()
4249
4796
  body = _as_json(alert_data)
4250
- response = self.api_call("PUT", endpoint_path, error_message, body=body)
4797
+ params = {"force_reset": bool2str(force_reset)} if force_reset else {}
4798
+ response = self.api_call(
4799
+ "PUT", endpoint_path, error_message, params=params, body=body
4800
+ )
4251
4801
  return AlertConfig.from_dict(response.json())
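A hedged sketch of the new force_reset flag; alert_config is assumed to be a pre-built mlrun.alerts.alert.AlertConfig object created elsewhere:

    import mlrun

    db = mlrun.get_run_db()
    stored = db.store_alert_config(
        alert_name="my-alert",
        alert_data=alert_config,  # assumed pre-built AlertConfig (or dict)
        project="my-project",
        force_reset=True,  # reset the alert if it already exists
    )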
4252
4802
 
4253
4803
  def get_alert_config(self, alert_name: str, project="") -> AlertConfig:
@@ -4265,20 +4815,33 @@ class HTTPRunDB(RunDBInterface):
4265
4815
  response = self.api_call("GET", endpoint_path, error_message)
4266
4816
  return AlertConfig.from_dict(response.json())
4267
4817
 
4268
- def list_alerts_configs(self, project="") -> list[AlertConfig]:
4818
+ def list_alerts_configs(
4819
+ self, project="", limit: Optional[int] = None, offset: Optional[int] = None
4820
+ ) -> list[AlertConfig]:
4269
4821
  """
4270
4822
  Retrieve list of alerts of a project.
4271
4823
 
4272
4824
  :param project: The project name.
4825
+ :param limit: The maximum number of alerts to return.
4826
+ Defaults to `mlconf.alerts.default_list_alert_configs_limit` if not provided.
4827
+ :param offset: The number of alerts to skip.
4273
4828
 
4274
4829
  :returns: All the alerts objects of the project.
4275
4830
  """
4276
4831
  project = project or config.default_project
4277
4832
  endpoint_path = f"projects/{project}/alerts"
4278
4833
  error_message = f"get alerts {project}/alerts"
4279
- response = self.api_call("GET", endpoint_path, error_message).json()
4834
+ params = {}
4835
+ # TODO: Deprecate limit and offset when pagination is implemented
4836
+ if limit:
4837
+ params["page-size"] = limit
4838
+ if offset:
4839
+ params["offset"] = offset
4840
+ response = self.api_call(
4841
+ "GET", endpoint_path, error_message, params=params
4842
+ ).json()
4280
4843
  results = []
4281
- for item in response:
4844
+ for item in response.get("alerts", []):
4282
4845
  results.append(AlertConfig(**item))
4283
4846
  return results
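A short sketch of the new limit/offset knobs (pre-pagination, per the TODO above):

    import mlrun

    db = mlrun.get_run_db()
    # fetch up to 20 alert configs, skipping the first 10
    alerts = db.list_alerts_configs(project="my-project", limit=20, offset=10)
    for alert in alerts:
        print(alert.name)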
4284
4847
 
@@ -4334,6 +4897,515 @@ class HTTPRunDB(RunDBInterface):
4334
4897
  results.append(mlrun.common.schemas.AlertTemplate(**item))
4335
4898
  return results
4336
4899
 
4900
+ def list_alert_activations(
4901
+ self,
4902
+ project: Optional[str] = None,
4903
+ name: Optional[str] = None,
4904
+ since: Optional[datetime] = None,
4905
+ until: Optional[datetime] = None,
4906
+ entity: Optional[str] = None,
4907
+ severity: Optional[
4908
+ list[Union[mlrun.common.schemas.alert.AlertSeverity, str]]
4909
+ ] = None,
4910
+ entity_kind: Optional[
4911
+ Union[mlrun.common.schemas.alert.EventEntityKind, str]
4912
+ ] = None,
4913
+ event_kind: Optional[Union[mlrun.common.schemas.alert.EventKind, str]] = None,
4914
+ ) -> mlrun.common.schemas.AlertActivations:
4915
+ """
4916
+ Retrieve a list of all alert activations.
4917
+
4918
+ :param project: The project name to filter by. If None, results are not filtered by project.
4919
+ :param name: The alert name to filter by. Supports exact matching or partial matching if prefixed with `~`.
4920
+ :param since: Filters for alert activations occurring after this timestamp.
4921
+ :param until: Filters for alert activations occurring before this timestamp.
4922
+ :param entity: The entity ID to filter by. Supports wildcard matching if prefixed with `~`.
4923
+ :param severity: A list of severity levels to filter by (e.g., ["high", "low"]).
4924
+ :param entity_kind: The kind of entity (e.g., "job", "endpoint") to filter by.
4925
+ :param event_kind: The kind of event (e.g., "data-drift-detected", "failed") to filter by.
4926
+
4927
+ :returns: A list of alert activations matching the provided filters.
4928
+ """
4929
+
4930
+ alert_activations, _ = self._list_alert_activations(
4931
+ project=project,
4932
+ name=name,
4933
+ since=since,
4934
+ until=until,
4935
+ entity=entity,
4936
+ severity=severity,
4937
+ entity_kind=entity_kind,
4938
+ event_kind=event_kind,
4939
+ return_all=True,
4940
+ )
4941
+ return alert_activations
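A minimal filtering sketch, using the severity values cited in the docstring above; project name and time window are placeholders:

    import mlrun
    from datetime import datetime, timedelta

    db = mlrun.get_run_db()
    activations = db.list_alert_activations(
        project="my-project",
        since=datetime.utcnow() - timedelta(days=1),
        severity=["high"],
    )
    for activation in activations.activations:
        print(activation)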
4942
+
4943
+ def paginated_list_alert_activations(
4944
+ self,
4945
+ *args,
4946
+ page: Optional[int] = None,
4947
+ page_size: Optional[int] = None,
4948
+ page_token: Optional[str] = None,
4949
+ **kwargs,
4950
+ ) -> tuple[AlertActivations, Optional[str]]:
4951
+ """List alerts activations with support for pagination and various filtering options.
4952
+
4953
+ This method retrieves a paginated list of alert activations based on the specified filter parameters.
4954
+ Pagination is controlled using the `page`, `page_size`, and `page_token` parameters. The method
4955
+ will return a list of alert activations that match the filtering criteria provided.
4956
+
4957
+ For detailed information about the parameters, see the list_alert_activations method:
4958
+ :py:func:`~list_alert_activations`.
4959
+
4960
+ Examples::
4961
+
4962
+ # Fetch first page of alert activations with page size of 5
4963
+ alert_activations, token = db.paginated_list_alert_activations(
4964
+ project="my-project", page_size=5
4965
+ )
4966
+ # Fetch next page using the pagination token from the previous response
4967
+ alert_activations, token = db.paginated_list_alert_activations(
4968
+ project="my-project", page_token=token
4969
+ )
4970
+ # Fetch alert activations for a specific page (e.g., page 3)
4971
+ alert_activations, token = db.paginated_list_alert_activations(
4972
+ project="my-project", page=3, page_size=5
4973
+ )
4974
+
4975
+ # Automatically iterate over all pages without explicitly specifying the page number
4976
+ alert_activations = []
4977
+ token = None
4978
+ while True:
4979
+ page_alert_activations, token = db.paginated_list_alert_activations(
4980
+ project="my-project", page_token=token, page_size=5
4981
+ )
4982
+ alert_activations.extend(page_alert_activations)
4983
+
4984
+ # If token is None and page_alert_activations is empty, we've reached the end (no more activations).
4985
+ # If token is None and page_alert_activations is not empty, we've fetched the last page of activations.
4986
+ if not token:
4987
+ break
4988
+ print(f"Total alert activations retrieved: {len(alert_activations)}")
4989
+
4990
+ :param page: The page number to retrieve. If not provided, the next page will be retrieved.
4991
+ :param page_size: The number of items per page to retrieve. Up to `page_size` responses are expected.
4992
+ Defaults to `mlrun.mlconf.httpdb.pagination.default_page_size` if not provided.
4993
+ :param page_token: A pagination token used to retrieve the next page of results. Should not be provided
4994
+ for the first request.
4995
+
4996
+ :returns: A tuple containing the list of alert activations and an optional `page_token` for pagination.
4997
+ """
4998
+ return self._list_alert_activations(
4999
+ *args,
5000
+ page=page,
5001
+ page_size=page_size,
5002
+ page_token=page_token,
5003
+ return_all=False,
5004
+ **kwargs,
5005
+ )
5006
+
5007
+ def get_alert_activation(
5008
+ self,
5009
+ project,
5010
+ activation_id,
5011
+ ) -> mlrun.common.schemas.AlertActivation:
5012
+ """
5013
+ Retrieve an alert activation by its ID.
5014
+
5015
+ :param project: The project to which the alert activation belongs.
5016
+ :param activation_id: The alert activation ID.
5017
+ :returns: The alert activation object.
5018
+ """
5019
+ project = project or config.default_project
5020
+
5021
+ error = "get alert activation"
5022
+ path = f"projects/{project}/alert-activations/{activation_id}"
5023
+
5024
+ response = self.api_call("GET", path, error)
5025
+
5026
+ return mlrun.common.schemas.AlertActivation(**response.json())
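A short sketch; the activation ID is a placeholder rather than a known value:

    import mlrun

    db = mlrun.get_run_db()
    activation = db.get_alert_activation(project="my-project", activation_id=42)  # placeholder ID
    print(activation)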
5027
+
5028
+ def get_project_summary(
5029
+ self, project: Optional[str] = None
5030
+ ) -> mlrun.common.schemas.ProjectSummary:
5031
+ """
5032
+ Retrieve the summary of a project.
5033
+
5034
+ :param project: The project name for which to retrieve the summary.
5035
+ :returns: A summary of the project.
5036
+ """
5037
+ project = project or config.default_project
5038
+
5039
+ endpoint_path = f"project-summaries/{project}"
5040
+ error_message = f"Failed retrieving project summary for {project}"
5041
+ response = self.api_call("GET", endpoint_path, error_message)
5042
+ return mlrun.common.schemas.ProjectSummary(**response.json())
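A minimal sketch; when the project argument is omitted the client falls back to config.default_project:

    import mlrun

    db = mlrun.get_run_db()
    summary = db.get_project_summary(project="my-project")
    print(summary)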
5043
+
5044
+ @staticmethod
5045
+ def _parse_labels(
5046
+ labels: Optional[Union[str, dict[str, Optional[str]], list[str]]],
5047
+ ):
5048
+ """
5049
+ Parse labels to support providing a dictionary from the SDK,
5050
+ which may not be directly supported in the endpoints.
5051
+
5052
+ :param labels: The labels to parse, which can be a dictionary, a list of strings,
5053
+ or a comma-separated string. This function converts them into a list
5054
+ of labels in the format 'key=value' or 'key'.
5055
+ :return: A list of parsed labels in the format 'key=value' or 'key'.
5056
+ :raises MLRunValueError: If the labels format is invalid.
5057
+ """
5058
+ try:
5059
+ return mlrun.common.schemas.common.LabelsModel(labels=labels).labels
5060
+ except pydantic.v1.error_wrappers.ValidationError as exc:
5061
+ raise mlrun.errors.MLRunValueError(
5062
+ "Invalid labels format. Must be a dictionary of strings, a list of strings, "
5063
+ "or a comma-separated string."
5064
+ ) from exc
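For illustration, the three label forms the helper above normalizes before they are sent to the API (how a None dict value maps to a bare key is an assumption inferred from the docstring):

    import mlrun

    labels_as_dict = {"framework": "sklearn", "gpu": None}  # None presumably yields a bare "gpu" label
    labels_as_list = ["framework=sklearn", "gpu"]
    labels_as_str = "framework=sklearn,gpu"
    runs = mlrun.get_run_db().list_runs(project="my-project", labels=labels_as_dict)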
5065
+
5066
+ def _list_artifacts(
5067
+ self,
5068
+ name: Optional[str] = None,
5069
+ project: Optional[str] = None,
5070
+ tag: Optional[str] = None,
5071
+ labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
5072
+ since: Optional[datetime] = None,
5073
+ until: Optional[datetime] = None,
5074
+ iter: Optional[int] = None,
5075
+ best_iteration: bool = False,
5076
+ kind: Optional[str] = None,
5077
+ category: Union[str, mlrun.common.schemas.ArtifactCategories] = None,
5078
+ tree: Optional[str] = None,
5079
+ producer_uri: Optional[str] = None,
5080
+ format_: Optional[
5081
+ mlrun.common.formatters.ArtifactFormat
5082
+ ] = mlrun.common.formatters.ArtifactFormat.full,
5083
+ limit: Optional[int] = None,
5084
+ partition_by: Optional[
5085
+ Union[mlrun.common.schemas.ArtifactPartitionByField, str]
5086
+ ] = None,
5087
+ rows_per_partition: int = 1,
5088
+ partition_sort_by: Optional[
5089
+ Union[mlrun.common.schemas.SortField, str]
5090
+ ] = mlrun.common.schemas.SortField.updated,
5091
+ partition_order: Union[
5092
+ mlrun.common.schemas.OrderType, str
5093
+ ] = mlrun.common.schemas.OrderType.desc,
5094
+ page: Optional[int] = None,
5095
+ page_size: Optional[int] = None,
5096
+ page_token: Optional[str] = None,
5097
+ return_all: bool = False,
5098
+ ) -> tuple[ArtifactList, Optional[str]]:
5099
+ """Handles list artifacts, both paginated and not."""
5100
+
5101
+ project = project or config.default_project
5102
+ labels = self._parse_labels(labels)
5103
+
5104
+ if limit:
5105
+ # TODO: Remove this in 1.11.0
5106
+ warnings.warn(
5107
+ "'limit' is deprecated and will be removed in 1.11.0. Use 'page' and 'page_size' instead.",
5108
+ FutureWarning,
5109
+ )
5110
+
5111
+ params = {
5112
+ "name": name,
5113
+ "tag": tag,
5114
+ "label": labels,
5115
+ "iter": iter,
5116
+ "best-iteration": best_iteration,
5117
+ "kind": kind,
5118
+ "category": category,
5119
+ "tree": tree,
5120
+ "format": format_,
5121
+ "producer_uri": producer_uri,
5122
+ "since": datetime_to_iso(since),
5123
+ "until": datetime_to_iso(until),
5124
+ "limit": limit,
5125
+ "page": page,
5126
+ "page-size": page_size,
5127
+ "page-token": page_token,
5128
+ }
5129
+
5130
+ if partition_by:
5131
+ params.update(
5132
+ self._generate_partition_by_params(
5133
+ partition_by,
5134
+ rows_per_partition,
5135
+ partition_sort_by,
5136
+ partition_order,
5137
+ )
5138
+ )
5139
+ error = "list artifacts"
5140
+ endpoint_path = f"projects/{project}/artifacts"
5141
+
5142
+ # Fetch the responses, either one page or all based on `return_all`
5143
+ responses = self.paginated_api_call(
5144
+ "GET",
5145
+ endpoint_path,
5146
+ error,
5147
+ params=params,
5148
+ version="v2",
5149
+ return_all=return_all,
5150
+ )
5151
+ paginated_responses, token = self.process_paginated_responses(
5152
+ responses, "artifacts"
5153
+ )
5154
+
5155
+ values = ArtifactList(paginated_responses)
5156
+ values.tag = tag
5157
+ return values, token
5158
+
5159
+ def _list_functions(
5160
+ self,
5161
+ name: Optional[str] = None,
5162
+ project: Optional[str] = None,
5163
+ tag: Optional[str] = None,
5164
+ kind: Optional[str] = None,
5165
+ labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
5166
+ format_: Optional[str] = None,
5167
+ since: Optional[datetime] = None,
5168
+ until: Optional[datetime] = None,
5169
+ states: typing.Optional[list[mlrun.common.schemas.FunctionState]] = None,
5170
+ page: Optional[int] = None,
5171
+ page_size: Optional[int] = None,
5172
+ page_token: Optional[str] = None,
5173
+ return_all: bool = False,
5174
+ ) -> tuple[list, Optional[str]]:
5175
+ """Handles list functions, both paginated and not."""
5176
+
5177
+ project = project or config.default_project
5178
+ labels = self._parse_labels(labels)
5179
+ params = {
5180
+ "name": name,
5181
+ "tag": tag,
5182
+ "kind": kind,
5183
+ "label": labels,
5184
+ "since": datetime_to_iso(since),
5185
+ "until": datetime_to_iso(until),
5186
+ "format": format_,
5187
+ "state": states or None,
5188
+ "page": page,
5189
+ "page-size": page_size,
5190
+ "page-token": page_token,
5191
+ }
5192
+ error = "list functions"
5193
+ path = f"projects/{project}/functions"
5194
+
5195
+ # Fetch the responses, either one page or all based on `return_all`
5196
+ responses = self.paginated_api_call(
5197
+ "GET", path, error, params=params, return_all=return_all
5198
+ )
5199
+ paginated_responses, token = self.process_paginated_responses(
5200
+ responses, "funcs"
5201
+ )
5202
+ return paginated_responses, token
5203
+
5204
+ def _list_runs(
5205
+ self,
5206
+ name: Optional[str] = None,
5207
+ uid: Optional[Union[str, list[str]]] = None,
5208
+ project: Optional[str] = None,
5209
+ labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
5210
+ state: Optional[
5211
+ mlrun.common.runtimes.constants.RunStates
5212
+ ] = None, # Backward compatibility
5213
+ states: typing.Optional[list[mlrun.common.runtimes.constants.RunStates]] = None,
5214
+ sort: bool = True,
5215
+ iter: bool = False,
5216
+ start_time_from: Optional[datetime] = None,
5217
+ start_time_to: Optional[datetime] = None,
5218
+ last_update_time_from: Optional[datetime] = None,
5219
+ last_update_time_to: Optional[datetime] = None,
5220
+ end_time_from: Optional[datetime] = None,
5221
+ end_time_to: Optional[datetime] = None,
5222
+ partition_by: Optional[
5223
+ Union[mlrun.common.schemas.RunPartitionByField, str]
5224
+ ] = None,
5225
+ rows_per_partition: int = 1,
5226
+ partition_sort_by: Optional[Union[mlrun.common.schemas.SortField, str]] = None,
5227
+ partition_order: Union[
5228
+ mlrun.common.schemas.OrderType, str
5229
+ ] = mlrun.common.schemas.OrderType.desc,
5230
+ max_partitions: int = 0,
5231
+ with_notifications: bool = False,
5232
+ page: Optional[int] = None,
5233
+ page_size: Optional[int] = None,
5234
+ page_token: Optional[str] = None,
5235
+ return_all: bool = False,
5236
+ ) -> tuple[RunList, Optional[str]]:
5237
+ """Handles list runs, both paginated and not."""
5238
+
5239
+ project = project or config.default_project
5240
+ if with_notifications:
5241
+ logger.warning(
5242
+ "Local run notifications are not persisted in the DB, therefore local runs will not be returned when "
5243
+ "using the `with_notifications` flag."
5244
+ )
5245
+
5246
+ if state:
5247
+ # TODO: Remove this in 1.10.0
5248
+ warnings.warn(
5249
+ "'state' is deprecated in 1.7.0 and will be removed in 1.10.0. Use 'states' instead.",
5250
+ FutureWarning,
5251
+ )
5252
+
5253
+ labels = self._parse_labels(labels)
5254
+
5255
+ if (
5256
+ not name
5257
+ and not uid
5258
+ and not labels
5259
+ and not state
5260
+ and not states
5261
+ and not start_time_from
5262
+ and not start_time_to
5263
+ and not last_update_time_from
5264
+ and not last_update_time_to
5265
+ and not end_time_from
5266
+ and not end_time_to
5267
+ and not partition_by
5268
+ and not partition_sort_by
5269
+ and not iter
5270
+ ):
5271
+ # default to last week on no filter
5272
+ start_time_from = datetime.now() - timedelta(days=7)
5273
+ partition_by = mlrun.common.schemas.RunPartitionByField.project_and_name
5274
+ partition_sort_by = mlrun.common.schemas.SortField.updated
5275
+
5276
+ params = {
5277
+ "name": name,
5278
+ "uid": uid,
5279
+ "label": labels,
5280
+ "state": (
5281
+ mlrun.utils.helpers.as_list(state)
5282
+ if state is not None
5283
+ else states or None
5284
+ ),
5285
+ "sort": bool2str(sort),
5286
+ "iter": bool2str(iter),
5287
+ "start_time_from": datetime_to_iso(start_time_from),
5288
+ "start_time_to": datetime_to_iso(start_time_to),
5289
+ "last_update_time_from": datetime_to_iso(last_update_time_from),
5290
+ "last_update_time_to": datetime_to_iso(last_update_time_to),
5291
+ "end_time_from": datetime_to_iso(end_time_from),
5292
+ "end_time_to": datetime_to_iso(end_time_to),
5293
+ "with-notifications": with_notifications,
5294
+ "page": page,
5295
+ "page-size": page_size,
5296
+ "page-token": page_token,
5297
+ }
5298
+
5299
+ if partition_by:
5300
+ params.update(
5301
+ self._generate_partition_by_params(
5302
+ partition_by,
5303
+ rows_per_partition,
5304
+ partition_sort_by,
5305
+ partition_order,
5306
+ max_partitions,
5307
+ )
5308
+ )
5309
+ error = "list runs"
5310
+ _path = self._path_of("runs", project)
5311
+
5312
+ # Fetch the responses, either one page or all based on `return_all`
5313
+ responses = self.paginated_api_call(
5314
+ "GET", _path, error, params=params, return_all=return_all
5315
+ )
5316
+ paginated_responses, token = self.process_paginated_responses(responses, "runs")
5317
+ return RunList(paginated_responses), token
5318
+
5319
+ def _list_alert_activations(
5320
+ self,
5321
+ project: Optional[str] = None,
5322
+ name: Optional[str] = None,
5323
+ since: Optional[datetime] = None,
5324
+ until: Optional[datetime] = None,
5325
+ entity: Optional[str] = None,
5326
+ severity: Optional[
5327
+ Union[
5328
+ mlrun.common.schemas.alert.AlertSeverity,
5329
+ str,
5330
+ list[Union[mlrun.common.schemas.alert.AlertSeverity, str]],
5331
+ ]
5332
+ ] = None,
5333
+ entity_kind: Optional[
5334
+ Union[mlrun.common.schemas.alert.EventEntityKind, str]
5335
+ ] = None,
5336
+ event_kind: Optional[Union[mlrun.common.schemas.alert.EventKind, str]] = None,
5337
+ page: Optional[int] = None,
5338
+ page_size: Optional[int] = None,
5339
+ page_token: Optional[str] = None,
5340
+ return_all: bool = False,
5341
+ ) -> tuple[mlrun.common.schemas.AlertActivations, Optional[str]]:
5342
+ project = project or config.default_project
5343
+ params = {
5344
+ "name": name,
5345
+ "since": datetime_to_iso(since),
5346
+ "until": datetime_to_iso(until),
5347
+ "entity": entity,
5348
+ "severity": mlrun.utils.helpers.as_list(severity) if severity else None,
5349
+ "entity-kind": entity_kind,
5350
+ "event-kind": event_kind,
5351
+ "page": page,
5352
+ "page-size": page_size,
5353
+ "page-token": page_token,
5354
+ }
5355
+ error = "list alert activations"
5356
+ path = f"projects/{project}/alert-activations"
5357
+
5358
+ # Fetch the responses, either one page or all based on `return_all`
5359
+ responses = self.paginated_api_call(
5360
+ "GET", path, error, params=params, return_all=return_all
5361
+ )
5362
+ paginated_responses, token = self.process_paginated_responses(
5363
+ responses, "activations"
5364
+ )
5365
+ paginated_results = mlrun.common.schemas.AlertActivations(
5366
+ activations=[
5367
+ mlrun.common.schemas.AlertActivation(**item)
5368
+ for item in paginated_responses
5369
+ ]
5370
+ )
5371
+
5372
+ return paginated_results, token
5373
+
5374
+ def _wait_for_background_task_from_response(self, response):
5375
+ if response.status_code == http.HTTPStatus.ACCEPTED:
5376
+ background_task = mlrun.common.schemas.BackgroundTask(**response.json())
5377
+ return self._wait_for_background_task_to_reach_terminal_state(
5378
+ background_task.metadata.name
5379
+ )
5380
+ return None
5381
+
5382
+ def _resolve_page_params(self, params: typing.Optional[dict]) -> dict:
5383
+ """
5384
+ Resolve the page parameters, setting defaults where necessary.
5385
+ """
5386
+ page_params = deepcopy(params) or {}
5387
+ if page_params.get("page-token") is None and page_params.get("page") is None:
5388
+ page_params["page"] = 1
5389
+ if page_params.get("page-size") is None:
5390
+ page_size = config.httpdb.pagination.default_page_size
5391
+
5392
+ if page_params.get("limit") is not None:
5393
+ page_size = page_params["limit"]
5394
+
5395
+ # limit and page/page size are conflicting
5396
+ page_params.pop("limit")
5397
+ page_params["page-size"] = page_size
5398
+
5399
+ # this can happen only when page-size was explicitly set along with limit;
5400
+ # drop limit so the API's validation of conflicting parameters does not reject the request,
5401
+ # and simply fall back to page-size.
5402
+ if page_params.get("page-size") and page_params.get("limit"):
5403
+ logger.warning(
5404
+ "Both 'limit' and 'page-size' are provided, using 'page-size'."
5405
+ )
5406
+ page_params.pop("limit")
5407
+ return page_params
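A rough illustration of the intended resolution; the default page size comes from mlrun.mlconf.httpdb.pagination.default_page_size and its value is not assumed here:

    import mlrun

    db = mlrun.get_run_db()
    resolved = db._resolve_page_params({"limit": 50})
    # expected shape: {"page": 1, "page-size": 50} -- "limit" is translated into "page-size"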
5408
+
4337
5409
 
4338
5410
  def _as_json(obj):
4339
5411
  fn = getattr(obj, "to_json", None)