mlrun 1.7.2rc3__py3-none-any.whl → 1.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mlrun was flagged by the registry's automated checks.

Files changed (275)
  1. mlrun/__init__.py +26 -22
  2. mlrun/__main__.py +15 -16
  3. mlrun/alerts/alert.py +150 -15
  4. mlrun/api/schemas/__init__.py +1 -9
  5. mlrun/artifacts/__init__.py +2 -3
  6. mlrun/artifacts/base.py +62 -19
  7. mlrun/artifacts/dataset.py +17 -17
  8. mlrun/artifacts/document.py +454 -0
  9. mlrun/artifacts/manager.py +28 -18
  10. mlrun/artifacts/model.py +91 -59
  11. mlrun/artifacts/plots.py +2 -2
  12. mlrun/common/constants.py +8 -0
  13. mlrun/common/formatters/__init__.py +1 -0
  14. mlrun/common/formatters/artifact.py +1 -1
  15. mlrun/common/formatters/feature_set.py +2 -0
  16. mlrun/common/formatters/function.py +1 -0
  17. mlrun/{model_monitoring/db/stores/v3io_kv/__init__.py → common/formatters/model_endpoint.py} +17 -0
  18. mlrun/common/formatters/pipeline.py +1 -2
  19. mlrun/common/formatters/project.py +9 -0
  20. mlrun/common/model_monitoring/__init__.py +0 -5
  21. mlrun/common/model_monitoring/helpers.py +12 -62
  22. mlrun/common/runtimes/constants.py +25 -4
  23. mlrun/common/schemas/__init__.py +9 -5
  24. mlrun/common/schemas/alert.py +114 -19
  25. mlrun/common/schemas/api_gateway.py +3 -3
  26. mlrun/common/schemas/artifact.py +22 -9
  27. mlrun/common/schemas/auth.py +8 -4
  28. mlrun/common/schemas/background_task.py +7 -7
  29. mlrun/common/schemas/client_spec.py +4 -4
  30. mlrun/common/schemas/clusterization_spec.py +2 -2
  31. mlrun/common/schemas/common.py +53 -3
  32. mlrun/common/schemas/constants.py +15 -0
  33. mlrun/common/schemas/datastore_profile.py +1 -1
  34. mlrun/common/schemas/feature_store.py +9 -9
  35. mlrun/common/schemas/frontend_spec.py +4 -4
  36. mlrun/common/schemas/function.py +10 -10
  37. mlrun/common/schemas/hub.py +1 -1
  38. mlrun/common/schemas/k8s.py +3 -3
  39. mlrun/common/schemas/memory_reports.py +3 -3
  40. mlrun/common/schemas/model_monitoring/__init__.py +4 -8
  41. mlrun/common/schemas/model_monitoring/constants.py +127 -46
  42. mlrun/common/schemas/model_monitoring/grafana.py +18 -12
  43. mlrun/common/schemas/model_monitoring/model_endpoints.py +154 -160
  44. mlrun/common/schemas/notification.py +24 -3
  45. mlrun/common/schemas/object.py +1 -1
  46. mlrun/common/schemas/pagination.py +4 -4
  47. mlrun/common/schemas/partition.py +142 -0
  48. mlrun/common/schemas/pipeline.py +3 -3
  49. mlrun/common/schemas/project.py +26 -18
  50. mlrun/common/schemas/runs.py +3 -3
  51. mlrun/common/schemas/runtime_resource.py +5 -5
  52. mlrun/common/schemas/schedule.py +1 -1
  53. mlrun/common/schemas/secret.py +1 -1
  54. mlrun/{model_monitoring/db/stores/sqldb/__init__.py → common/schemas/serving.py} +10 -1
  55. mlrun/common/schemas/tag.py +3 -3
  56. mlrun/common/schemas/workflow.py +6 -5
  57. mlrun/common/types.py +1 -0
  58. mlrun/config.py +157 -89
  59. mlrun/data_types/__init__.py +5 -3
  60. mlrun/data_types/infer.py +13 -3
  61. mlrun/data_types/spark.py +2 -1
  62. mlrun/datastore/__init__.py +59 -18
  63. mlrun/datastore/alibaba_oss.py +4 -1
  64. mlrun/datastore/azure_blob.py +4 -1
  65. mlrun/datastore/base.py +19 -24
  66. mlrun/datastore/datastore.py +10 -4
  67. mlrun/datastore/datastore_profile.py +178 -45
  68. mlrun/datastore/dbfs_store.py +4 -1
  69. mlrun/datastore/filestore.py +4 -1
  70. mlrun/datastore/google_cloud_storage.py +4 -1
  71. mlrun/datastore/hdfs.py +4 -1
  72. mlrun/datastore/inmem.py +4 -1
  73. mlrun/datastore/redis.py +4 -1
  74. mlrun/datastore/s3.py +14 -3
  75. mlrun/datastore/sources.py +89 -92
  76. mlrun/datastore/store_resources.py +7 -4
  77. mlrun/datastore/storeytargets.py +51 -16
  78. mlrun/datastore/targets.py +38 -31
  79. mlrun/datastore/utils.py +87 -4
  80. mlrun/datastore/v3io.py +4 -1
  81. mlrun/datastore/vectorstore.py +291 -0
  82. mlrun/datastore/wasbfs/fs.py +13 -12
  83. mlrun/db/base.py +286 -100
  84. mlrun/db/httpdb.py +1562 -490
  85. mlrun/db/nopdb.py +250 -83
  86. mlrun/errors.py +6 -2
  87. mlrun/execution.py +194 -50
  88. mlrun/feature_store/__init__.py +2 -10
  89. mlrun/feature_store/api.py +20 -458
  90. mlrun/feature_store/common.py +9 -9
  91. mlrun/feature_store/feature_set.py +20 -18
  92. mlrun/feature_store/feature_vector.py +105 -479
  93. mlrun/feature_store/feature_vector_utils.py +466 -0
  94. mlrun/feature_store/retrieval/base.py +15 -11
  95. mlrun/feature_store/retrieval/job.py +2 -1
  96. mlrun/feature_store/retrieval/storey_merger.py +1 -1
  97. mlrun/feature_store/steps.py +3 -3
  98. mlrun/features.py +30 -13
  99. mlrun/frameworks/__init__.py +1 -2
  100. mlrun/frameworks/_common/__init__.py +1 -2
  101. mlrun/frameworks/_common/artifacts_library.py +2 -2
  102. mlrun/frameworks/_common/mlrun_interface.py +10 -6
  103. mlrun/frameworks/_common/model_handler.py +31 -31
  104. mlrun/frameworks/_common/producer.py +3 -1
  105. mlrun/frameworks/_dl_common/__init__.py +1 -2
  106. mlrun/frameworks/_dl_common/loggers/__init__.py +1 -2
  107. mlrun/frameworks/_dl_common/loggers/mlrun_logger.py +4 -4
  108. mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +3 -3
  109. mlrun/frameworks/_ml_common/__init__.py +1 -2
  110. mlrun/frameworks/_ml_common/loggers/__init__.py +1 -2
  111. mlrun/frameworks/_ml_common/model_handler.py +21 -21
  112. mlrun/frameworks/_ml_common/plans/__init__.py +1 -2
  113. mlrun/frameworks/_ml_common/plans/confusion_matrix_plan.py +3 -1
  114. mlrun/frameworks/_ml_common/plans/dataset_plan.py +3 -3
  115. mlrun/frameworks/_ml_common/plans/roc_curve_plan.py +4 -4
  116. mlrun/frameworks/auto_mlrun/__init__.py +1 -2
  117. mlrun/frameworks/auto_mlrun/auto_mlrun.py +22 -15
  118. mlrun/frameworks/huggingface/__init__.py +1 -2
  119. mlrun/frameworks/huggingface/model_server.py +9 -9
  120. mlrun/frameworks/lgbm/__init__.py +47 -44
  121. mlrun/frameworks/lgbm/callbacks/__init__.py +1 -2
  122. mlrun/frameworks/lgbm/callbacks/logging_callback.py +4 -2
  123. mlrun/frameworks/lgbm/callbacks/mlrun_logging_callback.py +4 -2
  124. mlrun/frameworks/lgbm/mlrun_interfaces/__init__.py +1 -2
  125. mlrun/frameworks/lgbm/mlrun_interfaces/mlrun_interface.py +5 -5
  126. mlrun/frameworks/lgbm/model_handler.py +15 -11
  127. mlrun/frameworks/lgbm/model_server.py +11 -7
  128. mlrun/frameworks/lgbm/utils.py +2 -2
  129. mlrun/frameworks/onnx/__init__.py +1 -2
  130. mlrun/frameworks/onnx/dataset.py +3 -3
  131. mlrun/frameworks/onnx/mlrun_interface.py +2 -2
  132. mlrun/frameworks/onnx/model_handler.py +7 -5
  133. mlrun/frameworks/onnx/model_server.py +8 -6
  134. mlrun/frameworks/parallel_coordinates.py +11 -11
  135. mlrun/frameworks/pytorch/__init__.py +22 -23
  136. mlrun/frameworks/pytorch/callbacks/__init__.py +1 -2
  137. mlrun/frameworks/pytorch/callbacks/callback.py +2 -1
  138. mlrun/frameworks/pytorch/callbacks/logging_callback.py +15 -8
  139. mlrun/frameworks/pytorch/callbacks/mlrun_logging_callback.py +19 -12
  140. mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +22 -15
  141. mlrun/frameworks/pytorch/callbacks_handler.py +36 -30
  142. mlrun/frameworks/pytorch/mlrun_interface.py +17 -17
  143. mlrun/frameworks/pytorch/model_handler.py +21 -17
  144. mlrun/frameworks/pytorch/model_server.py +13 -9
  145. mlrun/frameworks/sklearn/__init__.py +19 -18
  146. mlrun/frameworks/sklearn/estimator.py +2 -2
  147. mlrun/frameworks/sklearn/metric.py +3 -3
  148. mlrun/frameworks/sklearn/metrics_library.py +8 -6
  149. mlrun/frameworks/sklearn/mlrun_interface.py +3 -2
  150. mlrun/frameworks/sklearn/model_handler.py +4 -3
  151. mlrun/frameworks/tf_keras/__init__.py +11 -12
  152. mlrun/frameworks/tf_keras/callbacks/__init__.py +1 -2
  153. mlrun/frameworks/tf_keras/callbacks/logging_callback.py +17 -14
  154. mlrun/frameworks/tf_keras/callbacks/mlrun_logging_callback.py +15 -12
  155. mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +21 -18
  156. mlrun/frameworks/tf_keras/model_handler.py +17 -13
  157. mlrun/frameworks/tf_keras/model_server.py +12 -8
  158. mlrun/frameworks/xgboost/__init__.py +19 -18
  159. mlrun/frameworks/xgboost/model_handler.py +13 -9
  160. mlrun/k8s_utils.py +2 -5
  161. mlrun/launcher/base.py +3 -4
  162. mlrun/launcher/client.py +2 -2
  163. mlrun/launcher/local.py +6 -2
  164. mlrun/launcher/remote.py +1 -1
  165. mlrun/lists.py +8 -4
  166. mlrun/model.py +132 -46
  167. mlrun/model_monitoring/__init__.py +3 -5
  168. mlrun/model_monitoring/api.py +113 -98
  169. mlrun/model_monitoring/applications/__init__.py +0 -5
  170. mlrun/model_monitoring/applications/_application_steps.py +81 -50
  171. mlrun/model_monitoring/applications/base.py +467 -14
  172. mlrun/model_monitoring/applications/context.py +212 -134
  173. mlrun/model_monitoring/{db/stores/base → applications/evidently}/__init__.py +6 -2
  174. mlrun/model_monitoring/applications/evidently/base.py +146 -0
  175. mlrun/model_monitoring/applications/histogram_data_drift.py +89 -56
  176. mlrun/model_monitoring/applications/results.py +67 -15
  177. mlrun/model_monitoring/controller.py +701 -315
  178. mlrun/model_monitoring/db/__init__.py +0 -2
  179. mlrun/model_monitoring/db/_schedules.py +242 -0
  180. mlrun/model_monitoring/db/_stats.py +189 -0
  181. mlrun/model_monitoring/db/tsdb/__init__.py +33 -22
  182. mlrun/model_monitoring/db/tsdb/base.py +243 -49
  183. mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +76 -36
  184. mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +33 -0
  185. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connection.py +213 -0
  186. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +534 -88
  187. mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +1 -0
  188. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +436 -106
  189. mlrun/model_monitoring/helpers.py +356 -114
  190. mlrun/model_monitoring/stream_processing.py +190 -345
  191. mlrun/model_monitoring/tracking_policy.py +11 -4
  192. mlrun/model_monitoring/writer.py +49 -90
  193. mlrun/package/__init__.py +3 -6
  194. mlrun/package/context_handler.py +2 -2
  195. mlrun/package/packager.py +12 -9
  196. mlrun/package/packagers/__init__.py +0 -2
  197. mlrun/package/packagers/default_packager.py +14 -11
  198. mlrun/package/packagers/numpy_packagers.py +16 -7
  199. mlrun/package/packagers/pandas_packagers.py +18 -18
  200. mlrun/package/packagers/python_standard_library_packagers.py +25 -11
  201. mlrun/package/packagers_manager.py +35 -32
  202. mlrun/package/utils/__init__.py +0 -3
  203. mlrun/package/utils/_pickler.py +6 -6
  204. mlrun/platforms/__init__.py +47 -16
  205. mlrun/platforms/iguazio.py +4 -1
  206. mlrun/projects/operations.py +30 -30
  207. mlrun/projects/pipelines.py +116 -47
  208. mlrun/projects/project.py +1292 -329
  209. mlrun/render.py +5 -9
  210. mlrun/run.py +57 -14
  211. mlrun/runtimes/__init__.py +1 -3
  212. mlrun/runtimes/base.py +30 -22
  213. mlrun/runtimes/daskjob.py +9 -9
  214. mlrun/runtimes/databricks_job/databricks_runtime.py +6 -5
  215. mlrun/runtimes/function_reference.py +5 -2
  216. mlrun/runtimes/generators.py +3 -2
  217. mlrun/runtimes/kubejob.py +6 -7
  218. mlrun/runtimes/mounts.py +574 -0
  219. mlrun/runtimes/mpijob/__init__.py +0 -2
  220. mlrun/runtimes/mpijob/abstract.py +7 -6
  221. mlrun/runtimes/nuclio/api_gateway.py +7 -7
  222. mlrun/runtimes/nuclio/application/application.py +11 -13
  223. mlrun/runtimes/nuclio/application/reverse_proxy.go +66 -64
  224. mlrun/runtimes/nuclio/function.py +127 -70
  225. mlrun/runtimes/nuclio/serving.py +105 -37
  226. mlrun/runtimes/pod.py +159 -54
  227. mlrun/runtimes/remotesparkjob.py +3 -2
  228. mlrun/runtimes/sparkjob/__init__.py +0 -2
  229. mlrun/runtimes/sparkjob/spark3job.py +22 -12
  230. mlrun/runtimes/utils.py +7 -6
  231. mlrun/secrets.py +2 -2
  232. mlrun/serving/__init__.py +8 -0
  233. mlrun/serving/merger.py +7 -5
  234. mlrun/serving/remote.py +35 -22
  235. mlrun/serving/routers.py +186 -240
  236. mlrun/serving/server.py +41 -10
  237. mlrun/serving/states.py +432 -118
  238. mlrun/serving/utils.py +13 -2
  239. mlrun/serving/v1_serving.py +3 -2
  240. mlrun/serving/v2_serving.py +161 -203
  241. mlrun/track/__init__.py +1 -1
  242. mlrun/track/tracker.py +2 -2
  243. mlrun/track/trackers/mlflow_tracker.py +6 -5
  244. mlrun/utils/async_http.py +35 -22
  245. mlrun/utils/clones.py +7 -4
  246. mlrun/utils/helpers.py +511 -58
  247. mlrun/utils/logger.py +119 -13
  248. mlrun/utils/notifications/notification/__init__.py +22 -19
  249. mlrun/utils/notifications/notification/base.py +39 -15
  250. mlrun/utils/notifications/notification/console.py +6 -6
  251. mlrun/utils/notifications/notification/git.py +11 -11
  252. mlrun/utils/notifications/notification/ipython.py +10 -9
  253. mlrun/utils/notifications/notification/mail.py +176 -0
  254. mlrun/utils/notifications/notification/slack.py +16 -8
  255. mlrun/utils/notifications/notification/webhook.py +24 -8
  256. mlrun/utils/notifications/notification_pusher.py +191 -200
  257. mlrun/utils/regex.py +12 -2
  258. mlrun/utils/version/version.json +2 -2
  259. {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0.dist-info}/METADATA +81 -54
  260. mlrun-1.8.0.dist-info/RECORD +351 -0
  261. {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0.dist-info}/WHEEL +1 -1
  262. mlrun/model_monitoring/applications/evidently_base.py +0 -137
  263. mlrun/model_monitoring/db/stores/__init__.py +0 -136
  264. mlrun/model_monitoring/db/stores/base/store.py +0 -213
  265. mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +0 -71
  266. mlrun/model_monitoring/db/stores/sqldb/models/base.py +0 -190
  267. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +0 -103
  268. mlrun/model_monitoring/db/stores/sqldb/models/sqlite.py +0 -40
  269. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +0 -659
  270. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +0 -726
  271. mlrun/model_monitoring/model_endpoint.py +0 -118
  272. mlrun-1.7.2rc3.dist-info/RECORD +0 -351
  273. {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0.dist-info}/entry_points.txt +0 -0
  274. {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0.dist-info/licenses}/LICENSE +0 -0
  275. {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0.dist-info}/top_level.txt +0 -0
mlrun/projects/project.py CHANGED
@@ -28,42 +28,64 @@ import warnings
  import zipfile
  from copy import deepcopy
  from os import environ, makedirs, path
- from typing import Callable, Optional, Union
+ from typing import Callable, Optional, Union, cast

+ import deprecated
  import dotenv
  import git
  import git.exc
- import mlrun_pipelines.common.models
- import mlrun_pipelines.mounts
  import nuclio.utils
  import requests
  import yaml
- from mlrun_pipelines.models import PipelineNodeWrapper

+ import mlrun.artifacts.model
  import mlrun.common.formatters
  import mlrun.common.helpers
  import mlrun.common.runtimes.constants
+ import mlrun.common.schemas.alert
  import mlrun.common.schemas.artifact
  import mlrun.common.schemas.model_monitoring.constants as mm_constants
+ import mlrun.datastore.datastore_profile
  import mlrun.db
  import mlrun.errors
  import mlrun.k8s_utils
  import mlrun.lists
  import mlrun.model_monitoring.applications as mm_app
  import mlrun.runtimes
+ import mlrun.runtimes.mounts
  import mlrun.runtimes.nuclio.api_gateway
  import mlrun.runtimes.pod
  import mlrun.runtimes.utils
  import mlrun.serving
  import mlrun.utils
  import mlrun.utils.regex
+ import mlrun_pipelines.common.models
  from mlrun.alerts.alert import AlertConfig
- from mlrun.common.schemas.alert import AlertTemplate
- from mlrun.datastore.datastore_profile import DatastoreProfile, DatastoreProfile2Json
+ from mlrun.common.schemas import alert as alert_constants
+ from mlrun.datastore.datastore_profile import (
+ DatastoreProfile,
+ DatastoreProfile2Json,
+ datastore_profile_read,
+ )
+ from mlrun.datastore.vectorstore import VectorStoreCollection
+ from mlrun.model_monitoring.helpers import (
+ filter_results_by_regex,
+ get_alert_name_from_result_fqn,
+ get_result_instance_fqn,
+ )
  from mlrun.runtimes.nuclio.function import RemoteRuntime
+ from mlrun_pipelines.models import PipelineNodeWrapper

- from ..artifacts import Artifact, ArtifactProducer, DatasetArtifact, ModelArtifact
+ from ..artifacts import (
+ Artifact,
+ ArtifactProducer,
+ DatasetArtifact,
+ DocumentArtifact,
+ DocumentLoaderSpec,
+ ModelArtifact,
+ )
  from ..artifacts.manager import ArtifactManager, dict_to_artifact, extend_artifact_path
+ from ..common.runtimes.constants import RunStates
  from ..datastore import store_manager
  from ..features import Feature
  from ..model import EntrypointParam, ImageBuilder, ModelObj
@@ -126,15 +148,15 @@ def new_project(
  context: str = "./",
  init_git: bool = False,
  user_project: bool = False,
- remote: str = None,
- from_template: str = None,
- secrets: dict = None,
- description: str = None,
- subpath: str = None,
+ remote: Optional[str] = None,
+ from_template: Optional[str] = None,
+ secrets: Optional[dict] = None,
+ description: Optional[str] = None,
+ subpath: Optional[str] = None,
  save: bool = True,
  overwrite: bool = False,
- parameters: dict = None,
- default_function_node_selector: dict = None,
+ parameters: Optional[dict] = None,
+ default_function_node_selector: Optional[dict] = None,
  ) -> "MlrunProject":
  """Create a new MLRun project, optionally load it from a yaml/zip/git template

@@ -291,17 +313,17 @@ def new_project(

  def load_project(
  context: str = "./",
- url: str = None,
- name: str = None,
- secrets: dict = None,
+ url: Optional[str] = None,
+ name: Optional[str] = None,
+ secrets: Optional[dict] = None,
  init_git: bool = False,
- subpath: str = None,
+ subpath: Optional[str] = None,
  clone: bool = False,
  user_project: bool = False,
  save: bool = True,
  sync_functions: bool = False,
- parameters: dict = None,
- allow_cross_project: bool = None,
+ parameters: Optional[dict] = None,
+ allow_cross_project: Optional[bool] = None,
  ) -> "MlrunProject":
  """Load an MLRun project from git or tar or dir

@@ -437,18 +459,19 @@ def load_project(
  def get_or_create_project(
  name: str,
  context: str = "./",
- url: str = None,
- secrets: dict = None,
+ url: Optional[str] = None,
+ secrets: Optional[dict] = None,
  init_git=False,
- subpath: str = None,
+ subpath: Optional[str] = None,
  clone: bool = False,
  user_project: bool = False,
- from_template: str = None,
+ from_template: Optional[str] = None,
  save: bool = True,
- parameters: dict = None,
- allow_cross_project: bool = None,
+ parameters: Optional[dict] = None,
+ allow_cross_project: Optional[bool] = None,
  ) -> "MlrunProject":
- """Load a project from MLRun DB, or create/import if it does not exist
+ """Load a project from MLRun DB, or create/import if it does not exist.
+ The project will become the default project for the current session.

  MLRun looks for a project.yaml file with project definition and objects in the project root path
  and use it to initialize the project, in addition it runs the project_setup.py file (if it exists)
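A minimal usage sketch of get_or_create_project as defined above; the project name and parameters are illustrative, not taken from this release::

    import mlrun

    # Loads the project from the MLRun DB if it exists, otherwise creates it from
    # the local context dir; it also becomes the default project for the session.
    project = mlrun.get_or_create_project(
        name="my-project",                            # hypothetical project name
        context="./",
        parameters={"default_image": "mlrun/mlrun"},  # hypothetical project parameter
        allow_cross_project=False,
    )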
@@ -734,10 +757,10 @@ def _project_instance_from_struct(struct, name, allow_cross_project):
  )

  if allow_cross_project is None:
- # TODO: Remove this warning in version 1.9.0 and also fix cli to support allow_cross_project
+ # TODO: Remove this warning in version 1.10.0 and also fix cli to support allow_cross_project
  warnings.warn(
  f"Project {name=} is different than specified on the context's project yaml. "
- "This behavior is deprecated and will not be supported from version 1.9.0."
+ "This behavior is deprecated and will not be supported from version 1.10.0."
  )
  logger.warn(error_message)
  elif allow_cross_project:
@@ -823,15 +846,16 @@ class ProjectSpec(ModelObj):
  origin_url=None,
  goals=None,
  load_source_on_run=None,
- default_requirements: typing.Union[str, list[str]] = None,
+ default_requirements: Optional[typing.Union[str, list[str]]] = None,
  desired_state=mlrun.common.schemas.ProjectState.online.value,
  owner=None,
  disable_auto_mount=None,
  workdir=None,
  default_image=None,
  build=None,
- custom_packagers: list[tuple[str, bool]] = None,
+ custom_packagers: Optional[list[tuple[str, bool]]] = None,
  default_function_node_selector=None,
+ notifications=None,
  ):
  self.repo = None

@@ -872,6 +896,7 @@ class ProjectSpec(ModelObj):
  # whether it is mandatory for a run (raise exception on collection error) or not.
  self.custom_packagers = custom_packagers or []
  self._default_function_node_selector = default_function_node_selector or None
+ self.notifications = notifications or []

  @property
  def source(self) -> str:
@@ -1153,7 +1178,6 @@ class MlrunProject(ModelObj):
  self._artifact_manager = None
  self._notifiers = CustomNotificationPusher(
  [
- NotificationTypes.slack,
  NotificationTypes.console,
  NotificationTypes.ipython,
  ]
@@ -1231,8 +1255,14 @@ class MlrunProject(ModelObj):
  mlrun.utils.helpers.validate_builder_source(source, pull_at_runtime, workdir)

  self.spec.load_source_on_run = pull_at_runtime
+
+ source_has_changed = source != self.spec.source
  self.spec.source = source or self.spec.source

+ # new source should not relay on old workdir
+ if source_has_changed:
+ self.spec.workdir = workdir
+
  if self.spec.source.startswith("git://"):
  source, reference, branch = resolve_git_reference_from_source(source)
  if not branch and not reference:
@@ -1241,7 +1271,6 @@ class MlrunProject(ModelObj):
  "'git://<url>/org/repo.git#<branch-name or refs/heads/..>'"
  )

- self.spec.workdir = workdir or self.spec.workdir
  try:
  # reset function objects (to recalculate build attributes)
  self.sync_functions()
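A short sketch of the set_source behavior changed in the hunks above: when the source URL changes, the workdir is taken from this call rather than carried over from the previous source (the URL and path are hypothetical)::

    project.set_source(
        source="git://example.com/org/repo.git#main",  # hypothetical repo
        pull_at_runtime=True,
        workdir="src",  # with a new source, the old workdir is no longer reused
    )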
@@ -1253,7 +1282,11 @@ class MlrunProject(ModelObj):
  raise exc

  def get_artifact_uri(
- self, key: str, category: str = "artifact", tag: str = None, iter: int = None
+ self,
+ key: str,
+ category: str = "artifact",
+ tag: Optional[str] = None,
+ iter: Optional[int] = None,
  ) -> str:
  """return the project artifact uri (store://..) from the artifact key

@@ -1353,7 +1386,7 @@ class MlrunProject(ModelObj):
  workflow_path: str,
  embed: bool = False,
  engine: Optional[str] = None,
- args_schema: list[EntrypointParam] = None,
+ args_schema: Optional[list[EntrypointParam]] = None,
  handler: Optional[str] = None,
  schedule: typing.Union[str, mlrun.common.schemas.ScheduleCronTrigger] = None,
  ttl: Optional[int] = None,
@@ -1380,7 +1413,9 @@ class MlrunProject(ModelObj):
  """

  # validate the provided workflow_path
- self._validate_file_path(workflow_path, param_name="workflow_path")
+ self._validate_file_path(
+ workflow_path, param_name="workflow_path", engine=engine
+ )

  if engine and "local" in engine and schedule:
  raise ValueError("'schedule' argument is not supported for 'local' engine.")
@@ -1425,8 +1460,8 @@ class MlrunProject(ModelObj):
  self,
  key,
  artifact: typing.Union[str, dict, Artifact] = None,
- target_path: str = None,
- tag: str = None,
+ target_path: Optional[str] = None,
+ tag: Optional[str] = None,
  ):
  """add/set an artifact in the project spec (will be registered on load)

@@ -1515,6 +1550,12 @@ class MlrunProject(ModelObj):
  is_retained_producer=is_retained_producer,
  )

+ def update_artifact(self, artifact_object: Artifact):
+ artifacts_manager = self._get_artifact_manager()
+ project_tag = self._get_project_tag()
+ producer, _ = self._resolve_artifact_producer(artifact_object, project_tag)
+ artifacts_manager.update_artifact(producer, artifact_object)
+
  def _get_artifact_manager(self):
  if self._artifact_manager:
  return self._artifact_manager
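A rough sketch of the new update_artifact helper added above, assuming an artifact that was previously logged in this project (the key and label are hypothetical)::

    artifact = project.get_artifact("my-dataset")    # hypothetical artifact key
    artifact.metadata.labels["stage"] = "validated"  # edit the in-memory object
    project.update_artifact(artifact)                # persist the updated artifact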
@@ -1646,7 +1687,7 @@ class MlrunProject(ModelObj):
  deletion_strategy: mlrun.common.schemas.artifact.ArtifactsDeletionStrategies = (
  mlrun.common.schemas.artifact.ArtifactsDeletionStrategies.metadata_only
  ),
- secrets: dict = None,
+ secrets: Optional[dict] = None,
  ):
  """Delete an artifact object in the DB and optionally delete the artifact data

@@ -1710,7 +1751,7 @@ class MlrunProject(ModelObj):
  :param upload: upload to datastore (default is True)
  :param labels: a set of key/value labels to tag the artifact with

- :returns: artifact object
+ :returns: dataset artifact object
  """
  ds = DatasetArtifact(
  key,
@@ -1723,14 +1764,17 @@ class MlrunProject(ModelObj):
  **kwargs,
  )

- item = self.log_artifact(
- ds,
- local_path=local_path,
- artifact_path=artifact_path,
- target_path=target_path,
- tag=tag,
- upload=upload,
- labels=labels,
+ item = cast(
+ DatasetArtifact,
+ self.log_artifact(
+ ds,
+ local_path=local_path,
+ artifact_path=artifact_path,
+ target_path=target_path,
+ tag=tag,
+ upload=upload,
+ labels=labels,
+ ),
  )
  return item

@@ -1777,7 +1821,7 @@ class MlrunProject(ModelObj):
  :param key: artifact key or artifact class ()
  :param body: will use the body as the artifact content
  :param model_file: path to the local model file we upload (see also model_dir)
- or to a model file data url (e.g. http://host/path/model.pkl)
+ or to a model file data url (e.g. `http://host/path/model.pkl`)
  :param model_dir: path to the local dir holding the model file and extra files
  :param artifact_path: target artifact path (when not using the default)
  to define a subpath under the default location use:
@@ -1798,7 +1842,7 @@ class MlrunProject(ModelObj):
  :param extra_data: key/value list of extra files/charts to link with this dataset
  value can be absolute path | relative path (to model dir) | bytes | artifact object

- :returns: artifact object
+ :returns: model artifact object
  """

  if training_set is not None and inputs:
@@ -1825,14 +1869,152 @@ class MlrunProject(ModelObj):
  if training_set is not None:
  model.infer_from_df(training_set, label_column)

- item = self.log_artifact(
- model,
- artifact_path=artifact_path,
+ item = cast(
+ ModelArtifact,
+ self.log_artifact(
+ model,
+ artifact_path=artifact_path,
+ tag=tag,
+ upload=upload,
+ labels=labels,
+ ),
+ )
+ return item
+
+ def get_vector_store_collection(
+ self,
+ vector_store: "VectorStore", # noqa: F821
+ collection_name: Optional[str] = None,
+ ) -> VectorStoreCollection:
+ """
+ Create a VectorStoreCollection wrapper for a given vector store instance.
+
+ This method wraps a vector store implementation (like Milvus, Chroma) with MLRun
+ integration capabilities. The wrapper provides access to the underlying vector
+ store's functionality while adding MLRun-specific features like document and
+ artifact management.
+
+ Args:
+ vector_store: The vector store instance to wrap (e.g., Milvus, Chroma).
+ This is the underlying implementation that will handle
+ vector storage and retrieval.
+ collection_name: Optional name for the collection. If not provided,
+ will attempt to extract it from the vector_store object
+ by looking for 'collection_name', '_collection_name',
+ 'index_name', or '_index_name' attributes.
+
+ Returns:
+ VectorStoreCollection: A wrapped vector store instance with MLRun integration.
+ This wrapper provides both access to the original vector
+ store's capabilities and additional MLRun functionality.
+
+ Example:
+ >>> vector_store = Chroma(embedding_function=embeddings)
+ >>> collection = project.get_vector_store_collection(
+ ... vector_store, collection_name="my_collection"
+ ... )
+ """
+ return VectorStoreCollection(
+ self,
+ vector_store,
+ collection_name,
+ )
+
+ def log_document(
+ self,
+ key: str = "",
+ tag: str = "",
+ local_path: str = "",
+ artifact_path: Optional[str] = None,
+ document_loader_spec: Optional[DocumentLoaderSpec] = None,
+ upload: Optional[bool] = False,
+ labels: Optional[dict[str, str]] = None,
+ target_path: Optional[str] = None,
+ **kwargs,
+ ) -> DocumentArtifact:
+ """
+ Log a document as an artifact.
+
+ :param key: Optional artifact key. If not provided, will be derived from local_path
+ or target_path using DocumentArtifact.key_from_source()
+ :param tag: Version tag
+ :param local_path: path to the local file we upload, will also be use
+ as the destination subpath (under "artifact_path")
+ :param artifact_path: Target path for artifact storage
+ :param document_loader_spec: Spec to use to load the artifact as langchain document.
+
+ By default, uses DocumentLoaderSpec() which initializes with:
+
+ * loader_class_name="langchain_community.document_loaders.TextLoader"
+ * src_name="file_path"
+ * kwargs=None
+
+ Can be customized for different document types, e.g.::
+
+ DocumentLoaderSpec(
+ loader_class_name="langchain_community.document_loaders.PDFLoader",
+ src_name="file_path",
+ kwargs={"extract_images": True}
+ )
+ :param upload: Whether to upload the artifact
+ :param labels: Key-value labels. A 'source' label is automatically added using either
+ local_path or target_path to facilitate easier document searching.
+ :param target_path: Target file path
+ :param kwargs: Additional keyword arguments
+ :return: DocumentArtifact object
+
+ Example:
+ >>> # Log a PDF document with custom loader
+ >>> project.log_document(
+ ... local_path="path/to/doc.pdf",
+ ... document_loader=DocumentLoaderSpec(
+ ... loader_class_name="langchain_community.document_loaders.PDFLoader",
+ ... src_name="file_path",
+ ... kwargs={"extract_images": True},
+ ... ),
+ ... )
+
+ """
+ if not key and not local_path and not target_path:
+ raise ValueError(
+ "Must provide either 'key' parameter or 'local_path'/'target_path' to derive the key from"
+ )
+ if not key:
+ key = DocumentArtifact.key_from_source(local_path or target_path)
+
+ document_loader_spec = document_loader_spec or DocumentLoaderSpec()
+ if not document_loader_spec.download_object and upload:
+ raise ValueError(
+ "The document loader is configured to not support downloads but the upload flag is set to True."
+ "Either set loader.download_object=True or set upload=False"
+ )
+ original_source = local_path or target_path
+ doc_artifact = DocumentArtifact(
+ key=key,
+ original_source=original_source,
+ document_loader_spec=document_loader_spec,
+ collections=kwargs.pop("collections", None),
+ **kwargs,
+ )
+
+ # limit label to a max of 255 characters (for db reasons)
+ max_length = 255
+ labels = labels or {}
+ labels["source"] = (
+ original_source[: max_length - 3] + "..."
+ if len(original_source) > max_length
+ else original_source
+ )
+
+ return self.log_artifact(
+ item=doc_artifact,
  tag=tag,
+ local_path=local_path,
+ artifact_path=artifact_path,
  upload=upload,
  labels=labels,
+ target_path=target_path,
  )
- return item

  def import_artifact(
  self, item_path: str, new_key=None, artifact_path=None, tag=None
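A sketch combining the two methods added in the hunk above, reusing the loader values from the docstring example; the embeddings object, file path, and collection name are assumptions, not part of this release::

    from langchain_community.vectorstores import Chroma

    from mlrun.artifacts import DocumentLoaderSpec

    # log a PDF as a document artifact with a custom loader spec
    doc_artifact = project.log_document(
        local_path="docs/guide.pdf",  # hypothetical file
        document_loader_spec=DocumentLoaderSpec(
            loader_class_name="langchain_community.document_loaders.PDFLoader",
            src_name="file_path",
            kwargs={"extract_images": True},
        ),
    )

    # wrap a vector store with MLRun integration; per the docstring above, the
    # wrapper proxies the underlying store's API and adds artifact bookkeeping
    collection = project.get_vector_store_collection(
        Chroma(embedding_function=embeddings),  # 'embeddings' assumed to be defined
        collection_name="my_collection",
    )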
@@ -1946,37 +2128,148 @@ class MlrunProject(ModelObj):
  )
  return _run_project_setup(self, setup_file_path, save)

+ def create_model_monitoring_alert_configs(
+ self,
+ name: str,
+ summary: str,
+ endpoints: mlrun.common.schemas.ModelEndpointList,
+ events: Union[list[alert_constants.EventKind], alert_constants.EventKind],
+ notifications: list[alert_constants.AlertNotification],
+ result_names: Optional[
+ list[str]
+ ] = None, # can use wildcards - see below for explanation.
+ severity: alert_constants.AlertSeverity = alert_constants.AlertSeverity.MEDIUM,
+ criteria: alert_constants.AlertCriteria = alert_constants.AlertCriteria(
+ count=1, period="10m"
+ ),
+ reset_policy: mlrun.common.schemas.alert.ResetPolicy = mlrun.common.schemas.alert.ResetPolicy.AUTO,
+ ) -> list[mlrun.alerts.alert.AlertConfig]:
+ """
+ Generate alert configurations based on specified model endpoints and result names, which can be defined
+ explicitly or using regex patterns.
+
+ :param name: The name of the AlertConfig template. It will be combined with
+ mep id, app name and result name to generate a unique name.
+ :param summary: Summary of the alert, will be sent in the generated notifications
+ :param endpoints: The endpoints from which metrics will be retrieved to configure
+ the alerts.
+ The ModelEndpointList object is obtained via the `list_model_endpoints`
+ method or created manually using `ModelEndpoint` objects.
+ :param events: AlertTrigger event types (EventKind).
+ :param notifications: List of notifications to invoke once the alert is triggered
+ :param result_names: Optional. Filters the result names used to create the alert
+ configuration, constructed from the app and result_name regex.
+
+ For example:
+ [`app1.result-*`, `*.result1`]
+ will match "mep_uid1.app1.result.result-1" and
+ "mep_uid1.app2.result.result1".
+ A specific result_name (not a wildcard) will always create a new alert
+ config, regardless of whether the result name exists.
+ :param severity: Severity of the alert.
+ :param criteria: The threshold for triggering the alert based on the
+ specified number of events within the defined time period.
+ :param reset_policy: When to clear the alert. Either "manual" for manual reset of the alert,
+ or "auto" if the criteria contains a time period.
+ :returns: List of AlertConfig according to endpoints results,
+ filtered by result_names.
+ """
+ db = mlrun.db.get_run_db(secrets=self._secrets)
+ matching_results = []
+ specific_result_names = [
+ result_name
+ for result_name in result_names
+ if result_name.count(".") == 3 and "*" not in result_name
+ ]
+ alerts = []
+ endpoint_ids = [endpoint.metadata.uid for endpoint in endpoints.endpoints]
+ # using separation to group by endpoint IDs:
+ # {"mep_id1": [...], "mep_id2": [...]}
+ results_by_endpoint = db.get_metrics_by_multiple_endpoints(
+ project=self.name,
+ endpoint_ids=endpoint_ids,
+ type="results",
+ events_format=mm_constants.GetEventsFormat.SEPARATION,
+ )
+ for endpoint_uid, results in results_by_endpoint.items():
+ results_fqn_by_endpoint = [
+ get_result_instance_fqn(
+ model_endpoint_id=endpoint_uid,
+ app_name=result.app,
+ result_name=result.name,
+ )
+ for result in results
+ ]
+ matching_results += filter_results_by_regex(
+ existing_result_names=results_fqn_by_endpoint,
+ result_name_filters=result_names,
+ )
+ for specific_result_name in specific_result_names:
+ if specific_result_name not in matching_results:
+ logger.warning(
+ f"The specific result name '{specific_result_name}' was"
+ f" not found in the existing endpoint results. Adding alert configuration anyway."
+ )
+ alert_result_names = list(set(specific_result_names + matching_results))
+ for result_fqn in alert_result_names:
+ result_fqn_name = get_alert_name_from_result_fqn(result_fqn)
+ alerts.append(
+ mlrun.alerts.alert.AlertConfig(
+ project=self.name,
+ name=f"{name}--{result_fqn_name}",
+ summary=summary,
+ severity=severity,
+ entities=alert_constants.EventEntities(
+ kind=alert_constants.EventEntityKind.MODEL_ENDPOINT_RESULT,
+ project=self.name,
+ ids=[result_fqn],
+ ),
+ trigger=alert_constants.AlertTrigger(
+ events=events if isinstance(events, list) else [events]
+ ),
+ criteria=criteria,
+ notifications=notifications,
+ reset_policy=reset_policy,
+ )
+ )
+ if not alerts:
+ warnings.warn(
+ "No alert config has been created. "
+ "Try specifying a result name explicitly or verifying that results are available"
+ )
+
+ return alerts
+
  def set_model_monitoring_function(
  self,
- func: typing.Union[str, mlrun.runtimes.BaseRuntime, None] = None,
+ name: str,
+ func: typing.Union[str, mlrun.runtimes.RemoteRuntime, None] = None,
  application_class: typing.Union[
- str,
- mm_app.ModelMonitoringApplicationBase,
+ str, mm_app.ModelMonitoringApplicationBase, None
  ] = None,
- name: str = None,
- image: str = None,
- handler=None,
- with_repo: bool = None,
- tag: str = None,
- requirements: typing.Union[str, list[str]] = None,
+ image: Optional[str] = None,
+ handler: Optional[str] = None,
+ with_repo: Optional[bool] = None,
+ tag: Optional[str] = None,
+ requirements: Optional[typing.Union[str, list[str]]] = None,
  requirements_file: str = "",
  **application_kwargs,
- ) -> mlrun.runtimes.BaseRuntime:
+ ) -> mlrun.runtimes.RemoteRuntime:
  """
  Update or add a monitoring function to the project.
  Note: to deploy the function after linking it to the project,
  call `fn.deploy()` where `fn` is the object returned by this method.

- examples::
+ Example::

  project.set_model_monitoring_function(
  name="myApp", application_class="MyApp", image="mlrun/mlrun"
  )

- :param func: Function object or spec/code url, None refers to current Notebook
+ :param func: Remote function object or spec/code URL. :code:`None` refers to the current
+ notebook.
  :param name: Name of the function (under the project), can be specified with a tag to support
- versions (e.g. myfunc:v1)
- Default: job
+ versions (e.g. myfunc:v1).
  :param image: Docker image to be used, can also be specified in
  the function object/yaml
  :param handler: Default function handler to invoke (can only be set with .py/.ipynb files)
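A hedged sketch of the create_model_monitoring_alert_configs API introduced in the hunk above. It assumes a project with monitored model endpoints and existing results; the names, webhook, event kind, and the list_model_endpoints / store_alert_config helpers are assumptions based on the docstring, not verified against this release::

    import mlrun
    import mlrun.common.schemas.alert as alert_objects

    # assumed: a notification prepared as in MLRun's alert examples
    notification = mlrun.model.Notification(
        kind="slack",
        name="drift-slack",
        secret_params={"webhook": "https://hooks.slack.com/..."},  # hypothetical
    ).to_dict()

    alert_configs = project.create_model_monitoring_alert_configs(
        name="drift",                                  # base name for the generated alerts
        summary="Drift detected on a monitored endpoint",
        endpoints=project.list_model_endpoints(),      # assumed helper per the docstring above
        events=alert_objects.EventKind.DATA_DRIFT_DETECTED,
        notifications=[alert_objects.AlertNotification(notification=notification)],
        result_names=["histogram-data-drift.*"],       # wildcard filter, hypothetical
    )
    for config in alert_configs:
        project.store_alert_config(config)             # assumed to register each alert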
@@ -1989,9 +2282,8 @@ class MlrunProject(ModelObj):
  :param application_class: Name or an Instance of a class that implements the monitoring application.
  :param application_kwargs: Additional keyword arguments to be passed to the
  monitoring application's constructor.
+ :returns: The model monitoring remote function object.
  """
-
- function_object: RemoteRuntime = None
  (
  resolved_function_name,
  function_object,
@@ -2015,33 +2307,33 @@ class MlrunProject(ModelObj):

  def create_model_monitoring_function(
  self,
- func: str = None,
+ name: str,
+ func: Optional[str] = None,
  application_class: typing.Union[
  str,
  mm_app.ModelMonitoringApplicationBase,
+ None,
  ] = None,
- name: str = None,
- image: str = None,
- handler: str = None,
- with_repo: bool = None,
- tag: str = None,
- requirements: typing.Union[str, list[str]] = None,
+ image: Optional[str] = None,
+ handler: Optional[str] = None,
+ with_repo: Optional[bool] = None,
+ tag: Optional[str] = None,
+ requirements: Optional[typing.Union[str, list[str]]] = None,
  requirements_file: str = "",
  **application_kwargs,
- ) -> mlrun.runtimes.BaseRuntime:
+ ) -> mlrun.runtimes.RemoteRuntime:
  """
  Create a monitoring function object without setting it to the project

- examples::
+ Example::

  project.create_model_monitoring_function(
- application_class_name="MyApp", image="mlrun/mlrun", name="myApp"
+ name="myApp", application_class="MyApp", image="mlrun/mlrun"
  )

- :param func: Code url, None refers to current Notebook
+ :param func: The function's code URL. :code:`None` refers to the current notebook.
  :param name: Name of the function, can be specified with a tag to support
- versions (e.g. myfunc:v1)
- Default: job
+ versions (e.g. myfunc:v1).
  :param image: Docker image to be used, can also be specified in
  the function object/yaml
  :param handler: Default function handler to invoke (can only be set with .py/.ipynb files)
@@ -2054,6 +2346,7 @@ class MlrunProject(ModelObj):
  :param application_class: Name or an Instance of a class that implementing the monitoring application.
  :param application_kwargs: Additional keyword arguments to be passed to the
  monitoring application's constructor.
+ :returns: The model monitoring remote function object.
  """

  _, function_object, _ = self._instantiate_model_monitoring_function(
@@ -2086,10 +2379,9 @@ class MlrunProject(ModelObj):
  requirements: typing.Union[str, list[str], None] = None,
  requirements_file: str = "",
  **application_kwargs,
- ) -> tuple[str, mlrun.runtimes.BaseRuntime, dict]:
+ ) -> tuple[str, mlrun.runtimes.RemoteRuntime, dict]:
  import mlrun.model_monitoring.api

- function_object: RemoteRuntime = None
  kind = None
  if (isinstance(func, str) or func is None) and application_class is not None:
  kind = mlrun.run.RuntimeKinds.serving
@@ -2128,9 +2420,6 @@ class MlrunProject(ModelObj):
  mm_constants.ModelMonitoringAppLabel.VAL,
  )

- if not mlrun.mlconf.is_ce_mode():
- function_object.apply(mlrun.mount_v3io())
-
  return resolved_function_name, function_object, func

  def _wait_for_functions_deployment(self, function_names: list[str]) -> None:
@@ -2151,7 +2440,6 @@ class MlrunProject(ModelObj):
  *,
  deploy_histogram_data_drift_app: bool = True,
  wait_for_deployment: bool = False,
- rebuild_images: bool = False,
  fetch_credentials_from_sys_config: bool = False,
  ) -> None:
  """
@@ -2169,17 +2457,31 @@ class MlrunProject(ModelObj):
  :param image: The image of the model monitoring controller, writer, monitoring
  stream & histogram data drift functions, which are real time nuclio
  functions. By default, the image is mlrun/mlrun.
- :param deploy_histogram_data_drift_app: If true, deploy the default histogram-based data drift application.
+ :param deploy_histogram_data_drift_app: If true, deploy the default histogram-based data drift application:
+ :py:class:`~mlrun.model_monitoring.applications.histogram_data_drift.HistogramDataDriftApplication`.
+ If false, and you want to deploy the histogram data drift application
+ afterwards, you may use the
+ :py:func:`~set_model_monitoring_function` method::
+
+ import mlrun.model_monitoring.applications.histogram_data_drift as histogram_data_drift
+
+ hist_app = project.set_model_monitoring_function(
+ name=histogram_data_drift.HistogramDataDriftApplicationConstants.NAME, # keep the default name
+ func=histogram_data_drift.__file__,
+ application_class=histogram_data_drift.HistogramDataDriftApplication.__name__,
+ )
+
+ project.deploy_function(hist_app)
+
  :param wait_for_deployment: If true, return only after the deployment is done on the backend.
  Otherwise, deploy the model monitoring infrastructure on the
  background, including the histogram data drift app if selected.
- :param rebuild_images: If true, force rebuild of model monitoring infrastructure images.
  :param fetch_credentials_from_sys_config: If true, fetch the credentials from the system configuration.
  """
  if default_controller_image != "mlrun/mlrun":
- # TODO: Remove this in 1.9.0
+ # TODO: Remove this in 1.10.0
  warnings.warn(
- "'default_controller_image' is deprecated and will be removed in 1.9.0, "
+ "'default_controller_image' is deprecated in 1.7.0 and will be removed in 1.10.0, "
  "use 'image' instead",
  FutureWarning,
  )
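For reference, a minimal call to enable_model_monitoring with the parameters kept in this release (rebuild_images was removed above); the values are illustrative::

    project.enable_model_monitoring(
        image="mlrun/mlrun",
        base_period=10,                        # controller scheduling interval, in minutes
        deploy_histogram_data_drift_app=True,
        wait_for_deployment=True,
    )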
@@ -2196,7 +2498,6 @@ class MlrunProject(ModelObj):
  image=image,
  base_period=base_period,
  deploy_histogram_data_drift_app=deploy_histogram_data_drift_app,
- rebuild_images=rebuild_images,
  fetch_credentials_from_sys_config=fetch_credentials_from_sys_config,
  )

@@ -2208,30 +2509,6 @@ class MlrunProject(ModelObj):
  )
  self._wait_for_functions_deployment(deployment_functions)

- def deploy_histogram_data_drift_app(
- self,
- *,
- image: str = "mlrun/mlrun",
- db: Optional[mlrun.db.RunDBInterface] = None,
- wait_for_deployment: bool = False,
- ) -> None:
- """
- Deploy the histogram data drift application.
-
- :param image: The image on which the application will run.
- :param db: An optional DB object.
- :param wait_for_deployment: If true, return only after the deployment is done on the backend.
- Otherwise, deploy the application on the background.
- """
- if db is None:
- db = mlrun.db.get_run_db(secrets=self._secrets)
- db.deploy_histogram_data_drift_app(project=self.name, image=image)
-
- if wait_for_deployment:
- self._wait_for_functions_deployment(
- [mm_constants.HistogramDataDriftApplicationConstants.NAME]
- )
-
  def update_model_monitoring_controller(
  self,
  base_period: int = 10,
@@ -2269,7 +2546,7 @@ class MlrunProject(ModelObj):
  delete_stream_function: bool = False,
  delete_histogram_data_drift_app: bool = True,
  delete_user_applications: bool = False,
- user_application_list: list[str] = None,
+ user_application_list: Optional[list[str]] = None,
  ) -> None:
  """
  Disable model monitoring application controller, writer, stream, histogram data drift application
@@ -2326,14 +2603,14 @@ class MlrunProject(ModelObj):

  def set_function(
  self,
- func: typing.Union[str, mlrun.runtimes.BaseRuntime] = None,
+ func: typing.Union[str, mlrun.runtimes.BaseRuntime, None] = None,
  name: str = "",
  kind: str = "job",
- image: str = None,
- handler: str = None,
- with_repo: bool = None,
- tag: str = None,
- requirements: typing.Union[str, list[str]] = None,
+ image: Optional[str] = None,
+ handler: Optional[str] = None,
+ with_repo: Optional[bool] = None,
+ tag: Optional[str] = None,
+ requirements: Optional[typing.Union[str, list[str]]] = None,
  requirements_file: str = "",
  ) -> mlrun.runtimes.BaseRuntime:
  """
@@ -2422,16 +2699,64 @@ class MlrunProject(ModelObj):
  self._set_function(resolved_function_name, tag, function_object, func)
  return function_object

+ def push_run_notifications(
+ self,
+ uid,
+ timeout=45,
+ ):
+ """
+ Push notifications for a run.
+
+ :param uid: Unique ID of the run.
+ :returns: :py:class:`~mlrun.common.schemas.BackgroundTask`.
+ """
+ db = mlrun.db.get_run_db(secrets=self._secrets)
+ return db.push_run_notifications(
+ project=self.name,
+ uid=uid,
+ timeout=timeout,
+ )
+
+ def push_pipeline_notification_kfp_runner(
+ self,
+ pipeline_id: str,
+ current_run_state: mlrun_pipelines.common.models.RunStatuses,
+ message: str,
+ notifications: Optional[list] = None,
+ ):
+ """
+ Push notifications for a pipeline run(KFP).
+
+ :param pipeline_id: Unique ID of the pipeline run.
+ :param current_run_state: Current run state of the pipeline.
+ :param message: Message to send in the notification.
+ :param notifications: List of notifications to send.
+ """
+ current_run_state = RunStates.pipeline_run_status_to_run_state(
+ current_run_state
+ )
+ db = mlrun.get_run_db()
+ notifications = notifications or self.spec.notifications
+ notifications_to_send = []
+ for notification in notifications:
+ if current_run_state in notification.when:
+ notification_copy = notification.copy()
+ notification_copy.message = message
+ notifications_to_send.append(notification_copy)
+ db.push_pipeline_notifications(
+ pipeline_id, self.metadata.name, notifications_to_send
+ )
+
  def _instantiate_function(
  self,
  func: typing.Union[str, mlrun.runtimes.BaseRuntime] = None,
  name: str = "",
  kind: str = "",
- image: str = None,
- handler: str = None,
- with_repo: bool = None,
- tag: str = None,
- requirements: typing.Union[str, list[str]] = None,
+ image: Optional[str] = None,
+ handler: Optional[str] = None,
+ with_repo: Optional[bool] = None,
+ tag: Optional[str] = None,
+ requirements: Optional[typing.Union[str, list[str]]] = None,
  requirements_file: str = "",
  ) -> tuple[str, str, mlrun.runtimes.BaseRuntime, dict]:
  if (
@@ -2536,6 +2861,13 @@ class MlrunProject(ModelObj):

  self.spec.set_function(name, function_object, func)

+ # TODO: Remove this in 1.11.0
+ @deprecated.deprecated(
+ version="1.8.0",
+ reason="'remove_function' is deprecated and will be removed in 1.11.0. "
+ "Please use `delete_function` instead.",
+ category=FutureWarning,
+ )
  def remove_function(self, name):
  """remove the specified function from the project

@@ -2543,14 +2875,26 @@ class MlrunProject(ModelObj):
  """
  self.spec.remove_function(name)

+ def delete_function(self, name, delete_from_db=False):
+ """deletes the specified function from the project
+
+ :param name: name of the function (under the project)
+ :param delete_from_db: default is False. If False, the function is removed
+ only from the project's cache and spec.
+ If True, the function is also removed from the database.
+ """
+ if delete_from_db:
+ mlrun.db.get_run_db().delete_function(name=name, project=self.metadata.name)
+ self.spec.remove_function(name)
+
  def remove_model_monitoring_function(self, name: Union[str, list[str]]):
  """delete the specified model-monitoring-app function/s

  :param name: name of the model-monitoring-function/s (under the project)
  """
- # TODO: Remove this in 1.9.0
+ # TODO: Remove this in 1.10.0
  warnings.warn(
- "'remove_model_monitoring_function' is deprecated and will be removed in 1.9.0. "
+ "'remove_model_monitoring_function' is deprecated in 1.7.0 and will be removed in 1.10.0. "
  "Please use `delete_model_monitoring_function` instead.",
  FutureWarning,
  )
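Since remove_function is now deprecated (see the decorator added above), the replacement is the new delete_function, sketched here with a hypothetical function name::

    project.delete_function("trainer")                        # remove from the project spec/cache only
    project.delete_function("trainer", delete_from_db=True)   # also delete the function from the DB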
@@ -2654,8 +2998,8 @@ class MlrunProject(ModelObj):

  def pull(
  self,
- branch: str = None,
- remote: str = None,
+ branch: Optional[str] = None,
+ remote: Optional[str] = None,
  secrets: Union[SecretsStore, dict] = None,
  ):
  """pull/update sources from git or tar into the context dir
@@ -2768,10 +3112,10 @@ class MlrunProject(ModelObj):
  branch,
  message=None,
  update=True,
- remote: str = None,
- add: list = None,
- author_name: str = None,
- author_email: str = None,
+ remote: Optional[str] = None,
+ add: Optional[list] = None,
+ author_name: Optional[str] = None,
+ author_email: Optional[str] = None,
  secrets: Union[SecretsStore, dict] = None,
  ):
  """update spec and push updates to remote git repo
@@ -2834,7 +3178,7 @@ class MlrunProject(ModelObj):

  def sync_functions(
  self,
- names: list = None,
+ names: Optional[list] = None,
  always: bool = True,
  save: bool = False,
  silent: bool = False,
@@ -2978,8 +3322,8 @@ class MlrunProject(ModelObj):

  def set_secrets(
  self,
- secrets: dict = None,
- file_path: str = None,
+ secrets: Optional[dict] = None,
+ file_path: Optional[str] = None,
  provider: typing.Union[str, mlrun.common.schemas.SecretProviderName] = None,
  ):
  """
@@ -3056,25 +3400,26 @@ class MlrunProject(ModelObj):

  def run(
  self,
- name: str = None,
- workflow_path: str = None,
- arguments: dict[str, typing.Any] = None,
- artifact_path: str = None,
- workflow_handler: typing.Union[str, typing.Callable] = None,
- namespace: str = None,
+ name: Optional[str] = None,
+ workflow_path: Optional[str] = None,
+ arguments: Optional[dict[str, typing.Any]] = None,
+ artifact_path: Optional[str] = None,
+ workflow_handler: Optional[typing.Union[str, typing.Callable]] = None,
+ namespace: Optional[str] = None,
  sync: bool = False,
  watch: bool = False,
  dirty: bool = False,
- engine: str = None,
- local: bool = None,
+ engine: Optional[str] = None,
+ local: Optional[bool] = None,
  schedule: typing.Union[
  str, mlrun.common.schemas.ScheduleCronTrigger, bool
  ] = None,
- timeout: int = None,
- source: str = None,
- cleanup_ttl: int = None,
- notifications: list[mlrun.model.Notification] = None,
+ timeout: Optional[int] = None,
+ source: Optional[str] = None,
+ cleanup_ttl: Optional[int] = None,
+ notifications: Optional[list[mlrun.model.Notification]] = None,
  workflow_runner_node_selector: typing.Optional[dict[str, str]] = None,
+ context: typing.Optional[mlrun.execution.MLClientCtx] = None,
  ) -> _PipelineRunStatus:
  """Run a workflow using kubeflow pipelines

@@ -3117,24 +3462,26 @@ class MlrunProject(ModelObj):
3117
3462
  This allows you to control and specify where the workflow runner pod will be scheduled.
3118
3463
  This setting is only relevant when the engine is set to 'remote' or for scheduled workflows,
3119
3464
  and it will be ignored if the workflow is not run on a remote engine.
3465
+ :param context: The MLRun execution context (``mlrun.execution.MLClientCtx``) to use for the workflow run.
3120
3466
  :returns: :py:class:`~mlrun.projects.pipelines._PipelineRunStatus` instance
3121
3467
  """
3122
3468
 
3123
3469
  arguments = arguments or {}
3124
3470
  need_repo = self.spec._need_repo()
3125
- if self.spec.repo and self.spec.repo.is_dirty():
3126
- msg = "You seem to have uncommitted git changes, use .push()"
3127
- if dirty or not need_repo:
3128
- logger.warning("WARNING!, " + msg)
3129
- else:
3130
- raise ProjectError(msg + " or dirty=True")
3471
+ if not dirty:
3472
+ if self.spec.repo and self.spec.repo.is_dirty():
3473
+ msg = "You seem to have uncommitted git changes, use .push()"
3474
+ if not need_repo:
3475
+ logger.warning("WARNING!, " + msg)
3476
+ else:
3477
+ raise ProjectError(msg + " or dirty=True")
3131
3478
 
3132
3479
  if need_repo and self.spec.repo and not self.spec.source:
3133
3480
  raise ProjectError(
3134
3481
  "Remote repo is not defined, use .create_remote() + push()"
3135
3482
  )
3136
3483
 
3137
- if engine not in ["remote"] and not schedule:
3484
+ if (engine is None or not engine.startswith("remote")) and not schedule:
3138
3485
  # For remote/scheduled runs there is no need to sync functions as they can be loaded dynamically during run
3139
3486
  self.sync_functions(always=sync, silent=True)
3140
3487
  if not self.spec._function_objects:
@@ -3203,6 +3550,7 @@ class MlrunProject(ModelObj):
3203
3550
  namespace=namespace,
3204
3551
  source=source,
3205
3552
  notifications=notifications,
3553
+ context=context,
3206
3554
  )
3207
3555
  # run is None when scheduling
3208
3556
  if run and run.state == mlrun_pipelines.common.models.RunStatuses.failed:
@@ -3280,7 +3628,7 @@ class MlrunProject(ModelObj):
3280
3628
 
3281
3629
  return db.create_project(self.to_dict())
3282
3630
 
3283
- def export(self, filepath=None, include_files: str = None):
3631
+ def export(self, filepath=None, include_files: Optional[str] = None):
3284
3632
  """save the project object into a yaml file or zip archive (default to project.yaml)
3285
3633
 
3286
3634
  By default, the project object is exported to a yaml file, when the filepath suffix is '.zip'
@@ -3324,54 +3672,104 @@ class MlrunProject(ModelObj):
3324
3672
 
3325
3673
  def set_model_monitoring_credentials(
3326
3674
  self,
3327
- access_key: Optional[str] = None,
3328
- endpoint_store_connection: Optional[str] = None,
3329
- stream_path: Optional[str] = None,
3330
- tsdb_connection: Optional[str] = None,
3675
+ *,
3676
+ tsdb_profile_name: str,
3677
+ stream_profile_name: str,
3331
3678
  replace_creds: bool = False,
3332
- ):
3679
+ ) -> None:
3333
3680
  """
3334
- Set the credentials that will be used by the project's model monitoring
3335
- infrastructure functions. Important to note that you have to set the credentials before deploying any
3336
- model monitoring or serving function.
3337
-
3338
- :param access_key: Model monitoring access key for managing user permissions.
3339
- :param endpoint_store_connection: Endpoint store connection string. By default, None. Options:
3340
-
3341
- * None - will be set from the system configuration.
3342
- * v3io - for v3io endpoint store, pass `v3io` and the system will generate the
3343
- exact path.
3344
- * MySQL/SQLite - for SQL endpoint store, provide the full connection string,
3345
- for example: mysql+pymysql://<username>:<password>@<host>:<port>/<db_name>
3346
- :param stream_path: Path to the model monitoring stream. By default, None. Options:
3347
-
3348
- * None - will be set from the system configuration.
3349
- * v3io - for v3io stream, pass `v3io` and the system will generate the exact
3350
- path.
3351
- * Kafka - for Kafka stream, provide the full connection string without custom
3352
- topic, for example kafka://<some_kafka_broker>:<port>.
3353
- :param tsdb_connection: Connection string to the time series database. By default, None.
3354
- Options:
3355
-
3356
- * None - will be set from the system configuration.
3357
- * v3io - for v3io stream, pass `v3io` and the system will generate the exact
3358
- path.
3359
- * TDEngine - for TDEngine tsdb, provide the full websocket connection URL,
3360
- for example taosws://<username>:<password>@<host>:<port>.
3361
- :param replace_creds: If True, will override the existing credentials.
3362
- Please keep in mind that if you already enabled model monitoring on
3363
- your project this action can cause data loose and will require redeploying
3364
- all model monitoring functions & model monitoring infra
3365
- & tracked model server.
3681
+ Set the credentials that will be used by the project's model monitoring infrastructure functions.
3682
+ Please note that you have to set the credentials before deploying any model monitoring application
3683
+ or a tracked serving function.
3684
+
3685
+ For example, the full flow for enabling model monitoring infrastructure with **TDEngine** and **Kafka**, is:
3686
+
3687
+ .. code-block:: python
3688
+
3689
+ import mlrun
3690
+ from mlrun.datastore.datastore_profile import (
3691
+ DatastoreProfileKafkaSource,
3692
+ DatastoreProfileTDEngine,
3693
+ )
3694
+
3695
+ project = mlrun.get_or_create_project("mm-infra-setup")
3696
+
3697
+ # Create and register TSDB profile
3698
+ tsdb_profile = DatastoreProfileTDEngine(
3699
+ name="my-tdengine",
3700
+ host="<tdengine-server-ip-address>",
3701
+ port=6041,
3702
+ user="username",
3703
+ password="<tdengine-password>",
3704
+ )
3705
+ project.register_datastore_profile(tsdb_profile)
3706
+
3707
+ # Create and register stream profile
3708
+ stream_profile = DatastoreProfileKafkaSource(
3709
+ name="my-kafka",
3710
+ brokers=["<kafka-broker-ip-address>:9094"],
3711
+ topics=[], # Keep the topics list empty
3712
+ ## SASL is supported
3713
+ # sasl_user="user1",
3714
+ # sasl_pass="<kafka-sasl-password>",
3715
+ )
3716
+ project.register_datastore_profile(stream_profile)
3717
+
3718
+ # Set model monitoring credentials and enable the infrastructure
3719
+ project.set_model_monitoring_credentials(
3720
+ tsdb_profile_name=tsdb_profile.name,
3721
+ stream_profile_name=stream_profile.name,
3722
+ )
3723
+ project.enable_model_monitoring()
3724
+
3725
+ Note that you will need to change the profiles if you want to use **V3IO** TSDB and stream:
3726
+
3727
+ .. code-block:: python
3728
+
3729
+ from mlrun.datastore.datastore_profile import DatastoreProfileV3io
3730
+
3731
+ # Create and register TSDB profile
3732
+ tsdb_profile = DatastoreProfileV3io(
3733
+ name="my-v3io-tsdb",
3734
+ )
3735
+ project.register_datastore_profile(tsdb_profile)
3736
+
3737
+ # Create and register stream profile
3738
+ stream_profile = DatastoreProfileV3io(
3739
+ name="my-v3io-stream",
3740
+ v3io_access_key=mlrun.mlconf.get_v3io_access_key(),
3741
+ )
3742
+ project.register_datastore_profile(stream_profile)
3743
+
3744
+ In the V3IO datastore, you must provide an explicit access key to the stream, but not to the TSDB.
3745
+
3746
+ :param tsdb_profile_name: The datastore profile name of the time-series database to be used in model
3747
+ monitoring. The supported profiles are:
3748
+
3749
+ * :py:class:`~mlrun.datastore.datastore_profile.DatastoreProfileV3io`
3750
+ * :py:class:`~mlrun.datastore.datastore_profile.DatastoreProfileTDEngine`
3751
+
3752
+ You need to register one of them, and pass the profile's name.
3753
+ :param stream_profile_name: The datastore profile name of the stream to be used in model monitoring.
3754
+ The supported profiles are:
3755
+
3756
+ * :py:class:`~mlrun.datastore.datastore_profile.DatastoreProfileV3io`
3757
+ * :py:class:`~mlrun.datastore.datastore_profile.DatastoreProfileKafkaSource`
3758
+
3759
+ You need to register one of them, and pass the profile's name.
3760
+ :param replace_creds: If ``True`` - override the existing credentials.
3761
+ Please keep in mind that if you have already enabled model monitoring
3762
+ on your project, replacing the credentials can cause data loss, and will
3763
+ require redeploying all the model monitoring functions, model monitoring
3764
+ infrastructure, and tracked model servers.
3366
3765
  """
3367
3766
  db = mlrun.db.get_run_db(secrets=self._secrets)
3767
+
3368
3768
  db.set_model_monitoring_credentials(
3369
3769
  project=self.name,
3370
3770
  credentials={
3371
- "access_key": access_key,
3372
- "endpoint_store_connection": endpoint_store_connection,
3373
- "stream_path": stream_path,
3374
- "tsdb_connection": tsdb_connection,
3771
+ "tsdb_profile_name": tsdb_profile_name,
3772
+ "stream_profile_name": stream_profile_name,
3375
3773
  },
3376
3774
  replace_creds=replace_creds,
3377
3775
  )
@@ -3380,35 +3778,109 @@ class MlrunProject(ModelObj):
3380
3778
  "Model monitoring credentials were set successfully. "
3381
3779
  "Please keep in mind that if you already had model monitoring functions "
3382
3780
  "/ model monitoring infra / tracked model server "
3383
- "deployed on your project, you will need to redeploy them."
3384
- "For redeploying the model monitoring infra, please use `enable_model_monitoring` API "
3385
- "and set `rebuild_images=True`"
3781
+ "deployed on your project, you will need to redeploy them. "
3782
+ "For redeploying the model monitoring infra, first disable it using "
3783
+ "`project.disable_model_monitoring()` and then enable it using `project.enable_model_monitoring()`."
3386
3784
  )
3387
3785
 
3786
+ def list_model_endpoints(
3787
+ self,
3788
+ names: Optional[Union[str, list[str]]] = None,
3789
+ model_name: Optional[str] = None,
3790
+ model_tag: Optional[str] = None,
3791
+ function_name: Optional[str] = None,
3792
+ function_tag: Optional[str] = None,
3793
+ labels: Optional[list[str]] = None,
3794
+ start: Optional[datetime.datetime] = None,
3795
+ end: Optional[datetime.datetime] = None,
3796
+ top_level: bool = False,
3797
+ uids: Optional[list[str]] = None,
3798
+ latest_only: bool = False,
3799
+ tsdb_metrics: bool = False,
3800
+ metric_list: Optional[list[str]] = None,
3801
+ ) -> mlrun.common.schemas.ModelEndpointList:
3802
+ """
3803
+ Returns a list of `ModelEndpoint` objects. Each `ModelEndpoint` object represents the current state of a
3804
+ model endpoint. This function supports filtering by the following parameters:
3805
+ 1) names
3806
+ 2) model_name
3807
+ 3) model_tag
3808
+ 4) function_name
3809
+ 5) function_tag
3810
+ 6) labels
3811
+ 7) top level
3812
+ 8) uids
3813
+ 9) start and end time, corresponding to the `created` field.
3814
+ By default, when no filters are applied, all available endpoints for the given project will be listed.
3815
+
3816
+ In addition, this function provides a facade for listing endpoint-related metrics. This facade is time-based
3817
+ and depends on the 'start' and 'end' parameters.
3818
+
3819
+ :param names: The name(s) of the model endpoint(s) to filter by
3820
+ :param model_name: The name of the model to filter by
+ :param model_tag: The tag of the model to filter by
3821
+ :param function_name: The name of the function to filter by
3822
+ :param function_tag: The tag of the function to filter by
3823
+ :param labels: Filter model endpoints by label key-value pairs or key existence. This can be provided as:
3824
+ - A dictionary in the format `{"label": "value"}` to match specific label key-value pairs,
3825
+ or `{"label": None}` to check for key existence.
3826
+ - A list of strings formatted as `"label=value"` to match specific label key-value pairs,
3827
+ or just `"label"` for key existence.
3828
+ - A comma-separated string formatted as `"label1=value1,label2"` to match entities with
3829
+ the specified key-value pairs or key existence.
3830
+ :param start: The start time to filter by. Corresponding to the `created` field.
3831
+ :param end: The end time to filter by. Corresponding to the `created` field.
3832
+ :param top_level: If True, will return only routers and endpoints that are NOT children of any router.
3833
+ :param uids: If passed, will return only the `ModelEndpoint` objects whose uid is in this list.
3834
+ :param tsdb_metrics: When True, the time series metrics will be added to the output
3835
+ of the resulting model endpoint objects.
3836
+ :param metric_list: List of metrics to include from the time series DB. Defaults to all metrics.
3837
+ If tsdb_metrics=False, this parameter will be ignored and no tsdb metrics
3838
+ will be included.
3839
+
3840
+ :returns: A list of `ModelEndpoint` objects.
3841
+ """
3842
+ db = mlrun.db.get_run_db(secrets=self._secrets)
3843
+ return db.list_model_endpoints(
3844
+ project=self.name,
3845
+ names=names,
3846
+ model_name=model_name,
3847
+ model_tag=model_tag,
3848
+ function_name=function_name,
3849
+ function_tag=function_tag,
3850
+ labels=labels,
3851
+ start=start,
3852
+ end=end,
3853
+ top_level=top_level,
3854
+ uids=uids,
3855
+ latest_only=latest_only,
3856
+ tsdb_metrics=tsdb_metrics,
3857
+ metric_list=metric_list,
3858
+ )
3859
+
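A minimal usage sketch for the new `list_model_endpoints` facade (the project and model names below are hypothetical)::

    import datetime

    import mlrun

    project = mlrun.get_or_create_project("my-project")  # hypothetical project name
    now = datetime.datetime.now(tz=datetime.timezone.utc)

    # endpoints created in the last day for one model, with TSDB metrics attached
    endpoints = project.list_model_endpoints(
        model_name="churn-model",  # hypothetical model name
        start=now - datetime.timedelta(days=1),
        end=now,
        tsdb_metrics=True,
    )
    print(endpoints)  # a ModelEndpointList schema object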
3388
3860
  def run_function(
3389
3861
  self,
3390
3862
  function: typing.Union[str, mlrun.runtimes.BaseRuntime],
3391
- handler: str = None,
3863
+ handler: Optional[str] = None,
3392
3864
  name: str = "",
3393
- params: dict = None,
3394
- hyperparams: dict = None,
3865
+ params: Optional[dict] = None,
3866
+ hyperparams: Optional[dict] = None,
3395
3867
  hyper_param_options: mlrun.model.HyperParamOptions = None,
3396
- inputs: dict = None,
3397
- outputs: list[str] = None,
3868
+ inputs: Optional[dict] = None,
3869
+ outputs: Optional[list[str]] = None,
3398
3870
  workdir: str = "",
3399
- labels: dict = None,
3871
+ labels: Optional[dict] = None,
3400
3872
  base_task: mlrun.model.RunTemplate = None,
3401
3873
  watch: bool = True,
3402
- local: bool = None,
3403
- verbose: bool = None,
3404
- selector: str = None,
3405
- auto_build: bool = None,
3874
+ local: Optional[bool] = None,
3875
+ verbose: Optional[bool] = None,
3876
+ selector: Optional[str] = None,
3877
+ auto_build: Optional[bool] = None,
3406
3878
  schedule: typing.Union[str, mlrun.common.schemas.ScheduleCronTrigger] = None,
3407
- artifact_path: str = None,
3408
- notifications: list[mlrun.model.Notification] = None,
3879
+ artifact_path: Optional[str] = None,
3880
+ notifications: Optional[list[mlrun.model.Notification]] = None,
3409
3881
  returns: Optional[list[Union[str, dict[str, str]]]] = None,
3410
3882
  builder_env: Optional[dict] = None,
3411
- reset_on_run: bool = None,
3883
+ reset_on_run: Optional[bool] = None,
3412
3884
  ) -> typing.Union[mlrun.model.RunObject, PipelineNodeWrapper]:
3413
3885
  """Run a local or remote task as part of a local/kubeflow pipeline
3414
3886
 
@@ -3501,18 +3973,18 @@ class MlrunProject(ModelObj):
3501
3973
  def build_function(
3502
3974
  self,
3503
3975
  function: typing.Union[str, mlrun.runtimes.BaseRuntime],
3504
- with_mlrun: bool = None,
3976
+ with_mlrun: Optional[bool] = None,
3505
3977
  skip_deployed: bool = False,
3506
- image: str = None,
3507
- base_image: str = None,
3508
- commands: list = None,
3509
- secret_name: str = None,
3510
- requirements: typing.Union[str, list[str]] = None,
3511
- mlrun_version_specifier: str = None,
3512
- builder_env: dict = None,
3978
+ image: Optional[str] = None,
3979
+ base_image: Optional[str] = None,
3980
+ commands: Optional[list] = None,
3981
+ secret_name: Optional[str] = None,
3982
+ requirements: Optional[typing.Union[str, list[str]]] = None,
3983
+ mlrun_version_specifier: Optional[str] = None,
3984
+ builder_env: Optional[dict] = None,
3513
3985
  overwrite_build_params: bool = False,
3514
- requirements_file: str = None,
3515
- extra_args: str = None,
3986
+ requirements_file: Optional[str] = None,
3987
+ extra_args: Optional[str] = None,
3516
3988
  force_build: bool = False,
3517
3989
  ) -> typing.Union[BuildStatus, PipelineNodeWrapper]:
3518
3990
  """deploy ML function, build container with its dependencies
@@ -3531,8 +4003,10 @@ class MlrunProject(ModelObj):
3531
4003
  e.g. builder_env={"GIT_TOKEN": token}, does not work yet in KFP
3532
4004
  :param overwrite_build_params: Overwrite existing build configuration (currently applies to
3533
4005
  requirements and commands)
4006
+
3534
4007
  * False: The new params are merged with the existing
3535
4008
  * True: The existing params are replaced by the new ones
4009
+
3536
4010
  :param extra_args: A string containing additional builder arguments in the format of command-line options,
3537
4011
  e.g. extra_args="--skip-tls-verify --build-arg A=val"
3538
4012
  :param force_build: force building the image, even when no changes were made
@@ -3557,18 +4031,18 @@ class MlrunProject(ModelObj):
3557
4031
 
3558
4032
  def build_config(
3559
4033
  self,
3560
- image: str = None,
4034
+ image: Optional[str] = None,
3561
4035
  set_as_default: bool = False,
3562
- with_mlrun: bool = None,
3563
- base_image: str = None,
3564
- commands: list = None,
3565
- secret_name: str = None,
3566
- requirements: typing.Union[str, list[str]] = None,
4036
+ with_mlrun: Optional[bool] = None,
4037
+ base_image: Optional[str] = None,
4038
+ commands: Optional[list] = None,
4039
+ secret_name: Optional[str] = None,
4040
+ requirements: Optional[typing.Union[str, list[str]]] = None,
3567
4041
  overwrite_build_params: bool = False,
3568
- requirements_file: str = None,
3569
- builder_env: dict = None,
3570
- extra_args: str = None,
3571
- source_code_target_dir: str = None,
4042
+ requirements_file: Optional[str] = None,
4043
+ builder_env: Optional[dict] = None,
4044
+ extra_args: Optional[str] = None,
4045
+ source_code_target_dir: Optional[str] = None,
3572
4046
  ):
3573
4047
  """specify builder configuration for the project
3574
4048
 
@@ -3583,8 +4057,10 @@ class MlrunProject(ModelObj):
3583
4057
  :param requirements_file: requirements file to install on the built image
3584
4058
  :param overwrite_build_params: Overwrite existing build configuration (currently applies to
3585
4059
  requirements and commands)
4060
+
3586
4061
  * False: The new params are merged with the existing
3587
4062
  * True: The existing params are replaced by the new ones
4063
+
3588
4064
  :param builder_env: Kaniko builder pod env vars dict (for config/credentials)
3589
4065
  e.g. builder_env={"GIT_TOKEN": token}, does not work yet in KFP
3590
4066
  :param extra_args: A string containing additional builder arguments in the format of command-line options,
@@ -3593,9 +4069,9 @@ class MlrunProject(ModelObj):
3593
4069
  (by default `/home/mlrun_code`)
3594
4070
  """
3595
4071
  if not overwrite_build_params:
3596
- # TODO: change overwrite_build_params default to True in 1.8.0
4072
+ # TODO: change overwrite_build_params default to True in 1.10.0
3597
4073
  warnings.warn(
3598
- "The `overwrite_build_params` parameter default will change from 'False' to 'True' in 1.8.0.",
4074
+ "The `overwrite_build_params` parameter default will change from 'False' to 'True' in 1.10.0.",
3599
4075
  mlrun.utils.OverwriteBuildParamsWarning,
3600
4076
  )
3601
4077
  default_image_name = mlrun.mlconf.default_project_image_name.format(
@@ -3622,19 +4098,19 @@ class MlrunProject(ModelObj):
3622
4098
 
3623
4099
  def build_image(
3624
4100
  self,
3625
- image: str = None,
4101
+ image: Optional[str] = None,
3626
4102
  set_as_default: bool = True,
3627
- with_mlrun: bool = None,
3628
- base_image: str = None,
3629
- commands: list = None,
3630
- secret_name: str = None,
3631
- requirements: typing.Union[str, list[str]] = None,
3632
- mlrun_version_specifier: str = None,
3633
- builder_env: dict = None,
4103
+ with_mlrun: Optional[bool] = None,
4104
+ base_image: Optional[str] = None,
4105
+ commands: Optional[list] = None,
4106
+ secret_name: Optional[str] = None,
4107
+ requirements: Optional[typing.Union[str, list[str]]] = None,
4108
+ mlrun_version_specifier: Optional[str] = None,
4109
+ builder_env: Optional[dict] = None,
3634
4110
  overwrite_build_params: bool = False,
3635
- requirements_file: str = None,
3636
- extra_args: str = None,
3637
- target_dir: str = None,
4111
+ requirements_file: Optional[str] = None,
4112
+ extra_args: Optional[str] = None,
4113
+ target_dir: Optional[str] = None,
3638
4114
  ) -> typing.Union[BuildStatus, PipelineNodeWrapper]:
3639
4115
  """Builder docker image for the project, based on the project's build config. Parameters allow to override
3640
4116
  the build config.
@@ -3643,7 +4119,7 @@ class MlrunProject(ModelObj):
3643
4119
 
3644
4120
  :param image: target image name/path. If not specified the project's existing `default_image` name will be
3645
4121
  used. If not set, the `mlconf.default_project_image_name` value will be used
3646
- :param set_as_default: set `image` to be the project's default image (default False)
4122
+ :param set_as_default: set `image` to be the project's default image (default True)
3647
4123
  :param with_mlrun: add the current mlrun package to the container build
3648
4124
  :param base_image: base image name/path (commands and source code will be added to it) defaults to
3649
4125
  mlrun.mlconf.default_base_image
@@ -3656,8 +4132,10 @@ class MlrunProject(ModelObj):
3656
4132
  e.g. builder_env={"GIT_TOKEN": token}, does not work yet in KFP
3657
4133
  :param overwrite_build_params: Overwrite existing build configuration (currently applies to
3658
4134
  requirements and commands)
4135
+
3659
4136
  * False: The new params are merged with the existing
3660
4137
  * True: The existing params are replaced by the new ones
4138
+
3661
4139
  :param extra_args: A string containing additional builder arguments in the format of command-line options,
3662
4140
  e.g. extra_args="--skip-tls-verify --build-arg A=val"
3663
4141
  :param target_dir: Path on the image where source code would be extracted (by default `/home/mlrun_code`)
@@ -3670,9 +4148,9 @@ class MlrunProject(ModelObj):
3670
4148
  )
3671
4149
 
3672
4150
  if not overwrite_build_params:
3673
- # TODO: change overwrite_build_params default to True in 1.8.0
4151
+ # TODO: change overwrite_build_params default to True in 1.10.0
3674
4152
  warnings.warn(
3675
- "The `overwrite_build_params` parameter default will change from 'False' to 'True' in 1.8.0.",
4153
+ "The `overwrite_build_params` parameter default will change from 'False' to 'True' in 1.10.0.",
3676
4154
  mlrun.utils.OverwriteBuildParamsWarning,
3677
4155
  )
3678
4156
 
@@ -3740,12 +4218,12 @@ class MlrunProject(ModelObj):
3740
4218
  def deploy_function(
3741
4219
  self,
3742
4220
  function: typing.Union[str, mlrun.runtimes.BaseRuntime],
3743
- models: list = None,
3744
- env: dict = None,
3745
- tag: str = None,
3746
- verbose: bool = None,
3747
- builder_env: dict = None,
3748
- mock: bool = None,
4221
+ models: Optional[list] = None,
4222
+ env: Optional[dict] = None,
4223
+ tag: Optional[str] = None,
4224
+ verbose: Optional[bool] = None,
4225
+ builder_env: Optional[dict] = None,
4226
+ mock: Optional[bool] = None,
3749
4227
  ) -> typing.Union[DeployStatus, PipelineNodeWrapper]:
3750
4228
  """deploy real-time (nuclio based) functions
3751
4229
 
@@ -3768,18 +4246,21 @@ class MlrunProject(ModelObj):
3768
4246
  mock=mock,
3769
4247
  )
3770
4248
 
3771
- def get_artifact(self, key, tag=None, iter=None, tree=None):
4249
+ def get_artifact(
4250
+ self, key, tag=None, iter=None, tree=None, uid=None
4251
+ ) -> typing.Optional[Artifact]:
3772
4252
  """Return an artifact object
3773
4253
 
3774
- :param key: artifact key
3775
- :param tag: version tag
3776
- :param iter: iteration number (for hyper-param tasks)
3777
- :param tree: the producer id (tree)
4254
+ :param key: Artifact key
4255
+ :param tag: Version tag
4256
+ :param iter: Iteration number (for hyper-param tasks)
4257
+ :param tree: The producer id (tree)
4258
+ :param uid: The artifact uid
3778
4259
  :return: Artifact object
3779
4260
  """
3780
4261
  db = mlrun.db.get_run_db(secrets=self._secrets)
3781
4262
  artifact = db.read_artifact(
3782
- key, tag, iter=iter, project=self.metadata.name, tree=tree
4263
+ key, tag, iter=iter, project=self.metadata.name, tree=tree, uid=uid
3783
4264
  )
3784
4265
 
3785
4266
  # in tests, if an artifact is not found, the db returns None
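A short sketch of the new `uid` parameter (the key and uid values below are hypothetical); it fetches one specific artifact version directly instead of resolving a tag::

    # fetch a specific historical version of an artifact by its uid
    artifact = project.get_artifact("model-report", uid="4f2c1a9e")  # hypothetical key and uid
    if artifact:
        print(artifact.uri)  # store uri of the fetched version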
@@ -3792,18 +4273,28 @@ class MlrunProject(ModelObj):
3792
4273
  self,
3793
4274
  name=None,
3794
4275
  tag=None,
3795
- labels: Optional[Union[dict[str, str], list[str]]] = None,
4276
+ labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
3796
4277
  since=None,
3797
4278
  until=None,
3798
- iter: int = None,
4279
+ iter: Optional[int] = None,
3799
4280
  best_iteration: bool = False,
3800
- kind: str = None,
4281
+ kind: Optional[str] = None,
3801
4282
  category: typing.Union[str, mlrun.common.schemas.ArtifactCategories] = None,
3802
- tree: str = None,
3803
- limit: int = None,
4283
+ tree: Optional[str] = None,
4284
+ limit: Optional[int] = None,
3804
4285
  format_: Optional[
3805
4286
  mlrun.common.formatters.ArtifactFormat
3806
4287
  ] = mlrun.common.formatters.ArtifactFormat.full,
4288
+ partition_by: Optional[
4289
+ Union[mlrun.common.schemas.ArtifactPartitionByField, str]
4290
+ ] = None,
4291
+ rows_per_partition: int = 1,
4292
+ partition_sort_by: Optional[
4293
+ Union[mlrun.common.schemas.SortField, str]
4294
+ ] = mlrun.common.schemas.SortField.updated,
4295
+ partition_order: Union[
4296
+ mlrun.common.schemas.OrderType, str
4297
+ ] = mlrun.common.schemas.OrderType.desc,
3807
4298
  ) -> mlrun.lists.ArtifactList:
3808
4299
  """List artifacts filtered by various parameters.
3809
4300
 
@@ -3813,7 +4304,7 @@ class MlrunProject(ModelObj):
3813
4304
  Examples::
3814
4305
 
3815
4306
  # Get latest version of all artifacts in project
3816
- latest_artifacts = project.list_artifacts("", tag="latest")
4307
+ latest_artifacts = project.list_artifacts(tag="latest")
3817
4308
  # check different artifact versions for a specific artifact, return as objects list
3818
4309
  result_versions = project.list_artifacts("results", tag="*").to_objects()
3819
4310
 
@@ -3821,8 +4312,15 @@ class MlrunProject(ModelObj):
3821
4312
  case-sensitive. This means that querying for ``~name`` may return artifacts named
3822
4313
  ``my_Name_1`` or ``surname``.
3823
4314
  :param tag: Return artifacts assigned this tag.
3824
- :param labels: Return artifacts that have these labels. Labels can either be a dictionary {"label": "value"} or
3825
- a list of "label=value" (match label key and value) or "label" (match just label key) strings.
4315
+ :param labels: Filter artifacts by label key-value pairs or key existence. This can be provided as:
4316
+
4317
+ - A dictionary in the format `{"label": "value"}` to match specific label key-value pairs,
4318
+ or `{"label": None}` to check for key existence.
4319
+ - A list of strings formatted as `"label=value"` to match specific label key-value pairs,
4320
+ or just `"label"` for key existence.
4321
+ - A comma-separated string formatted as `"label1=value1,label2"` to match entities with
4322
+ the specified key-value pairs or key existence.
4323
+
3826
4324
  :param since: Not in use in :py:class:`HTTPRunDB`.
3827
4325
  :param until: Not in use in :py:class:`HTTPRunDB`.
3828
4326
  :param iter: Return artifacts from a specific iteration (where ``iter=0`` means the root iteration). If
@@ -3833,10 +4331,25 @@ class MlrunProject(ModelObj):
3833
4331
  :param kind: Return artifacts of the requested kind.
3834
4332
  :param category: Return artifacts of the requested category.
3835
4333
  :param tree: Return artifacts of the requested tree.
3836
- :param limit: Maximum number of artifacts to return.
4334
+ :param limit: Deprecated - Maximum number of artifacts to return (will be removed in 1.11.0).
3837
4335
  :param format_: The format in which to return the artifacts. Default is 'full'.
4336
+ :param partition_by: Field to group results by. When `partition_by` is specified, the `partition_sort_by`
4337
+ parameter must be provided as well.
4338
+ :param rows_per_partition: How many top rows (per sorting defined by `partition_sort_by` and `partition_order`)
4339
+ to return per group. Default value is 1.
4340
+ :param partition_sort_by: What field to sort the results by, within each partition defined by `partition_by`.
4341
+ Currently the only allowed values are `created` and `updated`.
4342
+ :param partition_order: Order of sorting within partitions - `asc` or `desc`. Default is `desc`.
3838
4343
  """
3839
4344
  db = mlrun.db.get_run_db(secrets=self._secrets)
4345
+
4346
+ if limit:
4347
+ # TODO: Remove this in 1.11.0
4348
+ warnings.warn(
4349
+ "'limit' is deprecated and will be removed in 1.11.0. Use 'page' and 'page_size' instead.",
4350
+ FutureWarning,
4351
+ )
4352
+
3840
4353
  return db.list_artifacts(
3841
4354
  name,
3842
4355
  self.metadata.name,
@@ -3851,19 +4364,87 @@ class MlrunProject(ModelObj):
3851
4364
  tree=tree,
3852
4365
  format_=format_,
3853
4366
  limit=limit,
4367
+ partition_by=partition_by,
4368
+ rows_per_partition=rows_per_partition,
4369
+ partition_sort_by=partition_sort_by,
4370
+ partition_order=partition_order,
4371
+ )
4372
+
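A brief sketch of the new partitioning parameters; the partition field value is an assumption, so check `mlrun.common.schemas.ArtifactPartitionByField` for the supported fields. It keeps only the most recently updated row per group::

    # one row per partition, sorted by the `updated` field in descending order
    artifacts = project.list_artifacts(
        partition_by="name",  # assumed partition field; see ArtifactPartitionByField for valid values
        rows_per_partition=1,
        partition_sort_by="updated",
        partition_order="desc",
    )
    print(artifacts.to_df())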
4373
+ def paginated_list_artifacts(
4374
+ self,
4375
+ *args,
4376
+ page: Optional[int] = None,
4377
+ page_size: Optional[int] = None,
4378
+ page_token: Optional[str] = None,
4379
+ **kwargs,
4380
+ ) -> tuple[mlrun.lists.ArtifactList, Optional[str]]:
4381
+ """List artifacts with support for pagination and various filtering options.
4382
+
4383
+ This method retrieves a paginated list of artifacts based on the specified filter parameters.
4384
+ Pagination is controlled using the `page`, `page_size`, and `page_token` parameters. The method
4385
+ will return a list of artifacts that match the filtering criteria provided.
4386
+
4387
+ The returned result is an `ArtifactList` (list of dict), use `.to_objects()` to convert it to a list of
4388
+ Artifact objects, `.show()` to view graphically in Jupyter, and `.to_df()` to convert to a DataFrame.
4389
+
4390
+ For detailed information about the parameters, refer to the list_artifacts method:
4391
+ See :py:func:`~list_artifacts` for more details.
4392
+
4393
+ Examples::
4394
+
4395
+ # Fetch first page of artifacts with page size of 5
4396
+ artifacts, token = project.paginated_list_artifacts("results", page_size=5)
4397
+ # Fetch next page using the pagination token from the previous response
4398
+ artifacts, token = project.paginated_list_artifacts("results", page_token=token)
4399
+ # Fetch artifacts for a specific page (e.g., page 3)
4400
+ artifacts, token = project.paginated_list_artifacts(
4401
+ "results", page=3, page_size=5
4402
+ )
4403
+
4404
+ # Automatically iterate over all pages without explicitly specifying the page number
4405
+ artifacts = []
4406
+ token = None
4407
+ while True:
4408
+ page_artifacts, token = project.paginated_list_artifacts(
4409
+ page_token=token, page_size=5
4410
+ )
4411
+ artifacts.extend(page_artifacts)
4412
+
4413
+ # If token is None and page_artifacts is empty, we've reached the end (no more artifacts).
4414
+ # If token is None and page_artifacts is not empty, we've fetched the last page of artifacts.
4415
+ if not token:
4416
+ break
4417
+ print(f"Total artifacts retrieved: {len(artifacts)}")
4418
+
4419
+ :param page: The page number to retrieve. If not provided, the next page will be retrieved.
4420
+ :param page_size: The number of items per page to retrieve. Up to `page_size` responses are expected.
4421
+ Defaults to `mlrun.mlconf.httpdb.pagination.default_page_size` if not provided.
4422
+ :param page_token: A pagination token used to retrieve the next page of results. Should not be provided
4423
+ for the first request.
4424
+
4425
+ :returns: A tuple containing the list of artifacts and an optional `page_token` for pagination.
4426
+ """
4427
+ db = mlrun.db.get_run_db(secrets=self._secrets)
4428
+ return db.paginated_list_artifacts(
4429
+ *args,
4430
+ project=self.metadata.name,
4431
+ page=page,
4432
+ page_size=page_size,
4433
+ page_token=page_token,
4434
+ **kwargs,
3854
4435
  )
3855
4436
 
3856
4437
  def list_models(
3857
4438
  self,
3858
4439
  name=None,
3859
4440
  tag=None,
3860
- labels: Optional[Union[dict[str, str], list[str]]] = None,
4441
+ labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
3861
4442
  since=None,
3862
4443
  until=None,
3863
- iter: int = None,
4444
+ iter: Optional[int] = None,
3864
4445
  best_iteration: bool = False,
3865
- tree: str = None,
3866
- limit: int = None,
4446
+ tree: Optional[str] = None,
4447
+ limit: Optional[int] = None,
3867
4448
  format_: Optional[
3868
4449
  mlrun.common.formatters.ArtifactFormat
3869
4450
  ] = mlrun.common.formatters.ArtifactFormat.full,
@@ -3873,15 +4454,22 @@ class MlrunProject(ModelObj):
3873
4454
  Examples::
3874
4455
 
3875
4456
  # Get latest version of all models in project
3876
- latest_models = project.list_models("", tag="latest")
4457
+ latest_models = project.list_models(tag="latest")
3877
4458
 
3878
4459
 
3879
4460
  :param name: Name of artifacts to retrieve. Name with '~' prefix is used as a like query, and is not
3880
4461
  case-sensitive. This means that querying for ``~name`` may return artifacts named
3881
4462
  ``my_Name_1`` or ``surname``.
3882
4463
  :param tag: Return artifacts assigned this tag.
3883
- :param labels: Return artifacts that have these labels. Labels can either be a dictionary {"label": "value"} or
3884
- a list of "label=value" (match label key and value) or "label" (match just label key) strings.
4464
+ :param labels: Filter model artifacts by label key-value pairs or key existence. This can be provided as:
4465
+
4466
+ - A dictionary in the format `{"label": "value"}` to match specific label key-value pairs,
4467
+ or `{"label": None}` to check for key existence.
4468
+ - A list of strings formatted as `"label=value"` to match specific label key-value pairs,
4469
+ or just `"label"` for key existence.
4470
+ - A comma-separated string formatted as `"label1=value1,label2"` to match entities with
4471
+ the specified key-value pairs or key existence.
4472
+
3885
4473
  :param since: Not in use in :py:class:`HTTPRunDB`.
3886
4474
  :param until: Not in use in :py:class:`HTTPRunDB`.
3887
4475
  :param iter: Return artifacts from a specific iteration (where ``iter=0`` means the root iteration). If
@@ -3890,7 +4478,7 @@ class MlrunProject(ModelObj):
3890
4478
  artifacts generated from a hyper-param run. If only a single iteration exists, will return the artifact
3891
4479
  from that iteration. If using ``best_iter``, the ``iter`` parameter must not be used.
3892
4480
  :param tree: Return artifacts of the requested tree.
3893
- :param limit: Maximum number of artifacts to return.
4481
+ :param limit: Deprecated - Maximum number of artifacts to return (will be removed in 1.11.0).
3894
4482
  :param format_: The format in which to return the artifacts. Default is 'full'.
3895
4483
  """
3896
4484
  db = mlrun.db.get_run_db(secrets=self._secrets)
@@ -3903,13 +4491,80 @@ class MlrunProject(ModelObj):
3903
4491
  until=until,
3904
4492
  iter=iter,
3905
4493
  best_iteration=best_iteration,
3906
- kind="model",
4494
+ kind=mlrun.artifacts.model.ModelArtifact.kind,
3907
4495
  tree=tree,
3908
4496
  limit=limit,
3909
4497
  format_=format_,
3910
4498
  ).to_objects()
3911
4499
 
3912
- def list_functions(self, name=None, tag=None, labels=None):
4500
+ def paginated_list_models(
4501
+ self,
4502
+ *args,
4503
+ page: Optional[int] = None,
4504
+ page_size: Optional[int] = None,
4505
+ page_token: Optional[str] = None,
4506
+ **kwargs,
4507
+ ) -> tuple[mlrun.lists.ArtifactList, Optional[str]]:
4508
+ """List models in project with support for pagination and various filtering options.
4509
+
4510
+ This method retrieves a paginated list of model artifacts based on the specified filter parameters.
4511
+ Pagination is controlled using the `page`, `page_size`, and `page_token` parameters. The method
4512
+ will return a list of artifacts that match the filtering criteria provided.
4513
+
4514
+ For detailed information about the parameters, refer to the list_models method:
4515
+ See :py:func:`~list_models` for more details.
4516
+
4517
+ Examples::
4518
+
4519
+ # Fetch first page of artifacts with page size of 5
4520
+ artifacts, token = project.paginated_list_models("results", page_size=5)
4521
+ # Fetch next page using the pagination token from the previous response
4522
+ artifacts, token = project.paginated_list_models("results", page_token=token)
4523
+ # Fetch artifacts for a specific page (e.g., page 3)
4524
+ artifacts, token = project.paginated_list_models("results", page=3, page_size=5)
4525
+
4526
+ # Automatically iterate over all pages without explicitly specifying the page number
4527
+ artifacts = []
4528
+ token = None
4529
+ while True:
4530
+ page_artifacts, token = project.paginated_list_models(
4531
+ page_token=token, page_size=5
4532
+ )
4533
+ artifacts.extend(page_artifacts)
4534
+
4535
+ # If token is None and page_artifacts is empty, we've reached the end (no more artifacts).
4536
+ # If token is None and page_artifacts is not empty, we've fetched the last page of artifacts.
4537
+ if not token:
4538
+ break
4539
+ print(f"Total artifacts retrieved: {len(artifacts)}")
4540
+
4541
+ :param page: The page number to retrieve. If not provided, the next page will be retrieved.
4542
+ :param page_size: The number of items per page to retrieve. Up to `page_size` responses are expected.
4543
+ Defaults to `mlrun.mlconf.httpdb.pagination.default_page_size` if not provided.
4544
+ :param page_token: A pagination token used to retrieve the next page of results. Should not be provided
4545
+ for the first request.
4546
+
4547
+ :returns: A tuple containing the list of artifacts and an optional `page_token` for pagination.
4548
+ """
4549
+ db = mlrun.db.get_run_db(secrets=self._secrets)
4550
+ return db.paginated_list_artifacts(
4551
+ *args,
4552
+ project=self.metadata.name,
4553
+ kind=mlrun.artifacts.model.ModelArtifact.kind,
4554
+ page=page,
4555
+ page_size=page_size,
4556
+ page_token=page_token,
4557
+ **kwargs,
4558
+ )
4559
+
4560
+ def list_functions(
4561
+ self,
4562
+ name: Optional[str] = None,
4563
+ tag: Optional[str] = None,
4564
+ labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
4565
+ kind: Optional[str] = None,
4566
+ format_: Optional[str] = None,
4567
+ ):
3913
4568
  """Retrieve a list of functions, filtered by specific criteria.
3914
4569
 
3915
4570
  example::
@@ -3919,20 +4574,98 @@ class MlrunProject(ModelObj):
3919
4574
 
3920
4575
  :param name: Return only functions with a specific name.
3921
4576
  :param tag: Return function versions with specific tags. To return only tagged functions, set tag to ``"*"``.
3922
- :param labels: Return functions that have specific labels assigned to them.
4577
+ :param labels: Filter functions by label key-value pairs or key existence. This can be provided as:
4578
+
4579
+ - A dictionary in the format `{"label": "value"}` to match specific label key-value pairs,
4580
+ or `{"label": None}` to check for key existence.
4581
+ - A list of strings formatted as `"label=value"` to match specific label key-value pairs,
4582
+ or just `"label"` for key existence.
4583
+ - A comma-separated string formatted as `"label1=value1,label2"` to match entities with
4584
+ the specified key-value pairs or key existence.
4585
+
4586
+ :param kind: Return functions of the specified kind. If not provided, all function kinds will be returned.
4587
+ :param format_: The format in which to return the functions. Default is 'full'.
3923
4588
  :returns: List of function objects.
3924
4589
  """
3925
4590
  db = mlrun.db.get_run_db(secrets=self._secrets)
3926
- functions = db.list_functions(name, self.metadata.name, tag=tag, labels=labels)
4591
+ functions = db.list_functions(
4592
+ name,
4593
+ project=self.metadata.name,
4594
+ tag=tag,
4595
+ kind=kind,
4596
+ labels=labels,
4597
+ format_=format_,
4598
+ )
3927
4599
  if functions:
3928
4600
  # convert dict to function objects
3929
4601
  return [mlrun.new_function(runtime=func) for func in functions]
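A small sketch of the extended `list_functions` filters, using MLRun's standard "job" runtime kind and a hypothetical label::

    # only job-kind functions that carry the owner label
    job_functions = project.list_functions(kind="job", labels={"owner": "data-team"})
    for fn in job_functions or []:
        print(fn.metadata.name, fn.kind)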
3930
4602
 
4603
+ def paginated_list_functions(
4604
+ self,
4605
+ *args,
4606
+ page: Optional[int] = None,
4607
+ page_size: Optional[int] = None,
4608
+ page_token: Optional[str] = None,
4609
+ **kwargs,
4610
+ ) -> tuple[list, Optional[str]]:
4611
+ """List functions with support for pagination and various filtering options.
4612
+
4613
+ This method retrieves a paginated list of functions based on the specified filter parameters.
4614
+ Pagination is controlled using the `page`, `page_size`, and `page_token` parameters. The method
4615
+ will return a list of functions that match the filtering criteria provided.
4616
+
4617
+ For detailed information about the parameters, refer to the list_functions method:
4618
+ See :py:func:`~list_functions` for more details.
4619
+
4620
+ Examples::
4621
+
4622
+ # Fetch first page of functions with page size of 5
4623
+ functions, token = project.paginated_list_functions(page_size=5)
4624
+ # Fetch next page using the pagination token from the previous response
4625
+ functions, token = project.paginated_list_functions(page_token=token)
4626
+ # Fetch functions for a specific page (e.g., page 3)
4627
+ functions, token = project.paginated_list_functions(page=3, page_size=5)
4628
+
4629
+ # Automatically iterate over all pages without explicitly specifying the page number
4630
+ functions = []
4631
+ token = None
4632
+ while True:
4633
+ page_functions, token = project.paginated_list_functions(
4634
+ page_token=token, page_size=5
4635
+ )
4636
+ functions.extend(page_functions)
4637
+
4638
+ # If token is None and page_functions is empty, we've reached the end (no more functions).
4639
+ # If token is None and page_functions is not empty, we've fetched the last page of functions.
4640
+ if not token:
4641
+ break
4642
+ print(f"Total functions retrieved: {len(functions)}")
4643
+
4644
+ :param page: The page number to retrieve. If not provided, the next page will be retrieved.
4645
+ :param page_size: The number of items per page to retrieve. Up to `page_size` responses are expected.
4646
+ Defaults to `mlrun.mlconf.httpdb.pagination.default_page_size` if not provided.
4647
+ :param page_token: A pagination token used to retrieve the next page of results. Should not be provided
4648
+ for the first request.
4649
+
4650
+ :returns: A tuple containing the list of functions and an optional `page_token` for pagination.
4651
+ """
4652
+ db = mlrun.db.get_run_db(secrets=self._secrets)
4653
+ functions, token = db.paginated_list_functions(
4654
+ *args,
4655
+ project=self.metadata.name,
4656
+ page=page,
4657
+ page_size=page_size,
4658
+ page_token=page_token,
4659
+ **kwargs,
4660
+ )
4661
+ # convert dict to function objects
4662
+ return [mlrun.new_function(runtime=func) for func in functions], token
4663
+
3931
4664
  def list_model_monitoring_functions(
3932
4665
  self,
3933
4666
  name: Optional[str] = None,
3934
4667
  tag: Optional[str] = None,
3935
- labels: Optional[list[str]] = None,
4668
+ labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
3936
4669
  ) -> Optional[list]:
3937
4670
  """
3938
4671
  Retrieve a list of all the model monitoring functions.
@@ -3942,7 +4675,14 @@ class MlrunProject(ModelObj):
3942
4675
 
3943
4676
  :param name: Return only functions with a specific name.
3944
4677
  :param tag: Return function versions with specific tags.
3945
- :param labels: Return functions that have specific labels assigned to them.
4678
+ :param labels: Filter functions by label key-value pairs or key existence. This can be provided as:
4679
+
4680
+ - A dictionary in the format `{"label": "value"}` to match specific label key-value pairs,
4681
+ or `{"label": None}` to check for key existence.
4682
+ - A list of strings formatted as `"label=value"` to match specific label key-value pairs,
4683
+ or just `"label"` for key existence.
4684
+ - A comma-separated string formatted as `"label1=value1,label2"` to match entities with
4685
+ the specified key-value pairs or key existence.
3946
4686
 
3947
4687
  :returns: List of function objects.
3948
4688
  """
@@ -3960,21 +4700,24 @@ class MlrunProject(ModelObj):
3960
4700
  self,
3961
4701
  name: Optional[str] = None,
3962
4702
  uid: Optional[Union[str, list[str]]] = None,
3963
- labels: Optional[Union[str, list[str]]] = None,
4703
+ labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
3964
4704
  state: Optional[
3965
4705
  mlrun.common.runtimes.constants.RunStates
3966
4706
  ] = None, # Backward compatibility
3967
4707
  states: typing.Optional[list[mlrun.common.runtimes.constants.RunStates]] = None,
3968
4708
  sort: bool = True,
3969
- last: int = 0,
3970
4709
  iter: bool = False,
3971
4710
  start_time_from: Optional[datetime.datetime] = None,
3972
4711
  start_time_to: Optional[datetime.datetime] = None,
3973
4712
  last_update_time_from: Optional[datetime.datetime] = None,
3974
4713
  last_update_time_to: Optional[datetime.datetime] = None,
4714
+ end_time_from: Optional[datetime.datetime] = None,
4715
+ end_time_to: Optional[datetime.datetime] = None,
3975
4716
  **kwargs,
3976
4717
  ) -> mlrun.lists.RunList:
3977
- """Retrieve a list of runs, filtered by various options.
4718
+ """Retrieve a list of runs.
4719
+ The default returns the runs from the last week, partitioned by name.
4720
+ To override the default, specify any filter.
3978
4721
 
3979
4722
  The returned result is a ``RunList`` (list of dict), use `.to_objects()` to convert it to a list of RunObjects,
3980
4723
  `.show()` to view graphically in Jupyter, `.to_df()` to convert to a DataFrame, and `compare()` to
@@ -3995,25 +4738,33 @@ class MlrunProject(ModelObj):
3995
4738
 
3996
4739
  :param name: Name of the run to retrieve.
3997
4740
  :param uid: Unique ID of the run.
3998
- :param labels: A list of labels to filter by. Label filters work by either filtering a specific value
3999
- of a label (i.e. list("key=value")) or by looking for the existence of a given
4000
- key (i.e. "key").
4741
+ :param labels: Filter runs by label key-value pairs or key existence. This can be provided as:
4742
+
4743
+ - A dictionary in the format `{"label": "value"}` to match specific label key-value pairs,
4744
+ or `{"label": None}` to check for key existence.
4745
+ - A list of strings formatted as `"label=value"` to match specific label key-value pairs,
4746
+ or just `"label"` for key existence.
4747
+ - A comma-separated string formatted as `"label1=value1,label2"` to match entities with
4748
+ the specified key-value pairs or key existence.
4749
+
4001
4750
  :param state: Deprecated - List only runs whose state is specified.
4002
4751
  :param states: List only runs whose state is one of the provided states.
4003
4752
  :param sort: Whether to sort the result according to their start time. Otherwise, results will be
4004
4753
  returned by their internal order in the DB (order will not be guaranteed).
4005
- :param last: Deprecated - currently not used (will be removed in 1.9.0).
4754
+ :param last: Deprecated - currently not used (will be removed in 1.10.0).
4006
4755
  :param iter: If ``True`` return runs from all iterations. Otherwise, return only runs whose ``iter`` is 0.
4007
4756
  :param start_time_from: Filter by run start time in ``[start_time_from, start_time_to]``.
4008
4757
  :param start_time_to: Filter by run start time in ``[start_time_from, start_time_to]``.
4009
4758
  :param last_update_time_from: Filter by run last update time in ``(last_update_time_from,
4010
4759
  last_update_time_to)``.
4011
4760
  :param last_update_time_to: Filter by run last update time in ``(last_update_time_from, last_update_time_to)``.
4761
+ :param end_time_from: Filter by run end time in ``[end_time_from, end_time_to]``.
4762
+ :param end_time_to: Filter by run end time in ``[end_time_from, end_time_to]``.
4012
4763
  """
4013
4764
  if state:
4014
- # TODO: Remove this in 1.9.0
4765
+ # TODO: Remove this in 1.10.0
4015
4766
  warnings.warn(
4016
- "'state' is deprecated and will be removed in 1.9.0. Use 'states' instead.",
4767
+ "'state' is deprecated in 1.7.0 and will be removed in 1.10.0. Use 'states' instead.",
4017
4768
  FutureWarning,
4018
4769
  )
4019
4770
 
@@ -4023,16 +4774,82 @@ class MlrunProject(ModelObj):
4023
4774
  uid,
4024
4775
  self.metadata.name,
4025
4776
  labels=labels,
4026
- states=mlrun.utils.helpers.as_list(state)
4027
- if state is not None
4028
- else states or None,
4777
+ states=(
4778
+ mlrun.utils.helpers.as_list(state)
4779
+ if state is not None
4780
+ else states or None
4781
+ ),
4029
4782
  sort=sort,
4030
- last=last,
4031
4783
  iter=iter,
4032
4784
  start_time_from=start_time_from,
4033
4785
  start_time_to=start_time_to,
4034
4786
  last_update_time_from=last_update_time_from,
4035
4787
  last_update_time_to=last_update_time_to,
4788
+ end_time_from=end_time_from,
4789
+ end_time_to=end_time_to,
4790
+ **kwargs,
4791
+ )
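A short sketch of the new end-time filters (the run name is hypothetical)::

    import datetime

    now = datetime.datetime.now(tz=datetime.timezone.utc)
    # completed runs that finished within the last 24 hours
    runs = project.list_runs(
        name="train",  # hypothetical run name
        states=["completed"],
        end_time_from=now - datetime.timedelta(hours=24),
        end_time_to=now,
    )
    print(runs.to_df())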
4792
+
4793
+ def paginated_list_runs(
4794
+ self,
4795
+ *args,
4796
+ page: Optional[int] = None,
4797
+ page_size: Optional[int] = None,
4798
+ page_token: Optional[str] = None,
4799
+ **kwargs,
4800
+ ) -> tuple[mlrun.lists.RunList, Optional[str]]:
4801
+ """List runs with support for pagination and various filtering options.
4802
+
4803
+ This method retrieves a paginated list of runs based on the specified filter parameters.
4804
+ Pagination is controlled using the `page`, `page_size`, and `page_token` parameters. The method
4805
+ will return a list of runs that match the filtering criteria provided.
4806
+
4807
+ The returned result is a ``RunList`` (list of dict), use `.to_objects()` to convert it to a list of RunObjects,
4808
+ `.show()` to view graphically in Jupyter, `.to_df()` to convert to a DataFrame, and `compare()` to
4809
+ generate comparison table and PCP plot.
4810
+
4811
+ For detailed information about the parameters, refer to the list_runs method:
4812
+ See :py:func:`~list_runs` for more details.
4813
+
4814
+ Examples::
4815
+
4816
+ # Fetch first page of runs with page size of 5
4817
+ runs, token = project.paginated_list_runs(page_size=5)
4818
+ # Fetch next page using the pagination token from the previous response
4819
+ runs, token = project.paginated_list_runs(page_token=token)
4820
+ # Fetch runs for a specific page (e.g., page 3)
4821
+ runs, token = project.paginated_list_runs(page=3, page_size=5)
4822
+
4823
+ # Automatically iterate over all pages without explicitly specifying the page number
4824
+ runs = []
4825
+ token = None
4826
+ while True:
4827
+ page_runs, token = project.paginated_list_runs(
4828
+ page_token=token, page_size=5
4829
+ )
4830
+ runs.extend(page_runs)
4831
+
4832
+ # If token is None and page_runs is empty, we've reached the end (no more runs).
4833
+ # If token is None and page_runs is not empty, we've fetched the last page of runs.
4834
+ if not token:
4835
+ break
4836
+ print(f"Total runs retrieved: {len(runs)}")
4837
+
4838
+ :param page: The page number to retrieve. If not provided, the next page will be retrieved.
4839
+ :param page_size: The number of items per page to retrieve. Up to `page_size` responses are expected.
4840
+ Defaults to `mlrun.mlconf.httpdb.pagination.default_page_size` if not provided.
4841
+ :param page_token: A pagination token used to retrieve the next page of results. Should not be provided
4842
+ for the first request.
4843
+
4844
+ :returns: A tuple containing the list of runs and an optional `page_token` for pagination.
4845
+ """
4846
+ db = mlrun.db.get_run_db(secrets=self._secrets)
4847
+ return db.paginated_list_runs(
4848
+ *args,
4849
+ project=self.metadata.name,
4850
+ page=page,
4851
+ page_size=page_size,
4852
+ page_token=page_token,
4036
4853
  **kwargs,
4037
4854
  )
4038
4855
 
@@ -4051,6 +4868,25 @@ class MlrunProject(ModelObj):
4051
4868
  profile, self.name
4052
4869
  )
4053
4870
 
4871
+ def get_config_profile_attributes(self, name: str) -> dict:
4872
+ """
4873
+ Get the merged attributes from a named configuration profile.
4874
+
4875
+ Retrieves a profile from the datastore using the provided name and returns its
4876
+ merged public and private attributes as a dictionary.
4877
+
4878
+ :param name: Name of the configuration profile to retrieve. Will be prefixed
4879
+ with "ds://" to form the full profile path.
4880
+
4881
+ :return: The merged attributes dictionary containing both public and private
4882
+ configuration settings from the profile. Returns nested dictionaries if
4883
+ the profile contains nested configurations.
4884
4886
+ """
4887
+ profile = datastore_profile_read(f"ds://{name}", self.name)
4888
+ return profile.attributes()
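A minimal usage sketch, assuming a profile named "my-tdengine" was registered earlier (as in the model monitoring example above)::

    # returns the profile's merged public and private attributes as a dict
    attributes = project.get_config_profile_attributes("my-tdengine")
    print(attributes.get("host"), attributes.get("port"))  # keys depend on the profile type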
4889
+
4054
4890
  def delete_datastore_profile(self, profile: str):
4055
4891
  mlrun.db.get_run_db(secrets=self._secrets).delete_datastore_profile(
4056
4892
  profile, self.name
@@ -4188,13 +5024,17 @@ class MlrunProject(ModelObj):
4188
5024
  mlrun.db.get_run_db().delete_api_gateway(name=name, project=self.name)
4189
5025
 
4190
5026
  def store_alert_config(
4191
- self, alert_data: AlertConfig, alert_name: typing.Optional[str] = None
5027
+ self,
5028
+ alert_data: AlertConfig,
5029
+ alert_name: typing.Optional[str] = None,
5030
+ force_reset: bool = False,
4192
5031
  ) -> AlertConfig:
4193
5032
  """
4194
5033
  Create/modify an alert.
4195
5034
 
4196
5035
  :param alert_data: The data of the alert.
4197
5036
  :param alert_name: The name of the alert.
5037
+ :param force_reset: If True and the alert already exists, the alert will be reset.
4198
5038
  :return: the created/modified alert.
4199
5039
  """
4200
5040
  if not alert_data:
@@ -4208,7 +5048,9 @@ class MlrunProject(ModelObj):
4208
5048
  project=alert_data.project,
4209
5049
  )
4210
5050
  alert_data.project = self.metadata.name
4211
- return db.store_alert_config(alert_name, alert_data, project=self.metadata.name)
5051
+ return db.store_alert_config(
5052
+ alert_name, alert_data, project=self.metadata.name, force_reset=force_reset
5053
+ )
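A brief sketch of the new `force_reset` flag, assuming an alert with the hypothetical name "my-drift-alert" already exists::

    # re-store an existing alert configuration and reset its state in the same call
    alert = project.get_alert_config("my-drift-alert")  # hypothetical alert name
    project.store_alert_config(alert, force_reset=True)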
4212
5054
 
4213
5055
  def get_alert_config(self, alert_name: str) -> AlertConfig:
4214
5056
  """
@@ -4220,17 +5062,23 @@ class MlrunProject(ModelObj):
4220
5062
  db = mlrun.db.get_run_db(secrets=self._secrets)
4221
5063
  return db.get_alert_config(alert_name, self.metadata.name)
4222
5064
 
4223
- def list_alerts_configs(self) -> list[AlertConfig]:
5065
+ def list_alerts_configs(
5066
+ self, limit: Optional[int] = None, offset: Optional[int] = None
5067
+ ) -> list[AlertConfig]:
4224
5068
  """
4225
5069
  Retrieve list of alerts of a project.
4226
5070
 
5071
+ :param limit: The maximum number of alerts to return.
5072
+ Defaults to `mlconf.alerts.default_list_alert_configs_limit` if not provided.
5073
+ :param offset: The number of alerts to skip before starting to collect alerts.
5074
+
4227
5075
  :return: All the alerts objects of the project.
4228
5076
  """
4229
5077
  db = mlrun.db.get_run_db(secrets=self._secrets)
4230
- return db.list_alerts_configs(self.metadata.name)
5078
+ return db.list_alerts_configs(self.metadata.name, limit=limit, offset=offset)
4231
5079
 
4232
5080
  def delete_alert_config(
4233
- self, alert_data: AlertConfig = None, alert_name: str = None
5081
+ self, alert_data: AlertConfig = None, alert_name: Optional[str] = None
4234
5082
  ):
4235
5083
  """
4236
5084
  Delete an alert.
@@ -4250,7 +5098,7 @@ class MlrunProject(ModelObj):
4250
5098
  db.delete_alert_config(alert_name, self.metadata.name)
4251
5099
 
4252
5100
  def reset_alert_config(
4253
- self, alert_data: AlertConfig = None, alert_name: str = None
5101
+ self, alert_data: AlertConfig = None, alert_name: Optional[str] = None
4254
5102
  ):
4255
5103
  """
4256
5104
  Reset an alert.
@@ -4269,7 +5117,9 @@ class MlrunProject(ModelObj):
4269
5117
  alert_name = alert_data.name
4270
5118
  db.reset_alert_config(alert_name, self.metadata.name)

- def get_alert_template(self, template_name: str) -> AlertTemplate:
+ def get_alert_template(
+ self, template_name: str
+ ) -> mlrun.common.schemas.alert.AlertTemplate:
  """
  Retrieve a specific alert template.

@@ -4279,7 +5129,7 @@ class MlrunProject(ModelObj):
  db = mlrun.db.get_run_db(secrets=self._secrets)
  return db.get_alert_template(template_name)

- def list_alert_templates(self) -> list[AlertTemplate]:
+ def list_alert_templates(self) -> list[mlrun.common.schemas.alert.AlertTemplate]:
  """
  Retrieve list of all alert templates.

@@ -4288,12 +5138,115 @@ class MlrunProject(ModelObj):
  db = mlrun.db.get_run_db(secrets=self._secrets)
  return db.list_alert_templates()
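A hedged sketch of discovering alert templates and instantiating an alert from one of them. The "JobFailed" template name, the template_name attribute, and the with_entities helper are assumptions based on the mlrun alert objects, not guaranteed by this diff:

    import mlrun
    import mlrun.common.schemas.alert as alert_objects
    from mlrun.alerts.alert import AlertConfig

    project = mlrun.get_or_create_project("alerts-demo", context="./")

    # List the templates available on the system (typically the pre-installed ones).
    for template in project.list_alert_templates():
        print(template.template_name)  # attribute name assumed

    # Build an alert from a template and bind it to a job entity.
    template = project.get_alert_template("JobFailed")  # assumed template name
    alert = AlertConfig(project=project.name, name="train-failure", template=template)
    alert.with_entities(
        entities=alert_objects.EventEntities(
            kind=alert_objects.EventEntityKind.JOB,
            project=project.name,
            ids=["train-model"],  # assumed job name
        )
    )
    project.store_alert_config(alert)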

+ def list_alert_activations(
+ self,
+ name: Optional[str] = None,
+ since: Optional[datetime.datetime] = None,
+ until: Optional[datetime.datetime] = None,
+ entity: Optional[str] = None,
+ severity: Optional[
+ list[Union[mlrun.common.schemas.alert.AlertSeverity, str]]
+ ] = None,
+ entity_kind: Optional[
+ Union[mlrun.common.schemas.alert.EventEntityKind, str]
+ ] = None,
+ event_kind: Optional[Union[mlrun.common.schemas.alert.EventKind, str]] = None,
+ ) -> list[mlrun.common.schemas.alert.AlertActivation]:
+ """
+ Retrieve a list of alert activations for a project.
+
+ :param name: The alert name to filter by. Supports exact matching or partial matching if prefixed with `~`.
+ :param since: Filters for alert activations occurring after this timestamp.
+ :param until: Filters for alert activations occurring before this timestamp.
+ :param entity: The entity ID to filter by. Supports wildcard matching if prefixed with `~`.
+ :param severity: A list of severity levels to filter by (e.g., ["high", "low"]).
+ :param entity_kind: The kind of entity (e.g., "job", "endpoint") to filter by.
+ :param event_kind: The kind of event (e.g., "data-drift-detected", "failed") to filter by.
+
+ :returns: A list of alert activations matching the provided filters.
+ """
+ db = mlrun.db.get_run_db(secrets=self._secrets)
+ return db.list_alert_activations(
+ project=self.metadata.name,
+ name=name,
+ since=since,
+ until=until,
+ entity=entity,
+ severity=severity,
+ entity_kind=entity_kind,
+ event_kind=event_kind,
+ )
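A hedged sketch of querying recent high-severity activations of a job-failure alert; the 7-day window is arbitrary and the printed attribute names are assumed from the AlertActivation schema:

    import datetime

    import mlrun
    import mlrun.common.schemas.alert as alert_objects

    project = mlrun.get_or_create_project("alerts-demo", context="./")

    activations = project.list_alert_activations(
        name="~failure",  # partial match, per the docstring above
        since=datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(days=7),
        severity=[alert_objects.AlertSeverity.HIGH],
        event_kind=alert_objects.EventKind.FAILED,
    )
    for activation in activations:
        print(activation.name, activation.activation_time)  # attribute names assumed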
+
+ def paginated_list_alert_activations(
+ self,
+ *args,
+ page: Optional[int] = None,
+ page_size: Optional[int] = None,
+ page_token: Optional[str] = None,
+ **kwargs,
+ ) -> tuple[mlrun.common.schemas.alert.AlertActivation, Optional[str]]:
+ """
+ List alert activations with support for pagination and various filtering options.
+
+ This method retrieves a paginated list of alert activations based on the specified filter parameters.
+ Pagination is controlled using the `page`, `page_size`, and `page_token` parameters. The method
+ will return a list of alert activations that match the filtering criteria provided.
+
+ For detailed information about the parameters, see :py:func:`~list_alert_activations`.
+
+ Examples::
+
+ # Fetch first page of alert activations with page size of 5
+ alert_activations, token = project.paginated_list_alert_activations(page_size=5)
+ # Fetch next page using the pagination token from the previous response
+ alert_activations, token = project.paginated_list_alert_activations(
+ page_token=token
+ )
+ # Fetch alert activations for a specific page (e.g., page 3)
+ alert_activations, token = project.paginated_list_alert_activations(
+ page=3, page_size=5
+ )
+
+ # Automatically iterate over all pages without explicitly specifying the page number
+ alert_activations = []
+ token = None
+ while True:
+ page_alert_activations, token = project.paginated_list_alert_activations(
+ page_token=token, page_size=5
+ )
+ alert_activations.extend(page_alert_activations)
+
+ # If token is None and page_alert_activations is empty, we've reached the end (no more activations).
+ # If token is None and page_alert_activations is not empty, we've fetched the last page of activations.
+ if not token:
+ break
+ print(f"Total alert activations retrieved: {len(alert_activations)}")
+
+ :param page: The page number to retrieve. If not provided, the next page will be retrieved.
+ :param page_size: The number of items per page to retrieve. Up to `page_size` responses are expected.
+ Defaults to `mlrun.mlconf.httpdb.pagination.default_page_size` if not provided.
+ :param page_token: A pagination token used to retrieve the next page of results. Should not be provided
+ for the first request.
+
+ :returns: A tuple containing the list of alert activations and an optional `page_token` for pagination.
+ """
+ db = mlrun.db.get_run_db(secrets=self._secrets)
+ return db.paginated_list_alert_activations(
+ *args,
+ project=self.metadata.name,
+ page=page,
+ page_size=page_size,
+ page_token=page_token,
+ **kwargs,
+ )
+
  def _run_authenticated_git_action(
  self,
  action: Callable,
  remote: str,
- args: list = None,
- kwargs: dict = None,
+ args: Optional[list] = None,
+ kwargs: Optional[dict] = None,
  secrets: Union[SecretsStore, dict] = None,
  ):
  """Run an arbitrary Git routine while the remote is enriched with secrets
@@ -4324,7 +5277,7 @@ class MlrunProject(ModelObj):
  if is_remote_enriched:
  self.spec.repo.remotes[remote].set_url(clean_remote, enriched_remote)

- def _validate_file_path(self, file_path: str, param_name: str):
+ def _validate_file_path(self, file_path: str, param_name: str, engine: str):
  """
  The function checks if the given file_path is a valid path.
  If the file_path is a relative path, it is completed by joining it with the self.spec.get_code_path()
@@ -4349,6 +5302,10 @@ class MlrunProject(ModelObj):
  f"Invalid '{param_name}': '{file_path}'. Got a remote URL without a file suffix."
  )

+ # if engine is remote then skip the local file validation
+ if engine and engine.startswith("remote"):
+ return
+
  code_path = self.spec.get_code_path()

  # If the file path is a relative path, it is completed by joining it with the code_path.
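The new engine argument appears intended to let remote workflow engines skip the local existence check, so the workflow file only has to exist in the project's remote source. A hedged sketch of what that enables (the repository URL and workflow path are placeholders):

    import mlrun

    project = mlrun.get_or_create_project("alerts-demo", context="./")

    # With a remote engine, the workflow path is validated against the remote
    # source rather than the local filesystem.
    project.set_source("git://github.com/org/repo.git#main", pull_at_runtime=True)
    project.set_workflow("main", "workflows/train_pipeline.py")  # exists only in the repo
    project.run("main", engine="remote", watch=False)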
@@ -4370,7 +5327,7 @@ class MlrunProject(ModelObj):
  def _resolve_artifact_producer(
  self,
  artifact: typing.Union[str, Artifact],
- project_producer_tag: str = None,
+ project_producer_tag: Optional[str] = None,
  ) -> tuple[ArtifactProducer, bool]:
  """
  Resolve the artifact producer of the given artifact.
@@ -4400,28 +5357,34 @@ class MlrunProject(ModelObj):
  )

  if producer_dict.get("kind", "") == "run":
- return ArtifactProducer(
- name=producer_dict.get("name", ""),
- kind=producer_dict.get("kind", ""),
- project=producer_project,
- tag=producer_tag,
- owner=producer_dict.get("owner", ""),
- ), True
+ return (
+ ArtifactProducer(
+ name=producer_dict.get("name", ""),
+ kind=producer_dict.get("kind", ""),
+ project=producer_project,
+ tag=producer_tag,
+ owner=producer_dict.get("owner", ""),
+ ),
+ True,
+ )

  # do not retain the artifact's producer, replace it with the project as the producer
  project_producer_tag = project_producer_tag or self._get_project_tag()
- return ArtifactProducer(
- kind="project",
- name=self.metadata.name,
- project=self.metadata.name,
- tag=project_producer_tag,
- owner=self._resolve_artifact_owner(),
- ), False
+ return (
+ ArtifactProducer(
+ kind="project",
+ name=self.metadata.name,
+ project=self.metadata.name,
+ tag=project_producer_tag,
+ owner=self._resolve_artifact_owner(),
+ ),
+ False,
+ )

  def _resolve_existing_artifact(
  self,
  item: typing.Union[str, Artifact],
- tag: str = None,
+ tag: Optional[str] = None,
  ) -> typing.Optional[Artifact]:
  """
  Check if there is an existing artifact with the given item and tag.