mlrun 1.7.0rc13__py3-none-any.whl → 1.7.0rc21__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mlrun might be problematic.
Files changed (156)
  1. mlrun/__init__.py +10 -1
  2. mlrun/__main__.py +23 -111
  3. mlrun/alerts/__init__.py +15 -0
  4. mlrun/alerts/alert.py +144 -0
  5. mlrun/api/schemas/__init__.py +4 -3
  6. mlrun/artifacts/__init__.py +8 -3
  7. mlrun/artifacts/base.py +36 -253
  8. mlrun/artifacts/dataset.py +9 -190
  9. mlrun/artifacts/manager.py +46 -42
  10. mlrun/artifacts/model.py +9 -141
  11. mlrun/artifacts/plots.py +14 -375
  12. mlrun/common/constants.py +65 -3
  13. mlrun/common/formatters/__init__.py +19 -0
  14. mlrun/{runtimes/mpijob/v1alpha1.py → common/formatters/artifact.py} +6 -14
  15. mlrun/common/formatters/base.py +113 -0
  16. mlrun/common/formatters/function.py +46 -0
  17. mlrun/common/formatters/pipeline.py +53 -0
  18. mlrun/common/formatters/project.py +51 -0
  19. mlrun/{runtimes → common/runtimes}/constants.py +32 -4
  20. mlrun/common/schemas/__init__.py +10 -5
  21. mlrun/common/schemas/alert.py +92 -11
  22. mlrun/common/schemas/api_gateway.py +56 -0
  23. mlrun/common/schemas/artifact.py +15 -5
  24. mlrun/common/schemas/auth.py +2 -0
  25. mlrun/common/schemas/client_spec.py +1 -0
  26. mlrun/common/schemas/frontend_spec.py +1 -0
  27. mlrun/common/schemas/function.py +4 -0
  28. mlrun/common/schemas/model_monitoring/__init__.py +15 -3
  29. mlrun/common/schemas/model_monitoring/constants.py +58 -7
  30. mlrun/common/schemas/model_monitoring/grafana.py +9 -5
  31. mlrun/common/schemas/model_monitoring/model_endpoints.py +86 -2
  32. mlrun/common/schemas/pipeline.py +0 -9
  33. mlrun/common/schemas/project.py +6 -11
  34. mlrun/common/types.py +1 -0
  35. mlrun/config.py +36 -8
  36. mlrun/data_types/to_pandas.py +9 -9
  37. mlrun/datastore/base.py +41 -9
  38. mlrun/datastore/datastore.py +6 -2
  39. mlrun/datastore/datastore_profile.py +56 -4
  40. mlrun/datastore/hdfs.py +5 -0
  41. mlrun/datastore/inmem.py +2 -2
  42. mlrun/datastore/redis.py +2 -2
  43. mlrun/datastore/s3.py +5 -0
  44. mlrun/datastore/sources.py +147 -7
  45. mlrun/datastore/store_resources.py +7 -7
  46. mlrun/datastore/targets.py +129 -9
  47. mlrun/datastore/utils.py +42 -0
  48. mlrun/datastore/v3io.py +1 -1
  49. mlrun/db/auth_utils.py +152 -0
  50. mlrun/db/base.py +55 -11
  51. mlrun/db/httpdb.py +346 -107
  52. mlrun/db/nopdb.py +52 -10
  53. mlrun/errors.py +11 -0
  54. mlrun/execution.py +24 -9
  55. mlrun/feature_store/__init__.py +0 -2
  56. mlrun/feature_store/api.py +12 -47
  57. mlrun/feature_store/feature_set.py +9 -0
  58. mlrun/feature_store/feature_vector.py +8 -0
  59. mlrun/feature_store/ingestion.py +7 -6
  60. mlrun/feature_store/retrieval/base.py +9 -4
  61. mlrun/feature_store/retrieval/conversion.py +9 -9
  62. mlrun/feature_store/retrieval/dask_merger.py +2 -0
  63. mlrun/feature_store/retrieval/job.py +9 -3
  64. mlrun/feature_store/retrieval/local_merger.py +2 -0
  65. mlrun/feature_store/retrieval/spark_merger.py +16 -0
  66. mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +7 -12
  67. mlrun/frameworks/parallel_coordinates.py +2 -1
  68. mlrun/frameworks/tf_keras/__init__.py +4 -1
  69. mlrun/k8s_utils.py +10 -11
  70. mlrun/launcher/base.py +4 -3
  71. mlrun/launcher/client.py +5 -3
  72. mlrun/launcher/local.py +8 -2
  73. mlrun/launcher/remote.py +8 -2
  74. mlrun/lists.py +6 -2
  75. mlrun/model.py +62 -20
  76. mlrun/model_monitoring/__init__.py +1 -1
  77. mlrun/model_monitoring/api.py +41 -18
  78. mlrun/model_monitoring/application.py +5 -305
  79. mlrun/model_monitoring/applications/__init__.py +11 -0
  80. mlrun/model_monitoring/applications/_application_steps.py +157 -0
  81. mlrun/model_monitoring/applications/base.py +280 -0
  82. mlrun/model_monitoring/applications/context.py +214 -0
  83. mlrun/model_monitoring/applications/evidently_base.py +211 -0
  84. mlrun/model_monitoring/applications/histogram_data_drift.py +132 -91
  85. mlrun/model_monitoring/applications/results.py +99 -0
  86. mlrun/model_monitoring/controller.py +3 -1
  87. mlrun/model_monitoring/db/__init__.py +2 -0
  88. mlrun/model_monitoring/db/stores/__init__.py +0 -2
  89. mlrun/model_monitoring/db/stores/base/store.py +22 -37
  90. mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +43 -21
  91. mlrun/model_monitoring/db/stores/sqldb/models/base.py +39 -8
  92. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +27 -7
  93. mlrun/model_monitoring/db/stores/sqldb/models/sqlite.py +5 -0
  94. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +246 -224
  95. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +232 -216
  96. mlrun/model_monitoring/db/tsdb/__init__.py +100 -0
  97. mlrun/model_monitoring/db/tsdb/base.py +329 -0
  98. mlrun/model_monitoring/db/tsdb/helpers.py +30 -0
  99. mlrun/model_monitoring/db/tsdb/tdengine/__init__.py +15 -0
  100. mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +240 -0
  101. mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +45 -0
  102. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +397 -0
  103. mlrun/model_monitoring/db/tsdb/v3io/__init__.py +15 -0
  104. mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +117 -0
  105. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +636 -0
  106. mlrun/model_monitoring/evidently_application.py +6 -118
  107. mlrun/model_monitoring/helpers.py +46 -1
  108. mlrun/model_monitoring/model_endpoint.py +3 -2
  109. mlrun/model_monitoring/stream_processing.py +57 -216
  110. mlrun/model_monitoring/writer.py +134 -124
  111. mlrun/package/utils/_formatter.py +2 -2
  112. mlrun/platforms/__init__.py +10 -9
  113. mlrun/platforms/iguazio.py +21 -202
  114. mlrun/projects/operations.py +19 -12
  115. mlrun/projects/pipelines.py +103 -109
  116. mlrun/projects/project.py +377 -137
  117. mlrun/render.py +15 -14
  118. mlrun/run.py +16 -47
  119. mlrun/runtimes/__init__.py +6 -3
  120. mlrun/runtimes/base.py +8 -7
  121. mlrun/runtimes/databricks_job/databricks_wrapper.py +1 -1
  122. mlrun/runtimes/funcdoc.py +0 -28
  123. mlrun/runtimes/kubejob.py +2 -1
  124. mlrun/runtimes/local.py +5 -2
  125. mlrun/runtimes/mpijob/__init__.py +0 -20
  126. mlrun/runtimes/mpijob/v1.py +1 -1
  127. mlrun/runtimes/nuclio/api_gateway.py +440 -208
  128. mlrun/runtimes/nuclio/application/application.py +170 -8
  129. mlrun/runtimes/nuclio/function.py +39 -49
  130. mlrun/runtimes/pod.py +21 -41
  131. mlrun/runtimes/remotesparkjob.py +9 -3
  132. mlrun/runtimes/sparkjob/spark3job.py +1 -1
  133. mlrun/runtimes/utils.py +6 -45
  134. mlrun/serving/server.py +2 -1
  135. mlrun/serving/states.py +53 -2
  136. mlrun/serving/v2_serving.py +5 -1
  137. mlrun/track/tracker.py +2 -1
  138. mlrun/utils/async_http.py +25 -5
  139. mlrun/utils/helpers.py +107 -75
  140. mlrun/utils/logger.py +39 -7
  141. mlrun/utils/notifications/notification/__init__.py +14 -9
  142. mlrun/utils/notifications/notification/base.py +1 -1
  143. mlrun/utils/notifications/notification/slack.py +61 -13
  144. mlrun/utils/notifications/notification/webhook.py +1 -1
  145. mlrun/utils/notifications/notification_pusher.py +147 -16
  146. mlrun/utils/regex.py +9 -0
  147. mlrun/utils/v3io_clients.py +0 -1
  148. mlrun/utils/version/version.json +2 -2
  149. {mlrun-1.7.0rc13.dist-info → mlrun-1.7.0rc21.dist-info}/METADATA +14 -6
  150. {mlrun-1.7.0rc13.dist-info → mlrun-1.7.0rc21.dist-info}/RECORD +154 -133
  151. mlrun/kfpops.py +0 -865
  152. mlrun/platforms/other.py +0 -305
  153. {mlrun-1.7.0rc13.dist-info → mlrun-1.7.0rc21.dist-info}/LICENSE +0 -0
  154. {mlrun-1.7.0rc13.dist-info → mlrun-1.7.0rc21.dist-info}/WHEEL +0 -0
  155. {mlrun-1.7.0rc13.dist-info → mlrun-1.7.0rc21.dist-info}/entry_points.txt +0 -0
  156. {mlrun-1.7.0rc13.dist-info → mlrun-1.7.0rc21.dist-info}/top_level.txt +0 -0
mlrun/projects/project.py CHANGED
@@ -31,23 +31,29 @@ from typing import Callable, Optional, Union
  import dotenv
  import git
  import git.exc
- import kfp
+ import mlrun_pipelines.common.models
+ import mlrun_pipelines.mounts
  import nuclio.utils
  import requests
  import yaml
+ from mlrun_pipelines.models import PipelineNodeWrapper

  import mlrun.common.helpers
+ import mlrun.common.runtimes.constants
+ import mlrun.common.schemas.artifact
  import mlrun.common.schemas.model_monitoring.constants as mm_constants
  import mlrun.db
  import mlrun.errors
  import mlrun.k8s_utils
+ import mlrun.model_monitoring.applications as mm_app
  import mlrun.runtimes
  import mlrun.runtimes.nuclio.api_gateway
  import mlrun.runtimes.pod
  import mlrun.runtimes.utils
  import mlrun.serving
  import mlrun.utils.regex
- from mlrun.common.schemas import AlertConfig
+ from mlrun.alerts.alert import AlertConfig
+ from mlrun.common.schemas.alert import AlertTemplate
  from mlrun.datastore.datastore_profile import DatastoreProfile, DatastoreProfile2Json
  from mlrun.runtimes.nuclio.function import RemoteRuntime

@@ -56,14 +62,10 @@ from ..artifacts.manager import ArtifactManager, dict_to_artifact, extend_artifa
  from ..datastore import store_manager
  from ..features import Feature
  from ..model import EntrypointParam, ImageBuilder, ModelObj
- from ..model_monitoring.application import (
- ModelMonitoringApplicationBase,
- )
  from ..run import code_to_function, get_object, import_function, new_function
  from ..secrets import SecretsStore
  from ..utils import (
  is_ipython,
- is_legacy_artifact,
  is_relative_path,
  is_yaml_path,
  logger,
@@ -76,7 +78,10 @@ from ..utils.clones import (
  clone_zip,
  get_repo_url,
  )
- from ..utils.helpers import ensure_git_branch, resolve_git_reference_from_source
+ from ..utils.helpers import (
+ ensure_git_branch,
+ resolve_git_reference_from_source,
+ )
  from ..utils.notifications import CustomNotificationPusher, NotificationTypes
  from .operations import (
  BuildStatus,
@@ -207,14 +212,16 @@ def new_project(
  "Unsupported option, cannot use subpath argument with project templates"
  )
  if from_template.endswith(".yaml"):
- project = _load_project_file(from_template, name, secrets)
+ project = _load_project_file(
+ from_template, name, secrets, allow_cross_project=True
+ )
  elif from_template.startswith("git://"):
  clone_git(from_template, context, secrets, clone=True)
  shutil.rmtree(path.join(context, ".git"))
- project = _load_project_dir(context, name)
+ project = _load_project_dir(context, name, allow_cross_project=True)
  elif from_template.endswith(".zip"):
  clone_zip(from_template, context, secrets)
- project = _load_project_dir(context, name)
+ project = _load_project_dir(context, name, allow_cross_project=True)
  else:
  raise ValueError("template must be a path to .yaml or .zip file")
  project.metadata.name = name
@@ -296,6 +303,7 @@ def load_project(
  save: bool = True,
  sync_functions: bool = False,
  parameters: dict = None,
+ allow_cross_project: bool = None,
  ) -> "MlrunProject":
  """Load an MLRun project from git or tar or dir

@@ -342,6 +350,8 @@ def load_project(
  :param save: whether to save the created project and artifact in the DB
  :param sync_functions: sync the project's functions into the project object (will be saved to the DB if save=True)
  :param parameters: key/value pairs to add to the project.spec.params
+ :param allow_cross_project: if True, override the loaded project name. This flag ensures awareness of
+ loading an existing project yaml as a baseline for a new project with a different name

  :returns: project object
  """
@@ -357,7 +367,7 @@ def load_project(
  if url:
  url = str(url) # to support path objects
  if is_yaml_path(url):
- project = _load_project_file(url, name, secrets)
+ project = _load_project_file(url, name, secrets, allow_cross_project)
  project.spec.context = context
  elif url.startswith("git://"):
  url, repo = clone_git(url, context, secrets, clone)
@@ -384,7 +394,7 @@ def load_project(
  repo, url = init_repo(context, url, init_git)

  if not project:
- project = _load_project_dir(context, name, subpath)
+ project = _load_project_dir(context, name, subpath, allow_cross_project)

  if not project.metadata.name:
  raise ValueError("Project name must be specified")
@@ -438,6 +448,7 @@ def get_or_create_project(
  from_template: str = None,
  save: bool = True,
  parameters: dict = None,
+ allow_cross_project: bool = None,
  ) -> "MlrunProject":
  """Load a project from MLRun DB, or create/import if it does not exist

@@ -482,12 +493,12 @@ def get_or_create_project(
  :param from_template: path to project YAML file that will be used as from_template (for new projects)
  :param save: whether to save the created project in the DB
  :param parameters: key/value pairs to add to the project.spec.params
+ :param allow_cross_project: if True, override the loaded project name. This flag ensures awareness of
+ loading an existing project yaml as a baseline for a new project with a different name

  :returns: project object
  """
  context = context or "./"
- spec_path = path.join(context, subpath or "", "project.yaml")
- load_from_path = url or path.isfile(spec_path)
  try:
  # load project from the DB.
  # use `name` as `url` as we load the project from the DB
@@ -503,13 +514,15 @@ def get_or_create_project(
  # only loading project from db so no need to save it
  save=False,
  parameters=parameters,
+ allow_cross_project=allow_cross_project,
  )
  logger.info("Project loaded successfully", project_name=name)
  return project
-
  except mlrun.errors.MLRunNotFoundError:
  logger.debug("Project not found in db", project_name=name)

+ spec_path = path.join(context, subpath or "", "project.yaml")
+ load_from_path = url or path.isfile(spec_path)
  # do not nest under "try" or else the exceptions raised below will be logged along with the "not found" message
  if load_from_path:
  # loads a project from archive or local project.yaml
@@ -525,6 +538,7 @@ def get_or_create_project(
  user_project=user_project,
  save=save,
  parameters=parameters,
+ allow_cross_project=allow_cross_project,
  )

  logger.info(
@@ -599,7 +613,7 @@ def _run_project_setup(
  return project


- def _load_project_dir(context, name="", subpath=""):
+ def _load_project_dir(context, name="", subpath="", allow_cross_project=None):
  subpath_str = subpath or ""

  # support both .yaml and .yml file extensions
@@ -613,7 +627,7 @@ def _load_project_dir(context, name="", subpath=""):
  with open(project_file_path) as fp:
  data = fp.read()
  struct = yaml.load(data, Loader=yaml.FullLoader)
- project = _project_instance_from_struct(struct, name)
+ project = _project_instance_from_struct(struct, name, allow_cross_project)
  project.spec.context = context
  elif function_files := glob.glob(function_file_path):
  function_path = function_files[0]
@@ -686,19 +700,41 @@ def _delete_project_from_db(project_name, secrets, deletion_strategy):
  return db.delete_project(project_name, deletion_strategy=deletion_strategy)


- def _load_project_file(url, name="", secrets=None):
+ def _load_project_file(url, name="", secrets=None, allow_cross_project=None):
  try:
  obj = get_object(url, secrets)
  except FileNotFoundError as exc:
  raise FileNotFoundError(f"cant find project file at {url}") from exc
  struct = yaml.load(obj, Loader=yaml.FullLoader)
- return _project_instance_from_struct(struct, name)
+ return _project_instance_from_struct(struct, name, allow_cross_project)


- def _project_instance_from_struct(struct, name):
- struct.setdefault("metadata", {})["name"] = name or struct.get("metadata", {}).get(
- "name", ""
- )
+ def _project_instance_from_struct(struct, name, allow_cross_project):
+ name_from_struct = struct.get("metadata", {}).get("name", "")
+ if name and name_from_struct and name_from_struct != name:
+ error_message = (
+ f"project name mismatch, {name_from_struct} != {name}, please do one of the following:\n"
+ "1. Set the `allow_cross_project=True` when loading the project.\n"
+ f"2. Delete the existing project yaml, or ensure its name is equal to {name}.\n"
+ "3. Use different project context dir."
+ )
+
+ if allow_cross_project is None:
+ # TODO: Remove this warning in version 1.9.0 and also fix cli to support allow_cross_project
+ logger.warn(
+ "Project name is different than specified on its project yaml."
+ "You should fix it until version 1.9.0",
+ description=error_message,
+ )
+ elif allow_cross_project:
+ logger.warn(
+ "Project name is different than specified on its project yaml. Overriding.",
+ existing_name=name_from_struct,
+ overriding_name=name,
+ )
+ else:
+ raise ValueError(error_message)
+ struct.setdefault("metadata", {})["name"] = name or name_from_struct
  return MlrunProject.from_dict(struct)


@@ -960,13 +996,9 @@ class ProjectSpec(ModelObj):
  if not isinstance(artifact, dict) and not hasattr(artifact, "to_dict"):
  raise ValueError("artifacts must be a dict or class")
  if isinstance(artifact, dict):
- # Support legacy artifacts
- if is_legacy_artifact(artifact) or _is_imported_artifact(artifact):
- key = artifact.get("key")
- else:
- key = artifact.get("metadata").get("key", "")
+ key = artifact.get("metadata", {}).get("key", "")
  if not key:
- raise ValueError('artifacts "key" must be specified')
+ raise ValueError('artifacts "metadata.key" must be specified')
  else:
  key = artifact.key
  artifact = artifact.to_dict()
@@ -1243,6 +1275,14 @@ class MlrunProject(ModelObj):
  def description(self, description):
  self.spec.description = description

+ @property
+ def default_function_node_selector(self) -> dict:
+ return self.spec.default_function_node_selector
+
+ @default_function_node_selector.setter
+ def default_function_node_selector(self, default_function_node_selector):
+ self.spec.default_function_node_selector = default_function_node_selector
+
  @property
  def default_image(self) -> str:
  return self.spec.default_image
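The `default_function_node_selector` property added above is a thin proxy over `spec.default_function_node_selector`; a usage sketch (the label key/value is illustrative, and how runtimes consume it depends on the rest of the 1.7.0 enrichment logic)::

    project = mlrun.get_or_create_project("my-project", context="./")  # hypothetical project
    project.default_function_node_selector = {"disktype": "ssd"}
    print(project.default_function_node_selector)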
@@ -1562,6 +1602,23 @@ class MlrunProject(ModelObj):
  )
  return item

+ def delete_artifact(
+ self,
+ item: Artifact,
+ deletion_strategy: mlrun.common.schemas.artifact.ArtifactsDeletionStrategies = (
+ mlrun.common.schemas.artifact.ArtifactsDeletionStrategies.metadata_only
+ ),
+ secrets: dict = None,
+ ):
+ """Delete an artifact object in the DB and optionally delete the artifact data
+
+ :param item: Artifact object (can be any type, such as dataset, model, feature store).
+ :param deletion_strategy: The artifact deletion strategy types.
+ :param secrets: Credentials needed to access the artifact data.
+ """
+ am = self._get_artifact_manager()
+ am.delete_artifact(item, deletion_strategy, secrets)
+
  def log_dataset(
  self,
  key,
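The new `MlrunProject.delete_artifact` method shown above might be called like this (the artifact key is hypothetical; `metadata_only` is the documented default strategy)::

    from mlrun.common.schemas.artifact import ArtifactsDeletionStrategies

    artifact = project.get_artifact("my-dataset")  # hypothetical artifact key
    project.delete_artifact(
        artifact,
        deletion_strategy=ArtifactsDeletionStrategies.metadata_only,
    )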
@@ -1814,10 +1871,18 @@ class MlrunProject(ModelObj):
  """
  context = context or self.spec.context
  if context:
- project = _load_project_dir(context, self.metadata.name, self.spec.subpath)
+ project = _load_project_dir(
+ context,
+ self.metadata.name,
+ self.spec.subpath,
+ allow_cross_project=False,
+ )
  else:
  project = _load_project_file(
- self.spec.origin_url, self.metadata.name, self._secrets
+ self.spec.origin_url,
+ self.metadata.name,
+ self._secrets,
+ allow_cross_project=None,
  )
  project.spec.source = self.spec.source
  project.spec.repo = self.spec.repo
@@ -1846,7 +1911,11 @@ class MlrunProject(ModelObj):
  def set_model_monitoring_function(
  self,
  func: typing.Union[str, mlrun.runtimes.BaseRuntime, None] = None,
- application_class: typing.Union[str, ModelMonitoringApplicationBase] = None,
+ application_class: typing.Union[
+ str,
+ mm_app.ModelMonitoringApplicationBase,
+ mm_app.ModelMonitoringApplicationBaseV2,
+ ] = None,
  name: str = None,
  image: str = None,
  handler=None,
@@ -1862,6 +1931,7 @@ class MlrunProject(ModelObj):
  call `fn.deploy()` where `fn` is the object returned by this method.

  examples::
+
  project.set_model_monitoring_function(
  name="myApp", application_class="MyApp", image="mlrun/mlrun"
  )
@@ -1884,11 +1954,6 @@ class MlrunProject(ModelObj):
  monitoring application's constructor.
  """

- if name in mm_constants.MonitoringFunctionNames.list():
- raise mlrun.errors.MLRunInvalidArgumentError(
- f"An application cannot have the following names: "
- f"{mm_constants.MonitoringFunctionNames.list()}"
- )
  function_object: RemoteRuntime = None
  (
  resolved_function_name,
@@ -1914,7 +1979,11 @@ class MlrunProject(ModelObj):
  def create_model_monitoring_function(
  self,
  func: str = None,
- application_class: typing.Union[str, ModelMonitoringApplicationBase] = None,
+ application_class: typing.Union[
+ str,
+ mm_app.ModelMonitoringApplicationBase,
+ mm_app.ModelMonitoringApplicationBaseV2,
+ ] = None,
  name: str = None,
  image: str = None,
  handler: str = None,
@@ -1928,6 +1997,7 @@ class MlrunProject(ModelObj):
  Create a monitoring function object without setting it to the project

  examples::
+
  project.create_model_monitoring_function(
  application_class_name="MyApp", image="mlrun/mlrun", name="myApp"
  )
@@ -1949,6 +2019,7 @@ class MlrunProject(ModelObj):
  :param application_kwargs: Additional keyword arguments to be passed to the
  monitoring application's constructor.
  """
+
  _, function_object, _ = self._instantiate_model_monitoring_function(
  func,
  application_class,
@@ -1967,7 +2038,10 @@ class MlrunProject(ModelObj):
  self,
  func: typing.Union[str, mlrun.runtimes.BaseRuntime, None] = None,
  application_class: typing.Union[
- str, ModelMonitoringApplicationBase, None
+ str,
+ mm_app.ModelMonitoringApplicationBase,
+ mm_app.ModelMonitoringApplicationBaseV2,
+ None,
  ] = None,
  name: typing.Optional[str] = None,
  image: typing.Optional[str] = None,
@@ -2024,12 +2098,24 @@ class MlrunProject(ModelObj):

  return resolved_function_name, function_object, func

+ def _wait_for_functions_deployment(self, function_names: list[str]) -> None:
+ """
+ Wait for the deployment of functions on the backend.
+
+ :param function_names: A list of function names.
+ """
+ for fn_name in function_names:
+ fn = typing.cast(RemoteRuntime, self.get_function(key=fn_name))
+ fn._wait_for_function_deployment(db=fn._get_db())
+
  def enable_model_monitoring(
  self,
  default_controller_image: str = "mlrun/mlrun",
  base_period: int = 10,
  image: str = "mlrun/mlrun",
+ *,
  deploy_histogram_data_drift_app: bool = True,
+ wait_for_deployment: bool = False,
  ) -> None:
  """
  Deploy model monitoring application controller, writer and stream functions.
@@ -2039,7 +2125,6 @@ class MlrunProject(ModelObj):
  The stream function goal is to monitor the log of the data stream. It is triggered when a new log entry
  is detected. It processes the new events into statistics that are then written to statistics databases.

-
  :param default_controller_image: Deprecated.
  :param base_period: The time period in minutes in which the model monitoring controller
  function is triggered. By default, the base period is 10 minutes.
@@ -2047,6 +2132,9 @@ class MlrunProject(ModelObj):
  stream & histogram data drift functions, which are real time nuclio
  functions. By default, the image is mlrun/mlrun.
  :param deploy_histogram_data_drift_app: If true, deploy the default histogram-based data drift application.
+ :param wait_for_deployment: If true, return only after the deployment is done on the backend.
+ Otherwise, deploy the model monitoring infrastructure on the
+ background, including the histogram data drift app if selected.
  """
  if default_controller_image != "mlrun/mlrun":
  # TODO: Remove this in 1.9.0
@@ -2064,37 +2152,55 @@ class MlrunProject(ModelObj):
  deploy_histogram_data_drift_app=deploy_histogram_data_drift_app,
  )

+ if wait_for_deployment:
+ deployment_functions = mm_constants.MonitoringFunctionNames.list()
+ if deploy_histogram_data_drift_app:
+ deployment_functions.append(
+ mm_constants.HistogramDataDriftApplicationConstants.NAME
+ )
+ self._wait_for_functions_deployment(deployment_functions)
+
  def deploy_histogram_data_drift_app(
  self,
  *,
  image: str = "mlrun/mlrun",
  db: Optional[mlrun.db.RunDBInterface] = None,
+ wait_for_deployment: bool = False,
  ) -> None:
  """
  Deploy the histogram data drift application.

- :param image: The image on which the application will run.
- :param db: An optional DB object.
+ :param image: The image on which the application will run.
+ :param db: An optional DB object.
+ :param wait_for_deployment: If true, return only after the deployment is done on the backend.
+ Otherwise, deploy the application on the background.
  """
  if db is None:
  db = mlrun.db.get_run_db(secrets=self._secrets)
  db.deploy_histogram_data_drift_app(project=self.name, image=image)

+ if wait_for_deployment:
+ self._wait_for_functions_deployment(
+ [mm_constants.HistogramDataDriftApplicationConstants.NAME]
+ )
+
  def update_model_monitoring_controller(
  self,
  base_period: int = 10,
  image: str = "mlrun/mlrun",
+ *,
+ wait_for_deployment: bool = False,
  ) -> None:
  """
  Redeploy model monitoring application controller functions.

-
- :param base_period: The time period in minutes in which the model monitoring controller function
- is triggered. By default, the base period is 10 minutes.
- :param image: The image of the model monitoring controller, writer & monitoring
- stream functions, which are real time nuclio functions.
- By default, the image is mlrun/mlrun.
- :returns: model monitoring controller job as a dictionary.
+ :param base_period: The time period in minutes in which the model monitoring controller function
+ is triggered. By default, the base period is 10 minutes.
+ :param image: The image of the model monitoring controller, writer & monitoring
+ stream functions, which are real time nuclio functions.
+ By default, the image is mlrun/mlrun.
+ :param wait_for_deployment: If true, return only after the deployment is done on the backend.
+ Otherwise, deploy the controller on the background.
  """
  db = mlrun.db.get_run_db(secrets=self._secrets)
  db.update_model_monitoring_controller(
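Taken together, the keyword-only `wait_for_deployment` flags introduced above could be used to block until the monitoring infrastructure is up (a sketch; the argument values shown are the documented defaults)::

    project.enable_model_monitoring(
        base_period=10,
        image="mlrun/mlrun",
        deploy_histogram_data_drift_app=True,
        wait_for_deployment=True,  # wait for controller, writer, stream (and the drift app)
    )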
@@ -2103,24 +2209,73 @@ class MlrunProject(ModelObj):
  image=image,
  )

+ if wait_for_deployment:
+ self._wait_for_functions_deployment(
+ [mm_constants.MonitoringFunctionNames.APPLICATION_CONTROLLER]
+ )
+
  def disable_model_monitoring(
- self, *, delete_histogram_data_drift_app: bool = True
+ self,
+ *,
+ delete_resources: bool = True,
+ delete_stream_function: bool = False,
+ delete_histogram_data_drift_app: bool = True,
+ delete_user_applications: bool = False,
+ user_application_list: list[str] = None,
  ) -> None:
  """
- Note: This method is currently not advised for use. See ML-3432.
- Disable model monitoring by deleting the underlying functions infrastructure from MLRun database.
-
- :param delete_histogram_data_drift_app: Whether to delete the histogram data drift app.
+ Disable model monitoring application controller, writer, stream, histogram data drift application
+ and the user's applications functions, according to the given params.
+
+ :param delete_resources: If True, it would delete the model monitoring controller & writer
+ functions. Default True
+ :param delete_stream_function: If True, it would delete model monitoring stream function,
+ need to use wisely because if you're deleting this function
+ this can cause data loss in case you will want to
+ enable the model monitoring capability to the project.
+ Default False.
+ :param delete_histogram_data_drift_app: If True, it would delete the default histogram-based data drift
+ application. Default False.
+ :param delete_user_applications: If True, it would delete the user's model monitoring
+ application according to user_application_list, Default False.
+ :param user_application_list: List of the user's model monitoring application to disable.
+ Default all the applications.
+ Note: you have to set delete_user_applications to True
+ in order to delete the desired application.
  """
- db = mlrun.db.get_run_db(secrets=self._secrets)
- for fn_name in mm_constants.MonitoringFunctionNames.list():
- db.delete_function(project=self.name, name=fn_name)
- if delete_histogram_data_drift_app:
- db.delete_function(
- project=self.name,
- name=mm_constants.HistogramDataDriftApplicationConstants.NAME,
+ if not delete_user_applications and user_application_list:
+ raise mlrun.errors.MLRunInvalidArgumentError(
+ "user_application_list can be specified only if delete_user_applications is set to True"
  )

+ db = mlrun.db.get_run_db(secrets=self._secrets)
+ succeed = db.disable_model_monitoring(
+ project=self.name,
+ delete_resources=delete_resources,
+ delete_stream_function=delete_stream_function,
+ delete_histogram_data_drift_app=delete_histogram_data_drift_app,
+ delete_user_applications=delete_user_applications,
+ user_application_list=user_application_list,
+ )
+ if succeed and delete_resources:
+ if delete_resources:
+ logger.info("Model Monitoring disabled", project=self.name)
+ if delete_user_applications:
+ logger.info(
+ "All the desired monitoring application were deleted",
+ project=self.name,
+ )
+ else:
+ if delete_resources:
+ logger.info(
+ "Model Monitoring was not disabled properly", project=self.name
+ )
+ if delete_user_applications:
+ logger.info(
+ "Some of the desired monitoring application were not deleted",
+ project=self.name,
+ )
+
  def set_function(
  self,
  func: typing.Union[str, mlrun.runtimes.BaseRuntime] = None,
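A sketch of the reworked `disable_model_monitoring` call (the user application name is made up; note that `user_application_list` requires `delete_user_applications=True`)::

    project.disable_model_monitoring(
        delete_resources=True,              # remove the controller & writer functions
        delete_stream_function=False,       # keep the stream to avoid data loss on re-enable
        delete_histogram_data_drift_app=True,
        delete_user_applications=True,
        user_application_list=["my-monitoring-app"],  # hypothetical application name
    )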
@@ -2315,22 +2470,47 @@ class MlrunProject(ModelObj):
  """
  self.spec.remove_function(name)

- def remove_model_monitoring_function(self, name):
- """remove the specified model-monitoring-app function from the project and from the db
+ def remove_model_monitoring_function(self, name: Union[str, list[str]]):
+ """remove the specified model-monitoring-app function/s from the project spec

- :param name: name of the model-monitoring-app function (under the project)
+ :param name: name of the model-monitoring-function/s (under the project)
  """
- function = self.get_function(key=name)
- if (
- function.metadata.labels.get(mm_constants.ModelMonitoringAppLabel.KEY)
- == mm_constants.ModelMonitoringAppLabel.VAL
- ):
- self.remove_function(name=name)
- mlrun.db.get_run_db().delete_function(name=name.lower())
- logger.info(f"{name} function has been removed from {self.name} project")
+ names = name if isinstance(name, list) else [name]
+ for func_name in names:
+ function = self.get_function(key=func_name)
+ if (
+ function.metadata.labels.get(mm_constants.ModelMonitoringAppLabel.KEY)
+ == mm_constants.ModelMonitoringAppLabel.VAL
+ ):
+ self.remove_function(name=func_name)
+ logger.info(
+ f"{func_name} function has been removed from {self.name} project"
+ )
+ else:
+ raise logger.warn(
+ f"There is no model monitoring function with {func_name} name"
+ )
+
+ def delete_model_monitoring_function(self, name: Union[str, list[str]]):
+ """delete the specified model-monitoring-app function/s
+
+ :param name: name of the model-monitoring-function/s (under the project)
+ """
+ db = mlrun.db.get_run_db(secrets=self._secrets)
+ succeed = db.delete_model_monitoring_function(
+ project=self.name,
+ functions=name if isinstance(name, list) else [name],
+ )
+ if succeed:
+ logger.info(
+ "All the desired monitoring functions were deleted",
+ project=self.name,
+ functions=name,
+ )
  else:
- raise logger.error(
- f"There is no model monitoring function with {name} name"
+ logger.info(
+ "Some of the desired monitoring functions were not deleted",
+ project=self.name,
  )

  def get_function(
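Both methods above now accept a single name or a list of names; an illustrative call (function names are hypothetical)::

    # drop the functions from the project spec only
    project.remove_model_monitoring_function(["my-app-a", "my-app-b"])
    # delete the functions through the MLRun DB API
    project.delete_model_monitoring_function("my-app-c")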
@@ -2438,10 +2618,10 @@ class MlrunProject(ModelObj):
  def create_remote(self, url, name="origin", branch=None):
  """Create remote for the project git

- This method creates a new remote repository associated with the project's Git repository.
- If a remote with the specified name already exists, it will not be overwritten.
+ This method creates a new remote repository associated with the project's Git repository.
+ If a remote with the specified name already exists, it will not be overwritten.

- If you wish to update the URL of an existing remote, use the `set_remote` method instead.
+ If you wish to update the URL of an existing remote, use the `set_remote` method instead.

  :param url: remote git url
  :param name: name for the remote (default is 'origin')
@@ -2793,21 +2973,23 @@ class MlrunProject(ModelObj):
  (which will be converted to the class using its `from_crontab` constructor),
  see this link for help:
  https://apscheduler.readthedocs.io/en/3.x/modules/triggers/cron.html#module-apscheduler.triggers.cron
- for using the pre-defined workflow's schedule, set `schedule=True`
+ For using the pre-defined workflow's schedule, set `schedule=True`
  :param timeout: Timeout in seconds to wait for pipeline completion (watch will be activated)
  :param source: Source to use instead of the actual `project.spec.source` (used when engine is remote).
- Can be a one of:
- 1. Remote URL which is loaded dynamically to the workflow runner.
- 2. A path to the project's context on the workflow runner's image.
+ Can be one of:
+
+ * Remote URL which is loaded dynamically to the workflow runner.
+ * A path to the project's context on the workflow runner's image.
  Path can be absolute or relative to `project.spec.build.source_code_target_dir` if defined
  (enriched when building a project image with source, see `MlrunProject.build_image`).
  For other engines the source is used to validate that the code is up-to-date.
  :param cleanup_ttl:
  Pipeline cleanup ttl in secs (time to wait after workflow completion, at which point the
- Workflow and all its resources are deleted)
+ workflow and all its resources are deleted)
  :param notifications:
  List of notifications to send for workflow completion
- :returns: Run id
+
+ :returns: ~py:class:`~mlrun.projects.pipelines._PipelineRunStatus` instance
  """

  arguments = arguments or {}
@@ -2824,12 +3006,14 @@ class MlrunProject(ModelObj):
  "Remote repo is not defined, use .create_remote() + push()"
  )

- self.sync_functions(always=sync)
- if not self.spec._function_objects:
- raise ValueError(
- "There are no functions in the project."
- " Make sure you've set your functions with project.set_function()."
- )
+ if engine not in ["remote"]:
+ # for remote runs we don't require the functions to be synced as they can be loaded dynamically during run
+ self.sync_functions(always=sync)
+ if not self.spec._function_objects:
+ raise ValueError(
+ "There are no functions in the project."
+ " Make sure you've set your functions with project.set_function()."
+ )

  if not name and not workflow_path and not workflow_handler:
  raise ValueError("Workflow name, path, or handler must be specified")
@@ -2863,8 +3047,12 @@ class MlrunProject(ModelObj):
  engine = "remote"
  # The default engine is kfp if not given:
  workflow_engine = get_workflow_engine(engine or workflow_spec.engine, local)
- if not inner_engine and engine == "remote":
- inner_engine = get_workflow_engine(workflow_spec.engine, local).engine
+ if not inner_engine and workflow_engine.engine == "remote":
+ # if inner engine is set to remote, assume kfp as the default inner engine with remote as the runner
+ engine_kind = (
+ workflow_spec.engine if workflow_spec.engine != "remote" else "kfp"
+ )
+ inner_engine = get_workflow_engine(engine_kind, local).engine
  workflow_spec.engine = inner_engine or workflow_engine.engine

  run = workflow_engine.run(
@@ -2879,7 +3067,7 @@ class MlrunProject(ModelObj):
  notifications=notifications,
  )
  # run is None when scheduling
- if run and run.state == mlrun.run.RunStatuses.failed:
+ if run and run.state == mlrun_pipelines.common.models.RunStatuses.failed:
  return run
  if not workflow_spec.schedule:
  # Failure and schedule messages already logged
@@ -2888,14 +3076,17 @@ class MlrunProject(ModelObj):
  )
  workflow_spec.clear_tmp()
  if (timeout or watch) and not workflow_spec.schedule:
+ run_status_kwargs = {}
  status_engine = run._engine
  # run's engine gets replaced with inner engine if engine is remote,
  # so in that case we need to get the status from the remote engine manually
- # TODO: support watch for remote:local
- if engine == "remote" and status_engine.engine != "local":
+ if workflow_engine.engine == "remote":
  status_engine = _RemoteRunner
+ run_status_kwargs["inner_engine"] = run._engine

- status_engine.get_run_status(project=self, run=run, timeout=timeout)
+ status_engine.get_run_status(
+ project=self, run=run, timeout=timeout, **run_status_kwargs
+ )
  return run

  def save_workflow(self, name, target, artifact_path=None, ttl=None):
@@ -2995,17 +3186,18 @@ class MlrunProject(ModelObj):

  def set_model_monitoring_credentials(
  self,
- access_key: str = None,
- endpoint_store_connection: str = None,
- stream_path: str = None,
+ access_key: Optional[str] = None,
+ endpoint_store_connection: Optional[str] = None,
+ stream_path: Optional[str] = None,
+ tsdb_connection: Optional[str] = None,
  ):
  """Set the credentials that will be used by the project's model monitoring
  infrastructure functions.

- :param access_key: Model Monitoring access key for managing user permissions
  :param access_key: Model Monitoring access key for managing user permissions
  :param endpoint_store_connection: Endpoint store connection string
  :param stream_path: Path to the model monitoring stream
+ :param tsdb_connection: Connection string to the time series database
  """

  secrets_dict = {}
@@ -3028,6 +3220,16 @@ class MlrunProject(ModelObj):
  mlrun.common.schemas.model_monitoring.ProjectSecretKeys.STREAM_PATH
  ] = stream_path

+ if tsdb_connection:
+ if not tsdb_connection.startswith("taosws://"):
+ raise mlrun.errors.MLRunInvalidArgumentError(
+ "Currently only TDEngine websocket connection is supported for non-v3io TSDB,"
+ "please provide a full URL (e.g. taosws://user:password@host:port)"
+ )
+ secrets_dict[
+ mlrun.common.schemas.model_monitoring.ProjectSecretKeys.TSDB_CONNECTION
+ ] = tsdb_connection
+
  self.set_secrets(
  secrets=secrets_dict,
  provider=mlrun.common.schemas.SecretProviderName.kubernetes,
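The new `tsdb_connection` argument accepts a TDEngine websocket URL, as validated above (a sketch with placeholder credentials; the other connection strings are illustrative and depend on the deployment)::

    project.set_model_monitoring_credentials(
        endpoint_store_connection="v3io",  # placeholder value
        stream_path="v3io",                # placeholder value
        tsdb_connection="taosws://user:password@tdengine-host:6041",  # placeholder URL
    )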
@@ -3056,7 +3258,7 @@ class MlrunProject(ModelObj):
  notifications: list[mlrun.model.Notification] = None,
  returns: Optional[list[Union[str, dict[str, str]]]] = None,
  builder_env: Optional[dict] = None,
- ) -> typing.Union[mlrun.model.RunObject, kfp.dsl.ContainerOp]:
+ ) -> typing.Union[mlrun.model.RunObject, PipelineNodeWrapper]:
  """Run a local or remote task as part of a local/kubeflow pipeline

  example (use with project)::
@@ -3112,7 +3314,7 @@ class MlrunProject(ModelObj):
  artifact type can be given there. The artifact key must appear in the dictionary as
  "key": "the_key".
  :param builder_env: env vars dict for source archive config/credentials e.g. builder_env={"GIT_TOKEN": token}
- :return: MLRun RunObject or KubeFlow containerOp
+ :return: MLRun RunObject or PipelineNodeWrapper
  """
  return run_function(
  function,
@@ -3155,7 +3357,7 @@ class MlrunProject(ModelObj):
  requirements_file: str = None,
  extra_args: str = None,
  force_build: bool = False,
- ) -> typing.Union[BuildStatus, kfp.dsl.ContainerOp]:
+ ) -> typing.Union[BuildStatus, PipelineNodeWrapper]:
  """deploy ML function, build container with its dependencies

  :param function: name of the function (in the project) or function object
@@ -3266,7 +3468,6 @@ class MlrunProject(ModelObj):
  image: str = None,
  set_as_default: bool = True,
  with_mlrun: bool = None,
- skip_deployed: bool = False,
  base_image: str = None,
  commands: list = None,
  secret_name: str = None,
@@ -3277,7 +3478,7 @@ class MlrunProject(ModelObj):
  requirements_file: str = None,
  extra_args: str = None,
  target_dir: str = None,
- ) -> typing.Union[BuildStatus, kfp.dsl.ContainerOp]:
+ ) -> typing.Union[BuildStatus, PipelineNodeWrapper]:
  """Builder docker image for the project, based on the project's build config. Parameters allow to override
  the build config.
  If the project has a source configured and pull_at_runtime is not configured, this source will be cloned to the
@@ -3287,7 +3488,6 @@ class MlrunProject(ModelObj):
  used. If not set, the `mlconf.default_project_image_name` value will be used
  :param set_as_default: set `image` to be the project's default image (default False)
  :param with_mlrun: add the current mlrun package to the container build
- :param skip_deployed: *Deprecated* parameter is ignored
  :param base_image: base image name/path (commands and source code will be added to it) defaults to
  mlrun.mlconf.default_base_image
  :param commands: list of docker build (RUN) commands e.g. ['pip install pandas']
@@ -3312,14 +3512,6 @@ class MlrunProject(ModelObj):
  base_image=base_image,
  )

- if skip_deployed:
- warnings.warn(
- "The 'skip_deployed' parameter is deprecated and will be removed in 1.7.0. "
- "This parameter is ignored.",
- # TODO: remove in 1.7.0
- FutureWarning,
- )
-
  if not overwrite_build_params:
  # TODO: change overwrite_build_params default to True in 1.8.0
  warnings.warn(
@@ -3397,7 +3589,7 @@ class MlrunProject(ModelObj):
  verbose: bool = None,
  builder_env: dict = None,
  mock: bool = None,
- ) -> typing.Union[DeployStatus, kfp.dsl.ContainerOp]:
+ ) -> typing.Union[DeployStatus, PipelineNodeWrapper]:
  """deploy real-time (nuclio based) functions

  :param function: name of the function (in the project) or function object
@@ -3582,9 +3774,7 @@ class MlrunProject(ModelObj):
  :returns: List of function objects.
  """

- model_monitoring_labels_list = [
- f"{mm_constants.ModelMonitoringAppLabel.KEY}={mm_constants.ModelMonitoringAppLabel.VAL}"
- ]
+ model_monitoring_labels_list = [str(mm_constants.ModelMonitoringAppLabel())]
  if labels:
  model_monitoring_labels_list += labels
  return self.list_functions(
@@ -3598,7 +3788,10 @@ class MlrunProject(ModelObj):
  name: Optional[str] = None,
  uid: Optional[Union[str, list[str]]] = None,
  labels: Optional[Union[str, list[str]]] = None,
- state: Optional[str] = None,
+ state: Optional[
+ mlrun.common.runtimes.constants.RunStates
+ ] = None, # Backward compatibility
+ states: typing.Optional[list[mlrun.common.runtimes.constants.RunStates]] = None,
  sort: bool = True,
  last: int = 0,
  iter: bool = False,
@@ -3632,10 +3825,11 @@ class MlrunProject(ModelObj):
  :param labels: A list of labels to filter by. Label filters work by either filtering a specific value
  of a label (i.e. list("key=value")) or by looking for the existence of a given
  key (i.e. "key").
- :param state: List only runs whose state is specified.
+ :param state: Deprecated - List only runs whose state is specified.
+ :param states: List only runs whose state is one of the provided states.
  :param sort: Whether to sort the result according to their start time. Otherwise, results will be
  returned by their internal order in the DB (order will not be guaranteed).
- :param last: Deprecated - currently not used (will be removed in 1.8.0).
+ :param last: Deprecated - currently not used (will be removed in 1.9.0).
  :param iter: If ``True`` return runs from all iterations. Otherwise, return only runs whose ``iter`` is 0.
  :param start_time_from: Filter by run start time in ``[start_time_from, start_time_to]``.
  :param start_time_to: Filter by run start time in ``[start_time_from, start_time_to]``.
@@ -3643,13 +3837,22 @@ class MlrunProject(ModelObj):
  last_update_time_to)``.
  :param last_update_time_to: Filter by run last update time in ``(last_update_time_from, last_update_time_to)``.
  """
+ if state:
+ # TODO: Remove this in 1.9.0
+ warnings.warn(
+ "'state' is deprecated and will be removed in 1.9.0. Use 'states' instead.",
+ FutureWarning,
+ )
+
  db = mlrun.db.get_run_db(secrets=self._secrets)
  return db.list_runs(
  name,
  uid,
  self.metadata.name,
  labels=labels,
- state=state,
+ states=mlrun.utils.helpers.as_list(state)
+ if state is not None
+ else states or None,
  sort=sort,
  last=last,
  iter=iter,
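With `state` deprecated in favor of `states`, callers would migrate roughly like this (a sketch; state names follow mlrun's run state constants, e.g. "completed")::

    runs = project.list_runs(states=["completed"])   # preferred going forward
    runs = project.list_runs(state="completed")      # still accepted, emits a FutureWarning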
@@ -3744,15 +3947,15 @@ class MlrunProject(ModelObj):
  on MLRun and Nuclio sides, such as the 'host' attribute.
  Nuclio docs here: https://docs.nuclio.io/en/latest/reference/api-gateway/http.html

- :param api_gateway: An instance of :py:class:`~mlrun.runtimes.nuclio.APIGateway` representing the configuration
- of the API Gateway to be created or updated.
- :param wait_for_readiness: (Optional) A boolean indicating whether to wait for the API Gateway to become ready
- after creation or update (default is True)
- :param max_wait_time: (Optional) Maximum time to wait for API Gateway readiness in seconds (default is 90s)
+ :param api_gateway: An instance of :py:class:`~mlrun.runtimes.nuclio.APIGateway` representing the
+ configuration of the API Gateway to be created or updated.
+ :param wait_for_readiness: (Optional) A boolean indicating whether to wait for the API Gateway to become
+ ready after creation or update (default is True).
+ :param max_wait_time: (Optional) Maximum time to wait for API Gateway readiness in seconds (default is 90s)


- @return: An instance of :py:class:`~mlrun.runtimes.nuclio.APIGateway` with all fields populated based on the
- information retrieved from the Nuclio API
+ :returns: An instance of :py:class:`~mlrun.runtimes.nuclio.APIGateway` with all fields populated based on the
+ information retrieved from the Nuclio API
  """

  api_gateway_json = mlrun.db.get_run_db().store_api_gateway(
@@ -3774,8 +3977,8 @@ class MlrunProject(ModelObj):
  """
  Retrieves a list of Nuclio API gateways associated with the project.

- @return: List of :py:class:`~mlrun.runtimes.nuclio.api_gateway.APIGateway` objects representing
- the Nuclio API gateways associated with the project.
+ :returns: List of :py:class:`~mlrun.runtimes.nuclio.api_gateway.APIGateway` objects representing
+ the Nuclio API gateways associated with the project.
  """
  gateways_list = mlrun.db.get_run_db().list_api_gateways(self.name)
  return [
@@ -3811,9 +4014,12 @@ class MlrunProject(ModelObj):

  mlrun.db.get_run_db().delete_api_gateway(name=name, project=self.name)

- def store_alert_config(self, alert_data: AlertConfig, alert_name=None):
+ def store_alert_config(
+ self, alert_data: AlertConfig, alert_name=None
+ ) -> AlertConfig:
  """
  Create/modify an alert.
+
  :param alert_data: The data of the alert.
  :param alert_name: The name of the alert.
  :return: the created/modified alert.
@@ -3821,20 +4027,22 @@ class MlrunProject(ModelObj):
  db = mlrun.db.get_run_db(secrets=self._secrets)
  if alert_name is None:
  alert_name = alert_data.name
- return db.store_alert_config(alert_name, alert_data.dict(), self.metadata.name)
+ return db.store_alert_config(alert_name, alert_data, project=self.metadata.name)

  def get_alert_config(self, alert_name: str) -> AlertConfig:
  """
  Retrieve an alert.
+
  :param alert_name: The name of the alert to retrieve.
  :return: The alert object.
  """
  db = mlrun.db.get_run_db(secrets=self._secrets)
  return db.get_alert_config(alert_name, self.metadata.name)

- def list_alerts_configs(self):
+ def list_alerts_configs(self) -> list[AlertConfig]:
  """
  Retrieve list of alerts of a project.
+
  :return: All the alerts objects of the project.
  """
  db = mlrun.db.get_run_db(secrets=self._secrets)
@@ -3845,6 +4053,7 @@ class MlrunProject(ModelObj):
  ):
  """
  Delete an alert.
+
  :param alert_data: The data of the alert.
  :param alert_name: The name of the alert to delete.
  """
@@ -3864,6 +4073,7 @@ class MlrunProject(ModelObj):
  ):
  """
  Reset an alert.
+
  :param alert_data: The data of the alert.
  :param alert_name: The name of the alert to reset.
  """
@@ -3878,12 +4088,31 @@ class MlrunProject(ModelObj):
  alert_name = alert_data.name
  db.reset_alert_config(alert_name, self.metadata.name)

+ def get_alert_template(self, template_name: str) -> AlertTemplate:
+ """
+ Retrieve a specific alert template.
+
+ :param template_name: The name of the template to retrieve.
+ :return: The template object.
+ """
+ db = mlrun.db.get_run_db(secrets=self._secrets)
+ return db.get_alert_template(template_name)
+
+ def list_alert_templates(self) -> list[AlertTemplate]:
+ """
+ Retrieve list of all alert templates.
+
+ :return: All the alert template objects in the database.
+ """
+ db = mlrun.db.get_run_db(secrets=self._secrets)
+ return db.list_alert_templates()
+
  def _run_authenticated_git_action(
  self,
  action: Callable,
  remote: str,
- args: list = [],
- kwargs: dict = {},
+ args: list = None,
+ kwargs: dict = None,
  secrets: Union[SecretsStore, dict] = None,
  ):
  """Run an arbitrary Git routine while the remote is enriched with secrets
@@ -3903,6 +4132,8 @@ class MlrunProject(ModelObj):
  try:
  if is_remote_enriched:
  self.spec.repo.remotes[remote].set_url(enriched_remote, clean_remote)
+ args = args or []
+ kwargs = kwargs or {}
  action(*args, **kwargs)
  except RuntimeError as e:
  raise mlrun.errors.MLRunRuntimeError(
@@ -3978,12 +4209,21 @@ class MlrunProject(ModelObj):
  else:
  producer_dict = artifact.spec.producer

+ producer_tag = producer_dict.get("tag", None)
+ producer_project = producer_dict.get("project", None)
+ if not producer_tag or not producer_project:
+ # try resolving the producer tag from the uri
+ producer_uri = artifact.spec.producer.get("uri", "")
+ producer_project, producer_tag, _ = ArtifactProducer.parse_uri(
+ producer_uri
+ )
+
  if producer_dict.get("kind", "") == "run":
  return ArtifactProducer(
  name=producer_dict.get("name", ""),
  kind=producer_dict.get("kind", ""),
- project=producer_dict.get("project", ""),
- tag=producer_dict.get("tag", ""),
+ project=producer_project,
+ tag=producer_tag,
  ), True

  # do not retain the artifact's producer, replace it with the project as the producer