mlrun 1.4.0rc25__py3-none-any.whl → 1.5.0rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of mlrun has been flagged as potentially problematic.
Files changed (184)
  1. mlrun/__init__.py +2 -35
  2. mlrun/__main__.py +3 -41
  3. mlrun/api/api/api.py +6 -0
  4. mlrun/api/api/endpoints/feature_store.py +0 -4
  5. mlrun/api/api/endpoints/files.py +14 -2
  6. mlrun/api/api/endpoints/frontend_spec.py +2 -1
  7. mlrun/api/api/endpoints/functions.py +95 -59
  8. mlrun/api/api/endpoints/grafana_proxy.py +9 -9
  9. mlrun/api/api/endpoints/logs.py +17 -3
  10. mlrun/api/api/endpoints/model_endpoints.py +3 -2
  11. mlrun/api/api/endpoints/pipelines.py +1 -5
  12. mlrun/api/api/endpoints/projects.py +88 -0
  13. mlrun/api/api/endpoints/runs.py +48 -6
  14. mlrun/api/api/endpoints/submit.py +2 -1
  15. mlrun/api/api/endpoints/workflows.py +355 -0
  16. mlrun/api/api/utils.py +3 -4
  17. mlrun/api/crud/__init__.py +1 -0
  18. mlrun/api/crud/client_spec.py +6 -2
  19. mlrun/api/crud/feature_store.py +5 -0
  20. mlrun/api/crud/model_monitoring/__init__.py +1 -0
  21. mlrun/api/crud/model_monitoring/deployment.py +497 -0
  22. mlrun/api/crud/model_monitoring/grafana.py +96 -42
  23. mlrun/api/crud/model_monitoring/helpers.py +159 -0
  24. mlrun/api/crud/model_monitoring/model_endpoints.py +202 -476
  25. mlrun/api/crud/notifications.py +9 -4
  26. mlrun/api/crud/pipelines.py +6 -11
  27. mlrun/api/crud/projects.py +2 -2
  28. mlrun/api/crud/runtime_resources.py +4 -3
  29. mlrun/api/crud/runtimes/nuclio/helpers.py +5 -1
  30. mlrun/api/crud/secrets.py +21 -0
  31. mlrun/api/crud/workflows.py +352 -0
  32. mlrun/api/db/base.py +16 -1
  33. mlrun/api/db/init_db.py +2 -4
  34. mlrun/api/db/session.py +1 -1
  35. mlrun/api/db/sqldb/db.py +129 -31
  36. mlrun/api/db/sqldb/models/models_mysql.py +15 -1
  37. mlrun/api/db/sqldb/models/models_sqlite.py +16 -2
  38. mlrun/api/launcher.py +38 -6
  39. mlrun/api/main.py +3 -2
  40. mlrun/api/rundb/__init__.py +13 -0
  41. mlrun/{db → api/rundb}/sqldb.py +36 -84
  42. mlrun/api/runtime_handlers/__init__.py +56 -0
  43. mlrun/api/runtime_handlers/base.py +1247 -0
  44. mlrun/api/runtime_handlers/daskjob.py +209 -0
  45. mlrun/api/runtime_handlers/kubejob.py +37 -0
  46. mlrun/api/runtime_handlers/mpijob.py +147 -0
  47. mlrun/api/runtime_handlers/remotesparkjob.py +29 -0
  48. mlrun/api/runtime_handlers/sparkjob.py +148 -0
  49. mlrun/api/schemas/__init__.py +17 -6
  50. mlrun/api/utils/builder.py +1 -4
  51. mlrun/api/utils/clients/chief.py +14 -0
  52. mlrun/api/utils/clients/iguazio.py +33 -33
  53. mlrun/api/utils/clients/nuclio.py +2 -2
  54. mlrun/api/utils/periodic.py +9 -2
  55. mlrun/api/utils/projects/follower.py +14 -7
  56. mlrun/api/utils/projects/leader.py +2 -1
  57. mlrun/api/utils/projects/remotes/nop_follower.py +2 -2
  58. mlrun/api/utils/projects/remotes/nop_leader.py +2 -2
  59. mlrun/api/utils/runtimes/__init__.py +14 -0
  60. mlrun/api/utils/runtimes/nuclio.py +43 -0
  61. mlrun/api/utils/scheduler.py +98 -15
  62. mlrun/api/utils/singletons/db.py +5 -1
  63. mlrun/api/utils/singletons/project_member.py +4 -1
  64. mlrun/api/utils/singletons/scheduler.py +1 -1
  65. mlrun/artifacts/base.py +6 -6
  66. mlrun/artifacts/dataset.py +4 -4
  67. mlrun/artifacts/manager.py +2 -3
  68. mlrun/artifacts/model.py +2 -2
  69. mlrun/artifacts/plots.py +8 -8
  70. mlrun/common/db/__init__.py +14 -0
  71. mlrun/common/helpers.py +37 -0
  72. mlrun/{mlutils → common/model_monitoring}/__init__.py +3 -2
  73. mlrun/common/model_monitoring/helpers.py +69 -0
  74. mlrun/common/schemas/__init__.py +13 -1
  75. mlrun/common/schemas/auth.py +4 -1
  76. mlrun/common/schemas/client_spec.py +1 -1
  77. mlrun/common/schemas/function.py +17 -0
  78. mlrun/common/schemas/model_monitoring/__init__.py +48 -0
  79. mlrun/common/{model_monitoring.py → schemas/model_monitoring/constants.py} +11 -23
  80. mlrun/common/schemas/model_monitoring/grafana.py +55 -0
  81. mlrun/common/schemas/{model_endpoints.py → model_monitoring/model_endpoints.py} +32 -65
  82. mlrun/common/schemas/notification.py +1 -0
  83. mlrun/common/schemas/object.py +4 -0
  84. mlrun/common/schemas/project.py +1 -0
  85. mlrun/common/schemas/regex.py +1 -1
  86. mlrun/common/schemas/runs.py +1 -8
  87. mlrun/common/schemas/schedule.py +1 -8
  88. mlrun/common/schemas/workflow.py +54 -0
  89. mlrun/config.py +45 -42
  90. mlrun/datastore/__init__.py +21 -0
  91. mlrun/datastore/base.py +1 -1
  92. mlrun/datastore/datastore.py +9 -0
  93. mlrun/datastore/dbfs_store.py +168 -0
  94. mlrun/datastore/helpers.py +18 -0
  95. mlrun/datastore/sources.py +1 -0
  96. mlrun/datastore/store_resources.py +2 -5
  97. mlrun/datastore/v3io.py +1 -2
  98. mlrun/db/__init__.py +4 -68
  99. mlrun/db/base.py +12 -0
  100. mlrun/db/factory.py +65 -0
  101. mlrun/db/httpdb.py +175 -20
  102. mlrun/db/nopdb.py +4 -2
  103. mlrun/execution.py +4 -2
  104. mlrun/feature_store/__init__.py +1 -0
  105. mlrun/feature_store/api.py +1 -2
  106. mlrun/feature_store/common.py +2 -1
  107. mlrun/feature_store/feature_set.py +1 -11
  108. mlrun/feature_store/feature_vector.py +340 -2
  109. mlrun/feature_store/ingestion.py +5 -10
  110. mlrun/feature_store/retrieval/base.py +118 -104
  111. mlrun/feature_store/retrieval/dask_merger.py +17 -10
  112. mlrun/feature_store/retrieval/job.py +4 -1
  113. mlrun/feature_store/retrieval/local_merger.py +18 -18
  114. mlrun/feature_store/retrieval/spark_merger.py +21 -14
  115. mlrun/feature_store/retrieval/storey_merger.py +22 -16
  116. mlrun/kfpops.py +3 -9
  117. mlrun/launcher/base.py +57 -53
  118. mlrun/launcher/client.py +5 -4
  119. mlrun/launcher/factory.py +24 -13
  120. mlrun/launcher/local.py +6 -6
  121. mlrun/launcher/remote.py +4 -4
  122. mlrun/lists.py +0 -11
  123. mlrun/model.py +11 -17
  124. mlrun/model_monitoring/__init__.py +2 -22
  125. mlrun/model_monitoring/features_drift_table.py +1 -1
  126. mlrun/model_monitoring/helpers.py +22 -210
  127. mlrun/model_monitoring/model_endpoint.py +1 -1
  128. mlrun/model_monitoring/model_monitoring_batch.py +127 -50
  129. mlrun/model_monitoring/prometheus.py +219 -0
  130. mlrun/model_monitoring/stores/__init__.py +16 -11
  131. mlrun/model_monitoring/stores/kv_model_endpoint_store.py +95 -23
  132. mlrun/model_monitoring/stores/models/mysql.py +47 -29
  133. mlrun/model_monitoring/stores/models/sqlite.py +47 -29
  134. mlrun/model_monitoring/stores/sql_model_endpoint_store.py +31 -19
  135. mlrun/model_monitoring/{stream_processing_fs.py → stream_processing.py} +206 -64
  136. mlrun/model_monitoring/tracking_policy.py +104 -0
  137. mlrun/package/packager.py +6 -8
  138. mlrun/package/packagers/default_packager.py +121 -10
  139. mlrun/package/packagers/numpy_packagers.py +1 -1
  140. mlrun/platforms/__init__.py +0 -2
  141. mlrun/platforms/iguazio.py +0 -56
  142. mlrun/projects/pipelines.py +53 -159
  143. mlrun/projects/project.py +10 -37
  144. mlrun/render.py +1 -1
  145. mlrun/run.py +8 -124
  146. mlrun/runtimes/__init__.py +6 -42
  147. mlrun/runtimes/base.py +29 -1249
  148. mlrun/runtimes/daskjob.py +2 -198
  149. mlrun/runtimes/funcdoc.py +0 -9
  150. mlrun/runtimes/function.py +25 -29
  151. mlrun/runtimes/kubejob.py +5 -29
  152. mlrun/runtimes/local.py +1 -1
  153. mlrun/runtimes/mpijob/__init__.py +2 -2
  154. mlrun/runtimes/mpijob/abstract.py +10 -1
  155. mlrun/runtimes/mpijob/v1.py +0 -76
  156. mlrun/runtimes/mpijob/v1alpha1.py +1 -74
  157. mlrun/runtimes/nuclio.py +3 -2
  158. mlrun/runtimes/pod.py +28 -18
  159. mlrun/runtimes/remotesparkjob.py +1 -15
  160. mlrun/runtimes/serving.py +14 -6
  161. mlrun/runtimes/sparkjob/__init__.py +0 -1
  162. mlrun/runtimes/sparkjob/abstract.py +4 -131
  163. mlrun/runtimes/utils.py +0 -26
  164. mlrun/serving/routers.py +7 -7
  165. mlrun/serving/server.py +11 -8
  166. mlrun/serving/states.py +7 -1
  167. mlrun/serving/v2_serving.py +6 -6
  168. mlrun/utils/helpers.py +23 -42
  169. mlrun/utils/notifications/notification/__init__.py +4 -0
  170. mlrun/utils/notifications/notification/webhook.py +61 -0
  171. mlrun/utils/notifications/notification_pusher.py +5 -25
  172. mlrun/utils/regex.py +7 -2
  173. mlrun/utils/version/version.json +2 -2
  174. {mlrun-1.4.0rc25.dist-info → mlrun-1.5.0rc2.dist-info}/METADATA +26 -25
  175. {mlrun-1.4.0rc25.dist-info → mlrun-1.5.0rc2.dist-info}/RECORD +180 -158
  176. {mlrun-1.4.0rc25.dist-info → mlrun-1.5.0rc2.dist-info}/WHEEL +1 -1
  177. mlrun/mlutils/data.py +0 -160
  178. mlrun/mlutils/models.py +0 -78
  179. mlrun/mlutils/plots.py +0 -902
  180. mlrun/utils/model_monitoring.py +0 -249
  181. /mlrun/{api/db/sqldb/session.py → common/db/sql_session.py} +0 -0
  182. {mlrun-1.4.0rc25.dist-info → mlrun-1.5.0rc2.dist-info}/LICENSE +0 -0
  183. {mlrun-1.4.0rc25.dist-info → mlrun-1.5.0rc2.dist-info}/entry_points.txt +0 -0
  184. {mlrun-1.4.0rc25.dist-info → mlrun-1.5.0rc2.dist-info}/top_level.txt +0 -0
mlrun/projects/project.py CHANGED
@@ -38,8 +38,7 @@ import requests
 import yaml
 from deprecated import deprecated
 
-import mlrun.common.model_monitoring as model_monitoring_constants
-import mlrun.common.schemas
+import mlrun.common.schemas.model_monitoring
 import mlrun.db
 import mlrun.errors
 import mlrun.runtimes
@@ -376,7 +375,7 @@ def load_project(
         except Exception:
             pass
 
-    to_save = save and mlrun.mlconf.dbpath
+    to_save = bool(save and mlrun.mlconf.dbpath)
     if to_save:
         project.save()
 
@@ -1246,6 +1245,7 @@ class MlrunProject(ModelObj):
         handler=None,
         schedule: typing.Union[str, mlrun.common.schemas.ScheduleCronTrigger] = None,
         ttl=None,
+        image: str = None,
         **args,
     ):
         """add or update a workflow, specify a name and the code path
@@ -1261,6 +1261,7 @@
                          see this link for help:
                          https://apscheduler.readthedocs.io/en/3.x/modules/triggers/cron.html#module-apscheduler.triggers.cron
         :param ttl: pipeline ttl in secs (after that the pods will be removed)
+        :param image: image for workflow runner job, only for scheduled and remote workflows
         :param args: argument values (key=value, ..)
         """
 
@@ -1300,6 +1301,8 @@
             workflow["schedule"] = schedule
         if ttl:
             workflow["ttl"] = ttl
+        if image:
+            workflow["image"] = image
         self.spec.set_workflow(name, workflow)
 
     def set_artifact(
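
For reference, a minimal usage sketch of the new `image` argument (the project and workflow names are illustrative, not taken from the diff); per the docstring above it only takes effect for scheduled and remote workflows:

    import mlrun

    # illustrative names; any existing project and workflow file would do
    project = mlrun.get_or_create_project("my-project", context="./")
    project.set_workflow(
        "main",
        "workflow.py",
        schedule="0 0 * * *",  # APScheduler-style cron expression
        image="mlrun/mlrun",   # image used for the workflow runner job
    )
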
@@ -2199,15 +2202,12 @@
         sync: bool = False,
         watch: bool = False,
         dirty: bool = False,
-        # TODO: deprecated, remove in 1.5.0
-        ttl: int = None,
         engine: str = None,
         local: bool = None,
         schedule: typing.Union[
             str, mlrun.common.schemas.ScheduleCronTrigger, bool
         ] = None,
         timeout: int = None,
-        overwrite: bool = False,
         source: str = None,
         cleanup_ttl: int = None,
     ) -> _PipelineRunStatus:
@@ -2227,8 +2227,6 @@
         :param sync: force functions sync before run
         :param watch: wait for pipeline completion
         :param dirty: allow running the workflow when the git repo is dirty
-        :param ttl: pipeline cleanup ttl in secs (time to wait after workflow completion, at which point the
-                    workflow and all its resources are deleted) (deprecated, use cleanup_ttl instead)
         :param engine: workflow engine running the workflow.
                        supported values are 'kfp' (default), 'local' or 'remote'.
                        for setting engine for remote running use 'remote:local' or 'remote:kfp'.
@@ -2239,8 +2237,6 @@
                          https://apscheduler.readthedocs.io/en/3.x/modules/triggers/cron.html#module-apscheduler.triggers.cron
                          for using the pre-defined workflow's schedule, set `schedule=True`
         :param timeout: timeout in seconds to wait for pipeline completion (watch will be activated)
-        :param overwrite: (deprecated) replacing the schedule of the same workflow (under the same name) if exists
-                          with the new one.
         :param source: remote source to use instead of the actual `project.spec.source` (used when engine is remote).
                        for other engines the source is to validate that the code is up-to-date
         :param cleanup_ttl:
@@ -2249,22 +2245,6 @@
         :returns: run id
         """
 
-        if ttl:
-            warnings.warn(
-                "'ttl' is deprecated, use 'cleanup_ttl' instead. "
-                "This will be removed in 1.5.0",
-                # TODO: Remove this in 1.5.0
-                FutureWarning,
-            )
-
-        if overwrite:
-            warnings.warn(
-                "'overwrite' is deprecated, running a schedule is now an upsert operation. "
-                "This will be removed in 1.5.0",
-                # TODO: Remove this in 1.5.0
-                FutureWarning,
-            )
-
         arguments = arguments or {}
         need_repo = self.spec._need_repo()
         if self.spec.repo and self.spec.repo.is_dirty():
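
Callers that still pass the removed `ttl` or `overwrite` keywords will now get a `TypeError`; migration is a rename/drop, sketched here with illustrative values:

    import mlrun

    project = mlrun.get_or_create_project("my-project", context="./")

    # before (1.4.x, deprecated):
    #   project.run("main", ttl=3600, overwrite=True, schedule="0 0 * * *")

    # after (1.5.x): 'cleanup_ttl' replaces 'ttl'; scheduling is now an upsert,
    # so 'overwrite' is simply dropped
    project.run("main", cleanup_ttl=3600, schedule="0 0 * * *")
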
@@ -2297,9 +2277,7 @@
         else:
             workflow_spec = self.spec._workflows[name].copy()
             workflow_spec.merge_args(arguments)
-            workflow_spec.cleanup_ttl = (
-                cleanup_ttl or ttl or workflow_spec.cleanup_ttl or workflow_spec.ttl
-            )
+            workflow_spec.cleanup_ttl = cleanup_ttl or workflow_spec.cleanup_ttl
         workflow_spec.run_local = local
 
         name = f"{self.metadata.name}-{name}" if name else self.metadata.name
@@ -2372,11 +2350,6 @@
         expected_statuses=None,
         notifiers: CustomNotificationPusher = None,
     ):
-        warnings.warn(
-            "This is deprecated in 1.3.0, and will be removed in 1.5.0. "
-            "Use `timeout` parameter in `project.run()` method instead",
-            FutureWarning,
-        )
         return run._engine.get_run_status(
             project=self,
             run=run,
@@ -2497,12 +2470,12 @@
         secrets_dict = {}
         if access_key:
             secrets_dict[
-                model_monitoring_constants.ProjectSecretKeys.ACCESS_KEY
+                mlrun.common.schemas.model_monitoring.ProjectSecretKeys.ACCESS_KEY
             ] = access_key
 
         if endpoint_store_connection:
             secrets_dict[
-                model_monitoring_constants.ProjectSecretKeys.ENDPOINT_STORE_CONNECTION
+                mlrun.common.schemas.model_monitoring.ProjectSecretKeys.ENDPOINT_STORE_CONNECTION
             ] = endpoint_store_connection
 
         if stream_path:
@@ -2511,7 +2484,7 @@
                     "Custom kafka topic is not allowed"
                 )
             secrets_dict[
-                model_monitoring_constants.ProjectSecretKeys.STREAM_PATH
+                mlrun.common.schemas.model_monitoring.ProjectSecretKeys.STREAM_PATH
             ] = stream_path
 
         self.set_secrets(
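
The same module move affects user code that read the model monitoring secret keys directly; a migration sketch based on the import change shown above:

    # before (1.4.x):
    #   import mlrun.common.model_monitoring as model_monitoring_constants
    #   key = model_monitoring_constants.ProjectSecretKeys.ACCESS_KEY

    # after (1.5.x): the constants live under mlrun.common.schemas.model_monitoring
    import mlrun.common.schemas.model_monitoring as mm_schemas

    key = mm_schemas.ProjectSecretKeys.ACCESS_KEY
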
mlrun/render.py CHANGED
@@ -121,7 +121,7 @@ def artifacts_html(
     html = ""
 
     for artifact in artifacts:
-        # TODO: remove this in 1.5.0 once we no longer support legacy format
+        # TODO: remove this in 1.6.0 once we no longer support legacy format
         if mlrun.utils.is_legacy_artifact(artifact):
             attribute_value = artifact.get(attribute_name)
         else:
mlrun/run.py CHANGED
@@ -19,7 +19,6 @@ import socket
 import tempfile
 import time
 import uuid
-import warnings
 from base64 import b64decode
 from copy import deepcopy
 from os import environ, makedirs, path
@@ -36,9 +35,9 @@ import mlrun.errors
 import mlrun.utils.helpers
 from mlrun.kfpops import format_summary_from_kfp_run, show_kfp_run
 
+from .common.helpers import parse_versioned_object_uri
 from .config import config as mlconf
 from .datastore import store_manager
-from .db import get_or_set_dburl, get_run_db
 from .errors import MLRunInvalidArgumentError, MLRunTimeoutError
 from .execution import MLClientCtx
 from .model import BaseMetadata, RunObject, RunTemplate
@@ -63,8 +62,6 @@ from .utils import (
     extend_hub_uri_if_needed,
     get_in,
     logger,
-    new_pipe_metadata,
-    parse_versioned_object_uri,
     retry_until_successful,
     run_keys,
     update_in,
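
For third-party code that imported `parse_versioned_object_uri` from `mlrun.utils`, the added import above indicates the helper now lives in `mlrun.common.helpers`; a small sketch (the URI value is illustrative):

    # new location, matching the import added to mlrun/run.py above
    from mlrun.common.helpers import parse_versioned_object_uri

    # "project/name:tag" style URI
    project, name, tag, hash_key = parse_versioned_object_uri("my-project/trainer:latest")
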
@@ -476,7 +473,7 @@ def import_function(url="", secrets=None, db="", project=None, new_name=None):
     if url.startswith("db://"):
         url = url[5:]
         _project, name, tag, hash_key = parse_versioned_object_uri(url)
-        db = get_run_db(db or get_or_set_dburl(), secrets=secrets)
+        db = mlrun.db.get_run_db(db or mlrun.db.get_or_set_dburl(), secrets=secrets)
         runtime = db.get_function(name, _project, tag, hash_key)
         if not runtime:
             raise KeyError(f"function {name}:{tag} not found in the DB")
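
The `db://` handling in `import_function` is otherwise unchanged; for context, a usage sketch (project and function names are illustrative):

    import mlrun

    # load a function object previously saved to the MLRun DB
    fn = mlrun.import_function("db://my-project/trainer:latest")
    print(fn.metadata.name, fn.kind)
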
@@ -860,6 +857,12 @@ def code_to_function(
             kind=subkind,
             ignored_tags=ignored_tags,
         )
+        spec["spec"]["env"].append(
+            {
+                "name": "MLRUN_HTTPDB__NUCLIO__EXPLICIT_ACK",
+                "value": mlrun.mlconf.is_explicit_ack(),
+            }
+        )
         spec_kind = get_in(spec, "kind", "")
         if not kind and spec_kind not in ["", "Function"]:
             kind = spec_kind.lower()
@@ -943,125 +946,6 @@
     return r
 
 
-@deprecated(
-    version="1.3.0",
-    reason="'run_pipeline' will be removed in 1.5.0, use 'project.run' instead",
-    category=FutureWarning,
-)
-def run_pipeline(
-    pipeline,
-    arguments=None,
-    project=None,
-    experiment=None,
-    run=None,
-    namespace=None,
-    artifact_path=None,
-    ops=None,
-    url=None,
-    # TODO: deprecated, remove in 1.5.0
-    ttl=None,
-    remote: bool = True,
-    cleanup_ttl=None,
-):
-    """
-    remote KubeFlow pipeline execution
-
-    Submit a workflow task to KFP via mlrun API service
-
-    :param pipeline: KFP pipeline function or path to .yaml/.zip pipeline file
-    :param arguments: pipeline arguments
-    :param project: name of project
-    :param experiment: experiment name
-    :param run: optional, run name
-    :param namespace: Kubernetes namespace (if not using default)
-    :param url: optional, url to mlrun API service
-    :param artifact_path: target location/url for mlrun artifacts
-    :param ops: additional operators (.apply() to all pipeline functions)
-    :param ttl: pipeline cleanup ttl in secs (time to wait after workflow completion, at which point the
-                workflow and all its resources are deleted) (deprecated, use cleanup_ttl instead)
-    :param remote: read kfp data from mlrun service (default=True). Run pipeline from local kfp data (remote=False)
-                   is deprecated. Should not be used
-    :param cleanup_ttl:
-                pipeline cleanup ttl in secs (time to wait after workflow completion, at which point the
-                workflow and all its resources are deleted)
-
-    :returns: kubeflow pipeline id
-    """
-    if ttl:
-        warnings.warn(
-            "'ttl' is deprecated, use 'cleanup_ttl' instead. "
-            "This will be removed in 1.5.0",
-            # TODO: Remove this in 1.5.0
-            FutureWarning,
-        )
-
-    artifact_path = artifact_path or mlconf.artifact_path
-    project = project or mlconf.default_project
-    artifact_path = mlrun.utils.helpers.fill_artifact_path_template(
-        artifact_path, project or mlconf.default_project
-    )
-    if artifact_path and "{{run.uid}}" in artifact_path:
-        artifact_path.replace("{{run.uid}}", "{{workflow.uid}}")
-    if not artifact_path:
-        raise ValueError("artifact path was not specified")
-
-    namespace = namespace or mlconf.namespace
-    arguments = arguments or {}
-
-    if remote or url:
-        from .projects.pipelines import WorkflowSpec, pipeline_context
-
-        clear_pipeline_context = False
-        # if pipeline_context.workflow isn't set it means the `run_pipeline` method was called directly
-        # so to make sure the pipeline and functions inside are being run in the KFP pipeline we set the pipeline
-        # context with KFP engine
-        if not pipeline_context.workflow:
-            workflow_spec = WorkflowSpec(engine="kfp")
-            pipeline_context.set(pipeline_context.project, workflow=workflow_spec)
-            clear_pipeline_context = True
-
-        pipeline_run_id = _run_pipeline(
-            pipeline=pipeline,
-            arguments=arguments,
-            project=project,
-            experiment=experiment,
-            run=run,
-            namespace=namespace,
-            artifact_path=artifact_path,
-            ops=ops,
-            url=url,
-            cleanup_ttl=cleanup_ttl or ttl,
-        )
-
-        if clear_pipeline_context:
-            pipeline_context.clear()
-
-    # this shouldn't be used, keeping for backwards compatibility until the entire method is deprecated
-    else:
-        client = Client(namespace=namespace)
-        if isinstance(pipeline, str):
-            experiment = client.create_experiment(name=experiment)
-            run_result = client.run_pipeline(
-                experiment.id, run, pipeline, params=arguments
-            )
-        else:
-            conf = new_pipe_metadata(
-                artifact_path=artifact_path, cleanup_ttl=ttl, op_transformers=ops
-            )
-            run_result = client.create_run_from_pipeline_func(
-                pipeline,
-                arguments,
-                run_name=run,
-                experiment_name=experiment,
-                pipeline_conf=conf,
-            )
-
-        pipeline_run_id = run_result.run_id
-        logger.info(f"Pipeline run id={id}, check UI for progress")
-
-    return pipeline_run_id
-
-
 def _run_pipeline(
     pipeline,
     arguments=None,
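
`run_pipeline` was deprecated in 1.3.0 with a pointer to `project.run`, so existing callers migrate roughly as follows (project, workflow, and argument names are illustrative):

    import mlrun

    project = mlrun.get_or_create_project("my-project", context="./")

    # before (<=1.4.x, deprecated):
    #   run_id = mlrun.run_pipeline("workflow.py", arguments={"p1": 1}, project="my-project")

    # after (1.5.x): register the workflow on the project and run it
    project.set_workflow("main", "workflow.py")
    run_status = project.run("main", arguments={"p1": 1}, watch=True)
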
mlrun/runtimes/__init__.py CHANGED
@@ -30,22 +30,17 @@ from mlrun.runtimes.utils import (
     resolve_spark_operator_version,
 )
 
-from .base import BaseRuntime, BaseRuntimeHandler, RunError, RuntimeClassMode  # noqa
+from .base import BaseRuntime, RunError, RuntimeClassMode  # noqa
 from .constants import MPIJobCRDVersions
-from .daskjob import DaskCluster, DaskRuntimeHandler, get_dask_resource  # noqa
+from .daskjob import DaskCluster, get_dask_resource  # noqa
 from .function import RemoteRuntime
-from .kubejob import KubejobRuntime, KubeRuntimeHandler  # noqa
+from .kubejob import KubejobRuntime  # noqa
 from .local import HandlerRuntime, LocalRuntime  # noqa
-from .mpijob import (  # noqa
-    MpiRuntimeV1,
-    MpiRuntimeV1Alpha1,
-    MpiV1Alpha1RuntimeHandler,
-    MpiV1RuntimeHandler,
-)
+from .mpijob import MpiRuntimeV1, MpiRuntimeV1Alpha1  # noqa
 from .nuclio import nuclio_init_hook
-from .remotesparkjob import RemoteSparkRuntime, RemoteSparkRuntimeHandler
+from .remotesparkjob import RemoteSparkRuntime
 from .serving import ServingRuntime, new_v2_model_server
-from .sparkjob import Spark3Runtime, SparkRuntimeHandler
+from .sparkjob import Spark3Runtime
 
 # for legacy imports (MLModelServer moved from here to /serving)
 from ..serving import MLModelServer, new_v1_model_server  # noqa isort: skip
@@ -216,37 +211,6 @@ class RuntimeKinds(object):
 
 runtime_resources_map = {RuntimeKinds.dask: get_dask_resource()}
 
-runtime_handler_instances_cache = {}
-
-
-def get_runtime_handler(kind: str) -> BaseRuntimeHandler:
-    global runtime_handler_instances_cache
-    if kind == RuntimeKinds.mpijob:
-        mpijob_crd_version = resolve_mpijob_crd_version()
-        crd_version_to_runtime_handler_class = {
-            MPIJobCRDVersions.v1alpha1: MpiV1Alpha1RuntimeHandler,
-            MPIJobCRDVersions.v1: MpiV1RuntimeHandler,
-        }
-        runtime_handler_class = crd_version_to_runtime_handler_class[mpijob_crd_version]
-        if not runtime_handler_instances_cache.setdefault(RuntimeKinds.mpijob, {}).get(
-            mpijob_crd_version
-        ):
-            runtime_handler_instances_cache[RuntimeKinds.mpijob][
-                mpijob_crd_version
-            ] = runtime_handler_class()
-        return runtime_handler_instances_cache[RuntimeKinds.mpijob][mpijob_crd_version]
-
-    kind_runtime_handler_map = {
-        RuntimeKinds.dask: DaskRuntimeHandler,
-        RuntimeKinds.spark: SparkRuntimeHandler,
-        RuntimeKinds.remotespark: RemoteSparkRuntimeHandler,
-        RuntimeKinds.job: KubeRuntimeHandler,
-    }
-    runtime_handler_class = kind_runtime_handler_map[kind]
-    if not runtime_handler_instances_cache.get(kind):
-        runtime_handler_instances_cache[kind] = runtime_handler_class()
-    return runtime_handler_instances_cache[kind]
-
 
 def get_runtime_class(kind: str):
     if kind == RuntimeKinds.mpijob:
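
Note that the handler classes removed here do not disappear from the wheel: the file list above adds a parallel mlrun/api/runtime_handlers/ package (base.py, daskjob.py, kubejob.py, mpijob.py, remotesparkjob.py, sparkjob.py), so the runtime-handler machinery appears to have moved to the API layer. A hedged sketch of the presumed new import path, assuming `get_runtime_handler` kept its signature after the move:

    # assumption: the handler registry now lives in the new mlrun.api.runtime_handlers package
    from mlrun.api.runtime_handlers import get_runtime_handler
    from mlrun.runtimes import RuntimeKinds

    handler = get_runtime_handler(RuntimeKinds.job)
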