mlrun-1.5.0rc1-py3-none-any.whl → mlrun-1.5.0rc2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mlrun/__init__.py +2 -35
- mlrun/__main__.py +1 -40
- mlrun/api/api/api.py +6 -0
- mlrun/api/api/endpoints/feature_store.py +0 -4
- mlrun/api/api/endpoints/files.py +14 -2
- mlrun/api/api/endpoints/functions.py +6 -1
- mlrun/api/api/endpoints/logs.py +17 -3
- mlrun/api/api/endpoints/pipelines.py +1 -5
- mlrun/api/api/endpoints/projects.py +88 -0
- mlrun/api/api/endpoints/runs.py +48 -6
- mlrun/api/api/endpoints/workflows.py +355 -0
- mlrun/api/api/utils.py +1 -1
- mlrun/api/crud/__init__.py +1 -0
- mlrun/api/crud/client_spec.py +3 -0
- mlrun/api/crud/model_monitoring/deployment.py +36 -7
- mlrun/api/crud/model_monitoring/grafana.py +1 -1
- mlrun/api/crud/model_monitoring/helpers.py +32 -2
- mlrun/api/crud/model_monitoring/model_endpoints.py +27 -5
- mlrun/api/crud/notifications.py +9 -4
- mlrun/api/crud/pipelines.py +4 -9
- mlrun/api/crud/runtime_resources.py +4 -3
- mlrun/api/crud/secrets.py +21 -0
- mlrun/api/crud/workflows.py +352 -0
- mlrun/api/db/base.py +16 -1
- mlrun/api/db/sqldb/db.py +97 -16
- mlrun/api/launcher.py +26 -7
- mlrun/api/main.py +3 -4
- mlrun/{mlutils → api/rundb}/__init__.py +2 -6
- mlrun/{db → api/rundb}/sqldb.py +35 -83
- mlrun/api/runtime_handlers/__init__.py +56 -0
- mlrun/api/runtime_handlers/base.py +1247 -0
- mlrun/api/runtime_handlers/daskjob.py +209 -0
- mlrun/api/runtime_handlers/kubejob.py +37 -0
- mlrun/api/runtime_handlers/mpijob.py +147 -0
- mlrun/api/runtime_handlers/remotesparkjob.py +29 -0
- mlrun/api/runtime_handlers/sparkjob.py +148 -0
- mlrun/api/utils/builder.py +1 -4
- mlrun/api/utils/clients/chief.py +14 -0
- mlrun/api/utils/scheduler.py +98 -15
- mlrun/api/utils/singletons/db.py +4 -0
- mlrun/artifacts/manager.py +1 -2
- mlrun/common/schemas/__init__.py +6 -0
- mlrun/common/schemas/auth.py +4 -1
- mlrun/common/schemas/client_spec.py +1 -1
- mlrun/common/schemas/model_monitoring/__init__.py +1 -0
- mlrun/common/schemas/model_monitoring/constants.py +11 -0
- mlrun/common/schemas/project.py +1 -0
- mlrun/common/schemas/runs.py +1 -8
- mlrun/common/schemas/schedule.py +1 -8
- mlrun/common/schemas/workflow.py +54 -0
- mlrun/config.py +42 -40
- mlrun/datastore/sources.py +1 -1
- mlrun/db/__init__.py +4 -68
- mlrun/db/base.py +12 -0
- mlrun/db/factory.py +65 -0
- mlrun/db/httpdb.py +175 -19
- mlrun/db/nopdb.py +4 -2
- mlrun/execution.py +4 -2
- mlrun/feature_store/__init__.py +1 -0
- mlrun/feature_store/api.py +1 -2
- mlrun/feature_store/feature_set.py +0 -10
- mlrun/feature_store/feature_vector.py +340 -2
- mlrun/feature_store/ingestion.py +5 -10
- mlrun/feature_store/retrieval/base.py +118 -104
- mlrun/feature_store/retrieval/dask_merger.py +17 -10
- mlrun/feature_store/retrieval/job.py +4 -1
- mlrun/feature_store/retrieval/local_merger.py +18 -18
- mlrun/feature_store/retrieval/spark_merger.py +21 -14
- mlrun/feature_store/retrieval/storey_merger.py +21 -15
- mlrun/kfpops.py +3 -9
- mlrun/launcher/base.py +3 -3
- mlrun/launcher/client.py +3 -2
- mlrun/launcher/factory.py +16 -13
- mlrun/lists.py +0 -11
- mlrun/model.py +9 -15
- mlrun/model_monitoring/helpers.py +15 -25
- mlrun/model_monitoring/model_monitoring_batch.py +72 -4
- mlrun/model_monitoring/prometheus.py +219 -0
- mlrun/model_monitoring/stores/__init__.py +15 -9
- mlrun/model_monitoring/stores/sql_model_endpoint_store.py +3 -1
- mlrun/model_monitoring/stream_processing.py +181 -29
- mlrun/package/packager.py +6 -8
- mlrun/package/packagers/default_packager.py +121 -10
- mlrun/platforms/__init__.py +0 -2
- mlrun/platforms/iguazio.py +0 -56
- mlrun/projects/pipelines.py +57 -158
- mlrun/projects/project.py +6 -32
- mlrun/render.py +1 -1
- mlrun/run.py +2 -124
- mlrun/runtimes/__init__.py +6 -42
- mlrun/runtimes/base.py +26 -1241
- mlrun/runtimes/daskjob.py +2 -198
- mlrun/runtimes/function.py +16 -5
- mlrun/runtimes/kubejob.py +5 -29
- mlrun/runtimes/mpijob/__init__.py +2 -2
- mlrun/runtimes/mpijob/abstract.py +10 -1
- mlrun/runtimes/mpijob/v1.py +0 -76
- mlrun/runtimes/mpijob/v1alpha1.py +1 -74
- mlrun/runtimes/nuclio.py +3 -2
- mlrun/runtimes/pod.py +0 -10
- mlrun/runtimes/remotesparkjob.py +1 -15
- mlrun/runtimes/serving.py +1 -1
- mlrun/runtimes/sparkjob/__init__.py +0 -1
- mlrun/runtimes/sparkjob/abstract.py +4 -131
- mlrun/serving/states.py +1 -1
- mlrun/utils/db.py +0 -2
- mlrun/utils/helpers.py +19 -13
- mlrun/utils/notifications/notification_pusher.py +5 -25
- mlrun/utils/regex.py +7 -2
- mlrun/utils/version/version.json +2 -2
- {mlrun-1.5.0rc1.dist-info → mlrun-1.5.0rc2.dist-info}/METADATA +24 -23
- {mlrun-1.5.0rc1.dist-info → mlrun-1.5.0rc2.dist-info}/RECORD +116 -107
- {mlrun-1.5.0rc1.dist-info → mlrun-1.5.0rc2.dist-info}/WHEEL +1 -1
- mlrun/mlutils/data.py +0 -160
- mlrun/mlutils/models.py +0 -78
- mlrun/mlutils/plots.py +0 -902
- {mlrun-1.5.0rc1.dist-info → mlrun-1.5.0rc2.dist-info}/LICENSE +0 -0
- {mlrun-1.5.0rc1.dist-info → mlrun-1.5.0rc2.dist-info}/entry_points.txt +0 -0
- {mlrun-1.5.0rc1.dist-info → mlrun-1.5.0rc2.dist-info}/top_level.txt +0 -0
mlrun/projects/project.py CHANGED

@@ -375,7 +375,7 @@ def load_project(
     except Exception:
         pass

-    to_save = save and mlrun.mlconf.dbpath
+    to_save = bool(save and mlrun.mlconf.dbpath)
     if to_save:
         project.save()

@@ -1245,6 +1245,7 @@ class MlrunProject(ModelObj):
         handler=None,
         schedule: typing.Union[str, mlrun.common.schemas.ScheduleCronTrigger] = None,
         ttl=None,
+        image: str = None,
         **args,
     ):
         """add or update a workflow, specify a name and the code path
@@ -1260,6 +1261,7 @@
                          see this link for help:
                          https://apscheduler.readthedocs.io/en/3.x/modules/triggers/cron.html#module-apscheduler.triggers.cron
         :param ttl: pipeline ttl in secs (after that the pods will be removed)
+        :param image: image for workflow runner job, only for scheduled and remote workflows
         :param args: argument values (key=value, ..)
         """

@@ -1299,6 +1301,8 @@
             workflow["schedule"] = schedule
         if ttl:
             workflow["ttl"] = ttl
+        if image:
+            workflow["image"] = image
         self.spec.set_workflow(name, workflow)

     def set_artifact(
@@ -2198,15 +2202,12 @@
         sync: bool = False,
         watch: bool = False,
         dirty: bool = False,
-        # TODO: deprecated, remove in 1.5.0
-        ttl: int = None,
         engine: str = None,
         local: bool = None,
         schedule: typing.Union[
             str, mlrun.common.schemas.ScheduleCronTrigger, bool
         ] = None,
         timeout: int = None,
-        overwrite: bool = False,
         source: str = None,
         cleanup_ttl: int = None,
     ) -> _PipelineRunStatus:
@@ -2226,8 +2227,6 @@
         :param sync:      force functions sync before run
         :param watch:     wait for pipeline completion
         :param dirty:     allow running the workflow when the git repo is dirty
-        :param ttl:       pipeline cleanup ttl in secs (time to wait after workflow completion, at which point the
-                          workflow and all its resources are deleted) (deprecated, use cleanup_ttl instead)
         :param engine:    workflow engine running the workflow.
                           supported values are 'kfp' (default), 'local' or 'remote'.
                           for setting engine for remote running use 'remote:local' or 'remote:kfp'.
@@ -2238,8 +2237,6 @@
                           https://apscheduler.readthedocs.io/en/3.x/modules/triggers/cron.html#module-apscheduler.triggers.cron
                           for using the pre-defined workflow's schedule, set `schedule=True`
         :param timeout:   timeout in seconds to wait for pipeline completion (watch will be activated)
-        :param overwrite: (deprecated) replacing the schedule of the same workflow (under the same name) if exists
-                          with the new one.
         :param source:    remote source to use instead of the actual `project.spec.source` (used when engine is remote).
                           for other engines the source is to validate that the code is up-to-date
         :param cleanup_ttl:
@@ -2248,22 +2245,6 @@
         :returns: run id
         """

-        if ttl:
-            warnings.warn(
-                "'ttl' is deprecated, use 'cleanup_ttl' instead. "
-                "This will be removed in 1.5.0",
-                # TODO: Remove this in 1.5.0
-                FutureWarning,
-            )
-
-        if overwrite:
-            warnings.warn(
-                "'overwrite' is deprecated, running a schedule is now an upsert operation. "
-                "This will be removed in 1.5.0",
-                # TODO: Remove this in 1.5.0
-                FutureWarning,
-            )
-
         arguments = arguments or {}
         need_repo = self.spec._need_repo()
         if self.spec.repo and self.spec.repo.is_dirty():
@@ -2296,9 +2277,7 @@
         else:
             workflow_spec = self.spec._workflows[name].copy()
             workflow_spec.merge_args(arguments)
-            workflow_spec.cleanup_ttl = (
-                cleanup_ttl or ttl or workflow_spec.cleanup_ttl or workflow_spec.ttl
-            )
+            workflow_spec.cleanup_ttl = cleanup_ttl or workflow_spec.cleanup_ttl
             workflow_spec.run_local = local

         name = f"{self.metadata.name}-{name}" if name else self.metadata.name
@@ -2371,11 +2350,6 @@
         expected_statuses=None,
         notifiers: CustomNotificationPusher = None,
     ):
-        warnings.warn(
-            "This is deprecated in 1.3.0, and will be removed in 1.5.0. "
-            "Use `timeout` parameter in `project.run()` method instead",
-            FutureWarning,
-        )
         return run._engine.get_run_status(
             project=self,
             run=run,
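Note: this diff adds an `image` argument to `MlrunProject.set_workflow` (used for the workflow runner job in scheduled and remote workflows) and removes the previously deprecated `ttl` and `overwrite` arguments from `MlrunProject.run` (use `cleanup_ttl`; schedules are now upserted). A minimal, hedged sketch of how client code might use the updated API; the project name, workflow file, schedule, and image tag below are placeholders, not values from this diff:

    import mlrun

    # hypothetical project/workflow names
    project = mlrun.get_or_create_project("my-project", context="./")
    project.set_workflow(
        "main",                     # workflow name
        "workflow.py",              # workflow code path
        schedule="0 * * * *",       # optional cron schedule
        image="mlrun/mlrun",        # new in 1.5.0rc2: runner image for scheduled/remote workflows
    )
    # `ttl` and `overwrite` are gone from project.run(); use cleanup_ttl instead
    run = project.run("main", engine="remote", cleanup_ttl=3600)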
mlrun/render.py CHANGED

@@ -121,7 +121,7 @@ def artifacts_html(
     html = ""

     for artifact in artifacts:
-        # TODO: remove this in 1.
+        # TODO: remove this in 1.6.0 once we no longer support legacy format
         if mlrun.utils.is_legacy_artifact(artifact):
             attribute_value = artifact.get(attribute_name)
         else:
mlrun/run.py CHANGED

@@ -19,7 +19,6 @@ import socket
 import tempfile
 import time
 import uuid
-import warnings
 from base64 import b64decode
 from copy import deepcopy
 from os import environ, makedirs, path
@@ -39,7 +38,6 @@ from mlrun.kfpops import format_summary_from_kfp_run, show_kfp_run
 from .common.helpers import parse_versioned_object_uri
 from .config import config as mlconf
 from .datastore import store_manager
-from .db import get_or_set_dburl, get_run_db
 from .errors import MLRunInvalidArgumentError, MLRunTimeoutError
 from .execution import MLClientCtx
 from .model import BaseMetadata, RunObject, RunTemplate
@@ -64,7 +62,6 @@ from .utils import (
     extend_hub_uri_if_needed,
     get_in,
     logger,
-    new_pipe_metadata,
     retry_until_successful,
     run_keys,
     update_in,
@@ -476,7 +473,7 @@ def import_function(url="", secrets=None, db="", project=None, new_name=None):
     if url.startswith("db://"):
         url = url[5:]
         _project, name, tag, hash_key = parse_versioned_object_uri(url)
-        db = get_run_db(db or get_or_set_dburl(), secrets=secrets)
+        db = mlrun.db.get_run_db(db or mlrun.db.get_or_set_dburl(), secrets=secrets)
         runtime = db.get_function(name, _project, tag, hash_key)
         if not runtime:
             raise KeyError(f"function {name}:{tag} not found in the DB")
@@ -863,7 +860,7 @@ def code_to_function(
             spec["spec"]["env"].append(
                 {
                     "name": "MLRUN_HTTPDB__NUCLIO__EXPLICIT_ACK",
-                    "value": mlrun.mlconf.
+                    "value": mlrun.mlconf.is_explicit_ack(),
                 }
             )
         spec_kind = get_in(spec, "kind", "")
@@ -949,125 +946,6 @@
     return r


-@deprecated(
-    version="1.3.0",
-    reason="'run_pipeline' will be removed in 1.5.0, use 'project.run' instead",
-    category=FutureWarning,
-)
-def run_pipeline(
-    pipeline,
-    arguments=None,
-    project=None,
-    experiment=None,
-    run=None,
-    namespace=None,
-    artifact_path=None,
-    ops=None,
-    url=None,
-    # TODO: deprecated, remove in 1.5.0
-    ttl=None,
-    remote: bool = True,
-    cleanup_ttl=None,
-):
-    """
-    remote KubeFlow pipeline execution
-
-    Submit a workflow task to KFP via mlrun API service
-
-    :param pipeline:   KFP pipeline function or path to .yaml/.zip pipeline file
-    :param arguments:  pipeline arguments
-    :param project:    name of project
-    :param experiment: experiment name
-    :param run:        optional, run name
-    :param namespace:  Kubernetes namespace (if not using default)
-    :param url:        optional, url to mlrun API service
-    :param artifact_path:  target location/url for mlrun artifacts
-    :param ops:        additional operators (.apply() to all pipeline functions)
-    :param ttl:        pipeline cleanup ttl in secs (time to wait after workflow completion, at which point the
-                       workflow and all its resources are deleted) (deprecated, use cleanup_ttl instead)
-    :param remote:     read kfp data from mlrun service (default=True). Run pipeline from local kfp data (remote=False)
-                       is deprecated. Should not be used
-    :param cleanup_ttl:
-                       pipeline cleanup ttl in secs (time to wait after workflow completion, at which point the
-                       workflow and all its resources are deleted)
-
-    :returns: kubeflow pipeline id
-    """
-    if ttl:
-        warnings.warn(
-            "'ttl' is deprecated, use 'cleanup_ttl' instead. "
-            "This will be removed in 1.5.0",
-            # TODO: Remove this in 1.5.0
-            FutureWarning,
-        )
-
-    artifact_path = artifact_path or mlconf.artifact_path
-    project = project or mlconf.default_project
-    artifact_path = mlrun.utils.helpers.fill_artifact_path_template(
-        artifact_path, project or mlconf.default_project
-    )
-    if artifact_path and "{{run.uid}}" in artifact_path:
-        artifact_path.replace("{{run.uid}}", "{{workflow.uid}}")
-    if not artifact_path:
-        raise ValueError("artifact path was not specified")
-
-    namespace = namespace or mlconf.namespace
-    arguments = arguments or {}
-
-    if remote or url:
-        from .projects.pipelines import WorkflowSpec, pipeline_context
-
-        clear_pipeline_context = False
-        # if pipeline_context.workflow isn't set it means the `run_pipeline` method was called directly
-        # so to make sure the pipeline and functions inside are being run in the KFP pipeline we set the pipeline
-        # context with KFP engine
-        if not pipeline_context.workflow:
-            workflow_spec = WorkflowSpec(engine="kfp")
-            pipeline_context.set(pipeline_context.project, workflow=workflow_spec)
-            clear_pipeline_context = True
-
-        pipeline_run_id = _run_pipeline(
-            pipeline=pipeline,
-            arguments=arguments,
-            project=project,
-            experiment=experiment,
-            run=run,
-            namespace=namespace,
-            artifact_path=artifact_path,
-            ops=ops,
-            url=url,
-            cleanup_ttl=cleanup_ttl or ttl,
-        )
-
-        if clear_pipeline_context:
-            pipeline_context.clear()
-
-    # this shouldn't be used, keeping for backwards compatibility until the entire method is deprecated
-    else:
-        client = Client(namespace=namespace)
-        if isinstance(pipeline, str):
-            experiment = client.create_experiment(name=experiment)
-            run_result = client.run_pipeline(
-                experiment.id, run, pipeline, params=arguments
-            )
-        else:
-            conf = new_pipe_metadata(
-                artifact_path=artifact_path, cleanup_ttl=ttl, op_transformers=ops
-            )
-            run_result = client.create_run_from_pipeline_func(
-                pipeline,
-                arguments,
-                run_name=run,
-                experiment_name=experiment,
-                pipeline_conf=conf,
-            )
-
-        pipeline_run_id = run_result.run_id
-        logger.info(f"Pipeline run id={id}, check UI for progress")
-
-    return pipeline_run_id
-
-
 def _run_pipeline(
     pipeline,
     arguments=None,
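Note: the removed `run_pipeline` function's own deprecation notice names `project.run` as its replacement. A hedged sketch of the equivalent call through the project object; the project name, workflow name, and arguments below are placeholders:

    import mlrun

    # instead of the removed mlrun.run_pipeline(...), submit the workflow via the project
    project = mlrun.get_or_create_project("my-project", context="./")
    run_status = project.run(
        "main",                  # workflow name registered with project.set_workflow
        arguments={"p1": 1},     # pipeline arguments (was `arguments` in run_pipeline)
        engine="remote",         # submit through the mlrun API service, as run_pipeline did
        cleanup_ttl=3600,        # replaces run_pipeline's removed `ttl` argument
    )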
mlrun/runtimes/__init__.py CHANGED

@@ -30,22 +30,17 @@ from mlrun.runtimes.utils import (
     resolve_spark_operator_version,
 )

-from .base import BaseRuntime,
+from .base import BaseRuntime, RunError, RuntimeClassMode  # noqa
 from .constants import MPIJobCRDVersions
-from .daskjob import DaskCluster,
+from .daskjob import DaskCluster, get_dask_resource  # noqa
 from .function import RemoteRuntime
-from .kubejob import KubejobRuntime
+from .kubejob import KubejobRuntime  # noqa
 from .local import HandlerRuntime, LocalRuntime  # noqa
-from .mpijob import (
-    MpiRuntimeV1,
-    MpiRuntimeV1Alpha1,
-    MpiV1Alpha1RuntimeHandler,
-    MpiV1RuntimeHandler,
-)
+from .mpijob import MpiRuntimeV1, MpiRuntimeV1Alpha1  # noqa
 from .nuclio import nuclio_init_hook
-from .remotesparkjob import RemoteSparkRuntime
+from .remotesparkjob import RemoteSparkRuntime
 from .serving import ServingRuntime, new_v2_model_server
-from .sparkjob import Spark3Runtime
+from .sparkjob import Spark3Runtime

 # for legacy imports (MLModelServer moved from here to /serving)
 from ..serving import MLModelServer, new_v1_model_server  # noqa isort: skip
@@ -216,37 +211,6 @@ class RuntimeKinds(object):

 runtime_resources_map = {RuntimeKinds.dask: get_dask_resource()}

-runtime_handler_instances_cache = {}
-
-
-def get_runtime_handler(kind: str) -> BaseRuntimeHandler:
-    global runtime_handler_instances_cache
-    if kind == RuntimeKinds.mpijob:
-        mpijob_crd_version = resolve_mpijob_crd_version()
-        crd_version_to_runtime_handler_class = {
-            MPIJobCRDVersions.v1alpha1: MpiV1Alpha1RuntimeHandler,
-            MPIJobCRDVersions.v1: MpiV1RuntimeHandler,
-        }
-        runtime_handler_class = crd_version_to_runtime_handler_class[mpijob_crd_version]
-        if not runtime_handler_instances_cache.setdefault(RuntimeKinds.mpijob, {}).get(
-            mpijob_crd_version
-        ):
-            runtime_handler_instances_cache[RuntimeKinds.mpijob][
-                mpijob_crd_version
-            ] = runtime_handler_class()
-        return runtime_handler_instances_cache[RuntimeKinds.mpijob][mpijob_crd_version]
-
-    kind_runtime_handler_map = {
-        RuntimeKinds.dask: DaskRuntimeHandler,
-        RuntimeKinds.spark: SparkRuntimeHandler,
-        RuntimeKinds.remotespark: RemoteSparkRuntimeHandler,
-        RuntimeKinds.job: KubeRuntimeHandler,
-    }
-    runtime_handler_class = kind_runtime_handler_map[kind]
-    if not runtime_handler_instances_cache.get(kind):
-        runtime_handler_instances_cache[kind] = runtime_handler_class()
-    return runtime_handler_instances_cache[kind]
-

 def get_runtime_class(kind: str):
     if kind == RuntimeKinds.mpijob: