mlrun 1.6.4rc2__py3-none-any.whl → 1.7.0rc20__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mlrun might be problematic. Click here for more details.
- mlrun/__init__.py +11 -1
- mlrun/__main__.py +26 -112
- mlrun/alerts/__init__.py +15 -0
- mlrun/alerts/alert.py +144 -0
- mlrun/api/schemas/__init__.py +5 -4
- mlrun/artifacts/__init__.py +8 -3
- mlrun/artifacts/base.py +46 -257
- mlrun/artifacts/dataset.py +11 -192
- mlrun/artifacts/manager.py +47 -48
- mlrun/artifacts/model.py +31 -159
- mlrun/artifacts/plots.py +23 -380
- mlrun/common/constants.py +69 -0
- mlrun/common/db/sql_session.py +2 -3
- mlrun/common/formatters/__init__.py +19 -0
- mlrun/common/formatters/artifact.py +21 -0
- mlrun/common/formatters/base.py +78 -0
- mlrun/common/formatters/function.py +41 -0
- mlrun/common/formatters/pipeline.py +53 -0
- mlrun/common/formatters/project.py +51 -0
- mlrun/common/helpers.py +1 -2
- mlrun/common/model_monitoring/helpers.py +9 -5
- mlrun/{runtimes → common/runtimes}/constants.py +37 -9
- mlrun/common/schemas/__init__.py +24 -4
- mlrun/common/schemas/alert.py +203 -0
- mlrun/common/schemas/api_gateway.py +148 -0
- mlrun/common/schemas/artifact.py +18 -8
- mlrun/common/schemas/auth.py +11 -5
- mlrun/common/schemas/background_task.py +1 -1
- mlrun/common/schemas/client_spec.py +4 -1
- mlrun/common/schemas/feature_store.py +16 -16
- mlrun/common/schemas/frontend_spec.py +8 -7
- mlrun/common/schemas/function.py +5 -1
- mlrun/common/schemas/hub.py +11 -18
- mlrun/common/schemas/memory_reports.py +2 -2
- mlrun/common/schemas/model_monitoring/__init__.py +18 -3
- mlrun/common/schemas/model_monitoring/constants.py +83 -26
- mlrun/common/schemas/model_monitoring/grafana.py +13 -9
- mlrun/common/schemas/model_monitoring/model_endpoints.py +99 -16
- mlrun/common/schemas/notification.py +4 -4
- mlrun/common/schemas/object.py +2 -2
- mlrun/{runtimes/mpijob/v1alpha1.py → common/schemas/pagination.py} +10 -13
- mlrun/common/schemas/pipeline.py +1 -10
- mlrun/common/schemas/project.py +24 -23
- mlrun/common/schemas/runtime_resource.py +8 -12
- mlrun/common/schemas/schedule.py +3 -3
- mlrun/common/schemas/tag.py +1 -2
- mlrun/common/schemas/workflow.py +2 -2
- mlrun/common/types.py +7 -1
- mlrun/config.py +54 -17
- mlrun/data_types/to_pandas.py +10 -12
- mlrun/datastore/__init__.py +5 -8
- mlrun/datastore/alibaba_oss.py +130 -0
- mlrun/datastore/azure_blob.py +17 -5
- mlrun/datastore/base.py +62 -39
- mlrun/datastore/datastore.py +28 -9
- mlrun/datastore/datastore_profile.py +146 -20
- mlrun/datastore/filestore.py +0 -1
- mlrun/datastore/google_cloud_storage.py +6 -2
- mlrun/datastore/hdfs.py +56 -0
- mlrun/datastore/inmem.py +2 -2
- mlrun/datastore/redis.py +6 -2
- mlrun/datastore/s3.py +9 -0
- mlrun/datastore/snowflake_utils.py +43 -0
- mlrun/datastore/sources.py +201 -96
- mlrun/datastore/spark_utils.py +1 -2
- mlrun/datastore/store_resources.py +7 -7
- mlrun/datastore/targets.py +358 -104
- mlrun/datastore/utils.py +72 -58
- mlrun/datastore/v3io.py +5 -1
- mlrun/db/base.py +185 -35
- mlrun/db/factory.py +1 -1
- mlrun/db/httpdb.py +614 -179
- mlrun/db/nopdb.py +210 -26
- mlrun/errors.py +12 -1
- mlrun/execution.py +41 -24
- mlrun/feature_store/__init__.py +0 -2
- mlrun/feature_store/api.py +40 -72
- mlrun/feature_store/common.py +1 -1
- mlrun/feature_store/feature_set.py +76 -55
- mlrun/feature_store/feature_vector.py +28 -30
- mlrun/feature_store/ingestion.py +7 -6
- mlrun/feature_store/retrieval/base.py +16 -11
- mlrun/feature_store/retrieval/conversion.py +11 -13
- mlrun/feature_store/retrieval/dask_merger.py +2 -0
- mlrun/feature_store/retrieval/job.py +9 -3
- mlrun/feature_store/retrieval/local_merger.py +2 -0
- mlrun/feature_store/retrieval/spark_merger.py +34 -24
- mlrun/feature_store/steps.py +37 -34
- mlrun/features.py +9 -20
- mlrun/frameworks/_common/artifacts_library.py +9 -9
- mlrun/frameworks/_common/mlrun_interface.py +5 -5
- mlrun/frameworks/_common/model_handler.py +48 -48
- mlrun/frameworks/_common/plan.py +2 -3
- mlrun/frameworks/_common/producer.py +3 -4
- mlrun/frameworks/_common/utils.py +5 -5
- mlrun/frameworks/_dl_common/loggers/logger.py +6 -7
- mlrun/frameworks/_dl_common/loggers/mlrun_logger.py +9 -9
- mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +23 -47
- mlrun/frameworks/_ml_common/artifacts_library.py +1 -2
- mlrun/frameworks/_ml_common/loggers/logger.py +3 -4
- mlrun/frameworks/_ml_common/loggers/mlrun_logger.py +4 -5
- mlrun/frameworks/_ml_common/model_handler.py +24 -24
- mlrun/frameworks/_ml_common/pkl_model_server.py +2 -2
- mlrun/frameworks/_ml_common/plan.py +1 -1
- mlrun/frameworks/_ml_common/plans/calibration_curve_plan.py +2 -3
- mlrun/frameworks/_ml_common/plans/confusion_matrix_plan.py +2 -3
- mlrun/frameworks/_ml_common/plans/dataset_plan.py +3 -3
- mlrun/frameworks/_ml_common/plans/feature_importance_plan.py +3 -3
- mlrun/frameworks/_ml_common/plans/roc_curve_plan.py +4 -4
- mlrun/frameworks/_ml_common/utils.py +4 -4
- mlrun/frameworks/auto_mlrun/auto_mlrun.py +9 -9
- mlrun/frameworks/huggingface/model_server.py +4 -4
- mlrun/frameworks/lgbm/__init__.py +33 -33
- mlrun/frameworks/lgbm/callbacks/callback.py +2 -4
- mlrun/frameworks/lgbm/callbacks/logging_callback.py +4 -5
- mlrun/frameworks/lgbm/callbacks/mlrun_logging_callback.py +4 -5
- mlrun/frameworks/lgbm/mlrun_interfaces/booster_mlrun_interface.py +1 -3
- mlrun/frameworks/lgbm/mlrun_interfaces/mlrun_interface.py +6 -6
- mlrun/frameworks/lgbm/model_handler.py +10 -10
- mlrun/frameworks/lgbm/model_server.py +6 -6
- mlrun/frameworks/lgbm/utils.py +5 -5
- mlrun/frameworks/onnx/dataset.py +8 -8
- mlrun/frameworks/onnx/mlrun_interface.py +3 -3
- mlrun/frameworks/onnx/model_handler.py +6 -6
- mlrun/frameworks/onnx/model_server.py +7 -7
- mlrun/frameworks/parallel_coordinates.py +4 -3
- mlrun/frameworks/pytorch/__init__.py +18 -18
- mlrun/frameworks/pytorch/callbacks/callback.py +4 -5
- mlrun/frameworks/pytorch/callbacks/logging_callback.py +17 -17
- mlrun/frameworks/pytorch/callbacks/mlrun_logging_callback.py +11 -11
- mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +23 -29
- mlrun/frameworks/pytorch/callbacks_handler.py +38 -38
- mlrun/frameworks/pytorch/mlrun_interface.py +20 -20
- mlrun/frameworks/pytorch/model_handler.py +17 -17
- mlrun/frameworks/pytorch/model_server.py +7 -7
- mlrun/frameworks/sklearn/__init__.py +13 -13
- mlrun/frameworks/sklearn/estimator.py +4 -4
- mlrun/frameworks/sklearn/metrics_library.py +14 -14
- mlrun/frameworks/sklearn/mlrun_interface.py +3 -6
- mlrun/frameworks/sklearn/model_handler.py +2 -2
- mlrun/frameworks/tf_keras/__init__.py +10 -7
- mlrun/frameworks/tf_keras/callbacks/logging_callback.py +15 -15
- mlrun/frameworks/tf_keras/callbacks/mlrun_logging_callback.py +11 -11
- mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +19 -23
- mlrun/frameworks/tf_keras/mlrun_interface.py +9 -11
- mlrun/frameworks/tf_keras/model_handler.py +14 -14
- mlrun/frameworks/tf_keras/model_server.py +6 -6
- mlrun/frameworks/xgboost/__init__.py +13 -13
- mlrun/frameworks/xgboost/model_handler.py +6 -6
- mlrun/k8s_utils.py +14 -16
- mlrun/launcher/__init__.py +1 -1
- mlrun/launcher/base.py +16 -15
- mlrun/launcher/client.py +8 -6
- mlrun/launcher/factory.py +1 -1
- mlrun/launcher/local.py +17 -11
- mlrun/launcher/remote.py +16 -10
- mlrun/lists.py +7 -6
- mlrun/model.py +238 -73
- mlrun/model_monitoring/__init__.py +1 -1
- mlrun/model_monitoring/api.py +138 -315
- mlrun/model_monitoring/application.py +5 -296
- mlrun/model_monitoring/applications/__init__.py +24 -0
- mlrun/model_monitoring/applications/_application_steps.py +157 -0
- mlrun/model_monitoring/applications/base.py +282 -0
- mlrun/model_monitoring/applications/context.py +214 -0
- mlrun/model_monitoring/applications/evidently_base.py +211 -0
- mlrun/model_monitoring/applications/histogram_data_drift.py +349 -0
- mlrun/model_monitoring/applications/results.py +99 -0
- mlrun/model_monitoring/controller.py +104 -84
- mlrun/model_monitoring/controller_handler.py +13 -5
- mlrun/model_monitoring/db/__init__.py +18 -0
- mlrun/model_monitoring/{stores → db/stores}/__init__.py +43 -36
- mlrun/model_monitoring/db/stores/base/__init__.py +15 -0
- mlrun/model_monitoring/{stores/model_endpoint_store.py → db/stores/base/store.py} +64 -40
- mlrun/model_monitoring/db/stores/sqldb/__init__.py +13 -0
- mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +71 -0
- mlrun/model_monitoring/{stores → db/stores/sqldb}/models/base.py +109 -5
- mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +88 -0
- mlrun/model_monitoring/{stores/models/mysql.py → db/stores/sqldb/models/sqlite.py} +19 -13
- mlrun/model_monitoring/db/stores/sqldb/sql_store.py +684 -0
- mlrun/model_monitoring/db/stores/v3io_kv/__init__.py +13 -0
- mlrun/model_monitoring/{stores/kv_model_endpoint_store.py → db/stores/v3io_kv/kv_store.py} +310 -165
- mlrun/model_monitoring/db/tsdb/__init__.py +100 -0
- mlrun/model_monitoring/db/tsdb/base.py +329 -0
- mlrun/model_monitoring/db/tsdb/helpers.py +30 -0
- mlrun/model_monitoring/db/tsdb/tdengine/__init__.py +15 -0
- mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +240 -0
- mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +45 -0
- mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +397 -0
- mlrun/model_monitoring/db/tsdb/v3io/__init__.py +15 -0
- mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +117 -0
- mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +630 -0
- mlrun/model_monitoring/evidently_application.py +6 -118
- mlrun/model_monitoring/features_drift_table.py +134 -106
- mlrun/model_monitoring/helpers.py +127 -28
- mlrun/model_monitoring/metrics/__init__.py +13 -0
- mlrun/model_monitoring/metrics/histogram_distance.py +127 -0
- mlrun/model_monitoring/model_endpoint.py +3 -2
- mlrun/model_monitoring/prometheus.py +1 -4
- mlrun/model_monitoring/stream_processing.py +62 -231
- mlrun/model_monitoring/tracking_policy.py +9 -2
- mlrun/model_monitoring/writer.py +152 -124
- mlrun/package/__init__.py +6 -6
- mlrun/package/context_handler.py +5 -5
- mlrun/package/packager.py +7 -7
- mlrun/package/packagers/default_packager.py +6 -6
- mlrun/package/packagers/numpy_packagers.py +15 -15
- mlrun/package/packagers/pandas_packagers.py +5 -5
- mlrun/package/packagers/python_standard_library_packagers.py +10 -10
- mlrun/package/packagers_manager.py +19 -23
- mlrun/package/utils/_formatter.py +6 -6
- mlrun/package/utils/_pickler.py +2 -2
- mlrun/package/utils/_supported_format.py +4 -4
- mlrun/package/utils/log_hint_utils.py +2 -2
- mlrun/package/utils/type_hint_utils.py +4 -9
- mlrun/platforms/__init__.py +11 -10
- mlrun/platforms/iguazio.py +24 -203
- mlrun/projects/operations.py +35 -21
- mlrun/projects/pipelines.py +68 -99
- mlrun/projects/project.py +830 -266
- mlrun/render.py +3 -11
- mlrun/run.py +162 -166
- mlrun/runtimes/__init__.py +62 -7
- mlrun/runtimes/base.py +39 -32
- mlrun/runtimes/daskjob.py +8 -8
- mlrun/runtimes/databricks_job/databricks_cancel_task.py +1 -1
- mlrun/runtimes/databricks_job/databricks_runtime.py +7 -7
- mlrun/runtimes/databricks_job/databricks_wrapper.py +1 -1
- mlrun/runtimes/funcdoc.py +0 -28
- mlrun/runtimes/function_reference.py +1 -1
- mlrun/runtimes/kubejob.py +28 -122
- mlrun/runtimes/local.py +6 -3
- mlrun/runtimes/mpijob/__init__.py +0 -20
- mlrun/runtimes/mpijob/abstract.py +9 -10
- mlrun/runtimes/mpijob/v1.py +1 -1
- mlrun/{model_monitoring/stores/models/sqlite.py → runtimes/nuclio/__init__.py} +7 -9
- mlrun/runtimes/nuclio/api_gateway.py +709 -0
- mlrun/runtimes/nuclio/application/__init__.py +15 -0
- mlrun/runtimes/nuclio/application/application.py +523 -0
- mlrun/runtimes/nuclio/application/reverse_proxy.go +95 -0
- mlrun/runtimes/{function.py → nuclio/function.py} +112 -73
- mlrun/runtimes/{nuclio.py → nuclio/nuclio.py} +6 -6
- mlrun/runtimes/{serving.py → nuclio/serving.py} +45 -51
- mlrun/runtimes/pod.py +286 -88
- mlrun/runtimes/remotesparkjob.py +2 -2
- mlrun/runtimes/sparkjob/spark3job.py +51 -34
- mlrun/runtimes/utils.py +7 -75
- mlrun/secrets.py +9 -5
- mlrun/serving/remote.py +2 -7
- mlrun/serving/routers.py +13 -10
- mlrun/serving/server.py +22 -26
- mlrun/serving/states.py +99 -25
- mlrun/serving/utils.py +3 -3
- mlrun/serving/v1_serving.py +6 -7
- mlrun/serving/v2_serving.py +59 -20
- mlrun/track/tracker.py +2 -1
- mlrun/track/tracker_manager.py +3 -3
- mlrun/track/trackers/mlflow_tracker.py +1 -2
- mlrun/utils/async_http.py +5 -7
- mlrun/utils/azure_vault.py +1 -1
- mlrun/utils/clones.py +1 -2
- mlrun/utils/condition_evaluator.py +3 -3
- mlrun/utils/db.py +3 -3
- mlrun/utils/helpers.py +183 -197
- mlrun/utils/http.py +2 -5
- mlrun/utils/logger.py +76 -14
- mlrun/utils/notifications/notification/__init__.py +17 -12
- mlrun/utils/notifications/notification/base.py +14 -2
- mlrun/utils/notifications/notification/console.py +2 -0
- mlrun/utils/notifications/notification/git.py +3 -1
- mlrun/utils/notifications/notification/ipython.py +3 -1
- mlrun/utils/notifications/notification/slack.py +101 -21
- mlrun/utils/notifications/notification/webhook.py +11 -1
- mlrun/utils/notifications/notification_pusher.py +155 -30
- mlrun/utils/retryer.py +208 -0
- mlrun/utils/singleton.py +1 -1
- mlrun/utils/v3io_clients.py +2 -4
- mlrun/utils/version/version.json +2 -2
- mlrun/utils/version/version.py +2 -6
- {mlrun-1.6.4rc2.dist-info → mlrun-1.7.0rc20.dist-info}/METADATA +31 -19
- mlrun-1.7.0rc20.dist-info/RECORD +353 -0
- mlrun/kfpops.py +0 -868
- mlrun/model_monitoring/batch.py +0 -1095
- mlrun/model_monitoring/stores/models/__init__.py +0 -27
- mlrun/model_monitoring/stores/sql_model_endpoint_store.py +0 -384
- mlrun/platforms/other.py +0 -306
- mlrun-1.6.4rc2.dist-info/RECORD +0 -314
- {mlrun-1.6.4rc2.dist-info → mlrun-1.7.0rc20.dist-info}/LICENSE +0 -0
- {mlrun-1.6.4rc2.dist-info → mlrun-1.7.0rc20.dist-info}/WHEEL +0 -0
- {mlrun-1.6.4rc2.dist-info → mlrun-1.7.0rc20.dist-info}/entry_points.txt +0 -0
- {mlrun-1.6.4rc2.dist-info → mlrun-1.7.0rc20.dist-info}/top_level.txt +0 -0
mlrun/projects/project.py
CHANGED
|
@@ -11,6 +11,7 @@
|
|
|
11
11
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
12
|
# See the License for the specific language governing permissions and
|
|
13
13
|
# limitations under the License.
|
|
14
|
+
|
|
14
15
|
import datetime
|
|
15
16
|
import getpass
|
|
16
17
|
import glob
|
|
@@ -25,43 +26,46 @@ import uuid
|
|
|
25
26
|
import warnings
|
|
26
27
|
import zipfile
|
|
27
28
|
from os import environ, makedirs, path
|
|
28
|
-
from typing import Callable,
|
|
29
|
+
from typing import Callable, Optional, Union
|
|
29
30
|
|
|
30
31
|
import dotenv
|
|
31
32
|
import git
|
|
32
33
|
import git.exc
|
|
33
|
-
import
|
|
34
|
-
import
|
|
34
|
+
import mlrun_pipelines.common.models
|
|
35
|
+
import mlrun_pipelines.mounts
|
|
36
|
+
import nuclio.utils
|
|
35
37
|
import requests
|
|
36
38
|
import yaml
|
|
39
|
+
from mlrun_pipelines.models import PipelineNodeWrapper
|
|
37
40
|
|
|
38
41
|
import mlrun.common.helpers
|
|
39
|
-
import mlrun.common.
|
|
42
|
+
import mlrun.common.runtimes.constants
|
|
43
|
+
import mlrun.common.schemas.artifact
|
|
40
44
|
import mlrun.common.schemas.model_monitoring.constants as mm_constants
|
|
41
45
|
import mlrun.db
|
|
42
46
|
import mlrun.errors
|
|
43
47
|
import mlrun.k8s_utils
|
|
48
|
+
import mlrun.model_monitoring.applications as mm_app
|
|
44
49
|
import mlrun.runtimes
|
|
50
|
+
import mlrun.runtimes.nuclio.api_gateway
|
|
45
51
|
import mlrun.runtimes.pod
|
|
46
52
|
import mlrun.runtimes.utils
|
|
53
|
+
import mlrun.serving
|
|
47
54
|
import mlrun.utils.regex
|
|
55
|
+
from mlrun.alerts.alert import AlertConfig
|
|
56
|
+
from mlrun.common.schemas.alert import AlertTemplate
|
|
48
57
|
from mlrun.datastore.datastore_profile import DatastoreProfile, DatastoreProfile2Json
|
|
58
|
+
from mlrun.runtimes.nuclio.function import RemoteRuntime
|
|
49
59
|
|
|
50
60
|
from ..artifacts import Artifact, ArtifactProducer, DatasetArtifact, ModelArtifact
|
|
51
61
|
from ..artifacts.manager import ArtifactManager, dict_to_artifact, extend_artifact_path
|
|
52
62
|
from ..datastore import store_manager
|
|
53
63
|
from ..features import Feature
|
|
54
64
|
from ..model import EntrypointParam, ImageBuilder, ModelObj
|
|
55
|
-
from ..model_monitoring.application import (
|
|
56
|
-
ModelMonitoringApplicationBase,
|
|
57
|
-
PushToMonitoringWriter,
|
|
58
|
-
)
|
|
59
65
|
from ..run import code_to_function, get_object, import_function, new_function
|
|
60
|
-
from ..runtimes.function import RemoteRuntime
|
|
61
66
|
from ..secrets import SecretsStore
|
|
62
67
|
from ..utils import (
|
|
63
68
|
is_ipython,
|
|
64
|
-
is_legacy_artifact,
|
|
65
69
|
is_relative_path,
|
|
66
70
|
is_yaml_path,
|
|
67
71
|
logger,
|
|
@@ -74,7 +78,10 @@ from ..utils.clones import (
|
|
|
74
78
|
clone_zip,
|
|
75
79
|
get_repo_url,
|
|
76
80
|
)
|
|
77
|
-
from ..utils.helpers import
|
|
81
|
+
from ..utils.helpers import (
|
|
82
|
+
ensure_git_branch,
|
|
83
|
+
resolve_git_reference_from_source,
|
|
84
|
+
)
|
|
78
85
|
from ..utils.notifications import CustomNotificationPusher, NotificationTypes
|
|
79
86
|
from .operations import (
|
|
80
87
|
BuildStatus,
|
|
@@ -128,6 +135,7 @@ def new_project(
|
|
|
128
135
|
save: bool = True,
|
|
129
136
|
overwrite: bool = False,
|
|
130
137
|
parameters: dict = None,
|
|
138
|
+
default_function_node_selector: dict = None,
|
|
131
139
|
) -> "MlrunProject":
|
|
132
140
|
"""Create a new MLRun project, optionally load it from a yaml/zip/git template
|
|
133
141
|
|
|
@@ -138,11 +146,15 @@ def new_project(
|
|
|
138
146
|
example::
|
|
139
147
|
|
|
140
148
|
# create a project with local and hub functions, a workflow, and an artifact
|
|
141
|
-
project = mlrun.new_project(
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
project.
|
|
145
|
-
|
|
149
|
+
project = mlrun.new_project(
|
|
150
|
+
"myproj", "./", init_git=True, description="my new project"
|
|
151
|
+
)
|
|
152
|
+
project.set_function(
|
|
153
|
+
"prep_data.py", "prep-data", image="mlrun/mlrun", handler="prep_data"
|
|
154
|
+
)
|
|
155
|
+
project.set_function("hub://auto-trainer", "train")
|
|
156
|
+
project.set_artifact("data", Artifact(target_path=data_url))
|
|
157
|
+
project.set_workflow("main", "./myflow.py")
|
|
146
158
|
project.save()
|
|
147
159
|
|
|
148
160
|
# run the "main" workflow (watch=True to wait for run completion)
|
|
@@ -152,19 +164,25 @@ def new_project(
|
|
|
152
164
|
|
|
153
165
|
# create a new project from a zip template (can also use yaml/git templates)
|
|
154
166
|
# initialize a local git, and register the git remote path
|
|
155
|
-
project = mlrun.new_project(
|
|
156
|
-
|
|
157
|
-
|
|
167
|
+
project = mlrun.new_project(
|
|
168
|
+
"myproj",
|
|
169
|
+
"./",
|
|
170
|
+
init_git=True,
|
|
171
|
+
remote="git://github.com/mlrun/project-demo.git",
|
|
172
|
+
from_template="http://mysite/proj.zip",
|
|
173
|
+
)
|
|
158
174
|
project.run("main", watch=True)
|
|
159
175
|
|
|
160
176
|
|
|
161
177
|
example using project_setup.py to init the project objects::
|
|
162
178
|
|
|
163
179
|
def setup(project):
|
|
164
|
-
project.set_function(
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
project.
|
|
180
|
+
project.set_function(
|
|
181
|
+
"prep_data.py", "prep-data", image="mlrun/mlrun", handler="prep_data"
|
|
182
|
+
)
|
|
183
|
+
project.set_function("hub://auto-trainer", "train")
|
|
184
|
+
project.set_artifact("data", Artifact(target_path=data_url))
|
|
185
|
+
project.set_workflow("main", "./myflow.py")
|
|
168
186
|
return project
|
|
169
187
|
|
|
170
188
|
|
|
@@ -181,6 +199,7 @@ def new_project(
|
|
|
181
199
|
:param overwrite: overwrite project using 'cascade' deletion strategy (deletes project resources)
|
|
182
200
|
if project with name exists
|
|
183
201
|
:param parameters: key/value pairs to add to the project.spec.params
|
|
202
|
+
:param default_function_node_selector: defines the default node selector for scheduling functions within the project
|
|
184
203
|
|
|
185
204
|
:returns: project object
|
|
186
205
|
"""
|
|
@@ -193,14 +212,16 @@ def new_project(
|
|
|
193
212
|
"Unsupported option, cannot use subpath argument with project templates"
|
|
194
213
|
)
|
|
195
214
|
if from_template.endswith(".yaml"):
|
|
196
|
-
project = _load_project_file(
|
|
215
|
+
project = _load_project_file(
|
|
216
|
+
from_template, name, secrets, allow_cross_project=True
|
|
217
|
+
)
|
|
197
218
|
elif from_template.startswith("git://"):
|
|
198
219
|
clone_git(from_template, context, secrets, clone=True)
|
|
199
220
|
shutil.rmtree(path.join(context, ".git"))
|
|
200
|
-
project = _load_project_dir(context, name)
|
|
221
|
+
project = _load_project_dir(context, name, allow_cross_project=True)
|
|
201
222
|
elif from_template.endswith(".zip"):
|
|
202
223
|
clone_zip(from_template, context, secrets)
|
|
203
|
-
project = _load_project_dir(context, name)
|
|
224
|
+
project = _load_project_dir(context, name, allow_cross_project=True)
|
|
204
225
|
else:
|
|
205
226
|
raise ValueError("template must be a path to .yaml or .zip file")
|
|
206
227
|
project.metadata.name = name
|
|
@@ -227,6 +248,11 @@ def new_project(
|
|
|
227
248
|
project.spec.origin_url = url
|
|
228
249
|
if description:
|
|
229
250
|
project.spec.description = description
|
|
251
|
+
|
|
252
|
+
if default_function_node_selector:
|
|
253
|
+
for key, val in default_function_node_selector.items():
|
|
254
|
+
project.spec.default_function_node_selector[key] = val
|
|
255
|
+
|
|
230
256
|
if parameters:
|
|
231
257
|
# Enable setting project parameters at load time, can be used to customize the project_setup
|
|
232
258
|
for key, val in parameters.items():
|
|
@@ -277,6 +303,7 @@ def load_project(
|
|
|
277
303
|
save: bool = True,
|
|
278
304
|
sync_functions: bool = False,
|
|
279
305
|
parameters: dict = None,
|
|
306
|
+
allow_cross_project: bool = None,
|
|
280
307
|
) -> "MlrunProject":
|
|
281
308
|
"""Load an MLRun project from git or tar or dir
|
|
282
309
|
|
|
@@ -290,7 +317,7 @@ def load_project(
|
|
|
290
317
|
# When using git as the url source the context directory must be an empty or
|
|
291
318
|
# non-existent folder as the git repo will be cloned there
|
|
292
319
|
project = load_project("./demo_proj", "git://github.com/mlrun/project-demo.git")
|
|
293
|
-
project.run("main", arguments={
|
|
320
|
+
project.run("main", arguments={"data": data_url})
|
|
294
321
|
|
|
295
322
|
|
|
296
323
|
project_setup.py example::
|
|
@@ -323,6 +350,8 @@ def load_project(
|
|
|
323
350
|
:param save: whether to save the created project and artifact in the DB
|
|
324
351
|
:param sync_functions: sync the project's functions into the project object (will be saved to the DB if save=True)
|
|
325
352
|
:param parameters: key/value pairs to add to the project.spec.params
|
|
353
|
+
:param allow_cross_project: if True, override the loaded project name. This flag ensures awareness of
|
|
354
|
+
loading an existing project yaml as a baseline for a new project with a different name
|
|
326
355
|
|
|
327
356
|
:returns: project object
|
|
328
357
|
"""
|
|
@@ -338,7 +367,7 @@ def load_project(
|
|
|
338
367
|
if url:
|
|
339
368
|
url = str(url) # to support path objects
|
|
340
369
|
if is_yaml_path(url):
|
|
341
|
-
project = _load_project_file(url, name, secrets)
|
|
370
|
+
project = _load_project_file(url, name, secrets, allow_cross_project)
|
|
342
371
|
project.spec.context = context
|
|
343
372
|
elif url.startswith("git://"):
|
|
344
373
|
url, repo = clone_git(url, context, secrets, clone)
|
|
@@ -365,7 +394,7 @@ def load_project(
|
|
|
365
394
|
repo, url = init_repo(context, url, init_git)
|
|
366
395
|
|
|
367
396
|
if not project:
|
|
368
|
-
project = _load_project_dir(context, name, subpath)
|
|
397
|
+
project = _load_project_dir(context, name, subpath, allow_cross_project)
|
|
369
398
|
|
|
370
399
|
if not project.metadata.name:
|
|
371
400
|
raise ValueError("Project name must be specified")
|
|
@@ -419,6 +448,7 @@ def get_or_create_project(
|
|
|
419
448
|
from_template: str = None,
|
|
420
449
|
save: bool = True,
|
|
421
450
|
parameters: dict = None,
|
|
451
|
+
allow_cross_project: bool = None,
|
|
422
452
|
) -> "MlrunProject":
|
|
423
453
|
"""Load a project from MLRun DB, or create/import if it does not exist
|
|
424
454
|
|
|
@@ -429,9 +459,11 @@ def get_or_create_project(
|
|
|
429
459
|
Usage example::
|
|
430
460
|
|
|
431
461
|
# load project from the DB (if exist) or the source repo
|
|
432
|
-
project = get_or_create_project(
|
|
462
|
+
project = get_or_create_project(
|
|
463
|
+
"myproj", "./", "git://github.com/mlrun/demo-xgb-project.git"
|
|
464
|
+
)
|
|
433
465
|
project.pull("development") # pull the latest code from git
|
|
434
|
-
project.run("main", arguments={
|
|
466
|
+
project.run("main", arguments={"data": data_url}) # run the workflow "main"
|
|
435
467
|
|
|
436
468
|
|
|
437
469
|
project_setup.py example::
|
|
@@ -461,12 +493,12 @@ def get_or_create_project(
|
|
|
461
493
|
:param from_template: path to project YAML file that will be used as from_template (for new projects)
|
|
462
494
|
:param save: whether to save the created project in the DB
|
|
463
495
|
:param parameters: key/value pairs to add to the project.spec.params
|
|
496
|
+
:param allow_cross_project: if True, override the loaded project name. This flag ensures awareness of
|
|
497
|
+
loading an existing project yaml as a baseline for a new project with a different name
|
|
464
498
|
|
|
465
499
|
:returns: project object
|
|
466
500
|
"""
|
|
467
501
|
context = context or "./"
|
|
468
|
-
spec_path = path.join(context, subpath or "", "project.yaml")
|
|
469
|
-
load_from_path = url or path.isfile(spec_path)
|
|
470
502
|
try:
|
|
471
503
|
# load project from the DB.
|
|
472
504
|
# use `name` as `url` as we load the project from the DB
|
|
@@ -482,13 +514,15 @@ def get_or_create_project(
|
|
|
482
514
|
# only loading project from db so no need to save it
|
|
483
515
|
save=False,
|
|
484
516
|
parameters=parameters,
|
|
517
|
+
allow_cross_project=allow_cross_project,
|
|
485
518
|
)
|
|
486
519
|
logger.info("Project loaded successfully", project_name=name)
|
|
487
520
|
return project
|
|
488
|
-
|
|
489
521
|
except mlrun.errors.MLRunNotFoundError:
|
|
490
522
|
logger.debug("Project not found in db", project_name=name)
|
|
491
523
|
|
|
524
|
+
spec_path = path.join(context, subpath or "", "project.yaml")
|
|
525
|
+
load_from_path = url or path.isfile(spec_path)
|
|
492
526
|
# do not nest under "try" or else the exceptions raised below will be logged along with the "not found" message
|
|
493
527
|
if load_from_path:
|
|
494
528
|
# loads a project from archive or local project.yaml
|
|
@@ -504,6 +538,7 @@ def get_or_create_project(
|
|
|
504
538
|
user_project=user_project,
|
|
505
539
|
save=save,
|
|
506
540
|
parameters=parameters,
|
|
541
|
+
allow_cross_project=allow_cross_project,
|
|
507
542
|
)
|
|
508
543
|
|
|
509
544
|
logger.info(
|
|
@@ -578,26 +613,38 @@ def _run_project_setup(
|
|
|
578
613
|
return project
|
|
579
614
|
|
|
580
615
|
|
|
581
|
-
def _load_project_dir(context, name="", subpath=""):
|
|
616
|
+
def _load_project_dir(context, name="", subpath="", allow_cross_project=None):
|
|
582
617
|
subpath_str = subpath or ""
|
|
583
|
-
|
|
618
|
+
|
|
619
|
+
# support both .yaml and .yml file extensions
|
|
620
|
+
project_file_path = path.join(context, subpath_str, "project.y*ml")
|
|
621
|
+
function_file_path = path.join(context, subpath_str, "function.y*ml")
|
|
584
622
|
setup_file_path = path.join(context, subpath_str, "project_setup.py")
|
|
585
|
-
|
|
586
|
-
|
|
623
|
+
|
|
624
|
+
if project_files := glob.glob(project_file_path):
|
|
625
|
+
# if there are multiple project files, use the first one
|
|
626
|
+
project_file_path = project_files[0]
|
|
627
|
+
with open(project_file_path) as fp:
|
|
587
628
|
data = fp.read()
|
|
588
629
|
struct = yaml.load(data, Loader=yaml.FullLoader)
|
|
589
|
-
project = _project_instance_from_struct(struct, name)
|
|
630
|
+
project = _project_instance_from_struct(struct, name, allow_cross_project)
|
|
590
631
|
project.spec.context = context
|
|
591
|
-
|
|
592
|
-
|
|
593
|
-
func = import_function(
|
|
632
|
+
elif function_files := glob.glob(function_file_path):
|
|
633
|
+
function_path = function_files[0]
|
|
634
|
+
func = import_function(function_path)
|
|
635
|
+
function_file_name = path.basename(path.normpath(function_path))
|
|
594
636
|
project = MlrunProject.from_dict(
|
|
595
637
|
{
|
|
596
638
|
"metadata": {
|
|
597
639
|
"name": func.metadata.project,
|
|
598
640
|
},
|
|
599
641
|
"spec": {
|
|
600
|
-
"functions": [
|
|
642
|
+
"functions": [
|
|
643
|
+
{
|
|
644
|
+
"url": function_file_name,
|
|
645
|
+
"name": func.metadata.name,
|
|
646
|
+
},
|
|
647
|
+
],
|
|
601
648
|
},
|
|
602
649
|
}
|
|
603
650
|
)
|
|
@@ -653,19 +700,41 @@ def _delete_project_from_db(project_name, secrets, deletion_strategy):
|
|
|
653
700
|
return db.delete_project(project_name, deletion_strategy=deletion_strategy)
|
|
654
701
|
|
|
655
702
|
|
|
656
|
-
def _load_project_file(url, name="", secrets=None):
|
|
703
|
+
def _load_project_file(url, name="", secrets=None, allow_cross_project=None):
|
|
657
704
|
try:
|
|
658
705
|
obj = get_object(url, secrets)
|
|
659
706
|
except FileNotFoundError as exc:
|
|
660
707
|
raise FileNotFoundError(f"cant find project file at {url}") from exc
|
|
661
708
|
struct = yaml.load(obj, Loader=yaml.FullLoader)
|
|
662
|
-
return _project_instance_from_struct(struct, name)
|
|
709
|
+
return _project_instance_from_struct(struct, name, allow_cross_project)
|
|
663
710
|
|
|
664
711
|
|
|
665
|
-
def _project_instance_from_struct(struct, name):
|
|
666
|
-
struct.
|
|
667
|
-
|
|
668
|
-
|
|
712
|
+
def _project_instance_from_struct(struct, name, allow_cross_project):
|
|
713
|
+
name_from_struct = struct.get("metadata", {}).get("name", "")
|
|
714
|
+
if name and name_from_struct and name_from_struct != name:
|
|
715
|
+
error_message = (
|
|
716
|
+
f"project name mismatch, {name_from_struct} != {name}, please do one of the following:\n"
|
|
717
|
+
"1. Set the `allow_cross_project=True` when loading the project.\n"
|
|
718
|
+
f"2. Delete the existing project yaml, or ensure its name is equal to {name}.\n"
|
|
719
|
+
"3. Use different project context dir."
|
|
720
|
+
)
|
|
721
|
+
|
|
722
|
+
if allow_cross_project is None:
|
|
723
|
+
# TODO: Remove this warning in version 1.9.0 and also fix cli to support allow_cross_project
|
|
724
|
+
logger.warn(
|
|
725
|
+
"Project name is different than specified on its project yaml."
|
|
726
|
+
"You should fix it until version 1.9.0",
|
|
727
|
+
description=error_message,
|
|
728
|
+
)
|
|
729
|
+
elif allow_cross_project:
|
|
730
|
+
logger.warn(
|
|
731
|
+
"Project name is different than specified on its project yaml. Overriding.",
|
|
732
|
+
existing_name=name_from_struct,
|
|
733
|
+
overriding_name=name,
|
|
734
|
+
)
|
|
735
|
+
else:
|
|
736
|
+
raise ValueError(error_message)
|
|
737
|
+
struct.setdefault("metadata", {})["name"] = name or name_from_struct
|
|
669
738
|
return MlrunProject.from_dict(struct)
|
|
670
739
|
|
|
671
740
|
|
|
@@ -740,14 +809,15 @@ class ProjectSpec(ModelObj):
|
|
|
740
809
|
origin_url=None,
|
|
741
810
|
goals=None,
|
|
742
811
|
load_source_on_run=None,
|
|
743
|
-
default_requirements: typing.Union[str,
|
|
812
|
+
default_requirements: typing.Union[str, list[str]] = None,
|
|
744
813
|
desired_state=mlrun.common.schemas.ProjectState.online.value,
|
|
745
814
|
owner=None,
|
|
746
815
|
disable_auto_mount=None,
|
|
747
816
|
workdir=None,
|
|
748
817
|
default_image=None,
|
|
749
818
|
build=None,
|
|
750
|
-
custom_packagers:
|
|
819
|
+
custom_packagers: list[tuple[str, bool]] = None,
|
|
820
|
+
default_function_node_selector=None,
|
|
751
821
|
):
|
|
752
822
|
self.repo = None
|
|
753
823
|
|
|
@@ -787,6 +857,7 @@ class ProjectSpec(ModelObj):
|
|
|
787
857
|
# in a tuple where the first index is the packager module's path (str) and the second is a flag (bool) for
|
|
788
858
|
# whether it is mandatory for a run (raise exception on collection error) or not.
|
|
789
859
|
self.custom_packagers = custom_packagers or []
|
|
860
|
+
self.default_function_node_selector = default_function_node_selector or {}
|
|
790
861
|
|
|
791
862
|
@property
|
|
792
863
|
def source(self) -> str:
|
|
@@ -864,14 +935,14 @@ class ProjectSpec(ModelObj):
|
|
|
864
935
|
del self._function_definitions[name]
|
|
865
936
|
|
|
866
937
|
@property
|
|
867
|
-
def workflows(self) ->
|
|
938
|
+
def workflows(self) -> list[dict]:
|
|
868
939
|
"""
|
|
869
940
|
:returns: list of workflows specs dicts used in this project
|
|
870
941
|
"""
|
|
871
942
|
return [workflow.to_dict() for workflow in self._workflows.values()]
|
|
872
943
|
|
|
873
944
|
@workflows.setter
|
|
874
|
-
def workflows(self, workflows:
|
|
945
|
+
def workflows(self, workflows: list[typing.Union[dict, WorkflowSpec]]):
|
|
875
946
|
if not workflows:
|
|
876
947
|
workflows = []
|
|
877
948
|
if not isinstance(workflows, list):
|
|
@@ -925,13 +996,9 @@ class ProjectSpec(ModelObj):
|
|
|
925
996
|
if not isinstance(artifact, dict) and not hasattr(artifact, "to_dict"):
|
|
926
997
|
raise ValueError("artifacts must be a dict or class")
|
|
927
998
|
if isinstance(artifact, dict):
|
|
928
|
-
|
|
929
|
-
if is_legacy_artifact(artifact) or _is_imported_artifact(artifact):
|
|
930
|
-
key = artifact.get("key")
|
|
931
|
-
else:
|
|
932
|
-
key = artifact.get("metadata").get("key", "")
|
|
999
|
+
key = artifact.get("metadata", {}).get("key", "")
|
|
933
1000
|
if not key:
|
|
934
|
-
raise ValueError('artifacts "key" must be specified')
|
|
1001
|
+
raise ValueError('artifacts "metadata.key" must be specified')
|
|
935
1002
|
else:
|
|
936
1003
|
key = artifact.key
|
|
937
1004
|
artifact = artifact.to_dict()
|
|
@@ -987,7 +1054,7 @@ class ProjectSpec(ModelObj):
|
|
|
987
1054
|
:raise MLRunInvalidArgumentError: In case the packager was not in the list.
|
|
988
1055
|
"""
|
|
989
1056
|
# Look for the packager tuple in the list to remove it:
|
|
990
|
-
packager_tuple:
|
|
1057
|
+
packager_tuple: tuple[str, bool] = None
|
|
991
1058
|
for custom_packager in self.custom_packagers:
|
|
992
1059
|
if custom_packager[0] == packager:
|
|
993
1060
|
packager_tuple = custom_packager
|
|
@@ -1038,8 +1105,8 @@ class MlrunProject(ModelObj):
|
|
|
1038
1105
|
|
|
1039
1106
|
def __init__(
|
|
1040
1107
|
self,
|
|
1041
|
-
metadata: Optional[Union[ProjectMetadata,
|
|
1042
|
-
spec: Optional[Union[ProjectSpec,
|
|
1108
|
+
metadata: Optional[Union[ProjectMetadata, dict]] = None,
|
|
1109
|
+
spec: Optional[Union[ProjectSpec, dict]] = None,
|
|
1043
1110
|
):
|
|
1044
1111
|
self.metadata: ProjectMetadata = metadata
|
|
1045
1112
|
self.spec: ProjectSpec = spec
|
|
@@ -1208,6 +1275,14 @@ class MlrunProject(ModelObj):
|
|
|
1208
1275
|
def description(self, description):
|
|
1209
1276
|
self.spec.description = description
|
|
1210
1277
|
|
|
1278
|
+
@property
|
|
1279
|
+
def default_function_node_selector(self) -> dict:
|
|
1280
|
+
return self.spec.default_function_node_selector
|
|
1281
|
+
|
|
1282
|
+
@default_function_node_selector.setter
|
|
1283
|
+
def default_function_node_selector(self, default_function_node_selector):
|
|
1284
|
+
self.spec.default_function_node_selector = default_function_node_selector
|
|
1285
|
+
|
|
1211
1286
|
@property
|
|
1212
1287
|
def default_image(self) -> str:
|
|
1213
1288
|
return self.spec.default_image
|
|
@@ -1287,7 +1362,7 @@ class MlrunProject(ModelObj):
|
|
|
1287
1362
|
and not workflow_path.startswith(self.context)
|
|
1288
1363
|
):
|
|
1289
1364
|
workflow_path = path.join(self.context, workflow_path)
|
|
1290
|
-
with open(workflow_path
|
|
1365
|
+
with open(workflow_path) as fp:
|
|
1291
1366
|
txt = fp.read()
|
|
1292
1367
|
workflow = {"name": name, "code": txt}
|
|
1293
1368
|
else:
|
|
@@ -1322,13 +1397,15 @@ class MlrunProject(ModelObj):
|
|
|
1322
1397
|
example::
|
|
1323
1398
|
|
|
1324
1399
|
# register a simple file artifact
|
|
1325
|
-
project.set_artifact(
|
|
1400
|
+
project.set_artifact("data", target_path=data_url)
|
|
1326
1401
|
# register a model artifact
|
|
1327
|
-
project.set_artifact(
|
|
1402
|
+
project.set_artifact(
|
|
1403
|
+
"model", ModelArtifact(model_file="model.pkl"), target_path=model_dir_url
|
|
1404
|
+
)
|
|
1328
1405
|
|
|
1329
1406
|
# register a path to artifact package (will be imported on project load)
|
|
1330
1407
|
# to generate such package use `artifact.export(target_path)`
|
|
1331
|
-
project.set_artifact(
|
|
1408
|
+
project.set_artifact("model", "https://mystuff.com/models/mymodel.zip")
|
|
1332
1409
|
|
|
1333
1410
|
:param key: artifact key/name
|
|
1334
1411
|
:param artifact: mlrun Artifact object/dict (or its subclasses) or path to artifact
|
|
@@ -1363,14 +1440,7 @@ class MlrunProject(ModelObj):
|
|
|
1363
1440
|
artifact_path = mlrun.utils.helpers.template_artifact_path(
|
|
1364
1441
|
self.spec.artifact_path or mlrun.mlconf.artifact_path, self.metadata.name
|
|
1365
1442
|
)
|
|
1366
|
-
|
|
1367
|
-
# we need to maintain the different trees that generated them
|
|
1368
|
-
producer = ArtifactProducer(
|
|
1369
|
-
"project",
|
|
1370
|
-
self.metadata.name,
|
|
1371
|
-
self.metadata.name,
|
|
1372
|
-
tag=self._get_hexsha() or str(uuid.uuid4()),
|
|
1373
|
-
)
|
|
1443
|
+
project_tag = self._get_project_tag()
|
|
1374
1444
|
for artifact_dict in self.spec.artifacts:
|
|
1375
1445
|
if _is_imported_artifact(artifact_dict):
|
|
1376
1446
|
import_from = artifact_dict["import_from"]
|
|
@@ -1390,8 +1460,23 @@ class MlrunProject(ModelObj):
|
|
|
1390
1460
|
artifact.src_path = path.join(
|
|
1391
1461
|
self.spec.get_code_path(), artifact.src_path
|
|
1392
1462
|
)
|
|
1463
|
+
producer, is_retained_producer = self._resolve_artifact_producer(
|
|
1464
|
+
artifact, project_tag
|
|
1465
|
+
)
|
|
1466
|
+
# log the artifact only if it doesn't already exist
|
|
1467
|
+
if (
|
|
1468
|
+
producer.name != self.metadata.name
|
|
1469
|
+
and self._resolve_existing_artifact(
|
|
1470
|
+
artifact,
|
|
1471
|
+
)
|
|
1472
|
+
):
|
|
1473
|
+
continue
|
|
1393
1474
|
artifact_manager.log_artifact(
|
|
1394
|
-
producer,
|
|
1475
|
+
producer,
|
|
1476
|
+
artifact,
|
|
1477
|
+
artifact_path=artifact_path,
|
|
1478
|
+
project=self.metadata.name,
|
|
1479
|
+
is_retained_producer=is_retained_producer,
|
|
1395
1480
|
)
|
|
1396
1481
|
|
|
1397
1482
|
def _get_artifact_manager(self):
|
|
@@ -1414,7 +1499,7 @@ class MlrunProject(ModelObj):
|
|
|
1414
1499
|
self,
|
|
1415
1500
|
url: str,
|
|
1416
1501
|
check_path_in_context: bool = False,
|
|
1417
|
-
) ->
|
|
1502
|
+
) -> tuple[str, bool]:
|
|
1418
1503
|
"""
|
|
1419
1504
|
Get the absolute path of the artifact or function file
|
|
1420
1505
|
:param url: remote url, absolute path or relative path
|
|
@@ -1486,12 +1571,20 @@ class MlrunProject(ModelObj):
|
|
|
1486
1571
|
artifact_path = mlrun.utils.helpers.template_artifact_path(
|
|
1487
1572
|
artifact_path, self.metadata.name
|
|
1488
1573
|
)
|
|
1489
|
-
producer =
|
|
1490
|
-
|
|
1491
|
-
|
|
1492
|
-
self.
|
|
1493
|
-
|
|
1494
|
-
|
|
1574
|
+
producer, is_retained_producer = self._resolve_artifact_producer(item)
|
|
1575
|
+
if producer.name != self.metadata.name:
|
|
1576
|
+
# the artifact producer is retained, log it only if it doesn't already exist
|
|
1577
|
+
if existing_artifact := self._resolve_existing_artifact(
|
|
1578
|
+
item,
|
|
1579
|
+
tag,
|
|
1580
|
+
):
|
|
1581
|
+
artifact_key = item if isinstance(item, str) else item.key
|
|
1582
|
+
logger.info(
|
|
1583
|
+
"Artifact already exists, skipping logging",
|
|
1584
|
+
key=artifact_key,
|
|
1585
|
+
tag=tag,
|
|
1586
|
+
)
|
|
1587
|
+
return existing_artifact
|
|
1495
1588
|
item = am.log_artifact(
|
|
1496
1589
|
producer,
|
|
1497
1590
|
item,
|
|
@@ -1503,10 +1596,29 @@ class MlrunProject(ModelObj):
|
|
|
1503
1596
|
upload=upload,
|
|
1504
1597
|
labels=labels,
|
|
1505
1598
|
target_path=target_path,
|
|
1599
|
+
project=self.metadata.name,
|
|
1600
|
+
is_retained_producer=is_retained_producer,
|
|
1506
1601
|
**kwargs,
|
|
1507
1602
|
)
|
|
1508
1603
|
return item
|
|
1509
1604
|
|
|
1605
|
+
def delete_artifact(
|
|
1606
|
+
self,
|
|
1607
|
+
item: Artifact,
|
|
1608
|
+
deletion_strategy: mlrun.common.schemas.artifact.ArtifactsDeletionStrategies = (
|
|
1609
|
+
mlrun.common.schemas.artifact.ArtifactsDeletionStrategies.metadata_only
|
|
1610
|
+
),
|
|
1611
|
+
secrets: dict = None,
|
|
1612
|
+
):
|
|
1613
|
+
"""Delete an artifact object in the DB and optionally delete the artifact data
|
|
1614
|
+
|
|
1615
|
+
:param item: Artifact object (can be any type, such as dataset, model, feature store).
|
|
1616
|
+
:param deletion_strategy: The artifact deletion strategy types.
|
|
1617
|
+
:param secrets: Credentials needed to access the artifact data.
|
|
1618
|
+
"""
|
|
1619
|
+
am = self._get_artifact_manager()
|
|
1620
|
+
am.delete_artifact(item, deletion_strategy, secrets)
|
|
1621
|
+
|
|
1510
1622
|
def log_dataset(
|
|
1511
1623
|
self,
|
|
1512
1624
|
key,
|
|
@@ -1537,7 +1649,9 @@ class MlrunProject(ModelObj):
|
|
|
1537
1649
|
"age": [42, 52, 36, 24, 73],
|
|
1538
1650
|
"testScore": [25, 94, 57, 62, 70],
|
|
1539
1651
|
}
|
|
1540
|
-
df = pd.DataFrame(
|
|
1652
|
+
df = pd.DataFrame(
|
|
1653
|
+
raw_data, columns=["first_name", "last_name", "age", "testScore"]
|
|
1654
|
+
)
|
|
1541
1655
|
project.log_dataset("mydf", df=df, stats=True)
|
|
1542
1656
|
|
|
1543
1657
|
:param key: artifact key
|
|
@@ -1596,8 +1710,8 @@ class MlrunProject(ModelObj):
|
|
|
1596
1710
|
artifact_path=None,
|
|
1597
1711
|
upload=None,
|
|
1598
1712
|
labels=None,
|
|
1599
|
-
inputs:
|
|
1600
|
-
outputs:
|
|
1713
|
+
inputs: list[Feature] = None,
|
|
1714
|
+
outputs: list[Feature] = None,
|
|
1601
1715
|
feature_vector: str = None,
|
|
1602
1716
|
feature_weights: list = None,
|
|
1603
1717
|
training_set=None,
|
|
@@ -1611,13 +1725,16 @@ class MlrunProject(ModelObj):
|
|
|
1611
1725
|
|
|
1612
1726
|
example::
|
|
1613
1727
|
|
|
1614
|
-
project.log_model(
|
|
1615
|
-
|
|
1616
|
-
|
|
1617
|
-
|
|
1618
|
-
|
|
1619
|
-
|
|
1620
|
-
|
|
1728
|
+
project.log_model(
|
|
1729
|
+
"model",
|
|
1730
|
+
body=dumps(model),
|
|
1731
|
+
model_file="model.pkl",
|
|
1732
|
+
metrics=context.results,
|
|
1733
|
+
training_set=training_df,
|
|
1734
|
+
label_column="label",
|
|
1735
|
+
feature_vector=feature_vector_uri,
|
|
1736
|
+
labels={"app": "fraud"},
|
|
1737
|
+
)
|
|
1621
1738
|
|
|
1622
1739
|
:param key: artifact key or artifact class ()
|
|
1623
1740
|
:param body: will use the body as the artifact content
|
|
@@ -1721,20 +1838,22 @@ class MlrunProject(ModelObj):
|
|
|
1721
1838
|
with tempfile.TemporaryDirectory() as temp_dir:
|
|
1722
1839
|
with zipfile.ZipFile(item_file, "r") as zf:
|
|
1723
1840
|
zf.extractall(temp_dir)
|
|
1724
|
-
with open(f"{temp_dir}/_spec.yaml"
|
|
1841
|
+
with open(f"{temp_dir}/_spec.yaml") as fp:
|
|
1725
1842
|
data = fp.read()
|
|
1726
1843
|
spec = yaml.load(data, Loader=yaml.FullLoader)
|
|
1727
1844
|
artifact = get_artifact(spec)
|
|
1728
1845
|
with open(f"{temp_dir}/_body", "rb") as fp:
|
|
1729
1846
|
artifact.spec._body = fp.read()
|
|
1730
|
-
artifact.target_path = ""
|
|
1731
1847
|
|
|
1732
1848
|
# if the dataitem is not a file, it means we downloaded it from a remote source to a temp file,
|
|
1733
1849
|
# so we need to remove it after we're done with it
|
|
1734
1850
|
dataitem.remove_local()
|
|
1735
1851
|
|
|
1736
1852
|
return self.log_artifact(
|
|
1737
|
-
artifact,
|
|
1853
|
+
artifact,
|
|
1854
|
+
local_path=temp_dir,
|
|
1855
|
+
artifact_path=artifact_path,
|
|
1856
|
+
upload=True,
|
|
1738
1857
|
)
|
|
1739
1858
|
|
|
1740
1859
|
else:
|
|
@@ -1752,10 +1871,18 @@ class MlrunProject(ModelObj):
|
|
|
1752
1871
|
"""
|
|
1753
1872
|
context = context or self.spec.context
|
|
1754
1873
|
if context:
|
|
1755
|
-
project = _load_project_dir(
|
|
1874
|
+
project = _load_project_dir(
|
|
1875
|
+
context,
|
|
1876
|
+
self.metadata.name,
|
|
1877
|
+
self.spec.subpath,
|
|
1878
|
+
allow_cross_project=False,
|
|
1879
|
+
)
|
|
1756
1880
|
else:
|
|
1757
1881
|
project = _load_project_file(
|
|
1758
|
-
self.spec.origin_url,
|
|
1882
|
+
self.spec.origin_url,
|
|
1883
|
+
self.metadata.name,
|
|
1884
|
+
self._secrets,
|
|
1885
|
+
allow_cross_project=None,
|
|
1759
1886
|
)
|
|
1760
1887
|
project.spec.source = self.spec.source
|
|
1761
1888
|
project.spec.repo = self.spec.repo
|
|
@@ -1784,22 +1911,29 @@ class MlrunProject(ModelObj):
|
|
|
1784
1911
|
def set_model_monitoring_function(
|
|
1785
1912
|
self,
|
|
1786
1913
|
func: typing.Union[str, mlrun.runtimes.BaseRuntime, None] = None,
|
|
1787
|
-
application_class: typing.Union[
|
|
1914
|
+
application_class: typing.Union[
|
|
1915
|
+
str,
|
|
1916
|
+
mm_app.ModelMonitoringApplicationBase,
|
|
1917
|
+
mm_app.ModelMonitoringApplicationBaseV2,
|
|
1918
|
+
] = None,
|
|
1788
1919
|
name: str = None,
|
|
1789
1920
|
image: str = None,
|
|
1790
1921
|
handler=None,
|
|
1791
1922
|
with_repo: bool = None,
|
|
1792
1923
|
tag: str = None,
|
|
1793
|
-
requirements: typing.Union[str,
|
|
1924
|
+
requirements: typing.Union[str, list[str]] = None,
|
|
1794
1925
|
requirements_file: str = "",
|
|
1795
1926
|
**application_kwargs,
|
|
1796
1927
|
) -> mlrun.runtimes.BaseRuntime:
|
|
1797
1928
|
"""
|
|
1798
1929
|
Update or add a monitoring function to the project.
|
|
1930
|
+
Note: to deploy the function after linking it to the project,
|
|
1931
|
+
call `fn.deploy()` where `fn` is the object returned by this method.
|
|
1799
1932
|
|
|
1800
1933
|
examples::
|
|
1801
|
-
project.set_model_monitoring_function(
|
|
1802
|
-
|
|
1934
|
+
project.set_model_monitoring_function(
|
|
1935
|
+
name="myApp", application_class="MyApp", image="mlrun/mlrun"
|
|
1936
|
+
)
|
|
1803
1937
|
|
|
1804
1938
|
:param func: Function object or spec/code url, None refers to current Notebook
|
|
1805
1939
|
:param name: Name of the function (under the project), can be specified with a tag to support
|
|
@@ -1814,7 +1948,7 @@ class MlrunProject(ModelObj):
|
|
|
1814
1948
|
will be enriched with the tag value. (i.e. 'function-name:tag')
|
|
1815
1949
|
:param requirements: A list of python packages
|
|
1816
1950
|
:param requirements_file: Path to a python requirements file
|
|
1817
|
-
:param application_class: Name or an Instance of a class that
|
|
1951
|
+
:param application_class: Name or an Instance of a class that implements the monitoring application.
|
|
1818
1952
|
:param application_kwargs: Additional keyword arguments to be passed to the
|
|
1819
1953
|
monitoring application's constructor.
|
|
1820
1954
|
"""
|
|
@@ -1836,16 +1970,6 @@ class MlrunProject(ModelObj):
|
|
|
1836
1970
|
requirements_file,
|
|
1837
1971
|
**application_kwargs,
|
|
1838
1972
|
)
|
|
1839
|
-
models_names = "all"
|
|
1840
|
-
function_object.set_label(
|
|
1841
|
-
mm_constants.ModelMonitoringAppLabel.KEY,
|
|
1842
|
-
mm_constants.ModelMonitoringAppLabel.VAL,
|
|
1843
|
-
)
|
|
1844
|
-
function_object.set_label("models", models_names)
|
|
1845
|
-
|
|
1846
|
-
if not mlrun.mlconf.is_ce_mode():
|
|
1847
|
-
function_object.apply(mlrun.mount_v3io())
|
|
1848
|
-
|
|
1849
1973
|
# save to project spec
|
|
1850
1974
|
self.spec.set_function(resolved_function_name, function_object, func)
|
|
1851
1975
|
|
|
@@ -1854,13 +1978,17 @@ class MlrunProject(ModelObj):
|
|
|
1854
1978
|
def create_model_monitoring_function(
|
|
1855
1979
|
self,
|
|
1856
1980
|
func: str = None,
|
|
1857
|
-
application_class: typing.Union[
|
|
1981
|
+
application_class: typing.Union[
|
|
1982
|
+
str,
|
|
1983
|
+
mm_app.ModelMonitoringApplicationBase,
|
|
1984
|
+
mm_app.ModelMonitoringApplicationBaseV2,
|
|
1985
|
+
] = None,
|
|
1858
1986
|
name: str = None,
|
|
1859
1987
|
image: str = None,
|
|
1860
1988
|
handler: str = None,
|
|
1861
1989
|
with_repo: bool = None,
|
|
1862
1990
|
tag: str = None,
|
|
1863
|
-
requirements: typing.Union[str,
|
|
1991
|
+
requirements: typing.Union[str, list[str]] = None,
|
|
1864
1992
|
requirements_file: str = "",
|
|
1865
1993
|
**application_kwargs,
|
|
1866
1994
|
) -> mlrun.runtimes.BaseRuntime:
|
|
@@ -1868,8 +1996,9 @@ class MlrunProject(ModelObj):
|
|
|
1868
1996
|
Create a monitoring function object without setting it to the project
|
|
1869
1997
|
|
|
1870
1998
|
examples::
|
|
1871
|
-
project.create_model_monitoring_function(
|
|
1872
|
-
|
|
1999
|
+
project.create_model_monitoring_function(
|
|
2000
|
+
application_class_name="MyApp", image="mlrun/mlrun", name="myApp"
|
|
2001
|
+
)
|
|
1873
2002
|
|
|
1874
2003
|
:param func: Code url, None refers to current Notebook
|
|
1875
2004
|
:param name: Name of the function, can be specified with a tag to support
|
|
@@ -1904,49 +2033,41 @@ class MlrunProject(ModelObj):
|
|
|
1904
2033
|
|
|
1905
2034
|
def _instantiate_model_monitoring_function(
|
|
1906
2035
|
self,
|
|
1907
|
-
func: typing.Union[str, mlrun.runtimes.BaseRuntime] = None,
|
|
1908
|
-
application_class: typing.Union[
|
|
1909
|
-
|
|
1910
|
-
|
|
1911
|
-
|
|
1912
|
-
|
|
1913
|
-
|
|
1914
|
-
|
|
2036
|
+
func: typing.Union[str, mlrun.runtimes.BaseRuntime, None] = None,
|
|
2037
|
+
application_class: typing.Union[
|
|
2038
|
+
str,
|
|
2039
|
+
mm_app.ModelMonitoringApplicationBase,
|
|
2040
|
+
mm_app.ModelMonitoringApplicationBaseV2,
|
|
2041
|
+
None,
|
|
2042
|
+
] = None,
|
|
2043
|
+
name: typing.Optional[str] = None,
|
|
2044
|
+
image: typing.Optional[str] = None,
|
|
2045
|
+
handler: typing.Optional[str] = None,
|
|
2046
|
+
with_repo: typing.Optional[bool] = None,
|
|
2047
|
+
tag: typing.Optional[str] = None,
|
|
2048
|
+
requirements: typing.Union[str, list[str], None] = None,
|
|
1915
2049
|
requirements_file: str = "",
|
|
1916
2050
|
**application_kwargs,
|
|
1917
|
-
) ->
|
|
2051
|
+
) -> tuple[str, mlrun.runtimes.BaseRuntime, dict]:
|
|
2052
|
+
import mlrun.model_monitoring.api
|
|
2053
|
+
|
|
1918
2054
|
function_object: RemoteRuntime = None
|
|
1919
2055
|
kind = None
|
|
1920
2056
|
if (isinstance(func, str) or func is None) and application_class is not None:
|
|
1921
|
-
kind =
|
|
1922
|
-
|
|
1923
|
-
|
|
1924
|
-
|
|
1925
|
-
|
|
2057
|
+
kind = mlrun.run.RuntimeKinds.serving
|
|
2058
|
+
func = mlrun.model_monitoring.api._create_model_monitoring_function_base(
|
|
2059
|
+
project=self.name,
|
|
2060
|
+
func=func,
|
|
2061
|
+
application_class=application_class,
|
|
1926
2062
|
name=name,
|
|
1927
|
-
project=self.metadata.name,
|
|
1928
|
-
tag=tag,
|
|
1929
|
-
kind=kind,
|
|
1930
2063
|
image=image,
|
|
2064
|
+
tag=tag,
|
|
1931
2065
|
requirements=requirements,
|
|
1932
2066
|
requirements_file=requirements_file,
|
|
2067
|
+
**application_kwargs,
|
|
1933
2068
|
)
|
|
1934
|
-
graph = func.set_topology("flow")
|
|
1935
|
-
if isinstance(application_class, str):
|
|
1936
|
-
first_step = graph.to(
|
|
1937
|
-
class_name=application_class, **application_kwargs
|
|
1938
|
-
)
|
|
1939
|
-
else:
|
|
1940
|
-
first_step = graph.to(class_name=application_class)
|
|
1941
|
-
first_step.to(
|
|
1942
|
-
class_name=PushToMonitoringWriter(
|
|
1943
|
-
project=self.metadata.name,
|
|
1944
|
-
writer_application_name=mm_constants.MonitoringFunctionNames.WRITER,
|
|
1945
|
-
stream_uri=None,
|
|
1946
|
-
),
|
|
1947
|
-
).respond()
|
|
1948
2069
|
elif isinstance(func, str) and isinstance(handler, str):
|
|
1949
|
-
kind =
|
|
2070
|
+
kind = mlrun.run.RuntimeKinds.nuclio
|
|
1950
2071
|
|
|
1951
2072
|
(
|
|
1952
2073
|
resolved_function_name,
|
|
@@ -1964,61 +2085,160 @@ class MlrunProject(ModelObj):
|
|
|
1964
2085
|
requirements,
|
|
1965
2086
|
requirements_file,
|
|
1966
2087
|
)
|
|
1967
|
-
models_names = "all"
|
|
1968
2088
|
function_object.set_label(
|
|
1969
2089
|
mm_constants.ModelMonitoringAppLabel.KEY,
|
|
1970
2090
|
mm_constants.ModelMonitoringAppLabel.VAL,
|
|
1971
2091
|
)
|
|
1972
|
-
function_object.set_label("models", models_names)
|
|
1973
2092
|
|
|
1974
2093
|
if not mlrun.mlconf.is_ce_mode():
|
|
1975
2094
|
function_object.apply(mlrun.mount_v3io())
|
|
1976
2095
|
|
|
1977
2096
|
return resolved_function_name, function_object, func
|
|
1978
2097
|
|
|
2098
|
+
def _wait_for_functions_deployment(self, function_names: list[str]) -> None:
|
|
2099
|
+
"""
|
|
2100
|
+
Wait for the deployment of functions on the backend.
|
|
2101
|
+
|
|
2102
|
+
:param function_names: A list of function names.
|
|
2103
|
+
"""
|
|
2104
|
+
for fn_name in function_names:
|
|
2105
|
+
fn = typing.cast(RemoteRuntime, self.get_function(key=fn_name))
|
|
2106
|
+
fn._wait_for_function_deployment(db=fn._get_db())
|
|
2107
|
+
|
|
1979
2108
|
def enable_model_monitoring(
|
|
1980
2109
|
self,
|
|
1981
2110
|
default_controller_image: str = "mlrun/mlrun",
|
|
1982
2111
|
base_period: int = 10,
|
|
1983
|
-
|
|
1984
|
-
|
|
1985
|
-
|
|
1986
|
-
|
|
1987
|
-
|
|
1988
|
-
|
|
1989
|
-
|
|
1990
|
-
|
|
1991
|
-
|
|
1992
|
-
|
|
1993
|
-
|
|
1994
|
-
|
|
1995
|
-
|
|
1996
|
-
:
|
|
2112
|
+
image: str = "mlrun/mlrun",
|
|
2113
|
+
*,
|
|
2114
|
+
deploy_histogram_data_drift_app: bool = True,
|
|
2115
|
+
wait_for_deployment: bool = False,
|
|
2116
|
+
) -> None:
|
|
2117
|
+
"""
|
|
2118
|
+
Deploy model monitoring application controller, writer and stream functions.
|
|
2119
|
+
While the main goal of the controller function is to handle the monitoring processing and triggering
|
|
2120
|
+
applications, the goal of the model monitoring writer function is to write all the monitoring
|
|
2121
|
+
application results to the databases.
|
|
2122
|
+
The stream function goal is to monitor the log of the data stream. It is triggered when a new log entry
|
|
2123
|
+
is detected. It processes the new events into statistics that are then written to statistics databases.
|
|
2124
|
+
|
|
2125
|
+
:param default_controller_image: Deprecated.
|
|
2126
|
+
:param base_period: The time period in minutes in which the model monitoring controller
|
|
2127
|
+
function is triggered. By default, the base period is 10 minutes.
|
|
2128
|
+
:param image: The image of the model monitoring controller, writer, monitoring
|
|
2129
|
+
stream & histogram data drift functions, which are real time nuclio
|
|
2130
|
+
functions. By default, the image is mlrun/mlrun.
|
|
2131
|
+
:param deploy_histogram_data_drift_app: If true, deploy the default histogram-based data drift application.
|
|
2132
|
+
:param wait_for_deployment: If true, return only after the deployment is done on the backend.
|
|
2133
|
+
Otherwise, deploy the model monitoring infrastructure on the
|
|
2134
|
+
background, including the histogram data drift app if selected.
|
|
1997
2135
|
"""
|
|
2136
|
+
if default_controller_image != "mlrun/mlrun":
|
|
2137
|
+
# TODO: Remove this in 1.9.0
|
|
2138
|
+
warnings.warn(
|
|
2139
|
+
"'default_controller_image' is deprecated and will be removed in 1.9.0, "
|
|
2140
|
+
"use 'image' instead",
|
|
2141
|
+
FutureWarning,
|
|
2142
|
+
)
|
|
2143
|
+
image = default_controller_image
|
|
1998
2144
|
db = mlrun.db.get_run_db(secrets=self._secrets)
|
|
1999
|
-
|
|
2145
|
+
db.enable_model_monitoring(
|
|
2000
2146
|
project=self.name,
|
|
2001
|
-
|
|
2147
|
+
image=image,
|
|
2002
2148
|
base_period=base_period,
|
|
2149
|
+
deploy_histogram_data_drift_app=deploy_histogram_data_drift_app,
|
|
2003
2150
|
)
|
|
2004
2151
|
|
|
2005
|
-
|
|
2152
|
+
if wait_for_deployment:
|
|
2153
|
+
deployment_functions = mm_constants.MonitoringFunctionNames.list()
|
|
2154
|
+
if deploy_histogram_data_drift_app:
|
|
2155
|
+
deployment_functions.append(
|
|
2156
|
+
mm_constants.HistogramDataDriftApplicationConstants.NAME
|
|
2157
|
+
)
|
|
2158
|
+
self._wait_for_functions_deployment(deployment_functions)
|
|
2159
|
+
|
|
2160
|
+
def deploy_histogram_data_drift_app(
|
|
2161
|
+
self,
|
|
2162
|
+
*,
|
|
2163
|
+
image: str = "mlrun/mlrun",
|
|
2164
|
+
db: Optional[mlrun.db.RunDBInterface] = None,
|
|
2165
|
+
wait_for_deployment: bool = False,
|
|
2166
|
+
) -> None:
|
|
2167
|
+
"""
|
|
2168
|
+
Deploy the histogram data drift application.
|
|
2169
|
+
|
|
2170
|
+
:param image: The image on which the application will run.
|
|
2171
|
+
:param db: An optional DB object.
|
|
2172
|
+
:param wait_for_deployment: If true, return only after the deployment is done on the backend.
|
|
2173
|
+
Otherwise, deploy the application on the background.
|
|
2174
|
+
"""
|
|
2175
|
+
if db is None:
|
|
2176
|
+
db = mlrun.db.get_run_db(secrets=self._secrets)
|
|
2177
|
+
db.deploy_histogram_data_drift_app(project=self.name, image=image)
|
|
2178
|
+
|
|
2179
|
+
if wait_for_deployment:
|
|
2180
|
+
self._wait_for_functions_deployment(
|
|
2181
|
+
[mm_constants.HistogramDataDriftApplicationConstants.NAME]
|
|
2182
|
+
)
|
|
2183
|
+
|
|
2184
|
+
def update_model_monitoring_controller(
|
|
2185
|
+
self,
|
|
2186
|
+
base_period: int = 10,
|
|
2187
|
+
image: str = "mlrun/mlrun",
|
|
2188
|
+
*,
|
|
2189
|
+
wait_for_deployment: bool = False,
|
|
2190
|
+
) -> None:
|
|
2191
|
+
"""
|
|
2192
|
+
Redeploy model monitoring application controller functions.
|
|
2193
|
+
|
|
2194
|
+
:param base_period: The time period in minutes in which the model monitoring controller function
|
|
2195
|
+
is triggered. By default, the base period is 10 minutes.
|
|
2196
|
+
:param image: The image of the model monitoring controller, writer & monitoring
|
|
2197
|
+
stream functions, which are real time nuclio functions.
|
|
2198
|
+
By default, the image is mlrun/mlrun.
|
|
2199
|
+
:param wait_for_deployment: If true, return only after the deployment is done on the backend.
|
|
2200
|
+
Otherwise, deploy the controller on the background.
|
|
2201
|
+
"""
|
|
2006
2202
|
db = mlrun.db.get_run_db(secrets=self._secrets)
|
|
2007
|
-
db.
|
|
2203
|
+
db.update_model_monitoring_controller(
|
|
2008
2204
|
project=self.name,
|
|
2009
|
-
|
|
2205
|
+
base_period=base_period,
|
|
2206
|
+
image=image,
|
|
2010
2207
|
)
|
|
2011
2208
|
|
|
2209
|
+
if wait_for_deployment:
|
|
2210
|
+
self._wait_for_functions_deployment(
|
|
2211
|
+
[mm_constants.MonitoringFunctionNames.APPLICATION_CONTROLLER]
|
|
2212
|
+
)
|
|
2213
|
+
|
|
2214
|
+
def disable_model_monitoring(
|
|
2215
|
+
self, *, delete_histogram_data_drift_app: bool = True
|
|
2216
|
+
) -> None:
|
|
2217
|
+
"""
|
|
2218
|
+
Note: This method is currently not advised for use. See ML-3432.
|
|
2219
|
+
Disable model monitoring by deleting the underlying functions infrastructure from MLRun database.
|
|
2220
|
+
|
|
2221
|
+
:param delete_histogram_data_drift_app: Whether to delete the histogram data drift app.
|
|
2222
|
+
"""
|
|
2223
|
+
db = mlrun.db.get_run_db(secrets=self._secrets)
|
|
2224
|
+
for fn_name in mm_constants.MonitoringFunctionNames.list():
|
|
2225
|
+
db.delete_function(project=self.name, name=fn_name)
|
|
2226
|
+
if delete_histogram_data_drift_app:
|
|
2227
|
+
db.delete_function(
|
|
2228
|
+
project=self.name,
|
|
2229
|
+
name=mm_constants.HistogramDataDriftApplicationConstants.NAME,
|
|
2230
|
+
)
|
|
2231
|
+
|
|
2012
2232
|
def set_function(
|
|
2013
2233
|
self,
|
|
2014
2234
|
func: typing.Union[str, mlrun.runtimes.BaseRuntime] = None,
|
|
2015
2235
|
name: str = "",
|
|
2016
|
-
kind: str = "",
|
|
2236
|
+
kind: str = "job",
|
|
2017
2237
|
image: str = None,
|
|
2018
2238
|
handler: str = None,
|
|
2019
2239
|
with_repo: bool = None,
|
|
2020
2240
|
tag: str = None,
|
|
2021
|
-
requirements: typing.Union[str,
|
|
2241
|
+
requirements: typing.Union[str, list[str]] = None,
|
|
2022
2242
|
requirements_file: str = "",
|
|
2023
2243
|
) -> mlrun.runtimes.BaseRuntime:
|
|
2024
2244
|
"""update or add a function object to the project
|
|
@@ -2033,19 +2253,20 @@ class MlrunProject(ModelObj):
|
|
|
2033
2253
|
examples::
|
|
2034
2254
|
|
|
2035
2255
|
proj.set_function(func_object)
|
|
2036
|
-
proj.set_function(
|
|
2037
|
-
|
|
2038
|
-
|
|
2039
|
-
proj.set_function(
|
|
2040
|
-
proj.set_function(
|
|
2256
|
+
proj.set_function(
|
|
2257
|
+
"./src/mycode.py", "ingest", image="myrepo/ing:latest", with_repo=True
|
|
2258
|
+
)
|
|
2259
|
+
proj.set_function("http://.../mynb.ipynb", "train")
|
|
2260
|
+
proj.set_function("./func.yaml")
|
|
2261
|
+
proj.set_function("hub://get_toy_data", "getdata")
|
|
2041
2262
|
|
|
2042
2263
|
# set function requirements
|
|
2043
2264
|
|
|
2044
2265
|
# by providing a list of packages
|
|
2045
|
-
proj.set_function(
|
|
2266
|
+
proj.set_function("my.py", requirements=["requests", "pandas"])
|
|
2046
2267
|
|
|
2047
2268
|
# by providing a path to a pip requirements file
|
|
2048
|
-
proj.set_function(
|
|
2269
|
+
proj.set_function("my.py", requirements="requirements.txt")
|
|
2049
2270
|
|
|
2050
2271
|
:param func: Function object or spec/code url, None refers to current Notebook
|
|
2051
2272
|
:param name: Name of the function (under the project), can be specified with a tag to support
|
|
@@ -2094,22 +2315,19 @@ class MlrunProject(ModelObj):
|
|
|
2094
2315
|
handler: str = None,
|
|
2095
2316
|
with_repo: bool = None,
|
|
2096
2317
|
tag: str = None,
|
|
2097
|
-
requirements: typing.Union[str,
|
|
2318
|
+
requirements: typing.Union[str, list[str]] = None,
|
|
2098
2319
|
requirements_file: str = "",
|
|
2099
|
-
) ->
|
|
2320
|
+
) -> tuple[str, str, mlrun.runtimes.BaseRuntime, dict]:
|
|
2100
2321
|
if func is None and not _has_module(handler, kind):
|
|
2101
2322
|
# if function path is not provided and it is not a module (no ".")
|
|
2102
2323
|
# use the current notebook as default
|
|
2103
|
-
if
|
|
2104
|
-
|
|
2105
|
-
"Function path or module must be specified (when not running inside a Notebook)"
|
|
2106
|
-
)
|
|
2107
|
-
from IPython import get_ipython
|
|
2324
|
+
if is_ipython:
|
|
2325
|
+
from IPython import get_ipython
|
|
2108
2326
|
|
|
2109
|
-
|
|
2110
|
-
|
|
2111
|
-
|
|
2112
|
-
|
|
2327
|
+
kernel = get_ipython()
|
|
2328
|
+
func = nuclio.utils.notebook_file_name(kernel)
|
|
2329
|
+
if func.startswith(path.abspath(self.spec.context)):
|
|
2330
|
+
func = path.relpath(func, self.spec.context)
|
|
2113
2331
|
|
|
2114
2332
|
func = func or ""
|
|
2115
2333
|
|
|
@@ -2293,7 +2511,7 @@ class MlrunProject(ModelObj):
|
|
|
2293
2511
|
self.sync_functions()
|
|
2294
2512
|
return FunctionsDict(self)
|
|
2295
2513
|
|
|
2296
|
-
def get_function_names(self) ->
|
|
2514
|
+
def get_function_names(self) -> list[str]:
|
|
2297
2515
|
"""get a list of all the project function names"""
|
|
2298
2516
|
return [func["name"] for func in self.spec.functions]
|
|
2299
2517
|
|
|
@@ -2326,13 +2544,47 @@ class MlrunProject(ModelObj):
|
|
|
2326
2544
|
clone_zip(url, self.spec.context, self._secrets)
|
|
2327
2545
|
|
|
2328
2546
|
def create_remote(self, url, name="origin", branch=None):
|
|
2329
|
-
"""
|
|
2547
|
+
"""Create remote for the project git
|
|
2548
|
+
|
|
2549
|
+
This method creates a new remote repository associated with the project's Git repository.
|
|
2550
|
+
If a remote with the specified name already exists, it will not be overwritten.
|
|
2551
|
+
|
|
2552
|
+
If you wish to update the URL of an existing remote, use the `set_remote` method instead.
|
|
2330
2553
|
|
|
2331
2554
|
:param url: remote git url
|
|
2332
2555
|
:param name: name for the remote (default is 'origin')
|
|
2333
2556
|
:param branch: Git branch to use as source
|
|
2334
2557
|
"""
|
|
2558
|
+
self.set_remote(url, name=name, branch=branch, overwrite=False)
|
|
2559
|
+
|
|
2560
|
+
def set_remote(self, url, name="origin", branch=None, overwrite=True):
|
|
2561
|
+
"""Create or update a remote for the project git repository.
|
|
2562
|
+
|
|
2563
|
+
This method allows you to manage remote repositories associated with the project.
|
|
2564
|
+
It checks if a remote with the specified name already exists.
|
|
2565
|
+
|
|
2566
|
+
If a remote with the same name does not exist, it will be created.
|
|
2567
|
+
If a remote with the same name already exists,
|
|
2568
|
+
the behavior depends on the value of the 'overwrite' flag.
|
|
2569
|
+
|
|
2570
|
+
:param url: remote git url
|
|
2571
|
+
:param name: name for the remote (default is 'origin')
|
|
2572
|
+
:param branch: Git branch to use as source
|
|
2573
|
+
:param overwrite: if True (default), updates the existing remote with the given URL if it already exists.
|
|
2574
|
+
if False, raises an error when attempting to create a remote with a name that already exists.
|
|
2575
|
+
:raises MLRunConflictError: If a remote with the same name already exists and overwrite
|
|
2576
|
+
is set to False.
|
|
2577
|
+
"""
|
|
2335
2578
|
self._ensure_git_repo()
|
|
2579
|
+
if self._remote_exists(name):
|
|
2580
|
+
if overwrite:
|
|
2581
|
+
self.spec.repo.delete_remote(name)
|
|
2582
|
+
else:
|
|
2583
|
+
raise mlrun.errors.MLRunConflictError(
|
|
2584
|
+
f"Remote '{name}' already exists in the project, "
|
|
2585
|
+
f"each remote in the project must have a unique name."
|
|
2586
|
+
"Use 'set_remote' with 'override=True' inorder to update the remote, or choose a different name."
|
|
2587
|
+
)
|
|
2336
2588
|
self.spec.repo.create_remote(name, url=url)
|
|
2337
2589
|
url = url.replace("https://", "git://")
|
|
2338
2590
|
if not branch:
|
|
@@ -2345,6 +2597,22 @@ class MlrunProject(ModelObj):
|
|
|
2345
2597
|
self.spec._source = self.spec.source or url
|
|
2346
2598
|
self.spec.origin_url = self.spec.origin_url or url
|
|
2347
2599
|
|
|
2600
|
+
def remove_remote(self, name):
|
|
2601
|
+
"""Remove a remote from the project's Git repository.
|
|
2602
|
+
|
|
2603
|
+
This method removes the remote repository associated with the specified name from the project's Git repository.
|
|
2604
|
+
|
|
2605
|
+
:param name: Name of the remote to remove.
|
|
2606
|
+
"""
|
|
2607
|
+
if self._remote_exists(name):
|
|
2608
|
+
self.spec.repo.delete_remote(name)
|
|
2609
|
+
else:
|
|
2610
|
+
logger.warning(f"The remote '{name}' does not exist. Nothing to remove.")
|
|
2611
|
+
|
|
2612
|
+
def _remote_exists(self, name):
|
|
2613
|
+
"""Check if a remote with the given name already exists"""
|
|
2614
|
+
return any(remote.name == name for remote in self.spec.repo.remotes)
|
|
2615
|
+
|
|
2348
2616
|
def _ensure_git_repo(self):
|
|
2349
2617
|
if self.spec.repo:
|
|
2350
2618
|
return
|
|
@@ -2441,6 +2709,16 @@ class MlrunProject(ModelObj):
|
|
|
2441
2709
|
f = self.spec._function_definitions.get(name)
|
|
2442
2710
|
if not f:
|
|
2443
2711
|
raise ValueError(f"function named {name} not found")
|
|
2712
|
+
# If this function is already available locally, don't recreate it unless always=True
|
|
2713
|
+
if (
|
|
2714
|
+
isinstance(
|
|
2715
|
+
self.spec._function_objects.get(name, None),
|
|
2716
|
+
mlrun.runtimes.base.BaseRuntime,
|
|
2717
|
+
)
|
|
2718
|
+
and not always
|
|
2719
|
+
):
|
|
2720
|
+
funcs[name] = self.spec._function_objects[name]
|
|
2721
|
+
continue
|
|
2444
2722
|
if hasattr(f, "to_dict"):
|
|
2445
2723
|
name, func = _init_function_from_obj(f, self, name)
|
|
2446
2724
|
else:
|
|
@@ -2466,9 +2744,9 @@ class MlrunProject(ModelObj):
|
|
|
2466
2744
|
|
|
2467
2745
|
read secrets from a source provider to be used in workflows, example::
|
|
2468
2746
|
|
|
2469
|
-
proj.with_secrets(
|
|
2470
|
-
proj.with_secrets(
|
|
2471
|
-
proj.with_secrets(
|
|
2747
|
+
proj.with_secrets("file", "file.txt")
|
|
2748
|
+
proj.with_secrets("inline", {"key": "val"})
|
|
2749
|
+
proj.with_secrets("env", "ENV1,ENV2", prefix="PFX_")
|
|
2472
2750
|
|
|
2473
2751
|
Vault secret source has several options::
|
|
2474
2752
|
|
|
@@ -2479,7 +2757,7 @@ class MlrunProject(ModelObj):
|
|
|
2479
2757
|
The 2nd option uses the current project name as context.
|
|
2480
2758
|
Can also use empty secret list::
|
|
2481
2759
|
|
|
2482
|
-
proj.with_secrets(
|
|
2760
|
+
proj.with_secrets("vault", [])
|
|
2483
2761
|
|
|
2484
2762
|
This will enable access to all secrets in vault registered to the current project.
|
|
2485
2763
|
|
|
@@ -2510,17 +2788,20 @@ class MlrunProject(ModelObj):
|
|
|
2510
2788
|
file_path: str = None,
|
|
2511
2789
|
provider: typing.Union[str, mlrun.common.schemas.SecretProviderName] = None,
|
|
2512
2790
|
):
|
|
2513
|
-
"""
|
|
2791
|
+
"""
|
|
2792
|
+
Set project secrets from dict or secrets env file
|
|
2514
2793
|
when using a secrets file it should have lines in the form KEY=VALUE, comment line start with "#"
|
|
2515
2794
|
V3IO paths/credentials and MLrun service API address are dropped from the secrets
|
|
2516
2795
|
|
|
2517
|
-
example secrets file
|
|
2796
|
+
example secrets file:
|
|
2797
|
+
|
|
2798
|
+
.. code-block:: shell
|
|
2518
2799
|
|
|
2519
2800
|
# this is an env file
|
|
2520
|
-
AWS_ACCESS_KEY_ID
|
|
2801
|
+
AWS_ACCESS_KEY_ID=XXXX
|
|
2521
2802
|
AWS_SECRET_ACCESS_KEY=YYYY
|
|
2522
2803
|
|
|
2523
|
-
usage
|
|
2804
|
+
usage:
|
|
2524
2805
|
|
|
2525
2806
|
# read env vars from dict or file and set as project secrets
|
|
2526
2807
|
project.set_secrets({"SECRET1": "value"})
|
|
@@ -2583,7 +2864,7 @@ class MlrunProject(ModelObj):
|
|
|
2583
2864
|
self,
|
|
2584
2865
|
name: str = None,
|
|
2585
2866
|
workflow_path: str = None,
|
|
2586
|
-
arguments:
|
|
2867
|
+
arguments: dict[str, typing.Any] = None,
|
|
2587
2868
|
artifact_path: str = None,
|
|
2588
2869
|
workflow_handler: typing.Union[str, typing.Callable] = None,
|
|
2589
2870
|
namespace: str = None,
|
|
@@ -2598,7 +2879,7 @@ class MlrunProject(ModelObj):
|
|
|
2598
2879
|
timeout: int = None,
|
|
2599
2880
|
source: str = None,
|
|
2600
2881
|
cleanup_ttl: int = None,
|
|
2601
|
-
notifications:
|
|
2882
|
+
notifications: list[mlrun.model.Notification] = None,
|
|
2602
2883
|
) -> _PipelineRunStatus:
|
|
2603
2884
|
"""Run a workflow using kubeflow pipelines
|
|
2604
2885
|
|
|
@@ -2629,9 +2910,11 @@ class MlrunProject(ModelObj):
|
|
|
2629
2910
|
Path can be absolute or relative to `project.spec.build.source_code_target_dir` if defined
|
|
2630
2911
|
(enriched when building a project image with source, see `MlrunProject.build_image`).
|
|
2631
2912
|
For other engines the source is used to validate that the code is up-to-date.
|
|
2632
|
-
:param cleanup_ttl:
|
|
2633
|
-
|
|
2634
|
-
|
|
2913
|
+
:param cleanup_ttl:
|
|
2914
|
+
Pipeline cleanup ttl in secs (time to wait after workflow completion, at which point the
|
|
2915
|
+
workflow and all its resources are deleted)
|
|
2916
|
+
:param notifications:
|
|
2917
|
+
List of notifications to send for workflow completion
|
|
2635
2918
|
|
|
2636
2919
|
:returns: ~py:class:`~mlrun.projects.pipelines._PipelineRunStatus` instance
|
|
2637
2920
|
"""
|
|
@@ -2691,8 +2974,12 @@ class MlrunProject(ModelObj):
|
|
|
2691
2974
|
engine = "remote"
|
|
2692
2975
|
# The default engine is kfp if not given:
|
|
2693
2976
|
workflow_engine = get_workflow_engine(engine or workflow_spec.engine, local)
|
|
2694
|
-
if not inner_engine and engine == "remote":
|
|
2695
|
-
|
|
2977
|
+
if not inner_engine and workflow_engine.engine == "remote":
|
|
2978
|
+
# if inner engine is set to remote, assume kfp as the default inner engine with remote as the runner
|
|
2979
|
+
engine_kind = (
|
|
2980
|
+
workflow_spec.engine if workflow_spec.engine != "remote" else "kfp"
|
|
2981
|
+
)
|
|
2982
|
+
inner_engine = get_workflow_engine(engine_kind, local).engine
|
|
2696
2983
|
workflow_spec.engine = inner_engine or workflow_engine.engine
|
|
2697
2984
|
|
|
2698
2985
|
run = workflow_engine.run(
|
|
@@ -2707,7 +2994,7 @@ class MlrunProject(ModelObj):
|
|
|
2707
2994
|
notifications=notifications,
|
|
2708
2995
|
)
|
|
2709
2996
|
# run is None when scheduling
|
|
2710
|
-
if run and run.state ==
|
|
2997
|
+
if run and run.state == mlrun_pipelines.common.models.RunStatuses.failed:
|
|
2711
2998
|
return run
|
|
2712
2999
|
if not workflow_spec.schedule:
|
|
2713
3000
|
# Failure and schedule messages already logged
|
|
@@ -2716,14 +3003,17 @@ class MlrunProject(ModelObj):
|
|
|
2716
3003
|
)
|
|
2717
3004
|
workflow_spec.clear_tmp()
|
|
2718
3005
|
if (timeout or watch) and not workflow_spec.schedule:
|
|
3006
|
+
run_status_kwargs = {}
|
|
2719
3007
|
status_engine = run._engine
|
|
2720
3008
|
# run's engine gets replaced with inner engine if engine is remote,
|
|
2721
3009
|
# so in that case we need to get the status from the remote engine manually
|
|
2722
|
-
|
|
2723
|
-
if engine == "remote" and status_engine.engine != "local":
|
|
3010
|
+
if workflow_engine.engine == "remote":
|
|
2724
3011
|
status_engine = _RemoteRunner
|
|
3012
|
+
run_status_kwargs["inner_engine"] = run._engine
|
|
2725
3013
|
|
|
2726
|
-
status_engine.get_run_status(
|
|
3014
|
+
status_engine.get_run_status(
|
|
3015
|
+
project=self, run=run, timeout=timeout, **run_status_kwargs
|
|
3016
|
+
)
|
|
2727
3017
|
return run
|
|
2728
3018
|
|
|
2729
3019
|
def save_workflow(self, name, target, artifact_path=None, ttl=None):
|
|
@@ -2823,17 +3113,18 @@ class MlrunProject(ModelObj):
|
|
|
2823
3113
|
|
|
2824
3114
|
def set_model_monitoring_credentials(
|
|
2825
3115
|
self,
|
|
2826
|
-
access_key: str = None,
|
|
2827
|
-
endpoint_store_connection: str = None,
|
|
2828
|
-
stream_path: str = None,
|
|
3116
|
+
access_key: Optional[str] = None,
|
|
3117
|
+
endpoint_store_connection: Optional[str] = None,
|
|
3118
|
+
stream_path: Optional[str] = None,
|
|
3119
|
+
tsdb_connection: Optional[str] = None,
|
|
2829
3120
|
):
|
|
2830
3121
|
"""Set the credentials that will be used by the project's model monitoring
|
|
2831
3122
|
infrastructure functions.
|
|
2832
3123
|
|
|
2833
|
-
:param access_key: Model Monitoring access key for managing user permissions
|
|
2834
3124
|
:param access_key: Model Monitoring access key for managing user permissions
|
|
2835
3125
|
:param endpoint_store_connection: Endpoint store connection string
|
|
2836
3126
|
:param stream_path: Path to the model monitoring stream
|
|
3127
|
+
:param tsdb_connection: Connection string to the time series database
|
|
2837
3128
|
"""
|
|
2838
3129
|
|
|
2839
3130
|
secrets_dict = {}
|
|
@@ -2856,6 +3147,16 @@ class MlrunProject(ModelObj):
|
|
|
2856
3147
|
mlrun.common.schemas.model_monitoring.ProjectSecretKeys.STREAM_PATH
|
|
2857
3148
|
] = stream_path
|
|
2858
3149
|
|
|
3150
|
+
if tsdb_connection:
|
|
3151
|
+
if not tsdb_connection.startswith("taosws://"):
|
|
3152
|
+
raise mlrun.errors.MLRunInvalidArgumentError(
|
|
3153
|
+
"Currently only TDEngine websocket connection is supported for non-v3io TSDB,"
|
|
3154
|
+
"please provide a full URL (e.g. taosws://user:password@host:port)"
|
|
3155
|
+
)
|
|
3156
|
+
secrets_dict[
|
|
3157
|
+
mlrun.common.schemas.model_monitoring.ProjectSecretKeys.TSDB_CONNECTION
|
|
3158
|
+
] = tsdb_connection
|
|
3159
|
+
|
|
2859
3160
|
self.set_secrets(
|
|
2860
3161
|
secrets=secrets_dict,
|
|
2861
3162
|
provider=mlrun.common.schemas.SecretProviderName.kubernetes,
|
|
@@ -2870,7 +3171,7 @@ class MlrunProject(ModelObj):
|
|
|
2870
3171
|
hyperparams: dict = None,
|
|
2871
3172
|
hyper_param_options: mlrun.model.HyperParamOptions = None,
|
|
2872
3173
|
inputs: dict = None,
|
|
2873
|
-
outputs:
|
|
3174
|
+
outputs: list[str] = None,
|
|
2874
3175
|
workdir: str = "",
|
|
2875
3176
|
labels: dict = None,
|
|
2876
3177
|
base_task: mlrun.model.RunTemplate = None,
|
|
@@ -2881,10 +3182,10 @@ class MlrunProject(ModelObj):
|
|
|
2881
3182
|
auto_build: bool = None,
|
|
2882
3183
|
schedule: typing.Union[str, mlrun.common.schemas.ScheduleCronTrigger] = None,
|
|
2883
3184
|
artifact_path: str = None,
|
|
2884
|
-
notifications:
|
|
2885
|
-
returns: Optional[
|
|
3185
|
+
notifications: list[mlrun.model.Notification] = None,
|
|
3186
|
+
returns: Optional[list[Union[str, dict[str, str]]]] = None,
|
|
2886
3187
|
builder_env: Optional[dict] = None,
|
|
2887
|
-
) -> typing.Union[mlrun.model.RunObject,
|
|
3188
|
+
) -> typing.Union[mlrun.model.RunObject, PipelineNodeWrapper]:
|
|
2888
3189
|
"""Run a local or remote task as part of a local/kubeflow pipeline
|
|
2889
3190
|
|
|
2890
3191
|
example (use with project)::
|
|
@@ -2896,8 +3197,11 @@ class MlrunProject(ModelObj):
|
|
|
2896
3197
|
|
|
2897
3198
|
# run functions (refer to them by name)
|
|
2898
3199
|
run1 = project.run_function("myfunc", params={"x": 7})
|
|
2899
|
-
run2 = project.run_function(
|
|
2900
|
-
|
|
3200
|
+
run2 = project.run_function(
|
|
3201
|
+
"train",
|
|
3202
|
+
params={"label_columns": LABELS},
|
|
3203
|
+
inputs={"dataset": run1.outputs["data"]},
|
|
3204
|
+
)
|
|
2901
3205
|
|
|
2902
3206
|
:param function: name of the function (in the project) or function object
|
|
2903
3207
|
:param handler: name of the function handler
|
|
@@ -2937,7 +3241,7 @@ class MlrunProject(ModelObj):
|
|
|
2937
3241
|
artifact type can be given there. The artifact key must appear in the dictionary as
|
|
2938
3242
|
"key": "the_key".
|
|
2939
3243
|
:param builder_env: env vars dict for source archive config/credentials e.g. builder_env={"GIT_TOKEN": token}
|
|
2940
|
-
:return: MLRun RunObject or
|
|
3244
|
+
:return: MLRun RunObject or PipelineNodeWrapper
|
|
2941
3245
|
"""
|
|
2942
3246
|
return run_function(
|
|
2943
3247
|
function,
|
|
@@ -2973,14 +3277,14 @@ class MlrunProject(ModelObj):
|
|
|
2973
3277
|
base_image: str = None,
|
|
2974
3278
|
commands: list = None,
|
|
2975
3279
|
secret_name: str = None,
|
|
2976
|
-
requirements: typing.Union[str,
|
|
3280
|
+
requirements: typing.Union[str, list[str]] = None,
|
|
2977
3281
|
mlrun_version_specifier: str = None,
|
|
2978
3282
|
builder_env: dict = None,
|
|
2979
3283
|
overwrite_build_params: bool = False,
|
|
2980
3284
|
requirements_file: str = None,
|
|
2981
3285
|
extra_args: str = None,
|
|
2982
3286
|
force_build: bool = False,
|
|
2983
|
-
) -> typing.Union[BuildStatus,
|
|
3287
|
+
) -> typing.Union[BuildStatus, PipelineNodeWrapper]:
|
|
2984
3288
|
"""deploy ML function, build container with its dependencies
|
|
2985
3289
|
|
|
2986
3290
|
:param function: name of the function (in the project) or function object
|
|
@@ -3029,7 +3333,7 @@ class MlrunProject(ModelObj):
|
|
|
3029
3333
|
base_image: str = None,
|
|
3030
3334
|
commands: list = None,
|
|
3031
3335
|
secret_name: str = None,
|
|
3032
|
-
requirements: typing.Union[str,
|
|
3336
|
+
requirements: typing.Union[str, list[str]] = None,
|
|
3033
3337
|
overwrite_build_params: bool = False,
|
|
3034
3338
|
requirements_file: str = None,
|
|
3035
3339
|
builder_env: dict = None,
|
|
@@ -3091,18 +3395,17 @@ class MlrunProject(ModelObj):
|
|
|
3091
3395
|
image: str = None,
|
|
3092
3396
|
set_as_default: bool = True,
|
|
3093
3397
|
with_mlrun: bool = None,
|
|
3094
|
-
skip_deployed: bool = False,
|
|
3095
3398
|
base_image: str = None,
|
|
3096
3399
|
commands: list = None,
|
|
3097
3400
|
secret_name: str = None,
|
|
3098
|
-
requirements: typing.Union[str,
|
|
3401
|
+
requirements: typing.Union[str, list[str]] = None,
|
|
3099
3402
|
mlrun_version_specifier: str = None,
|
|
3100
3403
|
builder_env: dict = None,
|
|
3101
3404
|
overwrite_build_params: bool = False,
|
|
3102
3405
|
requirements_file: str = None,
|
|
3103
3406
|
extra_args: str = None,
|
|
3104
3407
|
target_dir: str = None,
|
|
3105
|
-
) -> typing.Union[BuildStatus,
|
|
3408
|
+
) -> typing.Union[BuildStatus, PipelineNodeWrapper]:
|
|
3106
3409
|
"""Builder docker image for the project, based on the project's build config. Parameters allow to override
|
|
3107
3410
|
the build config.
|
|
3108
3411
|
If the project has a source configured and pull_at_runtime is not configured, this source will be cloned to the
|
|
@@ -3112,7 +3415,6 @@ class MlrunProject(ModelObj):
|
|
|
3112
3415
|
used. If not set, the `mlconf.default_project_image_name` value will be used
|
|
3113
3416
|
:param set_as_default: set `image` to be the project's default image (default False)
|
|
3114
3417
|
:param with_mlrun: add the current mlrun package to the container build
|
|
3115
|
-
:param skip_deployed: *Deprecated* parameter is ignored
|
|
3116
3418
|
:param base_image: base image name/path (commands and source code will be added to it) defaults to
|
|
3117
3419
|
mlrun.mlconf.default_base_image
|
|
3118
3420
|
:param commands: list of docker build (RUN) commands e.g. ['pip install pandas']
|
|
@@ -3137,14 +3439,6 @@ class MlrunProject(ModelObj):
|
|
|
3137
3439
|
base_image=base_image,
|
|
3138
3440
|
)
|
|
3139
3441
|
|
|
3140
|
-
if skip_deployed:
|
|
3141
|
-
warnings.warn(
|
|
3142
|
-
"The 'skip_deployed' parameter is deprecated and will be removed in 1.7.0. "
|
|
3143
|
-
"This parameter is ignored.",
|
|
3144
|
-
# TODO: remove in 1.7.0
|
|
3145
|
-
FutureWarning,
|
|
3146
|
-
)
|
|
3147
|
-
|
|
3148
3442
|
if not overwrite_build_params:
|
|
3149
3443
|
# TODO: change overwrite_build_params default to True in 1.8.0
|
|
3150
3444
|
warnings.warn(
|
|
@@ -3208,7 +3502,7 @@ class MlrunProject(ModelObj):
|
|
|
3208
3502
|
logger.warning(
|
|
3209
3503
|
f"Image was successfully built, but failed to delete temporary function {function.metadata.name}."
|
|
3210
3504
|
" To remove the function, attempt to manually delete it.",
|
|
3211
|
-
exc=
|
|
3505
|
+
exc=mlrun.errors.err_to_str(exc),
|
|
3212
3506
|
)
|
|
3213
3507
|
|
|
3214
3508
|
return result
|
|
@@ -3222,7 +3516,7 @@ class MlrunProject(ModelObj):
|
|
|
3222
3516
|
verbose: bool = None,
|
|
3223
3517
|
builder_env: dict = None,
|
|
3224
3518
|
mock: bool = None,
|
|
3225
|
-
) -> typing.Union[DeployStatus,
|
|
3519
|
+
) -> typing.Union[DeployStatus, PipelineNodeWrapper]:
|
|
3226
3520
|
"""deploy real-time (nuclio based) functions
|
|
3227
3521
|
|
|
3228
3522
|
:param function: name of the function (in the project) or function object
|
|
@@ -3257,13 +3551,18 @@ class MlrunProject(ModelObj):
|
|
|
3257
3551
|
artifact = db.read_artifact(
|
|
3258
3552
|
key, tag, iter=iter, project=self.metadata.name, tree=tree
|
|
3259
3553
|
)
|
|
3260
|
-
|
|
3554
|
+
|
|
3555
|
+
# in tests, if an artifact is not found, the db returns None
|
|
3556
|
+
# in real usage, the db should raise an exception
|
|
3557
|
+
if artifact:
|
|
3558
|
+
return dict_to_artifact(artifact)
|
|
3559
|
+
return None
|
|
3261
3560
|
|
|
3262
3561
|
def list_artifacts(
|
|
3263
3562
|
self,
|
|
3264
3563
|
name=None,
|
|
3265
3564
|
tag=None,
|
|
3266
|
-
labels: Optional[Union[
|
|
3565
|
+
labels: Optional[Union[dict[str, str], list[str]]] = None,
|
|
3267
3566
|
since=None,
|
|
3268
3567
|
until=None,
|
|
3269
3568
|
iter: int = None,
|
|
@@ -3280,9 +3579,9 @@ class MlrunProject(ModelObj):
|
|
|
3280
3579
|
Examples::
|
|
3281
3580
|
|
|
3282
3581
|
# Get latest version of all artifacts in project
|
|
3283
|
-
latest_artifacts = project.list_artifacts(
|
|
3582
|
+
latest_artifacts = project.list_artifacts("", tag="latest")
|
|
3284
3583
|
# check different artifact versions for a specific artifact, return as objects list
|
|
3285
|
-
result_versions = project.list_artifacts(
|
|
3584
|
+
result_versions = project.list_artifacts("results", tag="*").to_objects()
|
|
3286
3585
|
|
|
3287
3586
|
:param name: Name of artifacts to retrieve. Name with '~' prefix is used as a like query, and is not
|
|
3288
3587
|
case-sensitive. This means that querying for ``~name`` may return artifacts named
|
|
@@ -3320,7 +3619,7 @@ class MlrunProject(ModelObj):
|
|
|
3320
3619
|
self,
|
|
3321
3620
|
name=None,
|
|
3322
3621
|
tag=None,
|
|
3323
|
-
labels: Optional[Union[
|
|
3622
|
+
labels: Optional[Union[dict[str, str], list[str]]] = None,
|
|
3324
3623
|
since=None,
|
|
3325
3624
|
until=None,
|
|
3326
3625
|
iter: int = None,
|
|
@@ -3332,7 +3631,7 @@ class MlrunProject(ModelObj):
|
|
|
3332
3631
|
Examples::
|
|
3333
3632
|
|
|
3334
3633
|
# Get latest version of all models in project
|
|
3335
|
-
latest_models = project.list_models(
|
|
3634
|
+
latest_models = project.list_models("", tag="latest")
|
|
3336
3635
|
|
|
3337
3636
|
|
|
3338
3637
|
:param name: Name of artifacts to retrieve. Name with '~' prefix is used as a like query, and is not
|
|
@@ -3402,9 +3701,7 @@ class MlrunProject(ModelObj):
|
|
|
3402
3701
|
:returns: List of function objects.
|
|
3403
3702
|
"""
|
|
3404
3703
|
|
|
3405
|
-
model_monitoring_labels_list = [
|
|
3406
|
-
f"{mm_constants.ModelMonitoringAppLabel.KEY}={mm_constants.ModelMonitoringAppLabel.VAL}"
|
|
3407
|
-
]
|
|
3704
|
+
model_monitoring_labels_list = [str(mm_constants.ModelMonitoringAppLabel())]
|
|
3408
3705
|
if labels:
|
|
3409
3706
|
model_monitoring_labels_list += labels
|
|
3410
3707
|
return self.list_functions(
|
|
@@ -3416,9 +3713,12 @@ class MlrunProject(ModelObj):
|
|
|
3416
3713
|
def list_runs(
|
|
3417
3714
|
self,
|
|
3418
3715
|
name: Optional[str] = None,
|
|
3419
|
-
uid: Optional[Union[str,
|
|
3420
|
-
labels: Optional[Union[str,
|
|
3421
|
-
state: Optional[
|
|
3716
|
+
uid: Optional[Union[str, list[str]]] = None,
|
|
3717
|
+
labels: Optional[Union[str, list[str]]] = None,
|
|
3718
|
+
state: Optional[
|
|
3719
|
+
mlrun.common.runtimes.constants.RunStates
|
|
3720
|
+
] = None, # Backward compatibility
|
|
3721
|
+
states: typing.Optional[list[mlrun.common.runtimes.constants.RunStates]] = None,
|
|
3422
3722
|
sort: bool = True,
|
|
3423
3723
|
last: int = 0,
|
|
3424
3724
|
iter: bool = False,
|
|
@@ -3437,14 +3737,14 @@ class MlrunProject(ModelObj):
|
|
|
3437
3737
|
Example::
|
|
3438
3738
|
|
|
3439
3739
|
# return a list of runs matching the name and label and compare
|
|
3440
|
-
runs = project.list_runs(name=
|
|
3740
|
+
runs = project.list_runs(name="download", labels="owner=admin")
|
|
3441
3741
|
runs.compare()
|
|
3442
3742
|
|
|
3443
3743
|
# multi-label filter can also be provided
|
|
3444
|
-
runs = project.list_runs(name=
|
|
3744
|
+
runs = project.list_runs(name="download", labels=["kind=job", "owner=admin"])
|
|
3445
3745
|
|
|
3446
3746
|
# If running in Jupyter, can use the .show() function to display the results
|
|
3447
|
-
project.list_runs(name=
|
|
3747
|
+
project.list_runs(name="").show()
|
|
3448
3748
|
|
|
3449
3749
|
|
|
3450
3750
|
:param name: Name of the run to retrieve.
|
|
@@ -3452,10 +3752,11 @@ class MlrunProject(ModelObj):
|
|
|
3452
3752
|
:param labels: A list of labels to filter by. Label filters work by either filtering a specific value
|
|
3453
3753
|
of a label (i.e. list("key=value")) or by looking for the existence of a given
|
|
3454
3754
|
key (i.e. "key").
|
|
3455
|
-
:param state: List only runs whose state is specified.
|
|
3755
|
+
:param state: Deprecated - List only runs whose state is specified.
|
|
3756
|
+
:param states: List only runs whose state is one of the provided states.
|
|
3456
3757
|
:param sort: Whether to sort the result according to their start time. Otherwise, results will be
|
|
3457
3758
|
returned by their internal order in the DB (order will not be guaranteed).
|
|
3458
|
-
:param last: Deprecated - currently not used (will be removed in 1.
|
|
3759
|
+
:param last: Deprecated - currently not used (will be removed in 1.9.0).
|
|
3459
3760
|
:param iter: If ``True`` return runs from all iterations. Otherwise, return only runs whose ``iter`` is 0.
|
|
3460
3761
|
:param start_time_from: Filter by run start time in ``[start_time_from, start_time_to]``.
|
|
3461
3762
|
:param start_time_to: Filter by run start time in ``[start_time_from, start_time_to]``.
|
|
@@ -3463,13 +3764,22 @@ class MlrunProject(ModelObj):
|
|
|
3463
3764
|
last_update_time_to)``.
|
|
3464
3765
|
:param last_update_time_to: Filter by run last update time in ``(last_update_time_from, last_update_time_to)``.
|
|
3465
3766
|
"""
|
|
3767
|
+
if state:
|
|
3768
|
+
# TODO: Remove this in 1.9.0
|
|
3769
|
+
warnings.warn(
|
|
3770
|
+
"'state' is deprecated and will be removed in 1.9.0. Use 'states' instead.",
|
|
3771
|
+
FutureWarning,
|
|
3772
|
+
)
|
|
3773
|
+
|
|
3466
3774
|
db = mlrun.db.get_run_db(secrets=self._secrets)
|
|
3467
3775
|
return db.list_runs(
|
|
3468
3776
|
name,
|
|
3469
3777
|
uid,
|
|
3470
3778
|
self.metadata.name,
|
|
3471
3779
|
labels=labels,
|
|
3472
|
-
|
|
3780
|
+
states=mlrun.utils.helpers.as_list(state)
|
|
3781
|
+
if state is not None
|
|
3782
|
+
else states or None,
|
|
3473
3783
|
sort=sort,
|
|
3474
3784
|
last=last,
|
|
3475
3785
|
iter=iter,
|
|
@@ -3505,7 +3815,7 @@ class MlrunProject(ModelObj):
|
|
|
3505
3815
|
profile, self.name
|
|
3506
3816
|
)
|
|
3507
3817
|
|
|
3508
|
-
def list_datastore_profiles(self) ->
|
|
3818
|
+
def list_datastore_profiles(self) -> list[DatastoreProfile]:
|
|
3509
3819
|
"""
|
|
3510
3820
|
Returns a list of datastore profiles associated with the project.
|
|
3511
3821
|
The information excludes private details, showcasing only public data.
|
|
@@ -3514,7 +3824,7 @@ class MlrunProject(ModelObj):
|
|
|
3514
3824
|
self.name
|
|
3515
3825
|
)
|
|
3516
3826
|
|
|
3517
|
-
def get_custom_packagers(self) ->
|
|
3827
|
+
def get_custom_packagers(self) -> list[tuple[str, bool]]:
|
|
3518
3828
|
"""
|
|
3519
3829
|
Get the custom packagers registered in the project.
|
|
3520
3830
|
|
|
@@ -3550,12 +3860,179 @@ class MlrunProject(ModelObj):
|
|
|
3550
3860
|
"""
|
|
3551
3861
|
self.spec.remove_custom_packager(packager=packager)
|
|
3552
3862
|
|
|
3863
|
+
def store_api_gateway(
|
|
3864
|
+
self,
|
|
3865
|
+
api_gateway: mlrun.runtimes.nuclio.api_gateway.APIGateway,
|
|
3866
|
+
wait_for_readiness=True,
|
|
3867
|
+
max_wait_time=90,
|
|
3868
|
+
) -> mlrun.runtimes.nuclio.api_gateway.APIGateway:
|
|
3869
|
+
"""
|
|
3870
|
+
Creates or updates a Nuclio API Gateway using the provided APIGateway object.
|
|
3871
|
+
|
|
3872
|
+
This method interacts with the MLRun service to create/update a Nuclio API Gateway based on the provided
|
|
3873
|
+
APIGateway object. Once done, it returns the updated APIGateway object containing all fields propagated
|
|
3874
|
+
on MLRun and Nuclio sides, such as the 'host' attribute.
|
|
3875
|
+
Nuclio docs here: https://docs.nuclio.io/en/latest/reference/api-gateway/http.html
|
|
3876
|
+
|
|
3877
|
+
:param api_gateway: An instance of :py:class:`~mlrun.runtimes.nuclio.APIGateway` representing the configuration
|
|
3878
|
+
of the API Gateway to be created or updated.
|
|
3879
|
+
:param wait_for_readiness: (Optional) A boolean indicating whether to wait for the API Gateway to become ready
|
|
3880
|
+
after creation or update (default is True)
|
|
3881
|
+
:param max_wait_time: (Optional) Maximum time to wait for API Gateway readiness in seconds (default is 90s)
|
|
3882
|
+
|
|
3883
|
+
|
|
3884
|
+
@return: An instance of :py:class:`~mlrun.runtimes.nuclio.APIGateway` with all fields populated based on the
|
|
3885
|
+
information retrieved from the Nuclio API
|
|
3886
|
+
"""
|
|
3887
|
+
|
|
3888
|
+
api_gateway_json = mlrun.db.get_run_db().store_api_gateway(
|
|
3889
|
+
api_gateway=api_gateway,
|
|
3890
|
+
project=self.name,
|
|
3891
|
+
)
|
|
3892
|
+
|
|
3893
|
+
if api_gateway_json:
|
|
3894
|
+
# fill in all the fields in the user's api_gateway object
|
|
3895
|
+
api_gateway = mlrun.runtimes.nuclio.api_gateway.APIGateway.from_scheme(
|
|
3896
|
+
api_gateway_json
|
|
3897
|
+
)
|
|
3898
|
+
if wait_for_readiness:
|
|
3899
|
+
api_gateway.wait_for_readiness(max_wait_time=max_wait_time)
|
|
3900
|
+
|
|
3901
|
+
return api_gateway
|
|
3902
|
+
|
|
3903
|
+
def list_api_gateways(self) -> list[mlrun.runtimes.nuclio.api_gateway.APIGateway]:
|
|
3904
|
+
"""
|
|
3905
|
+
Retrieves a list of Nuclio API gateways associated with the project.
|
|
3906
|
+
|
|
3907
|
+
@return: List of :py:class:`~mlrun.runtimes.nuclio.api_gateway.APIGateway` objects representing
|
|
3908
|
+
the Nuclio API gateways associated with the project.
|
|
3909
|
+
"""
|
|
3910
|
+
gateways_list = mlrun.db.get_run_db().list_api_gateways(self.name)
|
|
3911
|
+
return [
|
|
3912
|
+
mlrun.runtimes.nuclio.api_gateway.APIGateway.from_scheme(gateway_dict)
|
|
3913
|
+
for gateway_dict in gateways_list.api_gateways.values()
|
|
3914
|
+
]
|
|
3915
|
+
|
|
3916
|
+
def get_api_gateway(
|
|
3917
|
+
self,
|
|
3918
|
+
name: str,
|
|
3919
|
+
) -> mlrun.runtimes.nuclio.api_gateway.APIGateway:
|
|
3920
|
+
"""
|
|
3921
|
+
Retrieves an API gateway by name instance.
|
|
3922
|
+
|
|
3923
|
+
:param name: The name of the API gateway to retrieve.
|
|
3924
|
+
|
|
3925
|
+
Returns:
|
|
3926
|
+
mlrun.runtimes.nuclio.APIGateway: An instance of APIGateway.
|
|
3927
|
+
"""
|
|
3928
|
+
|
|
3929
|
+
gateway = mlrun.db.get_run_db().get_api_gateway(name=name, project=self.name)
|
|
3930
|
+
return mlrun.runtimes.nuclio.api_gateway.APIGateway.from_scheme(gateway)
|
|
3931
|
+
|
|
3932
|
+
def delete_api_gateway(
|
|
3933
|
+
self,
|
|
3934
|
+
name: str,
|
|
3935
|
+
):
|
|
3936
|
+
"""
|
|
3937
|
+
Deletes an API gateway by name.
|
|
3938
|
+
|
|
3939
|
+
:param name: The name of the API gateway to delete.
|
|
3940
|
+
"""
|
|
3941
|
+
|
|
3942
|
+
mlrun.db.get_run_db().delete_api_gateway(name=name, project=self.name)
|
|
3943
|
+
|
|
3944
|
+
def store_alert_config(
|
|
3945
|
+
self, alert_data: AlertConfig, alert_name=None
|
|
3946
|
+
) -> AlertConfig:
|
|
3947
|
+
"""
|
|
3948
|
+
Create/modify an alert.
|
|
3949
|
+
:param alert_data: The data of the alert.
|
|
3950
|
+
:param alert_name: The name of the alert.
|
|
3951
|
+
:return: the created/modified alert.
|
|
3952
|
+
"""
|
|
3953
|
+
db = mlrun.db.get_run_db(secrets=self._secrets)
|
|
3954
|
+
if alert_name is None:
|
|
3955
|
+
alert_name = alert_data.name
|
|
3956
|
+
return db.store_alert_config(alert_name, alert_data, project=self.metadata.name)
|
|
3957
|
+
|
|
3958
|
+
def get_alert_config(self, alert_name: str) -> AlertConfig:
|
|
3959
|
+
"""
|
|
3960
|
+
Retrieve an alert.
|
|
3961
|
+
:param alert_name: The name of the alert to retrieve.
|
|
3962
|
+
:return: The alert object.
|
|
3963
|
+
"""
|
|
3964
|
+
db = mlrun.db.get_run_db(secrets=self._secrets)
|
|
3965
|
+
return db.get_alert_config(alert_name, self.metadata.name)
|
|
3966
|
+
|
|
3967
|
+
def list_alerts_configs(self) -> list[AlertConfig]:
|
|
3968
|
+
"""
|
|
3969
|
+
Retrieve list of alerts of a project.
|
|
3970
|
+
:return: All the alerts objects of the project.
|
|
3971
|
+
"""
|
|
3972
|
+
db = mlrun.db.get_run_db(secrets=self._secrets)
|
|
3973
|
+
return db.list_alerts_configs(self.metadata.name)
|
|
3974
|
+
|
|
3975
|
+
def delete_alert_config(
|
|
3976
|
+
self, alert_data: AlertConfig = None, alert_name: str = None
|
|
3977
|
+
):
|
|
3978
|
+
"""
|
|
3979
|
+
Delete an alert.
|
|
3980
|
+
:param alert_data: The data of the alert.
|
|
3981
|
+
:param alert_name: The name of the alert to delete.
|
|
3982
|
+
"""
|
|
3983
|
+
if alert_data is None and alert_name is None:
|
|
3984
|
+
raise ValueError(
|
|
3985
|
+
"At least one of alert_data or alert_name must be provided"
|
|
3986
|
+
)
|
|
3987
|
+
if alert_data and alert_name and alert_data.name != alert_name:
|
|
3988
|
+
raise ValueError("Alert_data name does not match the provided alert_name")
|
|
3989
|
+
db = mlrun.db.get_run_db(secrets=self._secrets)
|
|
3990
|
+
if alert_data:
|
|
3991
|
+
alert_name = alert_data.name
|
|
3992
|
+
db.delete_alert_config(alert_name, self.metadata.name)
|
|
3993
|
+
|
|
3994
|
+
def reset_alert_config(
    self, alert_data: AlertConfig = None, alert_name: str = None
):
    """
    Reset an alert of this project.

    Either the alert object or its name must be supplied; when both are
    given, their names must agree.

    :param alert_data: The alert configuration object to reset.
    :param alert_name: The name of the alert to reset.
    """
    if alert_data is None and alert_name is None:
        raise ValueError(
            "At least one of alert_data or alert_name must be provided"
        )
    if alert_data:
        # when both identifiers are supplied they must refer to the same alert
        if alert_name and alert_data.name != alert_name:
            raise ValueError("Alert_data name does not match the provided alert_name")
        alert_name = alert_data.name
    run_db = mlrun.db.get_run_db(secrets=self._secrets)
    run_db.reset_alert_config(alert_name, self.metadata.name)
|
|
4012
|
+
|
|
4013
|
+
def get_alert_template(self, template_name: str) -> AlertTemplate:
    """
    Fetch a specific alert template from the MLRun DB.

    :param template_name: The name of the template to fetch.
    :return: The alert template object.
    """
    run_db = mlrun.db.get_run_db(secrets=self._secrets)
    return run_db.get_alert_template(template_name)
|
|
4021
|
+
|
|
4022
|
+
def list_alert_templates(self) -> list[AlertTemplate]:
    """
    List all alert templates available in the MLRun DB.

    :return: Every alert template object stored in the database.
    """
    run_db = mlrun.db.get_run_db(secrets=self._secrets)
    return run_db.list_alert_templates()
|
|
4029
|
+
|
|
3553
4030
|
def _run_authenticated_git_action(
|
|
3554
4031
|
self,
|
|
3555
4032
|
action: Callable,
|
|
3556
4033
|
remote: str,
|
|
3557
|
-
args: list =
|
|
3558
|
-
kwargs: dict =
|
|
4034
|
+
args: list = None,
|
|
4035
|
+
kwargs: dict = None,
|
|
3559
4036
|
secrets: Union[SecretsStore, dict] = None,
|
|
3560
4037
|
):
|
|
3561
4038
|
"""Run an arbitrary Git routine while the remote is enriched with secrets
|
|
@@ -3575,6 +4052,8 @@ class MlrunProject(ModelObj):
|
|
|
3575
4052
|
try:
|
|
3576
4053
|
if is_remote_enriched:
|
|
3577
4054
|
self.spec.repo.remotes[remote].set_url(enriched_remote, clean_remote)
|
|
4055
|
+
args = args or []
|
|
4056
|
+
kwargs = kwargs or {}
|
|
3578
4057
|
action(*args, **kwargs)
|
|
3579
4058
|
except RuntimeError as e:
|
|
3580
4059
|
raise mlrun.errors.MLRunRuntimeError(
|
|
@@ -3627,6 +4106,83 @@ class MlrunProject(ModelObj):
|
|
|
3627
4106
|
f"<project.spec.get_code_path()>/<{param_name}>)."
|
|
3628
4107
|
)
|
|
3629
4108
|
|
|
4109
|
+
def _resolve_artifact_producer(
    self,
    artifact: typing.Union[str, Artifact],
    project_producer_tag: str = None,
) -> tuple[ArtifactProducer, bool]:
    """
    Figure out which producer an artifact should be registered under.

    When the artifact already carries a producer of kind ``run``, that
    original producer is kept. In every other case the current project
    itself becomes the producer.

    :param artifact: The artifact (object or key string) whose producer is
        resolved.
    :param project_producer_tag: Tag to use when the project acts as the
        producer; generated for the project when omitted.
    :return: A tuple of (resolved producer, whether the original producer
        was retained).
    """
    producer_meta = None
    if not isinstance(artifact, str) and artifact.spec.producer:
        raw_producer = artifact.spec.producer
        # an artifact imported from a yaml file may hold its producer as a
        # plain dict rather than an ArtifactProducer instance
        if isinstance(raw_producer, ArtifactProducer):
            producer_meta = raw_producer.get_meta()
        else:
            producer_meta = raw_producer

    if producer_meta is not None and producer_meta.get("kind", "") == "run":
        # retain the original run producer
        original_producer = ArtifactProducer(
            name=producer_meta.get("name", ""),
            kind=producer_meta.get("kind", ""),
            project=producer_meta.get("project", ""),
            tag=producer_meta.get("tag", ""),
        )
        return original_producer, True

    # do not retain the artifact's producer — replace it with the project
    resolved_tag = project_producer_tag or self._get_project_tag()
    project_producer = ArtifactProducer(
        kind="project",
        name=self.metadata.name,
        project=self.metadata.name,
        tag=resolved_tag,
    )
    return project_producer, False
|
|
4148
|
+
|
|
4149
|
+
def _resolve_existing_artifact(
    self,
    item: typing.Union[str, Artifact],
    tag: str = None,
) -> typing.Optional[Artifact]:
    """
    Check if there is an existing artifact for the given item and tag.

    If there is, return the existing artifact. Otherwise, return None.

    :param item: The item (or key) to check if there is an existing artifact for.
    :param tag: The tag to check if there is an existing artifact for
        (only used when `item` is a key string; otherwise the item's own
        tag/iter/tree are used).
    :return: The existing artifact if there is one, otherwise None.
    """
    try:
        if isinstance(item, str):
            # a bare key: look it up with the explicitly provided tag
            existing_artifact = self.get_artifact(key=item, tag=tag)
        else:
            # a full artifact object: use its own identifying fields
            existing_artifact = self.get_artifact(
                key=item.key,
                tag=item.tag,
                iter=item.iter,
                tree=item.tree,
            )
        if existing_artifact is not None:
            # NOTE(review): `from_dict` is invoked on the returned artifact
            # and is passed the artifact itself rather than a dict — confirm
            # that `get_artifact` returns a structure `from_dict` accepts.
            return existing_artifact.from_dict(existing_artifact)
    except mlrun.errors.MLRunNotFoundError:
        # no matching artifact in the DB — log the identifiers used and
        # fall through to returning None
        logger.debug(
            "No existing artifact was found",
            key=item if isinstance(item, str) else item.key,
            tag=tag if isinstance(item, str) else item.tag,
            tree=None if isinstance(item, str) else item.tree,
        )
    return None
|
|
4182
|
+
|
|
4183
|
+
def _get_project_tag(self):
    """Return the project's hexsha when available, otherwise a fresh UUID string."""
    hexsha = self._get_hexsha()
    if hexsha:
        return hexsha
    return str(uuid.uuid4())
|
|
4185
|
+
|
|
3630
4186
|
|
|
3631
4187
|
def _set_as_current_default_project(project: MlrunProject):
|
|
3632
4188
|
mlrun.mlconf.default_project = project.metadata.name
|
|
@@ -3637,7 +4193,7 @@ def _init_function_from_dict(
|
|
|
3637
4193
|
f: dict,
|
|
3638
4194
|
project: MlrunProject,
|
|
3639
4195
|
name: typing.Optional[str] = None,
|
|
3640
|
-
) ->
|
|
4196
|
+
) -> tuple[str, mlrun.runtimes.BaseRuntime]:
|
|
3641
4197
|
name = name or f.get("name", "")
|
|
3642
4198
|
url = f.get("url", "")
|
|
3643
4199
|
kind = f.get("kind", "")
|
|
@@ -3649,10 +4205,6 @@ def _init_function_from_dict(
|
|
|
3649
4205
|
tag = f.get("tag", None)
|
|
3650
4206
|
|
|
3651
4207
|
has_module = _has_module(handler, kind)
|
|
3652
|
-
if not url and "spec" not in f and not has_module:
|
|
3653
|
-
# function must point to a file or a module or have a spec
|
|
3654
|
-
raise ValueError("Function missing a url or a spec or a module")
|
|
3655
|
-
|
|
3656
4208
|
relative_url = url
|
|
3657
4209
|
url, in_context = project.get_item_absolute_path(url)
|
|
3658
4210
|
|
|
@@ -3712,6 +4264,18 @@ def _init_function_from_dict(
|
|
|
3712
4264
|
tag=tag,
|
|
3713
4265
|
)
|
|
3714
4266
|
|
|
4267
|
+
elif kind in mlrun.runtimes.RuntimeKinds.nuclio_runtimes():
|
|
4268
|
+
func = new_function(
|
|
4269
|
+
name,
|
|
4270
|
+
command=relative_url,
|
|
4271
|
+
image=image,
|
|
4272
|
+
kind=kind,
|
|
4273
|
+
handler=handler,
|
|
4274
|
+
tag=tag,
|
|
4275
|
+
)
|
|
4276
|
+
if image and kind != mlrun.runtimes.RuntimeKinds.application:
|
|
4277
|
+
logger.info("Function code not specified, setting entry point to image")
|
|
4278
|
+
func.from_image(image)
|
|
3715
4279
|
else:
|
|
3716
4280
|
raise ValueError(f"Unsupported function url:handler {url}:{handler} or no spec")
|
|
3717
4281
|
|
|
@@ -3732,7 +4296,7 @@ def _init_function_from_obj(
|
|
|
3732
4296
|
func: mlrun.runtimes.BaseRuntime,
|
|
3733
4297
|
project: MlrunProject,
|
|
3734
4298
|
name: typing.Optional[str] = None,
|
|
3735
|
-
) ->
|
|
4299
|
+
) -> tuple[str, mlrun.runtimes.BaseRuntime]:
|
|
3736
4300
|
build = func.spec.build
|
|
3737
4301
|
if project.spec.origin_url:
|
|
3738
4302
|
origin = project.spec.origin_url
|