mlrun 1.6.4rc7__py3-none-any.whl → 1.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
Potentially problematic release: this version of mlrun might be problematic.
- mlrun/__init__.py +11 -1
- mlrun/__main__.py +40 -122
- mlrun/alerts/__init__.py +15 -0
- mlrun/alerts/alert.py +248 -0
- mlrun/api/schemas/__init__.py +5 -4
- mlrun/artifacts/__init__.py +8 -3
- mlrun/artifacts/base.py +47 -257
- mlrun/artifacts/dataset.py +11 -192
- mlrun/artifacts/manager.py +79 -47
- mlrun/artifacts/model.py +31 -159
- mlrun/artifacts/plots.py +23 -380
- mlrun/common/constants.py +74 -1
- mlrun/common/db/sql_session.py +5 -5
- mlrun/common/formatters/__init__.py +21 -0
- mlrun/common/formatters/artifact.py +45 -0
- mlrun/common/formatters/base.py +113 -0
- mlrun/common/formatters/feature_set.py +33 -0
- mlrun/common/formatters/function.py +46 -0
- mlrun/common/formatters/pipeline.py +53 -0
- mlrun/common/formatters/project.py +51 -0
- mlrun/common/formatters/run.py +29 -0
- mlrun/common/helpers.py +12 -3
- mlrun/common/model_monitoring/helpers.py +9 -5
- mlrun/{runtimes → common/runtimes}/constants.py +37 -9
- mlrun/common/schemas/__init__.py +31 -5
- mlrun/common/schemas/alert.py +202 -0
- mlrun/common/schemas/api_gateway.py +196 -0
- mlrun/common/schemas/artifact.py +25 -4
- mlrun/common/schemas/auth.py +16 -5
- mlrun/common/schemas/background_task.py +1 -1
- mlrun/common/schemas/client_spec.py +4 -2
- mlrun/common/schemas/common.py +7 -4
- mlrun/common/schemas/constants.py +3 -0
- mlrun/common/schemas/feature_store.py +74 -44
- mlrun/common/schemas/frontend_spec.py +15 -7
- mlrun/common/schemas/function.py +12 -1
- mlrun/common/schemas/hub.py +11 -18
- mlrun/common/schemas/memory_reports.py +2 -2
- mlrun/common/schemas/model_monitoring/__init__.py +20 -4
- mlrun/common/schemas/model_monitoring/constants.py +123 -42
- mlrun/common/schemas/model_monitoring/grafana.py +13 -9
- mlrun/common/schemas/model_monitoring/model_endpoints.py +101 -54
- mlrun/common/schemas/notification.py +71 -14
- mlrun/common/schemas/object.py +2 -2
- mlrun/{model_monitoring/controller_handler.py → common/schemas/pagination.py} +9 -12
- mlrun/common/schemas/pipeline.py +8 -1
- mlrun/common/schemas/project.py +69 -18
- mlrun/common/schemas/runs.py +7 -1
- mlrun/common/schemas/runtime_resource.py +8 -12
- mlrun/common/schemas/schedule.py +4 -4
- mlrun/common/schemas/tag.py +1 -2
- mlrun/common/schemas/workflow.py +12 -4
- mlrun/common/types.py +14 -1
- mlrun/config.py +154 -69
- mlrun/data_types/data_types.py +6 -1
- mlrun/data_types/spark.py +2 -2
- mlrun/data_types/to_pandas.py +67 -37
- mlrun/datastore/__init__.py +6 -8
- mlrun/datastore/alibaba_oss.py +131 -0
- mlrun/datastore/azure_blob.py +143 -42
- mlrun/datastore/base.py +102 -58
- mlrun/datastore/datastore.py +34 -13
- mlrun/datastore/datastore_profile.py +146 -20
- mlrun/datastore/dbfs_store.py +3 -7
- mlrun/datastore/filestore.py +1 -4
- mlrun/datastore/google_cloud_storage.py +97 -33
- mlrun/datastore/hdfs.py +56 -0
- mlrun/datastore/inmem.py +6 -3
- mlrun/datastore/redis.py +7 -2
- mlrun/datastore/s3.py +34 -12
- mlrun/datastore/snowflake_utils.py +45 -0
- mlrun/datastore/sources.py +303 -111
- mlrun/datastore/spark_utils.py +31 -2
- mlrun/datastore/store_resources.py +9 -7
- mlrun/datastore/storeytargets.py +151 -0
- mlrun/datastore/targets.py +453 -176
- mlrun/datastore/utils.py +72 -58
- mlrun/datastore/v3io.py +6 -1
- mlrun/db/base.py +274 -41
- mlrun/db/factory.py +1 -1
- mlrun/db/httpdb.py +893 -225
- mlrun/db/nopdb.py +291 -33
- mlrun/errors.py +36 -6
- mlrun/execution.py +115 -42
- mlrun/feature_store/__init__.py +0 -2
- mlrun/feature_store/api.py +65 -73
- mlrun/feature_store/common.py +7 -12
- mlrun/feature_store/feature_set.py +76 -55
- mlrun/feature_store/feature_vector.py +39 -31
- mlrun/feature_store/ingestion.py +7 -6
- mlrun/feature_store/retrieval/base.py +16 -11
- mlrun/feature_store/retrieval/dask_merger.py +2 -0
- mlrun/feature_store/retrieval/job.py +13 -4
- mlrun/feature_store/retrieval/local_merger.py +2 -0
- mlrun/feature_store/retrieval/spark_merger.py +24 -32
- mlrun/feature_store/steps.py +45 -34
- mlrun/features.py +11 -21
- mlrun/frameworks/_common/artifacts_library.py +9 -9
- mlrun/frameworks/_common/mlrun_interface.py +5 -5
- mlrun/frameworks/_common/model_handler.py +48 -48
- mlrun/frameworks/_common/plan.py +5 -6
- mlrun/frameworks/_common/producer.py +3 -4
- mlrun/frameworks/_common/utils.py +5 -5
- mlrun/frameworks/_dl_common/loggers/logger.py +6 -7
- mlrun/frameworks/_dl_common/loggers/mlrun_logger.py +9 -9
- mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +23 -47
- mlrun/frameworks/_ml_common/artifacts_library.py +1 -2
- mlrun/frameworks/_ml_common/loggers/logger.py +3 -4
- mlrun/frameworks/_ml_common/loggers/mlrun_logger.py +4 -5
- mlrun/frameworks/_ml_common/model_handler.py +24 -24
- mlrun/frameworks/_ml_common/pkl_model_server.py +2 -2
- mlrun/frameworks/_ml_common/plan.py +2 -2
- mlrun/frameworks/_ml_common/plans/calibration_curve_plan.py +2 -3
- mlrun/frameworks/_ml_common/plans/confusion_matrix_plan.py +2 -3
- mlrun/frameworks/_ml_common/plans/dataset_plan.py +3 -3
- mlrun/frameworks/_ml_common/plans/feature_importance_plan.py +3 -3
- mlrun/frameworks/_ml_common/plans/roc_curve_plan.py +4 -4
- mlrun/frameworks/_ml_common/utils.py +4 -4
- mlrun/frameworks/auto_mlrun/auto_mlrun.py +9 -9
- mlrun/frameworks/huggingface/model_server.py +4 -4
- mlrun/frameworks/lgbm/__init__.py +33 -33
- mlrun/frameworks/lgbm/callbacks/callback.py +2 -4
- mlrun/frameworks/lgbm/callbacks/logging_callback.py +4 -5
- mlrun/frameworks/lgbm/callbacks/mlrun_logging_callback.py +4 -5
- mlrun/frameworks/lgbm/mlrun_interfaces/booster_mlrun_interface.py +1 -3
- mlrun/frameworks/lgbm/mlrun_interfaces/mlrun_interface.py +6 -6
- mlrun/frameworks/lgbm/model_handler.py +10 -10
- mlrun/frameworks/lgbm/model_server.py +6 -6
- mlrun/frameworks/lgbm/utils.py +5 -5
- mlrun/frameworks/onnx/dataset.py +8 -8
- mlrun/frameworks/onnx/mlrun_interface.py +3 -3
- mlrun/frameworks/onnx/model_handler.py +6 -6
- mlrun/frameworks/onnx/model_server.py +7 -7
- mlrun/frameworks/parallel_coordinates.py +6 -6
- mlrun/frameworks/pytorch/__init__.py +18 -18
- mlrun/frameworks/pytorch/callbacks/callback.py +4 -5
- mlrun/frameworks/pytorch/callbacks/logging_callback.py +17 -17
- mlrun/frameworks/pytorch/callbacks/mlrun_logging_callback.py +11 -11
- mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +23 -29
- mlrun/frameworks/pytorch/callbacks_handler.py +38 -38
- mlrun/frameworks/pytorch/mlrun_interface.py +20 -20
- mlrun/frameworks/pytorch/model_handler.py +17 -17
- mlrun/frameworks/pytorch/model_server.py +7 -7
- mlrun/frameworks/sklearn/__init__.py +13 -13
- mlrun/frameworks/sklearn/estimator.py +4 -4
- mlrun/frameworks/sklearn/metrics_library.py +14 -14
- mlrun/frameworks/sklearn/mlrun_interface.py +16 -9
- mlrun/frameworks/sklearn/model_handler.py +2 -2
- mlrun/frameworks/tf_keras/__init__.py +10 -7
- mlrun/frameworks/tf_keras/callbacks/logging_callback.py +15 -15
- mlrun/frameworks/tf_keras/callbacks/mlrun_logging_callback.py +11 -11
- mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +19 -23
- mlrun/frameworks/tf_keras/mlrun_interface.py +9 -11
- mlrun/frameworks/tf_keras/model_handler.py +14 -14
- mlrun/frameworks/tf_keras/model_server.py +6 -6
- mlrun/frameworks/xgboost/__init__.py +13 -13
- mlrun/frameworks/xgboost/model_handler.py +6 -6
- mlrun/k8s_utils.py +61 -17
- mlrun/launcher/__init__.py +1 -1
- mlrun/launcher/base.py +16 -15
- mlrun/launcher/client.py +13 -11
- mlrun/launcher/factory.py +1 -1
- mlrun/launcher/local.py +23 -13
- mlrun/launcher/remote.py +17 -10
- mlrun/lists.py +7 -6
- mlrun/model.py +478 -103
- mlrun/model_monitoring/__init__.py +1 -1
- mlrun/model_monitoring/api.py +163 -371
- mlrun/{runtimes/mpijob/v1alpha1.py → model_monitoring/applications/__init__.py} +9 -15
- mlrun/model_monitoring/applications/_application_steps.py +188 -0
- mlrun/model_monitoring/applications/base.py +108 -0
- mlrun/model_monitoring/applications/context.py +341 -0
- mlrun/model_monitoring/{evidently_application.py → applications/evidently_base.py} +27 -22
- mlrun/model_monitoring/applications/histogram_data_drift.py +354 -0
- mlrun/model_monitoring/applications/results.py +99 -0
- mlrun/model_monitoring/controller.py +131 -278
- mlrun/model_monitoring/db/__init__.py +18 -0
- mlrun/model_monitoring/db/stores/__init__.py +136 -0
- mlrun/model_monitoring/db/stores/base/__init__.py +15 -0
- mlrun/model_monitoring/db/stores/base/store.py +213 -0
- mlrun/model_monitoring/db/stores/sqldb/__init__.py +13 -0
- mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +71 -0
- mlrun/model_monitoring/db/stores/sqldb/models/base.py +190 -0
- mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +103 -0
- mlrun/model_monitoring/{stores/models/mysql.py → db/stores/sqldb/models/sqlite.py} +19 -13
- mlrun/model_monitoring/db/stores/sqldb/sql_store.py +659 -0
- mlrun/model_monitoring/db/stores/v3io_kv/__init__.py +13 -0
- mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +726 -0
- mlrun/model_monitoring/db/tsdb/__init__.py +105 -0
- mlrun/model_monitoring/db/tsdb/base.py +448 -0
- mlrun/model_monitoring/db/tsdb/helpers.py +30 -0
- mlrun/model_monitoring/db/tsdb/tdengine/__init__.py +15 -0
- mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +279 -0
- mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +42 -0
- mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +507 -0
- mlrun/model_monitoring/db/tsdb/v3io/__init__.py +15 -0
- mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +158 -0
- mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +849 -0
- mlrun/model_monitoring/features_drift_table.py +134 -106
- mlrun/model_monitoring/helpers.py +199 -55
- mlrun/model_monitoring/metrics/__init__.py +13 -0
- mlrun/model_monitoring/metrics/histogram_distance.py +127 -0
- mlrun/model_monitoring/model_endpoint.py +3 -2
- mlrun/model_monitoring/stream_processing.py +131 -398
- mlrun/model_monitoring/tracking_policy.py +9 -2
- mlrun/model_monitoring/writer.py +161 -125
- mlrun/package/__init__.py +6 -6
- mlrun/package/context_handler.py +5 -5
- mlrun/package/packager.py +7 -7
- mlrun/package/packagers/default_packager.py +8 -8
- mlrun/package/packagers/numpy_packagers.py +15 -15
- mlrun/package/packagers/pandas_packagers.py +5 -5
- mlrun/package/packagers/python_standard_library_packagers.py +10 -10
- mlrun/package/packagers_manager.py +19 -23
- mlrun/package/utils/_formatter.py +6 -6
- mlrun/package/utils/_pickler.py +2 -2
- mlrun/package/utils/_supported_format.py +4 -4
- mlrun/package/utils/log_hint_utils.py +2 -2
- mlrun/package/utils/type_hint_utils.py +4 -9
- mlrun/platforms/__init__.py +11 -10
- mlrun/platforms/iguazio.py +24 -203
- mlrun/projects/operations.py +52 -25
- mlrun/projects/pipelines.py +191 -197
- mlrun/projects/project.py +1227 -400
- mlrun/render.py +16 -19
- mlrun/run.py +209 -184
- mlrun/runtimes/__init__.py +83 -15
- mlrun/runtimes/base.py +51 -35
- mlrun/runtimes/daskjob.py +17 -10
- mlrun/runtimes/databricks_job/databricks_cancel_task.py +1 -1
- mlrun/runtimes/databricks_job/databricks_runtime.py +8 -7
- mlrun/runtimes/databricks_job/databricks_wrapper.py +1 -1
- mlrun/runtimes/funcdoc.py +1 -29
- mlrun/runtimes/function_reference.py +1 -1
- mlrun/runtimes/kubejob.py +34 -128
- mlrun/runtimes/local.py +40 -11
- mlrun/runtimes/mpijob/__init__.py +0 -20
- mlrun/runtimes/mpijob/abstract.py +9 -10
- mlrun/runtimes/mpijob/v1.py +1 -1
- mlrun/{model_monitoring/stores/models/sqlite.py → runtimes/nuclio/__init__.py} +7 -9
- mlrun/runtimes/nuclio/api_gateway.py +769 -0
- mlrun/runtimes/nuclio/application/__init__.py +15 -0
- mlrun/runtimes/nuclio/application/application.py +758 -0
- mlrun/runtimes/nuclio/application/reverse_proxy.go +95 -0
- mlrun/runtimes/{function.py → nuclio/function.py} +200 -83
- mlrun/runtimes/{nuclio.py → nuclio/nuclio.py} +6 -6
- mlrun/runtimes/{serving.py → nuclio/serving.py} +65 -68
- mlrun/runtimes/pod.py +281 -101
- mlrun/runtimes/remotesparkjob.py +12 -9
- mlrun/runtimes/sparkjob/spark3job.py +67 -51
- mlrun/runtimes/utils.py +41 -75
- mlrun/secrets.py +9 -5
- mlrun/serving/__init__.py +8 -1
- mlrun/serving/remote.py +2 -7
- mlrun/serving/routers.py +85 -69
- mlrun/serving/server.py +69 -44
- mlrun/serving/states.py +209 -36
- mlrun/serving/utils.py +22 -14
- mlrun/serving/v1_serving.py +6 -7
- mlrun/serving/v2_serving.py +129 -54
- mlrun/track/tracker.py +2 -1
- mlrun/track/tracker_manager.py +3 -3
- mlrun/track/trackers/mlflow_tracker.py +6 -2
- mlrun/utils/async_http.py +6 -8
- mlrun/utils/azure_vault.py +1 -1
- mlrun/utils/clones.py +1 -2
- mlrun/utils/condition_evaluator.py +3 -3
- mlrun/utils/db.py +21 -3
- mlrun/utils/helpers.py +405 -225
- mlrun/utils/http.py +3 -6
- mlrun/utils/logger.py +112 -16
- mlrun/utils/notifications/notification/__init__.py +17 -13
- mlrun/utils/notifications/notification/base.py +50 -2
- mlrun/utils/notifications/notification/console.py +2 -0
- mlrun/utils/notifications/notification/git.py +24 -1
- mlrun/utils/notifications/notification/ipython.py +3 -1
- mlrun/utils/notifications/notification/slack.py +96 -21
- mlrun/utils/notifications/notification/webhook.py +59 -2
- mlrun/utils/notifications/notification_pusher.py +149 -30
- mlrun/utils/regex.py +9 -0
- mlrun/utils/retryer.py +208 -0
- mlrun/utils/singleton.py +1 -1
- mlrun/utils/v3io_clients.py +4 -6
- mlrun/utils/version/version.json +2 -2
- mlrun/utils/version/version.py +2 -6
- mlrun-1.7.0.dist-info/METADATA +378 -0
- mlrun-1.7.0.dist-info/RECORD +351 -0
- {mlrun-1.6.4rc7.dist-info → mlrun-1.7.0.dist-info}/WHEEL +1 -1
- mlrun/feature_store/retrieval/conversion.py +0 -273
- mlrun/kfpops.py +0 -868
- mlrun/model_monitoring/application.py +0 -310
- mlrun/model_monitoring/batch.py +0 -1095
- mlrun/model_monitoring/prometheus.py +0 -219
- mlrun/model_monitoring/stores/__init__.py +0 -111
- mlrun/model_monitoring/stores/kv_model_endpoint_store.py +0 -576
- mlrun/model_monitoring/stores/model_endpoint_store.py +0 -147
- mlrun/model_monitoring/stores/models/__init__.py +0 -27
- mlrun/model_monitoring/stores/models/base.py +0 -84
- mlrun/model_monitoring/stores/sql_model_endpoint_store.py +0 -384
- mlrun/platforms/other.py +0 -306
- mlrun-1.6.4rc7.dist-info/METADATA +0 -272
- mlrun-1.6.4rc7.dist-info/RECORD +0 -314
- {mlrun-1.6.4rc7.dist-info → mlrun-1.7.0.dist-info}/LICENSE +0 -0
- {mlrun-1.6.4rc7.dist-info → mlrun-1.7.0.dist-info}/entry_points.txt +0 -0
- {mlrun-1.6.4rc7.dist-info → mlrun-1.7.0.dist-info}/top_level.txt +0 -0
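
The per-file listing above can be approximated locally by comparing the member names of the two wheels (a .whl file is a zip archive). The following is a minimal sketch, assuming both .whl files have already been downloaded into the working directory; it is not the tool that produced this listing:

import zipfile

# Hypothetical local approximation of the file listing above: compare the
# archive member names of the old and new wheels.
old_members = set(zipfile.ZipFile("mlrun-1.6.4rc7-py3-none-any.whl").namelist())
new_members = set(zipfile.ZipFile("mlrun-1.7.0-py3-none-any.whl").namelist())

for path in sorted(new_members - old_members):
    print(f"added   {path}")
for path in sorted(old_members - new_members):
    print(f"removed {path}")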
mlrun/frameworks/tf_keras/model_handler.py CHANGED

@@ -15,7 +15,7 @@
 import os
 import shutil
 import zipfile
-from typing import
+from typing import Union

 import numpy as np
 import tensorflow as tf
@@ -67,8 +67,8 @@ class TFKerasModelHandler(DLModelHandler):
         model_name: str = None,
         model_format: str = ModelFormats.SAVED_MODEL,
         context: mlrun.MLClientCtx = None,
-        modules_map: Union[
-        custom_objects_map: Union[
+        modules_map: Union[dict[str, Union[None, str, list[str]]], str] = None,
+        custom_objects_map: Union[dict[str, Union[str, list[str]]], str] = None,
         custom_objects_directory: str = None,
         save_traces: bool = False,
         **kwargs,
@@ -174,7 +174,7 @@ class TFKerasModelHandler(DLModelHandler):
         self._weights_file = None  # type: str

         # Setup the base handler class:
-        super(
+        super().__init__(
             model=model,
             model_path=model_path,
             model_name=model_name,
@@ -190,8 +190,8 @@ class TFKerasModelHandler(DLModelHandler):

     def set_labels(
         self,
-        to_add:
-        to_remove:
+        to_add: dict[str, Union[str, int, float]] = None,
+        to_remove: list[str] = None,
     ):
         """
         Update the labels dictionary of this model artifact. There are required labels that cannot be edited or removed.
@@ -200,7 +200,7 @@ class TFKerasModelHandler(DLModelHandler):
         :param to_remove: A list of labels keys to remove.
         """
         # Update the user's labels:
-        super(
+        super().set_labels(to_add=to_add, to_remove=to_remove)

         # Set the required labels:
         self._labels[self._LabelKeys.MODEL_FORMAT] = self._model_format
@@ -211,7 +211,7 @@ class TFKerasModelHandler(DLModelHandler):
     @without_mlrun_interface(interface=TFKerasMLRunInterface)
     def save(
         self, output_path: str = None, **kwargs
-    ) -> Union[
+    ) -> Union[dict[str, Artifact], None]:
         """
         Save the handled model at the given output path. If a MLRun context is available, the saved model files will be
         logged and returned as artifacts.
@@ -221,7 +221,7 @@ class TFKerasModelHandler(DLModelHandler):

         :return The saved model additional artifacts (if needed) dictionary if context is available and None otherwise.
         """
-        super(
+        super().save(output_path=output_path)

         # Setup the returning model artifacts list:
         artifacts = {}  # type: Dict[str, Artifact]
@@ -291,7 +291,7 @@ class TFKerasModelHandler(DLModelHandler):
                 "Loading a model using checkpoint is not yet implemented."
             )

-        super(
+        super().load()

         # ModelFormats.H5 - Load from a h5 file:
         if self._model_format == TFKerasModelHandler.ModelFormats.H5:
@@ -308,7 +308,7 @@ class TFKerasModelHandler(DLModelHandler):
         # ModelFormats.JSON_ARCHITECTURE_H5_WEIGHTS - Load from a json architecture file and a h5 weights file:
         else:
             # Load the model architecture (json):
-            with open(self._model_file
+            with open(self._model_file) as json_file:
                 model_architecture = json_file.read()
             self._model = keras.models.model_from_json(
                 model_architecture, custom_objects=self._custom_objects
@@ -321,7 +321,7 @@ class TFKerasModelHandler(DLModelHandler):
         model_name: str = None,
         optimize: bool = True,
         input_signature: Union[
-
+            list[tf.TensorSpec], list[np.ndarray], tf.TensorSpec, np.ndarray
         ] = None,
         output_path: str = None,
         log: bool = None,
@@ -487,7 +487,7 @@ class TFKerasModelHandler(DLModelHandler):
         ].local()

         # Continue collecting from abstract class:
-        super(
+        super()._collect_files_from_store_object()

     def _collect_files_from_local_path(self):
         """
@@ -554,7 +554,7 @@ class TFKerasModelHandler(DLModelHandler):
         """
         # Supported types:
         if isinstance(sample, np.ndarray):
-            return super(
+            return super()._read_sample(sample=sample)
         elif isinstance(sample, tf.TensorSpec):
             return Feature(
                 name=sample.name,
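
Most edits in this file follow two mechanical patterns that repeat throughout the 1.7.0 diff: typing annotations move from the typing-module aliases to built-in generics (PEP 585), and two-argument super(...) calls become the zero-argument form. A small standalone illustration of both patterns, not mlrun code:

from typing import Union


class BaseHandler:
    def save(self, output_path: str = None) -> None:
        print(f"saving to {output_path}")


class Handler(BaseHandler):
    # Built-in generics replace typing.Dict / typing.List, so only Union
    # still needs to be imported from typing.
    def __init__(self, modules_map: Union[dict[str, list[str]], str] = None):
        self.modules_map = modules_map

    def save(self, output_path: str = None) -> None:
        # Zero-argument super() replaces super(Handler, self).
        super().save(output_path=output_path)


Handler({"my_module": ["MyModel"]}).save("/tmp/model")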
mlrun/frameworks/tf_keras/model_server.py CHANGED

@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-from typing import Any,
+from typing import Any, Union

 import numpy as np
 from tensorflow import keras
@@ -36,8 +36,8 @@ class TFKerasModelServer(V2ModelServer):
         model: keras.Model = None,
         model_path: str = None,
         model_name: str = None,
-        modules_map: Union[
-        custom_objects_map: Union[
+        modules_map: Union[dict[str, Union[None, str, list[str]]], str] = None,
+        custom_objects_map: Union[dict[str, Union[str, list[str]]], str] = None,
         custom_objects_directory: str = None,
         model_format: str = TFKerasModelHandler.ModelFormats.SAVED_MODEL,
         to_list: bool = False,
@@ -103,7 +103,7 @@ class TFKerasModelServer(V2ModelServer):
         :param protocol: -
         :param class_args: -
         """
-        super(
+        super().__init__(
             context=context,
             name=name,
             model_path=model_path,
@@ -146,7 +146,7 @@ class TFKerasModelServer(V2ModelServer):
         self._model_handler.load()
         self.model = self._model_handler.model

-    def predict(self, request:
+    def predict(self, request: dict[str, Any]) -> Union[np.ndarray, list]:
         """
         Infer the inputs through the model using 'keras.Model.predict' and return its output. The inferred data will be
         read from the "inputs" key of the request.
@@ -165,7 +165,7 @@ class TFKerasModelServer(V2ModelServer):
         # Return as list if required:
         return prediction if not self.to_list else prediction.tolist()

-    def explain(self, request:
+    def explain(self, request: dict[str, Any]) -> str:
         """
         Return a string explaining what model is being serve in this serving function and the function name.

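
The updated predict and explain signatures above accept a plain dict and, per the docstring, read the model inputs from the request's "inputs" key. A minimal sketch of that contract with made-up sample values; the echo logic here merely stands in for the real keras.Model.predict call:

from typing import Any, Union

import numpy as np


def predict(request: dict[str, Any]) -> Union[np.ndarray, list]:
    # Mirrors only the documented contract: inputs are taken from the
    # "inputs" key; a real TFKerasModelServer would run the Keras model here.
    inputs = np.asarray(request["inputs"])
    return inputs.tolist()


print(predict({"inputs": [[0.1, 0.2, 0.3]]}))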
mlrun/frameworks/xgboost/__init__.py CHANGED

@@ -13,7 +13,7 @@
 # limitations under the License.
 #
 # flake8: noqa - this is until we take care of the F401 violations with respect to __all__ & sphinx
-from typing import
+from typing import Union

 import xgboost as xgb

@@ -35,25 +35,25 @@ def apply_mlrun(
     model_name: str = "model",
     tag: str = "",
     model_path: str = None,
-    modules_map: Union[
-    custom_objects_map: Union[
+    modules_map: Union[dict[str, Union[None, str, list[str]]], str] = None,
+    custom_objects_map: Union[dict[str, Union[str, list[str]]], str] = None,
     custom_objects_directory: str = None,
     context: mlrun.MLClientCtx = None,
-    artifacts: Union[
+    artifacts: Union[list[MLPlan], list[str], dict[str, dict]] = None,
     metrics: Union[
-
-
-
+        list[Metric],
+        list[XGBoostTypes.MetricEntryType],
+        dict[str, XGBoostTypes.MetricEntryType],
     ] = None,
     x_test: XGBoostTypes.DatasetType = None,
     y_test: XGBoostTypes.DatasetType = None,
     sample_set: Union[XGBoostTypes.DatasetType, mlrun.DataItem, str] = None,
-    y_columns: Union[
+    y_columns: Union[list[str], list[int]] = None,
     feature_vector: str = None,
-    feature_weights:
-    labels:
-    parameters:
-    extra_data:
+    feature_weights: list[float] = None,
+    labels: dict[str, Union[str, int, float]] = None,
+    parameters: dict[str, Union[str, int, float]] = None,
+    extra_data: dict[str, XGBoostTypes.ExtraDataType] = None,
     auto_log: bool = True,
     **kwargs,
 ) -> XGBoostModelHandler:
@@ -90,7 +90,7 @@ def apply_mlrun(

             {
                 "/.../custom_model.py": "MyModel",
-                "/.../custom_objects.py": ["object1", "object2"]
+                "/.../custom_objects.py": ["object1", "object2"],
             }

             All the paths will be accessed from the given 'custom_objects_directory', meaning
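
For context, a hedged call sketch of the apply_mlrun entry point whose signature appears above. The dataset is synthetic, the keyword values are illustrative, and it assumes a model argument precedes model_name in the full signature, which this diff does not show:

import numpy as np
import xgboost as xgb

from mlrun.frameworks.xgboost import apply_mlrun

# Illustrative only: tiny synthetic data, then auto-logging via apply_mlrun
# using keyword arguments visible in the hunk above (model_name, x_test, ...).
x = np.random.rand(32, 4)
y = np.random.randint(0, 2, size=32)

model = xgb.XGBClassifier(n_estimators=5)
apply_mlrun(model=model, model_name="my_model", x_test=x, y_test=y)
model.fit(x, y)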
mlrun/frameworks/xgboost/model_handler.py CHANGED

@@ -14,7 +14,7 @@
 #
 import os
 import pickle
-from typing import
+from typing import Union

 import cloudpickle

@@ -47,8 +47,8 @@ class XGBoostModelHandler(MLModelHandler):
         model: XGBoostTypes.ModelType = None,
         model_path: str = None,
         model_name: str = None,
-        modules_map: Union[
-        custom_objects_map: Union[
+        modules_map: Union[dict[str, Union[None, str, list[str]]], str] = None,
+        custom_objects_map: Union[dict[str, Union[str, list[str]]], str] = None,
         custom_objects_directory: str = None,
         context: mlrun.MLClientCtx = None,
         model_format: str = ModelFormats.PKL,
@@ -124,7 +124,7 @@ class XGBoostModelHandler(MLModelHandler):
         # Store the configuration:
         self._model_format = model_format

-        super(
+        super().__init__(
             model=model,
             model_path=model_path,
             model_name=model_name,
@@ -162,7 +162,7 @@ class XGBoostModelHandler(MLModelHandler):

         :return The saved model additional artifacts (if needed) dictionary if context is available and None otherwise.
         """
-        super(
+        super().save(output_path=output_path)

         # ModelFormats.PICKLE - Save from a pkl file:
         if self._model_format == XGBoostModelHandler.ModelFormats.PKL:
@@ -177,7 +177,7 @@ class XGBoostModelHandler(MLModelHandler):
         Load the specified model in this handler. Additional parameters for the class initializer can be passed via the
         kwargs dictionary.
         """
-        super(
+        super().load()

         # ModelFormats.PICKLE - Load from a pkl file:
         if self._model_format == XGBoostModelHandler.ModelFormats.PKL:
mlrun/k8s_utils.py CHANGED

@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import re
-import
+import warnings

 import kubernetes.client

@@ -38,7 +38,7 @@ def is_running_inside_kubernetes_cluster():

 def generate_preemptible_node_selector_requirements(
     node_selector_operator: str,
-) ->
+) -> list[kubernetes.client.V1NodeSelectorRequirement]:
     """
     Generate node selector requirements based on the pre-configured node selector of the preemptible nodes.
     node selector operator represents a key's relationship to a set of values.
@@ -62,7 +62,7 @@ generate_preemptible_node_selector_requirements(


 def generate_preemptible_nodes_anti_affinity_terms() -> (
-
+    list[kubernetes.client.V1NodeSelectorTerm]
 ):
     """
     Generate node selector term containing anti-affinity expressions based on the
@@ -84,7 +84,7 @@ def generate_preemptible_nodes_anti_affinity_terms() -> (


 def generate_preemptible_nodes_affinity_terms() -> (
-
+    list[kubernetes.client.V1NodeSelectorTerm]
 ):
     """
     Use for purpose of scheduling on node having at least one of the node selectors.
@@ -105,7 +105,7 @@ def generate_preemptible_nodes_affinity_terms() -> (
     return node_selector_terms


-def generate_preemptible_tolerations() ->
+def generate_preemptible_tolerations() -> list[kubernetes.client.V1Toleration]:
     tolerations = mlconfig.get_preemptible_tolerations()

     toleration_objects = []
@@ -134,7 +134,7 @@ def sanitize_label_value(value: str) -> str:
     return re.sub(r"([^a-zA-Z0-9_.-]|^[^a-zA-Z0-9]|[^a-zA-Z0-9]$)", "-", value[:63])


-def verify_label_key(key: str):
+def verify_label_key(key: str, allow_k8s_prefix: bool = False):
     """
     Verify that the label key is valid for Kubernetes.
     Refer to https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/#syntax-and-character-set
@@ -142,22 +142,15 @@
     if not key:
         raise mlrun.errors.MLRunInvalidArgumentError("label key cannot be empty")

-    mlrun.utils.helpers.verify_field_regex(
-        f"project.metadata.labels.'{key}'",
-        key,
-        mlrun.utils.regex.k8s_character_limit,
-    )
-
-    if key.startswith("k8s.io/") or key.startswith("kubernetes.io/"):
-        raise mlrun.errors.MLRunInvalidArgumentError(
-            "Labels cannot start with 'k8s.io/' or 'kubernetes.io/'"
-        )
-
     parts = key.split("/")
     if len(parts) == 1:
         name = parts[0]
     elif len(parts) == 2:
         prefix, name = parts
+        if len(name) == 0:
+            raise mlrun.errors.MLRunInvalidArgumentError(
+                "Label key name cannot be empty when a prefix is set"
+            )
         if len(prefix) == 0:
             raise mlrun.errors.MLRunInvalidArgumentError(
                 "Label key prefix cannot be empty"
@@ -174,12 +167,28 @@ def verify_label_key(key: str):
             "Label key can only contain one '/'"
         )

+    mlrun.utils.helpers.verify_field_regex(
+        f"project.metadata.labels.'{key}'",
+        name,
+        mlrun.utils.regex.k8s_character_limit,
+    )
     mlrun.utils.helpers.verify_field_regex(
         f"project.metadata.labels.'{key}'",
         name,
         mlrun.utils.regex.qualified_name,
     )

+    # Allow the use of Kubernetes reserved prefixes ('k8s.io/' or 'kubernetes.io/')
+    # only when setting node selectors, not when adding new labels.
+    if (
+        key.startswith("k8s.io/")
+        or key.startswith("kubernetes.io/")
+        and not allow_k8s_prefix
+    ):
+        raise mlrun.errors.MLRunInvalidArgumentError(
+            "Labels cannot start with 'k8s.io/' or 'kubernetes.io/'"
+        )
+

 def verify_label_value(value, label_key):
     mlrun.utils.helpers.verify_field_regex(
@@ -187,3 +196,38 @@ def verify_label_value(value, label_key):
         value,
         mlrun.utils.regex.label_value,
     )
+
+
+def validate_node_selectors(
+    node_selectors: dict[str, str], raise_on_error: bool = True
+) -> bool:
+    """
+    Ensures that user-defined node selectors adhere to Kubernetes label standards:
+    - Validates that each key conforms to Kubernetes naming conventions, with specific rules for name and prefix.
+    - Ensures values comply with Kubernetes label value rules.
+    - If raise_on_error is True, raises errors for invalid selectors.
+    - If raise_on_error is False, logs warnings for invalid selectors.
+    """
+
+    # Helper function for handling errors or warnings
+    def handle_invalid(message):
+        if raise_on_error:
+            raise
+        else:
+            warnings.warn(
+                f"{message}\n"
+                f"The node selector you've set does not meet the validation rules for the current Kubernetes version. "
+                f"Please note that invalid node selectors may cause issues with function scheduling."
+            )
+
+    node_selectors = node_selectors or {}
+    for key, value in node_selectors.items():
+        try:
+            verify_label_key(key, allow_k8s_prefix=True)
+            verify_label_value(value, label_key=key)
+        except mlrun.errors.MLRunInvalidArgumentError as err:
+            # An error or warning is raised by handle_invalid due to validation failure.
+            # Returning False indicates validation failed, allowing us to exit the function.
+            handle_invalid(str(err))
+            return False
+    return True
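
The new validate_node_selectors helper shown above either raises or only warns, depending on raise_on_error, and returns a bool; because it calls verify_label_key with allow_k8s_prefix=True, reserved kubernetes.io/ prefixes are accepted for node selectors. A brief usage sketch, assuming it is called as a module-level function of mlrun.k8s_utils as added in the diff:

import mlrun.k8s_utils

# Strict mode: an invalid key or value raises MLRunInvalidArgumentError.
mlrun.k8s_utils.validate_node_selectors(
    {"kubernetes.io/arch": "amd64", "disktype": "ssd"}, raise_on_error=True
)

# Lenient mode: an invalid selector produces a warning and the function
# returns False instead of raising.
ok = mlrun.k8s_utils.validate_node_selectors({"bad key!": "ssd"}, raise_on_error=False)
print(ok)  # False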
mlrun/launcher/__init__.py CHANGED
mlrun/launcher/base.py CHANGED

@@ -1,4 +1,4 @@
-# Copyright 2023
+# Copyright 2023 Iguazio
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -16,12 +16,13 @@ import ast
 import copy
 import os
 import uuid
-from typing import Any, Callable,
+from typing import Any, Callable, Optional, Union
+
+import mlrun_pipelines.common.ops

 import mlrun.common.schemas
 import mlrun.config
 import mlrun.errors
-import mlrun.kfpops
 import mlrun.lists
 import mlrun.model
 import mlrun.runtimes
@@ -53,7 +54,7 @@ class BaseLauncher(abc.ABC):
         name: Optional[str] = "",
         project: Optional[str] = "",
         params: Optional[dict] = None,
-        inputs: Optional[
+        inputs: Optional[dict[str, str]] = None,
         out_path: Optional[str] = "",
         workdir: Optional[str] = "",
         artifact_path: Optional[str] = "",
@@ -61,16 +62,16 @@ class BaseLauncher(abc.ABC):
         schedule: Optional[
             Union[str, mlrun.common.schemas.schedule.ScheduleCronTrigger]
         ] = None,
-        hyperparams:
+        hyperparams: dict[str, list] = None,
         hyper_param_options: Optional[mlrun.model.HyperParamOptions] = None,
         verbose: Optional[bool] = None,
         scrape_metrics: Optional[bool] = None,
         local_code_path: Optional[str] = None,
         auto_build: Optional[bool] = None,
-        param_file_secrets: Optional[
-        notifications: Optional[
-        returns: Optional[
-        state_thresholds: Optional[
+        param_file_secrets: Optional[dict[str, str]] = None,
+        notifications: Optional[list[mlrun.model.Notification]] = None,
+        returns: Optional[list[Union[str, dict[str, str]]]] = None,
+        state_thresholds: Optional[dict[str, int]] = None,
     ) -> "mlrun.run.RunObject":
         """run the function from the server/client[local/remote]"""
         pass
@@ -175,7 +176,7 @@ class BaseLauncher(abc.ABC):
         if message:
             logger.warning(message, output_path=run.spec.output_path)

-    def _validate_run_params(self, parameters:
+    def _validate_run_params(self, parameters: dict[str, Any]):
         for param_name, param_value in parameters.items():
             if isinstance(param_value, dict):
                 # if the parameter is a dict, we might have some nested parameters,
@@ -237,8 +238,8 @@ class BaseLauncher(abc.ABC):
         out_path=None,
         artifact_path=None,
         workdir=None,
-        notifications:
-        state_thresholds: Optional[
+        notifications: list[mlrun.model.Notification] = None,
+        state_thresholds: Optional[dict[str, int]] = None,
     ):
         run.spec.handler = (
             handler or run.spec.handler or runtime.spec.default_handler or ""
@@ -353,7 +354,7 @@ class BaseLauncher(abc.ABC):
             or {}
         )
         state_thresholds = (
-            mlrun.
+            mlrun.mlconf.function.spec.state_thresholds.default.to_dict()
             | state_thresholds
         )
         run.spec.state_thresholds = state_thresholds or run.spec.state_thresholds
@@ -390,7 +391,7 @@ class BaseLauncher(abc.ABC):
             return

         if result and runtime.kfp and err is None:
-
+            mlrun_pipelines.common.ops.write_kfpmeta(result)

         self._log_track_results(runtime.is_child, result, run)

@@ -403,7 +404,7 @@ class BaseLauncher(abc.ABC):
         )
         if (
             run.status.state
-            in mlrun.runtimes.constants.RunStates.error_and_abortion_states()
+            in mlrun.common.runtimes.constants.RunStates.error_and_abortion_states()
         ):
             if runtime._is_remote and not runtime.is_child:
                 logger.error(
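
One behavioural detail in the state_thresholds hunk above: the defaults taken from mlrun.mlconf are merged with the user-supplied dict via the | union operator, so user-provided keys override the defaults. A standalone illustration of that merge semantics; the threshold names and values here are assumptions, not mlrun's actual defaults:

# PEP 584 dict union: on key collisions the right-hand operand wins,
# mirroring `defaults | state_thresholds` in the hunk above.
default_thresholds = {"pending_scheduled": "1h", "executing": "24h"}
user_thresholds = {"executing": "2h"}

merged = default_thresholds | user_thresholds
print(merged)  # {'pending_scheduled': '1h', 'executing': '2h'}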
mlrun/launcher/client.py CHANGED

@@ -1,4 +1,4 @@
-# Copyright 2023
+# Copyright 2023 Iguazio
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,14 +14,15 @@
 import abc
 from typing import Optional

-import IPython
+import IPython.display

+import mlrun.common.constants as mlrun_constants
 import mlrun.errors
 import mlrun.launcher.base as launcher
 import mlrun.lists
 import mlrun.model
 import mlrun.runtimes
-
+import mlrun.utils


 class ClientBaseLauncher(launcher.BaseLauncher, abc.ABC):
@@ -47,7 +48,7 @@ class ClientBaseLauncher(launcher.BaseLauncher, abc.ABC):
         If build is needed, set the image as the base_image for the build.
         If image is not given set the default one.
         """
-        if runtime.kind in mlrun.runtimes.RuntimeKinds.
+        if runtime.kind in mlrun.runtimes.RuntimeKinds.pure_nuclio_deployed_runtimes():
             return

         require_build = runtime.requires_build()
@@ -69,13 +70,14 @@ class ClientBaseLauncher(launcher.BaseLauncher, abc.ABC):
     def _store_function(
         runtime: "mlrun.runtimes.BaseRuntime", run: "mlrun.run.RunObject"
     ):
-        run.metadata.labels[
+        run.metadata.labels[mlrun_constants.MLRunInternalLabels.kind] = runtime.kind
         mlrun.runtimes.utils.enrich_run_labels(
-            run.metadata.labels, [mlrun.runtimes.constants.RunLabels.owner]
+            run.metadata.labels, [mlrun.common.runtimes.constants.RunLabels.owner]
        )
         if run.spec.output_path:
             run.spec.output_path = run.spec.output_path.replace(
-                "{{run.user}}",
+                "{{run.user}}",
+                run.metadata.labels[mlrun_constants.MLRunInternalLabels.owner],
             )
         db = runtime._get_db()
         if db and runtime.kind != "handler":
@@ -126,10 +128,10 @@ class ClientBaseLauncher(launcher.BaseLauncher, abc.ABC):
             if result:
                 results_tbl.append(result)
             else:
-                logger.info("no returned result (job may still be in progress)")
+                mlrun.utils.logger.info("no returned result (job may still be in progress)")
                 results_tbl.append(run.to_dict())

-        if mlrun.utils.
+        if mlrun.utils.is_jupyter and mlrun.mlconf.ipython_widget:
             results_tbl.show()
             print()
             ui_url = mlrun.utils.get_ui_url(project, uid)
@@ -145,9 +147,9 @@
             project_flag = f"-p {project}" if project else ""
             info_cmd = f"mlrun get run {uid} {project_flag}"
             logs_cmd = f"mlrun logs {uid} {project_flag}"
-            logger.info(
+            mlrun.utils.logger.info(
                 "To track results use the CLI", info_cmd=info_cmd, logs_cmd=logs_cmd
             )
             ui_url = mlrun.utils.get_ui_url(project, uid)
             if ui_url:
-                logger.info("Or click for UI", ui_url=ui_url)
+                mlrun.utils.logger.info("Or click for UI", ui_url=ui_url)
mlrun/launcher/factory.py CHANGED