mlrun 1.7.2rc3__py3-none-any.whl → 1.8.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of mlrun might be problematic.
- mlrun/__init__.py +14 -12
- mlrun/__main__.py +3 -3
- mlrun/alerts/alert.py +19 -12
- mlrun/artifacts/__init__.py +0 -2
- mlrun/artifacts/base.py +34 -11
- mlrun/artifacts/dataset.py +16 -16
- mlrun/artifacts/manager.py +13 -13
- mlrun/artifacts/model.py +66 -53
- mlrun/common/constants.py +6 -0
- mlrun/common/formatters/__init__.py +1 -0
- mlrun/common/formatters/feature_set.py +1 -0
- mlrun/common/formatters/function.py +1 -0
- mlrun/common/formatters/model_endpoint.py +30 -0
- mlrun/common/formatters/pipeline.py +1 -2
- mlrun/common/model_monitoring/__init__.py +0 -3
- mlrun/common/model_monitoring/helpers.py +1 -1
- mlrun/common/runtimes/constants.py +1 -2
- mlrun/common/schemas/__init__.py +4 -2
- mlrun/common/schemas/artifact.py +0 -6
- mlrun/common/schemas/common.py +50 -0
- mlrun/common/schemas/model_monitoring/__init__.py +8 -1
- mlrun/common/schemas/model_monitoring/constants.py +62 -12
- mlrun/common/schemas/model_monitoring/model_endpoint_v2.py +149 -0
- mlrun/common/schemas/model_monitoring/model_endpoints.py +21 -5
- mlrun/common/schemas/partition.py +122 -0
- mlrun/config.py +43 -15
- mlrun/data_types/__init__.py +0 -2
- mlrun/data_types/data_types.py +0 -1
- mlrun/data_types/infer.py +3 -1
- mlrun/data_types/spark.py +4 -4
- mlrun/data_types/to_pandas.py +2 -11
- mlrun/datastore/__init__.py +0 -2
- mlrun/datastore/alibaba_oss.py +4 -1
- mlrun/datastore/azure_blob.py +4 -1
- mlrun/datastore/base.py +12 -4
- mlrun/datastore/datastore.py +9 -3
- mlrun/datastore/datastore_profile.py +1 -1
- mlrun/datastore/dbfs_store.py +4 -1
- mlrun/datastore/filestore.py +4 -1
- mlrun/datastore/google_cloud_storage.py +4 -1
- mlrun/datastore/hdfs.py +4 -1
- mlrun/datastore/inmem.py +4 -1
- mlrun/datastore/redis.py +4 -1
- mlrun/datastore/s3.py +4 -1
- mlrun/datastore/sources.py +51 -49
- mlrun/datastore/store_resources.py +0 -2
- mlrun/datastore/targets.py +22 -23
- mlrun/datastore/utils.py +2 -2
- mlrun/datastore/v3io.py +4 -1
- mlrun/datastore/wasbfs/fs.py +13 -12
- mlrun/db/base.py +126 -62
- mlrun/db/factory.py +3 -0
- mlrun/db/httpdb.py +767 -231
- mlrun/db/nopdb.py +126 -57
- mlrun/errors.py +2 -2
- mlrun/execution.py +55 -29
- mlrun/feature_store/__init__.py +0 -2
- mlrun/feature_store/api.py +40 -40
- mlrun/feature_store/common.py +9 -9
- mlrun/feature_store/feature_set.py +20 -18
- mlrun/feature_store/feature_vector.py +27 -24
- mlrun/feature_store/retrieval/base.py +14 -9
- mlrun/feature_store/retrieval/job.py +2 -1
- mlrun/feature_store/steps.py +2 -2
- mlrun/features.py +30 -13
- mlrun/frameworks/__init__.py +1 -2
- mlrun/frameworks/_common/__init__.py +1 -2
- mlrun/frameworks/_common/artifacts_library.py +2 -2
- mlrun/frameworks/_common/mlrun_interface.py +10 -6
- mlrun/frameworks/_common/model_handler.py +29 -27
- mlrun/frameworks/_common/producer.py +3 -1
- mlrun/frameworks/_dl_common/__init__.py +1 -2
- mlrun/frameworks/_dl_common/loggers/__init__.py +1 -2
- mlrun/frameworks/_dl_common/loggers/mlrun_logger.py +4 -4
- mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +3 -3
- mlrun/frameworks/_ml_common/__init__.py +1 -2
- mlrun/frameworks/_ml_common/loggers/__init__.py +1 -2
- mlrun/frameworks/_ml_common/model_handler.py +21 -21
- mlrun/frameworks/_ml_common/plans/__init__.py +1 -2
- mlrun/frameworks/_ml_common/plans/confusion_matrix_plan.py +3 -1
- mlrun/frameworks/_ml_common/plans/dataset_plan.py +3 -3
- mlrun/frameworks/_ml_common/plans/roc_curve_plan.py +4 -4
- mlrun/frameworks/auto_mlrun/__init__.py +1 -2
- mlrun/frameworks/auto_mlrun/auto_mlrun.py +22 -15
- mlrun/frameworks/huggingface/__init__.py +1 -2
- mlrun/frameworks/huggingface/model_server.py +9 -9
- mlrun/frameworks/lgbm/__init__.py +47 -44
- mlrun/frameworks/lgbm/callbacks/__init__.py +1 -2
- mlrun/frameworks/lgbm/callbacks/logging_callback.py +4 -2
- mlrun/frameworks/lgbm/callbacks/mlrun_logging_callback.py +4 -2
- mlrun/frameworks/lgbm/mlrun_interfaces/__init__.py +1 -2
- mlrun/frameworks/lgbm/mlrun_interfaces/mlrun_interface.py +5 -5
- mlrun/frameworks/lgbm/model_handler.py +15 -11
- mlrun/frameworks/lgbm/model_server.py +11 -7
- mlrun/frameworks/lgbm/utils.py +2 -2
- mlrun/frameworks/onnx/__init__.py +1 -2
- mlrun/frameworks/onnx/dataset.py +3 -3
- mlrun/frameworks/onnx/mlrun_interface.py +2 -2
- mlrun/frameworks/onnx/model_handler.py +7 -5
- mlrun/frameworks/onnx/model_server.py +8 -6
- mlrun/frameworks/parallel_coordinates.py +11 -11
- mlrun/frameworks/pytorch/__init__.py +22 -23
- mlrun/frameworks/pytorch/callbacks/__init__.py +1 -2
- mlrun/frameworks/pytorch/callbacks/callback.py +2 -1
- mlrun/frameworks/pytorch/callbacks/logging_callback.py +15 -8
- mlrun/frameworks/pytorch/callbacks/mlrun_logging_callback.py +19 -12
- mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +22 -15
- mlrun/frameworks/pytorch/callbacks_handler.py +36 -30
- mlrun/frameworks/pytorch/mlrun_interface.py +17 -17
- mlrun/frameworks/pytorch/model_handler.py +21 -17
- mlrun/frameworks/pytorch/model_server.py +13 -9
- mlrun/frameworks/sklearn/__init__.py +19 -18
- mlrun/frameworks/sklearn/estimator.py +2 -2
- mlrun/frameworks/sklearn/metric.py +3 -3
- mlrun/frameworks/sklearn/metrics_library.py +8 -6
- mlrun/frameworks/sklearn/mlrun_interface.py +3 -2
- mlrun/frameworks/sklearn/model_handler.py +4 -3
- mlrun/frameworks/tf_keras/__init__.py +11 -12
- mlrun/frameworks/tf_keras/callbacks/__init__.py +1 -2
- mlrun/frameworks/tf_keras/callbacks/logging_callback.py +17 -14
- mlrun/frameworks/tf_keras/callbacks/mlrun_logging_callback.py +15 -12
- mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +21 -18
- mlrun/frameworks/tf_keras/model_handler.py +17 -13
- mlrun/frameworks/tf_keras/model_server.py +12 -8
- mlrun/frameworks/xgboost/__init__.py +19 -18
- mlrun/frameworks/xgboost/model_handler.py +13 -9
- mlrun/launcher/base.py +3 -4
- mlrun/launcher/local.py +1 -1
- mlrun/launcher/remote.py +1 -1
- mlrun/lists.py +4 -3
- mlrun/model.py +108 -44
- mlrun/model_monitoring/__init__.py +1 -2
- mlrun/model_monitoring/api.py +6 -6
- mlrun/model_monitoring/applications/_application_steps.py +13 -15
- mlrun/model_monitoring/applications/histogram_data_drift.py +41 -15
- mlrun/model_monitoring/applications/results.py +55 -3
- mlrun/model_monitoring/controller.py +185 -223
- mlrun/model_monitoring/db/_schedules.py +156 -0
- mlrun/model_monitoring/db/_stats.py +189 -0
- mlrun/model_monitoring/db/stores/__init__.py +1 -1
- mlrun/model_monitoring/db/stores/base/store.py +6 -65
- mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +0 -25
- mlrun/model_monitoring/db/stores/sqldb/models/base.py +0 -97
- mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +2 -58
- mlrun/model_monitoring/db/stores/sqldb/models/sqlite.py +0 -15
- mlrun/model_monitoring/db/stores/sqldb/sql_store.py +6 -257
- mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +9 -271
- mlrun/model_monitoring/db/tsdb/base.py +74 -22
- mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +66 -35
- mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +33 -0
- mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +284 -51
- mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +1 -0
- mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +35 -17
- mlrun/model_monitoring/helpers.py +97 -1
- mlrun/model_monitoring/model_endpoint.py +4 -2
- mlrun/model_monitoring/stream_processing.py +2 -2
- mlrun/model_monitoring/tracking_policy.py +10 -3
- mlrun/model_monitoring/writer.py +47 -26
- mlrun/package/__init__.py +3 -6
- mlrun/package/context_handler.py +1 -1
- mlrun/package/packager.py +12 -9
- mlrun/package/packagers/__init__.py +0 -2
- mlrun/package/packagers/default_packager.py +14 -11
- mlrun/package/packagers/numpy_packagers.py +16 -7
- mlrun/package/packagers/pandas_packagers.py +18 -18
- mlrun/package/packagers/python_standard_library_packagers.py +25 -11
- mlrun/package/packagers_manager.py +31 -14
- mlrun/package/utils/__init__.py +0 -3
- mlrun/package/utils/_pickler.py +6 -6
- mlrun/platforms/__init__.py +3 -3
- mlrun/platforms/iguazio.py +4 -1
- mlrun/projects/__init__.py +1 -6
- mlrun/projects/operations.py +27 -27
- mlrun/projects/pipelines.py +85 -215
- mlrun/projects/project.py +444 -158
- mlrun/run.py +9 -9
- mlrun/runtimes/__init__.py +1 -3
- mlrun/runtimes/base.py +13 -10
- mlrun/runtimes/daskjob.py +9 -9
- mlrun/runtimes/generators.py +2 -1
- mlrun/runtimes/kubejob.py +4 -5
- mlrun/runtimes/mpijob/__init__.py +0 -2
- mlrun/runtimes/mpijob/abstract.py +7 -6
- mlrun/runtimes/nuclio/api_gateway.py +7 -7
- mlrun/runtimes/nuclio/application/application.py +11 -11
- mlrun/runtimes/nuclio/function.py +14 -13
- mlrun/runtimes/nuclio/serving.py +9 -9
- mlrun/runtimes/pod.py +74 -29
- mlrun/runtimes/remotesparkjob.py +3 -2
- mlrun/runtimes/sparkjob/__init__.py +0 -2
- mlrun/runtimes/sparkjob/spark3job.py +21 -11
- mlrun/runtimes/utils.py +6 -5
- mlrun/serving/merger.py +6 -4
- mlrun/serving/remote.py +18 -17
- mlrun/serving/routers.py +27 -27
- mlrun/serving/server.py +1 -1
- mlrun/serving/states.py +76 -71
- mlrun/serving/utils.py +13 -2
- mlrun/serving/v1_serving.py +3 -2
- mlrun/serving/v2_serving.py +4 -4
- mlrun/track/__init__.py +1 -1
- mlrun/track/tracker.py +2 -2
- mlrun/track/trackers/mlflow_tracker.py +6 -5
- mlrun/utils/async_http.py +1 -1
- mlrun/utils/helpers.py +72 -28
- mlrun/utils/logger.py +104 -2
- mlrun/utils/notifications/notification/base.py +23 -4
- mlrun/utils/notifications/notification/console.py +1 -1
- mlrun/utils/notifications/notification/git.py +6 -6
- mlrun/utils/notifications/notification/ipython.py +5 -4
- mlrun/utils/notifications/notification/slack.py +1 -1
- mlrun/utils/notifications/notification/webhook.py +13 -17
- mlrun/utils/notifications/notification_pusher.py +23 -19
- mlrun/utils/regex.py +1 -1
- mlrun/utils/version/version.json +2 -2
- {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0rc1.dist-info}/METADATA +186 -186
- mlrun-1.8.0rc1.dist-info/RECORD +356 -0
- {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0rc1.dist-info}/WHEEL +1 -1
- mlrun-1.7.2rc3.dist-info/RECORD +0 -351
- {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0rc1.dist-info}/LICENSE +0 -0
- {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0rc1.dist-info}/entry_points.txt +0 -0
- {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0rc1.dist-info}/top_level.txt +0 -0
mlrun/frameworks/xgboost/__init__.py CHANGED

@@ -11,9 +11,8 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-
-from typing import Union
+
+from typing import Optional, Union
 
 import xgboost as xgb
 
@@ -34,26 +33,28 @@ def apply_mlrun(
     model: xgb.XGBModel = None,
     model_name: str = "model",
     tag: str = "",
-    model_path: str = None,
-    modules_map: Union[dict[str, Union[None, str, list[str]]], str] = None,
-    custom_objects_map: Union[dict[str, Union[str, list[str]]], str] = None,
-    custom_objects_directory: str = None,
+    model_path: Optional[str] = None,
+    modules_map: Optional[Union[dict[str, Union[None, str, list[str]]], str]] = None,
+    custom_objects_map: Optional[Union[dict[str, Union[str, list[str]]], str]] = None,
+    custom_objects_directory: Optional[str] = None,
     context: mlrun.MLClientCtx = None,
-    artifacts: Union[list[MLPlan], list[str], dict[str, dict]] = None,
-    metrics: Union[
-        list[Metric],
-        list[XGBoostTypes.MetricEntryType],
-        dict[str, XGBoostTypes.MetricEntryType],
+    artifacts: Optional[Union[list[MLPlan], list[str], dict[str, dict]]] = None,
+    metrics: Optional[
+        Union[
+            list[Metric],
+            list[XGBoostTypes.MetricEntryType],
+            dict[str, XGBoostTypes.MetricEntryType],
+        ]
     ] = None,
     x_test: XGBoostTypes.DatasetType = None,
     y_test: XGBoostTypes.DatasetType = None,
     sample_set: Union[XGBoostTypes.DatasetType, mlrun.DataItem, str] = None,
-    y_columns: Union[list[str], list[int]] = None,
-    feature_vector: str = None,
-    feature_weights: list[float] = None,
-    labels: dict[str, Union[str, int, float]] = None,
-    parameters: dict[str, Union[str, int, float]] = None,
-    extra_data: dict[str, XGBoostTypes.ExtraDataType] = None,
+    y_columns: Optional[Union[list[str], list[int]]] = None,
+    feature_vector: Optional[str] = None,
+    feature_weights: Optional[list[float]] = None,
+    labels: Optional[dict[str, Union[str, int, float]]] = None,
+    parameters: Optional[dict[str, Union[str, int, float]]] = None,
+    extra_data: Optional[dict[str, XGBoostTypes.ExtraDataType]] = None,
     auto_log: bool = True,
     **kwargs,
 ) -> XGBoostModelHandler:
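Note: the hunk above only tightens type hints (defaults of None are now wrapped in Optional[...]); the call pattern of apply_mlrun is unchanged. As a rough illustration, a minimal training handler might use it as follows; the dataset, label column, and handler names here are hypothetical, not taken from the diff.

    import mlrun
    import xgboost as xgb
    from mlrun.frameworks.xgboost import apply_mlrun


    def train(context: mlrun.MLClientCtx, dataset: mlrun.DataItem, label_column: str = "label"):
        df = dataset.as_df()
        x, y = df.drop(columns=[label_column]), df[label_column]

        model = xgb.XGBClassifier()
        # Wrap the model so metrics, plots and the model file are auto-logged to the context.
        apply_mlrun(model=model, model_name="xgb_model", x_test=x, y_test=y, context=context)
        model.fit(x, y)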
mlrun/frameworks/xgboost/model_handler.py CHANGED

@@ -14,7 +14,7 @@
 #
 import os
 import pickle
-from typing import Union
+from typing import Optional, Union
 
 import cloudpickle
 
@@ -45,11 +45,15 @@ class XGBoostModelHandler(MLModelHandler):
     def __init__(
         self,
         model: XGBoostTypes.ModelType = None,
-        model_path: str = None,
-        model_name: str = None,
-        modules_map: Union[dict[str, Union[None, str, list[str]]], str] = None,
-        custom_objects_map: Union[dict[str, Union[str, list[str]]], str] = None,
-        custom_objects_directory: str = None,
+        model_path: Optional[str] = None,
+        model_name: Optional[str] = None,
+        modules_map: Optional[
+            Union[dict[str, Union[None, str, list[str]]], str]
+        ] = None,
+        custom_objects_map: Optional[
+            Union[dict[str, Union[str, list[str]]], str]
+        ] = None,
+        custom_objects_directory: Optional[str] = None,
         context: mlrun.MLClientCtx = None,
         model_format: str = ModelFormats.PKL,
         **kwargs,
@@ -152,7 +156,7 @@ class XGBoostModelHandler(MLModelHandler):
         )
 
     @without_mlrun_interface(interface=XGBModelMLRunInterface)
-    def save(self, output_path: str = None, **kwargs):
+    def save(self, output_path: Optional[str] = None, **kwargs):
         """
         Save the handled model at the given output path. If a MLRun context is available, the saved model files will be
         logged and returned as artifacts.
@@ -186,10 +190,10 @@ class XGBoostModelHandler(MLModelHandler):
 
     def to_onnx(
         self,
-        model_name: str = None,
+        model_name: Optional[str] = None,
         optimize: bool = True,
         input_sample: XGBoostTypes = None,
-        log: bool = None,
+        log: Optional[bool] = None,
     ):
         """
         Convert the model in this handler to an ONNX model. The inputs names are optional, they do not change the
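Note: apply_mlrun returns this handler, so the save/to_onnx parameters shown above are typically reached through it. Continuing the illustrative handler from the previous note (names are placeholders; ONNX conversion assumes the relevant ONNX packages are installed):

    handler = apply_mlrun(model=model, model_name="xgb_model", context=context)
    model.fit(x, y)

    # Persist the handled model; with an active MLRun context the files are logged as artifacts.
    handler.save()

    # Optionally convert the handled model to ONNX.
    handler.to_onnx(model_name="xgb_model_onnx", optimize=True)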
mlrun/launcher/base.py CHANGED

@@ -18,8 +18,6 @@ import os
 import uuid
 from typing import Any, Callable, Optional, Union
 
-import mlrun_pipelines.common.ops
-
 import mlrun.common.schemas
 import mlrun.config
 import mlrun.errors
@@ -27,6 +25,7 @@ import mlrun.lists
 import mlrun.model
 import mlrun.runtimes
 import mlrun.utils.regex
+import mlrun_pipelines.common.ops
 from mlrun.utils import logger
 
 run_modes = ["pass"]
@@ -62,7 +61,7 @@ class BaseLauncher(abc.ABC):
         schedule: Optional[
             Union[str, mlrun.common.schemas.schedule.ScheduleCronTrigger]
         ] = None,
-        hyperparams: dict[str, list] = None,
+        hyperparams: Optional[dict[str, list]] = None,
         hyper_param_options: Optional[mlrun.model.HyperParamOptions] = None,
         verbose: Optional[bool] = None,
         scrape_metrics: Optional[bool] = None,
@@ -238,7 +237,7 @@
         out_path=None,
         artifact_path=None,
         workdir=None,
-        notifications: list[mlrun.model.Notification] = None,
+        notifications: Optional[list[mlrun.model.Notification]] = None,
         state_thresholds: Optional[dict[str, int]] = None,
     ):
         run.spec.handler = (
mlrun/launcher/local.py CHANGED

@@ -59,7 +59,7 @@ class ClientLocalLauncher(launcher.ClientBaseLauncher):
         schedule: Optional[
             Union[str, mlrun.common.schemas.schedule.ScheduleCronTrigger]
         ] = None,
-        hyperparams: dict[str, list] = None,
+        hyperparams: Optional[dict[str, list]] = None,
         hyper_param_options: Optional[mlrun.model.HyperParamOptions] = None,
         verbose: Optional[bool] = None,
         scrape_metrics: Optional[bool] = None,
mlrun/launcher/remote.py CHANGED

@@ -49,7 +49,7 @@ class ClientRemoteLauncher(launcher.ClientBaseLauncher):
         schedule: Optional[
             Union[str, mlrun.common.schemas.schedule.ScheduleCronTrigger]
         ] = None,
-        hyperparams: dict[str, list] = None,
+        hyperparams: Optional[dict[str, list]] = None,
         hyper_param_options: Optional[mlrun.model.HyperParamOptions] = None,
         verbose: Optional[bool] = None,
         scrape_metrics: Optional[bool] = None,
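Note: the hyperparams argument that gains an Optional hint in the three launcher files above is the same one exposed on run submission calls. A minimal, illustrative hyperparameter run (project and function names are hypothetical):

    import mlrun

    project = mlrun.get_or_create_project("my-project", context="./")

    run = project.run_function(
        "trainer",  # hypothetical function name
        hyperparams={"eta": [0.1, 0.3], "max_depth": [3, 6]},
        hyper_param_options=mlrun.model.HyperParamOptions(selector="max.accuracy"),
    )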
mlrun/lists.py CHANGED

@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 from copy import copy
+from typing import Optional
 
 import pandas as pd
 
@@ -129,11 +130,11 @@ class RunList(list):
     def compare(
         self,
         hide_identical: bool = True,
-        exclude: list = None,
-        show: bool = None,
+        exclude: Optional[list] = None,
+        show: Optional[bool] = None,
         extend_iterations=True,
         filename=None,
-        colorscale: str = None,
+        colorscale: Optional[str] = None,
     ):
         """return/show parallel coordinates plot + table to compare between the list of runs
 
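Note: RunList.compare only gains Optional hints here; its behavior is unchanged. A short sketch of how it is commonly reached (the project name is a placeholder):

    import mlrun

    db = mlrun.get_run_db()
    runs = db.list_runs(project="my-project")  # returns a RunList

    # Render a parallel-coordinates comparison and save it to an HTML file.
    runs.compare(hide_identical=False, filename="runs_compare.html")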
mlrun/model.py CHANGED

@@ -74,7 +74,10 @@ class ModelObj:
 
     @mlrun.utils.filter_warnings("ignore", FutureWarning)
     def to_dict(
-        self, fields: list = None, exclude: list = None, strip: bool = False
+        self,
+        fields: Optional[list] = None,
+        exclude: Optional[list] = None,
+        strip: bool = False,
     ) -> dict:
         """
         Convert the object to a dict
@@ -141,7 +144,7 @@ class ModelObj:
         self._apply_enrichment_before_to_dict_completion(struct, strip=strip)
         return struct
 
-    def _resolve_initial_to_dict_fields(self, fields: list = None) -> list:
+    def _resolve_initial_to_dict_fields(self, fields: Optional[list] = None) -> list:
         """
         Resolve fields to be used in to_dict method.
         If fields is None, use `_dict_fields` attribute of the object.
@@ -184,7 +187,7 @@ class ModelObj:
         self,
         struct: dict,
         method: typing.Callable,
-        fields: typing.Union[list, set] = None,
+        fields: Optional[typing.Union[list, set]] = None,
         strip: bool = False,
     ) -> dict:
         for field_name in fields:
@@ -196,14 +199,14 @@ class ModelObj:
         return struct
 
     def _serialize_field(
-        self, struct: dict, field_name: str = None, strip: bool = False
+        self, struct: dict, field_name: Optional[str] = None, strip: bool = False
     ) -> typing.Any:
         # We pull the field from self and not from struct because it was excluded from the struct when looping over
         # the fields to save.
         return getattr(self, field_name, None)
 
     def _enrich_field(
-        self, struct: dict, field_name: str = None, strip: bool = False
+        self, struct: dict, field_name: Optional[str] = None, strip: bool = False
     ) -> typing.Any:
         # We first try to pull from struct because the field might have been already serialized and if not,
         # we pull from self
@@ -215,7 +218,9 @@ class ModelObj:
         return struct
 
     @classmethod
-    def from_dict(cls, struct=None, fields=None, deprecated_fields: dict = None):
+    def from_dict(
+        cls, struct=None, fields=None, deprecated_fields: Optional[dict] = None
+    ):
         """create an object from a python dictionary"""
         struct = {} if struct is None else struct
         deprecated_fields = deprecated_fields or {}
@@ -430,7 +435,7 @@ class Credentials(ModelObj):
 
     def __init__(
         self,
-        access_key: str = None,
+        access_key: Optional[str] = None,
     ):
         self.access_key = access_key
 
@@ -500,7 +505,7 @@ class ImageBuilder(ModelObj):
         origin_filename=None,
         with_mlrun=None,
         auto_build=None,
-        requirements: list = None,
+        requirements: Optional[list] = None,
         extra_args=None,
         builder_env=None,
         source_code_target_dir=None,
@@ -549,7 +554,7 @@
         self,
         image="",
         base_image=None,
-        commands: list = None,
+        commands: Optional[list] = None,
         secret=None,
         source=None,
         extra=None,
@@ -750,14 +755,14 @@ class Notification(ModelObj):
                 "Notification params size exceeds max size of 1 MB"
             )
 
-    def validate_notification_params(self):
-
-
-
-
+    def validate_notification_params(self, default_notification_params=None):
+        default_notification_params = default_notification_params or {}
+        notification_type = mlrun.utils.notifications.NotificationTypes(self.kind)
+        notification_class = notification_type.get_notification()
         secret_params = self.secret_params or {}
         params = self.params or {}
-
+        default_params = default_notification_params.get(notification_type, {})
+        params = notification_class.enrich_default_params(params, default_params)
         # if the secret_params are already masked - no need to validate
         params_secret = secret_params.get("secret", "")
         if params_secret:
@@ -973,7 +978,7 @@ class RunSpec(ModelObj):
         self.node_selector = node_selector or {}
 
     def _serialize_field(
-        self, struct: dict, field_name: str = None, strip: bool = False
+        self, struct: dict, field_name: Optional[str] = None, strip: bool = False
     ) -> Optional[str]:
         # We pull the field from self and not from struct because it was excluded from the struct
         if field_name == "handler":
@@ -1275,9 +1280,9 @@ class RunStatus(ModelObj):
         last_update=None,
         iterations=None,
         ui_url=None,
-        reason: str = None,
-        notifications: dict[str, Notification] = None,
-        artifact_uris: dict[str, str] = None,
+        reason: Optional[str] = None,
+        notifications: Optional[dict[str, Notification]] = None,
+        artifact_uris: Optional[dict[str, str]] = None,
     ):
         self.state = state or "created"
         self.status_text = status_text
@@ -1285,7 +1290,7 @@ class RunStatus(ModelObj):
         self.host = host
         self.commit = commit
         self.results = results
-        self.artifacts = artifacts
+        self._artifacts = artifacts
         self.start_time = start_time
         self.last_update = last_update
         self.iterations = iterations
@@ -1293,7 +1298,59 @@ class RunStatus(ModelObj):
         self.reason = reason
         self.notifications = notifications or {}
         # Artifact key -> URI mapping, since the full artifacts are not stored in the runs DB table
-        self.artifact_uris = artifact_uris or {}
+        self._artifact_uris = artifact_uris or {}
+
+    @classmethod
+    def from_dict(
+        cls, struct=None, fields=None, deprecated_fields: Optional[dict] = None
+    ):
+        deprecated_fields = {
+            # Set artifacts as deprecated for lazy loading
+            "artifacts": "artifact_uris"
+        }
+        return super().from_dict(
+            struct, fields=fields, deprecated_fields=deprecated_fields
+        )
+
+    @property
+    def artifacts(self):
+        """
+        Artifacts are lazy loaded to reduce memory consumption.
+        We keep artifact_uris (key -> store URI dictionary) to be able to get the run artifacts easily.
+        If the artifact is not already in the cache, we get it from the store (DB).
+        :return: List of artifact dictionaries
+        """
+        self._artifacts = self._artifacts or []
+        existing_artifact_keys = {
+            artifact["metadata"]["key"] for artifact in self._artifacts
+        }
+        for key, uri in self.artifact_uris.items():
+            if key not in existing_artifact_keys:
+                artifact = mlrun.datastore.get_store_resource(uri)
+                self._artifacts.append(artifact.to_dict())
+        return self._artifacts
+
+    @artifacts.setter
+    def artifacts(self, artifacts):
+        self._artifacts = artifacts
+
+    @property
+    def artifact_uris(self):
+        return self._artifact_uris
+
+    @artifact_uris.setter
+    def artifact_uris(self, artifact_uris):
+        resolved_artifact_uris = {}
+        if isinstance(artifact_uris, list):
+            # artifact_uris is the deprecated list of artifacts - convert to new form
+            for artifact in artifact_uris:
+                if isinstance(artifact, dict):
+                    artifact = mlrun.artifacts.dict_to_artifact(artifact)
+                resolved_artifact_uris[artifact.key] = artifact.uri
+        else:
+            resolved_artifact_uris = artifact_uris
+
+        self._artifact_uris = resolved_artifact_uris
 
     def is_failed(self) -> Optional[bool]:
         """
@@ -1601,7 +1658,7 @@ class RunObject(RunTemplate):
 
         return outputs
 
-    def artifact(self, key: str) -> "mlrun.DataItem":
+    def artifact(self, key: str) -> typing.Optional["mlrun.DataItem"]:
         """Return artifact DataItem by key.
 
         This method waits for the outputs to complete, searches for the artifact matching the given key,
@@ -1644,7 +1701,7 @@ class RunObject(RunTemplate):
         :param key: The key of the artifact to retrieve.
         :return: The last artifact DataItem with the given key, or None if no such artifact is found.
         """
-        if not self.status.artifacts:
+        if not self.status.artifacts and not self.status.artifact_uris:
             return None
 
         # Collect artifacts that match the key
@@ -1655,7 +1712,12 @@ class RunObject(RunTemplate):
         ]
 
         if not matching_artifacts:
-            return None
+            if key not in self.status.artifact_uris:
+                return None
+
+            # Get artifact by store URI sanity (should have been enriched by now in status.artifacts property)
+            artifact_uri = self.status.artifact_uris[key]
+            return mlrun.datastore.get_store_resource(artifact_uri)
 
         # Sort matching artifacts by creation date in ascending order.
         # The last element in the list will be the one created most recently.
@@ -1870,7 +1932,7 @@ class EntrypointParam(ModelObj):
         default=None,
         doc="",
         required=None,
-        choices: list = None,
+        choices: Optional[list] = None,
     ):
         self.name = name
         self.type = type
@@ -2065,12 +2127,12 @@ class DataSource(ModelObj):
 
     def __init__(
         self,
-        name: str = None,
-        path: str = None,
-        attributes: dict[str, object] = None,
-        key_field: str = None,
-        time_field: str = None,
-        schedule: str = None,
+        name: Optional[str] = None,
+        path: Optional[str] = None,
+        attributes: Optional[dict[str, object]] = None,
+        key_field: Optional[str] = None,
+        time_field: Optional[str] = None,
+        schedule: Optional[str] = None,
         start_time: Optional[Union[datetime, str]] = None,
         end_time: Optional[Union[datetime, str]] = None,
     ):
@@ -2092,7 +2154,7 @@ class DataSource(ModelObj):
         self._secrets = secrets
 
     def _serialize_field(
-        self, struct: dict, field_name: str = None, strip: bool = False
+        self, struct: dict, field_name: Optional[str] = None, strip: bool = False
     ) -> typing.Any:
         value = super()._serialize_field(struct, field_name, strip)
         # We pull the field from self and not from struct because it was excluded from the struct when looping over
@@ -2124,7 +2186,9 @@ class DataTargetBase(ModelObj):
     ]
 
     @classmethod
-    def from_dict(cls, struct=None, fields=None, deprecated_fields: dict = None):
+    def from_dict(
+        cls, struct=None, fields=None, deprecated_fields: Optional[dict] = None
+    ):
         return super().from_dict(struct, fields=fields)
 
     def get_path(self):
@@ -2140,10 +2204,10 @@
 
     def __init__(
         self,
-        kind: str = None,
+        kind: Optional[str] = None,
         name: str = "",
         path=None,
-        attributes: dict[str, str] = None,
+        attributes: Optional[dict[str, str]] = None,
         after_step=None,
         partitioned: bool = False,
         key_bucketing_number: Optional[int] = None,
@@ -2151,8 +2215,8 @@
         time_partitioning_granularity: Optional[str] = None,
         max_events: Optional[int] = None,
         flush_after_seconds: Optional[int] = None,
-        storage_options: dict[str, str] = None,
-        schema: dict[str, Any] = None,
+        storage_options: Optional[dict[str, str]] = None,
+        schema: Optional[dict[str, Any]] = None,
        credentials_prefix=None,
     ):
         self.name = name
@@ -2208,7 +2272,7 @@ class DataTarget(DataTargetBase):
 
     def __init__(
         self,
-        kind: str = None,
+        kind: Optional[str] = None,
         name: str = "",
         path=None,
         online=None,
@@ -2237,12 +2301,12 @@
 class VersionedObjMetadata(ModelObj):
     def __init__(
         self,
-        name: str = None,
-        tag: str = None,
-        uid: str = None,
-        project: str = None,
-        labels: dict[str, str] = None,
-        annotations: dict[str, str] = None,
+        name: Optional[str] = None,
+        tag: Optional[str] = None,
+        uid: Optional[str] = None,
+        project: Optional[str] = None,
+        labels: Optional[dict[str, str]] = None,
+        annotations: Optional[dict[str, str]] = None,
         updated=None,
     ):
         self.name = name
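Note: the RunStatus change above is the main behavioral change in this file. Run statuses now keep only an artifact-key-to-store-URI map (artifact_uris) and fetch the full artifact objects from the DB lazily when status.artifacts (or run.artifact) is accessed. A hedged sketch of what that means on the client side; the function and artifact key names are illustrative:

    run = project.run_function("trainer")  # hypothetical function name

    # Only lightweight key -> store-URI pairs live on the run status.
    print(run.status.artifact_uris)  # e.g. {"model": "store://artifacts/..."}

    # Reading status.artifacts (or calling run.artifact) lazily loads the full objects.
    model_item = run.artifact("model")  # mlrun.DataItem, or None if the key is unknown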
mlrun/model_monitoring/__init__.py CHANGED

@@ -11,8 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-# flake8: noqa - this is until we take care of the F401 violations with respect to __all__ & sphinx
+
 # for backwards compatibility
 
 from .db import get_store_object, get_tsdb_connector
mlrun/model_monitoring/api.py CHANGED

@@ -45,7 +45,7 @@ def get_or_create_model_endpoint(
     endpoint_id: str = "",
     function_name: str = "",
     context: mlrun.MLClientCtx = None,
-    sample_set_statistics: dict[str, typing.Any] = None,
+    sample_set_statistics: typing.Optional[dict[str, typing.Any]] = None,
     drift_threshold: typing.Optional[float] = None,
     possible_drift_threshold: typing.Optional[float] = None,
     monitoring_mode: mm_constants.ModelMonitoringMode = mm_constants.ModelMonitoringMode.disabled,
@@ -234,7 +234,7 @@ def record_results(
 def _model_endpoint_validations(
     model_endpoint: ModelEndpoint,
     model_path: str = "",
-    sample_set_statistics: dict[str, typing.Any] = None,
+    sample_set_statistics: typing.Optional[dict[str, typing.Any]] = None,
 ) -> None:
     """
     Validate that provided model endpoint configurations match the stored fields of the provided `ModelEndpoint`
@@ -387,7 +387,7 @@ def _generate_model_endpoint(
 
 def get_sample_set_statistics(
     sample_set: DatasetType = None,
-    model_artifact_feature_stats: dict = None,
+    model_artifact_feature_stats: typing.Optional[dict] = None,
     sample_set_columns: typing.Optional[list] = None,
     sample_set_drop_columns: typing.Optional[list] = None,
     sample_set_label_columns: typing.Optional[list] = None,
@@ -445,9 +445,9 @@ def get_sample_set_statistics(
 
 def read_dataset_as_dataframe(
     dataset: DatasetType,
-    feature_columns: typing.Union[str, list[str]] = None,
-    label_columns: typing.Union[str, list[str]] = None,
-    drop_columns: typing.Union[str, list[str], int, list[int]] = None,
+    feature_columns: typing.Optional[typing.Union[str, list[str]]] = None,
+    label_columns: typing.Optional[typing.Union[str, list[str]]] = None,
+    drop_columns: typing.Optional[typing.Union[str, list[str], int, list[int]]] = None,
 ) -> tuple[pd.DataFrame, list[str]]:
     """
     Parse the given dataset into a DataFrame and drop the columns accordingly. In addition, the label columns will be
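Note: these signatures only gain explicit Optional hints. For reference, read_dataset_as_dataframe is a small helper that normalizes a dataset and resolves its label columns; an illustrative call (the DataFrame contents and column name are made up):

    import pandas as pd
    from mlrun.model_monitoring.api import read_dataset_as_dataframe

    data = pd.DataFrame({"f1": [1, 2], "f2": [3, 4], "label": [0, 1]})

    # Returns the parsed DataFrame together with the resolved label column names.
    df, label_columns = read_dataset_as_dataframe(dataset=data, label_columns="label")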
mlrun/model_monitoring/applications/_application_steps.py CHANGED

@@ -26,7 +26,11 @@ from mlrun.serving.utils import StepToDict
 from mlrun.utils import logger
 
 from .context import MonitoringApplicationContext
-from .results import
+from .results import (
+    ModelMonitoringApplicationMetric,
+    ModelMonitoringApplicationResult,
+    _ModelMonitoringApplicationStats,
+)
 
 
 class _PushToMonitoringWriter(StepToDict):
@@ -61,7 +65,9 @@ class _PushToMonitoringWriter(StepToDict):
         event: tuple[
             list[
                 Union[
-                    ModelMonitoringApplicationResult,
+                    ModelMonitoringApplicationResult,
+                    ModelMonitoringApplicationMetric,
+                    _ModelMonitoringApplicationStats,
                 ]
             ],
             MonitoringApplicationContext,
@@ -90,21 +96,15 @@ class _PushToMonitoringWriter(StepToDict):
                 writer_event[mm_constant.WriterEvent.EVENT_KIND] = (
                     mm_constant.WriterEventKind.RESULT
                 )
-
-
+            elif isinstance(result, _ModelMonitoringApplicationStats):
+                writer_event[mm_constant.WriterEvent.EVENT_KIND] = (
+                    mm_constant.WriterEventKind.STATS
                 )
-                writer_event[mm_constant.WriterEvent.DATA] = json.dumps(data)
             else:
                 writer_event[mm_constant.WriterEvent.EVENT_KIND] = (
                     mm_constant.WriterEventKind.METRIC
                 )
-
-
-                writer_event[mm_constant.WriterEvent.EVENT_KIND] = (
-                    mm_constant.WriterEventKind.RESULT
-                    if isinstance(result, ModelMonitoringApplicationResult)
-                    else mm_constant.WriterEventKind.METRIC
-                )
+            writer_event[mm_constant.WriterEvent.DATA] = json.dumps(data)
             logger.info(
                 f"Pushing data = {writer_event} \n to stream = {self.stream_uri}"
             )
@@ -113,9 +113,7 @@ class _PushToMonitoringWriter(StepToDict):
 
     def _lazy_init(self):
         if self.output_stream is None:
-            self.output_stream = mlrun.datastore.get_stream_pusher(
-                self.stream_uri,
-            )
+            self.output_stream = mlrun.datastore.get_stream_pusher(self.stream_uri)
 
 
 class _PrepareMonitoringEvent(StepToDict):
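Note: _PushToMonitoringWriter is an internal step, but the result/metric/stats objects it now dispatches on are the ones a monitoring application returns. Below is a rough, user-facing sketch of producing such a result. It assumes the application base class and result enums keep the names used in recent MLRun releases (ModelMonitoringApplicationBase, ResultKindApp, ResultStatusApp); treat those names, the class name, and the drift value as assumptions that are not confirmed by this diff.

    import mlrun.common.schemas.model_monitoring.constants as mm_constants
    from mlrun.model_monitoring.applications import (
        ModelMonitoringApplicationBase,
        ModelMonitoringApplicationResult,
    )


    class MyDriftApp(ModelMonitoringApplicationBase):  # hypothetical application
        def do_tracking(self, monitoring_context):
            # Compute a drift value for the monitored window (placeholder logic).
            drift_value = 0.2
            return ModelMonitoringApplicationResult(
                name="my_drift_result",
                value=drift_value,
                kind=mm_constants.ResultKindApp.data_drift,
                status=mm_constants.ResultStatusApp.potential_detection,
            )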