mlrun 1.7.0rc14__py3-none-any.whl → 1.7.0rc22__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mlrun might be problematic.
- mlrun/__init__.py +10 -1
- mlrun/__main__.py +23 -111
- mlrun/alerts/__init__.py +15 -0
- mlrun/alerts/alert.py +169 -0
- mlrun/api/schemas/__init__.py +4 -3
- mlrun/artifacts/__init__.py +8 -3
- mlrun/artifacts/base.py +36 -253
- mlrun/artifacts/dataset.py +9 -190
- mlrun/artifacts/manager.py +46 -42
- mlrun/artifacts/model.py +9 -141
- mlrun/artifacts/plots.py +14 -375
- mlrun/common/constants.py +65 -3
- mlrun/common/formatters/__init__.py +19 -0
- mlrun/{runtimes/mpijob/v1alpha1.py → common/formatters/artifact.py} +6 -14
- mlrun/common/formatters/base.py +113 -0
- mlrun/common/formatters/function.py +46 -0
- mlrun/common/formatters/pipeline.py +53 -0
- mlrun/common/formatters/project.py +51 -0
- mlrun/{runtimes → common/runtimes}/constants.py +32 -4
- mlrun/common/schemas/__init__.py +10 -5
- mlrun/common/schemas/alert.py +92 -11
- mlrun/common/schemas/api_gateway.py +56 -0
- mlrun/common/schemas/artifact.py +15 -5
- mlrun/common/schemas/auth.py +2 -0
- mlrun/common/schemas/client_spec.py +1 -0
- mlrun/common/schemas/frontend_spec.py +1 -0
- mlrun/common/schemas/function.py +4 -0
- mlrun/common/schemas/model_monitoring/__init__.py +15 -3
- mlrun/common/schemas/model_monitoring/constants.py +58 -7
- mlrun/common/schemas/model_monitoring/grafana.py +9 -5
- mlrun/common/schemas/model_monitoring/model_endpoints.py +86 -2
- mlrun/common/schemas/pipeline.py +0 -9
- mlrun/common/schemas/project.py +5 -11
- mlrun/common/types.py +1 -0
- mlrun/config.py +30 -9
- mlrun/data_types/to_pandas.py +9 -9
- mlrun/datastore/base.py +41 -9
- mlrun/datastore/datastore.py +6 -2
- mlrun/datastore/datastore_profile.py +56 -4
- mlrun/datastore/inmem.py +2 -2
- mlrun/datastore/redis.py +2 -2
- mlrun/datastore/s3.py +5 -0
- mlrun/datastore/sources.py +147 -7
- mlrun/datastore/store_resources.py +7 -7
- mlrun/datastore/targets.py +110 -42
- mlrun/datastore/utils.py +42 -0
- mlrun/db/base.py +54 -10
- mlrun/db/httpdb.py +282 -79
- mlrun/db/nopdb.py +52 -10
- mlrun/errors.py +11 -0
- mlrun/execution.py +26 -9
- mlrun/feature_store/__init__.py +0 -2
- mlrun/feature_store/api.py +12 -47
- mlrun/feature_store/feature_set.py +9 -0
- mlrun/feature_store/feature_vector.py +8 -0
- mlrun/feature_store/ingestion.py +7 -6
- mlrun/feature_store/retrieval/base.py +9 -4
- mlrun/feature_store/retrieval/conversion.py +9 -9
- mlrun/feature_store/retrieval/dask_merger.py +2 -0
- mlrun/feature_store/retrieval/job.py +9 -3
- mlrun/feature_store/retrieval/local_merger.py +2 -0
- mlrun/feature_store/retrieval/spark_merger.py +16 -0
- mlrun/frameworks/__init__.py +6 -0
- mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +7 -12
- mlrun/frameworks/parallel_coordinates.py +2 -1
- mlrun/frameworks/tf_keras/__init__.py +4 -1
- mlrun/k8s_utils.py +10 -11
- mlrun/launcher/base.py +4 -3
- mlrun/launcher/client.py +5 -3
- mlrun/launcher/local.py +12 -2
- mlrun/launcher/remote.py +9 -2
- mlrun/lists.py +6 -2
- mlrun/model.py +47 -21
- mlrun/model_monitoring/__init__.py +1 -1
- mlrun/model_monitoring/api.py +42 -18
- mlrun/model_monitoring/application.py +5 -305
- mlrun/model_monitoring/applications/__init__.py +11 -0
- mlrun/model_monitoring/applications/_application_steps.py +157 -0
- mlrun/model_monitoring/applications/base.py +280 -0
- mlrun/model_monitoring/applications/context.py +214 -0
- mlrun/model_monitoring/applications/evidently_base.py +211 -0
- mlrun/model_monitoring/applications/histogram_data_drift.py +132 -91
- mlrun/model_monitoring/applications/results.py +99 -0
- mlrun/model_monitoring/controller.py +3 -1
- mlrun/model_monitoring/db/__init__.py +2 -0
- mlrun/model_monitoring/db/stores/__init__.py +0 -2
- mlrun/model_monitoring/db/stores/base/store.py +22 -37
- mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +43 -21
- mlrun/model_monitoring/db/stores/sqldb/models/base.py +39 -8
- mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +27 -7
- mlrun/model_monitoring/db/stores/sqldb/models/sqlite.py +5 -0
- mlrun/model_monitoring/db/stores/sqldb/sql_store.py +246 -224
- mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +232 -216
- mlrun/model_monitoring/db/tsdb/__init__.py +100 -0
- mlrun/model_monitoring/db/tsdb/base.py +316 -0
- mlrun/model_monitoring/db/tsdb/helpers.py +30 -0
- mlrun/model_monitoring/db/tsdb/tdengine/__init__.py +15 -0
- mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +240 -0
- mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +45 -0
- mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +401 -0
- mlrun/model_monitoring/db/tsdb/v3io/__init__.py +15 -0
- mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +117 -0
- mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +658 -0
- mlrun/model_monitoring/evidently_application.py +6 -118
- mlrun/model_monitoring/helpers.py +63 -1
- mlrun/model_monitoring/model_endpoint.py +3 -2
- mlrun/model_monitoring/stream_processing.py +57 -216
- mlrun/model_monitoring/writer.py +134 -124
- mlrun/package/__init__.py +13 -1
- mlrun/package/packagers/__init__.py +6 -1
- mlrun/package/utils/_formatter.py +2 -2
- mlrun/platforms/__init__.py +10 -9
- mlrun/platforms/iguazio.py +21 -202
- mlrun/projects/operations.py +24 -12
- mlrun/projects/pipelines.py +79 -102
- mlrun/projects/project.py +271 -103
- mlrun/render.py +15 -14
- mlrun/run.py +16 -46
- mlrun/runtimes/__init__.py +6 -3
- mlrun/runtimes/base.py +14 -7
- mlrun/runtimes/daskjob.py +1 -0
- mlrun/runtimes/databricks_job/databricks_runtime.py +1 -0
- mlrun/runtimes/databricks_job/databricks_wrapper.py +1 -1
- mlrun/runtimes/funcdoc.py +0 -28
- mlrun/runtimes/kubejob.py +2 -1
- mlrun/runtimes/local.py +12 -3
- mlrun/runtimes/mpijob/__init__.py +0 -20
- mlrun/runtimes/mpijob/v1.py +1 -1
- mlrun/runtimes/nuclio/api_gateway.py +194 -84
- mlrun/runtimes/nuclio/application/application.py +170 -8
- mlrun/runtimes/nuclio/function.py +39 -49
- mlrun/runtimes/pod.py +16 -36
- mlrun/runtimes/remotesparkjob.py +9 -3
- mlrun/runtimes/sparkjob/spark3job.py +1 -1
- mlrun/runtimes/utils.py +6 -45
- mlrun/serving/__init__.py +8 -1
- mlrun/serving/server.py +2 -1
- mlrun/serving/states.py +51 -8
- mlrun/serving/utils.py +19 -11
- mlrun/serving/v2_serving.py +5 -1
- mlrun/track/tracker.py +2 -1
- mlrun/utils/async_http.py +25 -5
- mlrun/utils/helpers.py +157 -83
- mlrun/utils/logger.py +39 -7
- mlrun/utils/notifications/notification/__init__.py +14 -9
- mlrun/utils/notifications/notification/base.py +1 -1
- mlrun/utils/notifications/notification/slack.py +34 -7
- mlrun/utils/notifications/notification/webhook.py +1 -1
- mlrun/utils/notifications/notification_pusher.py +147 -16
- mlrun/utils/regex.py +9 -0
- mlrun/utils/v3io_clients.py +0 -1
- mlrun/utils/version/version.json +2 -2
- {mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc22.dist-info}/METADATA +14 -6
- {mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc22.dist-info}/RECORD +158 -138
- mlrun/kfpops.py +0 -865
- mlrun/platforms/other.py +0 -305
- {mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc22.dist-info}/LICENSE +0 -0
- {mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc22.dist-info}/WHEEL +0 -0
- {mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc22.dist-info}/entry_points.txt +0 -0
- {mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc22.dist-info}/top_level.txt +0 -0
mlrun/utils/helpers.py
CHANGED
@@ -26,7 +26,7 @@ import sys
 import typing
 import warnings
 from datetime import datetime, timezone
-from importlib import import_module
+from importlib import import_module, reload
 from os import path
 from types import ModuleType
 from typing import Any, Optional
@@ -39,7 +39,7 @@ import pandas
 import semver
 import yaml
 from dateutil import parser
-from
+from mlrun_pipelines.models import PipelineRun
 from pandas._libs.tslibs.timestamps import Timedelta, Timestamp
 from yaml.representer import RepresenterError

@@ -76,19 +76,6 @@ class OverwriteBuildParamsWarning(FutureWarning):
     pass


-# TODO: remove in 1.7.0
-@deprecated(
-    version="1.5.0",
-    reason="'parse_versioned_object_uri' will be removed from this file in 1.7.0, use "
-    "'mlrun.common.helpers.parse_versioned_object_uri' instead",
-    category=FutureWarning,
-)
-def parse_versioned_object_uri(uri: str, default_project: str = ""):
-    return mlrun.common.helpers.parse_versioned_object_uri(
-        uri=uri, default_project=default_project
-    )
-
-
 class StorePrefix:
     """map mlrun store objects to prefixes"""

@@ -119,14 +106,9 @@ class StorePrefix:


 def get_artifact_target(item: dict, project=None):
-
-
-
-        tree = item.get("tree")
-    else:
-        db_key = item["spec"].get("db_key")
-        project_str = project or item["metadata"].get("project")
-        tree = item["metadata"].get("tree")
+    db_key = item["spec"].get("db_key")
+    project_str = project or item["metadata"].get("project")
+    tree = item["metadata"].get("tree")

     kind = item.get("kind")
     if kind in ["dataset", "model", "artifact"] and db_key:
@@ -135,11 +117,15 @@ def get_artifact_target(item: dict, project=None):
             target = f"{target}@{tree}"
         return target

-    return (
-
-
-
-
+    return item["spec"].get("target_path")
+
+
+# TODO: left for migrations testing purposes. Remove in 1.8.0.
+def is_legacy_artifact(artifact):
+    if isinstance(artifact, dict):
+        return "metadata" not in artifact
+    else:
+        return not hasattr(artifact, "metadata")


 logger = create_logger(config.log_level, config.log_formatter, "mlrun", sys.stdout)
@@ -195,8 +181,12 @@ def verify_field_regex(
             )
             if mode == mlrun.common.schemas.RegexMatchModes.all:
                 if raise_on_failure:
+                    if len(field_name) > max_chars:
+                        field_name = field_name[:max_chars] + "...truncated"
+                    if len(field_value) > max_chars:
+                        field_value = field_value[:max_chars] + "...truncated"
                     raise mlrun.errors.MLRunInvalidArgumentError(
-                        f"Field '{field_name
+                        f"Field '{field_name}' is malformed. '{field_value}' "
                         f"does not match required pattern: {pattern}"
                     )
                 return False
@@ -669,7 +659,7 @@ def parse_artifact_uri(uri, default_project=""):
     [3] = tag
     [4] = tree
     """
-    uri_pattern =
+    uri_pattern = mlrun.utils.regex.artifact_uri_pattern
    match = re.match(uri_pattern, uri)
    if not match:
        raise ValueError(
@@ -801,34 +791,6 @@ def gen_html_table(header, rows=None):
     return style + '<table class="tg">\n' + out + "</table>\n\n"


-def new_pipe_metadata(
-    artifact_path: str = None,
-    cleanup_ttl: int = None,
-    op_transformers: list[typing.Callable] = None,
-):
-    from kfp.dsl import PipelineConf
-
-    def _set_artifact_path(task):
-        from kubernetes import client as k8s_client
-
-        task.add_env_variable(
-            k8s_client.V1EnvVar(name="MLRUN_ARTIFACT_PATH", value=artifact_path)
-        )
-        return task
-
-    conf = PipelineConf()
-    cleanup_ttl = cleanup_ttl or int(config.kfp_ttl)
-
-    if cleanup_ttl:
-        conf.set_ttl_seconds_after_finished(cleanup_ttl)
-    if artifact_path:
-        conf.add_op_transformer(_set_artifact_path)
-    if op_transformers:
-        for op_transformer in op_transformers:
-            conf.add_op_transformer(op_transformer)
-    return conf
-
-
 def _convert_python_package_version_to_image_tag(version: typing.Optional[str]):
     return (
         version.replace("+", "-").replace("0.0.0-", "") if version is not None else None
@@ -1015,17 +977,27 @@ def get_ui_url(project, uid=None):
     return url


+def get_model_endpoint_url(project, model_name, model_endpoint_id):
+    url = ""
+    if mlrun.mlconf.resolve_ui_url():
+        url = f"{mlrun.mlconf.resolve_ui_url()}/{mlrun.mlconf.ui.projects_prefix}/{project}/models"
+        if model_name:
+            url += f"/model-endpoints/{model_name}/{model_endpoint_id}/overview"
+    return url
+
+
 def get_workflow_url(project, id=None):
     url = ""
     if mlrun.mlconf.resolve_ui_url():
-        url =
-            mlrun.mlconf.resolve_ui_url()
+        url = (
+            f"{mlrun.mlconf.resolve_ui_url()}/{mlrun.mlconf.ui.projects_prefix}"
+            f"/{project}/jobs/monitor-workflows/workflow/{id}"
         )
     return url


 def are_strings_in_exception_chain_messages(
-    exception: Exception, strings_list
+    exception: Exception, strings_list: list[str]
 ) -> bool:
     while exception is not None:
         if any([string in str(exception) for string in strings_list]):
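For orientation, a minimal usage sketch of the get_model_endpoint_url helper added above (the project, model and endpoint names are illustrative, and the host part depends on the UI URL resolved from the configuration):

    from mlrun.utils.helpers import get_model_endpoint_url

    url = get_model_endpoint_url("my-project", "churn-model", "ep-1234")
    # roughly "<ui-url>/<projects-prefix>/my-project/models/model-endpoints/churn-model/ep-1234/overview"
    # when a UI URL is configured, otherwise an empty string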
@@ -1047,16 +1019,35 @@ def create_class(pkg_class: str):
     return class_


-def create_function(pkg_func: str):
+def create_function(pkg_func: str, reload_modules: bool = False):
     """Create a function from a package.module.function string

     :param pkg_func: full function location,
                      e.g. "sklearn.feature_selection.f_classif"
+    :param reload_modules: reload the function again.
     """
     splits = pkg_func.split(".")
     pkg_module = ".".join(splits[:-1])
     cb_fname = splits[-1]
     pkg_module = __import__(pkg_module, fromlist=[cb_fname])
+
+    if reload_modules:
+        # Even though the function appears in the modules list, we need to reload
+        # the code again because it may have changed
+        try:
+            logger.debug("Reloading module", module=pkg_func)
+            _reload(
+                pkg_module,
+                max_recursion_depth=mlrun.mlconf.function.spec.reload_max_recursion_depth,
+            )
+        except Exception as exc:
+            logger.warning(
+                "Failed to reload module. Not all associated modules can be reloaded, import them manually."
+                "Or, with Jupyter, restart the Python kernel.",
+                module=pkg_func,
+                err=mlrun.errors.err_to_str(exc),
+            )
+
     function_ = getattr(pkg_module, cb_fname)
     return function_

@@ -1114,8 +1105,14 @@ def get_class(class_name, namespace=None):
     return class_object


-def get_function(function,
-    """
+def get_function(function, namespaces, reload_modules: bool = False):
+    """Return function callable object from function name string
+
+    :param function: path to the function ([class_name::]function)
+    :param namespaces: one or list of namespaces/modules to search the function in
+    :param reload_modules: reload the function again
+    :return: function handler (callable)
+    """
     if callable(function):
         return function

@@ -1124,12 +1121,12 @@ def get_function(function, namespace):
         if not function.endswith(")"):
             raise ValueError('function expression must start with "(" and end with ")"')
         return eval("lambda event: " + function[1:-1], {}, {})
-    function_object = _search_in_namespaces(function,
+    function_object = _search_in_namespaces(function, namespaces)
     if function_object is not None:
         return function_object

     try:
-        function_object = create_function(function)
+        function_object = create_function(function, reload_modules)
     except (ImportError, ValueError) as exc:
         raise ImportError(
             f"state/function init failed, handler '{function}' not found"
@@ -1138,18 +1135,24 @@ def get_function(function, namespace):


 def get_handler_extended(
-    handler_path: str,
+    handler_path: str,
+    context=None,
+    class_args: dict = None,
+    namespaces=None,
+    reload_modules: bool = False,
 ):
-    """
+    """Get function handler from [class_name::]handler string

     :param handler_path: path to the function ([class_name::]handler)
     :param context: MLRun function/job client context
     :param class_args: optional dict of class init kwargs
     :param namespaces: one or list of namespaces/modules to search the handler in
+    :param reload_modules: reload the function again
     :return: function handler (callable)
     """
+    class_args = class_args or {}
     if "::" not in handler_path:
-        return get_function(handler_path, namespaces)
+        return get_function(handler_path, namespaces, reload_modules)

     splitted = handler_path.split("::")
     class_path = splitted[0].strip()
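A minimal sketch of how the new reload_modules flag could be used (the handler path is hypothetical; per the code above, the reload recurses through sub-modules up to mlrun.mlconf.function.spec.reload_max_recursion_depth and only logs a warning if it fails):

    from mlrun.utils.helpers import get_handler_extended

    # forces a re-import of the handler's module tree before resolving the callable,
    # picking up code edited in a live Jupyter kernel
    handler = get_handler_extended("my_package.my_module.my_handler", reload_modules=True)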
@@ -1224,7 +1227,7 @@ def calculate_dataframe_hash(dataframe: pandas.DataFrame):
     return hashlib.sha1(pandas.util.hash_pandas_object(dataframe).values).hexdigest()


-def template_artifact_path(artifact_path, project, run_uid=
+def template_artifact_path(artifact_path, project, run_uid=None):
     """
     Replace {{run.uid}} with the run uid and {{project}} with the project name in the artifact path.
     If no run uid is provided, the word `project` will be used instead as it is assumed to be a project
@@ -1232,6 +1235,7 @@ def template_artifact_path(artifact_path, project, run_uid="project"):
     """
     if not artifact_path:
         return artifact_path
+    run_uid = run_uid or "project"
     artifact_path = artifact_path.replace("{{run.uid}}", run_uid)
     artifact_path = _fill_project_path_template(artifact_path, project)
     return artifact_path
@@ -1291,13 +1295,6 @@ def str_to_timestamp(time_str: str, now_time: Timestamp = None):
     return Timestamp(time_str)


-def is_legacy_artifact(artifact):
-    if isinstance(artifact, dict):
-        return "metadata" not in artifact
-    else:
-        return not hasattr(artifact, "metadata")
-
-
 def is_link_artifact(artifact):
     if isinstance(artifact, dict):
         return (
@@ -1307,7 +1304,7 @@ def is_link_artifact(artifact):
     return artifact.kind == mlrun.common.schemas.ArtifactCategories.link.value


-def format_run(run:
+def format_run(run: PipelineRun, with_project=False) -> dict:
     fields = [
         "id",
         "name",
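A small sketch of the changed default (the path is illustrative): run_uid now defaults to None and the fallback to the literal "project" happens inside the helper, so the observable result stays the same:

    from mlrun.utils.helpers import template_artifact_path

    template_artifact_path("v3io:///projects/{{project}}/artifacts/{{run.uid}}", "demo")
    # -> "v3io:///projects/demo/artifacts/project"  (no run uid supplied)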
@@ -1344,17 +1341,17 @@ def format_run(run: dict, with_project=False) -> dict:
     # pipelines are yet to populate the status or workflow has failed
     # as observed https://jira.iguazeng.com/browse/ML-5195
     # set to unknown to ensure a status is returned
-    if run
-        run["status"] = inflection.titleize(
+    if run.get("status", None) is None:
+        run["status"] = inflection.titleize(
+            mlrun.common.runtimes.constants.RunStates.unknown
+        )

     return run


 def get_in_artifact(artifact: dict, key, default=None, raise_on_missing=False):
     """artifact can be dict or Artifact object"""
-    if
-        return artifact.get(key, default)
-    elif key == "kind":
+    if key == "kind":
         return artifact.get(key, default)
     else:
         for block in ["metadata", "spec", "status"]:
@@ -1596,3 +1593,80 @@ def get_serving_spec():
     )
     spec = json.loads(data)
     return spec
+
+
+def additional_filters_warning(additional_filters, class_name):
+    if additional_filters and any(additional_filters):
+        mlrun.utils.logger.warn(
+            f"additional_filters parameter is not supported in {class_name},"
+            f" parameter has been ignored."
+        )
+
+
+def validate_component_version_compatibility(
+    component_name: typing.Literal["iguazio", "nuclio"], *min_versions: str
+):
+    """
+    :param component_name: Name of the component to validate compatibility for.
+    :param min_versions: Valid minimum version(s) required, assuming no 2 versions has equal major and minor.
+    """
+    parsed_min_versions = [
+        semver.VersionInfo.parse(min_version) for min_version in min_versions
+    ]
+    parsed_current_version = None
+    component_current_version = None
+    try:
+        if component_name == "iguazio":
+            component_current_version = mlrun.mlconf.igz_version
+            parsed_current_version = mlrun.mlconf.get_parsed_igz_version()
+
+            if parsed_current_version:
+                # ignore pre-release and build metadata, as iguazio version always has them, and we only care about the
+                # major, minor, and patch versions
+                parsed_current_version = semver.VersionInfo.parse(
+                    f"{parsed_current_version.major}.{parsed_current_version.minor}.{parsed_current_version.patch}"
+                )
+        if component_name == "nuclio":
+            component_current_version = mlrun.mlconf.nuclio_version
+            parsed_current_version = semver.VersionInfo.parse(
+                mlrun.mlconf.nuclio_version
+            )
+        if not parsed_current_version:
+            return True
+    except ValueError:
+        # only log when version is set but invalid
+        if component_current_version:
+            logger.warning(
+                "Unable to parse current version, assuming compatibility",
+                component_name=component_name,
+                current_version=component_current_version,
+                min_versions=min_versions,
+            )
+        return True
+
+    parsed_min_versions.sort(reverse=True)
+    for parsed_min_version in parsed_min_versions:
+        if parsed_current_version < parsed_min_version:
+            return False
+    return True
+
+
+def format_alert_summary(
+    alert: mlrun.common.schemas.AlertConfig, event_data: mlrun.common.schemas.Event
+) -> str:
+    result = alert.summary.replace("{{project}}", alert.project)
+    result = result.replace("{{name}}", alert.name)
+    result = result.replace("{{entity}}", event_data.entity.ids[0])
+    return result
+
+
+def _reload(module, max_recursion_depth):
+    """Recursively reload modules."""
+    if max_recursion_depth <= 0:
+        return
+
+    reload(module)
+    for attribute_name in dir(module):
+        attribute = getattr(module, attribute_name)
+        if type(attribute) is ModuleType:
+            _reload(attribute, max_recursion_depth - 1)
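A hedged usage sketch of the new version gate (the version strings are illustrative): the helper returns False when the current Iguazio/Nuclio version parses to something older than a required minimum, and assumes compatibility when the version is unset or cannot be parsed:

    from mlrun.utils.helpers import validate_component_version_compatibility

    if validate_component_version_compatibility("nuclio", "1.12.10", "1.13.1"):
        ...  # safe to rely on behaviour that needs a recent Nuclio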
mlrun/utils/logger.py
CHANGED
@@ -13,6 +13,7 @@
 # limitations under the License.

 import logging
+import typing
 from enum import Enum
 from sys import stdout
 from traceback import format_exception
@@ -92,7 +93,25 @@ class HumanReadableFormatter(_BaseFormatter):

 class HumanReadableExtendedFormatter(HumanReadableFormatter):
     def format(self, record) -> str:
-        more =
+        more = ""
+        record_with = self._record_with(record)
+        if record_with:
+
+            def _format_value(val):
+                formatted_val = (
+                    val
+                    if isinstance(val, str)
+                    else str(orjson.loads(self._json_dump(val)))
+                )
+                return (
+                    formatted_val.replace("\n", "\n\t\t")
+                    if len(formatted_val) < 4096
+                    else repr(formatted_val)
+                )
+
+            more = "\n\t" + "\n\t".join(
+                [f"{key}: {_format_value(val)}" for key, val in record_with.items()]
+            )
         return (
             "> "
             f"{self.formatTime(record, self.datefmt)} "
@@ -221,14 +240,27 @@ class FormatterKinds(Enum):
     JSON = "json"


-def
+def resolve_formatter_by_kind(
+    formatter_kind: FormatterKinds,
+) -> type[
+    typing.Union[HumanReadableFormatter, HumanReadableExtendedFormatter, JSONFormatter]
+]:
     return {
-        FormatterKinds.HUMAN: HumanReadableFormatter
-        FormatterKinds.HUMAN_EXTENDED: HumanReadableExtendedFormatter
-        FormatterKinds.JSON: JSONFormatter
+        FormatterKinds.HUMAN: HumanReadableFormatter,
+        FormatterKinds.HUMAN_EXTENDED: HumanReadableExtendedFormatter,
+        FormatterKinds.JSON: JSONFormatter,
     }[formatter_kind]


+def create_test_logger(name: str = "mlrun", stream: IO[str] = stdout) -> Logger:
+    return create_logger(
+        level="debug",
+        formatter_kind=FormatterKinds.HUMAN_EXTENDED.name,
+        name=name,
+        stream=stream,
+    )
+
+
 def create_logger(
     level: Optional[str] = None,
     formatter_kind: str = FormatterKinds.HUMAN.name,
@@ -243,11 +275,11 @@ def create_logger(
     logger_instance = Logger(level, name=name, propagate=False)

     # resolve formatter
-    formatter_instance =
+    formatter_instance = resolve_formatter_by_kind(
         FormatterKinds(formatter_kind.lower())
     )

     # set handler
-    logger_instance.set_handler("default", stream or stdout, formatter_instance)
+    logger_instance.set_handler("default", stream or stdout, formatter_instance())

     return logger_instance
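A brief sketch of the new create_test_logger helper: it wraps create_logger with a debug level and the human-extended formatter, so keyword fields are rendered on indented lines (the logger name and kwargs below are illustrative):

    from mlrun.utils.logger import create_test_logger

    logger = create_test_logger(name="my-test")
    logger.debug("loaded dataset", rows=100, source="s3://bucket/key")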
mlrun/utils/notifications/notification/__init__.py
CHANGED
@@ -51,14 +51,19 @@ class NotificationTypes(str, enum.Enum):
             self.console: [self.ipython],
         }.get(self, [])

+    @classmethod
+    def local(cls) -> list[str]:
+        return [
+            cls.console,
+            cls.ipython,
+        ]
+
     @classmethod
     def all(cls) -> list[str]:
-        return
-
-
-
-
-
-
-        ]
-        )
+        return [
+            cls.console,
+            cls.git,
+            cls.ipython,
+            cls.slack,
+            cls.webhook,
+        ]
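A short sketch of the new classmethod (the import path follows the file location above): local() returns the in-process notification kinds, while all() keeps returning every supported kind:

    from mlrun.utils.notifications.notification import NotificationTypes

    NotificationTypes.local()  # [console, ipython]
    NotificationTypes.all()    # [console, git, ipython, slack, webhook]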
mlrun/utils/notifications/notification/base.py
CHANGED
@@ -77,7 +77,7 @@ class NotificationBase:
             return f"[{severity}] {message}"
         return (
             f"[{severity}] {message} for project {alert.project} "
-            f"UID {event_data.entity.
+            f"UID {event_data.entity.ids[0]}. Values {event_data.value_dict}"
         )

         if not runs:
mlrun/utils/notifications/notification/slack.py
CHANGED
@@ -32,6 +32,7 @@ class SlackNotification(NotificationBase):
         "completed": ":smiley:",
         "running": ":man-running:",
         "error": ":x:",
+        "skipped": ":zzz:",
     }

     async def push(
@@ -135,8 +136,16 @@ class SlackNotification(NotificationBase):
         line = [
             self._get_slack_row(f":bell: {alert.name} alert has occurred"),
             self._get_slack_row(f"*Project:*\n{alert.project}"),
-            self._get_slack_row(f"*
+            self._get_slack_row(f"*ID:*\n{event_data.entity.ids[0]}"),
         ]
+
+        if alert.summary:
+            line.append(
+                self._get_slack_row(
+                    f"*Summary:*\n{mlrun.utils.helpers.format_alert_summary(alert, event_data)}"
+                )
+            )
+
         if event_data.value_dict:
             data_lines = []
             for key, value in event_data.value_dict.items():
@@ -144,32 +153,50 @@ class SlackNotification(NotificationBase):
             data_text = "\n".join(data_lines)
             line.append(self._get_slack_row(f"*Event data:*\n{data_text}"))

-        if
-
+        if (
+            event_data.entity.kind == mlrun.common.schemas.alert.EventEntityKind.JOB
+        ): # JOB entity
+            uid = event_data.value_dict.get("uid")
+            url = mlrun.utils.helpers.get_ui_url(alert.project, uid)
+            overview_type = "Job overview"
+        else: # MODEL entity
+            model_name = event_data.value_dict.get("model")
+            model_endpoint_id = event_data.value_dict.get("model_endpoint_id")
+            url = mlrun.utils.helpers.get_model_endpoint_url(
+                alert.project, model_name, model_endpoint_id
+            )
+            overview_type = "Model endpoint"
+
+        line.append(self._get_slack_row(f"*Overview:*\n<{url}|*{overview_type}*>"))

         return line

     def _get_run_line(self, run: dict) -> dict:
         meta = run["metadata"]
         url = mlrun.utils.helpers.get_ui_url(meta.get("project"), meta.get("uid"))
-
+
+        # Only show the URL if the run is not a function (serving or mlrun function)
+        kind = run.get("step_kind")
+        state = run["status"].get("state", "")
+        if state != "skipped" and (url and not kind or kind == "run"):
             line = f'<{url}|*{meta.get("name")}*>'
         else:
             line = meta.get("name")
-
+        if kind:
+            line = f'{line} *({run.get("step_kind", run.get("kind", ""))})*'
         line = f'{self.emojis.get(state, ":question:")} {line}'
         return self._get_slack_row(line)

     def _get_run_result(self, run: dict) -> dict:
         state = run["status"].get("state", "")
         if state == "error":
-            error_status = run["status"].get("error", "")
+            error_status = run["status"].get("error", "") or state
             result = f"*{error_status}*"
         else:
             result = mlrun.utils.helpers.dict_to_str(
                 run["status"].get("results", {}), ", "
             )
-        return self._get_slack_row(result or
+        return self._get_slack_row(result or state)

     @staticmethod
     def _get_slack_row(text: str) -> dict:
mlrun/utils/notifications/notification/webhook.py
CHANGED
@@ -57,7 +57,7 @@ class WebhookNotification(NotificationBase):
             request_body["alert"] = alert.dict()
             if event_data:
                 request_body["value"] = event_data.value_dict
-                request_body["id"] = event_data.entity.
+                request_body["id"] = event_data.entity.ids[0]

         if custom_html:
             request_body["custom_html"] = custom_html