mlrun 1.7.0rc14__py3-none-any.whl → 1.7.0rc22__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (160)
  1. mlrun/__init__.py +10 -1
  2. mlrun/__main__.py +23 -111
  3. mlrun/alerts/__init__.py +15 -0
  4. mlrun/alerts/alert.py +169 -0
  5. mlrun/api/schemas/__init__.py +4 -3
  6. mlrun/artifacts/__init__.py +8 -3
  7. mlrun/artifacts/base.py +36 -253
  8. mlrun/artifacts/dataset.py +9 -190
  9. mlrun/artifacts/manager.py +46 -42
  10. mlrun/artifacts/model.py +9 -141
  11. mlrun/artifacts/plots.py +14 -375
  12. mlrun/common/constants.py +65 -3
  13. mlrun/common/formatters/__init__.py +19 -0
  14. mlrun/{runtimes/mpijob/v1alpha1.py → common/formatters/artifact.py} +6 -14
  15. mlrun/common/formatters/base.py +113 -0
  16. mlrun/common/formatters/function.py +46 -0
  17. mlrun/common/formatters/pipeline.py +53 -0
  18. mlrun/common/formatters/project.py +51 -0
  19. mlrun/{runtimes → common/runtimes}/constants.py +32 -4
  20. mlrun/common/schemas/__init__.py +10 -5
  21. mlrun/common/schemas/alert.py +92 -11
  22. mlrun/common/schemas/api_gateway.py +56 -0
  23. mlrun/common/schemas/artifact.py +15 -5
  24. mlrun/common/schemas/auth.py +2 -0
  25. mlrun/common/schemas/client_spec.py +1 -0
  26. mlrun/common/schemas/frontend_spec.py +1 -0
  27. mlrun/common/schemas/function.py +4 -0
  28. mlrun/common/schemas/model_monitoring/__init__.py +15 -3
  29. mlrun/common/schemas/model_monitoring/constants.py +58 -7
  30. mlrun/common/schemas/model_monitoring/grafana.py +9 -5
  31. mlrun/common/schemas/model_monitoring/model_endpoints.py +86 -2
  32. mlrun/common/schemas/pipeline.py +0 -9
  33. mlrun/common/schemas/project.py +5 -11
  34. mlrun/common/types.py +1 -0
  35. mlrun/config.py +30 -9
  36. mlrun/data_types/to_pandas.py +9 -9
  37. mlrun/datastore/base.py +41 -9
  38. mlrun/datastore/datastore.py +6 -2
  39. mlrun/datastore/datastore_profile.py +56 -4
  40. mlrun/datastore/inmem.py +2 -2
  41. mlrun/datastore/redis.py +2 -2
  42. mlrun/datastore/s3.py +5 -0
  43. mlrun/datastore/sources.py +147 -7
  44. mlrun/datastore/store_resources.py +7 -7
  45. mlrun/datastore/targets.py +110 -42
  46. mlrun/datastore/utils.py +42 -0
  47. mlrun/db/base.py +54 -10
  48. mlrun/db/httpdb.py +282 -79
  49. mlrun/db/nopdb.py +52 -10
  50. mlrun/errors.py +11 -0
  51. mlrun/execution.py +26 -9
  52. mlrun/feature_store/__init__.py +0 -2
  53. mlrun/feature_store/api.py +12 -47
  54. mlrun/feature_store/feature_set.py +9 -0
  55. mlrun/feature_store/feature_vector.py +8 -0
  56. mlrun/feature_store/ingestion.py +7 -6
  57. mlrun/feature_store/retrieval/base.py +9 -4
  58. mlrun/feature_store/retrieval/conversion.py +9 -9
  59. mlrun/feature_store/retrieval/dask_merger.py +2 -0
  60. mlrun/feature_store/retrieval/job.py +9 -3
  61. mlrun/feature_store/retrieval/local_merger.py +2 -0
  62. mlrun/feature_store/retrieval/spark_merger.py +16 -0
  63. mlrun/frameworks/__init__.py +6 -0
  64. mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +7 -12
  65. mlrun/frameworks/parallel_coordinates.py +2 -1
  66. mlrun/frameworks/tf_keras/__init__.py +4 -1
  67. mlrun/k8s_utils.py +10 -11
  68. mlrun/launcher/base.py +4 -3
  69. mlrun/launcher/client.py +5 -3
  70. mlrun/launcher/local.py +12 -2
  71. mlrun/launcher/remote.py +9 -2
  72. mlrun/lists.py +6 -2
  73. mlrun/model.py +47 -21
  74. mlrun/model_monitoring/__init__.py +1 -1
  75. mlrun/model_monitoring/api.py +42 -18
  76. mlrun/model_monitoring/application.py +5 -305
  77. mlrun/model_monitoring/applications/__init__.py +11 -0
  78. mlrun/model_monitoring/applications/_application_steps.py +157 -0
  79. mlrun/model_monitoring/applications/base.py +280 -0
  80. mlrun/model_monitoring/applications/context.py +214 -0
  81. mlrun/model_monitoring/applications/evidently_base.py +211 -0
  82. mlrun/model_monitoring/applications/histogram_data_drift.py +132 -91
  83. mlrun/model_monitoring/applications/results.py +99 -0
  84. mlrun/model_monitoring/controller.py +3 -1
  85. mlrun/model_monitoring/db/__init__.py +2 -0
  86. mlrun/model_monitoring/db/stores/__init__.py +0 -2
  87. mlrun/model_monitoring/db/stores/base/store.py +22 -37
  88. mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +43 -21
  89. mlrun/model_monitoring/db/stores/sqldb/models/base.py +39 -8
  90. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +27 -7
  91. mlrun/model_monitoring/db/stores/sqldb/models/sqlite.py +5 -0
  92. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +246 -224
  93. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +232 -216
  94. mlrun/model_monitoring/db/tsdb/__init__.py +100 -0
  95. mlrun/model_monitoring/db/tsdb/base.py +316 -0
  96. mlrun/model_monitoring/db/tsdb/helpers.py +30 -0
  97. mlrun/model_monitoring/db/tsdb/tdengine/__init__.py +15 -0
  98. mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +240 -0
  99. mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +45 -0
  100. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +401 -0
  101. mlrun/model_monitoring/db/tsdb/v3io/__init__.py +15 -0
  102. mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +117 -0
  103. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +658 -0
  104. mlrun/model_monitoring/evidently_application.py +6 -118
  105. mlrun/model_monitoring/helpers.py +63 -1
  106. mlrun/model_monitoring/model_endpoint.py +3 -2
  107. mlrun/model_monitoring/stream_processing.py +57 -216
  108. mlrun/model_monitoring/writer.py +134 -124
  109. mlrun/package/__init__.py +13 -1
  110. mlrun/package/packagers/__init__.py +6 -1
  111. mlrun/package/utils/_formatter.py +2 -2
  112. mlrun/platforms/__init__.py +10 -9
  113. mlrun/platforms/iguazio.py +21 -202
  114. mlrun/projects/operations.py +24 -12
  115. mlrun/projects/pipelines.py +79 -102
  116. mlrun/projects/project.py +271 -103
  117. mlrun/render.py +15 -14
  118. mlrun/run.py +16 -46
  119. mlrun/runtimes/__init__.py +6 -3
  120. mlrun/runtimes/base.py +14 -7
  121. mlrun/runtimes/daskjob.py +1 -0
  122. mlrun/runtimes/databricks_job/databricks_runtime.py +1 -0
  123. mlrun/runtimes/databricks_job/databricks_wrapper.py +1 -1
  124. mlrun/runtimes/funcdoc.py +0 -28
  125. mlrun/runtimes/kubejob.py +2 -1
  126. mlrun/runtimes/local.py +12 -3
  127. mlrun/runtimes/mpijob/__init__.py +0 -20
  128. mlrun/runtimes/mpijob/v1.py +1 -1
  129. mlrun/runtimes/nuclio/api_gateway.py +194 -84
  130. mlrun/runtimes/nuclio/application/application.py +170 -8
  131. mlrun/runtimes/nuclio/function.py +39 -49
  132. mlrun/runtimes/pod.py +16 -36
  133. mlrun/runtimes/remotesparkjob.py +9 -3
  134. mlrun/runtimes/sparkjob/spark3job.py +1 -1
  135. mlrun/runtimes/utils.py +6 -45
  136. mlrun/serving/__init__.py +8 -1
  137. mlrun/serving/server.py +2 -1
  138. mlrun/serving/states.py +51 -8
  139. mlrun/serving/utils.py +19 -11
  140. mlrun/serving/v2_serving.py +5 -1
  141. mlrun/track/tracker.py +2 -1
  142. mlrun/utils/async_http.py +25 -5
  143. mlrun/utils/helpers.py +157 -83
  144. mlrun/utils/logger.py +39 -7
  145. mlrun/utils/notifications/notification/__init__.py +14 -9
  146. mlrun/utils/notifications/notification/base.py +1 -1
  147. mlrun/utils/notifications/notification/slack.py +34 -7
  148. mlrun/utils/notifications/notification/webhook.py +1 -1
  149. mlrun/utils/notifications/notification_pusher.py +147 -16
  150. mlrun/utils/regex.py +9 -0
  151. mlrun/utils/v3io_clients.py +0 -1
  152. mlrun/utils/version/version.json +2 -2
  153. {mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc22.dist-info}/METADATA +14 -6
  154. {mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc22.dist-info}/RECORD +158 -138
  155. mlrun/kfpops.py +0 -865
  156. mlrun/platforms/other.py +0 -305
  157. {mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc22.dist-info}/LICENSE +0 -0
  158. {mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc22.dist-info}/WHEEL +0 -0
  159. {mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc22.dist-info}/entry_points.txt +0 -0
  160. {mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc22.dist-info}/top_level.txt +0 -0
mlrun/utils/helpers.py CHANGED
@@ -26,7 +26,7 @@ import sys
 import typing
 import warnings
 from datetime import datetime, timezone
-from importlib import import_module
+from importlib import import_module, reload
 from os import path
 from types import ModuleType
 from typing import Any, Optional
@@ -39,7 +39,7 @@ import pandas
 import semver
 import yaml
 from dateutil import parser
-from deprecated import deprecated
+from mlrun_pipelines.models import PipelineRun
 from pandas._libs.tslibs.timestamps import Timedelta, Timestamp
 from yaml.representer import RepresenterError
 
@@ -76,19 +76,6 @@ class OverwriteBuildParamsWarning(FutureWarning):
     pass
 
 
-# TODO: remove in 1.7.0
-@deprecated(
-    version="1.5.0",
-    reason="'parse_versioned_object_uri' will be removed from this file in 1.7.0, use "
-    "'mlrun.common.helpers.parse_versioned_object_uri' instead",
-    category=FutureWarning,
-)
-def parse_versioned_object_uri(uri: str, default_project: str = ""):
-    return mlrun.common.helpers.parse_versioned_object_uri(
-        uri=uri, default_project=default_project
-    )
-
-
 class StorePrefix:
     """map mlrun store objects to prefixes"""
 
@@ -119,14 +106,9 @@ class StorePrefix:
 
 
 def get_artifact_target(item: dict, project=None):
-    if is_legacy_artifact(item):
-        db_key = item.get("db_key")
-        project_str = project or item.get("project")
-        tree = item.get("tree")
-    else:
-        db_key = item["spec"].get("db_key")
-        project_str = project or item["metadata"].get("project")
-        tree = item["metadata"].get("tree")
+    db_key = item["spec"].get("db_key")
+    project_str = project or item["metadata"].get("project")
+    tree = item["metadata"].get("tree")
 
     kind = item.get("kind")
     if kind in ["dataset", "model", "artifact"] and db_key:
@@ -135,11 +117,15 @@ def get_artifact_target(item: dict, project=None):
             target = f"{target}@{tree}"
         return target
 
-    return (
-        item.get("target_path")
-        if is_legacy_artifact(item)
-        else item["spec"].get("target_path")
-    )
+    return item["spec"].get("target_path")
+
+
+# TODO: left for migrations testing purposes. Remove in 1.8.0.
+def is_legacy_artifact(artifact):
+    if isinstance(artifact, dict):
+        return "metadata" not in artifact
+    else:
+        return not hasattr(artifact, "metadata")
 
 
 logger = create_logger(config.log_level, config.log_formatter, "mlrun", sys.stdout)
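
get_artifact_target now assumes the structured artifact layout only. A minimal sketch of the dict shape it expects (key, project, and tree values here are hypothetical): when db_key is set for a dataset/model/artifact it resolves a store URI (suffixed with @tree when a tree exists), otherwise it falls back to spec.target_path.

    from mlrun.utils.helpers import get_artifact_target

    artifact = {
        "kind": "model",
        "metadata": {"project": "my-project", "tree": "abc123"},
        "spec": {"db_key": "my-model", "target_path": "s3://bucket/models/model.pkl"},
    }
    # store URI built from db_key/project/tree; spec.target_path is the fallback
    print(get_artifact_target(artifact))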
@@ -195,8 +181,12 @@ def verify_field_regex(
             )
             if mode == mlrun.common.schemas.RegexMatchModes.all:
                 if raise_on_failure:
+                    if len(field_name) > max_chars:
+                        field_name = field_name[:max_chars] + "...truncated"
+                    if len(field_value) > max_chars:
+                        field_value = field_value[:max_chars] + "...truncated"
                     raise mlrun.errors.MLRunInvalidArgumentError(
-                        f"Field '{field_name[:max_chars]}' is malformed. '{field_value[:max_chars]}' "
+                        f"Field '{field_name}' is malformed. '{field_value}' "
                         f"does not match required pattern: {pattern}"
                     )
                 return False
@@ -669,7 +659,7 @@ def parse_artifact_uri(uri, default_project=""):
        [3] = tag
        [4] = tree
    """
-    uri_pattern = r"^((?P<project>.*)/)?(?P<key>.*?)(\#(?P<iteration>.*?))?(:(?P<tag>.*?))?(@(?P<tree>.*))?$"
+    uri_pattern = mlrun.utils.regex.artifact_uri_pattern
     match = re.match(uri_pattern, uri)
     if not match:
         raise ValueError(
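
For reference, a small sketch of the URI layout the shared artifact_uri_pattern accepts; the URI below is made up, and the index comment follows the docstring fragment above.

    from mlrun.utils.helpers import parse_artifact_uri

    parsed = parse_artifact_uri("my-project/my-dataset#0:latest@abc123", default_project="default")
    # per the docstring above, parsed[3] is the tag ("latest") and parsed[4] the tree ("abc123")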
@@ -801,34 +791,6 @@ def gen_html_table(header, rows=None):
     return style + '<table class="tg">\n' + out + "</table>\n\n"
 
 
-def new_pipe_metadata(
-    artifact_path: str = None,
-    cleanup_ttl: int = None,
-    op_transformers: list[typing.Callable] = None,
-):
-    from kfp.dsl import PipelineConf
-
-    def _set_artifact_path(task):
-        from kubernetes import client as k8s_client
-
-        task.add_env_variable(
-            k8s_client.V1EnvVar(name="MLRUN_ARTIFACT_PATH", value=artifact_path)
-        )
-        return task
-
-    conf = PipelineConf()
-    cleanup_ttl = cleanup_ttl or int(config.kfp_ttl)
-
-    if cleanup_ttl:
-        conf.set_ttl_seconds_after_finished(cleanup_ttl)
-    if artifact_path:
-        conf.add_op_transformer(_set_artifact_path)
-    if op_transformers:
-        for op_transformer in op_transformers:
-            conf.add_op_transformer(op_transformer)
-    return conf
-
-
 def _convert_python_package_version_to_image_tag(version: typing.Optional[str]):
     return (
         version.replace("+", "-").replace("0.0.0-", "") if version is not None else None
@@ -1015,17 +977,27 @@ def get_ui_url(project, uid=None):
     return url
 
 
+def get_model_endpoint_url(project, model_name, model_endpoint_id):
+    url = ""
+    if mlrun.mlconf.resolve_ui_url():
+        url = f"{mlrun.mlconf.resolve_ui_url()}/{mlrun.mlconf.ui.projects_prefix}/{project}/models"
+        if model_name:
+            url += f"/model-endpoints/{model_name}/{model_endpoint_id}/overview"
+    return url
+
+
 def get_workflow_url(project, id=None):
     url = ""
     if mlrun.mlconf.resolve_ui_url():
-        url = "{}/{}/{}/jobs/monitor-workflows/workflow/{}".format(
-            mlrun.mlconf.resolve_ui_url(), mlrun.mlconf.ui.projects_prefix, project, id
+        url = (
+            f"{mlrun.mlconf.resolve_ui_url()}/{mlrun.mlconf.ui.projects_prefix}"
+            f"/{project}/jobs/monitor-workflows/workflow/{id}"
         )
     return url
 
 
 def are_strings_in_exception_chain_messages(
-    exception: Exception, strings_list=list[str]
+    exception: Exception, strings_list: list[str]
 ) -> bool:
     while exception is not None:
         if any([string in str(exception) for string in strings_list]):
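
A minimal usage sketch for the new get_model_endpoint_url helper (project, model, and endpoint id are hypothetical); it returns an empty string when no UI URL is configured.

    from mlrun.utils.helpers import get_model_endpoint_url

    url = get_model_endpoint_url("my-project", "churn-model", "1234abcd")
    print(url or "UI URL is not configured")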
@@ -1047,16 +1019,35 @@ def create_class(pkg_class: str):
     return class_
 
 
-def create_function(pkg_func: str):
+def create_function(pkg_func: str, reload_modules: bool = False):
     """Create a function from a package.module.function string
 
     :param pkg_func: full function location,
                      e.g. "sklearn.feature_selection.f_classif"
+    :param reload_modules: reload the function again.
     """
     splits = pkg_func.split(".")
     pkg_module = ".".join(splits[:-1])
     cb_fname = splits[-1]
     pkg_module = __import__(pkg_module, fromlist=[cb_fname])
+
+    if reload_modules:
+        # Even though the function appears in the modules list, we need to reload
+        # the code again because it may have changed
+        try:
+            logger.debug("Reloading module", module=pkg_func)
+            _reload(
+                pkg_module,
+                max_recursion_depth=mlrun.mlconf.function.spec.reload_max_recursion_depth,
+            )
+        except Exception as exc:
+            logger.warning(
+                "Failed to reload module. Not all associated modules can be reloaded, import them manually."
+                "Or, with Jupyter, restart the Python kernel.",
+                module=pkg_func,
+                err=mlrun.errors.err_to_str(exc),
+            )
+
     function_ = getattr(pkg_module, cb_fname)
     return function_
 
@@ -1114,8 +1105,14 @@ def get_class(class_name, namespace=None):
     return class_object
 
 
-def get_function(function, namespace):
-    """return function callable object from function name string"""
+def get_function(function, namespaces, reload_modules: bool = False):
+    """Return function callable object from function name string
+
+    :param function: path to the function ([class_name::]function)
+    :param namespaces: one or list of namespaces/modules to search the function in
+    :param reload_modules: reload the function again
+    :return: function handler (callable)
+    """
     if callable(function):
         return function
 
@@ -1124,12 +1121,12 @@ def get_function(function, namespace):
         if not function.endswith(")"):
            raise ValueError('function expression must start with "(" and end with ")"')
         return eval("lambda event: " + function[1:-1], {}, {})
-    function_object = _search_in_namespaces(function, namespace)
+    function_object = _search_in_namespaces(function, namespaces)
     if function_object is not None:
         return function_object
 
     try:
-        function_object = create_function(function)
+        function_object = create_function(function, reload_modules)
     except (ImportError, ValueError) as exc:
         raise ImportError(
             f"state/function init failed, handler '{function}' not found"
@@ -1138,18 +1135,24 @@ def get_handler_extended(
 
 
 def get_handler_extended(
-    handler_path: str, context=None, class_args: dict = {}, namespaces=None
+    handler_path: str,
+    context=None,
+    class_args: dict = None,
+    namespaces=None,
+    reload_modules: bool = False,
 ):
-    """get function handler from [class_name::]handler string
+    """Get function handler from [class_name::]handler string
 
     :param handler_path: path to the function ([class_name::]handler)
     :param context: MLRun function/job client context
     :param class_args: optional dict of class init kwargs
     :param namespaces: one or list of namespaces/modules to search the handler in
+    :param reload_modules: reload the function again
     :return: function handler (callable)
     """
+    class_args = class_args or {}
     if "::" not in handler_path:
-        return get_function(handler_path, namespaces)
+        return get_function(handler_path, namespaces, reload_modules)
 
     splitted = handler_path.split("::")
     class_path = splitted[0].strip()
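
A sketch of the updated get_handler_extended signature; json.dumps is used only as a stand-in handler path, and the class form is noted with a hypothetical class name.

    from mlrun.utils.helpers import get_handler_extended

    # a plain dotted path falls through to get_function/create_function
    dumps = get_handler_extended("json.dumps", reload_modules=False)
    print(dumps({"hello": "world"}))

    # class handlers keep the "[class_name::]handler" form, e.g. "MyHandler::run",
    # with class_args (now defaulting to None instead of a shared mutable {}) passed to the class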
@@ -1224,7 +1227,7 @@ def calculate_dataframe_hash(dataframe: pandas.DataFrame):
     return hashlib.sha1(pandas.util.hash_pandas_object(dataframe).values).hexdigest()
 
 
-def template_artifact_path(artifact_path, project, run_uid="project"):
+def template_artifact_path(artifact_path, project, run_uid=None):
     """
     Replace {{run.uid}} with the run uid and {{project}} with the project name in the artifact path.
     If no run uid is provided, the word `project` will be used instead as it is assumed to be a project
@@ -1232,6 +1235,7 @@ def template_artifact_path(artifact_path, project, run_uid="project"):
     """
     if not artifact_path:
         return artifact_path
+    run_uid = run_uid or "project"
     artifact_path = artifact_path.replace("{{run.uid}}", run_uid)
     artifact_path = _fill_project_path_template(artifact_path, project)
     return artifact_path
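
A short sketch of the behavior after this change (the artifact path below is hypothetical): run_uid now defaults to None and the literal "project" is substituted only when no uid is passed.

    from mlrun.utils.helpers import template_artifact_path

    path = "v3io:///projects/{{project}}/artifacts/{{run.uid}}"
    print(template_artifact_path(path, project="my-project"))                    # {{run.uid}} -> "project"
    print(template_artifact_path(path, project="my-project", run_uid="abc123"))  # {{run.uid}} -> "abc123"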
@@ -1291,13 +1295,6 @@ def str_to_timestamp(time_str: str, now_time: Timestamp = None):
     return Timestamp(time_str)
 
 
-def is_legacy_artifact(artifact):
-    if isinstance(artifact, dict):
-        return "metadata" not in artifact
-    else:
-        return not hasattr(artifact, "metadata")
-
-
 def is_link_artifact(artifact):
     if isinstance(artifact, dict):
         return (
@@ -1307,7 +1304,7 @@ def is_link_artifact(artifact):
     return artifact.kind == mlrun.common.schemas.ArtifactCategories.link.value
 
 
-def format_run(run: dict, with_project=False) -> dict:
+def format_run(run: PipelineRun, with_project=False) -> dict:
     fields = [
         "id",
         "name",
@@ -1344,17 +1341,17 @@ def format_run(run: dict, with_project=False) -> dict:
     # pipelines are yet to populate the status or workflow has failed
     # as observed https://jira.iguazeng.com/browse/ML-5195
     # set to unknown to ensure a status is returned
-    if run["status"] is None:
-        run["status"] = inflection.titleize(mlrun.runtimes.constants.RunStates.unknown)
+    if run.get("status", None) is None:
+        run["status"] = inflection.titleize(
+            mlrun.common.runtimes.constants.RunStates.unknown
+        )
 
     return run
 
 
 def get_in_artifact(artifact: dict, key, default=None, raise_on_missing=False):
     """artifact can be dict or Artifact object"""
-    if is_legacy_artifact(artifact):
-        return artifact.get(key, default)
-    elif key == "kind":
+    if key == "kind":
         return artifact.get(key, default)
     else:
         for block in ["metadata", "spec", "status"]:
@@ -1596,3 +1593,80 @@ def get_serving_spec():
     )
     spec = json.loads(data)
     return spec
+
+
+def additional_filters_warning(additional_filters, class_name):
+    if additional_filters and any(additional_filters):
+        mlrun.utils.logger.warn(
+            f"additional_filters parameter is not supported in {class_name},"
+            f" parameter has been ignored."
+        )
+
+
+def validate_component_version_compatibility(
+    component_name: typing.Literal["iguazio", "nuclio"], *min_versions: str
+):
+    """
+    :param component_name: Name of the component to validate compatibility for.
+    :param min_versions: Valid minimum version(s) required, assuming no 2 versions has equal major and minor.
+    """
+    parsed_min_versions = [
+        semver.VersionInfo.parse(min_version) for min_version in min_versions
+    ]
+    parsed_current_version = None
+    component_current_version = None
+    try:
+        if component_name == "iguazio":
+            component_current_version = mlrun.mlconf.igz_version
+            parsed_current_version = mlrun.mlconf.get_parsed_igz_version()
+
+            if parsed_current_version:
+                # ignore pre-release and build metadata, as iguazio version always has them, and we only care about the
+                # major, minor, and patch versions
+                parsed_current_version = semver.VersionInfo.parse(
+                    f"{parsed_current_version.major}.{parsed_current_version.minor}.{parsed_current_version.patch}"
+                )
+        if component_name == "nuclio":
+            component_current_version = mlrun.mlconf.nuclio_version
+            parsed_current_version = semver.VersionInfo.parse(
+                mlrun.mlconf.nuclio_version
+            )
+        if not parsed_current_version:
+            return True
+    except ValueError:
+        # only log when version is set but invalid
+        if component_current_version:
+            logger.warning(
+                "Unable to parse current version, assuming compatibility",
+                component_name=component_name,
+                current_version=component_current_version,
+                min_versions=min_versions,
+            )
+        return True
+
+    parsed_min_versions.sort(reverse=True)
+    for parsed_min_version in parsed_min_versions:
+        if parsed_current_version < parsed_min_version:
+            return False
+    return True
+
+
+def format_alert_summary(
+    alert: mlrun.common.schemas.AlertConfig, event_data: mlrun.common.schemas.Event
+) -> str:
+    result = alert.summary.replace("{{project}}", alert.project)
+    result = result.replace("{{name}}", alert.name)
+    result = result.replace("{{entity}}", event_data.entity.ids[0])
+    return result
+
+
+def _reload(module, max_recursion_depth):
+    """Recursively reload modules."""
+    if max_recursion_depth <= 0:
+        return
+
+    reload(module)
+    for attribute_name in dir(module):
+        attribute = getattr(module, attribute_name)
+        if type(attribute) is ModuleType:
+            _reload(attribute, max_recursion_depth - 1)
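
A hedged usage sketch for the new validate_component_version_compatibility helper: per the code above it returns True when the configured component version meets the given minimum(s), and assumes compatibility when the version is missing or cannot be parsed. The version number below is arbitrary.

    from mlrun.utils.helpers import validate_component_version_compatibility

    if validate_component_version_compatibility("nuclio", "1.12.10"):
        ...  # safe to rely on the newer Nuclio behavior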
mlrun/utils/logger.py CHANGED
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 import logging
+import typing
 from enum import Enum
 from sys import stdout
 from traceback import format_exception
@@ -92,7 +93,25 @@ class HumanReadableFormatter(_BaseFormatter):
 
 class HumanReadableExtendedFormatter(HumanReadableFormatter):
     def format(self, record) -> str:
-        more = self._resolve_more(record)
+        more = ""
+        record_with = self._record_with(record)
+        if record_with:
+
+            def _format_value(val):
+                formatted_val = (
+                    val
+                    if isinstance(val, str)
+                    else str(orjson.loads(self._json_dump(val)))
+                )
+                return (
+                    formatted_val.replace("\n", "\n\t\t")
+                    if len(formatted_val) < 4096
+                    else repr(formatted_val)
+                )
+
+            more = "\n\t" + "\n\t".join(
+                [f"{key}: {_format_value(val)}" for key, val in record_with.items()]
+            )
         return (
             "> "
             f"{self.formatTime(record, self.datefmt)} "
@@ -221,14 +240,27 @@ class FormatterKinds(Enum):
     JSON = "json"
 
 
-def create_formatter_instance(formatter_kind: FormatterKinds) -> logging.Formatter:
+def resolve_formatter_by_kind(
+    formatter_kind: FormatterKinds,
+) -> type[
+    typing.Union[HumanReadableFormatter, HumanReadableExtendedFormatter, JSONFormatter]
+]:
     return {
-        FormatterKinds.HUMAN: HumanReadableFormatter(),
-        FormatterKinds.HUMAN_EXTENDED: HumanReadableExtendedFormatter(),
-        FormatterKinds.JSON: JSONFormatter(),
+        FormatterKinds.HUMAN: HumanReadableFormatter,
+        FormatterKinds.HUMAN_EXTENDED: HumanReadableExtendedFormatter,
+        FormatterKinds.JSON: JSONFormatter,
     }[formatter_kind]
 
 
+def create_test_logger(name: str = "mlrun", stream: IO[str] = stdout) -> Logger:
+    return create_logger(
+        level="debug",
+        formatter_kind=FormatterKinds.HUMAN_EXTENDED.name,
+        name=name,
+        stream=stream,
+    )
+
+
 def create_logger(
     level: Optional[str] = None,
     formatter_kind: str = FormatterKinds.HUMAN.name,
@@ -243,11 +275,11 @@ def create_logger(
     logger_instance = Logger(level, name=name, propagate=False)
 
     # resolve formatter
-    formatter_instance = create_formatter_instance(
+    formatter_instance = resolve_formatter_by_kind(
         FormatterKinds(formatter_kind.lower())
     )
 
     # set handler
-    logger_instance.set_handler("default", stream or stdout, formatter_instance)
+    logger_instance.set_handler("default", stream or stdout, formatter_instance())
 
     return logger_instance
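
From the caller's side create_logger is unchanged; only the formatter instantiation moved into it. A small sketch (logger name and fields are arbitrary):

    from mlrun.utils.logger import FormatterKinds, create_logger

    logger = create_logger(
        level="debug",
        formatter_kind=FormatterKinds.HUMAN_EXTENDED.name,
        name="example",
    )
    # with the extended formatter, structured fields are rendered on their own indented lines
    logger.info("something happened", key="value", count=3)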
mlrun/utils/notifications/notification/__init__.py CHANGED
@@ -51,14 +51,19 @@ class NotificationTypes(str, enum.Enum):
             self.console: [self.ipython],
         }.get(self, [])
 
+    @classmethod
+    def local(cls) -> list[str]:
+        return [
+            cls.console,
+            cls.ipython,
+        ]
+
     @classmethod
     def all(cls) -> list[str]:
-        return list(
-            [
-                cls.console,
-                cls.git,
-                cls.ipython,
-                cls.slack,
-                cls.webhook,
-            ]
-        )
+        return [
+            cls.console,
+            cls.git,
+            cls.ipython,
+            cls.slack,
+            cls.webhook,
+        ]
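
A quick sketch of the new local() classmethod next to the existing all(); the import path is assumed from the file this hunk belongs to.

    from mlrun.utils.notifications.notification import NotificationTypes

    local_types = NotificationTypes.local()   # console, ipython
    all_types = NotificationTypes.all()       # console, git, ipython, slack, webhook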
mlrun/utils/notifications/notification/base.py CHANGED
@@ -77,7 +77,7 @@ class NotificationBase:
             return f"[{severity}] {message}"
         return (
             f"[{severity}] {message} for project {alert.project} "
-            f"UID {event_data.entity.id}. Value {event_data.value}"
+            f"UID {event_data.entity.ids[0]}. Values {event_data.value_dict}"
         )
 
         if not runs:
mlrun/utils/notifications/notification/slack.py CHANGED
@@ -32,6 +32,7 @@ class SlackNotification(NotificationBase):
         "completed": ":smiley:",
         "running": ":man-running:",
         "error": ":x:",
+        "skipped": ":zzz:",
     }
 
     async def push(
@@ -135,8 +136,16 @@
         line = [
             self._get_slack_row(f":bell: {alert.name} alert has occurred"),
             self._get_slack_row(f"*Project:*\n{alert.project}"),
-            self._get_slack_row(f"*UID:*\n{event_data.entity.id}"),
+            self._get_slack_row(f"*ID:*\n{event_data.entity.ids[0]}"),
         ]
+
+        if alert.summary:
+            line.append(
+                self._get_slack_row(
+                    f"*Summary:*\n{mlrun.utils.helpers.format_alert_summary(alert, event_data)}"
+                )
+            )
+
         if event_data.value_dict:
             data_lines = []
             for key, value in event_data.value_dict.items():
@@ -144,32 +153,50 @@
             data_text = "\n".join(data_lines)
             line.append(self._get_slack_row(f"*Event data:*\n{data_text}"))
 
-        if url := mlrun.utils.helpers.get_ui_url(alert.project, event_data.entity.id):
-            line.append(self._get_slack_row(f"*Overview:*\n<{url}|*Job overview*>"))
+        if (
+            event_data.entity.kind == mlrun.common.schemas.alert.EventEntityKind.JOB
+        ):  # JOB entity
+            uid = event_data.value_dict.get("uid")
+            url = mlrun.utils.helpers.get_ui_url(alert.project, uid)
+            overview_type = "Job overview"
+        else:  # MODEL entity
+            model_name = event_data.value_dict.get("model")
+            model_endpoint_id = event_data.value_dict.get("model_endpoint_id")
+            url = mlrun.utils.helpers.get_model_endpoint_url(
+                alert.project, model_name, model_endpoint_id
+            )
+            overview_type = "Model endpoint"
+
+        line.append(self._get_slack_row(f"*Overview:*\n<{url}|*{overview_type}*>"))
 
         return line
 
     def _get_run_line(self, run: dict) -> dict:
         meta = run["metadata"]
         url = mlrun.utils.helpers.get_ui_url(meta.get("project"), meta.get("uid"))
-        if url:
+
+        # Only show the URL if the run is not a function (serving or mlrun function)
+        kind = run.get("step_kind")
+        state = run["status"].get("state", "")
+        if state != "skipped" and (url and not kind or kind == "run"):
             line = f'<{url}|*{meta.get("name")}*>'
         else:
             line = meta.get("name")
-        state = run["status"].get("state", "")
+        if kind:
+            line = f'{line} *({run.get("step_kind", run.get("kind", ""))})*'
         line = f'{self.emojis.get(state, ":question:")} {line}'
         return self._get_slack_row(line)
 
     def _get_run_result(self, run: dict) -> dict:
         state = run["status"].get("state", "")
         if state == "error":
-            error_status = run["status"].get("error", "")
+            error_status = run["status"].get("error", "") or state
             result = f"*{error_status}*"
         else:
             result = mlrun.utils.helpers.dict_to_str(
                 run["status"].get("results", {}), ", "
             )
-        return self._get_slack_row(result or "None")
+        return self._get_slack_row(result or state)
 
     @staticmethod
     def _get_slack_row(text: str) -> dict:
mlrun/utils/notifications/notification/webhook.py CHANGED
@@ -57,7 +57,7 @@ class WebhookNotification(NotificationBase):
             request_body["alert"] = alert.dict()
         if event_data:
             request_body["value"] = event_data.value_dict
-            request_body["id"] = event_data.entity.id
+            request_body["id"] = event_data.entity.ids[0]
 
         if custom_html:
             request_body["custom_html"] = custom_html