mlrun 1.6.4rc8__py3-none-any.whl → 1.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (305)
  1. mlrun/__init__.py +11 -1
  2. mlrun/__main__.py +40 -122
  3. mlrun/alerts/__init__.py +15 -0
  4. mlrun/alerts/alert.py +248 -0
  5. mlrun/api/schemas/__init__.py +5 -4
  6. mlrun/artifacts/__init__.py +8 -3
  7. mlrun/artifacts/base.py +47 -257
  8. mlrun/artifacts/dataset.py +11 -192
  9. mlrun/artifacts/manager.py +79 -47
  10. mlrun/artifacts/model.py +31 -159
  11. mlrun/artifacts/plots.py +23 -380
  12. mlrun/common/constants.py +74 -1
  13. mlrun/common/db/sql_session.py +5 -5
  14. mlrun/common/formatters/__init__.py +21 -0
  15. mlrun/common/formatters/artifact.py +45 -0
  16. mlrun/common/formatters/base.py +113 -0
  17. mlrun/common/formatters/feature_set.py +33 -0
  18. mlrun/common/formatters/function.py +46 -0
  19. mlrun/common/formatters/pipeline.py +53 -0
  20. mlrun/common/formatters/project.py +51 -0
  21. mlrun/common/formatters/run.py +29 -0
  22. mlrun/common/helpers.py +12 -3
  23. mlrun/common/model_monitoring/helpers.py +9 -5
  24. mlrun/{runtimes → common/runtimes}/constants.py +37 -9
  25. mlrun/common/schemas/__init__.py +31 -5
  26. mlrun/common/schemas/alert.py +202 -0
  27. mlrun/common/schemas/api_gateway.py +196 -0
  28. mlrun/common/schemas/artifact.py +25 -4
  29. mlrun/common/schemas/auth.py +16 -5
  30. mlrun/common/schemas/background_task.py +1 -1
  31. mlrun/common/schemas/client_spec.py +4 -2
  32. mlrun/common/schemas/common.py +7 -4
  33. mlrun/common/schemas/constants.py +3 -0
  34. mlrun/common/schemas/feature_store.py +74 -44
  35. mlrun/common/schemas/frontend_spec.py +15 -7
  36. mlrun/common/schemas/function.py +12 -1
  37. mlrun/common/schemas/hub.py +11 -18
  38. mlrun/common/schemas/memory_reports.py +2 -2
  39. mlrun/common/schemas/model_monitoring/__init__.py +20 -4
  40. mlrun/common/schemas/model_monitoring/constants.py +123 -42
  41. mlrun/common/schemas/model_monitoring/grafana.py +13 -9
  42. mlrun/common/schemas/model_monitoring/model_endpoints.py +101 -54
  43. mlrun/common/schemas/notification.py +71 -14
  44. mlrun/common/schemas/object.py +2 -2
  45. mlrun/{model_monitoring/controller_handler.py → common/schemas/pagination.py} +9 -12
  46. mlrun/common/schemas/pipeline.py +8 -1
  47. mlrun/common/schemas/project.py +69 -18
  48. mlrun/common/schemas/runs.py +7 -1
  49. mlrun/common/schemas/runtime_resource.py +8 -12
  50. mlrun/common/schemas/schedule.py +4 -4
  51. mlrun/common/schemas/tag.py +1 -2
  52. mlrun/common/schemas/workflow.py +12 -4
  53. mlrun/common/types.py +14 -1
  54. mlrun/config.py +154 -69
  55. mlrun/data_types/data_types.py +6 -1
  56. mlrun/data_types/spark.py +2 -2
  57. mlrun/data_types/to_pandas.py +67 -37
  58. mlrun/datastore/__init__.py +6 -8
  59. mlrun/datastore/alibaba_oss.py +131 -0
  60. mlrun/datastore/azure_blob.py +143 -42
  61. mlrun/datastore/base.py +102 -58
  62. mlrun/datastore/datastore.py +34 -13
  63. mlrun/datastore/datastore_profile.py +146 -20
  64. mlrun/datastore/dbfs_store.py +3 -7
  65. mlrun/datastore/filestore.py +1 -4
  66. mlrun/datastore/google_cloud_storage.py +97 -33
  67. mlrun/datastore/hdfs.py +56 -0
  68. mlrun/datastore/inmem.py +6 -3
  69. mlrun/datastore/redis.py +7 -2
  70. mlrun/datastore/s3.py +34 -12
  71. mlrun/datastore/snowflake_utils.py +45 -0
  72. mlrun/datastore/sources.py +303 -111
  73. mlrun/datastore/spark_utils.py +31 -2
  74. mlrun/datastore/store_resources.py +9 -7
  75. mlrun/datastore/storeytargets.py +151 -0
  76. mlrun/datastore/targets.py +453 -176
  77. mlrun/datastore/utils.py +72 -58
  78. mlrun/datastore/v3io.py +6 -1
  79. mlrun/db/base.py +274 -41
  80. mlrun/db/factory.py +1 -1
  81. mlrun/db/httpdb.py +893 -225
  82. mlrun/db/nopdb.py +291 -33
  83. mlrun/errors.py +36 -6
  84. mlrun/execution.py +115 -42
  85. mlrun/feature_store/__init__.py +0 -2
  86. mlrun/feature_store/api.py +65 -73
  87. mlrun/feature_store/common.py +7 -12
  88. mlrun/feature_store/feature_set.py +76 -55
  89. mlrun/feature_store/feature_vector.py +39 -31
  90. mlrun/feature_store/ingestion.py +7 -6
  91. mlrun/feature_store/retrieval/base.py +16 -11
  92. mlrun/feature_store/retrieval/dask_merger.py +2 -0
  93. mlrun/feature_store/retrieval/job.py +13 -4
  94. mlrun/feature_store/retrieval/local_merger.py +2 -0
  95. mlrun/feature_store/retrieval/spark_merger.py +24 -32
  96. mlrun/feature_store/steps.py +45 -34
  97. mlrun/features.py +11 -21
  98. mlrun/frameworks/_common/artifacts_library.py +9 -9
  99. mlrun/frameworks/_common/mlrun_interface.py +5 -5
  100. mlrun/frameworks/_common/model_handler.py +48 -48
  101. mlrun/frameworks/_common/plan.py +5 -6
  102. mlrun/frameworks/_common/producer.py +3 -4
  103. mlrun/frameworks/_common/utils.py +5 -5
  104. mlrun/frameworks/_dl_common/loggers/logger.py +6 -7
  105. mlrun/frameworks/_dl_common/loggers/mlrun_logger.py +9 -9
  106. mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +23 -47
  107. mlrun/frameworks/_ml_common/artifacts_library.py +1 -2
  108. mlrun/frameworks/_ml_common/loggers/logger.py +3 -4
  109. mlrun/frameworks/_ml_common/loggers/mlrun_logger.py +4 -5
  110. mlrun/frameworks/_ml_common/model_handler.py +24 -24
  111. mlrun/frameworks/_ml_common/pkl_model_server.py +2 -2
  112. mlrun/frameworks/_ml_common/plan.py +2 -2
  113. mlrun/frameworks/_ml_common/plans/calibration_curve_plan.py +2 -3
  114. mlrun/frameworks/_ml_common/plans/confusion_matrix_plan.py +2 -3
  115. mlrun/frameworks/_ml_common/plans/dataset_plan.py +3 -3
  116. mlrun/frameworks/_ml_common/plans/feature_importance_plan.py +3 -3
  117. mlrun/frameworks/_ml_common/plans/roc_curve_plan.py +4 -4
  118. mlrun/frameworks/_ml_common/utils.py +4 -4
  119. mlrun/frameworks/auto_mlrun/auto_mlrun.py +9 -9
  120. mlrun/frameworks/huggingface/model_server.py +4 -4
  121. mlrun/frameworks/lgbm/__init__.py +33 -33
  122. mlrun/frameworks/lgbm/callbacks/callback.py +2 -4
  123. mlrun/frameworks/lgbm/callbacks/logging_callback.py +4 -5
  124. mlrun/frameworks/lgbm/callbacks/mlrun_logging_callback.py +4 -5
  125. mlrun/frameworks/lgbm/mlrun_interfaces/booster_mlrun_interface.py +1 -3
  126. mlrun/frameworks/lgbm/mlrun_interfaces/mlrun_interface.py +6 -6
  127. mlrun/frameworks/lgbm/model_handler.py +10 -10
  128. mlrun/frameworks/lgbm/model_server.py +6 -6
  129. mlrun/frameworks/lgbm/utils.py +5 -5
  130. mlrun/frameworks/onnx/dataset.py +8 -8
  131. mlrun/frameworks/onnx/mlrun_interface.py +3 -3
  132. mlrun/frameworks/onnx/model_handler.py +6 -6
  133. mlrun/frameworks/onnx/model_server.py +7 -7
  134. mlrun/frameworks/parallel_coordinates.py +6 -6
  135. mlrun/frameworks/pytorch/__init__.py +18 -18
  136. mlrun/frameworks/pytorch/callbacks/callback.py +4 -5
  137. mlrun/frameworks/pytorch/callbacks/logging_callback.py +17 -17
  138. mlrun/frameworks/pytorch/callbacks/mlrun_logging_callback.py +11 -11
  139. mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +23 -29
  140. mlrun/frameworks/pytorch/callbacks_handler.py +38 -38
  141. mlrun/frameworks/pytorch/mlrun_interface.py +20 -20
  142. mlrun/frameworks/pytorch/model_handler.py +17 -17
  143. mlrun/frameworks/pytorch/model_server.py +7 -7
  144. mlrun/frameworks/sklearn/__init__.py +13 -13
  145. mlrun/frameworks/sklearn/estimator.py +4 -4
  146. mlrun/frameworks/sklearn/metrics_library.py +14 -14
  147. mlrun/frameworks/sklearn/mlrun_interface.py +16 -9
  148. mlrun/frameworks/sklearn/model_handler.py +2 -2
  149. mlrun/frameworks/tf_keras/__init__.py +10 -7
  150. mlrun/frameworks/tf_keras/callbacks/logging_callback.py +15 -15
  151. mlrun/frameworks/tf_keras/callbacks/mlrun_logging_callback.py +11 -11
  152. mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +19 -23
  153. mlrun/frameworks/tf_keras/mlrun_interface.py +9 -11
  154. mlrun/frameworks/tf_keras/model_handler.py +14 -14
  155. mlrun/frameworks/tf_keras/model_server.py +6 -6
  156. mlrun/frameworks/xgboost/__init__.py +13 -13
  157. mlrun/frameworks/xgboost/model_handler.py +6 -6
  158. mlrun/k8s_utils.py +61 -17
  159. mlrun/launcher/__init__.py +1 -1
  160. mlrun/launcher/base.py +16 -15
  161. mlrun/launcher/client.py +13 -11
  162. mlrun/launcher/factory.py +1 -1
  163. mlrun/launcher/local.py +23 -13
  164. mlrun/launcher/remote.py +17 -10
  165. mlrun/lists.py +7 -6
  166. mlrun/model.py +478 -103
  167. mlrun/model_monitoring/__init__.py +1 -1
  168. mlrun/model_monitoring/api.py +163 -371
  169. mlrun/{runtimes/mpijob/v1alpha1.py → model_monitoring/applications/__init__.py} +9 -15
  170. mlrun/model_monitoring/applications/_application_steps.py +188 -0
  171. mlrun/model_monitoring/applications/base.py +108 -0
  172. mlrun/model_monitoring/applications/context.py +341 -0
  173. mlrun/model_monitoring/{evidently_application.py → applications/evidently_base.py} +27 -22
  174. mlrun/model_monitoring/applications/histogram_data_drift.py +354 -0
  175. mlrun/model_monitoring/applications/results.py +99 -0
  176. mlrun/model_monitoring/controller.py +131 -278
  177. mlrun/model_monitoring/db/__init__.py +18 -0
  178. mlrun/model_monitoring/db/stores/__init__.py +136 -0
  179. mlrun/model_monitoring/db/stores/base/__init__.py +15 -0
  180. mlrun/model_monitoring/db/stores/base/store.py +213 -0
  181. mlrun/model_monitoring/db/stores/sqldb/__init__.py +13 -0
  182. mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +71 -0
  183. mlrun/model_monitoring/db/stores/sqldb/models/base.py +190 -0
  184. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +103 -0
  185. mlrun/model_monitoring/{stores/models/mysql.py → db/stores/sqldb/models/sqlite.py} +19 -13
  186. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +659 -0
  187. mlrun/model_monitoring/db/stores/v3io_kv/__init__.py +13 -0
  188. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +726 -0
  189. mlrun/model_monitoring/db/tsdb/__init__.py +105 -0
  190. mlrun/model_monitoring/db/tsdb/base.py +448 -0
  191. mlrun/model_monitoring/db/tsdb/helpers.py +30 -0
  192. mlrun/model_monitoring/db/tsdb/tdengine/__init__.py +15 -0
  193. mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +279 -0
  194. mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +42 -0
  195. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +507 -0
  196. mlrun/model_monitoring/db/tsdb/v3io/__init__.py +15 -0
  197. mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +158 -0
  198. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +849 -0
  199. mlrun/model_monitoring/features_drift_table.py +134 -106
  200. mlrun/model_monitoring/helpers.py +199 -55
  201. mlrun/model_monitoring/metrics/__init__.py +13 -0
  202. mlrun/model_monitoring/metrics/histogram_distance.py +127 -0
  203. mlrun/model_monitoring/model_endpoint.py +3 -2
  204. mlrun/model_monitoring/stream_processing.py +134 -398
  205. mlrun/model_monitoring/tracking_policy.py +9 -2
  206. mlrun/model_monitoring/writer.py +161 -125
  207. mlrun/package/__init__.py +6 -6
  208. mlrun/package/context_handler.py +5 -5
  209. mlrun/package/packager.py +7 -7
  210. mlrun/package/packagers/default_packager.py +8 -8
  211. mlrun/package/packagers/numpy_packagers.py +15 -15
  212. mlrun/package/packagers/pandas_packagers.py +5 -5
  213. mlrun/package/packagers/python_standard_library_packagers.py +10 -10
  214. mlrun/package/packagers_manager.py +19 -23
  215. mlrun/package/utils/_formatter.py +6 -6
  216. mlrun/package/utils/_pickler.py +2 -2
  217. mlrun/package/utils/_supported_format.py +4 -4
  218. mlrun/package/utils/log_hint_utils.py +2 -2
  219. mlrun/package/utils/type_hint_utils.py +4 -9
  220. mlrun/platforms/__init__.py +11 -10
  221. mlrun/platforms/iguazio.py +24 -203
  222. mlrun/projects/operations.py +52 -25
  223. mlrun/projects/pipelines.py +191 -197
  224. mlrun/projects/project.py +1227 -400
  225. mlrun/render.py +16 -19
  226. mlrun/run.py +209 -184
  227. mlrun/runtimes/__init__.py +83 -15
  228. mlrun/runtimes/base.py +51 -35
  229. mlrun/runtimes/daskjob.py +17 -10
  230. mlrun/runtimes/databricks_job/databricks_cancel_task.py +1 -1
  231. mlrun/runtimes/databricks_job/databricks_runtime.py +8 -7
  232. mlrun/runtimes/databricks_job/databricks_wrapper.py +1 -1
  233. mlrun/runtimes/funcdoc.py +1 -29
  234. mlrun/runtimes/function_reference.py +1 -1
  235. mlrun/runtimes/kubejob.py +34 -128
  236. mlrun/runtimes/local.py +40 -11
  237. mlrun/runtimes/mpijob/__init__.py +0 -20
  238. mlrun/runtimes/mpijob/abstract.py +9 -10
  239. mlrun/runtimes/mpijob/v1.py +1 -1
  240. mlrun/{model_monitoring/stores/models/sqlite.py → runtimes/nuclio/__init__.py} +7 -9
  241. mlrun/runtimes/nuclio/api_gateway.py +769 -0
  242. mlrun/runtimes/nuclio/application/__init__.py +15 -0
  243. mlrun/runtimes/nuclio/application/application.py +758 -0
  244. mlrun/runtimes/nuclio/application/reverse_proxy.go +95 -0
  245. mlrun/runtimes/{function.py → nuclio/function.py} +200 -83
  246. mlrun/runtimes/{nuclio.py → nuclio/nuclio.py} +6 -6
  247. mlrun/runtimes/{serving.py → nuclio/serving.py} +65 -68
  248. mlrun/runtimes/pod.py +281 -101
  249. mlrun/runtimes/remotesparkjob.py +12 -9
  250. mlrun/runtimes/sparkjob/spark3job.py +67 -51
  251. mlrun/runtimes/utils.py +41 -75
  252. mlrun/secrets.py +9 -5
  253. mlrun/serving/__init__.py +8 -1
  254. mlrun/serving/remote.py +2 -7
  255. mlrun/serving/routers.py +85 -69
  256. mlrun/serving/server.py +69 -44
  257. mlrun/serving/states.py +209 -36
  258. mlrun/serving/utils.py +22 -14
  259. mlrun/serving/v1_serving.py +6 -7
  260. mlrun/serving/v2_serving.py +133 -54
  261. mlrun/track/tracker.py +2 -1
  262. mlrun/track/tracker_manager.py +3 -3
  263. mlrun/track/trackers/mlflow_tracker.py +6 -2
  264. mlrun/utils/async_http.py +6 -8
  265. mlrun/utils/azure_vault.py +1 -1
  266. mlrun/utils/clones.py +1 -2
  267. mlrun/utils/condition_evaluator.py +3 -3
  268. mlrun/utils/db.py +21 -3
  269. mlrun/utils/helpers.py +405 -225
  270. mlrun/utils/http.py +3 -6
  271. mlrun/utils/logger.py +112 -16
  272. mlrun/utils/notifications/notification/__init__.py +17 -13
  273. mlrun/utils/notifications/notification/base.py +50 -2
  274. mlrun/utils/notifications/notification/console.py +2 -0
  275. mlrun/utils/notifications/notification/git.py +24 -1
  276. mlrun/utils/notifications/notification/ipython.py +3 -1
  277. mlrun/utils/notifications/notification/slack.py +96 -21
  278. mlrun/utils/notifications/notification/webhook.py +59 -2
  279. mlrun/utils/notifications/notification_pusher.py +149 -30
  280. mlrun/utils/regex.py +9 -0
  281. mlrun/utils/retryer.py +208 -0
  282. mlrun/utils/singleton.py +1 -1
  283. mlrun/utils/v3io_clients.py +4 -6
  284. mlrun/utils/version/version.json +2 -2
  285. mlrun/utils/version/version.py +2 -6
  286. mlrun-1.7.0.dist-info/METADATA +378 -0
  287. mlrun-1.7.0.dist-info/RECORD +351 -0
  288. {mlrun-1.6.4rc8.dist-info → mlrun-1.7.0.dist-info}/WHEEL +1 -1
  289. mlrun/feature_store/retrieval/conversion.py +0 -273
  290. mlrun/kfpops.py +0 -868
  291. mlrun/model_monitoring/application.py +0 -310
  292. mlrun/model_monitoring/batch.py +0 -1095
  293. mlrun/model_monitoring/prometheus.py +0 -219
  294. mlrun/model_monitoring/stores/__init__.py +0 -111
  295. mlrun/model_monitoring/stores/kv_model_endpoint_store.py +0 -576
  296. mlrun/model_monitoring/stores/model_endpoint_store.py +0 -147
  297. mlrun/model_monitoring/stores/models/__init__.py +0 -27
  298. mlrun/model_monitoring/stores/models/base.py +0 -84
  299. mlrun/model_monitoring/stores/sql_model_endpoint_store.py +0 -384
  300. mlrun/platforms/other.py +0 -306
  301. mlrun-1.6.4rc8.dist-info/METADATA +0 -272
  302. mlrun-1.6.4rc8.dist-info/RECORD +0 -314
  303. {mlrun-1.6.4rc8.dist-info → mlrun-1.7.0.dist-info}/LICENSE +0 -0
  304. {mlrun-1.6.4rc8.dist-info → mlrun-1.7.0.dist-info}/entry_points.txt +0 -0
  305. {mlrun-1.6.4rc8.dist-info → mlrun-1.7.0.dist-info}/top_level.txt +0 -0

mlrun/model_monitoring/tracking_policy.py CHANGED
@@ -11,8 +11,8 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
 
+import warnings
 from typing import Union
 
 import mlrun.common.schemas.schedule
@@ -55,6 +55,12 @@ class TrackingPolicy(mlrun.model.ModelObj):
         writer function, which is a real time nuclio functino, will be deployed
         with the same image. By default, the image is mlrun/mlrun.
         """
+        warnings.warn(
+            "The `TrackingPolicy` class is deprecated from version 1.7.0 and is not "
+            "used anymore. It will be removed in 1.9.0.",
+            FutureWarning,
+        )
+
         if isinstance(default_batch_intervals, str):
             default_batch_intervals = (
                 mlrun.common.schemas.schedule.ScheduleCronTrigger.from_crontab(
@@ -96,12 +102,13 @@ class TrackingPolicy(mlrun.model.ModelObj):
         )
         return new_obj
 
-    def to_dict(self, fields=None, exclude=None):
+    def to_dict(self, fields: list = None, exclude: list = None, strip: bool = False):
         struct = super().to_dict(
             fields,
             exclude=[
                 mlrun.common.schemas.model_monitoring.EventFieldType.DEFAULT_BATCH_INTERVALS
             ],
+            strip=strip,
         )
         if self.default_batch_intervals:
             struct[
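
The hunks above deprecate `TrackingPolicy` as of 1.7.0 and schedule its removal for 1.9.0. A minimal stand-alone sketch of the deprecation pattern used in the hunk and of how a caller can silence only this warning during migration (the `LegacyPolicy` class here is an illustrative stand-in, not mlrun's class):

import warnings


class LegacyPolicy:
    """Stand-in for a class deprecated the way TrackingPolicy is in the hunk above."""

    def __init__(self):
        warnings.warn(
            "The `LegacyPolicy` class is deprecated from version 1.7.0 and is not "
            "used anymore. It will be removed in 1.9.0.",
            FutureWarning,
        )


# Callers that cannot migrate immediately can suppress exactly this message
# instead of muting every FutureWarning in the process.
warnings.filterwarnings(
    "ignore",
    message=r"The `LegacyPolicy` class is deprecated.*",
    category=FutureWarning,
)
LegacyPolicy()
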

mlrun/model_monitoring/writer.py CHANGED
@@ -12,29 +12,29 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import datetime
 import json
-from http import HTTPStatus
-from typing import Any, NewType
-
-import pandas as pd
-from v3io.dataplane import Client as V3IOClient
-from v3io_frames.client import ClientBase as V3IOFramesClient
-from v3io_frames.errors import Error as V3IOFramesError
-from v3io_frames.frames_pb2 import IGNORE
+from typing import Any, Callable, NewType
 
 import mlrun.common.model_monitoring
+import mlrun.common.schemas
+import mlrun.common.schemas.alert as alert_objects
 import mlrun.model_monitoring
-import mlrun.utils.v3io_clients
-from mlrun.common.schemas.model_monitoring.constants import ResultStatusApp, WriterEvent
+from mlrun.common.schemas.model_monitoring.constants import (
+    EventFieldType,
+    HistogramDataDriftApplicationConstants,
+    MetricData,
+    ResultData,
+    ResultKindApp,
+    ResultStatusApp,
+    WriterEvent,
+    WriterEventKind,
+)
 from mlrun.common.schemas.notification import NotificationKind, NotificationSeverity
+from mlrun.model_monitoring.helpers import get_result_instance_fqn
 from mlrun.serving.utils import StepToDict
 from mlrun.utils import logger
 from mlrun.utils.notifications.notification_pusher import CustomNotificationPusher
 
-_TSDB_BE = "tsdb"
-_TSDB_RATE = "1/s"
-_TSDB_TABLE = "app-results"
 _RawEvent = dict[str, Any]
 _AppResultEvent = NewType("_AppResultEvent", _RawEvent)
 
@@ -69,20 +69,20 @@ class _Notifier:
         self._severity = severity
 
     def _should_send_event(self) -> bool:
-        return self._event[WriterEvent.RESULT_STATUS] >= ResultStatusApp.detected
+        return self._event[ResultData.RESULT_STATUS] >= ResultStatusApp.detected.value
 
     def _generate_message(self) -> str:
         return f"""\
 The monitoring app `{self._event[WriterEvent.APPLICATION_NAME]}` \
-of kind `{self._event[WriterEvent.RESULT_KIND]}` \
+of kind `{self._event[ResultData.RESULT_KIND]}` \
 detected a problem in model endpoint ID `{self._event[WriterEvent.ENDPOINT_ID]}` \
 at time `{self._event[WriterEvent.START_INFER_TIME]}`.
 
 Result data:
-Name: `{self._event[WriterEvent.RESULT_NAME]}`
-Value: `{self._event[WriterEvent.RESULT_VALUE]}`
-Status: `{self._event[WriterEvent.RESULT_STATUS]}`
-Extra data: `{self._event[WriterEvent.RESULT_EXTRA_DATA]}`\
+Name: `{self._event[ResultData.RESULT_NAME]}`
+Value: `{self._event[ResultData.RESULT_VALUE]}`
+Status: `{self._event[ResultData.RESULT_STATUS]}`
+Extra data: `{self._event[ResultData.RESULT_EXTRA_DATA]}`\
 """
 
     def notify(self) -> None:
@@ -97,140 +97,176 @@ Extra data: `{self._event[WriterEvent.RESULT_EXTRA_DATA]}`\
 
 class ModelMonitoringWriter(StepToDict):
     """
-    Write monitoring app events to V3IO KV storage
+    Write monitoring application results to the target databases
     """
 
     kind = "monitoring_application_stream_pusher"
 
-    def __init__(self, project: str) -> None:
+    def __init__(
+        self,
+        project: str,
+        secret_provider: Callable = None,
+    ) -> None:
         self.project = project
         self.name = project  # required for the deployment process
-        self._v3io_container = self.get_v3io_container(self.name)
-        self._kv_client = self._get_v3io_client().kv
-        self._tsdb_client = self._get_v3io_frames_client(self._v3io_container)
+
         self._custom_notifier = CustomNotificationPusher(
             notification_types=[NotificationKind.slack]
         )
-        self._create_tsdb_table()
-        self._kv_schemas = []
-
-    @staticmethod
-    def get_v3io_container(project_name: str) -> str:
-        return f"users/pipelines/{project_name}/monitoring-apps"
 
-    @staticmethod
-    def _get_v3io_client() -> V3IOClient:
-        return mlrun.utils.v3io_clients.get_v3io_client(
-            endpoint=mlrun.mlconf.v3io_api,
+        self._app_result_store = mlrun.model_monitoring.get_store_object(
+            project=self.project, secret_provider=secret_provider
         )
-
-    @staticmethod
-    def _get_v3io_frames_client(v3io_container: str) -> V3IOFramesClient:
-        return mlrun.utils.v3io_clients.get_frames_client(
-            address=mlrun.mlconf.v3io_framesd,
-            container=v3io_container,
+        self._tsdb_connector = mlrun.model_monitoring.get_tsdb_connector(
+            project=self.project, secret_provider=secret_provider
         )
+        self._endpoints_records = {}
 
-    def _create_tsdb_table(self) -> None:
-        self._tsdb_client.create(
-            backend=_TSDB_BE,
-            table=_TSDB_TABLE,
-            if_exists=IGNORE,
-            rate=_TSDB_RATE,
+    def _generate_event_on_drift(
+        self,
+        entity_id: str,
+        result_status: int,
+        event_value: dict,
+        project_name: str,
+        result_kind: int,
+    ) -> None:
+        entity = mlrun.common.schemas.alert.EventEntities(
+            kind=alert_objects.EventEntityKind.MODEL_ENDPOINT_RESULT,
+            project=project_name,
+            ids=[entity_id],
         )
 
-    def _update_kv_db(self, event: _AppResultEvent) -> None:
-        event = _AppResultEvent(event.copy())
-        endpoint_id = event.pop(WriterEvent.ENDPOINT_ID)
-        app_name = event.pop(WriterEvent.APPLICATION_NAME)
-        metric_name = event.pop(WriterEvent.RESULT_NAME)
-        attributes = {metric_name: json.dumps(event)}
-        self._kv_client.update(
-            container=self._v3io_container,
-            table_path=endpoint_id,
-            key=app_name,
-            attributes=attributes,
+        event_kind = self._generate_alert_event_kind(
+            result_status=result_status, result_kind=result_kind
         )
-        if endpoint_id not in self._kv_schemas:
-            self._generate_kv_schema(endpoint_id)
-        logger.info("Updated V3IO KV successfully", key=app_name)
-
-    def _generate_kv_schema(self, endpoint_id: str):
-        """Generate V3IO KV schema file which will be used by the model monitoring applications dashboard in Grafana."""
-        fields = [
-            {"name": WriterEvent.RESULT_NAME, "type": "string", "nullable": False}
-        ]
-        res = self._kv_client.create_schema(
-            container=self._v3io_container,
-            table_path=endpoint_id,
-            key=WriterEvent.APPLICATION_NAME,
-            fields=fields,
+
+        event_data = mlrun.common.schemas.Event(
+            kind=alert_objects.EventKind(value=event_kind),
+            entity=entity,
+            value_dict=event_value,
         )
-        if res.status_code != HTTPStatus.OK.value:
-            raise mlrun.errors.MLRunBadRequestError(
-                f"Couldn't infer schema for endpoint {endpoint_id} which is required for Grafana dashboards"
-            )
-        else:
-            logger.info(
-                "Generated V3IO KV schema successfully", endpoint_id=endpoint_id
-            )
-            self._kv_schemas.append(endpoint_id)
+        logger.info("Sending a drift event")
+        mlrun.get_run_db().generate_event(event_kind, event_data)
+        logger.info("Drift event sent successfully")
 
-    def _update_tsdb(self, event: _AppResultEvent) -> None:
-        event = _AppResultEvent(event.copy())
-        event[WriterEvent.END_INFER_TIME] = datetime.datetime.fromisoformat(
-            event[WriterEvent.END_INFER_TIME]
+    @staticmethod
+    def _generate_alert_event_kind(
+        result_kind: int, result_status: int
+    ) -> alert_objects.EventKind:
+        """Generate the required Event Kind format for the alerting system"""
+        event_kind = ResultKindApp(value=result_kind).name
+
+        if result_status == ResultStatusApp.detected.value:
+            event_kind = f"{event_kind}_detected"
+        else:
+            event_kind = f"{event_kind}_suspected"
+        return alert_objects.EventKind(
+            value=mlrun.utils.helpers.normalize_name(event_kind)
         )
-        del event[WriterEvent.RESULT_EXTRA_DATA]
-        try:
-            self._tsdb_client.write(
-                backend=_TSDB_BE,
-                table=_TSDB_TABLE,
-                dfs=pd.DataFrame.from_records([event]),
-                index_cols=[
-                    WriterEvent.END_INFER_TIME,
-                    WriterEvent.ENDPOINT_ID,
-                    WriterEvent.APPLICATION_NAME,
-                    WriterEvent.RESULT_NAME,
-                ],
-            )
-            logger.info("Updated V3IO TSDB successfully", table=_TSDB_TABLE)
-        except V3IOFramesError as err:
-            logger.warn(
-                "Could not write drift measures to TSDB",
-                err=err,
-                table=_TSDB_TABLE,
-                event=event,
-            )
 
     @staticmethod
-    def _reconstruct_event(event: _RawEvent) -> _AppResultEvent:
+    def _reconstruct_event(event: _RawEvent) -> tuple[_AppResultEvent, WriterEventKind]:
        """
        Modify the raw event into the expected monitoring application event
        schema as defined in `mlrun.common.schemas.model_monitoring.constants.WriterEvent`
        """
-        try:
-            result_event = _AppResultEvent(
-                {key: event[key] for key in WriterEvent.list()}
+        if not isinstance(event, dict):
+            raise _WriterEventTypeError(
+                f"The event is of type: {type(event)}, expected a dictionary"
            )
-            result_event[WriterEvent.CURRENT_STATS] = json.loads(
-                event[WriterEvent.CURRENT_STATS]
+        kind = event.pop(WriterEvent.EVENT_KIND, WriterEventKind.RESULT)
+        result_event = _AppResultEvent(json.loads(event.pop(WriterEvent.DATA, "{}")))
+        if not result_event:  # BC for < 1.7.0, can be removed in 1.9.0
+            result_event = _AppResultEvent(event)
+        else:
+            result_event.update(_AppResultEvent(event))
+
+        expected_keys = list(
+            set(WriterEvent.list()).difference(
+                [WriterEvent.EVENT_KIND, WriterEvent.DATA]
            )
-            return result_event
-        except KeyError as err:
+        )
+        if kind == WriterEventKind.METRIC:
+            expected_keys.extend(MetricData.list())
+        elif kind == WriterEventKind.RESULT:
+            expected_keys.extend(ResultData.list())
+        else:
            raise _WriterEventValueError(
-                "The received event misses some keys compared to the expected "
-                "monitoring application event schema"
-            ) from err
-        except TypeError as err:
-            raise _WriterEventTypeError(
-                f"The event is of type: {type(event)}, expected a dictionary"
-            ) from err
+                f"Unknown event kind: {kind}, expected one of: {WriterEventKind.list()}"
+            )
+        missing_keys = [key for key in expected_keys if key not in result_event]
+        if missing_keys:
+            raise _WriterEventValueError(
+                f"The received event misses some keys compared to the expected "
+                f"monitoring application event schema: {missing_keys}"
+            )
+
+        return result_event, kind
 
     def do(self, event: _RawEvent) -> None:
-        event = self._reconstruct_event(event)
+        event, kind = self._reconstruct_event(event)
         logger.info("Starting to write event", event=event)
-        self._update_tsdb(event)
-        self._update_kv_db(event)
-        _Notifier(event=event, notification_pusher=self._custom_notifier).notify()
+        self._tsdb_connector.write_application_event(event=event.copy(), kind=kind)
+        self._app_result_store.write_application_event(event=event.copy(), kind=kind)
+
         logger.info("Completed event DB writes")
+
+        if kind == WriterEventKind.RESULT:
+            _Notifier(event=event, notification_pusher=self._custom_notifier).notify()
+
+        if (
+            mlrun.mlconf.alerts.mode == mlrun.common.schemas.alert.AlertsModes.enabled
+            and kind == WriterEventKind.RESULT
+            and (
+                event[ResultData.RESULT_STATUS] == ResultStatusApp.detected.value
+                or event[ResultData.RESULT_STATUS]
+                == ResultStatusApp.potential_detection.value
+            )
+        ):
+            endpoint_id = event[WriterEvent.ENDPOINT_ID]
+            endpoint_record = self._endpoints_records.setdefault(
+                endpoint_id,
+                self._app_result_store.get_model_endpoint(endpoint_id=endpoint_id),
+            )
+            event_value = {
+                "app_name": event[WriterEvent.APPLICATION_NAME],
+                "model": endpoint_record.get(EventFieldType.MODEL),
+                "model_endpoint_id": event[WriterEvent.ENDPOINT_ID],
+                "result_name": event[ResultData.RESULT_NAME],
+                "result_value": event[ResultData.RESULT_VALUE],
+            }
+            self._generate_event_on_drift(
+                entity_id=get_result_instance_fqn(
+                    event[WriterEvent.ENDPOINT_ID],
+                    event[WriterEvent.APPLICATION_NAME],
+                    event[ResultData.RESULT_NAME],
+                ),
+                result_status=event[ResultData.RESULT_STATUS],
+                event_value=event_value,
+                project_name=self.project,
+                result_kind=event[ResultData.RESULT_KIND],
+            )
+
+        if (
+            kind == WriterEventKind.RESULT
+            and event[WriterEvent.APPLICATION_NAME]
+            == HistogramDataDriftApplicationConstants.NAME
+            and event[ResultData.RESULT_NAME]
+            == HistogramDataDriftApplicationConstants.GENERAL_RESULT_NAME
+        ):
+            endpoint_id = event[WriterEvent.ENDPOINT_ID]
+            logger.info(
+                "Updating the model endpoint with metadata specific to the histogram "
+                "data drift app",
+                endpoint_id=endpoint_id,
+            )
+            attributes = json.loads(event[ResultData.RESULT_EXTRA_DATA])
+            attributes[EventFieldType.DRIFT_STATUS] = str(
+                attributes[EventFieldType.DRIFT_STATUS]
+            )
+            self._app_result_store.update_model_endpoint(
+                endpoint_id=endpoint_id,
+                attributes=attributes,
+            )
+
+        logger.info("Model monitoring writer finished handling event")
mlrun/package/__init__.py CHANGED
@@ -18,7 +18,7 @@
 import functools
 import inspect
 from collections import OrderedDict
-from typing import Callable, Dict, List, Type, Union
+from typing import Callable, Union
 
 from ..config import config
 from .context_handler import ContextHandler
@@ -40,9 +40,9 @@ from .utils import (
 
 
 def handler(
-    labels: Dict[str, str] = None,
-    outputs: List[Union[str, Dict[str, str]]] = None,
-    inputs: Union[bool, Dict[str, Union[str, Type]]] = True,
+    labels: dict[str, str] = None,
+    outputs: list[Union[str, dict[str, str]]] = None,
+    inputs: Union[bool, dict[str, Union[str, type]]] = True,
 ):
     """
     MLRun's handler is a decorator to wrap a function and enable setting labels, parsing inputs (`mlrun.DataItem`) using
@@ -58,7 +58,7 @@ def handler(
                     * `str` - A string in the format of '{key}:{artifact_type}'. If a string was given without ':' it
                       will indicate the key, and the artifact type will be according to the returned value type's
                       default artifact type. The artifact types supported are listed in the relevant type packager.
-                    * `Dict[str, str]` - A dictionary of logging configuration. the key 'key' is mandatory for the
+                    * `dict[str, str]` - A dictionary of logging configuration. the key 'key' is mandatory for the
                       logged artifact key.
                     * None - Do not log the output.
 
@@ -73,7 +73,7 @@ def handler(
                    * True - Parse all found inputs to the assigned type hint in the function's signature. If there is no
                      type hint assigned, the value will remain an `mlrun.DataItem`.
                    * False - Do not parse inputs, leaving the inputs as `mlrun.DataItem`.
-                   * Dict[str, Union[Type, str]] - A dictionary with argument name as key and the expected type to parse
+                   * dict[str, Union[Type, str]] - A dictionary with argument name as key and the expected type to parse
                      the `mlrun.DataItem` to. The expected type can be a string as well, idicating the full module path.
 
                    Default: True - meaning inputs will be parsed from DataItem's as long as they are type hinted.
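
These hunks (and the matching ones in the packager modules below) replace `typing.Dict`/`List`/`Tuple`/`Type` with the built-in generics available since Python 3.9 (PEP 585). A small stand-alone illustration of the equivalent annotations (the function here is hypothetical, not part of mlrun):

from typing import Union


# Before: labels: Dict[str, str], outputs: List[Union[str, Dict[str, str]]]
# After (as in the hunks above): built-in generics, no typing.Dict/List imports needed.
def describe_outputs(
    labels: dict[str, str] = None,
    outputs: list[Union[str, dict[str, str]]] = None,
) -> list[str]:
    labels = labels or {}
    outputs = outputs or []
    prefix = ",".join(f"{key}={value}" for key, value in labels.items())
    return [f"{prefix}:{out}" for out in outputs]


print(describe_outputs({"framework": "sklearn"}, ["model", {"key": "report"}]))
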

mlrun/package/context_handler.py CHANGED
@@ -15,7 +15,7 @@
 import inspect
 import os
 from collections import OrderedDict
-from typing import Dict, List, Union
+from typing import Union
 
 from mlrun.datastore import DataItem
 from mlrun.errors import MLRunInvalidArgumentError
@@ -181,7 +181,7 @@ class ContextHandler:
     def log_outputs(
         self,
         outputs: list,
-        log_hints: List[Union[Dict[str, str], str, None]],
+        log_hints: list[Union[dict[str, str], str, None]],
     ):
         """
         Log the given outputs as artifacts (or results) with the stored context. Errors raised during the packing will
@@ -229,7 +229,7 @@ class ContextHandler:
         # Clear packagers outputs:
         self._packagers_manager.clear_packagers_outputs()
 
-    def set_labels(self, labels: Dict[str, str]):
+    def set_labels(self, labels: dict[str, str]):
         """
         Set the given labels with the stored context.
 
@@ -239,7 +239,7 @@ class ContextHandler:
             self._context.set_label(key=key, value=value)
 
     def _collect_packagers(
-        self, packagers: List[str], is_mandatory: bool, is_custom_packagers: bool
+        self, packagers: list[str], is_mandatory: bool, is_custom_packagers: bool
     ):
         """
         Collect packagers with the stored manager. The collection can ignore errors raised by setting the mandatory flag
@@ -310,7 +310,7 @@ class ContextHandler:
     def _validate_objects_to_log_hints_length(
         self,
         outputs: list,
-        log_hints: List[Union[Dict[str, str], str, None]],
+        log_hints: list[Union[dict[str, str], str, None]],
     ):
         """
         Validate the outputs and log hints are the same length. If they are not, warnings will be printed on what will
mlrun/package/packager.py CHANGED
@@ -14,7 +14,7 @@
 #
 from abc import ABC, abstractmethod
 from pathlib import Path
-from typing import Any, List, Tuple, Type, Union
+from typing import Any, Union
 
 from mlrun.artifacts import Artifact
 from mlrun.datastore import DataItem
@@ -93,7 +93,7 @@ class Packager(ABC):
     """
 
     #: The type of object this packager can pack and unpack.
-    PACKABLE_OBJECT_TYPE: Type = ...
+    PACKABLE_OBJECT_TYPE: type = ...
 
     #: The priority of this packager in the packagers collection of the manager (lower is better).
     PRIORITY: int = ...
@@ -104,7 +104,7 @@ class Packager(ABC):
         self._priority = Packager.PRIORITY
 
         # List of all paths to be deleted by the manager of this packager after logging the packages:
-        self._future_clearing_path_list: List[str] = []
+        self._future_clearing_path_list: list[str] = []
 
     @abstractmethod
     def get_default_packing_artifact_type(self, obj: Any) -> str:
@@ -132,7 +132,7 @@ class Packager(ABC):
         pass
 
     @abstractmethod
-    def get_supported_artifact_types(self) -> List[str]:
+    def get_supported_artifact_types(self) -> list[str]:
         """
         Get all the supported artifact types on this packager.
 
@@ -147,7 +147,7 @@ class Packager(ABC):
         key: str = None,
         artifact_type: str = None,
         configurations: dict = None,
-    ) -> Union[Tuple[Artifact, dict], dict]:
+    ) -> Union[tuple[Artifact, dict], dict]:
         """
         Pack an object as the given artifact type using the provided configurations.
 
@@ -212,7 +212,7 @@ class Packager(ABC):
         return True
 
     def is_unpackable(
-        self, data_item: DataItem, type_hint: Type, artifact_type: str = None
+        self, data_item: DataItem, type_hint: type, artifact_type: str = None
     ) -> bool:
         """
         Check if this packager can unpack an input according to the user-given type hint and the provided artifact type.
@@ -269,7 +269,7 @@ class Packager(ABC):
         self._priority = priority
 
     @property
-    def future_clearing_path_list(self) -> List[str]:
+    def future_clearing_path_list(self) -> list[str]:
         """
         Get the packager's future clearing path list.
 

mlrun/package/packagers/default_packager.py CHANGED
@@ -15,7 +15,7 @@
 import inspect
 from abc import ABCMeta
 from types import MethodType
-from typing import Any, List, Tuple, Type, Union
+from typing import Any, Union
 
 import docstring_parser
 
@@ -34,7 +34,7 @@ class _DefaultPackagerMeta(ABCMeta):
     dynamically generated docstring that will include a summary of the packager.
     """
 
-    def __new__(mcls, name: str, bases: tuple, namespace: dict, **kwargs):
+    def __new__(cls, name: str, bases: tuple, namespace: dict, **kwargs):
         """
         Create a new DefaultPackager metaclass that saves the original packager docstring to another attribute named
         `_packager_doc`.
@@ -48,10 +48,10 @@ class _DefaultPackagerMeta(ABCMeta):
         namespace["_packager_doc"] = namespace.get("__doc__", "")
 
         # Continue creating the metaclass:
-        return super().__new__(mcls, name, bases, namespace, **kwargs)
+        return super().__new__(cls, name, bases, namespace, **kwargs)
 
     @property
-    def __doc__(cls: Type["DefaultPackager"]) -> str:
+    def __doc__(cls: type["DefaultPackager"]) -> str:
         """
         Override the `__doc__` attribute of a `DefaultPackager` to be a property in order to auto-summarize the
         packager's class docstring. The summary is concatenated after the original class doc string.
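
The hunk above keeps `_DefaultPackagerMeta`'s pattern of stashing the original class docstring and exposing `__doc__` as a metaclass property, so the docstring is generated on access; only the first `__new__` argument is renamed. A stand-alone sketch of that pattern (class and attribute names here are illustrative, not mlrun's):

from abc import ABCMeta


class _DocSummaryMeta(ABCMeta):
    # Stores the hand-written docstring aside and serves a generated one instead.
    def __new__(cls, name, bases, namespace, **kwargs):
        namespace["_original_doc"] = namespace.get("__doc__", "")
        return super().__new__(cls, name, bases, namespace, **kwargs)

    @property
    def __doc__(cls) -> str:
        # Class attribute access (SomeClass.__doc__) hits this metaclass property,
        # because data descriptors on the metaclass shadow the class's own __doc__.
        return f"{cls._original_doc}\n\n(Auto-generated summary for {cls.__name__})"


class ExamplePackager(metaclass=_DocSummaryMeta):
    """Pack example objects."""


print(ExamplePackager.__doc__)
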
@@ -273,7 +273,7 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
     """
 
     #: The type of object this packager can pack and unpack.
-    PACKABLE_OBJECT_TYPE: Type = ...
+    PACKABLE_OBJECT_TYPE: type = ...
 
     #: A flag for indicating whether to also pack all subclasses of the `PACKABLE_OBJECT_TYPE`.
     PACK_SUBCLASSES = False
@@ -306,7 +306,7 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
         """
         return self.DEFAULT_UNPACKING_ARTIFACT_TYPE
 
-    def get_supported_artifact_types(self) -> List[str]:
+    def get_supported_artifact_types(self) -> list[str]:
         """
         Get all the supported artifact types on this packager.
 
@@ -326,7 +326,7 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
         key: str = None,
         artifact_type: str = None,
         configurations: dict = None,
-    ) -> Union[Tuple[Artifact, dict], dict]:
+    ) -> Union[tuple[Artifact, dict], dict]:
         """
         Pack an object as the given artifact type using the provided configurations.
 
@@ -442,7 +442,7 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
         obj: Any,
         key: str,
         pickle_module_name: str = DEFAULT_PICKLE_MODULE,
-    ) -> Tuple[Artifact, dict]:
+    ) -> tuple[Artifact, dict]:
         """
         Pack a python object, pickling it into a pkl file and store it in an artifact.