mlrun 1.6.4rc2__py3-none-any.whl → 1.7.0rc20__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mlrun might be problematic. Click here for more details.

Files changed (291):
  1. mlrun/__init__.py +11 -1
  2. mlrun/__main__.py +26 -112
  3. mlrun/alerts/__init__.py +15 -0
  4. mlrun/alerts/alert.py +144 -0
  5. mlrun/api/schemas/__init__.py +5 -4
  6. mlrun/artifacts/__init__.py +8 -3
  7. mlrun/artifacts/base.py +46 -257
  8. mlrun/artifacts/dataset.py +11 -192
  9. mlrun/artifacts/manager.py +47 -48
  10. mlrun/artifacts/model.py +31 -159
  11. mlrun/artifacts/plots.py +23 -380
  12. mlrun/common/constants.py +69 -0
  13. mlrun/common/db/sql_session.py +2 -3
  14. mlrun/common/formatters/__init__.py +19 -0
  15. mlrun/common/formatters/artifact.py +21 -0
  16. mlrun/common/formatters/base.py +78 -0
  17. mlrun/common/formatters/function.py +41 -0
  18. mlrun/common/formatters/pipeline.py +53 -0
  19. mlrun/common/formatters/project.py +51 -0
  20. mlrun/common/helpers.py +1 -2
  21. mlrun/common/model_monitoring/helpers.py +9 -5
  22. mlrun/{runtimes → common/runtimes}/constants.py +37 -9
  23. mlrun/common/schemas/__init__.py +24 -4
  24. mlrun/common/schemas/alert.py +203 -0
  25. mlrun/common/schemas/api_gateway.py +148 -0
  26. mlrun/common/schemas/artifact.py +18 -8
  27. mlrun/common/schemas/auth.py +11 -5
  28. mlrun/common/schemas/background_task.py +1 -1
  29. mlrun/common/schemas/client_spec.py +4 -1
  30. mlrun/common/schemas/feature_store.py +16 -16
  31. mlrun/common/schemas/frontend_spec.py +8 -7
  32. mlrun/common/schemas/function.py +5 -1
  33. mlrun/common/schemas/hub.py +11 -18
  34. mlrun/common/schemas/memory_reports.py +2 -2
  35. mlrun/common/schemas/model_monitoring/__init__.py +18 -3
  36. mlrun/common/schemas/model_monitoring/constants.py +83 -26
  37. mlrun/common/schemas/model_monitoring/grafana.py +13 -9
  38. mlrun/common/schemas/model_monitoring/model_endpoints.py +99 -16
  39. mlrun/common/schemas/notification.py +4 -4
  40. mlrun/common/schemas/object.py +2 -2
  41. mlrun/{runtimes/mpijob/v1alpha1.py → common/schemas/pagination.py} +10 -13
  42. mlrun/common/schemas/pipeline.py +1 -10
  43. mlrun/common/schemas/project.py +24 -23
  44. mlrun/common/schemas/runtime_resource.py +8 -12
  45. mlrun/common/schemas/schedule.py +3 -3
  46. mlrun/common/schemas/tag.py +1 -2
  47. mlrun/common/schemas/workflow.py +2 -2
  48. mlrun/common/types.py +7 -1
  49. mlrun/config.py +54 -17
  50. mlrun/data_types/to_pandas.py +10 -12
  51. mlrun/datastore/__init__.py +5 -8
  52. mlrun/datastore/alibaba_oss.py +130 -0
  53. mlrun/datastore/azure_blob.py +17 -5
  54. mlrun/datastore/base.py +62 -39
  55. mlrun/datastore/datastore.py +28 -9
  56. mlrun/datastore/datastore_profile.py +146 -20
  57. mlrun/datastore/filestore.py +0 -1
  58. mlrun/datastore/google_cloud_storage.py +6 -2
  59. mlrun/datastore/hdfs.py +56 -0
  60. mlrun/datastore/inmem.py +2 -2
  61. mlrun/datastore/redis.py +6 -2
  62. mlrun/datastore/s3.py +9 -0
  63. mlrun/datastore/snowflake_utils.py +43 -0
  64. mlrun/datastore/sources.py +201 -96
  65. mlrun/datastore/spark_utils.py +1 -2
  66. mlrun/datastore/store_resources.py +7 -7
  67. mlrun/datastore/targets.py +358 -104
  68. mlrun/datastore/utils.py +72 -58
  69. mlrun/datastore/v3io.py +5 -1
  70. mlrun/db/base.py +185 -35
  71. mlrun/db/factory.py +1 -1
  72. mlrun/db/httpdb.py +614 -179
  73. mlrun/db/nopdb.py +210 -26
  74. mlrun/errors.py +12 -1
  75. mlrun/execution.py +41 -24
  76. mlrun/feature_store/__init__.py +0 -2
  77. mlrun/feature_store/api.py +40 -72
  78. mlrun/feature_store/common.py +1 -1
  79. mlrun/feature_store/feature_set.py +76 -55
  80. mlrun/feature_store/feature_vector.py +28 -30
  81. mlrun/feature_store/ingestion.py +7 -6
  82. mlrun/feature_store/retrieval/base.py +16 -11
  83. mlrun/feature_store/retrieval/conversion.py +11 -13
  84. mlrun/feature_store/retrieval/dask_merger.py +2 -0
  85. mlrun/feature_store/retrieval/job.py +9 -3
  86. mlrun/feature_store/retrieval/local_merger.py +2 -0
  87. mlrun/feature_store/retrieval/spark_merger.py +34 -24
  88. mlrun/feature_store/steps.py +37 -34
  89. mlrun/features.py +9 -20
  90. mlrun/frameworks/_common/artifacts_library.py +9 -9
  91. mlrun/frameworks/_common/mlrun_interface.py +5 -5
  92. mlrun/frameworks/_common/model_handler.py +48 -48
  93. mlrun/frameworks/_common/plan.py +2 -3
  94. mlrun/frameworks/_common/producer.py +3 -4
  95. mlrun/frameworks/_common/utils.py +5 -5
  96. mlrun/frameworks/_dl_common/loggers/logger.py +6 -7
  97. mlrun/frameworks/_dl_common/loggers/mlrun_logger.py +9 -9
  98. mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +23 -47
  99. mlrun/frameworks/_ml_common/artifacts_library.py +1 -2
  100. mlrun/frameworks/_ml_common/loggers/logger.py +3 -4
  101. mlrun/frameworks/_ml_common/loggers/mlrun_logger.py +4 -5
  102. mlrun/frameworks/_ml_common/model_handler.py +24 -24
  103. mlrun/frameworks/_ml_common/pkl_model_server.py +2 -2
  104. mlrun/frameworks/_ml_common/plan.py +1 -1
  105. mlrun/frameworks/_ml_common/plans/calibration_curve_plan.py +2 -3
  106. mlrun/frameworks/_ml_common/plans/confusion_matrix_plan.py +2 -3
  107. mlrun/frameworks/_ml_common/plans/dataset_plan.py +3 -3
  108. mlrun/frameworks/_ml_common/plans/feature_importance_plan.py +3 -3
  109. mlrun/frameworks/_ml_common/plans/roc_curve_plan.py +4 -4
  110. mlrun/frameworks/_ml_common/utils.py +4 -4
  111. mlrun/frameworks/auto_mlrun/auto_mlrun.py +9 -9
  112. mlrun/frameworks/huggingface/model_server.py +4 -4
  113. mlrun/frameworks/lgbm/__init__.py +33 -33
  114. mlrun/frameworks/lgbm/callbacks/callback.py +2 -4
  115. mlrun/frameworks/lgbm/callbacks/logging_callback.py +4 -5
  116. mlrun/frameworks/lgbm/callbacks/mlrun_logging_callback.py +4 -5
  117. mlrun/frameworks/lgbm/mlrun_interfaces/booster_mlrun_interface.py +1 -3
  118. mlrun/frameworks/lgbm/mlrun_interfaces/mlrun_interface.py +6 -6
  119. mlrun/frameworks/lgbm/model_handler.py +10 -10
  120. mlrun/frameworks/lgbm/model_server.py +6 -6
  121. mlrun/frameworks/lgbm/utils.py +5 -5
  122. mlrun/frameworks/onnx/dataset.py +8 -8
  123. mlrun/frameworks/onnx/mlrun_interface.py +3 -3
  124. mlrun/frameworks/onnx/model_handler.py +6 -6
  125. mlrun/frameworks/onnx/model_server.py +7 -7
  126. mlrun/frameworks/parallel_coordinates.py +4 -3
  127. mlrun/frameworks/pytorch/__init__.py +18 -18
  128. mlrun/frameworks/pytorch/callbacks/callback.py +4 -5
  129. mlrun/frameworks/pytorch/callbacks/logging_callback.py +17 -17
  130. mlrun/frameworks/pytorch/callbacks/mlrun_logging_callback.py +11 -11
  131. mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +23 -29
  132. mlrun/frameworks/pytorch/callbacks_handler.py +38 -38
  133. mlrun/frameworks/pytorch/mlrun_interface.py +20 -20
  134. mlrun/frameworks/pytorch/model_handler.py +17 -17
  135. mlrun/frameworks/pytorch/model_server.py +7 -7
  136. mlrun/frameworks/sklearn/__init__.py +13 -13
  137. mlrun/frameworks/sklearn/estimator.py +4 -4
  138. mlrun/frameworks/sklearn/metrics_library.py +14 -14
  139. mlrun/frameworks/sklearn/mlrun_interface.py +3 -6
  140. mlrun/frameworks/sklearn/model_handler.py +2 -2
  141. mlrun/frameworks/tf_keras/__init__.py +10 -7
  142. mlrun/frameworks/tf_keras/callbacks/logging_callback.py +15 -15
  143. mlrun/frameworks/tf_keras/callbacks/mlrun_logging_callback.py +11 -11
  144. mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +19 -23
  145. mlrun/frameworks/tf_keras/mlrun_interface.py +9 -11
  146. mlrun/frameworks/tf_keras/model_handler.py +14 -14
  147. mlrun/frameworks/tf_keras/model_server.py +6 -6
  148. mlrun/frameworks/xgboost/__init__.py +13 -13
  149. mlrun/frameworks/xgboost/model_handler.py +6 -6
  150. mlrun/k8s_utils.py +14 -16
  151. mlrun/launcher/__init__.py +1 -1
  152. mlrun/launcher/base.py +16 -15
  153. mlrun/launcher/client.py +8 -6
  154. mlrun/launcher/factory.py +1 -1
  155. mlrun/launcher/local.py +17 -11
  156. mlrun/launcher/remote.py +16 -10
  157. mlrun/lists.py +7 -6
  158. mlrun/model.py +238 -73
  159. mlrun/model_monitoring/__init__.py +1 -1
  160. mlrun/model_monitoring/api.py +138 -315
  161. mlrun/model_monitoring/application.py +5 -296
  162. mlrun/model_monitoring/applications/__init__.py +24 -0
  163. mlrun/model_monitoring/applications/_application_steps.py +157 -0
  164. mlrun/model_monitoring/applications/base.py +282 -0
  165. mlrun/model_monitoring/applications/context.py +214 -0
  166. mlrun/model_monitoring/applications/evidently_base.py +211 -0
  167. mlrun/model_monitoring/applications/histogram_data_drift.py +349 -0
  168. mlrun/model_monitoring/applications/results.py +99 -0
  169. mlrun/model_monitoring/controller.py +104 -84
  170. mlrun/model_monitoring/controller_handler.py +13 -5
  171. mlrun/model_monitoring/db/__init__.py +18 -0
  172. mlrun/model_monitoring/{stores → db/stores}/__init__.py +43 -36
  173. mlrun/model_monitoring/db/stores/base/__init__.py +15 -0
  174. mlrun/model_monitoring/{stores/model_endpoint_store.py → db/stores/base/store.py} +64 -40
  175. mlrun/model_monitoring/db/stores/sqldb/__init__.py +13 -0
  176. mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +71 -0
  177. mlrun/model_monitoring/{stores → db/stores/sqldb}/models/base.py +109 -5
  178. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +88 -0
  179. mlrun/model_monitoring/{stores/models/mysql.py → db/stores/sqldb/models/sqlite.py} +19 -13
  180. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +684 -0
  181. mlrun/model_monitoring/db/stores/v3io_kv/__init__.py +13 -0
  182. mlrun/model_monitoring/{stores/kv_model_endpoint_store.py → db/stores/v3io_kv/kv_store.py} +310 -165
  183. mlrun/model_monitoring/db/tsdb/__init__.py +100 -0
  184. mlrun/model_monitoring/db/tsdb/base.py +329 -0
  185. mlrun/model_monitoring/db/tsdb/helpers.py +30 -0
  186. mlrun/model_monitoring/db/tsdb/tdengine/__init__.py +15 -0
  187. mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +240 -0
  188. mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +45 -0
  189. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +397 -0
  190. mlrun/model_monitoring/db/tsdb/v3io/__init__.py +15 -0
  191. mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +117 -0
  192. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +630 -0
  193. mlrun/model_monitoring/evidently_application.py +6 -118
  194. mlrun/model_monitoring/features_drift_table.py +134 -106
  195. mlrun/model_monitoring/helpers.py +127 -28
  196. mlrun/model_monitoring/metrics/__init__.py +13 -0
  197. mlrun/model_monitoring/metrics/histogram_distance.py +127 -0
  198. mlrun/model_monitoring/model_endpoint.py +3 -2
  199. mlrun/model_monitoring/prometheus.py +1 -4
  200. mlrun/model_monitoring/stream_processing.py +62 -231
  201. mlrun/model_monitoring/tracking_policy.py +9 -2
  202. mlrun/model_monitoring/writer.py +152 -124
  203. mlrun/package/__init__.py +6 -6
  204. mlrun/package/context_handler.py +5 -5
  205. mlrun/package/packager.py +7 -7
  206. mlrun/package/packagers/default_packager.py +6 -6
  207. mlrun/package/packagers/numpy_packagers.py +15 -15
  208. mlrun/package/packagers/pandas_packagers.py +5 -5
  209. mlrun/package/packagers/python_standard_library_packagers.py +10 -10
  210. mlrun/package/packagers_manager.py +19 -23
  211. mlrun/package/utils/_formatter.py +6 -6
  212. mlrun/package/utils/_pickler.py +2 -2
  213. mlrun/package/utils/_supported_format.py +4 -4
  214. mlrun/package/utils/log_hint_utils.py +2 -2
  215. mlrun/package/utils/type_hint_utils.py +4 -9
  216. mlrun/platforms/__init__.py +11 -10
  217. mlrun/platforms/iguazio.py +24 -203
  218. mlrun/projects/operations.py +35 -21
  219. mlrun/projects/pipelines.py +68 -99
  220. mlrun/projects/project.py +830 -266
  221. mlrun/render.py +3 -11
  222. mlrun/run.py +162 -166
  223. mlrun/runtimes/__init__.py +62 -7
  224. mlrun/runtimes/base.py +39 -32
  225. mlrun/runtimes/daskjob.py +8 -8
  226. mlrun/runtimes/databricks_job/databricks_cancel_task.py +1 -1
  227. mlrun/runtimes/databricks_job/databricks_runtime.py +7 -7
  228. mlrun/runtimes/databricks_job/databricks_wrapper.py +1 -1
  229. mlrun/runtimes/funcdoc.py +0 -28
  230. mlrun/runtimes/function_reference.py +1 -1
  231. mlrun/runtimes/kubejob.py +28 -122
  232. mlrun/runtimes/local.py +6 -3
  233. mlrun/runtimes/mpijob/__init__.py +0 -20
  234. mlrun/runtimes/mpijob/abstract.py +9 -10
  235. mlrun/runtimes/mpijob/v1.py +1 -1
  236. mlrun/{model_monitoring/stores/models/sqlite.py → runtimes/nuclio/__init__.py} +7 -9
  237. mlrun/runtimes/nuclio/api_gateway.py +709 -0
  238. mlrun/runtimes/nuclio/application/__init__.py +15 -0
  239. mlrun/runtimes/nuclio/application/application.py +523 -0
  240. mlrun/runtimes/nuclio/application/reverse_proxy.go +95 -0
  241. mlrun/runtimes/{function.py → nuclio/function.py} +112 -73
  242. mlrun/runtimes/{nuclio.py → nuclio/nuclio.py} +6 -6
  243. mlrun/runtimes/{serving.py → nuclio/serving.py} +45 -51
  244. mlrun/runtimes/pod.py +286 -88
  245. mlrun/runtimes/remotesparkjob.py +2 -2
  246. mlrun/runtimes/sparkjob/spark3job.py +51 -34
  247. mlrun/runtimes/utils.py +7 -75
  248. mlrun/secrets.py +9 -5
  249. mlrun/serving/remote.py +2 -7
  250. mlrun/serving/routers.py +13 -10
  251. mlrun/serving/server.py +22 -26
  252. mlrun/serving/states.py +99 -25
  253. mlrun/serving/utils.py +3 -3
  254. mlrun/serving/v1_serving.py +6 -7
  255. mlrun/serving/v2_serving.py +59 -20
  256. mlrun/track/tracker.py +2 -1
  257. mlrun/track/tracker_manager.py +3 -3
  258. mlrun/track/trackers/mlflow_tracker.py +1 -2
  259. mlrun/utils/async_http.py +5 -7
  260. mlrun/utils/azure_vault.py +1 -1
  261. mlrun/utils/clones.py +1 -2
  262. mlrun/utils/condition_evaluator.py +3 -3
  263. mlrun/utils/db.py +3 -3
  264. mlrun/utils/helpers.py +183 -197
  265. mlrun/utils/http.py +2 -5
  266. mlrun/utils/logger.py +76 -14
  267. mlrun/utils/notifications/notification/__init__.py +17 -12
  268. mlrun/utils/notifications/notification/base.py +14 -2
  269. mlrun/utils/notifications/notification/console.py +2 -0
  270. mlrun/utils/notifications/notification/git.py +3 -1
  271. mlrun/utils/notifications/notification/ipython.py +3 -1
  272. mlrun/utils/notifications/notification/slack.py +101 -21
  273. mlrun/utils/notifications/notification/webhook.py +11 -1
  274. mlrun/utils/notifications/notification_pusher.py +155 -30
  275. mlrun/utils/retryer.py +208 -0
  276. mlrun/utils/singleton.py +1 -1
  277. mlrun/utils/v3io_clients.py +2 -4
  278. mlrun/utils/version/version.json +2 -2
  279. mlrun/utils/version/version.py +2 -6
  280. {mlrun-1.6.4rc2.dist-info → mlrun-1.7.0rc20.dist-info}/METADATA +31 -19
  281. mlrun-1.7.0rc20.dist-info/RECORD +353 -0
  282. mlrun/kfpops.py +0 -868
  283. mlrun/model_monitoring/batch.py +0 -1095
  284. mlrun/model_monitoring/stores/models/__init__.py +0 -27
  285. mlrun/model_monitoring/stores/sql_model_endpoint_store.py +0 -384
  286. mlrun/platforms/other.py +0 -306
  287. mlrun-1.6.4rc2.dist-info/RECORD +0 -314
  288. {mlrun-1.6.4rc2.dist-info → mlrun-1.7.0rc20.dist-info}/LICENSE +0 -0
  289. {mlrun-1.6.4rc2.dist-info → mlrun-1.7.0rc20.dist-info}/WHEEL +0 -0
  290. {mlrun-1.6.4rc2.dist-info → mlrun-1.7.0rc20.dist-info}/entry_points.txt +0 -0
  291. {mlrun-1.6.4rc2.dist-info → mlrun-1.7.0rc20.dist-info}/top_level.txt +0 -0
@@ -12,29 +12,29 @@
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
14
 
15
- import datetime
16
15
  import json
17
- from http import HTTPStatus
18
16
  from typing import Any, NewType
19
17
 
20
- import pandas as pd
21
- from v3io.dataplane import Client as V3IOClient
22
- from v3io_frames.client import ClientBase as V3IOFramesClient
23
- from v3io_frames.errors import Error as V3IOFramesError
24
- from v3io_frames.frames_pb2 import IGNORE
25
-
26
18
  import mlrun.common.model_monitoring
19
+ import mlrun.common.schemas
20
+ import mlrun.common.schemas.alert as alert_objects
27
21
  import mlrun.model_monitoring
28
- import mlrun.utils.v3io_clients
29
- from mlrun.common.schemas.model_monitoring.constants import ResultStatusApp, WriterEvent
22
+ from mlrun.common.schemas.model_monitoring.constants import (
23
+ EventFieldType,
24
+ HistogramDataDriftApplicationConstants,
25
+ MetricData,
26
+ ResultData,
27
+ ResultKindApp,
28
+ ResultStatusApp,
29
+ WriterEvent,
30
+ WriterEventKind,
31
+ )
30
32
  from mlrun.common.schemas.notification import NotificationKind, NotificationSeverity
33
+ from mlrun.model_monitoring.helpers import get_endpoint_record, get_result_instance_fqn
31
34
  from mlrun.serving.utils import StepToDict
32
35
  from mlrun.utils import logger
33
36
  from mlrun.utils.notifications.notification_pusher import CustomNotificationPusher
34
37
 
35
- _TSDB_BE = "tsdb"
36
- _TSDB_RATE = "1/s"
37
- _TSDB_TABLE = "app-results"
38
38
  _RawEvent = dict[str, Any]
39
39
  _AppResultEvent = NewType("_AppResultEvent", _RawEvent)
40
40
 
@@ -69,20 +69,20 @@ class _Notifier:
69
69
  self._severity = severity
70
70
 
71
71
  def _should_send_event(self) -> bool:
72
- return self._event[WriterEvent.RESULT_STATUS] >= ResultStatusApp.detected
72
+ return self._event[ResultData.RESULT_STATUS] >= ResultStatusApp.detected.value
73
73
 
74
74
  def _generate_message(self) -> str:
75
75
  return f"""\
76
76
  The monitoring app `{self._event[WriterEvent.APPLICATION_NAME]}` \
77
- of kind `{self._event[WriterEvent.RESULT_KIND]}` \
77
+ of kind `{self._event[ResultData.RESULT_KIND]}` \
78
78
  detected a problem in model endpoint ID `{self._event[WriterEvent.ENDPOINT_ID]}` \
79
79
  at time `{self._event[WriterEvent.START_INFER_TIME]}`.
80
80
 
81
81
  Result data:
82
- Name: `{self._event[WriterEvent.RESULT_NAME]}`
83
- Value: `{self._event[WriterEvent.RESULT_VALUE]}`
84
- Status: `{self._event[WriterEvent.RESULT_STATUS]}`
85
- Extra data: `{self._event[WriterEvent.RESULT_EXTRA_DATA]}`\
82
+ Name: `{self._event[ResultData.RESULT_NAME]}`
83
+ Value: `{self._event[ResultData.RESULT_VALUE]}`
84
+ Status: `{self._event[ResultData.RESULT_STATUS]}`
85
+ Extra data: `{self._event[ResultData.RESULT_EXTRA_DATA]}`\
86
86
  """
87
87
 
88
88
  def notify(self) -> None:
@@ -97,140 +97,168 @@ Extra data: `{self._event[WriterEvent.RESULT_EXTRA_DATA]}`\
97
97
 
98
98
  class ModelMonitoringWriter(StepToDict):
99
99
  """
100
- Write monitoring app events to V3IO KV storage
100
+ Write monitoring application results to the target databases
101
101
  """
102
102
 
103
103
  kind = "monitoring_application_stream_pusher"
104
104
 
105
- def __init__(self, project: str) -> None:
105
+ def __init__(self, project: str, tsdb_secret_provider=None) -> None:
106
106
  self.project = project
107
107
  self.name = project # required for the deployment process
108
- self._v3io_container = self.get_v3io_container(self.name)
109
- self._kv_client = self._get_v3io_client().kv
110
- self._tsdb_client = self._get_v3io_frames_client(self._v3io_container)
108
+
111
109
  self._custom_notifier = CustomNotificationPusher(
112
110
  notification_types=[NotificationKind.slack]
113
111
  )
114
- self._create_tsdb_table()
115
- self._kv_schemas = []
116
-
117
- @staticmethod
118
- def get_v3io_container(project_name: str) -> str:
119
- return f"users/pipelines/{project_name}/monitoring-apps"
120
112
 
121
- @staticmethod
122
- def _get_v3io_client() -> V3IOClient:
123
- return mlrun.utils.v3io_clients.get_v3io_client(
124
- endpoint=mlrun.mlconf.v3io_api,
113
+ self._app_result_store = mlrun.model_monitoring.get_store_object(
114
+ project=self.project
125
115
  )
126
-
127
- @staticmethod
128
- def _get_v3io_frames_client(v3io_container: str) -> V3IOFramesClient:
129
- return mlrun.utils.v3io_clients.get_frames_client(
130
- address=mlrun.mlconf.v3io_framesd,
131
- container=v3io_container,
116
+ self._tsdb_connector = mlrun.model_monitoring.get_tsdb_connector(
117
+ project=self.project, secret_provider=tsdb_secret_provider
132
118
  )
119
+ self._endpoints_records = {}
133
120
 
134
- def _create_tsdb_table(self) -> None:
135
- self._tsdb_client.create(
136
- backend=_TSDB_BE,
137
- table=_TSDB_TABLE,
138
- if_exists=IGNORE,
139
- rate=_TSDB_RATE,
121
+ def _generate_event_on_drift(
122
+ self,
123
+ entity_id: str,
124
+ result_status: int,
125
+ event_value: dict,
126
+ project_name: str,
127
+ result_kind: int,
128
+ ) -> None:
129
+ logger.info("Sending an event")
130
+ entity = mlrun.common.schemas.alert.EventEntities(
131
+ kind=alert_objects.EventEntityKind.MODEL_ENDPOINT_RESULT,
132
+ project=project_name,
133
+ ids=[entity_id],
140
134
  )
141
135
 
142
- def _update_kv_db(self, event: _AppResultEvent) -> None:
143
- event = _AppResultEvent(event.copy())
144
- endpoint_id = event.pop(WriterEvent.ENDPOINT_ID)
145
- app_name = event.pop(WriterEvent.APPLICATION_NAME)
146
- metric_name = event.pop(WriterEvent.RESULT_NAME)
147
- attributes = {metric_name: json.dumps(event)}
148
- self._kv_client.update(
149
- container=self._v3io_container,
150
- table_path=endpoint_id,
151
- key=app_name,
152
- attributes=attributes,
136
+ event_kind = self._generate_alert_event_kind(
137
+ result_status=result_status, result_kind=result_kind
153
138
  )
154
- if endpoint_id not in self._kv_schemas:
155
- self._generate_kv_schema(endpoint_id)
156
- logger.info("Updated V3IO KV successfully", key=app_name)
157
-
158
- def _generate_kv_schema(self, endpoint_id: str):
159
- """Generate V3IO KV schema file which will be used by the model monitoring applications dashboard in Grafana."""
160
- fields = [
161
- {"name": WriterEvent.RESULT_NAME, "type": "string", "nullable": False}
162
- ]
163
- res = self._kv_client.create_schema(
164
- container=self._v3io_container,
165
- table_path=endpoint_id,
166
- key=WriterEvent.APPLICATION_NAME,
167
- fields=fields,
139
+
140
+ event_data = mlrun.common.schemas.Event(
141
+ kind=alert_objects.EventKind(value=event_kind),
142
+ entity=entity,
143
+ value_dict=event_value,
168
144
  )
169
- if res.status_code != HTTPStatus.OK.value:
170
- raise mlrun.errors.MLRunBadRequestError(
171
- f"Couldn't infer schema for endpoint {endpoint_id} which is required for Grafana dashboards"
172
- )
145
+ mlrun.get_run_db().generate_event(event_kind, event_data)
146
+
147
+ @staticmethod
148
+ def _generate_alert_event_kind(
149
+ result_kind: int, result_status: int
150
+ ) -> alert_objects.EventKind:
151
+ """Generate the required Event Kind format for the alerting system"""
152
+ if result_kind == ResultKindApp.custom.value:
153
+ # Custom kind is represented as an anomaly detection
154
+ event_kind = "mm_app_anomaly"
173
155
  else:
174
- logger.info(
175
- "Generated V3IO KV schema successfully", endpoint_id=endpoint_id
176
- )
177
- self._kv_schemas.append(endpoint_id)
156
+ event_kind = ResultKindApp(value=result_kind).name
178
157
 
179
- def _update_tsdb(self, event: _AppResultEvent) -> None:
180
- event = _AppResultEvent(event.copy())
181
- event[WriterEvent.END_INFER_TIME] = datetime.datetime.fromisoformat(
182
- event[WriterEvent.END_INFER_TIME]
183
- )
184
- del event[WriterEvent.RESULT_EXTRA_DATA]
185
- try:
186
- self._tsdb_client.write(
187
- backend=_TSDB_BE,
188
- table=_TSDB_TABLE,
189
- dfs=pd.DataFrame.from_records([event]),
190
- index_cols=[
191
- WriterEvent.END_INFER_TIME,
192
- WriterEvent.ENDPOINT_ID,
193
- WriterEvent.APPLICATION_NAME,
194
- WriterEvent.RESULT_NAME,
195
- ],
196
- )
197
- logger.info("Updated V3IO TSDB successfully", table=_TSDB_TABLE)
198
- except V3IOFramesError as err:
199
- logger.warn(
200
- "Could not write drift measures to TSDB",
201
- err=err,
202
- table=_TSDB_TABLE,
203
- event=event,
204
- )
158
+ if result_status == ResultStatusApp.detected.value:
159
+ event_kind = f"{event_kind}_detected"
160
+ else:
161
+ event_kind = f"{event_kind}_suspected"
162
+ return alert_objects.EventKind(value=event_kind)
205
163
 
206
164
  @staticmethod
207
- def _reconstruct_event(event: _RawEvent) -> _AppResultEvent:
165
+ def _reconstruct_event(event: _RawEvent) -> tuple[_AppResultEvent, WriterEventKind]:
208
166
  """
209
167
  Modify the raw event into the expected monitoring application event
210
168
  schema as defined in `mlrun.common.schemas.model_monitoring.constants.WriterEvent`
211
169
  """
212
- try:
213
- result_event = _AppResultEvent(
214
- {key: event[key] for key in WriterEvent.list()}
170
+ if not isinstance(event, dict):
171
+ raise _WriterEventTypeError(
172
+ f"The event is of type: {type(event)}, expected a dictionary"
215
173
  )
216
- result_event[WriterEvent.CURRENT_STATS] = json.loads(
217
- event[WriterEvent.CURRENT_STATS]
174
+ kind = event.pop(WriterEvent.EVENT_KIND, WriterEventKind.RESULT)
175
+ result_event = _AppResultEvent(json.loads(event.pop(WriterEvent.DATA, "{}")))
176
+ if not result_event: # BC for < 1.7.0, can be removed in 1.9.0
177
+ result_event = _AppResultEvent(event)
178
+ else:
179
+ result_event.update(_AppResultEvent(event))
180
+
181
+ expected_keys = list(
182
+ set(WriterEvent.list()).difference(
183
+ [WriterEvent.EVENT_KIND, WriterEvent.DATA]
218
184
  )
219
- return result_event
220
- except KeyError as err:
185
+ )
186
+ if kind == WriterEventKind.METRIC:
187
+ expected_keys.extend(MetricData.list())
188
+ elif kind == WriterEventKind.RESULT:
189
+ expected_keys.extend(ResultData.list())
190
+ else:
221
191
  raise _WriterEventValueError(
222
- "The received event misses some keys compared to the expected "
223
- "monitoring application event schema"
224
- ) from err
225
- except TypeError as err:
226
- raise _WriterEventTypeError(
227
- f"The event is of type: {type(event)}, expected a dictionary"
228
- ) from err
192
+ f"Unknown event kind: {kind}, expected one of: {WriterEventKind.list()}"
193
+ )
194
+ missing_keys = [key for key in expected_keys if key not in result_event]
195
+ if missing_keys:
196
+ raise _WriterEventValueError(
197
+ f"The received event misses some keys compared to the expected "
198
+ f"monitoring application event schema: {missing_keys}"
199
+ )
200
+
201
+ return result_event, kind
229
202
 
230
203
  def do(self, event: _RawEvent) -> None:
231
- event = self._reconstruct_event(event)
204
+ event, kind = self._reconstruct_event(event)
232
205
  logger.info("Starting to write event", event=event)
233
- self._update_tsdb(event)
234
- self._update_kv_db(event)
235
- _Notifier(event=event, notification_pusher=self._custom_notifier).notify()
206
+ self._tsdb_connector.write_application_event(event=event.copy(), kind=kind)
207
+ self._app_result_store.write_application_event(event=event.copy(), kind=kind)
208
+
236
209
  logger.info("Completed event DB writes")
210
+
211
+ if kind == WriterEventKind.RESULT:
212
+ _Notifier(event=event, notification_pusher=self._custom_notifier).notify()
213
+
214
+ if (
215
+ mlrun.mlconf.alerts.mode == mlrun.common.schemas.alert.AlertsModes.enabled
216
+ and kind == WriterEventKind.RESULT
217
+ and (
218
+ event[ResultData.RESULT_STATUS] == ResultStatusApp.detected.value
219
+ or event[ResultData.RESULT_STATUS]
220
+ == ResultStatusApp.potential_detection.value
221
+ )
222
+ ):
223
+ endpoint_id = event[WriterEvent.ENDPOINT_ID]
224
+ endpoint_record = self._endpoints_records.setdefault(
225
+ endpoint_id,
226
+ get_endpoint_record(project=self.project, endpoint_id=endpoint_id),
227
+ )
228
+ event_value = {
229
+ "app_name": event[WriterEvent.APPLICATION_NAME],
230
+ "model": endpoint_record.get(EventFieldType.MODEL),
231
+ "model_endpoint_id": event[WriterEvent.ENDPOINT_ID],
232
+ "result_name": event[ResultData.RESULT_NAME],
233
+ "result_value": event[ResultData.RESULT_VALUE],
234
+ }
235
+ self._generate_event_on_drift(
236
+ entity_id=get_result_instance_fqn(
237
+ event[WriterEvent.ENDPOINT_ID],
238
+ event[WriterEvent.APPLICATION_NAME],
239
+ event[ResultData.RESULT_NAME],
240
+ ),
241
+ result_status=event[ResultData.RESULT_STATUS],
242
+ event_value=event_value,
243
+ project_name=self.project,
244
+ result_kind=event[ResultData.RESULT_KIND],
245
+ )
246
+
247
+ if (
248
+ kind == WriterEventKind.RESULT
249
+ and event[WriterEvent.APPLICATION_NAME]
250
+ == HistogramDataDriftApplicationConstants.NAME
251
+ and event[ResultData.RESULT_NAME]
252
+ == HistogramDataDriftApplicationConstants.GENERAL_RESULT_NAME
253
+ ):
254
+ endpoint_id = event[WriterEvent.ENDPOINT_ID]
255
+ logger.info(
256
+ "Updating the model endpoint with metadata specific to the histogram "
257
+ "data drift app",
258
+ endpoint_id=endpoint_id,
259
+ )
260
+ store = mlrun.model_monitoring.get_store_object(project=self.project)
261
+ store.update_model_endpoint(
262
+ endpoint_id=endpoint_id,
263
+ attributes=json.loads(event[ResultData.RESULT_EXTRA_DATA]),
264
+ )
mlrun/package/__init__.py CHANGED
@@ -18,7 +18,7 @@
18
18
  import functools
19
19
  import inspect
20
20
  from collections import OrderedDict
21
- from typing import Callable, Dict, List, Type, Union
21
+ from typing import Callable, Union
22
22
 
23
23
  from ..config import config
24
24
  from .context_handler import ContextHandler
@@ -40,9 +40,9 @@ from .utils import (
40
40
 
41
41
 
42
42
  def handler(
43
- labels: Dict[str, str] = None,
44
- outputs: List[Union[str, Dict[str, str]]] = None,
45
- inputs: Union[bool, Dict[str, Union[str, Type]]] = True,
43
+ labels: dict[str, str] = None,
44
+ outputs: list[Union[str, dict[str, str]]] = None,
45
+ inputs: Union[bool, dict[str, Union[str, type]]] = True,
46
46
  ):
47
47
  """
48
48
  MLRun's handler is a decorator to wrap a function and enable setting labels, parsing inputs (`mlrun.DataItem`) using
@@ -58,7 +58,7 @@ def handler(
58
58
  * `str` - A string in the format of '{key}:{artifact_type}'. If a string was given without ':' it
59
59
  will indicate the key, and the artifact type will be according to the returned value type's
60
60
  default artifact type. The artifact types supported are listed in the relevant type packager.
61
- * `Dict[str, str]` - A dictionary of logging configuration. the key 'key' is mandatory for the
61
+ * `dict[str, str]` - A dictionary of logging configuration. the key 'key' is mandatory for the
62
62
  logged artifact key.
63
63
  * None - Do not log the output.
64
64
 
@@ -73,7 +73,7 @@ def handler(
73
73
  * True - Parse all found inputs to the assigned type hint in the function's signature. If there is no
74
74
  type hint assigned, the value will remain an `mlrun.DataItem`.
75
75
  * False - Do not parse inputs, leaving the inputs as `mlrun.DataItem`.
76
- * Dict[str, Union[Type, str]] - A dictionary with argument name as key and the expected type to parse
76
+ * dict[str, Union[Type, str]] - A dictionary with argument name as key and the expected type to parse
77
77
  the `mlrun.DataItem` to. The expected type can be a string as well, idicating the full module path.
78
78
 
79
79
  Default: True - meaning inputs will be parsed from DataItem's as long as they are type hinted.
@@ -15,7 +15,7 @@
15
15
  import inspect
16
16
  import os
17
17
  from collections import OrderedDict
18
- from typing import Dict, List, Union
18
+ from typing import Union
19
19
 
20
20
  from mlrun.datastore import DataItem
21
21
  from mlrun.errors import MLRunInvalidArgumentError
@@ -181,7 +181,7 @@ class ContextHandler:
181
181
  def log_outputs(
182
182
  self,
183
183
  outputs: list,
184
- log_hints: List[Union[Dict[str, str], str, None]],
184
+ log_hints: list[Union[dict[str, str], str, None]],
185
185
  ):
186
186
  """
187
187
  Log the given outputs as artifacts (or results) with the stored context. Errors raised during the packing will
@@ -229,7 +229,7 @@ class ContextHandler:
229
229
  # Clear packagers outputs:
230
230
  self._packagers_manager.clear_packagers_outputs()
231
231
 
232
- def set_labels(self, labels: Dict[str, str]):
232
+ def set_labels(self, labels: dict[str, str]):
233
233
  """
234
234
  Set the given labels with the stored context.
235
235
 
@@ -239,7 +239,7 @@ class ContextHandler:
239
239
  self._context.set_label(key=key, value=value)
240
240
 
241
241
  def _collect_packagers(
242
- self, packagers: List[str], is_mandatory: bool, is_custom_packagers: bool
242
+ self, packagers: list[str], is_mandatory: bool, is_custom_packagers: bool
243
243
  ):
244
244
  """
245
245
  Collect packagers with the stored manager. The collection can ignore errors raised by setting the mandatory flag
@@ -310,7 +310,7 @@ class ContextHandler:
310
310
  def _validate_objects_to_log_hints_length(
311
311
  self,
312
312
  outputs: list,
313
- log_hints: List[Union[Dict[str, str], str, None]],
313
+ log_hints: list[Union[dict[str, str], str, None]],
314
314
  ):
315
315
  """
316
316
  Validate the outputs and log hints are the same length. If they are not, warnings will be printed on what will
mlrun/package/packager.py CHANGED
@@ -14,7 +14,7 @@
14
14
  #
15
15
  from abc import ABC, abstractmethod
16
16
  from pathlib import Path
17
- from typing import Any, List, Tuple, Type, Union
17
+ from typing import Any, Union
18
18
 
19
19
  from mlrun.artifacts import Artifact
20
20
  from mlrun.datastore import DataItem
@@ -93,7 +93,7 @@ class Packager(ABC):
93
93
  """
94
94
 
95
95
  #: The type of object this packager can pack and unpack.
96
- PACKABLE_OBJECT_TYPE: Type = ...
96
+ PACKABLE_OBJECT_TYPE: type = ...
97
97
 
98
98
  #: The priority of this packager in the packagers collection of the manager (lower is better).
99
99
  PRIORITY: int = ...
@@ -104,7 +104,7 @@ class Packager(ABC):
104
104
  self._priority = Packager.PRIORITY
105
105
 
106
106
  # List of all paths to be deleted by the manager of this packager after logging the packages:
107
- self._future_clearing_path_list: List[str] = []
107
+ self._future_clearing_path_list: list[str] = []
108
108
 
109
109
  @abstractmethod
110
110
  def get_default_packing_artifact_type(self, obj: Any) -> str:
@@ -132,7 +132,7 @@ class Packager(ABC):
132
132
  pass
133
133
 
134
134
  @abstractmethod
135
- def get_supported_artifact_types(self) -> List[str]:
135
+ def get_supported_artifact_types(self) -> list[str]:
136
136
  """
137
137
  Get all the supported artifact types on this packager.
138
138
 
@@ -147,7 +147,7 @@ class Packager(ABC):
147
147
  key: str = None,
148
148
  artifact_type: str = None,
149
149
  configurations: dict = None,
150
- ) -> Union[Tuple[Artifact, dict], dict]:
150
+ ) -> Union[tuple[Artifact, dict], dict]:
151
151
  """
152
152
  Pack an object as the given artifact type using the provided configurations.
153
153
 
@@ -212,7 +212,7 @@ class Packager(ABC):
212
212
  return True
213
213
 
214
214
  def is_unpackable(
215
- self, data_item: DataItem, type_hint: Type, artifact_type: str = None
215
+ self, data_item: DataItem, type_hint: type, artifact_type: str = None
216
216
  ) -> bool:
217
217
  """
218
218
  Check if this packager can unpack an input according to the user-given type hint and the provided artifact type.
@@ -269,7 +269,7 @@ class Packager(ABC):
269
269
  self._priority = priority
270
270
 
271
271
  @property
272
- def future_clearing_path_list(self) -> List[str]:
272
+ def future_clearing_path_list(self) -> list[str]:
273
273
  """
274
274
  Get the packager's future clearing path list.
275
275
 
@@ -15,7 +15,7 @@
15
15
  import inspect
16
16
  from abc import ABCMeta
17
17
  from types import MethodType
18
- from typing import Any, List, Tuple, Type, Union
18
+ from typing import Any, Union
19
19
 
20
20
  import docstring_parser
21
21
 
@@ -51,7 +51,7 @@ class _DefaultPackagerMeta(ABCMeta):
51
51
  return super().__new__(mcls, name, bases, namespace, **kwargs)
52
52
 
53
53
  @property
54
- def __doc__(cls: Type["DefaultPackager"]) -> str:
54
+ def __doc__(cls: type["DefaultPackager"]) -> str:
55
55
  """
56
56
  Override the `__doc__` attribute of a `DefaultPackager` to be a property in order to auto-summarize the
57
57
  packager's class docstring. The summary is concatenated after the original class doc string.
@@ -273,7 +273,7 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
273
273
  """
274
274
 
275
275
  #: The type of object this packager can pack and unpack.
276
- PACKABLE_OBJECT_TYPE: Type = ...
276
+ PACKABLE_OBJECT_TYPE: type = ...
277
277
 
278
278
  #: A flag for indicating whether to also pack all subclasses of the `PACKABLE_OBJECT_TYPE`.
279
279
  PACK_SUBCLASSES = False
@@ -306,7 +306,7 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
306
306
  """
307
307
  return self.DEFAULT_UNPACKING_ARTIFACT_TYPE
308
308
 
309
- def get_supported_artifact_types(self) -> List[str]:
309
+ def get_supported_artifact_types(self) -> list[str]:
310
310
  """
311
311
  Get all the supported artifact types on this packager.
312
312
 
@@ -326,7 +326,7 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
326
326
  key: str = None,
327
327
  artifact_type: str = None,
328
328
  configurations: dict = None,
329
- ) -> Union[Tuple[Artifact, dict], dict]:
329
+ ) -> Union[tuple[Artifact, dict], dict]:
330
330
  """
331
331
  Pack an object as the given artifact type using the provided configurations.
332
332
 
@@ -442,7 +442,7 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
442
442
  obj: Any,
443
443
  key: str,
444
444
  pickle_module_name: str = DEFAULT_PICKLE_MODULE,
445
- ) -> Tuple[Artifact, dict]:
445
+ ) -> tuple[Artifact, dict]:
446
446
  """
447
447
  Pack a python object, pickling it into a pkl file and store it in an artifact.
448
448
 
@@ -16,7 +16,7 @@ import os
16
16
  import pathlib
17
17
  import tempfile
18
18
  from abc import ABC, abstractmethod
19
- from typing import Any, Dict, List, Tuple, Union
19
+ from typing import Any, Union
20
20
 
21
21
  import numpy as np
22
22
  import pandas as pd
@@ -29,7 +29,7 @@ from ..utils import ArtifactType, SupportedFormat
29
29
  from .default_packager import DefaultPackager
30
30
 
31
31
  # Type for collection of numpy arrays (list / dict of arrays):
32
- NumPyArrayCollectionType = Union[List[np.ndarray], Dict[str, np.ndarray]]
32
+ NumPyArrayCollectionType = Union[list[np.ndarray], dict[str, np.ndarray]]
33
33
 
34
34
 
35
35
  class _Formatter(ABC):
@@ -194,7 +194,7 @@ class _NPZFormatter(_Formatter):
194
194
  save_function(file_path, **obj)
195
195
 
196
196
  @classmethod
197
- def load(cls, file_path: str, **load_kwargs: dict) -> Dict[str, np.ndarray]:
197
+ def load(cls, file_path: str, **load_kwargs: dict) -> dict[str, np.ndarray]:
198
198
  """
199
199
  Load the arrays from the given 'npz' file path.
200
200
 
@@ -226,7 +226,7 @@ class NumPySupportedFormat(SupportedFormat[_Formatter]):
226
226
  }
227
227
 
228
228
  @classmethod
229
- def get_single_array_formats(cls) -> List[str]:
229
+ def get_single_array_formats(cls) -> list[str]:
230
230
  """
231
231
  Get the supported formats for saving one numpy array.
232
232
 
@@ -235,7 +235,7 @@ class NumPySupportedFormat(SupportedFormat[_Formatter]):
235
235
  return [cls.NPY, cls.TXT, cls.GZ, cls.CSV]
236
236
 
237
237
  @classmethod
238
- def get_multi_array_formats(cls) -> List[str]:
238
+ def get_multi_array_formats(cls) -> list[str]:
239
239
  """
240
240
  Get the supported formats for saving a collection (multiple) numpy arrays - e.g. list of arrays or dictionary of
241
241
  arrays.
@@ -310,7 +310,7 @@ class NumPyNDArrayPackager(DefaultPackager):
310
310
  key: str,
311
311
  file_format: str = DEFAULT_NUMPY_ARRAY_FORMAT,
312
312
  **save_kwargs,
313
- ) -> Tuple[Artifact, dict]:
313
+ ) -> tuple[Artifact, dict]:
314
314
  """
315
315
  Pack an array as a file by the given format.
316
316
 
@@ -342,7 +342,7 @@ class NumPyNDArrayPackager(DefaultPackager):
342
342
  obj: np.ndarray,
343
343
  key: str,
344
344
  file_format: str = "",
345
- ) -> Tuple[Artifact, dict]:
345
+ ) -> tuple[Artifact, dict]:
346
346
  """
347
347
  Pack an array as a dataset.
348
348
 
@@ -442,7 +442,7 @@ class _NumPyNDArrayCollectionPackager(DefaultPackager):
442
442
  key: str,
443
443
  file_format: str = DEFAULT_NUMPPY_ARRAY_COLLECTION_FORMAT,
444
444
  **save_kwargs,
445
- ) -> Tuple[Artifact, dict]:
445
+ ) -> tuple[Artifact, dict]:
446
446
  """
447
447
  Pack an array collection as a file by the given format.
448
448
 
@@ -476,7 +476,7 @@ class _NumPyNDArrayCollectionPackager(DefaultPackager):
476
476
  data_item: DataItem,
477
477
  file_format: str = None,
478
478
  allow_pickle: bool = False,
479
- ) -> Dict[str, np.ndarray]:
479
+ ) -> dict[str, np.ndarray]:
480
480
  """
481
481
  Unpack a numppy array collection from file.
482
482
 
@@ -545,7 +545,7 @@ class NumPyNDArrayDictPackager(_NumPyNDArrayCollectionPackager):
545
545
  ``dict[str, numpy.ndarray]`` packager.
546
546
  """
547
547
 
548
- PACKABLE_OBJECT_TYPE = Dict[str, np.ndarray]
548
+ PACKABLE_OBJECT_TYPE = dict[str, np.ndarray]
549
549
 
550
550
  def is_packable(
551
551
  self, obj: Any, artifact_type: str = None, configurations: dict = None
@@ -583,7 +583,7 @@ class NumPyNDArrayDictPackager(_NumPyNDArrayCollectionPackager):
583
583
 
584
584
  return True
585
585
 
586
- def pack_result(self, obj: Dict[str, np.ndarray], key: str) -> dict:
586
+ def pack_result(self, obj: dict[str, np.ndarray], key: str) -> dict:
587
587
  """
588
588
  Pack a dictionary of numpy arrays as a result.
589
589
 
@@ -604,7 +604,7 @@ class NumPyNDArrayDictPackager(_NumPyNDArrayCollectionPackager):
604
604
  data_item: DataItem,
605
605
  file_format: str = None,
606
606
  allow_pickle: bool = False,
607
- ) -> Dict[str, np.ndarray]:
607
+ ) -> dict[str, np.ndarray]:
608
608
  """
609
609
  Unpack a numppy array dictionary from file.
610
610
 
@@ -630,7 +630,7 @@ class NumPyNDArrayListPackager(_NumPyNDArrayCollectionPackager):
630
630
  ``list[numpy.ndarray]`` packager.
631
631
  """
632
632
 
633
- PACKABLE_OBJECT_TYPE = List[np.ndarray]
633
+ PACKABLE_OBJECT_TYPE = list[np.ndarray]
634
634
 
635
635
  def is_packable(
636
636
  self, obj: Any, artifact_type: str = None, configurations: dict = None
@@ -665,7 +665,7 @@ class NumPyNDArrayListPackager(_NumPyNDArrayCollectionPackager):
665
665
 
666
666
  return True
667
667
 
668
- def pack_result(self, obj: List[np.ndarray], key: str) -> dict:
668
+ def pack_result(self, obj: list[np.ndarray], key: str) -> dict:
669
669
  """
670
670
  Pack a list of numpy arrays as a result.
671
671
 
@@ -681,7 +681,7 @@ class NumPyNDArrayListPackager(_NumPyNDArrayCollectionPackager):
681
681
  data_item: DataItem,
682
682
  file_format: str = None,
683
683
  allow_pickle: bool = False,
684
- ) -> List[np.ndarray]:
684
+ ) -> list[np.ndarray]:
685
685
  """
686
686
  Unpack a numppy array list from file.
687
687