mlrun 1.6.4rc7__py3-none-any.whl → 1.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mlrun might be problematic. Click here for more details.

Files changed (305)
  1. mlrun/__init__.py +11 -1
  2. mlrun/__main__.py +40 -122
  3. mlrun/alerts/__init__.py +15 -0
  4. mlrun/alerts/alert.py +248 -0
  5. mlrun/api/schemas/__init__.py +5 -4
  6. mlrun/artifacts/__init__.py +8 -3
  7. mlrun/artifacts/base.py +47 -257
  8. mlrun/artifacts/dataset.py +11 -192
  9. mlrun/artifacts/manager.py +79 -47
  10. mlrun/artifacts/model.py +31 -159
  11. mlrun/artifacts/plots.py +23 -380
  12. mlrun/common/constants.py +74 -1
  13. mlrun/common/db/sql_session.py +5 -5
  14. mlrun/common/formatters/__init__.py +21 -0
  15. mlrun/common/formatters/artifact.py +45 -0
  16. mlrun/common/formatters/base.py +113 -0
  17. mlrun/common/formatters/feature_set.py +33 -0
  18. mlrun/common/formatters/function.py +46 -0
  19. mlrun/common/formatters/pipeline.py +53 -0
  20. mlrun/common/formatters/project.py +51 -0
  21. mlrun/common/formatters/run.py +29 -0
  22. mlrun/common/helpers.py +12 -3
  23. mlrun/common/model_monitoring/helpers.py +9 -5
  24. mlrun/{runtimes → common/runtimes}/constants.py +37 -9
  25. mlrun/common/schemas/__init__.py +31 -5
  26. mlrun/common/schemas/alert.py +202 -0
  27. mlrun/common/schemas/api_gateway.py +196 -0
  28. mlrun/common/schemas/artifact.py +25 -4
  29. mlrun/common/schemas/auth.py +16 -5
  30. mlrun/common/schemas/background_task.py +1 -1
  31. mlrun/common/schemas/client_spec.py +4 -2
  32. mlrun/common/schemas/common.py +7 -4
  33. mlrun/common/schemas/constants.py +3 -0
  34. mlrun/common/schemas/feature_store.py +74 -44
  35. mlrun/common/schemas/frontend_spec.py +15 -7
  36. mlrun/common/schemas/function.py +12 -1
  37. mlrun/common/schemas/hub.py +11 -18
  38. mlrun/common/schemas/memory_reports.py +2 -2
  39. mlrun/common/schemas/model_monitoring/__init__.py +20 -4
  40. mlrun/common/schemas/model_monitoring/constants.py +123 -42
  41. mlrun/common/schemas/model_monitoring/grafana.py +13 -9
  42. mlrun/common/schemas/model_monitoring/model_endpoints.py +101 -54
  43. mlrun/common/schemas/notification.py +71 -14
  44. mlrun/common/schemas/object.py +2 -2
  45. mlrun/{model_monitoring/controller_handler.py → common/schemas/pagination.py} +9 -12
  46. mlrun/common/schemas/pipeline.py +8 -1
  47. mlrun/common/schemas/project.py +69 -18
  48. mlrun/common/schemas/runs.py +7 -1
  49. mlrun/common/schemas/runtime_resource.py +8 -12
  50. mlrun/common/schemas/schedule.py +4 -4
  51. mlrun/common/schemas/tag.py +1 -2
  52. mlrun/common/schemas/workflow.py +12 -4
  53. mlrun/common/types.py +14 -1
  54. mlrun/config.py +154 -69
  55. mlrun/data_types/data_types.py +6 -1
  56. mlrun/data_types/spark.py +2 -2
  57. mlrun/data_types/to_pandas.py +67 -37
  58. mlrun/datastore/__init__.py +6 -8
  59. mlrun/datastore/alibaba_oss.py +131 -0
  60. mlrun/datastore/azure_blob.py +143 -42
  61. mlrun/datastore/base.py +102 -58
  62. mlrun/datastore/datastore.py +34 -13
  63. mlrun/datastore/datastore_profile.py +146 -20
  64. mlrun/datastore/dbfs_store.py +3 -7
  65. mlrun/datastore/filestore.py +1 -4
  66. mlrun/datastore/google_cloud_storage.py +97 -33
  67. mlrun/datastore/hdfs.py +56 -0
  68. mlrun/datastore/inmem.py +6 -3
  69. mlrun/datastore/redis.py +7 -2
  70. mlrun/datastore/s3.py +34 -12
  71. mlrun/datastore/snowflake_utils.py +45 -0
  72. mlrun/datastore/sources.py +303 -111
  73. mlrun/datastore/spark_utils.py +31 -2
  74. mlrun/datastore/store_resources.py +9 -7
  75. mlrun/datastore/storeytargets.py +151 -0
  76. mlrun/datastore/targets.py +453 -176
  77. mlrun/datastore/utils.py +72 -58
  78. mlrun/datastore/v3io.py +6 -1
  79. mlrun/db/base.py +274 -41
  80. mlrun/db/factory.py +1 -1
  81. mlrun/db/httpdb.py +893 -225
  82. mlrun/db/nopdb.py +291 -33
  83. mlrun/errors.py +36 -6
  84. mlrun/execution.py +115 -42
  85. mlrun/feature_store/__init__.py +0 -2
  86. mlrun/feature_store/api.py +65 -73
  87. mlrun/feature_store/common.py +7 -12
  88. mlrun/feature_store/feature_set.py +76 -55
  89. mlrun/feature_store/feature_vector.py +39 -31
  90. mlrun/feature_store/ingestion.py +7 -6
  91. mlrun/feature_store/retrieval/base.py +16 -11
  92. mlrun/feature_store/retrieval/dask_merger.py +2 -0
  93. mlrun/feature_store/retrieval/job.py +13 -4
  94. mlrun/feature_store/retrieval/local_merger.py +2 -0
  95. mlrun/feature_store/retrieval/spark_merger.py +24 -32
  96. mlrun/feature_store/steps.py +45 -34
  97. mlrun/features.py +11 -21
  98. mlrun/frameworks/_common/artifacts_library.py +9 -9
  99. mlrun/frameworks/_common/mlrun_interface.py +5 -5
  100. mlrun/frameworks/_common/model_handler.py +48 -48
  101. mlrun/frameworks/_common/plan.py +5 -6
  102. mlrun/frameworks/_common/producer.py +3 -4
  103. mlrun/frameworks/_common/utils.py +5 -5
  104. mlrun/frameworks/_dl_common/loggers/logger.py +6 -7
  105. mlrun/frameworks/_dl_common/loggers/mlrun_logger.py +9 -9
  106. mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +23 -47
  107. mlrun/frameworks/_ml_common/artifacts_library.py +1 -2
  108. mlrun/frameworks/_ml_common/loggers/logger.py +3 -4
  109. mlrun/frameworks/_ml_common/loggers/mlrun_logger.py +4 -5
  110. mlrun/frameworks/_ml_common/model_handler.py +24 -24
  111. mlrun/frameworks/_ml_common/pkl_model_server.py +2 -2
  112. mlrun/frameworks/_ml_common/plan.py +2 -2
  113. mlrun/frameworks/_ml_common/plans/calibration_curve_plan.py +2 -3
  114. mlrun/frameworks/_ml_common/plans/confusion_matrix_plan.py +2 -3
  115. mlrun/frameworks/_ml_common/plans/dataset_plan.py +3 -3
  116. mlrun/frameworks/_ml_common/plans/feature_importance_plan.py +3 -3
  117. mlrun/frameworks/_ml_common/plans/roc_curve_plan.py +4 -4
  118. mlrun/frameworks/_ml_common/utils.py +4 -4
  119. mlrun/frameworks/auto_mlrun/auto_mlrun.py +9 -9
  120. mlrun/frameworks/huggingface/model_server.py +4 -4
  121. mlrun/frameworks/lgbm/__init__.py +33 -33
  122. mlrun/frameworks/lgbm/callbacks/callback.py +2 -4
  123. mlrun/frameworks/lgbm/callbacks/logging_callback.py +4 -5
  124. mlrun/frameworks/lgbm/callbacks/mlrun_logging_callback.py +4 -5
  125. mlrun/frameworks/lgbm/mlrun_interfaces/booster_mlrun_interface.py +1 -3
  126. mlrun/frameworks/lgbm/mlrun_interfaces/mlrun_interface.py +6 -6
  127. mlrun/frameworks/lgbm/model_handler.py +10 -10
  128. mlrun/frameworks/lgbm/model_server.py +6 -6
  129. mlrun/frameworks/lgbm/utils.py +5 -5
  130. mlrun/frameworks/onnx/dataset.py +8 -8
  131. mlrun/frameworks/onnx/mlrun_interface.py +3 -3
  132. mlrun/frameworks/onnx/model_handler.py +6 -6
  133. mlrun/frameworks/onnx/model_server.py +7 -7
  134. mlrun/frameworks/parallel_coordinates.py +6 -6
  135. mlrun/frameworks/pytorch/__init__.py +18 -18
  136. mlrun/frameworks/pytorch/callbacks/callback.py +4 -5
  137. mlrun/frameworks/pytorch/callbacks/logging_callback.py +17 -17
  138. mlrun/frameworks/pytorch/callbacks/mlrun_logging_callback.py +11 -11
  139. mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +23 -29
  140. mlrun/frameworks/pytorch/callbacks_handler.py +38 -38
  141. mlrun/frameworks/pytorch/mlrun_interface.py +20 -20
  142. mlrun/frameworks/pytorch/model_handler.py +17 -17
  143. mlrun/frameworks/pytorch/model_server.py +7 -7
  144. mlrun/frameworks/sklearn/__init__.py +13 -13
  145. mlrun/frameworks/sklearn/estimator.py +4 -4
  146. mlrun/frameworks/sklearn/metrics_library.py +14 -14
  147. mlrun/frameworks/sklearn/mlrun_interface.py +16 -9
  148. mlrun/frameworks/sklearn/model_handler.py +2 -2
  149. mlrun/frameworks/tf_keras/__init__.py +10 -7
  150. mlrun/frameworks/tf_keras/callbacks/logging_callback.py +15 -15
  151. mlrun/frameworks/tf_keras/callbacks/mlrun_logging_callback.py +11 -11
  152. mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +19 -23
  153. mlrun/frameworks/tf_keras/mlrun_interface.py +9 -11
  154. mlrun/frameworks/tf_keras/model_handler.py +14 -14
  155. mlrun/frameworks/tf_keras/model_server.py +6 -6
  156. mlrun/frameworks/xgboost/__init__.py +13 -13
  157. mlrun/frameworks/xgboost/model_handler.py +6 -6
  158. mlrun/k8s_utils.py +61 -17
  159. mlrun/launcher/__init__.py +1 -1
  160. mlrun/launcher/base.py +16 -15
  161. mlrun/launcher/client.py +13 -11
  162. mlrun/launcher/factory.py +1 -1
  163. mlrun/launcher/local.py +23 -13
  164. mlrun/launcher/remote.py +17 -10
  165. mlrun/lists.py +7 -6
  166. mlrun/model.py +478 -103
  167. mlrun/model_monitoring/__init__.py +1 -1
  168. mlrun/model_monitoring/api.py +163 -371
  169. mlrun/{runtimes/mpijob/v1alpha1.py → model_monitoring/applications/__init__.py} +9 -15
  170. mlrun/model_monitoring/applications/_application_steps.py +188 -0
  171. mlrun/model_monitoring/applications/base.py +108 -0
  172. mlrun/model_monitoring/applications/context.py +341 -0
  173. mlrun/model_monitoring/{evidently_application.py → applications/evidently_base.py} +27 -22
  174. mlrun/model_monitoring/applications/histogram_data_drift.py +354 -0
  175. mlrun/model_monitoring/applications/results.py +99 -0
  176. mlrun/model_monitoring/controller.py +131 -278
  177. mlrun/model_monitoring/db/__init__.py +18 -0
  178. mlrun/model_monitoring/db/stores/__init__.py +136 -0
  179. mlrun/model_monitoring/db/stores/base/__init__.py +15 -0
  180. mlrun/model_monitoring/db/stores/base/store.py +213 -0
  181. mlrun/model_monitoring/db/stores/sqldb/__init__.py +13 -0
  182. mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +71 -0
  183. mlrun/model_monitoring/db/stores/sqldb/models/base.py +190 -0
  184. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +103 -0
  185. mlrun/model_monitoring/{stores/models/mysql.py → db/stores/sqldb/models/sqlite.py} +19 -13
  186. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +659 -0
  187. mlrun/model_monitoring/db/stores/v3io_kv/__init__.py +13 -0
  188. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +726 -0
  189. mlrun/model_monitoring/db/tsdb/__init__.py +105 -0
  190. mlrun/model_monitoring/db/tsdb/base.py +448 -0
  191. mlrun/model_monitoring/db/tsdb/helpers.py +30 -0
  192. mlrun/model_monitoring/db/tsdb/tdengine/__init__.py +15 -0
  193. mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +279 -0
  194. mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +42 -0
  195. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +507 -0
  196. mlrun/model_monitoring/db/tsdb/v3io/__init__.py +15 -0
  197. mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +158 -0
  198. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +849 -0
  199. mlrun/model_monitoring/features_drift_table.py +134 -106
  200. mlrun/model_monitoring/helpers.py +199 -55
  201. mlrun/model_monitoring/metrics/__init__.py +13 -0
  202. mlrun/model_monitoring/metrics/histogram_distance.py +127 -0
  203. mlrun/model_monitoring/model_endpoint.py +3 -2
  204. mlrun/model_monitoring/stream_processing.py +131 -398
  205. mlrun/model_monitoring/tracking_policy.py +9 -2
  206. mlrun/model_monitoring/writer.py +161 -125
  207. mlrun/package/__init__.py +6 -6
  208. mlrun/package/context_handler.py +5 -5
  209. mlrun/package/packager.py +7 -7
  210. mlrun/package/packagers/default_packager.py +8 -8
  211. mlrun/package/packagers/numpy_packagers.py +15 -15
  212. mlrun/package/packagers/pandas_packagers.py +5 -5
  213. mlrun/package/packagers/python_standard_library_packagers.py +10 -10
  214. mlrun/package/packagers_manager.py +19 -23
  215. mlrun/package/utils/_formatter.py +6 -6
  216. mlrun/package/utils/_pickler.py +2 -2
  217. mlrun/package/utils/_supported_format.py +4 -4
  218. mlrun/package/utils/log_hint_utils.py +2 -2
  219. mlrun/package/utils/type_hint_utils.py +4 -9
  220. mlrun/platforms/__init__.py +11 -10
  221. mlrun/platforms/iguazio.py +24 -203
  222. mlrun/projects/operations.py +52 -25
  223. mlrun/projects/pipelines.py +191 -197
  224. mlrun/projects/project.py +1227 -400
  225. mlrun/render.py +16 -19
  226. mlrun/run.py +209 -184
  227. mlrun/runtimes/__init__.py +83 -15
  228. mlrun/runtimes/base.py +51 -35
  229. mlrun/runtimes/daskjob.py +17 -10
  230. mlrun/runtimes/databricks_job/databricks_cancel_task.py +1 -1
  231. mlrun/runtimes/databricks_job/databricks_runtime.py +8 -7
  232. mlrun/runtimes/databricks_job/databricks_wrapper.py +1 -1
  233. mlrun/runtimes/funcdoc.py +1 -29
  234. mlrun/runtimes/function_reference.py +1 -1
  235. mlrun/runtimes/kubejob.py +34 -128
  236. mlrun/runtimes/local.py +40 -11
  237. mlrun/runtimes/mpijob/__init__.py +0 -20
  238. mlrun/runtimes/mpijob/abstract.py +9 -10
  239. mlrun/runtimes/mpijob/v1.py +1 -1
  240. mlrun/{model_monitoring/stores/models/sqlite.py → runtimes/nuclio/__init__.py} +7 -9
  241. mlrun/runtimes/nuclio/api_gateway.py +769 -0
  242. mlrun/runtimes/nuclio/application/__init__.py +15 -0
  243. mlrun/runtimes/nuclio/application/application.py +758 -0
  244. mlrun/runtimes/nuclio/application/reverse_proxy.go +95 -0
  245. mlrun/runtimes/{function.py → nuclio/function.py} +200 -83
  246. mlrun/runtimes/{nuclio.py → nuclio/nuclio.py} +6 -6
  247. mlrun/runtimes/{serving.py → nuclio/serving.py} +65 -68
  248. mlrun/runtimes/pod.py +281 -101
  249. mlrun/runtimes/remotesparkjob.py +12 -9
  250. mlrun/runtimes/sparkjob/spark3job.py +67 -51
  251. mlrun/runtimes/utils.py +41 -75
  252. mlrun/secrets.py +9 -5
  253. mlrun/serving/__init__.py +8 -1
  254. mlrun/serving/remote.py +2 -7
  255. mlrun/serving/routers.py +85 -69
  256. mlrun/serving/server.py +69 -44
  257. mlrun/serving/states.py +209 -36
  258. mlrun/serving/utils.py +22 -14
  259. mlrun/serving/v1_serving.py +6 -7
  260. mlrun/serving/v2_serving.py +129 -54
  261. mlrun/track/tracker.py +2 -1
  262. mlrun/track/tracker_manager.py +3 -3
  263. mlrun/track/trackers/mlflow_tracker.py +6 -2
  264. mlrun/utils/async_http.py +6 -8
  265. mlrun/utils/azure_vault.py +1 -1
  266. mlrun/utils/clones.py +1 -2
  267. mlrun/utils/condition_evaluator.py +3 -3
  268. mlrun/utils/db.py +21 -3
  269. mlrun/utils/helpers.py +405 -225
  270. mlrun/utils/http.py +3 -6
  271. mlrun/utils/logger.py +112 -16
  272. mlrun/utils/notifications/notification/__init__.py +17 -13
  273. mlrun/utils/notifications/notification/base.py +50 -2
  274. mlrun/utils/notifications/notification/console.py +2 -0
  275. mlrun/utils/notifications/notification/git.py +24 -1
  276. mlrun/utils/notifications/notification/ipython.py +3 -1
  277. mlrun/utils/notifications/notification/slack.py +96 -21
  278. mlrun/utils/notifications/notification/webhook.py +59 -2
  279. mlrun/utils/notifications/notification_pusher.py +149 -30
  280. mlrun/utils/regex.py +9 -0
  281. mlrun/utils/retryer.py +208 -0
  282. mlrun/utils/singleton.py +1 -1
  283. mlrun/utils/v3io_clients.py +4 -6
  284. mlrun/utils/version/version.json +2 -2
  285. mlrun/utils/version/version.py +2 -6
  286. mlrun-1.7.0.dist-info/METADATA +378 -0
  287. mlrun-1.7.0.dist-info/RECORD +351 -0
  288. {mlrun-1.6.4rc7.dist-info → mlrun-1.7.0.dist-info}/WHEEL +1 -1
  289. mlrun/feature_store/retrieval/conversion.py +0 -273
  290. mlrun/kfpops.py +0 -868
  291. mlrun/model_monitoring/application.py +0 -310
  292. mlrun/model_monitoring/batch.py +0 -1095
  293. mlrun/model_monitoring/prometheus.py +0 -219
  294. mlrun/model_monitoring/stores/__init__.py +0 -111
  295. mlrun/model_monitoring/stores/kv_model_endpoint_store.py +0 -576
  296. mlrun/model_monitoring/stores/model_endpoint_store.py +0 -147
  297. mlrun/model_monitoring/stores/models/__init__.py +0 -27
  298. mlrun/model_monitoring/stores/models/base.py +0 -84
  299. mlrun/model_monitoring/stores/sql_model_endpoint_store.py +0 -384
  300. mlrun/platforms/other.py +0 -306
  301. mlrun-1.6.4rc7.dist-info/METADATA +0 -272
  302. mlrun-1.6.4rc7.dist-info/RECORD +0 -314
  303. {mlrun-1.6.4rc7.dist-info → mlrun-1.7.0.dist-info}/LICENSE +0 -0
  304. {mlrun-1.6.4rc7.dist-info → mlrun-1.7.0.dist-info}/entry_points.txt +0 -0
  305. {mlrun-1.6.4rc7.dist-info → mlrun-1.7.0.dist-info}/top_level.txt +0 -0
@@ -15,7 +15,7 @@
15
15
  import os
16
16
  from abc import abstractmethod
17
17
  from datetime import datetime
18
- from typing import Any, Callable, Dict, Generic, List, Union
18
+ from typing import Any, Callable, Generic, Union
19
19
 
20
20
  import yaml
21
21
 
@@ -60,7 +60,7 @@ class TensorboardLogger(Logger, Generic[DLTypes.WeightType]):
60
60
 
61
61
  def __init__(
62
62
  self,
63
- statistics_functions: List[
63
+ statistics_functions: list[
64
64
  Callable[[DLTypes.WeightType], Union[float, DLTypes.WeightType]]
65
65
  ],
66
66
  context: mlrun.MLClientCtx = None,
@@ -94,7 +94,7 @@ class TensorboardLogger(Logger, Generic[DLTypes.WeightType]):
94
94
  :raise MLRunInvalidArgumentError: If the `update_frequency` is illegal or if `tensorboard_directory` and
95
95
  `context` were not given.
96
96
  """
97
- super(TensorboardLogger, self).__init__(context=context)
97
+ super().__init__(context=context)
98
98
 
99
99
  # Validate the context and tensorboard directory combination:
100
100
  if tensorboard_directory is None and context is None:
@@ -136,7 +136,7 @@ class TensorboardLogger(Logger, Generic[DLTypes.WeightType]):
136
136
  self._weights_statistics[statistic_function.__name__] = {} # type: Dict[str, List[float]]
137
137
 
138
138
  @property
139
- def weights(self) -> Dict[str, DLTypes.WeightType]:
139
+ def weights(self) -> dict[str, DLTypes.WeightType]:
140
140
  """
141
141
  Get the logged weights dictionary. Each of the logged weight will be found by its name.
142
142
 
@@ -145,7 +145,7 @@ class TensorboardLogger(Logger, Generic[DLTypes.WeightType]):
145
145
  return self._weights
146
146
 
147
147
  @property
148
- def weight_statistics(self) -> Dict[str, Dict[str, List[float]]]:
148
+ def weight_statistics(self) -> dict[str, dict[str, list[float]]]:
149
149
  """
150
150
  Get the logged statistics for all the tracked weights. Each statistic has a dictionary of weights and their list
151
151
  of epochs values.
@@ -514,23 +514,15 @@ class TensorboardLogger(Logger, Generic[DLTypes.WeightType]):
514
514
  + list(self._dynamic_hyperparameters.keys()),
515
515
  ],
516
516
  ):
517
- text += "\n * **{}**: {}".format(
518
- property_name.capitalize(),
519
- self._markdown_print(value=property_value, tabs=2),
520
- )
517
+ text += f"\n * **{property_name.capitalize()}**: {self._markdown_print(value=property_value, tabs=2)}"
521
518
 
522
519
  # Add the context state:
523
520
  if self._context is not None:
524
- text += "\n####Context initial state: ({})".format(
525
- self._generate_context_link(context=self._context)
526
- )
521
+ text += f"\n####Context initial state: ({self._generate_context_link(context=self._context)})"
527
522
  for property_name, property_value in self._extract_properties_from_context(
528
523
  context=self._context
529
524
  ).items():
530
- text += "\n * **{}**: {}".format(
531
- property_name.capitalize(),
532
- self._markdown_print(value=property_value, tabs=2),
533
- )
525
+ text += f"\n * **{property_name.capitalize()}**: {self._markdown_print(value=property_value, tabs=2)}"
534
526
 
535
527
  return text
536
528
 
@@ -541,7 +533,7 @@ class TensorboardLogger(Logger, Generic[DLTypes.WeightType]):
541
533
 
542
534
  :return: The generated text.
543
535
  """
544
- text = "####Epoch {} summary:".format(self._epochs)
536
+ text = f"####Epoch {self._epochs} summary:"
545
537
  if self._context is not None:
546
538
  for property_name, property_value in self._extract_properties_from_context(
547
539
  context=self._context
@@ -555,16 +547,13 @@ class TensorboardLogger(Logger, Generic[DLTypes.WeightType]):
555
547
  "inputs",
556
548
  "parameters",
557
549
  ]:
558
- text += "\n * **{}**: {}".format(
559
- property_name.capitalize(),
560
- self._markdown_print(value=property_value, tabs=2),
550
+ text += (
551
+ f"\n * **{property_name.capitalize()}**: "
552
+ f"{self._markdown_print(value=property_value, tabs=2)}"
561
553
  )
562
554
  else:
563
555
  for property_name, property_value in self._extract_epoch_results().items():
564
- text += "\n * **{}**: {}".format(
565
- property_name.capitalize(),
566
- self._markdown_print(value=property_value, tabs=2),
567
- )
556
+ text += f"\n * **{property_name.capitalize()}**: {self._markdown_print(value=property_value, tabs=2)}"
568
557
  return text
569
558
 
570
559
  def _generate_run_end_text(self) -> str:
@@ -575,30 +564,22 @@ class TensorboardLogger(Logger, Generic[DLTypes.WeightType]):
575
564
  :return: The generated text.
576
565
  """
577
566
  # Write the run summary:
578
- text = "\n####Run final summary - epoch {}:".format(self._epochs)
567
+ text = f"\n####Run final summary - epoch {self._epochs}:"
579
568
  for property_name, property_value in self._extract_epoch_results().items():
580
- text += "\n * **{}**: {}".format(
581
- property_name.capitalize(),
582
- self._markdown_print(value=property_value, tabs=2),
583
- )
569
+ text += f"\n * **{property_name.capitalize()}**: {self._markdown_print(value=property_value, tabs=2)}"
584
570
 
585
571
  # Add the context final state:
586
572
  if self._context is not None:
587
- text += "\n####Context final state: ({})".format(
588
- self._generate_context_link(context=self._context)
589
- )
573
+ text += f"\n####Context final state: ({self._generate_context_link(context=self._context)})"
590
574
  for property_name, property_value in self._extract_properties_from_context(
591
575
  context=self._context
592
576
  ).items():
593
- text += "\n * **{}**: {}".format(
594
- property_name.capitalize(),
595
- self._markdown_print(value=property_value, tabs=2),
596
- )
577
+ text += f"\n * **{property_name.capitalize()}**: {self._markdown_print(value=property_value, tabs=2)}"
597
578
  return text
598
579
 
599
580
  def _extract_epoch_results(
600
581
  self, epoch: int = -1
601
- ) -> Dict[str, Dict[str, DLTypes.TrackableType]]:
582
+ ) -> dict[str, dict[str, DLTypes.TrackableType]]:
602
583
  """
603
584
  Extract the given epoch results from all the collected values and results.
604
585
 
@@ -633,17 +614,12 @@ class TensorboardLogger(Logger, Generic[DLTypes.WeightType]):
633
614
  :return: The generated link.
634
615
  """
635
616
  return (
636
- '<a href="{}/{}/{}/jobs/monitor/{}/overview" target="_blank">{}</a>'.format(
637
- config.resolve_ui_url(),
638
- config.ui.projects_prefix,
639
- context.project,
640
- context.uid,
641
- link_text,
642
- )
617
+ f'<a href="{config.resolve_ui_url()}/{config.ui.projects_prefix}/{context.project}'
618
+ f'/jobs/monitor/{context.uid}/overview" target="_blank">{link_text}</a>'
643
619
  )
644
620
 
645
621
  @staticmethod
646
- def _extract_properties_from_context(context: mlrun.MLClientCtx) -> Dict[str, Any]:
622
+ def _extract_properties_from_context(context: mlrun.MLClientCtx) -> dict[str, Any]:
647
623
  """
648
624
  Extract the properties of the run this context belongs to.
649
625
 
@@ -672,13 +648,13 @@ class TensorboardLogger(Logger, Generic[DLTypes.WeightType]):
672
648
  if isinstance(value, list):
673
649
  if len(value) == 0:
674
650
  return ""
675
- text = "\n" + yaml.dump(value)
651
+ text = "\n" + yaml.safe_dump(value)
676
652
  text = " \n".join([" " * tabs + line for line in text.splitlines()])
677
653
  return text
678
654
  if isinstance(value, dict):
679
655
  if len(value) == 0:
680
656
  return ""
681
- text = yaml.dump(value)
657
+ text = yaml.safe_dump(value)
682
658
  text = " \n".join(
683
659
  [" " * tabs + "- " + line for line in text.splitlines()]
684
660
  )
@@ -13,7 +13,6 @@
13
13
  # limitations under the License.
14
14
  #
15
15
  from abc import ABC
16
- from typing import List
17
16
 
18
17
  from .._common.artifacts_library import ArtifactsLibrary, Plan
19
18
  from .plans import (
@@ -47,7 +46,7 @@ class MLArtifactsLibrary(ArtifactsLibrary, ABC):
47
46
  @classmethod
48
47
  def default(
49
48
  cls, model: MLTypes.ModelType, y: MLTypes.DatasetType = None, *args, **kwargs
50
- ) -> List[Plan]:
49
+ ) -> list[Plan]:
51
50
  """
52
51
  Get the default artifacts plans list of this framework's library.
53
52
 
@@ -12,7 +12,6 @@
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
14
  #
15
- from typing import Dict, List
16
15
 
17
16
  from ..utils import MLTypes
18
17
 
@@ -42,7 +41,7 @@ class Logger:
42
41
  self._iterations = 0
43
42
 
44
43
  @property
45
- def results(self) -> Dict[str, Dict[str, List[float]]]:
44
+ def results(self) -> dict[str, dict[str, list[float]]]:
46
45
  """
47
46
  Get the results logged. The results will be stored in a dictionary where each key is the validation set name
48
47
  and the value is a dictionary of metrics to their list of iterations values.
@@ -52,7 +51,7 @@ class Logger:
52
51
  return self._results
53
52
 
54
53
  @property
55
- def static_hyperparameters(self) -> Dict[str, MLTypes.TrackableType]:
54
+ def static_hyperparameters(self) -> dict[str, MLTypes.TrackableType]:
56
55
  """
57
56
  Get the static hyperparameters logged. The hyperparameters will be stored in a dictionary where each key is the
58
57
  hyperparameter name and the value is his logged value.
@@ -62,7 +61,7 @@ class Logger:
62
61
  return self._static_hyperparameters
63
62
 
64
63
  @property
65
- def dynamic_hyperparameters(self) -> Dict[str, List[MLTypes.TrackableType]]:
64
+ def dynamic_hyperparameters(self) -> dict[str, list[MLTypes.TrackableType]]:
66
65
  """
67
66
  Get the dynamic hyperparameters logged. The hyperparameters will be stored in a dictionary where each key is the
68
67
  hyperparameter name and the value is a list of his logged values per epoch.
@@ -13,7 +13,6 @@
13
13
  # limitations under the License.
14
14
  #
15
15
  import re
16
- from typing import Dict, List
17
16
 
18
17
  import numpy as np
19
18
  import plotly.graph_objects as go
@@ -39,7 +38,7 @@ class MLRunLogger(Logger):
39
38
 
40
39
  :param context: MLRun context to log to. The context parameters can be logged as static hyperparameters.
41
40
  """
42
- super(MLRunLogger, self).__init__()
41
+ super().__init__()
43
42
 
44
43
  # An MLRun context to log to:
45
44
  self._context = context
@@ -47,7 +46,7 @@ class MLRunLogger(Logger):
47
46
  # Prepare the artifacts dictionary:
48
47
  self._artifacts = {} # type: Dict[str, Artifact]
49
48
 
50
- def get_artifacts(self) -> Dict[str, Artifact]:
49
+ def get_artifacts(self) -> dict[str, Artifact]:
51
50
  """
52
51
  Get the artifacts created by this logger.
53
52
 
@@ -55,7 +54,7 @@ class MLRunLogger(Logger):
55
54
  """
56
55
  return self._artifacts
57
56
 
58
- def get_metrics(self) -> Dict[str, float]:
57
+ def get_metrics(self) -> dict[str, float]:
59
58
  """
60
59
  Generate a metrics summary to log along the model.
61
60
 
@@ -144,7 +143,7 @@ class MLRunLogger(Logger):
144
143
 
145
144
  @staticmethod
146
145
  def _produce_convergence_plot_artifact(
147
- name: str, values: List[float]
146
+ name: str, values: list[float]
148
147
  ) -> PlotlyArtifact:
149
148
  """
150
149
  Produce the convergences for the provided metric according.
@@ -13,7 +13,7 @@
13
13
  # limitations under the License.
14
14
  #
15
15
  from abc import ABC
16
- from typing import Dict, List, Union
16
+ from typing import Union
17
17
 
18
18
  import mlrun
19
19
  from mlrun.artifacts import Artifact
@@ -35,10 +35,10 @@ class MLModelHandler(ModelHandler, ABC):
35
35
  model_path: MLTypes.PathType = None,
36
36
  model_name: str = None,
37
37
  modules_map: Union[
38
- Dict[str, Union[None, str, List[str]]], MLTypes.PathType
38
+ dict[str, Union[None, str, list[str]]], MLTypes.PathType
39
39
  ] = None,
40
40
  custom_objects_map: Union[
41
- Dict[str, Union[str, List[str]]], MLTypes.PathType
41
+ dict[str, Union[str, list[str]]], MLTypes.PathType
42
42
  ] = None,
43
43
  custom_objects_directory: MLTypes.PathType = None,
44
44
  context: mlrun.MLClientCtx = None,
@@ -105,7 +105,7 @@ class MLModelHandler(ModelHandler, ABC):
105
105
  self._feature_weights = None # type: List[float]
106
106
 
107
107
  # Continue the initialization:
108
- super(MLModelHandler, self).__init__(
108
+ super().__init__(
109
109
  model=model,
110
110
  model_path=model_path,
111
111
  model_name=model_name,
@@ -153,7 +153,7 @@ class MLModelHandler(ModelHandler, ABC):
153
153
  return self._feature_vector
154
154
 
155
155
  @property
156
- def feature_weights(self) -> List[float]:
156
+ def feature_weights(self) -> list[float]:
157
157
  """
158
158
  Get the feature weights set in this handler.
159
159
 
@@ -213,7 +213,7 @@ class MLModelHandler(ModelHandler, ABC):
213
213
  """
214
214
  self._feature_vector = feature_vector
215
215
 
216
- def set_feature_weights(self, feature_weights: List[float]):
216
+ def set_feature_weights(self, feature_weights: list[float]):
217
217
  """
218
218
  Set the feature weights this model will be logged with.
219
219
 
@@ -224,18 +224,18 @@ class MLModelHandler(ModelHandler, ABC):
224
224
  def log(
225
225
  self,
226
226
  tag: str = "",
227
- labels: Dict[str, Union[str, int, float]] = None,
228
- parameters: Dict[str, Union[str, int, float]] = None,
229
- inputs: List[Feature] = None,
230
- outputs: List[Feature] = None,
231
- metrics: Dict[str, Union[int, float]] = None,
232
- artifacts: Dict[str, Artifact] = None,
233
- extra_data: Dict[str, MLTypes.ExtraDataType] = None,
227
+ labels: dict[str, Union[str, int, float]] = None,
228
+ parameters: dict[str, Union[str, int, float]] = None,
229
+ inputs: list[Feature] = None,
230
+ outputs: list[Feature] = None,
231
+ metrics: dict[str, Union[int, float]] = None,
232
+ artifacts: dict[str, Artifact] = None,
233
+ extra_data: dict[str, MLTypes.ExtraDataType] = None,
234
234
  algorithm: str = None,
235
235
  sample_set: MLTypes.DatasetType = None,
236
236
  target_columns: MLTypes.TargetColumnsNamesType = None,
237
237
  feature_vector: str = None,
238
- feature_weights: List[float] = None,
238
+ feature_weights: list[float] = None,
239
239
  ):
240
240
  """
241
241
  Log the model held by this handler into the MLRun context provided.
@@ -281,7 +281,7 @@ class MLModelHandler(ModelHandler, ABC):
281
281
  self.set_feature_weights(feature_weights=feature_weights)
282
282
 
283
283
  # Continue with the handler logging:
284
- super(MLModelHandler, self).log(
284
+ super().log(
285
285
  tag=tag,
286
286
  labels=labels,
287
287
  parameters=parameters,
@@ -299,15 +299,15 @@ class MLModelHandler(ModelHandler, ABC):
299
299
 
300
300
  def update(
301
301
  self,
302
- labels: Dict[str, Union[str, int, float]] = None,
303
- parameters: Dict[str, Union[str, int, float]] = None,
304
- inputs: List[Feature] = None,
305
- outputs: List[Feature] = None,
306
- metrics: Dict[str, Union[int, float]] = None,
307
- artifacts: Dict[str, Artifact] = None,
308
- extra_data: Dict[str, MLTypes.ExtraDataType] = None,
302
+ labels: dict[str, Union[str, int, float]] = None,
303
+ parameters: dict[str, Union[str, int, float]] = None,
304
+ inputs: list[Feature] = None,
305
+ outputs: list[Feature] = None,
306
+ metrics: dict[str, Union[int, float]] = None,
307
+ artifacts: dict[str, Artifact] = None,
308
+ extra_data: dict[str, MLTypes.ExtraDataType] = None,
309
309
  feature_vector: str = None,
310
- feature_weights: List[float] = None,
310
+ feature_weights: list[float] = None,
311
311
  ):
312
312
  """
313
313
  Update the model held by this handler into the MLRun context provided, updating the model's artifact properties
@@ -336,7 +336,7 @@ class MLModelHandler(ModelHandler, ABC):
336
336
  self._feature_weights = feature_weights
337
337
 
338
338
  # Continue with the handler update:
339
- super(MLModelHandler, self).update(
339
+ super().update(
340
340
  labels=labels,
341
341
  parameters=parameters,
342
342
  inputs=inputs,
@@ -12,7 +12,7 @@
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
14
  #
15
- from typing import Any, Dict
15
+ from typing import Any
16
16
 
17
17
  import numpy as np
18
18
  import pandas as pd
@@ -59,7 +59,7 @@ class PickleModelServer(V2ModelServer):
59
59
 
60
60
  return y_pred.tolist()
61
61
 
62
- def explain(self, request: Dict[str, Any]) -> str:
62
+ def explain(self, request: dict[str, Any]) -> str:
63
63
  """
64
64
  Returns a string listing the model that is being served in this serving function and the function name.
65
65
 
@@ -16,7 +16,7 @@ import json
16
16
  from abc import ABC, abstractmethod
17
17
  from enum import Enum
18
18
 
19
- from IPython.core.display import HTML, display
19
+ from IPython.display import HTML, display
20
20
 
21
21
  import mlrun
22
22
 
@@ -57,7 +57,7 @@ class MLPlan(Plan, ABC):
57
57
  False.
58
58
  """
59
59
  self._need_probabilities = need_probabilities
60
- super(MLPlan, self).__init__()
60
+ super().__init__()
61
61
 
62
62
  @property
63
63
  def need_probabilities(self) -> bool:
@@ -12,7 +12,6 @@
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
14
  #
15
- from typing import Dict
16
15
 
17
16
  import plotly.graph_objects as go
18
17
  from sklearn.calibration import calibration_curve
@@ -51,7 +50,7 @@ class CalibrationCurvePlan(MLPlotPlan):
51
50
  self._strategy = strategy
52
51
 
53
52
  # Continue the initialization for the MLPlan:
54
- super(CalibrationCurvePlan, self).__init__(need_probabilities=True)
53
+ super().__init__(need_probabilities=True)
55
54
 
56
55
  def is_ready(self, stage: MLPlanStages, is_probabilities: bool) -> bool:
57
56
  """
@@ -73,7 +72,7 @@ class CalibrationCurvePlan(MLPlotPlan):
73
72
  model: MLTypes.ModelType = None,
74
73
  x: MLTypes.DatasetType = None,
75
74
  **kwargs,
76
- ) -> Dict[str, Artifact]:
75
+ ) -> dict[str, Artifact]:
77
76
  """
78
77
  Produce the calibration curve according to the ground truth (y) and predictions (y_pred) values. If predictions
79
78
  are not available, the model and a dataset can be given to produce them.
@@ -12,7 +12,6 @@
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
14
  #
15
- from typing import Dict
16
15
 
17
16
  import numpy as np
18
17
  import pandas as pd
@@ -57,7 +56,7 @@ class ConfusionMatrixPlan(MLPlotPlan):
57
56
  self._normalize = normalize
58
57
 
59
58
  # Continue the initialization for the MLPlan:
60
- super(ConfusionMatrixPlan, self).__init__()
59
+ super().__init__()
61
60
 
62
61
  def is_ready(self, stage: MLPlanStages, is_probabilities: bool) -> bool:
63
62
  """
@@ -79,7 +78,7 @@ class ConfusionMatrixPlan(MLPlotPlan):
79
78
  model: MLTypes.ModelType = None,
80
79
  x: MLTypes.DatasetType = None,
81
80
  **kwargs,
82
- ) -> Dict[str, Artifact]:
81
+ ) -> dict[str, Artifact]:
83
82
  """
84
83
  Produce the confusion matrix according to the ground truth (y) and predictions (y_pred) values. If predictions
85
84
  are not available, the model and a dataset can be given to produce them.
@@ -13,7 +13,7 @@
13
13
  # limitations under the License.
14
14
  #
15
15
  from enum import Enum
16
- from typing import Dict, Union
16
+ from typing import Union
17
17
 
18
18
  import mlrun.errors
19
19
  from mlrun.artifacts import Artifact, DatasetArtifact
@@ -92,7 +92,7 @@ class DatasetPlan(MLPlan):
92
92
  self._plans = {} # TODO: Implement DatasetPlansLibrary with dataset specific artifacts plans.
93
93
 
94
94
  # Continue initializing the plan:
95
- super(DatasetPlan, self).__init__(need_probabilities=False)
95
+ super().__init__(need_probabilities=False)
96
96
 
97
97
  def is_ready(self, stage: MLPlanStages, is_probabilities: bool) -> bool:
98
98
  """
@@ -124,7 +124,7 @@ class DatasetPlan(MLPlan):
124
124
  y: MLTypes.DatasetType = None,
125
125
  target_columns_names: MLTypes.TargetColumnsNamesType = None,
126
126
  **kwargs,
127
- ) -> Dict[str, Artifact]:
127
+ ) -> dict[str, Artifact]:
128
128
  """
129
129
  Produce the dataset artifact according to this plan.
130
130
 
@@ -12,7 +12,7 @@
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
14
  #
15
- from typing import Dict, Union
15
+ from typing import Union
16
16
 
17
17
  import numpy as np
18
18
  import plotly.graph_objects as go
@@ -38,7 +38,7 @@ class FeatureImportancePlan(MLPlotPlan):
38
38
  An example of use can be seen at the Scikit-Learn docs here:
39
39
  https://scikit-learn.org/stable/auto_examples/ensemble/plot_forest_importances.html
40
40
  """
41
- super(FeatureImportancePlan, self).__init__()
41
+ super().__init__()
42
42
 
43
43
  def is_ready(self, stage: MLPlanStages, is_probabilities: bool) -> bool:
44
44
  """
@@ -55,7 +55,7 @@ class FeatureImportancePlan(MLPlotPlan):
55
55
 
56
56
  def produce(
57
57
  self, model: MLTypes.ModelType, x: MLTypes.DatasetType, **kwargs
58
- ) -> Dict[str, Artifact]:
58
+ ) -> dict[str, Artifact]:
59
59
  """
60
60
  Produce the feature importance according to the given model and dataset ('x').
61
61
 
@@ -12,7 +12,7 @@
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
14
  #
15
- from typing import Dict, List, Union
15
+ from typing import Union
16
16
 
17
17
  import numpy as np
18
18
  import pandas as pd
@@ -41,7 +41,7 @@ class ROCCurvePlan(MLPlotPlan):
41
41
  average: str = "macro",
42
42
  max_fpr: float = None,
43
43
  multi_class: str = "raise",
44
- labels: List[str] = None,
44
+ labels: list[str] = None,
45
45
  ):
46
46
  """
47
47
  Initialize a receiver operating characteristic plan with the given configuration.
@@ -75,7 +75,7 @@ class ROCCurvePlan(MLPlotPlan):
75
75
  self._labels = labels
76
76
 
77
77
  # Continue the initialization for the MLPlan:
78
- super(ROCCurvePlan, self).__init__(need_probabilities=True)
78
+ super().__init__(need_probabilities=True)
79
79
 
80
80
  def is_ready(self, stage: MLPlanStages, is_probabilities: bool) -> bool:
81
81
  """
@@ -97,7 +97,7 @@ class ROCCurvePlan(MLPlotPlan):
97
97
  model: MLTypes.ModelType = None,
98
98
  x: MLTypes.DatasetType = None,
99
99
  **kwargs,
100
- ) -> Dict[str, Artifact]:
100
+ ) -> dict[str, Artifact]:
101
101
  """
102
102
  Produce the roc curve according to the ground truth (y) and predictions (y_pred) values. If predictions are not
103
103
  available, the model and a dataset can be given to produce them.
@@ -14,7 +14,7 @@
14
14
  #
15
15
  from abc import ABC
16
16
  from enum import Enum
17
- from typing import Callable, List, Tuple, Union
17
+ from typing import Callable, Union
18
18
 
19
19
  import pandas as pd
20
20
  from sklearn.base import is_classifier, is_regressor
@@ -137,10 +137,10 @@ class MLTypes(CommonTypes, ABC):
137
137
  # of the function and the full module path to the function to import. Arguments to use when calling the metric can
138
138
  # be joined by wrapping it as a tuple:
139
139
  # TODO: will be moved to SKLearn's framework once LightGBM and XGBoost are fully supported.
140
- MetricEntryType = Union[Tuple[Union[Callable, str], dict], Callable, str]
140
+ MetricEntryType = Union[tuple[Union[Callable, str], dict], Callable, str]
141
141
 
142
142
  # Type for the target column name - a list of indices or column names that are the ground truth (y) of a dataset.
143
- TargetColumnsNamesType = Union[List[str], List[int]]
143
+ TargetColumnsNamesType = Union[list[str], list[int]]
144
144
 
145
145
 
146
146
  class MLUtils(CommonUtils, ABC):
@@ -154,7 +154,7 @@ class MLUtils(CommonUtils, ABC):
154
154
  y: CommonTypes.DatasetType = None,
155
155
  target_columns_names: MLTypes.TargetColumnsNamesType = None,
156
156
  default_target_column_prefix: str = "y_",
157
- ) -> Tuple[pd.DataFrame, Union[MLTypes.TargetColumnsNamesType, None]]:
157
+ ) -> tuple[pd.DataFrame, Union[MLTypes.TargetColumnsNamesType, None]]:
158
158
  """
159
159
  Concatenating the provided x and y data into a single pd.DataFrame, casting from np.ndarray and renaming y's
160
160
  original columns if 'y_columns' was not provided. The concatenated dataset index level will be reset to 0
@@ -13,7 +13,7 @@
13
13
  # limitations under the License.
14
14
  #
15
15
  # flake8: noqa - this is until we take care of the F401 violations with respect to __all__ & sphinx
16
- from typing import Callable, Dict, List, Tuple, Type, Union
16
+ from typing import Callable, Union
17
17
 
18
18
  import mlrun
19
19
  from mlrun.artifacts import get_model
@@ -165,7 +165,7 @@ def get_framework_by_class_name(model: CommonTypes.ModelType) -> str:
165
165
  )
166
166
 
167
167
 
168
- def framework_to_model_handler(framework: str) -> Type[ModelHandler]:
168
+ def framework_to_model_handler(framework: str) -> type[ModelHandler]:
169
169
  """
170
170
  Get the ModelHandler class of the given framework's name.
171
171
 
@@ -262,7 +262,7 @@ class AutoMLRun:
262
262
  @staticmethod
263
263
  def _get_framework(
264
264
  model: CommonTypes.ModelType = None, model_path: str = None
265
- ) -> Union[Tuple[str, dict]]:
265
+ ) -> Union[tuple[str, dict]]:
266
266
  """
267
267
  Try to get the framework from the model or model path provided. The framework can be read from the model path
268
268
  only if the model path is of a logged model artifact (store object uri).
@@ -322,8 +322,8 @@ class AutoMLRun:
322
322
  model_path: str,
323
323
  model_name: str = None,
324
324
  context: mlrun.MLClientCtx = None,
325
- modules_map: Union[Dict[str, Union[None, str, List[str]]], str] = None,
326
- custom_objects_map: Union[Dict[str, Union[str, List[str]]], str] = None,
325
+ modules_map: Union[dict[str, Union[None, str, list[str]]], str] = None,
326
+ custom_objects_map: Union[dict[str, Union[str, list[str]]], str] = None,
327
327
  custom_objects_directory: str = None,
328
328
  framework: str = None,
329
329
  **kwargs,
@@ -363,7 +363,7 @@ class AutoMLRun:
363
363
 
364
364
  {
365
365
  "/.../custom_model.py": "MyModel",
366
- "/.../custom_objects.py": ["object1", "object2"]
366
+ "/.../custom_objects.py": ["object1", "object2"],
367
367
  }
368
368
 
369
369
  All the paths will be accessed from the given 'custom_objects_directory',
@@ -420,8 +420,8 @@ class AutoMLRun:
420
420
  model_name: str = None,
421
421
  tag: str = "",
422
422
  model_path: str = None,
423
- modules_map: Union[Dict[str, Union[None, str, List[str]]], str] = None,
424
- custom_objects_map: Union[Dict[str, Union[str, List[str]]], str] = None,
423
+ modules_map: Union[dict[str, Union[None, str, list[str]]], str] = None,
424
+ custom_objects_map: Union[dict[str, Union[str, list[str]]], str] = None,
425
425
  custom_objects_directory: str = None,
426
426
  context: mlrun.MLClientCtx = None,
427
427
  framework: str = None,
@@ -464,7 +464,7 @@ class AutoMLRun:
464
464
 
465
465
  {
466
466
  "/.../custom_model.py": "MyModel",
467
- "/.../custom_objects.py": ["object1", "object2"]
467
+ "/.../custom_objects.py": ["object1", "object2"],
468
468
  }
469
469
 
470
470
  All the paths will be accessed from the given 'custom_objects_directory',