mlrun 1.6.4rc2__py3-none-any.whl → 1.7.0rc20__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (291)
  1. mlrun/__init__.py +11 -1
  2. mlrun/__main__.py +26 -112
  3. mlrun/alerts/__init__.py +15 -0
  4. mlrun/alerts/alert.py +144 -0
  5. mlrun/api/schemas/__init__.py +5 -4
  6. mlrun/artifacts/__init__.py +8 -3
  7. mlrun/artifacts/base.py +46 -257
  8. mlrun/artifacts/dataset.py +11 -192
  9. mlrun/artifacts/manager.py +47 -48
  10. mlrun/artifacts/model.py +31 -159
  11. mlrun/artifacts/plots.py +23 -380
  12. mlrun/common/constants.py +69 -0
  13. mlrun/common/db/sql_session.py +2 -3
  14. mlrun/common/formatters/__init__.py +19 -0
  15. mlrun/common/formatters/artifact.py +21 -0
  16. mlrun/common/formatters/base.py +78 -0
  17. mlrun/common/formatters/function.py +41 -0
  18. mlrun/common/formatters/pipeline.py +53 -0
  19. mlrun/common/formatters/project.py +51 -0
  20. mlrun/common/helpers.py +1 -2
  21. mlrun/common/model_monitoring/helpers.py +9 -5
  22. mlrun/{runtimes → common/runtimes}/constants.py +37 -9
  23. mlrun/common/schemas/__init__.py +24 -4
  24. mlrun/common/schemas/alert.py +203 -0
  25. mlrun/common/schemas/api_gateway.py +148 -0
  26. mlrun/common/schemas/artifact.py +18 -8
  27. mlrun/common/schemas/auth.py +11 -5
  28. mlrun/common/schemas/background_task.py +1 -1
  29. mlrun/common/schemas/client_spec.py +4 -1
  30. mlrun/common/schemas/feature_store.py +16 -16
  31. mlrun/common/schemas/frontend_spec.py +8 -7
  32. mlrun/common/schemas/function.py +5 -1
  33. mlrun/common/schemas/hub.py +11 -18
  34. mlrun/common/schemas/memory_reports.py +2 -2
  35. mlrun/common/schemas/model_monitoring/__init__.py +18 -3
  36. mlrun/common/schemas/model_monitoring/constants.py +83 -26
  37. mlrun/common/schemas/model_monitoring/grafana.py +13 -9
  38. mlrun/common/schemas/model_monitoring/model_endpoints.py +99 -16
  39. mlrun/common/schemas/notification.py +4 -4
  40. mlrun/common/schemas/object.py +2 -2
  41. mlrun/{runtimes/mpijob/v1alpha1.py → common/schemas/pagination.py} +10 -13
  42. mlrun/common/schemas/pipeline.py +1 -10
  43. mlrun/common/schemas/project.py +24 -23
  44. mlrun/common/schemas/runtime_resource.py +8 -12
  45. mlrun/common/schemas/schedule.py +3 -3
  46. mlrun/common/schemas/tag.py +1 -2
  47. mlrun/common/schemas/workflow.py +2 -2
  48. mlrun/common/types.py +7 -1
  49. mlrun/config.py +54 -17
  50. mlrun/data_types/to_pandas.py +10 -12
  51. mlrun/datastore/__init__.py +5 -8
  52. mlrun/datastore/alibaba_oss.py +130 -0
  53. mlrun/datastore/azure_blob.py +17 -5
  54. mlrun/datastore/base.py +62 -39
  55. mlrun/datastore/datastore.py +28 -9
  56. mlrun/datastore/datastore_profile.py +146 -20
  57. mlrun/datastore/filestore.py +0 -1
  58. mlrun/datastore/google_cloud_storage.py +6 -2
  59. mlrun/datastore/hdfs.py +56 -0
  60. mlrun/datastore/inmem.py +2 -2
  61. mlrun/datastore/redis.py +6 -2
  62. mlrun/datastore/s3.py +9 -0
  63. mlrun/datastore/snowflake_utils.py +43 -0
  64. mlrun/datastore/sources.py +201 -96
  65. mlrun/datastore/spark_utils.py +1 -2
  66. mlrun/datastore/store_resources.py +7 -7
  67. mlrun/datastore/targets.py +358 -104
  68. mlrun/datastore/utils.py +72 -58
  69. mlrun/datastore/v3io.py +5 -1
  70. mlrun/db/base.py +185 -35
  71. mlrun/db/factory.py +1 -1
  72. mlrun/db/httpdb.py +614 -179
  73. mlrun/db/nopdb.py +210 -26
  74. mlrun/errors.py +12 -1
  75. mlrun/execution.py +41 -24
  76. mlrun/feature_store/__init__.py +0 -2
  77. mlrun/feature_store/api.py +40 -72
  78. mlrun/feature_store/common.py +1 -1
  79. mlrun/feature_store/feature_set.py +76 -55
  80. mlrun/feature_store/feature_vector.py +28 -30
  81. mlrun/feature_store/ingestion.py +7 -6
  82. mlrun/feature_store/retrieval/base.py +16 -11
  83. mlrun/feature_store/retrieval/conversion.py +11 -13
  84. mlrun/feature_store/retrieval/dask_merger.py +2 -0
  85. mlrun/feature_store/retrieval/job.py +9 -3
  86. mlrun/feature_store/retrieval/local_merger.py +2 -0
  87. mlrun/feature_store/retrieval/spark_merger.py +34 -24
  88. mlrun/feature_store/steps.py +37 -34
  89. mlrun/features.py +9 -20
  90. mlrun/frameworks/_common/artifacts_library.py +9 -9
  91. mlrun/frameworks/_common/mlrun_interface.py +5 -5
  92. mlrun/frameworks/_common/model_handler.py +48 -48
  93. mlrun/frameworks/_common/plan.py +2 -3
  94. mlrun/frameworks/_common/producer.py +3 -4
  95. mlrun/frameworks/_common/utils.py +5 -5
  96. mlrun/frameworks/_dl_common/loggers/logger.py +6 -7
  97. mlrun/frameworks/_dl_common/loggers/mlrun_logger.py +9 -9
  98. mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +23 -47
  99. mlrun/frameworks/_ml_common/artifacts_library.py +1 -2
  100. mlrun/frameworks/_ml_common/loggers/logger.py +3 -4
  101. mlrun/frameworks/_ml_common/loggers/mlrun_logger.py +4 -5
  102. mlrun/frameworks/_ml_common/model_handler.py +24 -24
  103. mlrun/frameworks/_ml_common/pkl_model_server.py +2 -2
  104. mlrun/frameworks/_ml_common/plan.py +1 -1
  105. mlrun/frameworks/_ml_common/plans/calibration_curve_plan.py +2 -3
  106. mlrun/frameworks/_ml_common/plans/confusion_matrix_plan.py +2 -3
  107. mlrun/frameworks/_ml_common/plans/dataset_plan.py +3 -3
  108. mlrun/frameworks/_ml_common/plans/feature_importance_plan.py +3 -3
  109. mlrun/frameworks/_ml_common/plans/roc_curve_plan.py +4 -4
  110. mlrun/frameworks/_ml_common/utils.py +4 -4
  111. mlrun/frameworks/auto_mlrun/auto_mlrun.py +9 -9
  112. mlrun/frameworks/huggingface/model_server.py +4 -4
  113. mlrun/frameworks/lgbm/__init__.py +33 -33
  114. mlrun/frameworks/lgbm/callbacks/callback.py +2 -4
  115. mlrun/frameworks/lgbm/callbacks/logging_callback.py +4 -5
  116. mlrun/frameworks/lgbm/callbacks/mlrun_logging_callback.py +4 -5
  117. mlrun/frameworks/lgbm/mlrun_interfaces/booster_mlrun_interface.py +1 -3
  118. mlrun/frameworks/lgbm/mlrun_interfaces/mlrun_interface.py +6 -6
  119. mlrun/frameworks/lgbm/model_handler.py +10 -10
  120. mlrun/frameworks/lgbm/model_server.py +6 -6
  121. mlrun/frameworks/lgbm/utils.py +5 -5
  122. mlrun/frameworks/onnx/dataset.py +8 -8
  123. mlrun/frameworks/onnx/mlrun_interface.py +3 -3
  124. mlrun/frameworks/onnx/model_handler.py +6 -6
  125. mlrun/frameworks/onnx/model_server.py +7 -7
  126. mlrun/frameworks/parallel_coordinates.py +4 -3
  127. mlrun/frameworks/pytorch/__init__.py +18 -18
  128. mlrun/frameworks/pytorch/callbacks/callback.py +4 -5
  129. mlrun/frameworks/pytorch/callbacks/logging_callback.py +17 -17
  130. mlrun/frameworks/pytorch/callbacks/mlrun_logging_callback.py +11 -11
  131. mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +23 -29
  132. mlrun/frameworks/pytorch/callbacks_handler.py +38 -38
  133. mlrun/frameworks/pytorch/mlrun_interface.py +20 -20
  134. mlrun/frameworks/pytorch/model_handler.py +17 -17
  135. mlrun/frameworks/pytorch/model_server.py +7 -7
  136. mlrun/frameworks/sklearn/__init__.py +13 -13
  137. mlrun/frameworks/sklearn/estimator.py +4 -4
  138. mlrun/frameworks/sklearn/metrics_library.py +14 -14
  139. mlrun/frameworks/sklearn/mlrun_interface.py +3 -6
  140. mlrun/frameworks/sklearn/model_handler.py +2 -2
  141. mlrun/frameworks/tf_keras/__init__.py +10 -7
  142. mlrun/frameworks/tf_keras/callbacks/logging_callback.py +15 -15
  143. mlrun/frameworks/tf_keras/callbacks/mlrun_logging_callback.py +11 -11
  144. mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +19 -23
  145. mlrun/frameworks/tf_keras/mlrun_interface.py +9 -11
  146. mlrun/frameworks/tf_keras/model_handler.py +14 -14
  147. mlrun/frameworks/tf_keras/model_server.py +6 -6
  148. mlrun/frameworks/xgboost/__init__.py +13 -13
  149. mlrun/frameworks/xgboost/model_handler.py +6 -6
  150. mlrun/k8s_utils.py +14 -16
  151. mlrun/launcher/__init__.py +1 -1
  152. mlrun/launcher/base.py +16 -15
  153. mlrun/launcher/client.py +8 -6
  154. mlrun/launcher/factory.py +1 -1
  155. mlrun/launcher/local.py +17 -11
  156. mlrun/launcher/remote.py +16 -10
  157. mlrun/lists.py +7 -6
  158. mlrun/model.py +238 -73
  159. mlrun/model_monitoring/__init__.py +1 -1
  160. mlrun/model_monitoring/api.py +138 -315
  161. mlrun/model_monitoring/application.py +5 -296
  162. mlrun/model_monitoring/applications/__init__.py +24 -0
  163. mlrun/model_monitoring/applications/_application_steps.py +157 -0
  164. mlrun/model_monitoring/applications/base.py +282 -0
  165. mlrun/model_monitoring/applications/context.py +214 -0
  166. mlrun/model_monitoring/applications/evidently_base.py +211 -0
  167. mlrun/model_monitoring/applications/histogram_data_drift.py +349 -0
  168. mlrun/model_monitoring/applications/results.py +99 -0
  169. mlrun/model_monitoring/controller.py +104 -84
  170. mlrun/model_monitoring/controller_handler.py +13 -5
  171. mlrun/model_monitoring/db/__init__.py +18 -0
  172. mlrun/model_monitoring/{stores → db/stores}/__init__.py +43 -36
  173. mlrun/model_monitoring/db/stores/base/__init__.py +15 -0
  174. mlrun/model_monitoring/{stores/model_endpoint_store.py → db/stores/base/store.py} +64 -40
  175. mlrun/model_monitoring/db/stores/sqldb/__init__.py +13 -0
  176. mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +71 -0
  177. mlrun/model_monitoring/{stores → db/stores/sqldb}/models/base.py +109 -5
  178. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +88 -0
  179. mlrun/model_monitoring/{stores/models/mysql.py → db/stores/sqldb/models/sqlite.py} +19 -13
  180. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +684 -0
  181. mlrun/model_monitoring/db/stores/v3io_kv/__init__.py +13 -0
  182. mlrun/model_monitoring/{stores/kv_model_endpoint_store.py → db/stores/v3io_kv/kv_store.py} +310 -165
  183. mlrun/model_monitoring/db/tsdb/__init__.py +100 -0
  184. mlrun/model_monitoring/db/tsdb/base.py +329 -0
  185. mlrun/model_monitoring/db/tsdb/helpers.py +30 -0
  186. mlrun/model_monitoring/db/tsdb/tdengine/__init__.py +15 -0
  187. mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +240 -0
  188. mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +45 -0
  189. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +397 -0
  190. mlrun/model_monitoring/db/tsdb/v3io/__init__.py +15 -0
  191. mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +117 -0
  192. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +630 -0
  193. mlrun/model_monitoring/evidently_application.py +6 -118
  194. mlrun/model_monitoring/features_drift_table.py +134 -106
  195. mlrun/model_monitoring/helpers.py +127 -28
  196. mlrun/model_monitoring/metrics/__init__.py +13 -0
  197. mlrun/model_monitoring/metrics/histogram_distance.py +127 -0
  198. mlrun/model_monitoring/model_endpoint.py +3 -2
  199. mlrun/model_monitoring/prometheus.py +1 -4
  200. mlrun/model_monitoring/stream_processing.py +62 -231
  201. mlrun/model_monitoring/tracking_policy.py +9 -2
  202. mlrun/model_monitoring/writer.py +152 -124
  203. mlrun/package/__init__.py +6 -6
  204. mlrun/package/context_handler.py +5 -5
  205. mlrun/package/packager.py +7 -7
  206. mlrun/package/packagers/default_packager.py +6 -6
  207. mlrun/package/packagers/numpy_packagers.py +15 -15
  208. mlrun/package/packagers/pandas_packagers.py +5 -5
  209. mlrun/package/packagers/python_standard_library_packagers.py +10 -10
  210. mlrun/package/packagers_manager.py +19 -23
  211. mlrun/package/utils/_formatter.py +6 -6
  212. mlrun/package/utils/_pickler.py +2 -2
  213. mlrun/package/utils/_supported_format.py +4 -4
  214. mlrun/package/utils/log_hint_utils.py +2 -2
  215. mlrun/package/utils/type_hint_utils.py +4 -9
  216. mlrun/platforms/__init__.py +11 -10
  217. mlrun/platforms/iguazio.py +24 -203
  218. mlrun/projects/operations.py +35 -21
  219. mlrun/projects/pipelines.py +68 -99
  220. mlrun/projects/project.py +830 -266
  221. mlrun/render.py +3 -11
  222. mlrun/run.py +162 -166
  223. mlrun/runtimes/__init__.py +62 -7
  224. mlrun/runtimes/base.py +39 -32
  225. mlrun/runtimes/daskjob.py +8 -8
  226. mlrun/runtimes/databricks_job/databricks_cancel_task.py +1 -1
  227. mlrun/runtimes/databricks_job/databricks_runtime.py +7 -7
  228. mlrun/runtimes/databricks_job/databricks_wrapper.py +1 -1
  229. mlrun/runtimes/funcdoc.py +0 -28
  230. mlrun/runtimes/function_reference.py +1 -1
  231. mlrun/runtimes/kubejob.py +28 -122
  232. mlrun/runtimes/local.py +6 -3
  233. mlrun/runtimes/mpijob/__init__.py +0 -20
  234. mlrun/runtimes/mpijob/abstract.py +9 -10
  235. mlrun/runtimes/mpijob/v1.py +1 -1
  236. mlrun/{model_monitoring/stores/models/sqlite.py → runtimes/nuclio/__init__.py} +7 -9
  237. mlrun/runtimes/nuclio/api_gateway.py +709 -0
  238. mlrun/runtimes/nuclio/application/__init__.py +15 -0
  239. mlrun/runtimes/nuclio/application/application.py +523 -0
  240. mlrun/runtimes/nuclio/application/reverse_proxy.go +95 -0
  241. mlrun/runtimes/{function.py → nuclio/function.py} +112 -73
  242. mlrun/runtimes/{nuclio.py → nuclio/nuclio.py} +6 -6
  243. mlrun/runtimes/{serving.py → nuclio/serving.py} +45 -51
  244. mlrun/runtimes/pod.py +286 -88
  245. mlrun/runtimes/remotesparkjob.py +2 -2
  246. mlrun/runtimes/sparkjob/spark3job.py +51 -34
  247. mlrun/runtimes/utils.py +7 -75
  248. mlrun/secrets.py +9 -5
  249. mlrun/serving/remote.py +2 -7
  250. mlrun/serving/routers.py +13 -10
  251. mlrun/serving/server.py +22 -26
  252. mlrun/serving/states.py +99 -25
  253. mlrun/serving/utils.py +3 -3
  254. mlrun/serving/v1_serving.py +6 -7
  255. mlrun/serving/v2_serving.py +59 -20
  256. mlrun/track/tracker.py +2 -1
  257. mlrun/track/tracker_manager.py +3 -3
  258. mlrun/track/trackers/mlflow_tracker.py +1 -2
  259. mlrun/utils/async_http.py +5 -7
  260. mlrun/utils/azure_vault.py +1 -1
  261. mlrun/utils/clones.py +1 -2
  262. mlrun/utils/condition_evaluator.py +3 -3
  263. mlrun/utils/db.py +3 -3
  264. mlrun/utils/helpers.py +183 -197
  265. mlrun/utils/http.py +2 -5
  266. mlrun/utils/logger.py +76 -14
  267. mlrun/utils/notifications/notification/__init__.py +17 -12
  268. mlrun/utils/notifications/notification/base.py +14 -2
  269. mlrun/utils/notifications/notification/console.py +2 -0
  270. mlrun/utils/notifications/notification/git.py +3 -1
  271. mlrun/utils/notifications/notification/ipython.py +3 -1
  272. mlrun/utils/notifications/notification/slack.py +101 -21
  273. mlrun/utils/notifications/notification/webhook.py +11 -1
  274. mlrun/utils/notifications/notification_pusher.py +155 -30
  275. mlrun/utils/retryer.py +208 -0
  276. mlrun/utils/singleton.py +1 -1
  277. mlrun/utils/v3io_clients.py +2 -4
  278. mlrun/utils/version/version.json +2 -2
  279. mlrun/utils/version/version.py +2 -6
  280. {mlrun-1.6.4rc2.dist-info → mlrun-1.7.0rc20.dist-info}/METADATA +31 -19
  281. mlrun-1.7.0rc20.dist-info/RECORD +353 -0
  282. mlrun/kfpops.py +0 -868
  283. mlrun/model_monitoring/batch.py +0 -1095
  284. mlrun/model_monitoring/stores/models/__init__.py +0 -27
  285. mlrun/model_monitoring/stores/sql_model_endpoint_store.py +0 -384
  286. mlrun/platforms/other.py +0 -306
  287. mlrun-1.6.4rc2.dist-info/RECORD +0 -314
  288. {mlrun-1.6.4rc2.dist-info → mlrun-1.7.0rc20.dist-info}/LICENSE +0 -0
  289. {mlrun-1.6.4rc2.dist-info → mlrun-1.7.0rc20.dist-info}/WHEEL +0 -0
  290. {mlrun-1.6.4rc2.dist-info → mlrun-1.7.0rc20.dist-info}/entry_points.txt +0 -0
  291. {mlrun-1.6.4rc2.dist-info → mlrun-1.7.0rc20.dist-info}/top_level.txt +0 -0
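
Most of the hunks that follow apply the same two mechanical changes across the code base: typing.Dict / typing.List / typing.Tuple annotations are replaced with the built-in generics (dict, list, tuple) that Python 3.9+ accepts directly, and two-argument super(Class, self) calls are shortened to plain super(). The sketch below is illustrative only and is not mlrun code; the class names are hypothetical.

    from typing import Optional, Union


    class BaseHandler:
        def __init__(self, labels: Optional[dict[str, Union[str, int]]] = None):
            # built-in generics (dict[...], list[...]) replace typing.Dict/List
            self.labels = labels or {}


    class ModelHandler(BaseHandler):
        def __init__(self, names: list[str] = None):
            # zero-argument super() replaces super(ModelHandler, self)
            super().__init__(labels={"count": len(names or [])})
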
@@ -13,7 +13,7 @@
  # limitations under the License.
  #
  from datetime import datetime
- from typing import Callable, Dict, List, Union
+ from typing import Callable, Union

  import tensorflow as tf
  from packaging import version
@@ -38,7 +38,7 @@ class _TFKerasTensorboardLogger(TensorboardLogger):

  def __init__(
  self,
- statistics_functions: List[Callable[[Union[Variable]], Union[float, Variable]]],
+ statistics_functions: list[Callable[[Union[Variable]], Union[float, Variable]]],
  context: mlrun.MLClientCtx = None,
  tensorboard_directory: str = None,
  run_name: str = None,
@@ -67,7 +67,7 @@ class _TFKerasTensorboardLogger(TensorboardLogger):
  update. Notice that writing to tensorboard too frequently may cause the training
  to be slower. Default: 'epoch'.
  """
- super(_TFKerasTensorboardLogger, self).__init__(
+ super().__init__(
  statistics_functions=statistics_functions,
  context=context,
  tensorboard_directory=tensorboard_directory,
@@ -255,15 +255,15 @@ class TensorboardLoggingCallback(LoggingCallback):
  context: mlrun.MLClientCtx = None,
  tensorboard_directory: str = None,
  run_name: str = None,
- weights: Union[bool, List[str]] = False,
- statistics_functions: List[
+ weights: Union[bool, list[str]] = False,
+ statistics_functions: list[
  Callable[[Union[Variable, Tensor]], Union[float, Tensor]]
  ] = None,
- dynamic_hyperparameters: Dict[
- str, Union[List[Union[str, int]], Callable[[], TFKerasTypes.TrackableType]]
+ dynamic_hyperparameters: dict[
+ str, Union[list[Union[str, int]], Callable[[], TFKerasTypes.TrackableType]]
  ] = None,
- static_hyperparameters: Dict[
- str, Union[TFKerasTypes.TrackableType, List[Union[str, int]]]
+ static_hyperparameters: dict[
+ str, Union[TFKerasTypes.TrackableType, list[Union[str, int]]]
  ] = None,
  update_frequency: Union[int, str] = "epoch",
  auto_log: bool = False,
@@ -325,7 +325,7 @@ class TensorboardLoggingCallback(LoggingCallback):
  :raise MLRunInvalidArgumentError: In case both 'context' and 'tensorboard_directory' parameters were not given
  or the 'update_frequency' was incorrect.
  """
- super(TensorboardLoggingCallback, self).__init__(
+ super().__init__(
  dynamic_hyperparameters=dynamic_hyperparameters,
  static_hyperparameters=static_hyperparameters,
  auto_log=auto_log,
@@ -352,7 +352,7 @@ class TensorboardLoggingCallback(LoggingCallback):
  self._logged_model = False
  self._logged_hyperparameters = False

- def get_weights(self) -> Dict[str, Variable]:
+ def get_weights(self) -> dict[str, Variable]:
  """
  Get the weights tensors tracked. The weights will be stored in a dictionary where each key is the weight's name
  and the value is the weight's parameter (tensor).
@@ -361,7 +361,7 @@ class TensorboardLoggingCallback(LoggingCallback):
  """
  return self._logger.weights

- def get_weights_statistics(self) -> Dict[str, Dict[str, List[float]]]:
+ def get_weights_statistics(self) -> dict[str, dict[str, list[float]]]:
  """
  Get the weights mean results logged. The results will be stored in a dictionary where each key is the weight's
  name and the value is a list of mean values per epoch.
@@ -408,7 +408,7 @@ class TensorboardLoggingCallback(LoggingCallback):
  :param logs: Currently the output of the last call to `on_epoch_end()` is passed to this argument for this
  method but that may change in the future.
  """
- super(TensorboardLoggingCallback, self).on_train_end()
+ super().on_train_end()

  # Write the final run summary:
  self._logger.write_final_summary_text()
@@ -453,7 +453,7 @@ class TensorboardLoggingCallback(LoggingCallback):
  :param logs: Currently no data is passed to this argument for this method but that may change in the
  future.
  """
- super(TensorboardLoggingCallback, self).on_test_end(logs=logs)
+ super().on_test_end(logs=logs)

  # Check if needed to end the run (in case of evaluation and not training):
  if not self._is_training:
@@ -477,7 +477,7 @@ class TensorboardLoggingCallback(LoggingCallback):
  `Model`'s metrics are returned. Example : `{'loss': 0.2, 'acc': 0.7}`.
  """
  # Update the dynamic hyperparameters
- super(TensorboardLoggingCallback, self).on_epoch_end(epoch=epoch)
+ super().on_epoch_end(epoch=epoch)

  # Log the weights statistics:
  self._logger.log_weights_statistics()
@@ -515,9 +515,7 @@ class TensorboardLoggingCallback(LoggingCallback):
  :param logs: Aggregated metric results up until this batch.
  """
  # Log the batch's results:
- super(TensorboardLoggingCallback, self).on_train_batch_end(
- batch=batch, logs=logs
- )
+ super().on_train_batch_end(batch=batch, logs=logs)

  # Write the batch loss and metrics results to their graphs:
  self._logger.write_training_results()
@@ -540,9 +538,7 @@ class TensorboardLoggingCallback(LoggingCallback):
  :param logs: Aggregated metric results up until this batch.
  """
  # Log the batch's results:
- super(TensorboardLoggingCallback, self).on_test_batch_end(
- batch=batch, logs=logs
- )
+ super().on_test_batch_end(batch=batch, logs=logs)

  # Write the batch loss and metrics results to their graphs:
  self._logger.write_validation_results()
@@ -555,7 +551,7 @@ class TensorboardLoggingCallback(LoggingCallback):

  @staticmethod
  def get_default_weight_statistics_list() -> (
- List[Callable[[Union[Variable, Tensor]], Union[float, Tensor]]]
+ list[Callable[[Union[Variable, Tensor]], Union[float, Tensor]]]
  ):
  """
  Get the default list of statistics functions being applied on the tracked weights each epoch.
@@ -569,7 +565,7 @@ class TensorboardLoggingCallback(LoggingCallback):
  After the trainer / evaluator run begins, this method will be called to setup the results, hyperparameters
  and weights dictionaries for logging.
  """
- super(TensorboardLoggingCallback, self)._setup_run()
+ super()._setup_run()

  # Check if needed to track weights:
  if self._tracked_weights is False:
@@ -15,11 +15,12 @@
  import importlib
  import os
  from abc import ABC
- from typing import List, Tuple, Union
+ from typing import Union

  import tensorflow as tf
  from tensorflow import keras
- from tensorflow.keras.callbacks import (
+ from tensorflow.keras.optimizers import Optimizer
+ from tensorflow.python.keras.callbacks import (
  BaseLogger,
  Callback,
  CSVLogger,
@@ -27,7 +28,6 @@ from tensorflow.keras.callbacks import (
  ProgbarLogger,
  TensorBoard,
  )
- from tensorflow.keras.optimizers import Optimizer

  import mlrun

@@ -88,9 +88,7 @@ class TFKerasMLRunInterface(MLRunInterface, ABC):
  :param restoration: Restoration information tuple as returned from 'remove_interface' in order to
  add the interface in a certain state.
  """
- super(TFKerasMLRunInterface, cls).add_interface(
- obj=obj, restoration=restoration
- )
+ super().add_interface(obj=obj, restoration=restoration)

  def mlrun_compile(self, *args, **kwargs):
  """
@@ -237,7 +235,7 @@ class TFKerasMLRunInterface(MLRunInterface, ABC):
  """
  self._RANK_0_ONLY_CALLBACKS.add(callback_name)

- def _pre_compile(self, optimizer: Optimizer) -> Tuple[Optimizer, Union[bool, None]]:
+ def _pre_compile(self, optimizer: Optimizer) -> tuple[Optimizer, Union[bool, None]]:
  """
  Method to call before calling 'compile' to setup the run and inputs for using horovod.

@@ -295,11 +293,11 @@ class TFKerasMLRunInterface(MLRunInterface, ABC):

  def _pre_fit(
  self,
- callbacks: List[Callback],
+ callbacks: list[Callback],
  verbose: int,
  steps_per_epoch: Union[int, None],
  validation_steps: Union[int, None],
- ) -> Tuple[List[Callback], int, Union[int, None], Union[int, None]]:
+ ) -> tuple[list[Callback], int, Union[int, None], Union[int, None]]:
  """
  Method to call before calling 'fit' to setup the run and inputs for using horovod.

@@ -366,9 +364,9 @@ class TFKerasMLRunInterface(MLRunInterface, ABC):

  def _pre_evaluate(
  self,
- callbacks: List[Callback],
+ callbacks: list[Callback],
  steps: Union[int, None],
- ) -> Tuple[List[Callback], Union[int, None]]:
+ ) -> tuple[list[Callback], Union[int, None]]:
  """
  Method to call before calling 'evaluate' to setup the run and inputs for using horovod.

@@ -15,7 +15,7 @@
  import os
  import shutil
  import zipfile
- from typing import Dict, List, Union
+ from typing import Union

  import numpy as np
  import tensorflow as tf
@@ -67,8 +67,8 @@ class TFKerasModelHandler(DLModelHandler):
  model_name: str = None,
  model_format: str = ModelFormats.SAVED_MODEL,
  context: mlrun.MLClientCtx = None,
- modules_map: Union[Dict[str, Union[None, str, List[str]]], str] = None,
- custom_objects_map: Union[Dict[str, Union[str, List[str]]], str] = None,
+ modules_map: Union[dict[str, Union[None, str, list[str]]], str] = None,
+ custom_objects_map: Union[dict[str, Union[str, list[str]]], str] = None,
  custom_objects_directory: str = None,
  save_traces: bool = False,
  **kwargs,
@@ -174,7 +174,7 @@ class TFKerasModelHandler(DLModelHandler):
  self._weights_file = None # type: str

  # Setup the base handler class:
- super(TFKerasModelHandler, self).__init__(
+ super().__init__(
  model=model,
  model_path=model_path,
  model_name=model_name,
@@ -190,8 +190,8 @@ class TFKerasModelHandler(DLModelHandler):

  def set_labels(
  self,
- to_add: Dict[str, Union[str, int, float]] = None,
- to_remove: List[str] = None,
+ to_add: dict[str, Union[str, int, float]] = None,
+ to_remove: list[str] = None,
  ):
  """
  Update the labels dictionary of this model artifact. There are required labels that cannot be edited or removed.
@@ -200,7 +200,7 @@ class TFKerasModelHandler(DLModelHandler):
  :param to_remove: A list of labels keys to remove.
  """
  # Update the user's labels:
- super(TFKerasModelHandler, self).set_labels(to_add=to_add, to_remove=to_remove)
+ super().set_labels(to_add=to_add, to_remove=to_remove)

  # Set the required labels:
  self._labels[self._LabelKeys.MODEL_FORMAT] = self._model_format
@@ -211,7 +211,7 @@ class TFKerasModelHandler(DLModelHandler):
  @without_mlrun_interface(interface=TFKerasMLRunInterface)
  def save(
  self, output_path: str = None, **kwargs
- ) -> Union[Dict[str, Artifact], None]:
+ ) -> Union[dict[str, Artifact], None]:
  """
  Save the handled model at the given output path. If a MLRun context is available, the saved model files will be
  logged and returned as artifacts.
@@ -221,7 +221,7 @@ class TFKerasModelHandler(DLModelHandler):

  :return The saved model additional artifacts (if needed) dictionary if context is available and None otherwise.
  """
- super(TFKerasModelHandler, self).save(output_path=output_path)
+ super().save(output_path=output_path)

  # Setup the returning model artifacts list:
  artifacts = {} # type: Dict[str, Artifact]
@@ -291,7 +291,7 @@ class TFKerasModelHandler(DLModelHandler):
  "Loading a model using checkpoint is not yet implemented."
  )

- super(TFKerasModelHandler, self).load()
+ super().load()

  # ModelFormats.H5 - Load from a h5 file:
  if self._model_format == TFKerasModelHandler.ModelFormats.H5:
@@ -308,7 +308,7 @@ class TFKerasModelHandler(DLModelHandler):
  # ModelFormats.JSON_ARCHITECTURE_H5_WEIGHTS - Load from a json architecture file and a h5 weights file:
  else:
  # Load the model architecture (json):
- with open(self._model_file, "r") as json_file:
+ with open(self._model_file) as json_file:
  model_architecture = json_file.read()
  self._model = keras.models.model_from_json(
  model_architecture, custom_objects=self._custom_objects
@@ -321,7 +321,7 @@ class TFKerasModelHandler(DLModelHandler):
  model_name: str = None,
  optimize: bool = True,
  input_signature: Union[
- List[tf.TensorSpec], List[np.ndarray], tf.TensorSpec, np.ndarray
+ list[tf.TensorSpec], list[np.ndarray], tf.TensorSpec, np.ndarray
  ] = None,
  output_path: str = None,
  log: bool = None,
@@ -487,7 +487,7 @@ class TFKerasModelHandler(DLModelHandler):
  ].local()

  # Continue collecting from abstract class:
- super(TFKerasModelHandler, self)._collect_files_from_store_object()
+ super()._collect_files_from_store_object()

  def _collect_files_from_local_path(self):
  """
@@ -554,7 +554,7 @@ class TFKerasModelHandler(DLModelHandler):
  """
  # Supported types:
  if isinstance(sample, np.ndarray):
- return super(TFKerasModelHandler, self)._read_sample(sample=sample)
+ return super()._read_sample(sample=sample)
  elif isinstance(sample, tf.TensorSpec):
  return Feature(
  name=sample.name,
@@ -12,7 +12,7 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.
  #
- from typing import Any, Dict, List, Union
+ from typing import Any, Union

  import numpy as np
  from tensorflow import keras
@@ -36,8 +36,8 @@ class TFKerasModelServer(V2ModelServer):
  model: keras.Model = None,
  model_path: str = None,
  model_name: str = None,
- modules_map: Union[Dict[str, Union[None, str, List[str]]], str] = None,
- custom_objects_map: Union[Dict[str, Union[str, List[str]]], str] = None,
+ modules_map: Union[dict[str, Union[None, str, list[str]]], str] = None,
+ custom_objects_map: Union[dict[str, Union[str, list[str]]], str] = None,
  custom_objects_directory: str = None,
  model_format: str = TFKerasModelHandler.ModelFormats.SAVED_MODEL,
  to_list: bool = False,
@@ -103,7 +103,7 @@ class TFKerasModelServer(V2ModelServer):
  :param protocol: -
  :param class_args: -
  """
- super(TFKerasModelServer, self).__init__(
+ super().__init__(
  context=context,
  name=name,
  model_path=model_path,
@@ -146,7 +146,7 @@ class TFKerasModelServer(V2ModelServer):
  self._model_handler.load()
  self.model = self._model_handler.model

- def predict(self, request: Dict[str, Any]) -> Union[np.ndarray, list]:
+ def predict(self, request: dict[str, Any]) -> Union[np.ndarray, list]:
  """
  Infer the inputs through the model using 'keras.Model.predict' and return its output. The inferred data will be
  read from the "inputs" key of the request.
@@ -165,7 +165,7 @@ class TFKerasModelServer(V2ModelServer):
  # Return as list if required:
  return prediction if not self.to_list else prediction.tolist()

- def explain(self, request: Dict[str, Any]) -> str:
+ def explain(self, request: dict[str, Any]) -> str:
  """
  Return a string explaining what model is being serve in this serving function and the function name.

@@ -13,7 +13,7 @@
  # limitations under the License.
  #
  # flake8: noqa - this is until we take care of the F401 violations with respect to __all__ & sphinx
- from typing import Dict, List, Union
+ from typing import Union

  import xgboost as xgb

@@ -35,25 +35,25 @@ def apply_mlrun(
  model_name: str = "model",
  tag: str = "",
  model_path: str = None,
- modules_map: Union[Dict[str, Union[None, str, List[str]]], str] = None,
- custom_objects_map: Union[Dict[str, Union[str, List[str]]], str] = None,
+ modules_map: Union[dict[str, Union[None, str, list[str]]], str] = None,
+ custom_objects_map: Union[dict[str, Union[str, list[str]]], str] = None,
  custom_objects_directory: str = None,
  context: mlrun.MLClientCtx = None,
- artifacts: Union[List[MLPlan], List[str], Dict[str, dict]] = None,
+ artifacts: Union[list[MLPlan], list[str], dict[str, dict]] = None,
  metrics: Union[
- List[Metric],
- List[XGBoostTypes.MetricEntryType],
- Dict[str, XGBoostTypes.MetricEntryType],
+ list[Metric],
+ list[XGBoostTypes.MetricEntryType],
+ dict[str, XGBoostTypes.MetricEntryType],
  ] = None,
  x_test: XGBoostTypes.DatasetType = None,
  y_test: XGBoostTypes.DatasetType = None,
  sample_set: Union[XGBoostTypes.DatasetType, mlrun.DataItem, str] = None,
- y_columns: Union[List[str], List[int]] = None,
+ y_columns: Union[list[str], list[int]] = None,
  feature_vector: str = None,
- feature_weights: List[float] = None,
- labels: Dict[str, Union[str, int, float]] = None,
- parameters: Dict[str, Union[str, int, float]] = None,
- extra_data: Dict[str, XGBoostTypes.ExtraDataType] = None,
+ feature_weights: list[float] = None,
+ labels: dict[str, Union[str, int, float]] = None,
+ parameters: dict[str, Union[str, int, float]] = None,
+ extra_data: dict[str, XGBoostTypes.ExtraDataType] = None,
  auto_log: bool = True,
  **kwargs,
  ) -> XGBoostModelHandler:
@@ -90,7 +90,7 @@

  {
  "/.../custom_model.py": "MyModel",
- "/.../custom_objects.py": ["object1", "object2"]
+ "/.../custom_objects.py": ["object1", "object2"],
  }

  All the paths will be accessed from the given 'custom_objects_directory', meaning
@@ -14,7 +14,7 @@
  #
  import os
  import pickle
- from typing import Dict, List, Union
+ from typing import Union

  import cloudpickle

@@ -47,8 +47,8 @@ class XGBoostModelHandler(MLModelHandler):
  model: XGBoostTypes.ModelType = None,
  model_path: str = None,
  model_name: str = None,
- modules_map: Union[Dict[str, Union[None, str, List[str]]], str] = None,
- custom_objects_map: Union[Dict[str, Union[str, List[str]]], str] = None,
+ modules_map: Union[dict[str, Union[None, str, list[str]]], str] = None,
+ custom_objects_map: Union[dict[str, Union[str, list[str]]], str] = None,
  custom_objects_directory: str = None,
  context: mlrun.MLClientCtx = None,
  model_format: str = ModelFormats.PKL,
@@ -124,7 +124,7 @@ class XGBoostModelHandler(MLModelHandler):
  # Store the configuration:
  self._model_format = model_format

- super(XGBoostModelHandler, self).__init__(
+ super().__init__(
  model=model,
  model_path=model_path,
  model_name=model_name,
@@ -162,7 +162,7 @@ class XGBoostModelHandler(MLModelHandler):

  :return The saved model additional artifacts (if needed) dictionary if context is available and None otherwise.
  """
- super(XGBoostModelHandler, self).save(output_path=output_path)
+ super().save(output_path=output_path)

  # ModelFormats.PICKLE - Save from a pkl file:
  if self._model_format == XGBoostModelHandler.ModelFormats.PKL:
@@ -177,7 +177,7 @@ class XGBoostModelHandler(MLModelHandler):
  Load the specified model in this handler. Additional parameters for the class initializer can be passed via the
  kwargs dictionary.
  """
- super(XGBoostModelHandler, self).load()
+ super().load()

  # ModelFormats.PICKLE - Load from a pkl file:
  if self._model_format == XGBoostModelHandler.ModelFormats.PKL:
mlrun/k8s_utils.py CHANGED
@@ -12,7 +12,6 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.
  import re
- import typing

  import kubernetes.client

@@ -38,7 +37,7 @@ def is_running_inside_kubernetes_cluster():

  def generate_preemptible_node_selector_requirements(
  node_selector_operator: str,
- ) -> typing.List[kubernetes.client.V1NodeSelectorRequirement]:
+ ) -> list[kubernetes.client.V1NodeSelectorRequirement]:
  """
  Generate node selector requirements based on the pre-configured node selector of the preemptible nodes.
  node selector operator represents a key's relationship to a set of values.
@@ -62,7 +61,7 @@ def generate_preemptible_node_selector_requirements(


  def generate_preemptible_nodes_anti_affinity_terms() -> (
- typing.List[kubernetes.client.V1NodeSelectorTerm]
+ list[kubernetes.client.V1NodeSelectorTerm]
  ):
  """
  Generate node selector term containing anti-affinity expressions based on the
@@ -84,7 +83,7 @@ def generate_preemptible_nodes_anti_affinity_terms():


  def generate_preemptible_nodes_affinity_terms() -> (
- typing.List[kubernetes.client.V1NodeSelectorTerm]
+ list[kubernetes.client.V1NodeSelectorTerm]
  ):
  """
  Use for purpose of scheduling on node having at least one of the node selectors.
@@ -105,7 +104,7 @@ def generate_preemptible_nodes_affinity_terms() -> (
  return node_selector_terms


- def generate_preemptible_tolerations() -> typing.List[kubernetes.client.V1Toleration]:
+ def generate_preemptible_tolerations() -> list[kubernetes.client.V1Toleration]:
  tolerations = mlconfig.get_preemptible_tolerations()

  toleration_objects = []
@@ -142,17 +141,6 @@ def verify_label_key(key: str):
  if not key:
  raise mlrun.errors.MLRunInvalidArgumentError("label key cannot be empty")

- mlrun.utils.helpers.verify_field_regex(
- f"project.metadata.labels.'{key}'",
- key,
- mlrun.utils.regex.k8s_character_limit,
- )
-
- if key.startswith("k8s.io/") or key.startswith("kubernetes.io/"):
- raise mlrun.errors.MLRunInvalidArgumentError(
- "Labels cannot start with 'k8s.io/' or 'kubernetes.io/'"
- )
-
  parts = key.split("/")
  if len(parts) == 1:
  name = parts[0]
@@ -174,12 +162,22 @@ def verify_label_key(key: str):
  "Label key can only contain one '/'"
  )

+ mlrun.utils.helpers.verify_field_regex(
+ f"project.metadata.labels.'{key}'",
+ name,
+ mlrun.utils.regex.k8s_character_limit,
+ )
  mlrun.utils.helpers.verify_field_regex(
  f"project.metadata.labels.'{key}'",
  name,
  mlrun.utils.regex.qualified_name,
  )

+ if key.startswith("k8s.io/") or key.startswith("kubernetes.io/"):
+ raise mlrun.errors.MLRunInvalidArgumentError(
+ "Labels cannot start with 'k8s.io/' or 'kubernetes.io/'"
+ )
+

  def verify_label_value(value, label_key):
  mlrun.utils.helpers.verify_field_regex(
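
The two hunks above reorder the checks in verify_label_key: the character-limit regex now validates the name part of the key (after splitting on '/') instead of the full key, and the reserved-prefix check runs after the regex validations. Below is a condensed sketch of the resulting order, based only on these hunks (the one- vs. two-part prefix handling between the split and the regex checks is elided), using the mlrun helpers shown in the diff:

    import mlrun.errors
    import mlrun.utils.helpers
    import mlrun.utils.regex


    def verify_label_key(key: str):
        if not key:
            raise mlrun.errors.MLRunInvalidArgumentError("label key cannot be empty")

        # the key is split first, so the regex checks below run on the name part only
        parts = key.split("/")
        name = parts[-1]  # elided: the full one- vs. two-part handling from the hunk above

        mlrun.utils.helpers.verify_field_regex(
            f"project.metadata.labels.'{key}'", name, mlrun.utils.regex.k8s_character_limit
        )
        mlrun.utils.helpers.verify_field_regex(
            f"project.metadata.labels.'{key}'", name, mlrun.utils.regex.qualified_name
        )

        # the reserved-prefix check now runs after the regex validations
        if key.startswith("k8s.io/") or key.startswith("kubernetes.io/"):
            raise mlrun.errors.MLRunInvalidArgumentError(
                "Labels cannot start with 'k8s.io/' or 'kubernetes.io/'"
            )
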
@@ -1,4 +1,4 @@
- # Copyright 2023 MLRun Authors
+ # Copyright 2023 Iguazio
  #
  # Licensed under the Apache License, Version 2.0 (the "License");
  # you may not use this file except in compliance with the License.
mlrun/launcher/base.py CHANGED
@@ -1,4 +1,4 @@
- # Copyright 2023 MLRun Authors
+ # Copyright 2023 Iguazio
  #
  # Licensed under the Apache License, Version 2.0 (the "License");
  # you may not use this file except in compliance with the License.
@@ -16,12 +16,13 @@ import ast
  import copy
  import os
  import uuid
- from typing import Any, Callable, Dict, List, Optional, Union
+ from typing import Any, Callable, Optional, Union
+
+ import mlrun_pipelines.common.ops

  import mlrun.common.schemas
  import mlrun.config
  import mlrun.errors
- import mlrun.kfpops
  import mlrun.lists
  import mlrun.model
  import mlrun.runtimes
@@ -53,7 +54,7 @@ class BaseLauncher(abc.ABC):
  name: Optional[str] = "",
  project: Optional[str] = "",
  params: Optional[dict] = None,
- inputs: Optional[Dict[str, str]] = None,
+ inputs: Optional[dict[str, str]] = None,
  out_path: Optional[str] = "",
  workdir: Optional[str] = "",
  artifact_path: Optional[str] = "",
@@ -61,16 +62,16 @@ class BaseLauncher(abc.ABC):
  schedule: Optional[
  Union[str, mlrun.common.schemas.schedule.ScheduleCronTrigger]
  ] = None,
- hyperparams: Dict[str, list] = None,
+ hyperparams: dict[str, list] = None,
  hyper_param_options: Optional[mlrun.model.HyperParamOptions] = None,
  verbose: Optional[bool] = None,
  scrape_metrics: Optional[bool] = None,
  local_code_path: Optional[str] = None,
  auto_build: Optional[bool] = None,
- param_file_secrets: Optional[Dict[str, str]] = None,
- notifications: Optional[List[mlrun.model.Notification]] = None,
- returns: Optional[List[Union[str, Dict[str, str]]]] = None,
- state_thresholds: Optional[Dict[str, int]] = None,
+ param_file_secrets: Optional[dict[str, str]] = None,
+ notifications: Optional[list[mlrun.model.Notification]] = None,
+ returns: Optional[list[Union[str, dict[str, str]]]] = None,
+ state_thresholds: Optional[dict[str, int]] = None,
  ) -> "mlrun.run.RunObject":
  """run the function from the server/client[local/remote]"""
  pass
@@ -175,7 +176,7 @@ class BaseLauncher(abc.ABC):
  if message:
  logger.warning(message, output_path=run.spec.output_path)

- def _validate_run_params(self, parameters: Dict[str, Any]):
+ def _validate_run_params(self, parameters: dict[str, Any]):
  for param_name, param_value in parameters.items():
  if isinstance(param_value, dict):
  # if the parameter is a dict, we might have some nested parameters,
@@ -237,8 +238,8 @@ class BaseLauncher(abc.ABC):
  out_path=None,
  artifact_path=None,
  workdir=None,
- notifications: List[mlrun.model.Notification] = None,
- state_thresholds: Optional[Dict[str, int]] = None,
+ notifications: list[mlrun.model.Notification] = None,
+ state_thresholds: Optional[dict[str, int]] = None,
  ):
  run.spec.handler = (
  handler or run.spec.handler or runtime.spec.default_handler or ""
@@ -353,7 +354,7 @@ class BaseLauncher(abc.ABC):
  or {}
  )
  state_thresholds = (
- mlrun.config.config.function.spec.state_thresholds.default.to_dict()
+ mlrun.mlconf.function.spec.state_thresholds.default.to_dict()
  | state_thresholds
  )
  run.spec.state_thresholds = state_thresholds or run.spec.state_thresholds
@@ -390,7 +391,7 @@ class BaseLauncher(abc.ABC):
  return

  if result and runtime.kfp and err is None:
- mlrun.kfpops.write_kfpmeta(result)
+ mlrun_pipelines.common.ops.write_kfpmeta(result)
  self._log_track_results(runtime.is_child, result, run)


@@ -403,7 +404,7 @@ class BaseLauncher(abc.ABC):
  )
  if (
  run.status.state
- in mlrun.runtimes.constants.RunStates.error_and_abortion_states()
+ in mlrun.common.runtimes.constants.RunStates.error_and_abortion_states()
  ):
  if runtime._is_remote and not runtime.is_child:
  logger.error(