mlrun 1.7.2rc3__py3-none-any.whl → 1.8.0rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of mlrun might be problematic.

Files changed (250)
  1. mlrun/__init__.py +18 -18
  2. mlrun/__main__.py +3 -3
  3. mlrun/alerts/alert.py +19 -12
  4. mlrun/artifacts/__init__.py +0 -2
  5. mlrun/artifacts/base.py +34 -11
  6. mlrun/artifacts/dataset.py +16 -16
  7. mlrun/artifacts/manager.py +13 -13
  8. mlrun/artifacts/model.py +66 -53
  9. mlrun/common/constants.py +6 -0
  10. mlrun/common/formatters/__init__.py +1 -0
  11. mlrun/common/formatters/feature_set.py +1 -0
  12. mlrun/common/formatters/function.py +1 -0
  13. mlrun/common/formatters/model_endpoint.py +30 -0
  14. mlrun/common/formatters/pipeline.py +1 -2
  15. mlrun/common/formatters/project.py +9 -0
  16. mlrun/common/model_monitoring/__init__.py +0 -3
  17. mlrun/common/model_monitoring/helpers.py +1 -1
  18. mlrun/common/runtimes/constants.py +1 -2
  19. mlrun/common/schemas/__init__.py +7 -2
  20. mlrun/common/schemas/alert.py +31 -18
  21. mlrun/common/schemas/api_gateway.py +3 -3
  22. mlrun/common/schemas/artifact.py +7 -13
  23. mlrun/common/schemas/auth.py +6 -4
  24. mlrun/common/schemas/background_task.py +7 -7
  25. mlrun/common/schemas/client_spec.py +2 -2
  26. mlrun/common/schemas/clusterization_spec.py +2 -2
  27. mlrun/common/schemas/common.py +53 -3
  28. mlrun/common/schemas/datastore_profile.py +1 -1
  29. mlrun/common/schemas/feature_store.py +9 -9
  30. mlrun/common/schemas/frontend_spec.py +4 -4
  31. mlrun/common/schemas/function.py +10 -10
  32. mlrun/common/schemas/hub.py +1 -1
  33. mlrun/common/schemas/k8s.py +3 -3
  34. mlrun/common/schemas/memory_reports.py +3 -3
  35. mlrun/common/schemas/model_monitoring/__init__.py +8 -1
  36. mlrun/common/schemas/model_monitoring/constants.py +62 -12
  37. mlrun/common/schemas/model_monitoring/grafana.py +1 -1
  38. mlrun/common/schemas/model_monitoring/model_endpoint_v2.py +149 -0
  39. mlrun/common/schemas/model_monitoring/model_endpoints.py +22 -6
  40. mlrun/common/schemas/notification.py +18 -3
  41. mlrun/common/schemas/object.py +1 -1
  42. mlrun/common/schemas/pagination.py +4 -4
  43. mlrun/common/schemas/partition.py +137 -0
  44. mlrun/common/schemas/pipeline.py +2 -2
  45. mlrun/common/schemas/project.py +22 -17
  46. mlrun/common/schemas/runs.py +2 -2
  47. mlrun/common/schemas/runtime_resource.py +5 -5
  48. mlrun/common/schemas/schedule.py +1 -1
  49. mlrun/common/schemas/secret.py +1 -1
  50. mlrun/common/schemas/tag.py +3 -3
  51. mlrun/common/schemas/workflow.py +5 -5
  52. mlrun/config.py +65 -15
  53. mlrun/data_types/__init__.py +0 -2
  54. mlrun/data_types/data_types.py +0 -1
  55. mlrun/data_types/infer.py +3 -1
  56. mlrun/data_types/spark.py +4 -4
  57. mlrun/data_types/to_pandas.py +2 -11
  58. mlrun/datastore/__init__.py +0 -2
  59. mlrun/datastore/alibaba_oss.py +4 -1
  60. mlrun/datastore/azure_blob.py +4 -1
  61. mlrun/datastore/base.py +12 -4
  62. mlrun/datastore/datastore.py +9 -3
  63. mlrun/datastore/datastore_profile.py +20 -20
  64. mlrun/datastore/dbfs_store.py +4 -1
  65. mlrun/datastore/filestore.py +4 -1
  66. mlrun/datastore/google_cloud_storage.py +4 -1
  67. mlrun/datastore/hdfs.py +4 -1
  68. mlrun/datastore/inmem.py +4 -1
  69. mlrun/datastore/redis.py +4 -1
  70. mlrun/datastore/s3.py +4 -1
  71. mlrun/datastore/sources.py +51 -49
  72. mlrun/datastore/store_resources.py +0 -2
  73. mlrun/datastore/targets.py +22 -23
  74. mlrun/datastore/utils.py +2 -2
  75. mlrun/datastore/v3io.py +4 -1
  76. mlrun/datastore/wasbfs/fs.py +13 -12
  77. mlrun/db/base.py +170 -64
  78. mlrun/db/factory.py +3 -0
  79. mlrun/db/httpdb.py +986 -238
  80. mlrun/db/nopdb.py +155 -57
  81. mlrun/errors.py +2 -2
  82. mlrun/execution.py +55 -29
  83. mlrun/feature_store/__init__.py +0 -2
  84. mlrun/feature_store/api.py +40 -40
  85. mlrun/feature_store/common.py +9 -9
  86. mlrun/feature_store/feature_set.py +20 -18
  87. mlrun/feature_store/feature_vector.py +27 -24
  88. mlrun/feature_store/retrieval/base.py +14 -9
  89. mlrun/feature_store/retrieval/job.py +2 -1
  90. mlrun/feature_store/steps.py +2 -2
  91. mlrun/features.py +30 -13
  92. mlrun/frameworks/__init__.py +1 -2
  93. mlrun/frameworks/_common/__init__.py +1 -2
  94. mlrun/frameworks/_common/artifacts_library.py +2 -2
  95. mlrun/frameworks/_common/mlrun_interface.py +10 -6
  96. mlrun/frameworks/_common/model_handler.py +29 -27
  97. mlrun/frameworks/_common/producer.py +3 -1
  98. mlrun/frameworks/_dl_common/__init__.py +1 -2
  99. mlrun/frameworks/_dl_common/loggers/__init__.py +1 -2
  100. mlrun/frameworks/_dl_common/loggers/mlrun_logger.py +4 -4
  101. mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +3 -3
  102. mlrun/frameworks/_ml_common/__init__.py +1 -2
  103. mlrun/frameworks/_ml_common/loggers/__init__.py +1 -2
  104. mlrun/frameworks/_ml_common/model_handler.py +21 -21
  105. mlrun/frameworks/_ml_common/plans/__init__.py +1 -2
  106. mlrun/frameworks/_ml_common/plans/confusion_matrix_plan.py +3 -1
  107. mlrun/frameworks/_ml_common/plans/dataset_plan.py +3 -3
  108. mlrun/frameworks/_ml_common/plans/roc_curve_plan.py +4 -4
  109. mlrun/frameworks/auto_mlrun/__init__.py +1 -2
  110. mlrun/frameworks/auto_mlrun/auto_mlrun.py +22 -15
  111. mlrun/frameworks/huggingface/__init__.py +1 -2
  112. mlrun/frameworks/huggingface/model_server.py +9 -9
  113. mlrun/frameworks/lgbm/__init__.py +47 -44
  114. mlrun/frameworks/lgbm/callbacks/__init__.py +1 -2
  115. mlrun/frameworks/lgbm/callbacks/logging_callback.py +4 -2
  116. mlrun/frameworks/lgbm/callbacks/mlrun_logging_callback.py +4 -2
  117. mlrun/frameworks/lgbm/mlrun_interfaces/__init__.py +1 -2
  118. mlrun/frameworks/lgbm/mlrun_interfaces/mlrun_interface.py +5 -5
  119. mlrun/frameworks/lgbm/model_handler.py +15 -11
  120. mlrun/frameworks/lgbm/model_server.py +11 -7
  121. mlrun/frameworks/lgbm/utils.py +2 -2
  122. mlrun/frameworks/onnx/__init__.py +1 -2
  123. mlrun/frameworks/onnx/dataset.py +3 -3
  124. mlrun/frameworks/onnx/mlrun_interface.py +2 -2
  125. mlrun/frameworks/onnx/model_handler.py +7 -5
  126. mlrun/frameworks/onnx/model_server.py +8 -6
  127. mlrun/frameworks/parallel_coordinates.py +11 -11
  128. mlrun/frameworks/pytorch/__init__.py +22 -23
  129. mlrun/frameworks/pytorch/callbacks/__init__.py +1 -2
  130. mlrun/frameworks/pytorch/callbacks/callback.py +2 -1
  131. mlrun/frameworks/pytorch/callbacks/logging_callback.py +15 -8
  132. mlrun/frameworks/pytorch/callbacks/mlrun_logging_callback.py +19 -12
  133. mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +22 -15
  134. mlrun/frameworks/pytorch/callbacks_handler.py +36 -30
  135. mlrun/frameworks/pytorch/mlrun_interface.py +17 -17
  136. mlrun/frameworks/pytorch/model_handler.py +21 -17
  137. mlrun/frameworks/pytorch/model_server.py +13 -9
  138. mlrun/frameworks/sklearn/__init__.py +19 -18
  139. mlrun/frameworks/sklearn/estimator.py +2 -2
  140. mlrun/frameworks/sklearn/metric.py +3 -3
  141. mlrun/frameworks/sklearn/metrics_library.py +8 -6
  142. mlrun/frameworks/sklearn/mlrun_interface.py +3 -2
  143. mlrun/frameworks/sklearn/model_handler.py +4 -3
  144. mlrun/frameworks/tf_keras/__init__.py +11 -12
  145. mlrun/frameworks/tf_keras/callbacks/__init__.py +1 -2
  146. mlrun/frameworks/tf_keras/callbacks/logging_callback.py +17 -14
  147. mlrun/frameworks/tf_keras/callbacks/mlrun_logging_callback.py +15 -12
  148. mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +21 -18
  149. mlrun/frameworks/tf_keras/model_handler.py +17 -13
  150. mlrun/frameworks/tf_keras/model_server.py +12 -8
  151. mlrun/frameworks/xgboost/__init__.py +19 -18
  152. mlrun/frameworks/xgboost/model_handler.py +13 -9
  153. mlrun/launcher/base.py +3 -4
  154. mlrun/launcher/local.py +1 -1
  155. mlrun/launcher/remote.py +1 -1
  156. mlrun/lists.py +4 -3
  157. mlrun/model.py +110 -46
  158. mlrun/model_monitoring/__init__.py +1 -2
  159. mlrun/model_monitoring/api.py +6 -6
  160. mlrun/model_monitoring/applications/_application_steps.py +13 -15
  161. mlrun/model_monitoring/applications/histogram_data_drift.py +41 -15
  162. mlrun/model_monitoring/applications/results.py +55 -3
  163. mlrun/model_monitoring/controller.py +185 -223
  164. mlrun/model_monitoring/db/_schedules.py +156 -0
  165. mlrun/model_monitoring/db/_stats.py +189 -0
  166. mlrun/model_monitoring/db/stores/__init__.py +1 -1
  167. mlrun/model_monitoring/db/stores/base/store.py +6 -65
  168. mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +0 -25
  169. mlrun/model_monitoring/db/stores/sqldb/models/base.py +0 -97
  170. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +2 -58
  171. mlrun/model_monitoring/db/stores/sqldb/models/sqlite.py +0 -15
  172. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +6 -257
  173. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +9 -271
  174. mlrun/model_monitoring/db/tsdb/base.py +76 -24
  175. mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +61 -6
  176. mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +33 -0
  177. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +253 -28
  178. mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +1 -0
  179. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +35 -17
  180. mlrun/model_monitoring/helpers.py +91 -1
  181. mlrun/model_monitoring/model_endpoint.py +4 -2
  182. mlrun/model_monitoring/stream_processing.py +16 -13
  183. mlrun/model_monitoring/tracking_policy.py +10 -3
  184. mlrun/model_monitoring/writer.py +47 -26
  185. mlrun/package/__init__.py +3 -6
  186. mlrun/package/context_handler.py +1 -1
  187. mlrun/package/packager.py +12 -9
  188. mlrun/package/packagers/__init__.py +0 -2
  189. mlrun/package/packagers/default_packager.py +14 -11
  190. mlrun/package/packagers/numpy_packagers.py +16 -7
  191. mlrun/package/packagers/pandas_packagers.py +18 -18
  192. mlrun/package/packagers/python_standard_library_packagers.py +25 -11
  193. mlrun/package/packagers_manager.py +31 -14
  194. mlrun/package/utils/__init__.py +0 -3
  195. mlrun/package/utils/_pickler.py +6 -6
  196. mlrun/platforms/__init__.py +3 -16
  197. mlrun/platforms/iguazio.py +4 -1
  198. mlrun/projects/operations.py +27 -27
  199. mlrun/projects/pipelines.py +34 -35
  200. mlrun/projects/project.py +535 -182
  201. mlrun/run.py +13 -10
  202. mlrun/runtimes/__init__.py +1 -3
  203. mlrun/runtimes/base.py +15 -11
  204. mlrun/runtimes/daskjob.py +9 -9
  205. mlrun/runtimes/generators.py +2 -1
  206. mlrun/runtimes/kubejob.py +4 -5
  207. mlrun/runtimes/mounts.py +572 -0
  208. mlrun/runtimes/mpijob/__init__.py +0 -2
  209. mlrun/runtimes/mpijob/abstract.py +7 -6
  210. mlrun/runtimes/nuclio/api_gateway.py +7 -7
  211. mlrun/runtimes/nuclio/application/application.py +11 -11
  212. mlrun/runtimes/nuclio/function.py +13 -13
  213. mlrun/runtimes/nuclio/serving.py +9 -9
  214. mlrun/runtimes/pod.py +154 -45
  215. mlrun/runtimes/remotesparkjob.py +3 -2
  216. mlrun/runtimes/sparkjob/__init__.py +0 -2
  217. mlrun/runtimes/sparkjob/spark3job.py +21 -11
  218. mlrun/runtimes/utils.py +6 -5
  219. mlrun/serving/merger.py +6 -4
  220. mlrun/serving/remote.py +18 -17
  221. mlrun/serving/routers.py +27 -27
  222. mlrun/serving/server.py +1 -1
  223. mlrun/serving/states.py +76 -71
  224. mlrun/serving/utils.py +13 -2
  225. mlrun/serving/v1_serving.py +3 -2
  226. mlrun/serving/v2_serving.py +4 -4
  227. mlrun/track/__init__.py +1 -1
  228. mlrun/track/tracker.py +2 -2
  229. mlrun/track/trackers/mlflow_tracker.py +6 -5
  230. mlrun/utils/async_http.py +1 -1
  231. mlrun/utils/helpers.py +70 -16
  232. mlrun/utils/logger.py +106 -4
  233. mlrun/utils/notifications/notification/__init__.py +22 -19
  234. mlrun/utils/notifications/notification/base.py +33 -14
  235. mlrun/utils/notifications/notification/console.py +6 -6
  236. mlrun/utils/notifications/notification/git.py +11 -11
  237. mlrun/utils/notifications/notification/ipython.py +10 -9
  238. mlrun/utils/notifications/notification/mail.py +149 -0
  239. mlrun/utils/notifications/notification/slack.py +6 -6
  240. mlrun/utils/notifications/notification/webhook.py +18 -22
  241. mlrun/utils/notifications/notification_pusher.py +43 -31
  242. mlrun/utils/regex.py +3 -1
  243. mlrun/utils/version/version.json +2 -2
  244. {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0rc2.dist-info}/METADATA +18 -14
  245. mlrun-1.8.0rc2.dist-info/RECORD +358 -0
  246. {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0rc2.dist-info}/WHEEL +1 -1
  247. mlrun-1.7.2rc3.dist-info/RECORD +0 -351
  248. {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0rc2.dist-info}/LICENSE +0 -0
  249. {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0rc2.dist-info}/entry_points.txt +0 -0
  250. {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0rc2.dist-info}/top_level.txt +0 -0
@@ -15,7 +15,7 @@
  import os
  import pathlib
  import tempfile
- from typing import Union
+ from typing import Optional, Union

  from mlrun.artifacts import Artifact
  from mlrun.datastore import DataItem
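Most of the churn in the packager hunks below is the same mechanical change: parameters that default to None but were annotated with a plain type (for example file_format: str = None) are rewritten with an explicit Optional[...], which is what PEP 484 expects and what strict type checkers enforce. A minimal before/after sketch (the function names here are illustrative, not from mlrun):

from typing import Optional

# Pre-1.8 style: the annotation claims `str`, but the default is None.
# Strict checkers (e.g. mypy with no_implicit_optional) flag this.
def unpack_old(file_format: str = None) -> str:
    return file_format or "json"

# 1.8 style: the None case is part of the declared type.
def unpack_new(file_format: Optional[str] = None) -> str:
    return file_format or "json"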
@@ -140,7 +140,7 @@ class StrPackager(DefaultPackager):
  self,
  data_item: DataItem,
  is_directory: bool = False,
- archive_format: str = None,
+ archive_format: Optional[str] = None,
  ) -> str:
  """
  Unpack a data item representing a path string. If the path is of a file, the file is downloaded to a local
@@ -222,7 +222,7 @@ class _BuiltinCollectionPackager(DefaultPackager):
  return artifact, instructions

  def unpack_file(
- self, data_item: DataItem, file_format: str = None
+ self, data_item: DataItem, file_format: Optional[str] = None
  ) -> Union[dict, list]:
  """
  Unpack a builtin collection from file.
@@ -259,7 +259,9 @@ class DictPackager(_BuiltinCollectionPackager):

  PACKABLE_OBJECT_TYPE = dict

- def unpack_file(self, data_item: DataItem, file_format: str = None) -> dict:
+ def unpack_file(
+ self, data_item: DataItem, file_format: Optional[str] = None
+ ) -> dict:
  """
  Unpack a dictionary from file.

@@ -285,7 +287,9 @@ class ListPackager(_BuiltinCollectionPackager):

  PACKABLE_OBJECT_TYPE = list

- def unpack_file(self, data_item: DataItem, file_format: str = None) -> list:
+ def unpack_file(
+ self, data_item: DataItem, file_format: Optional[str] = None
+ ) -> list:
  """
  Unpack a list from file.

@@ -355,7 +359,9 @@ class TuplePackager(ListPackager):
  """
  return super().pack_file(obj=list(obj), key=key, file_format=file_format)

- def unpack_file(self, data_item: DataItem, file_format: str = None) -> tuple:
+ def unpack_file(
+ self, data_item: DataItem, file_format: Optional[str] = None
+ ) -> tuple:
  """
  Unpack a tuple from file.

@@ -400,7 +406,9 @@ class SetPackager(ListPackager):
  """
  return super().pack_file(obj=list(obj), key=key, file_format=file_format)

- def unpack_file(self, data_item: DataItem, file_format: str = None) -> set:
+ def unpack_file(
+ self, data_item: DataItem, file_format: Optional[str] = None
+ ) -> set:
  """
  Unpack a set from file.

@@ -434,7 +442,9 @@ class FrozensetPackager(SetPackager):
  """
  return super().pack_file(obj=set(obj), key=key, file_format=file_format)

- def unpack_file(self, data_item: DataItem, file_format: str = None) -> frozenset:
+ def unpack_file(
+ self, data_item: DataItem, file_format: Optional[str] = None
+ ) -> frozenset:
  """
  Unpack a frozenset from file.

@@ -481,7 +491,9 @@ class BytesPackager(ListPackager):
  """
  return super().pack_file(obj=list(obj), key=key, file_format=file_format)

- def unpack_file(self, data_item: DataItem, file_format: str = None) -> bytes:
+ def unpack_file(
+ self, data_item: DataItem, file_format: Optional[str] = None
+ ) -> bytes:
  """
  Unpack a bytes from file.

@@ -526,7 +538,9 @@ class BytearrayPackager(BytesPackager):
  """
  return super().pack_file(obj=bytes(obj), key=key, file_format=file_format)

- def unpack_file(self, data_item: DataItem, file_format: str = None) -> bytearray:
+ def unpack_file(
+ self, data_item: DataItem, file_format: Optional[str] = None
+ ) -> bytearray:
  """
  Unpack a bytearray from file.

@@ -585,7 +599,7 @@ class PathPackager(StrPackager):
  self,
  data_item: DataItem,
  is_directory: bool = False,
- archive_format: str = None,
+ archive_format: Optional[str] = None,
  ) -> pathlib.Path:
  """
  Unpack a data item representing a `Path`. If the path is of a file, the file is downloaded to a local
@@ -17,10 +17,11 @@ import inspect
  import os
  import shutil
  import traceback
- from typing import Any, Union
+ from typing import Any, Optional, Union

+ import mlrun.errors
  from mlrun.artifacts import Artifact
- from mlrun.datastore import DataItem, store_manager
+ from mlrun.datastore import DataItem, get_store_resource, store_manager
  from mlrun.errors import MLRunInvalidArgumentError
  from mlrun.utils import logger

@@ -41,7 +42,7 @@ class PackagersManager:
  It prepares the instructions / log hint configurations and then looks for the first packager that fits the task.
  """

- def __init__(self, default_packager: type[Packager] = None):
+ def __init__(self, default_packager: Optional[type[Packager]] = None):
  """
  Initialize a packagers manager.

@@ -302,18 +303,17 @@

  def link_packages(
  self,
- additional_artifacts: list[Artifact],
+ additional_artifact_uris: dict,
  additional_results: dict,
  ):
  """
  Link packages to each other according to the provided extra data and metrics spec keys. A future link is
  marked with ellipses (...). If no link is found, None is used and a warning is printed.

- :param additional_artifacts: Additional artifacts to link (should come from an `mlrun.MLClientCtx`).
- :param additional_results: Additional results to link (should come from an `mlrun.MLClientCtx`).
+ :param additional_artifact_uris: Additional artifact URIs to link (should come from an `mlrun.MLClientCtx`).
+ :param additional_results: Additional results to link (should come from an `mlrun.MLClientCtx`).
  """
  # Join the manager's artifacts and results with the additional ones to look for a link in all of them:
- joined_artifacts = [*additional_artifacts, *self.artifacts]
  joined_results = {**additional_results, **self.results}

  # Go over the artifacts and link:
@@ -324,7 +324,10 @@
  if artifact.spec.extra_data[key] is ...:
  # Look for an artifact or result with this key to link it:
  extra_data = self._look_for_extra_data(
- key=key, artifacts=joined_artifacts, results=joined_results
+ key=key,
+ artifacts=self.artifacts,
+ artifact_uris=additional_artifact_uris,
+ results=joined_results,
  )
  # Print a warning if a link is missing:
  if extra_data is None:
@@ -398,8 +401,8 @@
  def _get_packager_for_packing(
  self,
  obj: Any,
- artifact_type: str = None,
- configurations: dict = None,
+ artifact_type: Optional[str] = None,
+ configurations: Optional[dict] = None,
  ) -> Union[Packager, None]:
  """
  Look for a packager that can pack the provided object as the provided artifact type.
@@ -426,7 +429,7 @@
  self,
  data_item: Any,
  type_hint: type,
- artifact_type: str = None,
+ artifact_type: Optional[str] = None,
  ) -> Union[Packager, None]:
  """
  Look for a packager that can unpack the data item of the given type hint as the provided artifact type.
@@ -715,17 +718,31 @@
  def _look_for_extra_data(
  key: str,
  artifacts: list[Artifact],
+ artifact_uris: dict,
  results: dict,
  ) -> Union[Artifact, str, int, float, None]:
  """
  Look for an extra data item (artifact or result) by given key. If not found, None is returned.

- :param key: Key to look for.
- :param artifacts: Artifacts to look in.
- :param results: Results to look in.
+ :param key: Key to look for.
+ :param artifacts: Artifacts to look in.
+ :param artifact_uris: Artifacts URIs to look in.
+ :param results: Results to look in.

  :return: The artifact or result with the same key or None if not found.
  """
+ artifact_uris = artifact_uris or {}
+ for _key, uri in artifact_uris.items():
+ if key == _key:
+ try:
+ return get_store_resource(uri)
+ except mlrun.errors.MLRunNotFoundError as exc:
+ logger.warn(
+ f"Artifact {key=} not found when looking for extra data",
+ exc=mlrun.errors.err_to_str(exc),
+ )
+ return None
+
  # Look in the artifacts:
  for artifact in artifacts:
  if key == artifact.key:
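The net effect of the link_packages / _look_for_extra_data changes above is that the packagers manager no longer receives full Artifact objects from the context; it receives a key-to-URI mapping and resolves each linked key on demand through mlrun.datastore.get_store_resource, degrading to None (with a warning) when the store lookup raises MLRunNotFoundError. A standalone sketch of that resolution pattern, using only the names imported in the hunks above (the helper itself is illustrative, not part of mlrun):

from typing import Optional, Union

import mlrun.errors
from mlrun.artifacts import Artifact
from mlrun.datastore import get_store_resource


def resolve_linked_artifact(key: str, artifact_uris: dict) -> Optional[Union[Artifact, object]]:
    """Illustrative helper: resolve a linked key through its store:// URI."""
    uri = (artifact_uris or {}).get(key)
    if uri is None:
        return None
    try:
        # get_store_resource loads the object referenced by a store URI
        return get_store_resource(uri)
    except mlrun.errors.MLRunNotFoundError:
        # Same degradation as the packagers manager: a missing link becomes None
        return None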
@@ -11,9 +11,6 @@
  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  # See the License for the specific language governing permissions and
  # limitations under the License.
- #
-
- # flake8: noqa - this is until we take care of the F401 violations with respect to __all__ & sphinx

  from ._archiver import ArchiveSupportedFormat
  from ._formatter import StructFileSupportedFormat
@@ -19,7 +19,7 @@ import sys
  import tempfile
  import warnings
  from types import ModuleType
- from typing import Any, Union
+ from typing import Any, Optional, Union

  from mlrun.errors import MLRunInvalidArgumentError
  from mlrun.utils import logger
@@ -34,7 +34,7 @@ class Pickler:

  @staticmethod
  def pickle(
- obj: Any, pickle_module_name: str, output_path: str = None
+ obj: Any, pickle_module_name: str, output_path: Optional[str] = None
  ) -> tuple[str, dict[str, Union[str, None]]]:
  """
  Pickle an object using the given module. The pickled object will be saved to file to the given output path.
@@ -91,10 +91,10 @@
  def unpickle(
  pickle_path: str,
  pickle_module_name: str,
- object_module_name: str = None,
- python_version: str = None,
- pickle_module_version: str = None,
- object_module_version: str = None,
+ object_module_name: Optional[str] = None,
+ python_version: Optional[str] = None,
+ pickle_module_version: Optional[str] = None,
+ object_module_version: Optional[str] = None,
  ) -> Any:
  """
  Unpickle an object using the given instructions. Warnings may be raised in case any of the versions are
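Pickler is an internal utility, but the signatures above are shown in full, so a hedged round-trip sketch looks roughly as follows (the import path is assumed from this diff's module layout and may differ):

from mlrun.package.utils import Pickler  # assumed export path for the internal Pickler class

pickle_path, instructions = Pickler.pickle(
    obj={"a": 1},
    pickle_module_name="pickle",
    output_path=None,  # now Optional[str]; None presumably falls back to a temporary file
)
restored = Pickler.unpickle(
    pickle_path=pickle_path,
    pickle_module_name="pickle",
    # All version hints are now explicitly Optional and can be passed as None:
    object_module_name=None,
    python_version=None,
    pickle_module_version=None,
    object_module_version=None,
)
assert restored == {"a": 1}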
@@ -12,23 +12,10 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.

- # flake8: noqa - this is until we take care of the F401 violations with respect to __all__ & sphinx
  import json
  from pprint import pprint
  from time import sleep
-
- from mlrun_pipelines.common.mounts import VolumeMount
- from mlrun_pipelines.mounts import (
- auto_mount,
- mount_configmap,
- mount_hostpath,
- mount_pvc,
- mount_s3,
- mount_secret,
- mount_v3io,
- set_env_variables,
- v3io_cred,
- )
+ from typing import Optional

  from .iguazio import (
  V3ioStreamClient,
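This hunk drops the long-standing re-export of the mount helpers (auto_mount, mount_v3io, mount_pvc, v3io_cred, and friends) from mlrun.platforms. The file list above adds a new mlrun/runtimes/mounts.py (+572 lines), which appears to be their new home, so imports of these helpers likely need an update along these lines (a sketch under that assumption):

import mlrun
# Assumption based on the new mlrun/runtimes/mounts.py in 1.8.0rc2; previously these helpers
# were re-exported by mlrun.platforms via mlrun_pipelines.mounts, as the removed lines show.
from mlrun.runtimes.mounts import auto_mount, mount_v3io

# train.py and the function name are placeholders.
fn = mlrun.code_to_function("trainer", kind="job", filename="train.py", image="mlrun/mlrun")
fn.apply(auto_mount())   # attach the environment's default file-system mount
# fn.apply(mount_v3io())  # or request a V3IO mount explicitly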
@@ -39,8 +26,8 @@ from .iguazio import (

  def watch_stream(
  url,
- shard_ids: list = None,
- seek_to: str = None,
+ shard_ids: Optional[list] = None,
+ seek_to: Optional[str] = None,
  interval=None,
  is_json=False,
  **kwargs,
@@ -15,6 +15,7 @@
  import json
  import os
  import urllib
+ from typing import Optional
  from urllib.parse import urlparse

  import requests
@@ -250,7 +251,9 @@ class KafkaOutputStream:


  class V3ioStreamClient:
- def __init__(self, url: str, shard_id: int = 0, seek_to: str = None, **kwargs):
+ def __init__(
+ self, url: str, shard_id: int = 0, seek_to: Optional[str] = None, **kwargs
+ ):
  endpoint, stream_path = parse_path(url)
  seek_options = ["EARLIEST", "LATEST", "TIME", "SEQUENCE"]
  seek_to = seek_to or "LATEST"
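watch_stream and V3ioStreamClient keep their defaults and behavior; only the annotations change. For reference, a call sketch matching the signatures shown above (the stream URL is a placeholder, and seek_to accepts the options listed in V3ioStreamClient):

from mlrun.platforms import watch_stream

# Placeholder V3IO stream path; seek_to must be one of EARLIEST / LATEST / TIME / SEQUENCE.
watch_stream(
    "v3io:///projects/demo/streams/model-events",
    shard_ids=[0],
    seek_to="EARLIEST",
    interval=5,
    is_json=True,
)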
@@ -12,16 +12,16 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.
  #
+ import typing
  import warnings
  from typing import Optional, Union

- import mlrun_pipelines.common.models
- import mlrun_pipelines.models
-
  import mlrun
  import mlrun.common.constants as mlrun_constants
  import mlrun.common.schemas.function
  import mlrun.common.schemas.workflow
+ import mlrun_pipelines.common.models
+ import mlrun_pipelines.models
  from mlrun.utils import hub_prefix

  from .pipelines import enrich_function_object, pipeline_context
@@ -59,25 +59,25 @@ def _get_engine_and_function(function, project=None):

  def run_function(
  function: Union[str, mlrun.runtimes.BaseRuntime],
- handler: str = None,
+ handler: Optional[Union[str, typing.Callable]] = None,
  name: str = "",
- params: dict = None,
- hyperparams: dict = None,
+ params: Optional[dict] = None,
+ hyperparams: Optional[dict] = None,
  hyper_param_options: mlrun.model.HyperParamOptions = None,
- inputs: dict = None,
- outputs: list[str] = None,
+ inputs: Optional[dict] = None,
+ outputs: Optional[list[str]] = None,
  workdir: str = "",
- labels: dict = None,
+ labels: Optional[dict] = None,
  base_task: mlrun.model.RunTemplate = None,
  watch: bool = True,
- local: bool = None,
- verbose: bool = None,
- selector: str = None,
+ local: Optional[bool] = None,
+ verbose: Optional[bool] = None,
+ selector: Optional[str] = None,
  project_object=None,
- auto_build: bool = None,
+ auto_build: Optional[bool] = None,
  schedule: Union[str, mlrun.common.schemas.ScheduleCronTrigger] = None,
- artifact_path: str = None,
- notifications: list[mlrun.model.Notification] = None,
+ artifact_path: Optional[str] = None,
+ notifications: Optional[list[mlrun.model.Notification]] = None,
  returns: Optional[list[Union[str, dict[str, str]]]] = None,
  builder_env: Optional[list] = None,
  reset_on_run: Optional[bool] = None,
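Beyond the Optional rewrites, the notable change in run_function is that handler widens from str to Optional[Union[str, typing.Callable]], i.e. the signature now admits the handler function object as well as its name. A hedged usage sketch (project, file, and function names are placeholders):

import mlrun


def train(context, lr: float = 0.01):
    # minimal handler: log one result on the run
    context.log_result("lr", lr)


project = mlrun.get_or_create_project("demo", context="./")
fn = project.set_function(func=__file__, name="trainer", kind="job", image="mlrun/mlrun")

# The string form still works; per the widened annotation a callable may be passed as well.
run = project.run_function("trainer", handler="train", params={"lr": 0.1}, local=True)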
@@ -255,19 +255,19 @@ class BuildStatus:

  def build_function(
  function: Union[str, mlrun.runtimes.BaseRuntime],
- with_mlrun: bool = None,
+ with_mlrun: Optional[bool] = None,
  skip_deployed: bool = False,
  image=None,
  base_image=None,
- commands: list = None,
+ commands: Optional[list] = None,
  secret_name=None,
- requirements: Union[str, list[str]] = None,
- requirements_file: str = None,
+ requirements: Optional[Union[str, list[str]]] = None,
+ requirements_file: Optional[str] = None,
  mlrun_version_specifier=None,
- builder_env: dict = None,
+ builder_env: Optional[dict] = None,
  project_object=None,
  overwrite_build_params: bool = False,
- extra_args: str = None,
+ extra_args: Optional[str] = None,
  force_build: bool = False,
  ) -> Union[BuildStatus, mlrun_pipelines.models.PipelineNodeWrapper]:
  """deploy ML function, build container with its dependencies
@@ -371,13 +371,13 @@ class DeployStatus:

  def deploy_function(
  function: Union[str, mlrun.runtimes.BaseRuntime],
- models: list = None,
- env: dict = None,
- tag: str = None,
- verbose: bool = None,
- builder_env: dict = None,
+ models: Optional[list] = None,
+ env: Optional[dict] = None,
+ tag: Optional[str] = None,
+ verbose: Optional[bool] = None,
+ builder_env: Optional[dict] = None,
  project_object=None,
- mock: bool = None,
+ mock: Optional[bool] = None,
  ) -> Union[DeployStatus, mlrun_pipelines.models.PipelineNodeWrapper]:
  """deploy real-time (nuclio based) functions

@@ -20,16 +20,15 @@ import tempfile
  import typing
  import uuid

- import mlrun_pipelines.common.models
- import mlrun_pipelines.patcher
- import mlrun_pipelines.utils
-
  import mlrun
  import mlrun.common.runtimes.constants
  import mlrun.common.schemas
  import mlrun.common.schemas.function
  import mlrun.common.schemas.workflow
  import mlrun.utils.notifications
+ import mlrun_pipelines.common.models
+ import mlrun_pipelines.patcher
+ import mlrun_pipelines.utils
  from mlrun.errors import err_to_str
  from mlrun.utils import (
  get_ui_url,
@@ -317,7 +316,7 @@ def get_db_function(project, key) -> mlrun.runtimes.BaseRuntime:
  def enrich_function_object(
  project: mlrun.common.schemas.Project,
  function: mlrun.runtimes.BaseRuntime,
- decorator: typing.Callable = None,
+ decorator: typing.Optional[typing.Callable] = None,
  copy_function: bool = True,
  try_auto_mount: bool = True,
  ) -> mlrun.runtimes.BaseRuntime:
@@ -380,7 +379,7 @@
  project: "mlrun.projects.MlrunProject",
  workflow: WorkflowSpec = None,
  state: mlrun_pipelines.common.models.RunStatuses = "",
- exc: Exception = None,
+ exc: typing.Optional[Exception] = None,
  ):
  """
  :param run_id: unique id of the pipeline run
@@ -453,7 +452,7 @@ class _PipelineRunner(abc.ABC):
  artifact_path=None,
  namespace=None,
  source=None,
- notifications: list[mlrun.model.Notification] = None,
+ notifications: typing.Optional[list[mlrun.model.Notification]] = None,
  ) -> _PipelineRunStatus:
  pass

@@ -463,7 +462,7 @@
  run: "_PipelineRunStatus",
  project: typing.Optional["mlrun.projects.MlrunProject"] = None,
  timeout: typing.Optional[int] = None,
- expected_statuses: list[str] = None,
+ expected_statuses: typing.Optional[list[str]] = None,
  ):
  pass

@@ -577,7 +576,7 @@ class _KFPRunner(_PipelineRunner):
  artifact_path=None,
  namespace=None,
  source=None,
- notifications: list[mlrun.model.Notification] = None,
+ notifications: typing.Optional[list[mlrun.model.Notification]] = None,
  ) -> _PipelineRunStatus:
  pipeline_context.set(project, workflow_spec)
  workflow_handler = _PipelineRunner._get_handler(
@@ -641,7 +640,7 @@
  run: "_PipelineRunStatus",
  project: typing.Optional["mlrun.projects.MlrunProject"] = None,
  timeout: typing.Optional[int] = None,
- expected_statuses: list[str] = None,
+ expected_statuses: typing.Optional[list[str]] = None,
  ):
  project_name = project.metadata.name if project else ""
  logger.info(
@@ -686,7 +685,7 @@ class _LocalRunner(_PipelineRunner):
  artifact_path=None,
  namespace=None,
  source=None,
- notifications: list[mlrun.model.Notification] = None,
+ notifications: typing.Optional[list[mlrun.model.Notification]] = None,
  ) -> _PipelineRunStatus:
  pipeline_context.set(project, workflow_spec)
  workflow_handler = _PipelineRunner._get_handler(
@@ -763,13 +762,13 @@ class _RemoteRunner(_PipelineRunner):
  cls,
  project: "mlrun.projects.MlrunProject",
  workflow_spec: WorkflowSpec,
- name: str = None,
- workflow_handler: typing.Union[str, typing.Callable] = None,
+ name: typing.Optional[str] = None,
+ workflow_handler: typing.Optional[typing.Union[str, typing.Callable]] = None,
  secrets: mlrun.secrets.SecretsStore = None,
- artifact_path: str = None,
- namespace: str = None,
- source: str = None,
- notifications: list[mlrun.model.Notification] = None,
+ artifact_path: typing.Optional[str] = None,
+ namespace: typing.Optional[str] = None,
+ source: typing.Optional[str] = None,
+ notifications: typing.Optional[list[mlrun.model.Notification]] = None,
  ) -> typing.Optional[_PipelineRunStatus]:
  workflow_name = normalize_workflow_name(name=name, project_name=project.name)
  workflow_id = None
@@ -890,7 +889,7 @@
  timeout=None,
  expected_statuses=None,
  notifiers: mlrun.utils.notifications.CustomNotificationPusher = None,
- inner_engine: type[_PipelineRunner] = None,
+ inner_engine: typing.Optional[type[_PipelineRunner]] = None,
  ):
  inner_engine = inner_engine or _KFPRunner
  if inner_engine.engine == _KFPRunner.engine:
@@ -998,25 +997,25 @@ def load_and_run(context, *args, **kwargs):

  def load_and_run_workflow(
  context: mlrun.execution.MLClientCtx,
- url: str = None,
+ url: typing.Optional[str] = None,
  project_name: str = "",
- init_git: bool = None,
- subpath: str = None,
+ init_git: typing.Optional[bool] = None,
+ subpath: typing.Optional[str] = None,
  clone: bool = False,
- workflow_name: str = None,
- workflow_path: str = None,
- workflow_arguments: dict[str, typing.Any] = None,
- artifact_path: str = None,
- workflow_handler: typing.Union[str, typing.Callable] = None,
- namespace: str = None,
+ workflow_name: typing.Optional[str] = None,
+ workflow_path: typing.Optional[str] = None,
+ workflow_arguments: typing.Optional[dict[str, typing.Any]] = None,
+ artifact_path: typing.Optional[str] = None,
+ workflow_handler: typing.Optional[typing.Union[str, typing.Callable]] = None,
+ namespace: typing.Optional[str] = None,
  sync: bool = False,
  dirty: bool = False,
- engine: str = None,
- local: bool = None,
+ engine: typing.Optional[str] = None,
+ local: typing.Optional[bool] = None,
  schedule: typing.Union[str, mlrun.common.schemas.ScheduleCronTrigger] = None,
- cleanup_ttl: int = None,
+ cleanup_ttl: typing.Optional[int] = None,
  wait_for_completion: bool = False,
- project_context: str = None,
+ project_context: typing.Optional[str] = None,
  ):
  """
  Auxiliary function that the RemoteRunner run once or run every schedule.
@@ -1220,13 +1219,13 @@ def handle_workflow_completion(


  def import_remote_project(
  context: mlrun.execution.MLClientCtx,
- url: str = None,
+ url: typing.Optional[str] = None,
  project_name: str = "",
- init_git: bool = None,
- subpath: str = None,
+ init_git: typing.Optional[bool] = None,
+ subpath: typing.Optional[str] = None,
  clone: bool = False,
  save: bool = True,
- project_context: str = None,
+ project_context: typing.Optional[str] = None,
  ):
  """
  This function loads a project from a given remote source.