mlrun 1.6.0rc35__py3-none-any.whl → 1.7.0rc2__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registries.
Potentially problematic release.
This version of mlrun might be problematic.
- mlrun/__main__.py +3 -3
- mlrun/api/schemas/__init__.py +1 -1
- mlrun/artifacts/base.py +11 -6
- mlrun/artifacts/dataset.py +2 -2
- mlrun/artifacts/model.py +30 -24
- mlrun/artifacts/plots.py +2 -2
- mlrun/common/db/sql_session.py +5 -3
- mlrun/common/helpers.py +1 -2
- mlrun/common/schemas/artifact.py +3 -3
- mlrun/common/schemas/auth.py +3 -3
- mlrun/common/schemas/background_task.py +1 -1
- mlrun/common/schemas/client_spec.py +1 -1
- mlrun/common/schemas/feature_store.py +16 -16
- mlrun/common/schemas/frontend_spec.py +7 -7
- mlrun/common/schemas/function.py +1 -1
- mlrun/common/schemas/hub.py +4 -9
- mlrun/common/schemas/memory_reports.py +2 -2
- mlrun/common/schemas/model_monitoring/grafana.py +4 -4
- mlrun/common/schemas/model_monitoring/model_endpoints.py +14 -15
- mlrun/common/schemas/notification.py +4 -4
- mlrun/common/schemas/object.py +2 -2
- mlrun/common/schemas/pipeline.py +1 -1
- mlrun/common/schemas/project.py +3 -3
- mlrun/common/schemas/runtime_resource.py +8 -12
- mlrun/common/schemas/schedule.py +3 -3
- mlrun/common/schemas/tag.py +1 -2
- mlrun/common/schemas/workflow.py +2 -2
- mlrun/config.py +8 -4
- mlrun/data_types/to_pandas.py +1 -3
- mlrun/datastore/base.py +0 -28
- mlrun/datastore/datastore_profile.py +9 -9
- mlrun/datastore/filestore.py +0 -1
- mlrun/datastore/google_cloud_storage.py +1 -1
- mlrun/datastore/sources.py +7 -11
- mlrun/datastore/spark_utils.py +1 -2
- mlrun/datastore/targets.py +31 -31
- mlrun/datastore/utils.py +4 -6
- mlrun/datastore/v3io.py +70 -46
- mlrun/db/base.py +22 -23
- mlrun/db/httpdb.py +34 -34
- mlrun/db/nopdb.py +19 -19
- mlrun/errors.py +1 -1
- mlrun/execution.py +4 -4
- mlrun/feature_store/api.py +20 -21
- mlrun/feature_store/common.py +1 -1
- mlrun/feature_store/feature_set.py +28 -32
- mlrun/feature_store/feature_vector.py +24 -27
- mlrun/feature_store/retrieval/base.py +7 -7
- mlrun/feature_store/retrieval/conversion.py +2 -4
- mlrun/feature_store/steps.py +7 -15
- mlrun/features.py +5 -7
- mlrun/frameworks/_common/artifacts_library.py +9 -9
- mlrun/frameworks/_common/mlrun_interface.py +5 -5
- mlrun/frameworks/_common/model_handler.py +48 -48
- mlrun/frameworks/_common/plan.py +2 -3
- mlrun/frameworks/_common/producer.py +3 -4
- mlrun/frameworks/_common/utils.py +5 -5
- mlrun/frameworks/_dl_common/loggers/logger.py +6 -7
- mlrun/frameworks/_dl_common/loggers/mlrun_logger.py +9 -9
- mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +16 -35
- mlrun/frameworks/_ml_common/artifacts_library.py +1 -2
- mlrun/frameworks/_ml_common/loggers/logger.py +3 -4
- mlrun/frameworks/_ml_common/loggers/mlrun_logger.py +4 -5
- mlrun/frameworks/_ml_common/model_handler.py +24 -24
- mlrun/frameworks/_ml_common/pkl_model_server.py +2 -2
- mlrun/frameworks/_ml_common/plan.py +1 -1
- mlrun/frameworks/_ml_common/plans/calibration_curve_plan.py +2 -3
- mlrun/frameworks/_ml_common/plans/confusion_matrix_plan.py +2 -3
- mlrun/frameworks/_ml_common/plans/dataset_plan.py +3 -3
- mlrun/frameworks/_ml_common/plans/feature_importance_plan.py +3 -3
- mlrun/frameworks/_ml_common/plans/roc_curve_plan.py +4 -4
- mlrun/frameworks/_ml_common/utils.py +4 -4
- mlrun/frameworks/auto_mlrun/auto_mlrun.py +7 -7
- mlrun/frameworks/huggingface/model_server.py +4 -4
- mlrun/frameworks/lgbm/__init__.py +32 -32
- mlrun/frameworks/lgbm/callbacks/logging_callback.py +4 -5
- mlrun/frameworks/lgbm/callbacks/mlrun_logging_callback.py +4 -5
- mlrun/frameworks/lgbm/mlrun_interfaces/booster_mlrun_interface.py +1 -3
- mlrun/frameworks/lgbm/mlrun_interfaces/mlrun_interface.py +6 -6
- mlrun/frameworks/lgbm/model_handler.py +9 -9
- mlrun/frameworks/lgbm/model_server.py +6 -6
- mlrun/frameworks/lgbm/utils.py +5 -5
- mlrun/frameworks/onnx/dataset.py +8 -8
- mlrun/frameworks/onnx/mlrun_interface.py +3 -3
- mlrun/frameworks/onnx/model_handler.py +6 -6
- mlrun/frameworks/onnx/model_server.py +7 -7
- mlrun/frameworks/parallel_coordinates.py +2 -2
- mlrun/frameworks/pytorch/__init__.py +16 -16
- mlrun/frameworks/pytorch/callbacks/callback.py +4 -5
- mlrun/frameworks/pytorch/callbacks/logging_callback.py +17 -17
- mlrun/frameworks/pytorch/callbacks/mlrun_logging_callback.py +11 -11
- mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +23 -29
- mlrun/frameworks/pytorch/callbacks_handler.py +38 -38
- mlrun/frameworks/pytorch/mlrun_interface.py +20 -20
- mlrun/frameworks/pytorch/model_handler.py +17 -17
- mlrun/frameworks/pytorch/model_server.py +7 -7
- mlrun/frameworks/sklearn/__init__.py +12 -12
- mlrun/frameworks/sklearn/estimator.py +4 -4
- mlrun/frameworks/sklearn/metrics_library.py +14 -14
- mlrun/frameworks/sklearn/mlrun_interface.py +3 -6
- mlrun/frameworks/sklearn/model_handler.py +2 -2
- mlrun/frameworks/tf_keras/__init__.py +5 -5
- mlrun/frameworks/tf_keras/callbacks/logging_callback.py +14 -14
- mlrun/frameworks/tf_keras/callbacks/mlrun_logging_callback.py +11 -11
- mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +19 -23
- mlrun/frameworks/tf_keras/mlrun_interface.py +7 -9
- mlrun/frameworks/tf_keras/model_handler.py +14 -14
- mlrun/frameworks/tf_keras/model_server.py +6 -6
- mlrun/frameworks/xgboost/__init__.py +12 -12
- mlrun/frameworks/xgboost/model_handler.py +6 -6
- mlrun/k8s_utils.py +4 -5
- mlrun/kfpops.py +2 -2
- mlrun/launcher/base.py +10 -10
- mlrun/launcher/local.py +8 -8
- mlrun/launcher/remote.py +7 -7
- mlrun/lists.py +3 -4
- mlrun/model.py +205 -55
- mlrun/model_monitoring/api.py +21 -24
- mlrun/model_monitoring/application.py +4 -4
- mlrun/model_monitoring/batch.py +17 -17
- mlrun/model_monitoring/controller.py +2 -1
- mlrun/model_monitoring/features_drift_table.py +44 -31
- mlrun/model_monitoring/prometheus.py +1 -4
- mlrun/model_monitoring/stores/kv_model_endpoint_store.py +11 -13
- mlrun/model_monitoring/stores/model_endpoint_store.py +9 -11
- mlrun/model_monitoring/stores/models/__init__.py +2 -2
- mlrun/model_monitoring/stores/sql_model_endpoint_store.py +11 -13
- mlrun/model_monitoring/stream_processing.py +16 -34
- mlrun/model_monitoring/tracking_policy.py +2 -1
- mlrun/package/__init__.py +6 -6
- mlrun/package/context_handler.py +5 -5
- mlrun/package/packager.py +7 -7
- mlrun/package/packagers/default_packager.py +6 -6
- mlrun/package/packagers/numpy_packagers.py +15 -15
- mlrun/package/packagers/pandas_packagers.py +5 -5
- mlrun/package/packagers/python_standard_library_packagers.py +10 -10
- mlrun/package/packagers_manager.py +18 -23
- mlrun/package/utils/_formatter.py +4 -4
- mlrun/package/utils/_pickler.py +2 -2
- mlrun/package/utils/_supported_format.py +4 -4
- mlrun/package/utils/log_hint_utils.py +2 -2
- mlrun/package/utils/type_hint_utils.py +4 -9
- mlrun/platforms/other.py +1 -2
- mlrun/projects/operations.py +5 -5
- mlrun/projects/pipelines.py +9 -9
- mlrun/projects/project.py +58 -46
- mlrun/render.py +1 -1
- mlrun/run.py +9 -9
- mlrun/runtimes/__init__.py +7 -4
- mlrun/runtimes/base.py +20 -23
- mlrun/runtimes/constants.py +5 -5
- mlrun/runtimes/daskjob.py +8 -8
- mlrun/runtimes/databricks_job/databricks_cancel_task.py +1 -1
- mlrun/runtimes/databricks_job/databricks_runtime.py +7 -7
- mlrun/runtimes/function_reference.py +1 -1
- mlrun/runtimes/local.py +1 -1
- mlrun/runtimes/mpijob/abstract.py +1 -2
- mlrun/runtimes/nuclio/__init__.py +20 -0
- mlrun/runtimes/{function.py → nuclio/function.py} +15 -16
- mlrun/runtimes/{nuclio.py → nuclio/nuclio.py} +6 -6
- mlrun/runtimes/{serving.py → nuclio/serving.py} +13 -12
- mlrun/runtimes/pod.py +95 -48
- mlrun/runtimes/remotesparkjob.py +1 -1
- mlrun/runtimes/sparkjob/spark3job.py +50 -33
- mlrun/runtimes/utils.py +1 -2
- mlrun/secrets.py +3 -3
- mlrun/serving/remote.py +0 -4
- mlrun/serving/routers.py +6 -6
- mlrun/serving/server.py +4 -4
- mlrun/serving/states.py +29 -0
- mlrun/serving/utils.py +3 -3
- mlrun/serving/v1_serving.py +6 -7
- mlrun/serving/v2_serving.py +50 -8
- mlrun/track/tracker_manager.py +3 -3
- mlrun/track/trackers/mlflow_tracker.py +1 -2
- mlrun/utils/async_http.py +5 -7
- mlrun/utils/azure_vault.py +1 -1
- mlrun/utils/clones.py +1 -2
- mlrun/utils/condition_evaluator.py +3 -3
- mlrun/utils/db.py +3 -3
- mlrun/utils/helpers.py +37 -119
- mlrun/utils/http.py +1 -4
- mlrun/utils/logger.py +49 -14
- mlrun/utils/notifications/notification/__init__.py +3 -3
- mlrun/utils/notifications/notification/base.py +2 -2
- mlrun/utils/notifications/notification/ipython.py +1 -1
- mlrun/utils/notifications/notification_pusher.py +8 -14
- mlrun/utils/retryer.py +207 -0
- mlrun/utils/singleton.py +1 -1
- mlrun/utils/v3io_clients.py +2 -3
- mlrun/utils/version/version.json +2 -2
- mlrun/utils/version/version.py +2 -6
- {mlrun-1.6.0rc35.dist-info → mlrun-1.7.0rc2.dist-info}/METADATA +9 -9
- mlrun-1.7.0rc2.dist-info/RECORD +315 -0
- mlrun-1.6.0rc35.dist-info/RECORD +0 -313
- {mlrun-1.6.0rc35.dist-info → mlrun-1.7.0rc2.dist-info}/LICENSE +0 -0
- {mlrun-1.6.0rc35.dist-info → mlrun-1.7.0rc2.dist-info}/WHEEL +0 -0
- {mlrun-1.6.0rc35.dist-info → mlrun-1.7.0rc2.dist-info}/entry_points.txt +0 -0
- {mlrun-1.6.0rc35.dist-info → mlrun-1.7.0rc2.dist-info}/top_level.txt +0 -0
mlrun/package/packagers/python_standard_library_packagers.py
CHANGED

@@ -15,7 +15,7 @@
 import os
 import pathlib
 import tempfile
-from typing import
+from typing import Union

 from mlrun.artifacts import Artifact
 from mlrun.datastore import DataItem
@@ -45,7 +45,7 @@ class NonePackager(DefaultPackager):
     DEFAULT_PACKING_ARTIFACT_TYPE = ArtifactType.RESULT

     # TODO: `None` as pickle will be available from Python 3.10, so this method can be removed once we move to 3.10.
-    def get_supported_artifact_types(self) ->
+    def get_supported_artifact_types(self) -> list[str]:
         """
         Get all the supported artifact types on this packager. It will be the same as `DefaultPackager` but without the
         'object' artifact type support (None cannot be pickled, only from Python 3.10, and it should not be pickled
@@ -96,7 +96,7 @@ class StrPackager(DefaultPackager):

     def pack_path(
         self, obj: str, key: str, archive_format: str = DEFAULT_ARCHIVE_FORMAT
-    ) ->
+    ) -> tuple[Artifact, dict]:
         """
         Pack a path string value content (pack the file or directory in that path).

@@ -198,7 +198,7 @@ class _BuiltinCollectionPackager(DefaultPackager):
         obj: Union[dict, list],
         key: str,
         file_format: str = DEFAULT_STRUCT_FILE_FORMAT,
-    ) ->
+    ) -> tuple[Artifact, dict]:
         """
         Pack a builtin collection as a file by the given format.

@@ -343,7 +343,7 @@ class TuplePackager(ListPackager):

     def pack_file(
         self, obj: tuple, key: str, file_format: str = DEFAULT_STRUCT_FILE_FORMAT
-    ) ->
+    ) -> tuple[Artifact, dict]:
         """
         Pack a tuple as a file by the given format.

@@ -388,7 +388,7 @@ class SetPackager(ListPackager):

     def pack_file(
         self, obj: set, key: str, file_format: str = DEFAULT_STRUCT_FILE_FORMAT
-    ) ->
+    ) -> tuple[Artifact, dict]:
         """
         Pack a set as a file by the given format.

@@ -422,7 +422,7 @@ class FrozensetPackager(SetPackager):

     def pack_file(
         self, obj: frozenset, key: str, file_format: str = DEFAULT_STRUCT_FILE_FORMAT
-    ) ->
+    ) -> tuple[Artifact, dict]:
         """
         Pack a frozenset as a file by the given format.

@@ -469,7 +469,7 @@ class BytesPackager(ListPackager):

     def pack_file(
         self, obj: bytes, key: str, file_format: str = DEFAULT_STRUCT_FILE_FORMAT
-    ) ->
+    ) -> tuple[Artifact, dict]:
         """
         Pack a bytes as a file by the given format.

@@ -514,7 +514,7 @@ class BytearrayPackager(BytesPackager):

     def pack_file(
         self, obj: bytearray, key: str, file_format: str = DEFAULT_STRUCT_FILE_FORMAT
-    ) ->
+    ) -> tuple[Artifact, dict]:
         """
         Pack a bytearray as a file by the given format.

@@ -569,7 +569,7 @@ class PathPackager(StrPackager):

     def pack_path(
         self, obj: pathlib.Path, key: str, archive_format: str = DEFAULT_ARCHIVE_FORMAT
-    ) ->
+    ) -> tuple[Artifact, dict]:
         """
         Pack a `Path` value (pack the file or directory in that path).

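Nearly every hunk in this file is the same mechanical change: return annotations built from `typing` generics are replaced by the built-in generics of PEP 585 (`tuple[Artifact, dict]`, `list[str]`), which require Python 3.9 or later. A minimal sketch of the before/after pattern, not part of the package diff; `Artifact` here is a placeholder class, not the real `mlrun.artifacts.Artifact`:

from typing import Tuple


class Artifact:  # placeholder for illustration only
    pass


# 1.6.x style: generic aliases imported from `typing`
def pack_file_old(obj: set, key: str) -> Tuple[Artifact, dict]:
    return Artifact(), {"key": key}


# 1.7.x style: built-in generics (PEP 585), valid on Python 3.9 and later
def pack_file_new(obj: set, key: str) -> tuple[Artifact, dict]:
    return Artifact(), {"key": key}


assert pack_file_old(set(), "a")[1] == pack_file_new(set(), "a")[1]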
mlrun/package/packagers_manager.py
CHANGED

@@ -16,9 +16,8 @@ import importlib
 import inspect
 import os
 import shutil
-import sys
 import traceback
-from typing import Any,
+from typing import Any, Union

 from mlrun.artifacts import Artifact
 from mlrun.datastore import DataItem, store_manager
@@ -42,7 +41,7 @@ class PackagersManager:
     It prepares the instructions / log hint configurations and then looks for the first packager that fits the task.
     """

-    def __init__(self, default_packager:
+    def __init__(self, default_packager: type[Packager] = None):
         """
         Initialize a packagers manager.

@@ -54,15 +53,15 @@ class PackagersManager:
         self._default_packager = (default_packager or DefaultPackager)()

         # Initialize the packagers list (with the default packager in it):
-        self._packagers:
+        self._packagers: list[Packager] = []

         # Set an artifacts list and results dictionary to collect all packed objects (will be used later to write extra
         # data if noted by the user using the log hint key "extra_data")
-        self._artifacts:
+        self._artifacts: list[Artifact] = []
         self._results = {}

     @property
-    def artifacts(self) ->
+    def artifacts(self) -> list[Artifact]:
         """
         Get the artifacts that were packed by the manager.

@@ -80,7 +79,7 @@ class PackagersManager:
         return self._results

     def collect_packagers(
-        self, packagers:
+        self, packagers: list[Union[type[Packager], str]], default_priority: int = 5
     ):
         """
         Collect the provided packagers. Packagers passed as module paths are imported and validated to be of type
@@ -171,8 +170,8 @@ class PackagersManager:
         self._packagers.sort()

     def pack(
-        self, obj: Any, log_hint:
-    ) -> Union[Artifact, dict, None,
+        self, obj: Any, log_hint: dict[str, str]
+    ) -> Union[Artifact, dict, None, list[Union[Artifact, dict, None]]]:
         """
         Pack an object using one of the manager's packagers. A `dict` ("**") or `list` ("*") unpacking syntax in the
         log hint key packs the objects within them in separate packages.
@@ -244,7 +243,7 @@ class PackagersManager:
         # If multiple packages were packed, return a list, otherwise return the single package:
         return packages if len(packages) > 1 else packages[0]

-    def unpack(self, data_item: DataItem, type_hint:
+    def unpack(self, data_item: DataItem, type_hint: type) -> Any:
         """
         Unpack an object using one of the manager's packagers. The data item can be unpacked in two ways:

@@ -264,11 +263,7 @@ class PackagersManager:
         :return: The unpacked object parsed as type hinted.
         """
         # Check if `DataItem` is hinted - meaning the user can expect a data item and do not want to unpack it:
-
-        if sys.version_info[1] < 8:
-            if self._get_type_name(typ=DataItem) in str(type_hint):
-                return data_item
-        elif TypeHintUtils.is_matching(object_type=DataItem, type_hint=type_hint):
+        if TypeHintUtils.is_matching(object_type=DataItem, type_hint=type_hint):
             return data_item

         # Set variables to hold the manager notes and packager instructions:
@@ -306,7 +301,7 @@ class PackagersManager:

     def link_packages(
         self,
-        additional_artifacts:
+        additional_artifacts: list[Artifact],
         additional_results: dict,
     ):
         """
@@ -371,7 +366,7 @@ class PackagersManager:
     ARTIFACT_TYPE = "artifact_type"
     INSTRUCTIONS = "instructions"

-    def _get_packagers_with_default_packager(self) ->
+    def _get_packagers_with_default_packager(self) -> list[Packager]:
         """
         Get the full list of packagers - the collected packagers and the default packager (located at last place in the
         list - the lowest priority).
@@ -635,7 +630,7 @@ class PackagersManager:
         )
         return self._unpack_data_item(data_item=data_item, type_hint=type_hint)

-    def _unpack_data_item(self, data_item: DataItem, type_hint:
+    def _unpack_data_item(self, data_item: DataItem, type_hint: type):
         """
         Unpack a data item to the desired hinted type. In case the type hint includes multiple types (as in the case of
         `typing.Union`), the manager goes over the types, and reduces them while looking for the first packager that
@@ -649,7 +644,7 @@ class PackagersManager:
         :raise MLRunPackageUnpackingError: If there is no packager that supports the provided type hint.
         """
         # Prepare a list of a packager and exception string for all the failures in case there was no fitting packager:
-        found_packagers:
+        found_packagers: list[tuple[Packager, str]] = []

         # Try to unpack as one of the possible types in the type hint:
         possible_type_hints = {type_hint}
@@ -718,7 +713,7 @@ class PackagersManager:
     @staticmethod
     def _look_for_extra_data(
         key: str,
-        artifacts:
+        artifacts: list[Artifact],
         results: dict,
     ) -> Union[Artifact, str, int, float, None]:
         """
@@ -739,7 +734,7 @@ class PackagersManager:
         return results.get(key, None)

     @staticmethod
-    def _split_module_path(module_path: str) ->
+    def _split_module_path(module_path: str) -> tuple[str, str]:
         """
         Split a module path to the module name and the class name. Inner classes are not supported.

@@ -756,7 +751,7 @@ class PackagersManager:
         return module_name, class_name

     @staticmethod
-    def _get_type_name(typ:
+    def _get_type_name(typ: type) -> str:
         """
         Get an object type full name - its module path. For example, the name of a pandas data frame is "DataFrame"
         but its full name (module path) is: "pandas.core.frame.DataFrame".
@@ -777,7 +772,7 @@ class PackagersManager:
         return f"{module_name}.{class_name}" if module_name else class_name

     @staticmethod
-    def _get_type_from_name(type_name: str) ->
+    def _get_type_from_name(type_name: str) -> type:
         """
         Get the type object out of the given module path. The module must be a full module path (for example:
         "pandas.DataFrame" and not "DataFrame") otherwise it assumes to be from the local run module - __main__.
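Beyond the annotation changes, these hunks remove the Python 3.7 fallback in `unpack`: the check for a `DataItem` type hint now always goes through `TypeHintUtils.is_matching`. A hedged, self-contained sketch of that short-circuit; `DataItem` and `is_matching` below are simplified stand-ins, not the mlrun implementations:

import typing


class DataItem:  # stand-in for mlrun.datastore.DataItem
    pass


def is_matching(object_type: type, type_hint) -> bool:
    # Naive matching: exact type or membership in a typing.Union
    if type_hint is object_type:
        return True
    return object_type in typing.get_args(type_hint)


def unpack(data_item: DataItem, type_hint):
    # If the handler already expects the raw DataItem, return it untouched:
    if is_matching(DataItem, type_hint):
        return data_item
    raise NotImplementedError("a packager would parse the item into the hinted type")


item = DataItem()
assert unpack(item, DataItem) is item
assert unpack(item, typing.Optional[DataItem]) is item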
mlrun/package/utils/_formatter.py
CHANGED

@@ -82,7 +82,7 @@ class _JSONFormatter(_Formatter):

         :return: The read object.
         """
-        with open(file_path
+        with open(file_path) as file:
             obj = json.load(file)
         return obj

@@ -117,7 +117,7 @@ class _JSONLFormatter(_Formatter):

         :return: The read object.
         """
-        with open(file_path
+        with open(file_path) as file:
             lines = file.readlines()

         obj = []
@@ -157,7 +157,7 @@ class _YAMLFormatter(_Formatter):

         :return: The read object.
         """
-        with open(file_path
+        with open(file_path) as file:
             obj = yaml.safe_load(file)
         return obj

@@ -188,7 +188,7 @@ class _TXTFormatter(_Formatter):

         :return: The read object.
         """
-        with open(file_path
+        with open(file_path) as file:
             obj = ast.literal_eval(file.read())
         return obj

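All four formatter hunks make the same change: the explicit read mode is dropped because `open(path)` already defaults to text-read mode and is therefore equivalent to `open(path, "r")`. A small self-contained illustration, not part of the package diff (the temporary file is made up for the example):

import json
import tempfile

# Write a throwaway JSON file to read back:
with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as tmp:
    json.dump({"answer": 42}, tmp)
    path = tmp.name

# open() without a mode argument reads text, exactly like open(path, "r"):
with open(path) as file:
    obj = json.load(file)

assert obj == {"answer": 42}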
mlrun/package/utils/_pickler.py
CHANGED

@@ -19,7 +19,7 @@ import sys
 import tempfile
 import warnings
 from types import ModuleType
-from typing import Any,
+from typing import Any, Union

 from mlrun.errors import MLRunInvalidArgumentError
 from mlrun.utils import logger
@@ -35,7 +35,7 @@ class Pickler:
     @staticmethod
     def pickle(
         obj: Any, pickle_module_name: str, output_path: str = None
-    ) ->
+    ) -> tuple[str, dict[str, Union[str, None]]]:
         """
         Pickle an object using the given module. The pickled object will be saved to file to the given output path.

mlrun/package/utils/_supported_format.py
CHANGED

@@ -13,7 +13,7 @@
 # limitations under the License.
 #
 from abc import ABC
-from typing import
+from typing import Generic, TypeVar, Union

 # A generic type for a supported format handler class type:
 FileHandlerType = TypeVar("FileHandlerType")
@@ -29,10 +29,10 @@ class SupportedFormat(ABC, Generic[FileHandlerType]):

     # The map to use in the method `get_format_handler`. A dictionary of string key to a class type to handle that
     # format. New supported formats and handlers should be added to it:
-    _FORMAT_HANDLERS_MAP:
+    _FORMAT_HANDLERS_MAP: dict[str, type[FileHandlerType]] = {}

     @classmethod
-    def get_all_formats(cls) ->
+    def get_all_formats(cls) -> list[str]:
         """
         Get all supported formats.

@@ -45,7 +45,7 @@ class SupportedFormat(ABC, Generic[FileHandlerType]):
         ]

     @classmethod
-    def get_format_handler(cls, fmt: str) ->
+    def get_format_handler(cls, fmt: str) -> type[FileHandlerType]:
         """
         Get the format handler to the provided format (file extension):

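The `_FORMAT_HANDLERS_MAP` annotation above is now `dict[str, type[FileHandlerType]]`, i.e. a mapping from a format string to a handler class. A hedged sketch of that registry pattern, not part of the package diff; the handler classes and format keys are placeholders, not mlrun's formatter handlers:

from typing import Generic, TypeVar

FileHandlerType = TypeVar("FileHandlerType")


class _JSONHandler:  # placeholder handler class
    pass


class _YAMLHandler:  # placeholder handler class
    pass


class SupportedFormat(Generic[FileHandlerType]):
    # Map a format string to the class that handles it:
    _FORMAT_HANDLERS_MAP: dict[str, type[FileHandlerType]] = {
        "json": _JSONHandler,
        "yaml": _YAMLHandler,
    }

    @classmethod
    def get_all_formats(cls) -> list[str]:
        return list(cls._FORMAT_HANDLERS_MAP.keys())

    @classmethod
    def get_format_handler(cls, fmt: str) -> type[FileHandlerType]:
        return cls._FORMAT_HANDLERS_MAP[fmt]


assert SupportedFormat.get_all_formats() == ["json", "yaml"]
assert SupportedFormat.get_format_handler("json") is _JSONHandler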
mlrun/package/utils/log_hint_utils.py
CHANGED

@@ -35,8 +35,8 @@ class LogHintUtils:

     @staticmethod
     def parse_log_hint(
-        log_hint: typing.Union[
-    ) -> typing.Union[
+        log_hint: typing.Union[dict[str, str], str, None],
+    ) -> typing.Union[dict[str, str], None]:
         """
         Parse a given log hint from string to a logging configuration dictionary. The string will be read as the
         artifact key ('key' in the dictionary) and if the string have a single colon, the following structure is
mlrun/package/utils/type_hint_utils.py
CHANGED

@@ -16,7 +16,6 @@ import builtins
 import importlib
 import itertools
 import re
-import sys
 import typing

 from mlrun.errors import MLRunInvalidArgumentError
@@ -151,7 +150,7 @@ class TypeHintUtils:
     @staticmethod
     def is_matching(
         object_type: type,
-        type_hint: typing.Union[type,
+        type_hint: typing.Union[type, set[type]],
         include_subclasses: bool = True,
         reduce_type_hint: bool = True,
     ) -> bool:
@@ -189,8 +188,8 @@ class TypeHintUtils:

     @staticmethod
     def reduce_type_hint(
-        type_hint: typing.Union[type,
-    ) ->
+        type_hint: typing.Union[type, set[type]],
+    ) -> set[type]:
         """
         Reduce a type hint (or a set of type hints) using the `_reduce_type_hint` function.

@@ -212,7 +211,7 @@ class TypeHintUtils:
         )

     @staticmethod
-    def _reduce_type_hint(type_hint: type) ->
+    def _reduce_type_hint(type_hint: type) -> list[type]:
         """
         Reduce a type hint. If the type hint is a `typing` module, it will be reduced to its original hinted types. For
         example: `typing.Union[int, float, typing.List[int]]` will return `[int, float, List[int]]` and
@@ -225,10 +224,6 @@ class TypeHintUtils:

         :return: The reduced type hint as list of hinted types or an empty list if the type hint could not be reduced.
         """
-        # TODO: Remove when we'll no longer support Python 3.7:
-        if sys.version_info[1] < 8:
-            return []
-
         # If it's not a typing type (meaning it's an actual object type) then we can't reduce it further:
         if not TypeHintUtils.is_typing_type(type_hint=type_hint):
             return []
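These hunks drop the `sys.version_info` guard that made `_reduce_type_hint` a no-op on Python 3.7, and annotate the helpers with built-in generics. A hedged sketch of what "reducing" a typing hint to its member types means, per the docstring above; this is a simplified stand-in, not mlrun's implementation:

import typing


def reduce_type_hint(type_hint) -> list[type]:
    # Plain classes (non-typing objects) cannot be reduced further:
    origin = typing.get_origin(type_hint)
    if origin is None:
        return []
    # A Union reduces to its member hints:
    if origin is typing.Union:
        return list(typing.get_args(type_hint))
    # e.g. list[int] reduces to its origin class `list`:
    return [origin]


print(reduce_type_hint(typing.Union[int, float, list[int]]))
# [<class 'int'>, <class 'float'>, list[int]]
print(reduce_type_hint(int))  # []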
mlrun/platforms/other.py
CHANGED

@@ -14,7 +14,6 @@
 #
 # this file is based on the code from kubeflow pipelines git
 import os
-from typing import Dict

 import kfp.dsl

@@ -274,7 +273,7 @@ def mount_s3(
     return _use_s3_cred


-def set_env_variables(env_vars_dict:
+def set_env_variables(env_vars_dict: dict[str, str] = None, **kwargs):
     """
     Modifier function to apply a set of environment variables to a runtime. Variables may be passed
     as either a dictionary of name-value pairs, or as arguments to the function.
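`set_env_variables` keeps its documented behaviour (env vars from a dict and/or keyword arguments) with the dict argument now typed as `dict[str, str]`. A hedged, self-contained sketch of such a modifier, not the mlrun implementation; the toy `runtime_spec` dict stands in for a real runtime object:

def set_env_variables(env_vars_dict: dict[str, str] = None, **kwargs):
    # Merge the dict form and the keyword form into one mapping:
    env_vars = {**(env_vars_dict or {}), **kwargs}

    def _apply(runtime_spec: dict) -> dict:
        # Apply each name/value pair onto a (toy) runtime spec:
        runtime_spec.setdefault("env", {}).update(env_vars)
        return runtime_spec

    return _apply


modifier = set_env_variables({"LOG_LEVEL": "debug"}, REGION="eu-west-1")
print(modifier({}))  # {'env': {'LOG_LEVEL': 'debug', 'REGION': 'eu-west-1'}}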
mlrun/projects/operations.py
CHANGED

@@ -13,7 +13,7 @@
 # limitations under the License.
 #
 import warnings
-from typing import
+from typing import Optional, Union

 import kfp

@@ -61,7 +61,7 @@ def run_function(
     hyperparams: dict = None,
     hyper_param_options: mlrun.model.HyperParamOptions = None,
     inputs: dict = None,
-    outputs:
+    outputs: list[str] = None,
     workdir: str = "",
     labels: dict = None,
     base_task: mlrun.model.RunTemplate = None,
@@ -73,8 +73,8 @@ def run_function(
     auto_build: bool = None,
     schedule: Union[str, mlrun.common.schemas.ScheduleCronTrigger] = None,
     artifact_path: str = None,
-    notifications:
-    returns: Optional[
+    notifications: list[mlrun.model.Notification] = None,
+    returns: Optional[list[Union[str, dict[str, str]]]] = None,
     builder_env: Optional[list] = None,
 ) -> Union[mlrun.model.RunObject, kfp.dsl.ContainerOp]:
     """Run a local or remote task as part of a local/kubeflow pipeline
@@ -239,7 +239,7 @@ def build_function(
     base_image=None,
     commands: list = None,
     secret_name=None,
-    requirements: Union[str,
+    requirements: Union[str, list[str]] = None,
     requirements_file: str = None,
     mlrun_version_specifier=None,
     builder_env: dict = None,
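The `returns` parameter of `run_function` is now typed `Optional[list[Union[str, dict[str, str]]]]`: each entry is either a log-hint string or an already-parsed configuration dict. A hedged sketch of normalizing such a list, using the key-colon-artifact_type convention described in the `parse_log_hint` docstring earlier; the helper name and parsing rule are illustrative assumptions:

from typing import Optional, Union


def normalize_returns(
    returns: Optional[list[Union[str, dict[str, str]]]] = None,
) -> list[dict[str, str]]:
    normalized = []
    for entry in returns or []:
        if isinstance(entry, dict):
            # Already a configuration dict - keep as is:
            normalized.append(entry)
            continue
        # String form: "key" or "key: artifact_type"
        key, _, artifact_type = entry.partition(":")
        hint = {"key": key.strip()}
        if artifact_type.strip():
            hint["artifact_type"] = artifact_type.strip()
        normalized.append(hint)
    return normalized


print(normalize_returns(["probabilities: dataset", {"key": "model"}]))
# [{'key': 'probabilities', 'artifact_type': 'dataset'}, {'key': 'model'}]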
mlrun/projects/pipelines.py
CHANGED

@@ -312,9 +312,9 @@ def _enrich_kfp_pod_security_context(kfp_pod_template, function):
 def _create_enriched_mlrun_workflow(
     self,
     pipeline_func: typing.Callable,
-    pipeline_name: typing.Optional[
-    pipeline_description: typing.Optional[
-    params_list: typing.Optional[
+    pipeline_name: typing.Optional[str] = None,
+    pipeline_description: typing.Optional[str] = None,
+    params_list: typing.Optional[list[dsl.PipelineParam]] = None,
     pipeline_conf: typing.Optional[dsl.PipelineConf] = None,
 ):
     """Call internal implementation of create_workflow and enrich with mlrun functions attributes"""
@@ -425,7 +425,7 @@ class _PipelineRunStatus:
     def __init__(
         self,
         run_id: str,
-        engine:
+        engine: type["_PipelineRunner"],
         project: "mlrun.projects.MlrunProject",
         workflow: WorkflowSpec = None,
         state: str = "",
@@ -496,7 +496,7 @@ class _PipelineRunner(abc.ABC):
         artifact_path=None,
         namespace=None,
         source=None,
-        notifications:
+        notifications: list[mlrun.model.Notification] = None,
     ) -> _PipelineRunStatus:
         pass

@@ -574,7 +574,7 @@ class _KFPRunner(_PipelineRunner):
         artifact_path=None,
         namespace=None,
         source=None,
-        notifications:
+        notifications: list[mlrun.model.Notification] = None,
     ) -> _PipelineRunStatus:
         pipeline_context.set(project, workflow_spec)
         workflow_handler = _PipelineRunner._get_handler(
@@ -717,7 +717,7 @@ class _LocalRunner(_PipelineRunner):
         artifact_path=None,
         namespace=None,
         source=None,
-        notifications:
+        notifications: list[mlrun.model.Notification] = None,
     ) -> _PipelineRunStatus:
         pipeline_context.set(project, workflow_spec)
         workflow_handler = _PipelineRunner._get_handler(
@@ -806,7 +806,7 @@ class _RemoteRunner(_PipelineRunner):
         artifact_path: str = None,
         namespace: str = None,
         source: str = None,
-        notifications:
+        notifications: list[mlrun.model.Notification] = None,
     ) -> typing.Optional[_PipelineRunStatus]:
         workflow_name = normalize_workflow_name(name=name, project_name=project.name)
         workflow_id = None
@@ -976,7 +976,7 @@ def load_and_run(
     save: bool = True,
     workflow_name: str = None,
     workflow_path: str = None,
-    workflow_arguments:
+    workflow_arguments: dict[str, typing.Any] = None,
     artifact_path: str = None,
     workflow_handler: typing.Union[str, typing.Callable] = None,
     namespace: str = None,