mlrun 1.6.0rc6__py3-none-any.whl → 1.6.0rc8__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of mlrun was flagged as potentially problematic.

Files changed (50)
  1. mlrun/__main__.py +32 -31
  2. mlrun/common/schemas/auth.py +2 -0
  3. mlrun/common/schemas/workflow.py +2 -0
  4. mlrun/config.py +3 -3
  5. mlrun/datastore/base.py +9 -3
  6. mlrun/datastore/datastore.py +10 -7
  7. mlrun/datastore/datastore_profile.py +19 -2
  8. mlrun/datastore/dbfs_store.py +6 -6
  9. mlrun/datastore/s3.py +6 -2
  10. mlrun/datastore/sources.py +12 -2
  11. mlrun/datastore/targets.py +43 -20
  12. mlrun/db/httpdb.py +22 -0
  13. mlrun/feature_store/feature_set.py +5 -2
  14. mlrun/feature_store/retrieval/spark_merger.py +7 -1
  15. mlrun/kfpops.py +1 -1
  16. mlrun/launcher/client.py +1 -6
  17. mlrun/launcher/remote.py +5 -3
  18. mlrun/model.py +2 -2
  19. mlrun/model_monitoring/batch_application.py +61 -94
  20. mlrun/package/packager.py +115 -89
  21. mlrun/package/packagers/default_packager.py +66 -65
  22. mlrun/package/packagers/numpy_packagers.py +109 -62
  23. mlrun/package/packagers/pandas_packagers.py +12 -23
  24. mlrun/package/packagers/python_standard_library_packagers.py +35 -57
  25. mlrun/package/packagers_manager.py +16 -13
  26. mlrun/package/utils/_pickler.py +8 -18
  27. mlrun/package/utils/_supported_format.py +1 -1
  28. mlrun/projects/pipelines.py +63 -4
  29. mlrun/projects/project.py +34 -11
  30. mlrun/runtimes/__init__.py +6 -0
  31. mlrun/runtimes/base.py +12 -1
  32. mlrun/runtimes/daskjob.py +73 -5
  33. mlrun/runtimes/databricks_job/databricks_runtime.py +2 -0
  34. mlrun/runtimes/function.py +53 -4
  35. mlrun/runtimes/kubejob.py +1 -1
  36. mlrun/runtimes/local.py +9 -9
  37. mlrun/runtimes/pod.py +1 -1
  38. mlrun/runtimes/remotesparkjob.py +1 -0
  39. mlrun/runtimes/serving.py +11 -1
  40. mlrun/runtimes/sparkjob/spark3job.py +4 -1
  41. mlrun/runtimes/utils.py +1 -46
  42. mlrun/utils/helpers.py +1 -17
  43. mlrun/utils/notifications/notification_pusher.py +27 -6
  44. mlrun/utils/version/version.json +2 -2
  45. {mlrun-1.6.0rc6.dist-info → mlrun-1.6.0rc8.dist-info}/METADATA +7 -6
  46. {mlrun-1.6.0rc6.dist-info → mlrun-1.6.0rc8.dist-info}/RECORD +50 -50
  47. {mlrun-1.6.0rc6.dist-info → mlrun-1.6.0rc8.dist-info}/WHEEL +1 -1
  48. {mlrun-1.6.0rc6.dist-info → mlrun-1.6.0rc8.dist-info}/LICENSE +0 -0
  49. {mlrun-1.6.0rc6.dist-info → mlrun-1.6.0rc8.dist-info}/entry_points.txt +0 -0
  50. {mlrun-1.6.0rc6.dist-info → mlrun-1.6.0rc8.dist-info}/top_level.txt +0 -0
mlrun/package/packager.py CHANGED
@@ -12,9 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-import pathlib
-import tempfile
-from abc import ABC, ABCMeta, abstractmethod
+from abc import ABC, abstractmethod
 from pathlib import Path
 from typing import Any, List, Tuple, Type, Union

@@ -24,53 +22,9 @@ from mlrun.datastore import DataItem
 from .utils import TypeHintUtils


-# TODO: When 3.7 is no longer supported, add "Packager" as reference type hint to cls (cls: Type["Packager"]) and other.
-class _PackagerMeta(ABCMeta):
+class Packager(ABC):
     """
-    Metaclass for `Packager` to override type class methods.
-    """
-
-    def __lt__(cls, other) -> bool:
-        """
-        A less than implementation to compare by priority in order to be able to sort the packagers by it.
-
-        :param other: The compared packager.
-
-        :return: True if priority is lower (means better) and False otherwise.
-        """
-        return cls.PRIORITY < other.PRIORITY
-
-    def __repr__(cls) -> str:
-        """
-        Get the string representation of a packager in the following format:
-        <packager name>(type=<handled type>, artifact_types=[<all supported artifact types>], priority=<priority>)
-
-        :return: The string representation of e packager.
-        """
-        # Get the packager info into variables:
-        packager_name = cls.__name__
-        handled_type = (
-            (
-                # Types have __name__ attribute but typing's types do not.
-                cls.PACKABLE_OBJECT_TYPE.__name__
-                if hasattr(cls.PACKABLE_OBJECT_TYPE, "__name__")
-                else str(cls.PACKABLE_OBJECT_TYPE)
-            )
-            if cls.PACKABLE_OBJECT_TYPE is not ...
-            else "Any"
-        )
-        supported_artifact_types = cls.get_supported_artifact_types()
-
-        # Return the string representation in the format noted above:
-        return (
-            f"{packager_name}(packable_type={handled_type}, artifact_types={supported_artifact_types}, "
-            f"priority={cls.PRIORITY})"
-        )
-
-
-class Packager(ABC, metaclass=_PackagerMeta):
-    """
-    The abstract base class for a packager. A packager is a static class that has two main duties:
+    The abstract base class for a packager. Packager has two main duties:

     1. **Packing** - get an object that was returned from a function and log it to MLRun. The user can specify packing
        configurations to the packager using log hints. The packed object can be an artifact or a result.
@@ -134,7 +88,7 @@ class Packager(ABC, metaclass=_PackagerMeta):
             with open("./some_file.txt", "w") as file:
                 file.write("Pack me")
             artifact = Artifact(key="my_artifact")
-            cls.add_future_clearing_path(path="./some_file.txt")
+            self.add_future_clearing_path(path="./some_file.txt")
             return artifact, None
     """

@@ -144,12 +98,16 @@
     #: The priority of this packager in the packagers collection of the manager (lower is better).
     PRIORITY: int = ...

-    # List of all paths to be deleted by the manager of this packager after logging the packages:
-    _CLEARING_PATH_LIST: List[str] = []
+    def __init__(self):
+        # Assign the packager's priority (notice that if it is equal to `...` then it will bbe overriden by the packager
+        # manager when collected):
+        self._priority = Packager.PRIORITY
+
+        # List of all paths to be deleted by the manager of this packager after logging the packages:
+        self._future_clearing_path_list: List[str] = []

-    @classmethod
     @abstractmethod
-    def get_default_packing_artifact_type(cls, obj: Any) -> str:
+    def get_default_packing_artifact_type(self, obj: Any) -> str:
         """
         Get the default artifact type used for packing. The method is used when an object is sent for packing
         without an artifact type noted by the user.
@@ -160,9 +118,8 @@ class Packager(ABC, metaclass=_PackagerMeta):
         """
         pass

-    @classmethod
     @abstractmethod
-    def get_default_unpacking_artifact_type(cls, data_item: DataItem) -> str:
+    def get_default_unpacking_artifact_type(self, data_item: DataItem) -> str:
         """
         Get the default artifact type used for unpacking a data item holding an object of this packager. The method
         is used when a data item is sent for unpacking without it being a package, but is a simple url or an old
@@ -174,9 +131,8 @@ class Packager(ABC, metaclass=_PackagerMeta):
         """
         pass

-    @classmethod
     @abstractmethod
-    def get_supported_artifact_types(cls) -> List[str]:
+    def get_supported_artifact_types(self) -> List[str]:
         """
         Get all the supported artifact types on this packager.

@@ -184,10 +140,9 @@
         """
         pass

-    @classmethod
     @abstractmethod
     def pack(
-        cls,
+        self,
         obj: Any,
         key: str = None,
         artifact_type: str = None,
@@ -206,10 +161,9 @@
         """
         pass

-    @classmethod
     @abstractmethod
     def unpack(
-        cls,
+        self,
         data_item: DataItem,
         artifact_type: str = None,
         instructions: dict = None,
@@ -225,9 +179,8 @@
         """
         pass

-    @classmethod
     def is_packable(
-        cls, obj: Any, artifact_type: str = None, configurations: dict = None
+        self, obj: Any, artifact_type: str = None, configurations: dict = None
     ) -> bool:
         """
         Check if this packager can pack an object of the provided type as the provided artifact type.
@@ -247,20 +200,19 @@

         # Validate the object type (ellipses means any type):
         if (
-            cls.PACKABLE_OBJECT_TYPE is not ...
-            and object_type != cls.PACKABLE_OBJECT_TYPE
+            self.PACKABLE_OBJECT_TYPE is not ...
+            and object_type != self.PACKABLE_OBJECT_TYPE
         ):
             return False

         # Validate the artifact type (if given):
-        if artifact_type and artifact_type not in cls.get_supported_artifact_types():
+        if artifact_type and artifact_type not in self.get_supported_artifact_types():
             return False

         return True

-    @classmethod
     def is_unpackable(
-        cls, data_item: DataItem, type_hint: Type, artifact_type: str = None
+        self, data_item: DataItem, type_hint: Type, artifact_type: str = None
     ) -> bool:
         """
         Check if this packager can unpack an input according to the user-given type hint and the provided artifact type.
@@ -275,44 +227,118 @@
         :return: True if unpackable and False otherwise.
         """
         # Check type (ellipses means any type):
-        if cls.PACKABLE_OBJECT_TYPE is not ...:
+        if self.PACKABLE_OBJECT_TYPE is not ...:
             if not TypeHintUtils.is_matching(
                 object_type=type_hint,  # The type hint is the expected object type the MLRun function wants.
-                type_hint=cls.PACKABLE_OBJECT_TYPE,
+                type_hint=self.PACKABLE_OBJECT_TYPE,
                 reduce_type_hint=False,
             ):
                 return False

         # Check the artifact type:
-        if artifact_type and artifact_type not in cls.get_supported_artifact_types():
+        if artifact_type and artifact_type not in self.get_supported_artifact_types():
             return False

         # Unpackable:
         return True

-    @classmethod
-    def add_future_clearing_path(
-        cls, path: Union[str, Path], add_temp_paths_only: bool = True
-    ):
+    def add_future_clearing_path(self, path: Union[str, Path]):
         """
         Mark a path to be cleared by this packager's manager after logging the packaged artifacts.

-        :param path:                The path to clear.
-        :param add_temp_paths_only: Whether to add only temporary files. When running locally on local files
-                                    ``DataItem.local()`` returns the local given path, which should not be deleted.
-                                    This flag helps to avoid deleting files in that scenario.
+        :param path: The path to clear post logging the artifacts.
+        """
+        self._future_clearing_path_list.append(str(path))
+
+    @property
+    def priority(self) -> int:
+        """
+        Get the packager's priority.
+
+        :return: The packager's priority.
+        """
+        return self._priority
+
+    @priority.setter
+    def priority(self, priority: int):
         """
-        if add_temp_paths_only:
-            if pathlib.Path(path).is_relative_to(tempfile.gettempdir()):
-                cls._CLEARING_PATH_LIST.append(str(path))
-            return
-        cls._CLEARING_PATH_LIST.append(str(path))
-
-    @classmethod
-    def get_future_clearing_path_list(cls) -> List[str]:
+        Set the packager's priority.
+
+        :param priority: The priority to set.
+        """
+        self._priority = priority
+
+    @property
+    def future_clearing_path_list(self) -> List[str]:
         """
         Get the packager's future clearing path list.

         :return: The clearing path list.
         """
-        return cls._CLEARING_PATH_LIST
+        return self._future_clearing_path_list
+
+    def __lt__(self, other: "Packager") -> bool:
+        """
+        A less than implementation to compare by priority in order to be able to sort the packagers by it.
+
+        :param other: The compared packager.
+
+        :return: True if priority is lower (means better) and False otherwise.
+        """
+        return self.priority < other.priority
+
+    def __repr__(self) -> str:
+        """
+        Get the string representation of a packager in the following format:
+        <packager name>(type=<handled type>, artifact_types=[<all supported artifact types>], priority=<priority>)
+
+        :return: The string representation of e packager.
+        """
+        # Get the packager info into variables:
+        packager_name = self.__class__.__name__
+        handled_type = (
+            (
+                # Types have __name__ attribute but typing's types do not.
+                self.PACKABLE_OBJECT_TYPE.__name__
+                if hasattr(self.PACKABLE_OBJECT_TYPE, "__name__")
+                else str(self.PACKABLE_OBJECT_TYPE)
+            )
+            if self.PACKABLE_OBJECT_TYPE is not ...
+            else "Any"
+        )
+        supported_artifact_types = self.get_supported_artifact_types()
+
+        # Return the string representation in the format noted above:
+        return (
+            f"{packager_name}(packable_type={handled_type}, artifact_types={supported_artifact_types}, "
+            f"priority={self.priority})"
+        )
+
+    def get_data_item_local_path(
+        self, data_item: DataItem, add_to_future_clearing_path: bool = None
+    ) -> str:
+        """
+        Get the local path to the item handled by the data item provided. The local path can be the same as the data
+        item in case the data item points to a local path, or will be downloaded to a temporary directory and return
+        this newly created temporary local path.
+
+        :param data_item:                   The data item to get its item local path.
+        :param add_to_future_clearing_path: Whether to add the local path to the future clearing paths list. If None, it
+                                            will add the path to the list only if the data item is not of kind 'file',
+                                            meaning it represents a local file and hence we don't want to delete it post
+                                            running automatically. We wish to delete it only if the local path is
+                                            temporary (and that will be in case kind is not 'file', so it is being
+                                            downloaded to a temporary directory).
+
+        :return: The data item local path.
+        """
+        # Get the local path to the item handled by the data item (download it to temporary if not local already):
+        local_path = data_item.local()
+
+        # Check if needed to add to the future clear list:
+        if add_to_future_clearing_path or (
+            add_to_future_clearing_path is None and data_item.kind != "file"
+        ):
+            self.add_future_clearing_path(path=local_path)
+
+        return local_path
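For illustration (this block is not part of the released diff): the hunks above replace the classmethod-based `Packager` and its `_PackagerMeta` metaclass with a plain instance-based ABC, so packagers are now instantiated, each instance carries its own priority and future-clearing path list, and `__lt__`/`__repr__` live on the class itself. The sketch below is a rough guess at what a custom packager written against this rc8 interface could look like, based only on the abstract method names and signatures visible in the hunks; the `JsonDictPackager` name, the "file" artifact type string, the temporary-file handling, and the trailing `configurations` parameter (truncated in the hunk) are assumptions.

import json
import os
import tempfile
from typing import Any, List

from mlrun.artifacts import Artifact
from mlrun.datastore import DataItem
from mlrun.package.packager import Packager  # module path taken from this diff


class JsonDictPackager(Packager):
    """Hypothetical packager for plain dicts, targeting the instance-based rc8 API."""

    PACKABLE_OBJECT_TYPE = dict
    PRIORITY = 4  # lower is better; `...` would be filled in by the packagers manager

    def get_default_packing_artifact_type(self, obj: Any) -> str:
        return "file"

    def get_default_unpacking_artifact_type(self, data_item: DataItem) -> str:
        return "file"

    def get_supported_artifact_types(self) -> List[str]:
        return ["file", "result"]

    def pack(self, obj: dict, key: str = None, artifact_type: str = None, configurations: dict = None):
        # Dump the dict to a temporary JSON file and log it as a file artifact:
        temp_path = os.path.join(tempfile.mkdtemp(), f"{key}.json")
        with open(temp_path, "w") as json_file:
            json.dump(obj, json_file)
        # Paths are now tracked per instance instead of in a class-level list:
        self.add_future_clearing_path(path=temp_path)
        return Artifact(key=key, src_path=temp_path), None

    def unpack(self, data_item: DataItem, artifact_type: str = None, instructions: dict = None) -> dict:
        # New helper added in this diff: downloads remote items and queues temporary copies for clearing.
        local_path = self.get_data_item_local_path(data_item=data_item)
        with open(local_path, "r") as json_file:
            return json.load(json_file)

Because `__lt__` now compares instance priorities, a packagers manager can order a list of packager instances best-first with a plain `sorted(...)` call.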
mlrun/package/packagers/default_packager.py CHANGED
@@ -13,6 +13,7 @@
 # limitations under the License.
 #
 import inspect
+from abc import ABCMeta
 from types import MethodType
 from typing import Any, List, Tuple, Type, Union

@@ -23,11 +24,11 @@ from mlrun.datastore import DataItem
 from mlrun.utils import logger

 from ..errors import MLRunPackagePackingError, MLRunPackageUnpackingError
-from ..packager import Packager, _PackagerMeta
+from ..packager import Packager
 from ..utils import DEFAULT_PICKLE_MODULE, ArtifactType, Pickler, TypeHintUtils


-class _DefaultPackagerMeta(_PackagerMeta):
+class _DefaultPackagerMeta(ABCMeta):
     """
     Metaclass for `DefaultPackager` to override `__doc__` attribute into a class property. This way sphinx will get a
     dynamically generated docstring that will include a summary of the packager.
@@ -50,7 +51,7 @@ class _DefaultPackagerMeta(_PackagerMeta):
         return super().__new__(mcls, name, bases, namespace, **kwargs)

     @property
-    def __doc__(cls) -> str:
+    def __doc__(cls: Type["DefaultPackager"]) -> str:
         """
         Override the `__doc__` attribute of a `DefaultPackager` to be a property in order to auto-summarize the
         packager's class docstring. The summary is concatenated after the original class doc string.
@@ -86,6 +87,13 @@

         :returns: The original docstring with the generated packager summary.
         """
+        # Create a packager instance:
+        packager = cls()
+
+        # Get the packager's name and module:
+        packager_name = packager.__class__.__name__
+        packager_module = packager.__module__
+
         # Get the original packager class doc string:
         packager_doc_string = cls._packager_doc.split("\n")
         packager_doc_string = "\n".join(line[4:] for line in packager_doc_string)
@@ -93,21 +101,23 @@
         # Parse the packable type section:
         type_name = (
             "Any type"
-            if cls.PACKABLE_OBJECT_TYPE is ...
+            if packager.PACKABLE_OBJECT_TYPE is ...
             else (
-                f"``{str(cls.PACKABLE_OBJECT_TYPE)}``"
-                if TypeHintUtils.is_typing_type(type_hint=cls.PACKABLE_OBJECT_TYPE)
-                else f"``{cls.PACKABLE_OBJECT_TYPE.__module__}.{cls.PACKABLE_OBJECT_TYPE.__name__}``"
+                f"``{str(packager.PACKABLE_OBJECT_TYPE)}``"
+                if TypeHintUtils.is_typing_type(type_hint=packager.PACKABLE_OBJECT_TYPE)
+                else f"``{packager.PACKABLE_OBJECT_TYPE.__module__}.{packager.PACKABLE_OBJECT_TYPE.__name__}``"
             )
         )
         packing_type = f"**Packing Type**: {type_name}"

         # Subclasses support section:
-        packing_sub_classes = f"**Packing Sub-Classes**: {cls.PACK_SUBCLASSES}"
+        packing_sub_classes = f"**Packing Sub-Classes**: {packager.PACK_SUBCLASSES}"

         # Priority section:
         priority_value = (
-            cls.PRIORITY if cls.PRIORITY is not ... else "Default priority (5)"
+            packager.priority
+            if packager.priority is not ...
+            else "Default priority (5)"
         )
         priority = f"**Priority**: {priority_value}"

@@ -117,9 +127,13 @@
             method_name = f"get_{pack_or_unpack}"
             argument_name = pack_or_unpack.upper()
             return (
-                getattr(cls, argument_name)
-                if cls.__name__ == "DefaultPackager" or method_name not in cls.__dict__
-                else f"Refer to the packager's :py:meth:`~{cls.__module__}.{cls.__name__}.{method_name}` method."
+                getattr(packager, argument_name)
+                if packager_name == "DefaultPackager"
+                or method_name not in packager.__class__.__dict__
+                else (
+                    f"Refer to the packager's "
+                    f":py:meth:`~{packager_module}.{packager_name}.{method_name}` method."
+                )
             )

         default_artifact_types = (
@@ -130,17 +144,17 @@

         # Artifact types section:
         artifact_types = "**Artifact Types**:"
-        for artifact_type in cls.get_supported_artifact_types():
+        for artifact_type in packager.get_supported_artifact_types():
             # Get the packing method docstring:
             method_doc = docstring_parser.parse(
-                getattr(cls, f"pack_{artifact_type}").__doc__
+                getattr(packager, f"pack_{artifact_type}").__doc__
             )
             # Add the artifact type bullet:
             artifact_type_doc = f"{method_doc.short_description or ''}{method_doc.long_description or ''}".replace(
                 "\n", ""
             )
             artifact_types += (
-                f"\n\n* :py:meth:`{artifact_type}<{cls.__module__}.{cls.__name__}.pack_{artifact_type}>` - "
+                f"\n\n* :py:meth:`{artifact_type}<{packager_module}.{packager_name}.pack_{artifact_type}>` - "
                 + artifact_type_doc
             )
             # Add the artifact type configurations (ignoring the `obj` and `key` parameters):
@@ -189,8 +203,7 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
       the artifact type was not provided, it uses the default). For example: if the artifact type is `x` then
       the class method ``pack_x`` must be implemented. The signature of each pack class method must be::

-          @classmethod
-          def pack_x(cls, obj: Any, key: str, ...) -> Union[Tuple[Artifact, dict], dict]:
+          def pack_x(self, obj: Any, key: str, ...) -> Union[Tuple[Artifact, dict], dict]:
               pass

      Where 'x' is the artifact type, 'obj' is the object to pack, `key` is the key to name the artifact and `...` are
@@ -205,8 +218,7 @@
      For example: if the artifact type stored within the ``DataItem`` is `x` then the class method
      ``unpack_x`` must be implemented. The signature of each unpack class method must be::

-          @classmethod
-          def unpack_x(cls, data_item: mlrun.DataItem, ...) -> Any:
+          def unpack_x(self, data_item: mlrun.DataItem, ...) -> Any:
               pass

      Where 'x' is the artifact type, 'data_item' is the artifact's data item to unpack, `...` are the instructions that
@@ -255,7 +267,7 @@
             with open("./some_file.txt", "w") as file:
                 file.write("Pack me")
             artifact = Artifact(key="my_artifact")
-            cls.future_clear(path="./some_file.txt")
+            self.add_future_clearing_path(path="./some_file.txt")
             return artifact, None

     """
@@ -272,8 +284,7 @@
     #: The default artifact type to unpack from.
     DEFAULT_UNPACKING_ARTIFACT_TYPE = ArtifactType.OBJECT

-    @classmethod
-    def get_default_packing_artifact_type(cls, obj: Any) -> str:
+    def get_default_packing_artifact_type(self, obj: Any) -> str:
         """
         Get the default artifact type for packing an object of this packager.

@@ -281,10 +292,9 @@

         :return: The default artifact type.
         """
-        return cls.DEFAULT_PACKING_ARTIFACT_TYPE
+        return self.DEFAULT_PACKING_ARTIFACT_TYPE

-    @classmethod
-    def get_default_unpacking_artifact_type(cls, data_item: DataItem) -> str:
+    def get_default_unpacking_artifact_type(self, data_item: DataItem) -> str:
         """
         Get the default artifact type used for unpacking a data item holding an object of this packager. The method
         is used when a data item is sent for unpacking without it being a package, but is a simple url or an old /
@@ -294,10 +304,9 @@

         :return: The default artifact type.
         """
-        return cls.DEFAULT_UNPACKING_ARTIFACT_TYPE
+        return self.DEFAULT_UNPACKING_ARTIFACT_TYPE

-    @classmethod
-    def get_supported_artifact_types(cls) -> List[str]:
+    def get_supported_artifact_types(self) -> List[str]:
         """
         Get all the supported artifact types on this packager.

@@ -307,13 +316,12 @@
         # unpacked. Result has no unpacking so we add it separately.
         return [
             key[len("pack_") :]
-            for key in dir(cls)
-            if key.startswith("pack_") and f"unpack_{key[len('pack_'):]}" in dir(cls)
+            for key in dir(self)
+            if key.startswith("pack_") and f"unpack_{key[len('pack_'):]}" in dir(self)
         ] + ["result"]

-    @classmethod
     def pack(
-        cls,
+        self,
         obj: Any,
         key: str = None,
         artifact_type: str = None,
@@ -332,16 +340,16 @@
         """
         # Get default artifact type in case it was not provided:
         if artifact_type is None:
-            artifact_type = cls.get_default_packing_artifact_type(obj=obj)
+            artifact_type = self.get_default_packing_artifact_type(obj=obj)

         # Set empty dictionary in case no configurations were given:
         configurations = configurations or {}

         # Get the packing method according to the artifact type:
-        pack_method = getattr(cls, f"pack_{artifact_type}")
+        pack_method = getattr(self, f"pack_{artifact_type}")

         # Validate correct configurations were passed:
-        cls._validate_method_arguments(
+        self._validate_method_arguments(
             method=pack_method,
             arguments=configurations,
             is_packing=True,
@@ -350,9 +358,8 @@
         # Call the packing method and return the package:
         return pack_method(obj=obj, key=key, **configurations)

-    @classmethod
     def unpack(
-        cls,
+        self,
         data_item: DataItem,
         artifact_type: str = None,
         instructions: dict = None,
@@ -371,16 +378,18 @@
         """
         # Get default artifact type in case it was not provided:
         if artifact_type is None:
-            artifact_type = cls.get_default_unpacking_artifact_type(data_item=data_item)
+            artifact_type = self.get_default_unpacking_artifact_type(
+                data_item=data_item
+            )

         # Set empty dictionary in case no instructions were given:
         instructions = instructions or {}

         # Get the unpacking method according to the artifact type:
-        unpack_method = getattr(cls, f"unpack_{artifact_type}")
+        unpack_method = getattr(self, f"unpack_{artifact_type}")

         # Validate correct instructions were passed:
-        cls._validate_method_arguments(
+        self._validate_method_arguments(
             method=unpack_method,
             arguments=instructions,
             is_packing=False,
@@ -389,9 +398,8 @@
         # Call the unpacking method and return the object:
         return unpack_method(data_item, **instructions)

-    @classmethod
     def is_packable(
-        cls, obj: Any, artifact_type: str = None, configurations: dict = None
+        self, obj: Any, artifact_type: str = None, configurations: dict = None
     ) -> bool:
         """
         Check if this packager can pack an object of the provided type as the provided artifact type.
@@ -410,11 +418,11 @@
         object_type = type(obj)

         # Check type (ellipses means any type):
-        if cls.PACKABLE_OBJECT_TYPE is not ...:
+        if self.PACKABLE_OBJECT_TYPE is not ...:
             if not TypeHintUtils.is_matching(
                 object_type=object_type,
-                type_hint=cls.PACKABLE_OBJECT_TYPE,
-                include_subclasses=cls.PACK_SUBCLASSES,
+                type_hint=self.PACKABLE_OBJECT_TYPE,
+                include_subclasses=self.PACK_SUBCLASSES,
                 reduce_type_hint=False,
             ):
                 return False
@@ -422,16 +430,15 @@
         # Check the artifact type:
         if (
             artifact_type is not None
-            and artifact_type not in cls.get_supported_artifact_types()
+            and artifact_type not in self.get_supported_artifact_types()
         ):
             return False

         # Packable:
         return True

-    @classmethod
     def pack_object(
-        cls,
+        self,
         obj: Any,
         key: str,
         pickle_module_name: str = DEFAULT_PICKLE_MODULE,
@@ -454,12 +461,11 @@
         artifact = Artifact(key=key, src_path=pickle_path)

         # Add the pickle path to the clearing list:
-        cls.add_future_clearing_path(path=pickle_path)
+        self.add_future_clearing_path(path=pickle_path)

         return artifact, instructions

-    @classmethod
-    def pack_result(cls, obj: Any, key: str) -> dict:
+    def pack_result(self, obj: Any, key: str) -> dict:
         """
         Pack an object as a result.

@@ -470,9 +476,8 @@
         """
         return {key: obj}

-    @classmethod
     def unpack_object(
-        cls,
+        self,
         data_item: DataItem,
         pickle_module_name: str = DEFAULT_PICKLE_MODULE,
         object_module_name: str = None,
@@ -500,10 +505,7 @@
         :return: The un-pickled python object.
         """
         # Get the pkl file to local directory:
-        pickle_path = data_item.local()
-
-        # Add the pickle path to the clearing list:
-        cls.add_future_clearing_path(path=pickle_path)
+        pickle_path = self.get_data_item_local_path(data_item=data_item)

         # Unpickle and return:
         return Pickler.unpickle(
@@ -515,9 +517,8 @@
             object_module_version=object_module_version,
         )

-    @classmethod
     def _validate_method_arguments(
-        cls, method: MethodType, arguments: dict, is_packing: bool
+        self, method: MethodType, arguments: dict, is_packing: bool
     ):
         """
         Validate keyword arguments to pass to a method. Used for validating log hint configurations for packing methods
@@ -561,13 +562,13 @@
         if missing_arguments:
             if is_packing:
                 raise MLRunPackagePackingError(
-                    f"The packager '{cls.__name__}' could not pack the package due to missing configurations: "
-                    f"{', '.join(missing_arguments)}. Add the missing arguments to the log hint of this object in "
-                    f"order to pack it. Make sure you pass a dictionary log hint and not a string in order to pass "
-                    f"configurations in the log hint."
+                    f"The packager '{self.__class__.__name__}' could not pack the package due to missing "
+                    f"configurations: {', '.join(missing_arguments)}. Add the missing arguments to the log hint of "
+                    f"this object in order to pack it. Make sure you pass a dictionary log hint and not a string in "
+                    f"order to pass configurations in the log hint."
                 )
             raise MLRunPackageUnpackingError(
-                f"The packager '{cls.__name__}' could not unpack the package due to missing instructions: "
+                f"The packager '{self.__class__.__name__}' could not unpack the package due to missing instructions: "
                 f"{', '.join(missing_arguments)}. Missing instructions are likely due to an update in the packager's "
                 f"code that not support the old implementation. This backward compatibility should not occur. To "
                 f"overcome it, try to edit the instructions in the artifact's spec to enable unpacking it again."
@@ -580,7 +581,7 @@
         if incorrect_arguments:
             arguments_type = "configurations" if is_packing else "instructions"
             logger.warn(
-                f"Unexpected {arguments_type} given for {cls.__name__}: {', '.join(incorrect_arguments)}. "
+                f"Unexpected {arguments_type} given for {self.__class__.__name__}: {', '.join(incorrect_arguments)}. "
                 f"Possible {arguments_type} are: {', '.join(possible_arguments.keys())}. The packager tries to "
                 f"continue by ignoring the incorrect arguments."
             )
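For illustration (this block is not part of the released diff): the default_packager.py hunks mirror the same move from classmethods to instance methods. `_DefaultPackagerMeta` now derives from `ABCMeta` directly and builds its Sphinx summary from a temporary `cls()` instance, and `unpack_object` goes through the new `get_data_item_local_path` helper instead of calling `data_item.local()` and queuing the path itself. A custom packager built on `DefaultPackager` would therefore implement its `pack_<type>` / `unpack_<type>` hooks as plain instance methods, roughly as sketched below; the `TextPackager` name, the "file" artifact type string, and the `encoding` instruction are hypothetical.

from mlrun.artifacts import Artifact
from mlrun.datastore import DataItem
from mlrun.package.packagers.default_packager import DefaultPackager  # module path taken from this diff


class TextPackager(DefaultPackager):
    """Hypothetical packager for str objects on top of the rc8 DefaultPackager."""

    PACKABLE_OBJECT_TYPE = str
    DEFAULT_PACKING_ARTIFACT_TYPE = "file"
    DEFAULT_UNPACKING_ARTIFACT_TYPE = "file"

    def pack_file(self, obj: str, key: str, encoding: str = "utf-8"):
        # pack_<artifact_type> hooks are plain instance methods now (no @classmethod):
        file_path = f"{key}.txt"
        with open(file_path, "w", encoding=encoding) as text_file:
            text_file.write(obj)
        self.add_future_clearing_path(path=file_path)
        # The instructions dict is stored in the artifact's spec and handed back to unpack_file later:
        return Artifact(key=key, src_path=file_path), {"encoding": encoding}

    def unpack_file(self, data_item: DataItem, encoding: str = "utf-8") -> str:
        # get_data_item_local_path() downloads remote items and marks temporary copies for clearing:
        local_path = self.get_data_item_local_path(data_item=data_item)
        with open(local_path, "r", encoding=encoding) as text_file:
            return text_file.read()

With this layout, `get_supported_artifact_types()` discovers the "file" type from the matching pack_file/unpack_file pair, and `_validate_method_arguments` checks the `encoding` instruction against the hook signatures, as shown in the hunks above.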