mlrun 1.6.0rc7__py3-none-any.whl → 1.6.0rc8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mlrun might be problematic; see the registry's advisory page for more details.

Files changed (38)
  1. mlrun/__main__.py +27 -27
  2. mlrun/common/schemas/auth.py +2 -0
  3. mlrun/config.py +2 -2
  4. mlrun/datastore/dbfs_store.py +0 -3
  5. mlrun/datastore/sources.py +12 -2
  6. mlrun/datastore/targets.py +3 -0
  7. mlrun/db/httpdb.py +15 -0
  8. mlrun/feature_store/feature_set.py +5 -2
  9. mlrun/feature_store/retrieval/spark_merger.py +7 -1
  10. mlrun/kfpops.py +1 -1
  11. mlrun/launcher/client.py +1 -6
  12. mlrun/launcher/remote.py +5 -3
  13. mlrun/model.py +1 -1
  14. mlrun/model_monitoring/batch_application.py +48 -85
  15. mlrun/package/packager.py +115 -89
  16. mlrun/package/packagers/default_packager.py +66 -65
  17. mlrun/package/packagers/numpy_packagers.py +109 -62
  18. mlrun/package/packagers/pandas_packagers.py +12 -23
  19. mlrun/package/packagers/python_standard_library_packagers.py +35 -57
  20. mlrun/package/packagers_manager.py +16 -13
  21. mlrun/package/utils/_pickler.py +8 -18
  22. mlrun/package/utils/_supported_format.py +1 -1
  23. mlrun/projects/pipelines.py +11 -6
  24. mlrun/projects/project.py +11 -4
  25. mlrun/runtimes/__init__.py +6 -0
  26. mlrun/runtimes/base.py +8 -0
  27. mlrun/runtimes/daskjob.py +73 -5
  28. mlrun/runtimes/local.py +9 -9
  29. mlrun/runtimes/remotesparkjob.py +1 -0
  30. mlrun/runtimes/utils.py +1 -1
  31. mlrun/utils/notifications/notification_pusher.py +1 -1
  32. mlrun/utils/version/version.json +2 -2
  33. {mlrun-1.6.0rc7.dist-info → mlrun-1.6.0rc8.dist-info}/METADATA +2 -2
  34. {mlrun-1.6.0rc7.dist-info → mlrun-1.6.0rc8.dist-info}/RECORD +38 -38
  35. {mlrun-1.6.0rc7.dist-info → mlrun-1.6.0rc8.dist-info}/WHEEL +1 -1
  36. {mlrun-1.6.0rc7.dist-info → mlrun-1.6.0rc8.dist-info}/LICENSE +0 -0
  37. {mlrun-1.6.0rc7.dist-info → mlrun-1.6.0rc8.dist-info}/entry_points.txt +0 -0
  38. {mlrun-1.6.0rc7.dist-info → mlrun-1.6.0rc8.dist-info}/top_level.txt +0 -0
@@ -13,6 +13,7 @@
13
13
  # limitations under the License.
14
14
  #
15
15
  import inspect
16
+ from abc import ABCMeta
16
17
  from types import MethodType
17
18
  from typing import Any, List, Tuple, Type, Union
18
19
 
@@ -23,11 +24,11 @@ from mlrun.datastore import DataItem
23
24
  from mlrun.utils import logger
24
25
 
25
26
  from ..errors import MLRunPackagePackingError, MLRunPackageUnpackingError
26
- from ..packager import Packager, _PackagerMeta
27
+ from ..packager import Packager
27
28
  from ..utils import DEFAULT_PICKLE_MODULE, ArtifactType, Pickler, TypeHintUtils
28
29
 
29
30
 
30
- class _DefaultPackagerMeta(_PackagerMeta):
31
+ class _DefaultPackagerMeta(ABCMeta):
31
32
  """
32
33
  Metaclass for `DefaultPackager` to override `__doc__` attribute into a class property. This way sphinx will get a
33
34
  dynamically generated docstring that will include a summary of the packager.
@@ -50,7 +51,7 @@ class _DefaultPackagerMeta(_PackagerMeta):
50
51
  return super().__new__(mcls, name, bases, namespace, **kwargs)
51
52
 
52
53
  @property
53
- def __doc__(cls) -> str:
54
+ def __doc__(cls: Type["DefaultPackager"]) -> str:
54
55
  """
55
56
  Override the `__doc__` attribute of a `DefaultPackager` to be a property in order to auto-summarize the
56
57
  packager's class docstring. The summary is concatenated after the original class doc string.
@@ -86,6 +87,13 @@ class _DefaultPackagerMeta(_PackagerMeta):
86
87
 
87
88
  :returns: The original docstring with the generated packager summary.
88
89
  """
90
+ # Create a packager instance:
91
+ packager = cls()
92
+
93
+ # Get the packager's name and module:
94
+ packager_name = packager.__class__.__name__
95
+ packager_module = packager.__module__
96
+
89
97
  # Get the original packager class doc string:
90
98
  packager_doc_string = cls._packager_doc.split("\n")
91
99
  packager_doc_string = "\n".join(line[4:] for line in packager_doc_string)
@@ -93,21 +101,23 @@ class _DefaultPackagerMeta(_PackagerMeta):
93
101
  # Parse the packable type section:
94
102
  type_name = (
95
103
  "Any type"
96
- if cls.PACKABLE_OBJECT_TYPE is ...
104
+ if packager.PACKABLE_OBJECT_TYPE is ...
97
105
  else (
98
- f"``{str(cls.PACKABLE_OBJECT_TYPE)}``"
99
- if TypeHintUtils.is_typing_type(type_hint=cls.PACKABLE_OBJECT_TYPE)
100
- else f"``{cls.PACKABLE_OBJECT_TYPE.__module__}.{cls.PACKABLE_OBJECT_TYPE.__name__}``"
106
+ f"``{str(packager.PACKABLE_OBJECT_TYPE)}``"
107
+ if TypeHintUtils.is_typing_type(type_hint=packager.PACKABLE_OBJECT_TYPE)
108
+ else f"``{packager.PACKABLE_OBJECT_TYPE.__module__}.{packager.PACKABLE_OBJECT_TYPE.__name__}``"
101
109
  )
102
110
  )
103
111
  packing_type = f"**Packing Type**: {type_name}"
104
112
 
105
113
  # Subclasses support section:
106
- packing_sub_classes = f"**Packing Sub-Classes**: {cls.PACK_SUBCLASSES}"
114
+ packing_sub_classes = f"**Packing Sub-Classes**: {packager.PACK_SUBCLASSES}"
107
115
 
108
116
  # Priority section:
109
117
  priority_value = (
110
- cls.PRIORITY if cls.PRIORITY is not ... else "Default priority (5)"
118
+ packager.priority
119
+ if packager.priority is not ...
120
+ else "Default priority (5)"
111
121
  )
112
122
  priority = f"**Priority**: {priority_value}"
113
123
 
@@ -117,9 +127,13 @@ class _DefaultPackagerMeta(_PackagerMeta):
117
127
  method_name = f"get_{pack_or_unpack}"
118
128
  argument_name = pack_or_unpack.upper()
119
129
  return (
120
- getattr(cls, argument_name)
121
- if cls.__name__ == "DefaultPackager" or method_name not in cls.__dict__
122
- else f"Refer to the packager's :py:meth:`~{cls.__module__}.{cls.__name__}.{method_name}` method."
130
+ getattr(packager, argument_name)
131
+ if packager_name == "DefaultPackager"
132
+ or method_name not in packager.__class__.__dict__
133
+ else (
134
+ f"Refer to the packager's "
135
+ f":py:meth:`~{packager_module}.{packager_name}.{method_name}` method."
136
+ )
123
137
  )
124
138
 
125
139
  default_artifact_types = (
@@ -130,17 +144,17 @@ class _DefaultPackagerMeta(_PackagerMeta):
130
144
 
131
145
  # Artifact types section:
132
146
  artifact_types = "**Artifact Types**:"
133
- for artifact_type in cls.get_supported_artifact_types():
147
+ for artifact_type in packager.get_supported_artifact_types():
134
148
  # Get the packing method docstring:
135
149
  method_doc = docstring_parser.parse(
136
- getattr(cls, f"pack_{artifact_type}").__doc__
150
+ getattr(packager, f"pack_{artifact_type}").__doc__
137
151
  )
138
152
  # Add the artifact type bullet:
139
153
  artifact_type_doc = f"{method_doc.short_description or ''}{method_doc.long_description or ''}".replace(
140
154
  "\n", ""
141
155
  )
142
156
  artifact_types += (
143
- f"\n\n* :py:meth:`{artifact_type}<{cls.__module__}.{cls.__name__}.pack_{artifact_type}>` - "
157
+ f"\n\n* :py:meth:`{artifact_type}<{packager_module}.{packager_name}.pack_{artifact_type}>` - "
144
158
  + artifact_type_doc
145
159
  )
146
160
  # Add the artifact type configurations (ignoring the `obj` and `key` parameters):
@@ -189,8 +203,7 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
189
203
  the artifact type was not provided, it uses the default). For example: if the artifact type is `x` then
190
204
  the class method ``pack_x`` must be implemented. The signature of each pack class method must be::
191
205
 
192
- @classmethod
193
- def pack_x(cls, obj: Any, key: str, ...) -> Union[Tuple[Artifact, dict], dict]:
206
+ def pack_x(self, obj: Any, key: str, ...) -> Union[Tuple[Artifact, dict], dict]:
194
207
  pass
195
208
 
196
209
  Where 'x' is the artifact type, 'obj' is the object to pack, `key` is the key to name the artifact and `...` are
@@ -205,8 +218,7 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
205
218
  For example: if the artifact type stored within the ``DataItem`` is `x` then the class method
206
219
  ``unpack_x`` must be implemented. The signature of each unpack class method must be::
207
220
 
208
- @classmethod
209
- def unpack_x(cls, data_item: mlrun.DataItem, ...) -> Any:
221
+ def unpack_x(self, data_item: mlrun.DataItem, ...) -> Any:
210
222
  pass
211
223
 
212
224
  Where 'x' is the artifact type, 'data_item' is the artifact's data item to unpack, `...` are the instructions that
@@ -255,7 +267,7 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
255
267
  with open("./some_file.txt", "w") as file:
256
268
  file.write("Pack me")
257
269
  artifact = Artifact(key="my_artifact")
258
- cls.future_clear(path="./some_file.txt")
270
+ self.add_future_clearing_path(path="./some_file.txt")
259
271
  return artifact, None
260
272
 
261
273
  """
@@ -272,8 +284,7 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
272
284
  #: The default artifact type to unpack from.
273
285
  DEFAULT_UNPACKING_ARTIFACT_TYPE = ArtifactType.OBJECT
274
286
 
275
- @classmethod
276
- def get_default_packing_artifact_type(cls, obj: Any) -> str:
287
+ def get_default_packing_artifact_type(self, obj: Any) -> str:
277
288
  """
278
289
  Get the default artifact type for packing an object of this packager.
279
290
 
@@ -281,10 +292,9 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
281
292
 
282
293
  :return: The default artifact type.
283
294
  """
284
- return cls.DEFAULT_PACKING_ARTIFACT_TYPE
295
+ return self.DEFAULT_PACKING_ARTIFACT_TYPE
285
296
 
286
- @classmethod
287
- def get_default_unpacking_artifact_type(cls, data_item: DataItem) -> str:
297
+ def get_default_unpacking_artifact_type(self, data_item: DataItem) -> str:
288
298
  """
289
299
  Get the default artifact type used for unpacking a data item holding an object of this packager. The method
290
300
  is used when a data item is sent for unpacking without it being a package, but is a simple url or an old /
@@ -294,10 +304,9 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
294
304
 
295
305
  :return: The default artifact type.
296
306
  """
297
- return cls.DEFAULT_UNPACKING_ARTIFACT_TYPE
307
+ return self.DEFAULT_UNPACKING_ARTIFACT_TYPE
298
308
 
299
- @classmethod
300
- def get_supported_artifact_types(cls) -> List[str]:
309
+ def get_supported_artifact_types(self) -> List[str]:
301
310
  """
302
311
  Get all the supported artifact types on this packager.
303
312
 
@@ -307,13 +316,12 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
307
316
  # unpacked. Result has no unpacking so we add it separately.
308
317
  return [
309
318
  key[len("pack_") :]
310
- for key in dir(cls)
311
- if key.startswith("pack_") and f"unpack_{key[len('pack_'):]}" in dir(cls)
319
+ for key in dir(self)
320
+ if key.startswith("pack_") and f"unpack_{key[len('pack_'):]}" in dir(self)
312
321
  ] + ["result"]
313
322
 
314
- @classmethod
315
323
  def pack(
316
- cls,
324
+ self,
317
325
  obj: Any,
318
326
  key: str = None,
319
327
  artifact_type: str = None,
@@ -332,16 +340,16 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
332
340
  """
333
341
  # Get default artifact type in case it was not provided:
334
342
  if artifact_type is None:
335
- artifact_type = cls.get_default_packing_artifact_type(obj=obj)
343
+ artifact_type = self.get_default_packing_artifact_type(obj=obj)
336
344
 
337
345
  # Set empty dictionary in case no configurations were given:
338
346
  configurations = configurations or {}
339
347
 
340
348
  # Get the packing method according to the artifact type:
341
- pack_method = getattr(cls, f"pack_{artifact_type}")
349
+ pack_method = getattr(self, f"pack_{artifact_type}")
342
350
 
343
351
  # Validate correct configurations were passed:
344
- cls._validate_method_arguments(
352
+ self._validate_method_arguments(
345
353
  method=pack_method,
346
354
  arguments=configurations,
347
355
  is_packing=True,
@@ -350,9 +358,8 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
350
358
  # Call the packing method and return the package:
351
359
  return pack_method(obj=obj, key=key, **configurations)
352
360
 
353
- @classmethod
354
361
  def unpack(
355
- cls,
362
+ self,
356
363
  data_item: DataItem,
357
364
  artifact_type: str = None,
358
365
  instructions: dict = None,
@@ -371,16 +378,18 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
371
378
  """
372
379
  # Get default artifact type in case it was not provided:
373
380
  if artifact_type is None:
374
- artifact_type = cls.get_default_unpacking_artifact_type(data_item=data_item)
381
+ artifact_type = self.get_default_unpacking_artifact_type(
382
+ data_item=data_item
383
+ )
375
384
 
376
385
  # Set empty dictionary in case no instructions were given:
377
386
  instructions = instructions or {}
378
387
 
379
388
  # Get the unpacking method according to the artifact type:
380
- unpack_method = getattr(cls, f"unpack_{artifact_type}")
389
+ unpack_method = getattr(self, f"unpack_{artifact_type}")
381
390
 
382
391
  # Validate correct instructions were passed:
383
- cls._validate_method_arguments(
392
+ self._validate_method_arguments(
384
393
  method=unpack_method,
385
394
  arguments=instructions,
386
395
  is_packing=False,
@@ -389,9 +398,8 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
389
398
  # Call the unpacking method and return the object:
390
399
  return unpack_method(data_item, **instructions)
391
400
 
392
- @classmethod
393
401
  def is_packable(
394
- cls, obj: Any, artifact_type: str = None, configurations: dict = None
402
+ self, obj: Any, artifact_type: str = None, configurations: dict = None
395
403
  ) -> bool:
396
404
  """
397
405
  Check if this packager can pack an object of the provided type as the provided artifact type.
@@ -410,11 +418,11 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
410
418
  object_type = type(obj)
411
419
 
412
420
  # Check type (ellipses means any type):
413
- if cls.PACKABLE_OBJECT_TYPE is not ...:
421
+ if self.PACKABLE_OBJECT_TYPE is not ...:
414
422
  if not TypeHintUtils.is_matching(
415
423
  object_type=object_type,
416
- type_hint=cls.PACKABLE_OBJECT_TYPE,
417
- include_subclasses=cls.PACK_SUBCLASSES,
424
+ type_hint=self.PACKABLE_OBJECT_TYPE,
425
+ include_subclasses=self.PACK_SUBCLASSES,
418
426
  reduce_type_hint=False,
419
427
  ):
420
428
  return False
@@ -422,16 +430,15 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
422
430
  # Check the artifact type:
423
431
  if (
424
432
  artifact_type is not None
425
- and artifact_type not in cls.get_supported_artifact_types()
433
+ and artifact_type not in self.get_supported_artifact_types()
426
434
  ):
427
435
  return False
428
436
 
429
437
  # Packable:
430
438
  return True
431
439
 
432
- @classmethod
433
440
  def pack_object(
434
- cls,
441
+ self,
435
442
  obj: Any,
436
443
  key: str,
437
444
  pickle_module_name: str = DEFAULT_PICKLE_MODULE,
@@ -454,12 +461,11 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
454
461
  artifact = Artifact(key=key, src_path=pickle_path)
455
462
 
456
463
  # Add the pickle path to the clearing list:
457
- cls.add_future_clearing_path(path=pickle_path)
464
+ self.add_future_clearing_path(path=pickle_path)
458
465
 
459
466
  return artifact, instructions
460
467
 
461
- @classmethod
462
- def pack_result(cls, obj: Any, key: str) -> dict:
468
+ def pack_result(self, obj: Any, key: str) -> dict:
463
469
  """
464
470
  Pack an object as a result.
465
471
 
@@ -470,9 +476,8 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
470
476
  """
471
477
  return {key: obj}
472
478
 
473
- @classmethod
474
479
  def unpack_object(
475
- cls,
480
+ self,
476
481
  data_item: DataItem,
477
482
  pickle_module_name: str = DEFAULT_PICKLE_MODULE,
478
483
  object_module_name: str = None,
@@ -500,10 +505,7 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
500
505
  :return: The un-pickled python object.
501
506
  """
502
507
  # Get the pkl file to local directory:
503
- pickle_path = data_item.local()
504
-
505
- # Add the pickle path to the clearing list:
506
- cls.add_future_clearing_path(path=pickle_path)
508
+ pickle_path = self.get_data_item_local_path(data_item=data_item)
507
509
 
508
510
  # Unpickle and return:
509
511
  return Pickler.unpickle(
@@ -515,9 +517,8 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
515
517
  object_module_version=object_module_version,
516
518
  )
517
519
 
518
- @classmethod
519
520
  def _validate_method_arguments(
520
- cls, method: MethodType, arguments: dict, is_packing: bool
521
+ self, method: MethodType, arguments: dict, is_packing: bool
521
522
  ):
522
523
  """
523
524
  Validate keyword arguments to pass to a method. Used for validating log hint configurations for packing methods
@@ -561,13 +562,13 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
561
562
  if missing_arguments:
562
563
  if is_packing:
563
564
  raise MLRunPackagePackingError(
564
- f"The packager '{cls.__name__}' could not pack the package due to missing configurations: "
565
- f"{', '.join(missing_arguments)}. Add the missing arguments to the log hint of this object in "
566
- f"order to pack it. Make sure you pass a dictionary log hint and not a string in order to pass "
567
- f"configurations in the log hint."
565
+ f"The packager '{self.__class__.__name__}' could not pack the package due to missing "
566
+ f"configurations: {', '.join(missing_arguments)}. Add the missing arguments to the log hint of "
567
+ f"this object in order to pack it. Make sure you pass a dictionary log hint and not a string in "
568
+ f"order to pass configurations in the log hint."
568
569
  )
569
570
  raise MLRunPackageUnpackingError(
570
- f"The packager '{cls.__name__}' could not unpack the package due to missing instructions: "
571
+ f"The packager '{self.__class__.__name__}' could not unpack the package due to missing instructions: "
571
572
  f"{', '.join(missing_arguments)}. Missing instructions are likely due to an update in the packager's "
572
573
  f"code that not support the old implementation. This backward compatibility should not occur. To "
573
574
  f"overcome it, try to edit the instructions in the artifact's spec to enable unpacking it again."
@@ -580,7 +581,7 @@ class DefaultPackager(Packager, metaclass=_DefaultPackagerMeta):
580
581
  if incorrect_arguments:
581
582
  arguments_type = "configurations" if is_packing else "instructions"
582
583
  logger.warn(
583
- f"Unexpected {arguments_type} given for {cls.__name__}: {', '.join(incorrect_arguments)}. "
584
+ f"Unexpected {arguments_type} given for {self.__class__.__name__}: {', '.join(incorrect_arguments)}. "
584
585
  f"Possible {arguments_type} are: {', '.join(possible_arguments.keys())}. The packager tries to "
585
586
  f"continue by ignoring the incorrect arguments."
586
587
  )