mlrun 1.6.0rc6__py3-none-any.whl → 1.6.0rc8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mlrun might be problematic.

Files changed (50)
  1. mlrun/__main__.py +32 -31
  2. mlrun/common/schemas/auth.py +2 -0
  3. mlrun/common/schemas/workflow.py +2 -0
  4. mlrun/config.py +3 -3
  5. mlrun/datastore/base.py +9 -3
  6. mlrun/datastore/datastore.py +10 -7
  7. mlrun/datastore/datastore_profile.py +19 -2
  8. mlrun/datastore/dbfs_store.py +6 -6
  9. mlrun/datastore/s3.py +6 -2
  10. mlrun/datastore/sources.py +12 -2
  11. mlrun/datastore/targets.py +43 -20
  12. mlrun/db/httpdb.py +22 -0
  13. mlrun/feature_store/feature_set.py +5 -2
  14. mlrun/feature_store/retrieval/spark_merger.py +7 -1
  15. mlrun/kfpops.py +1 -1
  16. mlrun/launcher/client.py +1 -6
  17. mlrun/launcher/remote.py +5 -3
  18. mlrun/model.py +2 -2
  19. mlrun/model_monitoring/batch_application.py +61 -94
  20. mlrun/package/packager.py +115 -89
  21. mlrun/package/packagers/default_packager.py +66 -65
  22. mlrun/package/packagers/numpy_packagers.py +109 -62
  23. mlrun/package/packagers/pandas_packagers.py +12 -23
  24. mlrun/package/packagers/python_standard_library_packagers.py +35 -57
  25. mlrun/package/packagers_manager.py +16 -13
  26. mlrun/package/utils/_pickler.py +8 -18
  27. mlrun/package/utils/_supported_format.py +1 -1
  28. mlrun/projects/pipelines.py +63 -4
  29. mlrun/projects/project.py +34 -11
  30. mlrun/runtimes/__init__.py +6 -0
  31. mlrun/runtimes/base.py +12 -1
  32. mlrun/runtimes/daskjob.py +73 -5
  33. mlrun/runtimes/databricks_job/databricks_runtime.py +2 -0
  34. mlrun/runtimes/function.py +53 -4
  35. mlrun/runtimes/kubejob.py +1 -1
  36. mlrun/runtimes/local.py +9 -9
  37. mlrun/runtimes/pod.py +1 -1
  38. mlrun/runtimes/remotesparkjob.py +1 -0
  39. mlrun/runtimes/serving.py +11 -1
  40. mlrun/runtimes/sparkjob/spark3job.py +4 -1
  41. mlrun/runtimes/utils.py +1 -46
  42. mlrun/utils/helpers.py +1 -17
  43. mlrun/utils/notifications/notification_pusher.py +27 -6
  44. mlrun/utils/version/version.json +2 -2
  45. {mlrun-1.6.0rc6.dist-info → mlrun-1.6.0rc8.dist-info}/METADATA +7 -6
  46. {mlrun-1.6.0rc6.dist-info → mlrun-1.6.0rc8.dist-info}/RECORD +50 -50
  47. {mlrun-1.6.0rc6.dist-info → mlrun-1.6.0rc8.dist-info}/WHEEL +1 -1
  48. {mlrun-1.6.0rc6.dist-info → mlrun-1.6.0rc8.dist-info}/LICENSE +0 -0
  49. {mlrun-1.6.0rc6.dist-info → mlrun-1.6.0rc8.dist-info}/entry_points.txt +0 -0
  50. {mlrun-1.6.0rc6.dist-info → mlrun-1.6.0rc8.dist-info}/top_level.txt +0 -0
mlrun/package/packagers/python_standard_library_packagers.py

@@ -45,8 +45,7 @@ class NonePackager(DefaultPackager):
     DEFAULT_PACKING_ARTIFACT_TYPE = ArtifactType.RESULT
 
     # TODO: `None` as pickle will be available from Python 3.10, so this method can be removed once we move to 3.10.
-    @classmethod
-    def get_supported_artifact_types(cls) -> List[str]:
+    def get_supported_artifact_types(self) -> List[str]:
         """
         Get all the supported artifact types on this packager. It will be the same as `DefaultPackager` but without the
         'object' artifact type support (None cannot be pickled, only from Python 3.10, and it should not be pickled
@@ -95,9 +94,8 @@ class StrPackager(DefaultPackager):
     DEFAULT_PACKING_ARTIFACT_TYPE = ArtifactType.RESULT
     DEFAULT_UNPACKING_ARTIFACT_TYPE = ArtifactType.PATH
 
-    @classmethod
     def pack_path(
-        cls, obj: str, key: str, archive_format: str = DEFAULT_ARCHIVE_FORMAT
+        self, obj: str, key: str, archive_format: str = DEFAULT_ARCHIVE_FORMAT
     ) -> Tuple[Artifact, dict]:
         """
         Pack a path string value content (pack the file or directory in that path).
@@ -138,9 +136,11 @@ class StrPackager(DefaultPackager):
 
         return artifact, instructions
 
-    @classmethod
     def unpack_path(
-        cls, data_item: DataItem, is_directory: bool = False, archive_format: str = None
+        self,
+        data_item: DataItem,
+        is_directory: bool = False,
+        archive_format: str = None,
     ) -> str:
         """
         Unpack a data item representing a path string. If the path is of a file, the file is downloaded to a local
@@ -155,11 +155,8 @@ class StrPackager(DefaultPackager):
 
         :return: The unpacked string.
         """
-        # Get the file to a local temporary directory:
-        path = data_item.local()
-
-        # Mark the downloaded file for future clear:
-        cls.add_future_clearing_path(path=path)
+        # Get the file:
+        path = self.get_data_item_local_path(data_item=data_item)
 
         # If it's not a directory, return the file path. Otherwise, it should be extracted according to the archive
         # format:
@@ -182,7 +179,7 @@ class StrPackager(DefaultPackager):
            )
 
        # Mark the extracted content for future clear:
-       cls.add_future_clearing_path(path=directory_path)
+       self.add_future_clearing_path(path=directory_path)
 
        # Return the extracted directory path:
        return directory_path
@@ -196,9 +193,8 @@ class _BuiltinCollectionPackager(DefaultPackager):
     DEFAULT_PACKING_ARTIFACT_TYPE = ArtifactType.RESULT
     DEFAULT_UNPACKING_ARTIFACT_TYPE = ArtifactType.FILE
 
-    @classmethod
     def pack_file(
-        cls,
+        self,
         obj: Union[dict, list],
         key: str,
         file_format: str = DEFAULT_STRUCT_FILE_FORMAT,
@@ -215,7 +211,7 @@ class _BuiltinCollectionPackager(DefaultPackager):
         # Write to file:
         formatter = StructFileSupportedFormat.get_format_handler(fmt=file_format)
         temp_directory = pathlib.Path(tempfile.mkdtemp())
-        cls.add_future_clearing_path(path=temp_directory)
+        self.add_future_clearing_path(path=temp_directory)
         file_path = temp_directory / f"{key}.{file_format}"
         formatter.write(obj=obj, file_path=str(file_path))
 
@@ -225,9 +221,8 @@ class _BuiltinCollectionPackager(DefaultPackager):
 
         return artifact, instructions
 
-    @classmethod
     def unpack_file(
-        cls, data_item: DataItem, file_format: str = None
+        self, data_item: DataItem, file_format: str = None
     ) -> Union[dict, list]:
         """
         Unpack a builtin collection from file.
@@ -239,8 +234,7 @@ class _BuiltinCollectionPackager(DefaultPackager):
         :return: The unpacked builtin collection.
         """
         # Get the file:
-        file_path = data_item.local()
-        cls.add_future_clearing_path(path=file_path)
+        file_path = self.get_data_item_local_path(data_item=data_item)
 
         # Get the archive format by the file extension if needed:
         if file_format is None:
@@ -265,8 +259,7 @@ class DictPackager(_BuiltinCollectionPackager):
 
     PACKABLE_OBJECT_TYPE = dict
 
-    @classmethod
-    def unpack_file(cls, data_item: DataItem, file_format: str = None) -> dict:
+    def unpack_file(self, data_item: DataItem, file_format: str = None) -> dict:
         """
         Unpack a dictionary from file.
 
@@ -292,8 +285,7 @@ class ListPackager(_BuiltinCollectionPackager):
 
     PACKABLE_OBJECT_TYPE = list
 
-    @classmethod
-    def unpack_file(cls, data_item: DataItem, file_format: str = None) -> list:
+    def unpack_file(self, data_item: DataItem, file_format: str = None) -> list:
         """
         Unpack a list from file.
 
@@ -338,8 +330,7 @@ class TuplePackager(ListPackager):
 
     PACKABLE_OBJECT_TYPE = tuple
 
-    @classmethod
-    def pack_result(cls, obj: tuple, key: str) -> dict:
+    def pack_result(self, obj: tuple, key: str) -> dict:
         """
         Pack a tuple as a result.
 
@@ -350,9 +341,8 @@ class TuplePackager(ListPackager):
         """
         return super().pack_result(obj=list(obj), key=key)
 
-    @classmethod
     def pack_file(
-        cls, obj: tuple, key: str, file_format: str = DEFAULT_STRUCT_FILE_FORMAT
+        self, obj: tuple, key: str, file_format: str = DEFAULT_STRUCT_FILE_FORMAT
     ) -> Tuple[Artifact, dict]:
         """
         Pack a tuple as a file by the given format.
@@ -365,8 +355,7 @@ class TuplePackager(ListPackager):
         """
         return super().pack_file(obj=list(obj), key=key, file_format=file_format)
 
-    @classmethod
-    def unpack_file(cls, data_item: DataItem, file_format: str = None) -> tuple:
+    def unpack_file(self, data_item: DataItem, file_format: str = None) -> tuple:
         """
         Unpack a tuple from file.
 
@@ -386,8 +375,7 @@ class SetPackager(ListPackager):
 
     PACKABLE_OBJECT_TYPE = set
 
-    @classmethod
-    def pack_result(cls, obj: set, key: str) -> dict:
+    def pack_result(self, obj: set, key: str) -> dict:
         """
         Pack a set as a result.
 
@@ -398,9 +386,8 @@ class SetPackager(ListPackager):
         """
         return super().pack_result(obj=list(obj), key=key)
 
-    @classmethod
     def pack_file(
-        cls, obj: set, key: str, file_format: str = DEFAULT_STRUCT_FILE_FORMAT
+        self, obj: set, key: str, file_format: str = DEFAULT_STRUCT_FILE_FORMAT
     ) -> Tuple[Artifact, dict]:
         """
         Pack a set as a file by the given format.
@@ -413,8 +400,7 @@ class SetPackager(ListPackager):
         """
         return super().pack_file(obj=list(obj), key=key, file_format=file_format)
 
-    @classmethod
-    def unpack_file(cls, data_item: DataItem, file_format: str = None) -> set:
+    def unpack_file(self, data_item: DataItem, file_format: str = None) -> set:
         """
         Unpack a set from file.
 
@@ -434,9 +420,8 @@ class FrozensetPackager(SetPackager):
 
     PACKABLE_OBJECT_TYPE = frozenset
 
-    @classmethod
     def pack_file(
-        cls, obj: frozenset, key: str, file_format: str = DEFAULT_STRUCT_FILE_FORMAT
+        self, obj: frozenset, key: str, file_format: str = DEFAULT_STRUCT_FILE_FORMAT
     ) -> Tuple[Artifact, dict]:
         """
         Pack a frozenset as a file by the given format.
@@ -449,8 +434,7 @@ class FrozensetPackager(SetPackager):
         """
         return super().pack_file(obj=set(obj), key=key, file_format=file_format)
 
-    @classmethod
-    def unpack_file(cls, data_item: DataItem, file_format: str = None) -> frozenset:
+    def unpack_file(self, data_item: DataItem, file_format: str = None) -> frozenset:
         """
         Unpack a frozenset from file.
 
@@ -472,8 +456,7 @@ class BytesPackager(ListPackager):
 
     PACKABLE_OBJECT_TYPE = bytes
 
-    @classmethod
-    def pack_result(cls, obj: bytes, key: str) -> dict:
+    def pack_result(self, obj: bytes, key: str) -> dict:
         """
         Pack bytes as a result.
 
@@ -484,9 +467,8 @@ class BytesPackager(ListPackager):
         """
         return {key: obj}
 
-    @classmethod
     def pack_file(
-        cls, obj: bytes, key: str, file_format: str = DEFAULT_STRUCT_FILE_FORMAT
+        self, obj: bytes, key: str, file_format: str = DEFAULT_STRUCT_FILE_FORMAT
     ) -> Tuple[Artifact, dict]:
         """
         Pack a bytes as a file by the given format.
@@ -499,8 +481,7 @@ class BytesPackager(ListPackager):
         """
         return super().pack_file(obj=list(obj), key=key, file_format=file_format)
 
-    @classmethod
-    def unpack_file(cls, data_item: DataItem, file_format: str = None) -> bytes:
+    def unpack_file(self, data_item: DataItem, file_format: str = None) -> bytes:
         """
         Unpack a bytes from file.
 
@@ -520,8 +501,7 @@ class BytearrayPackager(BytesPackager):
 
     PACKABLE_OBJECT_TYPE = bytearray
 
-    @classmethod
-    def pack_result(cls, obj: bytearray, key: str) -> dict:
+    def pack_result(self, obj: bytearray, key: str) -> dict:
         """
         Pack a bytearray as a result.
 
@@ -532,9 +512,8 @@ class BytearrayPackager(BytesPackager):
         """
         return {key: bytes(obj)}
 
-    @classmethod
     def pack_file(
-        cls, obj: bytearray, key: str, file_format: str = DEFAULT_STRUCT_FILE_FORMAT
+        self, obj: bytearray, key: str, file_format: str = DEFAULT_STRUCT_FILE_FORMAT
     ) -> Tuple[Artifact, dict]:
         """
         Pack a bytearray as a file by the given format.
@@ -547,8 +526,7 @@ class BytearrayPackager(BytesPackager):
         """
         return super().pack_file(obj=bytes(obj), key=key, file_format=file_format)
 
-    @classmethod
-    def unpack_file(cls, data_item: DataItem, file_format: str = None) -> bytearray:
+    def unpack_file(self, data_item: DataItem, file_format: str = None) -> bytearray:
         """
         Unpack a bytearray from file.
 
@@ -578,8 +556,7 @@ class PathPackager(StrPackager):
     PACK_SUBCLASSES = True
     DEFAULT_PACKING_ARTIFACT_TYPE = "path"
 
-    @classmethod
-    def pack_result(cls, obj: pathlib.Path, key: str) -> dict:
+    def pack_result(self, obj: pathlib.Path, key: str) -> dict:
         """
         Pack the `Path` as a string result.
 
@@ -590,9 +567,8 @@ class PathPackager(StrPackager):
         """
         return super().pack_result(obj=str(obj), key=key)
 
-    @classmethod
     def pack_path(
-        cls, obj: pathlib.Path, key: str, archive_format: str = DEFAULT_ARCHIVE_FORMAT
+        self, obj: pathlib.Path, key: str, archive_format: str = DEFAULT_ARCHIVE_FORMAT
     ) -> Tuple[Artifact, dict]:
         """
         Pack a `Path` value (pack the file or directory in that path).
@@ -605,9 +581,11 @@ class PathPackager(StrPackager):
         """
         return super().pack_path(obj=str(obj), key=key, archive_format=archive_format)
 
-    @classmethod
     def unpack_path(
-        cls, data_item: DataItem, is_directory: bool = False, archive_format: str = None
+        self,
+        data_item: DataItem,
+        is_directory: bool = False,
+        archive_format: str = None,
     ) -> pathlib.Path:
         """
         Unpack a data item representing a `Path`. If the path is of a file, the file is downloaded to a local
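
Note on the hunks above: every change in python_standard_library_packagers.py is the same migration - packager hooks (pack_*, unpack_*, get_supported_artifact_types) turn from classmethods taking `cls` into instance methods taking `self`, and the repeated `data_item.local()` plus `add_future_clearing_path()` pairs collapse into a single `get_data_item_local_path()` helper. A minimal sketch of a custom packager written against the new instance-based API (the `Color` type and its packing logic are hypothetical; `mlrun.DefaultPackager`, `PACKABLE_OBJECT_TYPE` and the `pack_result` signature are taken from the diff):

    import mlrun


    class Color:  # hypothetical user type, for illustration only
        def __init__(self, rgb: tuple):
            self.rgb = rgb


    class ColorPackager(mlrun.DefaultPackager):
        PACKABLE_OBJECT_TYPE = Color

        # An instance method taking `self`, as in 1.6.0rc8; in 1.6.0rc6 this
        # hook would have been a `@classmethod` taking `cls`:
        def pack_result(self, obj: Color, key: str) -> dict:
            return {key: list(obj.rgb)}
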
mlrun/package/packagers_manager.py

@@ -51,10 +51,10 @@ class PackagersManager:
                                 object or data item. Default to ``mlrun.DefaultPackager``.
         """
         # Set the default packager:
-        self._default_packager = default_packager or DefaultPackager
+        self._default_packager = (default_packager or DefaultPackager)()
 
         # Initialize the packagers list (with the default packager in it):
-        self._packagers: List[Type[Packager]] = []
+        self._packagers: List[Packager] = []
 
         # Set an artifacts list and results dictionary to collect all packed objects (will be used later to write extra
         # data if noted by the user using the log hint key "extra_data")
@@ -80,7 +80,7 @@ class PackagersManager:
         return self._results
 
     def collect_packagers(
-        self, packagers: List[Union[Type, str]], default_priority: int = 5
+        self, packagers: List[Union[Type[Packager], str]], default_priority: int = 5
     ):
         """
         Collect the provided packagers. Packagers passed as module paths are imported and validated to be of type
@@ -155,9 +155,11 @@ class PackagersManager:
                raise MLRunPackageCollectionError(
                    f"The packager '{packager.__name__}' could not be collected as it is not a `mlrun.Packager`."
                )
+            # Initialize the packager class:
+            packager = packager()
            # Set default priority in case it is not set in the packager's class:
-            if packager.PRIORITY is ...:
-                packager.PRIORITY = default_priority
+            if packager.priority is ...:
+                packager.priority = default_priority
            # Collect the packager (putting him first in the list for highest priority:
            self._packagers.insert(0, packager)
            # For debugging, we'll print the collected packager:
@@ -350,13 +352,14 @@ class PackagersManager:
                   artifacts, to ensure that files that require uploading have already been uploaded.
        """
        for packager in self._get_packagers_with_default_packager():
-            for path in packager.get_future_clearing_path_list():
+            for path in packager.future_clearing_path_list:
                if not os.path.exists(path):
                    continue
                if os.path.isdir(path):
                    shutil.rmtree(path)
                else:
                    os.remove(path)
+            packager.future_clearing_path_list.clear()
 
    class _InstructionsNotesKey:
        """
@@ -368,7 +371,7 @@ class PackagersManager:
        ARTIFACT_TYPE = "artifact_type"
        INSTRUCTIONS = "instructions"
 
-    def _get_packagers_with_default_packager(self) -> List[Type[Packager]]:
+    def _get_packagers_with_default_packager(self) -> List[Packager]:
        """
        Get the full list of packagers - the collected packagers and the default packager (located at last place in the
        list - the lowest priority).
@@ -377,7 +380,7 @@ class PackagersManager:
        """
        return [*self._packagers, self._default_packager]
 
-    def _get_packager_by_name(self, name: str) -> Union[Type[Packager], None]:
+    def _get_packager_by_name(self, name: str) -> Union[Packager, None]:
        """
        Look for a packager with the given name and return it.
 
@@ -389,7 +392,7 @@ class PackagersManager:
        """
        # Look for a packager by exact name:
        for packager in self._get_packagers_with_default_packager():
-            if packager.__name__ == name:
+            if packager.__class__.__name__ == name:
                return packager
 
        # No packager was found:
@@ -401,7 +404,7 @@ class PackagersManager:
        obj: Any,
        artifact_type: str = None,
        configurations: dict = None,
-    ) -> Union[Type[Packager], None]:
+    ) -> Union[Packager, None]:
        """
        Look for a packager that can pack the provided object as the provided artifact type.
 
@@ -428,7 +431,7 @@ class PackagersManager:
        data_item: Any,
        type_hint: type,
        artifact_type: str = None,
-    ) -> Union[Type[Packager], None]:
+    ) -> Union[Packager, None]:
        """
        Look for a packager that can unpack the data item of the given type hint as the provided artifact type.
 
@@ -495,7 +498,7 @@ class PackagersManager:
 
        # Prepare the manager's unpackaging instructions:
        unpackaging_instructions = {
-            self._InstructionsNotesKey.PACKAGER_NAME: packager.__name__,
+            self._InstructionsNotesKey.PACKAGER_NAME: packager.__class__.__name__,
            self._InstructionsNotesKey.OBJECT_TYPE: self._get_type_name(typ=type(obj)),
            self._InstructionsNotesKey.ARTIFACT_TYPE: (
                artifact_type
@@ -646,7 +649,7 @@ class PackagersManager:
        :raise MLRunPackageUnpackingError: If there is no packager that supports the provided type hint.
        """
        # Prepare a list of a packager and exception string for all the failures in case there was no fitting packager:
-        found_packagers: List[Tuple[Type[Packager], str]] = []
+        found_packagers: List[Tuple[Packager, str]] = []
 
        # Try to unpack as one of the possible types in the type hint:
        possible_type_hints = {type_hint}
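
Note on the hunks above: the manager now holds packager instances - it calls `packager()` when collecting, types its list as List[Packager] rather than List[Type[Packager]], resolves names through `packager.__class__.__name__`, reads the lowercase `priority` attribute, and clears each packager's `future_clearing_path_list` after deleting the files. A hedged sketch of collecting a custom packager (using the hypothetical ColorPackager from the sketch above; mlrun normally constructs the manager internally, so building it directly here is for illustration only):

    from mlrun.package.packagers_manager import PackagersManager

    manager = PackagersManager()
    # Classes (or "module.path" strings) go in; the manager instantiates each
    # one and assigns default_priority to packagers that did not set their own:
    manager.collect_packagers(packagers=[ColorPackager], default_priority=5)
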
mlrun/package/utils/_pickler.py

@@ -13,6 +13,7 @@
 # limitations under the License.
 #
 import importlib
+import importlib.metadata as importlib_metadata
 import os
 import sys
 import tempfile
@@ -188,26 +189,15 @@ class Pickler:
        """
        # First we'll try to get the module version from `importlib`:
        try:
-            # Since Python 3.8, `version` is part of `importlib.metadata`. Before 3.8, we'll use the module
-            # `importlib_metadata` to get `version`.
-            if (
-                sys.version_info[1] > 7
-            ):  # TODO: Remove once Python 3.7 is not supported.
-                from importlib.metadata import version
-            else:
-                from importlib_metadata import version
-
-            return version(module_name)
-        except (ModuleNotFoundError, importlib.metadata.PackageNotFoundError):
-            # User won't necessarily have the `importlib_metadata` module, so we will ignore it by catching
-            # `ModuleNotFoundError`. `PackageNotFoundError` is ignored as well as this is raised when `version` could
-            # not find the package related to the module.
+            return importlib_metadata.version(module_name)
+        except importlib.metadata.PackageNotFoundError:
+            # `PackageNotFoundError` is ignored this is raised when `version` could not find the package related to the
+            # module.
            pass
 
-        # Secondly, if importlib could not get the version (most likely 'importlib_metadata' is not installed), we'll
-        # try to use `pkg_resources` to get the version (the version will be found only if the package name is equal to
-        # the module name. For example, if the module name is 'x' then the way we installed the package must be
-        # 'pip install x'):
+        # Secondly, if importlib could not get the version, we'll try to use `pkg_resources` to get the version (the
+        # version will be found only if the package name is equal to the module name. For example, if the module name is
+        # 'x' then the way we installed the package must be 'pip install x'):
        import pkg_resources
 
        with warnings.catch_warnings():
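
Note on the hunks above: with Python 3.7 support dropped, the Pickler resolves module versions straight through `importlib.metadata` instead of branching to the `importlib_metadata` backport. The same lookup pattern as a standalone sketch (the `get_module_version` wrapper name is hypothetical; the diff's actual fallback also wraps pkg_resources in a warnings filter):

    import importlib.metadata as importlib_metadata

    import pkg_resources


    def get_module_version(module_name: str):
        # Preferred: ask the installed distribution's metadata for the version.
        try:
            return importlib_metadata.version(module_name)
        except importlib_metadata.PackageNotFoundError:
            # Raised when no installed distribution matches the module name.
            pass
        # Fallback: only works when the distribution name equals the module
        # name (module 'x' installed via 'pip install x'):
        try:
            return pkg_resources.get_distribution(module_name).version
        except pkg_resources.DistributionNotFound:
            return None
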
mlrun/package/utils/_supported_format.py

@@ -24,7 +24,7 @@ class SupportedFormat(ABC, Generic[FileHandlerType]):
    Library of supported formats by some builtin MLRun packagers.
    """
 
-    # Add here the all the supported formats in ALL CAPS and their value as a string:
+    # Add here all the supported formats in ALL CAPS and their value as a string:
    ...
 
    # The map to use in the method `get_format_handler`. A dictionary of string key to a class type to handle that
mlrun/projects/pipelines.py

@@ -133,7 +133,7 @@ class WorkflowSpec(mlrun.model.ModelObj):
                required.remove(k)
        if required:
            raise mlrun.errors.MLRunInvalidArgumentError(
-                f"workflow argument(s) {','.join(required)} are required and were not specified"
+                f"Workflow argument(s) {','.join(required)} are required and were not specified"
            )
 
    def clear_tmp(self):
@@ -246,7 +246,7 @@ class _PipelineContext:
            return True
        if raise_exception:
            raise ValueError(
-                "pipeline context is not initialized, must be used inside a pipeline"
+                "Pipeline context is not initialized, must be used inside a pipeline"
            )
        return False
 
@@ -496,6 +496,7 @@ class _PipelineRunner(abc.ABC):
        artifact_path=None,
        namespace=None,
        source=None,
+        notifications: typing.List[mlrun.model.Notification] = None,
    ) -> _PipelineRunStatus:
        pass
 
@@ -573,6 +574,7 @@ class _KFPRunner(_PipelineRunner):
        artifact_path=None,
        namespace=None,
        source=None,
+        notifications: typing.List[mlrun.model.Notification] = None,
    ) -> _PipelineRunStatus:
        pipeline_context.set(project, workflow_spec)
        workflow_handler = _PipelineRunner._get_handler(
@@ -582,6 +584,19 @@ class _KFPRunner(_PipelineRunner):
            project.set_source(source=source)
 
        namespace = namespace or config.namespace
+
+        # fallback to old notification behavior
+        if notifications:
+            logger.warning(
+                "Setting notifications on kfp pipeline runner uses old notification behavior. "
+                "Notifications will only be sent if you wait for pipeline completion. "
+                "To use the new notification behavior, use the remote pipeline runner."
+            )
+            for notification in notifications:
+                project.notifiers.add_notification(
+                    notification.kind, notification.params
+                )
+
        run_id = _run_pipeline(
            workflow_handler,
            project=project.metadata.name,
@@ -655,7 +670,7 @@ class _KFPRunner(_PipelineRunner):
        raise_error = None
        try:
            if timeout:
-                logger.info("waiting for pipeline run completion")
+                logger.info("Waiting for pipeline run completion")
                state = run.wait_for_completion(
                    timeout=timeout, expected_statuses=expected_statuses
                )
@@ -666,6 +681,7 @@ class _KFPRunner(_PipelineRunner):
        mldb = mlrun.db.get_run_db(secrets=project._secrets)
        runs = mldb.list_runs(project=project.name, labels=f"workflow={run.run_id}")
 
+        # TODO: The below section duplicates notifiers.push_pipeline_run_results() logic. We should use it instead.
        had_errors = 0
        for r in runs:
            if r["status"].get("state", "") == "error":
@@ -701,12 +717,17 @@ class _LocalRunner(_PipelineRunner):
        artifact_path=None,
        namespace=None,
        source=None,
+        notifications: typing.List[mlrun.model.Notification] = None,
    ) -> _PipelineRunStatus:
        pipeline_context.set(project, workflow_spec)
        workflow_handler = _PipelineRunner._get_handler(
            workflow_handler, workflow_spec, project, secrets
        )
 
+        # fallback to old notification behavior
+        for notification in notifications or []:
+            project.notifiers.add_notification(notification.kind, notification.params)
+
        workflow_id = uuid.uuid4().hex
        pipeline_context.workflow_id = workflow_id
        # When using KFP, it would do this replacement. When running locally, we need to take care of it.
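
Note on the hunks above: all pipeline runners now accept a `notifications` argument; on the kfp and local engines it falls back to the old behavior of registering the notifications on `project.notifiers`, which only pushes them when waiting for completion. A hedged usage sketch through `project.run()` (assumes a project with a workflow named "main" is already set up; the webhook value is a placeholder and the Notification constructor arguments shown are an assumption):

    import mlrun
    from mlrun.model import Notification

    project = mlrun.get_or_create_project("my-project", context="./")
    run_status = project.run(
        name="main",
        engine="kfp",
        # With the old (fallback) behavior, notifications are only sent when
        # waiting for pipeline completion:
        watch=True,
        notifications=[
            Notification(kind="slack", params={"webhook": "<slack-webhook-url>"})
        ],
    )
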
@@ -785,10 +806,15 @@ class _RemoteRunner(_PipelineRunner):
        artifact_path: str = None,
        namespace: str = None,
        source: str = None,
+        notifications: typing.List[mlrun.model.Notification] = None,
    ) -> typing.Optional[_PipelineRunStatus]:
        workflow_name = normalize_workflow_name(name=name, project_name=project.name)
        workflow_id = None
 
+        # for start message, fallback to old notification behavior
+        for notification in notifications or []:
+            project.notifiers.add_notification(notification.kind, notification.params)
+
        # The returned engine for this runner is the engine of the workflow.
        # In this way wait_for_completion/get_run_status would be executed by the correct pipeline runner.
        inner_engine = get_workflow_engine(workflow_spec.engine)
@@ -827,6 +853,7 @@ class _RemoteRunner(_PipelineRunner):
                    workflow_name
                ),
                namespace=namespace,
+                notifications=notifications,
            )
            if workflow_spec.schedule:
                logger.info(
@@ -873,11 +900,33 @@ class _RemoteRunner(_PipelineRunner):
                exc=err,
            )
 
+    @staticmethod
+    def get_run_status(
+        project,
+        run,
+        timeout=None,
+        expected_statuses=None,
+        notifiers: mlrun.utils.notifications.CustomNotificationPusher = None,
+    ):
+        # ignore notifiers, as they are handled by the remote pipeline notifications,
+        # so overriding with CustomNotificationPusher with empty list of notifiers
+        state, had_errors, text = _KFPRunner.get_run_status(
+            project,
+            run,
+            timeout,
+            expected_statuses,
+            notifiers=mlrun.utils.notifications.CustomNotificationPusher([]),
+        )
+
+        # indicate the pipeline status since we don't push the notifications in the remote runner
+        logger.info(text)
+        return state, had_errors, text
+
 
 def create_pipeline(project, pipeline, functions, secrets=None, handler=None):
    spec = imputil.spec_from_file_location("workflow", pipeline)
    if spec is None:
-        raise ImportError(f"cannot import workflow {pipeline}")
+        raise ImportError(f"Cannot import workflow {pipeline}")
    mod = imputil.module_from_spec(spec)
    spec.loader.exec_module(mod)
 
@@ -938,6 +987,7 @@ def load_and_run(
    schedule: typing.Union[str, mlrun.common.schemas.ScheduleCronTrigger] = None,
    cleanup_ttl: int = None,
    load_only: bool = False,
+    wait_for_completion: bool = False,
 ):
    """
    Auxiliary function that the RemoteRunner run once or run every schedule.
@@ -967,6 +1017,7 @@ def load_and_run(
    :param cleanup_ttl:         pipeline cleanup ttl in secs (time to wait after workflow completion, at which point the
                                workflow and all its resources are deleted)
    :param load_only:           for just loading the project, inner use.
+    :param wait_for_completion: wait for workflow completion before returning
    """
    try:
        project = mlrun.load_project(
@@ -1028,3 +1079,11 @@ def load_and_run(
 
    if run.state == mlrun.run.RunStatuses.failed:
        raise RuntimeError(f"Workflow {workflow_log_message} failed") from run.exc
+
+    if wait_for_completion:
+        pipeline_state, _, _ = project.get_run_status(run)
+        context.log_result(key="workflow_state", value=pipeline_state, commit=True)
+        if pipeline_state != mlrun.run.RunStatuses.succeeded:
+            raise RuntimeError(
+                f"Workflow {workflow_log_message} failed, state={pipeline_state}"
+            )
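
Note on the hunk above: `load_and_run` (the auxiliary function the remote runner executes) gains a `wait_for_completion` flag; when set, the runner job blocks on the workflow, logs its final state under the `workflow_state` run result, and raises unless the state is succeeded. The same pattern in isolation (the `wait_and_report` name is hypothetical; `get_run_status`, `log_result` and `RunStatuses` appear in the diff):

    import mlrun


    def wait_and_report(context: mlrun.MLClientCtx, project, run):
        # Block until the workflow finishes and fetch its final state:
        pipeline_state, _, _ = project.get_run_status(run)
        # Persist the state as a run result so callers can inspect it:
        context.log_result(key="workflow_state", value=pipeline_state, commit=True)
        if pipeline_state != mlrun.run.RunStatuses.succeeded:
            raise RuntimeError(f"Workflow failed, state={pipeline_state}")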