mlrun 1.6.0rc15__py3-none-any.whl → 1.6.0rc16__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mlrun might be problematic.
Files changed (66)
  1. mlrun/artifacts/base.py +1 -5
  2. mlrun/artifacts/dataset.py +0 -4
  3. mlrun/artifacts/model.py +0 -5
  4. mlrun/common/schemas/auth.py +3 -4
  5. mlrun/config.py +1 -0
  6. mlrun/data_types/to_pandas.py +0 -1
  7. mlrun/datastore/base.py +0 -1
  8. mlrun/datastore/dbfs_store.py +0 -1
  9. mlrun/datastore/sources.py +1 -1
  10. mlrun/datastore/v3io.py +1 -1
  11. mlrun/datastore/wasbfs/fs.py +0 -1
  12. mlrun/errors.py +0 -1
  13. mlrun/feature_store/retrieval/base.py +2 -3
  14. mlrun/feature_store/retrieval/job.py +0 -1
  15. mlrun/feature_store/retrieval/spark_merger.py +0 -2
  16. mlrun/feature_store/steps.py +0 -3
  17. mlrun/frameworks/_common/model_handler.py +2 -4
  18. mlrun/frameworks/_dl_common/loggers/logger.py +1 -3
  19. mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +1 -3
  20. mlrun/frameworks/_ml_common/loggers/logger.py +1 -3
  21. mlrun/frameworks/_ml_common/plans/calibration_curve_plan.py +1 -1
  22. mlrun/frameworks/_ml_common/plans/confusion_matrix_plan.py +1 -1
  23. mlrun/frameworks/_ml_common/plans/dataset_plan.py +1 -3
  24. mlrun/frameworks/lgbm/__init__.py +2 -2
  25. mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +3 -3
  26. mlrun/frameworks/pytorch/mlrun_interface.py +1 -1
  27. mlrun/frameworks/tf_keras/__init__.py +4 -4
  28. mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +3 -3
  29. mlrun/frameworks/tf_keras/mlrun_interface.py +6 -1
  30. mlrun/frameworks/xgboost/__init__.py +1 -1
  31. mlrun/k8s_utils.py +6 -6
  32. mlrun/kfpops.py +0 -3
  33. mlrun/launcher/base.py +0 -1
  34. mlrun/launcher/local.py +0 -3
  35. mlrun/model.py +4 -3
  36. mlrun/model_monitoring/batch.py +3 -1
  37. mlrun/package/packagers/numpy_packagers.py +1 -1
  38. mlrun/package/utils/log_hint_utils.py +1 -1
  39. mlrun/package/utils/type_hint_utils.py +3 -1
  40. mlrun/platforms/iguazio.py +2 -4
  41. mlrun/projects/project.py +2 -3
  42. mlrun/runtimes/constants.py +7 -0
  43. mlrun/runtimes/daskjob.py +0 -2
  44. mlrun/runtimes/function.py +0 -3
  45. mlrun/runtimes/local.py +1 -1
  46. mlrun/runtimes/mpijob/abstract.py +0 -1
  47. mlrun/runtimes/pod.py +5 -11
  48. mlrun/runtimes/sparkjob/spark3job.py +0 -1
  49. mlrun/secrets.py +0 -1
  50. mlrun/serving/states.py +0 -2
  51. mlrun/serving/utils.py +0 -1
  52. mlrun/serving/v1_serving.py +0 -1
  53. mlrun/track/tracker.py +1 -1
  54. mlrun/track/tracker_manager.py +3 -1
  55. mlrun/utils/azure_vault.py +0 -1
  56. mlrun/utils/condition_evaluator.py +0 -2
  57. mlrun/utils/helpers.py +0 -1
  58. mlrun/utils/logger.py +0 -1
  59. mlrun/utils/notifications/notification_pusher.py +0 -3
  60. mlrun/utils/version/version.json +2 -2
  61. {mlrun-1.6.0rc15.dist-info → mlrun-1.6.0rc16.dist-info}/METADATA +3 -3
  62. {mlrun-1.6.0rc15.dist-info → mlrun-1.6.0rc16.dist-info}/RECORD +66 -66
  63. {mlrun-1.6.0rc15.dist-info → mlrun-1.6.0rc16.dist-info}/LICENSE +0 -0
  64. {mlrun-1.6.0rc15.dist-info → mlrun-1.6.0rc16.dist-info}/WHEEL +0 -0
  65. {mlrun-1.6.0rc15.dist-info → mlrun-1.6.0rc16.dist-info}/entry_points.txt +0 -0
  66. {mlrun-1.6.0rc15.dist-info → mlrun-1.6.0rc16.dist-info}/top_level.txt +0 -0
mlrun/platforms/iguazio.py CHANGED
@@ -305,7 +305,6 @@ class OutputStream:
         self._mock_queue = []
 
         if create and not mock:
-
             # this import creates an import loop via the utils module, so putting it in execution path
             from mlrun.utils.helpers import logger
 
@@ -318,7 +317,7 @@
                 retention_in_hours=retention_in_hours,
             )
 
-            response = self._v3io_client.create_stream(
+            response = self._v3io_client.stream.create(
                 container=self._container,
                 path=self._stream_path,
                 shard_count=shards or 1,
@@ -343,7 +342,7 @@
             # for mock testing
             self._mock_queue.extend(records)
         else:
-            self._v3io_client.put_records(
+            self._v3io_client.stream.put_records(
                 container=self._container, path=self._stream_path, records=records
            )
 
@@ -368,7 +367,6 @@ class HTTPOutputStream:
             data = [data]
 
         for record in data:
-
             # Convert the new record to the required format
             serialized_record = dump_record(record)
             response = requests.post(self._stream_path, data=serialized_record)
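Note on the two `+` lines in the OutputStream hunks above: v3io-py moved the flat `create_stream`/`put_records` helpers under the client's `stream` namespace. A minimal sketch of the renamed calls, assuming a reachable V3IO endpoint and access key in the environment, and reusing the parameter names shown in the hunks (the container and stream path here are hypothetical):

    import v3io.dataplane

    client = v3io.dataplane.Client()  # endpoint/access key resolved from env vars
    # old: client.create_stream(...) -> new: client.stream.create(...)
    client.stream.create(container="projects", path="/demo-stream/", shard_count=1)
    # old: client.put_records(...) -> new: client.stream.put_records(...)
    client.stream.put_records(
        container="projects",
        path="/demo-stream/",
        records=[{"data": '{"hello": "world"}'}],
    )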
mlrun/projects/project.py CHANGED
@@ -793,9 +793,8 @@ class ProjectSpec(ModelObj):
         for name, function in self._function_definitions.items():
             if hasattr(function, "to_dict"):
                 spec = function.to_dict(strip=True)
-                if (
-                    function.spec.build.source
-                    and function.spec.build.source.startswith(self._source_repo())
+                if function.spec.build.source and function.spec.build.source.startswith(
+                    self._source_repo()
                 ):
                     update_in(spec, "spec.build.source", "./")
                 functions.append({"name": name, "spec": spec})
mlrun/runtimes/constants.py CHANGED
@@ -157,6 +157,13 @@ class RunStates(object):
             RunStates.aborted,
         ]
 
+    @staticmethod
+    def error_states():
+        return [
+            RunStates.error,
+            RunStates.aborted,
+        ]
+
     @staticmethod
     def non_terminal_states():
         return list(set(RunStates.all()) - set(RunStates.terminal_states()))
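The new `error_states()` helper groups the two failure-terminal states. A small usage sketch (the state value is hypothetical):

    from mlrun.runtimes.constants import RunStates

    state = "aborted"  # e.g. taken from a run object's status
    if state in RunStates.error_states():
        print(f"run ended in an error state: {state}")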
mlrun/runtimes/daskjob.py CHANGED
@@ -94,7 +94,6 @@ class DaskSpec(KubeResourceSpec):
         clone_target_dir=None,
         state_thresholds=None,
     ):
-
         super().__init__(
             command=command,
             args=args,
@@ -526,7 +525,6 @@ class DaskCluster(KubejobRuntime):
         )
 
     def _run(self, runobj: RunObject, execution):
-
         handler = runobj.spec.handler
         self._force_handler(handler)
 
mlrun/runtimes/function.py CHANGED
@@ -63,7 +63,6 @@ def validate_nuclio_version_compatibility(*min_versions):
    try:
        parsed_current_version = semver.VersionInfo.parse(mlconf.nuclio_version)
    except ValueError:
-
        # only log when version is set but invalid
        if mlconf.nuclio_version:
            logger.warning(
@@ -166,7 +165,6 @@ class NuclioSpec(KubeResourceSpec):
         state_thresholds=None,
         disable_default_http_trigger=None,
     ):
-
         super().__init__(
             command=command,
             args=args,
@@ -1105,7 +1103,6 @@ class RemoteRuntime(KubeResource):
         return results
 
     def _resolve_invocation_url(self, path, force_external_address):
-
         if not path.startswith("/") and path != "":
             path = f"/{path}"
 
mlrun/runtimes/local.py CHANGED
@@ -489,7 +489,7 @@ def exec_from_params(handler, runobj: RunObject, context: MLClientCtx, cwd=None)
        context.set_state("completed", commit=False)
    except Exception as exc:
        err = err_to_str(exc)
-        logger.error(f"execution error, {traceback.format_exc()}")
+        logger.error(f"Execution error, {traceback.format_exc()}")
        context.set_state(error=err, commit=False)
    logger.set_logger_level(old_level)
 
mlrun/runtimes/mpijob/abstract.py CHANGED
@@ -111,7 +111,6 @@ class AbstractMPIJobRuntime(KubejobRuntime, abc.ABC):
 
     @staticmethod
     def _get_run_completion_updates(run: dict) -> dict:
-
         # TODO: add a 'workers' section in run objects state, each worker will update its state while
         # the run state will be resolved by the server.
         # update the run object state if empty so that it won't default to 'created' state
mlrun/runtimes/pod.py CHANGED
@@ -491,9 +491,7 @@ class KubeResourceSpec(FunctionSpec):
         self._initialize_node_affinity(affinity_field_name)
 
         self_affinity = getattr(self, affinity_field_name)
-        self_affinity.node_affinity.required_during_scheduling_ignored_during_execution = (
-            node_selector
-        )
+        self_affinity.node_affinity.required_during_scheduling_ignored_during_execution = node_selector
 
     def enrich_function_preemption_spec(
         self,
@@ -593,7 +591,6 @@
             )
         # purge any affinity / anti-affinity preemption related configuration and enrich with preemptible tolerations
         elif self_preemption_mode == PreemptionModes.allow.value:
-
             # remove preemptible anti-affinity
             self._prune_affinity_node_selector_requirement(
                 generate_preemptible_node_selector_requirements(
@@ -655,17 +652,13 @@
         self._initialize_node_affinity(affinity_field_name)
 
         self_affinity = getattr(self, affinity_field_name)
-        if (
-            not self_affinity.node_affinity.required_during_scheduling_ignored_during_execution
-        ):
+        if not self_affinity.node_affinity.required_during_scheduling_ignored_during_execution:
             self_affinity.node_affinity.required_during_scheduling_ignored_during_execution = k8s_client.V1NodeSelector(
                 node_selector_terms=node_selector_terms
             )
             return
 
-        node_selector = (
-            self_affinity.node_affinity.required_during_scheduling_ignored_during_execution
-        )
+        node_selector = self_affinity.node_affinity.required_during_scheduling_ignored_during_execution
 
         new_node_selector_terms = []
 
         for node_selector_term_to_add in node_selector_terms:
@@ -741,9 +734,11 @@
         self._initialize_affinity(affinity_field_name)
         self._initialize_node_affinity(affinity_field_name)
 
+        # fmt: off
         self_affinity.node_affinity.required_during_scheduling_ignored_during_execution = (
             new_required_during_scheduling_ignored_during_execution
         )
+        # fmt: on
 
     @staticmethod
     def _prune_node_selector_requirements_from_node_selector_terms(
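The affinity hunks above are formatting-only: the long Kubernetes attribute name `required_during_scheduling_ignored_during_execution` no longer fits the formatter's line length, so one assignment is shielded with a `# fmt: off`/`# fmt: on` pair. For orientation, a minimal sketch, not MLRun's helper, of building the node-affinity structure these methods manipulate with the kubernetes client (the label key and values are hypothetical):

    from kubernetes import client as k8s_client

    # one required node-selector term: keep pods off preemptible nodes
    term = k8s_client.V1NodeSelectorTerm(
        match_expressions=[
            k8s_client.V1NodeSelectorRequirement(
                key="lifecycle",  # hypothetical node label
                operator="NotIn",
                values=["preemptible"],
            )
        ]
    )
    affinity = k8s_client.V1Affinity(
        node_affinity=k8s_client.V1NodeAffinity(
            required_during_scheduling_ignored_during_execution=k8s_client.V1NodeSelector(
                node_selector_terms=[term]
            )
        )
    )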
@@ -894,7 +889,6 @@ class AutoMountType(str, Enum):
         return mlrun.platforms.other.mount_pvc if pvc_configured else None
 
     def get_modifier(self):
-
         return {
             AutoMountType.none: None,
             AutoMountType.v3io_credentials: mlrun.v3io_cred,
mlrun/runtimes/sparkjob/spark3job.py CHANGED
@@ -128,7 +128,6 @@ class Spark3JobSpec(KubeResourceSpec):
         clone_target_dir=None,
         state_thresholds=None,
     ):
-
         super().__init__(
             command=command,
             args=args,
mlrun/secrets.py CHANGED
@@ -39,7 +39,6 @@ class SecretsStore:
         pass
 
     def add_source(self, kind, source="", prefix=""):
-
         if kind == "inline":
             if isinstance(source, str):
                 source = literal_eval(source)
mlrun/serving/states.py CHANGED
@@ -469,7 +469,6 @@ class TaskStep(BaseStep):
             class_name = class_name.__name__
         elif not class_object:
             if class_name == "$remote":
-
                 from mlrun.serving.remote import RemoteStep
 
                 class_object = RemoteStep
@@ -1130,7 +1129,6 @@ class FlowStep(BaseStep):
         return event
 
     def run(self, event, *args, **kwargs):
-
         if self._controller:
             # async flow (using storey)
             event._awaitable_result = None
mlrun/serving/utils.py CHANGED
@@ -98,7 +98,6 @@ class StepToDict:
 
 
 class RouterToDict(StepToDict):
-
     _STEP_KIND = "router"
 
     def to_dict(self, fields=None, exclude=None):
mlrun/serving/v1_serving.py CHANGED
@@ -160,7 +160,6 @@ def nuclio_serving_init(context, data):
 
 
 def nuclio_serving_handler(context, event):
-
    # check if valid route & model
    try:
        if hasattr(event, "trigger") and event.trigger.kind not in ["http", ""]:
mlrun/track/tracker.py CHANGED
@@ -63,7 +63,7 @@ class Tracker(ABC):
         reference_id: Any,
         function_name: str,
         handler: str = None,
-        **kwargs
+        **kwargs,
     ) -> RunObject:
         """
         Import a previous run from a 3rd party vendor to MLRun.
mlrun/track/tracker_manager.py CHANGED
@@ -45,7 +45,6 @@ class TrackerManager(metaclass=Singleton):
 
         # Check general config for tracking usage, if false we return an empty manager
         if mlconf.external_platform_tracking.enabled:
-
             # Check if the available trackers were collected:
             if _AVAILABLE_TRACKERS is None:
                 self._collect_available_trackers()
@@ -85,6 +84,9 @@
 
         :return: The context updated with the trackers products.
         """
+        if not self._trackers:
+            return context
+
         # Check if the context received is a dict to initialize it as an `MLClientCtx` object:
         is_context_dict = isinstance(context, dict)
         if is_context_dict:
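The added guard makes the post-run hook a no-op when no third-party trackers are registered, skipping the dict-to-`MLClientCtx` conversion below it. A schematic sketch of the pattern, with hypothetical names:

    class TrackerManagerSketch:
        def __init__(self):
            self._trackers = []  # populated only when tracking is enabled

        def post_run(self, context):
            if not self._trackers:
                return context  # nothing to track - return the context untouched
            ...  # otherwise normalize the context and apply each tracker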
mlrun/utils/azure_vault.py CHANGED
@@ -21,7 +21,6 @@ from .helpers import logger
 
 class AzureVaultStore:
     def __init__(self, vault_name):
-
         try:
             from azure.identity import EnvironmentCredential
             from azure.keyvault.secrets import SecretClient
mlrun/utils/condition_evaluator.py CHANGED
@@ -21,7 +21,6 @@ from mlrun.utils import logger
 def evaluate_condition_in_separate_process(
     condition: str, context: typing.Dict[str, typing.Any], timeout: int = 5
 ):
-
     if not condition:
         return True
 
@@ -52,7 +51,6 @@
 
 
 def _evaluate_condition(condition: str, context: typing.Dict[str, typing.Any]):
-
     import jinja2.sandbox
 
     jinja_env = jinja2.sandbox.SandboxedEnvironment()
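`_evaluate_condition` renders the condition string inside a Jinja2 sandbox, which blocks access to unsafe attributes during evaluation. A self-contained sketch of that pattern (the condition and context values are hypothetical):

    import jinja2.sandbox

    jinja_env = jinja2.sandbox.SandboxedEnvironment()
    condition = "{{ run.status == 'completed' }}"
    rendered = jinja_env.from_string(condition).render(run={"status": "completed"})
    print(rendered == "True")  # the sandboxed render returns the string "True"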
mlrun/utils/helpers.py CHANGED
@@ -952,7 +952,6 @@ def fill_object_hash(object_dict, uid_property_name, tag=""):
 
 
 def fill_artifact_object_hash(object_dict, iteration=None, producer_id=None):
-
    # remove artifact related fields before calculating hash
    object_dict.setdefault("metadata", {})
    labels = object_dict["metadata"].pop("labels", None)
mlrun/utils/logger.py CHANGED
@@ -92,7 +92,6 @@ class Logger(object):
     def set_handler(
         self, handler_name: str, file: IO[str], formatter: logging.Formatter
     ):
-
         # check if there's a handler by this name
         for handler in self._logger.handlers:
             if handler.name == handler_name:
mlrun/utils/notifications/notification_pusher.py CHANGED
@@ -36,7 +36,6 @@ class _NotificationPusherBase(object):
     def _push(
         self, sync_push_callback: typing.Callable, async_push_callback: typing.Callable
     ):
-
         if mlrun.utils.helpers.is_running_in_jupyter_notebook():
             # Running in Jupyter notebook.
             # In this case, we need to create a new thread, run a separate event loop in
@@ -88,7 +87,6 @@
 
 
 class NotificationPusher(_NotificationPusherBase):
-
     messages = {
         "completed": "{resource} completed",
         "error": "{resource} failed",
@@ -384,7 +382,6 @@ class NotificationPusher(_NotificationPusherBase):
             # but also for human readability reasons.
             notification.reason = notification.reason[:255]
         else:
-
             # empty out the reason if the notification is in a non-error state
             # in case a retry would kick in (when such mechanism would be implemented)
             notification.reason = None
mlrun/utils/version/version.json CHANGED
@@ -1,4 +1,4 @@
 {
-  "git_commit": "42e6b3f87d0da8af39127d19a4d7435ea7009322",
-  "version": "1.6.0-rc15"
+  "git_commit": "348a73793c3d97cae54b62c2bb5f2dbf4f9c12d3",
+  "version": "1.6.0-rc16"
 }
{mlrun-1.6.0rc15.dist-info → mlrun-1.6.0rc16.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: mlrun
-Version: 1.6.0rc15
+Version: 1.6.0rc16
 Summary: Tracking and config of machine learning runs
 Home-page: https://github.com/mlrun/mlrun
 Author: Yaron Haviv
@@ -192,7 +192,7 @@ Requires-Dist: sqlalchemy ~=1.4 ; extra == 'sqlalchemy'
 [![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)
 [![PyPI version fury.io](https://badge.fury.io/py/mlrun.svg)](https://pypi.python.org/pypi/mlrun/)
 [![Documentation](https://readthedocs.org/projects/mlrun/badge/?version=latest)](https://mlrun.readthedocs.io/en/latest/?badge=latest)
-[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
+[![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)
 ![GitHub commit activity](https://img.shields.io/github/commit-activity/w/mlrun/mlrun)
 ![GitHub release (latest SemVer)](https://img.shields.io/github/v/release/mlrun/mlrun?sort=semver)
 [![Join MLOps Live](https://img.shields.io/badge/slack-join_chat-white.svg?logo=slack&style=social)](https://mlopslive.slack.com)
@@ -226,7 +226,7 @@ See: **Docs:** [Projects and Automation](https://docs.mlrun.org/en/latest/projec
 
 ### Ingest and process data
 
-MLRun provides abstract interfaces to various offline and online [**data sources**](./store/datastore.html), supports batch or realtime data processing at scale, data lineage and versioning, structured and unstructured data, and more.
+MLRun provides abstract interfaces to various offline and online [**data sources**](https://docs.mlrun.org/en/latest/store/datastore.html), supports batch or realtime data processing at scale, data lineage and versioning, structured and unstructured data, and more.
 In addition, the MLRun [**Feature Store**](https://docs.mlrun.org/en/latest/feature-store/feature-store.html) automates the collection, transformation, storage, catalog, serving, and monitoring of data features across the ML lifecycle and enables feature reuse and sharing.
 
 See: **Docs:** [Ingest and process data](https://docs.mlrun.org/en/latest/data-prep/index.html), [Feature Store](https://docs.mlrun.org/en/latest/feature-store/feature-store.html), [Data & Artifacts](https://docs.mlrun.org/en/latest/concepts/data.html); **Tutorials:** [Quick start](https://docs.mlrun.org/en/latest/tutorials/01-mlrun-basics.html), [Feature Store](https://docs.mlrun.org/en/latest/feature-store/basic-demo.html).