mlrun 1.8.0rc15__py3-none-any.whl → 1.8.0rc17__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mlrun might be problematic.

@@ -31,7 +31,6 @@ from mlrun.common.schemas.model_monitoring.constants import (
     WriterEvent,
     WriterEventKind,
 )
-from mlrun.common.schemas.notification import NotificationKind, NotificationSeverity
 from mlrun.model_monitoring.db._stats import (
     ModelMonitoringCurrentStatsFile,
     ModelMonitoringDriftMeasuresFile,
@@ -39,7 +38,6 @@ from mlrun.model_monitoring.db._stats import (
 from mlrun.model_monitoring.helpers import get_result_instance_fqn
 from mlrun.serving.utils import StepToDict
 from mlrun.utils import logger
-from mlrun.utils.notifications.notification_pusher import CustomNotificationPusher
 
 _RawEvent = dict[str, Any]
 _AppResultEvent = NewType("_AppResultEvent", _RawEvent)
@@ -57,50 +55,6 @@ class _WriterEventTypeError(_WriterEventError, TypeError):
     pass
 
 
-class _Notifier:
-    def __init__(
-        self,
-        event: _AppResultEvent,
-        notification_pusher: CustomNotificationPusher,
-        severity: NotificationSeverity = NotificationSeverity.WARNING,
-    ) -> None:
-        """
-        Event notifier - send push notification when appropriate to the notifiers in
-        `notification pusher`.
-        Note that if you use a Slack App webhook, you need to define it as an MLRun secret
-        `SLACK_WEBHOOK`.
-        """
-        self._event = event
-        self._custom_notifier = notification_pusher
-        self._severity = severity
-
-    def _should_send_event(self) -> bool:
-        return self._event[ResultData.RESULT_STATUS] >= ResultStatusApp.detected.value
-
-    def _generate_message(self) -> str:
-        return f"""\
-The monitoring app `{self._event[WriterEvent.APPLICATION_NAME]}` \
-of kind `{self._event[ResultData.RESULT_KIND]}` \
-detected a problem in model endpoint ID `{self._event[WriterEvent.ENDPOINT_ID]}` \
-at time `{self._event[WriterEvent.START_INFER_TIME]}`.
-
-Result data:
-Name: `{self._event[ResultData.RESULT_NAME]}`
-Value: `{self._event[ResultData.RESULT_VALUE]}`
-Status: `{self._event[ResultData.RESULT_STATUS]}`
-Extra data: `{self._event[ResultData.RESULT_EXTRA_DATA]}`\
-"""
-
-    def notify(self) -> None:
-        """Send notification if appropriate"""
-        if not self._should_send_event():
-            logger.debug("Not sending a notification")
-            return
-        message = self._generate_message()
-        self._custom_notifier.push(message=message, severity=self._severity)
-        logger.debug("A notification should have been sent")
-
-
 class ModelMonitoringWriter(StepToDict):
     """
     Write monitoring application results to the target databases
@@ -116,10 +70,6 @@ class ModelMonitoringWriter(StepToDict):
         self.project = project
         self.name = project  # required for the deployment process
 
-        self._custom_notifier = CustomNotificationPusher(
-            notification_types=[NotificationKind.slack]
-        )
-
         self._tsdb_connector = mlrun.model_monitoring.get_tsdb_connector(
             project=self.project, secret_provider=secret_provider
         )
@@ -250,9 +200,6 @@ class ModelMonitoringWriter(StepToDict):
 
         logger.info("Completed event DB writes")
 
-        if kind == WriterEventKind.RESULT:
-            _Notifier(event=event, notification_pusher=self._custom_notifier).notify()
-
         if (
             mlrun.mlconf.alerts.mode == mlrun.common.schemas.alert.AlertsModes.enabled
             and kind == WriterEventKind.RESULT
@@ -647,7 +647,7 @@ class _KFPRunner(_PipelineRunner):
                 exc_info=err_to_str(exc),
             )
 
-        # TODO: we should check how can we get the run uid when we don't the the context (for example on
+        # TODO: we should check how can we get the run uid when we don't have the context (for example on
         #  mlrun.load_project() and later call directly to project.run)
         if context:
             project.notifiers.push_pipeline_start_message(
mlrun/projects/project.py CHANGED
@@ -44,6 +44,7 @@ import mlrun.common.runtimes.constants
 import mlrun.common.schemas.alert
 import mlrun.common.schemas.artifact
 import mlrun.common.schemas.model_monitoring.constants as mm_constants
+import mlrun.datastore.datastore_profile
 import mlrun.db
 import mlrun.errors
 import mlrun.k8s_utils
@@ -1961,6 +1962,11 @@ class MlrunProject(ModelObj):
             ... )
 
         """
+        if not document_loader_spec.download_object and upload:
+            raise ValueError(
+                "This document loader expects direct links/URLs and does not support file uploads. "
+                "Either set download_object=True or set upload=False"
+            )
         doc_artifact = DocumentArtifact(
             key=key,
             original_source=local_path or target_path,
@@ -3579,8 +3585,6 @@ class MlrunProject(ModelObj):
             * None - will be set from the system configuration.
             * v3io - for v3io endpoint store, pass `v3io` and the system will generate the
              exact path.
-            * MySQL/SQLite - for SQL endpoint store, provide the full connection string,
-              for example: mysql+pymysql://<username>:<password>@<host>:<port>/<db_name>
         :param stream_path: Path to the model monitoring stream. By default, None. Options:
 
             * None - will be set from the system configuration.
@@ -3603,12 +3607,30 @@ class MlrunProject(ModelObj):
            & tracked model server.
         """
         db = mlrun.db.get_run_db(secrets=self._secrets)
+        if tsdb_connection == "v3io":
+            tsdb_profile = mlrun.datastore.datastore_profile.DatastoreProfileV3io(
+                name="mm-infra-tsdb"
+            )
+            self.register_datastore_profile(tsdb_profile)
+            tsdb_profile_name = tsdb_profile.name
+        else:
+            tsdb_profile_name = None
+        if stream_path == "v3io":
+            stream_profile = mlrun.datastore.datastore_profile.DatastoreProfileV3io(
+                name="mm-infra-stream"
+            )
+            self.register_datastore_profile(stream_profile)
+            stream_profile_name = stream_profile.name
+        else:
+            stream_profile_name = None
         db.set_model_monitoring_credentials(
             project=self.name,
             credentials={
                 "access_key": access_key,
                 "stream_path": stream_path,
                 "tsdb_connection": tsdb_connection,
+                "tsdb_profile_name": tsdb_profile_name,
+                "stream_profile_name": stream_profile_name,
             },
             replace_creds=replace_creds,
         )
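
Per the hunk above, the `v3io` shorthand now also registers v3io datastore profiles (`mm-infra-tsdb`, `mm-infra-stream`) on the project and forwards their names alongside the existing credentials. A minimal usage sketch, assuming an existing project (the project name and context are hypothetical):

```python
import mlrun

# Hedged sketch: exercising the new code path by passing the "v3io" shorthand.
# Per the diff, this also registers the "mm-infra-tsdb" / "mm-infra-stream"
# datastore profiles and sends their names to the API.
project = mlrun.get_or_create_project("my-project", context="./")  # hypothetical values
project.set_model_monitoring_credentials(
    tsdb_connection="v3io",
    stream_path="v3io",
)
```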
mlrun/secrets.py CHANGED
@@ -151,7 +151,7 @@ def get_secret_or_env(
     secret_provider: Union[dict, SecretsStore, Callable, None] = None,
     default: Optional[str] = None,
     prefix: Optional[str] = None,
-) -> str:
+) -> Optional[str]:
     """Retrieve value of a secret, either from a user-provided secret store, or from environment variables.
     The function will retrieve a secret value, attempting to find it according to the following order:
 
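With the corrected annotation, `get_secret_or_env` is explicitly allowed to return `None` when the key is found neither in the secret store nor in the environment. A small hedged sketch of defensive caller code (the key name is only an example, borrowed from the notifier docstring above):

```python
from mlrun.secrets import get_secret_or_env

# get_secret_or_env(...) -> Optional[str], so handle a missing value explicitly.
webhook = get_secret_or_env("SLACK_WEBHOOK")
if webhook is None:
    raise RuntimeError("SLACK_WEBHOOK is not set as a secret or environment variable")
```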
 
mlrun/serving/routers.py CHANGED
@@ -30,7 +30,6 @@ import mlrun.common.model_monitoring
 import mlrun.common.schemas.model_monitoring
 from mlrun.utils import logger, now_date
 
-from .server import GraphServer
 from .utils import RouterToDict, _extract_input_data, _update_result_body
 from .v2_serving import _ModelLogPusher
 
@@ -607,24 +606,24 @@ class VotingEnsemble(ParallelRun):
         self.prediction_col_name = prediction_col_name or "prediction"
         self.format_response_with_col_name_flag = format_response_with_col_name_flag
         self.model_endpoint_uid = None
+        self.model_endpoint = None
         self.shard_by_endpoint = shard_by_endpoint
 
     def post_init(self, mode="sync", **kwargs):
-        server = getattr(self.context, "_server", None) or getattr(
-            self.context, "server", None
-        )
+        server: mlrun.serving.GraphServer = getattr(
+            self.context, "_server", None
+        ) or getattr(self.context, "server", None)
         if not server:
             logger.warn("GraphServer not initialized for VotingEnsemble instance")
             return
-
         if not self.context.is_mock or self.context.monitoring_mock:
-            self.model_endpoint_uid = _init_endpoint_record(
-                server,
-                self,
-                creation_strategy=kwargs.get("creation_strategy"),
-                endpoint_type=kwargs.get("endpoint_type"),
+            self.model_endpoint = mlrun.get_run_db().get_model_endpoint(
+                project=server.project,
+                name=self.name,
+                function_name=server.function_name,
+                function_tag=server.function_tag or "latest",
             )
-
+            self.model_endpoint_uid = self.model_endpoint.metadata.uid
         self._update_weights(self.weights)
 
     def _resolve_route(self, body, urlpath):
@@ -1004,81 +1003,6 @@ class VotingEnsemble(ParallelRun):
                 self._weights[model] = 0
 
 
-def _init_endpoint_record(
-    graph_server: GraphServer,
-    voting_ensemble: VotingEnsemble,
-    creation_strategy: mlrun.common.schemas.ModelEndpointCreationStrategy,
-    endpoint_type: mlrun.common.schemas.EndpointType,
-) -> Union[str, None]:
-    """
-    Initialize model endpoint record and write it into the DB. In general, this method retrieve the unique model
-    endpoint ID which is generated according to the function uri and the model version. If the model endpoint is
-    already exist in the DB, we skip the creation process. Otherwise, it writes the new model endpoint record to the DB.
-
-    :param graph_server: A GraphServer object which will be used for getting the function uri.
-    :param voting_ensemble: Voting ensemble serving class. It contains important details for the model endpoint record
-        such as model name, model path, model version, and the ids of the children model endpoints.
-    :param creation_strategy: Strategy for creating or updating the model endpoint:
-        * **overwrite**:
-            1. If model endpoints with the same name exist, delete the `latest` one.
-            2. Create a new model endpoint entry and set it as `latest`.
-        * **inplace** (default):
-            1. If model endpoints with the same name exist, update the `latest` entry.
-            2. Otherwise, create a new entry.
-        * **archive**:
-            1. If model endpoints with the same name exist, preserve them.
-            2. Create a new model endpoint with the same name and set it to `latest`.
-
-    :param endpoint_type: model endpoint type
-    :return: Model endpoint unique ID.
-    """
-
-    logger.info("Initializing endpoint records")
-    children_uids = []
-    children_names = []
-    for _, c in voting_ensemble.routes.items():
-        if hasattr(c, "endpoint_uid"):
-            children_uids.append(c.endpoint_uid)
-            children_names.append(c.name)
-    try:
-        logger.info(
-            "Creating Or Updating a new model endpoint record",
-            name=voting_ensemble.name,
-            project=graph_server.project,
-            function_name=graph_server.function_name,
-            function_tag=graph_server.function_tag or "latest",
-            model_class=voting_ensemble.__class__.__name__,
-            creation_strategy=creation_strategy,
-        )
-        model_endpoint = mlrun.common.schemas.ModelEndpoint(
-            metadata=mlrun.common.schemas.ModelEndpointMetadata(
-                project=graph_server.project,
-                name=voting_ensemble.name,
-                endpoint_type=endpoint_type,
-            ),
-            spec=mlrun.common.schemas.ModelEndpointSpec(
-                function_name=graph_server.function_name,
-                function_tag=graph_server.function_tag or "latest",
-                model_class=voting_ensemble.__class__.__name__,
-                children_uids=children_uids,
-                children=children_names,
-            ),
-            status=mlrun.common.schemas.ModelEndpointStatus(
-                monitoring_mode=mlrun.common.schemas.model_monitoring.ModelMonitoringMode.enabled
-                if voting_ensemble.context.server.track_models
-                else mlrun.common.schemas.model_monitoring.ModelMonitoringMode.disabled,
-            ),
-        )
-        db = mlrun.get_run_db()
-        db.create_model_endpoint(
-            model_endpoint=model_endpoint, creation_strategy=creation_strategy
-        )
-    except mlrun.errors.MLRunInvalidArgumentError as e:
-        logger.info("Failed to create model endpoint record", error=e)
-        return None
-    return model_endpoint.metadata.uid
-
-
 class EnrichmentModelRouter(ModelRouter):
     """
     Model router with feature enrichment and imputing
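
Per the hunks above, `VotingEnsemble.post_init` no longer builds and writes a model endpoint record itself; it now fetches a pre-created endpoint from the run DB and keeps both the object and its uid. A hedged sketch of the equivalent lookup (the project, function, and step names are illustrative):

```python
import mlrun

# Hedged sketch of the lookup post_init now performs (per the diff above).
db = mlrun.get_run_db()
endpoint = db.get_model_endpoint(
    project="my-project",       # hypothetical project name
    name="VotingEnsemble",      # the router/step name
    function_name="serving",    # hypothetical serving function name
    function_tag="latest",
)
print(endpoint.metadata.uid)    # what post_init stores as model_endpoint_uid
```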
mlrun/serving/states.py CHANGED
@@ -126,6 +126,9 @@ class BaseStep(ModelObj):
         self.shape = shape
         self.on_error = None
         self._on_error_handler = None
+        self.model_endpoint_creation_strategy = (
+            schemas.ModelEndpointCreationStrategy.SKIP
+        )
 
     def get_shape(self):
         """graphviz shape"""
@@ -428,7 +431,7 @@ class TaskStep(BaseStep):
         result_path: Optional[str] = None,
         model_endpoint_creation_strategy: Optional[
             schemas.ModelEndpointCreationStrategy
-        ] = schemas.ModelEndpointCreationStrategy.INPLACE,
+        ] = schemas.ModelEndpointCreationStrategy.SKIP,
         endpoint_type: Optional[schemas.EndpointType] = schemas.EndpointType.NODE_EP,
     ):
         super().__init__(name, after)
@@ -723,6 +726,11 @@ class RouterStep(TaskStep):
         self._routes: ObjectDict = None
         self.routes = routes
         self.endpoint_type = schemas.EndpointType.ROUTER
+        self.model_endpoint_creation_strategy = (
+            schemas.ModelEndpointCreationStrategy.INPLACE
+            if class_name and "serving.VotingEnsemble" in class_name
+            else schemas.ModelEndpointCreationStrategy.SKIP
+        )
 
     def get_children(self):
         """get child steps (routes)"""
@@ -15,7 +15,7 @@
 import threading
 import time
 import traceback
-from typing import Optional, Union
+from typing import Optional
 
 import mlrun.artifacts
 import mlrun.common.model_monitoring.helpers
@@ -23,7 +23,6 @@ import mlrun.common.schemas.model_monitoring
 import mlrun.model_monitoring
 from mlrun.utils import logger, now_date
 
-from .server import GraphServer
 from .utils import StepToDict, _extract_input_data, _update_result_body
 
 
@@ -114,8 +113,8 @@ class V2ModelServer(StepToDict):
         if model:
             self.model = model
             self.ready = True
-        self._versioned_model_name = None
         self.model_endpoint_uid = None
+        self.model_endpoint = None
         self.shard_by_endpoint = shard_by_endpoint
         self._model_logger = None
 
@@ -139,20 +138,23 @@ class V2ModelServer(StepToDict):
         else:
             self._load_and_update_state()
 
-        server = getattr(self.context, "_server", None) or getattr(
-            self.context, "server", None
-        )
+        server: mlrun.serving.GraphServer = getattr(
+            self.context, "_server", None
+        ) or getattr(self.context, "server", None)
         if not server:
             logger.warn("GraphServer not initialized for VotingEnsemble instance")
             return
 
+        if not self.context.is_mock and not self.model_spec:
+            self.get_model()
         if not self.context.is_mock or self.context.monitoring_mock:
-            self.model_endpoint_uid = _init_endpoint_record(
-                graph_server=server,
-                model=self,
-                creation_strategy=kwargs.get("creation_strategy"),
-                endpoint_type=kwargs.get("endpoint_type"),
+            self.model_endpoint = mlrun.get_run_db().get_model_endpoint(
+                project=server.project,
+                name=self.name,
+                function_name=server.function_name,
+                function_tag=server.function_tag or "latest",
             )
+            self.model_endpoint_uid = self.model_endpoint.metadata.uid
         self._model_logger = (
             _ModelLogPusher(self, self.context)
             if self.context and self.context.stream.enabled
@@ -233,23 +235,6 @@ class V2ModelServer(StepToDict):
         request = self.preprocess(event_body, op)
         return self.validate(request, op)
 
-    @property
-    def versioned_model_name(self):
-        if self._versioned_model_name:
-            return self._versioned_model_name
-
-        # Generating version model value based on the model name and model version
-        if self.model_path and self.model_path.startswith("store://"):
-            # Enrich the model server with the model artifact metadata
-            self.get_model()
-            if not self.version:
-                # Enrich the model version with the model artifact tag
-                self.version = self.model_spec.tag
-            self.labels = self.model_spec.labels
-        version = self.version or "latest"
-        self._versioned_model_name = f"{self.name}:{version}"
-        return self._versioned_model_name
-
     def do_event(self, event, *args, **kwargs):
         """main model event handler method"""
         start = now_date()
@@ -553,95 +538,3 @@ class _ModelLogPusher:
         if getattr(self.model, "metrics", None):
             data["metrics"] = self.model.metrics
         self.output_stream.push([data], partition_key=partition_key)
-
-
-def _init_endpoint_record(
-    graph_server: GraphServer,
-    model: V2ModelServer,
-    creation_strategy: mlrun.common.schemas.ModelEndpointCreationStrategy,
-    endpoint_type: mlrun.common.schemas.EndpointType,
-) -> Union[str, None]:
-    """
-    Initialize model endpoint record and write it into the DB. In general, this method retrieve the unique model
-    endpoint ID which is generated according to the function uri and the model version. If the model endpoint is
-    already exist in the DB, we skip the creation process. Otherwise, it writes the new model endpoint record to the DB.
-
-    :param graph_server: A GraphServer object which will be used for getting the function uri.
-    :param model: Base model serving class (v2). It contains important details for the model endpoint record
-        such as model name, model path, and model version.
-    :param creation_strategy: Strategy for creating or updating the model endpoint:
-        * **overwrite**:
-            1. If model endpoints with the same name exist, delete the `latest` one.
-            2. Create a new model endpoint entry and set it as `latest`.
-        * **inplace** (default):
-            1. If model endpoints with the same name exist, update the `latest` entry.
-            2. Otherwise, create a new entry.
-        * **archive**:
-            1. If model endpoints with the same name exist, preserve them.
-            2. Create a new model endpoint with the same name and set it to `latest`.
-    :param endpoint_type model endpoint type
-
-    :return: Model endpoint unique ID.
-    """
-
-    logger.info("Initializing endpoint records")
-    if not model.model_spec:
-        model.get_model()
-    if model.model_spec:
-        model_name = model.model_spec.metadata.key
-        model_db_key = model.model_spec.spec.db_key
-        model_uid = model.model_spec.metadata.uid
-        model_tag = model.model_spec.tag
-        model_labels = model.model_spec.labels  # todo : check if we still need this
-    else:
-        model_name = None
-        model_db_key = None
-        model_uid = None
-        model_tag = None
-        model_labels = {}
-    logger.info(
-        "Creating Or Updating a new model endpoint record",
-        name=model.name,
-        project=graph_server.project,
-        function_name=graph_server.function_name,
-        function_tag=graph_server.function_tag or "latest",
-        model_name=model_name,
-        model_tag=model_tag,
-        model_db_key=model_db_key,
-        model_uid=model_uid,
-        model_class=model.__class__.__name__,
-        creation_strategy=creation_strategy,
-        endpoint_type=endpoint_type,
-    )
-    try:
-        model_ep = mlrun.common.schemas.ModelEndpoint(
-            metadata=mlrun.common.schemas.ModelEndpointMetadata(
-                project=graph_server.project,
-                labels=model_labels,
-                name=model.name,
-                endpoint_type=endpoint_type,
-            ),
-            spec=mlrun.common.schemas.ModelEndpointSpec(
-                function_name=graph_server.function_name,
-                function_tag=graph_server.function_tag or "latest",
-                model_name=model_name,
-                model_db_key=model_db_key,
-                model_uid=model_uid,
-                model_class=model.__class__.__name__,
-                model_tag=model_tag,
-            ),
-            status=mlrun.common.schemas.ModelEndpointStatus(
-                monitoring_mode=mlrun.common.schemas.model_monitoring.ModelMonitoringMode.enabled
-                if model.context.server.track_models
-                else mlrun.common.schemas.model_monitoring.ModelMonitoringMode.disabled,
-            ),
-        )
-        db = mlrun.get_run_db()
-        model_ep = db.create_model_endpoint(
-            model_endpoint=model_ep, creation_strategy=creation_strategy
-        )
-    except mlrun.errors.MLRunBadRequestError as e:
-        logger.info("Failed to create model endpoint record", error=e)
-        return None
-
-    return model_ep.metadata.uid
@@ -1,4 +1,4 @@
 {
-  "git_commit": "6aa0bfdf77d03774890da6714fc3aca778bb9f26",
-  "version": "1.8.0-rc15"
+  "git_commit": "eb1fcda410abefdd86a344d75af69fe49dc5eb07",
+  "version": "1.8.0-rc17"
 }
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: mlrun
-Version: 1.8.0rc15
+Version: 1.8.0rc17
 Summary: Tracking and config of machine learning runs
 Home-page: https://github.com/mlrun/mlrun
 Author: Yaron Haviv
@@ -31,7 +31,7 @@ Requires-Dist: ipython~=8.10
 Requires-Dist: nuclio-jupyter~=0.11.1
 Requires-Dist: numpy<1.27.0,>=1.26.4
 Requires-Dist: pandas<2.2,>=1.2
-Requires-Dist: pyarrow<18,>=10.0
+Requires-Dist: pyarrow<17,>=10.0
 Requires-Dist: pyyaml<7,>=6.0.2
 Requires-Dist: requests~=2.32
 Requires-Dist: tabulate~=0.8.6
@@ -88,9 +88,9 @@ Requires-Dist: avro~=1.11; extra == "kafka"
 Provides-Extra: redis
 Requires-Dist: redis~=4.3; extra == "redis"
 Provides-Extra: mlflow
-Requires-Dist: mlflow~=2.8; extra == "mlflow"
+Requires-Dist: mlflow~=2.16; extra == "mlflow"
 Provides-Extra: databricks-sdk
-Requires-Dist: databricks-sdk~=0.13.0; extra == "databricks-sdk"
+Requires-Dist: databricks-sdk~=0.20.0; extra == "databricks-sdk"
 Provides-Extra: sqlalchemy
 Requires-Dist: sqlalchemy~=1.4; extra == "sqlalchemy"
 Provides-Extra: dask
@@ -121,6 +121,7 @@ Requires-Dist: timelength~=1.1; extra == "api"
 Requires-Dist: memray~=1.12; sys_platform != "win32" and extra == "api"
 Requires-Dist: aiosmtplib~=3.0; extra == "api"
 Requires-Dist: pydantic<2,>=1; extra == "api"
+Requires-Dist: mlrun-pipelines-kfp-v1-8[kfp]~=0.3.3; python_version < "3.11" and extra == "api"
 Provides-Extra: all
 Requires-Dist: adlfs==2023.9.0; extra == "all"
 Requires-Dist: aiobotocore<2.16,>=2.5.0; extra == "all"
@@ -131,7 +132,7 @@ Requires-Dist: azure-keyvault-secrets~=4.2; extra == "all"
 Requires-Dist: bokeh>=2.4.2,~=2.4; extra == "all"
 Requires-Dist: boto3<1.36,>=1.28.0; extra == "all"
 Requires-Dist: dask~=2023.12.1; extra == "all"
-Requires-Dist: databricks-sdk~=0.13.0; extra == "all"
+Requires-Dist: databricks-sdk~=0.20.0; extra == "all"
 Requires-Dist: distributed~=2023.12.1; extra == "all"
 Requires-Dist: gcsfs<2024.7,>=2023.9.2; extra == "all"
 Requires-Dist: google-cloud-bigquery-storage~=2.17; extra == "all"
@@ -140,7 +141,7 @@ Requires-Dist: google-cloud-storage==2.14.0; extra == "all"
 Requires-Dist: google-cloud==0.34; extra == "all"
 Requires-Dist: graphviz~=0.20.0; extra == "all"
 Requires-Dist: kafka-python~=2.0; extra == "all"
-Requires-Dist: mlflow~=2.8; extra == "all"
+Requires-Dist: mlflow~=2.16; extra == "all"
 Requires-Dist: msrest~=0.6.21; extra == "all"
 Requires-Dist: oss2==2.18.1; extra == "all"
 Requires-Dist: ossfs==2023.12.0; extra == "all"
@@ -161,7 +162,7 @@ Requires-Dist: azure-identity~=1.5; extra == "complete"
 Requires-Dist: azure-keyvault-secrets~=4.2; extra == "complete"
 Requires-Dist: boto3<1.36,>=1.28.0; extra == "complete"
 Requires-Dist: dask~=2023.12.1; extra == "complete"
-Requires-Dist: databricks-sdk~=0.13.0; extra == "complete"
+Requires-Dist: databricks-sdk~=0.20.0; extra == "complete"
 Requires-Dist: distributed~=2023.12.1; extra == "complete"
 Requires-Dist: gcsfs<2024.7,>=2023.9.2; extra == "complete"
 Requires-Dist: google-cloud-bigquery-storage~=2.17; extra == "complete"
@@ -170,7 +171,7 @@ Requires-Dist: google-cloud-storage==2.14.0; extra == "complete"
 Requires-Dist: google-cloud==0.34; extra == "complete"
 Requires-Dist: graphviz~=0.20.0; extra == "complete"
 Requires-Dist: kafka-python~=2.0; extra == "complete"
-Requires-Dist: mlflow~=2.8; extra == "complete"
+Requires-Dist: mlflow~=2.16; extra == "complete"
 Requires-Dist: msrest~=0.6.21; extra == "complete"
 Requires-Dist: oss2==2.18.1; extra == "complete"
 Requires-Dist: ossfs==2023.12.0; extra == "complete"
@@ -195,7 +196,7 @@ Requires-Dist: azure-keyvault-secrets~=4.2; extra == "complete-api"
 Requires-Dist: boto3<1.36,>=1.28.0; extra == "complete-api"
 Requires-Dist: dask-kubernetes~=0.11.0; extra == "complete-api"
 Requires-Dist: dask~=2023.12.1; extra == "complete-api"
-Requires-Dist: databricks-sdk~=0.13.0; extra == "complete-api"
+Requires-Dist: databricks-sdk~=0.20.0; extra == "complete-api"
 Requires-Dist: distributed~=2023.12.1; extra == "complete-api"
 Requires-Dist: fastapi~=0.115.6; extra == "complete-api"
 Requires-Dist: gcsfs<2024.7,>=2023.9.2; extra == "complete-api"
@@ -208,7 +209,8 @@ Requires-Dist: humanfriendly~=10.0; extra == "complete-api"
 Requires-Dist: igz-mgmt~=0.4.1; extra == "complete-api"
 Requires-Dist: kafka-python~=2.0; extra == "complete-api"
 Requires-Dist: memray~=1.12; sys_platform != "win32" and extra == "complete-api"
-Requires-Dist: mlflow~=2.8; extra == "complete-api"
+Requires-Dist: mlflow~=2.16; extra == "complete-api"
+Requires-Dist: mlrun-pipelines-kfp-v1-8[kfp]~=0.3.3; python_version < "3.11" and extra == "complete-api"
 Requires-Dist: msrest~=0.6.21; extra == "complete-api"
 Requires-Dist: objgraph~=3.6; extra == "complete-api"
 Requires-Dist: oss2==2.18.1; extra == "complete-api"