mlrun 1.10.0rc8__py3-none-any.whl → 1.10.0rc9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mlrun might be problematic. Click here for more details.

Files changed (35)
  1. mlrun/common/db/dialects.py +25 -0
  2. mlrun/common/schemas/function.py +1 -0
  3. mlrun/common/schemas/model_monitoring/model_endpoints.py +8 -0
  4. mlrun/common/schemas/partition.py +13 -3
  5. mlrun/datastore/utils.py +0 -1
  6. mlrun/db/__init__.py +1 -0
  7. mlrun/db/sql_types.py +160 -0
  8. mlrun/frameworks/tf_keras/mlrun_interface.py +4 -1
  9. mlrun/frameworks/tf_keras/model_handler.py +11 -0
  10. mlrun/launcher/base.py +0 -1
  11. mlrun/launcher/client.py +0 -1
  12. mlrun/launcher/local.py +0 -4
  13. mlrun/model_monitoring/applications/base.py +21 -1
  14. mlrun/model_monitoring/applications/context.py +2 -1
  15. mlrun/projects/pipelines.py +2 -0
  16. mlrun/projects/project.py +0 -13
  17. mlrun/runtimes/daskjob.py +0 -2
  18. mlrun/runtimes/kubejob.py +0 -4
  19. mlrun/runtimes/mpijob/abstract.py +0 -2
  20. mlrun/runtimes/mpijob/v1.py +0 -2
  21. mlrun/runtimes/nuclio/function.py +0 -2
  22. mlrun/runtimes/nuclio/serving.py +0 -46
  23. mlrun/runtimes/pod.py +0 -3
  24. mlrun/runtimes/remotesparkjob.py +0 -2
  25. mlrun/runtimes/sparkjob/spark3job.py +0 -2
  26. mlrun/serving/server.py +3 -97
  27. mlrun/utils/helpers.py +1 -1
  28. mlrun/utils/version/version.json +2 -2
  29. {mlrun-1.10.0rc8.dist-info → mlrun-1.10.0rc9.dist-info}/METADATA +15 -11
  30. {mlrun-1.10.0rc8.dist-info → mlrun-1.10.0rc9.dist-info}/RECORD +34 -33
  31. mlrun/common/db/sql_session.py +0 -79
  32. {mlrun-1.10.0rc8.dist-info → mlrun-1.10.0rc9.dist-info}/WHEEL +0 -0
  33. {mlrun-1.10.0rc8.dist-info → mlrun-1.10.0rc9.dist-info}/entry_points.txt +0 -0
  34. {mlrun-1.10.0rc8.dist-info → mlrun-1.10.0rc9.dist-info}/licenses/LICENSE +0 -0
  35. {mlrun-1.10.0rc8.dist-info → mlrun-1.10.0rc9.dist-info}/top_level.txt +0 -0
mlrun/common/db/dialects.py ADDED
@@ -0,0 +1,25 @@
1
+ # Copyright 2025 Iguazio
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ import mlrun.common.types
15
+
16
+
17
+ class Dialects(mlrun.common.types.StrEnum):
18
+ MYSQL = "mysql"
19
+ POSTGRESQL = "postgresql"
20
+ SQLITE = "sqlite"
21
+
22
+ @classmethod
23
+ def all(cls) -> list[str]:
24
+ """Return all dialects as a list of strings."""
25
+ return [dialect.value for dialect in cls]
mlrun/common/schemas/function.py CHANGED
@@ -47,6 +47,7 @@ class FunctionState:
47
47
 
48
48
  # for pipeline steps
49
49
  skipped = "skipped"
50
+ initialized = "initialized"
50
51
 
51
52
  @classmethod
52
53
  def get_function_state_from_pod_state(cls, pod_state: str):
mlrun/common/schemas/model_monitoring/model_endpoints.py CHANGED
@@ -15,7 +15,9 @@ import abc
15
15
  import json
16
16
  from datetime import datetime
17
17
  from typing import Any, NamedTuple, Optional, TypeVar
18
+ from uuid import UUID
18
19
 
20
+ from pydantic import validator # use `validator` if you’re still on Pydantic v1
19
21
  from pydantic.v1 import BaseModel, Field, constr
20
22
 
21
23
  # TODO: remove the unused import below after `mlrun.datastore` and `mlrun.utils` usage is removed.
@@ -121,6 +123,12 @@ class ModelEndpointMetadata(ObjectMetadata, ModelEndpointParser):
121
123
  def mutable_fields(cls):
122
124
  return ["labels"]
123
125
 
126
+ @validator("uid", pre=True)
127
+ def _uid_to_str(cls, v): # noqa: N805
128
+ if isinstance(v, UUID):
129
+ return str(v)
130
+ return v
131
+
124
132
 
125
133
  class ModelEndpointSpec(ObjectSpec, ModelEndpointParser):
126
134
  model_class: Optional[str] = ""
mlrun/common/schemas/partition.py CHANGED
@@ -14,10 +14,10 @@
14
14
 
15
15
  from datetime import datetime, timedelta
16
16
 
17
- from mlrun.common.types import StrEnum
17
+ import mlrun.common.types
18
18
 
19
19
 
20
- class PartitionInterval(StrEnum):
20
+ class PartitionInterval(mlrun.common.types.StrEnum):
21
21
  DAY = "DAY"
22
22
  MONTH = "MONTH"
23
23
  YEARWEEK = "YEARWEEK"
@@ -44,6 +44,8 @@ class PartitionInterval(StrEnum):
44
44
  return timedelta(days=30)
45
45
  elif self == PartitionInterval.YEARWEEK:
46
46
  return timedelta(weeks=1)
47
+ else:
48
+ raise ValueError(f"Unsupported PartitionInterval: {self}")
47
49
 
48
50
  @classmethod
49
51
  def from_expression(cls, partition_expression: str):
@@ -83,7 +85,7 @@ class PartitionInterval(StrEnum):
83
85
  current_datetime = start_datetime
84
86
 
85
87
  for _ in range(partition_number):
86
- partition_name = self.get_partition_name(current_datetime)
88
+ partition_name = f"p{self.get_partition_name(current_datetime)}"
87
89
  partition_boundary_date = self.get_next_partition_time(current_datetime)
88
90
  partition_value = self.get_partition_name(partition_boundary_date)
89
91
  partitioning_information_list.append((partition_name, partition_value))
@@ -109,6 +111,8 @@ class PartitionInterval(StrEnum):
109
111
  return (current_datetime.replace(day=1) + timedelta(days=32)).replace(day=1)
110
112
  elif self == PartitionInterval.YEARWEEK:
111
113
  return current_datetime + timedelta(weeks=1)
114
+ else:
115
+ raise ValueError(f"Unsupported PartitionInterval: {self}")
112
116
 
113
117
  def get_partition_name(self, current_datetime: datetime) -> str:
114
118
  if self == PartitionInterval.DAY:
@@ -118,6 +122,8 @@ class PartitionInterval(StrEnum):
118
122
  elif self == PartitionInterval.YEARWEEK:
119
123
  year, week, _ = current_datetime.isocalendar()
120
124
  return f"{year}{week:02d}"
125
+ else:
126
+ raise ValueError(f"Unsupported PartitionInterval: {self}")
121
127
 
122
128
  def get_partition_expression(self, column_name: str):
123
129
  if self == PartitionInterval.YEARWEEK:
@@ -130,6 +136,8 @@ class PartitionInterval(StrEnum):
130
136
  # generates value in format %Y%m in mysql
131
137
  # mysql query example: `select YEAR(NOW())*100 + MONTH(NOW());`
132
138
  return f"YEAR({column_name}) * 100 + MONTH({column_name})"
139
+ else:
140
+ raise ValueError(f"Unsupported PartitionInterval: {self}")
133
141
 
134
142
  def get_number_of_partitions(self, days: int) -> int:
135
143
  # Calculate the number partitions based on given number of days
@@ -140,3 +148,5 @@ class PartitionInterval(StrEnum):
140
148
  return int(days / 30.44)
141
149
  elif self == PartitionInterval.YEARWEEK:
142
150
  return int(days / 7)
151
+ else:
152
+ raise ValueError(f"Unsupported PartitionInterval: {self}")
mlrun/datastore/utils.py CHANGED
@@ -150,7 +150,6 @@ def _generate_sql_query_with_time_filter(
150
150
  table = sqlalchemy.Table(
151
151
  table_name,
152
152
  sqlalchemy.MetaData(),
153
- autoload=True,
154
153
  autoload_with=engine,
155
154
  )
156
155
  query = sqlalchemy.select(table)
mlrun/db/__init__.py CHANGED
@@ -14,6 +14,7 @@
14
14
  from os import environ
15
15
 
16
16
  from ..config import config
17
+ from . import sql_types
17
18
  from .base import RunDBError, RunDBInterface # noqa
18
19
 
19
20
 
mlrun/db/sql_types.py ADDED
@@ -0,0 +1,160 @@
1
+ # Copyright 2025 Iguazio
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ """
15
+ This module provides SQLAlchemy TypeDecorator subclasses that are aware of
16
+ database dialects (MySQL, PostgreSQL, SQLite) and automatically select
17
+ appropriate native types (e.g., UUID, BLOB, TIMESTAMP with precision) or
18
+ fallbacks (e.g., hex-string storage) to ensure consistent behavior across
19
+ different database backends.
20
+ """
21
+
22
+ import uuid
23
+ from typing import Any, Optional, Union
24
+
25
+ import sqlalchemy.types
26
+ from sqlalchemy import CHAR, Text
27
+ from sqlalchemy.dialects.mysql import DATETIME as MYSQL_DATETIME
28
+ from sqlalchemy.dialects.mysql import MEDIUMBLOB
29
+ from sqlalchemy.dialects.postgresql import BYTEA
30
+ from sqlalchemy.dialects.postgresql import TIMESTAMP as PG_TIMESTAMP
31
+ from sqlalchemy.dialects.postgresql import UUID as PG_UUID
32
+ from sqlalchemy.engine.interfaces import Dialect
33
+ from sqlalchemy.types import TypeDecorator
34
+
35
+ import mlrun.common.db.dialects
36
+
37
+
38
+ class DateTime(TypeDecorator):
39
+ impl = sqlalchemy.types.DateTime
40
+ cache_ok = True
41
+ precision: int = 3
42
+
43
+ def load_dialect_impl(
44
+ self,
45
+ dialect: Dialect,
46
+ ) -> sqlalchemy.types.TypeEngine:
47
+ if dialect.name == mlrun.common.db.dialects.Dialects.MYSQL:
48
+ return dialect.type_descriptor(
49
+ MYSQL_DATETIME(
50
+ fsp=self.precision,
51
+ timezone=True,
52
+ )
53
+ )
54
+ if dialect.name == mlrun.common.db.dialects.Dialects.POSTGRESQL:
55
+ return dialect.type_descriptor(
56
+ PG_TIMESTAMP(
57
+ precision=self.precision,
58
+ timezone=True,
59
+ )
60
+ )
61
+ return dialect.type_descriptor(sqlalchemy.types.DateTime)
62
+
63
+
64
+ class MicroSecondDateTime(DateTime):
65
+ cache_ok = True
66
+ precision: int = 6
67
+
68
+
69
+ class Blob(TypeDecorator):
70
+ impl = sqlalchemy.types.LargeBinary
71
+ cache_ok = True
72
+
73
+ def load_dialect_impl(
74
+ self,
75
+ dialect: Dialect,
76
+ ) -> sqlalchemy.types.TypeEngine:
77
+ if dialect.name == mlrun.common.db.dialects.Dialects.MYSQL:
78
+ return dialect.type_descriptor(MEDIUMBLOB)
79
+ if dialect.name == mlrun.common.db.dialects.Dialects.POSTGRESQL:
80
+ return dialect.type_descriptor(BYTEA)
81
+ return dialect.type_descriptor(self.impl)
82
+
83
+
84
+ class Utf8BinText(TypeDecorator):
85
+ impl = Text
86
+ cache_ok = True
87
+
88
+ def load_dialect_impl(
89
+ self,
90
+ dialect: Dialect,
91
+ ) -> sqlalchemy.types.TypeEngine:
92
+ if dialect.name == mlrun.common.db.dialects.Dialects.MYSQL:
93
+ return dialect.type_descriptor(
94
+ sqlalchemy.dialects.mysql.VARCHAR(
95
+ collation="utf8_bin",
96
+ length=255,
97
+ )
98
+ )
99
+ if dialect.name == mlrun.common.db.dialects.Dialects.POSTGRESQL:
100
+ # This collation is created as part of the database creation
101
+ return dialect.type_descriptor(
102
+ Text(
103
+ collation="utf8_bin",
104
+ )
105
+ )
106
+ if dialect.name == mlrun.common.db.dialects.Dialects.SQLITE:
107
+ return dialect.type_descriptor(
108
+ Text(
109
+ collation="BINARY",
110
+ )
111
+ )
112
+ return dialect.type_descriptor(self.impl)
113
+
114
+
115
+ class UuidType(TypeDecorator):
116
+ """
117
+ A UUID type which stores as native UUID on Postgres (as_uuid=True)
118
+ and as 32-char hex strings on other dialects.
119
+ """
120
+
121
+ impl = CHAR(32)
122
+ cache_ok = True
123
+
124
+ def load_dialect_impl(self, dialect: Dialect) -> sqlalchemy.types.TypeEngine:
125
+ if dialect.name == mlrun.common.db.dialects.Dialects.POSTGRESQL:
126
+ return dialect.type_descriptor(PG_UUID(as_uuid=True))
127
+ return dialect.type_descriptor(CHAR(32))
128
+
129
+ def process_bind_param(
130
+ self,
131
+ value: Optional[Union[uuid.UUID, str]],
132
+ dialect: Dialect,
133
+ ) -> Optional[Union[uuid.UUID, str]]:
134
+ if value is None:
135
+ return None
136
+ if isinstance(value, uuid.UUID):
137
+ return (
138
+ value
139
+ if dialect.name == mlrun.common.db.dialects.Dialects.POSTGRESQL
140
+ else value.hex
141
+ )
142
+ if isinstance(value, str):
143
+ u = uuid.UUID(value)
144
+ return (
145
+ u
146
+ if dialect.name == mlrun.common.db.dialects.Dialects.POSTGRESQL
147
+ else u.hex
148
+ )
149
+ raise ValueError(f"Cannot bind UUID value {value!r}")
150
+
151
+ def process_result_value(
152
+ self, value: Optional[Union[uuid.UUID, bytes, str]], dialect: Dialect
153
+ ) -> Optional[uuid.UUID]:
154
+ if value is None:
155
+ return None
156
+ return value if isinstance(value, uuid.UUID) else uuid.UUID(value)
157
+
158
+ def coerce_compared_value(self, op: Any, value: Any) -> TypeDecorator:
159
+ # ensure STR comparisons are coerced through this type
160
+ return self
@@ -280,7 +280,10 @@ class TFKerasMLRunInterface(MLRunInterface, ABC):
280
280
  print(f"Horovod worker #{self._hvd.rank()} is using CPU")
281
281
 
282
282
  # Adjust learning rate based on the number of GPUs:
283
- optimizer.lr = optimizer.lr * self._hvd.size()
283
+ if hasattr(self.optimizer, "lr"):
284
+ optimizer.lr *= self._hvd.size()
285
+ else:
286
+ optimizer.learning_rate *= self._hvd.size()
284
287
 
285
288
  # Wrap the optimizer in horovod's distributed optimizer: 'hvd.DistributedOptimizer'.
286
289
  optimizer = self._hvd.DistributedOptimizer(optimizer)
@@ -527,11 +527,22 @@ class TFKerasModelHandler(DLModelHandler):
527
527
  self._model_file = os.path.join(
528
528
  os.path.dirname(self._model_file), self._model_name
529
529
  )
530
+ elif self._model_format == TFKerasModelHandler.ModelFormats.KERAS:
531
+ # When keras tried to load it, it validates the suffix. The `artifacts.model.get_model` function is
532
+ # downloading the keras file to a temp file with a `pkl` suffix, so it needs to be replaced:
533
+ self._model_file = self._model_file.rsplit(".pkl", 1)[0] + ".keras"
534
+ elif self._model_format == TFKerasModelHandler.ModelFormats.H5:
535
+ # When keras tried to load it, it validates the suffix. The `artifacts.model.get_model` function is
536
+ # downloading the keras file to a temp file with a `pkl` suffix, so it needs to be replaced:
537
+ self._model_file = self._model_file.rsplit(".pkl", 1)[0] + ".h5"
530
538
  # # ModelFormats.JSON_ARCHITECTURE_H5_WEIGHTS - Get the weights file:
531
539
  elif (
532
540
  self._model_format
533
541
  == TFKerasModelHandler.ModelFormats.JSON_ARCHITECTURE_H5_WEIGHTS
534
542
  ):
543
+ # When keras tried to load it, it validates the suffix. The `artifacts.model.get_model` function is
544
+ # downloading the keras file to a temp file with a `pkl` suffix, so it needs to be replaced:
545
+ self._model_file = self._model_file.rsplit(".pkl", 1)[0] + ".json"
535
546
  # Get the weights file:
536
547
  self._weights_file = self._extra_data[
537
548
  self._get_weights_file_artifact_name()
mlrun/launcher/base.py CHANGED
@@ -82,7 +82,6 @@ class BaseLauncher(abc.ABC):
82
82
  runtime: "mlrun.runtimes.base.BaseRuntime",
83
83
  project_name: Optional[str] = "",
84
84
  full: bool = True,
85
- client_version: str = "",
86
85
  ):
87
86
  pass
88
87
 
mlrun/launcher/client.py CHANGED
@@ -36,7 +36,6 @@ class ClientBaseLauncher(launcher.BaseLauncher, abc.ABC):
36
36
  runtime: "mlrun.runtimes.base.BaseRuntime",
37
37
  project_name: Optional[str] = "",
38
38
  full: bool = True,
39
- client_version: str = "",
40
39
  ):
41
40
  runtime.try_auto_mount_based_on_config()
42
41
  runtime._fill_credentials()
mlrun/launcher/local.py CHANGED
@@ -13,7 +13,6 @@
13
13
  # limitations under the License.
14
14
  import os
15
15
  import pathlib
16
- from os import environ
17
16
  from typing import Callable, Optional, Union
18
17
 
19
18
  import mlrun.common.constants as mlrun_constants
@@ -252,9 +251,6 @@ class ClientLocalLauncher(launcher.ClientBaseLauncher):
252
251
  # copy the code/base-spec to the local function (for the UI and code logging)
253
252
  fn.spec.description = runtime.spec.description
254
253
  fn.spec.build = runtime.spec.build
255
- serving_spec = getattr(runtime.spec, "serving_spec", None)
256
- if serving_spec:
257
- environ["SERVING_SPEC_ENV"] = serving_spec
258
254
 
259
255
  run.spec.handler = handler
260
256
  run.spec.reset_on_run = reset_on_run
@@ -166,13 +166,29 @@ class ModelMonitoringApplicationBase(MonitoringApplicationToDict, ABC):
166
166
  return result
167
167
 
168
168
  @staticmethod
169
+ def _check_writer_is_up(project: "mlrun.MlrunProject") -> None:
170
+ try:
171
+ project.get_function(
172
+ mm_constants.MonitoringFunctionNames.WRITER, ignore_cache=True
173
+ )
174
+ except mlrun.errors.MLRunNotFoundError:
175
+ raise mlrun.errors.MLRunValueError(
176
+ "Writing outputs to the databases is blocked as the model monitoring infrastructure is disabled.\n"
177
+ "To unblock, enable model monitoring with `project.enable_model_monitoring()`."
178
+ )
179
+
180
+ @classmethod
169
181
  @contextmanager
170
182
  def _push_to_writer(
183
+ cls,
171
184
  *,
172
185
  write_output: bool,
173
186
  stream_profile: Optional[ds_profile.DatastoreProfile],
187
+ project: "mlrun.MlrunProject",
174
188
  ) -> Iterator[dict[str, list[tuple]]]:
175
189
  endpoints_output: dict[str, list[tuple]] = defaultdict(list)
190
+ if write_output:
191
+ cls._check_writer_is_up(project)
176
192
  try:
177
193
  yield endpoints_output
178
194
  finally:
@@ -220,6 +236,9 @@ class ModelMonitoringApplicationBase(MonitoringApplicationToDict, ABC):
220
236
  for an MLRun job.
221
237
  This method should not be called directly.
222
238
  """
239
+ project = context.get_project_object()
240
+ if not project:
241
+ raise mlrun.errors.MLRunValueError("Could not load project from context")
223
242
 
224
243
  if write_output and (
225
244
  not endpoints or sample_data is not None or reference_data is not None
@@ -236,7 +255,7 @@ class ModelMonitoringApplicationBase(MonitoringApplicationToDict, ABC):
236
255
  )
237
256
 
238
257
  with self._push_to_writer(
239
- write_output=write_output, stream_profile=stream_profile
258
+ write_output=write_output, stream_profile=stream_profile, project=project
240
259
  ) as endpoints_output:
241
260
 
242
261
  def call_do_tracking(event: Optional[dict] = None):
@@ -249,6 +268,7 @@ class ModelMonitoringApplicationBase(MonitoringApplicationToDict, ABC):
249
268
  event=event,
250
269
  application_name=self.__class__.__name__,
251
270
  context=context,
271
+ project=project,
252
272
  sample_df=sample_data,
253
273
  feature_stats=feature_stats,
254
274
  )
@@ -137,13 +137,14 @@ class MonitoringApplicationContext:
137
137
  cls,
138
138
  context: "mlrun.MLClientCtx",
139
139
  *,
140
+ project: Optional["mlrun.MlrunProject"] = None,
140
141
  application_name: str,
141
142
  event: dict[str, Any],
142
143
  model_endpoint_dict: Optional[dict[str, ModelEndpoint]] = None,
143
144
  sample_df: Optional[pd.DataFrame] = None,
144
145
  feature_stats: Optional[FeatureStats] = None,
145
146
  ) -> "MonitoringApplicationContext":
146
- project = context.get_project_object()
147
+ project = project or context.get_project_object()
147
148
  if not project:
148
149
  raise mlrun.errors.MLRunValueError("Could not load project from context")
149
150
  logger = context.logger
@@ -1153,6 +1153,7 @@ def load_and_run_workflow(
1153
1153
  project = mlrun.get_or_create_project(
1154
1154
  context=project_context or f"./{project_name}",
1155
1155
  name=project_name,
1156
+ allow_cross_project=True,
1156
1157
  )
1157
1158
 
1158
1159
  # extract "start" notification if exists
@@ -1245,6 +1246,7 @@ def pull_remote_project_files(
1245
1246
  subpath=subpath,
1246
1247
  clone=clone,
1247
1248
  save=False,
1249
+ allow_cross_project=True,
1248
1250
  )
1249
1251
  except Exception as error:
1250
1252
  notify_scheduled_workflow_failure(
mlrun/projects/project.py CHANGED
@@ -2961,19 +2961,6 @@ class MlrunProject(ModelObj):
2961
2961
  mlrun.db.get_run_db().delete_function(name=name, project=self.metadata.name)
2962
2962
  self.spec.remove_function(name)
2963
2963
 
2964
- def remove_model_monitoring_function(self, name: Union[str, list[str]]):
2965
- """delete the specified model-monitoring-app function/s
2966
-
2967
- :param name: name of the model-monitoring-function/s (under the project)
2968
- """
2969
- # TODO: Remove this in 1.10.0
2970
- warnings.warn(
2971
- "'remove_model_monitoring_function' is deprecated in 1.7.0 and will be removed in 1.10.0. "
2972
- "Please use `delete_model_monitoring_function` instead.",
2973
- FutureWarning,
2974
- )
2975
- self.delete_model_monitoring_function(name)
2976
-
2977
2964
  def delete_model_monitoring_function(self, name: Union[str, list[str]]):
2978
2965
  """delete the specified model-monitoring-app function/s
2979
2966
 
mlrun/runtimes/daskjob.py CHANGED
@@ -92,7 +92,6 @@ class DaskSpec(KubeResourceSpec):
92
92
  preemption_mode=None,
93
93
  security_context=None,
94
94
  state_thresholds=None,
95
- serving_spec=None,
96
95
  ):
97
96
  super().__init__(
98
97
  command=command,
@@ -122,7 +121,6 @@ class DaskSpec(KubeResourceSpec):
122
121
  preemption_mode=preemption_mode,
123
122
  security_context=security_context,
124
123
  state_thresholds=state_thresholds,
125
- serving_spec=serving_spec,
126
124
  )
127
125
  self.args = args
128
126
 
mlrun/runtimes/kubejob.py CHANGED
@@ -207,7 +207,3 @@ class KubejobRuntime(KubeResource):
207
207
  raise NotImplementedError(
208
208
  f"Running a {self.kind} function from the client is not supported. Use .run() to submit the job to the API."
209
209
  )
210
-
211
- @property
212
- def serving_spec(self):
213
- return self.spec.serving_spec
@@ -54,7 +54,6 @@ class MPIResourceSpec(KubeResourceSpec):
54
54
  preemption_mode=None,
55
55
  security_context=None,
56
56
  state_thresholds=None,
57
- serving_spec=None,
58
57
  ):
59
58
  super().__init__(
60
59
  command=command,
@@ -84,7 +83,6 @@ class MPIResourceSpec(KubeResourceSpec):
84
83
  preemption_mode=preemption_mode,
85
84
  security_context=security_context,
86
85
  state_thresholds=state_thresholds,
87
- serving_spec=serving_spec,
88
86
  )
89
87
  self.mpi_args = mpi_args or [
90
88
  "-x",
@@ -49,7 +49,6 @@ class MPIV1ResourceSpec(MPIResourceSpec):
49
49
  preemption_mode=None,
50
50
  security_context=None,
51
51
  state_thresholds=None,
52
- serving_spec=None,
53
52
  ):
54
53
  super().__init__(
55
54
  command=command,
@@ -80,7 +79,6 @@ class MPIV1ResourceSpec(MPIResourceSpec):
80
79
  preemption_mode=preemption_mode,
81
80
  security_context=security_context,
82
81
  state_thresholds=state_thresholds,
83
- serving_spec=serving_spec,
84
82
  )
85
83
  self.clean_pod_policy = clean_pod_policy or MPIJobV1CleanPodPolicies.default()
86
84
 
@@ -154,7 +154,6 @@ class NuclioSpec(KubeResourceSpec):
154
154
  add_templated_ingress_host_mode=None,
155
155
  state_thresholds=None,
156
156
  disable_default_http_trigger=None,
157
- serving_spec=None,
158
157
  ):
159
158
  super().__init__(
160
159
  command=command,
@@ -184,7 +183,6 @@ class NuclioSpec(KubeResourceSpec):
184
183
  preemption_mode=preemption_mode,
185
184
  security_context=security_context,
186
185
  state_thresholds=state_thresholds,
187
- serving_spec=serving_spec,
188
186
  )
189
187
 
190
188
  self.base_spec = base_spec or {}
@@ -42,8 +42,6 @@ from mlrun.serving.states import (
42
42
  )
43
43
  from mlrun.utils import get_caller_globals, logger, set_paths
44
44
 
45
- from .. import KubejobRuntime
46
- from ..pod import KubeResourceSpec
47
45
  from .function import NuclioSpec, RemoteRuntime, min_nuclio_versions
48
46
 
49
47
  serving_subkind = "serving_v2"
@@ -151,7 +149,6 @@ class ServingSpec(NuclioSpec):
151
149
  state_thresholds=None,
152
150
  disable_default_http_trigger=None,
153
151
  model_endpoint_creation_task_name=None,
154
- serving_spec=None,
155
152
  ):
156
153
  super().__init__(
157
154
  command=command,
@@ -192,7 +189,6 @@ class ServingSpec(NuclioSpec):
192
189
  service_type=service_type,
193
190
  add_templated_ingress_host_mode=add_templated_ingress_host_mode,
194
191
  disable_default_http_trigger=disable_default_http_trigger,
195
- serving_spec=serving_spec,
196
192
  )
197
193
 
198
194
  self.models = models or {}
@@ -707,7 +703,6 @@ class ServingRuntime(RemoteRuntime):
707
703
  "track_models": self.spec.track_models,
708
704
  "default_content_type": self.spec.default_content_type,
709
705
  "model_endpoint_creation_task_name": self.spec.model_endpoint_creation_task_name,
710
- "filename": getattr(self.spec, "filename", None),
711
706
  }
712
707
 
713
708
  if self.spec.secret_sources:
@@ -716,10 +711,6 @@ class ServingRuntime(RemoteRuntime):
716
711
 
717
712
  return json.dumps(serving_spec)
718
713
 
719
- @property
720
- def serving_spec(self):
721
- return self._get_serving_spec()
722
-
723
714
  def to_mock_server(
724
715
  self,
725
716
  namespace=None,
@@ -824,40 +815,3 @@ class ServingRuntime(RemoteRuntime):
824
815
  "Turn off the mock (mock=False) and make sure Nuclio is installed for real deployment to Nuclio"
825
816
  )
826
817
  self._mock_server = self.to_mock_server()
827
-
828
- def to_job(self) -> KubejobRuntime:
829
- """Convert this ServingRuntime to a KubejobRuntime, so that the graph can be run as a standalone job."""
830
- if self.spec.function_refs:
831
- raise mlrun.errors.MLRunInvalidArgumentError(
832
- f"Cannot convert function '{self.metadata.name}' to a job because it has child functions"
833
- )
834
-
835
- spec = KubeResourceSpec(
836
- image=self.spec.image,
837
- mode=self.spec.mode,
838
- volumes=self.spec.volumes,
839
- volume_mounts=self.spec.volume_mounts,
840
- env=self.spec.env,
841
- resources=self.spec.resources,
842
- default_handler="mlrun.serving.server.execute_graph",
843
- pythonpath=self.spec.pythonpath,
844
- entry_points=self.spec.entry_points,
845
- description=self.spec.description,
846
- workdir=self.spec.workdir,
847
- image_pull_secret=self.spec.image_pull_secret,
848
- node_name=self.spec.node_name,
849
- node_selector=self.spec.node_selector,
850
- affinity=self.spec.affinity,
851
- disable_auto_mount=self.spec.disable_auto_mount,
852
- priority_class_name=self.spec.priority_class_name,
853
- tolerations=self.spec.tolerations,
854
- preemption_mode=self.spec.preemption_mode,
855
- security_context=self.spec.security_context,
856
- state_thresholds=self.spec.state_thresholds,
857
- serving_spec=self._get_serving_spec(),
858
- )
859
- job = KubejobRuntime(
860
- spec=spec,
861
- metadata=self.metadata,
862
- )
863
- return job
mlrun/runtimes/pod.py CHANGED
@@ -103,7 +103,6 @@ class KubeResourceSpec(FunctionSpec):
103
103
  "preemption_mode",
104
104
  "security_context",
105
105
  "state_thresholds",
106
- "serving_spec",
107
106
  ]
108
107
  _default_fields_to_strip = FunctionSpec._default_fields_to_strip + [
109
108
  "volumes",
@@ -179,7 +178,6 @@ class KubeResourceSpec(FunctionSpec):
179
178
  preemption_mode=None,
180
179
  security_context=None,
181
180
  state_thresholds=None,
182
- serving_spec=None,
183
181
  ):
184
182
  super().__init__(
185
183
  command=command,
@@ -225,7 +223,6 @@ class KubeResourceSpec(FunctionSpec):
225
223
  state_thresholds
226
224
  or mlrun.mlconf.function.spec.state_thresholds.default.to_dict()
227
225
  )
228
- self.serving_spec = serving_spec
229
226
  # Termination grace period is internal for runtimes that have a pod termination hook hence it is not in the
230
227
  # _dict_fields and doesn't have a setter.
231
228
  self._termination_grace_period_seconds = None
@@ -58,7 +58,6 @@ class RemoteSparkSpec(KubeResourceSpec):
58
58
  preemption_mode=None,
59
59
  security_context=None,
60
60
  state_thresholds=None,
61
- serving_spec=None,
62
61
  ):
63
62
  super().__init__(
64
63
  command=command,
@@ -88,7 +87,6 @@ class RemoteSparkSpec(KubeResourceSpec):
88
87
  preemption_mode=preemption_mode,
89
88
  security_context=security_context,
90
89
  state_thresholds=state_thresholds,
91
- serving_spec=serving_spec,
92
90
  )
93
91
  self.provider = provider
94
92
 
@@ -168,7 +168,6 @@ class Spark3JobSpec(KubeResourceSpec):
168
168
  executor_cores=None,
169
169
  security_context=None,
170
170
  state_thresholds=None,
171
- serving_spec=None,
172
171
  ):
173
172
  super().__init__(
174
173
  command=command,
@@ -198,7 +197,6 @@ class Spark3JobSpec(KubeResourceSpec):
198
197
  preemption_mode=preemption_mode,
199
198
  security_context=security_context,
200
199
  state_thresholds=state_thresholds,
201
- serving_spec=serving_spec,
202
200
  )
203
201
 
204
202
  self.driver_resources = driver_resources or {}
mlrun/serving/server.py CHANGED
@@ -21,9 +21,8 @@ import os
21
21
  import socket
22
22
  import traceback
23
23
  import uuid
24
- from typing import Any, Optional, Union
24
+ from typing import Optional, Union
25
25
 
26
- import storey
27
26
  from nuclio import Context as NuclioContext
28
27
  from nuclio.request import Logger as NuclioLogger
29
28
 
@@ -39,10 +38,9 @@ from mlrun.secrets import SecretsStore
39
38
 
40
39
  from ..common.helpers import parse_versioned_object_uri
41
40
  from ..common.schemas.model_monitoring.constants import FileTargetKind
42
- from ..datastore import DataItem, get_stream_pusher
41
+ from ..datastore import get_stream_pusher
43
42
  from ..datastore.store_resources import ResourceCache
44
43
  from ..errors import MLRunInvalidArgumentError
45
- from ..execution import MLClientCtx
46
44
  from ..model import ModelObj
47
45
  from ..utils import get_caller_globals
48
46
  from .states import (
@@ -324,11 +322,7 @@ class GraphServer(ModelObj):
324
322
 
325
323
  def _process_response(self, context, response, get_body):
326
324
  body = response.body
327
- if (
328
- isinstance(context, MLClientCtx)
329
- or isinstance(body, context.Response)
330
- or get_body
331
- ):
325
+ if isinstance(body, context.Response) or get_body:
332
326
  return body
333
327
 
334
328
  if body and not isinstance(body, (str, bytes)):
@@ -541,94 +535,6 @@ def v2_serving_init(context, namespace=None):
541
535
  _set_callbacks(server, context)
542
536
 
543
537
 
544
- async def async_execute_graph(
545
- context: MLClientCtx,
546
- data: DataItem,
547
- batching: bool,
548
- batch_size: Optional[int],
549
- ) -> list[Any]:
550
- spec = mlrun.utils.get_serving_spec()
551
-
552
- source_filename = spec.get("filename", None)
553
- namespace = {}
554
- if source_filename:
555
- with open(source_filename) as f:
556
- exec(f.read(), namespace)
557
-
558
- server = GraphServer.from_dict(spec)
559
-
560
- if config.log_level.lower() == "debug":
561
- server.verbose = True
562
- context.logger.info_with("Initializing states", namespace=namespace)
563
- kwargs = {}
564
- if hasattr(context, "is_mock"):
565
- kwargs["is_mock"] = context.is_mock
566
- server.init_states(
567
- context=None, # this context is expected to be a nuclio context, which we don't have in this flow
568
- namespace=namespace,
569
- **kwargs,
570
- )
571
- context.logger.info("Initializing graph steps")
572
- server.init_object(namespace)
573
-
574
- context.logger.info_with("Graph was initialized", verbose=server.verbose)
575
-
576
- if server.verbose:
577
- context.logger.info(server.to_yaml())
578
-
579
- df = data.as_df()
580
-
581
- responses = []
582
-
583
- async def run(body):
584
- event = storey.Event(id=index, body=body)
585
- response = await server.run(event, context)
586
- responses.append(response)
587
-
588
- if batching and not batch_size:
589
- batch_size = len(df)
590
-
591
- batch = []
592
- for index, row in df.iterrows():
593
- data = row.to_dict()
594
- if batching:
595
- batch.append(data)
596
- if len(batch) == batch_size:
597
- await run(batch)
598
- batch = []
599
- else:
600
- await run(data)
601
-
602
- if batch:
603
- await run(batch)
604
-
605
- termination_result = server.wait_for_completion()
606
- if asyncio.iscoroutine(termination_result):
607
- await termination_result
608
-
609
- return responses
610
-
611
-
612
- def execute_graph(
613
- context: MLClientCtx,
614
- data: DataItem,
615
- batching: bool = False,
616
- batch_size: Optional[int] = None,
617
- ) -> (list[Any], Any):
618
- """
619
- Execute graph as a job, from start to finish.
620
-
621
- :param context: The job's execution client context.
622
- :param data: The input data to the job, to be pushed into the graph row by row, or in batches.
623
- :param batching: Whether to push one or more batches into the graph rather than row by row.
624
- :param batch_size: The number of rows to push per batch. If not set, and batching=True, the entire dataset will
625
- be pushed into the graph in one batch.
626
-
627
- :return: A list of responses.
628
- """
629
- return asyncio.run(async_execute_graph(context, data, batching, batch_size))
630
-
631
-
632
538
  def _set_callbacks(server, context):
633
539
  if not server.graph.supports_termination() or not hasattr(context, "platform"):
634
540
  return
mlrun/utils/helpers.py CHANGED
@@ -911,7 +911,7 @@ def enrich_image_url(
911
911
  if is_mlrun_image and "mlrun/ml-base" in image_url:
912
912
  if tag:
913
913
  if mlrun.utils.helpers.validate_component_version_compatibility(
914
- "mlrun-client", "1.10.0", mlrun_client_version=tag
914
+ "mlrun-client", "1.10.0-rc0", mlrun_client_version=tag
915
915
  ):
916
916
  warnings.warn(
917
917
  "'mlrun/ml-base' image is deprecated in 1.10.0 and will be removed in 1.12.0, "
@@ -1,4 +1,4 @@
1
1
  {
2
- "git_commit": "fede26558b2c8db736315ad1f48e15e3ce2f387d",
3
- "version": "1.10.0-rc8"
2
+ "git_commit": "15a29118a80d3da0d4438ad82c31f14af981fccc",
3
+ "version": "1.10.0-rc9"
4
4
  }
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: mlrun
3
- Version: 1.10.0rc8
3
+ Version: 1.10.0rc9
4
4
  Summary: Tracking and config of machine learning runs
5
5
  Home-page: https://github.com/mlrun/mlrun
6
6
  Author: Yaron Haviv
@@ -85,11 +85,11 @@ Requires-Dist: avro~=1.11; extra == "kafka"
85
85
  Provides-Extra: redis
86
86
  Requires-Dist: redis~=4.3; extra == "redis"
87
87
  Provides-Extra: mlflow
88
- Requires-Dist: mlflow~=2.16; extra == "mlflow"
88
+ Requires-Dist: mlflow~=2.22; extra == "mlflow"
89
89
  Provides-Extra: databricks-sdk
90
90
  Requires-Dist: databricks-sdk~=0.20.0; extra == "databricks-sdk"
91
91
  Provides-Extra: sqlalchemy
92
- Requires-Dist: sqlalchemy~=1.4; extra == "sqlalchemy"
92
+ Requires-Dist: sqlalchemy~=2.0; extra == "sqlalchemy"
93
93
  Provides-Extra: dask
94
94
  Requires-Dist: dask~=2024.12.1; python_version >= "3.11" and extra == "dask"
95
95
  Requires-Dist: distributed~=2024.12.1; python_version >= "3.11" and extra == "dask"
@@ -112,8 +112,10 @@ Requires-Dist: objgraph~=3.6; extra == "api"
112
112
  Requires-Dist: igz-mgmt~=0.4.1; extra == "api"
113
113
  Requires-Dist: humanfriendly~=10.0; extra == "api"
114
114
  Requires-Dist: fastapi~=0.115.6; extra == "api"
115
- Requires-Dist: sqlalchemy~=1.4; extra == "api"
115
+ Requires-Dist: sqlalchemy~=2.0; extra == "api"
116
+ Requires-Dist: sqlalchemy-utils~=0.41.2; extra == "api"
116
117
  Requires-Dist: pymysql~=1.1; extra == "api"
118
+ Requires-Dist: psycopg2-binary~=2.9; extra == "api"
117
119
  Requires-Dist: alembic~=1.14; extra == "api"
118
120
  Requires-Dist: timelength~=1.1; extra == "api"
119
121
  Requires-Dist: memray~=1.12; sys_platform != "win32" and extra == "api"
@@ -141,7 +143,7 @@ Requires-Dist: google-cloud-storage==2.14.0; extra == "all"
141
143
  Requires-Dist: google-cloud==0.34; extra == "all"
142
144
  Requires-Dist: graphviz~=0.20.0; extra == "all"
143
145
  Requires-Dist: kafka-python~=2.1.0; extra == "all"
144
- Requires-Dist: mlflow~=2.16; extra == "all"
146
+ Requires-Dist: mlflow~=2.22; extra == "all"
145
147
  Requires-Dist: msrest~=0.6.21; extra == "all"
146
148
  Requires-Dist: oss2==2.18.1; extra == "all"
147
149
  Requires-Dist: ossfs==2023.12.0; extra == "all"
@@ -150,7 +152,7 @@ Requires-Dist: pyopenssl>=23; extra == "all"
150
152
  Requires-Dist: redis~=4.3; extra == "all"
151
153
  Requires-Dist: s3fs<2024.7,>=2023.9.2; extra == "all"
152
154
  Requires-Dist: snowflake-connector-python~=3.7; extra == "all"
153
- Requires-Dist: sqlalchemy~=1.4; extra == "all"
155
+ Requires-Dist: sqlalchemy~=2.0; extra == "all"
154
156
  Requires-Dist: taos-ws-py==0.3.2; extra == "all"
155
157
  Provides-Extra: complete
156
158
  Requires-Dist: adlfs==2023.9.0; extra == "complete"
@@ -172,7 +174,7 @@ Requires-Dist: google-cloud-storage==2.14.0; extra == "complete"
172
174
  Requires-Dist: google-cloud==0.34; extra == "complete"
173
175
  Requires-Dist: graphviz~=0.20.0; extra == "complete"
174
176
  Requires-Dist: kafka-python~=2.1.0; extra == "complete"
175
- Requires-Dist: mlflow~=2.16; extra == "complete"
177
+ Requires-Dist: mlflow~=2.22; extra == "complete"
176
178
  Requires-Dist: msrest~=0.6.21; extra == "complete"
177
179
  Requires-Dist: oss2==2.18.1; extra == "complete"
178
180
  Requires-Dist: ossfs==2023.12.0; extra == "complete"
@@ -181,7 +183,7 @@ Requires-Dist: pyopenssl>=23; extra == "complete"
181
183
  Requires-Dist: redis~=4.3; extra == "complete"
182
184
  Requires-Dist: s3fs<2024.7,>=2023.9.2; extra == "complete"
183
185
  Requires-Dist: snowflake-connector-python~=3.7; extra == "complete"
184
- Requires-Dist: sqlalchemy~=1.4; extra == "complete"
186
+ Requires-Dist: sqlalchemy~=2.0; extra == "complete"
185
187
  Requires-Dist: taos-ws-py==0.3.2; extra == "complete"
186
188
  Provides-Extra: complete-api
187
189
  Requires-Dist: adlfs==2023.9.0; extra == "complete-api"
@@ -212,20 +214,22 @@ Requires-Dist: humanfriendly~=10.0; extra == "complete-api"
212
214
  Requires-Dist: igz-mgmt~=0.4.1; extra == "complete-api"
213
215
  Requires-Dist: kafka-python~=2.1.0; extra == "complete-api"
214
216
  Requires-Dist: memray~=1.12; sys_platform != "win32" and extra == "complete-api"
215
- Requires-Dist: mlflow~=2.16; extra == "complete-api"
217
+ Requires-Dist: mlflow~=2.22; extra == "complete-api"
216
218
  Requires-Dist: mlrun-pipelines-kfp-v1-8~=0.5.4; extra == "complete-api"
217
219
  Requires-Dist: msrest~=0.6.21; extra == "complete-api"
218
220
  Requires-Dist: objgraph~=3.6; extra == "complete-api"
219
221
  Requires-Dist: oss2==2.18.1; extra == "complete-api"
220
222
  Requires-Dist: ossfs==2023.12.0; extra == "complete-api"
221
223
  Requires-Dist: plotly~=5.23; extra == "complete-api"
224
+ Requires-Dist: psycopg2-binary~=2.9; extra == "complete-api"
222
225
  Requires-Dist: pydantic<2,>=1; extra == "complete-api"
223
226
  Requires-Dist: pymysql~=1.1; extra == "complete-api"
224
227
  Requires-Dist: pyopenssl>=23; extra == "complete-api"
225
228
  Requires-Dist: redis~=4.3; extra == "complete-api"
226
229
  Requires-Dist: s3fs<2024.7,>=2023.9.2; extra == "complete-api"
227
230
  Requires-Dist: snowflake-connector-python~=3.7; extra == "complete-api"
228
- Requires-Dist: sqlalchemy~=1.4; extra == "complete-api"
231
+ Requires-Dist: sqlalchemy-utils~=0.41.2; extra == "complete-api"
232
+ Requires-Dist: sqlalchemy~=2.0; extra == "complete-api"
229
233
  Requires-Dist: taos-ws-py==0.3.2; extra == "complete-api"
230
234
  Requires-Dist: timelength~=1.1; extra == "complete-api"
231
235
  Requires-Dist: uvicorn~=0.32.1; extra == "complete-api"
@@ -249,7 +253,7 @@ Dynamic: summary
249
253
  [![PyPI version fury.io](https://badge.fury.io/py/mlrun.svg)](https://pypi.python.org/pypi/mlrun/)
250
254
  [![Documentation](https://readthedocs.org/projects/mlrun/badge/?version=latest)](https://mlrun.readthedocs.io/en/latest/?badge=latest)
251
255
  [![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)
252
- ![GitHub commit activity](https://img.shields.io/github/commit-activity/w/mlrun/mlrun)
256
+ [![GitHub commit activity](https://img.shields.io/github/commit-activity/w/mlrun/mlrun)](https://github.com/mlrun/mlrun/commits/main)
253
257
  [![GitHub release (latest SemVer)](https://img.shields.io/github/v/release/mlrun/mlrun?sort=semver)](https://github.com/mlrun/mlrun/releases)
254
258
  [![Join MLOps Live](https://img.shields.io/badge/slack-join_chat-white.svg?logo=slack&style=social)](https://mlopslive.slack.com)
255
259
 
@@ -28,7 +28,7 @@ mlrun/common/helpers.py,sha256=DIdqs_eN3gO5bZ8iFobIvx8cEiOxYxhFIyut6-O69T0,1385
28
28
  mlrun/common/secrets.py,sha256=8g9xtIw-9DGcwiZRT62a5ozSQM-aYo8yK5Ghey9WM0g,5179
29
29
  mlrun/common/types.py,sha256=1gxThbmC0Vd0U1ffIkEwz4T4S7JOgHt70rvw8TCO21c,1073
30
30
  mlrun/common/db/__init__.py,sha256=kXGBqhLN0rlAx0kTXhozGzFsIdSqW0uTSKMmsLgq_is,569
31
- mlrun/common/db/sql_session.py,sha256=QuEDcxLtL9pBilNbx-jG7BEIj4TckNWjbrIkmKkM084,2707
31
+ mlrun/common/db/dialects.py,sha256=QN9bx7CTo32IIdJ2J3ZrsX8IUdp_BPxBtl0LyjMEC9g,868
32
32
  mlrun/common/formatters/__init__.py,sha256=au7S3M3wa9964RpQhFSvflk5-i5SWMeb3kek8Gvt4kg,889
33
33
  mlrun/common/formatters/artifact.py,sha256=NT2RPfReUGWVQ-Rhv1zexgXfLuKI5FvtneJDhihZK5g,1462
34
34
  mlrun/common/formatters/base.py,sha256=85vQ0t4ZfqCs8b8QV1RLfRcEvANztKvTvsa__sD3zTo,4099
@@ -55,7 +55,7 @@ mlrun/common/schemas/datastore_profile.py,sha256=jDva-_XTCU0EyExs5uB2PVR7-Tj4g4Z
55
55
  mlrun/common/schemas/events.py,sha256=LjAU7t-aNhkECbF_o2mzXiZ5mn4299d-_HOd20Xv6iQ,1025
56
56
  mlrun/common/schemas/feature_store.py,sha256=Kz7AWQ1RCPA8sTL9cGRZnfUBhWf4MX_5yyYswtCOcCk,4802
57
57
  mlrun/common/schemas/frontend_spec.py,sha256=tR8k78cppYK-X8kCWe0mz1gk8yqpsn2IxM3QmBdTJs8,2622
58
- mlrun/common/schemas/function.py,sha256=vBJjA-cCDqoyxKTPuINPX_asth2ZafUuwiA6ivgyR5o,4849
58
+ mlrun/common/schemas/function.py,sha256=HMBZFf46foDWnycciG1a4HASBbmnRDSlDRsREfa5Tog,4881
59
59
  mlrun/common/schemas/http.py,sha256=KozLgGV1vpNXQ8Qptr_Zm6BEbc2VcU42hSphe_ffe_A,704
60
60
  mlrun/common/schemas/hub.py,sha256=zYupE3yBKlVEAhHNb4rn9g5T63sekRUnI4Ql3v4a_c4,4118
61
61
  mlrun/common/schemas/k8s.py,sha256=YgyDK7KNt29GHCOxd1vw-jnl_757cIPLzViCTNT1Zcc,1403
@@ -63,7 +63,7 @@ mlrun/common/schemas/memory_reports.py,sha256=Q6w7xofQlMD-iqjE8uK9yU5ijLPkht_EsX
63
63
  mlrun/common/schemas/notification.py,sha256=Q-tBaU_V7YZiuj3ankuACf3_-hb874_osxq0eaW90Ww,5549
64
64
  mlrun/common/schemas/object.py,sha256=9g2bK3KUXmzhaGavbmpVf6rxDhquYogp8bb12dzP4XE,1982
65
65
  mlrun/common/schemas/pagination.py,sha256=8NEmiIkCXw5_sv-lE0MWgWz-WpxhSSn-vBtbPDBOGXc,899
66
- mlrun/common/schemas/partition.py,sha256=crl61DzS-9i5rCyHUbjtpTCk03lluxfb2dS0o1gdLH4,5920
66
+ mlrun/common/schemas/partition.py,sha256=8T-1nfA-SdzkS8dv48NaYHVEUQOcZpaxE7mWrD8Ra68,6357
67
67
  mlrun/common/schemas/pipeline.py,sha256=lzaNHudyOiSMpNsoKoNMZhJJa2R4wdA4TR8A98L_yIo,996
68
68
  mlrun/common/schemas/project.py,sha256=9O9Wa2PkRy74WzwiLd7A6jBXf4FJxQMJj9wh2VpsZao,6138
69
69
  mlrun/common/schemas/regex.py,sha256=r-phg_9ge1lFraPCQd_wpnYGQ1oOCj3xChycJxZtIQY,775
@@ -78,7 +78,7 @@ mlrun/common/schemas/model_monitoring/__init__.py,sha256=Hx3IxW63edOSLxMnbQpY3Yv
78
78
  mlrun/common/schemas/model_monitoring/constants.py,sha256=yjTaSGiRs0zYIE20QSuJuMNnS5iuJpnV1wBiq7leVpg,13238
79
79
  mlrun/common/schemas/model_monitoring/functions.py,sha256=OKBt029ap6dD-1pFTN4z1u7IkRpiH0HCjbrJoAWUFnE,2123
80
80
  mlrun/common/schemas/model_monitoring/grafana.py,sha256=THQlLfPBevBksta8p5OaIsBaJtsNSXexLvHrDxOaVns,2095
81
- mlrun/common/schemas/model_monitoring/model_endpoints.py,sha256=YYFai89qBTnKM8dSUncVD25uwz8QdcTLrEb7vMefyTc,12391
81
+ mlrun/common/schemas/model_monitoring/model_endpoints.py,sha256=1Wd67iFCeVB7Ggo1jgkwaGSHH87z9P4crVafDWBhevc,12647
82
82
  mlrun/data_types/__init__.py,sha256=wdxGS1PTnaKXiNZ7PYGxxo86OifHH7NYoArIjDJksLA,1054
83
83
  mlrun/data_types/data_types.py,sha256=0_oKLC6-sXL2_nnaDMP_HSXB3fD1nJAG4J2Jq6sGNNw,4998
84
84
  mlrun/data_types/infer.py,sha256=F_dW7oR6jrhdONzTl4ngeGh9x7twHdpUJBd2xMVA1Vw,6476
@@ -104,17 +104,18 @@ mlrun/datastore/spark_utils.py,sha256=dn0RWpYzee-M8UZw-NVuHAdqlNAZ7VO-fNtI8ZiDky
104
104
  mlrun/datastore/store_resources.py,sha256=s2794zqkzy_mjRMvRedDNs_tycTLoF8wxTqsWRQphCE,6839
105
105
  mlrun/datastore/storeytargets.py,sha256=OwWw-YrrtrnKpbUSP8tgWyKusArsLygkD365X7TT4yE,6500
106
106
  mlrun/datastore/targets.py,sha256=pxVHBAmGDkyQAnqXvc__qjgOG_ctVc-_oZ_g1QDai_g,79121
107
- mlrun/datastore/utils.py,sha256=Xt7YSFOmSZgxaB01I_tjF2WTR6NsNf_p4hSxNgXtt1c,10853
107
+ mlrun/datastore/utils.py,sha256=uQ2v_bZshITpy3Sm3_wcP4_cthWDeGAEaTJuvQ60NG0,10830
108
108
  mlrun/datastore/v3io.py,sha256=QSYBORRLcJTeM9mt0EaWzyLcdmzrPkqrF7k5uLTam5U,8209
109
109
  mlrun/datastore/vectorstore.py,sha256=k-yom5gfw20hnVG0Rg7aBEehuXwvAloZwn0cx0VGals,11708
110
110
  mlrun/datastore/wasbfs/__init__.py,sha256=s5Ul-0kAhYqFjKDR2X0O2vDGDbLQQduElb32Ev56Te4,1343
111
111
  mlrun/datastore/wasbfs/fs.py,sha256=ge8NK__5vTcFT-krI155_8RDUywQw4SIRX6BWATXy9Q,6299
112
- mlrun/db/__init__.py,sha256=WqJ4x8lqJ7ZoKbhEyFqkYADd9P6E3citckx9e9ZLcIU,1163
112
+ mlrun/db/__init__.py,sha256=K8Cy-MF7gasYnT7gfE9zP5eOAQV-USsJtuVJRXYQI6s,1187
113
113
  mlrun/db/auth_utils.py,sha256=hpg8D2r82oN0BWabuWN04BTNZ7jYMAF242YSUpK7LFM,5211
114
114
  mlrun/db/base.py,sha256=koY7QLXBOtpkAphllC3z5wIVJ1EtHcXRTdvQ0uzIxko,30734
115
115
  mlrun/db/factory.py,sha256=yP2vVmveUE7LYTCHbS6lQIxP9rW--zdISWuPd_I3d_4,2111
116
116
  mlrun/db/httpdb.py,sha256=8OCLIp--B1Z_AH98XhCm84Ls22IqXLO4OWkd7oQ4UQc,234747
117
117
  mlrun/db/nopdb.py,sha256=vKYszH4G3Fn5zBx3ri_e7r89k95obDtQfEAi2tDDF1w,27308
118
+ mlrun/db/sql_types.py,sha256=g-gmiRNr0SpFyV0wPFfvLpB7hB0jdW-WFkt-aoQ_qIU,5399
118
119
  mlrun/feature_store/__init__.py,sha256=SlI845bWt6xX34SXunHHqhmFAR9-5v2ak8N-qpcAPGo,1328
119
120
  mlrun/feature_store/api.py,sha256=qKj5Tk6prTab6XWatWhBuPRVp0eJEctoxRMN2wz48vA,32168
120
121
  mlrun/feature_store/common.py,sha256=JlQA7XWkg9fLuw7cXFmWpUneQqM3NBhwv7DU_xlenWI,12819
@@ -202,8 +203,8 @@ mlrun/frameworks/sklearn/mlrun_interface.py,sha256=Lk1MKzP7d72R6_1PTWO5pKY1VUEHc
202
203
  mlrun/frameworks/sklearn/model_handler.py,sha256=m7ohGO8sphuVU0vZAzVNBATY0WNUpQb_SmaO6xkZx8U,4752
203
204
  mlrun/frameworks/sklearn/utils.py,sha256=OPyuXOwod6Tjs5PcIStwtwZfIaQk-hL9wzNbjQ29LuU,1208
204
205
  mlrun/frameworks/tf_keras/__init__.py,sha256=M2sMbYHLrlF-KFR5kvA9mevRo3Nf8U0B5a_DM9rzwCY,10484
205
- mlrun/frameworks/tf_keras/mlrun_interface.py,sha256=r5M01twRT0BjwD0r0Lbasxso_cmCGkAAuKqR5aLbDIg,16616
206
- mlrun/frameworks/tf_keras/model_handler.py,sha256=5mhmQMwCvAUcUvALz4EAJfOe-G5tWMZxKLi-ZqBQ9TQ,31101
206
+ mlrun/frameworks/tf_keras/mlrun_interface.py,sha256=Rd4DY-TRGh3ZErmT9vzwDsXUjeetb3vgUWmXdyG77Mc,16718
207
+ mlrun/frameworks/tf_keras/model_handler.py,sha256=4Ri5SLL0Xl_9I4faYYq9yxSOhDxNKzBz6SmuqKX1v7o,32135
207
208
  mlrun/frameworks/tf_keras/model_server.py,sha256=60iJRl_9ZYPCzxdfiJM_-BtECKZZTOKWBJ36O-GLjEc,9652
208
209
  mlrun/frameworks/tf_keras/utils.py,sha256=Z8hA1CgpSJWLC_T6Ay7xZKVyWlX9B85MSmQr2biXRag,4582
209
210
  mlrun/frameworks/tf_keras/callbacks/__init__.py,sha256=sd8aWG2jO9mO_noZca0ReVf8X6fSCqO_di1Z-mT8FH8,742
@@ -215,10 +216,10 @@ mlrun/frameworks/xgboost/mlrun_interface.py,sha256=KINOf0udbY75raTewjEFGNlIRyE0e
215
216
  mlrun/frameworks/xgboost/model_handler.py,sha256=bJq4D1VK3rzhALovqIV5mS0LvGiTlsgAkHanD25pU2c,11663
216
217
  mlrun/frameworks/xgboost/utils.py,sha256=4rShiFChzDbWJ4HoTo4qV_lj-Z89pHBAp6Z1yHmU8wA,1068
217
218
  mlrun/launcher/__init__.py,sha256=JL8qkT1lLr1YvW6iP0hmwDTaSR2RfrMDx0-1gWRhTOE,571
218
- mlrun/launcher/base.py,sha256=gaqBvrL-Yg4uXSM7bqbS_LR6H_Fm5jeRdF-81Sx1wUw,16707
219
- mlrun/launcher/client.py,sha256=cl40ZdF2fU1QbUKdl4Xnucb1u2h-8_dn095qIUyxbuM,6402
219
+ mlrun/launcher/base.py,sha256=8iicRQsfUF5cghEm7AaBP0Wt9MUmYMWXr9E66s6IZsI,16673
220
+ mlrun/launcher/client.py,sha256=U1_brEqqsol8HQFbqCZOSM9tD5oVk2JrP0xCLXME6XE,6368
220
221
  mlrun/launcher/factory.py,sha256=RW7mfzEFi8fR0M-4W1JQg1iq3_muUU6OTqT_3l4Ubrk,2338
221
- mlrun/launcher/local.py,sha256=BCA8WQc8yDjJTzktyBRxB0iiWEXOW7832tV9KsAZkn8,11639
222
+ mlrun/launcher/local.py,sha256=9XEkWSRYokXbtL1d_XEH3yTNU2fQXX7JcUwfC_8NG_4,11457
222
223
  mlrun/launcher/remote.py,sha256=GYXsxVIwcUZ1V-cv2R3Yk4nSoUeAtRurEawrUN3AkEE,7715
223
224
  mlrun/model_monitoring/__init__.py,sha256=2zigVN5JUnOhRcqGBd4gj0ctubVlyEvxmxXix0De5GQ,709
224
225
  mlrun/model_monitoring/api.py,sha256=lAsUp-gzqw8D1cpHVGA2_nPMYn5R4jdxk9UaGOiQ8fE,25945
@@ -229,8 +230,8 @@ mlrun/model_monitoring/stream_processing.py,sha256=Gu3TQzYoNjbreZYI73-F49QpYrod9
229
230
  mlrun/model_monitoring/writer.py,sha256=rGRFzSOkqZWvD3Y6sVk2H1Gepfnkzkp9ce00PsApTLo,8288
230
231
  mlrun/model_monitoring/applications/__init__.py,sha256=MaH_n4GiqqQvSkntM5yQ7_FCANtM_IfgK-IJTdo4G_E,757
231
232
  mlrun/model_monitoring/applications/_application_steps.py,sha256=t9LDIqQUGE10cyjyhlg0QqN1yVx0apD1HpERYLJfm8U,7409
232
- mlrun/model_monitoring/applications/base.py,sha256=NBIlsiB91it__JfUQHL28YzIzkCg-Enb8w4oYCZlkYs,31528
233
- mlrun/model_monitoring/applications/context.py,sha256=VfyPCIdO4z73uqFcJs87jzSI4PatX5N5Xicg8Ye1Bag,16968
233
+ mlrun/model_monitoring/applications/base.py,sha256=MIJujitxyzFBVIYVBR3otp8l-p5FuD06rsVxJpJyQvc,32413
234
+ mlrun/model_monitoring/applications/context.py,sha256=fAGFNCyNhSnVJPSIeJxv-XmEL2JhDmjK5Ouog9qyvdc,17035
234
235
  mlrun/model_monitoring/applications/histogram_data_drift.py,sha256=2qgfFmrpHf-x0_EaHD-0T28piwSQzw-HH71aV1GwbZs,15389
235
236
  mlrun/model_monitoring/applications/results.py,sha256=_qmj6TWT0SR2bi7gUyRKBU418eGgGoLW2_hTJ7S-ock,5782
236
237
  mlrun/model_monitoring/applications/evidently/__init__.py,sha256=-DqdPnBSrjZhFvKOu_Ie3MiFvlur9sPTZpZ1u0_1AE8,690
@@ -272,42 +273,42 @@ mlrun/platforms/__init__.py,sha256=ZuyeHCHHUxYEoZRmaJqzFSfwhaTyUdBZXMeVp75ql1w,3
272
273
  mlrun/platforms/iguazio.py,sha256=6VBTq8eQ3mzT96tzjYhAtcMQ2VjF4x8LpIPW5DAcX2Q,13749
273
274
  mlrun/projects/__init__.py,sha256=0Krf0WIKfnZa71WthYOg0SoaTodGg3sV_hK3f_OlTPI,1220
274
275
  mlrun/projects/operations.py,sha256=Aa3qDjEOLI1JTvm3t3VaESeJ4511e_vOR-GqVaEy-yI,20237
275
- mlrun/projects/pipelines.py,sha256=R2b4zD6y9GSHFUmCPf5fr0ymx3QZTqg_axpp9mRaTPQ,49373
276
- mlrun/projects/project.py,sha256=RsCLXAThD65lKKfUO4PgzitQBD9eW0vP4sm3X-50S9Q,250007
276
+ mlrun/projects/pipelines.py,sha256=qNWB6_E-cLp7S2kWUmPQRom9dnF5MHW5ybRX4-m6qd0,49445
277
+ mlrun/projects/project.py,sha256=UT4DUC2qbJz3lrMykLc_UwtnpXgCZXbCFY_2Ym_qfJM,249447
277
278
  mlrun/runtimes/__init__.py,sha256=J9Sy2HiyMlztNv6VUurMzF5H2XzttNil8nRsWDsqLyg,8923
278
279
  mlrun/runtimes/base.py,sha256=FmjyXA5MhOUOe8TxNpC3p8nc_IwGGaC2ZPrgTylzFXk,37325
279
- mlrun/runtimes/daskjob.py,sha256=1VYia3vEgArKNyZe-yJjQevhifWO6F5gP1Q44y-tKqk,19842
280
+ mlrun/runtimes/daskjob.py,sha256=RQNX0TJn77mT32CKNoNg2hE0Rk758QVTGpJdxk3xW_k,19776
280
281
  mlrun/runtimes/funcdoc.py,sha256=zRFHrJsV8rhDLJwoUhcfZ7Cs0j-tQ76DxwUqdXV_Wyc,9810
281
282
  mlrun/runtimes/function_reference.py,sha256=fnMKUEieKgy4JyVLhFpDtr6JvKgOaQP8F_K2H3-Pk9U,5030
282
283
  mlrun/runtimes/generators.py,sha256=X8NDlCEPveDDPOHtOGcSpbl3pAVM3DP7fuPj5xVhxEY,7290
283
- mlrun/runtimes/kubejob.py,sha256=wadCzmSgjv9OU_Ax8CQNHfXLo0v-ev9ZGHUFGcNc9Qw,8577
284
+ mlrun/runtimes/kubejob.py,sha256=BuAyuXacwH4gh063kPvPgsMKix7dolef2tVZXodiHHA,8496
284
285
  mlrun/runtimes/local.py,sha256=yedo3R1c46cB1mX7aOz8zORXswQPvX86U-_fYxXoqTY,22717
285
286
  mlrun/runtimes/mounts.py,sha256=2dkoktm3TXHe4XHmRhvC0UfvWzq2vy_13MeaW7wgyPo,18735
286
- mlrun/runtimes/pod.py,sha256=YwkvZQqj2M2YP1xg6ECbzh-DfcBo9rJCIJxfrsJ8thA,51711
287
- mlrun/runtimes/remotesparkjob.py,sha256=IMnolOY6jh1xMrCtxs-awUqQoWVgRpan4l0b91vUqdI,7693
287
+ mlrun/runtimes/pod.py,sha256=_jfzCIJ5Gi6AWT-5nYFYfpX3zodQLVU79Rqa2vFAECY,51619
288
+ mlrun/runtimes/remotesparkjob.py,sha256=s3iknqndUMSDvZNZ7w_uO9mO-gmasexN5x9KALeCHfw,7627
288
289
  mlrun/runtimes/utils.py,sha256=VFKA7dWuILAcJGia_7Pw_zBBG00wZlat7o2N6u5EItw,16284
289
290
  mlrun/runtimes/databricks_job/__init__.py,sha256=kXGBqhLN0rlAx0kTXhozGzFsIdSqW0uTSKMmsLgq_is,569
290
291
  mlrun/runtimes/databricks_job/databricks_cancel_task.py,sha256=ufjcLKA5E6FSDF5CXm5l8uP_mUSFppwr5krLHln1kAU,2243
291
292
  mlrun/runtimes/databricks_job/databricks_runtime.py,sha256=YOJdyIEeKbn9jKxSciciZE9RBsH7Hbk7n-3T_8NLz_w,12810
292
293
  mlrun/runtimes/databricks_job/databricks_wrapper.py,sha256=jD1T36pRmSFRGgJVGRzccYJxwYH8eVze_FJrF2aSS-g,8682
293
294
  mlrun/runtimes/mpijob/__init__.py,sha256=6sUPQRFwigi4mqjDVZmRE-qgaLw2ILY5NbneVUuMKto,947
294
- mlrun/runtimes/mpijob/abstract.py,sha256=AJYRF4Jv0NWTUY3buri3XbbrXqC1ZMPHKLKUMcqWRW8,9237
295
- mlrun/runtimes/mpijob/v1.py,sha256=k04Tu1Y58tt9qckyR6Kd8l6O3fLMGvGyAzxYunfpqEE,3201
295
+ mlrun/runtimes/mpijob/abstract.py,sha256=a07c6AvNLQ51dVkiCyeeQUruST1UQNuWi3sIvPMESPo,9171
296
+ mlrun/runtimes/mpijob/v1.py,sha256=Rf8tvntkOv7RLN0TEk5XOY4QCdWMHu_sSmVgBViGcfU,3135
296
297
  mlrun/runtimes/nuclio/__init__.py,sha256=gx1kizzKv8pGT5TNloN1js1hdbxqDw3rM90sLVYVffY,794
297
298
  mlrun/runtimes/nuclio/api_gateway.py,sha256=vH9ClKVP4Mb24rvA67xPuAvAhX-gAv6vVtjVxyplhdc,26969
298
- mlrun/runtimes/nuclio/function.py,sha256=odONJcrnhZT8veIyvd3ygEfUxP0cDEceTfP91mg1XAA,54352
299
+ mlrun/runtimes/nuclio/function.py,sha256=K8M9zFR0j28Tfqu6xrwd6MWb2Sh1IZZVoaNYZectG8g,54286
299
300
  mlrun/runtimes/nuclio/nuclio.py,sha256=sLK8KdGO1LbftlL3HqPZlFOFTAAuxJACZCVl1c0Ha6E,2942
300
- mlrun/runtimes/nuclio/serving.py,sha256=GEQG7K5q5RWvXpvko0aQWCKdnu9SSIJzpbd1PD71ldY,34673
301
+ mlrun/runtimes/nuclio/serving.py,sha256=tXAobNFmUsTztvT-gX0UMdAmW150Kdl7rzdgLFlLiro,32804
301
302
  mlrun/runtimes/nuclio/application/__init__.py,sha256=rRs5vasy_G9IyoTpYIjYDafGoL6ifFBKgBtsXn31Atw,614
302
303
  mlrun/runtimes/nuclio/application/application.py,sha256=3WeVCeVUb6U5wJDVJSuTDzJ-Pcr3ifg08E4gKIEIkmo,28945
303
304
  mlrun/runtimes/nuclio/application/reverse_proxy.go,sha256=lEHH74vr2PridIHp1Jkc_NjkrWb5b6zawRrNxHQhwGU,2913
304
305
  mlrun/runtimes/sparkjob/__init__.py,sha256=GPP_ekItxiU9Ydn3mJa4Obph02Bg6DO-JYs791_MV58,607
305
- mlrun/runtimes/sparkjob/spark3job.py,sha256=5TdmQy5yDBtaq9y9fQGrNYTJ_0UqR9VnV7-SGiZEOyc,41287
306
+ mlrun/runtimes/sparkjob/spark3job.py,sha256=FmrfR1lTVeH_F3YOx8vj16QsvN3b7veUS00_3d3PslI,41221
306
307
  mlrun/serving/__init__.py,sha256=1MjUInuyxsF-dTHZuKelq2XLhg2GInH9LjAY3PcWEzs,1364
307
308
  mlrun/serving/merger.py,sha256=pfOQoozUyObCTpqXAMk94PmhZefn4bBrKufO3MKnkAc,6193
308
309
  mlrun/serving/remote.py,sha256=Igha2FipK3-6rV_PZ1K464kTbiTu8rhc6SMm-HiEJ6o,18817
309
310
  mlrun/serving/routers.py,sha256=SY6AsaiSnh8ssXq8hQE2z9MYapOxFOFJBx9QomiZMO8,53915
310
- mlrun/serving/server.py,sha256=SkpT2qBZ0GdBh4j5KrGwzsnAiDL2wGxwoHls9Z0Nc8M,32504
311
+ mlrun/serving/server.py,sha256=WCJqggfZHTa4qbw1UopoZq_X2g-LutPjp35wGfxDsyI,29790
311
312
  mlrun/serving/serving_wrapper.py,sha256=UL9hhWCfMPcTJO_XrkvNaFvck1U1E7oS8trTZyak0cA,835
312
313
  mlrun/serving/states.py,sha256=mSj9n7BT0CWBD02MTXWBzGr4hwj0Zef9dFUkQY5aXWg,91248
313
314
  mlrun/serving/system_steps.py,sha256=lVdv6OVInLxKO1RgMLuKw0zeeMAlLWcgNtKW_tYw9jY,16334
@@ -325,7 +326,7 @@ mlrun/utils/azure_vault.py,sha256=IEFizrDGDbAaoWwDr1WoA88S_EZ0T--vjYtY-i0cvYQ,34
325
326
  mlrun/utils/clones.py,sha256=qbAGyEbSvlewn3Tw_DpQZP9z6MGzFhSaZfI1CblX8Fg,7515
326
327
  mlrun/utils/condition_evaluator.py,sha256=-nGfRmZzivn01rHTroiGY4rqEv8T1irMyhzxEei-sKc,1897
327
328
  mlrun/utils/db.py,sha256=UIYDPHvPxim8tpjeD4S2QbfTx9Bhe-VqUQjqYTRHFuo,2185
328
- mlrun/utils/helpers.py,sha256=c0U2Cjmwvlskm1DA53pZhSRaZfDMpEnBsQR3hRCLKlY,80444
329
+ mlrun/utils/helpers.py,sha256=DS_SEUupY6KkLO-ejHiVLPdAEKBvanoxifIng2d9qwg,80448
329
330
  mlrun/utils/http.py,sha256=5ZU2VpokaUM_DT3HBSqTm8xjUqTPjZN5fKkSIvKlTl0,8704
330
331
  mlrun/utils/logger.py,sha256=RG0m1rx6gfkJ-2C1r_p41MMpPiaDYqaYM2lYHDlNZEU,14767
331
332
  mlrun/utils/regex.py,sha256=FcRwWD8x9X3HLhCCU2F0AVKTFah784Pr7ZAe3a02jw8,5199
@@ -344,11 +345,11 @@ mlrun/utils/notifications/notification/mail.py,sha256=ZyJ3eqd8simxffQmXzqd3bgbAq
344
345
  mlrun/utils/notifications/notification/slack.py,sha256=kfhogR5keR7Zjh0VCjJNK3NR5_yXT7Cv-x9GdOUW4Z8,7294
345
346
  mlrun/utils/notifications/notification/webhook.py,sha256=zxh8CAlbPnTazsk6r05X5TKwqUZVOH5KBU2fJbzQlG4,5330
346
347
  mlrun/utils/version/__init__.py,sha256=YnzE6tlf24uOQ8y7Z7l96QLAI6-QEii7-77g8ynmzy0,613
347
- mlrun/utils/version/version.json,sha256=u88EOlGPvf0X2l1xROyF0kP-OWIGQh_LaAXZjrww6Bk,89
348
+ mlrun/utils/version/version.json,sha256=un2xBgcuKWkNshBYY5GPOv_fh0i0DOb22tFDow1dBx0,89
348
349
  mlrun/utils/version/version.py,sha256=M2hVhRrgkN3SxacZHs3ZqaOsqAA7B6a22ne324IQ1HE,1877
349
- mlrun-1.10.0rc8.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
350
- mlrun-1.10.0rc8.dist-info/METADATA,sha256=cVBX84tyD5g6e4hF9lZpKDl3cooN1TDLxPV2oaTBwRQ,25869
351
- mlrun-1.10.0rc8.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
352
- mlrun-1.10.0rc8.dist-info/entry_points.txt,sha256=1Owd16eAclD5pfRCoJpYC2ZJSyGNTtUr0nCELMioMmU,46
353
- mlrun-1.10.0rc8.dist-info/top_level.txt,sha256=NObLzw3maSF9wVrgSeYBv-fgnHkAJ1kEkh12DLdd5KM,6
354
- mlrun-1.10.0rc8.dist-info/RECORD,,
350
+ mlrun-1.10.0rc9.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
351
+ mlrun-1.10.0rc9.dist-info/METADATA,sha256=4UfqJ9LgtS6kt03USPp2kEAuiROPdk7BI0cYQaCVSoE,26150
352
+ mlrun-1.10.0rc9.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
353
+ mlrun-1.10.0rc9.dist-info/entry_points.txt,sha256=1Owd16eAclD5pfRCoJpYC2ZJSyGNTtUr0nCELMioMmU,46
354
+ mlrun-1.10.0rc9.dist-info/top_level.txt,sha256=NObLzw3maSF9wVrgSeYBv-fgnHkAJ1kEkh12DLdd5KM,6
355
+ mlrun-1.10.0rc9.dist-info/RECORD,,
@@ -1,79 +0,0 @@
1
- # Copyright 2023 Iguazio
2
- #
3
- # Licensed under the Apache License, Version 2.0 (the "License");
4
- # you may not use this file except in compliance with the License.
5
- # You may obtain a copy of the License at
6
- #
7
- # http://www.apache.org/licenses/LICENSE-2.0
8
- #
9
- # Unless required by applicable law or agreed to in writing, software
10
- # distributed under the License is distributed on an "AS IS" BASIS,
11
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
- # See the License for the specific language governing permissions and
13
- # limitations under the License.
14
-
15
- from sqlalchemy import create_engine
16
- from sqlalchemy.engine import Engine
17
- from sqlalchemy.orm import Session
18
- from sqlalchemy.orm import (
19
- sessionmaker as SessionMaker, # noqa: N812 - `sessionmaker` is a class
20
- )
21
-
22
- from mlrun.config import config
23
-
24
- # TODO: wrap the following functions in a singleton class
25
- _engines: dict[str, Engine] = {}
26
- _session_makers: dict[str, SessionMaker] = {}
27
-
28
-
29
- # doing lazy load to allow tests to initialize the engine
30
- def get_engine(dsn=None) -> Engine:
31
- global _engines
32
- dsn = dsn or config.httpdb.dsn
33
- if dsn not in _engines:
34
- _init_engine(dsn=dsn)
35
- return _engines[dsn]
36
-
37
-
38
- def create_session(dsn=None) -> Session:
39
- session_maker = _get_session_maker(dsn=dsn)
40
- return session_maker()
41
-
42
-
43
- # doing lazy load to allow tests to initialize the engine
44
- def _get_session_maker(dsn) -> SessionMaker:
45
- global _session_makers
46
- dsn = dsn or config.httpdb.dsn
47
- if dsn not in _session_makers:
48
- _init_session_maker(dsn=dsn)
49
- return _session_makers[dsn]
50
-
51
-
52
- # TODO: we accept the dsn here to enable tests to override it, the "right" thing will be that config will be easily
53
- # overridable by tests (today when you import the config it is already being initialized.. should be lazy load)
54
- def _init_engine(dsn=None):
55
- global _engines
56
- dsn = dsn or config.httpdb.dsn
57
- kwargs = {}
58
- if "mysql" in dsn:
59
- pool_size = config.httpdb.db.connections_pool_size
60
- if pool_size is None:
61
- pool_size = config.httpdb.max_workers
62
- max_overflow = config.httpdb.db.connections_pool_max_overflow
63
- if max_overflow is None:
64
- max_overflow = config.httpdb.max_workers
65
-
66
- kwargs = {
67
- "pool_size": pool_size,
68
- "max_overflow": max_overflow,
69
- "pool_pre_ping": config.httpdb.db.connections_pool_pre_ping,
70
- "pool_recycle": config.httpdb.db.connections_pool_recycle,
71
- }
72
- engine = create_engine(dsn, **kwargs)
73
- _engines[dsn] = engine
74
- _init_session_maker(dsn=dsn)
75
-
76
-
77
- def _init_session_maker(dsn):
78
- global _session_makers
79
- _session_makers[dsn] = SessionMaker(bind=get_engine(dsn=dsn))