mlrun 1.8.0rc32__py3-none-any.whl → 1.8.0rc33__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mlrun might be problematic.

mlrun/__main__.py CHANGED
@@ -19,7 +19,7 @@ import socket
  import traceback
  import warnings
  from ast import literal_eval
- from base64 import b64decode, b64encode
+ from base64 import b64decode
  from os import environ, path, remove
  from pprint import pprint
 
@@ -298,7 +298,7 @@ def run(
  if url_file and path.isfile(url_file):
      with open(url_file) as fp:
          body = fp.read()
- based = b64encode(body.encode("utf-8")).decode("utf-8")
+ based = mlrun.utils.helpers.encode_user_code(body)
  logger.info(f"packing code at {url_file}")
  update_in(runtime, "spec.build.functionSourceCode", based)
  url = f"main{pathlib.Path(url_file).suffix} {url_args}"
@@ -557,7 +557,7 @@ def build(
  exit(1)
  with open(source) as fp:
      body = fp.read()
- based = b64encode(body.encode("utf-8")).decode("utf-8")
+ based = mlrun.utils.helpers.encode_user_code(body)
  logger.info(f"Packing code at {source}")
  b.functionSourceCode = based
  func.spec.command = ""
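The CLI call sites above no longer inline the base64 round-trip; they delegate to a shared helper. Based on the mlrun/utils/helpers.py hunk further down in this diff, the helper behaves roughly like the standalone sketch below (the 10000-byte threshold is the new config.function.spec.source_code_max_bytes default; the sketch itself is illustrative and not mlrun's actual code):

import base64

SOURCE_CODE_MAX_BYTES = 10000  # default added to the mlrun config in this release

def encode_user_code_sketch(user_code: str, max_len_warning: int = SOURCE_CODE_MAX_BYTES) -> str:
    # Same base64 round-trip the removed inline b64encode calls performed
    encoded = base64.b64encode(user_code.encode("utf-8")).decode("utf-8")
    if len(encoded) > max_len_warning:
        # The real helper logs a warning suggesting with_source_archive() for large sources
        print(f"warning: encoded code is {len(encoded)} bytes, above {max_len_warning}")
    return encoded

packed = encode_user_code_sketch("def handler(context):\n    return 'ok'\n")
print(packed[:40])  # base64 string that would be stored in spec.build.functionSourceCode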
mlrun/config.py CHANGED
@@ -30,7 +30,6 @@ import typing
  import warnings
  from collections.abc import Mapping
  from datetime import timedelta
- from distutils.util import strtobool
  from os.path import expanduser
  from threading import Lock
 
@@ -105,7 +104,7 @@ default_config = {
  # custom logger format, workes only with log_formatter: custom
  # Note that your custom format must include those 4 fields - timestamp, level, message and more
  "log_format_override": None,
- "submit_timeout": "180", # timeout when submitting a new k8s resource
+ "submit_timeout": "280", # timeout when submitting a new k8s resource
  # runtimes cleanup interval in seconds
  "runtimes_cleanup_interval": "300",
  "monitoring": {
@@ -267,6 +266,7 @@ default_config = {
  # When the module is reloaded, the maximum depth recursion configuration for the recursive reload
  # function is used to prevent infinite loop
  "reload_max_recursion_depth": 100,
+ "source_code_max_bytes": 10000,
  },
  "databricks": {
  "artifact_directory_path": "/mlrun_databricks_runtime/artifacts_dictionaries"
@@ -1472,17 +1472,6 @@ def _convert_resources_to_str(config: typing.Optional[dict] = None):
  resource_requirement[resource_type] = str(value)
 
 
- def _convert_str(value, typ):
-     if typ in (str, _none_type):
-         return value
-
-     if typ is bool:
-         return strtobool(value)
-
-     # e.g. int('8080') → 8080
-     return typ(value)
-
-
  def _configure_ssl_verification(verify_ssl: bool) -> None:
      """Configure SSL verification warnings based on the setting."""
      if not verify_ssl:
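Two defaults change in this config hunk: submit_timeout grows from 180 to 280 seconds, and a new source_code_max_bytes limit (10000) is added for inline function source. A hedged sketch of how these defaults surface through mlrun.mlconf; the environment-variable override in the comment is an assumption based on mlrun's usual MLRUN_-prefixed config mechanism:

import mlrun

# Defaults changed/introduced in this release, read from the runtime config object
print(mlrun.mlconf.submit_timeout)                       # "280" (was "180")
print(mlrun.mlconf.function.spec.source_code_max_bytes)  # 10000, consumed by encode_user_code

# Assumed override path, following mlrun's MLRUN_-prefixed env var convention:
#   export MLRUN_SUBMIT_TIMEOUT=600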
@@ -88,19 +88,24 @@ def get_or_create_model_endpoint(
  # Generate a runtime database
  db_session = mlrun.get_run_db()
  model_endpoint = None
+ if not function_name and context:
+     function_name = FunctionURI.from_string(
+         context.to_dict()["spec"]["function"]
+     ).function
  try:
-     if endpoint_id:
-         model_endpoint = db_session.get_model_endpoint(
-             project=project,
-             name=model_endpoint_name,
-             endpoint_id=endpoint_id,
-         )
-     # If other fields provided, validate that they are correspond to the existing model endpoint data
-     _model_endpoint_validations(
-         model_endpoint=model_endpoint,
-         model_path=model_path,
-         sample_set_statistics=sample_set_statistics,
-     )
+     model_endpoint = db_session.get_model_endpoint(
+         project=project,
+         name=model_endpoint_name,
+         endpoint_id=endpoint_id,
+         function_name=function_name,
+         function_tag=function_tag or "latest",
+     )
+     # If other fields provided, validate that they are correspond to the existing model endpoint data
+     _model_endpoint_validations(
+         model_endpoint=model_endpoint,
+         model_path=model_path,
+         sample_set_statistics=sample_set_statistics,
+     )
 
  except mlrun.errors.MLRunNotFoundError:
      # Create a new model endpoint with the provided details
@@ -361,10 +366,6 @@ def _generate_model_endpoint(
 
  :return `mlrun.common.schemas.ModelEndpoint` object.
  """
- if not function_name and context:
-     function_name = FunctionURI.from_string(
-         context.to_dict()["spec"]["function"]
-     ).function
  model_obj = None
  if model_path:
      model_obj: mlrun.artifacts.ModelArtifact = (
@@ -15,9 +15,4 @@
 
  from .base import ModelMonitoringApplicationBase
  from .context import MonitoringApplicationContext
- from .evidently_base import (
-     _HAS_EVIDENTLY,
-     SUPPORTED_EVIDENTLY_VERSION,
-     EvidentlyModelMonitoringApplicationBase,
- )
  from .results import ModelMonitoringApplicationMetric, ModelMonitoringApplicationResult
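The endpoint lookup above changes in two ways: the function name is now resolved from the run context up front (it previously happened inside _generate_model_endpoint), and get_model_endpoint is always called with function_name and a function_tag that defaults to "latest". A usage sketch of the resulting lookup shape; the project, endpoint, and function names are hypothetical and only the keyword arguments shown in the hunk are taken from the diff:

import mlrun

db = mlrun.get_run_db()
model_endpoint = db.get_model_endpoint(
    project="my-project",           # hypothetical project name
    name="my-endpoint",             # hypothetical model endpoint name
    endpoint_id=None,               # the lookup no longer depends on an explicit endpoint_id
    function_name="my-serving-fn",  # resolved from context.spec.function when not passed
    function_tag="latest",          # new default when no tag is provided
)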
@@ -417,24 +417,26 @@ class ModelMonitoringApplicationBase(MonitoringApplicationToDict, ABC):
  allow providing a list of model_endpoint names or name for a single model_endpoint.
  Note: provide names retrieves the model all the active model endpoints using those
  names (cross function model endpoints)
- If provided, and ``sample_data`` is not, you have to provide also the ``start`` and
- ``end`` times of the data to analyze from the model endpoints.
+ If provided, and ``sample_data`` is not ``None``, you have to provide also the
+ ``start`` and ``end`` times of the data to analyze from the model endpoints.
  :param start: The start time of the endpoint's data, not included.
      If you want the model endpoint's data at ``start`` included, you need to subtract a
      small ``datetime.timedelta`` from it.
  :param end: The end time of the endpoint's data, included.
      Please note: when ``start`` and ``end`` are set, they create a left-open time interval
-     ("window") :math:`(\\text{start}, \\text{end}]` that excludes the endpoint's data at
-     ``start`` and includes the data at ``end``:
-     :math:`\\text{start} < t \\leq \\text{end}`, :math:`t` is the time taken in the
-     window's data.
+     ("window") :math:`(\\operatorname{start}, \\operatorname{end}]` that excludes the
+     endpoint's data at ``start`` and includes the data at ``end``:
+     :math:`\\operatorname{start} < t \\leq \\operatorname{end}`, :math:`t` is the time
+     taken in the window's data.
  :param base_period: The window length in minutes. If ``None``, the whole window from ``start`` to ``end``
      is taken. If an integer is specified, the application is run from ``start`` to ``end``
      in ``base_period`` length windows, except for the last window that ends at ``end`` and
      therefore may be shorter:
-     :math:`(\\text{start}, \\text{start} + \\text{base_period}],
-     (\\text{start} + \\text{base_period}, \\text{start} + 2\\cdot\\text{base_period}],
-     ..., (\\text{start} + m\\cdot\\text{base_period}, \\text{end}]`,
+     :math:`(\\operatorname{start}, \\operatorname{start} + \\operatorname{base\\_period}],
+     (\\operatorname{start} + \\operatorname{base\\_period},
+     \\operatorname{start} + 2\\cdot\\operatorname{base\\_period}],
+     ..., (\\operatorname{start} +
+     m\\cdot\\operatorname{base\\_period}, \\operatorname{end}]`,
  where :math:`m` is some positive integer.
 
  :returns: The output of the
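The base_period description above boils down to slicing the left-open interval (start, end] into fixed-length windows, with a possibly shorter final window. A small standalone sketch of that slicing, not the library's implementation:

from datetime import datetime, timedelta, timezone

def split_into_windows(start: datetime, end: datetime, base_period_minutes: int):
    # Build the left-open windows (start, end] the docstring describes
    windows = []
    window_start = start
    step = timedelta(minutes=base_period_minutes)
    while window_start < end:
        window_end = min(window_start + step, end)  # the last window may be shorter
        windows.append((window_start, window_end))
        window_start = window_end
    return windows

start = datetime(2025, 1, 1, tzinfo=timezone.utc)
end = datetime(2025, 1, 1, 2, 30, tzinfo=timezone.utc)
for window in split_into_windows(start, end, base_period_minutes=60):
    print(window)  # (00:00, 01:00], (01:00, 02:00], (02:00, 02:30]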
@@ -0,0 +1,19 @@
+ # Copyright 2025 Iguazio
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ from .base import (
+     _HAS_EVIDENTLY,
+     SUPPORTED_EVIDENTLY_VERSION,
+     EvidentlyModelMonitoringApplicationBase,
+ )
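This new file turns evidently support into a subpackage: the old applications/evidently_base.py module is dropped (see the __init__.py hunk above and the RECORD changes at the end of this diff, where applications/evidently/base.py carries the same hash the removed module had), and this __init__ re-exports its public names. A sketch of the import change for user code, assuming only what this diff shows:

# New location in 1.8.0rc33: the subpackage added above re-exports the class from .base
from mlrun.model_monitoring.applications.evidently import (
    EvidentlyModelMonitoringApplicationBase,
    SUPPORTED_EVIDENTLY_VERSION,
)

# Imports removed in this release would now fail, e.g.:
# from mlrun.model_monitoring.applications import EvidentlyModelMonitoringApplicationBase
# from mlrun.model_monitoring.applications.evidently_base import EvidentlyModelMonitoringApplicationBase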
@@ -13,6 +13,7 @@
  # limitations under the License.
  import asyncio
  from datetime import datetime, timedelta
+ from threading import Lock
  from typing import Callable, Literal, Optional, Union
 
  import pandas as pd
@@ -30,6 +31,9 @@ from mlrun.model_monitoring.db import TSDBConnector
  from mlrun.model_monitoring.helpers import get_invocations_fqn
  from mlrun.utils import logger
 
+ _connection = None
+ _connection_lock = Lock()
+
 
 
  class TDEngineConnector(TSDBConnector):
@@ -37,23 +41,18 @@ class TDEngineConnector(TSDBConnector):
  """
 
  type: str = mm_schemas.TSDBTarget.TDEngine
+ database = f"{tdengine_schemas._MODEL_MONITORING_DATABASE}_{mlrun.mlconf.system_id}"
 
  def __init__(
      self,
      project: str,
      profile: DatastoreProfile,
-     database: Optional[str] = None,
      **kwargs,
  ):
      super().__init__(project=project)
 
      self._tdengine_connection_profile = profile
-     self.database = (
-         database
-         or f"{tdengine_schemas._MODEL_MONITORING_DATABASE}_{mlrun.mlconf.system_id}"
-     )
 
-     self._connection = None
      self._init_super_tables()
 
      self._timeout = mlrun.mlconf.model_endpoint_monitoring.tdengine.timeout
@@ -61,9 +60,16 @@ class TDEngineConnector(TSDBConnector):
 
  @property
  def connection(self) -> TDEngineConnection:
-     if not self._connection:
-         self._connection = self._create_connection()
-     return self._connection
+     global _connection
+
+     if _connection:
+         return _connection
+
+     with _connection_lock:
+         if not _connection:
+             _connection = self._create_connection()
+
+     return _connection
 
  def _create_connection(self) -> TDEngineConnection:
      """Establish a connection to the TSDB server."""
@@ -99,7 +105,8 @@ class TDEngineConnector(TSDBConnector):
  """Create TDEngine supertables."""
  for table in self.tables:
      create_table_query = self.tables[table]._create_super_table_query()
-     self.connection.run(
+     conn = self.connection
+     conn.run(
          statements=create_table_query,
          timeout=self._timeout,
          retries=self._retries,
@@ -141,7 +141,8 @@ def get_stream_path(
  elif isinstance(
      profile, mlrun.datastore.datastore_profile.DatastoreProfileKafkaSource
  ):
-     stream_uri = f"kafka://{profile.brokers[0]}"
+     attributes = profile.attributes()
+     stream_uri = f"kafka://{attributes['brokers'][0]}"
  else:
      raise mlrun.errors.MLRunValueError(
          f"Received an unexpected stream profile type: {type(profile)}\n"
mlrun/runtimes/base.py CHANGED
@@ -16,7 +16,6 @@ import http
  import re
  import typing
  import warnings
- from base64 import b64encode
  from os import environ
  from typing import Callable, Optional, Union
 
@@ -795,9 +794,7 @@ class BaseRuntime(ModelObj):
      mlrun.runtimes.nuclio.serving.serving_subkind
  )
 
- self.spec.build.functionSourceCode = b64encode(body.encode("utf-8")).decode(
-     "utf-8"
- )
+ self.spec.build.functionSourceCode = mlrun.utils.helpers.encode_user_code(body)
  if with_doc:
      update_function_entry_points(self, body)
  return self
@@ -13,12 +13,13 @@
  # limitations under the License.
 
  from ast import FunctionDef, parse, unparse
- from base64 import b64decode, b64encode
+ from base64 import b64decode
  from typing import Callable, Optional, Union
 
  import mlrun
  import mlrun.runtimes.kubejob as kubejob
  import mlrun.runtimes.pod as pod
+ import mlrun.utils.helpers
  from mlrun.errors import MLRunInvalidArgumentError
  from mlrun.model import HyperParamOptions, RunObject
 
@@ -162,7 +163,7 @@ class DatabricksRuntime(kubejob.KubejobRuntime):
  if original_handler:
      decoded_code += f"\nresult = {original_handler}(**handler_arguments)\n"
  decoded_code += _return_artifacts_code
- return b64encode(decoded_code.encode("utf-8")).decode("utf-8")
+ return mlrun.utils.helpers.encode_user_code(decoded_code)
 
  def get_internal_parameters(self, runobj: RunObject):
      """
@@ -202,7 +203,7 @@ from mlrun.runtimes.databricks_job import databricks_wrapper
  def run_mlrun_databricks_job(context,task_parameters: dict, **kwargs):
      databricks_wrapper.run_mlrun_databricks_job(context, task_parameters, **kwargs)
  """
- wrap_code = b64encode(wrap_code).decode("utf-8")
+ wrap_code = mlrun.utils.helpers.encode_user_code(wrap_code)
  self.spec.build.functionSourceCode = wrap_code
  runspec.spec.handler = "run_mlrun_databricks_job"
 
@@ -13,11 +13,11 @@
  # limitations under the License.
 
  import os
- from base64 import b64encode
 
  from nuclio.build import mlrun_footer
 
  import mlrun
+ import mlrun.utils.helpers
 
  from ..model import ModelObj
  from ..utils import generate_object_uri
@@ -116,7 +116,7 @@ class FunctionReference(ModelObj):
  func = mlrun.new_function(
      self.name, kind=kind, image=self.image or default_image
  )
- data = b64encode(code.encode("utf-8")).decode("utf-8")
+ data = mlrun.utils.helpers.encode_user_code(code)
  func.spec.build.functionSourceCode = data
  if kind not in mlrun.runtimes.RuntimeKinds.nuclio_runtimes():
      func.spec.default_handler = "handler"
mlrun/runtimes/pod.py CHANGED
@@ -17,6 +17,7 @@ import os
  import re
  import time
  import typing
+ import warnings
  from collections.abc import Iterable
  from enum import Enum
 
@@ -703,29 +704,7 @@ class KubeResourceSpec(FunctionSpec):
      ),
      affinity_field_name=affinity_field_name,
  )
- # purge any affinity / anti-affinity preemption related configuration and enrich with preemptible tolerations
  elif self_preemption_mode == PreemptionModes.allow.value:
-     # remove preemptible anti-affinity
-     self._prune_affinity_node_selector_requirement(
-         generate_preemptible_node_selector_requirements(
-             NodeSelectorOperator.node_selector_op_not_in.value
-         ),
-         affinity_field_name=affinity_field_name,
-     )
-     # remove preemptible affinity
-     self._prune_affinity_node_selector_requirement(
-         generate_preemptible_node_selector_requirements(
-             NodeSelectorOperator.node_selector_op_in.value
-         ),
-         affinity_field_name=affinity_field_name,
-     )
-
-     # remove preemptible nodes constrain
-     self._prune_node_selector(
-         mlconf.get_preemptible_node_selector(),
-         node_selector_field_name=node_selector_field_name,
-     )
-
 
      # enrich with tolerations
      self._merge_tolerations(
@@ -1201,6 +1180,132 @@ class KubeResource(BaseRuntime):
  """
  self.spec.with_requests(mem, cpu, patch=patch)
 
+ def detect_preemptible_node_selector(
+     self, node_selector: dict[str, str]
+ ) -> list[str]:
+     """
+     Checks if any provided node selector matches the preemptible node selectors.
+     Issues a warning if a selector may be pruned at runtime depending on preemption mode.
+
+     :param node_selector: The user-provided node selector dictionary.
+     """
+     preemptible_node_selector = mlconf.get_preemptible_node_selector()
+
+     return [
+         f"'{key}': '{val}'"
+         for key, val in node_selector.items()
+         if preemptible_node_selector.get(key) == val
+     ]
+
+ def detect_preemptible_tolerations(
+     self, tolerations: list[k8s_client.V1Toleration]
+ ) -> list[str]:
+     """
+     Checks if any provided toleration matches preemptible tolerations.
+     Issues a warning if a toleration may be pruned at runtime depending on preemption mode.
+
+     :param tolerations: The user-provided list of tolerations.
+     """
+     preemptible_tolerations = [
+         k8s_client.V1Toleration(
+             key=toleration.get("key"),
+             value=toleration.get("value"),
+             effect=toleration.get("effect"),
+         )
+         for toleration in mlconf.get_preemptible_tolerations()
+     ]
+
+     def _format_toleration(toleration):
+         return f"'{toleration.key}'='{toleration.value}' (effect: '{toleration.effect}')"
+
+     return [
+         _format_toleration(toleration)
+         for toleration in tolerations
+         if toleration in preemptible_tolerations
+     ]
+
+ def detect_preemptible_affinity(self, affinity: k8s_client.V1Affinity) -> list[str]:
+     """
+     Checks if any provided affinity rules match preemptible affinity configurations.
+     Issues a warning if an affinity rule may be pruned at runtime depending on preemption mode.
+
+     :param affinity: The user-provided affinity object.
+     """
+
+     preemptible_affinity_terms = generate_preemptible_nodes_affinity_terms()
+     conflicting_affinities = []
+
+     if (
+         affinity
+         and affinity.node_affinity
+         and affinity.node_affinity.required_during_scheduling_ignored_during_execution
+     ):
+         user_terms = affinity.node_affinity.required_during_scheduling_ignored_during_execution.node_selector_terms
+         for user_term in user_terms:
+             user_expressions = {
+                 (expr.key, expr.operator, tuple(expr.values or []))
+                 for expr in user_term.match_expressions or []
+             }
+
+             for preemptible_term in preemptible_affinity_terms:
+                 preemptible_expressions = {
+                     (expr.key, expr.operator, tuple(expr.values or []))
+                     for expr in preemptible_term.match_expressions or []
+                 }
+
+                 # Ensure operators match and preemptible expressions are present
+                 common_exprs = user_expressions & preemptible_expressions
+                 if common_exprs:
+                     formatted = ", ".join(
+                         f"'{key} {operator} {list(values)}'"
+                         for key, operator, values in common_exprs
+                     )
+                     conflicting_affinities.append(formatted)
+     return conflicting_affinities
+
+ def raise_preemptible_warning(
+     self,
+     node_selector: typing.Optional[dict[str, str]],
+     tolerations: typing.Optional[list[k8s_client.V1Toleration]],
+     affinity: typing.Optional[k8s_client.V1Affinity],
+ ) -> None:
+     """
+     Detects conflicts and issues a single warning if necessary.
+
+     :param node_selector: The user-provided node selector dictionary.
+     :param tolerations: The user-provided list of tolerations.
+     :param affinity: The user-provided affinity object.
+     """
+     conflict_messages = []
+
+     if node_selector:
+         ns_conflicts = ", ".join(
+             self.detect_preemptible_node_selector(node_selector)
+         )
+         if ns_conflicts:
+             conflict_messages.append(f"Node selectors: {ns_conflicts}")
+
+     if tolerations:
+         tol_conflicts = ", ".join(self.detect_preemptible_tolerations(tolerations))
+         if tol_conflicts:
+             conflict_messages.append(f"Tolerations: {tol_conflicts}")
+
+     if affinity:
+         affinity_conflicts = ", ".join(self.detect_preemptible_affinity(affinity))
+         if affinity_conflicts:
+             conflict_messages.append(f"Affinity: {affinity_conflicts}")
+
+     if conflict_messages:
+         warning_componentes = "; \n".join(conflict_messages)
+         warnings.warn(
+             f"Warning: based on the preemptible node settings configured in your MLRun configuration,\n"
+             f"{warning_componentes}\n"
+             f" may be removed or adjusted at runtime.\n"
+             "This adjustment depends on the function's preemption mode. \n"
+             "The list of potential adjusted preemptible selectors can be viewed here: "
+             "mlrun.mlconf.get_preemptible_node_selector() and mlrun.mlconf.get_preemptible_tolerations()."
+         )
+
  def with_node_selection(
      self,
      node_name: typing.Optional[str] = None,
@@ -1209,19 +1314,14 @@ class KubeResource(BaseRuntime):
      tolerations: typing.Optional[list[k8s_client.V1Toleration]] = None,
  ):
      """
-     Enables to control on which k8s node the job will run
-
-     :param node_name: The name of the k8s node
-     :param node_selector: Label selector, only nodes with matching labels will be eligible to be picked
-     :param affinity: Expands the types of constraints you can express - see
-         https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#affinity-and-anti-affinity
-         for details
-     :param tolerations: Tolerations are applied to pods, and allow (but do not require) the pods to schedule
-         onto nodes with matching taints - see
-         https://kubernetes.io/docs/concepts/scheduling-eviction/taint-and-toleration
-         for details
+     Enables control over which Kubernetes node the job will run on.
 
+     :param node_name: The name of the Kubernetes node.
+     :param node_selector: Label selector, only nodes with matching labels will be eligible.
+     :param affinity: Defines scheduling constraints.
+     :param tolerations: Allows scheduling onto nodes with matching taints.
      """
+     # Apply values as before
      if node_name:
          self.spec.node_name = node_name
      if node_selector is not None:
@@ -1232,6 +1332,12 @@ class KubeResource(BaseRuntime):
      if tolerations is not None:
          self.spec.tolerations = tolerations
 
+     self.raise_preemptible_warning(
+         node_selector=self.spec.node_selector,
+         tolerations=self.spec.tolerations,
+         affinity=self.spec.affinity,
+     )
+
  def with_priority_class(self, name: typing.Optional[str] = None):
      """
      Enables to control the priority of the pod
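with_node_selection() now ends by calling raise_preemptible_warning(), so users are told when a selector, toleration, or affinity they set matches the cluster's preemptible-node configuration and may be pruned or adjusted depending on the function's preemption mode. A standalone sketch of the node-selector part of that check; the preemptible selector values here are made up, while the real ones come from mlrun.mlconf.get_preemptible_node_selector():

import warnings

preemptible_node_selector = {"node-lifecycle": "spot"}  # illustrative cluster configuration

def detect_preemptible_node_selector(node_selector: dict[str, str]) -> list[str]:
    # Report user selectors that exactly match a configured preemptible selector
    return [
        f"'{key}': '{val}'"
        for key, val in node_selector.items()
        if preemptible_node_selector.get(key) == val
    ]

conflicts = detect_preemptible_node_selector({"node-lifecycle": "spot", "zone": "us-east-1a"})
if conflicts:
    warnings.warn(
        f"Node selectors: {', '.join(conflicts)} may be removed or adjusted at runtime "
        "depending on the function's preemption mode."
    )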
mlrun/utils/helpers.py CHANGED
@@ -1329,7 +1329,11 @@ def get_handler_extended(
  def datetime_from_iso(time_str: str) -> Optional[datetime]:
      if not time_str:
          return
-     return parser.isoparse(time_str)
+     dt = parser.isoparse(time_str)
+     if dt.tzinfo is None:
+         dt = dt.replace(tzinfo=timezone.utc)
+     # ensure the datetime is in UTC, converting if necessary
+     return dt.astimezone(timezone.utc)
 
 
  def datetime_to_iso(time_obj: Optional[datetime]) -> Optional[str]:
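datetime_from_iso() now normalizes everything to UTC: naive timestamps are assumed to already be in UTC, and aware timestamps are converted. A standalone sketch of the same logic with example inputs:

from datetime import timezone
from dateutil import parser

def datetime_from_iso_sketch(time_str):
    # Mirrors the change above: naive -> tagged as UTC, aware -> converted to UTC
    if not time_str:
        return None
    dt = parser.isoparse(time_str)
    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)
    return dt.astimezone(timezone.utc)

print(datetime_from_iso_sketch("2025-01-01T12:00:00"))        # 2025-01-01 12:00:00+00:00
print(datetime_from_iso_sketch("2025-01-01T12:00:00+02:00"))  # 2025-01-01 10:00:00+00:00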
@@ -1459,6 +1463,16 @@ def str_to_timestamp(time_str: str, now_time: Timestamp = None):
      return Timestamp(time_str)
 
 
+ def str_to_bool(value: str) -> bool:
+     """Convert a string to a boolean value."""
+     value = value.lower()
+     if value in ("true", "1", "t", "y", "yes", "on"):
+         return True
+     if value in ("false", "0", "f", "n", "no", "off"):
+         return False
+     raise ValueError(f"invalid boolean value: {value}")
+
+
  def is_link_artifact(artifact):
      if isinstance(artifact, dict):
          return (
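str_to_bool() lands in the same release that drops the distutils.util.strtobool import and the strtobool-based _convert_str helper from config.py; unlike strtobool, it returns an actual bool rather than 0/1. A copy of the accepted spellings for illustration:

def str_to_bool(value: str) -> bool:
    # Same accepted spellings as the helper added above
    value = value.lower()
    if value in ("true", "1", "t", "y", "yes", "on"):
        return True
    if value in ("false", "0", "f", "n", "no", "off"):
        return False
    raise ValueError(f"invalid boolean value: {value}")

assert str_to_bool("Yes") is True
assert str_to_bool("off") is False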
@@ -2129,3 +2143,16 @@ def as_dict(data: typing.Union[dict, str]) -> dict:
      if isinstance(data, str):
          return json.loads(data)
      return data
+
+
+ def encode_user_code(
+     user_code: str, max_len_warning: typing.Optional[int] = None
+ ) -> str:
+     max_len_warning = max_len_warning or config.function.spec.source_code_max_bytes
+     encoded = base64.b64encode(user_code.encode("utf-8")).decode("utf-8")
+     if len(encoded) > max_len_warning:
+         logger.warning(
+             f"User code exceeds the maximum allowed size of {max_len_warning} bytes for non remote source. "
+             "Consider using `with_source_archive` to add user code as a remote source to the function."
+         )
+     return encoded
@@ -1,4 +1,4 @@
  {
- "git_commit": "c816b6df64e933458e71286173355dc766e50e68",
- "version": "1.8.0-rc32"
+ "git_commit": "35557735bf140b7a1333fb726dd47e5b48002bc0",
+ "version": "1.8.0-rc33"
  }
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: mlrun
- Version: 1.8.0rc32
+ Version: 1.8.0rc33
  Summary: Tracking and config of machine learning runs
  Home-page: https://github.com/mlrun/mlrun
  Author: Yaron Haviv
@@ -51,8 +51,8 @@ Requires-Dist: setuptools>=75.2
  Requires-Dist: deprecated~=1.2
  Requires-Dist: jinja2>=3.1.3,~=3.1
  Requires-Dist: orjson<4,>=3.9.15
- Requires-Dist: mlrun-pipelines-kfp-common~=0.3.9
- Requires-Dist: mlrun-pipelines-kfp-v1-8~=0.3.5; python_version < "3.11"
+ Requires-Dist: mlrun-pipelines-kfp-common~=0.3.11
+ Requires-Dist: mlrun-pipelines-kfp-v1-8~=0.3.7; python_version < "3.11"
  Requires-Dist: docstring_parser~=0.16
  Requires-Dist: aiosmtplib~=3.0
  Provides-Extra: s3
@@ -99,7 +99,7 @@ Requires-Dist: ossfs==2023.12.0; extra == "alibaba-oss"
  Requires-Dist: oss2==2.18.1; extra == "alibaba-oss"
  Provides-Extra: tdengine
  Requires-Dist: taos-ws-py==0.3.2; extra == "tdengine"
- Requires-Dist: taoswswrap~=0.3.0; extra == "tdengine"
+ Requires-Dist: taoswswrap~=0.3.2; extra == "tdengine"
  Provides-Extra: snowflake
  Requires-Dist: snowflake-connector-python~=3.7; extra == "snowflake"
  Provides-Extra: kfp18
@@ -119,7 +119,7 @@ Requires-Dist: timelength~=1.1; extra == "api"
  Requires-Dist: memray~=1.12; sys_platform != "win32" and extra == "api"
  Requires-Dist: aiosmtplib~=3.0; extra == "api"
  Requires-Dist: pydantic<2,>=1; extra == "api"
- Requires-Dist: mlrun-pipelines-kfp-v1-8[kfp]~=0.3.5; python_version < "3.11" and extra == "api"
+ Requires-Dist: mlrun-pipelines-kfp-v1-8[kfp]~=0.3.7; python_version < "3.11" and extra == "api"
  Requires-Dist: grpcio~=1.70.0; extra == "api"
  Provides-Extra: all
  Requires-Dist: adlfs==2023.9.0; extra == "all"
@@ -152,7 +152,7 @@ Requires-Dist: s3fs<2024.7,>=2023.9.2; extra == "all"
  Requires-Dist: snowflake-connector-python~=3.7; extra == "all"
  Requires-Dist: sqlalchemy~=1.4; extra == "all"
  Requires-Dist: taos-ws-py==0.3.2; extra == "all"
- Requires-Dist: taoswswrap~=0.3.0; extra == "all"
+ Requires-Dist: taoswswrap~=0.3.2; extra == "all"
  Provides-Extra: complete
  Requires-Dist: adlfs==2023.9.0; extra == "complete"
  Requires-Dist: aiobotocore<2.16,>=2.5.0; extra == "complete"
@@ -184,7 +184,7 @@ Requires-Dist: s3fs<2024.7,>=2023.9.2; extra == "complete"
  Requires-Dist: snowflake-connector-python~=3.7; extra == "complete"
  Requires-Dist: sqlalchemy~=1.4; extra == "complete"
  Requires-Dist: taos-ws-py==0.3.2; extra == "complete"
- Requires-Dist: taoswswrap~=0.3.0; extra == "complete"
+ Requires-Dist: taoswswrap~=0.3.2; extra == "complete"
  Provides-Extra: complete-api
  Requires-Dist: adlfs==2023.9.0; extra == "complete-api"
  Requires-Dist: aiobotocore<2.16,>=2.5.0; extra == "complete-api"
@@ -215,7 +215,7 @@ Requires-Dist: igz-mgmt~=0.4.1; extra == "complete-api"
  Requires-Dist: kafka-python~=2.0; extra == "complete-api"
  Requires-Dist: memray~=1.12; sys_platform != "win32" and extra == "complete-api"
  Requires-Dist: mlflow~=2.16; extra == "complete-api"
- Requires-Dist: mlrun-pipelines-kfp-v1-8[kfp]~=0.3.5; python_version < "3.11" and extra == "complete-api"
+ Requires-Dist: mlrun-pipelines-kfp-v1-8[kfp]~=0.3.7; python_version < "3.11" and extra == "complete-api"
  Requires-Dist: msrest~=0.6.21; extra == "complete-api"
  Requires-Dist: objgraph~=3.6; extra == "complete-api"
  Requires-Dist: oss2==2.18.1; extra == "complete-api"
@@ -229,7 +229,7 @@ Requires-Dist: s3fs<2024.7,>=2023.9.2; extra == "complete-api"
  Requires-Dist: snowflake-connector-python~=3.7; extra == "complete-api"
  Requires-Dist: sqlalchemy~=1.4; extra == "complete-api"
  Requires-Dist: taos-ws-py==0.3.2; extra == "complete-api"
- Requires-Dist: taoswswrap~=0.3.0; extra == "complete-api"
+ Requires-Dist: taoswswrap~=0.3.2; extra == "complete-api"
  Requires-Dist: timelength~=1.1; extra == "complete-api"
  Requires-Dist: uvicorn~=0.32.1; extra == "complete-api"
  Dynamic: author
@@ -1,6 +1,6 @@
  mlrun/__init__.py,sha256=Cqm9U9eCEdLpMejhU2BEhubu0mHL71igJJIwYa738EA,7450
- mlrun/__main__.py,sha256=ysteSDo1LYe_YOXVdIVEJ3BhLPOfBngkEfRg5iaGGg4,46202
- mlrun/config.py,sha256=bW8K3MHK7jommbncuDr5N5EM9yoBqT4kA4DjKQ_1vxA,71264
+ mlrun/__main__.py,sha256=xYWflUbfSRpo8F4uzrHhBN1dcaBUADmfoK5Q-iqEBeQ,46181
+ mlrun/config.py,sha256=hATfrO5ZtykMiDJ6TNtkIDVgq6YwcgVrZf7H3doh5UE,71077
  mlrun/errors.py,sha256=LkcbXTLANGdsgo2CRX2pdbyNmt--lMsjGv0XZMgP-Nc,8222
  mlrun/execution.py,sha256=FUktsD3puSFjc3LZJU35b-OmFBrBPBNntViCLQVuwnk,50008
  mlrun/features.py,sha256=ReBaNGsBYXqcbgI012n-SO_j6oHIbk_Vpv0CGPXbUmo,15842
@@ -217,20 +217,21 @@ mlrun/launcher/factory.py,sha256=RW7mfzEFi8fR0M-4W1JQg1iq3_muUU6OTqT_3l4Ubrk,233
  mlrun/launcher/local.py,sha256=775HY-8S9LFUX5ubGXrLO0N1lVh8bn-DHFmNYuNqQPA,11451
  mlrun/launcher/remote.py,sha256=rLJW4UAnUT5iUb4BsGBOAV3K4R29a0X4lFtRkVKlyYU,7709
  mlrun/model_monitoring/__init__.py,sha256=ELy7njEtZnz09Dc6PGZSFFEGtnwI15bJNWM3Pj4_YIs,753
- mlrun/model_monitoring/api.py,sha256=3QoMEmJ523rzoWFRkx6SmZ9s0Y5b3RX8bZZMHUoZnf0,28484
+ mlrun/model_monitoring/api.py,sha256=w6jjrWhlptm7MruzWIJOqCSi4W_zZSq5kkUgTjvqrOk,28508
  mlrun/model_monitoring/controller.py,sha256=j6hqNYKhrw37PJZBcW4BgjsCpG7PtVMvFTpnZO95QVQ,29078
  mlrun/model_monitoring/features_drift_table.py,sha256=c6GpKtpOJbuT1u5uMWDL_S-6N4YPOmlktWMqPme3KFY,25308
- mlrun/model_monitoring/helpers.py,sha256=rsWH_u0qPRwS9URYI1yK8hUeQc7jhJ2alvt_RLz63oU,22402
+ mlrun/model_monitoring/helpers.py,sha256=fx2mCQkDu_PgO9LT7ykJ3qcZ7BwELaPqCt_MejqeVxo,22450
  mlrun/model_monitoring/stream_processing.py,sha256=NcvUdfVzveFzmphU65sFGfxp9Jh5ZKq2tSiWWftML9A,34531
  mlrun/model_monitoring/tracking_policy.py,sha256=PBIGrUYWrwcE5gwXupBIVzOb0QRRwPJsgQm_yLGQxB4,5595
  mlrun/model_monitoring/writer.py,sha256=vbL7bqTyNu8q4bNcebX72sUMybVDAoTWg-CXq4fov3Y,8429
- mlrun/model_monitoring/applications/__init__.py,sha256=QYvzgCutFdAkzqKPD3mvkX_3c1X4tzd-kW8ojUOE9ic,889
+ mlrun/model_monitoring/applications/__init__.py,sha256=xDBxkBjl-whHSG_4t1mLkxiypLH-fzn8TmAW9Mjo2uI,759
  mlrun/model_monitoring/applications/_application_steps.py,sha256=97taCEkfGx-QO-gD9uKnRF1PDIxQhY7sjPg85GxgIpA,6628
- mlrun/model_monitoring/applications/base.py,sha256=c8sYo58iAYNP42Ov4PHJR1UxTpFwXY19vdywGXrfEZQ,23870
+ mlrun/model_monitoring/applications/base.py,sha256=1jdZAreSqugcVd2xD0-dT6DAt16MO7WKQEK71G6Q6qs,24067
  mlrun/model_monitoring/applications/context.py,sha256=xqbKS61iXE6jBekyW8zjo_E3lxe2D8VepuXG_BA5y2k,14931
- mlrun/model_monitoring/applications/evidently_base.py,sha256=hRjXuXf6xf8sbjGt9yYfGDUGnvS5rV3W7tkJroF3QJA,5098
  mlrun/model_monitoring/applications/histogram_data_drift.py,sha256=G26_4gQfcwDZe3S6SIZ4Uc_qyrHAJ6lDTFOQGkbfQR8,14455
  mlrun/model_monitoring/applications/results.py,sha256=_qmj6TWT0SR2bi7gUyRKBU418eGgGoLW2_hTJ7S-ock,5782
+ mlrun/model_monitoring/applications/evidently/__init__.py,sha256=-DqdPnBSrjZhFvKOu_Ie3MiFvlur9sPTZpZ1u0_1AE8,690
+ mlrun/model_monitoring/applications/evidently/base.py,sha256=hRjXuXf6xf8sbjGt9yYfGDUGnvS5rV3W7tkJroF3QJA,5098
  mlrun/model_monitoring/db/__init__.py,sha256=r47xPGZpIfMuv8J3PQCZTSqVPMhUta4sSJCZFKcS7FM,644
  mlrun/model_monitoring/db/_schedules.py,sha256=AKyCJBAt0opNE3K3pg2TjCoD_afk1LKw5TY88rLQ2VA,6097
  mlrun/model_monitoring/db/_stats.py,sha256=VVMWLMqG3Us3ozBkLaokJF22Ewv8WKmVE1-OvS_g9vA,6943
@@ -240,7 +241,7 @@ mlrun/model_monitoring/db/tsdb/helpers.py,sha256=0oUXc4aUkYtP2SGP6jTb3uPPKImIUsV
  mlrun/model_monitoring/db/tsdb/tdengine/__init__.py,sha256=vgBdsKaXUURKqIf3M0y4sRatmSVA4CQiJs7J5dcVBkQ,620
  mlrun/model_monitoring/db/tsdb/tdengine/schemas.py,sha256=qfKDUZhgteL0mp2A1aP1iMmcthgUMKmZqMUidZjQktQ,12649
  mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py,sha256=Uadj0UvAmln2MxDWod-kAzau1uNlqZh981rPhbUH_5M,2857
- mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py,sha256=mFvajKtmZ7EHre7b1px0QdnvHBjcipLr6PrKZekjSwY,32716
+ mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py,sha256=OgIA-S-DZ3DM455zOk24rUsmM2EdrKbliuKs1Lzcpuw,32793
  mlrun/model_monitoring/db/tsdb/v3io/__init__.py,sha256=aL3bfmQsUQ-sbvKGdNihFj8gLCK3mSys0qDcXtYOwgc,616
  mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py,sha256=_-zo9relCDtjGgievxAcAP9gVN9nDWs8BzGtFwTjb9M,6284
  mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py,sha256=foxYWx7OjOfat2SHmzYrG8bIfaQ5NDnBtpDZua_NVGE,41141
@@ -270,20 +271,20 @@ mlrun/projects/operations.py,sha256=VXUlMrouFTls-I-bMhdN5pPfQ34TR7bFQ-NUSWNvl84,
  mlrun/projects/pipelines.py,sha256=QH2nEhaJxhafJdT0AXPzpDhTniyHtc0Cg74Spdz6Oeg,48255
  mlrun/projects/project.py,sha256=89fKq2kdfV1mtMMj0EIT55eI_F3IUKk_dCeAk7S-5AA,234506
  mlrun/runtimes/__init__.py,sha256=J9Sy2HiyMlztNv6VUurMzF5H2XzttNil8nRsWDsqLyg,8923
- mlrun/runtimes/base.py,sha256=aAEGZKPkcFs30UzURS7al3xYEDDARpJQ8kFhtBKUhik,37845
+ mlrun/runtimes/base.py,sha256=K5-zfFrE_HR6AaHWs2figaOTr7eosw3-4bELkYzpRk4,37789
  mlrun/runtimes/daskjob.py,sha256=JwuGvOiPsxEDHHMMUS4Oie4hLlYYIZwihAl6DjroTY0,19521
  mlrun/runtimes/funcdoc.py,sha256=zRFHrJsV8rhDLJwoUhcfZ7Cs0j-tQ76DxwUqdXV_Wyc,9810
- mlrun/runtimes/function_reference.py,sha256=iWKRe4r2GTc5S8FOIASYUNLwwne8NqIui51PFr8Q4mg,4918
+ mlrun/runtimes/function_reference.py,sha256=CLvRY-wXX9qhI9YEzSl0VWt8piH_-5FQYQ8ObUYLLDc,4911
  mlrun/runtimes/generators.py,sha256=X8NDlCEPveDDPOHtOGcSpbl3pAVM3DP7fuPj5xVhxEY,7290
  mlrun/runtimes/kubejob.py,sha256=gJnlAJ0RJw65yeiIPuLEjxJkDYfbpRgS3lyWkDDFXTk,8797
  mlrun/runtimes/local.py,sha256=yedo3R1c46cB1mX7aOz8zORXswQPvX86U-_fYxXoqTY,22717
  mlrun/runtimes/mounts.py,sha256=pGQlnsNTUxAhFMWLS_53E784z-IH9a6oQjKjSp1gbJE,18733
- mlrun/runtimes/pod.py,sha256=VsxviESdIW9eMM4XUnpIQk8OlRiCEUbZpI68Mmwy5Ro,67708
+ mlrun/runtimes/pod.py,sha256=hOIXw6X5y7Vkzd6lvt-2reeh2ockj3Bf7xVBUn6xGeo,71824
  mlrun/runtimes/remotesparkjob.py,sha256=dod99nqz3GdRfmnBoQKfwFCXTetfuCScd2pKH3HJyoY,7394
  mlrun/runtimes/utils.py,sha256=3_Vu_OHlhi8f0vh_w9ii2eTKgS5dh6RVi1HwX9oDKuU,15675
  mlrun/runtimes/databricks_job/__init__.py,sha256=kXGBqhLN0rlAx0kTXhozGzFsIdSqW0uTSKMmsLgq_is,569
  mlrun/runtimes/databricks_job/databricks_cancel_task.py,sha256=sIqIg5DQAf4j0wCPA-G0GoxY6vacRddxCy5KDUZszek,2245
- mlrun/runtimes/databricks_job/databricks_runtime.py,sha256=p80j2_jHzlH20dHT-avjfcbaDBTY2re1WjlJjbg5uSQ,12794
+ mlrun/runtimes/databricks_job/databricks_runtime.py,sha256=THzAuVMdGWeqTiXGj6Dy0d8SZpHbUZdDKvoxKYO5fTM,12816
  mlrun/runtimes/databricks_job/databricks_wrapper.py,sha256=oJzym54jD957yzxRXiSYpituSV8JV_XJh90YTKIwapY,8684
  mlrun/runtimes/mpijob/__init__.py,sha256=6sUPQRFwigi4mqjDVZmRE-qgaLw2ILY5NbneVUuMKto,947
  mlrun/runtimes/mpijob/abstract.py,sha256=JGMjcJ4dvpJbctF6psU9UvYyNCutMxTMgBQeTlzpkro,9249
@@ -319,7 +320,7 @@ mlrun/utils/azure_vault.py,sha256=IEFizrDGDbAaoWwDr1WoA88S_EZ0T--vjYtY-i0cvYQ,34
  mlrun/utils/clones.py,sha256=y3zC9QS7z5mLuvyQ6vFd6sJnikbgtDwrBvieQq0sovY,7359
  mlrun/utils/condition_evaluator.py,sha256=-nGfRmZzivn01rHTroiGY4rqEv8T1irMyhzxEei-sKc,1897
  mlrun/utils/db.py,sha256=blQgkWMfFH9lcN4sgJQcPQgEETz2Dl_zwbVA0SslpFg,2186
- mlrun/utils/helpers.py,sha256=kaYKV1HoH-DGYSMPEZ_x1yc-1YtU6auGkx4I48XbnbU,72230
+ mlrun/utils/helpers.py,sha256=adXqZYrCWXhbNOGI4J1K4WBXsSAIkv-FWaNayt424xs,73279
  mlrun/utils/http.py,sha256=t6FrXQstZm9xVVjxqIGiLzrwZNCR4CSienSOuVgNIcI,8706
  mlrun/utils/logger.py,sha256=RG0m1rx6gfkJ-2C1r_p41MMpPiaDYqaYM2lYHDlNZEU,14767
  mlrun/utils/regex.py,sha256=jbR7IiOp6OO0mg9Fl_cVZCpWb9fL9nTPONCUxCDNWXg,5201
@@ -338,11 +339,11 @@ mlrun/utils/notifications/notification/mail.py,sha256=ZyJ3eqd8simxffQmXzqd3bgbAq
  mlrun/utils/notifications/notification/slack.py,sha256=eQvmctTh6wIG5xVOesLLV9S1-UUCu5UEQ9JIJOor3ts,7183
  mlrun/utils/notifications/notification/webhook.py,sha256=NeyIMSBojjjTJaUHmPbxMByp34GxYkl1-16NqzU27fU,4943
  mlrun/utils/version/__init__.py,sha256=7kkrB7hEZ3cLXoWj1kPoDwo4MaswsI2JVOBpbKgPAgc,614
- mlrun/utils/version/version.json,sha256=Tm_MnhrvV-ygaHPbOywgCjTbNQUZ0lblX2agecVqT1w,89
+ mlrun/utils/version/version.json,sha256=HiW1MAPj8GSuC6bLGzBX2dEOLzDlgCTLe3iWUM7fm1s,89
  mlrun/utils/version/version.py,sha256=eEW0tqIAkU9Xifxv8Z9_qsYnNhn3YH7NRAfM-pPLt1g,1878
- mlrun-1.8.0rc32.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- mlrun-1.8.0rc32.dist-info/METADATA,sha256=fXIsyQiNvixypLDBHlMSCp6SSQ1exll98yKjLGxawbM,25985
- mlrun-1.8.0rc32.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
- mlrun-1.8.0rc32.dist-info/entry_points.txt,sha256=1Owd16eAclD5pfRCoJpYC2ZJSyGNTtUr0nCELMioMmU,46
- mlrun-1.8.0rc32.dist-info/top_level.txt,sha256=NObLzw3maSF9wVrgSeYBv-fgnHkAJ1kEkh12DLdd5KM,6
- mlrun-1.8.0rc32.dist-info/RECORD,,
+ mlrun-1.8.0rc33.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ mlrun-1.8.0rc33.dist-info/METADATA,sha256=CKvUMuVAhnREX815xtMEgz1TMTjllhP0ywgeXRFctiw,25986
+ mlrun-1.8.0rc33.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ mlrun-1.8.0rc33.dist-info/entry_points.txt,sha256=1Owd16eAclD5pfRCoJpYC2ZJSyGNTtUr0nCELMioMmU,46
+ mlrun-1.8.0rc33.dist-info/top_level.txt,sha256=NObLzw3maSF9wVrgSeYBv-fgnHkAJ1kEkh12DLdd5KM,6
+ mlrun-1.8.0rc33.dist-info/RECORD,,