mlrun 1.10.0rc26__py3-none-any.whl → 1.10.0rc28__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
mlrun/config.py CHANGED
@@ -255,7 +255,8 @@ default_config = {
         },
         "runtimes": {
             "dask": "600",
-            "dask_cluster_start": "300",
+            # cluster start might take some time in case k8s needs to spin up new nodes
+            "dask_cluster_start": "600",
         },
         "push_notifications": "60",
     },
mlrun/db/httpdb.py CHANGED
@@ -5200,7 +5200,7 @@ class HTTPRunDB(RunDBInterface):

         :return: A ModelEndpointDriftValues object containing the drift counts over time.
         """
-        endpoint_path = f"projects/{project}/model-endpoints/drift-over-time"
+        endpoint_path = f"projects/{project}/model-monitoring/drift-over-time"
         error_message = f"Failed retrieving drift data for {project}"
         response = self.api_call(
             method="GET",
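Only the REST path changes here; the surrounding call is untouched. For reference, the new path is composed like this (the project name is a placeholder):

    project = "my-project"
    endpoint_path = f"projects/{project}/model-monitoring/drift-over-time"
    # passed to HTTPRunDB.api_call(method="GET", ...) exactly as shown in the hunk above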
mlrun/model_monitoring/applications/base.py CHANGED
@@ -544,15 +544,17 @@ class ModelMonitoringApplicationBase(MonitoringApplicationToDict, ABC):
                 else:
                     raise mlrun.errors.MLRunValueError(
                         "The start time for the application and endpoint precedes the last analyzed time: "
-                        f"{start_dt=}, {last_analyzed=}, {application_name=}, {endpoint_id=}. "
+                        f"start_dt='{start_dt}', last_analyzed='{last_analyzed}', {application_name=}, "
+                        f"{endpoint_id=}. "
                         "Writing data out of order is not supported, and the start time could not be "
                         "dynamically reset, as last_analyzed is later than the given end time or that "
-                        f"base_period was specified ({end_dt=}, {base_period=})."
+                        f"base_period was specified (end_dt='{end_dt}', {base_period=})."
                     )
             else:
                 raise mlrun.errors.MLRunValueError(
                     "The start time for the application and endpoint precedes the last analyzed time: "
-                    f"{start_dt=}, {last_analyzed=}, {application_name=}, {endpoint_id=}. "
+                    f"start_dt='{start_dt}', last_analyzed='{last_analyzed}', {application_name=}, "
+                    f"{endpoint_id=}. "
                     "Writing data out of order is not supported. You should change the start time to "
                     f"'{last_analyzed}' or later."
                 )
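The only change in these messages is how the datetimes are rendered: the f-string debug specifier `{x=}` prints the value's repr, while the explicit `x='{x}'` form prints its str(), which is shorter for datetimes. A quick standalone illustration:

    from datetime import datetime, timezone

    start_dt = datetime(2025, 1, 1, tzinfo=timezone.utc)
    print(f"{start_dt=}")
    # start_dt=datetime.datetime(2025, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)
    print(f"start_dt='{start_dt}'")
    # start_dt='2025-01-01 00:00:00+00:00'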
mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py CHANGED
@@ -1499,20 +1499,51 @@ class V3IOTSDBConnector(TSDBConnector):
     ) -> mm_schemas.ModelEndpointDriftValues:
         table = mm_schemas.V3IOTSDBTables.APP_RESULTS
         start, end, interval = self._prepare_aligned_start_end(start, end)
-
-        # get per time-interval x endpoint_id combination the max result status
         df = self._get_records(
             table=table,
             start=start,
             end=end,
-            interval=interval,
-            sliding_window_step=interval,
             columns=[mm_schemas.ResultData.RESULT_STATUS],
-            agg_funcs=["max"],
-            group_by=mm_schemas.WriterEvent.ENDPOINT_ID,
         )
+        df = self._aggregate_raw_drift_data(df, start, end, interval)
         if df.empty:
             return mm_schemas.ModelEndpointDriftValues(values=[])
         df = df[df[f"max({mm_schemas.ResultData.RESULT_STATUS})"] >= 1]
-        df = df.reset_index(names="_wstart")
         return self._df_to_drift_data(df)
+
+    @staticmethod
+    def _aggregate_raw_drift_data(
+        df: pd.DataFrame, start: datetime, end: datetime, interval: str
+    ) -> pd.DataFrame:
+        if df.empty:
+            return df
+        if not isinstance(df.index, pd.DatetimeIndex):
+            raise TypeError("Expected a DatetimeIndex on the DataFrame (time index).")
+        df[EventFieldType.ENDPOINT_ID] = (
+            df[EventFieldType.ENDPOINT_ID].astype("string").str.strip()
+        )  # remove extra data carried by the category dtype
+        window = df.loc[
+            (df.index >= start) & (df.index < end),
+            [mm_schemas.ResultData.RESULT_STATUS, EventFieldType.ENDPOINT_ID],
+        ]
+        out = (
+            window.groupby(
+                [
+                    EventFieldType.ENDPOINT_ID,
+                    pd.Grouper(
+                        freq=interval, origin=start, label="left", closed="left"
+                    ),
+                ]
+                # align to start, [start, end) intervals
+            )[mm_schemas.ResultData.RESULT_STATUS]
+            .max()
+            .reset_index()
+            .rename(
+                columns={
+                    mm_schemas.ResultData.RESULT_STATUS: f"max({mm_schemas.ResultData.RESULT_STATUS})"
+                }
+            )
+        )
+        return out.rename(
+            columns={"time": "_wstart"}
+        )  # rename datetime column to _wstart to align with the tdengine result
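A minimal, self-contained sketch of the aggregation pattern `_aggregate_raw_drift_data` applies above: group raw result rows by endpoint and by fixed windows aligned to `start`, take the max status per window, and expose the window start as `_wstart`. Column names and values here are illustrative placeholders, not the real schema constants:

    import pandas as pd

    start = pd.Timestamp("2025-01-01 00:00", tz="UTC")
    idx = pd.DatetimeIndex(
        ["2025-01-01 00:05", "2025-01-01 00:20", "2025-01-01 00:45"],
        tz="UTC",
        name="time",
    )
    df = pd.DataFrame(
        {"endpoint_id": ["ep-1", "ep-1", "ep-2"], "result_status": [0, 2, 1]},
        index=idx,
    )
    out = (
        df.groupby(
            [
                "endpoint_id",
                # half-open [start, start + 30min), ... windows anchored at `start`
                pd.Grouper(freq="30min", origin=start, label="left", closed="left"),
            ]
        )["result_status"]
        .max()
        .reset_index()
        .rename(columns={"time": "_wstart", "result_status": "max(result_status)"})
    )
    print(out)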
mlrun/model_monitoring/helpers.py CHANGED
@@ -659,3 +659,26 @@ def get_start_end(
     )

     return start, end
+
+
+def validate_time_range(
+    start: Optional[datetime.datetime] = None, end: Optional[datetime.datetime] = None
+) -> tuple[datetime.datetime, datetime.datetime]:
+    """
+    validate start and end parameters and set default values if needed.
+    :param start: Either None or datetime, None is handled as datetime.now(tz=timezone.utc) - timedelta(days=1)
+    :param end: Either None or datetime, None is handled as datetime.now(tz=timezone.utc)
+    :return: start datetime, end datetime
+    """
+    end = end or mlrun.utils.helpers.datetime_now()
+    start = start or (end - datetime.timedelta(days=1))
+    if start.tzinfo is None or end.tzinfo is None:
+        raise mlrun.errors.MLRunInvalidArgumentTypeError(
+            "Custom start and end times must contain the timezone."
+        )
+    if start > end:
+        raise mlrun.errors.MLRunInvalidArgumentError(
+            "The start time must be before the end time. Note that if end time is not provided, "
+            "the current time is used by default."
+        )
+    return start, end
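An illustrative call of the new helper, assuming it lives next to `get_start_end` in `mlrun.model_monitoring.helpers` (treat the import path as an assumption):

    import datetime

    from mlrun.model_monitoring.helpers import validate_time_range  # assumed path

    end = datetime.datetime(2025, 6, 1, 12, 0, tzinfo=datetime.timezone.utc)
    start, end = validate_time_range(end=end)
    assert start == end - datetime.timedelta(days=1)

    # A timezone-naive datetime is rejected with MLRunInvalidArgumentTypeError:
    # validate_time_range(start=datetime.datetime(2025, 6, 1))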
mlrun/projects/pipelines.py CHANGED
@@ -228,11 +228,11 @@ class _PipelineContext:
         force_run_local = mlrun.mlconf.force_run_local
         if force_run_local is None or force_run_local == "auto":
             force_run_local = not mlrun.mlconf.is_api_running_on_k8s()
+
+        if self.workflow:
             if not mlrun.mlconf.kfp_url:
                 logger.debug("Kubeflow pipeline URL is not set, running locally")
                 force_run_local = True
-
-        if self.workflow:
             force_run_local = force_run_local or self.workflow.run_local

         return force_run_local
mlrun/run.py CHANGED
@@ -222,7 +222,8 @@ def get_or_create_ctx(
     :param spec: dictionary holding run spec
     :param with_env: look for context in environment vars, default True
     :param rundb: path/url to the metadata and artifact database
-    :param project: project to initiate the context in (by default `mlrun.mlconf.active_project`)
+    :param project: project to initiate the context in (by default `mlrun.mlconf.active_project`).
+                    If not set, an active project must exist.
     :param upload_artifacts: when using local context (not as part of a job/run), upload artifacts to the
                              system default artifact path location
     :return: execution context
@@ -277,6 +278,16 @@ def get_or_create_ctx(
     if newspec and not isinstance(newspec, dict):
         newspec = json.loads(newspec)

+    if (
+        not newspec.get("metadata", {}).get("project")
+        and not project
+        and not mlconf.active_project
+    ):
+        raise mlrun.errors.MLRunMissingProjectError(
+            """No active project found. Make sure to set an active project using: mlrun.get_or_create_project()
+            You can verify the active project with: mlrun.mlconf.active_project"""
+        )
+
     if not newspec:
         newspec = {}
     if upload_artifacts:
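With this guard, `get_or_create_ctx` now fails fast when no project can be resolved. A minimal sketch of the expected calling pattern (project name is a placeholder; assumes a reachable MLRun API or local defaults):

    import mlrun

    # Set an active project first, then create the context under it.
    mlrun.get_or_create_project("my-project", context="./")
    ctx = mlrun.get_or_create_ctx("training-ctx", project="my-project")
    print(ctx.project)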
mlrun/runtimes/base.py CHANGED
@@ -443,9 +443,11 @@ class BaseRuntime(ModelObj):
         :param runobj: Run context object (RunObject) with run metadata and status
         :return: Dictionary with all the variables that could be parsed
         """
+        active_project = self.metadata.project or config.active_project
         runtime_env = {
-            mlrun_constants.MLRUN_ACTIVE_PROJECT: self.metadata.project
-            or config.active_project
+            mlrun_constants.MLRUN_ACTIVE_PROJECT: active_project,
+            # TODO: Remove this in 1.12.0 as MLRUN_DEFAULT_PROJECT is deprecated and should not be injected anymore
+            "MLRUN_DEFAULT_PROJECT": active_project,
         }
         if runobj:
             runtime_env["MLRUN_EXEC_CONFIG"] = runobj.to_json(
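Both variables carry the same value inside the pod; `MLRUN_DEFAULT_PROJECT` is injected only for backward compatibility until 1.12.0. Code running in a job can keep reading either one during the transition:

    import os

    # Prefer the new variable and fall back to the deprecated one for older runtimes.
    active_project = os.environ.get("MLRUN_ACTIVE_PROJECT") or os.environ.get(
        "MLRUN_DEFAULT_PROJECT"
    )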
mlrun/runtimes/nuclio/function.py CHANGED
@@ -968,24 +968,6 @@ class RemoteRuntime(KubeResource):
             self._mock_server = None

         if "://" not in path:
-            if not self.status.address:
-                # here we check that if default http trigger is disabled, function contains a custom http trigger
-                # Otherwise, the function is not invokable, so we raise an error
-                if (
-                    not self._trigger_of_kind_exists(kind="http")
-                    and self.spec.disable_default_http_trigger
-                ):
-                    raise mlrun.errors.MLRunPreconditionFailedError(
-                        "Default http trigger creation is disabled and there is no any other custom http trigger, "
-                        "so function can not be invoked via http. Either enable default http trigger creation or "
-                        "create custom http trigger"
-                    )
-                state, _, _ = self._get_state()
-                if state not in ["ready", "scaledToZero"]:
-                    logger.warning(f"Function is in the {state} state")
-                if not self.status.address:
-                    raise ValueError("no function address first run .deploy()")
-
             path = self._resolve_invocation_url(path, force_external_address)

         if headers is None:
@@ -1228,19 +1210,47 @@
         # internal / external invocation urls is a nuclio >= 1.6.x feature
         # try to infer the invocation url from the internal and if not exists, use external.
         # $$$$ we do not want to use the external invocation url (e.g.: ingress, nodePort, etc.)
+
+        # check function state before invocation
+        state, _, _ = self._get_state()
+        if state not in ["ready", "scaledToZero"]:
+            logger.warning(f"Function is in the {state} state")
+
+        # prefer internal invocation url if running inside k8s cluster
         if (
             not force_external_address
             and self.status.internal_invocation_urls
             and mlrun.k8s_utils.is_running_inside_kubernetes_cluster()
         ):
-            return mlrun.utils.helpers.join_urls(
+            url = mlrun.utils.helpers.join_urls(
                 f"http://{self.status.internal_invocation_urls[0]}", path
             )
+            logger.debug(
+                f"Using internal invocation url {url}. Make sure you have network access to the k8s cluster. "
+                f"Otherwise, set force_external_address to True"
+            )
+            return url

         if self.status.external_invocation_urls:
             return mlrun.utils.helpers.join_urls(
                 f"http://{self.status.external_invocation_urls[0]}", path
             )
+
+        if not self.status.address:
+            # if there is no address
+            # here we check that if default http trigger is disabled, function contains a custom http trigger
+            # Otherwise, the function is not invokable, so we raise an error
+            if (
+                not self._trigger_of_kind_exists(kind="http")
+                and self.spec.disable_default_http_trigger
+            ):
+                raise mlrun.errors.MLRunPreconditionFailedError(
+                    "Default http trigger creation is disabled and there is no any other custom http trigger, "
+                    "so function can not be invoked via http. Either enable default http trigger creation or "
+                    "create custom http trigger"
+                )
+            else:
+                raise ValueError("no function address first run .deploy()")
         else:
             return mlrun.utils.helpers.join_urls(f"http://{self.status.address}", path)
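The state check and the missing-address errors now run inside `_resolve_invocation_url`, so they apply to every invocation path. A hedged usage sketch (`fn` is an already deployed remote function object):

    # From inside the cluster the internal invocation URL is preferred automatically;
    # from a laptop or CI runner, force the external address instead.
    response = fn.invoke("/", force_external_address=True)
    print(response)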
@@ -1294,6 +1304,8 @@
     def get_url(
         self,
         force_external_address: bool = False,
+        # leaving auth_info for BC
+        # TODO: remove in 1.12.0
         auth_info: AuthInfo = None,
     ):
         """
@@ -1304,13 +1316,10 @@

         :return: returns function's url
         """
-        if not self.status.address:
-            state, _, _ = self._get_state(auth_info=auth_info)
-            if state != "ready" or not self.status.address:
-                raise ValueError(
-                    "no function address or not ready, first run .deploy()"
-                )
-
+        if auth_info:
+            logger.warning(
+                "Deprecated parameter 'auth_info' was provided, but will be ignored. Will be removed in 1.12.0."
+            )
         return self._resolve_invocation_url("", force_external_address)

     @staticmethod
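`get_url` no longer requires the function to be in the `ready` state, and `auth_info` is accepted only for backward compatibility (a deprecation warning is logged and the value is ignored). A minimal sketch (`fn` is a deployed remote function object):

    url = fn.get_url(force_external_address=True)
    print(url)  # passing auth_info still works but is ignored and goes away in 1.12.0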
mlrun/utils/helpers.py CHANGED
@@ -15,7 +15,6 @@
 import asyncio
 import base64
 import enum
-import functools
 import gzip
 import hashlib
 import inspect
@@ -46,6 +45,7 @@ import pytz
 import semver
 import yaml
 from dateutil import parser
+from orjson import orjson
 from pandas import Timedelta, Timestamp
 from yaml.representer import RepresenterError

@@ -915,12 +915,10 @@ def enrich_image_url(
     )
     mlrun_version = config.images_tag or client_version or server_version
     tag = mlrun_version or ""
-
-    # TODO: Remove condition when mlrun/mlrun-kfp image is also supported
-    if "mlrun-kfp" not in image_url:
-        tag += resolve_image_tag_suffix(
-            mlrun_version=mlrun_version, python_version=client_python_version
-        )
+    tag += resolve_image_tag_suffix(
+        mlrun_version=mlrun_version,
+        python_version=client_python_version,
+    )

     # it's an mlrun image if the repository is mlrun
     is_mlrun_image = image_url.startswith("mlrun/") or "/mlrun/" in image_url
@@ -1217,52 +1215,58 @@ def get_workflow_url(


 def get_kfp_list_runs_filter(
-    project_name: Optional[str] = None,
-    end_date: Optional[str] = None,
     start_date: Optional[str] = None,
+    end_date: Optional[str] = None,
+    filter_: Optional[str] = None,
+    experiment_ids: Optional[list[str]] = None,
 ) -> str:
     """
-    Generates a filter for listing Kubeflow Pipelines (KFP) runs.
-
-    :param project_name: The name of the project. If "*", it won't filter by project.
-    :param end_date: The latest creation date for filtering runs (ISO 8601 format).
-    :param start_date: The earliest creation date for filtering runs (ISO 8601 format).
-    :return: A JSON-formatted filter string for KFP.
+    Generate a filter for KFP runs based on start and end dates, and experiment IDs.
     """
+    existing_filter_object = json.loads(filter_) if filter_ else {"predicates": []}
+    preserved_predicates = [
+        predicate
+        for predicate in existing_filter_object.get("predicates", [])
+        if predicate.get("key") != "name"
+    ]

-    # KFP filter operation codes
-    kfp_less_than_or_equal_op = 7  # '<='
-    kfp_greater_than_or_equal_op = 5  # '>='
-    kfp_substring_op = 9  # Substring match
-
-    filters = {"predicates": []}
-
+    new_predicates = []
     if end_date:
-        filters["predicates"].append(
+        new_predicates.append(
             {
-                "key": "created_at",
-                "op": kfp_less_than_or_equal_op,
+                "key": mlrun_pipelines.models.FilterFields.CREATED_AT,
+                "op": mlrun_pipelines.models.FilterOperations.LESS_THAN_EQUALS.value,
                 "timestamp_value": end_date,
             }
         )

-    if project_name and project_name != "*":
-        filters["predicates"].append(
+    if start_date:
+        new_predicates.append(
             {
-                "key": "name",
-                "op": kfp_substring_op,
-                "string_value": project_name,
+                "key": mlrun_pipelines.models.FilterFields.CREATED_AT,
+                "op": mlrun_pipelines.models.FilterOperations.GREATER_THAN_EQUALS.value,
+                "timestamp_value": start_date,
             }
         )
-    if start_date:
-        filters["predicates"].append(
+
+    if experiment_ids and all(experiment_ids):
+        new_predicates.append(
             {
-                "key": "created_at",
-                "op": kfp_greater_than_or_equal_op,
-                "timestamp_value": start_date,
+                "key": mlrun_pipelines.models.FilterFields.EXPERIMENT_ID,
+                "op": mlrun_pipelines.models.FilterOperations.IN.value,
+                "string_values": {"values": experiment_ids},
             }
         )
-    return json.dumps(filters)
+
+    final_filter_object = {"predicates": preserved_predicates + new_predicates}
+    if not final_filter_object["predicates"]:
+        return ""
+
+    logger.debug(
+        "Generated KFP runs filter",
+        filter_object_with_predicates=final_filter_object,
+    )
+    return orjson.dumps(final_filter_object).decode()


 def validate_and_convert_date(date_input: str) -> str:
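A standalone sketch of the predicate-merge pattern used above: parse any caller-supplied filter, drop legacy `name` predicates, append date and experiment predicates, and serialize with orjson. The keys and op codes below are hardcoded placeholders; the real code takes them from the `mlrun_pipelines.models` enums:

    import json

    import orjson

    existing = '{"predicates": [{"key": "name", "op": 9, "string_value": "proj"}]}'
    preserved = [
        p for p in json.loads(existing).get("predicates", []) if p.get("key") != "name"
    ]
    new = [
        {"key": "created_at", "op": 5, "timestamp_value": "2025-01-01T00:00:00Z"},
        {"key": "experiment_id", "op": 8, "string_values": {"values": ["exp-1"]}},
    ]
    merged = {"predicates": preserved + new}
    print(orjson.dumps(merged).decode() if merged["predicates"] else "")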
@@ -1862,10 +1866,7 @@ async def run_in_threadpool(func, *args, **kwargs):
     Run a sync-function in the loop default thread pool executor pool and await its result.
     Note that this function is not suitable for CPU-bound tasks, as it will block the event loop.
     """
-    loop = asyncio.get_running_loop()
-    if kwargs:
-        func = functools.partial(func, **kwargs)
-    return await loop.run_in_executor(None, func, *args)
+    return await asyncio.to_thread(func, *args, **kwargs)


 def is_explicit_ack_supported(context):
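`asyncio.to_thread` (Python 3.9+) forwards positional and keyword arguments and runs the callable in the default thread pool, which is exactly what the removed `functools.partial` plus `run_in_executor` combination did by hand. A quick self-contained check:

    import asyncio
    import time


    def blocking_call(x, *, delay=0.01):
        time.sleep(delay)  # stands in for blocking I/O
        return x * 2


    async def main():
        result = await asyncio.to_thread(blocking_call, 21, delay=0.005)
        assert result == 42


    asyncio.run(main())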
mlrun/utils/notifications/notification/base.py CHANGED
@@ -15,11 +15,29 @@
 import asyncio
 import typing
 from copy import deepcopy
+from typing import Optional
+
+import aiohttp

 import mlrun.common.schemas
 import mlrun.lists


+class TimedHTTPClient:
+    def __init__(self, timeout: Optional[float] = 30.0):
+        """
+        HTTP client wrapper with built-in timeout.
+
+        Args:
+            timeout: Request timeout in seconds (default: 30.0)
+        """
+        self.timeout = aiohttp.ClientTimeout(total=timeout)
+
+    def session(self, **kwargs) -> aiohttp.ClientSession:
+        """Create a new ClientSession with the configured timeout and additional parameters."""
+        return aiohttp.ClientSession(timeout=self.timeout, **kwargs)
+
+
 class NotificationBase:
     def __init__(
         self,
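`TimedHTTPClient().session()` is a drop-in replacement for `aiohttp.ClientSession()`, as the git/slack/webhook hunks below show; the only difference is the default 30-second total timeout. A minimal sketch (import path assumed from the RECORD entry for notification/base.py; the URL is a placeholder):

    import asyncio

    from mlrun.utils.notifications.notification.base import TimedHTTPClient  # assumed path


    async def post_with_timeout():
        async with TimedHTTPClient(timeout=10.0).session() as session:
            async with session.post("https://example.invalid/hook", json={"ok": True}) as resp:
                resp.raise_for_status()


    # asyncio.run(post_with_timeout())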
mlrun/utils/notifications/notification/git.py CHANGED
@@ -16,13 +16,11 @@ import json
 import os
 import typing

-import aiohttp
-
 import mlrun.common.schemas
 import mlrun.errors
 import mlrun.lists

-from .base import NotificationBase
+from .base import NotificationBase, TimedHTTPClient


 class GitNotification(NotificationBase):
@@ -148,7 +146,7 @@ class GitNotification(NotificationBase):
         }
         url = f"https://{server}/repos/{repo}/issues/{issue}/comments"

-        async with aiohttp.ClientSession() as session:
+        async with TimedHTTPClient().session() as session:
             resp = await session.post(url, headers=headers, json={"body": message})
             if not resp.ok:
                 resp_text = await resp.text()
mlrun/utils/notifications/notification/slack.py CHANGED
@@ -14,14 +14,12 @@

 import typing

-import aiohttp
-
 import mlrun.common.runtimes.constants as runtimes_constants
 import mlrun.common.schemas
 import mlrun.lists
 import mlrun.utils.helpers

-from .base import NotificationBase
+from .base import NotificationBase, TimedHTTPClient


 class SlackNotification(NotificationBase):
@@ -67,7 +65,7 @@ class SlackNotification(NotificationBase):

         data = self._generate_slack_data(message, severity, runs, alert, event_data)

-        async with aiohttp.ClientSession() as session:
+        async with TimedHTTPClient().session() as session:
             async with session.post(webhook, json=data) as response:
                 response.raise_for_status()

mlrun/utils/notifications/notification/webhook.py CHANGED
@@ -15,14 +15,13 @@
 import re
 import typing

-import aiohttp
 import orjson

 import mlrun.common.schemas
 import mlrun.lists
 import mlrun.utils.helpers

-from .base import NotificationBase
+from .base import NotificationBase, TimedHTTPClient


 class WebhookNotification(NotificationBase):
@@ -87,9 +86,7 @@ class WebhookNotification(NotificationBase):
         # we automatically handle it as `ssl=None` for their convenience.
         verify_ssl = verify_ssl and None if url.startswith("https") else None

-        async with aiohttp.ClientSession(
-            json_serialize=self._encoder,
-        ) as session:
+        async with TimedHTTPClient().session(json_serialize=self._encoder) as session:
             response = await getattr(session, method)(
                 url,
                 headers=headers,
mlrun/utils/version/version.json CHANGED
@@ -1,4 +1,4 @@
 {
-  "git_commit": "fc51af08faf3c93220de4d619b679cf1950ba5ed",
-  "version": "1.10.0-rc26"
+  "git_commit": "ec44cbf076b74e7961c4597ce7b9d92dcaf87f90",
+  "version": "1.10.0-rc28"
 }
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: mlrun
-Version: 1.10.0rc26
+Version: 1.10.0rc28
 Summary: Tracking and config of machine learning runs
 Home-page: https://github.com/mlrun/mlrun
 Author: Yaron Haviv
@@ -21,8 +21,8 @@ Classifier: Topic :: Software Development :: Libraries
 Requires-Python: >=3.9, <3.12
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: urllib3>=1.26.20; python_version < "3.11"
-Requires-Dist: urllib3>=2.5.0; python_version >= "3.11"
+Requires-Dist: urllib3>=1.26.20
+Requires-Dist: v3io-frames>=0.10.15
 Requires-Dist: GitPython>=3.1.41,~=3.1
 Requires-Dist: aiohttp~=3.11
 Requires-Dist: aiohttp-retry~=2.9
@@ -39,8 +39,6 @@ Requires-Dist: tabulate~=0.8.6
 Requires-Dist: v3io~=0.7.0
 Requires-Dist: pydantic>=1.10.15
 Requires-Dist: mergedeep~=1.3
-Requires-Dist: v3io-frames~=0.10.15; python_version < "3.11"
-Requires-Dist: v3io-frames>=0.13.0; python_version >= "3.11"
 Requires-Dist: semver~=3.0
 Requires-Dist: dependency-injector~=4.41
 Requires-Dist: fsspec<=2025.7.0,>=2025.5.1
@@ -92,10 +90,10 @@ Requires-Dist: databricks-sdk~=0.20.0; extra == "databricks-sdk"
 Provides-Extra: sqlalchemy
 Requires-Dist: sqlalchemy~=2.0; extra == "sqlalchemy"
 Provides-Extra: dask
-Requires-Dist: dask~=2024.12.1; python_version >= "3.11" and extra == "dask"
-Requires-Dist: distributed~=2024.12.1; python_version >= "3.11" and extra == "dask"
-Requires-Dist: dask~=2023.12.1; python_version < "3.11" and extra == "dask"
-Requires-Dist: distributed~=2023.12.1; python_version < "3.11" and extra == "dask"
+Requires-Dist: dask>=2023.12.1; python_version < "3.11" and extra == "dask"
+Requires-Dist: dask>=2024.8; python_version >= "3.11" and extra == "dask"
+Requires-Dist: distributed>=2023.12.1; python_version < "3.11" and extra == "dask"
+Requires-Dist: distributed>=2024.8; python_version >= "3.11" and extra == "dask"
 Provides-Extra: alibaba-oss
 Requires-Dist: ossfs==2025.5.0; extra == "alibaba-oss"
 Requires-Dist: oss2==2.18.4; extra == "alibaba-oss"
@@ -106,7 +104,7 @@ Requires-Dist: snowflake-connector-python~=3.7; extra == "snowflake"
 Provides-Extra: dev-postgres
 Requires-Dist: pytest-mock-resources[postgres]~=2.12; extra == "dev-postgres"
 Provides-Extra: kfp18
-Requires-Dist: mlrun_pipelines_kfp_v1_8[kfp]>=0.5.0; python_version < "3.11" and extra == "kfp18"
+Requires-Dist: mlrun_pipelines_kfp_v1_8[kfp]>=0.5.7; extra == "kfp18"
 Provides-Extra: api
 Requires-Dist: uvicorn~=0.32.1; extra == "api"
 Requires-Dist: dask-kubernetes~=0.11.0; extra == "api"
@@ -125,7 +123,6 @@ Requires-Dist: memray~=1.12; sys_platform != "win32" and extra == "api"
 Requires-Dist: aiosmtplib~=3.0; extra == "api"
 Requires-Dist: pydantic<2,>=1; extra == "api"
 Requires-Dist: mlrun-pipelines-kfp-v1-8~=0.5.7; extra == "api"
-Requires-Dist: grpcio~=1.70.0; extra == "api"
 Provides-Extra: all
 Requires-Dist: adlfs==2024.12.0; extra == "all"
 Requires-Dist: aiobotocore<2.16,>=2.5.0; extra == "all"
@@ -134,11 +131,11 @@ Requires-Dist: azure-core~=1.24; extra == "all"
 Requires-Dist: azure-identity~=1.5; extra == "all"
 Requires-Dist: azure-keyvault-secrets~=4.2; extra == "all"
 Requires-Dist: boto3<1.36,>=1.28.0; extra == "all"
-Requires-Dist: dask~=2023.12.1; python_version < "3.11" and extra == "all"
-Requires-Dist: dask~=2024.12.1; python_version >= "3.11" and extra == "all"
+Requires-Dist: dask>=2023.12.1; python_version < "3.11" and extra == "all"
+Requires-Dist: dask>=2024.8; python_version >= "3.11" and extra == "all"
 Requires-Dist: databricks-sdk~=0.20.0; extra == "all"
-Requires-Dist: distributed~=2023.12.1; python_version < "3.11" and extra == "all"
-Requires-Dist: distributed~=2024.12.1; python_version >= "3.11" and extra == "all"
+Requires-Dist: distributed>=2023.12.1; python_version < "3.11" and extra == "all"
+Requires-Dist: distributed>=2024.8; python_version >= "3.11" and extra == "all"
 Requires-Dist: gcsfs<=2025.7.0,>=2025.5.1; extra == "all"
 Requires-Dist: google-cloud-bigquery-storage~=2.17; extra == "all"
 Requires-Dist: google-cloud-bigquery[bqstorage,pandas]==3.14.1; extra == "all"
@@ -165,11 +162,11 @@ Requires-Dist: azure-core~=1.24; extra == "complete"
 Requires-Dist: azure-identity~=1.5; extra == "complete"
 Requires-Dist: azure-keyvault-secrets~=4.2; extra == "complete"
 Requires-Dist: boto3<1.36,>=1.28.0; extra == "complete"
-Requires-Dist: dask~=2023.12.1; python_version < "3.11" and extra == "complete"
-Requires-Dist: dask~=2024.12.1; python_version >= "3.11" and extra == "complete"
+Requires-Dist: dask>=2023.12.1; python_version < "3.11" and extra == "complete"
+Requires-Dist: dask>=2024.8; python_version >= "3.11" and extra == "complete"
 Requires-Dist: databricks-sdk~=0.20.0; extra == "complete"
-Requires-Dist: distributed~=2023.12.1; python_version < "3.11" and extra == "complete"
-Requires-Dist: distributed~=2024.12.1; python_version >= "3.11" and extra == "complete"
+Requires-Dist: distributed>=2023.12.1; python_version < "3.11" and extra == "complete"
+Requires-Dist: distributed>=2024.8; python_version >= "3.11" and extra == "complete"
 Requires-Dist: gcsfs<=2025.7.0,>=2025.5.1; extra == "complete"
 Requires-Dist: google-cloud-bigquery-storage~=2.17; extra == "complete"
 Requires-Dist: google-cloud-bigquery[bqstorage,pandas]==3.14.1; extra == "complete"
@@ -200,11 +197,11 @@ Requires-Dist: azure-identity~=1.5; extra == "complete-api"
 Requires-Dist: azure-keyvault-secrets~=4.2; extra == "complete-api"
 Requires-Dist: boto3<1.36,>=1.28.0; extra == "complete-api"
 Requires-Dist: dask-kubernetes~=0.11.0; extra == "complete-api"
-Requires-Dist: dask~=2023.12.1; python_version < "3.11" and extra == "complete-api"
-Requires-Dist: dask~=2024.12.1; python_version >= "3.11" and extra == "complete-api"
+Requires-Dist: dask>=2023.12.1; python_version < "3.11" and extra == "complete-api"
+Requires-Dist: dask>=2024.8; python_version >= "3.11" and extra == "complete-api"
 Requires-Dist: databricks-sdk~=0.20.0; extra == "complete-api"
-Requires-Dist: distributed~=2023.12.1; python_version < "3.11" and extra == "complete-api"
-Requires-Dist: distributed~=2024.12.1; python_version >= "3.11" and extra == "complete-api"
+Requires-Dist: distributed>=2023.12.1; python_version < "3.11" and extra == "complete-api"
+Requires-Dist: distributed>=2024.8; python_version >= "3.11" and extra == "complete-api"
 Requires-Dist: fastapi~=0.116.0; extra == "complete-api"
 Requires-Dist: gcsfs<=2025.7.0,>=2025.5.1; extra == "complete-api"
 Requires-Dist: google-cloud-bigquery-storage~=2.17; extra == "complete-api"
@@ -212,7 +209,6 @@ Requires-Dist: google-cloud-bigquery[bqstorage,pandas]==3.14.1; extra == "comple
 Requires-Dist: google-cloud-storage==2.14.0; extra == "complete-api"
 Requires-Dist: google-cloud==0.34; extra == "complete-api"
 Requires-Dist: graphviz~=0.20.0; extra == "complete-api"
-Requires-Dist: grpcio~=1.70.0; extra == "complete-api"
 Requires-Dist: humanfriendly~=10.0; extra == "complete-api"
 Requires-Dist: igz-mgmt~=0.4.1; extra == "complete-api"
 Requires-Dist: kafka-python~=2.1.0; extra == "complete-api"
@@ -1,6 +1,6 @@
 mlrun/__init__.py,sha256=JYy9uteFFNPbPoC0geDEPhaLrfiqTijxUhLZSToAky4,8029
 mlrun/__main__.py,sha256=wQNaxW7QsqFBtWffnPkw-497fnpsrQzUnscBQQAP_UM,48364
-mlrun/config.py,sha256=3FWf5jbJDJrwfipSXoiNWOQ5A2Vy4FcZl7Ai8Pv4yIg,73057
+mlrun/config.py,sha256=F1PDI88t2cFujGnDr4YslEBzG6SckBKjUSdkxFX3zUE,73149
 mlrun/errors.py,sha256=bAk0t_qmCxQSPNK0TugOAfA5R6f0G6OYvEvXUWSJ_5U,9062
 mlrun/execution.py,sha256=wkmT1k0QROgGJFMBIsYUsJaqEF2bkqaYVzp_ZQb527Q,58814
 mlrun/features.py,sha256=jMEXo6NB36A6iaxNEJWzdtYwUmglYD90OIKTIEeWhE8,15841
@@ -8,7 +8,7 @@ mlrun/k8s_utils.py,sha256=zIacVyvsXrXVO-DdxAoGQOGEDWOGJEFJzYPhPVnn3z8,24548
 mlrun/lists.py,sha256=OlaV2QIFUzmenad9kxNJ3k4whlDyxI3zFbGwr6vpC5Y,8561
 mlrun/model.py,sha256=wHtM8LylSOEFk6Hxl95CVm8DOPhofjsANYdIvKHH6dw,88956
 mlrun/render.py,sha256=5DlhD6JtzHgmj5RVlpaYiHGhX84Q7qdi4RCEUj2UMgw,13195
-mlrun/run.py,sha256=WwcAkbmfnT0Qslxte4xchl-B_UN5YkJIz6_gDGT9_mo,48208
+mlrun/run.py,sha256=OBx7rvtgd288dyl42DOll-zU05cFzsA5Is958qnX4Qo,48673
 mlrun/secrets.py,sha256=dZPdkc_zzfscVQepOHUwmzFqnBavDCBXV9DQoH_eIYM,7800
 mlrun/alerts/__init__.py,sha256=0gtG1BG0DXxFrXegIkjbM1XEN4sP9ODo0ucXrNld1hU,601
 mlrun/alerts/alert.py,sha256=QQFZGydQbx9RvAaSiaH-ALQZVcDKQX5lgizqj_rXW2k,15948
@@ -118,7 +118,7 @@ mlrun/db/__init__.py,sha256=WqJ4x8lqJ7ZoKbhEyFqkYADd9P6E3citckx9e9ZLcIU,1163
 mlrun/db/auth_utils.py,sha256=hpg8D2r82oN0BWabuWN04BTNZ7jYMAF242YSUpK7LFM,5211
 mlrun/db/base.py,sha256=D4P8jhsp4j3ZPg2tKRTgFgm4hrGAx7kVnmio9qfJZDI,32295
 mlrun/db/factory.py,sha256=yP2vVmveUE7LYTCHbS6lQIxP9rW--zdISWuPd_I3d_4,2111
-mlrun/db/httpdb.py,sha256=-Jl7WsZDFcTSpiSmBrutLoMPGALDjpDYI899i3F9hcw,238788
+mlrun/db/httpdb.py,sha256=q0AggpqHdG5oLukZQUsUOXG26eeXlrvdcTacueNbabY,238789
 mlrun/db/nopdb.py,sha256=SZqCaCnaijT8-vivdVjj0VvZcZyqzat4YFFhOJlrTtI,28661
 mlrun/feature_store/__init__.py,sha256=SlI845bWt6xX34SXunHHqhmFAR9-5v2ak8N-qpcAPGo,1328
 mlrun/feature_store/api.py,sha256=qKj5Tk6prTab6XWatWhBuPRVp0eJEctoxRMN2wz48vA,32168
@@ -229,12 +229,12 @@ mlrun/model_monitoring/__init__.py,sha256=qDQnncjya9XPTlfvGyfWsZWiXc-glGZrrNja-5
 mlrun/model_monitoring/api.py,sha256=k0eOm-vW8z2u05PwMK2PI2mSAplK0xGIrUe_XWk7mRM,27000
 mlrun/model_monitoring/controller.py,sha256=2XOkOZRB03K9ph6TH-ICspHga-GQOURL0C8-0GTHaTY,43961
 mlrun/model_monitoring/features_drift_table.py,sha256=c6GpKtpOJbuT1u5uMWDL_S-6N4YPOmlktWMqPme3KFY,25308
-mlrun/model_monitoring/helpers.py,sha256=VQ_afyhqzFMOqPdqpkpX3md_afjlkFtsUWIg28zxp5g,23596
+mlrun/model_monitoring/helpers.py,sha256=50oFqgIc5xFHYPIVgq3M-Gbr7epqAI5NgHmvOeMy52U,24667
 mlrun/model_monitoring/stream_processing.py,sha256=bryYO3D0cC10MAQ-liHxUZ79MrL-VFXCb7KNyj6bl-8,34655
 mlrun/model_monitoring/writer.py,sha256=rGRFzSOkqZWvD3Y6sVk2H1Gepfnkzkp9ce00PsApTLo,8288
 mlrun/model_monitoring/applications/__init__.py,sha256=BwlmRELlFJf2b2YMyv5kUSHNe8--OyqWhDgRlT8a_8g,779
 mlrun/model_monitoring/applications/_application_steps.py,sha256=t9LDIqQUGE10cyjyhlg0QqN1yVx0apD1HpERYLJfm8U,7409
-mlrun/model_monitoring/applications/base.py,sha256=vFLsku105XPPMiDtLgiDZbazlR9dWP4o7gnJHwtqsrk,47785
+mlrun/model_monitoring/applications/base.py,sha256=X-9zjdnW7i-zfhEdsT76JaxlSBk9J1HSchx-FcJ-Eqo,47911
 mlrun/model_monitoring/applications/context.py,sha256=3W3AW4oyJgx_nW_5mDsV59Iy5D3frkfYMQSc6DgBc4c,17004
 mlrun/model_monitoring/applications/histogram_data_drift.py,sha256=2qgfFmrpHf-x0_EaHD-0T28piwSQzw-HH71aV1GwbZs,15389
 mlrun/model_monitoring/applications/results.py,sha256=LfBQOmkpKGvVGNrcj5QiXsRIG2IRgcv_Xqe4QJBmauk,5699
@@ -253,7 +253,7 @@ mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connection.py,sha256=dtkaHaWKWE
 mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py,sha256=Vj8eWZ6jxXs9nTlo5Du1jJjYutwSNp4ZtztvKsnrr4M,51333
 mlrun/model_monitoring/db/tsdb/v3io/__init__.py,sha256=aL3bfmQsUQ-sbvKGdNihFj8gLCK3mSys0qDcXtYOwgc,616
 mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py,sha256=sNQFj6qyJx5eSBKRC3gyTc1cfh1l2IkRpPtuZwtzCW0,6844
-mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py,sha256=fLzZrtZvJQk22eBT-zUzJfvbrlFwyBB3FbTSgivdLNQ,60242
+mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py,sha256=3GNMudOpedhu_OId4Gp-r9nj1gtqh_353yn2gWta-BY,61459
 mlrun/model_monitoring/metrics/__init__.py,sha256=6CsTXAxeLbbf8yfCADTaxmiavqwrLEdYFJ-qc5kgDAY,569
 mlrun/model_monitoring/metrics/histogram_distance.py,sha256=E9_WIl2vd6qNvoHVHoFcnuQk3ekbFWOdi8aU7sHrfk4,4724
 mlrun/package/__init__.py,sha256=v7VDyK9kDOOuDvFo4oiGV2fx-vM1KL7fdN9pGLakhUQ,7008
@@ -277,10 +277,10 @@ mlrun/platforms/__init__.py,sha256=QgtpAt1lpfTKk0mLtesB1P8szK9cpNDPeYzu2qDbPCM,3
 mlrun/platforms/iguazio.py,sha256=32_o95Ntx9z3ciowt2NcnX7tAiLBwX3VB0mbTQ-KrIQ,13848
 mlrun/projects/__init__.py,sha256=hdCOA6_fp8X4qGGGT7Bj7sPbkM1PayWuaVZL0DkpuZw,1240
 mlrun/projects/operations.py,sha256=Rc__P5ucNAY2G-lHc2LrnZs15PUbNFt8-NqNNT2Bjpk,20623
-mlrun/projects/pipelines.py,sha256=nGDzBABEOqoe9sWbax4SfF8CVLgrvK0NLWBadzEthVE,52219
+mlrun/projects/pipelines.py,sha256=ZOfuIEHOXfuc4qAkuWvbWhCjP6kqpLkv-yBBaY9RXhg,52219
 mlrun/projects/project.py,sha256=xbh6qXfU9ckkDSTcd-LyPWxHtHzz6gsqDI63ZZQPuhs,256413
 mlrun/runtimes/__init__.py,sha256=8cqrYKy1a0_87XG7V_p96untQ4t8RocadM4LVEEN1JM,9029
-mlrun/runtimes/base.py,sha256=8AMJcTnm9_LSEKLlmmbEzGSCRAoCh3vAQudKzuIVXhY,38285
+mlrun/runtimes/base.py,sha256=LyrF2SmyNN2uBRL-5bUzFxTm186pXywjnFmu5mhtNtA,38483
 mlrun/runtimes/daskjob.py,sha256=IN6gKKrmCIjWooj5FgFm-pAb2i7ra1ERRzClfu_rYGI,20102
 mlrun/runtimes/funcdoc.py,sha256=zRFHrJsV8rhDLJwoUhcfZ7Cs0j-tQ76DxwUqdXV_Wyc,9810
 mlrun/runtimes/function_reference.py,sha256=fnMKUEieKgy4JyVLhFpDtr6JvKgOaQP8F_K2H3-Pk9U,5030
@@ -300,7 +300,7 @@ mlrun/runtimes/mpijob/abstract.py,sha256=QjAG4OZ6JEQ58w5-qYNd6hUGwvaW8ynLtlr9jNf
 mlrun/runtimes/mpijob/v1.py,sha256=zSlRkiWHz4B3yht66sVf4mlfDs8YT9EnP9DfBLn5VNs,3372
 mlrun/runtimes/nuclio/__init__.py,sha256=osOVMN9paIOuUoOTizmkxMb_OXRP-SlPwXHJSSYK_wk,834
 mlrun/runtimes/nuclio/api_gateway.py,sha256=vH9ClKVP4Mb24rvA67xPuAvAhX-gAv6vVtjVxyplhdc,26969
-mlrun/runtimes/nuclio/function.py,sha256=SZAZOyRprArd1DrUD-QR8P9Bh1UuQfTMUpjTLq2jg3Q,54916
+mlrun/runtimes/nuclio/function.py,sha256=jNlBbtdIoQdd2ZKljqDgKUTHuM4iE_JiO_ANzQyssZE,55168
 mlrun/runtimes/nuclio/nuclio.py,sha256=sLK8KdGO1LbftlL3HqPZlFOFTAAuxJACZCVl1c0Ha6E,2942
 mlrun/runtimes/nuclio/serving.py,sha256=0JEMW1_0Eqx5j-Wpksytm9GhUmoho4L7glIs1qEswMc,35641
 mlrun/runtimes/nuclio/application/__init__.py,sha256=rRs5vasy_G9IyoTpYIjYDafGoL6ifFBKgBtsXn31Atw,614
@@ -329,7 +329,7 @@ mlrun/utils/async_http.py,sha256=8Olx8TNNeXB07nEGwlqhEgFgnFAD71vBU_bqaA9JW-w,122
 mlrun/utils/azure_vault.py,sha256=IEFizrDGDbAaoWwDr1WoA88S_EZ0T--vjYtY-i0cvYQ,3450
 mlrun/utils/clones.py,sha256=qbAGyEbSvlewn3Tw_DpQZP9z6MGzFhSaZfI1CblX8Fg,7515
 mlrun/utils/condition_evaluator.py,sha256=-nGfRmZzivn01rHTroiGY4rqEv8T1irMyhzxEei-sKc,1897
-mlrun/utils/helpers.py,sha256=_koZL7kTzp9e77MehChyjM4InuZ_DMS8iEjobqljhGA,83337
+mlrun/utils/helpers.py,sha256=dCISzUwBjGztqsl27-WrznT485nZa7ZliLF_WXw2i_4,83457
 mlrun/utils/http.py,sha256=5ZU2VpokaUM_DT3HBSqTm8xjUqTPjZN5fKkSIvKlTl0,8704
 mlrun/utils/logger.py,sha256=uaCgI_ezzaXf7nJDCy-1Nrjds8vSXqDbzmjmb3IyCQo,14864
 mlrun/utils/regex.py,sha256=FcRwWD8x9X3HLhCCU2F0AVKTFah784Pr7ZAe3a02jw8,5199
@@ -340,19 +340,19 @@ mlrun/utils/vault.py,sha256=-36b_PG0Fk9coPJiX6F704NF1nmKDdCH9Bg17wep88w,10446
 mlrun/utils/notifications/__init__.py,sha256=eUzQDBxSQmMZASRY-YAnYS6tL5801P0wEjycp3Dvoe0,990
 mlrun/utils/notifications/notification_pusher.py,sha256=tspup8ZNUggLxx4No2da9EY7GwHsihY33A8oN_tHKpk,27356
 mlrun/utils/notifications/notification/__init__.py,sha256=9Rfy6Jm8n0LaEDO1VAQb6kIbr7_uVuQhK1pS_abELIY,2581
-mlrun/utils/notifications/notification/base.py,sha256=-9e3XqUixrWwImnTGrIL4enJRSIUP9gMrJVxwaLqeXc,5403
+mlrun/utils/notifications/notification/base.py,sha256=xrKdA5-a6eGWXogmSAtgJS0cKqb6znh4M-EpuYhziX0,5967
 mlrun/utils/notifications/notification/console.py,sha256=ICbIhOf9fEBJky_3j9TFiKAewDGyDHJr9l4VeT7G2sc,2745
-mlrun/utils/notifications/notification/git.py,sha256=t2lqRrPRBO4awf_uhxJreH9CpcbYSH8T3CvHtwspHkE,6306
+mlrun/utils/notifications/notification/git.py,sha256=JKAiEfs5qOMn0IeU__AzdQ4u6GctMzu1xMauNJ4wdUw,6311
 mlrun/utils/notifications/notification/ipython.py,sha256=9uZvI1uOLFaNuAsfJPXmL3l6dOzFoWdBK5GYNYFAfks,2282
 mlrun/utils/notifications/notification/mail.py,sha256=ZyJ3eqd8simxffQmXzqd3bgbAqp1vij7C6aRJ9h2mgs,6012
-mlrun/utils/notifications/notification/slack.py,sha256=kfhogR5keR7Zjh0VCjJNK3NR5_yXT7Cv-x9GdOUW4Z8,7294
-mlrun/utils/notifications/notification/webhook.py,sha256=zxh8CAlbPnTazsk6r05X5TKwqUZVOH5KBU2fJbzQlG4,5330
+mlrun/utils/notifications/notification/slack.py,sha256=wSu_7W0EnGLBNwIgWCYEeTP8j9SPAMPDBnfUcPnVZYA,7299
+mlrun/utils/notifications/notification/webhook.py,sha256=FM5-LQAKAVJKp37MRzR3SsejalcnpM6r_9Oe7znxZEA,5313
 mlrun/utils/version/__init__.py,sha256=YnzE6tlf24uOQ8y7Z7l96QLAI6-QEii7-77g8ynmzy0,613
-mlrun/utils/version/version.json,sha256=1HcZ7ziLdO_Jwkc199n3y4lbqnW_9yB-fHAGeTJa6oY,90
+mlrun/utils/version/version.json,sha256=8LqPKs3owy8BezmiKv4mJzJ7Q6092ElXwEN7UTAst3U,90
 mlrun/utils/version/version.py,sha256=M2hVhRrgkN3SxacZHs3ZqaOsqAA7B6a22ne324IQ1HE,1877
-mlrun-1.10.0rc26.dist-info/licenses/LICENSE,sha256=zTiv1CxWNkOk1q8eJS1G_8oD4gWpWLwWxj_Agcsi8Os,11337
-mlrun-1.10.0rc26.dist-info/METADATA,sha256=_WVF1hjawp46wHlKlTG9pWxJa9J7OuDCoFlvZEKOCV8,26337
-mlrun-1.10.0rc26.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-mlrun-1.10.0rc26.dist-info/entry_points.txt,sha256=1Owd16eAclD5pfRCoJpYC2ZJSyGNTtUr0nCELMioMmU,46
-mlrun-1.10.0rc26.dist-info/top_level.txt,sha256=NObLzw3maSF9wVrgSeYBv-fgnHkAJ1kEkh12DLdd5KM,6
-mlrun-1.10.0rc26.dist-info/RECORD,,
+mlrun-1.10.0rc28.dist-info/licenses/LICENSE,sha256=zTiv1CxWNkOk1q8eJS1G_8oD4gWpWLwWxj_Agcsi8Os,11337
+mlrun-1.10.0rc28.dist-info/METADATA,sha256=whhfF5YIKLUim-iWuo6lB4V9mzSCh99eaYisXKLAnEI,26017
+mlrun-1.10.0rc28.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+mlrun-1.10.0rc28.dist-info/entry_points.txt,sha256=1Owd16eAclD5pfRCoJpYC2ZJSyGNTtUr0nCELMioMmU,46
+mlrun-1.10.0rc28.dist-info/top_level.txt,sha256=NObLzw3maSF9wVrgSeYBv-fgnHkAJ1kEkh12DLdd5KM,6
+mlrun-1.10.0rc28.dist-info/RECORD,,