mlrun 1.6.0rc12__py3-none-any.whl → 1.6.0rc13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


mlrun/config.py CHANGED
@@ -1130,7 +1130,7 @@ class Config:
 
     def is_explicit_ack(self) -> bool:
         return self.httpdb.nuclio.explicit_ack == "enabled" and (
-            not self.nuclio_version or self.nuclio_version >= "1.12.7"
+            not self.nuclio_version or self.nuclio_version >= "1.12.9"
         )
 
 
mlrun/datastore/targets.py CHANGED
@@ -877,7 +877,7 @@ class ParquetTarget(BaseStoreTarget):
         else:
             storage_options = storage_options or self.storage_options
 
-        graph.add_step(
+        step = graph.add_step(
             name=self.name or "ParquetTarget",
             after=after,
             graph_shape="cylinder",
@@ -894,6 +894,16 @@ class ParquetTarget(BaseStoreTarget):
             **self.attributes,
         )
 
+        original_to_dict = step.to_dict
+
+        def delete_update_last_written(*arg, **kargs):
+            result = original_to_dict(*arg, **kargs)
+            del result["class_args"]["update_last_written"]
+            return result
+
+        # update_last_written is not serializable (ML-5108)
+        step.to_dict = delete_update_last_written
+
     def get_spark_options(self, key_column=None, timestamp_key=None, overwrite=True):
         partition_cols = []
         if timestamp_key:
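The added block wraps the step's `to_dict` so the non-serializable `update_last_written` callback (ML-5108) never reaches the serialized graph. A minimal sketch of the same wrap-and-strip pattern — the `Step` class below is a stand-in, not MLRun's:

```python
import json

class Step:
    """Stand-in for a serving-graph step whose class_args hold a callable."""
    def __init__(self):
        self.class_args = {"update_last_written": lambda ts: None, "path": "out.parquet"}

    def to_dict(self):
        return {"class_args": dict(self.class_args)}

step = Step()
original_to_dict = step.to_dict

def to_dict_without_callback(*args, **kwargs):
    result = original_to_dict(*args, **kwargs)
    # the callable cannot be serialized, so drop it before the dict is dumped
    del result["class_args"]["update_last_written"]
    return result

step.to_dict = to_dict_without_callback  # shadow the bound method on the instance
json.dumps(step.to_dict())  # works: only serializable entries remain
```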
mlrun/feature_store/retrieval/base.py CHANGED
@@ -385,19 +385,24 @@ class BaseMerger(abc.ABC):
         pass
 
     def _normalize_timestamp_column(
-        self, entity_timestamp_column, reference_df, featureset_df, featureset_name
+        self,
+        entity_timestamp_column,
+        reference_df,
+        featureset_timestamp,
+        featureset_df,
+        featureset_name,
     ):
         reference_df_timestamp_type = reference_df[entity_timestamp_column].dtype.name
-        featureset_df_timestamp_type = featureset_df[entity_timestamp_column].dtype.name
+        featureset_df_timestamp_type = featureset_df[featureset_timestamp].dtype.name
 
         if reference_df_timestamp_type != featureset_df_timestamp_type:
             logger.info(
                 f"Merger detected timestamp resolution incompatibility between feature set {featureset_name} and "
                 f"others: {reference_df_timestamp_type} and {featureset_df_timestamp_type}. Converting feature set "
-                f"timestamp column '{entity_timestamp_column}' to type {reference_df_timestamp_type}."
+                f"timestamp column '{featureset_timestamp}' to type {reference_df_timestamp_type}."
             )
-            featureset_df[entity_timestamp_column] = featureset_df[
-                entity_timestamp_column
+            featureset_df[featureset_timestamp] = featureset_df[
+                featureset_timestamp
             ].astype(reference_df_timestamp_type)
 
         return featureset_df
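With the new `featureset_timestamp` parameter, the merger casts the feature set's own timestamp column (which may be named differently from the entity timestamp column) to the reference frame's dtype. A standalone sketch of the normalization with pandas, assuming pandas ≥ 2.0 where non-nanosecond datetime resolutions exist:

```python
import pandas as pd

reference_df = pd.DataFrame(
    {"ts": pd.to_datetime(["2024-01-01", "2024-01-02"]).astype("datetime64[ns]")}
)
featureset_df = pd.DataFrame(
    {"event_time": pd.to_datetime(["2024-01-01", "2024-01-02"]).astype("datetime64[us]")}
)

ref_type = reference_df["ts"].dtype.name          # 'datetime64[ns]'
fs_type = featureset_df["event_time"].dtype.name  # 'datetime64[us]'
if ref_type != fs_type:
    # align resolutions so a later merge_asof does not fail on mixed dtypes
    featureset_df["event_time"] = featureset_df["event_time"].astype(ref_type)
```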
mlrun/feature_store/retrieval/dask_merger.py CHANGED
@@ -53,7 +53,11 @@ class DaskFeatureMerger(BaseMerger):
         from dask.dataframe.multi import merge_asof
 
         featureset_df = self._normalize_timestamp_column(
-            entity_timestamp_column, entity_df, featureset_df, featureset_name
+            entity_timestamp_column,
+            entity_df,
+            featureset_timestamp,
+            featureset_df,
+            featureset_name,
         )
 
         def sort_partition(partition, timestamp):
mlrun/feature_store/retrieval/local_merger.py CHANGED
@@ -48,7 +48,11 @@ class LocalFeatureMerger(BaseMerger):
         featureset_df.sort_values(by=featureset_timstamp, inplace=True)
 
         featureset_df = self._normalize_timestamp_column(
-            entity_timestamp_column, entity_df, featureset_df, featureset_name
+            entity_timestamp_column,
+            entity_df,
+            featureset_timstamp,
+            featureset_df,
+            featureset_name,
         )
 
         merged_df = pd.merge_asof(
mlrun/model_monitoring/api.py CHANGED
@@ -11,7 +11,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
 
 import datetime
 import hashlib
@@ -30,6 +29,7 @@ from mlrun.utils import logger
 
 from .batch import VirtualDrift
 from .features_drift_table import FeaturesDriftTablePlot
+from .helpers import bump_model_endpoint_last_request
 from .model_endpoint import ModelEndpoint
 
 # A union of all supported dataset types:
@@ -125,13 +125,14 @@ def record_results(
     model_endpoint_name: str,
     endpoint_id: str = "",
     function_name: str = "",
-    context: mlrun.MLClientCtx = None,
-    infer_results_df: pd.DataFrame = None,
-    sample_set_statistics: typing.Dict[str, typing.Any] = None,
+    context: typing.Optional[mlrun.MLClientCtx] = None,
+    infer_results_df: typing.Optional[pd.DataFrame] = None,
+    sample_set_statistics: typing.Optional[dict[str, typing.Any]] = None,
     monitoring_mode: ModelMonitoringMode = ModelMonitoringMode.enabled,
-    drift_threshold: float = None,
-    possible_drift_threshold: float = None,
+    drift_threshold: typing.Optional[float] = None,
+    possible_drift_threshold: typing.Optional[float] = None,
     trigger_monitoring_job: bool = False,
+    last_in_batch_set: typing.Optional[bool] = True,
     artifacts_tag: str = "",
     default_batch_image="mlrun/mlrun",
 ) -> ModelEndpoint:
@@ -164,6 +165,14 @@ def record_results(
     :param possible_drift_threshold: The threshold of which to mark possible drifts.
     :param trigger_monitoring_job:   If true, run the batch drift job. If not exists, the monitoring batch function
                                      will be registered through MLRun API with the provided image.
+    :param last_in_batch_set:        This flag can (and should only) be used when the model endpoint does not have
+                                     model-monitoring set.
+                                     If set to `True` (the default), this flag marks the current monitoring window
+                                     (on this monitoring endpoint) as completed - the data inferred so far is assumed
+                                     to be the total data for this monitoring window.
+                                     You may want to set this flag to `False` if you want to record multiple results in
+                                     close time proximity ("batch set"). In this case, set this flag to `False` on all
+                                     but the last batch in the set.
     :param artifacts_tag:            Tag to use for all the artifacts resulted from the function. Will be relevant
                                      only if the monitoring batch job has been triggered.
 
@@ -186,6 +195,7 @@ def record_results(
         monitoring_mode=monitoring_mode,
         db_session=db,
     )
+    logger.debug("Model endpoint", endpoint=model_endpoint.to_dict())
 
     if infer_results_df is not None:
         # Write the monitoring parquet to the relevant model endpoint context
@@ -195,6 +205,27 @@ def record_results(
             infer_results_df=infer_results_df,
         )
 
+    if model_endpoint.spec.stream_path == "":
+        if last_in_batch_set:
+            logger.info(
+                "Updating the last request time to mark the current monitoring window as completed",
+                project=project,
+                endpoint_id=model_endpoint.metadata.uid,
+            )
+            bump_model_endpoint_last_request(
+                project=project, model_endpoint=model_endpoint, db=db
+            )
+    else:
+        if last_in_batch_set is not None:
+            logger.warning(
+                "`last_in_batch_set` is not `None`, but the model endpoint has a stream path. "
+                "Ignoring `last_in_batch_set`, as it is relevant only when the model "
+                "endpoint does not have a model monitoring infrastructure in place (i.e. stream path is "
+                "empty). Set `last_in_batch_set` to `None` to resolve this warning.",
+                project=project,
+                endpoint_id=model_endpoint.metadata.uid,
+            )
+
     if trigger_monitoring_job:
         # Run the monitoring batch drift job
         trigger_drift_batch_job(
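Following the docstring and branch above, a hedged sketch of recording a batch set against an endpoint with no monitoring stream — only the last call closes the window. The helper is illustrative, and it assumes `record_results` also takes the project and model arguments from its full signature, which this hunk only partially shows:

```python
import pandas as pd
from mlrun.model_monitoring.api import record_results

def record_batch_set(project: str, endpoint_name: str, batches: list[pd.DataFrame]):
    for i, batch_df in enumerate(batches):
        record_results(
            project=project,
            model_endpoint_name=endpoint_name,
            infer_results_df=batch_df,
            # False for all but the last batch, so intermediate calls
            # do not mark the monitoring window as completed
            last_in_batch_set=(i == len(batches) - 1),
        )
```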
mlrun/model_monitoring/helpers.py CHANGED
@@ -11,13 +11,21 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
 
 
+import datetime
 import typing
 
+import mlrun
 import mlrun.common.model_monitoring.helpers
 import mlrun.common.schemas
+from mlrun.common.schemas.model_monitoring import EventFieldType
+from mlrun.errors import MLRunInvalidArgumentError
+from mlrun.model_monitoring.model_endpoint import ModelEndpoint
+from mlrun.utils import logger
+
+if typing.TYPE_CHECKING:
+    from mlrun.db.base import RunDBInterface
 
 
 def get_stream_path(project: str = None, application_name: str = None):
@@ -89,3 +97,53 @@ def get_connection_string(secret_provider: typing.Callable = None) -> str:
         )
         or mlrun.mlconf.model_endpoint_monitoring.endpoint_store_connection
     )
+
+
+def bump_model_endpoint_last_request(
+    project: str,
+    model_endpoint: ModelEndpoint,
+    db: "RunDBInterface",
+    minutes_delta: int = 10,  # TODO: move to config - should be the same as `batch_interval`
+    seconds_delta: int = 1,
+) -> None:
+    """
+    Update the last request field of the model endpoint to be after the current last request time.
+
+    :param project:        Project name.
+    :param model_endpoint: Model endpoint object.
+    :param db:             DB interface.
+    :param minutes_delta:  Minutes delta to add to the last request time.
+    :param seconds_delta:  Seconds delta to add to the last request time. This is mainly to ensure that the last
+                           request time is strongly greater than the previous one (with respect to the window time)
+                           after adding the minutes delta.
+    """
+    if not model_endpoint.status.last_request:
+        logger.error(
+            "Model endpoint last request time is empty, cannot bump it.",
+            project=project,
+            endpoint_id=model_endpoint.metadata.uid,
+        )
+        raise MLRunInvalidArgumentError("Model endpoint last request time is empty")
+
+    bumped_last_request = (
+        datetime.datetime.fromisoformat(model_endpoint.status.last_request)
+        + datetime.timedelta(
+            minutes=minutes_delta,
+            seconds=seconds_delta,
+        )
+        + datetime.timedelta(
+            seconds=mlrun.mlconf.model_endpoint_monitoring.parquet_batching_timeout_secs
+        )
+    ).strftime(EventFieldType.TIME_FORMAT)
+    logger.info(
+        "Bumping model endpoint last request time",
+        project=project,
+        endpoint_id=model_endpoint.metadata.uid,
+        last_request=model_endpoint.status.last_request,
+        bumped_last_request=bumped_last_request,
+    )
+    db.patch_model_endpoint(
+        project=project,
+        endpoint_id=model_endpoint.metadata.uid,
+        attributes={EventFieldType.LAST_REQUEST: bumped_last_request},
+    )
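The bump itself is plain `datetime` arithmetic: parse the stored ISO timestamp, add the two deltas plus the parquet-batching timeout, and reformat. A standalone illustration with stand-in values for the `mlconf` timeout and the output format:

```python
import datetime

last_request = "2024-01-01T12:00:00+00:00"
minutes_delta, seconds_delta = 10, 1
parquet_batching_timeout_secs = 30  # stand-in for the mlconf value

bumped = (
    datetime.datetime.fromisoformat(last_request)
    + datetime.timedelta(minutes=minutes_delta, seconds=seconds_delta)
    + datetime.timedelta(seconds=parquet_batching_timeout_secs)
)
print(bumped.isoformat())  # 2024-01-01T12:10:31+00:00
```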
mlrun/projects/operations.py CHANGED
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+import warnings
 from typing import Dict, List, Optional, Union
 
 import kfp
@@ -270,10 +271,17 @@ def build_function(
                              e.g. extra_args="--skip-tls-verify --build-arg A=val"
     :param force_build:      Force building the image, even when no changes were made
     """
+    if not overwrite_build_params:
+        # TODO: change overwrite_build_params default to True in 1.8.0
+        warnings.warn(
+            "The `overwrite_build_params` parameter default will change from 'False' to 'True' in 1.8.0.",
+            mlrun.utils.OverwriteBuildParamsWarning,
+        )
+
     engine, function = _get_engine_and_function(function, project_object)
     if function.kind in mlrun.runtimes.RuntimeKinds.nuclio_runtimes():
         raise mlrun.errors.MLRunInvalidArgumentError(
-            "cannot build use deploy_function()"
+            "Cannot build use deploy_function()"
         )
     if engine == "kfp":
         if overwrite_build_params:
@@ -291,15 +299,21 @@ def build_function(
                 skip_deployed=skip_deployed,
             )
         else:
-            function.build_config(
-                image=image,
-                base_image=base_image,
-                commands=commands,
-                secret=secret_name,
-                requirements=requirements,
-                overwrite=overwrite_build_params,
-                extra_args=extra_args,
-            )
+            # TODO: remove filter once overwrite_build_params default is changed to True in 1.8.0
+            with warnings.catch_warnings():
+                warnings.simplefilter(
+                    "ignore", category=mlrun.utils.OverwriteBuildParamsWarning
+                )
+
+                function.build_config(
+                    image=image,
+                    base_image=base_image,
+                    commands=commands,
+                    secret=secret_name,
+                    requirements=requirements,
+                    overwrite=overwrite_build_params,
+                    extra_args=extra_args,
+                )
         ready = function.deploy(
             watch=True,
             with_mlrun=with_mlrun,
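The pattern used here — warn about the old default through a dedicated `FutureWarning` subclass, and filter it out at internal call sites — is plain stdlib `warnings` machinery. A minimal self-contained sketch:

```python
import warnings

class OverwriteParamsWarning(FutureWarning):
    pass

def build_config(overwrite: bool = False):
    if not overwrite:
        warnings.warn("the `overwrite` default will change to True", OverwriteParamsWarning)

def internal_caller():
    # internal call sites opt out of the deprecation noise
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", category=OverwriteParamsWarning)
        build_config()

build_config()      # user call: emits OverwriteParamsWarning
internal_caller()   # silent
```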
mlrun/projects/project.py CHANGED
@@ -1944,8 +1944,9 @@ class MlrunProject(ModelObj):
         :param default_controller_image: The default image of the model monitoring controller job. Note that the writer
                                          function, which is a real time nuclio function, will be deployed with the same
                                          image. By default, the image is mlrun/mlrun.
-        :param base_period:              Minutes to determine the frequency in which the model monitoring controller job
-                                         is running. By default, the base period is 5 minutes.
+        :param base_period:              The time period in minutes in which the model monitoring controller job
+                                         runs. By default, the base period is 10 minutes. The schedule for the job
+                                         will be the following cron expression: "*/{base_period} * * * *".
         :return: model monitoring controller job as a dictionary.
         """
         db = mlrun.db.get_run_db(secrets=self._secrets)
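Per the updated docstring, the controller's schedule is a cron step expression derived from `base_period`:

```python
base_period = 10  # minutes
schedule = f"*/{base_period} * * * *"
# "*/10 * * * *" -> run at minutes 0, 10, 20, 30, 40, 50 of every hour
```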
@@ -2981,6 +2982,12 @@ class MlrunProject(ModelObj):
         :param extra_args:  A string containing additional builder arguments in the format of command-line options,
                             e.g. extra_args="--skip-tls-verify --build-arg A=val"
         """
+        if not overwrite_build_params:
+            # TODO: change overwrite_build_params default to True in 1.8.0
+            warnings.warn(
+                "The `overwrite_build_params` parameter default will change from 'False' to 'True' in 1.8.0.",
+                mlrun.utils.OverwriteBuildParamsWarning,
+            )
         default_image_name = mlrun.mlconf.default_project_image_name.format(
             name=self.name
         )
@@ -3052,35 +3059,48 @@ class MlrunProject(ModelObj):
             FutureWarning,
         )
 
-        self.build_config(
-            image=image,
-            set_as_default=set_as_default,
-            base_image=base_image,
-            commands=commands,
-            secret_name=secret_name,
-            with_mlrun=with_mlrun,
-            requirements=requirements,
-            requirements_file=requirements_file,
-            overwrite_build_params=overwrite_build_params,
-        )
+        if not overwrite_build_params:
+            # TODO: change overwrite_build_params default to True in 1.8.0
+            warnings.warn(
+                "The `overwrite_build_params` parameter default will change from 'False' to 'True' in 1.8.0.",
+                mlrun.utils.OverwriteBuildParamsWarning,
+            )
 
-        function = mlrun.new_function("mlrun--project--image--builder", kind="job")
-
-        build = self.spec.build
-        result = self.build_function(
-            function=function,
-            with_mlrun=build.with_mlrun,
-            image=build.image,
-            base_image=build.base_image,
-            commands=build.commands,
-            secret_name=build.secret,
-            requirements=build.requirements,
-            overwrite_build_params=overwrite_build_params,
-            mlrun_version_specifier=mlrun_version_specifier,
-            builder_env=builder_env,
-            extra_args=extra_args,
-            force_build=force_build,
-        )
+        # TODO: remove filter once overwrite_build_params default is changed to True in 1.8.0
+        with warnings.catch_warnings():
+            warnings.simplefilter(
+                "ignore", category=mlrun.utils.OverwriteBuildParamsWarning
+            )
+
+            self.build_config(
+                image=image,
+                set_as_default=set_as_default,
+                base_image=base_image,
+                commands=commands,
+                secret_name=secret_name,
+                with_mlrun=with_mlrun,
+                requirements=requirements,
+                requirements_file=requirements_file,
+                overwrite_build_params=overwrite_build_params,
+            )
+
+            function = mlrun.new_function("mlrun--project--image--builder", kind="job")
+
+            build = self.spec.build
+            result = self.build_function(
+                function=function,
+                with_mlrun=build.with_mlrun,
+                image=build.image,
+                base_image=build.base_image,
+                commands=build.commands,
+                secret_name=build.secret,
+                requirements=build.requirements,
+                overwrite_build_params=overwrite_build_params,
+                mlrun_version_specifier=mlrun_version_specifier,
+                builder_env=builder_env,
+                extra_args=extra_args,
+                force_build=force_build,
+            )
 
         try:
             mlrun.db.get_run_db(secrets=self._secrets).delete_function(
mlrun/runtimes/databricks_job/databricks_runtime.py CHANGED
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 import os
+from ast import FunctionDef, parse, unparse
 from base64 import b64decode, b64encode
 from typing import Callable, Dict, List, Optional, Union
 
@@ -22,61 +23,92 @@ from mlrun.model import HyperParamOptions, RunObject
 from mlrun.runtimes.kubejob import KubejobRuntime
 
 
+def get_log_artifacts_code(runobj: RunObject, task_parameters: dict):
+    artifact_json_dir = task_parameters.get(
+        "artifact_json_dir",
+        mlrun.mlconf.function.databricks.artifact_directory_path,
+    )
+    artifact_json_path = (
+        f"{artifact_json_dir}/mlrun_artifact_{runobj.metadata.uid}.json"
+    )
+    return (
+        log_artifacts_code_template.format(f"/dbfs{artifact_json_path}"),
+        artifact_json_path,
+    )
+
+
+def replace_log_artifact_function(code: str, log_artifacts_code: str):
+    # the user can define a dummy function in order to avoid editing their code.
+    # replace the mlrun_log_artifact function if it already exists.
+    is_replaced = False
+    parsed_code = parse(code)
+    for node in parsed_code.body:
+        if isinstance(node, FunctionDef) and node.name == "mlrun_log_artifact":
+            new_function_ast = parse(log_artifacts_code)
+            node.args = new_function_ast.body[0].args
+            node.body = new_function_ast.body[0].body
+            is_replaced = True
+            break
+    return unparse(parsed_code), is_replaced
+
+
 class DatabricksRuntime(KubejobRuntime):
     kind = "databricks"
     _is_remote = True
 
-    def _get_log_artifacts_code(self, runobj: RunObject, task_parameters: dict):
-        artifact_json_dir = task_parameters.get(
-            "artifact_json_dir",
-            mlrun.mlconf.function.databricks.artifact_directory_path,
-        )
-        artifact_json_path = (
-            f"{artifact_json_dir}/mlrun_artifact_{runobj.metadata.uid}.json"
+    @staticmethod
+    def _verify_returns(returns):
+        # TODO complete returns feature
+        if returns:
+            raise MLRunInvalidArgumentError(
+                "Databricks function does not support returns."
+            )
+
+    def _get_modified_user_code(self, original_handler: str, log_artifacts_code: str):
+        encoded_code = (
+            self.spec.build.functionSourceCode if hasattr(self.spec, "build") else None
         )
-        return (
-            artifacts_code_template.format(f"/dbfs{artifact_json_path}"),
-            artifact_json_path,
+        if not encoded_code:
+            raise ValueError("Databricks function must be provided with user code")
+
+        decoded_code = b64decode(encoded_code).decode("utf-8")
+        decoded_code, is_replaced = replace_log_artifact_function(
+            code=decoded_code, log_artifacts_code=log_artifacts_code
         )
+        if is_replaced:
+            decoded_code = (
+                logger_and_consts_code + _databricks_script_code + decoded_code
+            )
+        else:
+            decoded_code = (
+                logger_and_consts_code
+                + log_artifacts_code
+                + _databricks_script_code
+                + decoded_code
+            )
+        if original_handler:
+            decoded_code += f"\nresult = {original_handler}(**handler_arguments)\n"
+        decoded_code += _return_artifacts_code
+        return b64encode(decoded_code.encode("utf-8")).decode("utf-8")
 
     def get_internal_parameters(self, runobj: RunObject):
         """
-        Return the internal function code.
+        Return the internal function parameters + code.
         """
         task_parameters = runobj.spec.parameters.get("task_parameters", {})
         if "original_handler" in task_parameters:
             original_handler = task_parameters["original_handler"]
         else:
             original_handler = runobj.spec.handler or ""
-        encoded_code = (
-            self.spec.build.functionSourceCode if hasattr(self.spec, "build") else None
-        )
-        if not encoded_code:
-            raise ValueError("Databricks function must be provided with user code")
-        decoded_code = b64decode(encoded_code).decode("utf-8")
-        artifacts_code, artifact_json_path = self._get_log_artifacts_code(
+        log_artifacts_code, artifact_json_path = get_log_artifacts_code(
             runobj=runobj, task_parameters=task_parameters
         )
-        code = artifacts_code + _databricks_script_code + decoded_code
-        if original_handler:
-            code += f"\nresult = {original_handler}(**handler_arguments)\n"
-        code += """\n
-default_key_template = 'mlrun_return_value_'
-if result:
-    if isinstance(result, dict):
-        for key, path in result.items():
-            mlrun_log_artifact(name=key, path=path)
-    elif isinstance(result, (list, tuple, set)):
-        for index, value in enumerate(result):
-            key = f'{default_key_template}{index+1}'
-            mlrun_log_artifact(name=key, path=value)
-    elif isinstance(result, str):
-        mlrun_log_artifact(name=f'{default_key_template}1', path=result)
-    else:
-        mlrun_logger.warning(f'cannot log artifacts with the result of handler function \
- - result in unsupported type. {type(result)}')
-"""
-        code = b64encode(code.encode("utf-8")).decode("utf-8")
+        returns = runobj.spec.returns or []
+        self._verify_returns(returns=returns)
+        code = self._get_modified_user_code(
+            original_handler=original_handler,
+            log_artifacts_code=log_artifacts_code,
+        )
         updated_task_parameters = {
             "original_handler": original_handler,
             "artifact_json_path": artifact_json_path,
@@ -159,6 +191,22 @@ if result:
 )
 
 
+logger_and_consts_code = """ \n
+import os
+import logging
+mlrun_logger = logging.getLogger('mlrun_logger')
+mlrun_logger.setLevel(logging.DEBUG)
+
+mlrun_console_handler = logging.StreamHandler()
+mlrun_console_handler.setLevel(logging.DEBUG)
+mlrun_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+mlrun_console_handler.setFormatter(mlrun_formatter)
+mlrun_logger.addHandler(mlrun_console_handler)
+
+mlrun_default_artifact_template = 'mlrun_return_value_'
+mlrun_artifact_index = 0
+"""
+
 _databricks_script_code = """
 
 import argparse
@@ -168,16 +216,17 @@ parser.add_argument('handler_arguments')
 handler_arguments = parser.parse_args().handler_arguments
 handler_arguments = json.loads(handler_arguments)
 
-"""
 
-artifacts_code_template = """\n
-import logging
-mlrun_logger = logging.getLogger('mlrun_logger')
-mlrun_logger.setLevel(logging.DEBUG)
+"""
 
-def mlrun_log_artifact(name, path):
-    if not name or not path:
-        mlrun_logger.error(f'name and path required for logging an mlrun artifact - {{name}} : {{path}}')
+log_artifacts_code_template = """\n
+def mlrun_log_artifact(name='', path=''):
+    global mlrun_artifact_index
+    mlrun_artifact_index+=1  # counts how many artifacts we tried to log, not how many succeeded
+    if name is None or name == '':
+        name = f'{{mlrun_default_artifact_template}}{{mlrun_artifact_index}}'
+    if not path:
+        mlrun_logger.error(f'path required for logging an mlrun artifact - {{name}} : {{path}}')
         return
     if not isinstance(name, str) or not isinstance(path, str):
         mlrun_logger.error(f'name and path must be in string type for logging an mlrun artifact - {{name}} : {{path}}')
@@ -186,21 +235,38 @@ def mlrun_log_artifact(name, path):
         mlrun_logger.error(f'path for an mlrun artifact must start with /dbfs or dbfs:/ - {{name}} : {{path}}')
         return
     mlrun_artifacts_path = '{}'
-    import json
-    import os
-    new_data = {{name:path}}
-    if os.path.exists(mlrun_artifacts_path):
-        with open(mlrun_artifacts_path, 'r+') as json_file:
-            existing_data = json.load(json_file)
-            existing_data.update(new_data)
-            json_file.seek(0)
-            json.dump(existing_data, json_file)
-    else:
-        parent_dir = os.path.dirname(mlrun_artifacts_path)
-        if parent_dir != '/dbfs':
-            os.makedirs(parent_dir, exist_ok=True)
-        with open(mlrun_artifacts_path, 'w') as json_file:
-            json.dump(new_data, json_file)
-    mlrun_logger.info(f'successfully wrote artifact details to the artifact JSON file in DBFS - {{name}} : {{path}}')
+    try:
+        new_data = {{name:path}}
+        if os.path.exists(mlrun_artifacts_path):
+            with open(mlrun_artifacts_path, 'r+') as json_file:
+                existing_data = json.load(json_file)
+                existing_data.update(new_data)
+                json_file.seek(0)
+                json.dump(existing_data, json_file)
+        else:
+            parent_dir = os.path.dirname(mlrun_artifacts_path)
+            if parent_dir != '/dbfs':
+                os.makedirs(parent_dir, exist_ok=True)
+            with open(mlrun_artifacts_path, 'w') as json_file:
+                json.dump(new_data, json_file)
+        success_log = f'successfully wrote artifact details to the artifact JSON file in DBFS - {{name}} : {{path}}'
+        mlrun_logger.info(success_log)
+    except Exception as unknown_exception:
+        mlrun_logger.error(f'log mlrun artifact failed - {{name}} : {{path}}. error: {{unknown_exception}}')
 \n
 """
+
+_return_artifacts_code = """\n
+if result:
+    if isinstance(result, dict):
+        for key, path in result.items():
+            mlrun_log_artifact(name=key, path=path)
+    elif isinstance(result, (list, tuple, set)):
+        for artifact_path in result:
+            mlrun_log_artifact(path=artifact_path)
+    elif isinstance(result, str):
+        mlrun_log_artifact(path=result)
+    else:
+        mlrun_logger.warning(f'can not log artifacts with the result of handler function \
+ - result in unsupported type. {type(result)}')
+"""
mlrun/runtimes/databricks_job/databricks_wrapper.py CHANGED
@@ -200,7 +200,6 @@ def run_mlrun_databricks_job(
             is_finished=True,
         )
         run_output = workspace.jobs.get_run_output(get_task(run).run_id)
-        context.log_result("databricks_runtime_task", run_output.as_dict())
     finally:
         workspace.dbfs.delete(script_path_on_dbfs)
         workspace.dbfs.delete(artifact_json_path)
mlrun/runtimes/function.py CHANGED
@@ -550,6 +550,13 @@ class RemoteRuntime(KubeResource):
         """
         # todo: verify that the function name is normalized
 
+        old_http_session = getattr(self, "_http_session", None)
+        if old_http_session:
+            # terminate the existing http session prior to (re)deploy so that a connection to an old
+            # replica will not be reused
+            old_http_session.close()
+            self._http_session = None
+
         verbose = verbose or self.verbose
         if verbose:
             self.set_env("MLRUN_LOG_LEVEL", "DEBUG")
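A minimal sketch of the session-recycling pattern above, outside MLRun: close any previous `requests.Session` before redeploying so pooled connections to retired replicas are not reused, and recreate it lazily on the next invocation (the `Client` class is a stand-in):

```python
import requests

class Client:
    def redeploy(self) -> None:
        # drop any pooled connections to the old replicas
        old_session = getattr(self, "_http_session", None)
        if old_session:
            old_session.close()
        self._http_session = None
        # ... redeploy logic would go here ...

    def invoke(self, url: str) -> requests.Response:
        # lazily (re)create the session on first use after a redeploy
        if not getattr(self, "_http_session", None):
            self._http_session = requests.Session()
        return self._http_session.request("GET", url)
```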
@@ -939,7 +946,7 @@ class RemoteRuntime(KubeResource):
             http_client_kwargs["json"] = body
         try:
             logger.info("invoking function", method=method, path=path)
-            if not hasattr(self, "_http_session"):
+            if not getattr(self, "_http_session", None):
                 self._http_session = requests.Session()
             resp = self._http_session.request(
                 method, path, headers=headers, **http_client_kwargs
mlrun/runtimes/kubejob.py CHANGED
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 import time
+import warnings
 
 import mlrun.common.schemas
 import mlrun.db
@@ -130,7 +131,12 @@ class KubejobRuntime(KubeResource):
         :param builder_env: Kaniko builder pod env vars dict (for config/credentials)
             e.g. builder_env={"GIT_TOKEN": token}
         """
-
+        if not overwrite:
+            # TODO: change overwrite default to True in 1.8.0
+            warnings.warn(
+                "The `overwrite` parameter default will change from 'False' to 'True' in 1.8.0.",
+                mlrun.utils.OverwriteBuildParamsWarning,
+            )
         image = mlrun.utils.helpers.remove_image_protocol_prefix(image)
         self.spec.build.build_config(
             image=image,
mlrun/serving/server.py CHANGED
@@ -357,7 +357,7 @@ def v2_serving_init(context, namespace=None):
     if hasattr(context, "platform") and hasattr(
         context.platform, "set_termination_callback"
     ):
-        context.logger.debug(
+        context.logger.info(
             "Setting termination callback to terminate graph on worker shutdown"
         )
 
@@ -368,6 +368,23 @@ def v2_serving_init(context, namespace=None):
 
         context.platform.set_termination_callback(termination_callback)
 
+    if hasattr(context, "platform") and hasattr(context.platform, "set_drain_callback"):
+        context.logger.info(
+            "Setting drain callback to terminate and restart the graph on a drain event (such as rebalancing)"
+        )
+
+        def drain_callback():
+            context.logger.info("Drain callback called")
+            server.wait_for_completion()
+            context.logger.info(
+                "Termination of async flow is completed. Rerunning async flow."
+            )
+            # Rerun the flow without reconstructing it
+            server.graph._run_async_flow()
+            context.logger.info("Async flow restarted")
+
+        context.platform.set_drain_callback(drain_callback)
+
 
 def v2_serving_handler(context, event, get_body=False):
     """hook for nuclio handler()"""
mlrun/serving/states.py CHANGED
@@ -921,6 +921,7 @@ class FlowStep(BaseStep):
 
         if self.engine != "sync":
             self._build_async_flow()
+            self._run_async_flow()
 
     def check_and_process_graph(self, allow_empty=False):
         """validate correct graph layout and initialize the .next links"""
@@ -1075,7 +1076,10 @@ class FlowStep(BaseStep):
             if next_state.async_object and error_step.async_object:
                 error_step.async_object.to(next_state.async_object)
 
-        self._controller = source.run()
+        self._async_flow = source
+
+    def _run_async_flow(self):
+        self._controller = self._async_flow.run()
 
     def get_queue_links(self):
         """return dict of function and queue its listening on, for building stream triggers"""
mlrun/utils/helpers.py CHANGED
@@ -65,6 +65,10 @@ DEFAULT_TIME_PARTITIONS = ["year", "month", "day", "hour"]
 DEFAULT_TIME_PARTITIONING_GRANULARITY = "hour"
 
 
+class OverwriteBuildParamsWarning(FutureWarning):
+    pass
+
+
 # TODO: remove in 1.7.0
 @deprecated(
     version="1.5.0",
@@ -183,7 +187,7 @@ def verify_field_regex(
     if mode == mlrun.common.schemas.RegexMatchModes.all:
         if raise_on_failure:
             raise mlrun.errors.MLRunInvalidArgumentError(
-                f"Field '{field_name}' is malformed. {field_value} does not match required pattern: {pattern}"
+                f"Field '{field_name}' is malformed. '{field_value}' does not match required pattern: {pattern}"
             )
         return False
     elif mode == mlrun.common.schemas.RegexMatchModes.any:
@@ -193,7 +197,7 @@ def verify_field_regex(
     elif mode == mlrun.common.schemas.RegexMatchModes.any:
         if raise_on_failure:
             raise mlrun.errors.MLRunInvalidArgumentError(
-                f"Field '{field_name}' is malformed. {field_value} does not match any of the"
+                f"Field '{field_name}' is malformed. '{field_value}' does not match any of the"
                 f" required patterns: {patterns}"
             )
         return False
@@ -333,7 +337,7 @@ def remove_image_protocol_prefix(image: str) -> str:
 def verify_field_of_type(field_name: str, field_value, expected_type: type):
     if not isinstance(field_value, expected_type):
         raise mlrun.errors.MLRunInvalidArgumentError(
-            f"Field '{field_name}' should be of type {expected_type.__name__} "
+            f"Field '{field_name}' should be of type '{expected_type.__name__}' "
             f"(got: {type(field_value).__name__} with value: {field_value})."
         )
 
@@ -357,14 +361,14 @@ def verify_dict_items_type(
     if dictionary:
         if type(dictionary) != dict:
             raise mlrun.errors.MLRunInvalidArgumentTypeError(
-                f"{name} expected to be of type dict, got type : {type(dictionary)}"
+                f"'{name}' expected to be of type dict, got type: {type(dictionary)}"
             )
         try:
             verify_list_items_type(dictionary.keys(), expected_keys_types)
            verify_list_items_type(dictionary.values(), expected_values_types)
         except mlrun.errors.MLRunInvalidArgumentTypeError as exc:
             raise mlrun.errors.MLRunInvalidArgumentTypeError(
-                f"{name} should be of type Dict[{get_pretty_types_names(expected_keys_types)},"
+                f"'{name}' should be of type Dict[{get_pretty_types_names(expected_keys_types)},"
                 f"{get_pretty_types_names(expected_values_types)}]."
             ) from exc
 
@@ -407,7 +411,7 @@ def normalize_name(name: str, verbose: bool = True):
         if verbose:
             warnings.warn(
                 "Names with underscore '_' are about to be deprecated, use dashes '-' instead. "
-                f"Replacing {name} underscores with dashes.",
+                f"Replacing '{name}' underscores with dashes.",
                 FutureWarning,
             )
         name = name.replace("_", "-")
@@ -669,7 +673,7 @@ def parse_artifact_uri(uri, default_project=""):
             iteration = int(iteration)
         except ValueError:
             raise ValueError(
-                f"illegal store path {uri}, iteration must be integer value"
+                f"illegal store path '{uri}', iteration must be integer value"
             )
     return (
         group_dict["project"] or default_project,
@@ -1199,7 +1203,7 @@ def get_function(function, namespace):
         function_object = create_function(function)
     except (ImportError, ValueError) as exc:
         raise ImportError(
-            f"state/function init failed, handler {function} not found"
+            f"state/function init failed, handler '{function}' not found"
         ) from exc
     return function_object
 
@@ -1383,7 +1387,7 @@ def get_in_artifact(artifact: dict, key, default=None, raise_on_missing=False):
 
     if raise_on_missing:
         raise mlrun.errors.MLRunInvalidArgumentError(
-            f"artifact {artifact} is missing metadata/spec/status"
+            f"artifact '{artifact}' is missing metadata/spec/status"
         )
     return default
 
@@ -1419,7 +1423,7 @@ def is_running_in_jupyter_notebook() -> bool:
 
 def as_number(field_name, field_value):
     if isinstance(field_value, str) and not field_value.isnumeric():
-        raise ValueError(f"{field_name} must be numeric (str/int types)")
+        raise ValueError(f"'{field_name}' must be numeric (str/int types)")
     return int(field_value)
 
 
mlrun/utils/version/version.json CHANGED
@@ -1,4 +1,4 @@
 {
-    "git_commit": "4196c53355a2275531e389ac20dfb2aff559137e",
-    "version": "1.6.0-rc12"
+    "git_commit": "abca3cad961e987ec75d8ecfcdf0b6856dc9b7d1",
+    "version": "1.6.0-rc13"
 }
mlrun-1.6.0rc12.dist-info/METADATA → mlrun-1.6.0rc13.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: mlrun
-Version: 1.6.0rc12
+Version: 1.6.0rc13
 Summary: Tracking and config of machine learning runs
 Home-page: https://github.com/mlrun/mlrun
 Author: Yaron Haviv
@@ -65,7 +65,7 @@ Requires-Dist: boto3 <1.29.0,>=1.28.0 ; extra == 'all'
 Requires-Dist: dask ~=2023.9.0 ; extra == 'all'
 Requires-Dist: databricks-sdk ~=0.3.0 ; extra == 'all'
 Requires-Dist: distributed ~=2023.9.0 ; extra == 'all'
-Requires-Dist: gcsfs ==2023.9.0 ; extra == 'all'
+Requires-Dist: gcsfs ==2023.9.1 ; extra == 'all'
 Requires-Dist: google-cloud-bigquery[bqstorage,pandas] ~=3.2 ; extra == 'all'
 Requires-Dist: google-cloud-storage ~=1.20 ; extra == 'all'
 Requires-Dist: google-cloud ~=0.34 ; extra == 'all'
@@ -76,7 +76,7 @@ Requires-Dist: msrest ~=0.6.21 ; extra == 'all'
 Requires-Dist: plotly <5.12.0,~=5.4 ; extra == 'all'
 Requires-Dist: pyopenssl >=23 ; extra == 'all'
 Requires-Dist: redis ~=4.3 ; extra == 'all'
-Requires-Dist: s3fs ==2023.9.0 ; extra == 'all'
+Requires-Dist: s3fs ==2023.9.1 ; extra == 'all'
 Requires-Dist: sqlalchemy ~=1.4 ; extra == 'all'
 Provides-Extra: api
 Requires-Dist: uvicorn ~=0.23.2 ; extra == 'api'
@@ -115,7 +115,7 @@ Requires-Dist: boto3 <1.29.0,>=1.28.0 ; extra == 'complete'
 Requires-Dist: dask ~=2023.9.0 ; extra == 'complete'
 Requires-Dist: databricks-sdk ~=0.3.0 ; extra == 'complete'
 Requires-Dist: distributed ~=2023.9.0 ; extra == 'complete'
-Requires-Dist: gcsfs ==2023.9.0 ; extra == 'complete'
+Requires-Dist: gcsfs ==2023.9.1 ; extra == 'complete'
 Requires-Dist: google-cloud-bigquery[bqstorage,pandas] ~=3.2 ; extra == 'complete'
 Requires-Dist: graphviz ~=0.20.0 ; extra == 'complete'
 Requires-Dist: kafka-python ~=2.0 ; extra == 'complete'
@@ -124,7 +124,7 @@ Requires-Dist: msrest ~=0.6.21 ; extra == 'complete'
 Requires-Dist: plotly <5.12.0,~=5.4 ; extra == 'complete'
 Requires-Dist: pyopenssl >=23 ; extra == 'complete'
 Requires-Dist: redis ~=4.3 ; extra == 'complete'
-Requires-Dist: s3fs ==2023.9.0 ; extra == 'complete'
+Requires-Dist: s3fs ==2023.9.1 ; extra == 'complete'
 Requires-Dist: sqlalchemy ~=1.4 ; extra == 'complete'
 Provides-Extra: complete-api
 Requires-Dist: adlfs ==2023.9.0 ; extra == 'complete-api'
@@ -142,7 +142,7 @@ Requires-Dist: dask ~=2023.9.0 ; extra == 'complete-api'
 Requires-Dist: databricks-sdk ~=0.3.0 ; extra == 'complete-api'
 Requires-Dist: distributed ~=2023.9.0 ; extra == 'complete-api'
 Requires-Dist: fastapi ~=0.103.2 ; extra == 'complete-api'
-Requires-Dist: gcsfs ==2023.9.0 ; extra == 'complete-api'
+Requires-Dist: gcsfs ==2023.9.1 ; extra == 'complete-api'
 Requires-Dist: google-cloud-bigquery[bqstorage,pandas] ~=3.2 ; extra == 'complete-api'
 Requires-Dist: graphviz ~=0.20.0 ; extra == 'complete-api'
 Requires-Dist: humanfriendly ~=9.2 ; extra == 'complete-api'
@@ -155,7 +155,7 @@ Requires-Dist: plotly <5.12.0,~=5.4 ; extra == 'complete-api'
 Requires-Dist: pymysql ~=1.0 ; extra == 'complete-api'
 Requires-Dist: pyopenssl >=23 ; extra == 'complete-api'
 Requires-Dist: redis ~=4.3 ; extra == 'complete-api'
-Requires-Dist: s3fs ==2023.9.0 ; extra == 'complete-api'
+Requires-Dist: s3fs ==2023.9.1 ; extra == 'complete-api'
 Requires-Dist: sqlalchemy ~=1.4 ; extra == 'complete-api'
 Requires-Dist: sqlite3-to-mysql ~=1.4 ; extra == 'complete-api'
 Requires-Dist: timelength ~=1.1 ; extra == 'complete-api'
@@ -172,7 +172,7 @@ Requires-Dist: google-cloud ~=0.34 ; extra == 'google-cloud'
 Provides-Extra: google-cloud-bigquery
 Requires-Dist: google-cloud-bigquery[bqstorage,pandas] ~=3.2 ; extra == 'google-cloud-bigquery'
 Provides-Extra: google-cloud-storage
-Requires-Dist: gcsfs ==2023.9.0 ; extra == 'google-cloud-storage'
+Requires-Dist: gcsfs ==2023.9.1 ; extra == 'google-cloud-storage'
 Provides-Extra: graphviz
 Requires-Dist: graphviz ~=0.20.0 ; extra == 'graphviz'
 Provides-Extra: kafka
@@ -187,12 +187,12 @@ Requires-Dist: redis ~=4.3 ; extra == 'redis'
 Provides-Extra: s3
 Requires-Dist: boto3 <1.29.0,>=1.28.0 ; extra == 's3'
 Requires-Dist: aiobotocore <2.8,>=2.5.0 ; extra == 's3'
-Requires-Dist: s3fs ==2023.9.0 ; extra == 's3'
+Requires-Dist: s3fs ==2023.9.1 ; extra == 's3'
 Provides-Extra: sqlalchemy
 Requires-Dist: sqlalchemy ~=1.4 ; extra == 'sqlalchemy'
 
 <a id="top"></a>
-[![Build Status](https://github.com/mlrun/mlrun/workflows/CI/badge.svg)](https://github.com/mlrun/mlrun/actions)
+[![Build Status](https://github.com/mlrun/mlrun/actions/workflows/build.yaml/badge.svg?branch=development)](https://github.com/mlrun/mlrun/actions/workflows/build.yaml?query=branch%3Adevelopment)
 [![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)
 [![PyPI version fury.io](https://badge.fury.io/py/mlrun.svg)](https://pypi.python.org/pypi/mlrun/)
 [![Documentation](https://readthedocs.org/projects/mlrun/badge/?version=latest)](https://mlrun.readthedocs.io/en/latest/?badge=latest)
@@ -208,7 +208,7 @@ Requires-Dist: sqlalchemy ~=1.4 ; extra == 'sqlalchemy'
 MLRun is an open MLOps platform for quickly building and managing continuous ML applications across their lifecycle. MLRun integrates into your development and CI/CD environment and automates the delivery of production data, ML pipelines, and online applications, significantly reducing engineering efforts, time to production, and computation resources.
 With MLRun, you can choose any IDE on your local machine or on the cloud. MLRun breaks the silos between data, ML, software, and DevOps/MLOps teams, enabling collaboration and fast continuous improvements.
 
-Get started with MLRun [**Tutorials and Examples**](https://docs.mlrun.org/en/latest/tutorial/index.html), [**Installation and setup guide**](https://docs.mlrun.org/en/latest/install.html), or read about [**MLRun Architecture**](https://docs.mlrun.org/en/latest/architecture.html).
+Get started with MLRun [**Tutorials and Examples**](https://docs.mlrun.org/en/latest/tutorials/index.html), [**Installation and setup guide**](https://docs.mlrun.org/en/latest/install.html), or read about [**MLRun Architecture**](https://docs.mlrun.org/en/latest/architecture.html).
 
 This page explains how MLRun addresses the [**MLOps Tasks**](#mlops-tasks) and the [**MLRun core components**](#core-components).
 
@@ -226,32 +226,32 @@ In MLRun the assets, metadata, and services (data, functions, jobs, artifacts, m
 Projects can be imported/exported as a whole, mapped to git repositories or IDE projects (in PyCharm, VSCode, etc.), which enables versioning, collaboration, and CI/CD.
 Project access can be restricted to a set of users and roles.
 
-See: **Docs:** [Projects and Automation](https://docs.mlrun.org/en/latest/projects/project.html), [CI/CD Integration](https://docs.mlrun.org/en/latest/projects/ci-integration.html), **Tutorials:** [Quick start](https://docs.mlrun.org/en/latest/tutorial/01-mlrun-basics.html), [Automated ML Pipeline](https://docs.mlrun.org/en/latest/tutorial/04-pipeline.html), **Video:** [quick start](https://youtu.be/xI8KVGLlj7Q).
+See: **Docs:** [Projects and Automation](https://docs.mlrun.org/en/latest/projects/project.html), [CI/CD Integration](https://docs.mlrun.org/en/latest/projects/ci-integration.html), **Tutorials:** [Quick start](https://docs.mlrun.org/en/latest/tutorials/01-mlrun-basics.html), [Automated ML Pipeline](https://docs.mlrun.org/en/latest/tutorials/04-pipeline.html), **Video:** [quick start](https://youtu.be/xI8KVGLlj7Q).
 
 ### Ingest and process data
 
 MLRun provides abstract interfaces to various offline and online [**data sources**](https://docs.mlrun.org/en/latest/concepts/data-feature-store.html), supports batch or realtime data processing at scale, data lineage and versioning, structured and unstructured data, and more.
 In addition, the MLRun [**Feature Store**](https://docs.mlrun.org/en/latest/feature-store/feature-store.html) automates the collection, transformation, storage, catalog, serving, and monitoring of data features across the ML lifecycle and enables feature reuse and sharing.
 
-See: **Docs:** [Ingest and process data](https://docs.mlrun.org/en/latest/data-prep/index.html), [Feature Store](https://docs.mlrun.org/en/latest/feature-store/feature-store.html), [Data & Artifacts](https://docs.mlrun.org/en/latest/concepts/data-feature-store.html); **Tutorials:** [Quick start](https://docs.mlrun.org/en/latest/tutorial/01-mlrun-basics.html), [Feature Store](https://docs.mlrun.org/en/latest/feature-store/basic-demo.html).
+See: **Docs:** [Ingest and process data](https://docs.mlrun.org/en/latest/data-prep/index.html), [Feature Store](https://docs.mlrun.org/en/latest/feature-store/feature-store.html), [Data & Artifacts](https://docs.mlrun.org/en/latest/concepts/data-feature-store.html); **Tutorials:** [Quick start](https://docs.mlrun.org/en/latest/tutorials/01-mlrun-basics.html), [Feature Store](https://docs.mlrun.org/en/latest/feature-store/basic-demo.html).
 
 ### Develop and train models
 
 MLRun allows you to easily build ML pipelines that take data from various sources or the Feature Store and process it, train models at scale with multiple parameters, test models, tracks each experiments, register, version and deploy models, etc. MLRun provides scalable built-in or custom model training services, integrate with any framework and can work with 3rd party training/auto-ML services. You can also bring your own pre-trained model and use it in the pipeline.
 
-See: **Docs:** [Develop and train models](https://docs.mlrun.org/en/latest/development/index.html), [Model Training and Tracking](https://docs.mlrun.org/en/latest/development/model-training-tracking.html), [Batch Runs and Workflows](https://docs.mlrun.org/en/latest/concepts/runs-workflows.html); **Tutorials:** [Train & Eval Models](https://docs.mlrun.org/en/latest/tutorial/02-model-training.html), [Automated ML Pipeline](https://docs.mlrun.org/en/latest/tutorial/04-pipeline.html); **Video:** [Training models](https://youtu.be/bZgBsmLMdQo).
+See: **Docs:** [Develop and train models](https://docs.mlrun.org/en/latest/development/index.html), [Model Training and Tracking](https://docs.mlrun.org/en/latest/development/model-training-tracking.html), [Batch Runs and Workflows](https://docs.mlrun.org/en/latest/concepts/runs-workflows.html); **Tutorials:** [Train & Eval Models](https://docs.mlrun.org/en/latest/tutorials/02-model-training.html), [Automated ML Pipeline](https://docs.mlrun.org/en/latest/tutorials/04-pipeline.html); **Video:** [Training models](https://youtu.be/bZgBsmLMdQo).
 
 ### Deploy models and applications
 
 MLRun rapidly deploys and manages production-grade real-time or batch application pipelines using elastic and resilient serverless functions. MLRun addresses the entire ML application: intercepting application/user requests, running data processing tasks, inferencing using one or more models, driving actions, and integrating with the application logic.
 
-See: **Docs:** [Deploy models and applications](https://docs.mlrun.org/en/latest/deployment/index.html), [Realtime Pipelines](https://docs.mlrun.org/en/latest/serving/serving-graph.html), [Batch Inference](https://docs.mlrun.org/en/latest/concepts/TBD.html), **Tutorials:** [Realtime Serving](https://docs.mlrun.org/en/latest/tutorial/03-model-serving.html), [Batch Inference](https://docs.mlrun.org/en/latest/tutorial/07-batch-infer.html), [Advanced Pipeline](https://docs.mlrun.org/en/latest/tutorial/07-batch-infer.html); **Video:** [Serving models](https://youtu.be/OUjOus4dZfw).
+See: **Docs:** [Deploy models and applications](https://docs.mlrun.org/en/latest/deployment/index.html), [Realtime Pipelines](https://docs.mlrun.org/en/latest/serving/serving-graph.html), [Batch Inference](https://docs.mlrun.org/en/latest/concepts/TBD.html), **Tutorials:** [Realtime Serving](https://docs.mlrun.org/en/latest/tutorials/03-model-serving.html), [Batch Inference](https://docs.mlrun.org/en/latest/tutorials/07-batch-infer.html), [Advanced Pipeline](https://docs.mlrun.org/en/latest/tutorials/07-batch-infer.html); **Video:** [Serving models](https://youtu.be/OUjOus4dZfw).
 
 ### Monitor and alert
 
 Observability is built into the different MLRun objects (data, functions, jobs, models, pipelines, etc.), eliminating the need for complex integrations and code instrumentation. With MLRun, you can observe the application/model resource usage and model behavior (drift, performance, etc.), define custom app metrics, and trigger alerts or retraining jobs.
 
-See: **Docs:** [Monitor and alert](https://docs.mlrun.org/en/latest/monitoring/index.html), [Model Monitoring Overview](https://docs.mlrun.org/en/latest/monitoring/model-monitoring-deployment.html), **Tutorials:** [Model Monitoring & Drift Detection](https://docs.mlrun.org/en/latest/tutorial/05-model-monitoring.html).
+See: **Docs:** [Monitor and alert](https://docs.mlrun.org/en/latest/monitoring/index.html), [Model Monitoring Overview](https://docs.mlrun.org/en/latest/monitoring/model-monitoring-deployment.html), **Tutorials:** [Model Monitoring & Drift Detection](https://docs.mlrun.org/en/latest/tutorials/05-model-monitoring.html).
 
 
 <a id="core-components"></a>
mlrun-1.6.0rc12.dist-info/RECORD → mlrun-1.6.0rc13.dist-info/RECORD CHANGED
@@ -1,6 +1,6 @@
 mlrun/__init__.py,sha256=o9dHUfVFADfsi6GnOPLr2OkfkHdPvOnA7rkoECen0-I,7248
 mlrun/__main__.py,sha256=QdVcqPRIzc1_em8nXiO7PLkGZYvzqyQKSNoaN9Czk6Y,49066
-mlrun/config.py,sha256=-o8BrGcQolvpN_qjnRhA6r9wbGsa5A07BZHVMi_9-qI,60736
+mlrun/config.py,sha256=aXeKVEIGbsPnwIP_CVWEFCuN4S4jWr9IYRbFTvf2ZtY,60736
 mlrun/errors.py,sha256=pYZZtJsMqkFktG6T8iTyfyESrrhOLfKvB3U_jUDHMo8,6780
 mlrun/execution.py,sha256=dWus2hBaT7qj5CwsVlmNSjFYopcWW1q9ceU3AoDNW80,39708
 mlrun/features.py,sha256=UQQ2uh5Xh9XsMGiYBqh3bKgDhOHANjv1gQgWyId9qQE,15624
@@ -77,7 +77,7 @@ mlrun/datastore/s3.py,sha256=9X5_uwzPwklDHUNZ3G4LkX1law4Z-weTDCTYz8PBplk,8064
 mlrun/datastore/sources.py,sha256=3P2Q7CUP9e76BqYLv5LlVGLCp6tUsys4fs-txgh5ac8,39493
 mlrun/datastore/spark_udf.py,sha256=NnnB3DZxZb-rqpRy7b-NC7QWXuuqFn3XkBDc86tU4mQ,1498
 mlrun/datastore/store_resources.py,sha256=SUY9oJieq3r8PEq8G661XxmXem_e-CxDoy2AJ7dpXBk,6906
-mlrun/datastore/targets.py,sha256=PTcYvYM8XYYHqkuqPCPvAo87uFxpZhFof5wck5-jEWM,69463
+mlrun/datastore/targets.py,sha256=-sI5DTpPCuXTsayoyxu700iabaKFyg7u9UNubKPXxjw,69817
 mlrun/datastore/utils.py,sha256=vuuNFM1ystkvpOlvI8QJL5i8ZPKxvSO0wPv8s4W1raE,5768
 mlrun/datastore/v3io.py,sha256=ywd-rrB5Uicdk7KGMk-nJ4mKPjvg2z5w6TVx5Bo5jIo,8099
 mlrun/datastore/wasbfs/__init__.py,sha256=s5Ul-0kAhYqFjKDR2X0O2vDGDbLQQduElb32Ev56Te4,1343
@@ -95,10 +95,10 @@ mlrun/feature_store/feature_vector.py,sha256=hbEk5BzCEFxUyJPA3jefTlE95whD9c8a3f9
 mlrun/feature_store/ingestion.py,sha256=GZkrke5_JJfA_PGOFc6ekbHKujHgMgqr6t4vop5n_bg,11210
 mlrun/feature_store/steps.py,sha256=Xsvq6TGWraLeN_OC7T3ZTrJP8WhVNjDC12VSNACRePE,29118
 mlrun/feature_store/retrieval/__init__.py,sha256=bwA4copPpLQi8fyoUAYtOyrlw0-6f3-Knct8GbJSvRg,1282
-mlrun/feature_store/retrieval/base.py,sha256=06S465nMB4gOrVesnVZXN4M-01afcleX7U6sSozqoJk,31673
-mlrun/feature_store/retrieval/dask_merger.py,sha256=E_ziv7sARcnRmdNTCGV9vskRGDtPAAGsIc4PRTaL63s,5420
+mlrun/feature_store/retrieval/base.py,sha256=e7n-z3LMYMD2J13oWqgvOthnhOMpq8_MIIMuW8ZBWKg,31724
+mlrun/feature_store/retrieval/dask_merger.py,sha256=_AiEu0iRPi9nKt97EhlXqXCYfq4LgHsdpG7ZzpycReM,5491
 mlrun/feature_store/retrieval/job.py,sha256=udrpAf8Q1uNhGj3ATvoEV7M8hlZr3dW_P0T5MqKc1Dc,8262
-mlrun/feature_store/retrieval/local_merger.py,sha256=2BWCBm9lcyRfYiDRSJ42E9QjcUo7ux0URgxUHpGD-Fw,4359
+mlrun/feature_store/retrieval/local_merger.py,sha256=jkzTml_umIU-d6isKFNPtmLZ5K9upqbJLWUde2DdiL8,4429
 mlrun/feature_store/retrieval/spark_merger.py,sha256=-SaIen7Jc9RAPs4TwQ0E5qEO2Yh_Kp-YVD8b902nXAw,11079
 mlrun/feature_store/retrieval/storey_merger.py,sha256=5YM0UPrLjGOobulHkowRO-1LuvFD2cm_0GxcpnTdu0I,6314
 mlrun/frameworks/__init__.py,sha256=qRHe_nUfxpoLaSASAkIxcW6IyunMtxq5LXhjzZMO_1E,743
@@ -192,14 +192,14 @@ mlrun/launcher/factory.py,sha256=tk6foFWox7f_xaeTgkWTx9ht_5fv0XzLDR8ucdb8oTE,234
 mlrun/launcher/local.py,sha256=iSPQjnAeBmIz-oWprDmQqD9TUkz85h9aaJIbybcT1R0,10930
 mlrun/launcher/remote.py,sha256=e7orva_ozrtmvEE-QihoFi8MKNCAHxzeUNQpqwQbEoQ,7007
 mlrun/model_monitoring/__init__.py,sha256=XaYyvWsIXpjJQ2gCPj8tFvfSbRSEEqgDtNz4tCE5H4g,915
-mlrun/model_monitoring/api.py,sha256=MyS6dmFv2w9X-kGEPV4Z-Sn2Sihx85hmSe8Yd-NRQgI,34624
+mlrun/model_monitoring/api.py,sha256=7aB6EbLHeH8PMQYp8mZQfDCxZeaD391Kb-tUVhS9hSs,36674
 mlrun/model_monitoring/application.py,sha256=fgsSOLwWJWbiFV_hCOpYVTTOnZLIUDTQ0fHIeGvhMWw,12370
 mlrun/model_monitoring/batch.py,sha256=7Iq0LNbuG6yAzaZ3ut1qFMZTP2ODvGd57cufrP84wtg,43286
 mlrun/model_monitoring/controller.py,sha256=B5PNThwgjYVhjb1t1Xy2m_UIOykLF0CZHMw5mTsevew,26483
 mlrun/model_monitoring/controller_handler.py,sha256=Yk8urHYKSU_RUhO0B375VO5YWd41yvPW_SMFpO-C8vg,1095
 mlrun/model_monitoring/evidently_application.py,sha256=vlua7ikl736bFKwigCz3Qh4KV4CoWaqJ1ZVPbwCac90,3792
 mlrun/model_monitoring/features_drift_table.py,sha256=2r51W4xQ8gNq3PXt73IfsYu4l4mjwD-dLfRVAvKplTE,24209
-mlrun/model_monitoring/helpers.py,sha256=lJhH8SiRQg4jHo0HDC_89t2jghwPqOp1Xks1tFdQBj0,3269
+mlrun/model_monitoring/helpers.py,sha256=fn5bwqX-29BqecHzJLJ3JvItyZUjYVZ89p8XYbnnFhk,5589
 mlrun/model_monitoring/model_endpoint.py,sha256=BBtxdY5ciormI_al4zshmIp0GN7hGhOCn-hLgpCXek0,3938
 mlrun/model_monitoring/prometheus.py,sha256=Z0UWmhQ-dpGGH31gCiGdfmhfj-RFRf1Tu1bYVe-k4jk,7605
 mlrun/model_monitoring/stream_processing.py,sha256=LqRMosAWF1ncKjP829zDTdpkx5pPcydv5QJp3RGQY_U,48755
@@ -234,18 +234,18 @@ mlrun/platforms/__init__.py,sha256=ArWn_iZiEE6qz7hvY_1RqMkFnHGuKjP3k5xYKnfKA58,2
 mlrun/platforms/iguazio.py,sha256=LU1d33ll5EKIyp2zitCffZIbq-3fRwNSNO9MK2cIsHc,21729
 mlrun/platforms/other.py,sha256=z4pWqxXkVVuMLk-MbNb0Y_ZR5pmIsUm0R8vHnqpEnew,11852
 mlrun/projects/__init__.py,sha256=Lv5rfxyXJrw6WGOWJKhBz66M6t3_zsNMCfUD6waPwx4,1153
-mlrun/projects/operations.py,sha256=ew_wGc6GVWjX1upsh7kBMZHtWyjKglxWPFQ0Tg9o5OI,18134
+mlrun/projects/operations.py,sha256=Qn7V-ixdUDD_u21U1IwshKhAe6fTbXGruU3Ekx5-8ls,18744
 mlrun/projects/pipelines.py,sha256=qVlG5ZxdcXxdgtxeht-r6QHDUMw7DUkDPv-yuvui3Rk,39297
-mlrun/projects/project.py,sha256=2Y1dlDLY_HOqvJqBc_LRdgR9GvZrkoIBhFn3o3zxpJc,144948
+mlrun/projects/project.py,sha256=vWv-tOFWjy6OSoG0n97BHo6eAVDLXPxugQY2KMANNNY,146079
 mlrun/runtimes/__init__.py,sha256=_93Hbsamu5NDC4HTuZl6UCOQN23dJQ7CwNbkT4GAMF8,7028
 mlrun/runtimes/base.py,sha256=056I8Oh1KX-qD5hFSVmiuB2bX0ZhOztohOkomMoJ9s8,35525
 mlrun/runtimes/constants.py,sha256=Y7ZETb5-sD1m6H0dqEHmPrtvhqrwYf-uYcKgMxHFYhw,8657
 mlrun/runtimes/daskjob.py,sha256=EoTnCeQgOupl90Tg6IrQjo8Fc2I2ZwdTFMq8HD9r8Qw,19100
 mlrun/runtimes/funcdoc.py,sha256=FHwnLfFzoD6yGlsAJXAl_3VTtudgg4fTrsw_XqLOkC0,10508
-mlrun/runtimes/function.py,sha256=VI57LNEiM0mYM5xLC16HiPZRB3pUB755ZutF-q1tZyk,47204
+mlrun/runtimes/function.py,sha256=bA5-FojJXn91l8kQV6KhFzqIlwHgiaC812ne64vfsTU,47535
 mlrun/runtimes/function_reference.py,sha256=SJ0J-4ww0FQdijmdnUwGUKhMb-h5wtzqCPItTWKIL40,4911
 mlrun/runtimes/generators.py,sha256=v28HdNgxdHvj888G1dTnUeQZz-D9iTO0hoGeZbCdiuQ,7241
-mlrun/runtimes/kubejob.py,sha256=ojmFYLdECGZJbyAXMdee_U3VSkB2393AqyRIWAOrygM,12088
+mlrun/runtimes/kubejob.py,sha256=BiSnLOvcm_YvpayADtxDtqyUmuYSwdAlVo6TlHfoqus,12386
 mlrun/runtimes/local.py,sha256=wF2WZ_NyG6OKdte05Hkr085AFxJr1COI4fJfsJeHbdI,21160
 mlrun/runtimes/nuclio.py,sha256=hwk4dUaZefI-Qbb4s289vQpt1h0nAucxf6eINzVI-d8,2908
 mlrun/runtimes/pod.py,sha256=TNM8fee1lSSVHrfig0aPQRV2Vp6ICH09DhFoA6nTO-U,56312
@@ -254,8 +254,8 @@ mlrun/runtimes/serving.py,sha256=IKAv5gHrk7GXkWaVW_tvF7uPtXcdPyQnETJRVj7wXEw,303
 mlrun/runtimes/utils.py,sha256=eEbWJLC7vl2vG6bRTxsKhvbRBJuPHsf4hfM5cGHcd08,15324
 mlrun/runtimes/databricks_job/__init__.py,sha256=kXGBqhLN0rlAx0kTXhozGzFsIdSqW0uTSKMmsLgq_is,569
 mlrun/runtimes/databricks_job/databricks_cancel_task.py,sha256=lYItecKYdWkvmuE0gCLqx2OkBfC6JFp4PE3dW6WXHsI,2249
-mlrun/runtimes/databricks_job/databricks_runtime.py,sha256=3FpmWoSsRRYwLJlWJ-aTCG3UOWtX3Mqi-UvhIgLgfC8,8307
-mlrun/runtimes/databricks_job/databricks_wrapper.py,sha256=dTnUp7GJxHSaeEfVMb7zxKLfyVrGLLXgnbYA3GXgc_8,8104
+mlrun/runtimes/databricks_job/databricks_runtime.py,sha256=hbxhFqw68eRsdOu-NlxErwv55cLBE5j_m8J1Y8EfCk4,10732
+mlrun/runtimes/databricks_job/databricks_wrapper.py,sha256=MuyenBc-SdV2zz2ukCroXiRzEABClD19eCdovgcSFOc,8028
 mlrun/runtimes/mpijob/__init__.py,sha256=jZf2uPBv6IB18Jj-dGSQ9NU5_xxni7XS4dnDZGwESFE,1583
 mlrun/runtimes/mpijob/abstract.py,sha256=B9opVnv3Hdm17rFITEemYLoh9ft8q-I4wql4g12jidg,9179
 mlrun/runtimes/mpijob/v1.py,sha256=_RUlFo_3NcFf7x-QpUNVm8f7qNbRDIdUmPf_ijrv54U,3206
@@ -266,9 +266,9 @@ mlrun/serving/__init__.py,sha256=_6HRAOuS2Ehjo3vwx5h1aI_-JppxEAsl4VfEERAbGFE,107
 mlrun/serving/merger.py,sha256=PXLn3A21FiLteJHaDSLm5xKNT-80eTTjfHUJnBX1gKY,6116
 mlrun/serving/remote.py,sha256=XtCgEY-azxcP0VUG1TupZXQ_dttPkAKIAtszW-GfGpQ,18038
 mlrun/serving/routers.py,sha256=jIZIUsWaktwwzD6--02Itlgb_CCCTxi620S-jw63WhM,55310
-mlrun/serving/server.py,sha256=p0EhIF7VUnQTDtRQ-mcT-mjhov1kVfP-wDVS0tokGhA,21054
+mlrun/serving/server.py,sha256=-R_XP1NW2vG0V7F3nlPRGW0tobpL5tgZFTwcEhIZFbo,21764
 mlrun/serving/serving_wrapper.py,sha256=R670-S6PX_d5ER6jiHtRvacuPyFzQH0mEf2K0sBIIOM,836
-mlrun/serving/states.py,sha256=eh5Jq7BSKiE5QjKVNeRGI_EddnCE-DZowH_lik0_tiM,54712
+mlrun/serving/states.py,sha256=Gq7-iiFpqP1o4W1SiPdYqeyITuvnSOtz7ju46mZr3k4,54823
 mlrun/serving/utils.py,sha256=MHu3RljqtuEu3KxYcbECOzfgQZbfvYK09Tg5IAdvFgg,3902
 mlrun/serving/v1_serving.py,sha256=m_W8ylcASCvdP66lZSD6yQjamYCUJ3VZdOEQY4F8ozk,11814
 mlrun/serving/v2_serving.py,sha256=fAMA52lBvpLFpUa8LKHjRAwBZp_sR0wUmmtrN1ChJC4,21823
@@ -283,7 +283,7 @@ mlrun/utils/azure_vault.py,sha256=VNs2fz0XlFrV5Ggz3T0mR7mOWHefEcC14wM7QpsbY44,34
 mlrun/utils/clones.py,sha256=QG2ka65-ysfrOaoziudEjJqGgAxJvFKZOXkiD9WZGN4,7386
 mlrun/utils/condition_evaluator.py,sha256=oR-GjryAg76D4G79G-DzVkx631D6Gd4jJgbr_d3Btnw,1920
 mlrun/utils/db.py,sha256=2pdIYKIA0GiwwuWLW0PJ_bPu9M1rd7ESBqnMr5wWuW4,1662
-mlrun/utils/helpers.py,sha256=lyKKdQn7JqnQI9_LuNRAyvo1-iMFIVdC0O6amPoqBAY,50018
+mlrun/utils/helpers.py,sha256=lsV5lTHbqfuFXXKCBV_89kPCx1I8ao3KvHznXNEoi6M,50098
 mlrun/utils/http.py,sha256=_3pJPuDPz7M9pU4uRN-NPUmCyaANCQsAWAIrlVLZPiY,8733
 mlrun/utils/logger.py,sha256=ZVT9vnDGeuMKsbiEHTN3AXyF_hvUgpv4jGa1lgwFXJU,7049
 mlrun/utils/regex.py,sha256=V0kaw1-zuehkN20g_Pq6SgkJTBLRdBqNkXOGN_2TJEw,4430
@@ -300,11 +300,11 @@ mlrun/utils/notifications/notification/ipython.py,sha256=qrBmtECiRG6sZpCIVMg7RZc
 mlrun/utils/notifications/notification/slack.py,sha256=5JysqIpUYUZKXPSeeZtbl7qb2L9dj7p2NvnEBcEsZkA,3898
 mlrun/utils/notifications/notification/webhook.py,sha256=QHezCuN5uXkLcroAGxGrhGHaxAdUvkDLIsp27_Yrfd4,2390
 mlrun/utils/version/__init__.py,sha256=7kkrB7hEZ3cLXoWj1kPoDwo4MaswsI2JVOBpbKgPAgc,614
-mlrun/utils/version/version.json,sha256=11dCOroTQd3OhO0j2jxhnC53Rcm3CZPMrFOYj9IhoQk,89
+mlrun/utils/version/version.json,sha256=9jck-PA2eA74HRX-no59aljy7jDHijdOjUqFpYQaFiM,89
 mlrun/utils/version/version.py,sha256=HMwseV8xjTQ__6T6yUWojx_z6yUj7Io7O4NcCCH_sz8,1970
-mlrun-1.6.0rc12.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-mlrun-1.6.0rc12.dist-info/METADATA,sha256=a2zOfUoFRUfX3m3QaRHkFvexPjuC7pptpbLXilwkXbk,18521
-mlrun-1.6.0rc12.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
-mlrun-1.6.0rc12.dist-info/entry_points.txt,sha256=ZbXmb36B9JmK7EaleP8MIAbZSOQXQV0iwKR6si0HUWk,47
-mlrun-1.6.0rc12.dist-info/top_level.txt,sha256=NObLzw3maSF9wVrgSeYBv-fgnHkAJ1kEkh12DLdd5KM,6
-mlrun-1.6.0rc12.dist-info/RECORD,,
+mlrun-1.6.0rc13.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+mlrun-1.6.0rc13.dist-info/METADATA,sha256=Hz1qTXPT7RuursgzNl78eUHM7ecNT6SlnCBkjkCdQ6Y,18614
+mlrun-1.6.0rc13.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+mlrun-1.6.0rc13.dist-info/entry_points.txt,sha256=ZbXmb36B9JmK7EaleP8MIAbZSOQXQV0iwKR6si0HUWk,47
+mlrun-1.6.0rc13.dist-info/top_level.txt,sha256=NObLzw3maSF9wVrgSeYBv-fgnHkAJ1kEkh12DLdd5KM,6
+mlrun-1.6.0rc13.dist-info/RECORD,,