mlrun 1.5.0rc15__py3-none-any.whl → 1.5.0rc17__py3-none-any.whl

This diff shows the changes between publicly released versions of the package, as published to their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of mlrun might be problematic.

@@ -409,6 +409,11 @@ async def ingest_feature_set(
         auth_info,
     )
 
+    # Set the run db instance with the current db session
+    await run_in_threadpool(
+        feature_set._override_run_db,
+        mlrun.api.api.utils.get_run_db_instance(db_session),
+    )
     if ingest_parameters.targets:
         data_targets = [
             DataTargetBase.from_dict(data_target.dict())
@@ -220,26 +220,24 @@ def _is_requested_schedule(
         return workflow_spec.schedule is not None
 
     project_workflow = _get_workflow_by_name(project, name)
-    return bool(project_workflow.get("schedule"))
+    return bool(project_workflow.get("schedule")) if project_workflow else False
 
 
 def _get_workflow_by_name(
     project: mlrun.common.schemas.Project, name: str
 ) -> typing.Optional[Dict]:
     """
-    Getting workflow from project
+    Getting workflow from project by name.
 
     :param project: MLRun project
     :param name: workflow name
 
-    :return: workflow as a dict if project has the workflow, otherwise raises a bad request exception
+    :return: workflow as a dict if project has the workflow and empty dict if not.
     """
     for workflow in project.spec.workflows:
         if workflow["name"] == name:
             return workflow
-    log_and_raise(
-        reason=f"workflow {name} not found in project",
-    )
+    return {}
 
 
 def _fill_workflow_missing_fields_from_project(
@@ -258,7 +256,9 @@ def _fill_workflow_missing_fields_from_project(
 
     :return: completed workflow spec
     """
-    # Verifying workflow exists in project:
+
+    # while we expect workflow to be exists on project spec, we might get a case where the workflow is not exists.
+    # this is possible when workflow is not set prior to its execution.
    workflow = _get_workflow_by_name(project, workflow_name)
 
    if spec:
@@ -267,6 +267,13 @@ def _fill_workflow_missing_fields_from_project(
         workflow = copy.deepcopy(workflow)
         workflow = _update_dict(workflow, spec.dict())
 
+    if "name" not in workflow:
+        log_and_raise(
+            reason=f"workflow {workflow_name} not found in project"
+            if not workflow
+            else "workflow spec is invalid",
+        )
+
     workflow_spec = mlrun.common.schemas.WorkflowSpec(**workflow)
     # Overriding arguments of the existing workflow:
     if arguments:
@@ -227,6 +227,11 @@ class ModelEndpoints:
             timestamp_key=mlrun.common.schemas.model_monitoring.EventFieldType.TIMESTAMP,
             description=f"Monitoring feature set for endpoint: {model_endpoint.spec.model}",
         )
+
+        # Set the run db instance with the current db session
+        feature_set._override_run_db(
+            mlrun.api.api.utils.get_run_db_instance(db_session)
+        )
         feature_set.metadata.project = model_endpoint.metadata.project
 
         feature_set.metadata.labels = {
@@ -52,7 +52,6 @@ class WorkflowRunners(
             name=run_name,
             project=project,
             kind=mlrun.runtimes.RuntimeKinds.job,
-            # For preventing deployment:
             image=image,
         )
 
@@ -138,6 +138,7 @@ class CSVSource(BaseSourceDriver):
     :parameter path: path to CSV file
     :parameter key_field: the CSV field to be used as the key for events. May be an int (field index) or string
                           (field name) if with_header is True. Defaults to None (no key). Can be a list of keys.
+    :parameter time_field: DEPRECATED. Use parse_dates to parse timestamps.
     :parameter schedule: string to configure scheduling of the ingestion job.
     :parameter attributes: additional parameters to pass to storey. For example:
                            attributes={"timestamp_format": '%Y%m%d%H'}
@@ -155,13 +156,29 @@ class CSVSource(BaseSourceDriver):
         path: str = None,
         attributes: Dict[str, str] = None,
         key_field: str = None,
+        time_field: str = None,
         schedule: str = None,
         parse_dates: Union[None, int, str, List[int], List[str]] = None,
         **kwargs,
     ):
-        super().__init__(name, path, attributes, key_field, schedule=schedule, **kwargs)
+        super().__init__(
+            name, path, attributes, key_field, time_field, schedule, **kwargs
+        )
         if parse_dates and not isinstance(parse_dates, list):
             parse_dates = [parse_dates]
+        if time_field is not None:
+            warnings.warn(
+                "CSVSource's time_field parameter is deprecated in 1.4.0 and will be removed in 1.6.0. "
+                "Use parse_dates instead.",
+                # TODO: remove in 1.6.0
+                FutureWarning,
+            )
+
+            if parse_dates is None:
+                parse_dates = [time_field]
+            elif time_field not in parse_dates:
+                parse_dates = copy(parse_dates)
+                parse_dates.append(time_field)
         self._parse_dates = parse_dates
 
     def to_step(self, key_field=None, time_field=None, context=None):
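
For CSVSource users the migration is a one-line change. A hedged sketch, not part of the diff (the file path and column name are hypothetical):

    from mlrun.datastore.sources import CSVSource

    # Before (now emits a FutureWarning; time_field is folded into parse_dates):
    src = CSVSource("stocks", path="data/stocks.csv", time_field="timestamp")

    # After:
    src = CSVSource("stocks", path="data/stocks.csv", parse_dates=["timestamp"])
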
mlrun/db/httpdb.py CHANGED
@@ -985,7 +985,7 @@ class HTTPRunDB(RunDBInterface):
         :param group_by: Object to group results by. Allowed values are `job` and `project`.
         """
         params = {
-            "label_selector": label_selector,
+            "label-selector": label_selector,
             "group-by": group_by,
             "kind": kind,
             "object-id": object_id,
@@ -3240,6 +3240,11 @@ class HTTPRunDB(RunDBInterface):
             if hasattr(workflow_spec, "image")
             else workflow_spec.get("image", None)
         )
+        workflow_name = name or (
+            workflow_spec.name
+            if hasattr(workflow_spec, "name")
+            else workflow_spec.get("name", None)
+        )
         req = {
             "arguments": arguments,
             "artifact_path": artifact_path,
@@ -3247,16 +3252,20 @@ class HTTPRunDB(RunDBInterface):
             "run_name": run_name,
             "namespace": namespace,
         }
-        if isinstance(workflow_spec, mlrun.common.schemas.WorkflowSpec):
+        if isinstance(
+            workflow_spec,
+            mlrun.common.schemas.WorkflowSpec,
+        ):
             req["spec"] = workflow_spec.dict()
         elif isinstance(workflow_spec, mlrun.projects.pipelines.WorkflowSpec):
             req["spec"] = workflow_spec.to_dict()
         else:
             req["spec"] = workflow_spec
         req["spec"]["image"] = image
+        req["spec"]["name"] = workflow_name
         response = self.api_call(
             "POST",
-            f"projects/{project}/workflows/{name}/submit",
+            f"projects/{project}/workflows/{workflow_name}/submit",
             json=req,
         )
         return mlrun.common.schemas.WorkflowResponse(**response.json())
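
With this change, `submit_workflow` can derive the workflow name from the spec when the `name` argument is empty, which keeps the submit URL well-formed for callers that rely on the spec's name. The fallback is plain Python and can be restated in isolation (a sketch, not mlrun code):

    def resolve_workflow_name(name, workflow_spec):
        # Prefer the explicit argument; otherwise read `name` off the spec,
        # whether the spec is an object with attributes or a plain dict.
        return name or (
            workflow_spec.name
            if hasattr(workflow_spec, "name")
            else workflow_spec.get("name", None)
        )
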
@@ -62,7 +62,7 @@ from .ingestion import (
     run_ingestion_job,
     run_spark_graph,
 )
-from .retrieval import get_merger, run_merge_job
+from .retrieval import RemoteVectorResponse, get_merger, run_merge_job
 
 _v3iofs = None
 spark_transform_handler = "transform"
@@ -106,7 +106,7 @@ def get_offline_features(
     order_by: Union[str, List[str]] = None,
     spark_service: str = None,
     timestamp_for_filtering: Union[str, Dict[str, str]] = None,
-) -> OfflineVectorResponse:
+) -> Union[OfflineVectorResponse, RemoteVectorResponse]:
     """retrieve offline feature vector results
 
     specify a feature vector object/uri and retrieve the desired features, their metadata
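
Callers should now be prepared to receive either response type. A hedged usage sketch (the vector URI is hypothetical, and the assumption here is that passing a `run_config` is what routes retrieval through a remote job):

    import mlrun.feature_store as fstore

    resp = fstore.get_offline_features(
        "store://feature-vectors/my-project/my-vector",  # hypothetical URI
        run_config=fstore.RunConfig(),  # remote execution path
    )
    df = resp.to_dataframe()  # assumption: both response types expose to_dataframe()
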
@@ -424,6 +424,9 @@ class FeatureSet(ModelObj):
         else:
             return mlrun.get_run_db()
 
+    def _override_run_db(self, run_db):
+        self._run_db = run_db
+
     def get_target_path(self, name=None):
         """get the url/path for an offline or specified data target"""
         target = get_offline_target(self, name=name)
@@ -165,6 +165,7 @@ class FeatureVectorStatus(ModelObj):
         preview=None,
         run_uri=None,
         index_keys=None,
+        timestamp_key=None,
     ):
         self._targets: ObjectList = None
         self._features: ObjectList = None
@@ -177,6 +178,7 @@ class FeatureVectorStatus(ModelObj):
         self.preview = preview or []
         self.features: List[Feature] = features or []
         self.run_uri = run_uri
+        self.timestamp_key = timestamp_key
 
     @property
     def targets(self) -> List[DataTarget]:
@@ -695,13 +697,14 @@ class FeatureVector(ModelObj):
         for key in feature_set.spec.entities.keys():
             if key not in index_keys:
                 index_keys.append(key)
-        for name, _ in fields:
+        for name, alias in fields:
             if name in feature_set.status.stats and update_stats:
                 self.status.stats[name] = feature_set.status.stats[name]
             if name in feature_set.spec.features.keys():
                 feature = feature_set.spec.features[name].copy()
                 feature.origin = f"{feature_set.fullname}.{name}"
-                self.status.features[name] = feature
+                feature.name = alias or name
+                self.status.features[alias or name] = feature
 
         self.status.index_keys = index_keys
         return feature_set_objects, feature_set_fields
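
The practical effect: when a feature is requested with an alias, the vector's status now records it under that alias. A hedged example (the feature set and column names are hypothetical; `as` aliasing in feature references is existing mlrun syntax):

    import mlrun.feature_store as fstore

    vector = fstore.FeatureVector(
        "stocks-vec",
        features=["stocks.price as closing_price"],
    )
    # After the fix, the feature lands in the status under its alias:
    # vector.status.features["closing_price"]
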
@@ -727,7 +730,6 @@ class OnlineVectorService:
         vector,
         graph,
         index_columns,
-        all_fs_entities: List[str] = None,
         impute_policy: dict = None,
         requested_columns: List[str] = None,
     ):
@@ -736,7 +738,6 @@ class OnlineVectorService:
 
         self._controller = graph.controller
         self._index_columns = index_columns
-        self._all_fs_entities = all_fs_entities
         self._impute_values = {}
         self._requested_columns = requested_columns
 
@@ -865,7 +866,7 @@ class OnlineVectorService:
         ):
             data[name] = self._impute_values.get(name, v)
         if not self.vector.spec.with_indexes:
-            for name in self._all_fs_entities:
+            for name in self.vector.status.index_keys:
                 data.pop(name, None)
         if not any(data.values()):
             data = None
@@ -14,7 +14,7 @@
 import mlrun.errors
 
 from .dask_merger import DaskFeatureMerger
-from .job import run_merge_job  # noqa
+from .job import RemoteVectorResponse, run_merge_job  # noqa
 from .local_merger import LocalFeatureMerger
 from .spark_merger import SparkFeatureMerger
 from .storey_merger import StoreyFeatureMerger
@@ -21,7 +21,7 @@ import pandas as pd
 import mlrun
 from mlrun.datastore.targets import CSVTarget, ParquetTarget
 from mlrun.feature_store.feature_set import FeatureSet
-from mlrun.feature_store.feature_vector import Feature, JoinGraph
+from mlrun.feature_store.feature_vector import JoinGraph
 
 from ...utils import logger, str_to_timestamp
 from ..feature_vector import OfflineVectorResponse
@@ -137,6 +137,10 @@ class BaseMerger(abc.ABC):
         )
 
     def _write_to_offline_target(self, timestamp_key=None):
+        save_vector = False
+        if not self._drop_indexes and timestamp_key not in self._drop_columns:
+            self.vector.status.timestamp_key = timestamp_key
+            save_vector = True
         if self._target:
             is_persistent_vector = self.vector.metadata.name is not None
             if not self._target.path and not is_persistent_vector:
@@ -145,22 +149,13 @@ class BaseMerger(abc.ABC):
                 )
             self._target.set_resource(self.vector)
             size = self._target.write_dataframe(
-                self._result_df,
-                timestamp_key=timestamp_key
-                if not self._drop_indexes and timestamp_key not in self._drop_columns
-                else None,
+                self._result_df, timestamp_key=self.vector.status.timestamp_key
             )
             if is_persistent_vector:
                 target_status = self._target.update_resource_status("ready", size=size)
                 logger.info(f"wrote target: {target_status}")
-                self.vector.save()
-            if not self._drop_indexes:
-                self.vector.spec.entity_fields = [
-                    Feature(name=feature, value_type=self._result_df[feature].dtype)
-                    if self._result_df[feature].dtype.name != "object"
-                    else Feature(name=feature, value_type="str")
-                    for feature in self._index_columns
-                ]
+                save_vector = True
+        if save_vector:
             self.vector.save()
 
     def _set_indexes(self, df):
@@ -161,6 +161,13 @@ class RemoteVectorResponse:
         :param df_module: optional, py module used to create the DataFrame (e.g. pd, dd, cudf, ..)
         :param kwargs: extended DataItem.as_df() args
         """
+        self._is_ready()
+        if not columns:
+            columns = list(self.vector.status.features.keys())
+            if self.with_indexes:
+                columns += self.vector.status.index_keys
+            if self.vector.status.timestamp_key is not None:
+                columns.insert(0, self.vector.status.timestamp_key)
 
         file_format = kwargs.get("format")
         if not file_format:
@@ -169,9 +176,7 @@ class RemoteVectorResponse:
             columns=columns, df_module=df_module, format=file_format, **kwargs
         )
         if self.with_indexes:
-            df.set_index(
-                list(self.vector.spec.entity_fields.keys()), inplace=True, drop=True
-            )
+            df.set_index(self.vector.status.index_keys, inplace=True, drop=True)
         return df
 
     @property
@@ -151,7 +151,6 @@ class StoreyFeatureMerger(BaseMerger):
         server = create_graph_server(graph=graph, parameters={})
 
         cache = ResourceCache()
-        all_fs_entities = []
         for featureset in feature_set_objects.values():
             driver = get_online_target(featureset)
             if not driver:
@@ -160,9 +159,6 @@ class StoreyFeatureMerger(BaseMerger):
                 )
             cache.cache_table(featureset.uri, driver.get_table_object())
 
-            for key in featureset.spec.entities.keys():
-                if key not in all_fs_entities:
-                    all_fs_entities.append(key)
         server.init_states(context=None, namespace=None, resource_cache=cache)
         server.init_object(None)
 
@@ -170,7 +166,6 @@ class StoreyFeatureMerger(BaseMerger):
             self.vector,
             graph,
             entity_keys,
-            all_fs_entities=all_fs_entities,
             impute_policy=self.impute_policy,
             requested_columns=requested_columns,
         )
@@ -356,7 +356,14 @@ class VirtualDrift:
             # Calculate the feature's drift mean:
             tvd = results[TotalVarianceDistance.NAME]
             hellinger = results[HellingerDistance.NAME]
-            if not tvd or not hellinger:
+            if tvd is None or hellinger is None:
+                logger.warning(
+                    "Can't calculate drift for this feature because at least one of the required"
+                    "statistical metrics is missing",
+                    feature=feature,
+                    tvd=tvd,
+                    hellinger=hellinger,
+                )
                 continue
             metrics_results_dictionary = (tvd + hellinger) / 2
             # Decision rule for drift detection:
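
The switch from truthiness to an explicit `is None` check matters because a drift metric of exactly 0.0 is falsy in Python. A standalone illustration:

    # 0.0 is falsy, so the old `not tvd` check wrongly skipped features
    # with zero measured drift; `is None` only skips missing metrics.
    tvd = 0.0
    assert not tvd            # old condition would have hit `continue`
    assert tvd is not None    # new condition keeps the valid result
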
@@ -52,7 +52,7 @@ def get_workflow_engine(engine_kind, local=False):
         elif engine_kind == "remote":
             raise mlrun.errors.MLRunInvalidArgumentError(
                 "cannot run a remote pipeline locally using `kind='remote'` and `local=True`. "
-                "in order to run a local pipeline remotely, please use `engine='remote: local'` instead"
+                "in order to run a local pipeline remotely, please use `engine='remote:local'` instead"
             )
         return _LocalRunner
     if not engine_kind or engine_kind == "kfp":
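
A hedged usage sketch of the corrected engine string (assumes `project` is a loaded MlrunProject with a workflow named "main"):

    # Run the workflow through the remote runner with the local inner engine:
    run = project.run(name="main", engine="remote:local")
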
@@ -782,6 +782,21 @@ class _RemoteRunner(_PipelineRunner):
             project_name=project.name,
         )
 
+        # set it relative to project path
+        # as the runner pod will mount and use `load_and_run` which will use the project context
+        # to load the workflow file to.
+        # e.g.
+        # /path/to/project/workflow.py -> ./workflow.py
+        # /path/to/project/subdir/workflow.py -> ./workflow.py
+        if workflow_spec.path:
+            prefix = project.spec.get_code_path()
+            if workflow_spec.path.startswith(prefix):
+                workflow_spec.path = workflow_spec.path.removeprefix(prefix)
+            relative_prefix = "."
+            if not workflow_spec.path.startswith("/"):
+                relative_prefix += "/"
+            workflow_spec.path = f"{relative_prefix}{workflow_spec.path}"
+
         workflow_response = run_db.submit_workflow(
             project=project.name,
             name=workflow_name,
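
The path rewrite is easy to verify in isolation. A standalone restatement of the logic above (not mlrun code; note that `str.removeprefix` requires Python 3.9+):

    def to_project_relative(path: str, code_path: str) -> str:
        # Strip the project code path, then force a "./"-relative form.
        if path.startswith(code_path):
            path = path.removeprefix(code_path)
        relative_prefix = "."
        if not path.startswith("/"):
            relative_prefix += "/"
        return f"{relative_prefix}{path}"

    assert to_project_relative("/path/to/project/workflow.py", "/path/to/project") == "./workflow.py"
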
mlrun/projects/project.py CHANGED
@@ -1265,7 +1265,7 @@ class MlrunProject(ModelObj):
         :param name: name of the workflow
         :param workflow_path: url/path for the workflow file
         :param embed: add the workflow code into the project.yaml
-        :param engine: workflow processing engine ("kfp" or "local")
+        :param engine: workflow processing engine ("kfp", "local", "remote" or "remote:local")
         :param args_schema: list of arg schema definitions (:py:class`~mlrun.model.EntrypointParam`)
         :param handler: workflow function handler
         :param schedule: ScheduleCronTrigger class instance or a standard crontab expression string
@@ -1285,8 +1285,9 @@ class MlrunProject(ModelObj):
                 f"Invalid 'workflow_path': '{workflow_path}'. Please provide a valid URL/path to a file."
             )
 
-        if image and engine not in ["remote"]:
-            logger.warning("Image is only relevant for remote workflows, ignoring it")
+        # engine could be "remote" or "remote:local"
+        if image and ((engine and "remote" in engine) or schedule):
+            logger.warning("Image is only relevant for 'remote' engine, ignoring it")
 
         if embed:
             if (
@@ -2496,6 +2497,8 @@ class MlrunProject(ModelObj):
         inner_engine = None
         if engine and engine.startswith("remote"):
             if ":" in engine:
+
+                # inner could be either kfp or local
                 engine, inner_engine = engine.split(":")
         elif workflow_spec.schedule:
             inner_engine = engine
@@ -1,4 +1,4 @@
 {
-    "git_commit": "9e89715c84b418844eba2c0a644c10efc830748b",
-    "version": "1.5.0-rc15"
+    "git_commit": "f429a0c8d769659dd17515b9056234b9d9561d86",
+    "version": "1.5.0-rc17"
 }
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: mlrun
-Version: 1.5.0rc15
+Version: 1.5.0rc17
 Summary: Tracking and config of machine learning runs
 Home-page: https://github.com/mlrun/mlrun
 Author: Yaron Haviv
@@ -29,7 +29,7 @@ mlrun/api/api/endpoints/background_tasks.py,sha256=5SV4iDF-dZR878kp77VpANRb86BJY
 mlrun/api/api/endpoints/client_spec.py,sha256=lN2By-KGzHuzNTGiJk0RK3SM6ZUr3VLgsudsymrtZE8,1214
 mlrun/api/api/endpoints/clusterization_spec.py,sha256=0eSG7TSt647V7-A-uI5CLRItkVOBqIBM9ep_fiMFqPI,1175
 mlrun/api/api/endpoints/datastore_profile.py,sha256=yzW4VI7yYwWRAHieMcBch4IVz3WE2ctLrE2fN55ci2w,7475
-mlrun/api/api/endpoints/feature_store.py,sha256=mq6XVv9zaW7zU-XRHh8X2gTwk-z6a1QE3bcuOdR6jQU,28853
+mlrun/api/api/endpoints/feature_store.py,sha256=OwNnHw_yUZ6P4NZEwD2w4j2hks6xmBaGbTqi1Q8j75c,29045
 mlrun/api/api/endpoints/files.py,sha256=dFwmP40O2MXI1CmU3gJ1XrOVjSubpb0KJtguWJC5Bn4,6712
 mlrun/api/api/endpoints/frontend_spec.py,sha256=r8r6BSDpYcUpWKJX7zqXJPtYBZHzfZILd1w-SIikea8,5530
 mlrun/api/api/endpoints/functions.py,sha256=WSQWv0QXMoGgnEunSMQBbPFVig0H8hdbxkaGokUgrBQ,41111
@@ -48,7 +48,7 @@ mlrun/api/api/endpoints/schedules.py,sha256=Ko2dtaNKb3dHzjWR_LeoVFbhxCxkDDhaQ-ER
 mlrun/api/api/endpoints/secrets.py,sha256=jPOQiTXMRUYcR9K0NT74ScNQaZpI1o-DMUnjtt6RsvE,6061
 mlrun/api/api/endpoints/submit.py,sha256=xKUX-9q6Zh0s9XOZueorn-Dc6knK4eSz3SS73Q0fXKc,5327
 mlrun/api/api/endpoints/tags.py,sha256=KIZziSd6XF2TR94b_4_UDsptXHecDXgOIwQYFRApGmA,4849
-mlrun/api/api/endpoints/workflows.py,sha256=vZEgZ6gEDdTRJgBxJPd8-XIsyNsNGUZxTrFSntMhG3c,13795
+mlrun/api/api/endpoints/workflows.py,sha256=ROItMDYiQphxZK4kqzy2K24XBETPp9isBVah97laQ-U,14099
 mlrun/api/api/endpoints/internal/__init__.py,sha256=IVXgIyUv8lVStgCjFm3fZDDMIqEzmMAvkbbbOKwNJgo,1203
 mlrun/api/api/endpoints/internal/config.py,sha256=IVqf6YWokNXY3LC5o1BWqlEDVQ-AJdRrbWRwTFFAwfY,1043
 mlrun/api/api/endpoints/internal/memory_reports.py,sha256=PRVaj8i_WAbIqjP37xBlYfPunkZHTCXMprka0Qoxhxw,1718
@@ -68,12 +68,12 @@ mlrun/api/crud/runs.py,sha256=k-M7Dt4QEBWZvpmeFtysz432CwxpoEMJfbOk6h04ehk,6577
 mlrun/api/crud/runtime_resources.py,sha256=hAulDtCyWrMDTe35gIetg3ApiHumBmCA7Ap3Fo2fSqw,5645
 mlrun/api/crud/secrets.py,sha256=hlO4WCxGYF6_DdSBRWgmsZ9IiBxUNHfnCnxkImlTxmQ,22489
 mlrun/api/crud/tags.py,sha256=pcXXrmlfgCBCtw5uBX6hAHdqO96XIg0ov5vn-u6r5Q4,3277
-mlrun/api/crud/workflows.py,sha256=7RtE1cEL32pcBzotIArDGwQhae9_s7tQHmYqShtYIhU,13612
+mlrun/api/crud/workflows.py,sha256=ho-8iG38wJJbVLXH4Wu9gLWobbWq4nQDanPfpu5JPYM,13571
 mlrun/api/crud/model_monitoring/__init__.py,sha256=_rZmfvis_Qkhephv_SEgzqUVuC4bY-D84mCyyztWRs0,759
 mlrun/api/crud/model_monitoring/deployment.py,sha256=hHvknNfPJKBhC_QmxSEjojfvFUTkokw8ZTi8FBL8s1o,32279
 mlrun/api/crud/model_monitoring/grafana.py,sha256=7D-sUY3PjFh7_HjYe-KWryvepxO9zLArVC3U7IChuqA,17494
 mlrun/api/crud/model_monitoring/helpers.py,sha256=5K6y7hIjBdxY__rNfluINFgis1IJZQWuK-TjP26ypag,6114
-mlrun/api/crud/model_monitoring/model_endpoints.py,sha256=s1KwAAZ0sz6SY5XZT4eyyVKBTysnqHzEnLdtwd11aTU,31423
+mlrun/api/crud/model_monitoring/model_endpoints.py,sha256=WSVopB2hV2YFiyKTaXXBdpvBDt-zQ0Nv8qPrOiQ2T2c,31598
 mlrun/api/crud/runtimes/__init__.py,sha256=xY3wHC4TEJgez7qtnn1pQvHosi8-5UJOCtyGBS7FcGE,571
 mlrun/api/crud/runtimes/nuclio/__init__.py,sha256=xY3wHC4TEJgez7qtnn1pQvHosi8-5UJOCtyGBS7FcGE,571
 mlrun/api/crud/runtimes/nuclio/function.py,sha256=Tgca7uACPlyeoGiLkIHsX0d9SePy_a6pEW03ryGMdPc,18762
@@ -248,7 +248,7 @@ mlrun/datastore/helpers.py,sha256=-bKveE9rteLd0hJd6OSMuMbfz09W_OXyu1G5O2ihZjs,62
 mlrun/datastore/inmem.py,sha256=6PAltUk7uyYlDgnsaJPOkg_P98iku1ys2e2wpAmPRkc,2779
 mlrun/datastore/redis.py,sha256=x2A2LgGeDPTtajPoB3RBqYysDgPrSW5iQd5PR_TLhNI,5466
 mlrun/datastore/s3.py,sha256=Zd3Bmd9IMA3WEG4fWQnhJqyAjwW2PouHPpk4uDnMttg,7035
-mlrun/datastore/sources.py,sha256=yP81r4qzl0PF6TdfZFLNCjA-qr5gZpGsksOB5iY5he8,37997
+mlrun/datastore/sources.py,sha256=xbdgYGWuDfshdMY6O9pU3B-kiA7sIx43zTm9ys318OA,38649
 mlrun/datastore/spark_udf.py,sha256=NnnB3DZxZb-rqpRy7b-NC7QWXuuqFn3XkBDc86tU4mQ,1498
 mlrun/datastore/store_resources.py,sha256=SUY9oJieq3r8PEq8G661XxmXem_e-CxDoy2AJ7dpXBk,6906
 mlrun/datastore/targets.py,sha256=vPirtqpyynFQiARums9gxMiEFbodt-H6ab4U2oNV0r0,66570
@@ -259,22 +259,22 @@ mlrun/datastore/wasbfs/fs.py,sha256=FfKli7rBwres1pg8AxDlyyg1D5WukBEKb8Mi1SF5HqY,
 mlrun/db/__init__.py,sha256=Wy3NbZwgAneRHXCIKygQE-68tnAhvF7lVxrFSh9G6Y4,1145
 mlrun/db/base.py,sha256=vfUrS6Bs5Rqxdx285tpNuxRr60C82tyaULY8M5gCjg4,17364
 mlrun/db/factory.py,sha256=wTEKHEmdDkylM6IkTYvmEYVF8gn2HdjLoLoWICCyatI,2403
-mlrun/db/httpdb.py,sha256=JeegiQT3fabDlFXtw6RGwteSA460ARzkbZ2MpJY-h-Y,145536
+mlrun/db/httpdb.py,sha256=0jHxfh50YUJSP6tnDardv8uOu8QHIUIznsHijcAnO9A,145794
 mlrun/db/nopdb.py,sha256=Z7Di4Tz7OXMZNq8clVZInKlY0QzHu5xoo4KN5soGw_E,13990
 mlrun/feature_store/__init__.py,sha256=n1F5m1svFW2chbE2dJdWzZJJiYS4E-y8PQsG9Q-F0lU,1584
-mlrun/feature_store/api.py,sha256=SLKU9-Lsr-VJ2Vjsw-o1ElrzxhhdL2czWyp4NaspsrU,45225
+mlrun/feature_store/api.py,sha256=0-YFccl_p3c7jPizxJ2P1lcdtSkbhJhIIUHP8TRQay8,45276
 mlrun/feature_store/common.py,sha256=sl2pmkuv5w4KtTMjt0ky34Qps0iuoXNWbyYGuBkCxjc,12809
-mlrun/feature_store/feature_set.py,sha256=Kqtpasmdn6aypXZBXTrYGQDRLh7NUDya-JZOjkaNL6U,47422
-mlrun/feature_store/feature_vector.py,sha256=o9-yJXcYFesbtNQYqAQ1g-8rmcVH4XhHHCklDvGo2H4,34962
+mlrun/feature_store/feature_set.py,sha256=oRZkMT4oHSb0Z3ViaHprPNXEBk8CrEYwOLVNO8FVQLo,47493
+mlrun/feature_store/feature_vector.py,sha256=3v0MFS4LvcFQ7cJA6-vPoa1tJ-qMIejnDuJhHGY7J5k,35012
 mlrun/feature_store/ingestion.py,sha256=GZkrke5_JJfA_PGOFc6ekbHKujHgMgqr6t4vop5n_bg,11210
 mlrun/feature_store/steps.py,sha256=jleXgtUbR_BDk0Q060vHYwfBwX01VoU3Y1s5S6QF8mo,29028
-mlrun/feature_store/retrieval/__init__.py,sha256=6qtZFx8-vtsHv8ZGJuTv3UDe9m7XIlOq4ryEYC58Eww,1260
-mlrun/feature_store/retrieval/base.py,sha256=tzDuEl6mN5sdjibCuYhqUxUCh7kcyhnD9re2r_vHSzw,30988
+mlrun/feature_store/retrieval/__init__.py,sha256=bwA4copPpLQi8fyoUAYtOyrlw0-6f3-Knct8GbJSvRg,1282
+mlrun/feature_store/retrieval/base.py,sha256=gzY7sesiy2sMQa6VtG2nSpscEIjcrsLm5QgS8LxM2nw,30732
 mlrun/feature_store/retrieval/dask_merger.py,sha256=Lxoj16c8hp9i5RHf-8BmG9NmReeEev6AHu96oEN2Gp8,4895
-mlrun/feature_store/retrieval/job.py,sha256=ZveUaLml6jU8EZCGoxM4vLKNWnjC7EDndGMH3aOuAu0,7714
+mlrun/feature_store/retrieval/job.py,sha256=CapLTnlN-5hzFVnC2wVXX1ydhXs_szNPKrAoOLK3cZc,8010
 mlrun/feature_store/retrieval/local_merger.py,sha256=M0R2FWc-kuLVvyAYbawrxAJvqjshtaRK2gQqF6DjgxM,4218
 mlrun/feature_store/retrieval/spark_merger.py,sha256=6A1YJF2W3zbMqnQ1WrBVzRpNExW9vX54ybREO2B9aXU,9819
-mlrun/feature_store/retrieval/storey_merger.py,sha256=6bRnVm7YQBm3igviKeO0uXH3TSf74ZxspFnD4w23J5w,6539
+mlrun/feature_store/retrieval/storey_merger.py,sha256=5YM0UPrLjGOobulHkowRO-1LuvFD2cm_0GxcpnTdu0I,6314
 mlrun/frameworks/__init__.py,sha256=qRHe_nUfxpoLaSASAkIxcW6IyunMtxq5LXhjzZMO_1E,743
 mlrun/frameworks/parallel_coordinates.py,sha256=8buVHWA-mD1R5R9jm71XN5fvDyz9Bkp7D1xD4vFYHTE,11466
 mlrun/frameworks/_common/__init__.py,sha256=7afutDCDVp999gyWSWQZMJRKGuW3VP3MFil8cobRsyg,962
@@ -368,7 +368,7 @@ mlrun/launcher/remote.py,sha256=neqIWNi64uR694DIFg6BdJk6Bbi0w_FIhfjChX1wsmk,6772
 mlrun/model_monitoring/__init__.py,sha256=XaYyvWsIXpjJQ2gCPj8tFvfSbRSEEqgDtNz4tCE5H4g,915
 mlrun/model_monitoring/api.py,sha256=MyS6dmFv2w9X-kGEPV4Z-Sn2Sihx85hmSe8Yd-NRQgI,34624
 mlrun/model_monitoring/application.py,sha256=OlpzGxBkTuBbqFIwN9AwiibqDP5YZaBHv6a7bWahm5c,12209
-mlrun/model_monitoring/batch.py,sha256=tnhTUkY9nDedkK7NZ5U53jL0P9i83_7QZlwEh1U0AhM,42337
+mlrun/model_monitoring/batch.py,sha256=kcSXUFaUp4KEiX_jihXvqjJW-IZmV56obEYpVrMwBFc,42654
 mlrun/model_monitoring/batch_application.py,sha256=kFGZUss7CBkpcei9NlHFmKoU1Wu5l7_pdq8GuOhK0bk,21938
 mlrun/model_monitoring/batch_application_handler.py,sha256=6I3XmganxCfI-IUFEvFpdsUFiw3OiIMq58BS-XARIMs,1046
 mlrun/model_monitoring/evidently_application.py,sha256=ToQ9BfXZJqhVSxsVyWVEpYjKYaaTAT9wB83Tj7KVJdA,3401
@@ -409,8 +409,8 @@ mlrun/platforms/iguazio.py,sha256=LU1d33ll5EKIyp2zitCffZIbq-3fRwNSNO9MK2cIsHc,21
 mlrun/platforms/other.py,sha256=z4pWqxXkVVuMLk-MbNb0Y_ZR5pmIsUm0R8vHnqpEnew,11852
 mlrun/projects/__init__.py,sha256=Lv5rfxyXJrw6WGOWJKhBz66M6t3_zsNMCfUD6waPwx4,1153
 mlrun/projects/operations.py,sha256=AJsin0LrJHTisAPi-9t2ciFNZ83QdMRESIG31Mh_0x4,17948
-mlrun/projects/pipelines.py,sha256=z12g0yzYswZpaFe_XL0eTjgYPkJ3bpCCL-cjxZ0ni64,35745
-mlrun/projects/project.py,sha256=7OWnIn5GWY6GsF2CrQ6SQG_IpyKccw4xwJPb1r7OwGs,138028
+mlrun/projects/pipelines.py,sha256=1ld4bzpUsqSmElwMBvEPdFJlDFenbGRhOok0BB4h1ug,36544
+mlrun/projects/project.py,sha256=P11VYOYntNZgUE6jxwULoT5N_VhJ3O-vcP8_BrcF0LY,138181
 mlrun/runtimes/__init__.py,sha256=OuwnzCoaoXnqAv_RhoYRF6mRPfZ2pkslenxASpXhHQM,6707
 mlrun/runtimes/base.py,sha256=qMOhtgnym5SlX9xXIGnC-KjwGng8a4yRI7KH1mqVC9U,38261
 mlrun/runtimes/constants.py,sha256=TnkD0nQ7pcFq1aJvbweoM4BgbkhaA6fHhg9rnAkxDBE,6689
@@ -470,11 +470,11 @@ mlrun/utils/notifications/notification/ipython.py,sha256=qrBmtECiRG6sZpCIVMg7RZc
 mlrun/utils/notifications/notification/slack.py,sha256=5JysqIpUYUZKXPSeeZtbl7qb2L9dj7p2NvnEBcEsZkA,3898
 mlrun/utils/notifications/notification/webhook.py,sha256=QHezCuN5uXkLcroAGxGrhGHaxAdUvkDLIsp27_Yrfd4,2390
 mlrun/utils/version/__init__.py,sha256=7kkrB7hEZ3cLXoWj1kPoDwo4MaswsI2JVOBpbKgPAgc,614
-mlrun/utils/version/version.json,sha256=0_calU8oHb8tbScd8SuakRDjZJQ9mfJG28_MySTXRhU,89
+mlrun/utils/version/version.json,sha256=DVu8iNZNdV07ZdarkvN82a5XXbX_rb2pa6VDs5Lru-U,89
 mlrun/utils/version/version.py,sha256=HMwseV8xjTQ__6T6yUWojx_z6yUj7Io7O4NcCCH_sz8,1970
-mlrun-1.5.0rc15.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-mlrun-1.5.0rc15.dist-info/METADATA,sha256=K3O8vF8cGzEJIVRkf4EarTcBYFNyjmckdaDORmt5H2w,17826
-mlrun-1.5.0rc15.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
-mlrun-1.5.0rc15.dist-info/entry_points.txt,sha256=ZbXmb36B9JmK7EaleP8MIAbZSOQXQV0iwKR6si0HUWk,47
-mlrun-1.5.0rc15.dist-info/top_level.txt,sha256=NObLzw3maSF9wVrgSeYBv-fgnHkAJ1kEkh12DLdd5KM,6
-mlrun-1.5.0rc15.dist-info/RECORD,,
+mlrun-1.5.0rc17.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+mlrun-1.5.0rc17.dist-info/METADATA,sha256=KKrbNsNaRqjfgMhKOqgvGzCElfKZPcK6VCwfI8U5Tfs,17826
+mlrun-1.5.0rc17.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
+mlrun-1.5.0rc17.dist-info/entry_points.txt,sha256=ZbXmb36B9JmK7EaleP8MIAbZSOQXQV0iwKR6si0HUWk,47
+mlrun-1.5.0rc17.dist-info/top_level.txt,sha256=NObLzw3maSF9wVrgSeYBv-fgnHkAJ1kEkh12DLdd5KM,6
+mlrun-1.5.0rc17.dist-info/RECORD,,