mlrun 1.8.0rc7__py3-none-any.whl → 1.8.0rc10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mlrun might be problematic. Consult the package's registry page or release notes for more details.

mlrun/db/nopdb.py CHANGED
@@ -582,8 +582,9 @@ class NopDB(RunDBInterface):
582
582
  self,
583
583
  name: str,
584
584
  project: str,
585
- function_name: str,
586
- endpoint_id: str,
585
+ function_name: Optional[str] = None,
586
+ function_tag: Optional[str] = None,
587
+ endpoint_id: Optional[str] = None,
587
588
  ):
588
589
  pass
589
590
 
@@ -592,6 +593,7 @@ class NopDB(RunDBInterface):
592
593
  project: str,
593
594
  name: Optional[str] = None,
594
595
  function_name: Optional[str] = None,
596
+ function_tag: Optional[str] = None,
595
597
  model_name: Optional[str] = None,
596
598
  labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
597
599
  start: Optional[datetime.datetime] = None,
@@ -608,6 +610,7 @@ class NopDB(RunDBInterface):
608
610
  name: str,
609
611
  project: str,
610
612
  function_name: Optional[str] = None,
613
+ function_tag: Optional[str] = None,
611
614
  endpoint_id: Optional[str] = None,
612
615
  tsdb_metrics: bool = True,
613
616
  feature_analysis: bool = False,
@@ -620,6 +623,7 @@ class NopDB(RunDBInterface):
620
623
  project: str,
621
624
  attributes: dict,
622
625
  function_name: Optional[str] = None,
626
+ function_tag: Optional[str] = None,
623
627
  endpoint_id: Optional[str] = None,
624
628
  ) -> mlrun.common.schemas.ModelEndpoint:
625
629
  pass
@@ -375,8 +375,10 @@ def _generate_model_endpoint(
375
375
  ),
376
376
  spec=mlrun.common.schemas.ModelEndpointSpec(
377
377
  function_name=function_name,
378
- model_name=model_obj.metadata.key if model_path else None,
379
- model_uid=model_obj.metadata.uid if model_path else None,
378
+ model_name=model_obj.metadata.key if model_obj else None,
379
+ model_uid=model_obj.metadata.uid if model_obj else None,
380
+ model_tag=model_obj.metadata.tag if model_obj else None,
381
+ model_db_key=model_obj.spec.db_key if model_obj else None,
380
382
  model_class="drift-analysis",
381
383
  ),
382
384
  status=mlrun.common.schemas.ModelEndpointStatus(
@@ -622,4 +624,13 @@ def _create_model_monitoring_function_base(
622
624
  project=project,
623
625
  writer_application_name=mm_constants.MonitoringFunctionNames.WRITER,
624
626
  )
627
+
628
+ def block_to_mock_server(*args, **kwargs) -> typing.NoReturn:
629
+ raise NotImplementedError(
630
+ "Model monitoring serving functions do not support `.to_mock_server`. "
631
+ "You may call your model monitoring application object logic via the `.evaluate` method."
632
+ )
633
+
634
+ func_obj.to_mock_server = block_to_mock_server # Until ML-7643 is implemented
635
+
625
636
  return func_obj
@@ -318,7 +318,6 @@ def update_model_endpoint_last_request(
318
318
  project=project,
319
319
  endpoint_id=model_endpoint.metadata.uid,
320
320
  name=model_endpoint.metadata.name,
321
- function_name=model_endpoint.spec.function_name,
322
321
  attributes={mm_constants.EventFieldType.LAST_REQUEST: current_request},
323
322
  )
324
323
  else: # model endpoint without any serving function - close the window "manually"
@@ -339,8 +339,8 @@ class ProcessEndpointEvent(mlrun.feature_store.steps.MapClass):
339
339
 
340
340
  # In case this process fails, resume state from existing record
341
341
  self.resume_state(
342
- endpoint_id,
343
- full_event.body.get(EventFieldType.MODEL),
342
+ endpoint_id=endpoint_id,
343
+ endpoint_name=full_event.body.get(EventFieldType.MODEL),
344
344
  )
345
345
 
346
346
  # Validate event fields
mlrun/projects/project.py CHANGED
@@ -1870,13 +1870,13 @@ class MlrunProject(ModelObj):
1870
1870
 
1871
1871
  def get_vector_store_collection(
1872
1872
  self,
1873
- collection_name: str,
1874
1873
  vector_store: "VectorStore", # noqa: F821
1874
+ collection_name: Optional[str] = None,
1875
1875
  ) -> VectorStoreCollection:
1876
1876
  return VectorStoreCollection(
1877
1877
  self,
1878
- collection_name,
1879
1878
  vector_store,
1879
+ collection_name,
1880
1880
  )
1881
1881
 
1882
1882
  def log_document(
@@ -2117,10 +2117,9 @@ class MlrunProject(ModelObj):
2117
2117
 
2118
2118
  def set_model_monitoring_function(
2119
2119
  self,
2120
- func: typing.Union[str, mlrun.runtimes.BaseRuntime, None] = None,
2120
+ func: typing.Union[str, mlrun.runtimes.RemoteRuntime, None] = None,
2121
2121
  application_class: typing.Union[
2122
- str,
2123
- mm_app.ModelMonitoringApplicationBase,
2122
+ str, mm_app.ModelMonitoringApplicationBase, None
2124
2123
  ] = None,
2125
2124
  name: Optional[str] = None,
2126
2125
  image: Optional[str] = None,
@@ -2130,7 +2129,7 @@ class MlrunProject(ModelObj):
2130
2129
  requirements: Optional[typing.Union[str, list[str]]] = None,
2131
2130
  requirements_file: str = "",
2132
2131
  **application_kwargs,
2133
- ) -> mlrun.runtimes.BaseRuntime:
2132
+ ) -> mlrun.runtimes.RemoteRuntime:
2134
2133
  """
2135
2134
  Update or add a monitoring function to the project.
2136
2135
  Note: to deploy the function after linking it to the project,
@@ -2142,7 +2141,8 @@ class MlrunProject(ModelObj):
2142
2141
  name="myApp", application_class="MyApp", image="mlrun/mlrun"
2143
2142
  )
2144
2143
 
2145
- :param func: Function object or spec/code url, None refers to current Notebook
2144
+ :param func: Remote function object or spec/code URL. :code:`None` refers to the current
2145
+ notebook.
2146
2146
  :param name: Name of the function (under the project), can be specified with a tag to support
2147
2147
  versions (e.g. myfunc:v1)
2148
2148
  Default: job
@@ -2158,6 +2158,7 @@ class MlrunProject(ModelObj):
2158
2158
  :param application_class: Name or an Instance of a class that implements the monitoring application.
2159
2159
  :param application_kwargs: Additional keyword arguments to be passed to the
2160
2160
  monitoring application's constructor.
2161
+ :returns: The model monitoring remote function object.
2161
2162
  """
2162
2163
  (
2163
2164
  resolved_function_name,
@@ -2195,7 +2196,7 @@ class MlrunProject(ModelObj):
2195
2196
  requirements: Optional[typing.Union[str, list[str]]] = None,
2196
2197
  requirements_file: str = "",
2197
2198
  **application_kwargs,
2198
- ) -> mlrun.runtimes.BaseRuntime:
2199
+ ) -> mlrun.runtimes.RemoteRuntime:
2199
2200
  """
2200
2201
  Create a monitoring function object without setting it to the project
2201
2202
 
@@ -2205,7 +2206,7 @@ class MlrunProject(ModelObj):
2205
2206
  application_class_name="MyApp", image="mlrun/mlrun", name="myApp"
2206
2207
  )
2207
2208
 
2208
- :param func: Code url, None refers to current Notebook
2209
+ :param func: The function's code URL. :code:`None` refers to the current notebook.
2209
2210
  :param name: Name of the function, can be specified with a tag to support
2210
2211
  versions (e.g. myfunc:v1)
2211
2212
  Default: job
@@ -2221,6 +2222,7 @@ class MlrunProject(ModelObj):
2221
2222
  :param application_class: Name or an Instance of a class that implementing the monitoring application.
2222
2223
  :param application_kwargs: Additional keyword arguments to be passed to the
2223
2224
  monitoring application's constructor.
2225
+ :returns: The model monitoring remote function object.
2224
2226
  """
2225
2227
 
2226
2228
  _, function_object, _ = self._instantiate_model_monitoring_function(
@@ -2253,7 +2255,7 @@ class MlrunProject(ModelObj):
2253
2255
  requirements: typing.Union[str, list[str], None] = None,
2254
2256
  requirements_file: str = "",
2255
2257
  **application_kwargs,
2256
- ) -> tuple[str, mlrun.runtimes.BaseRuntime, dict]:
2258
+ ) -> tuple[str, mlrun.runtimes.RemoteRuntime, dict]:
2257
2259
  import mlrun.model_monitoring.api
2258
2260
 
2259
2261
  kind = None
@@ -3550,11 +3552,13 @@ class MlrunProject(ModelObj):
3550
3552
  name: Optional[str] = None,
3551
3553
  model_name: Optional[str] = None,
3552
3554
  function_name: Optional[str] = None,
3555
+ function_tag: Optional[str] = None,
3553
3556
  labels: Optional[list[str]] = None,
3554
3557
  start: Optional[datetime.datetime] = None,
3555
3558
  end: Optional[datetime.datetime] = None,
3556
3559
  top_level: bool = False,
3557
3560
  uids: Optional[list[str]] = None,
3561
+ latest_only: bool = False,
3558
3562
  ) -> mlrun.common.schemas.ModelEndpointList:
3559
3563
  """
3560
3564
  Returns a list of `ModelEndpoint` objects. Each `ModelEndpoint` object represents the current state of a
@@ -3562,10 +3566,11 @@ class MlrunProject(ModelObj):
3562
3566
  1) name
3563
3567
  2) model_name
3564
3568
  3) function_name
3565
- 4) labels
3566
- 5) top level
3567
- 6) uids
3568
- 7) start and end time, corresponding to the `created` field.
3569
+ 4) function_tag
3570
+ 5) labels
3571
+ 6) top level
3572
+ 7) uids
3573
+ 8) start and end time, corresponding to the `created` field.
3569
3574
  By default, when no filters are applied, all available endpoints for the given project will be listed.
3570
3575
 
3571
3576
  In addition, this functions provides a facade for listing endpoint related metrics. This facade is time-based
@@ -3574,6 +3579,7 @@ class MlrunProject(ModelObj):
3574
3579
  :param name: The name of the model to filter by
3575
3580
  :param model_name: The name of the model to filter by
3576
3581
  :param function_name: The name of the function to filter by
3582
+ :param function_tag: The tag of the function to filter by
3577
3583
  :param labels: Filter model endpoints by label key-value pairs or key existence. This can be provided as:
3578
3584
  - A dictionary in the format `{"label": "value"}` to match specific label key-value pairs,
3579
3585
  or `{"label": None}` to check for key existence.
@@ -3594,11 +3600,13 @@ class MlrunProject(ModelObj):
3594
3600
  name=name,
3595
3601
  model_name=model_name,
3596
3602
  function_name=function_name,
3603
+ function_tag=function_tag,
3597
3604
  labels=labels,
3598
3605
  start=start,
3599
3606
  end=end,
3600
3607
  top_level=top_level,
3601
3608
  uids=uids,
3609
+ latest_only=latest_only,
3602
3610
  )
3603
3611
 
3604
3612
  def run_function(
@@ -3984,18 +3992,21 @@ class MlrunProject(ModelObj):
3984
3992
  mock=mock,
3985
3993
  )
3986
3994
 
3987
- def get_artifact(self, key, tag=None, iter=None, tree=None):
3995
+ def get_artifact(
3996
+ self, key, tag=None, iter=None, tree=None, uid=None
3997
+ ) -> typing.Optional[Artifact]:
3988
3998
  """Return an artifact object
3989
3999
 
3990
- :param key: artifact key
3991
- :param tag: version tag
3992
- :param iter: iteration number (for hyper-param tasks)
3993
- :param tree: the producer id (tree)
4000
+ :param key: Artifact key
4001
+ :param tag: Version tag
4002
+ :param iter: Iteration number (for hyper-param tasks)
4003
+ :param tree: The producer id (tree)
4004
+ :param uid: The artifact uid
3994
4005
  :return: Artifact object
3995
4006
  """
3996
4007
  db = mlrun.db.get_run_db(secrets=self._secrets)
3997
4008
  artifact = db.read_artifact(
3998
- key, tag, iter=iter, project=self.metadata.name, tree=tree
4009
+ key, tag, iter=iter, project=self.metadata.name, tree=tree, uid=uid
3999
4010
  )
4000
4011
 
4001
4012
  # in tests, if an artifact is not found, the db returns None
@@ -421,8 +421,6 @@ class ServingRuntime(RemoteRuntime):
421
421
  class_name.model_path = model_path
422
422
  key, state = params_to_step(class_name, key)
423
423
  else:
424
- if not model_path and not model_url:
425
- raise ValueError("model_path or model_url must be provided")
426
424
  class_name = class_name or self.spec.default_class
427
425
  if class_name and not isinstance(class_name, str):
428
426
  raise ValueError(
mlrun/serving/routers.py CHANGED
@@ -1021,6 +1021,7 @@ def _init_endpoint_record(
1021
1021
  project=graph_server.project,
1022
1022
  name=voting_ensemble.name,
1023
1023
  function_name=graph_server.function_name,
1024
+ function_tag=graph_server.function_tag or "latest",
1024
1025
  )
1025
1026
  except mlrun.errors.MLRunNotFoundError:
1026
1027
  model_endpoint = None
@@ -1049,6 +1050,7 @@ def _init_endpoint_record(
1049
1050
  name=voting_ensemble.name,
1050
1051
  project=graph_server.project,
1051
1052
  function_name=graph_server.function_name,
1053
+ function_tag=graph_server.function_tag or "latest",
1052
1054
  function_uid=function_uid,
1053
1055
  model_class=voting_ensemble.__class__.__name__,
1054
1056
  )
@@ -1101,7 +1103,6 @@ def _init_endpoint_record(
1101
1103
  model_endpoint = db.patch_model_endpoint(
1102
1104
  project=model_endpoint.metadata.project,
1103
1105
  name=model_endpoint.metadata.name,
1104
- function_name=model_endpoint.spec.function_name,
1105
1106
  endpoint_id=model_endpoint.metadata.uid,
1106
1107
  attributes=attributes,
1107
1108
  )
@@ -1121,7 +1122,6 @@ def _init_endpoint_record(
1121
1122
  mlrun.get_run_db().patch_model_endpoint(
1122
1123
  name=name,
1123
1124
  project=graph_server.project,
1124
- function_name=graph_server.function_name,
1125
1125
  endpoint_id=uid,
1126
1126
  attributes={
1127
1127
  ModelEndpointSchema.ENDPOINT_TYPE: mlrun.common.schemas.model_monitoring.EndpointType.LEAF_EP
@@ -197,13 +197,15 @@ class V2ModelServer(StepToDict):
197
197
  extra dataitems dictionary
198
198
 
199
199
  """
200
- model_file, self.model_spec, extra_dataitems = mlrun.artifacts.get_model(
201
- self.model_path, suffix
202
- )
203
- if self.model_spec and self.model_spec.parameters:
204
- for key, value in self.model_spec.parameters.items():
205
- self._params[key] = value
206
- return model_file, extra_dataitems
200
+ if self.model_path:
201
+ model_file, self.model_spec, extra_dataitems = mlrun.artifacts.get_model(
202
+ self.model_path, suffix
203
+ )
204
+ if self.model_spec and self.model_spec.parameters:
205
+ for key, value in self.model_spec.parameters.items():
206
+ self._params[key] = value
207
+ return model_file, extra_dataitems
208
+ return None, None
207
209
 
208
210
  def load(self):
209
211
  """model loading function, see also .get_model() method"""
@@ -569,11 +571,24 @@ def _init_endpoint_record(
569
571
  logger.info("Initializing endpoint records")
570
572
  if not model.model_spec:
571
573
  model.get_model()
574
+ if model.model_spec:
575
+ model_name = model.model_spec.metadata.key
576
+ model_db_key = model.model_spec.spec.db_key
577
+ model_uid = model.model_spec.metadata.uid
578
+ model_tag = model.model_spec.tag
579
+ model_labels = model.model_spec.labels # todo : check if we still need this
580
+ else:
581
+ model_name = None
582
+ model_db_key = None
583
+ model_uid = None
584
+ model_tag = None
585
+ model_labels = {}
572
586
  try:
573
587
  model_ep = mlrun.get_run_db().get_model_endpoint(
574
588
  project=graph_server.project,
575
589
  name=model.name,
576
590
  function_name=graph_server.function_name,
591
+ function_tag=graph_server.function_tag or "latest",
577
592
  )
578
593
  except mlrun.errors.MLRunNotFoundError:
579
594
  model_ep = None
@@ -595,16 +610,18 @@ def _init_endpoint_record(
595
610
  name=model.name,
596
611
  project=graph_server.project,
597
612
  function_name=graph_server.function_name,
613
+ function_tag=graph_server.function_tag or "latest",
598
614
  function_uid=function_uid,
599
- model_name=model.model_spec.metadata.key,
600
- model_uid=model.model_spec.metadata.uid,
615
+ model_name=model_name,
616
+ model_tag=model_tag,
617
+ model_db_key=model_db_key,
618
+ model_uid=model_uid,
601
619
  model_class=model.__class__.__name__,
602
- model_tag=model.model_spec.tag,
603
620
  )
604
621
  model_ep = mlrun.common.schemas.ModelEndpoint(
605
622
  metadata=mlrun.common.schemas.ModelEndpointMetadata(
606
623
  project=graph_server.project,
607
- labels=model.model_spec.labels,
624
+ labels=model_labels,
608
625
  name=model.name,
609
626
  endpoint_type=mlrun.common.schemas.model_monitoring.EndpointType.NODE_EP,
610
627
  ),
@@ -612,9 +629,11 @@ def _init_endpoint_record(
612
629
  function_name=graph_server.function_name,
613
630
  function_uid=function_uid,
614
631
  function_tag=graph_server.function_tag or "latest",
615
- model_name=model.model_spec.metadata.key,
616
- model_uid=model.model_spec.metadata.uid,
632
+ model_name=model_name,
633
+ model_db_key=model_db_key,
634
+ model_uid=model_uid,
617
635
  model_class=model.__class__.__name__,
636
+ model_tag=model_tag,
618
637
  ),
619
638
  status=mlrun.common.schemas.ModelEndpointStatus(
620
639
  monitoring_mode=mlrun.common.schemas.model_monitoring.ModelMonitoringMode.enabled
@@ -623,16 +642,22 @@ def _init_endpoint_record(
623
642
  ),
624
643
  )
625
644
  db = mlrun.get_run_db()
626
- db.create_model_endpoint(model_endpoint=model_ep)
645
+ model_ep = db.create_model_endpoint(model_endpoint=model_ep)
627
646
 
628
647
  elif model_ep:
629
648
  attributes = {}
630
649
  if function_uid != model_ep.spec.function_uid:
631
650
  attributes[ModelEndpointSchema.FUNCTION_UID] = function_uid
632
- if model.model_spec.metadata.key != model_ep.spec.model_name:
633
- attributes[ModelEndpointSchema.MODEL_NAME] = model.model_spec.metadata.key
634
- if model.model_spec.metadata.uid != model_ep.spec.model_uid:
635
- attributes[ModelEndpointSchema.MODEL_UID] = model.model_spec.metadata.uid
651
+ if model_name != model_ep.spec.model_name:
652
+ attributes[ModelEndpointSchema.MODEL_NAME] = model_name
653
+ if model_uid != model_ep.spec.model_uid:
654
+ attributes[ModelEndpointSchema.MODEL_UID] = model_uid
655
+ if model_tag != model_ep.spec.model_tag:
656
+ attributes[ModelEndpointSchema.MODEL_TAG] = model_tag
657
+ if model_db_key != model_ep.spec.model_db_key:
658
+ attributes[ModelEndpointSchema.MODEL_DB_KEY] = model_db_key
659
+ if model_labels != model_ep.metadata.labels:
660
+ attributes[ModelEndpointSchema.LABELS] = model_labels
636
661
  if model.__class__.__name__ != model_ep.spec.model_class:
637
662
  attributes[ModelEndpointSchema.MODEL_CLASS] = model.__class__.__name__
638
663
  if (
@@ -656,7 +681,6 @@ def _init_endpoint_record(
656
681
  model_ep = db.patch_model_endpoint(
657
682
  project=model_ep.metadata.project,
658
683
  name=model_ep.metadata.name,
659
- function_name=model_ep.spec.function_name,
660
684
  endpoint_id=model_ep.metadata.uid,
661
685
  attributes=attributes,
662
686
  )
mlrun/utils/helpers.py CHANGED
@@ -670,8 +670,8 @@ def dict_to_json(struct):
670
670
 
671
671
  def parse_artifact_uri(uri, default_project=""):
672
672
  """
673
- Parse artifact URI into project, key, tag, iter, tree
674
- URI format: [<project>/]<key>[#<iter>][:<tag>][@<tree>]
673
+ Parse artifact URI into project, key, tag, iter, tree, uid
674
+ URI format: [<project>/]<key>[#<iter>][:<tag>][@<tree>][^<uid>]
675
675
 
676
676
  :param uri: uri to parse
677
677
  :param default_project: default project name if not in URI
@@ -681,6 +681,7 @@ def parse_artifact_uri(uri, default_project=""):
681
681
  [2] = iteration
682
682
  [3] = tag
683
683
  [4] = tree
684
+ [5] = uid
684
685
  """
685
686
  uri_pattern = mlrun.utils.regex.artifact_uri_pattern
686
687
  match = re.match(uri_pattern, uri)
@@ -705,6 +706,7 @@ def parse_artifact_uri(uri, default_project=""):
705
706
  iteration,
706
707
  group_dict["tag"],
707
708
  group_dict["tree"],
709
+ group_dict["uid"],
708
710
  )
709
711
 
710
712
 
@@ -719,7 +721,9 @@ def generate_object_uri(project, name, tag=None, hash_key=None):
719
721
  return uri
720
722
 
721
723
 
722
- def generate_artifact_uri(project, key, tag=None, iter=None, tree=None):
724
+ def generate_artifact_uri(
725
+ project, key, tag=None, iter=None, tree=None, uid=None
726
+ ) -> str:
723
727
  artifact_uri = f"{project}/{key}"
724
728
  if iter is not None:
725
729
  artifact_uri = f"{artifact_uri}#{iter}"
@@ -727,6 +731,8 @@ def generate_artifact_uri(project, key, tag=None, iter=None, tree=None):
727
731
  artifact_uri = f"{artifact_uri}:{tag}"
728
732
  if tree is not None:
729
733
  artifact_uri = f"{artifact_uri}@{tree}"
734
+ if uid is not None:
735
+ artifact_uri = f"{artifact_uri}^{uid}"
730
736
  return artifact_uri
731
737
 
732
738
 
mlrun/utils/regex.py CHANGED
@@ -96,7 +96,14 @@ v3io_stream_consumer_group = [r"^(?!_)[a-zA-Z0-9_]{1,256}$"]
96
96
  # URI patterns
97
97
  run_uri_pattern = r"^(?P<project>.*)@(?P<uid>.*)\#(?P<iteration>.*?)(:(?P<tag>.*))?$"
98
98
 
99
- artifact_uri_pattern = r"^((?P<project>.*)/)?(?P<key>.*?)(\#(?P<iteration>.*?))?(:(?P<tag>.*?))?(@(?P<tree>.*))?$"
99
+ artifact_uri_pattern = (
100
+ r"^((?P<project>.*)/)?" # Optional project
101
+ r"(?P<key>.*?)" # Key
102
+ r"(\#(?P<iteration>.*?))?" # Optional iteration
103
+ r"(:(?P<tag>.*?))?" # Optional tag
104
+ r"(@(?P<tree>.*?))?" # Optional tree
105
+ r"(\^(?P<uid>.*))?$" # Optional uid
106
+ )
100
107
 
101
108
  artifact_producer_uri_pattern = (
102
109
  r"^((?P<project>.*)/)?(?P<uid>.*?)(\-(?P<iteration>.*?))?$"
@@ -1,4 +1,4 @@
1
1
  {
2
- "git_commit": "052529570e5bbae67e99d98a89713d70c6751607",
3
- "version": "1.8.0-rc7"
2
+ "git_commit": "9e2d1e195daed90072d1cd33a5eee339577dc35a",
3
+ "version": "1.8.0-rc10"
4
4
  }
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: mlrun
3
- Version: 1.8.0rc7
3
+ Version: 1.8.0rc10
4
4
  Summary: Tracking and config of machine learning runs
5
5
  Home-page: https://github.com/mlrun/mlrun
6
6
  Author: Yaron Haviv