mlrun 1.7.0rc22__py3-none-any.whl → 1.7.0rc28__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (81)
  1. mlrun/__main__.py +10 -8
  2. mlrun/alerts/alert.py +13 -1
  3. mlrun/artifacts/manager.py +5 -0
  4. mlrun/common/constants.py +2 -2
  5. mlrun/common/formatters/__init__.py +1 -0
  6. mlrun/common/formatters/artifact.py +26 -3
  7. mlrun/common/formatters/base.py +9 -9
  8. mlrun/common/formatters/run.py +26 -0
  9. mlrun/common/helpers.py +11 -0
  10. mlrun/common/schemas/__init__.py +4 -0
  11. mlrun/common/schemas/alert.py +5 -9
  12. mlrun/common/schemas/api_gateway.py +64 -16
  13. mlrun/common/schemas/artifact.py +11 -0
  14. mlrun/common/schemas/constants.py +3 -0
  15. mlrun/common/schemas/feature_store.py +58 -28
  16. mlrun/common/schemas/model_monitoring/constants.py +21 -12
  17. mlrun/common/schemas/model_monitoring/model_endpoints.py +0 -12
  18. mlrun/common/schemas/pipeline.py +16 -0
  19. mlrun/common/schemas/project.py +17 -0
  20. mlrun/common/schemas/runs.py +17 -0
  21. mlrun/common/schemas/schedule.py +1 -1
  22. mlrun/common/types.py +5 -0
  23. mlrun/config.py +10 -25
  24. mlrun/datastore/azure_blob.py +2 -1
  25. mlrun/datastore/datastore.py +3 -3
  26. mlrun/datastore/google_cloud_storage.py +6 -2
  27. mlrun/datastore/snowflake_utils.py +3 -1
  28. mlrun/datastore/sources.py +26 -11
  29. mlrun/datastore/store_resources.py +2 -0
  30. mlrun/datastore/targets.py +68 -16
  31. mlrun/db/base.py +64 -2
  32. mlrun/db/httpdb.py +129 -41
  33. mlrun/db/nopdb.py +44 -3
  34. mlrun/errors.py +5 -3
  35. mlrun/execution.py +18 -10
  36. mlrun/feature_store/retrieval/spark_merger.py +2 -1
  37. mlrun/frameworks/__init__.py +0 -6
  38. mlrun/model.py +23 -0
  39. mlrun/model_monitoring/api.py +6 -52
  40. mlrun/model_monitoring/applications/histogram_data_drift.py +1 -1
  41. mlrun/model_monitoring/db/stores/__init__.py +37 -24
  42. mlrun/model_monitoring/db/stores/base/store.py +40 -1
  43. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +42 -87
  44. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +27 -35
  45. mlrun/model_monitoring/db/tsdb/__init__.py +15 -15
  46. mlrun/model_monitoring/db/tsdb/base.py +1 -1
  47. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +6 -4
  48. mlrun/model_monitoring/helpers.py +17 -9
  49. mlrun/model_monitoring/stream_processing.py +9 -11
  50. mlrun/model_monitoring/writer.py +11 -11
  51. mlrun/package/__init__.py +1 -13
  52. mlrun/package/packagers/__init__.py +1 -6
  53. mlrun/projects/pipelines.py +10 -9
  54. mlrun/projects/project.py +95 -81
  55. mlrun/render.py +10 -5
  56. mlrun/run.py +13 -8
  57. mlrun/runtimes/base.py +11 -4
  58. mlrun/runtimes/daskjob.py +7 -1
  59. mlrun/runtimes/local.py +16 -3
  60. mlrun/runtimes/nuclio/application/application.py +0 -2
  61. mlrun/runtimes/nuclio/function.py +20 -0
  62. mlrun/runtimes/nuclio/serving.py +9 -6
  63. mlrun/runtimes/pod.py +5 -29
  64. mlrun/serving/routers.py +75 -59
  65. mlrun/serving/server.py +11 -0
  66. mlrun/serving/states.py +29 -0
  67. mlrun/serving/v2_serving.py +62 -39
  68. mlrun/utils/helpers.py +39 -1
  69. mlrun/utils/logger.py +36 -2
  70. mlrun/utils/notifications/notification/base.py +43 -7
  71. mlrun/utils/notifications/notification/git.py +21 -0
  72. mlrun/utils/notifications/notification/slack.py +9 -14
  73. mlrun/utils/notifications/notification/webhook.py +41 -1
  74. mlrun/utils/notifications/notification_pusher.py +3 -9
  75. mlrun/utils/version/version.json +2 -2
  76. {mlrun-1.7.0rc22.dist-info → mlrun-1.7.0rc28.dist-info}/METADATA +12 -7
  77. {mlrun-1.7.0rc22.dist-info → mlrun-1.7.0rc28.dist-info}/RECORD +81 -80
  78. {mlrun-1.7.0rc22.dist-info → mlrun-1.7.0rc28.dist-info}/WHEEL +1 -1
  79. {mlrun-1.7.0rc22.dist-info → mlrun-1.7.0rc28.dist-info}/LICENSE +0 -0
  80. {mlrun-1.7.0rc22.dist-info → mlrun-1.7.0rc28.dist-info}/entry_points.txt +0 -0
  81. {mlrun-1.7.0rc22.dist-info → mlrun-1.7.0rc28.dist-info}/top_level.txt +0 -0
mlrun/db/base.py CHANGED
@@ -16,6 +16,8 @@ import datetime
 from abc import ABC, abstractmethod
 from typing import Optional, Union
 
+from deprecated import deprecated
+
 import mlrun.alerts
 import mlrun.common
 import mlrun.common.formatters
@@ -56,7 +58,13 @@ class RunDBInterface(ABC):
         pass
 
     @abstractmethod
-    def read_run(self, uid, project="", iter=0):
+    def read_run(
+        self,
+        uid: str,
+        project: str = "",
+        iter: int = 0,
+        format_: mlrun.common.formatters.RunFormat = mlrun.common.formatters.RunFormat.full,
+    ):
         pass
 
     @abstractmethod
@@ -103,7 +111,16 @@ class RunDBInterface(ABC):
         pass
 
     @abstractmethod
-    def read_artifact(self, key, tag="", iter=None, project="", tree=None, uid=None):
+    def read_artifact(
+        self,
+        key,
+        tag="",
+        iter=None,
+        project="",
+        tree=None,
+        uid=None,
+        format_: mlrun.common.formatters.ArtifactFormat = mlrun.common.formatters.ArtifactFormat.full,
+    ):
         pass
 
     @abstractmethod
@@ -120,6 +137,8 @@ class RunDBInterface(ABC):
         kind: str = None,
         category: Union[str, mlrun.common.schemas.ArtifactCategories] = None,
         tree: str = None,
+        format_: mlrun.common.formatters.ArtifactFormat = mlrun.common.formatters.ArtifactFormat.full,
+        limit: int = None,
     ):
         pass
 
@@ -302,6 +321,12 @@ class RunDBInterface(ABC):
     ) -> dict:
         pass
 
+    # TODO: remove in 1.9.0
+    @deprecated(
+        version="1.9.0",
+        reason="'list_features' will be removed in 1.9.0, use 'list_features_v2' instead",
+        category=FutureWarning,
+    )
     @abstractmethod
     def list_features(
         self,
@@ -313,6 +338,23 @@ class RunDBInterface(ABC):
     ) -> mlrun.common.schemas.FeaturesOutput:
         pass
 
+    @abstractmethod
+    def list_features_v2(
+        self,
+        project: str,
+        name: str = None,
+        tag: str = None,
+        entities: list[str] = None,
+        labels: list[str] = None,
+    ) -> mlrun.common.schemas.FeaturesOutputV2:
+        pass
+
+    # TODO: remove in 1.9.0
+    @deprecated(
+        version="1.9.0",
+        reason="'list_entities' will be removed in 1.9.0, use 'list_entities_v2' instead",
+        category=FutureWarning,
+    )
     @abstractmethod
     def list_entities(
         self,
@@ -323,6 +365,16 @@ class RunDBInterface(ABC):
     ) -> mlrun.common.schemas.EntitiesOutput:
         pass
 
+    @abstractmethod
+    def list_entities_v2(
+        self,
+        project: str,
+        name: str = None,
+        tag: str = None,
+        labels: list[str] = None,
+    ) -> mlrun.common.schemas.EntitiesOutputV2:
+        pass
+
     @abstractmethod
     def list_feature_sets(
         self,
@@ -838,6 +890,8 @@ class RunDBInterface(ABC):
         base_period: int = 10,
         image: str = "mlrun/mlrun",
         deploy_histogram_data_drift_app: bool = True,
+        rebuild_images: bool = False,
+        fetch_credentials_from_sys_config: bool = False,
     ) -> None:
         pass
 
@@ -864,3 +918,11 @@ class RunDBInterface(ABC):
         self, project: str, image: str = "mlrun/mlrun"
     ) -> None:
        pass
+
+    @abstractmethod
+    def set_model_monitoring_credentials(
+        self,
+        project: str,
+        credentials: dict[str, str],
+    ) -> None:
+        pass
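
The v1 feature/entity listing methods are now wrapped with the deprecated decorator using category=FutureWarning, so callers get a runtime warning pointing at the v2 replacements. A minimal sketch of how that decorator behaves, assuming only the deprecated package; the legacy_list/new_list names are hypothetical and not part of mlrun:

import warnings

from deprecated import deprecated  # same decorator the interface imports


@deprecated(
    version="1.9.0",
    reason="'legacy_list' will be removed in 1.9.0, use 'new_list' instead",
    category=FutureWarning,
)
def legacy_list() -> list:
    return []


def new_list() -> list:
    return []


with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    legacy_list()  # emits a FutureWarning, then runs the body as usual
assert issubclass(caught[-1].category, FutureWarning)
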
mlrun/db/httpdb.py CHANGED
@@ -38,6 +38,7 @@ import mlrun.model_monitoring.model_endpoint
 import mlrun.platforms
 import mlrun.projects
 import mlrun.runtimes.nuclio.api_gateway
+import mlrun.runtimes.nuclio.function
 import mlrun.utils
 from mlrun.alerts.alert import AlertConfig
 from mlrun.db.auth_utils import OAuthClientIDTokenProvider, StaticTokenProvider
@@ -536,6 +537,10 @@ class HTTPRunDB(RunDBInterface):
             server_cfg.get("model_monitoring_tsdb_connection")
             or config.model_endpoint_monitoring.tsdb_connection
         )
+        config.model_endpoint_monitoring.stream_connection = (
+            server_cfg.get("stream_connection")
+            or config.model_endpoint_monitoring.stream_connection
+        )
         config.packagers = server_cfg.get("packagers") or config.packagers
         server_data_prefixes = server_cfg.get("feature_store_data_prefixes") or {}
         for prefix in ["default", "nosql", "redisnosql"]:
@@ -725,16 +730,26 @@ class HTTPRunDB(RunDBInterface):
             )
             return None
 
-    def read_run(self, uid, project="", iter=0):
+    def read_run(
+        self,
+        uid,
+        project="",
+        iter=0,
+        format_: mlrun.common.formatters.RunFormat = mlrun.common.formatters.RunFormat.full,
+    ):
         """Read the details of a stored run from the DB.
 
-        :param uid: The run's unique ID.
-        :param project: Project name.
-        :param iter: Iteration within a specific execution.
+        :param uid:     The run's unique ID.
+        :param project: Project name.
+        :param iter:    Iteration within a specific execution.
+        :param format_: The format in which to return the run details.
         """
 
         path = self._path_of("runs", project, uid)
-        params = {"iter": iter}
+        params = {
+            "iter": iter,
+            "format": format_.value,
+        }
         error = f"get run {project}/{uid}"
         resp = self.api_call("GET", path, error, params=params)
         return resp.json()["data"]
@@ -860,7 +875,7 @@ class HTTPRunDB(RunDBInterface):
         ):
             # default to last week on no filter
             start_time_from = datetime.now() - timedelta(days=7)
-            partition_by = mlrun.common.schemas.RunPartitionByField.name
+            partition_by = mlrun.common.schemas.RunPartitionByField.project_and_name
             partition_sort_by = mlrun.common.schemas.SortField.updated
 
         params = {
@@ -953,7 +968,7 @@ class HTTPRunDB(RunDBInterface):
 
         # we do this because previously the 'uid' name was used for the 'tree' parameter
         tree = tree or uid
-
+        project = project or mlrun.mlconf.default_project
         endpoint_path = f"projects/{project}/artifacts/{key}"
 
         error = f"store artifact {project}/{key}"
@@ -979,6 +994,7 @@ class HTTPRunDB(RunDBInterface):
         project="",
         tree=None,
         uid=None,
+        format_: mlrun.common.formatters.ArtifactFormat = mlrun.common.formatters.ArtifactFormat.full,
     ):
         """Read an artifact, identified by its key, tag, tree and iteration.
 
@@ -988,20 +1004,20 @@ class HTTPRunDB(RunDBInterface):
         :param project: Project that the artifact belongs to.
         :param tree: The tree which generated this artifact.
         :param uid: A unique ID for this specific version of the artifact (the uid that was generated in the backend)
+        :param format_: The format in which to return the artifact. Default is 'full'.
         """
 
-        project = project or config.default_project
+        project = project or mlrun.mlconf.default_project
         tag = tag or "latest"
         endpoint_path = f"projects/{project}/artifacts/{key}"
         error = f"read artifact {project}/{key}"
-        # explicitly set artifacts format to 'full' since old servers may default to 'legacy'
         params = {
-            "format": mlrun.common.formatters.ArtifactFormat.full.value,
+            "format": format_,
            "tag": tag,
            "tree": tree,
            "uid": uid,
        }
-        if iter:
+        if iter is not None:
            params["iter"] = str(iter)
        resp = self.api_call("GET", endpoint_path, error, params=params, version="v2")
        return resp.json()
@@ -1028,7 +1044,7 @@ class HTTPRunDB(RunDBInterface):
         :param deletion_strategy: The artifact deletion strategy types.
         :param secrets: Credentials needed to access the artifact data.
         """
-
+        project = project or mlrun.mlconf.default_project
         endpoint_path = f"projects/{project}/artifacts/{key}"
         params = {
             "key": key,
@@ -1061,6 +1077,8 @@ class HTTPRunDB(RunDBInterface):
         category: Union[str, mlrun.common.schemas.ArtifactCategories] = None,
         tree: str = None,
         producer_uri: str = None,
+        format_: mlrun.common.formatters.ArtifactFormat = mlrun.common.formatters.ArtifactFormat.full,
+        limit: int = None,
     ) -> ArtifactList:
         """List artifacts filtered by various parameters.
 
@@ -1095,6 +1113,8 @@ class HTTPRunDB(RunDBInterface):
         :param producer_uri: Return artifacts produced by the requested producer URI. Producer URI usually
             points to a run and is used to filter artifacts by the run that produced them when the artifact producer id
             is a workflow id (artifact was created as part of a workflow).
+        :param format_: The format in which to return the artifacts. Default is 'full'.
+        :param limit: Maximum number of artifacts to return.
         """
 
         project = project or config.default_project
@@ -1112,8 +1132,9 @@ class HTTPRunDB(RunDBInterface):
             "kind": kind,
             "category": category,
             "tree": tree,
-            "format": mlrun.common.formatters.ArtifactFormat.full.value,
+            "format": format_,
             "producer_uri": producer_uri,
+            "limit": limit,
         }
         error = "list artifacts"
         endpoint_path = f"projects/{project}/artifacts"
@@ -1594,20 +1615,11 @@ class HTTPRunDB(RunDBInterface):
             raise RunDBError("bad function build response")
 
         if resp.headers:
-            func.status.state = resp.headers.get("x-mlrun-function-status", "")
             last_log_timestamp = float(
                 resp.headers.get("x-mlrun-last-timestamp", "0.0")
             )
-            func.status.address = resp.headers.get("x-mlrun-address", "")
-            func.status.nuclio_name = resp.headers.get("x-mlrun-name", "")
-            func.status.internal_invocation_urls = resp.headers.get(
-                "x-mlrun-internal-invocation-urls", ""
-            ).split(",")
-            func.status.external_invocation_urls = resp.headers.get(
-                "x-mlrun-external-invocation-urls", ""
-            ).split(",")
-            func.status.container_image = resp.headers.get(
-                "x-mlrun-container-image", ""
+            mlrun.runtimes.nuclio.function.enrich_nuclio_function_from_headers(
+                func, resp.headers
             )
 
         text = ""
@@ -1665,16 +1677,8 @@ class HTTPRunDB(RunDBInterface):
                 resp.headers.get("x-mlrun-last-timestamp", "0.0")
             )
             if func.kind in mlrun.runtimes.RuntimeKinds.nuclio_runtimes():
-                func.status.address = resp.headers.get("x-mlrun-address", "")
-                func.status.nuclio_name = resp.headers.get("x-mlrun-name", "")
-                func.status.internal_invocation_urls = resp.headers.get(
-                    "x-mlrun-internal-invocation-urls", ""
-                ).split(",")
-                func.status.external_invocation_urls = resp.headers.get(
-                    "x-mlrun-external-invocation-urls", ""
-                ).split(",")
-                func.status.container_image = resp.headers.get(
-                    "x-mlrun-container-image", ""
+                mlrun.runtimes.nuclio.function.enrich_nuclio_function_from_headers(
+                    func, resp.headers
                 )
 
             builder_pod = resp.headers.get("builder_pod", "")
@@ -2110,6 +2114,41 @@ class HTTPRunDB(RunDBInterface):
         resp = self.api_call("GET", path, error_message, params=params)
         return resp.json()["features"]
 
+    def list_features_v2(
+        self,
+        project: str,
+        name: str = None,
+        tag: str = None,
+        entities: list[str] = None,
+        labels: list[str] = None,
+    ) -> dict[str, list[dict]]:
+        """List feature-sets which contain specific features. This function may return multiple versions of the same
+        feature-set if a specific tag is not requested. Note that the various filters of this function actually
+        refer to the feature-set object containing the features, not to the features themselves.
+
+        :param project: Project which contains these features.
+        :param name: Name of the feature to look for. The name is used in a like query, and is not case-sensitive. For
+            example, looking for ``feat`` will return features which are named ``MyFeature`` as well as ``defeat``.
+        :param tag: Return feature-sets which contain the features looked for, and are tagged with the specific tag.
+        :param entities: Return only feature-sets which contain an entity whose name is contained in this list.
+        :param labels: Return only feature-sets which are labeled as requested.
+        :returns: A list of features, and a list of their corresponding feature sets.
+        """
+
+        project = project or config.default_project
+        params = {
+            "name": name,
+            "tag": tag,
+            "entity": entities or [],
+            "label": labels or [],
+        }
+
+        path = f"projects/{project}/features"
+
+        error_message = f"Failed listing features, project: {project}, query: {params}"
+        resp = self.api_call("GET", path, error_message, params=params, version="v2")
+        return resp.json()
+
     def list_entities(
         self,
         project: str,
@@ -2135,6 +2174,31 @@ class HTTPRunDB(RunDBInterface):
         resp = self.api_call("GET", path, error_message, params=params)
         return resp.json()["entities"]
 
+    def list_entities_v2(
+        self,
+        project: str,
+        name: str = None,
+        tag: str = None,
+        labels: list[str] = None,
+    ) -> dict[str, list[dict]]:
+        """Retrieve a list of entities and their mapping to the containing feature-sets. This function is similar
+        to the :py:func:`~list_features_v2` function, and uses the same logic. However, the entities are matched
+        against the name rather than the features.
+        """
+
+        project = project or config.default_project
+        params = {
+            "name": name,
+            "tag": tag,
+            "label": labels or [],
+        }
+
+        path = f"projects/{project}/entities"
+
+        error_message = f"Failed listing entities, project: {project}, query: {params}"
+        resp = self.api_call("GET", path, error_message, params=params, version="v2")
+        return resp.json()
+
     @staticmethod
     def _generate_partition_by_params(
         partition_by_cls,
@@ -3320,6 +3384,8 @@ class HTTPRunDB(RunDBInterface):
         base_period: int = 10,
         image: str = "mlrun/mlrun",
         deploy_histogram_data_drift_app: bool = True,
+        rebuild_images: bool = False,
+        fetch_credentials_from_sys_config: bool = False,
     ) -> None:
         """
         Deploy model monitoring application controller, writer and stream functions.
@@ -3329,13 +3395,16 @@ class HTTPRunDB(RunDBInterface):
         The stream function goal is to monitor the log of the data stream. It is triggered when a new log entry
         is detected. It processes the new events into statistics that are then written to statistics databases.
 
-        :param project: Project name.
-        :param base_period: The time period in minutes in which the model monitoring controller function
-            triggers. By default, the base period is 10 minutes.
-        :param image: The image of the model monitoring controller, writer & monitoring
-            stream functions, which are real time nuclio functions.
-            By default, the image is mlrun/mlrun.
-        :param deploy_histogram_data_drift_app: If true, deploy the default histogram-based data drift application.
+        :param project:                           Project name.
+        :param base_period:                       The time period in minutes in which the model monitoring controller
+                                                  function triggers. By default, the base period is 10 minutes.
+        :param image:                             The image of the model monitoring controller, writer & monitoring
+                                                  stream functions, which are real time nuclio functions.
+                                                  By default, the image is mlrun/mlrun.
+        :param deploy_histogram_data_drift_app:   If true, deploy the default histogram-based data drift application.
+        :param rebuild_images:                    If true, force rebuild of model monitoring infrastructure images.
+        :param fetch_credentials_from_sys_config: If true, fetch the credentials from the system configuration.
+
         """
         self.api_call(
             method=mlrun.common.types.HTTPMethod.POST,
@@ -3344,6 +3413,8 @@ class HTTPRunDB(RunDBInterface):
                 "base_period": base_period,
                 "image": image,
                 "deploy_histogram_data_drift_app": deploy_histogram_data_drift_app,
+                "rebuild_images": rebuild_images,
+                "fetch_credentials_from_sys_config": fetch_credentials_from_sys_config,
             },
         )
 
@@ -3469,6 +3540,23 @@ class HTTPRunDB(RunDBInterface):
             params={"image": image},
         )
 
+    def set_model_monitoring_credentials(
+        self,
+        project: str,
+        credentials: dict[str, str],
+    ) -> None:
+        """
+        Set the credentials for the model monitoring application.
+
+        :param project:     Project name.
+        :param credentials: Credentials to set.
+        """
+        self.api_call(
+            method=mlrun.common.types.HTTPMethod.POST,
+            path=f"projects/{project}/model-monitoring/set-model-monitoring-credentials",
+            params={**credentials},
+        )
+
     def create_hub_source(
         self, source: Union[dict, mlrun.common.schemas.IndexedHubSource]
     ):
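
Taken together, the client gains a few new knobs: an explicit format_ on read_run/read_artifact/list_artifacts, a limit on artifact listing, the v2 feature/entity listing endpoints, and a call that pushes model monitoring credentials. A hedged usage sketch against a running MLRun API; the project name, run UID, and credential keys below are illustrative, not values mandated by mlrun:

import mlrun
import mlrun.common.formatters as formatters

db = mlrun.get_run_db()  # HTTPRunDB when MLRUN_DBPATH points at an API server

# read a run and list artifacts with an explicit format and a result cap
run = db.read_run("run-uid", project="my-project", format_=formatters.RunFormat.full)
artifacts = db.list_artifacts(
    project="my-project",
    format_=formatters.ArtifactFormat.full,
    limit=10,
)

# v2 feature-store listings return the raw response body as a dict
features = db.list_features_v2("my-project", name="feat")
entities = db.list_entities_v2("my-project", tag="latest")

# push monitoring credentials explicitly; keys are forwarded as query params
db.set_model_monitoring_credentials(
    "my-project",
    credentials={"tsdb_connection": "v3io"},  # illustrative key/value
)
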
mlrun/db/nopdb.py CHANGED
@@ -73,7 +73,13 @@ class NopDB(RunDBInterface):
     def abort_run(self, uid, project="", iter=0, timeout=45, status_text=""):
         pass
 
-    def read_run(self, uid, project="", iter=0):
+    def read_run(
+        self,
+        uid,
+        project="",
+        iter=0,
+        format_: mlrun.common.formatters.RunFormat = mlrun.common.formatters.RunFormat.full,
+    ):
         pass
 
     def list_runs(
@@ -115,7 +121,16 @@ class NopDB(RunDBInterface):
     ):
         pass
 
-    def read_artifact(self, key, tag="", iter=None, project="", tree=None, uid=None):
+    def read_artifact(
+        self,
+        key,
+        tag="",
+        iter=None,
+        project="",
+        tree=None,
+        uid=None,
+        format_: mlrun.common.formatters.ArtifactFormat = mlrun.common.formatters.ArtifactFormat.full,
+    ):
         pass
 
     def list_artifacts(
@@ -131,6 +146,8 @@ class NopDB(RunDBInterface):
         kind: str = None,
         category: Union[str, mlrun.common.schemas.ArtifactCategories] = None,
         tree: str = None,
+        format_: mlrun.common.formatters.ArtifactFormat = mlrun.common.formatters.ArtifactFormat.full,
+        limit: int = None,
     ):
         pass
 
@@ -252,11 +269,26 @@ class NopDB(RunDBInterface):
     ) -> mlrun.common.schemas.FeaturesOutput:
         pass
 
+    def list_features_v2(
+        self,
+        project: str,
+        name: str = None,
+        tag: str = None,
+        entities: list[str] = None,
+        labels: list[str] = None,
+    ) -> mlrun.common.schemas.FeaturesOutputV2:
+        pass
+
     def list_entities(
         self, project: str, name: str = None, tag: str = None, labels: list[str] = None
     ) -> mlrun.common.schemas.EntitiesOutput:
         pass
 
+    def list_entities_v2(
+        self, project: str, name: str = None, tag: str = None, labels: list[str] = None
+    ) -> mlrun.common.schemas.EntitiesOutputV2:
+        pass
+
     def list_feature_sets(
         self,
         project: str = "",
@@ -675,6 +707,8 @@ class NopDB(RunDBInterface):
         base_period: int = 10,
         image: str = "mlrun/mlrun",
         deploy_histogram_data_drift_app: bool = True,
+        rebuild_images: bool = False,
+        fetch_credentials_from_sys_config: bool = False,
     ) -> None:
         pass
 
@@ -697,7 +731,14 @@ class NopDB(RunDBInterface):
     def deploy_histogram_data_drift_app(
         self, project: str, image: str = "mlrun/mlrun"
     ) -> None:
-        raise NotImplementedError
+        pass
+
+    def set_model_monitoring_credentials(
+        self,
+        project: str,
+        credentials: dict[str, str],
+    ) -> None:
+        pass
 
     def generate_event(
         self, name: str, event_data: Union[dict, mlrun.common.schemas.Event], project=""
mlrun/errors.py CHANGED
@@ -92,9 +92,7 @@ def raise_for_status(
     try:
         response.raise_for_status()
     except (requests.HTTPError, aiohttp.ClientResponseError) as exc:
-        error_message = err_to_str(exc)
-        if message:
-            error_message = f"{error_message}: {message}"
+        error_message = err_to_str(exc) if not message else message
         status_code = (
             response.status_code
             if hasattr(response, "status_code")
@@ -207,6 +205,10 @@ class MLRunTimeoutError(MLRunHTTPStatusError, TimeoutError):
     error_status_code = HTTPStatus.GATEWAY_TIMEOUT.value
 
 
+class MLRunInvalidMMStoreType(MLRunHTTPStatusError, ValueError):
+    error_status_code = HTTPStatus.BAD_REQUEST.value
+
+
 class MLRunRetryExhaustedError(Exception):
     pass
 
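The raise_for_status change swaps how a caller-supplied message interacts with the underlying HTTP error: it used to be appended after the exception text, now it replaces it entirely. A standalone sketch of the new selection logic (not the mlrun function itself, just the expression it now uses):

def build_error_message(exc: Exception, message: str = "") -> str:
    # new behavior: an explicit message wins outright instead of being
    # appended to the stringified HTTP error
    return str(exc) if not message else message


assert build_error_message(ValueError("500 Server Error")) == "500 Server Error"
assert build_error_message(ValueError("500 Server Error"), "failed to store run") == "failed to store run"
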
mlrun/execution.py CHANGED
@@ -34,13 +34,13 @@ from .features import Feature
 from .model import HyperParamOptions
 from .secrets import SecretsStore
 from .utils import (
+    RunKeys,
     dict_to_json,
     dict_to_yaml,
     get_in,
     is_relative_path,
     logger,
     now_date,
-    run_keys,
     to_date_str,
     update_in,
 )
@@ -85,6 +85,7 @@ class MLClientCtx:
 
         self._labels = {}
         self._annotations = {}
+        self._node_selector = {}
 
         self._function = ""
         self._parameters = {}
@@ -207,6 +208,11 @@ class MLClientCtx:
         """Dictionary with labels (read-only)"""
         return deepcopy(self._labels)
 
+    @property
+    def node_selector(self):
+        """Dictionary with node selectors (read-only)"""
+        return deepcopy(self._node_selector)
+
     @property
     def annotations(self):
         """Dictionary with annotations (read-only)"""
@@ -365,7 +371,7 @@ class MLClientCtx:
         self._labels = meta.get("labels", self._labels)
         spec = attrs.get("spec")
         if spec:
-            self._secrets_manager = SecretsStore.from_list(spec.get(run_keys.secrets))
+            self._secrets_manager = SecretsStore.from_list(spec.get(RunKeys.secrets))
             self._log_level = spec.get("log_level", self._log_level)
             self._function = spec.get("function", self._function)
             self._parameters = spec.get("parameters", self._parameters)
@@ -383,13 +389,14 @@ class MLClientCtx:
             self._allow_empty_resources = spec.get(
                 "allow_empty_resources", self._allow_empty_resources
             )
-            self.artifact_path = spec.get(run_keys.output_path, self.artifact_path)
-            self._in_path = spec.get(run_keys.input_path, self._in_path)
-            inputs = spec.get(run_keys.inputs)
+            self.artifact_path = spec.get(RunKeys.output_path, self.artifact_path)
+            self._in_path = spec.get(RunKeys.input_path, self._in_path)
+            inputs = spec.get(RunKeys.inputs)
             self._notifications = spec.get("notifications", self._notifications)
             self._state_thresholds = spec.get(
                 "state_thresholds", self._state_thresholds
             )
+            self._node_selector = spec.get("node_selector", self._node_selector)
             self._reset_on_run = spec.get("reset_on_run", self._reset_on_run)
 
         self._init_dbs(rundb)
@@ -567,7 +574,7 @@ class MLClientCtx:
             self._results["best_iteration"] = best
             for k, v in get_in(task, ["status", "results"], {}).items():
                 self._results[k] = v
-            for artifact in get_in(task, ["status", run_keys.artifacts], []):
+            for artifact in get_in(task, ["status", RunKeys.artifacts], []):
                 self._artifacts_manager.artifacts[artifact["metadata"]["key"]] = (
                     artifact
                 )
@@ -939,10 +946,11 @@ class MLClientCtx:
                 "parameters": self._parameters,
                 "handler": self._handler,
                 "outputs": self._outputs,
-                run_keys.output_path: self.artifact_path,
-                run_keys.inputs: self._inputs,
+                RunKeys.output_path: self.artifact_path,
+                RunKeys.inputs: self._inputs,
                 "notifications": self._notifications,
                 "state_thresholds": self._state_thresholds,
+                "node_selector": self._node_selector,
             },
             "status": {
                 "results": self._results,
@@ -964,7 +972,7 @@ class MLClientCtx:
         set_if_not_none(struct["status"], "commit", self._commit)
         set_if_not_none(struct["status"], "iterations", self._iteration_results)
 
-        struct["status"][run_keys.artifacts] = self._artifacts_manager.artifact_list()
+        struct["status"][RunKeys.artifacts] = self._artifacts_manager.artifact_list()
         self._data_stores.to_dict(struct["spec"])
         return struct
 
@@ -1058,7 +1066,7 @@ class MLClientCtx:
         set_if_not_none(struct, "status.commit", self._commit)
         set_if_not_none(struct, "status.iterations", self._iteration_results)
 
-        struct[f"status.{run_keys.artifacts}"] = self._artifacts_manager.artifact_list()
+        struct[f"status.{RunKeys.artifacts}"] = self._artifacts_manager.artifact_list()
         return struct
 
     def _init_dbs(self, rundb):
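
With the run spec's node_selector now propagated into the context, a handler can inspect (but not mutate) the selectors it was scheduled with, the same way labels already behaves. A small hedged sketch of handler-side access; the handler name and selector values are illustrative:

import mlrun


def handler(context: mlrun.MLClientCtx):
    # read-only copy: mutating the returned dict does not change the run spec
    selectors = context.node_selector
    context.logger.info("run node selectors", node_selector=selectors)
    return {"node_selector_keys": sorted(selectors)}
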
mlrun/feature_store/retrieval/spark_merger.py CHANGED
@@ -45,6 +45,7 @@ def spark_df_to_pandas(spark_df):
                 ),
             )
             type_conversion_dict[field.name] = "datetime64[ns]"
+
    df = PandasConversionMixin.toPandas(spark_df)
    if type_conversion_dict:
        df = df.astype(type_conversion_dict)
@@ -252,7 +253,7 @@ class SparkFeatureMerger(BaseMerger):
             )
             source_kind = target.kind
             source_path = target.get_target_path()
-
+            source_kwargs = target.source_spark_attributes
             # handling case where there are multiple feature sets and user creates vector where
             # entity_timestamp_column is from a specific feature set (can't be entity timestamp)
             source_driver = mlrun.datastore.sources.source_kind_to_driver[source_kind]
mlrun/frameworks/__init__.py CHANGED
@@ -12,11 +12,5 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-"""
-MLRun provides a quick and easy integration into your code with mlrun.frameworks: a collection of sub-modules
-for the most commonly used machine and deep learning frameworks, providing features such as automatic logging,
-model management, and distributed training.
-"""
-
 # flake8: noqa - this is until we take care of the F401 violations with respect to __all__ & sphinx
 from .parallel_coordinates import compare_db_runs, compare_run_objects