mlrun 1.7.0rc26__py3-none-any.whl → 1.7.0rc27__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mlrun might be problematic.

Files changed (48)
  1. mlrun/__main__.py +7 -7
  2. mlrun/alerts/alert.py +13 -1
  3. mlrun/artifacts/manager.py +5 -0
  4. mlrun/common/constants.py +2 -2
  5. mlrun/common/formatters/base.py +9 -9
  6. mlrun/common/schemas/alert.py +4 -8
  7. mlrun/common/schemas/api_gateway.py +7 -0
  8. mlrun/common/schemas/constants.py +3 -0
  9. mlrun/common/schemas/model_monitoring/constants.py +20 -9
  10. mlrun/config.py +6 -11
  11. mlrun/datastore/datastore.py +3 -3
  12. mlrun/datastore/snowflake_utils.py +3 -1
  13. mlrun/datastore/sources.py +23 -9
  14. mlrun/datastore/targets.py +27 -13
  15. mlrun/db/base.py +9 -0
  16. mlrun/db/httpdb.py +39 -30
  17. mlrun/db/nopdb.py +9 -1
  18. mlrun/execution.py +18 -10
  19. mlrun/feature_store/retrieval/spark_merger.py +2 -1
  20. mlrun/model.py +21 -0
  21. mlrun/model_monitoring/db/stores/__init__.py +5 -3
  22. mlrun/model_monitoring/db/stores/base/store.py +36 -1
  23. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +4 -38
  24. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +19 -27
  25. mlrun/model_monitoring/db/tsdb/__init__.py +4 -7
  26. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +4 -1
  27. mlrun/model_monitoring/helpers.py +9 -5
  28. mlrun/projects/project.py +63 -68
  29. mlrun/render.py +10 -5
  30. mlrun/run.py +2 -2
  31. mlrun/runtimes/nuclio/function.py +20 -0
  32. mlrun/runtimes/pod.py +5 -29
  33. mlrun/serving/routers.py +75 -59
  34. mlrun/serving/server.py +1 -0
  35. mlrun/serving/v2_serving.py +8 -1
  36. mlrun/utils/helpers.py +33 -1
  37. mlrun/utils/notifications/notification/base.py +4 -0
  38. mlrun/utils/notifications/notification/git.py +21 -0
  39. mlrun/utils/notifications/notification/slack.py +8 -0
  40. mlrun/utils/notifications/notification/webhook.py +29 -0
  41. mlrun/utils/notifications/notification_pusher.py +1 -1
  42. mlrun/utils/version/version.json +2 -2
  43. {mlrun-1.7.0rc26.dist-info → mlrun-1.7.0rc27.dist-info}/METADATA +4 -4
  44. {mlrun-1.7.0rc26.dist-info → mlrun-1.7.0rc27.dist-info}/RECORD +48 -48
  45. {mlrun-1.7.0rc26.dist-info → mlrun-1.7.0rc27.dist-info}/WHEEL +1 -1
  46. {mlrun-1.7.0rc26.dist-info → mlrun-1.7.0rc27.dist-info}/LICENSE +0 -0
  47. {mlrun-1.7.0rc26.dist-info → mlrun-1.7.0rc27.dist-info}/entry_points.txt +0 -0
  48. {mlrun-1.7.0rc26.dist-info → mlrun-1.7.0rc27.dist-info}/top_level.txt +0 -0
mlrun/execution.py CHANGED
@@ -34,13 +34,13 @@ from .features import Feature
 from .model import HyperParamOptions
 from .secrets import SecretsStore
 from .utils import (
+    RunKeys,
     dict_to_json,
     dict_to_yaml,
     get_in,
     is_relative_path,
     logger,
     now_date,
-    run_keys,
     to_date_str,
     update_in,
 )
@@ -85,6 +85,7 @@ class MLClientCtx:
 
         self._labels = {}
         self._annotations = {}
+        self._node_selector = {}
 
         self._function = ""
         self._parameters = {}
@@ -207,6 +208,11 @@ class MLClientCtx:
         """Dictionary with labels (read-only)"""
         return deepcopy(self._labels)
 
+    @property
+    def node_selector(self):
+        """Dictionary with node selectors (read-only)"""
+        return deepcopy(self._node_selector)
+
     @property
     def annotations(self):
         """Dictionary with annotations (read-only)"""
@@ -365,7 +371,7 @@ class MLClientCtx:
         self._labels = meta.get("labels", self._labels)
         spec = attrs.get("spec")
         if spec:
-            self._secrets_manager = SecretsStore.from_list(spec.get(run_keys.secrets))
+            self._secrets_manager = SecretsStore.from_list(spec.get(RunKeys.secrets))
             self._log_level = spec.get("log_level", self._log_level)
             self._function = spec.get("function", self._function)
             self._parameters = spec.get("parameters", self._parameters)
@@ -383,13 +389,14 @@ class MLClientCtx:
             self._allow_empty_resources = spec.get(
                 "allow_empty_resources", self._allow_empty_resources
            )
-            self.artifact_path = spec.get(run_keys.output_path, self.artifact_path)
-            self._in_path = spec.get(run_keys.input_path, self._in_path)
-            inputs = spec.get(run_keys.inputs)
+            self.artifact_path = spec.get(RunKeys.output_path, self.artifact_path)
+            self._in_path = spec.get(RunKeys.input_path, self._in_path)
+            inputs = spec.get(RunKeys.inputs)
             self._notifications = spec.get("notifications", self._notifications)
             self._state_thresholds = spec.get(
                 "state_thresholds", self._state_thresholds
             )
+            self._node_selector = spec.get("node_selector", self._node_selector)
             self._reset_on_run = spec.get("reset_on_run", self._reset_on_run)
 
         self._init_dbs(rundb)
@@ -567,7 +574,7 @@ class MLClientCtx:
             self._results["best_iteration"] = best
             for k, v in get_in(task, ["status", "results"], {}).items():
                 self._results[k] = v
-            for artifact in get_in(task, ["status", run_keys.artifacts], []):
+            for artifact in get_in(task, ["status", RunKeys.artifacts], []):
                 self._artifacts_manager.artifacts[artifact["metadata"]["key"]] = (
                     artifact
                 )
@@ -939,10 +946,11 @@ class MLClientCtx:
                 "parameters": self._parameters,
                 "handler": self._handler,
                 "outputs": self._outputs,
-                run_keys.output_path: self.artifact_path,
-                run_keys.inputs: self._inputs,
+                RunKeys.output_path: self.artifact_path,
+                RunKeys.inputs: self._inputs,
                 "notifications": self._notifications,
                 "state_thresholds": self._state_thresholds,
+                "node_selector": self._node_selector,
             },
             "status": {
                 "results": self._results,
@@ -964,7 +972,7 @@ class MLClientCtx:
         set_if_not_none(struct["status"], "commit", self._commit)
         set_if_not_none(struct["status"], "iterations", self._iteration_results)
 
-        struct["status"][run_keys.artifacts] = self._artifacts_manager.artifact_list()
+        struct["status"][RunKeys.artifacts] = self._artifacts_manager.artifact_list()
         self._data_stores.to_dict(struct["spec"])
         return struct
 
@@ -1058,7 +1066,7 @@ class MLClientCtx:
         set_if_not_none(struct, "status.commit", self._commit)
         set_if_not_none(struct, "status.iterations", self._iteration_results)
 
-        struct[f"status.{run_keys.artifacts}"] = self._artifacts_manager.artifact_list()
+        struct[f"status.{RunKeys.artifacts}"] = self._artifacts_manager.artifact_list()
         return struct
 
     def _init_dbs(self, rundb):
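Besides threading node_selector through the context, these hunks complete the rename of the lowercase run_keys helper to RunKeys. A minimal migration sketch for downstream code, assuming the members still resolve to the same plain string keys (which the f"status.{RunKeys.artifacts}" usage above implies):

# Hypothetical downstream adjustment - not part of this diff.
# Before (<= 1.7.0rc26): from mlrun.utils import run_keys
from mlrun.utils import RunKeys

run_spec = {"output_path": "v3io:///projects/demo/artifacts", "inputs": {}}

# Same dict lookups as before, only the container name changed
artifact_path = run_spec.get(RunKeys.output_path)
print(artifact_path)  # v3io:///projects/demo/artifacts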
mlrun/feature_store/retrieval/spark_merger.py CHANGED
@@ -45,6 +45,7 @@ def spark_df_to_pandas(spark_df):
             ),
         )
         type_conversion_dict[field.name] = "datetime64[ns]"
+
    df = PandasConversionMixin.toPandas(spark_df)
    if type_conversion_dict:
        df = df.astype(type_conversion_dict)
@@ -252,7 +253,7 @@ class SparkFeatureMerger(BaseMerger):
             )
             source_kind = target.kind
             source_path = target.get_target_path()
-
+            source_kwargs = target.source_spark_attributes
             # handling case where there are multiple feature sets and user creates vector where
             # entity_timestamp_column is from a specific feature set (can't be entity timestamp)
             source_driver = mlrun.datastore.sources.source_kind_to_driver[source_kind]
mlrun/model.py CHANGED
@@ -732,6 +732,25 @@ class Notification(ModelObj):
                 "Notification params size exceeds max size of 1 MB"
             )
 
+    def validate_notification_params(self):
+        notification_class = mlrun.utils.notifications.NotificationTypes(
+            self.kind
+        ).get_notification()
+
+        secret_params = self.secret_params
+        params = self.params
+
+        if not secret_params and not params:
+            raise mlrun.errors.MLRunInvalidArgumentError(
+                "Both 'secret_params' and 'params' are empty, at least one must be defined."
+            )
+        if secret_params and params and secret_params != params:
+            raise mlrun.errors.MLRunInvalidArgumentError(
+                "Both 'secret_params' and 'params' are defined but they contain different values"
+            )
+
+        notification_class.validate_params(secret_params or params)
+
     @staticmethod
     def validate_notification_uniqueness(notifications: list["Notification"]):
         """Validate that all notifications in the list are unique by name"""
@@ -873,6 +892,7 @@ class RunSpec(ModelObj):
         notifications=None,
         state_thresholds=None,
         reset_on_run=None,
+        node_selector=None,
     ):
         # A dictionary of parsing configurations that will be read from the inputs the user set. The keys are the inputs
         # keys (parameter names) and the values are the type hint given in the input keys after the colon.
@@ -910,6 +930,7 @@ class RunSpec(ModelObj):
         self._notifications = notifications or []
         self.state_thresholds = state_thresholds or {}
         self.reset_on_run = reset_on_run
+        self.node_selector = node_selector or {}
 
     def _serialize_field(
         self, struct: dict, field_name: str = None, strip: bool = False
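Together with the mlrun/execution.py hunks above, this threads a per-run node selector through the run spec. A minimal sketch of the new field, assuming direct construction of RunSpec:

from mlrun.model import RunSpec

spec = RunSpec(node_selector={"disktype": "ssd"})
assert spec.node_selector == {"disktype": "ssd"}

# Falls back to an empty dict when not provided
assert RunSpec().node_selector == {}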
mlrun/model_monitoring/db/stores/__init__.py CHANGED
@@ -111,10 +111,12 @@ def get_store_object(
     ):
         store_type = mlrun.common.schemas.model_monitoring.ModelEndpointTarget.SQL
         kwargs["store_connection_string"] = store_connection_string
+    elif store_connection_string and store_connection_string == "v3io":
+        store_type = (
+            mlrun.common.schemas.model_monitoring.ModelEndpointTarget.V3IO_NOSQL
+        )
     else:
-        # Set the default store type if no connection has been set
-        store_type = mlrun.mlconf.model_endpoint_monitoring.store_type
-
+        store_type = None
     # Get store type value from ObjectStoreFactory enum class
     store_type_fact = ObjectStoreFactory(store_type)
 
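The store type is now derived purely from the connection string; the fallback to mlrun.mlconf.model_endpoint_monitoring.store_type is gone, so a missing or unrecognized connection string reaches ObjectStoreFactory as None and fails fast. An illustrative reduction of the dispatch (not the actual function; the SQL branch condition sits outside this hunk and is assumed here):

def resolve_store_type(store_connection_string):
    # Illustrative mirror of the branch order in get_store_object
    if store_connection_string and store_connection_string.startswith(
        ("mysql", "sqlite")
    ):  # assumption: the SQL condition itself is not shown in the hunk
        return "sql"
    elif store_connection_string == "v3io":
        return "v3io-nosql"
    return None  # ObjectStoreFactory(None) raises instead of silently defaulting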
mlrun/model_monitoring/db/stores/base/store.py CHANGED
@@ -11,7 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
+import json
 import typing
 from abc import ABC, abstractmethod
 
@@ -170,3 +170,38 @@ class StoreBase(ABC):
 
         :return: A list of the available metrics.
         """
+
+    @staticmethod
+    def _validate_labels(
+        endpoint_dict: dict,
+        labels: list,
+    ) -> bool:
+        """Validate that the model endpoint dictionary has the provided labels. There are 2 possible cases:
+        1 - Labels were provided as a list of key-values pairs (e.g. ['label_1=value_1', 'label_2=value_2']): Validate
+        that each pair exist in the endpoint dictionary.
+        2 - Labels were provided as a list of key labels (e.g. ['label_1', 'label_2']): Validate that each key exist in
+        the endpoint labels dictionary.
+
+        :param endpoint_dict: Dictionary of the model endpoint records.
+        :param labels:        List of dictionary of required labels.
+
+        :return: True if the labels exist in the endpoint labels dictionary, otherwise False.
+        """
+
+        # Convert endpoint labels into dictionary
+        endpoint_labels = json.loads(
+            endpoint_dict.get(mm_schemas.EventFieldType.LABELS)
+        )
+
+        for label in labels:
+            # Case 1 - label is a key=value pair
+            if "=" in label:
+                lbl, value = list(map(lambda x: x.strip(), label.split("=")))
+                if lbl not in endpoint_labels or str(endpoint_labels[lbl]) != value:
+                    return False
+            # Case 2 - label is just a key
+            else:
+                if label not in endpoint_labels:
+                    return False
+
+        return True
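_validate_labels now lives on the shared base class so both the SQL and KV stores can filter endpoints client-side. A standalone, runnable mirror of its matching rules, using the literal "labels" key in place of mm_schemas.EventFieldType.LABELS (an assumption):

import json


def validate_labels(endpoint_dict: dict, labels: list) -> bool:
    # Mirrors StoreBase._validate_labels; labels are stored JSON-encoded
    endpoint_labels = json.loads(endpoint_dict.get("labels"))
    for label in labels:
        if "=" in label:  # case 1 - key=value pair must match exactly
            key, value = (part.strip() for part in label.split("="))
            if key not in endpoint_labels or str(endpoint_labels[key]) != value:
                return False
        elif label not in endpoint_labels:  # case 2 - bare key must exist
            return False
    return True


endpoint = {"labels": json.dumps({"env": "prod", "team": "ml"})}
assert validate_labels(endpoint, ["env=prod", "team"])  # pair match + existence
assert not validate_labels(endpoint, ["env=stage"])  # value mismatch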
mlrun/model_monitoring/db/stores/sqldb/sql_store.py CHANGED
@@ -13,7 +13,6 @@
 # limitations under the License.
 
 import datetime
-import json
 import typing
 import uuid
 
@@ -303,6 +302,7 @@ class SQLStoreBase(StoreBase):
 
         # Apply filters
         if model:
+            model = model if ":" in model else f"{model}:latest"
             query = self._filter_values(
                 query=query,
                 model_endpoints_table=model_endpoints_table,
@@ -310,11 +310,12 @@ class SQLStoreBase(StoreBase):
                 filtered_values=[model],
             )
         if function:
+            function_uri = f"{self.project}/{function}"
             query = self._filter_values(
                 query=query,
                 model_endpoints_table=model_endpoints_table,
-                key_filter=mm_schemas.EventFieldType.FUNCTION,
-                filtered_values=[function],
+                key_filter=mm_schemas.EventFieldType.FUNCTION_URI,
+                filtered_values=[function_uri],
             )
         if uids:
             query = self._filter_values(
@@ -581,41 +582,6 @@ class SQLStoreBase(StoreBase):
         # Apply AND operator on the SQL query object with the filters tuple
         return query.filter(sqlalchemy.and_(*filter_query))
 
-    @staticmethod
-    def _validate_labels(
-        endpoint_dict: dict,
-        labels: list,
-    ) -> bool:
-        """Validate that the model endpoint dictionary has the provided labels. There are 2 possible cases:
-        1 - Labels were provided as a list of key-values pairs (e.g. ['label_1=value_1', 'label_2=value_2']): Validate
-        that each pair exist in the endpoint dictionary.
-        2 - Labels were provided as a list of key labels (e.g. ['label_1', 'label_2']): Validate that each key exist in
-        the endpoint labels dictionary.
-
-        :param endpoint_dict: Dictionary of the model endpoint records.
-        :param labels:        List of dictionary of required labels.
-
-        :return: True if the labels exist in the endpoint labels dictionary, otherwise False.
-        """
-
-        # Convert endpoint labels into dictionary
-        endpoint_labels = json.loads(
-            endpoint_dict.get(mm_schemas.EventFieldType.LABELS)
-        )
-
-        for label in labels:
-            # Case 1 - label is a key=value pair
-            if "=" in label:
-                lbl, value = list(map(lambda x: x.strip(), label.split("=")))
-                if lbl not in endpoint_labels or str(endpoint_labels[lbl]) != value:
-                    return False
-            # Case 2 - label is just a key
-            else:
-                if label not in endpoint_labels:
-                    return False
-
-        return True
-
     def delete_model_endpoints_resources(self) -> None:
         """
         Delete all the model monitoring resources of the project in the SQL tables.
mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py CHANGED
@@ -256,7 +256,6 @@ class KVStoreBase(StoreBase):
                 self.project,
                 function,
                 model,
-                labels,
                 top_level,
             ),
             raise_for_status=v3io.dataplane.RaiseForStatus.never,
@@ -269,7 +268,6 @@ class KVStoreBase(StoreBase):
                 exc=mlrun.errors.err_to_str(exc),
             )
             return endpoint_list
-
         # Create a list of model endpoints unique ids
         if uids is None:
             uids = []
@@ -282,10 +280,16 @@ class KVStoreBase(StoreBase):
 
         # Add each relevant model endpoint to the model endpoints list
         for endpoint_id in uids:
-            endpoint = self.get_model_endpoint(
+            endpoint_dict = self.get_model_endpoint(
                 endpoint_id=endpoint_id,
             )
-            endpoint_list.append(endpoint)
+
+            if labels and not self._validate_labels(
+                endpoint_dict=endpoint_dict, labels=labels
+            ):
+                continue
+
+            endpoint_list.append(endpoint_dict)
 
         return endpoint_list
 
@@ -509,20 +513,16 @@ class KVStoreBase(StoreBase):
         project: str,
         function: str = None,
         model: str = None,
-        labels: list[str] = None,
         top_level: bool = False,
     ) -> str:
         """
         Convert the provided filters into a valid filter expression. The expected filter expression includes different
         conditions, divided by ' AND '.
 
-        :param project:   The name of the project.
-        :param model:     The name of the model to filter by.
-        :param function:  The name of the function to filter by.
-        :param labels:    A list of labels to filter by. Label filters work by either filtering a specific value of
-                          a label (i.e. list("key=value")) or by looking for the existence of a given
-                          key (i.e. "key").
-        :param top_level: If True will return only routers and endpoint that are NOT children of any router.
+        :param project:   The name of the project.
+        :param model:     The name of the model to filter by.
+        :param function:  The name of the function to filter by.
+        :param top_level: If True will return only routers and endpoint that are NOT children of any router.
 
         :return: A valid filter expression as a string.
 
@@ -533,25 +533,17 @@ class KVStoreBase(StoreBase):
             raise mlrun.errors.MLRunInvalidArgumentError("project can't be empty")
 
         # Add project filter
-        filter_expression = [f"project=='{project}'"]
+        filter_expression = [f"{mm_schemas.EventFieldType.PROJECT}=='{project}'"]
 
         # Add function and model filters
         if function:
-            filter_expression.append(f"function=='{function}'")
+            function_uri = f"{project}/{function}" if function else None
+            filter_expression.append(
+                f"{mm_schemas.EventFieldType.FUNCTION_URI}=='{function_uri}'"
+            )
         if model:
-            filter_expression.append(f"model=='{model}'")
-
-        # Add labels filters
-        if labels:
-            for label in labels:
-                if not label.startswith("_"):
-                    label = f"_{label}"
-
-                if "=" in label:
-                    lbl, value = list(map(lambda x: x.strip(), label.split("=")))
-                    filter_expression.append(f"{lbl}=='{value}'")
-                else:
-                    filter_expression.append(f"exists({label})")
+            model = model if ":" in model else f"{model}:latest"
+            filter_expression.append(f"{mm_schemas.EventFieldType.MODEL}=='{model}'")
 
         # Apply top_level filter (remove endpoints that considered a child of a router)
         if top_level:
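The rewritten expression builder drops label handling (now done client-side via _validate_labels above), filters functions by their project-scoped URI, and pins un-tagged models to :latest. A standalone mirror of the output, with literal field names standing in for the mm_schemas.EventFieldType members (an assumption):

def build_kv_filter(project: str, function: str = None, model: str = None) -> str:
    # Mirrors the rewritten builder; the top_level clause is elided here
    parts = [f"project=='{project}'"]
    if function:
        parts.append(f"function_uri=='{project}/{function}'")
    if model:
        model = model if ":" in model else f"{model}:latest"
        parts.append(f"model=='{model}'")
    return " AND ".join(parts)


print(build_kv_filter("demo", function="serving", model="clf"))
# project=='demo' AND function_uri=='demo/serving' AND model=='clf:latest'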
mlrun/model_monitoring/db/tsdb/__init__.py CHANGED
@@ -64,7 +64,6 @@ class ObjectTSDBFactory(enum.Enum):
 
 def get_tsdb_connector(
     project: str,
-    tsdb_connector_type: str = "",
     secret_provider: typing.Optional[typing.Callable[[str], str]] = None,
     **kwargs,
 ) -> TSDBConnector:
@@ -86,12 +85,10 @@ def get_tsdb_connector(
     if tsdb_connection_string and tsdb_connection_string.startswith("taosws"):
         tsdb_connector_type = mlrun.common.schemas.model_monitoring.TSDBTarget.TDEngine
         kwargs["connection_string"] = tsdb_connection_string
-
-    # Set the default TSDB connector type if no connection has been set
-    tsdb_connector_type = (
-        tsdb_connector_type
-        or mlrun.mlconf.model_endpoint_monitoring.tsdb_connector_type
-    )
+    elif tsdb_connection_string and tsdb_connection_string == "v3io":
+        tsdb_connector_type = mlrun.common.schemas.model_monitoring.TSDBTarget.V3IO_TSDB
+    else:
+        tsdb_connector_type = None
 
     # Get connector type value from ObjectTSDBFactory enum class
     tsdb_connector_factory = ObjectTSDBFactory(tsdb_connector_type)
mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py CHANGED
@@ -38,7 +38,10 @@ def _is_no_schema_error(exc: v3io_frames.ReadError) -> bool:
     In case of a nonexistent TSDB table - a `v3io_frames.ReadError` error is raised.
     Check if the error message contains the relevant string to verify the cause.
     """
-    return "No TSDB schema file found" in str(exc)
+    msg = str(exc)
+    # https://github.com/v3io/v3io-tsdb/blob/v0.14.1/pkg/tsdb/v3iotsdb.go#L205
+    # https://github.com/v3io/v3io-tsdb/blob/v0.14.1/pkg/partmgr/partmgr.go#L238
+    return "No TSDB schema file found" in msg or "Failed to read schema at path" in msg
 
 
 class V3IOTSDBConnector(TSDBConnector):
mlrun/model_monitoring/helpers.py CHANGED
@@ -59,13 +59,17 @@ def get_stream_path(
 
     stream_uri = mlrun.get_secret_or_env(
         mlrun.common.schemas.model_monitoring.ProjectSecretKeys.STREAM_PATH
-    ) or mlrun.mlconf.get_model_monitoring_file_target_path(
-        project=project,
-        kind=mlrun.common.schemas.model_monitoring.FileTargetKind.STREAM,
-        target="online",
-        function_name=function_name,
     )
 
+    if not stream_uri or stream_uri == "v3io":
+        # TODO : remove the first part of this condition in 1.9.0
+        stream_uri = mlrun.mlconf.get_model_monitoring_file_target_path(
+            project=project,
+            kind=mlrun.common.schemas.model_monitoring.FileTargetKind.STREAM,
+            target="online",
+            function_name=function_name,
+        )
+
     if isinstance(stream_uri, list):  # ML-6043 - user side gets only the new stream uri
         stream_uri = stream_uri[1]  # get new stream path, under projects
     return mlrun.common.model_monitoring.helpers.parse_monitoring_stream_path(
mlrun/projects/project.py CHANGED
@@ -1007,8 +1007,13 @@ class ProjectSpec(ModelObj):
             key = artifact.key
             artifact = artifact.to_dict()
         else:  # artifact is a dict
-            # imported artifacts don't have metadata,spec,status fields
-            key_field = "key" if _is_imported_artifact(artifact) else "metadata.key"
+            # imported/legacy artifacts don't have metadata,spec,status fields
+            key_field = (
+                "key"
+                if _is_imported_artifact(artifact)
+                or mlrun.utils.is_legacy_artifact(artifact)
+                else "metadata.key"
+            )
             key = mlrun.utils.get_in(artifact, key_field, "")
             if not key:
                 raise ValueError(f'artifacts "{key_field}" must be specified')
@@ -2127,6 +2132,7 @@ class MlrunProject(ModelObj):
         deploy_histogram_data_drift_app: bool = True,
         wait_for_deployment: bool = False,
         rebuild_images: bool = False,
+        fetch_credentials_from_sys_config: bool = False,
     ) -> None:
         """
         Deploy model monitoring application controller, writer and stream functions.
@@ -2136,17 +2142,18 @@ class MlrunProject(ModelObj):
         The stream function goal is to monitor the log of the data stream. It is triggered when a new log entry
         is detected. It processes the new events into statistics that are then written to statistics databases.
 
-        :param default_controller_image:       Deprecated.
-        :param base_period:                    The time period in minutes in which the model monitoring controller
-                                               function is triggered. By default, the base period is 10 minutes.
-        :param image:                          The image of the model monitoring controller, writer, monitoring
-                                               stream & histogram data drift functions, which are real time nuclio
-                                               functions. By default, the image is mlrun/mlrun.
-        :param deploy_histogram_data_drift_app: If true, deploy the default histogram-based data drift application.
-        :param wait_for_deployment:            If true, return only after the deployment is done on the backend.
-                                               Otherwise, deploy the model monitoring infrastructure on the
-                                               background, including the histogram data drift app if selected.
-        :param rebuild_images:                 If true, force rebuild of model monitoring infrastructure images.
+        :param default_controller_image:          Deprecated.
+        :param base_period:                       The time period in minutes in which the model monitoring controller
+                                                  function is triggered. By default, the base period is 10 minutes.
+        :param image:                             The image of the model monitoring controller, writer, monitoring
+                                                  stream & histogram data drift functions, which are real time nuclio
+                                                  functions. By default, the image is mlrun/mlrun.
+        :param deploy_histogram_data_drift_app:   If true, deploy the default histogram-based data drift application.
+        :param wait_for_deployment:               If true, return only after the deployment is done on the backend.
+                                                  Otherwise, deploy the model monitoring infrastructure on the
+                                                  background, including the histogram data drift app if selected.
+        :param rebuild_images:                    If true, force rebuild of model monitoring infrastructure images.
+        :param fetch_credentials_from_sys_config: If true, fetch the credentials from the system configuration.
         """
         if default_controller_image != "mlrun/mlrun":
             # TODO: Remove this in 1.9.0
@@ -2163,6 +2170,7 @@ class MlrunProject(ModelObj):
             base_period=base_period,
             deploy_histogram_data_drift_app=deploy_histogram_data_drift_app,
             rebuild_images=rebuild_images,
+            fetch_credentials_from_sys_config=fetch_credentials_from_sys_config,
        )
 
         if wait_for_deployment:
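A minimal sketch of the new flag in use; the project name is illustrative:

import mlrun

project = mlrun.get_or_create_project("demo")

# Let the backend pull monitoring credentials from the system configuration
# instead of requiring a prior set_model_monitoring_credentials() call.
project.enable_model_monitoring(
    base_period=10,
    fetch_credentials_from_sys_config=True,
    wait_for_deployment=True,
)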
@@ -2485,25 +2493,17 @@ class MlrunProject(ModelObj):
         self.spec.remove_function(name)
 
     def remove_model_monitoring_function(self, name: Union[str, list[str]]):
-        """remove the specified model-monitoring-app function/s from the project spec
+        """delete the specified model-monitoring-app function/s
 
         :param name: name of the model-monitoring-function/s (under the project)
         """
-        names = name if isinstance(name, list) else [name]
-        for func_name in names:
-            function = self.get_function(key=func_name)
-            if (
-                function.metadata.labels.get(mm_constants.ModelMonitoringAppLabel.KEY)
-                == mm_constants.ModelMonitoringAppLabel.VAL
-            ):
-                self.remove_function(name=func_name)
-                logger.info(
-                    f"{func_name} function has been removed from {self.name} project"
-                )
-            else:
-                raise logger.warn(
-                    f"There is no model monitoring function with {func_name} name"
-                )
+        # TODO: Remove this in 1.9.0
+        warnings.warn(
+            "'remove_model_monitoring_function' is deprecated and will be removed in 1.9.0. "
+            "Please use `delete_model_monitoring_function` instead.",
+            FutureWarning,
+        )
+        self.delete_model_monitoring_function(name)
 
     def delete_model_monitoring_function(self, name: Union[str, list[str]]):
         """delete the specified model-monitoring-app function/s
@@ -3205,49 +3205,44 @@ class MlrunProject(ModelObj):
         stream_path: Optional[str] = None,
         tsdb_connection: Optional[str] = None,
     ):
-        """Set the credentials that will be used by the project's model monitoring
+        """
+        Set the credentials that will be used by the project's model monitoring
         infrastructure functions. Important to note that you have to set the credentials before deploying any
         model monitoring or serving function.
 
-        :param access_key:                Model Monitoring access key for managing user permissions
-        :param endpoint_store_connection: Endpoint store connection string
-        :param stream_path:               Path to the model monitoring stream
-        :param tsdb_connection:           Connection string to the time series database
+        :param access_key:                Model Monitoring access key for managing user permissions.
+        :param endpoint_store_connection: Endpoint store connection string. By default, None.
+                                          Options:
+                                          1. None, will be set from the system configuration.
+                                          2. v3io - for v3io endpoint store,
+                                             pass `v3io` and the system will generate the exact path.
+                                          3. MySQL/SQLite - for SQL endpoint store, please provide full
+                                             connection string, for example
+                                             mysql+pymysql://<username>:<password>@<host>:<port>/<db_name>
+        :param stream_path:               Path to the model monitoring stream. By default, None.
+                                          Options:
+                                          1. None, will be set from the system configuration.
+                                          2. v3io - for v3io stream,
+                                             pass `v3io` and the system will generate the exact path.
+                                          3. Kafka - for Kafka stream, please provide full connection string without
+                                             custom topic, for example kafka://<some_kafka_broker>:<port>.
+        :param tsdb_connection:           Connection string to the time series database. By default, None.
+                                          Options:
+                                          1. None, will be set from the system configuration.
+                                          2. v3io - for v3io stream,
+                                             pass `v3io` and the system will generate the exact path.
+                                          3. TDEngine - for TDEngine tsdb, please provide full websocket connection URL,
+                                             for example taosws://<username>:<password>@<host>:<port>.
         """
-
-        secrets_dict = {}
-        if access_key:
-            secrets_dict[
-                mlrun.common.schemas.model_monitoring.ProjectSecretKeys.ACCESS_KEY
-            ] = access_key
-
-        if endpoint_store_connection:
-            secrets_dict[
-                mlrun.common.schemas.model_monitoring.ProjectSecretKeys.ENDPOINT_STORE_CONNECTION
-            ] = endpoint_store_connection
-
-        if stream_path:
-            if stream_path.startswith("kafka://") and "?topic" in stream_path:
-                raise mlrun.errors.MLRunInvalidArgumentError(
-                    "Custom kafka topic is not allowed"
-                )
-            secrets_dict[
-                mlrun.common.schemas.model_monitoring.ProjectSecretKeys.STREAM_PATH
-            ] = stream_path
-
-        if tsdb_connection:
-            if not tsdb_connection.startswith("taosws://"):
-                raise mlrun.errors.MLRunInvalidArgumentError(
-                    "Currently only TDEngine websocket connection is supported for non-v3io TSDB,"
-                    "please provide a full URL (e.g. taosws://user:password@host:port)"
-                )
-            secrets_dict[
-                mlrun.common.schemas.model_monitoring.ProjectSecretKeys.TSDB_CONNECTION
-            ] = tsdb_connection
-
-        self.set_secrets(
-            secrets=secrets_dict,
-            provider=mlrun.common.schemas.SecretProviderName.kubernetes,
+        db = mlrun.db.get_run_db(secrets=self._secrets)
+        db.set_model_monitoring_credentials(
+            project=self.name,
+            credentials={
+                "access_key": access_key,
+                "endpoint_store_connection": endpoint_store_connection,
+                "stream_path": stream_path,
+                "tsdb_connection": tsdb_connection,
+            },
         )
 
     def run_function(
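Credential handling moves from writing Kubernetes project secrets on the client to a single backend call, which is what the new db.set_model_monitoring_credentials path above does. A usage sketch following the docstring; hosts, ports, and passwords are placeholders:

import mlrun

project = mlrun.get_or_create_project("demo")

# "v3io" is shorthand that lets the system generate the exact paths;
# full connection strings select SQL / Kafka / TDEngine targets instead.
project.set_model_monitoring_credentials(
    endpoint_store_connection="mysql+pymysql://user:password@db-host:3306/monitoring",
    stream_path="kafka://kafka-broker:9092",  # no custom topic allowed
    tsdb_connection="taosws://user:password@tdengine-host:6041",
)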