mlrun 1.7.0rc13__py3-none-any.whl → 1.7.0rc15__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mlrun might be problematic; review the file-by-file changes listed below for details.

Files changed (85)
  1. mlrun/__main__.py +0 -105
  2. mlrun/artifacts/__init__.py +1 -2
  3. mlrun/artifacts/base.py +8 -250
  4. mlrun/artifacts/dataset.py +1 -190
  5. mlrun/artifacts/manager.py +2 -41
  6. mlrun/artifacts/model.py +1 -140
  7. mlrun/artifacts/plots.py +1 -375
  8. mlrun/common/schemas/model_monitoring/__init__.py +4 -0
  9. mlrun/common/schemas/model_monitoring/constants.py +24 -3
  10. mlrun/common/schemas/model_monitoring/model_endpoints.py +13 -1
  11. mlrun/common/schemas/project.py +1 -0
  12. mlrun/config.py +14 -4
  13. mlrun/data_types/to_pandas.py +4 -4
  14. mlrun/datastore/base.py +41 -9
  15. mlrun/datastore/datastore_profile.py +50 -3
  16. mlrun/datastore/hdfs.py +5 -0
  17. mlrun/datastore/inmem.py +2 -2
  18. mlrun/datastore/sources.py +43 -2
  19. mlrun/datastore/store_resources.py +2 -6
  20. mlrun/datastore/targets.py +125 -6
  21. mlrun/datastore/v3io.py +1 -1
  22. mlrun/db/auth_utils.py +152 -0
  23. mlrun/db/base.py +1 -1
  24. mlrun/db/httpdb.py +69 -33
  25. mlrun/feature_store/__init__.py +0 -2
  26. mlrun/feature_store/api.py +12 -47
  27. mlrun/feature_store/feature_set.py +9 -0
  28. mlrun/feature_store/retrieval/base.py +9 -4
  29. mlrun/feature_store/retrieval/conversion.py +4 -4
  30. mlrun/feature_store/retrieval/dask_merger.py +2 -0
  31. mlrun/feature_store/retrieval/job.py +2 -0
  32. mlrun/feature_store/retrieval/local_merger.py +2 -0
  33. mlrun/feature_store/retrieval/spark_merger.py +5 -0
  34. mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +5 -10
  35. mlrun/kfpops.py +5 -10
  36. mlrun/launcher/base.py +1 -1
  37. mlrun/launcher/client.py +1 -1
  38. mlrun/lists.py +2 -2
  39. mlrun/model.py +36 -9
  40. mlrun/model_monitoring/api.py +41 -18
  41. mlrun/model_monitoring/application.py +5 -305
  42. mlrun/model_monitoring/applications/__init__.py +11 -0
  43. mlrun/model_monitoring/applications/_application_steps.py +158 -0
  44. mlrun/model_monitoring/applications/base.py +282 -0
  45. mlrun/model_monitoring/applications/context.py +214 -0
  46. mlrun/model_monitoring/applications/evidently_base.py +211 -0
  47. mlrun/model_monitoring/applications/histogram_data_drift.py +92 -77
  48. mlrun/model_monitoring/applications/results.py +99 -0
  49. mlrun/model_monitoring/controller.py +3 -1
  50. mlrun/model_monitoring/db/stores/sqldb/models/base.py +7 -6
  51. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +1 -1
  52. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +67 -4
  53. mlrun/model_monitoring/evidently_application.py +6 -118
  54. mlrun/model_monitoring/helpers.py +1 -1
  55. mlrun/model_monitoring/model_endpoint.py +3 -2
  56. mlrun/model_monitoring/stream_processing.py +2 -3
  57. mlrun/model_monitoring/writer.py +69 -39
  58. mlrun/platforms/iguazio.py +2 -2
  59. mlrun/projects/pipelines.py +24 -7
  60. mlrun/projects/project.py +130 -65
  61. mlrun/render.py +2 -10
  62. mlrun/run.py +1 -4
  63. mlrun/runtimes/__init__.py +3 -3
  64. mlrun/runtimes/base.py +3 -3
  65. mlrun/runtimes/funcdoc.py +0 -28
  66. mlrun/runtimes/local.py +1 -1
  67. mlrun/runtimes/mpijob/__init__.py +0 -20
  68. mlrun/runtimes/mpijob/v1.py +1 -1
  69. mlrun/runtimes/nuclio/api_gateway.py +275 -153
  70. mlrun/runtimes/nuclio/function.py +1 -1
  71. mlrun/runtimes/pod.py +5 -5
  72. mlrun/runtimes/utils.py +1 -1
  73. mlrun/serving/states.py +53 -2
  74. mlrun/utils/helpers.py +27 -40
  75. mlrun/utils/notifications/notification/slack.py +31 -8
  76. mlrun/utils/notifications/notification_pusher.py +133 -14
  77. mlrun/utils/version/version.json +2 -2
  78. {mlrun-1.7.0rc13.dist-info → mlrun-1.7.0rc15.dist-info}/METADATA +2 -2
  79. {mlrun-1.7.0rc13.dist-info → mlrun-1.7.0rc15.dist-info}/RECORD +84 -79
  80. mlrun/runtimes/mpijob/v1alpha1.py +0 -29
  81. /mlrun/{runtimes → common/runtimes}/constants.py +0 -0
  82. {mlrun-1.7.0rc13.dist-info → mlrun-1.7.0rc15.dist-info}/LICENSE +0 -0
  83. {mlrun-1.7.0rc13.dist-info → mlrun-1.7.0rc15.dist-info}/WHEEL +0 -0
  84. {mlrun-1.7.0rc13.dist-info → mlrun-1.7.0rc15.dist-info}/entry_points.txt +0 -0
  85. {mlrun-1.7.0rc13.dist-info → mlrun-1.7.0rc15.dist-info}/top_level.txt +0 -0
@@ -656,6 +656,29 @@ class BaseStoreTarget(DataTargetBase):
656
656
  def _target_path_object(self):
657
657
  """return the actual/computed target path"""
658
658
  is_single_file = hasattr(self, "is_single_file") and self.is_single_file()
659
+
660
+ if self._resource and self.path:
661
+ parsed_url = urlparse(self.path)
662
+ # When the URL consists only from scheme and endpoint and no path,
663
+ # make a default path for DS and redis targets.
664
+ # Also ignore KafkaTarget when it uses the ds scheme (no default path for KafkaTarget)
665
+ if (
666
+ not isinstance(self, KafkaTarget)
667
+ and parsed_url.scheme in ["ds", "redis", "rediss"]
668
+ and (not parsed_url.path or parsed_url.path == "/")
669
+ ):
670
+ return TargetPathObject(
671
+ _get_target_path(
672
+ self,
673
+ self._resource,
674
+ self.run_id is not None,
675
+ netloc=parsed_url.netloc,
676
+ scheme=parsed_url.scheme,
677
+ ),
678
+ self.run_id,
679
+ is_single_file,
680
+ )
681
+
659
682
  return self.get_path() or (
660
683
  TargetPathObject(
661
684
  _get_target_path(self, self._resource, self.run_id is not None),
@@ -714,9 +737,13 @@ class BaseStoreTarget(DataTargetBase):
714
737
  start_time=None,
715
738
  end_time=None,
716
739
  time_column=None,
740
+ additional_filters=None,
717
741
  **kwargs,
718
742
  ):
719
743
  """return the target data as dataframe"""
744
+ mlrun.utils.helpers.additional_filters_warning(
745
+ additional_filters, self.__class__
746
+ )
720
747
  return mlrun.get_dataitem(self.get_target_path()).as_df(
721
748
  columns=columns,
722
749
  df_module=df_module,
@@ -961,6 +988,7 @@ class ParquetTarget(BaseStoreTarget):
961
988
  start_time=None,
962
989
  end_time=None,
963
990
  time_column=None,
991
+ additional_filters=None,
964
992
  **kwargs,
965
993
  ):
966
994
  """return the target data as dataframe"""
@@ -971,6 +999,7 @@ class ParquetTarget(BaseStoreTarget):
971
999
  start_time=start_time,
972
1000
  end_time=end_time,
973
1001
  time_column=time_column,
1002
+ additional_filters=additional_filters,
974
1003
  **kwargs,
975
1004
  )
976
1005
  if not columns:
@@ -1101,8 +1130,12 @@ class CSVTarget(BaseStoreTarget):
1101
1130
  start_time=None,
1102
1131
  end_time=None,
1103
1132
  time_column=None,
1133
+ additional_filters=None,
1104
1134
  **kwargs,
1105
1135
  ):
1136
+ mlrun.utils.helpers.additional_filters_warning(
1137
+ additional_filters, self.__class__
1138
+ )
1106
1139
  df = super().as_df(
1107
1140
  columns=columns,
1108
1141
  df_module=df_module,
@@ -1209,6 +1242,7 @@ class SnowflakeTarget(BaseStoreTarget):
1209
1242
  start_time=None,
1210
1243
  end_time=None,
1211
1244
  time_column=None,
1245
+ additional_filters=None,
1212
1246
  **kwargs,
1213
1247
  ):
1214
1248
  raise NotImplementedError()
@@ -1275,7 +1309,17 @@ class NoSqlBaseTarget(BaseStoreTarget):
1275
1309
  def get_dask_options(self):
1276
1310
  return {"format": "csv"}
1277
1311
 
1278
- def as_df(self, columns=None, df_module=None, **kwargs):
1312
+ def as_df(
1313
+ self,
1314
+ columns=None,
1315
+ df_module=None,
1316
+ entities=None,
1317
+ start_time=None,
1318
+ end_time=None,
1319
+ time_column=None,
1320
+ additional_filters=None,
1321
+ **kwargs,
1322
+ ):
1279
1323
  raise NotImplementedError()
1280
1324
 
1281
1325
  def write_dataframe(
@@ -1511,11 +1555,40 @@ class StreamTarget(BaseStoreTarget):
1511
1555
  **self.attributes,
1512
1556
  )
1513
1557
 
1514
- def as_df(self, columns=None, df_module=None, **kwargs):
1558
+ def as_df(
1559
+ self,
1560
+ columns=None,
1561
+ df_module=None,
1562
+ entities=None,
1563
+ start_time=None,
1564
+ end_time=None,
1565
+ time_column=None,
1566
+ additional_filters=None,
1567
+ **kwargs,
1568
+ ):
1515
1569
  raise NotImplementedError()
1516
1570
 
1517
1571
 
1518
1572
  class KafkaTarget(BaseStoreTarget):
1573
+ """
1574
+ Kafka target storage driver, used to write data into kafka topics.
1575
+ example::
1576
+ # define target
1577
+ kafka_target = KafkaTarget(
1578
+ name="kafka", path="my_topic", brokers="localhost:9092"
1579
+ )
1580
+ # ingest
1581
+ stocks_set.ingest(stocks, [kafka_target])
1582
+ :param name: target name
1583
+ :param path: topic name e.g. "my_topic"
1584
+ :param after_step: optional, after what step in the graph to add the target
1585
+ :param columns: optional, which columns from data to write
1586
+ :param bootstrap_servers: Deprecated. Use the brokers parameter instead
1587
+ :param producer_options: additional configurations for kafka producer
1588
+ :param brokers: kafka broker as represented by a host:port pair, or a list of kafka brokers, e.g.
1589
+ "localhost:9092", or ["kafka-broker-1:9092", "kafka-broker-2:9092"]
1590
+ """
1591
+
1519
1592
  kind = TargetTypes.kafka
1520
1593
  is_table = False
1521
1594
  is_online = False
@@ -1597,7 +1670,17 @@ class KafkaTarget(BaseStoreTarget):
1597
1670
  **attributes,
1598
1671
  )
1599
1672
 
1600
- def as_df(self, columns=None, df_module=None, **kwargs):
1673
+ def as_df(
1674
+ self,
1675
+ columns=None,
1676
+ df_module=None,
1677
+ entities=None,
1678
+ start_time=None,
1679
+ end_time=None,
1680
+ time_column=None,
1681
+ additional_filters=None,
1682
+ **kwargs,
1683
+ ):
1601
1684
  raise NotImplementedError()
1602
1685
 
1603
1686
  def purge(self):
@@ -1644,7 +1727,17 @@ class TSDBTarget(BaseStoreTarget):
1644
1727
  **self.attributes,
1645
1728
  )
1646
1729
 
1647
- def as_df(self, columns=None, df_module=None, **kwargs):
1730
+ def as_df(
1731
+ self,
1732
+ columns=None,
1733
+ df_module=None,
1734
+ entities=None,
1735
+ start_time=None,
1736
+ end_time=None,
1737
+ time_column=None,
1738
+ additional_filters=None,
1739
+ **kwargs,
1740
+ ):
1648
1741
  raise NotImplementedError()
1649
1742
 
1650
1743
  def write_dataframe(
@@ -1755,11 +1848,16 @@ class DFTarget(BaseStoreTarget):
1755
1848
  self,
1756
1849
  columns=None,
1757
1850
  df_module=None,
1851
+ entities=None,
1758
1852
  start_time=None,
1759
1853
  end_time=None,
1760
1854
  time_column=None,
1855
+ additional_filters=None,
1761
1856
  **kwargs,
1762
1857
  ):
1858
+ mlrun.utils.helpers.additional_filters_warning(
1859
+ additional_filters, self.__class__
1860
+ )
1763
1861
  return select_columns_from_df(
1764
1862
  filter_df_start_end_time(
1765
1863
  self._df,
@@ -1934,6 +2032,7 @@ class SQLTarget(BaseStoreTarget):
1934
2032
  start_time=None,
1935
2033
  end_time=None,
1936
2034
  time_column=None,
2035
+ additional_filters=None,
1937
2036
  **kwargs,
1938
2037
  ):
1939
2038
  try:
@@ -1942,6 +2041,10 @@ class SQLTarget(BaseStoreTarget):
1942
2041
  except (ModuleNotFoundError, ImportError) as exc:
1943
2042
  self._raise_sqlalchemy_import_error(exc)
1944
2043
 
2044
+ mlrun.utils.helpers.additional_filters_warning(
2045
+ additional_filters, self.__class__
2046
+ )
2047
+
1945
2048
  db_path, table_name, _, _, _, _ = self._parse_url()
1946
2049
  engine = sqlalchemy.create_engine(db_path)
1947
2050
  parse_dates: Optional[list[str]] = self.attributes.get("parse_dates")
@@ -2088,7 +2191,7 @@ kind_to_driver = {
2088
2191
  }
2089
2192
 
2090
2193
 
2091
- def _get_target_path(driver, resource, run_id_mode=False):
2194
+ def _get_target_path(driver, resource, run_id_mode=False, netloc=None, scheme=""):
2092
2195
  """return the default target path given the resource and target kind"""
2093
2196
  kind = driver.kind
2094
2197
  suffix = driver.suffix
@@ -2105,11 +2208,27 @@ def _get_target_path(driver, resource, run_id_mode=False):
2105
2208
  )
2106
2209
  name = resource.metadata.name
2107
2210
  project = resource.metadata.project or mlrun.mlconf.default_project
2108
- data_prefix = get_default_prefix_for_target(kind).format(
2211
+
2212
+ default_kind_name = kind
2213
+ if scheme == "ds":
2214
+ # "dsnosql" is not an actual target like Parquet or Redis; rather, it serves
2215
+ # as a placeholder that can be used in any specified target
2216
+ default_kind_name = "dsnosql"
2217
+ if scheme == "redis" or scheme == "rediss":
2218
+ default_kind_name = TargetTypes.redisnosql
2219
+
2220
+ netloc = netloc or ""
2221
+ data_prefix = get_default_prefix_for_target(default_kind_name).format(
2222
+ ds_profile_name=netloc, # In case of ds profile, set its the name
2223
+ authority=netloc, # In case of redis, replace {authority} with netloc
2109
2224
  project=project,
2110
2225
  kind=kind,
2111
2226
  name=name,
2112
2227
  )
2228
+
2229
+ if scheme == "rediss":
2230
+ data_prefix = data_prefix.replace("redis://", "rediss://", 1)
2231
+
2113
2232
  # todo: handle ver tag changes, may need to copy files?
2114
2233
  if not run_id_mode:
2115
2234
  version = resource.metadata.tag
mlrun/datastore/v3io.py CHANGED
@@ -29,7 +29,7 @@ from .base import (
29
29
  )
30
30
 
31
31
  V3IO_LOCAL_ROOT = "v3io"
32
- V3IO_DEFAULT_UPLOAD_CHUNK_SIZE = 1024 * 1024 * 100
32
+ V3IO_DEFAULT_UPLOAD_CHUNK_SIZE = 1024 * 1024 * 10
33
33
 
34
34
 
35
35
  class V3ioStore(DataStore):
mlrun/db/auth_utils.py ADDED
@@ -0,0 +1,152 @@
1
+ # Copyright 2024 Iguazio
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ from abc import ABC, abstractmethod
16
+ from datetime import datetime, timedelta
17
+
18
+ import requests
19
+
20
+ import mlrun.errors
21
+ from mlrun.utils import logger
22
+
23
+
24
+ class TokenProvider(ABC):
25
+ @abstractmethod
26
+ def get_token(self):
27
+ pass
28
+
29
+ @abstractmethod
30
+ def is_iguazio_session(self):
31
+ pass
32
+
33
+
34
+ class StaticTokenProvider(TokenProvider):
35
+ def __init__(self, token: str):
36
+ self.token = token
37
+
38
+ def get_token(self):
39
+ return self.token
40
+
41
+ def is_iguazio_session(self):
42
+ return mlrun.platforms.iguazio.is_iguazio_session(self.token)
43
+
44
+
45
+ class OAuthClientIDTokenProvider(TokenProvider):
46
+ def __init__(
47
+ self, token_endpoint: str, client_id: str, client_secret: str, timeout=5
48
+ ):
49
+ if not token_endpoint or not client_id or not client_secret:
50
+ raise mlrun.errors.MLRunValueError(
51
+ "Invalid client_id configuration for authentication. Must provide token endpoint, client-id and secret"
52
+ )
53
+ self.token_endpoint = token_endpoint
54
+ self.client_id = client_id
55
+ self.client_secret = client_secret
56
+ self.timeout = timeout
57
+
58
+ # Since we're only issuing POST requests, which are actually a disguised GET, then it's ok to allow retries
59
+ # on them.
60
+ self._session = mlrun.utils.HTTPSessionWithRetry(
61
+ retry_on_post=True,
62
+ verbose=True,
63
+ )
64
+
65
+ self._cleanup()
66
+ self._refresh_token_if_needed()
67
+
68
+ def get_token(self):
69
+ self._refresh_token_if_needed()
70
+ return self.token
71
+
72
+ def is_iguazio_session(self):
73
+ return False
74
+
75
+ def _cleanup(self):
76
+ self.token = self.token_expiry_time = self.token_refresh_time = None
77
+
78
+ def _refresh_token_if_needed(self):
79
+ now = datetime.now()
80
+ if self.token:
81
+ if self.token_refresh_time and now <= self.token_refresh_time:
82
+ return self.token
83
+
84
+ # We only cleanup if token was really expired - even if we fail in refreshing the token, we can still
85
+ # use the existing one given that it's not expired.
86
+ if now >= self.token_expiry_time:
87
+ self._cleanup()
88
+
89
+ self._issue_token_request()
90
+ return self.token
91
+
92
+ def _issue_token_request(self, raise_on_error=False):
93
+ try:
94
+ headers = {"Content-Type": "application/x-www-form-urlencoded"}
95
+ request_body = {
96
+ "grant_type": "client_credentials",
97
+ "client_id": self.client_id,
98
+ "client_secret": self.client_secret,
99
+ }
100
+ response = self._session.request(
101
+ "POST",
102
+ self.token_endpoint,
103
+ timeout=self.timeout,
104
+ headers=headers,
105
+ data=request_body,
106
+ )
107
+ except requests.RequestException as exc:
108
+ error = f"Retrieving token failed: {mlrun.errors.err_to_str(exc)}"
109
+ if raise_on_error:
110
+ raise mlrun.errors.MLRunRuntimeError(error) from exc
111
+ else:
112
+ logger.warning(error)
113
+ return
114
+
115
+ if not response.ok:
116
+ error = "No error available"
117
+ if response.content:
118
+ try:
119
+ data = response.json()
120
+ error = data.get("error")
121
+ except Exception:
122
+ pass
123
+ logger.warning(
124
+ "Retrieving token failed", status=response.status_code, error=error
125
+ )
126
+ if raise_on_error:
127
+ mlrun.errors.raise_for_status(response)
128
+ return
129
+
130
+ self._parse_response(response.json())
131
+
132
+ def _parse_response(self, data: dict):
133
+ # Response is described in https://datatracker.ietf.org/doc/html/rfc6749#section-4.4.3
134
+ # According to spec, there isn't a refresh token - just the access token and its expiry time (in seconds).
135
+ self.token = data.get("access_token")
136
+ expires_in = data.get("expires_in")
137
+ if not self.token or not expires_in:
138
+ token_str = "****" if self.token else "missing"
139
+ logger.warning(
140
+ "Failed to parse token response", token=token_str, expires_in=expires_in
141
+ )
142
+ return
143
+
144
+ now = datetime.now()
145
+ self.token_expiry_time = now + timedelta(seconds=expires_in)
146
+ self.token_refresh_time = now + timedelta(seconds=expires_in / 2)
147
+ logger.info(
148
+ "Successfully retrieved client-id token",
149
+ expires_in=expires_in,
150
+ expiry=str(self.token_expiry_time),
151
+ refresh=str(self.token_refresh_time),
152
+ )
mlrun/db/base.py CHANGED
@@ -802,7 +802,7 @@ class RunDBInterface(ABC):
802
802
  project: str,
803
803
  base_period: int = 10,
804
804
  image: str = "mlrun/mlrun",
805
- ):
805
+ ) -> None:
806
806
  pass
807
807
 
808
808
  @abstractmethod
mlrun/db/httpdb.py CHANGED
@@ -38,6 +38,7 @@ import mlrun.platforms
38
38
  import mlrun.projects
39
39
  import mlrun.runtimes.nuclio.api_gateway
40
40
  import mlrun.utils
41
+ from mlrun.db.auth_utils import OAuthClientIDTokenProvider, StaticTokenProvider
41
42
  from mlrun.errors import MLRunInvalidArgumentError, err_to_str
42
43
 
43
44
  from ..artifacts import Artifact
@@ -138,17 +139,28 @@ class HTTPRunDB(RunDBInterface):
138
139
  endpoint += f":{parsed_url.port}"
139
140
  base_url = f"{parsed_url.scheme}://{endpoint}{parsed_url.path}"
140
141
 
142
+ self.base_url = base_url
141
143
  username = parsed_url.username or config.httpdb.user
142
144
  password = parsed_url.password or config.httpdb.password
145
+ self.token_provider = None
143
146
 
144
- username, password, token = mlrun.platforms.add_or_refresh_credentials(
145
- parsed_url.hostname, username, password, config.httpdb.token
146
- )
147
+ if config.auth_with_client_id.enabled:
148
+ self.token_provider = OAuthClientIDTokenProvider(
149
+ token_endpoint=mlrun.get_secret_or_env("MLRUN_AUTH_TOKEN_ENDPOINT"),
150
+ client_id=mlrun.get_secret_or_env("MLRUN_AUTH_CLIENT_ID"),
151
+ client_secret=mlrun.get_secret_or_env("MLRUN_AUTH_CLIENT_SECRET"),
152
+ timeout=config.auth_with_client_id.request_timeout,
153
+ )
154
+ else:
155
+ username, password, token = mlrun.platforms.add_or_refresh_credentials(
156
+ parsed_url.hostname, username, password, config.httpdb.token
157
+ )
158
+
159
+ if token:
160
+ self.token_provider = StaticTokenProvider(token)
147
161
 
148
- self.base_url = base_url
149
162
  self.user = username
150
163
  self.password = password
151
- self.token = token
152
164
 
153
165
  def __repr__(self):
154
166
  cls = self.__class__.__name__
@@ -218,17 +230,19 @@ class HTTPRunDB(RunDBInterface):
218
230
 
219
231
  if self.user:
220
232
  kw["auth"] = (self.user, self.password)
221
- elif self.token:
222
- # Iguazio auth doesn't support passing token through bearer, so use cookie instead
223
- if mlrun.platforms.iguazio.is_iguazio_session(self.token):
224
- session_cookie = f'j:{{"sid": "{self.token}"}}'
225
- cookies = {
226
- "session": session_cookie,
227
- }
228
- kw["cookies"] = cookies
229
- else:
230
- if "Authorization" not in kw.setdefault("headers", {}):
231
- kw["headers"].update({"Authorization": "Bearer " + self.token})
233
+ elif self.token_provider:
234
+ token = self.token_provider.get_token()
235
+ if token:
236
+ # Iguazio auth doesn't support passing token through bearer, so use cookie instead
237
+ if self.token_provider.is_iguazio_session():
238
+ session_cookie = f'j:{{"sid": "{token}"}}'
239
+ cookies = {
240
+ "session": session_cookie,
241
+ }
242
+ kw["cookies"] = cookies
243
+ else:
244
+ if "Authorization" not in kw.setdefault("headers", {}):
245
+ kw["headers"].update({"Authorization": "Bearer " + token})
232
246
 
233
247
  if mlrun.common.schemas.HeaderNames.client_version not in kw.setdefault(
234
248
  "headers", {}
@@ -645,10 +659,10 @@ class HTTPRunDB(RunDBInterface):
645
659
  nil_resp += 1
646
660
 
647
661
  if watch and state in [
648
- mlrun.runtimes.constants.RunStates.pending,
649
- mlrun.runtimes.constants.RunStates.running,
650
- mlrun.runtimes.constants.RunStates.created,
651
- mlrun.runtimes.constants.RunStates.aborting,
662
+ mlrun.common.runtimes.constants.RunStates.pending,
663
+ mlrun.common.runtimes.constants.RunStates.running,
664
+ mlrun.common.runtimes.constants.RunStates.created,
665
+ mlrun.common.runtimes.constants.RunStates.aborting,
652
666
  ]:
653
667
  continue
654
668
  else:
@@ -1142,7 +1156,29 @@ class HTTPRunDB(RunDBInterface):
1142
1156
  project = project or config.default_project
1143
1157
  path = f"projects/{project}/functions/{name}"
1144
1158
  error_message = f"Failed deleting function {project}/{name}"
1145
- self.api_call("DELETE", path, error_message)
1159
+ response = self.api_call("DELETE", path, error_message, version="v2")
1160
+ if response.status_code == http.HTTPStatus.ACCEPTED:
1161
+ logger.info(
1162
+ "Function is being deleted", project_name=project, function_name=name
1163
+ )
1164
+ background_task = mlrun.common.schemas.BackgroundTask(**response.json())
1165
+ background_task = self._wait_for_background_task_to_reach_terminal_state(
1166
+ background_task.metadata.name, project=project
1167
+ )
1168
+ if (
1169
+ background_task.status.state
1170
+ == mlrun.common.schemas.BackgroundTaskState.succeeded
1171
+ ):
1172
+ logger.info(
1173
+ "Function deleted", project_name=project, function_name=name
1174
+ )
1175
+ elif (
1176
+ background_task.status.state
1177
+ == mlrun.common.schemas.BackgroundTaskState.failed
1178
+ ):
1179
+ logger.info(
1180
+ "Function deletion failed", project_name=project, function_name=name
1181
+ )
1146
1182
 
1147
1183
  def list_functions(self, name=None, project=None, tag=None, labels=None):
1148
1184
  """Retrieve a list of functions, filtered by specific criteria.
@@ -1488,16 +1524,15 @@ class HTTPRunDB(RunDBInterface):
1488
1524
  """
1489
1525
 
1490
1526
  try:
1527
+ normalized_name = normalize_name(func.metadata.name)
1491
1528
  params = {
1492
- "name": normalize_name(func.metadata.name),
1529
+ "name": normalized_name,
1493
1530
  "project": func.metadata.project,
1494
1531
  "tag": func.metadata.tag,
1495
1532
  "last_log_timestamp": str(last_log_timestamp),
1496
1533
  "verbose": bool2str(verbose),
1497
1534
  }
1498
- _path = (
1499
- f"projects/{func.metadata.project}/nuclio/{func.metadata.name}/deploy"
1500
- )
1535
+ _path = f"projects/{func.metadata.project}/nuclio/{normalized_name}/deploy"
1501
1536
  resp = self.api_call("GET", _path, params=params)
1502
1537
  except OSError as err:
1503
1538
  logger.error(f"error getting deploy status: {err_to_str(err)}")
@@ -3214,7 +3249,7 @@ class HTTPRunDB(RunDBInterface):
3214
3249
  project: str,
3215
3250
  base_period: int = 10,
3216
3251
  image: str = "mlrun/mlrun",
3217
- ):
3252
+ ) -> None:
3218
3253
  """
3219
3254
  Redeploy model monitoring application controller function.
3220
3255
 
@@ -3224,13 +3259,14 @@ class HTTPRunDB(RunDBInterface):
3224
3259
  :param image: The image of the model monitoring controller function.
3225
3260
  By default, the image is mlrun/mlrun.
3226
3261
  """
3227
-
3228
- params = {
3229
- "image": image,
3230
- "base_period": base_period,
3231
- }
3232
- path = f"projects/{project}/model-monitoring/model-monitoring-controller"
3233
- self.api_call(method="POST", path=path, params=params)
3262
+ self.api_call(
3263
+ method=mlrun.common.types.HTTPMethod.POST,
3264
+ path=f"projects/{project}/model-monitoring/model-monitoring-controller",
3265
+ params={
3266
+ "base_period": base_period,
3267
+ "image": image,
3268
+ },
3269
+ )
3234
3270
 
3235
3271
  def enable_model_monitoring(
3236
3272
  self,
@@ -19,7 +19,6 @@ __all__ = [
19
19
  "get_online_feature_service",
20
20
  "ingest",
21
21
  "preview",
22
- "deploy_ingestion_service",
23
22
  "deploy_ingestion_service_v2",
24
23
  "delete_feature_set",
25
24
  "delete_feature_vector",
@@ -41,7 +40,6 @@ from ..features import Entity, Feature
41
40
  from .api import (
42
41
  delete_feature_set,
43
42
  delete_feature_vector,
44
- deploy_ingestion_service,
45
43
  deploy_ingestion_service_v2,
46
44
  get_feature_set,
47
45
  get_feature_vector,