mlrun 1.7.0rc14__py3-none-any.whl → 1.7.0rc22__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of mlrun might be problematic; see the registry page for details.
- mlrun/__init__.py +10 -1
- mlrun/__main__.py +23 -111
- mlrun/alerts/__init__.py +15 -0
- mlrun/alerts/alert.py +169 -0
- mlrun/api/schemas/__init__.py +4 -3
- mlrun/artifacts/__init__.py +8 -3
- mlrun/artifacts/base.py +36 -253
- mlrun/artifacts/dataset.py +9 -190
- mlrun/artifacts/manager.py +46 -42
- mlrun/artifacts/model.py +9 -141
- mlrun/artifacts/plots.py +14 -375
- mlrun/common/constants.py +65 -3
- mlrun/common/formatters/__init__.py +19 -0
- mlrun/{runtimes/mpijob/v1alpha1.py → common/formatters/artifact.py} +6 -14
- mlrun/common/formatters/base.py +113 -0
- mlrun/common/formatters/function.py +46 -0
- mlrun/common/formatters/pipeline.py +53 -0
- mlrun/common/formatters/project.py +51 -0
- mlrun/{runtimes → common/runtimes}/constants.py +32 -4
- mlrun/common/schemas/__init__.py +10 -5
- mlrun/common/schemas/alert.py +92 -11
- mlrun/common/schemas/api_gateway.py +56 -0
- mlrun/common/schemas/artifact.py +15 -5
- mlrun/common/schemas/auth.py +2 -0
- mlrun/common/schemas/client_spec.py +1 -0
- mlrun/common/schemas/frontend_spec.py +1 -0
- mlrun/common/schemas/function.py +4 -0
- mlrun/common/schemas/model_monitoring/__init__.py +15 -3
- mlrun/common/schemas/model_monitoring/constants.py +58 -7
- mlrun/common/schemas/model_monitoring/grafana.py +9 -5
- mlrun/common/schemas/model_monitoring/model_endpoints.py +86 -2
- mlrun/common/schemas/pipeline.py +0 -9
- mlrun/common/schemas/project.py +5 -11
- mlrun/common/types.py +1 -0
- mlrun/config.py +30 -9
- mlrun/data_types/to_pandas.py +9 -9
- mlrun/datastore/base.py +41 -9
- mlrun/datastore/datastore.py +6 -2
- mlrun/datastore/datastore_profile.py +56 -4
- mlrun/datastore/inmem.py +2 -2
- mlrun/datastore/redis.py +2 -2
- mlrun/datastore/s3.py +5 -0
- mlrun/datastore/sources.py +147 -7
- mlrun/datastore/store_resources.py +7 -7
- mlrun/datastore/targets.py +110 -42
- mlrun/datastore/utils.py +42 -0
- mlrun/db/base.py +54 -10
- mlrun/db/httpdb.py +282 -79
- mlrun/db/nopdb.py +52 -10
- mlrun/errors.py +11 -0
- mlrun/execution.py +26 -9
- mlrun/feature_store/__init__.py +0 -2
- mlrun/feature_store/api.py +12 -47
- mlrun/feature_store/feature_set.py +9 -0
- mlrun/feature_store/feature_vector.py +8 -0
- mlrun/feature_store/ingestion.py +7 -6
- mlrun/feature_store/retrieval/base.py +9 -4
- mlrun/feature_store/retrieval/conversion.py +9 -9
- mlrun/feature_store/retrieval/dask_merger.py +2 -0
- mlrun/feature_store/retrieval/job.py +9 -3
- mlrun/feature_store/retrieval/local_merger.py +2 -0
- mlrun/feature_store/retrieval/spark_merger.py +16 -0
- mlrun/frameworks/__init__.py +6 -0
- mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +7 -12
- mlrun/frameworks/parallel_coordinates.py +2 -1
- mlrun/frameworks/tf_keras/__init__.py +4 -1
- mlrun/k8s_utils.py +10 -11
- mlrun/launcher/base.py +4 -3
- mlrun/launcher/client.py +5 -3
- mlrun/launcher/local.py +12 -2
- mlrun/launcher/remote.py +9 -2
- mlrun/lists.py +6 -2
- mlrun/model.py +47 -21
- mlrun/model_monitoring/__init__.py +1 -1
- mlrun/model_monitoring/api.py +42 -18
- mlrun/model_monitoring/application.py +5 -305
- mlrun/model_monitoring/applications/__init__.py +11 -0
- mlrun/model_monitoring/applications/_application_steps.py +157 -0
- mlrun/model_monitoring/applications/base.py +280 -0
- mlrun/model_monitoring/applications/context.py +214 -0
- mlrun/model_monitoring/applications/evidently_base.py +211 -0
- mlrun/model_monitoring/applications/histogram_data_drift.py +132 -91
- mlrun/model_monitoring/applications/results.py +99 -0
- mlrun/model_monitoring/controller.py +3 -1
- mlrun/model_monitoring/db/__init__.py +2 -0
- mlrun/model_monitoring/db/stores/__init__.py +0 -2
- mlrun/model_monitoring/db/stores/base/store.py +22 -37
- mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +43 -21
- mlrun/model_monitoring/db/stores/sqldb/models/base.py +39 -8
- mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +27 -7
- mlrun/model_monitoring/db/stores/sqldb/models/sqlite.py +5 -0
- mlrun/model_monitoring/db/stores/sqldb/sql_store.py +246 -224
- mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +232 -216
- mlrun/model_monitoring/db/tsdb/__init__.py +100 -0
- mlrun/model_monitoring/db/tsdb/base.py +316 -0
- mlrun/model_monitoring/db/tsdb/helpers.py +30 -0
- mlrun/model_monitoring/db/tsdb/tdengine/__init__.py +15 -0
- mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +240 -0
- mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +45 -0
- mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +401 -0
- mlrun/model_monitoring/db/tsdb/v3io/__init__.py +15 -0
- mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +117 -0
- mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +658 -0
- mlrun/model_monitoring/evidently_application.py +6 -118
- mlrun/model_monitoring/helpers.py +63 -1
- mlrun/model_monitoring/model_endpoint.py +3 -2
- mlrun/model_monitoring/stream_processing.py +57 -216
- mlrun/model_monitoring/writer.py +134 -124
- mlrun/package/__init__.py +13 -1
- mlrun/package/packagers/__init__.py +6 -1
- mlrun/package/utils/_formatter.py +2 -2
- mlrun/platforms/__init__.py +10 -9
- mlrun/platforms/iguazio.py +21 -202
- mlrun/projects/operations.py +24 -12
- mlrun/projects/pipelines.py +79 -102
- mlrun/projects/project.py +271 -103
- mlrun/render.py +15 -14
- mlrun/run.py +16 -46
- mlrun/runtimes/__init__.py +6 -3
- mlrun/runtimes/base.py +14 -7
- mlrun/runtimes/daskjob.py +1 -0
- mlrun/runtimes/databricks_job/databricks_runtime.py +1 -0
- mlrun/runtimes/databricks_job/databricks_wrapper.py +1 -1
- mlrun/runtimes/funcdoc.py +0 -28
- mlrun/runtimes/kubejob.py +2 -1
- mlrun/runtimes/local.py +12 -3
- mlrun/runtimes/mpijob/__init__.py +0 -20
- mlrun/runtimes/mpijob/v1.py +1 -1
- mlrun/runtimes/nuclio/api_gateway.py +194 -84
- mlrun/runtimes/nuclio/application/application.py +170 -8
- mlrun/runtimes/nuclio/function.py +39 -49
- mlrun/runtimes/pod.py +16 -36
- mlrun/runtimes/remotesparkjob.py +9 -3
- mlrun/runtimes/sparkjob/spark3job.py +1 -1
- mlrun/runtimes/utils.py +6 -45
- mlrun/serving/__init__.py +8 -1
- mlrun/serving/server.py +2 -1
- mlrun/serving/states.py +51 -8
- mlrun/serving/utils.py +19 -11
- mlrun/serving/v2_serving.py +5 -1
- mlrun/track/tracker.py +2 -1
- mlrun/utils/async_http.py +25 -5
- mlrun/utils/helpers.py +157 -83
- mlrun/utils/logger.py +39 -7
- mlrun/utils/notifications/notification/__init__.py +14 -9
- mlrun/utils/notifications/notification/base.py +1 -1
- mlrun/utils/notifications/notification/slack.py +34 -7
- mlrun/utils/notifications/notification/webhook.py +1 -1
- mlrun/utils/notifications/notification_pusher.py +147 -16
- mlrun/utils/regex.py +9 -0
- mlrun/utils/v3io_clients.py +0 -1
- mlrun/utils/version/version.json +2 -2
- {mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc22.dist-info}/METADATA +14 -6
- {mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc22.dist-info}/RECORD +158 -138
- mlrun/kfpops.py +0 -865
- mlrun/platforms/other.py +0 -305
- {mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc22.dist-info}/LICENSE +0 -0
- {mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc22.dist-info}/WHEEL +0 -0
- {mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc22.dist-info}/entry_points.txt +0 -0
- {mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc22.dist-info}/top_level.txt +0 -0
mlrun/datastore/targets.py
CHANGED
@@ -30,6 +30,7 @@ import mlrun
 import mlrun.utils.helpers
 from mlrun.config import config
 from mlrun.datastore.snowflake_utils import get_snowflake_spark_options
+from mlrun.datastore.utils import transform_list_filters_to_tuple
 from mlrun.model import DataSource, DataTarget, DataTargetBase, TargetPathObject
 from mlrun.utils import logger, now_date
 from mlrun.utils.helpers import to_parquet

@@ -656,6 +657,29 @@ class BaseStoreTarget(DataTargetBase):
     def _target_path_object(self):
         """return the actual/computed target path"""
         is_single_file = hasattr(self, "is_single_file") and self.is_single_file()
+
+        if self._resource and self.path:
+            parsed_url = urlparse(self.path)
+            # When the URL consists only from scheme and endpoint and no path,
+            # make a default path for DS and redis targets.
+            # Also ignore KafkaTarget when it uses the ds scheme (no default path for KafkaTarget)
+            if (
+                not isinstance(self, KafkaTarget)
+                and parsed_url.scheme in ["ds", "redis", "rediss"]
+                and (not parsed_url.path or parsed_url.path == "/")
+            ):
+                return TargetPathObject(
+                    _get_target_path(
+                        self,
+                        self._resource,
+                        self.run_id is not None,
+                        netloc=parsed_url.netloc,
+                        scheme=parsed_url.scheme,
+                    ),
+                    self.run_id,
+                    is_single_file,
+                )
+
         return self.get_path() or (
             TargetPathObject(
                 _get_target_path(self, self._resource, self.run_id is not None),
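The hunk above moves default-path computation into the base class: a target URL with a ds, redis, or rediss scheme but no path component now falls through to _get_target_path() with the parsed netloc and scheme (KafkaTarget is excluded because it has no default path). A standalone sketch of just the detection logic, with hypothetical URLs — needs_default_path is an illustrative helper, not part of mlrun:

from urllib.parse import urlparse

def needs_default_path(path: str) -> bool:
    # Mirrors the new check in BaseStoreTarget._target_path_object: a ds/redis/rediss
    # URL whose path component is empty (or just "/") gets a computed default
    # target path instead of being used verbatim.
    parsed = urlparse(path)
    return parsed.scheme in ("ds", "redis", "rediss") and parsed.path in ("", "/")

assert needs_default_path("redis://my-redis:6379")           # scheme + endpoint only
assert needs_default_path("ds://my-profile/")                # bare "/" counts as no path
assert not needs_default_path("redis://my-redis:6379/data")  # explicit path is kept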
@@ -714,9 +738,13 @@ class BaseStoreTarget(DataTargetBase):
         start_time=None,
         end_time=None,
         time_column=None,
+        additional_filters=None,
         **kwargs,
     ):
         """return the target data as dataframe"""
+        mlrun.utils.helpers.additional_filters_warning(
+            additional_filters, self.__class__
+        )
         return mlrun.get_dataitem(self.get_target_path()).as_df(
             columns=columns,
             df_module=df_module,

@@ -730,7 +758,7 @@ class BaseStoreTarget(DataTargetBase):
         # options used in spark.read.load(**options)
         raise NotImplementedError()

-    def prepare_spark_df(self, df, key_columns, timestamp_key=None, spark_options=
+    def prepare_spark_df(self, df, key_columns, timestamp_key=None, spark_options=None):
         return df

     def get_dask_options(self):

@@ -961,6 +989,7 @@ class ParquetTarget(BaseStoreTarget):
         start_time=None,
         end_time=None,
         time_column=None,
+        additional_filters=None,
         **kwargs,
     ):
         """return the target data as dataframe"""

@@ -971,6 +1000,7 @@ class ParquetTarget(BaseStoreTarget):
             start_time=start_time,
             end_time=end_time,
             time_column=time_column,
+            additional_filters=transform_list_filters_to_tuple(additional_filters),
             **kwargs,
         )
         if not columns:

@@ -1101,8 +1131,12 @@ class CSVTarget(BaseStoreTarget):
         start_time=None,
         end_time=None,
         time_column=None,
+        additional_filters=None,
         **kwargs,
     ):
+        mlrun.utils.helpers.additional_filters_warning(
+            additional_filters, self.__class__
+        )
         df = super().as_df(
             columns=columns,
             df_module=df_module,

@@ -1209,6 +1243,7 @@ class SnowflakeTarget(BaseStoreTarget):
         start_time=None,
         end_time=None,
         time_column=None,
+        additional_filters=None,
         **kwargs,
     ):
         raise NotImplementedError()

@@ -1275,7 +1310,17 @@ class NoSqlBaseTarget(BaseStoreTarget):
     def get_dask_options(self):
         return {"format": "csv"}

-    def as_df(
+    def as_df(
+        self,
+        columns=None,
+        df_module=None,
+        entities=None,
+        start_time=None,
+        end_time=None,
+        time_column=None,
+        additional_filters=None,
+        **kwargs,
+    ):
         raise NotImplementedError()

     def write_dataframe(

@@ -1390,39 +1435,6 @@ class RedisNoSqlTarget(NoSqlBaseTarget):
     support_spark = True
     writer_step_name = "RedisNoSqlTarget"

-    @property
-    def _target_path_object(self):
-        url = self.path or mlrun.mlconf.redis.url
-        if self._resource and url:
-            parsed_url = urlparse(url)
-            if not parsed_url.path or parsed_url.path == "/":
-                kind_prefix = (
-                    "sets"
-                    if self._resource.kind
-                    == mlrun.common.schemas.ObjectKind.feature_set
-                    else "vectors"
-                )
-                kind = self.kind
-                name = self._resource.metadata.name
-                project = (
-                    self._resource.metadata.project or mlrun.mlconf.default_project
-                )
-                data_prefix = get_default_prefix_for_target(kind).format(
-                    ds_profile_name=parsed_url.netloc,
-                    authority=parsed_url.netloc,
-                    project=project,
-                    kind=kind,
-                    name=name,
-                )
-                if url.startswith("rediss://"):
-                    data_prefix = data_prefix.replace("redis://", "rediss://", 1)
-                if not self.run_id:
-                    version = self._resource.metadata.tag or "latest"
-                    name = f"{name}-{version}"
-                url = f"{data_prefix}/{kind_prefix}/{name}"
-                return TargetPathObject(url, self.run_id, False)
-        return super()._target_path_object
-
     # Fetch server url from the RedisNoSqlTarget::__init__() 'path' parameter.
     # If not set fetch it from 'mlrun.mlconf.redis.url' (MLRUN_REDIS__URL environment variable).
     # Then look for username and password at REDIS_xxx secrets

@@ -1544,7 +1556,17 @@ class StreamTarget(BaseStoreTarget):
             **self.attributes,
         )

-    def as_df(
+    def as_df(
+        self,
+        columns=None,
+        df_module=None,
+        entities=None,
+        start_time=None,
+        end_time=None,
+        time_column=None,
+        additional_filters=None,
+        **kwargs,
+    ):
         raise NotImplementedError()

@@ -1649,7 +1671,17 @@ class KafkaTarget(BaseStoreTarget):
             **attributes,
         )

-    def as_df(
+    def as_df(
+        self,
+        columns=None,
+        df_module=None,
+        entities=None,
+        start_time=None,
+        end_time=None,
+        time_column=None,
+        additional_filters=None,
+        **kwargs,
+    ):
         raise NotImplementedError()

     def purge(self):

@@ -1696,7 +1728,17 @@ class TSDBTarget(BaseStoreTarget):
             **self.attributes,
         )

-    def as_df(
+    def as_df(
+        self,
+        columns=None,
+        df_module=None,
+        entities=None,
+        start_time=None,
+        end_time=None,
+        time_column=None,
+        additional_filters=None,
+        **kwargs,
+    ):
         raise NotImplementedError()

     def write_dataframe(

@@ -1807,11 +1849,16 @@ class DFTarget(BaseStoreTarget):
         self,
         columns=None,
         df_module=None,
+        entities=None,
         start_time=None,
         end_time=None,
         time_column=None,
+        additional_filters=None,
         **kwargs,
     ):
+        mlrun.utils.helpers.additional_filters_warning(
+            additional_filters, self.__class__
+        )
         return select_columns_from_df(
             filter_df_start_end_time(
                 self._df,

@@ -1986,6 +2033,7 @@ class SQLTarget(BaseStoreTarget):
         start_time=None,
         end_time=None,
         time_column=None,
+        additional_filters=None,
         **kwargs,
     ):
         try:

@@ -1994,6 +2042,10 @@ class SQLTarget(BaseStoreTarget):
         except (ModuleNotFoundError, ImportError) as exc:
             self._raise_sqlalchemy_import_error(exc)

+        mlrun.utils.helpers.additional_filters_warning(
+            additional_filters, self.__class__
+        )
+
         db_path, table_name, _, _, _, _ = self._parse_url()
         engine = sqlalchemy.create_engine(db_path)
         parse_dates: Optional[list[str]] = self.attributes.get("parse_dates")

@@ -2083,7 +2135,7 @@ class SQLTarget(BaseStoreTarget):
             raise ValueError(f"Table named {table_name} is not exist")

         elif not table_exists and create_table:
-
+            type_to_sql_type = {
                 int: sqlalchemy.Integer,
                 str: sqlalchemy.String(self.attributes.get("varchar_len")),
                 datetime.datetime: sqlalchemy.dialects.mysql.DATETIME(fsp=6),

@@ -2096,7 +2148,7 @@ class SQLTarget(BaseStoreTarget):
             # creat new table with the given name
             columns = []
             for col, col_type in self.schema.items():
-                col_type_sql =
+                col_type_sql = type_to_sql_type.get(col_type)
                 if col_type_sql is None:
                     raise TypeError(
                         f"'{col_type}' unsupported type for column '{col}'"

@@ -2140,7 +2192,7 @@ kind_to_driver = {
 }


-def _get_target_path(driver, resource, run_id_mode=False):
+def _get_target_path(driver, resource, run_id_mode=False, netloc=None, scheme=""):
     """return the default target path given the resource and target kind"""
     kind = driver.kind
     suffix = driver.suffix

@@ -2157,11 +2209,27 @@ def _get_target_path(driver, resource, run_id_mode=False):
     )
     name = resource.metadata.name
     project = resource.metadata.project or mlrun.mlconf.default_project
-
+
+    default_kind_name = kind
+    if scheme == "ds":
+        # "dsnosql" is not an actual target like Parquet or Redis; rather, it serves
+        # as a placeholder that can be used in any specified target
+        default_kind_name = "dsnosql"
+    if scheme == "redis" or scheme == "rediss":
+        default_kind_name = TargetTypes.redisnosql
+
+    netloc = netloc or ""
+    data_prefix = get_default_prefix_for_target(default_kind_name).format(
+        ds_profile_name=netloc,  # In case of ds profile, set its the name
+        authority=netloc,  # In case of redis, replace {authority} with netloc
         project=project,
         kind=kind,
         name=name,
     )
+
+    if scheme == "rediss":
+        data_prefix = data_prefix.replace("redis://", "rediss://", 1)
+
     # todo: handle ver tag changes, may need to copy files?
     if not run_id_mode:
         version = resource.metadata.tag
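The common thread in this file: every target's as_df() signature gains an additional_filters parameter. Only ParquetTarget forwards the filters (normalized to tuples via transform_list_filters_to_tuple) down to the reader; CSV, DF, and SQL targets call additional_filters_warning() to note the argument is ignored, and the stream/Kafka/TSDB/NoSQL targets still raise NotImplementedError. A hedged usage sketch — the target name and path are hypothetical, and running it requires a reachable data store:

import mlrun.datastore.targets as targets

# ParquetTarget is the only target in this diff that pushes the filters down to
# the parquet reader; filters use (column, operator, value) triplets.
target = targets.ParquetTarget(name="sales", path="s3://my-bucket/sales.parquet")
df = target.as_df(
    additional_filters=[("amount", ">", 100), ("region", "in", ["EU", "US"])],
)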
mlrun/datastore/utils.py
CHANGED
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+import math
 import tarfile
 import tempfile
 import typing

@@ -180,3 +181,44 @@ def get_kafka_brokers_from_dict(options: dict, pop=False) -> typing.Optional[str]:
         FutureWarning,
     )
     return kafka_bootstrap_servers
+
+
+def transform_list_filters_to_tuple(additional_filters):
+    tuple_filters = []
+    if not additional_filters:
+        return tuple_filters
+    validate_additional_filters(additional_filters)
+    for additional_filter in additional_filters:
+        tuple_filters.append(tuple(additional_filter))
+    return tuple_filters
+
+
+def validate_additional_filters(additional_filters):
+    nan_error_message = "using NaN in additional_filters is not supported"
+    if additional_filters in [None, [], ()]:
+        return
+    for filter_tuple in additional_filters:
+        if filter_tuple == () or filter_tuple == []:
+            continue
+        if not isinstance(filter_tuple, (list, tuple)):
+            raise mlrun.errors.MLRunInvalidArgumentError(
+                f"mlrun supports additional_filters only as a list of tuples."
+                f" Current additional_filters: {additional_filters}"
+            )
+        if isinstance(filter_tuple[0], (list, tuple)):
+            raise mlrun.errors.MLRunInvalidArgumentError(
+                f"additional_filters does not support nested list inside filter tuples except in -in- logic."
+                f" Current filter_tuple: {filter_tuple}."
+            )
+        if len(filter_tuple) != 3:
+            raise mlrun.errors.MLRunInvalidArgumentError(
+                f"illegal filter tuple length, {filter_tuple} in additional filters:"
+                f" {additional_filters}"
+            )
+        col_name, op, value = filter_tuple
+        if isinstance(value, float) and math.isnan(value):
+            raise mlrun.errors.MLRunInvalidArgumentError(nan_error_message)
+        elif isinstance(value, (list, tuple)):
+            for sub_value in value:
+                if isinstance(sub_value, float) and math.isnan(sub_value):
+                    raise mlrun.errors.MLRunInvalidArgumentError(nan_error_message)
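Taken together, the two new helpers normalize list-style filters into the tuple form expected by pandas/pyarrow readers and reject malformed or NaN-containing filters. For example (a minimal sketch, assuming this wheel is installed):

import mlrun.errors
from mlrun.datastore.utils import transform_list_filters_to_tuple

print(transform_list_filters_to_tuple([["age", ">", 30]]))  # [('age', '>', 30)]
print(transform_list_filters_to_tuple(None))                # []

try:
    transform_list_filters_to_tuple([["age", ">"]])  # not a (column, op, value) triplet
except mlrun.errors.MLRunInvalidArgumentError as err:
    print("rejected:", err)

try:
    transform_list_filters_to_tuple([["age", "==", float("nan")]])  # NaN unsupported
except mlrun.errors.MLRunInvalidArgumentError as err:
    print("rejected:", err)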
mlrun/db/base.py
CHANGED
@@ -16,6 +16,10 @@ import datetime
 from abc import ABC, abstractmethod
 from typing import Optional, Union

+import mlrun.alerts
+import mlrun.common
+import mlrun.common.formatters
+import mlrun.common.runtimes.constants
 import mlrun.common.schemas
 import mlrun.model_monitoring

@@ -62,7 +66,10 @@ class RunDBInterface(ABC):
         uid: Optional[Union[str, list[str]]] = None,
         project: Optional[str] = None,
         labels: Optional[Union[str, list[str]]] = None,
-        state: Optional[
+        state: Optional[
+            mlrun.common.runtimes.constants.RunStates
+        ] = None,  # Backward compatibility
+        states: Optional[list[mlrun.common.runtimes.constants.RunStates]] = None,
         sort: bool = True,
         last: int = 0,
         iter: bool = False,
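The list_runs change keeps the singular state argument for backward compatibility and adds a plural states filter. A migration sketch — the project name is hypothetical, and the string values assume mlrun's usual RunStates constants such as "completed" and "error":

import mlrun

db = mlrun.get_run_db()

# Old style, still accepted for backward compatibility:
runs = db.list_runs(project="demo", state="completed")

# New style, filtering on several run states at once:
runs = db.list_runs(project="demo", states=["completed", "error"])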
@@ -117,7 +124,18 @@ class RunDBInterface(ABC):
         pass

     @abstractmethod
-    def del_artifact(
+    def del_artifact(
+        self,
+        key,
+        tag="",
+        project="",
+        tree=None,
+        uid=None,
+        deletion_strategy: mlrun.common.schemas.artifact.ArtifactsDeletionStrategies = (
+            mlrun.common.schemas.artifact.ArtifactsDeletionStrategies.metadata_only
+        ),
+        secrets: dict = None,
+    ):
         pass

     @abstractmethod

@@ -251,7 +269,7 @@ class RunDBInterface(ABC):
     def list_projects(
         self,
         owner: str = None,
-        format_: mlrun.common.
+        format_: mlrun.common.formatters.ProjectFormat = mlrun.common.formatters.ProjectFormat.name_only,
         labels: list[str] = None,
         state: mlrun.common.schemas.ProjectState = None,
     ) -> mlrun.common.schemas.ProjectsOutput:

@@ -427,8 +445,8 @@ class RunDBInterface(ABC):
         namespace: str = None,
         timeout: int = 30,
         format_: Union[
-            str, mlrun.common.
-        ] = mlrun.common.
+            str, mlrun.common.formatters.PipelineFormat
+        ] = mlrun.common.formatters.PipelineFormat.summary,
         project: str = None,
     ):
         pass

@@ -442,8 +460,8 @@ class RunDBInterface(ABC):
         page_token: str = "",
         filter_: str = "",
         format_: Union[
-            str, mlrun.common.
-        ] = mlrun.common.
+            str, mlrun.common.formatters.PipelineFormat
+        ] = mlrun.common.formatters.PipelineFormat.metadata_only,
         page_size: int = None,
     ) -> mlrun.common.schemas.PipelinesOutput:
         pass

@@ -543,7 +561,7 @@ class RunDBInterface(ABC):
         end: Optional[str] = None,
         metrics: Optional[list[str]] = None,
         features: bool = False,
-    ):
+    ) -> mlrun.model_monitoring.ModelEndpoint:
         pass

     @abstractmethod

@@ -617,8 +635,8 @@ class RunDBInterface(ABC):
     @abstractmethod
     def store_api_gateway(
         self,
-        project: str,
         api_gateway: mlrun.common.schemas.APIGateway,
+        project: str = None,
     ):
         pass

@@ -664,7 +682,7 @@ class RunDBInterface(ABC):
     def store_alert_config(
         self,
         alert_name: str,
-        alert_data: Union[dict, mlrun.
+        alert_data: Union[dict, mlrun.alerts.alert.AlertConfig],
         project="",
     ):
         pass

@@ -685,6 +703,14 @@ class RunDBInterface(ABC):
     def reset_alert_config(self, alert_name: str, project=""):
         pass

+    @abstractmethod
+    def get_alert_template(self, template_name: str):
+        pass
+
+    @abstractmethod
+    def list_alert_templates(self):
+        pass
+
     @abstractmethod
     def get_builder_status(
         self,

@@ -815,6 +841,24 @@ class RunDBInterface(ABC):
     ) -> None:
         pass

+    @abstractmethod
+    def disable_model_monitoring(
+        self,
+        project: str,
+        delete_resources: bool = True,
+        delete_stream_function: bool = False,
+        delete_histogram_data_drift_app: bool = True,
+        delete_user_applications: bool = False,
+        user_application_list: list[str] = None,
+    ) -> bool:
+        pass
+
+    @abstractmethod
+    def delete_model_monitoring_function(
+        self, project: str, functions: list[str]
+    ) -> bool:
+        pass
+
     @abstractmethod
     def deploy_histogram_data_drift_app(
         self, project: str, image: str = "mlrun/mlrun"
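Because these are @abstractmethod additions to RunDBInterface, any concrete run-DB implementation (httpdb and nopdb are both updated elsewhere in this diff) must now provide them. A minimal conforming stub, for illustration only:

import mlrun.db.base

class MyRunDB(mlrun.db.base.RunDBInterface):
    # ...all pre-existing abstract methods elided for brevity...

    def get_alert_template(self, template_name: str):
        raise NotImplementedError

    def list_alert_templates(self):
        raise NotImplementedError

    def disable_model_monitoring(
        self,
        project: str,
        delete_resources: bool = True,
        delete_stream_function: bool = False,
        delete_histogram_data_drift_app: bool = True,
        delete_user_applications: bool = False,
        user_application_list: list[str] = None,
    ) -> bool:
        raise NotImplementedError

    def delete_model_monitoring_function(self, project: str, functions: list[str]) -> bool:
        raise NotImplementedError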
|