mlrun 1.7.0rc4__py3-none-any.whl → 1.7.0rc6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of mlrun might be problematic.
- mlrun/artifacts/base.py +2 -1
- mlrun/artifacts/plots.py +9 -5
- mlrun/common/constants.py +1 -0
- mlrun/common/schemas/__init__.py +10 -0
- mlrun/common/schemas/api_gateway.py +85 -0
- mlrun/common/schemas/auth.py +2 -2
- mlrun/config.py +19 -4
- mlrun/datastore/sources.py +5 -4
- mlrun/datastore/targets.py +16 -20
- mlrun/db/base.py +16 -0
- mlrun/db/factory.py +1 -1
- mlrun/db/httpdb.py +50 -8
- mlrun/db/nopdb.py +13 -0
- mlrun/launcher/__init__.py +1 -1
- mlrun/launcher/base.py +1 -1
- mlrun/launcher/client.py +1 -1
- mlrun/launcher/factory.py +1 -1
- mlrun/launcher/local.py +1 -1
- mlrun/launcher/remote.py +1 -1
- mlrun/model_monitoring/api.py +6 -12
- mlrun/model_monitoring/application.py +21 -21
- mlrun/model_monitoring/applications/histogram_data_drift.py +130 -40
- mlrun/model_monitoring/batch.py +1 -42
- mlrun/model_monitoring/controller.py +1 -8
- mlrun/model_monitoring/features_drift_table.py +34 -22
- mlrun/model_monitoring/helpers.py +45 -4
- mlrun/model_monitoring/stream_processing.py +2 -0
- mlrun/projects/project.py +229 -16
- mlrun/run.py +70 -74
- mlrun/runtimes/__init__.py +35 -0
- mlrun/runtimes/base.py +15 -11
- mlrun/runtimes/nuclio/__init__.py +1 -0
- mlrun/runtimes/nuclio/api_gateway.py +300 -0
- mlrun/runtimes/nuclio/application/__init__.py +15 -0
- mlrun/runtimes/nuclio/application/application.py +283 -0
- mlrun/runtimes/nuclio/application/reverse_proxy.go +87 -0
- mlrun/runtimes/nuclio/function.py +50 -1
- mlrun/runtimes/pod.py +1 -1
- mlrun/serving/states.py +7 -19
- mlrun/utils/logger.py +2 -2
- mlrun/utils/version/version.json +2 -2
- {mlrun-1.7.0rc4.dist-info → mlrun-1.7.0rc6.dist-info}/METADATA +1 -1
- {mlrun-1.7.0rc4.dist-info → mlrun-1.7.0rc6.dist-info}/RECORD +47 -42
- {mlrun-1.7.0rc4.dist-info → mlrun-1.7.0rc6.dist-info}/WHEEL +1 -1
- {mlrun-1.7.0rc4.dist-info → mlrun-1.7.0rc6.dist-info}/LICENSE +0 -0
- {mlrun-1.7.0rc4.dist-info → mlrun-1.7.0rc6.dist-info}/entry_points.txt +0 -0
- {mlrun-1.7.0rc4.dist-info → mlrun-1.7.0rc6.dist-info}/top_level.txt +0 -0
mlrun/artifacts/base.py
CHANGED
@@ -88,9 +88,10 @@ class ArtifactSpec(ModelObj):
         "db_key",
         "extra_data",
         "unpackaging_instructions",
+        "producer",
     ]

-    _extra_fields = ["annotations", "
+    _extra_fields = ["annotations", "sources", "license", "encoding"]
     _exclude_fields_from_uid_hash = [
         # if the artifact is first created, it will not have a db_key,
         # exclude it so further updates of the artifacts will have the same hash
mlrun/artifacts/plots.py
CHANGED
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import base64
+import typing
 from io import BytesIO

 from deprecated import deprecated
@@ -21,6 +22,9 @@ import mlrun
 from ..utils import dict_to_json
 from .base import Artifact, LegacyArtifact

+if typing.TYPE_CHECKING:
+    from plotly.graph_objs import Figure
+

 class PlotArtifact(Artifact):
     kind = "plot"
@@ -207,10 +211,10 @@ class PlotlyArtifact(Artifact):

     def __init__(
         self,
-        figure=None,
-        key: str = None,
-        target_path: str = None,
-    ):
+        figure: typing.Optional["Figure"] = None,
+        key: typing.Optional[str] = None,
+        target_path: typing.Optional[str] = None,
+    ) -> None:
         """
         Initialize a Plotly artifact with the given figure.

@@ -247,7 +251,7 @@ class PlotlyArtifact(Artifact):
         self._figure = figure
         self.spec.format = "html"

-    def get_body(self):
+    def get_body(self) -> str:
         """
         Get the artifact's body - the Plotly figure's html code.
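For reference, a minimal sketch of using the now-typed constructor (requires the optional plotly dependency; the figure and key below are illustrative):

    import plotly.graph_objs as go

    from mlrun.artifacts.plots import PlotlyArtifact

    # Any plotly Figure works; this one is a trivial example
    figure = go.Figure(data=go.Scatter(y=[1, 3, 2]))
    artifact = PlotlyArtifact(figure=figure, key="my-plot")
    html = artifact.get_body()  # the figure rendered as an HTML string, per the new -> str hint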
mlrun/common/constants.py
CHANGED
mlrun/common/schemas/__init__.py
CHANGED
@@ -14,6 +14,16 @@
 #
 # flake8: noqa - this is until we take care of the F401 violations with respect to __all__ & sphinx

+from .api_gateway import (
+    APIGateway,
+    APIGatewayAuthenticationMode,
+    APIGatewayBasicAuth,
+    APIGatewayMetadata,
+    APIGatewaysOutput,
+    APIGatewaySpec,
+    APIGatewayStatus,
+    APIGatewayUpstream,
+)
 from .artifact import (
     Artifact,
     ArtifactCategories,
mlrun/common/schemas/api_gateway.py
ADDED
@@ -0,0 +1,85 @@
+# Copyright 2023 Iguazio
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import typing
+from typing import Optional
+
+import pydantic
+
+import mlrun.common.types
+
+
+class APIGatewayAuthenticationMode(mlrun.common.types.StrEnum):
+    basic = "basicAuth"
+    none = "none"
+
+    @classmethod
+    def from_str(cls, authentication_mode: str):
+        if authentication_mode == "none":
+            return cls.none
+        elif authentication_mode == "basicAuth":
+            return cls.basic
+        else:
+            raise mlrun.errors.MLRunInvalidArgumentError(
+                f"Authentication mode `{authentication_mode}` is not supported",
+            )
+
+
+class _APIGatewayBaseModel(pydantic.BaseModel):
+    class Config:
+        extra = pydantic.Extra.allow
+
+
+class APIGatewayMetadata(_APIGatewayBaseModel):
+    name: str
+    namespace: Optional[str]
+    labels: Optional[dict] = {}
+
+
+class APIGatewayBasicAuth(_APIGatewayBaseModel):
+    username: str
+    password: str
+
+
+class APIGatewayUpstream(_APIGatewayBaseModel):
+    kind: Optional[str] = "nucliofunction"
+    nucliofunction: dict[str, str]
+    percentage: Optional[int] = 0
+
+
+class APIGatewaySpec(_APIGatewayBaseModel):
+    name: str
+    description: Optional[str]
+    path: Optional[str] = "/"
+    authenticationMode: Optional[APIGatewayAuthenticationMode] = (
+        APIGatewayAuthenticationMode.none
+    )
+    upstreams: list[APIGatewayUpstream]
+    authentication: Optional[dict[str, Optional[APIGatewayBasicAuth]]]
+    host: Optional[str]
+
+
+class APIGatewayStatus(_APIGatewayBaseModel):
+    name: Optional[str]
+    state: Optional[str]
+
+
+class APIGateway(_APIGatewayBaseModel):
+    metadata: APIGatewayMetadata
+    spec: APIGatewaySpec
+    status: Optional[APIGatewayStatus]
+
+
+class APIGatewaysOutput(_APIGatewayBaseModel):
+    api_gateways: typing.Optional[dict[str, APIGateway]] = {}
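For reference, a minimal sketch of building the new schema objects (the gateway and function names are illustrative, and the {"name": ...} shape of the nucliofunction dict is an assumption; per the model above, only metadata.name, spec.name, and spec.upstreams are required):

    import mlrun.common.schemas as schemas

    gateway = schemas.APIGateway(
        metadata=schemas.APIGatewayMetadata(name="my-gateway"),
        spec=schemas.APIGatewaySpec(
            name="my-gateway",
            path="/",
            authenticationMode=schemas.APIGatewayAuthenticationMode.none,
            upstreams=[schemas.APIGatewayUpstream(nucliofunction={"name": "my-func"})],
        ),
    )
    payload = gateway.dict(exclude_none=True)  # what a client would send over the wire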
mlrun/common/schemas/auth.py
CHANGED
@@ -59,7 +59,7 @@ class AuthorizationResourceTypes(mlrun.common.types.StrEnum):
     hub_source = "hub-source"
     workflow = "workflow"
     datastore_profile = "datastore-profile"
-
+    api_gateway = "api-gateway"

     def to_resource_string(
         self,
@@ -94,7 +94,7 @@ class AuthorizationResourceTypes(mlrun.common.types.StrEnum):
         AuthorizationResourceTypes.hub_source: "/marketplace/sources",
         # workflow define how to run a pipeline and can be considered as the specification of a pipeline.
         AuthorizationResourceTypes.workflow: "/projects/{project_name}/workflows/{resource_name}",
-        AuthorizationResourceTypes.
+        AuthorizationResourceTypes.api_gateway: "/projects/{project_name}/api-gateways/{resource_name}",
     }[self].format(project_name=project_name, resource_name=resource_name)
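For reference, the new enum member resolves to the path added in the mapping above; a small sketch (project and gateway names are illustrative):

    from mlrun.common.schemas import AuthorizationResourceTypes

    resource = AuthorizationResourceTypes.api_gateway.to_resource_string(
        project_name="my-project", resource_name="my-gateway"
    )
    print(resource)  # -> /projects/my-project/api-gateways/my-gateway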
mlrun/config.py
CHANGED
@@ -324,7 +324,13 @@ default_config = {
             # optional values (as per https://dev.mysql.com/doc/refman/8.0/en/sql-mode.html#sql-mode-full):
             #
             # if set to "nil" or "none", nothing would be set
-            "modes":
+            "modes": (
+                "STRICT_TRANS_TABLES"
+                ",NO_ZERO_IN_DATE"
+                ",NO_ZERO_DATE"
+                ",ERROR_FOR_DIVISION_BY_ZERO"
+                ",NO_ENGINE_SUBSTITUTION",
+            )
         },
     },
     "jobs": {
@@ -443,7 +449,7 @@ default_config = {
         # pip install <requirement_specifier>, e.g. mlrun==0.5.4, mlrun~=0.5,
         # git+https://github.com/mlrun/mlrun@development. by default uses the version
         "mlrun_version_specifier": "",
-        "kaniko_image": "gcr.io/kaniko-project/executor:v1.
+        "kaniko_image": "gcr.io/kaniko-project/executor:v1.21.1",  # kaniko builder image
        "kaniko_init_container_image": "alpine:3.18",
        # image for kaniko init container when docker registry is ECR
        "kaniko_aws_cli_image": "amazon/aws-cli:2.7.10",
@@ -1348,12 +1354,21 @@ def read_env(env=None, prefix=env_prefix):
    if igz_domain:
        config["ui_url"] = f"https://mlrun-ui.{igz_domain}"

-    if config.get("log_level"):
+    if log_level := config.get("log_level"):
        import mlrun.utils.logger

        # logger created (because of imports mess) before the config is loaded (in tests), therefore we're changing its
        # level manually
-        mlrun.utils.logger.set_logger_level(
+        mlrun.utils.logger.set_logger_level(log_level)
+
+    if log_formatter_name := config.get("log_formatter"):
+        import mlrun.utils.logger
+
+        log_formatter = mlrun.utils.create_formatter_instance(
+            mlrun.utils.FormatterKinds(log_formatter_name)
+        )
+        mlrun.utils.logger.get_handler("default").setFormatter(log_formatter)
+
    # The default function pod resource values are of type str; however, when reading from environment variable numbers,
    # it converts them to type int if contains only number, so we want to convert them to str.
    _convert_resources_to_str(config)
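For reference, a sketch of what the new log_formatter branch does, restated standalone. The names come from the diff above; treating "json" as a valid FormatterKinds value, and MLRUN_LOG_FORMATTER as the corresponding environment variable under the standard prefix, are assumptions:

    # e.g. export MLRUN_LOG_FORMATTER=json before the process starts
    import mlrun.utils

    # Mirrors the new read_env branch: build a formatter and attach it to the
    # logger's default handler
    formatter = mlrun.utils.create_formatter_instance(
        mlrun.utils.FormatterKinds("json")  # assumption: "json" is a defined kind
    )
    mlrun.utils.logger.get_handler("default").setFormatter(formatter)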
mlrun/datastore/sources.py
CHANGED
@@ -204,11 +204,11 @@ class CSVSource(BaseSourceDriver):
         )

     def get_spark_options(self):
-        store, path,
+        store, path, _ = mlrun.store_manager.get_or_create_store(self.path)
         spark_options = store.get_spark_options()
         spark_options.update(
             {
-                "path":
+                "path": store.spark_url + path,
                 "format": "csv",
                 "header": "true",
                 "inferSchema": "true",
@@ -357,7 +357,7 @@ class ParquetSource(BaseSourceDriver):
         )

     def get_spark_options(self):
-        store, path,
+        store, path, _ = mlrun.store_manager.get_or_create_store(self.path)
         spark_options = store.get_spark_options()
         spark_options.update(
             {
@@ -794,7 +794,8 @@ class OnlineSource(BaseSourceDriver):
         explicit_ack = (
             is_explicit_ack_supported(context) and mlrun.mlconf.is_explicit_ack()
         )
-
+        # TODO: Change to AsyncEmitSource once we can drop support for nuclio<1.12.10
+        src_class = storey.SyncEmitSource(
             context=context,
             key_field=self.key_field or key_field,
             full_event=True,
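For reference, the fix above unpacks the three-tuple returned by mlrun.store_manager.get_or_create_store, discarding the third element. A sketch of the call site it repairs (the source name and path are illustrative):

    from mlrun.datastore.sources import CSVSource

    source = CSVSource("raw-data", path="v3io:///projects/demo/data.csv")
    options = source.get_spark_options()
    # options now carries a resolved "path" (store.spark_url + path) plus
    # {"format": "csv", "header": "true", "inferSchema": "true"}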
mlrun/datastore/targets.py
CHANGED
@@ -451,7 +451,7 @@ class BaseStoreTarget(DataTargetBase):
             self.get_target_path(),
             credentials_prefix_secrets,
         )
-        return store, url
+        return store, resolved_store_path, url

     def _get_column_list(self, features, timestamp_key, key_columns, with_type=False):
         result = []
@@ -500,7 +500,7 @@ class BaseStoreTarget(DataTargetBase):
             write_spark_dataframe_with_options(options, df, "overwrite")
         elif hasattr(df, "dask"):
             dask_options = self.get_dask_options()
-            store, target_path = self._get_store_and_path()
+            store, path_in_store, target_path = self._get_store_and_path()
             storage_options = store.get_storage_options()
             df = df.repartition(partition_size="100MB")
             try:
@@ -521,7 +521,7 @@
             except Exception as exc:
                 raise RuntimeError("Failed to write Dask Dataframe") from exc
         else:
-            store, target_path = self._get_store_and_path()
+            store, path_in_store, target_path = self._get_store_and_path()
             target_path = generate_path_with_chunk(self, chunk_id, target_path)
             file_system = store.filesystem
             if file_system.protocol == "file":
@@ -688,7 +688,7 @@
         raise NotImplementedError()

     def purge(self):
-        store, target_path = self._get_store_and_path()
+        store, path_in_store, target_path = self._get_store_and_path()
         store.rm(target_path, recursive=True)

     def as_df(
@@ -868,7 +868,7 @@ class ParquetTarget(BaseStoreTarget):
         for key_column in key_columns:
             tuple_key_columns.append((key_column.name, key_column.value_type))

-        store, target_path = self._get_store_and_path()
+        store, path_in_store, target_path = self._get_store_and_path()

         storage_options = store.get_storage_options()
         if storage_options and self.storage_options:
@@ -921,9 +921,7 @@
         if unit == time_partitioning_granularity:
             break

-        store, path, url =
-            self.get_target_path()
-        )
+        store, path, url = self._get_store_and_path()
         spark_options = store.get_spark_options()
         spark_options.update(
             {
@@ -1042,7 +1040,7 @@ class CSVTarget(BaseStoreTarget):
         column_list = self._get_column_list(
             features=features, timestamp_key=timestamp_key, key_columns=key_columns
         )
-        store, target_path = self._get_store_and_path()
+        store, path_in_store, target_path = self._get_store_and_path()
         graph.add_step(
             name=self.name or "CSVTarget",
             after=after,
@@ -1057,9 +1055,7 @@
         )

     def get_spark_options(self, key_column=None, timestamp_key=None, overwrite=True):
-        store, path, url =
-            self.get_target_path()
-        )
+        store, path, url = self._get_store_and_path()
         spark_options = store.get_spark_options()
         spark_options.update(
             {
@@ -1193,7 +1189,7 @@ class NoSqlBaseTarget(BaseStoreTarget):
         df = df.copy(deep=False)
         access_key = self._get_credential("V3IO_ACCESS_KEY")

-        store, target_path = self._get_store_and_path()
+        store, path_in_store, target_path = self._get_store_and_path()
         storage_options = store.get_storage_options()
         access_key = storage_options.get("v3io_access_key", access_key)

@@ -1215,7 +1211,7 @@ class NoSqlTarget(NoSqlBaseTarget):
     def get_table_object(self):
         from storey import Table, V3ioDriver

-        store, target_path = self._get_store_and_path()
+        store, path_in_store, target_path = self._get_store_and_path()
         endpoint, uri = parse_path(target_path)
         storage_options = store.get_storage_options()
         access_key = storage_options.get("v3io_access_key")
@@ -1227,7 +1223,7 @@ class NoSqlTarget(NoSqlBaseTarget):
         )

     def get_spark_options(self, key_column=None, timestamp_key=None, overwrite=True):
-        store, target_path = self._get_store_and_path()
+        store, path_in_store, target_path = self._get_store_and_path()
         storage_options = store.get_storage_options()
         store_access_key = storage_options.get("v3io_access_key")
         env_access_key = self._secrets.get(
@@ -1239,7 +1235,7 @@ class NoSqlTarget(NoSqlBaseTarget):
             "Spark will disregard the store-provided key."
         )
         spark_options = {
-            "path": store.spark_url +
+            "path": store.spark_url + path_in_store,
             "format": "io.iguaz.v3io.spark.sql.kv",
         }
         if isinstance(key_column, list) and len(key_column) >= 1:
@@ -1332,10 +1328,10 @@ class RedisNoSqlTarget(NoSqlBaseTarget):
     def get_spark_options(self, key_column=None, timestamp_key=None, overwrite=True):
         endpoint, uri = self._get_server_endpoint()
         parsed_endpoint = urlparse(endpoint)
-        store, path = self._get_store_and_path()
+        store, path_in_store, path = self._get_store_and_path()
         return {
             "key.column": "_spark_object_name",
-            "table": "{" +
+            "table": "{" + path_in_store,
             "format": "org.apache.spark.sql.redis",
             "host": parsed_endpoint.hostname,
             "port": parsed_endpoint.port,
@@ -1383,7 +1379,7 @@ class StreamTarget(BaseStoreTarget):
         from storey import V3ioDriver

         key_columns = list(key_columns.keys())
-        store, path = self._get_store_and_path()
+        store, path_in_store, path = self._get_store_and_path()
         if not path:
             raise mlrun.errors.MLRunInvalidArgumentError("StreamTarget requires a path")
         endpoint, uri = parse_path(path)
@@ -1537,7 +1533,7 @@ class TSDBTarget(BaseStoreTarget):
             key_column = [key_column]
         new_index.extend(key_column)

-        store, target_path = self._get_store_and_path()
+        store, path_in_store, target_path = self._get_store_and_path()
         storage_options = store.get_storage_options()
         access_key = storage_options.get("v3io_access_key", access_key)
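For reference, the recurring edit above reflects one contract change: the private helper _get_store_and_path now returns three values instead of two. A sketch of the new unpacking (the target and path are illustrative, and calling the private helper directly is for demonstration only):

    from mlrun.datastore.targets import ParquetTarget

    target = ParquetTarget(name="pq", path="v3io:///projects/demo/out.parquet")
    # previously: store, target_path = target._get_store_and_path()
    store, path_in_store, target_path = target._get_store_and_path()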
mlrun/db/base.py
CHANGED
@@ -616,6 +616,22 @@ class RunDBInterface(ABC):
     ):
         pass

+    @abstractmethod
+    def store_api_gateway(
+        self,
+        project: str,
+        api_gateway: mlrun.common.schemas.APIGateway,
+    ):
+        pass
+
+    @abstractmethod
+    def list_api_gateways(self, project=None) -> mlrun.common.schemas.APIGatewaysOutput:
+        pass
+
+    @abstractmethod
+    def get_api_gateway(self, name, project=None) -> mlrun.common.schemas.APIGateway:
+        pass
+
     def get_builder_status(
         self,
         func: "mlrun.runtimes.BaseRuntime",
mlrun/db/factory.py
CHANGED
mlrun/db/httpdb.py
CHANGED
@@ -33,6 +33,7 @@ import mlrun.common.schemas
 import mlrun.model_monitoring.model_endpoint
 import mlrun.platforms
 import mlrun.projects
+import mlrun.runtimes.nuclio.api_gateway
 from mlrun.errors import MLRunInvalidArgumentError, err_to_str

 from ..artifacts import Artifact
@@ -3369,20 +3370,61 @@ class HTTPRunDB(RunDBInterface):
             body=dict_to_json(authorization_verification_input.dict()),
         )

-    def list_api_gateways(self, project=None):
+    def list_api_gateways(self, project=None) -> mlrun.common.schemas.APIGatewaysOutput:
         """
         Returns a list of Nuclio api gateways

-        :param project: optional str parameter to filter by project, if not passed, default
+        :param project: optional str parameter to filter by project, if not passed, default project value is taken

-        :return:
-            (json example is here
-            https://github.com/nuclio/nuclio/blob/development/docs/reference/api/README.md#listing-all-api-gateways)
+        :return: :py:class:`~mlrun.common.schemas.APIGateways`.
         """
         project = project or config.default_project
         error = "list api gateways"
-        endpoint_path = f"projects/{project}/
-
-        return
+        endpoint_path = f"projects/{project}/api-gateways"
+        response = self.api_call("GET", endpoint_path, error)
+        return mlrun.common.schemas.APIGatewaysOutput(**response.json())
+
+    def get_api_gateway(self, name, project=None) -> mlrun.common.schemas.APIGateway:
+        """
+        Returns an API gateway
+
+        :param name: API gateway name
+        :param project: optional str parameter to filter by project, if not passed, default project value is taken
+
+        :return: :py:class:`~mlrun.common.schemas.APIGateway`.
+        """
+        project = project or config.default_project
+        error = "get api gateway"
+        endpoint_path = f"projects/{project}/api-gateways/{name}"
+        response = self.api_call("GET", endpoint_path, error)
+        return mlrun.common.schemas.APIGateway(**response.json())
+
+    def store_api_gateway(
+        self,
+        api_gateway: Union[
+            mlrun.common.schemas.APIGateway,
+            mlrun.runtimes.nuclio.api_gateway.APIGateway,
+        ],
+        project: Optional[str] = None,
+    ) -> mlrun.common.schemas.APIGateway:
+        """
+        Stores an API Gateway.
+
+        :param api_gateway: :py:class:`~mlrun.runtimes.nuclio.APIGateway`
+            or :py:class:`~mlrun.common.schemas.APIGateway`: API Gateway entity.
+        :param project: project name. Mandatory if api_gateway is mlrun.common.schemas.APIGateway.
+
+        :return: :py:class:`~mlrun.common.schemas.APIGateway`.
+        """
+
+        if isinstance(api_gateway, mlrun.runtimes.nuclio.api_gateway.APIGateway):
+            api_gateway = api_gateway.to_scheme()
+        endpoint_path = f"projects/{project}/api-gateways/{api_gateway.metadata.name}"
+        error = "store api gateways"
+        response = self.api_call(
+            "PUT",
+            endpoint_path,
+            error,
+            json=api_gateway.dict(exclude_unset=True, exclude_none=True),
+        )
+        return mlrun.common.schemas.APIGateway(**response.json())

     def trigger_migrations(self) -> Optional[mlrun.common.schemas.BackgroundTask]:
         """Trigger migrations (will do nothing if no migrations are needed) and wait for them to finish if actually
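For reference, a minimal sketch of the new client-side calls against a running MLRun API (the project and gateway names are illustrative):

    import mlrun

    db = mlrun.get_run_db()

    gateways = db.list_api_gateways(project="my-project")  # APIGatewaysOutput
    gateway = db.get_api_gateway("my-gateway", project="my-project")  # APIGateway

    # store_api_gateway accepts either flavor; `project` is mandatory when
    # passing the mlrun.common.schemas.APIGateway form
    stored = db.store_api_gateway(gateway, project="my-project")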
mlrun/db/nopdb.py
CHANGED
@@ -506,6 +506,19 @@ class NopDB(RunDBInterface):
     ):
         pass

+    def store_api_gateway(
+        self,
+        project: str,
+        api_gateway: mlrun.runtimes.nuclio.APIGateway,
+    ) -> mlrun.common.schemas.APIGateway:
+        pass
+
+    def list_api_gateways(self, project=None):
+        pass
+
+    def get_api_gateway(self, name, project=None):
+        pass
+
     def verify_authorization(
         self,
         authorization_verification_input: mlrun.common.schemas.AuthorizationVerificationInput,
mlrun/launcher/__init__.py
CHANGED
mlrun/launcher/base.py
CHANGED
mlrun/launcher/client.py
CHANGED
mlrun/launcher/factory.py
CHANGED
mlrun/launcher/local.py
CHANGED
mlrun/launcher/remote.py
CHANGED
mlrun/model_monitoring/api.py
CHANGED
@@ -704,8 +704,8 @@ def perform_drift_analysis(
         drift_detected_threshold=drift_threshold,
     )

-    # Drift table
-
+    # Drift table artifact
+    plotly_artifact = FeaturesDriftTablePlot().produce(
         sample_set_statistics=sample_set_statistics,
         inputs_statistics=inputs_statistics,
         metrics=metrics,
@@ -732,7 +732,7 @@ def perform_drift_analysis(
     # Log the different artifacts
     _log_drift_artifacts(
         context=context,
-
+        plotly_artifact=plotly_artifact,
         metrics_per_feature=metrics_per_feature,
         drift_status=drift_status,
         drift_metric=drift_metric,
@@ -742,7 +742,7 @@ def perform_drift_analysis(

 def _log_drift_artifacts(
     context: mlrun.MLClientCtx,
-
+    plotly_artifact: mlrun.artifacts.Artifact,
     metrics_per_feature: dict[str, float],
     drift_status: bool,
     drift_metric: float,
@@ -755,20 +755,14 @@ def _log_drift_artifacts(
     3 - Results of the total drift analysis

     :param context: MLRun context. Will log the artifacts.
-    :param
+    :param plotly_artifact: The plotly artifact.
     :param metrics_per_feature: Dictionary in which the key is a feature name and the value is the drift numerical
         result.
     :param drift_status: Boolean value that represents the final drift analysis result.
     :param drift_metric: The final drift numerical result.
     :param artifacts_tag: Tag to use for all the artifacts resulted from the function.
-
     """
-    context.log_artifact(
-        mlrun.artifacts.Artifact(
-            body=html_plot.encode("utf-8"), format="html", key="drift_table_plot"
-        ),
-        tag=artifacts_tag,
-    )
+    context.log_artifact(plotly_artifact, tag=artifacts_tag)
     context.log_artifact(
         mlrun.artifacts.Artifact(
             body=json.dumps(metrics_per_feature),