mlrun-1.7.0rc48-py3-none-any.whl → mlrun-1.7.0rc49-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mlrun might be problematic.
- mlrun/common/schemas/model_monitoring/constants.py +0 -7
- mlrun/db/httpdb.py +11 -4
- mlrun/model_monitoring/api.py +1 -12
- mlrun/model_monitoring/applications/__init__.py +1 -2
- mlrun/model_monitoring/applications/base.py +2 -182
- mlrun/model_monitoring/applications/context.py +2 -9
- mlrun/model_monitoring/applications/evidently_base.py +0 -74
- mlrun/model_monitoring/applications/histogram_data_drift.py +2 -2
- mlrun/model_monitoring/controller.py +45 -208
- mlrun/projects/project.py +1 -4
- mlrun/runtimes/nuclio/api_gateway.py +6 -0
- mlrun/utils/version/version.json +2 -2
- {mlrun-1.7.0rc48.dist-info → mlrun-1.7.0rc49.dist-info}/METADATA +1 -1
- {mlrun-1.7.0rc48.dist-info → mlrun-1.7.0rc49.dist-info}/RECORD +18 -20
- mlrun/model_monitoring/application.py +0 -19
- mlrun/model_monitoring/evidently_application.py +0 -20
- {mlrun-1.7.0rc48.dist-info → mlrun-1.7.0rc49.dist-info}/LICENSE +0 -0
- {mlrun-1.7.0rc48.dist-info → mlrun-1.7.0rc49.dist-info}/WHEEL +0 -0
- {mlrun-1.7.0rc48.dist-info → mlrun-1.7.0rc49.dist-info}/entry_points.txt +0 -0
- {mlrun-1.7.0rc48.dist-info → mlrun-1.7.0rc49.dist-info}/top_level.txt +0 -0
mlrun/common/schemas/model_monitoring/constants.py
CHANGED

@@ -105,15 +105,8 @@ class ApplicationEvent:
     APPLICATION_NAME = "application_name"
     START_INFER_TIME = "start_infer_time"
     END_INFER_TIME = "end_infer_time"
-    LAST_REQUEST = "last_request"
     ENDPOINT_ID = "endpoint_id"
     OUTPUT_STREAM_URI = "output_stream_uri"
-    MLRUN_CONTEXT = "mlrun_context"
-
-    # Deprecated fields - TODO : delete in 1.9.0 (V1 app deprecation)
-    SAMPLE_PARQUET_PATH = "sample_parquet_path"
-    CURRENT_STATS = "current_stats"
-    FEATURE_STATS = "feature_stats"


 class WriterEvent(MonitoringStrEnum):
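After this change, ApplicationEvent carries only the fields the controller still forwards to monitoring applications: application_name, start_infer_time, end_infer_time, endpoint_id, and output_stream_uri. A minimal sketch of such an event payload, with made-up values, might look like this:

# Illustrative only: the remaining ApplicationEvent keys, with made-up values.
event = {
    "application_name": "my-monitoring-app",
    "start_infer_time": "2024-01-01 00:00:00.000000",
    "end_infer_time": "2024-01-01 01:00:00.000000",
    "endpoint_id": "1234abcd",
    "output_stream_uri": "v3io:///projects/my-project/model-endpoints/stream",
}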
mlrun/db/httpdb.py
CHANGED
@@ -2754,7 +2754,7 @@ class HTTPRunDB(RunDBInterface):
         deletion_strategy: Union[
             str, mlrun.common.schemas.DeletionStrategy
         ] = mlrun.common.schemas.DeletionStrategy.default(),
-    ):
+    ) -> None:
         """Delete a project.

         :param name: Name of the project to delete.
@@ -2773,7 +2773,7 @@ class HTTPRunDB(RunDBInterface):
             "DELETE", f"projects/{name}", error_message, headers=headers, version="v2"
         )
         if response.status_code == http.HTTPStatus.ACCEPTED:
-            logger.info("
+            logger.info("Waiting for project to be deleted", project_name=name)
             background_task = mlrun.common.schemas.BackgroundTask(**response.json())
             background_task = self._wait_for_background_task_to_reach_terminal_state(
                 background_task.metadata.name
@@ -2783,10 +2783,17 @@ class HTTPRunDB(RunDBInterface):
                 == mlrun.common.schemas.BackgroundTaskState.succeeded
             ):
                 logger.info("Project deleted", project_name=name)
-
+            elif (
+                background_task.status.state
+                == mlrun.common.schemas.BackgroundTaskState.failed
+            ):
+                logger.error(
+                    "Project deletion failed",
+                    project_name=name,
+                    error=background_task.status.error,
+                )
         elif response.status_code == http.HTTPStatus.NO_CONTENT:
             logger.info("Project deleted", project_name=name)
-            return

     def store_project(
         self,
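From the caller's side the shape of the API is unchanged; delete_project now always returns None, and a failed background deletion task is reported through logger.error rather than by an early return. A minimal usage sketch (the project name is made up):

import mlrun

# Minimal sketch: delete a project and let the client wait on the
# server-side background deletion task; failures are now logged, not returned.
db = mlrun.get_run_db()
db.delete_project("my-project")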
mlrun/model_monitoring/api.py
CHANGED
@@ -24,7 +24,6 @@ import mlrun.artifacts
 import mlrun.common.helpers
 import mlrun.common.schemas.model_monitoring.constants as mm_constants
 import mlrun.feature_store
-import mlrun.model_monitoring.application
 import mlrun.model_monitoring.applications as mm_app
 import mlrun.serving
 from mlrun.data_types.infer import InferOptions, get_df_stats
@@ -561,8 +560,7 @@ def _create_model_monitoring_function_base(
     func: typing.Union[str, None] = None,
     application_class: typing.Union[
         str,
-
-        mm_app.ModelMonitoringApplicationBaseV2,
+        mm_app.ModelMonitoringApplicationBase,
         None,
     ] = None,
     name: typing.Optional[str] = None,
@@ -576,15 +574,6 @@ def _create_model_monitoring_function_base(
     Note: this is an internal API only.
     This function does not set the labels or mounts v3io.
     """
-    if isinstance(
-        application_class,
-        mlrun.model_monitoring.application.ModelMonitoringApplicationBase,
-    ):
-        warnings.warn(
-            "The `ModelMonitoringApplicationBase` class is deprecated from version 1.7.0, "
-            "please use `ModelMonitoringApplicationBaseV2`. It will be removed in 1.9.0.",
-            FutureWarning,
-        )
     if name in mm_constants._RESERVED_FUNCTION_NAMES:
         raise mlrun.errors.MLRunInvalidArgumentError(
             "An application cannot have the following names: "
mlrun/model_monitoring/applications/__init__.py
CHANGED

@@ -13,12 +13,11 @@
 # limitations under the License.
 #

-from .base import ModelMonitoringApplicationBase
+from .base import ModelMonitoringApplicationBase
 from .context import MonitoringApplicationContext
 from .evidently_base import (
     _HAS_EVIDENTLY,
     SUPPORTED_EVIDENTLY_VERSION,
     EvidentlyModelMonitoringApplicationBase,
-    EvidentlyModelMonitoringApplicationBaseV2,
 )
 from .results import ModelMonitoringApplicationMetric, ModelMonitoringApplicationResult
mlrun/model_monitoring/applications/base.py
CHANGED

@@ -13,19 +13,14 @@
 # limitations under the License.

 from abc import ABC, abstractmethod
-from typing import Any, Union
+from typing import Any, Union

-import numpy as np
-import pandas as pd
-from deprecated import deprecated
-
-import mlrun
 import mlrun.model_monitoring.applications.context as mm_context
 import mlrun.model_monitoring.applications.results as mm_results
 from mlrun.serving.utils import MonitoringApplicationToDict


-class
+class ModelMonitoringApplicationBase(MonitoringApplicationToDict, ABC):
     """
     A base class for a model monitoring application.
     Inherit from this class to create a custom model monitoring application.
@@ -111,178 +106,3 @@ class ModelMonitoringApplicationBaseV2(MonitoringApplicationToDict, ABC):
             each metric name is the key and the metric value is the corresponding value).
         """
         raise NotImplementedError
-
-
-# TODO: Remove in 1.9.0
-@deprecated(
-    version="1.7.0",
-    reason="The `ModelMonitoringApplicationBase` class is deprecated from "
-    "version 1.7.0 and will be removed in version 1.9.0. "
-    "Use `ModelMonitoringApplicationBaseV2` as your application's base class.",
-)
-class ModelMonitoringApplicationBase(MonitoringApplicationToDict, ABC):
-    """
-    A base class for a model monitoring application.
-    Inherit from this class to create a custom model monitoring application.
-
-    example for very simple custom application::
-
-        class MyApp(ApplicationBase):
-            def do_tracking(
-                self,
-                sample_df_stats: mlrun.common.model_monitoring.helpers.FeatureStats,
-                feature_stats: mlrun.common.model_monitoring.helpers.FeatureStats,
-                start_infer_time: pd.Timestamp,
-                end_infer_time: pd.Timestamp,
-                schedule_time: pd.Timestamp,
-                latest_request: pd.Timestamp,
-                endpoint_id: str,
-                output_stream_uri: str,
-            ) -> ModelMonitoringApplicationResult:
-                self.context.log_artifact(
-                    TableArtifact(
-                        "sample_df_stats", df=self.dict_to_histogram(sample_df_stats)
-                    )
-                )
-                return ModelMonitoringApplicationResult(
-                    name="data_drift_test",
-                    value=0.5,
-                    kind=mm_constant.ResultKindApp.data_drift,
-                    status=mm_constant.ResultStatusApp.detected,
-                )
-
-
-    """
-
-    kind = "monitoring_application"
-
-    def do(
-        self, monitoring_context: mm_context.MonitoringApplicationContext
-    ) -> tuple[
-        list[mm_results.ModelMonitoringApplicationResult],
-        mm_context.MonitoringApplicationContext,
-    ]:
-        """
-        Process the monitoring event and return application results.
-
-        :param monitoring_context: (MonitoringApplicationContext) The monitoring context to process.
-        :returns: A tuple of:
-            [0] = list of application results that can be either from type
-            `ModelMonitoringApplicationResult` or from type
-            `ModelMonitoringApplicationResult`.
-            [1] = the original application event, wrapped in `MonitoringApplicationContext`
-            object
-        """
-        resolved_event = self._resolve_event(monitoring_context)
-        if not (
-            hasattr(self, "context") and isinstance(self.context, mlrun.MLClientCtx)
-        ):
-            self._lazy_init(monitoring_context)
-        results = self.do_tracking(*resolved_event)
-        results = results if isinstance(results, list) else [results]
-        return results, monitoring_context
-
-    def _lazy_init(self, monitoring_context: mm_context.MonitoringApplicationContext):
-        self.context = cast(mlrun.MLClientCtx, monitoring_context)
-
-    @abstractmethod
-    def do_tracking(
-        self,
-        application_name: str,
-        sample_df_stats: pd.DataFrame,
-        feature_stats: pd.DataFrame,
-        sample_df: pd.DataFrame,
-        start_infer_time: pd.Timestamp,
-        end_infer_time: pd.Timestamp,
-        latest_request: pd.Timestamp,
-        endpoint_id: str,
-        output_stream_uri: str,
-    ) -> Union[
-        mm_results.ModelMonitoringApplicationResult,
-        list[mm_results.ModelMonitoringApplicationResult],
-    ]:
-        """
-        Implement this method with your custom monitoring logic.
-
-        :param application_name: (str) the app name
-        :param sample_df_stats: (pd.DataFrame) The new sample distribution.
-        :param feature_stats: (pd.DataFrame) The train sample distribution.
-        :param sample_df: (pd.DataFrame) The new sample DataFrame.
-        :param start_infer_time: (pd.Timestamp) Start time of the monitoring schedule.
-        :param end_infer_time: (pd.Timestamp) End time of the monitoring schedule.
-        :param latest_request: (pd.Timestamp) Timestamp of the latest request on this endpoint_id.
-        :param endpoint_id: (str) ID of the monitored model endpoint
-        :param output_stream_uri: (str) URI of the output stream for results
-
-        :returns: (ModelMonitoringApplicationResult) or
-            (list[ModelMonitoringApplicationResult]) of the application results.
-        """
-        raise NotImplementedError
-
-    @classmethod
-    def _resolve_event(
-        cls,
-        monitoring_context: mm_context.MonitoringApplicationContext,
-    ) -> tuple[
-        str,
-        pd.DataFrame,
-        pd.DataFrame,
-        pd.DataFrame,
-        pd.Timestamp,
-        pd.Timestamp,
-        pd.Timestamp,
-        str,
-        str,
-    ]:
-        """
-        Converting the event into a single tuple that will be used for passing the event arguments to the running
-        application
-
-        :param monitoring_context: (MonitoringApplicationContext) The monitoring context to process.
-
-        :return: A tuple of:
-            [0] = (str) application name
-            [1] = (pd.DataFrame) current input statistics
-            [2] = (pd.DataFrame) train statistics
-            [3] = (pd.DataFrame) current input data
-            [4] = (pd.Timestamp) start time of the monitoring schedule
-            [5] = (pd.Timestamp) end time of the monitoring schedule
-            [6] = (pd.Timestamp) timestamp of the latest request
-            [7] = (str) endpoint id
-            [8] = (str) output stream uri
-        """
-        return (
-            monitoring_context.application_name,
-            cls.dict_to_histogram(monitoring_context.sample_df_stats),
-            cls.dict_to_histogram(monitoring_context.feature_stats),
-            monitoring_context.sample_df,
-            monitoring_context.start_infer_time,
-            monitoring_context.end_infer_time,
-            monitoring_context.latest_request,
-            monitoring_context.endpoint_id,
-            monitoring_context.output_stream_uri,
-        )
-
-    @staticmethod
-    def dict_to_histogram(
-        histogram_dict: mlrun.common.model_monitoring.helpers.FeatureStats,
-    ) -> pd.DataFrame:
-        """
-        Convert histogram dictionary to pandas DataFrame with feature histograms as columns
-
-        :param histogram_dict: Histogram dictionary
-
-        :returns: Histogram dataframe
-        """
-
-        # Create a dictionary with feature histograms as values
-        histograms = {}
-        for feature, stats in histogram_dict.items():
-            if "hist" in stats:
-                # Normalize to probability distribution of each feature
-                histograms[feature] = np.array(stats["hist"][0]) / stats["count"]
-
-        # Convert the dictionary to pandas DataFrame
-        histograms = pd.DataFrame(histograms)
-
-        return histograms
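With the V1 class removed, ModelMonitoringApplicationBase now names what was previously ModelMonitoringApplicationBaseV2. A minimal sketch of an application written against the consolidated class, assuming the former V2 do_tracking signature that receives a MonitoringApplicationContext (the class name and result values below are made up):

import mlrun.common.schemas.model_monitoring.constants as mm_constants
from mlrun.model_monitoring.applications import (
    ModelMonitoringApplicationBase,
    ModelMonitoringApplicationResult,
    MonitoringApplicationContext,
)


class MyDriftApp(ModelMonitoringApplicationBase):
    # Hypothetical application: returns a fixed drift result for every window.
    def do_tracking(
        self, monitoring_context: MonitoringApplicationContext
    ) -> ModelMonitoringApplicationResult:
        # The context object carries the window metadata (start_infer_time,
        # end_infer_time, endpoint_id) that the removed V1 plumbing used to
        # unpack into positional arguments.
        return ModelMonitoringApplicationResult(
            name="data_drift_test",
            value=0.5,
            kind=mm_constants.ResultKindApp.data_drift,
            status=mm_constants.ResultStatusApp.detected,
        )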
mlrun/model_monitoring/applications/context.py
CHANGED

@@ -98,9 +98,6 @@ class MonitoringApplicationContext:
         self.end_infer_time = pd.Timestamp(
             cast(str, event.get(mm_constants.ApplicationEvent.END_INFER_TIME))
         )
-        self.latest_request = pd.Timestamp(
-            cast(str, event.get(mm_constants.ApplicationEvent.LAST_REQUEST))
-        )
         self.endpoint_id = cast(
             str, event.get(mm_constants.ApplicationEvent.ENDPOINT_ID)
         )
@@ -108,12 +105,8 @@ class MonitoringApplicationContext:
             str, event.get(mm_constants.ApplicationEvent.OUTPUT_STREAM_URI)
         )

-        self._feature_stats: Optional[FeatureStats] =
-
-        )
-        self._sample_df_stats: Optional[FeatureStats] = json.loads(
-            event.get(mm_constants.ApplicationEvent.CURRENT_STATS, "{}")
-        )
+        self._feature_stats: Optional[FeatureStats] = None
+        self._sample_df_stats: Optional[FeatureStats] = None

         # Default labels for the artifacts
         self._default_labels = self._get_default_labels()
mlrun/model_monitoring/applications/evidently_base.py
CHANGED

@@ -18,7 +18,6 @@ from abc import ABC

 import pandas as pd
 import semver
-from deprecated import deprecated

 import mlrun.model_monitoring.applications.base as mm_base
 import mlrun.model_monitoring.applications.context as mm_context
@@ -64,13 +63,6 @@ if _HAS_EVIDENTLY:
     from evidently.utils.dashboard import TemplateParams, file_html_template


-# TODO: Remove in 1.9.0
-@deprecated(
-    version="1.7.0",
-    reason="The `EvidentlyModelMonitoringApplicationBase` class is deprecated from "
-    "version 1.7.0 and will be removed in version 1.9.0. "
-    "Use `EvidentlyModelMonitoringApplicationBaseV2` as your application's base class.",
-)
 class EvidentlyModelMonitoringApplicationBase(
     mm_base.ModelMonitoringApplicationBase, ABC
 ):
@@ -85,72 +77,6 @@ class EvidentlyModelMonitoringApplicationBase(
         :param evidently_workspace_path: (str) The path to the Evidently workspace.
         :param evidently_project_id: (str) The ID of the Evidently project.

-        """
-        if not _HAS_EVIDENTLY:
-            raise ModuleNotFoundError("Evidently is not installed - the app cannot run")
-        self.evidently_workspace = Workspace.create(evidently_workspace_path)
-        self.evidently_project_id = evidently_project_id
-        self.evidently_project = self.evidently_workspace.get_project(
-            evidently_project_id
-        )
-
-    def log_evidently_object(
-        self, evidently_object: "Display", artifact_name: str
-    ) -> None:
-        """
-        Logs an Evidently report or suite as an artifact.
-
-        :param evidently_object: (Display) The Evidently display to log, e.g. a report or a test suite object.
-        :param artifact_name: (str) The name for the logged artifact.
-        """
-        evidently_object_html = evidently_object.get_html()
-        self.context.log_artifact(
-            artifact_name, body=evidently_object_html.encode("utf-8"), format="html"
-        )
-
-    def log_project_dashboard(
-        self,
-        timestamp_start: pd.Timestamp,
-        timestamp_end: pd.Timestamp,
-        artifact_name: str = "dashboard",
-    ):
-        """
-        Logs an Evidently project dashboard.
-
-        :param timestamp_start: (pd.Timestamp) The start timestamp for the dashboard data.
-        :param timestamp_end: (pd.Timestamp) The end timestamp for the dashboard data.
-        :param artifact_name: (str) The name for the logged artifact.
-        """
-
-        dashboard_info = self.evidently_project.build_dashboard_info(
-            timestamp_start, timestamp_end
-        )
-        template_params = TemplateParams(
-            dashboard_id="pd_" + str(uuid.uuid4()).replace("-", ""),
-            dashboard_info=dashboard_info,
-            additional_graphs={},
-        )
-
-        dashboard_html = file_html_template(params=template_params)
-        self.context.log_artifact(
-            artifact_name, body=dashboard_html.encode("utf-8"), format="html"
-        )
-
-
-class EvidentlyModelMonitoringApplicationBaseV2(
-    mm_base.ModelMonitoringApplicationBaseV2, ABC
-):
-    def __init__(
-        self, evidently_workspace_path: str, evidently_project_id: "STR_UUID"
-    ) -> None:
-        """
-        A class for integrating Evidently for mlrun model monitoring within a monitoring application.
-        Note: evidently is not installed by default in the mlrun/mlrun image.
-        It must be installed separately to use this class.
-
-        :param evidently_workspace_path: (str) The path to the Evidently workspace.
-        :param evidently_project_id: (str) The ID of the Evidently project.
-
         """

         # TODO : more then one project (mep -> project)
mlrun/model_monitoring/applications/histogram_data_drift.py
CHANGED

@@ -31,7 +31,7 @@ from mlrun.common.schemas.model_monitoring.constants import (
     ResultStatusApp,
 )
 from mlrun.model_monitoring.applications import (
-
+    ModelMonitoringApplicationBase,
 )
 from mlrun.model_monitoring.metrics.histogram_distance import (
     HellingerDistance,
@@ -87,7 +87,7 @@ class DataDriftClassifier:
         return ResultStatusApp.no_detection


-class HistogramDataDriftApplication(
+class HistogramDataDriftApplication(ModelMonitoringApplicationBase):
     """
     MLRun's default data drift application for model monitoring.

mlrun/model_monitoring/controller.py
CHANGED

@@ -15,28 +15,22 @@
 import concurrent.futures
 import datetime
 import json
-import multiprocessing
 import os
 import re
 from collections.abc import Iterator
-from typing import
+from typing import NamedTuple, Optional, Union, cast

 import nuclio

 import mlrun
 import mlrun.common.schemas.model_monitoring.constants as mm_constants
 import mlrun.data_types.infer
-import mlrun.feature_store as fstore
 import mlrun.model_monitoring.db.stores
-from mlrun.common.model_monitoring.helpers import FeatureStats, pad_features_hist
 from mlrun.datastore import get_stream_pusher
-from mlrun.datastore.targets import ParquetTarget
 from mlrun.errors import err_to_str
 from mlrun.model_monitoring.helpers import (
     _BatchDict,
     batch_dict2timedelta,
-    calculate_inputs_statistics,
-    get_monitoring_parquet_path,
     get_stream_path,
 )
 from mlrun.utils import datetime_now, logger
@@ -292,15 +286,9 @@ class MonitoringApplicationController:
         )

         self.model_monitoring_access_key = self._get_model_monitoring_access_key()
-        self.
-        self.
-            kind=mm_constants.FileTargetKind.APPS_PARQUET,
+        self.tsdb_connector = mlrun.model_monitoring.get_tsdb_connector(
+            project=self.project
         )
-        self.storage_options = None
-        if not mlrun.mlconf.is_ce_mode():
-            self._initialize_v3io_configurations()
-        elif self.parquet_directory.startswith("s3://"):
-            self.storage_options = mlrun.mlconf.get_s3_storage_options()

     @staticmethod
     def _get_model_monitoring_access_key() -> Optional[str]:
@@ -310,12 +298,6 @@ class MonitoringApplicationController:
         access_key = mlrun.mlconf.get_v3io_access_key()
         return access_key

-    def _initialize_v3io_configurations(self) -> None:
-        self.storage_options = dict(
-            v3io_access_key=self.model_monitoring_access_key,
-            v3io_api=mlrun.mlconf.v3io_api,
-        )
-
     def run(self) -> None:
         """
         Main method for run all the relevant monitoring applications on each endpoint.
@@ -367,11 +349,8 @@ class MonitoringApplicationController:
             )
             return
         # Initialize a process pool that will be used to run each endpoint applications on a dedicated process
-        with concurrent.futures.
+        with concurrent.futures.ThreadPoolExecutor(
             max_workers=min(len(endpoints), 10),
-            # On Linux, the default is "fork" (this is set to change in Python 3.14), which inherits the current heap
-            # and resources (such as sockets), which is not what we want (ML-7160)
-            mp_context=multiprocessing.get_context("spawn"),
         ) as pool:
             for endpoint in endpoints:
                 if (
@@ -395,13 +374,10 @@ class MonitoringApplicationController:
                         applications_names=applications_names,
                         batch_window_generator=self._batch_window_generator,
                         project=self.project,
-                        parquet_directory=self.parquet_directory,
-                        storage_options=self.storage_options,
                         model_monitoring_access_key=self.model_monitoring_access_key,
+                        tsdb_connector=self.tsdb_connector,
                     )

-        self._delete_old_parquet(endpoints=endpoints)
-
     @classmethod
     def model_endpoint_process(
         cls,
@@ -409,9 +385,8 @@ class MonitoringApplicationController:
         applications_names: list[str],
         batch_window_generator: _BatchWindowGenerator,
         project: str,
-        parquet_directory: str,
-        storage_options: dict,
         model_monitoring_access_key: str,
+        tsdb_connector: mlrun.model_monitoring.db.tsdb.TSDBConnector,
     ) -> None:
         """
         Process a model endpoint and trigger the monitoring applications. This function running on different process
@@ -422,16 +397,13 @@ class MonitoringApplicationController:
         :param applications_names: (list[str]) List of application names to push results to.
         :param batch_window_generator: (_BatchWindowGenerator) An object that generates _BatchWindow objects.
         :param project: (str) Project name.
-        :param parquet_directory: (str) Directory to store application parquet files
-        :param storage_options: (dict) Storage options for writing ParquetTarget.
         :param model_monitoring_access_key: (str) Access key to apply the model monitoring process.
+        :param tsdb_connector: (mlrun.model_monitoring.db.tsdb.TSDBConnector) TSDB connector
         """
         endpoint_id = endpoint[mm_constants.EventFieldType.UID]
+        # if false the endpoint represent batch infer step.
+        has_stream = endpoint[mm_constants.EventFieldType.STREAM_PATH] != ""
         try:
-            m_fs = fstore.get_feature_set(
-                endpoint[mm_constants.EventFieldType.FEATURE_SET_URI]
-            )
-
             for application in applications_names:
                 batch_window = batch_window_generator.get_batch_window(
                     project=project,
@@ -439,158 +411,70 @@ class MonitoringApplicationController:
                     application=application,
                     first_request=endpoint[mm_constants.EventFieldType.FIRST_REQUEST],
                     last_request=endpoint[mm_constants.EventFieldType.LAST_REQUEST],
-                    has_stream=
+                    has_stream=has_stream,
                 )

                 for start_infer_time, end_infer_time in batch_window.get_intervals():
-
-
-
-
-
+                    prediction_metric = tsdb_connector.read_predictions(
+                        endpoint_id=endpoint_id,
+                        start=start_infer_time,
+                        end=end_infer_time,
+                    )
+                    if not prediction_metric.data and has_stream:
+                        logger.info(
+                            "No data found for the given interval",
+                            start=start_infer_time,
+                            end=end_infer_time,
+                            endpoint_id=endpoint_id,
+                        )
+                    else:
+                        logger.info(
+                            "Data found for the given interval",
+                            start=start_infer_time,
+                            end=end_infer_time,
                             endpoint_id=endpoint_id,
+                        )
+                        cls._push_to_applications(
                             start_infer_time=start_infer_time,
                             end_infer_time=end_infer_time,
-
-
-
-
-
-                    df = offline_response.to_dataframe()
-                    parquet_target_path = offline_response.vector.get_target_path()
-
-                    if len(df) == 0:
-                        logger.info(
-                            "During this time window, the endpoint has not received any data",
-                            endpoint=endpoint[mm_constants.EventFieldType.UID],
-                            start_time=start_infer_time,
-                            end_time=end_infer_time,
-                        )
-                        continue
-
-            except FileNotFoundError:
-                logger.warn(
-                    "No parquets were written yet",
-                    endpoint=endpoint[mm_constants.EventFieldType.UID],
+                            endpoint_id=endpoint_id,
+                            project=project,
+                            applications_names=[application],
+                            model_monitoring_access_key=model_monitoring_access_key,
                         )
-                continue
-
-            # Get the timestamp of the latest request:
-            latest_request = df[mm_constants.EventFieldType.TIMESTAMP].iloc[-1]
-
-            # Get the feature stats from the model endpoint for reference data
-            feature_stats = json.loads(
-                endpoint[mm_constants.EventFieldType.FEATURE_STATS]
-            )
-
-            # Pad the original feature stats to accommodate current
-            # data out of the original range (unless already padded)
-            pad_features_hist(FeatureStats(feature_stats))
-
-            # Get the current stats:
-            current_stats = calculate_inputs_statistics(
-                sample_set_statistics=feature_stats, inputs=df
-            )
-            # end - TODO : delete in 1.9.0 (V1 app deprecation)
-            cls._push_to_applications(
-                current_stats=current_stats,
-                feature_stats=feature_stats,
-                start_infer_time=start_infer_time,
-                end_infer_time=end_infer_time,
-                endpoint_id=endpoint_id,
-                latest_request=latest_request,
-                project=project,
-                applications_names=[application],
-                model_monitoring_access_key=model_monitoring_access_key,
-                parquet_target_path=parquet_target_path,
-            )
         except Exception:
             logger.exception(
                 "Encountered an exception",
                 endpoint_id=endpoint[mm_constants.EventFieldType.UID],
             )

-    def _delete_old_parquet(self, endpoints: list[dict[str, Any]], days: int = 1):
-        """
-        Delete application parquets older than the argument days.
-
-        :param endpoints: A list of dictionaries of model endpoints records.
-        """
-        if self.parquet_directory.startswith("v3io:///"):
-            # create fs with access to the user side (under projects)
-            store, _, _ = mlrun.store_manager.get_or_create_store(
-                self.parquet_directory,
-                {"V3IO_ACCESS_KEY": self.model_monitoring_access_key},
-            )
-            fs = store.filesystem
-
-            # calculate time threshold (keep only files from the last 24 hours)
-            time_to_keep = (
-                datetime.datetime.now(tz=datetime.timezone.utc)
-                - datetime.timedelta(days=days)
-            ).timestamp()
-
-            for endpoint in endpoints:
-                try:
-                    apps_parquet_directories = fs.listdir(
-                        path=f"{self.parquet_directory}"
-                        f"/key={endpoint[mm_constants.EventFieldType.UID]}"
-                    )
-                    for directory in apps_parquet_directories:
-                        if directory["mtime"] < time_to_keep:
-                            # Delete files
-                            fs.rm(path=directory["name"], recursive=True)
-                            # Delete directory
-                            fs.rmdir(path=directory["name"])
-                except FileNotFoundError:
-                    logger.info(
-                        "Application parquet directory is empty, "
-                        "probably parquets have not yet been created for this app",
-                        endpoint=endpoint[mm_constants.EventFieldType.UID],
-                        path=f"{self.parquet_directory}"
-                        f"/key={endpoint[mm_constants.EventFieldType.UID]}",
-                    )
-
     @staticmethod
     def _push_to_applications(
-
-
-
-
-
-
-        project,
-        applications_names,
-        model_monitoring_access_key,
-        parquet_target_path,
+        start_infer_time: datetime.datetime,
+        end_infer_time: datetime.datetime,
+        endpoint_id: str,
+        project: str,
+        applications_names: list[str],
+        model_monitoring_access_key: str,
     ):
         """
         Pushes data to multiple stream applications.

-        :param
-        :param
-        :param
-        :param
-        :param
-        :param
-        :param project: mlrun Project name.
-        :param applications_names: List of application names to which data will be pushed.
+        :param start_infer_time: The beginning of the infer interval window.
+        :param end_infer_time: The end of the infer interval window.
+        :param endpoint_id: Identifier for the model endpoint.
+        :param project: mlrun Project name.
+        :param applications_names: List of application names to which data will be pushed.
+        :param model_monitoring_access_key: Access key to apply the model monitoring process.

         """
-
         data = {
-            mm_constants.ApplicationEvent.CURRENT_STATS: json.dumps(current_stats),
-            mm_constants.ApplicationEvent.FEATURE_STATS: json.dumps(feature_stats),
-            mm_constants.ApplicationEvent.SAMPLE_PARQUET_PATH: parquet_target_path,
             mm_constants.ApplicationEvent.START_INFER_TIME: start_infer_time.isoformat(
                 sep=" ", timespec="microseconds"
            ),
             mm_constants.ApplicationEvent.END_INFER_TIME: end_infer_time.isoformat(
                 sep=" ", timespec="microseconds"
             ),
-            mm_constants.ApplicationEvent.LAST_REQUEST: latest_request.isoformat(
-                sep=" ", timespec="microseconds"
-            ),
             mm_constants.ApplicationEvent.ENDPOINT_ID: endpoint_id,
             mm_constants.ApplicationEvent.OUTPUT_STREAM_URI: get_stream_path(
                 project=project,
@@ -608,53 +492,6 @@ class MonitoringApplicationController:
             [data]
         )

-    @staticmethod
-    def _get_sample_df(
-        feature_set: mlrun.common.schemas.FeatureSet,
-        endpoint_id: str,
-        start_infer_time: datetime.datetime,
-        end_infer_time: datetime.datetime,
-        parquet_directory: str,
-        storage_options: dict,
-        application_name: str,
-    ) -> mlrun.feature_store.OfflineVectorResponse:
-        """
-        Retrieves a sample DataFrame of the current input according to the provided infer interval window.
-
-        :param feature_set: The main feature set.
-        :param endpoint_id: Identifier for the model endpoint.
-        :param start_infer_time: The beginning of the infer interval window.
-        :param end_infer_time: The end of the infer interval window.
-        :param parquet_directory: Directory where Parquet files are stored.
-        :param storage_options: Storage options for accessing the data.
-        :param application_name: Current application name.
-
-        :return: OfflineVectorResponse that can be used for generating a sample DataFrame for the specified endpoint.
-
-        """
-        features = [f"{feature_set.metadata.name}.*"]
-        vector = fstore.FeatureVector(
-            name=f"{endpoint_id}_vector",
-            features=features,
-            with_indexes=True,
-        )
-        vector.metadata.tag = application_name
-        vector.feature_set_objects = {feature_set.metadata.name: feature_set}
-
-        # get offline features based on application start and end time.
-        # store the result parquet by partitioning by controller end processing time
-        offline_response = vector.get_offline_features(
-            start_time=start_infer_time,
-            end_time=end_infer_time,
-            timestamp_for_filtering=mm_constants.EventFieldType.TIMESTAMP,
-            target=ParquetTarget(
-                path=parquet_directory
-                + f"/key={endpoint_id}/{int(start_infer_time.timestamp())}/{application_name}.parquet",
-                storage_options=storage_options,
-            ),
-        )
-        return offline_response
-

 def handler(context: nuclio.Context, event: nuclio.Event) -> None:
     """
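The per-endpoint fan-out now runs in threads instead of spawned processes, and each window is checked against the TSDB predictions instead of re-reading parquet-backed feature vectors. A standalone sketch of the pooling pattern (the endpoint records and the worker function below are stand-ins, not the controller's real code):

import concurrent.futures

endpoints = [{"uid": "ep-1"}, {"uid": "ep-2"}]  # stand-in endpoint records


def process_endpoint(endpoint: dict) -> str:
    # In the controller this role is played by model_endpoint_process(),
    # which now reads predictions per batch window from the TSDB connector.
    return endpoint["uid"]


# Same pool shape as the new controller code: a thread pool capped at 10
# workers replaces the previous spawn-based process pool.
with concurrent.futures.ThreadPoolExecutor(
    max_workers=min(len(endpoints), 10)
) as pool:
    futures = [pool.submit(process_endpoint, ep) for ep in endpoints]
    for future in concurrent.futures.as_completed(futures):
        print(future.result())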
mlrun/projects/project.py
CHANGED
@@ -708,7 +708,7 @@ def _load_project_from_db(url, secrets, user_project=False):

 def _delete_project_from_db(project_name, secrets, deletion_strategy):
     db = mlrun.db.get_run_db(secrets=secrets)
-
+    db.delete_project(project_name, deletion_strategy=deletion_strategy)


 def _load_project_file(url, name="", secrets=None, allow_cross_project=None):
@@ -1950,7 +1950,6 @@ class MlrunProject(ModelObj):
         application_class: typing.Union[
             str,
             mm_app.ModelMonitoringApplicationBase,
-            mm_app.ModelMonitoringApplicationBaseV2,
         ] = None,
         name: str = None,
         image: str = None,
@@ -2018,7 +2017,6 @@ class MlrunProject(ModelObj):
         application_class: typing.Union[
             str,
             mm_app.ModelMonitoringApplicationBase,
-            mm_app.ModelMonitoringApplicationBaseV2,
         ] = None,
         name: str = None,
         image: str = None,
@@ -2076,7 +2074,6 @@ class MlrunProject(ModelObj):
         application_class: typing.Union[
             str,
             mm_app.ModelMonitoringApplicationBase,
-            mm_app.ModelMonitoringApplicationBaseV2,
             None,
         ] = None,
         name: typing.Optional[str] = None,
mlrun/runtimes/nuclio/api_gateway.py
CHANGED

@@ -22,6 +22,7 @@ from nuclio.auth import AuthKinds as NuclioAuthKinds

 import mlrun
 import mlrun.common.constants as mlrun_constants
+import mlrun.common.helpers
 import mlrun.common.schemas as schemas
 import mlrun.common.types
 from mlrun.model import ModelObj
@@ -202,8 +203,13 @@ class APIGatewaySpec(ModelObj):
         self.project = project
         self.ports = ports

+        self.enrich()
         self.validate(project=project, functions=functions, canary=canary, ports=ports)

+    def enrich(self):
+        if self.path and not self.path.startswith("/"):
+            self.path = f"/{self.path}"
+
     def validate(
         self,
         project: str,
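The new enrich() hook only normalizes the gateway path before validation: a path given without a leading slash gets one prepended. A standalone sketch of that behavior (normalize_path is a stand-in for the spec method):

from typing import Optional


def normalize_path(path: Optional[str]) -> Optional[str]:
    # Mirrors APIGatewaySpec.enrich(): prepend "/" when the path lacks one.
    if path and not path.startswith("/"):
        return f"/{path}"
    return path


assert normalize_path("v1/predict") == "/v1/predict"
assert normalize_path("/v1/predict") == "/v1/predict"
assert normalize_path(None) is None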
mlrun/utils/version/version.json
CHANGED
{mlrun-1.7.0rc48.dist-info → mlrun-1.7.0rc49.dist-info}/RECORD
CHANGED

@@ -69,7 +69,7 @@ mlrun/common/schemas/secret.py,sha256=51tCN1F8DFTq4y_XdHIMDy3I1TnMEBX8kO8BHKavYF
 mlrun/common/schemas/tag.py,sha256=OAn9Qt6z8ibqw8uU8WQSvuwY8irUv45Dhx2Ko5FzUss,884
 mlrun/common/schemas/workflow.py,sha256=WxmlwtwrzwL4lfHYjQTOp03uv6PWYMpZ4cNBMOA6N6E,1897
 mlrun/common/schemas/model_monitoring/__init__.py,sha256=q2icasMdgI7OG-p5eVwCu6sBuPrBMpRxByC6rxYk0DM,1813
-mlrun/common/schemas/model_monitoring/constants.py,sha256=
+mlrun/common/schemas/model_monitoring/constants.py,sha256=Wha21Iev3Nr9ugB1Ms_wrmcY42YzWTQqLKPYZD2dRHA,9896
 mlrun/common/schemas/model_monitoring/grafana.py,sha256=SG13MFUUz_tk6-mWeSx17qcdEW4ekicxqNtnMSwRTCY,1559
 mlrun/common/schemas/model_monitoring/model_endpoints.py,sha256=5vvjNX1bV98VSGdT4jwHr5ArKC9v_c1iHlaTf82fSUY,13198
 mlrun/data_types/__init__.py,sha256=EkxfkFoHb91zz3Aymq-KZfCHlPMzEc3bBqgzPUwmHWY,1087
@@ -105,7 +105,7 @@ mlrun/db/__init__.py,sha256=WqJ4x8lqJ7ZoKbhEyFqkYADd9P6E3citckx9e9ZLcIU,1163
 mlrun/db/auth_utils.py,sha256=hpg8D2r82oN0BWabuWN04BTNZ7jYMAF242YSUpK7LFM,5211
 mlrun/db/base.py,sha256=lUfJrCWbuRUErIrUUXAKI2sSlrwfB-dHDz-Ck_cnZHU,24297
 mlrun/db/factory.py,sha256=ibIrE5QkIIyzDU1FXKrfbc31cZiRLYKDZb8dqCpQwyU,2397
-mlrun/db/httpdb.py,sha256=
+mlrun/db/httpdb.py,sha256=SaJT3OkxBqBJvwvGVMnYqd7yKf1vbfHV5If0bYPiX-Y,184934
 mlrun/db/nopdb.py,sha256=1oCZR2EmQQDkwXUgmyI3SB76zvOwA6Ml3Lk_xvuwHfc,21620
 mlrun/feature_store/__init__.py,sha256=FhHRc8NdqL_HWpCs7A8dKruxJS5wEm55Gs3dcgBiRUg,1522
 mlrun/feature_store/api.py,sha256=SWBbFD4KU2U4TUaAbD2hRLSquFWxX46mZGCToI0GfFQ,49994
@@ -212,22 +212,20 @@ mlrun/launcher/factory.py,sha256=RW7mfzEFi8fR0M-4W1JQg1iq3_muUU6OTqT_3l4Ubrk,233
 mlrun/launcher/local.py,sha256=pP9-ZrNL8OnNDEiXTAKAZQnmLpS_mCc2v-mJw329eks,11269
 mlrun/launcher/remote.py,sha256=tGICSfWtvUHeR31mbzy6gqHejmDxjPUgjtxXTWhRubg,7699
 mlrun/model_monitoring/__init__.py,sha256=dm5_j0_pwqrdzFwTaEtGnKfv2nVpNaM56nBI-oqLbNU,879
-mlrun/model_monitoring/api.py,sha256=
-mlrun/model_monitoring/
-mlrun/model_monitoring/controller.py,sha256=ZKp3mWMhj6irCuREs-OH1MYYh5DzqNEDe04kVPVrZzw,27971
-mlrun/model_monitoring/evidently_application.py,sha256=iOc42IVjj8m6PDBmVcKIMWm46Bu0EdO9SDcH40Eqhyo,769
+mlrun/model_monitoring/api.py,sha256=2EHCzB_5sCDgalYPkrFbI01cSO7LVWBv9yWoooJ-a0g,28106
+mlrun/model_monitoring/controller.py,sha256=dvqEyoE-iCd2jqDeoUpcrQFUeoTME58i3Wa2MhYi57k,20444
 mlrun/model_monitoring/features_drift_table.py,sha256=c6GpKtpOJbuT1u5uMWDL_S-6N4YPOmlktWMqPme3KFY,25308
 mlrun/model_monitoring/helpers.py,sha256=KsbSH0kEjCPajvLUpv3q5GWyvx0bZj-JkghGJlzbLZI,12757
 mlrun/model_monitoring/model_endpoint.py,sha256=7VX0cBATqLsA4sSinDzouf41ndxqh2mf5bO9BW0G5Z4,4017
 mlrun/model_monitoring/stream_processing.py,sha256=0eu1Gq1Obq87LFno6eIZ55poXoFaeloqYTLiQgyfd0k,38687
 mlrun/model_monitoring/tracking_policy.py,sha256=sQq956akAQpntkrJwIgFWcEq-JpyVcg0FxgNa4h3V70,5502
 mlrun/model_monitoring/writer.py,sha256=TrBwngRmdwr67De71UCcCFsJOfcqQe8jDp0vkBvGf0o,10177
-mlrun/model_monitoring/applications/__init__.py,sha256=
+mlrun/model_monitoring/applications/__init__.py,sha256=QYvzgCutFdAkzqKPD3mvkX_3c1X4tzd-kW8ojUOE9ic,889
 mlrun/model_monitoring/applications/_application_steps.py,sha256=fvZbtat7eXe5mo927_jyhq4BqWCapKZn7OVjptepIAI,7055
-mlrun/model_monitoring/applications/base.py,sha256=
-mlrun/model_monitoring/applications/context.py,sha256=
-mlrun/model_monitoring/applications/evidently_base.py,sha256=
-mlrun/model_monitoring/applications/histogram_data_drift.py,sha256=
+mlrun/model_monitoring/applications/base.py,sha256=uzc14lFlwTJnL0p2VBCzmp-CNoHd73cK_Iz0YHC1KAs,4380
+mlrun/model_monitoring/applications/context.py,sha256=vOZ_ZgUuy5UsNe22-puJSt7TB32HiZtqBdN1hegykuQ,12436
+mlrun/model_monitoring/applications/evidently_base.py,sha256=FSzmoDZP8EiSQ3tq5RmU7kJ6edh8bWaKQh0rBORjODY,5099
+mlrun/model_monitoring/applications/histogram_data_drift.py,sha256=wRCttgK1H4eRDiAJJ7Aid2hPuQPzUoBY3hSHlVkdE5w,13337
 mlrun/model_monitoring/applications/results.py,sha256=B0YuLig4rgBzBs3OAh01yLavhtNgj8Oz1RD8UfEkENU,3590
 mlrun/model_monitoring/db/__init__.py,sha256=6Ic-X3Fh9XLPYMytmevGNSs-Hii1rAjLLoFTSPwTguw,736
 mlrun/model_monitoring/db/stores/__init__.py,sha256=m6Z6rPQyaufq5oXF3HVUYGDN34biAX1JE1F6OxLN9B8,4752
@@ -275,7 +273,7 @@ mlrun/platforms/iguazio.py,sha256=1h5BpdAEQJBg2vIt7ySjUADU0ip5OkaMYr0_VREi9ys,13
 mlrun/projects/__init__.py,sha256=Lv5rfxyXJrw6WGOWJKhBz66M6t3_zsNMCfUD6waPwx4,1153
 mlrun/projects/operations.py,sha256=UEpiW4bDscth4pwWcLWF1xz-IU7bnZfckPR7sXp3O-g,19441
 mlrun/projects/pipelines.py,sha256=bumAbKDYPLbMkWW1CyHvUpEclKzX63dImCuG7qf3s1s,40496
-mlrun/projects/project.py,sha256=
+mlrun/projects/project.py,sha256=FjgkBBBP6geuxOGGp1Es5EFqsrs3M6PNWejBdoM08ng,190769
 mlrun/runtimes/__init__.py,sha256=egLM94cDMUyQ1GVABdFGXUQcDhU70lP3k7qSnM_UnHY,9008
 mlrun/runtimes/base.py,sha256=JXWmTIcm3b0klGUOHDlyFNa3bUgsNzQIgWhUQpSZoE0,37692
 mlrun/runtimes/daskjob.py,sha256=Ka_xqim8LkCYjp-M_WgteJy6ZN_3qfmLLHvXs7N6pa4,19411
@@ -295,7 +293,7 @@ mlrun/runtimes/mpijob/__init__.py,sha256=V_1gQD1VHa0Qvjqgyv8RLouH27Sy9YTwj2ZG62o
 mlrun/runtimes/mpijob/abstract.py,sha256=kDWo-IY1FKLZhI30j38Xx9HMhlUvHezfd1DT2ShoxZY,9161
 mlrun/runtimes/mpijob/v1.py,sha256=1XQZC7AIMGX_AQCbApcwpH8I7y39-v0v2O35MvxjXoo,3213
 mlrun/runtimes/nuclio/__init__.py,sha256=gx1kizzKv8pGT5TNloN1js1hdbxqDw3rM90sLVYVffY,794
-mlrun/runtimes/nuclio/api_gateway.py,sha256=
+mlrun/runtimes/nuclio/api_gateway.py,sha256=oQRSOvqtODKCzT2LqlqSXZbq2vcZ7epsFZwO9jvarhc,26899
 mlrun/runtimes/nuclio/function.py,sha256=TQt6RyxK_iyzNJr2r57BRtVXuy2GMrhdeFOlFjb2AZg,52106
 mlrun/runtimes/nuclio/nuclio.py,sha256=sLK8KdGO1LbftlL3HqPZlFOFTAAuxJACZCVl1c0Ha6E,2942
 mlrun/runtimes/nuclio/serving.py,sha256=Tsv-MssXJPe4di9stVOAyCj2MTMI7zQxvtFbAgdAtu0,29717
@@ -343,11 +341,11 @@ mlrun/utils/notifications/notification/ipython.py,sha256=ZtVL30B_Ha0VGoo4LxO-voT
 mlrun/utils/notifications/notification/slack.py,sha256=wqpFGr5BTvFO5KuUSzFfxsgmyU1Ohq7fbrGeNe9TXOk,7006
 mlrun/utils/notifications/notification/webhook.py,sha256=cb9w1Mc8ENfJBdgan7iiVHK9eVls4-R3tUxmXM-P-8I,4746
 mlrun/utils/version/__init__.py,sha256=7kkrB7hEZ3cLXoWj1kPoDwo4MaswsI2JVOBpbKgPAgc,614
-mlrun/utils/version/version.json,sha256=
+mlrun/utils/version/version.json,sha256=pELIT4QjjQIKJSjfMWiqmLKG4NM9pCDJcL5y_gLO75s,89
 mlrun/utils/version/version.py,sha256=eEW0tqIAkU9Xifxv8Z9_qsYnNhn3YH7NRAfM-pPLt1g,1878
-mlrun-1.7.
-mlrun-1.7.
-mlrun-1.7.
-mlrun-1.7.
-mlrun-1.7.
-mlrun-1.7.
+mlrun-1.7.0rc49.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+mlrun-1.7.0rc49.dist-info/METADATA,sha256=9pS_HGVwCSrwcrgRVGO0WxVj-YFB2HGek-34ps4vUDA,19943
+mlrun-1.7.0rc49.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+mlrun-1.7.0rc49.dist-info/entry_points.txt,sha256=1Owd16eAclD5pfRCoJpYC2ZJSyGNTtUr0nCELMioMmU,46
+mlrun-1.7.0rc49.dist-info/top_level.txt,sha256=NObLzw3maSF9wVrgSeYBv-fgnHkAJ1kEkh12DLdd5KM,6
+mlrun-1.7.0rc49.dist-info/RECORD,,
mlrun/model_monitoring/application.py
DELETED

@@ -1,19 +0,0 @@
-# Copyright 2023 Iguazio
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# TODO : delete this file in 1.9.0
-from mlrun.model_monitoring.applications import (  # noqa: F401
-    ModelMonitoringApplicationBase,
-    ModelMonitoringApplicationResult,
-)
mlrun/model_monitoring/evidently_application.py
DELETED

@@ -1,20 +0,0 @@
-# Copyright 2023 Iguazio
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# TODO : delete this file in 1.9.0
-from mlrun.model_monitoring.applications import (  # noqa: F401
-    _HAS_EVIDENTLY,
-    SUPPORTED_EVIDENTLY_VERSION,
-    EvidentlyModelMonitoringApplicationBase,
-)
{mlrun-1.7.0rc48.dist-info → mlrun-1.7.0rc49.dist-info}/LICENSE
File without changes

{mlrun-1.7.0rc48.dist-info → mlrun-1.7.0rc49.dist-info}/WHEEL
File without changes

{mlrun-1.7.0rc48.dist-info → mlrun-1.7.0rc49.dist-info}/entry_points.txt
File without changes

{mlrun-1.7.0rc48.dist-info → mlrun-1.7.0rc49.dist-info}/top_level.txt
File without changes