mlrun 1.8.0rc46__py3-none-any.whl → 1.8.0rc48__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mlrun might be problematic.
- mlrun/alerts/alert.py +1 -1
- mlrun/config.py +2 -0
- mlrun/datastore/storeytargets.py +9 -5
- mlrun/datastore/targets.py +1 -1
- mlrun/model_monitoring/api.py +26 -16
- mlrun/model_monitoring/applications/evidently/base.py +38 -0
- mlrun/model_monitoring/controller.py +113 -40
- mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +6 -1
- mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +11 -5
- mlrun/model_monitoring/helpers.py +16 -51
- mlrun/model_monitoring/stream_processing.py +3 -0
- mlrun/projects/project.py +28 -23
- mlrun/runtimes/nuclio/function.py +48 -0
- mlrun/serving/states.py +48 -27
- mlrun/utils/helpers.py +5 -2
- mlrun/utils/version/version.json +2 -2
- {mlrun-1.8.0rc46.dist-info → mlrun-1.8.0rc48.dist-info}/METADATA +6 -6
- {mlrun-1.8.0rc46.dist-info → mlrun-1.8.0rc48.dist-info}/RECORD +22 -22
- {mlrun-1.8.0rc46.dist-info → mlrun-1.8.0rc48.dist-info}/WHEEL +1 -1
- {mlrun-1.8.0rc46.dist-info → mlrun-1.8.0rc48.dist-info}/entry_points.txt +0 -0
- {mlrun-1.8.0rc46.dist-info → mlrun-1.8.0rc48.dist-info}/licenses/LICENSE +0 -0
- {mlrun-1.8.0rc46.dist-info → mlrun-1.8.0rc48.dist-info}/top_level.txt +0 -0
mlrun/alerts/alert.py
CHANGED
@@ -112,7 +112,7 @@ class AlertConfig(ModelObj):
                              complex trigger which is based on a prometheus alert
     :param criteria:         When the alert will be triggered based on the specified number of events within the
                              defined time period.
-    :param reset_policy:     When to clear the alert.
+    :param reset_policy:     When to clear the alert. Either "manual" for manual reset of the alert, or
                              "auto" if the criteria contains a time period
     :param notifications:    List of notifications to invoke once the alert is triggered
     :param entities:         Entities that the event relates to. The entity object will contain fields that
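In practice the choice between the two policies follows directly from the criteria, as the updated docstring states. A minimal illustration of that rule; the helper name and inputs below are hypothetical and not part of mlrun:

def pick_reset_policy(criteria_has_time_period: bool) -> str:
    # "auto": the alert clears itself once the criteria's time period elapses.
    # "manual": the alert stays active until it is reset explicitly.
    return "auto" if criteria_has_time_period else "manual"

assert pick_reset_policy(True) == "auto"
assert pick_reset_policy(False) == "manual"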
mlrun/config.py
CHANGED
@@ -631,6 +631,8 @@ default_config = {
         "parquet_batching_max_events": 10_000,
         "parquet_batching_timeout_secs": timedelta(minutes=1).total_seconds(),
         "tdengine": {
+            "run_directly": True,
+            # timeout and retry are ignored when run_directly is set to True
             "timeout": 10,
             "retries": 1,
         },
mlrun/datastore/storeytargets.py
CHANGED
@@ -109,17 +109,20 @@ class StreamStoreyTarget(storey.StreamTarget):
             raise mlrun.errors.MLRunInvalidArgumentError("StreamTarget requires a path")

         _, storage_options = get_url_and_storage_options(uri)
-
+        _, path = parse_path(uri)

         access_key = storage_options.get("v3io_access_key")
-
-
-
+
+        if alt_key_name := kwargs.pop("alternative_v3io_access_key", None):
+            if alt_key := mlrun.get_secret_or_env(alt_key_name):
+                access_key = alt_key
+
+        storage = V3ioDriver(access_key=access_key)

         if storage_options:
             kwargs["storage"] = storage
         if args:
-            args[0] =
+            args[0] = path
         if "stream_path" in kwargs:
             kwargs["stream_path"] = path

@@ -128,6 +131,7 @@ class StreamStoreyTarget(storey.StreamTarget):

 class KafkaStoreyTarget(storey.KafkaTarget):
     def __init__(self, *args, **kwargs):
+        kwargs.pop("alternative_v3io_access_key", None)
         path = kwargs.pop("path")
         attributes = kwargs.pop("attributes", {})
         if path and path.startswith("ds://"):
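The new keyword lets the stream target prefer a pipeline-level key over the one embedded in the datastore profile: if alternative_v3io_access_key names a secret or environment variable that resolves, that value wins, otherwise the profile key is kept. A standalone sketch of that precedence, using os.environ in place of mlrun.get_secret_or_env; the variable names and values are illustrative:

import os
from typing import Optional

def resolve_access_key(storage_options: dict, alt_key_name: Optional[str]) -> Optional[str]:
    # Start from the key carried by the datastore profile / storage options.
    access_key = storage_options.get("v3io_access_key")
    # If an alternative env var / secret name was given and it resolves, it takes precedence.
    if alt_key_name and (alt_key := os.environ.get(alt_key_name)):
        access_key = alt_key
    return access_key

os.environ["V3IO_ACCESS_KEY"] = "pipeline-key"  # illustrative value
print(resolve_access_key({"v3io_access_key": "profile-key"}, "V3IO_ACCESS_KEY"))  # -> pipeline-key
print(resolve_access_key({"v3io_access_key": "profile-key"}, None))               # -> profile-key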
mlrun/datastore/targets.py
CHANGED
@@ -87,7 +87,7 @@ def generate_target_run_id():

 def write_spark_dataframe_with_options(spark_options, df, mode, write_format=None):
     # TODO: Replace with just df.sparkSession when Spark 3.2 support is dropped
-    spark_session = getattr(df, "sparkSession") or df.sql_ctx.sparkSession
+    spark_session = getattr(df, "sparkSession", None) or df.sql_ctx.sparkSession
     non_hadoop_spark_options = spark_session_update_hadoop_options(
         spark_session, spark_options
     )
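The one-argument change above matters because getattr(df, "sparkSession") without a default raises AttributeError on DataFrames that do not expose the attribute (older Spark), whereas the None default lets the "or" fallback kick in. A tiny illustration with a stand-in object instead of a real Spark DataFrame:

class _OldStyleDF:
    """Stand-in for a DataFrame that only exposes sql_ctx (no sparkSession attribute)."""
    class _Ctx:
        sparkSession = "session-from-sql-ctx"
    sql_ctx = _Ctx()

df = _OldStyleDF()
# getattr(df, "sparkSession") would raise AttributeError here;
# the default makes the expression fall through to df.sql_ctx.sparkSession.
session = getattr(df, "sparkSession", None) or df.sql_ctx.sparkSession
print(session)  # -> session-from-sql-ctx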
mlrun/model_monitoring/api.py
CHANGED
@@ -160,7 +160,8 @@ def record_results(
     :param context:                  MLRun context. Note that the context is required generating the model endpoint.
     :param infer_results_df:         DataFrame that will be stored under the model endpoint parquet target. Will be
                                      used for doing the drift analysis. Please make sure that the dataframe includes
-                                     both feature names and label columns.
+                                     both feature names and label columns. If you are recording results for existing
+                                     model endpoint, the endpoint should be a batch endpoint.
     :param sample_set_statistics:    Dictionary of sample set statistics that will be used as a reference data for
                                      the current model endpoint.
     :param monitoring_mode:          If enabled, apply model monitoring features on the provided endpoint id. Enabled

@@ -221,23 +222,32 @@ def record_results(
     )
     logger.debug("Model endpoint", endpoint=model_endpoint)

-    timestamp = datetime_now()
     if infer_results_df is not None:
-
-
-
-
-
-
-
+        if (
+            model_endpoint.metadata.endpoint_type
+            != mlrun.common.schemas.model_monitoring.EndpointType.BATCH_EP
+        ):
+            logger.warning(
+                "Inference results can be recorded only for batch endpoints. "
+                "Therefore the current results won't be monitored."
+            )
+        else:
+            timestamp = datetime_now()
+            # Write the monitoring parquet to the relevant model endpoint context
+            write_monitoring_df(
+                feature_set_uri=model_endpoint.spec.monitoring_feature_set_uri,
+                infer_datetime=timestamp,
+                endpoint_id=model_endpoint.metadata.uid,
+                infer_results_df=infer_results_df,
+            )

-
-
-
-
-
-
-
+            # Update the last request time
+            update_model_endpoint_last_request(
+                project=project,
+                model_endpoint=model_endpoint,
+                current_request=timestamp,
+                db=db,
+            )

     return model_endpoint

mlrun/model_monitoring/applications/evidently/base.py
CHANGED

@@ -12,12 +12,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import json
+import posixpath
 import uuid
 import warnings
 from abc import ABC

 import pandas as pd
 import semver
+from evidently.ui.storage.local.base import METADATA_PATH, FSLocation

 import mlrun.model_monitoring.applications.base as mm_base
 import mlrun.model_monitoring.applications.context as mm_context

@@ -81,12 +84,47 @@ class EvidentlyModelMonitoringApplicationBase(
         # TODO : more then one project (mep -> project)
         if not _HAS_EVIDENTLY:
             raise ModuleNotFoundError("Evidently is not installed - the app cannot run")
+        self._log_location(evidently_workspace_path)
         self.evidently_workspace = Workspace.create(evidently_workspace_path)
         self.evidently_project_id = evidently_project_id
         self.evidently_project = self.evidently_workspace.get_project(
             evidently_project_id
         )

+    @staticmethod
+    def _log_location(evidently_workspace_path):
+        # TODO remove function + usage after solving issue ML-9530
+        location = FSLocation(base_path=evidently_workspace_path)
+        location.invalidate_cache("")
+        paths = [p for p in location.listdir("") if location.isdir(p)]
+
+        for path in paths:
+            metadata_path = posixpath.join(path, METADATA_PATH)
+            full_path = posixpath.join(location.path, metadata_path)
+            print(f"evidently json issue, working on path: {full_path}")
+            try:
+                with location.open(metadata_path) as f:
+                    content = json.load(f)
+                print(
+                    f"evidently json issue, successful load path: {full_path}, content: {content}"
+                )
+            except FileNotFoundError:
+                print(f"evidently json issue, path not found: {full_path}")
+                continue
+            except json.decoder.JSONDecodeError as json_error:
+                print(
+                    f"evidently json issue, path got json error, path:{full_path}, error: {json_error}"
+                )
+                print("evidently json issue, file content:")
+                with location.open(metadata_path) as f:
+                    print(f.read())
+                continue
+            except Exception as error:
+                print(
+                    f"evidently json issue, path got general error, path:{full_path}, error: {error}"
+                )
+                continue
+
     @staticmethod
     def log_evidently_object(
         monitoring_context: mm_context.MonitoringApplicationContext,
mlrun/model_monitoring/controller.py
CHANGED

@@ -12,15 +12,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import collections
 import concurrent.futures
 import datetime
 import json
 import os
 import traceback
+from collections import OrderedDict
 from collections.abc import Iterator
 from contextlib import AbstractContextManager
 from types import TracebackType
-from typing import Any, NamedTuple, Optional, cast
+from typing import Any, NamedTuple, Optional, Union, cast

 import nuclio_sdk

@@ -30,6 +32,7 @@ import mlrun.feature_store as fstore
 import mlrun.model_monitoring
 import mlrun.model_monitoring.db._schedules as schedules
 import mlrun.model_monitoring.helpers
+import mlrun.platforms.iguazio
 from mlrun.common.schemas import EndpointType
 from mlrun.common.schemas.model_monitoring.constants import (
     ControllerEvent,

@@ -243,7 +246,7 @@ class MonitoringApplicationController:
     Note that the MonitoringApplicationController object requires access keys along with valid project configurations.
     """

-
+    _MAX_FEATURE_SET_PER_WORKER = 1000

     def __init__(self) -> None:
         """Initialize Monitoring Application Controller"""

@@ -259,6 +262,63 @@ class MonitoringApplicationController:
             mlrun.mlconf.artifact_path
         )
         self.storage_options = store.get_storage_options()
+        self._controller_stream: Optional[
+            Union[
+                mlrun.platforms.iguazio.OutputStream,
+                mlrun.platforms.iguazio.KafkaOutputStream,
+            ]
+        ] = None
+        self._model_monitoring_stream: Optional[
+            Union[
+                mlrun.platforms.iguazio.OutputStream,
+                mlrun.platforms.iguazio.KafkaOutputStream,
+            ]
+        ] = None
+        self.applications_streams: dict[
+            str,
+            Union[
+                mlrun.platforms.iguazio.OutputStream,
+                mlrun.platforms.iguazio.KafkaOutputStream,
+            ],
+        ] = {}
+        self.feature_sets: OrderedDict[str, mlrun.feature_store.FeatureSet] = (
+            collections.OrderedDict()
+        )
+        self.tsdb_connector = mlrun.model_monitoring.get_tsdb_connector(
+            project=self.project
+        )
+
+    @property
+    def controller_stream(
+        self,
+    ) -> Union[
+        mlrun.platforms.iguazio.OutputStream,
+        mlrun.platforms.iguazio.KafkaOutputStream,
+    ]:
+        if self._controller_stream is None:
+            self._controller_stream = mlrun.model_monitoring.helpers.get_output_stream(
+                project=self.project,
+                function_name=mm_constants.MonitoringFunctionNames.APPLICATION_CONTROLLER,
+                v3io_access_key=self.v3io_access_key,
+            )
+        return self._controller_stream
+
+    @property
+    def model_monitoring_stream(
+        self,
+    ) -> Union[
+        mlrun.platforms.iguazio.OutputStream,
+        mlrun.platforms.iguazio.KafkaOutputStream,
+    ]:
+        if self._model_monitoring_stream is None:
+            self._model_monitoring_stream = (
+                mlrun.model_monitoring.helpers.get_output_stream(
+                    project=self.project,
+                    function_name=mm_constants.MonitoringFunctionNames.STREAM,
+                    v3io_access_key=self.model_monitoring_access_key,
+                )
+            )
+        return self._model_monitoring_stream

     @staticmethod
     def _get_model_monitoring_access_key() -> Optional[str]:

@@ -422,9 +482,9 @@ class MonitoringApplicationController:
         ]

         not_batch_endpoint = (
-            event[ControllerEvent.
+            event[ControllerEvent.ENDPOINT_TYPE] != EndpointType.BATCH_EP
         )
-
+
         logger.info(
             "Starting analyzing for", timestamp=event[ControllerEvent.TIMESTAMP]
         )

@@ -449,13 +509,39 @@ class MonitoringApplicationController:
             first_request=first_request,
             last_request=last_stream_timestamp,
         ):
-
-
-
-
-
-
-
+            data_in_window = False
+            if not_batch_endpoint:
+                # Serving endpoint - get the relevant window data from the TSDB
+                prediction_metric = self.tsdb_connector.read_predictions(
+                    start=start_infer_time,
+                    end=end_infer_time,
+                    endpoint_id=endpoint_id,
+                )
+                if prediction_metric.data:
+                    data_in_window = True
+            else:
+                if endpoint_id not in self.feature_sets:
+                    self.feature_sets[endpoint_id] = fstore.get_feature_set(
+                        event[ControllerEvent.FEATURE_SET_URI]
+                    )
+                    self.feature_sets.move_to_end(endpoint_id, last=False)
+                    if (
+                        len(self.feature_sets)
+                        > self._MAX_FEATURE_SET_PER_WORKER
+                    ):
+                        self.feature_sets.popitem(last=True)
+                m_fs = self.feature_sets.get(endpoint_id)
+
+                # Batch endpoint - get the relevant window data from the parquet target
+                df = m_fs.to_dataframe(
+                    start_time=start_infer_time,
+                    end_time=end_infer_time,
+                    time_column=mm_constants.EventFieldType.TIMESTAMP,
+                    storage_options=self.storage_options,
+                )
+                if len(df) > 0:
+                    data_in_window = True
+            if not data_in_window:
                 logger.info(
                     "No data found for the given interval",
                     start=start_infer_time,

@@ -528,8 +614,8 @@ class MonitoringApplicationController:
             endpoint_id=event[ControllerEvent.ENDPOINT_ID],
         )

-    @staticmethod
     def _push_to_applications(
+        self,
         start_infer_time: datetime.datetime,
         end_infer_time: datetime.datetime,
         endpoint_id: str,

@@ -563,12 +649,15 @@ class MonitoringApplicationController:
         }
         for app_name in applications_names:
             data.update({mm_constants.ApplicationEvent.APPLICATION_NAME: app_name})
-
-
-
-
-
-
+            if app_name not in self.applications_streams:
+                self.applications_streams[app_name] = (
+                    mlrun.model_monitoring.helpers.get_output_stream(
+                        project=project,
+                        function_name=app_name,
+                        v3io_access_key=model_monitoring_access_key,
+                    )
+                )
+            app_stream = self.applications_streams.get(app_name)

             logger.info(
                 "Pushing data to application stream",

@@ -581,7 +670,6 @@ class MonitoringApplicationController:
     def push_regular_event_to_controller_stream(self) -> None:
         """
         pushes a regular event to the controller stream.
-        :param event: the nuclio trigger event
         """
         logger.info("Starting monitoring controller chief")
         applications_names = []

@@ -637,7 +725,6 @@ class MonitoringApplicationController:
                     endpoint,
                     policy,
                     set(applications_names),
-                    self.v3io_access_key,
                     schedule_file,
                 ): endpoint
                 for endpoint in endpoints

@@ -662,7 +749,6 @@ class MonitoringApplicationController:
         endpoint: mlrun.common.schemas.ModelEndpoint,
         policy: dict,
         applications_names: set,
-        v3io_access_key: str,
         schedule_file: schedules.ModelMonitoringSchedulesFileChief,
     ) -> None:
         if self._should_monitor_endpoint(

@@ -688,12 +774,11 @@ class MonitoringApplicationController:
             policy[ControllerEventEndpointPolicy.ENDPOINT_UPDATED] = (
                 endpoint.metadata.updated.isoformat()
             )
-
+            self.push_to_controller_stream(
                 kind=mm_constants.ControllerEventKind.REGULAR_EVENT,
                 project=endpoint.metadata.project,
                 endpoint_id=endpoint.metadata.uid,
                 endpoint_name=endpoint.metadata.name,
-                stream_access_key=v3io_access_key,
                 timestamp=endpoint.status.last_request.isoformat(
                     sep=" ", timespec="microseconds"
                 ),

@@ -705,13 +790,12 @@ class MonitoringApplicationController:
                 endpoint_policy=policy,
             )

-    @staticmethod
     def push_to_controller_stream(
+        self,
         kind: str,
         project: str,
         endpoint_id: str,
         endpoint_name: str,
-        stream_access_key: str,
         timestamp: str,
         first_request: str,
         endpoint_type: int,

@@ -729,7 +813,6 @@ class MonitoringApplicationController:
         :param endpoint_name: the endpoint name string
         :param endpoint_type: Enum of the endpoint type
         :param feature_set_uri: the feature set uri string
-        :param stream_access_key: access key to apply the model monitoring process.
         """
         event = {
             ControllerEvent.KIND.value: kind,

@@ -742,18 +825,13 @@ class MonitoringApplicationController:
             ControllerEvent.FEATURE_SET_URI.value: feature_set_uri,
             ControllerEvent.ENDPOINT_POLICY.value: endpoint_policy,
         }
-        controller_stream = mlrun.model_monitoring.helpers.get_output_stream(
-            project=project,
-            function_name=mm_constants.MonitoringFunctionNames.APPLICATION_CONTROLLER,
-            v3io_access_key=stream_access_key,
-        )
         logger.info(
             "Pushing data to controller stream",
             event=event,
             endpoint_id=endpoint_id,
-            controller_stream_type=str(type(controller_stream)),
+            controller_stream_type=str(type(self.controller_stream)),
         )
-        controller_stream.push([event], partition_key=endpoint_id)
+        self.controller_stream.push([event], partition_key=endpoint_id)

     def _push_to_main_stream(self, event: dict, endpoint_id: str) -> None:
         """

@@ -761,18 +839,13 @@ class MonitoringApplicationController:
         :param event: event dictionary to push to stream
         :param endpoint_id: endpoint id string
         """
-        mm_stream = mlrun.model_monitoring.helpers.get_output_stream(
-            project=event.get(ControllerEvent.PROJECT),
-            function_name=mm_constants.MonitoringFunctionNames.APPLICATION_CONTROLLER,
-            v3io_access_key=self.v3io_access_key,
-        )
         logger.info(
             "Pushing data to main stream, NOP event is been generated",
             event=json.dumps(event),
             endpoint_id=endpoint_id,
-            mm_stream_type=str(type(
+            mm_stream_type=str(type(self.model_monitoring_stream)),
         )
-
+        self.model_monitoring_stream.push([event], partition_key=endpoint_id)


 def handler(context: nuclio_sdk.Context, event: nuclio_sdk.Event) -> None:
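The feature_sets attribute introduced above acts as a small bounded cache: a newly loaded feature set is moved to the front and the entry at the back is evicted once _MAX_FEATURE_SET_PER_WORKER is exceeded. A generic sketch of that pattern; the cache size and payload here are illustrative stand-ins, not the mlrun objects:

import collections

MAX_ENTRIES = 3  # stand-in for _MAX_FEATURE_SET_PER_WORKER
cache: "collections.OrderedDict[str, str]" = collections.OrderedDict()

def get_or_load(endpoint_id: str) -> str:
    if endpoint_id not in cache:
        cache[endpoint_id] = f"feature-set-for-{endpoint_id}"  # stand-in for fstore.get_feature_set(...)
        cache.move_to_end(endpoint_id, last=False)             # newest entry goes to the front
        if len(cache) > MAX_ENTRIES:
            cache.popitem(last=True)                           # drop the entry at the back
    return cache[endpoint_id]

for ep in ["a", "b", "c", "d"]:
    get_or_load(ep)
print(list(cache))  # -> ['d', 'c', 'b'] ("a" was evicted)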
mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py
CHANGED

@@ -55,6 +55,9 @@ class TDEngineConnector(TSDBConnector):

         self._init_super_tables()

+        self._run_directly = (
+            mlrun.mlconf.model_endpoint_monitoring.tdengine.run_directly
+        )
         self._timeout = mlrun.mlconf.model_endpoint_monitoring.tdengine.timeout
         self._retries = mlrun.mlconf.model_endpoint_monitoring.tdengine.retries

@@ -74,7 +77,9 @@ class TDEngineConnector(TSDBConnector):
     def _create_connection(self) -> TDEngineConnection:
         """Establish a connection to the TSDB server."""
         logger.debug("Creating a new connection to TDEngine", project=self.project)
-        conn = TDEngineConnection(
+        conn = TDEngineConnection(
+            self._tdengine_connection_profile.dsn(), run_directly=self._run_directly
+        )
         conn.prefix_statements = [f"USE {self.database}"]

         return conn
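The new run_directly flag flows from the config entry added in mlrun/config.py into the TDEngineConnection constructor (TDEngineConnection itself comes from the external taoswswrap package). A hedged sketch of inspecting those settings through mlconf, assuming the config path shown in the diff behaves like other mlconf entries:

import mlrun

tdengine_cfg = mlrun.mlconf.model_endpoint_monitoring.tdengine
# run_directly defaults to True; per the config comment above, timeout and
# retries only take effect when it is set to False.
print(tdengine_cfg.run_directly)                    # -> True (default from the diff)
print(tdengine_cfg.timeout, tdengine_cfg.retries)   # -> 10 1 (defaults from the diff)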
mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py
CHANGED

@@ -1090,9 +1090,9 @@ class V3IOTSDBConnector(TSDBConnector):
         Fetch basic metrics from V3IO TSDB and add them to MEP objects.

         :param model_endpoint_objects: A list of `ModelEndpoint` objects that will
-
+                                        be filled with the relevant basic metrics.
         :param project:                 The name of the project.
-        :param run_in_threadpool:
+        :param run_in_threadpool:       A function that runs another function in a thread pool.

         :return: A list of `ModelEndpointMonitoringMetric` objects.
         """

@@ -1104,9 +1104,15 @@ class V3IOTSDBConnector(TSDBConnector):
             uids.append(uid)
             model_endpoint_objects_by_uid[uid] = model_endpoint_object

-        error_count_res =
-
-
+        error_count_res = await run_in_threadpool(
+            self.get_error_count, endpoint_ids=uids, get_raw=True
+        )
+        avg_latency_res = await run_in_threadpool(
+            self.get_avg_latency, endpoint_ids=uids, get_raw=True
+        )
+        drift_status_res = await run_in_threadpool(
+            self.get_drift_status, endpoint_ids=uids, get_raw=True
+        )

         def add_metric(
             metric: str,
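The run_in_threadpool parameter documented above is expected to be an awaitable wrapper that pushes a blocking call onto a worker thread so the event loop stays responsive. A self-contained sketch of such a wrapper using only the standard library; the real caller may pass something like starlette's run_in_threadpool instead, and get_error_count below is a stand-in for the blocking TSDB query:

import asyncio
import functools
import time

async def run_in_threadpool(func, *args, **kwargs):
    # Run the blocking callable in the default executor (a thread pool).
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(None, functools.partial(func, *args, **kwargs))

def get_error_count(endpoint_ids, get_raw=False):
    time.sleep(0.1)  # stand-in for a blocking TSDB query
    return {uid: 0 for uid in endpoint_ids}

async def main():
    res = await run_in_threadpool(get_error_count, endpoint_ids=["ep1", "ep2"], get_raw=True)
    print(res)

asyncio.run(main())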
mlrun/model_monitoring/helpers.py
CHANGED

@@ -432,58 +432,23 @@ def update_model_endpoint_last_request(
     :param current_request: current request time
     :param db: DB interface.
     """
-    is_batch_endpoint = (
-        model_endpoint.metadata.endpoint_type == mm_constants.EndpointType.BATCH_EP
-    )
-    if not is_batch_endpoint:
-        logger.info(
-            "Update model endpoint last request time (EP with serving)",
-            project=project,
-            endpoint_id=model_endpoint.metadata.uid,
-            name=model_endpoint.metadata.name,
-            function_name=model_endpoint.spec.function_name,
-            last_request=model_endpoint.status.last_request,
-            current_request=current_request,
-        )
-        db.patch_model_endpoint(
-            project=project,
-            endpoint_id=model_endpoint.metadata.uid,
-            name=model_endpoint.metadata.name,
-            attributes={mm_constants.EventFieldType.LAST_REQUEST: current_request},
-        )
-    else:  # model endpoint without any serving function - close the window "manually"
-        try:
-            time_window = _get_monitoring_time_window_from_controller_run(project, db)
-        except mlrun.errors.MLRunNotFoundError:
-            logger.warn(
-                "Not bumping model endpoint last request time - the monitoring controller isn't deployed yet.\n"
-                "Call `project.enable_model_monitoring()` first."
-            )
-            return

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        db.patch_model_endpoint(
-            project=project,
-            endpoint_id=model_endpoint.metadata.uid,
-            name=model_endpoint.metadata.name,
-            function_name=model_endpoint.spec.function_name,
-            attributes={mm_constants.EventFieldType.LAST_REQUEST: bumped_last_request},
-        )
+    logger.info(
+        "Update model endpoint last request time (EP with serving)",
+        project=project,
+        endpoint_id=model_endpoint.metadata.uid,
+        name=model_endpoint.metadata.name,
+        function_name=model_endpoint.spec.function_name,
+        last_request=model_endpoint.status.last_request,
+        current_request=current_request,
+    )
+    db.patch_model_endpoint(
+        project=project,
+        endpoint_id=model_endpoint.metadata.uid,
+        name=model_endpoint.metadata.name,
+        function_name=model_endpoint.spec.function_name,
+        attributes={mm_constants.EventFieldType.LAST_REQUEST: current_request},
+    )


 def calculate_inputs_statistics(
mlrun/model_monitoring/stream_processing.py
CHANGED

@@ -264,6 +264,9 @@ class EventStreamProcessor:
             path=stream_uri,
             sharding_func=ControllerEvent.ENDPOINT_ID,
             after="ForwardNOP",
+            # Force using the pipeline key instead of the one in the profile in case of v3io profile.
+            # In case of Kafka, this parameter will be ignored.
+            alternative_v3io_access_key="V3IO_ACCESS_KEY",
         )

         apply_push_controller_stream(controller_stream_uri)
mlrun/projects/project.py
CHANGED
@@ -2144,29 +2144,34 @@ class MlrunProject(ModelObj):
         reset_policy: mlrun.common.schemas.alert.ResetPolicy = mlrun.common.schemas.alert.ResetPolicy.AUTO,
     ) -> list[mlrun.alerts.alert.AlertConfig]:
         """
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        Generate alert configurations based on specified model endpoints and result names, which can be defined
+        explicitly or using regex patterns.
+
+        :param name:            The name of the AlertConfig template. It will be combined with
+                                mep id, app name and result name to generate a unique name.
+        :param summary:         Summary of the alert, will be sent in the generated notifications
+        :param endpoints:       The endpoints from which metrics will be retrieved to configure
+                                the alerts.
+                                The ModelEndpointList object is obtained via the `list_model_endpoints`
+                                method or created manually using `ModelEndpoint` objects.
+        :param events:          AlertTrigger event types (EventKind).
+        :param notifications:   List of notifications to invoke once the alert is triggered
+        :param result_names:    Optional. Filters the result names used to create the alert
+                                configuration, constructed from the app and result_name regex.
+
+                                For example:
+                                [`app1.result-*`, `*.result1`]
+                                will match "mep_uid1.app1.result.result-1" and
+                                "mep_uid1.app2.result.result1".
+                                A specific result_name (not a wildcard) will always create a new alert
+                                config, regardless of whether the result name exists.
+        :param severity:        Severity of the alert.
+        :param criteria:        The threshold for triggering the alert based on the
+                                specified number of events within the defined time period.
+        :param reset_policy:    When to clear the alert. Either "manual" for manual reset of the alert,
+                                or "auto" if the criteria contains a time period.
+        :returns:               List of AlertConfig according to endpoints results,
+                                filtered by result_names.
         """
         db = mlrun.db.get_run_db(secrets=self._secrets)
         matching_results = []
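The result_names filtering described in the new docstring works over fully qualified result names of the form "<mep_uid>.<app>.result.<result_name>", with wildcard patterns applied to the app and result parts. A small sketch of that matching semantics using fnmatch; the helper and sample names are illustrative, not the actual mlrun implementation:

import fnmatch

def result_matches(full_result_name: str, patterns: list[str]) -> bool:
    # full_result_name has the form "<mep_uid>.<app>.result.<result_name>"
    _, app, _, result = full_result_name.split(".", 3)
    return any(fnmatch.fnmatch(f"{app}.{result}", pattern) for pattern in patterns)

patterns = ["app1.result-*", "*.result1"]
print(result_matches("mep_uid1.app1.result.result-1", patterns))  # -> True
print(result_matches("mep_uid1.app2.result.result1", patterns))   # -> True
print(result_matches("mep_uid1.app2.result.other", patterns))     # -> False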
mlrun/runtimes/nuclio/function.py
CHANGED

@@ -13,6 +13,7 @@
 # limitations under the License.

 import asyncio
+import copy
 import json
 import typing
 import warnings

@@ -50,6 +51,19 @@ from mlrun.runtimes.utils import get_item_name, log_std
 from mlrun.utils import get_in, logger, update_in
 from mlrun_pipelines.common.ops import deploy_op

+SENSITIVE_PATHS_IN_TRIGGER_CONFIG = {
+    "password",
+    "secret",
+    "attributes/password",
+    "attributes/accesskeyid",
+    "attributes/secretaccesskey",
+    "attributes/cacert",
+    "attributes/accesskey",
+    "attributes/accesscertificate",
+    "attributes/sasl/password",
+    "attributes/sasl/oauth/clientsecret",
+}
+

 def validate_nuclio_version_compatibility(*min_versions):
     """

@@ -274,6 +288,37 @@ class RemoteRuntime(KubeResource):
         if self.metadata.tag:
             mlrun.utils.validate_tag_name(self.metadata.tag, "function.metadata.tag")

+    def mask_sensitive_data_in_config(self):
+        if not self.spec.config:
+            return {}
+
+        raw_config = copy.deepcopy(self.spec.config)
+
+        for key, value in self.spec.config.items():
+            if key.startswith("spec.triggers"):
+                trigger_name = key.split(".")[-1]
+
+                for path in SENSITIVE_PATHS_IN_TRIGGER_CONFIG:
+                    # Handle nested keys
+                    nested_keys = path.split("/")
+                    target = value
+                    for sub_key in nested_keys[:-1]:
+                        target = target.get(sub_key, {})
+
+                    last_key = nested_keys[-1]
+                    if last_key in target:
+                        sensitive_field = target[last_key]
+                        if sensitive_field.startswith(
+                            mlrun.model.Credentials.secret_reference_prefix
+                        ):
+                            # already masked
+                            continue
+                        target[last_key] = (
+                            f"{mlrun.model.Credentials.secret_reference_prefix}/spec/triggers/{trigger_name}/{path}"
+                        )
+
+        return raw_config
+
     def set_config(self, key, value):
         self.spec.config[key] = value
         return self

@@ -1230,6 +1275,9 @@ class RemoteRuntime(KubeResource):
             if remote_env.get("name") in credentials_env_var_names:
                 new_env.append(remote_env)

+        # update nuclio-specific credentials
+        self.mask_sensitive_data_in_config()
+
         self.spec.env = new_env

     def _set_as_mock(self, enable):
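The masking walk above descends into nested trigger attributes via '/'-separated paths and replaces plain secrets with a reference string. A standalone sketch of the same traversal on a plain dict; the prefix, path set, and trigger layout here are illustrative stand-ins for the mlrun ones:

import copy

SECRET_PREFIX = "$ref:"  # stand-in for mlrun.model.Credentials.secret_reference_prefix
SENSITIVE_PATHS = {"password", "attributes/sasl/password"}

def mask_trigger(trigger: dict, trigger_name: str) -> dict:
    masked = copy.deepcopy(trigger)
    for path in SENSITIVE_PATHS:
        keys = path.split("/")
        target = masked
        for sub_key in keys[:-1]:
            target = target.get(sub_key, {})          # walk down the nested dicts
        last_key = keys[-1]
        if last_key in target and not str(target[last_key]).startswith(SECRET_PREFIX):
            # Replace the plain secret with a reference to its location in the spec.
            target[last_key] = f"{SECRET_PREFIX}/spec/triggers/{trigger_name}/{path}"
    return masked

trigger = {"password": "hunter2", "attributes": {"sasl": {"password": "s3cr3t"}}}
print(mask_trigger(trigger, "my-kafka-trigger"))
# Both the top-level password and attributes/sasl/password are replaced by reference strings.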
mlrun/serving/states.py
CHANGED
@@ -363,15 +363,22 @@ class BaseStep(ModelObj):
                             event: {"x": 5} , result_path="y" means the output of the step will be written
                             to event["y"] resulting in {"x": 5, "y": <result>}
         :param model_endpoint_creation_strategy: Strategy for creating or updating the model endpoint:
-
-
-
-
-
-
-
-
-
+
+            * **overwrite**:
+
+              1. If model endpoints with the same name exist, delete the `latest` one.
+              2. Create a new model endpoint entry and set it as `latest`.
+
+            * **inplace** (default):
+
+              1. If model endpoints with the same name exist, update the `latest` entry.
+              2. Otherwise, create a new entry.
+
+            * **archive**:
+
+              1. If model endpoints with the same name exist, preserve them.
+              2. Create a new model endpoint with the same name and set it to `latest`.
+
         :param class_args: class init arguments
         """
         if hasattr(self, "steps"):

@@ -810,15 +817,22 @@ class RouterStep(TaskStep):
         :param handler: class handler to invoke on run/event
         :param function: function this step should run in
         :param creation_strategy: Strategy for creating or updating the model endpoint:
-
-
-
-
-
-
-
-
-
+
+            * **overwrite**:
+
+              1. If model endpoints with the same name exist, delete the `latest` one.
+              2. Create a new model endpoint entry and set it as `latest`.
+
+            * **inplace** (default):
+
+              1. If model endpoints with the same name exist, update the `latest` entry.
+              2. Otherwise, create a new entry.
+
+            * **archive**:
+
+              1. If model endpoints with the same name exist, preserve them.
+              2. Create a new model endpoint with the same name and set it to `latest`.
+
         """

         if len(self.routes.keys()) >= MAX_MODELS_PER_ROUTER and key not in self.routes:

@@ -1207,15 +1221,22 @@ class FlowStep(BaseStep):
                             event: {"x": 5} , result_path="y" means the output of the step will be written
                             to event["y"] resulting in {"x": 5, "y": <result>}
         :param model_endpoint_creation_strategy: Strategy for creating or updating the model endpoint:
-
-
-
-
-
-
-
-
-
+
+            * **overwrite**:
+
+              1. If model endpoints with the same name exist, delete the `latest` one.
+              2. Create a new model endpoint entry and set it as `latest`.
+
+            * **inplace** (default):
+
+              1. If model endpoints with the same name exist, update the `latest` entry.
+              2. Otherwise, create a new entry.
+
+            * **archive**:
+
+              1. If model endpoints with the same name exist, preserve them.
+              2. Create a new model endpoint with the same name and set it to `latest`.
+
         :param class_args: class init arguments
         """

mlrun/utils/helpers.py
CHANGED
@@ -1371,13 +1371,16 @@ def has_timezone(timestamp):
     return False


-def format_datetime(dt: datetime) -> str:
+def format_datetime(dt: datetime, fmt: Optional[str] = None) -> str:
+    if dt is None:
+        return ""
+
     # If the datetime is naive
     if dt.tzinfo is None:
         dt = dt.replace(tzinfo=timezone.utc)

     # TODO: Once Python 3.12 is the minimal version, use %:z to format the timezone offset with a colon
-    formatted_time = dt.strftime("%Y-%m-%d %H:%M:%S.%f%z")
+    formatted_time = dt.strftime(fmt or "%Y-%m-%d %H:%M:%S.%f%z")

     # For versions earlier than Python 3.12, we manually insert the colon in the timezone offset
     return formatted_time[:-2] + ":" + formatted_time[-2:]
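A quick illustration of the updated helper's behavior, assuming mlrun 1.8.0rc48 or later is installed: naive datetimes are treated as UTC and rendered with a colon in the offset, and None now returns an empty string. Note that the trailing colon fix-up is applied to whatever the optional fmt produces, so custom formats that do not end in a %z offset will also get a colon spliced in near the end.

from datetime import datetime
from mlrun.utils.helpers import format_datetime

# Naive datetimes are assumed to be UTC.
print(format_datetime(datetime(2025, 1, 2, 3, 4, 5)))  # -> 2025-01-02 03:04:05.000000+00:00
print(format_datetime(None))                           # -> "" (new behavior in this release)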
mlrun/utils/version/version.json
CHANGED
|
{mlrun-1.8.0rc46.dist-info → mlrun-1.8.0rc48.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: mlrun
-Version: 1.8.
+Version: 1.8.0rc48
 Summary: Tracking and config of machine learning runs
 Home-page: https://github.com/mlrun/mlrun
 Author: Yaron Haviv

@@ -44,7 +44,7 @@ Requires-Dist: semver~=3.0
 Requires-Dist: dependency-injector~=4.41
 Requires-Dist: fsspec<2024.7,>=2023.9.2
 Requires-Dist: v3iofs~=0.1.17
-Requires-Dist: storey~=1.8.
+Requires-Dist: storey~=1.8.10
 Requires-Dist: inflection~=0.5.0
 Requires-Dist: python-dotenv~=1.0
 Requires-Dist: setuptools>=75.2

@@ -99,7 +99,7 @@ Requires-Dist: ossfs==2023.12.0; extra == "alibaba-oss"
 Requires-Dist: oss2==2.18.1; extra == "alibaba-oss"
 Provides-Extra: tdengine
 Requires-Dist: taos-ws-py==0.3.2; extra == "tdengine"
-Requires-Dist: taoswswrap~=0.3.
+Requires-Dist: taoswswrap~=0.3.4; extra == "tdengine"
 Provides-Extra: snowflake
 Requires-Dist: snowflake-connector-python~=3.7; extra == "snowflake"
 Provides-Extra: kfp18

@@ -152,7 +152,7 @@ Requires-Dist: s3fs<2024.7,>=2023.9.2; extra == "all"
 Requires-Dist: snowflake-connector-python~=3.7; extra == "all"
 Requires-Dist: sqlalchemy~=1.4; extra == "all"
 Requires-Dist: taos-ws-py==0.3.2; extra == "all"
-Requires-Dist: taoswswrap~=0.3.
+Requires-Dist: taoswswrap~=0.3.4; extra == "all"
 Provides-Extra: complete
 Requires-Dist: adlfs==2023.9.0; extra == "complete"
 Requires-Dist: aiobotocore<2.16,>=2.5.0; extra == "complete"

@@ -184,7 +184,7 @@ Requires-Dist: s3fs<2024.7,>=2023.9.2; extra == "complete"
 Requires-Dist: snowflake-connector-python~=3.7; extra == "complete"
 Requires-Dist: sqlalchemy~=1.4; extra == "complete"
 Requires-Dist: taos-ws-py==0.3.2; extra == "complete"
-Requires-Dist: taoswswrap~=0.3.
+Requires-Dist: taoswswrap~=0.3.4; extra == "complete"
 Provides-Extra: complete-api
 Requires-Dist: adlfs==2023.9.0; extra == "complete-api"
 Requires-Dist: aiobotocore<2.16,>=2.5.0; extra == "complete-api"

@@ -229,7 +229,7 @@ Requires-Dist: s3fs<2024.7,>=2023.9.2; extra == "complete-api"
 Requires-Dist: snowflake-connector-python~=3.7; extra == "complete-api"
 Requires-Dist: sqlalchemy~=1.4; extra == "complete-api"
 Requires-Dist: taos-ws-py==0.3.2; extra == "complete-api"
-Requires-Dist: taoswswrap~=0.3.
+Requires-Dist: taoswswrap~=0.3.4; extra == "complete-api"
 Requires-Dist: timelength~=1.1; extra == "complete-api"
 Requires-Dist: uvicorn~=0.32.1; extra == "complete-api"
 Dynamic: author
{mlrun-1.8.0rc46.dist-info → mlrun-1.8.0rc48.dist-info}/RECORD
CHANGED

@@ -1,6 +1,6 @@
 mlrun/__init__.py,sha256=Cqm9U9eCEdLpMejhU2BEhubu0mHL71igJJIwYa738EA,7450
 mlrun/__main__.py,sha256=0NDzPf9VFRO8KFfGgb8mkGUPIDS285aASV8Hbxs-ND0,45920
-mlrun/config.py,sha256=
+mlrun/config.py,sha256=GoUHHZ7782V7m6rMYpVVBcKJyoePDBSoEgskDIKVnWY,71931
 mlrun/errors.py,sha256=LkcbXTLANGdsgo2CRX2pdbyNmt--lMsjGv0XZMgP-Nc,8222
 mlrun/execution.py,sha256=FUktsD3puSFjc3LZJU35b-OmFBrBPBNntViCLQVuwnk,50008
 mlrun/features.py,sha256=ReBaNGsBYXqcbgI012n-SO_j6oHIbk_Vpv0CGPXbUmo,15842

@@ -11,7 +11,7 @@ mlrun/render.py,sha256=940H9fBBFeghH4dlifbURvtjlvw4GlWdAXezN6ky4rI,13275
 mlrun/run.py,sha256=n9n5IWBEaOrMIeSakp01DyL09_6FvLy3LCqWpBtvc08,45140
 mlrun/secrets.py,sha256=dZPdkc_zzfscVQepOHUwmzFqnBavDCBXV9DQoH_eIYM,7800
 mlrun/alerts/__init__.py,sha256=0gtG1BG0DXxFrXegIkjbM1XEN4sP9ODo0ucXrNld1hU,601
-mlrun/alerts/alert.py,sha256=
+mlrun/alerts/alert.py,sha256=QQFZGydQbx9RvAaSiaH-ALQZVcDKQX5lgizqj_rXW2k,15948
 mlrun/api/schemas/__init__.py,sha256=tVAnpexDkfI0JWMJNlPSnVOzoV4xqIjWGSln9UkPS4I,13921
 mlrun/artifacts/__init__.py,sha256=ofC2extBCOC1wg1YtdTzWzH3eeG_f-sFBUkHjYtZJpk,1175
 mlrun/artifacts/base.py,sha256=SFHe44o9RV9C3-WODOD53WdBjWk0Ya8lnap9LmERwrQ,29959

@@ -99,8 +99,8 @@ mlrun/datastore/sources.py,sha256=KQp1nNN7TcaewFm3It03H1R28uzlWGZDDHJyqiT--vw,49
 mlrun/datastore/spark_udf.py,sha256=NnnB3DZxZb-rqpRy7b-NC7QWXuuqFn3XkBDc86tU4mQ,1498
 mlrun/datastore/spark_utils.py,sha256=_AsVoU5Ix_-W7Gyq8io8V-2GTk0m8THJNDP3WGGaWJY,2865
 mlrun/datastore/store_resources.py,sha256=PFOMrZ6KH6hBOb0PiO-cHx_kv0UpHu5P2t8_mrR-lS4,6842
-mlrun/datastore/storeytargets.py,sha256=
-mlrun/datastore/targets.py,sha256=
+mlrun/datastore/storeytargets.py,sha256=dSy9wr4IyxrIE1GHBxzVEeEY1sdU66s4w-oUuaIfa2U,6620
+mlrun/datastore/targets.py,sha256=7qLf26BDH3qYTHOR7TSP0tUMPBhYOkaaOwffUBxgqY0,81201
 mlrun/datastore/utils.py,sha256=CbKbDI6CdFRCqyAXe-jykVvN_GH6R0JkxIQFAogR2GA,10604
 mlrun/datastore/v3io.py,sha256=QSYBORRLcJTeM9mt0EaWzyLcdmzrPkqrF7k5uLTam5U,8209
 mlrun/datastore/vectorstore.py,sha256=k-yom5gfw20hnVG0Rg7aBEehuXwvAloZwn0cx0VGals,11708

@@ -218,11 +218,11 @@ mlrun/launcher/factory.py,sha256=RW7mfzEFi8fR0M-4W1JQg1iq3_muUU6OTqT_3l4Ubrk,233
 mlrun/launcher/local.py,sha256=775HY-8S9LFUX5ubGXrLO0N1lVh8bn-DHFmNYuNqQPA,11451
 mlrun/launcher/remote.py,sha256=rLJW4UAnUT5iUb4BsGBOAV3K4R29a0X4lFtRkVKlyYU,7709
 mlrun/model_monitoring/__init__.py,sha256=ELy7njEtZnz09Dc6PGZSFFEGtnwI15bJNWM3Pj4_YIs,753
-mlrun/model_monitoring/api.py,sha256=
-mlrun/model_monitoring/controller.py,sha256=
+mlrun/model_monitoring/api.py,sha256=LU58dzE4QZiMH23lgiqfI__3m2E3eEZP-DQe2ioUSwM,28317
+mlrun/model_monitoring/controller.py,sha256=m4Zx_NQ0C-A7WtjBoXnqBmS11RRtLvBaFgbFbIgrdVc,36847
 mlrun/model_monitoring/features_drift_table.py,sha256=c6GpKtpOJbuT1u5uMWDL_S-6N4YPOmlktWMqPme3KFY,25308
-mlrun/model_monitoring/helpers.py,sha256=
-mlrun/model_monitoring/stream_processing.py,sha256=
+mlrun/model_monitoring/helpers.py,sha256=8QsoYRPOVSnR3Lcv99m4XYrp_cR6hSqBUflYSOkJmFQ,21019
+mlrun/model_monitoring/stream_processing.py,sha256=A66vbrgfWL_sBdAkiPJZmPUFXvQU5phYuyKX6yECtfQ,33558
 mlrun/model_monitoring/tracking_policy.py,sha256=PBIGrUYWrwcE5gwXupBIVzOb0QRRwPJsgQm_yLGQxB4,5595
 mlrun/model_monitoring/writer.py,sha256=ibbhvfSHb8Reqlb7RGFEAUNM4iTyK1gk8-2m46mP6VM,8428
 mlrun/model_monitoring/applications/__init__.py,sha256=xDBxkBjl-whHSG_4t1mLkxiypLH-fzn8TmAW9Mjo2uI,759

@@ -232,7 +232,7 @@ mlrun/model_monitoring/applications/context.py,sha256=DKUDOfN4iY5wpOMjfsarx4pVN9
 mlrun/model_monitoring/applications/histogram_data_drift.py,sha256=09t0tfC35W0SeJA3fzN29pJiB6G-V_8GlcvULVq6H9Q,15179
 mlrun/model_monitoring/applications/results.py,sha256=_qmj6TWT0SR2bi7gUyRKBU418eGgGoLW2_hTJ7S-ock,5782
 mlrun/model_monitoring/applications/evidently/__init__.py,sha256=-DqdPnBSrjZhFvKOu_Ie3MiFvlur9sPTZpZ1u0_1AE8,690
-mlrun/model_monitoring/applications/evidently/base.py,sha256=
+mlrun/model_monitoring/applications/evidently/base.py,sha256=_n_2CCQL-fC6hGUZSCLZxZuvXqMqjDHSFX0Giok8HZw,6793
 mlrun/model_monitoring/db/__init__.py,sha256=r47xPGZpIfMuv8J3PQCZTSqVPMhUta4sSJCZFKcS7FM,644
 mlrun/model_monitoring/db/_schedules.py,sha256=RWn4wtKsIXg668gMLpxO9I8GlkxvPSaA5y7w-wFDcgE,9048
 mlrun/model_monitoring/db/_stats.py,sha256=VVMWLMqG3Us3ozBkLaokJF22Ewv8WKmVE1-OvS_g9vA,6943

@@ -242,10 +242,10 @@ mlrun/model_monitoring/db/tsdb/helpers.py,sha256=0oUXc4aUkYtP2SGP6jTb3uPPKImIUsV
 mlrun/model_monitoring/db/tsdb/tdengine/__init__.py,sha256=vgBdsKaXUURKqIf3M0y4sRatmSVA4CQiJs7J5dcVBkQ,620
 mlrun/model_monitoring/db/tsdb/tdengine/schemas.py,sha256=EslhaR65jfeNdD5Ibk-3Hb4e5r5qYPfHb9rTChX3sG0,12689
 mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py,sha256=Uadj0UvAmln2MxDWod-kAzau1uNlqZh981rPhbUH_5M,2857
-mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py,sha256=
+mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py,sha256=5jgimfu2-omy8Cnnby7GpgB_MWEp9mmLX0zpbGC2JZ8,37934
 mlrun/model_monitoring/db/tsdb/v3io/__init__.py,sha256=aL3bfmQsUQ-sbvKGdNihFj8gLCK3mSys0qDcXtYOwgc,616
 mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py,sha256=_-zo9relCDtjGgievxAcAP9gVN9nDWs8BzGtFwTjb9M,6284
-mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py,sha256=
+mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py,sha256=IzdThNwWMBWo0D0VzXV-WVvGg-z7Y9e8ke8_LYJTeVA,46214
 mlrun/model_monitoring/metrics/__init__.py,sha256=6CsTXAxeLbbf8yfCADTaxmiavqwrLEdYFJ-qc5kgDAY,569
 mlrun/model_monitoring/metrics/histogram_distance.py,sha256=E9_WIl2vd6qNvoHVHoFcnuQk3ekbFWOdi8aU7sHrfk4,4724
 mlrun/package/__init__.py,sha256=v7VDyK9kDOOuDvFo4oiGV2fx-vM1KL7fdN9pGLakhUQ,7008

@@ -270,7 +270,7 @@ mlrun/platforms/iguazio.py,sha256=6VBTq8eQ3mzT96tzjYhAtcMQ2VjF4x8LpIPW5DAcX2Q,13
 mlrun/projects/__init__.py,sha256=0Krf0WIKfnZa71WthYOg0SoaTodGg3sV_hK3f_OlTPI,1220
 mlrun/projects/operations.py,sha256=TzPbTYBgmYrjxTKP_wOtBJYFFFwDCQtaVvF1Snr0TfM,20029
 mlrun/projects/pipelines.py,sha256=wud7ezeEmhIJvfYE_wzQbA4ygEfGXHtbOtoOpan6poY,48556
-mlrun/projects/project.py,sha256=
+mlrun/projects/project.py,sha256=WsNZUz_k52llBI5rLBwJeGuIzSlAapVXBQfCL7NVI8E,235765
 mlrun/runtimes/__init__.py,sha256=J9Sy2HiyMlztNv6VUurMzF5H2XzttNil8nRsWDsqLyg,8923
 mlrun/runtimes/base.py,sha256=EL14Kmc1vWEjnBPJwLj5hHC6CtRAQHJLmohCD3sFEHo,37855
 mlrun/runtimes/daskjob.py,sha256=JwuGvOiPsxEDHHMMUS4Oie4hLlYYIZwihAl6DjroTY0,19521

@@ -292,7 +292,7 @@ mlrun/runtimes/mpijob/abstract.py,sha256=JGMjcJ4dvpJbctF6psU9UvYyNCutMxTMgBQeTlz
 mlrun/runtimes/mpijob/v1.py,sha256=1XQZC7AIMGX_AQCbApcwpH8I7y39-v0v2O35MvxjXoo,3213
 mlrun/runtimes/nuclio/__init__.py,sha256=gx1kizzKv8pGT5TNloN1js1hdbxqDw3rM90sLVYVffY,794
 mlrun/runtimes/nuclio/api_gateway.py,sha256=vH9ClKVP4Mb24rvA67xPuAvAhX-gAv6vVtjVxyplhdc,26969
-mlrun/runtimes/nuclio/function.py,sha256=
+mlrun/runtimes/nuclio/function.py,sha256=1EFdGFqlyEfPUVK4Rhh8zWUrff7MNKaHrg7V-bejewg,54618
 mlrun/runtimes/nuclio/nuclio.py,sha256=sLK8KdGO1LbftlL3HqPZlFOFTAAuxJACZCVl1c0Ha6E,2942
 mlrun/runtimes/nuclio/serving.py,sha256=qetAyl-nfn8SWp7KyNgRtMNUVcX_q75SY9dLZP0uH6o,33365
 mlrun/runtimes/nuclio/application/__init__.py,sha256=rRs5vasy_G9IyoTpYIjYDafGoL6ifFBKgBtsXn31Atw,614

@@ -306,7 +306,7 @@ mlrun/serving/remote.py,sha256=gxJkj_J3j-sZcVUbUzbAmJafP6t6y4NVFsu0kWmYngA,18818
 mlrun/serving/routers.py,sha256=SY6AsaiSnh8ssXq8hQE2z9MYapOxFOFJBx9QomiZMO8,53915
 mlrun/serving/server.py,sha256=KiNhW0nTV5STZPzR6kEAUFVzCCAX8qv0g9AoCopARrM,23429
 mlrun/serving/serving_wrapper.py,sha256=R670-S6PX_d5ER6jiHtRvacuPyFzQH0mEf2K0sBIIOM,836
-mlrun/serving/states.py,sha256=
+mlrun/serving/states.py,sha256=UWiE85MB_SK3rgzWgNqQU2MKeyN2yF2BCvMcMAqLMTs,73247
 mlrun/serving/utils.py,sha256=k2EIYDWHUGkE-IBI6T0UNT32fw-KySsccIJM_LObI00,4171
 mlrun/serving/v1_serving.py,sha256=c6J_MtpE-Tqu00-6r4eJOCO6rUasHDal9W2eBIcrl50,11853
 mlrun/serving/v2_serving.py,sha256=b3C5Utv2_AOPrH_hPi3NarjNbAK3kRoeIfqMU4qNuUo,25362

@@ -321,7 +321,7 @@ mlrun/utils/azure_vault.py,sha256=IEFizrDGDbAaoWwDr1WoA88S_EZ0T--vjYtY-i0cvYQ,34
 mlrun/utils/clones.py,sha256=yXOeuLtgIiKZdmjeKK0Z_vIrH19ds5JuoJaCeDjhwOo,7516
 mlrun/utils/condition_evaluator.py,sha256=-nGfRmZzivn01rHTroiGY4rqEv8T1irMyhzxEei-sKc,1897
 mlrun/utils/db.py,sha256=blQgkWMfFH9lcN4sgJQcPQgEETz2Dl_zwbVA0SslpFg,2186
-mlrun/utils/helpers.py,sha256=
+mlrun/utils/helpers.py,sha256=0qKuvXA88Xeu_pbIFE9VQQWonxbAQpkiRSzmsxM8jtk,74465
 mlrun/utils/http.py,sha256=t6FrXQstZm9xVVjxqIGiLzrwZNCR4CSienSOuVgNIcI,8706
 mlrun/utils/logger.py,sha256=RG0m1rx6gfkJ-2C1r_p41MMpPiaDYqaYM2lYHDlNZEU,14767
 mlrun/utils/regex.py,sha256=jbR7IiOp6OO0mg9Fl_cVZCpWb9fL9nTPONCUxCDNWXg,5201

@@ -340,11 +340,11 @@ mlrun/utils/notifications/notification/mail.py,sha256=ZyJ3eqd8simxffQmXzqd3bgbAq
 mlrun/utils/notifications/notification/slack.py,sha256=eQvmctTh6wIG5xVOesLLV9S1-UUCu5UEQ9JIJOor3ts,7183
 mlrun/utils/notifications/notification/webhook.py,sha256=NeyIMSBojjjTJaUHmPbxMByp34GxYkl1-16NqzU27fU,4943
 mlrun/utils/version/__init__.py,sha256=7kkrB7hEZ3cLXoWj1kPoDwo4MaswsI2JVOBpbKgPAgc,614
-mlrun/utils/version/version.json,sha256=
+mlrun/utils/version/version.json,sha256=VgW5oJpqfzAo9q08BKtzegYzji4XRJ88Lp0aBY1vEaw,89
 mlrun/utils/version/version.py,sha256=eEW0tqIAkU9Xifxv8Z9_qsYnNhn3YH7NRAfM-pPLt1g,1878
-mlrun-1.8.
-mlrun-1.8.
-mlrun-1.8.
-mlrun-1.8.
-mlrun-1.8.
-mlrun-1.8.
+mlrun-1.8.0rc48.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+mlrun-1.8.0rc48.dist-info/METADATA,sha256=T7yAGdC4Yb-8_y8KkzMQ3oLLlK1vJNQ9sEbfMlZz7oM,26009
+mlrun-1.8.0rc48.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+mlrun-1.8.0rc48.dist-info/entry_points.txt,sha256=1Owd16eAclD5pfRCoJpYC2ZJSyGNTtUr0nCELMioMmU,46
+mlrun-1.8.0rc48.dist-info/top_level.txt,sha256=NObLzw3maSF9wVrgSeYBv-fgnHkAJ1kEkh12DLdd5KM,6
+mlrun-1.8.0rc48.dist-info/RECORD,,

File without changes
File without changes
File without changes