mlrun-1.7.2rc3-py3-none-any.whl → mlrun-1.8.0rc2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mlrun/__init__.py +18 -18
- mlrun/__main__.py +3 -3
- mlrun/alerts/alert.py +19 -12
- mlrun/artifacts/__init__.py +0 -2
- mlrun/artifacts/base.py +34 -11
- mlrun/artifacts/dataset.py +16 -16
- mlrun/artifacts/manager.py +13 -13
- mlrun/artifacts/model.py +66 -53
- mlrun/common/constants.py +6 -0
- mlrun/common/formatters/__init__.py +1 -0
- mlrun/common/formatters/feature_set.py +1 -0
- mlrun/common/formatters/function.py +1 -0
- mlrun/common/formatters/model_endpoint.py +30 -0
- mlrun/common/formatters/pipeline.py +1 -2
- mlrun/common/formatters/project.py +9 -0
- mlrun/common/model_monitoring/__init__.py +0 -3
- mlrun/common/model_monitoring/helpers.py +1 -1
- mlrun/common/runtimes/constants.py +1 -2
- mlrun/common/schemas/__init__.py +7 -2
- mlrun/common/schemas/alert.py +31 -18
- mlrun/common/schemas/api_gateway.py +3 -3
- mlrun/common/schemas/artifact.py +7 -13
- mlrun/common/schemas/auth.py +6 -4
- mlrun/common/schemas/background_task.py +7 -7
- mlrun/common/schemas/client_spec.py +2 -2
- mlrun/common/schemas/clusterization_spec.py +2 -2
- mlrun/common/schemas/common.py +53 -3
- mlrun/common/schemas/datastore_profile.py +1 -1
- mlrun/common/schemas/feature_store.py +9 -9
- mlrun/common/schemas/frontend_spec.py +4 -4
- mlrun/common/schemas/function.py +10 -10
- mlrun/common/schemas/hub.py +1 -1
- mlrun/common/schemas/k8s.py +3 -3
- mlrun/common/schemas/memory_reports.py +3 -3
- mlrun/common/schemas/model_monitoring/__init__.py +8 -1
- mlrun/common/schemas/model_monitoring/constants.py +62 -12
- mlrun/common/schemas/model_monitoring/grafana.py +1 -1
- mlrun/common/schemas/model_monitoring/model_endpoint_v2.py +149 -0
- mlrun/common/schemas/model_monitoring/model_endpoints.py +22 -6
- mlrun/common/schemas/notification.py +18 -3
- mlrun/common/schemas/object.py +1 -1
- mlrun/common/schemas/pagination.py +4 -4
- mlrun/common/schemas/partition.py +137 -0
- mlrun/common/schemas/pipeline.py +2 -2
- mlrun/common/schemas/project.py +22 -17
- mlrun/common/schemas/runs.py +2 -2
- mlrun/common/schemas/runtime_resource.py +5 -5
- mlrun/common/schemas/schedule.py +1 -1
- mlrun/common/schemas/secret.py +1 -1
- mlrun/common/schemas/tag.py +3 -3
- mlrun/common/schemas/workflow.py +5 -5
- mlrun/config.py +65 -15
- mlrun/data_types/__init__.py +0 -2
- mlrun/data_types/data_types.py +0 -1
- mlrun/data_types/infer.py +3 -1
- mlrun/data_types/spark.py +4 -4
- mlrun/data_types/to_pandas.py +2 -11
- mlrun/datastore/__init__.py +0 -2
- mlrun/datastore/alibaba_oss.py +4 -1
- mlrun/datastore/azure_blob.py +4 -1
- mlrun/datastore/base.py +12 -4
- mlrun/datastore/datastore.py +9 -3
- mlrun/datastore/datastore_profile.py +20 -20
- mlrun/datastore/dbfs_store.py +4 -1
- mlrun/datastore/filestore.py +4 -1
- mlrun/datastore/google_cloud_storage.py +4 -1
- mlrun/datastore/hdfs.py +4 -1
- mlrun/datastore/inmem.py +4 -1
- mlrun/datastore/redis.py +4 -1
- mlrun/datastore/s3.py +4 -1
- mlrun/datastore/sources.py +51 -49
- mlrun/datastore/store_resources.py +0 -2
- mlrun/datastore/targets.py +22 -23
- mlrun/datastore/utils.py +2 -2
- mlrun/datastore/v3io.py +4 -1
- mlrun/datastore/wasbfs/fs.py +13 -12
- mlrun/db/base.py +170 -64
- mlrun/db/factory.py +3 -0
- mlrun/db/httpdb.py +986 -238
- mlrun/db/nopdb.py +155 -57
- mlrun/errors.py +2 -2
- mlrun/execution.py +55 -29
- mlrun/feature_store/__init__.py +0 -2
- mlrun/feature_store/api.py +40 -40
- mlrun/feature_store/common.py +9 -9
- mlrun/feature_store/feature_set.py +20 -18
- mlrun/feature_store/feature_vector.py +27 -24
- mlrun/feature_store/retrieval/base.py +14 -9
- mlrun/feature_store/retrieval/job.py +2 -1
- mlrun/feature_store/steps.py +2 -2
- mlrun/features.py +30 -13
- mlrun/frameworks/__init__.py +1 -2
- mlrun/frameworks/_common/__init__.py +1 -2
- mlrun/frameworks/_common/artifacts_library.py +2 -2
- mlrun/frameworks/_common/mlrun_interface.py +10 -6
- mlrun/frameworks/_common/model_handler.py +29 -27
- mlrun/frameworks/_common/producer.py +3 -1
- mlrun/frameworks/_dl_common/__init__.py +1 -2
- mlrun/frameworks/_dl_common/loggers/__init__.py +1 -2
- mlrun/frameworks/_dl_common/loggers/mlrun_logger.py +4 -4
- mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +3 -3
- mlrun/frameworks/_ml_common/__init__.py +1 -2
- mlrun/frameworks/_ml_common/loggers/__init__.py +1 -2
- mlrun/frameworks/_ml_common/model_handler.py +21 -21
- mlrun/frameworks/_ml_common/plans/__init__.py +1 -2
- mlrun/frameworks/_ml_common/plans/confusion_matrix_plan.py +3 -1
- mlrun/frameworks/_ml_common/plans/dataset_plan.py +3 -3
- mlrun/frameworks/_ml_common/plans/roc_curve_plan.py +4 -4
- mlrun/frameworks/auto_mlrun/__init__.py +1 -2
- mlrun/frameworks/auto_mlrun/auto_mlrun.py +22 -15
- mlrun/frameworks/huggingface/__init__.py +1 -2
- mlrun/frameworks/huggingface/model_server.py +9 -9
- mlrun/frameworks/lgbm/__init__.py +47 -44
- mlrun/frameworks/lgbm/callbacks/__init__.py +1 -2
- mlrun/frameworks/lgbm/callbacks/logging_callback.py +4 -2
- mlrun/frameworks/lgbm/callbacks/mlrun_logging_callback.py +4 -2
- mlrun/frameworks/lgbm/mlrun_interfaces/__init__.py +1 -2
- mlrun/frameworks/lgbm/mlrun_interfaces/mlrun_interface.py +5 -5
- mlrun/frameworks/lgbm/model_handler.py +15 -11
- mlrun/frameworks/lgbm/model_server.py +11 -7
- mlrun/frameworks/lgbm/utils.py +2 -2
- mlrun/frameworks/onnx/__init__.py +1 -2
- mlrun/frameworks/onnx/dataset.py +3 -3
- mlrun/frameworks/onnx/mlrun_interface.py +2 -2
- mlrun/frameworks/onnx/model_handler.py +7 -5
- mlrun/frameworks/onnx/model_server.py +8 -6
- mlrun/frameworks/parallel_coordinates.py +11 -11
- mlrun/frameworks/pytorch/__init__.py +22 -23
- mlrun/frameworks/pytorch/callbacks/__init__.py +1 -2
- mlrun/frameworks/pytorch/callbacks/callback.py +2 -1
- mlrun/frameworks/pytorch/callbacks/logging_callback.py +15 -8
- mlrun/frameworks/pytorch/callbacks/mlrun_logging_callback.py +19 -12
- mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +22 -15
- mlrun/frameworks/pytorch/callbacks_handler.py +36 -30
- mlrun/frameworks/pytorch/mlrun_interface.py +17 -17
- mlrun/frameworks/pytorch/model_handler.py +21 -17
- mlrun/frameworks/pytorch/model_server.py +13 -9
- mlrun/frameworks/sklearn/__init__.py +19 -18
- mlrun/frameworks/sklearn/estimator.py +2 -2
- mlrun/frameworks/sklearn/metric.py +3 -3
- mlrun/frameworks/sklearn/metrics_library.py +8 -6
- mlrun/frameworks/sklearn/mlrun_interface.py +3 -2
- mlrun/frameworks/sklearn/model_handler.py +4 -3
- mlrun/frameworks/tf_keras/__init__.py +11 -12
- mlrun/frameworks/tf_keras/callbacks/__init__.py +1 -2
- mlrun/frameworks/tf_keras/callbacks/logging_callback.py +17 -14
- mlrun/frameworks/tf_keras/callbacks/mlrun_logging_callback.py +15 -12
- mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +21 -18
- mlrun/frameworks/tf_keras/model_handler.py +17 -13
- mlrun/frameworks/tf_keras/model_server.py +12 -8
- mlrun/frameworks/xgboost/__init__.py +19 -18
- mlrun/frameworks/xgboost/model_handler.py +13 -9
- mlrun/launcher/base.py +3 -4
- mlrun/launcher/local.py +1 -1
- mlrun/launcher/remote.py +1 -1
- mlrun/lists.py +4 -3
- mlrun/model.py +110 -46
- mlrun/model_monitoring/__init__.py +1 -2
- mlrun/model_monitoring/api.py +6 -6
- mlrun/model_monitoring/applications/_application_steps.py +13 -15
- mlrun/model_monitoring/applications/histogram_data_drift.py +41 -15
- mlrun/model_monitoring/applications/results.py +55 -3
- mlrun/model_monitoring/controller.py +185 -223
- mlrun/model_monitoring/db/_schedules.py +156 -0
- mlrun/model_monitoring/db/_stats.py +189 -0
- mlrun/model_monitoring/db/stores/__init__.py +1 -1
- mlrun/model_monitoring/db/stores/base/store.py +6 -65
- mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +0 -25
- mlrun/model_monitoring/db/stores/sqldb/models/base.py +0 -97
- mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +2 -58
- mlrun/model_monitoring/db/stores/sqldb/models/sqlite.py +0 -15
- mlrun/model_monitoring/db/stores/sqldb/sql_store.py +6 -257
- mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +9 -271
- mlrun/model_monitoring/db/tsdb/base.py +76 -24
- mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +61 -6
- mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +33 -0
- mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +253 -28
- mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +1 -0
- mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +35 -17
- mlrun/model_monitoring/helpers.py +91 -1
- mlrun/model_monitoring/model_endpoint.py +4 -2
- mlrun/model_monitoring/stream_processing.py +16 -13
- mlrun/model_monitoring/tracking_policy.py +10 -3
- mlrun/model_monitoring/writer.py +47 -26
- mlrun/package/__init__.py +3 -6
- mlrun/package/context_handler.py +1 -1
- mlrun/package/packager.py +12 -9
- mlrun/package/packagers/__init__.py +0 -2
- mlrun/package/packagers/default_packager.py +14 -11
- mlrun/package/packagers/numpy_packagers.py +16 -7
- mlrun/package/packagers/pandas_packagers.py +18 -18
- mlrun/package/packagers/python_standard_library_packagers.py +25 -11
- mlrun/package/packagers_manager.py +31 -14
- mlrun/package/utils/__init__.py +0 -3
- mlrun/package/utils/_pickler.py +6 -6
- mlrun/platforms/__init__.py +3 -16
- mlrun/platforms/iguazio.py +4 -1
- mlrun/projects/operations.py +27 -27
- mlrun/projects/pipelines.py +34 -35
- mlrun/projects/project.py +535 -182
- mlrun/run.py +13 -10
- mlrun/runtimes/__init__.py +1 -3
- mlrun/runtimes/base.py +15 -11
- mlrun/runtimes/daskjob.py +9 -9
- mlrun/runtimes/generators.py +2 -1
- mlrun/runtimes/kubejob.py +4 -5
- mlrun/runtimes/mounts.py +572 -0
- mlrun/runtimes/mpijob/__init__.py +0 -2
- mlrun/runtimes/mpijob/abstract.py +7 -6
- mlrun/runtimes/nuclio/api_gateway.py +7 -7
- mlrun/runtimes/nuclio/application/application.py +11 -11
- mlrun/runtimes/nuclio/function.py +13 -13
- mlrun/runtimes/nuclio/serving.py +9 -9
- mlrun/runtimes/pod.py +154 -45
- mlrun/runtimes/remotesparkjob.py +3 -2
- mlrun/runtimes/sparkjob/__init__.py +0 -2
- mlrun/runtimes/sparkjob/spark3job.py +21 -11
- mlrun/runtimes/utils.py +6 -5
- mlrun/serving/merger.py +6 -4
- mlrun/serving/remote.py +18 -17
- mlrun/serving/routers.py +27 -27
- mlrun/serving/server.py +1 -1
- mlrun/serving/states.py +76 -71
- mlrun/serving/utils.py +13 -2
- mlrun/serving/v1_serving.py +3 -2
- mlrun/serving/v2_serving.py +4 -4
- mlrun/track/__init__.py +1 -1
- mlrun/track/tracker.py +2 -2
- mlrun/track/trackers/mlflow_tracker.py +6 -5
- mlrun/utils/async_http.py +1 -1
- mlrun/utils/helpers.py +70 -16
- mlrun/utils/logger.py +106 -4
- mlrun/utils/notifications/notification/__init__.py +22 -19
- mlrun/utils/notifications/notification/base.py +33 -14
- mlrun/utils/notifications/notification/console.py +6 -6
- mlrun/utils/notifications/notification/git.py +11 -11
- mlrun/utils/notifications/notification/ipython.py +10 -9
- mlrun/utils/notifications/notification/mail.py +149 -0
- mlrun/utils/notifications/notification/slack.py +6 -6
- mlrun/utils/notifications/notification/webhook.py +18 -22
- mlrun/utils/notifications/notification_pusher.py +43 -31
- mlrun/utils/regex.py +3 -1
- mlrun/utils/version/version.json +2 -2
- {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0rc2.dist-info}/METADATA +18 -14
- mlrun-1.8.0rc2.dist-info/RECORD +358 -0
- {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0rc2.dist-info}/WHEEL +1 -1
- mlrun-1.7.2rc3.dist-info/RECORD +0 -351
- {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0rc2.dist-info}/LICENSE +0 -0
- {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0rc2.dist-info}/entry_points.txt +0 -0
- {mlrun-1.7.2rc3.dist-info → mlrun-1.8.0rc2.dist-info}/top_level.txt +0 -0
mlrun/model_monitoring/db/tsdb/base.py

@@ -15,10 +15,9 @@
 import typing
 from abc import ABC, abstractmethod
 from datetime import datetime
-from typing import Union
 
 import pandas as pd
-import pydantic
+import pydantic.v1
 
 import mlrun.common.schemas.model_monitoring as mm_schemas
 import mlrun.model_monitoring.db.tsdb.helpers
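The switch from `import pydantic` to `import pydantic.v1` moves this module onto pydantic 2's bundled v1 compatibility layer. A minimal sketch of what that namespace provides, assuming pydantic>=2 is installed (the `Point` model below is illustrative, not mlrun code):

import pydantic.v1

class Point(pydantic.v1.BaseModel):
    x: int
    y: int

try:
    Point(x="not-an-int", y=2)
except pydantic.v1.ValidationError as err:
    # v1-style validation errors still work under the compatibility namespace
    print(err.errors()[0]["type"])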
@@ -132,6 +131,7 @@ class TSDBConnector(ABC):
         end: datetime,
         metrics: list[mm_schemas.ModelEndpointMonitoringMetric],
         type: typing.Literal["metrics", "results"],
+        with_result_extra_data: bool,
     ) -> typing.Union[
         list[
             typing.Union[
@@ -150,11 +150,13 @@ class TSDBConnector(ABC):
         Read metrics OR results from the TSDB and return as a list.
 
         :param endpoint_id: The model endpoint identifier.
-        :param start:
-        :param end:
-        :param metrics:
-        :param type:
-        :
+        :param start: The start time of the query.
+        :param end: The end time of the query.
+        :param metrics: The list of metrics to get the values for.
+        :param type: "metrics" or "results" - the type of each item in metrics.
+        :param with_result_extra_data: Whether to include the extra data in the results, relevant only when
+                                       `type="results"`.
+        :return: A list of result values or a list of metric values.
         """
 
     @abstractmethod
@@ -193,9 +195,9 @@ class TSDBConnector(ABC):
     @abstractmethod
     def get_last_request(
         self,
-        endpoint_ids: Union[str, list[str]],
-        start:
-        end:
+        endpoint_ids: typing.Union[str, list[str]],
+        start: typing.Optional[datetime] = None,
+        end: typing.Optional[datetime] = None,
     ) -> pd.DataFrame:
         """
         Fetches data from the predictions TSDB table and returns the most recent request
@@ -212,9 +214,9 @@ class TSDBConnector(ABC):
     @abstractmethod
     def get_drift_status(
         self,
-        endpoint_ids: Union[str, list[str]],
-        start:
-        end:
+        endpoint_ids: typing.Union[str, list[str]],
+        start: typing.Optional[datetime] = None,
+        end: typing.Optional[datetime] = None,
     ) -> pd.DataFrame:
         """
         Fetches data from the app-results TSDB table and returns the highest status among all
@@ -233,8 +235,8 @@ class TSDBConnector(ABC):
     def get_metrics_metadata(
         self,
         endpoint_id: str,
-        start:
-        end:
+        start: typing.Optional[datetime] = None,
+        end: typing.Optional[datetime] = None,
     ) -> pd.DataFrame:
         """
         Fetches distinct metrics metadata from the metrics TSDB table for a specified model endpoint.
@@ -251,8 +253,8 @@ class TSDBConnector(ABC):
     def get_results_metadata(
         self,
         endpoint_id: str,
-        start:
-        end:
+        start: typing.Optional[datetime] = None,
+        end: typing.Optional[datetime] = None,
    ) -> pd.DataFrame:
         """
         Fetches distinct results metadata from the app-results TSDB table for a specified model endpoint.
@@ -268,9 +270,9 @@ class TSDBConnector(ABC):
     @abstractmethod
     def get_error_count(
         self,
-        endpoint_ids: Union[str, list[str]],
-        start:
-        end:
+        endpoint_ids: typing.Union[str, list[str]],
+        start: typing.Optional[datetime] = None,
+        end: typing.Optional[datetime] = None,
     ) -> pd.DataFrame:
         """
         Fetches data from the error TSDB table and returns the error count for each specified endpoint.
@@ -286,9 +288,9 @@ class TSDBConnector(ABC):
     @abstractmethod
     def get_avg_latency(
         self,
-        endpoint_ids: Union[str, list[str]],
-        start:
-        end:
+        endpoint_ids: typing.Union[str, list[str]],
+        start: typing.Optional[datetime] = None,
+        end: typing.Optional[datetime] = None,
     ) -> pd.DataFrame:
         """
         Fetches data from the predictions TSDB table and returns the average latency for each specified endpoint
@@ -420,11 +422,12 @@ class TSDBConnector(ABC):
                             sub_df.index,
                             sub_df[mm_schemas.ResultData.RESULT_VALUE],
                             sub_df[mm_schemas.ResultData.RESULT_STATUS],
+                            sub_df[mm_schemas.ResultData.RESULT_EXTRA_DATA],
                         )
                     ),  # pyright: ignore[reportArgumentType]
                 )
             )
-        except pydantic.ValidationError:
+        except pydantic.v1.ValidationError:
             logger.exception(
                 "Failed to convert data-frame into `ModelEndpointMonitoringResultValues`",
                 full_name=full_name,
@@ -446,3 +449,52 @@ class TSDBConnector(ABC):
         )
 
         return metrics_values
+
+    @staticmethod
+    def df_to_metrics_list(
+        *,
+        df: pd.DataFrame,
+        project: str,
+        type: str,
+    ) -> list[mm_schemas.ModelEndpointMonitoringMetric]:
+        """
+        Parse a DataFrame of metrics from the TSDB into a list of mm metrics objects.
+
+        :param df:      The DataFrame to parse.
+        :param project: The project name.
+        :param type:    The type of the metrics (either "result" or "metric").
+
+        :return: A list of mm metrics objects.
+        """
+        return list(
+            map(
+                lambda record: mm_schemas.ModelEndpointMonitoringMetric(
+                    project=project,
+                    type=type,
+                    app=record.get(mm_schemas.WriterEvent.APPLICATION_NAME),
+                    name=record.get(mm_schemas.ResultData.RESULT_NAME)
+                    or record.get(mm_schemas.MetricData.METRIC_NAME),
+                    kind=record.get(mm_schemas.ResultData.RESULT_KIND),
+                ),
+                df.to_dict("records"),
+            )
+        )
+
+    @staticmethod
+    def _get_start_end(
+        start: typing.Union[datetime, None],
+        end: typing.Union[datetime, None],
+    ) -> tuple[datetime, datetime]:
+        """
+        static utils function for tsdb start end format
+        :param start: Either None or datetime, None is handled as datetime.min(tz=timezone.utc)
+        :param end:   Either None or datetime, None is handled as datetime.now(tz=timezone.utc)
+        :return: start datetime, end datetime
+        """
+        start = start or mlrun.utils.datetime_min()
+        end = end or mlrun.utils.datetime_now()
+        if not (isinstance(start, datetime) and isinstance(end, datetime)):
+            raise mlrun.errors.MLRunInvalidArgumentError(
+                "Both start and end must be datetime objects"
+            )
+        return start, end
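The new `_get_start_end` helper lets callers pass `start=None`/`end=None` and still get a concrete time window back. A minimal stand-alone sketch of the same normalization, using only the standard library (the `normalize_window` name is illustrative, not part of mlrun):

from datetime import datetime, timezone

def normalize_window(start, end):
    # None lower bound -> earliest representable UTC time; None upper bound -> "now"
    start = start or datetime.min.replace(tzinfo=timezone.utc)
    end = end or datetime.now(tz=timezone.utc)
    if not (isinstance(start, datetime) and isinstance(end, datetime)):
        raise ValueError("Both start and end must be datetime objects")
    return start, end

window = normalize_window(None, None)  # queries the full history up to now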
mlrun/model_monitoring/db/tsdb/tdengine/schemas.py

@@ -26,7 +26,7 @@ _MODEL_MONITORING_DATABASE = "mlrun_model_monitoring"
 
 
 class _TDEngineColumnType:
-    def __init__(self, data_type: str, length: int = None):
+    def __init__(self, data_type: str, length: Optional[int] = None):
         self.data_type = data_type
         self.length = length
 
@@ -46,7 +46,7 @@ class _TDEngineColumn(mlrun.common.types.StrEnum):
     INT = _TDEngineColumnType("INT")
     BINARY_40 = _TDEngineColumnType("BINARY", 40)
     BINARY_64 = _TDEngineColumnType("BINARY", 64)
-
+    BINARY_1000 = _TDEngineColumnType("BINARY", 1000)
 
 
 def values_to_column(values, column_type):
@@ -61,7 +61,7 @@ def values_to_column(values, column_type):
         return taosws.binary_to_column(values)
     if column_type == _TDEngineColumn.BINARY_64:
         return taosws.binary_to_column(values)
-    if column_type == _TDEngineColumn.
+    if column_type == _TDEngineColumn.BINARY_1000:
         return taosws.binary_to_column(values)
 
     raise mlrun.errors.MLRunInvalidArgumentError(
@@ -170,7 +170,7 @@ class TDEngineSchema:
         table: str,
         start: datetime.datetime,
         end: datetime.datetime,
-        columns_to_filter: list[str] = None,
+        columns_to_filter: Optional[list[str]] = None,
         filter_query: Optional[str] = None,
         interval: Optional[str] = None,
         limit: int = 0,
@@ -178,6 +178,10 @@ class TDEngineSchema:
         sliding_window_step: Optional[str] = None,
         timestamp_column: str = "time",
         database: str = _MODEL_MONITORING_DATABASE,
+        group_by: Optional[Union[list[str], str]] = None,
+        preform_agg_funcs_columns: Optional[list[str]] = None,
+        order_by: Optional[str] = None,
+        desc: Optional[bool] = None,
     ) -> str:
         if agg_funcs and not columns_to_filter:
             raise mlrun.errors.MLRunInvalidArgumentError(
@@ -194,15 +198,37 @@ class TDEngineSchema:
             raise mlrun.errors.MLRunInvalidArgumentError(
                 "`interval` must be provided when using sliding window"
             )
+        if group_by and not agg_funcs:
+            raise mlrun.errors.MLRunInvalidArgumentError(
+                "aggregate functions must be provided when using group by"
+            )
+        if desc and not order_by:
+            raise mlrun.errors.MLRunInvalidArgumentError(
+                "`order_by` must be provided when using descending"
+            )
 
         with StringIO() as query:
             query.write("SELECT ")
             if interval:
                 query.write("_wstart, _wend, ")
             if agg_funcs:
+                preform_agg_funcs_columns = (
+                    columns_to_filter
+                    if preform_agg_funcs_columns is None
+                    else preform_agg_funcs_columns
+                )
                 query.write(
                     ", ".join(
-                        [
+                        [
+                            f"{a}({col})"
+                            if col.upper()
+                            in map(
+                                str.upper, preform_agg_funcs_columns
+                            )  # Case-insensitive check
+                            else f"{col}"
+                            for a in agg_funcs
+                            for col in columns_to_filter
+                        ]
                     )
                 )
             elif columns_to_filter:
@@ -219,6 +245,13 @@ class TDEngineSchema:
                 query.write(f"{timestamp_column} >= '{start}' AND ")
             if end:
                 query.write(f"{timestamp_column} <= '{end}'")
+            if group_by:
+                if isinstance(group_by, list):
+                    group_by = ", ".join(group_by)
+                query.write(f" GROUP BY {group_by}")
+            if order_by:
+                desc = " DESC" if desc else ""
+                query.write(f" ORDER BY {order_by}{desc}")
             if interval:
                 query.write(f" INTERVAL({interval})")
             if sliding_window_step:
@@ -238,6 +271,7 @@ class AppResultTable(TDEngineSchema):
             mm_schemas.WriterEvent.START_INFER_TIME: _TDEngineColumn.TIMESTAMP,
             mm_schemas.ResultData.RESULT_VALUE: _TDEngineColumn.FLOAT,
             mm_schemas.ResultData.RESULT_STATUS: _TDEngineColumn.INT,
+            mm_schemas.ResultData.RESULT_EXTRA_DATA: _TDEngineColumn.BINARY_1000,
         }
         tags = {
             mm_schemas.WriterEvent.ENDPOINT_ID: _TDEngineColumn.BINARY_64,
@@ -284,10 +318,31 @@ class Predictions(TDEngineSchema):
         columns = {
             mm_schemas.EventFieldType.TIME: _TDEngineColumn.TIMESTAMP,
             mm_schemas.EventFieldType.LATENCY: _TDEngineColumn.FLOAT,
-            mm_schemas.EventKeyMetrics.CUSTOM_METRICS: _TDEngineColumn.
+            mm_schemas.EventKeyMetrics.CUSTOM_METRICS: _TDEngineColumn.BINARY_1000,
+        }
+        tags = {
+            mm_schemas.WriterEvent.ENDPOINT_ID: _TDEngineColumn.BINARY_64,
+        }
+        super().__init__(
+            super_table=super_table,
+            columns=columns,
+            tags=tags,
+            database=database,
+            project=project,
+        )
+
+
+@dataclass
+class Errors(TDEngineSchema):
+    def __init__(self, project: str, database: Optional[str] = None):
+        super_table = mm_schemas.TDEngineSuperTables.ERRORS
+        columns = {
+            mm_schemas.EventFieldType.TIME: _TDEngineColumn.TIMESTAMP,
+            mm_schemas.EventFieldType.MODEL_ERROR: _TDEngineColumn.BINARY_1000,
         }
         tags = {
             mm_schemas.WriterEvent.ENDPOINT_ID: _TDEngineColumn.BINARY_64,
+            mm_schemas.EventFieldType.ERROR_TYPE: _TDEngineColumn.BINARY_64,
         }
         super().__init__(
             super_table=super_table,
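The new `group_by`, `preform_agg_funcs_columns`, `order_by`, and `desc` arguments extend the generated TDengine SQL with GROUP BY and ORDER BY clauses. A rough sketch of the string being assembled, under assumed table and column names (`sketch_query` is illustrative and not the mlrun API):

from io import StringIO

def sketch_query(table, columns_to_filter, agg_funcs, group_by=None, order_by=None, desc=False):
    query = StringIO()
    query.write("SELECT ")
    query.write(", ".join(f"{a}({col})" for a in agg_funcs for col in columns_to_filter))
    query.write(f" FROM {table}")
    if group_by:
        query.write(" GROUP BY " + ", ".join(group_by))
    if order_by:
        query.write(f" ORDER BY {order_by}{' DESC' if desc else ''}")
    return query.getvalue()

print(sketch_query("app_results", ["result_value"], ["avg"],
                   group_by=["endpoint_id"], order_by="endpoint_id", desc=True))
# SELECT avg(result_value) FROM app_results GROUP BY endpoint_id ORDER BY endpoint_id DESC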
mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py

@@ -13,12 +13,14 @@
 # limitations under the License.
 
 import json
+from datetime import datetime
 
 import mlrun.feature_store.steps
 from mlrun.common.schemas.model_monitoring import (
     EventFieldType,
     EventKeyMetrics,
 )
+from mlrun.utils import logger
 
 
 class ProcessBeforeTDEngine(mlrun.feature_store.steps.MapClass):
@@ -40,3 +42,34 @@ class ProcessBeforeTDEngine(mlrun.feature_store.steps.MapClass):
         event[EventFieldType.TABLE_COLUMN] = "_" + event.get(EventFieldType.ENDPOINT_ID)
 
         return event
+
+
+class ErrorExtractor(mlrun.feature_store.steps.MapClass):
+    def __init__(self, **kwargs):
+        """
+        Prepare the event for insertion into the TDEngine error table
+        """
+        super().__init__(**kwargs)
+
+    def do(self, event):
+        error = str(event.get("error"))
+        if len(error) > 1000:
+            error = error[-1000:]
+            logger.warning(
+                f"Error message exceeds 1000 chars: The error message writen to TSDB will be it last "
+                f"1000 chars, Error: {error}",
+                event=event,
+            )
+        timestamp = datetime.fromisoformat(event.get("when"))
+        endpoint_id = event[EventFieldType.ENDPOINT_ID]
+        event = {
+            EventFieldType.MODEL_ERROR: error,
+            EventFieldType.ERROR_TYPE: EventFieldType.INFER_ERROR,
+            EventFieldType.ENDPOINT_ID: endpoint_id,
+            EventFieldType.TIME: timestamp,
+            EventFieldType.PROJECT: event[EventFieldType.FUNCTION_URI].split("/")[0],
+            EventFieldType.TABLE_COLUMN: "_err_"
+            + event.get(EventFieldType.ENDPOINT_ID),
+        }
+        logger.info("Write error to errors TSDB table", event=event)
+        return event