mlrun 1.7.0rc5__py3-none-any.whl → 1.7.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mlrun has been flagged as potentially problematic; consult the registry's advisory page for more details.
- mlrun/__init__.py +11 -1
- mlrun/__main__.py +39 -121
- mlrun/{datastore/helpers.py → alerts/__init__.py} +2 -5
- mlrun/alerts/alert.py +248 -0
- mlrun/api/schemas/__init__.py +4 -3
- mlrun/artifacts/__init__.py +8 -3
- mlrun/artifacts/base.py +39 -254
- mlrun/artifacts/dataset.py +9 -190
- mlrun/artifacts/manager.py +73 -46
- mlrun/artifacts/model.py +30 -158
- mlrun/artifacts/plots.py +23 -380
- mlrun/common/constants.py +73 -2
- mlrun/common/db/sql_session.py +3 -2
- mlrun/common/formatters/__init__.py +21 -0
- mlrun/common/formatters/artifact.py +46 -0
- mlrun/common/formatters/base.py +113 -0
- mlrun/common/formatters/feature_set.py +44 -0
- mlrun/common/formatters/function.py +46 -0
- mlrun/common/formatters/pipeline.py +53 -0
- mlrun/common/formatters/project.py +51 -0
- mlrun/common/formatters/run.py +29 -0
- mlrun/common/helpers.py +11 -1
- mlrun/{runtimes → common/runtimes}/constants.py +32 -4
- mlrun/common/schemas/__init__.py +21 -4
- mlrun/common/schemas/alert.py +202 -0
- mlrun/common/schemas/api_gateway.py +113 -2
- mlrun/common/schemas/artifact.py +28 -1
- mlrun/common/schemas/auth.py +11 -0
- mlrun/common/schemas/client_spec.py +2 -1
- mlrun/common/schemas/common.py +7 -4
- mlrun/common/schemas/constants.py +3 -0
- mlrun/common/schemas/feature_store.py +58 -28
- mlrun/common/schemas/frontend_spec.py +8 -0
- mlrun/common/schemas/function.py +11 -0
- mlrun/common/schemas/hub.py +7 -9
- mlrun/common/schemas/model_monitoring/__init__.py +21 -4
- mlrun/common/schemas/model_monitoring/constants.py +136 -42
- mlrun/common/schemas/model_monitoring/grafana.py +9 -5
- mlrun/common/schemas/model_monitoring/model_endpoints.py +89 -41
- mlrun/common/schemas/notification.py +69 -12
- mlrun/{runtimes/mpijob/v1alpha1.py → common/schemas/pagination.py} +10 -13
- mlrun/common/schemas/pipeline.py +7 -0
- mlrun/common/schemas/project.py +67 -16
- mlrun/common/schemas/runs.py +17 -0
- mlrun/common/schemas/schedule.py +1 -1
- mlrun/common/schemas/workflow.py +10 -2
- mlrun/common/types.py +14 -1
- mlrun/config.py +224 -58
- mlrun/data_types/data_types.py +11 -1
- mlrun/data_types/spark.py +5 -4
- mlrun/data_types/to_pandas.py +75 -34
- mlrun/datastore/__init__.py +8 -10
- mlrun/datastore/alibaba_oss.py +131 -0
- mlrun/datastore/azure_blob.py +131 -43
- mlrun/datastore/base.py +107 -47
- mlrun/datastore/datastore.py +17 -7
- mlrun/datastore/datastore_profile.py +91 -7
- mlrun/datastore/dbfs_store.py +3 -7
- mlrun/datastore/filestore.py +1 -3
- mlrun/datastore/google_cloud_storage.py +92 -32
- mlrun/datastore/hdfs.py +5 -0
- mlrun/datastore/inmem.py +6 -3
- mlrun/datastore/redis.py +3 -2
- mlrun/datastore/s3.py +30 -12
- mlrun/datastore/snowflake_utils.py +45 -0
- mlrun/datastore/sources.py +274 -59
- mlrun/datastore/spark_utils.py +30 -0
- mlrun/datastore/store_resources.py +9 -7
- mlrun/datastore/storeytargets.py +151 -0
- mlrun/datastore/targets.py +374 -102
- mlrun/datastore/utils.py +68 -5
- mlrun/datastore/v3io.py +28 -50
- mlrun/db/auth_utils.py +152 -0
- mlrun/db/base.py +231 -22
- mlrun/db/factory.py +1 -4
- mlrun/db/httpdb.py +864 -228
- mlrun/db/nopdb.py +268 -16
- mlrun/errors.py +35 -5
- mlrun/execution.py +111 -38
- mlrun/feature_store/__init__.py +0 -2
- mlrun/feature_store/api.py +46 -53
- mlrun/feature_store/common.py +6 -11
- mlrun/feature_store/feature_set.py +48 -23
- mlrun/feature_store/feature_vector.py +13 -2
- mlrun/feature_store/ingestion.py +7 -6
- mlrun/feature_store/retrieval/base.py +9 -4
- mlrun/feature_store/retrieval/dask_merger.py +2 -0
- mlrun/feature_store/retrieval/job.py +13 -4
- mlrun/feature_store/retrieval/local_merger.py +2 -0
- mlrun/feature_store/retrieval/spark_merger.py +24 -32
- mlrun/feature_store/steps.py +38 -19
- mlrun/features.py +6 -14
- mlrun/frameworks/_common/plan.py +3 -3
- mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +7 -12
- mlrun/frameworks/_ml_common/plan.py +1 -1
- mlrun/frameworks/auto_mlrun/auto_mlrun.py +2 -2
- mlrun/frameworks/lgbm/__init__.py +1 -1
- mlrun/frameworks/lgbm/callbacks/callback.py +2 -4
- mlrun/frameworks/lgbm/model_handler.py +1 -1
- mlrun/frameworks/parallel_coordinates.py +4 -4
- mlrun/frameworks/pytorch/__init__.py +2 -2
- mlrun/frameworks/sklearn/__init__.py +1 -1
- mlrun/frameworks/sklearn/mlrun_interface.py +13 -3
- mlrun/frameworks/tf_keras/__init__.py +5 -2
- mlrun/frameworks/tf_keras/callbacks/logging_callback.py +1 -1
- mlrun/frameworks/tf_keras/mlrun_interface.py +2 -2
- mlrun/frameworks/xgboost/__init__.py +1 -1
- mlrun/k8s_utils.py +57 -12
- mlrun/launcher/__init__.py +1 -1
- mlrun/launcher/base.py +6 -5
- mlrun/launcher/client.py +13 -11
- mlrun/launcher/factory.py +1 -1
- mlrun/launcher/local.py +15 -5
- mlrun/launcher/remote.py +10 -3
- mlrun/lists.py +6 -2
- mlrun/model.py +297 -48
- mlrun/model_monitoring/__init__.py +1 -1
- mlrun/model_monitoring/api.py +152 -357
- mlrun/model_monitoring/applications/__init__.py +10 -0
- mlrun/model_monitoring/applications/_application_steps.py +190 -0
- mlrun/model_monitoring/applications/base.py +108 -0
- mlrun/model_monitoring/applications/context.py +341 -0
- mlrun/model_monitoring/{evidently_application.py → applications/evidently_base.py} +27 -22
- mlrun/model_monitoring/applications/histogram_data_drift.py +227 -91
- mlrun/model_monitoring/applications/results.py +99 -0
- mlrun/model_monitoring/controller.py +130 -303
- mlrun/model_monitoring/{stores/models/sqlite.py → db/__init__.py} +5 -10
- mlrun/model_monitoring/db/stores/__init__.py +136 -0
- mlrun/model_monitoring/db/stores/base/__init__.py +15 -0
- mlrun/model_monitoring/db/stores/base/store.py +213 -0
- mlrun/model_monitoring/db/stores/sqldb/__init__.py +13 -0
- mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +71 -0
- mlrun/model_monitoring/db/stores/sqldb/models/base.py +190 -0
- mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +103 -0
- mlrun/model_monitoring/{stores/models/mysql.py → db/stores/sqldb/models/sqlite.py} +19 -13
- mlrun/model_monitoring/db/stores/sqldb/sql_store.py +659 -0
- mlrun/model_monitoring/db/stores/v3io_kv/__init__.py +13 -0
- mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +726 -0
- mlrun/model_monitoring/db/tsdb/__init__.py +105 -0
- mlrun/model_monitoring/db/tsdb/base.py +448 -0
- mlrun/model_monitoring/db/tsdb/helpers.py +30 -0
- mlrun/model_monitoring/db/tsdb/tdengine/__init__.py +15 -0
- mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +298 -0
- mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +42 -0
- mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +522 -0
- mlrun/model_monitoring/db/tsdb/v3io/__init__.py +15 -0
- mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +158 -0
- mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +849 -0
- mlrun/model_monitoring/features_drift_table.py +34 -22
- mlrun/model_monitoring/helpers.py +177 -39
- mlrun/model_monitoring/model_endpoint.py +3 -2
- mlrun/model_monitoring/stream_processing.py +165 -398
- mlrun/model_monitoring/tracking_policy.py +7 -1
- mlrun/model_monitoring/writer.py +161 -125
- mlrun/package/packagers/default_packager.py +2 -2
- mlrun/package/packagers_manager.py +1 -0
- mlrun/package/utils/_formatter.py +2 -2
- mlrun/platforms/__init__.py +11 -10
- mlrun/platforms/iguazio.py +67 -228
- mlrun/projects/__init__.py +6 -1
- mlrun/projects/operations.py +47 -20
- mlrun/projects/pipelines.py +396 -249
- mlrun/projects/project.py +1125 -414
- mlrun/render.py +28 -22
- mlrun/run.py +207 -180
- mlrun/runtimes/__init__.py +76 -11
- mlrun/runtimes/base.py +40 -14
- mlrun/runtimes/daskjob.py +9 -2
- mlrun/runtimes/databricks_job/databricks_runtime.py +1 -0
- mlrun/runtimes/databricks_job/databricks_wrapper.py +1 -1
- mlrun/runtimes/funcdoc.py +1 -29
- mlrun/runtimes/kubejob.py +34 -128
- mlrun/runtimes/local.py +39 -10
- mlrun/runtimes/mpijob/__init__.py +0 -20
- mlrun/runtimes/mpijob/abstract.py +8 -8
- mlrun/runtimes/mpijob/v1.py +1 -1
- mlrun/runtimes/nuclio/api_gateway.py +646 -177
- mlrun/runtimes/nuclio/application/__init__.py +15 -0
- mlrun/runtimes/nuclio/application/application.py +758 -0
- mlrun/runtimes/nuclio/application/reverse_proxy.go +95 -0
- mlrun/runtimes/nuclio/function.py +188 -68
- mlrun/runtimes/nuclio/serving.py +57 -60
- mlrun/runtimes/pod.py +191 -58
- mlrun/runtimes/remotesparkjob.py +11 -8
- mlrun/runtimes/sparkjob/spark3job.py +17 -18
- mlrun/runtimes/utils.py +40 -73
- mlrun/secrets.py +6 -2
- mlrun/serving/__init__.py +8 -1
- mlrun/serving/remote.py +2 -3
- mlrun/serving/routers.py +89 -64
- mlrun/serving/server.py +54 -26
- mlrun/serving/states.py +187 -56
- mlrun/serving/utils.py +19 -11
- mlrun/serving/v2_serving.py +136 -63
- mlrun/track/tracker.py +2 -1
- mlrun/track/trackers/mlflow_tracker.py +5 -0
- mlrun/utils/async_http.py +26 -6
- mlrun/utils/db.py +18 -0
- mlrun/utils/helpers.py +375 -105
- mlrun/utils/http.py +2 -2
- mlrun/utils/logger.py +75 -9
- mlrun/utils/notifications/notification/__init__.py +14 -10
- mlrun/utils/notifications/notification/base.py +48 -0
- mlrun/utils/notifications/notification/console.py +2 -0
- mlrun/utils/notifications/notification/git.py +24 -1
- mlrun/utils/notifications/notification/ipython.py +2 -0
- mlrun/utils/notifications/notification/slack.py +96 -21
- mlrun/utils/notifications/notification/webhook.py +63 -2
- mlrun/utils/notifications/notification_pusher.py +146 -16
- mlrun/utils/regex.py +9 -0
- mlrun/utils/retryer.py +3 -2
- mlrun/utils/v3io_clients.py +2 -3
- mlrun/utils/version/version.json +2 -2
- mlrun-1.7.2.dist-info/METADATA +390 -0
- mlrun-1.7.2.dist-info/RECORD +351 -0
- {mlrun-1.7.0rc5.dist-info → mlrun-1.7.2.dist-info}/WHEEL +1 -1
- mlrun/feature_store/retrieval/conversion.py +0 -271
- mlrun/kfpops.py +0 -868
- mlrun/model_monitoring/application.py +0 -310
- mlrun/model_monitoring/batch.py +0 -974
- mlrun/model_monitoring/controller_handler.py +0 -37
- mlrun/model_monitoring/prometheus.py +0 -216
- mlrun/model_monitoring/stores/__init__.py +0 -111
- mlrun/model_monitoring/stores/kv_model_endpoint_store.py +0 -574
- mlrun/model_monitoring/stores/model_endpoint_store.py +0 -145
- mlrun/model_monitoring/stores/models/__init__.py +0 -27
- mlrun/model_monitoring/stores/models/base.py +0 -84
- mlrun/model_monitoring/stores/sql_model_endpoint_store.py +0 -382
- mlrun/platforms/other.py +0 -305
- mlrun-1.7.0rc5.dist-info/METADATA +0 -269
- mlrun-1.7.0rc5.dist-info/RECORD +0 -323
- {mlrun-1.7.0rc5.dist-info → mlrun-1.7.2.dist-info}/LICENSE +0 -0
- {mlrun-1.7.0rc5.dist-info → mlrun-1.7.2.dist-info}/entry_points.txt +0 -0
- {mlrun-1.7.0rc5.dist-info → mlrun-1.7.2.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,659 @@
|
|
|
1
|
+
# Copyright 2024 Iguazio
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
|
|
15
|
+
import datetime
|
|
16
|
+
import typing
|
|
17
|
+
import uuid
|
|
18
|
+
|
|
19
|
+
import pandas as pd
|
|
20
|
+
import sqlalchemy
|
|
21
|
+
import sqlalchemy.exc
|
|
22
|
+
import sqlalchemy.orm
|
|
23
|
+
from sqlalchemy.engine import Engine, make_url
|
|
24
|
+
from sqlalchemy.sql.elements import BinaryExpression
|
|
25
|
+
|
|
26
|
+
import mlrun.common.model_monitoring.helpers
|
|
27
|
+
import mlrun.common.schemas.model_monitoring as mm_schemas
|
|
28
|
+
import mlrun.model_monitoring.db.stores.sqldb.models
|
|
29
|
+
import mlrun.model_monitoring.helpers
|
|
30
|
+
from mlrun.common.db.sql_session import create_session, get_engine
|
|
31
|
+
from mlrun.model_monitoring.db import StoreBase
|
|
32
|
+
from mlrun.utils import datetime_now, logger
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class SQLStoreBase(StoreBase):
    # Marks this store implementation as the SQL-backed model endpoint target.
    type: typing.ClassVar[str] = mm_schemas.ModelEndpointTarget.SQL
    """
    Handles the DB operations when the DB target is from type SQL. For the SQL operations, we use SQLAlchemy, a Python
    SQL toolkit that handles the communication with the database. When using SQL for storing the model monitoring
    data, the user needs to provide a valid connection string for the database.
    """

    # Maps a table-kind identifier (e.g. model endpoints, app results) to its
    # SQLAlchemy declarative table object, filled by the `_init_*_table` methods.
    # NOTE(review): this is a class-level mutable dict, so it is shared by all
    # instances of this class — confirm that only one store per process is expected.
    _tables = {}

    def __init__(
        self,
        project: str,
        **kwargs,
    ):
        """
        Initialize SQL store target object.

        :param project: The name of the project.

        :raise MLRunInvalidArgumentError: If ``store_connection_string`` was not
            passed in ``kwargs`` — it is required to reach the SQL database.
        """

        super().__init__(project=project)

        if "store_connection_string" not in kwargs:
            raise mlrun.errors.MLRunInvalidArgumentError(
                "connection_string is a required parameter for SQLStoreBase."
            )

        self._sql_connection_string = kwargs.get("store_connection_string")
        # The engine is created lazily on first access (see the `engine` property).
        self._engine = None
        self._init_tables()
|
|
66
|
+
|
|
67
|
+
    @property
    def engine(self) -> Engine:
        # Lazily create and cache the SQLAlchemy engine for the configured DSN.
        if not self._engine:
            self._engine = get_engine(dsn=self._sql_connection_string)
        return self._engine
|
|
72
|
+
|
|
73
|
+
    def create_tables(self):
        """Create all model monitoring tables in the target database if missing."""
        self._create_tables_if_not_exist()
|
|
75
|
+
|
|
76
|
+
    def _init_tables(self):
        """Build the declarative table objects and register them in `_tables`."""
        self._init_model_endpoints_table()
        self._init_application_results_table()
        self._init_application_metrics_table()
        self._init_monitoring_schedules_table()
|
|
81
|
+
|
|
82
|
+
def _init_model_endpoints_table(self):
|
|
83
|
+
self.model_endpoints_table = (
|
|
84
|
+
mlrun.model_monitoring.db.stores.sqldb.models._get_model_endpoints_table(
|
|
85
|
+
connection_string=self._sql_connection_string
|
|
86
|
+
)
|
|
87
|
+
)
|
|
88
|
+
self._tables[mm_schemas.EventFieldType.MODEL_ENDPOINTS] = (
|
|
89
|
+
self.model_endpoints_table
|
|
90
|
+
)
|
|
91
|
+
|
|
92
|
+
def _init_application_results_table(self):
|
|
93
|
+
self.application_results_table = (
|
|
94
|
+
mlrun.model_monitoring.db.stores.sqldb.models._get_application_result_table(
|
|
95
|
+
connection_string=self._sql_connection_string
|
|
96
|
+
)
|
|
97
|
+
)
|
|
98
|
+
self._tables[mm_schemas.FileTargetKind.APP_RESULTS] = (
|
|
99
|
+
self.application_results_table
|
|
100
|
+
)
|
|
101
|
+
|
|
102
|
+
def _init_application_metrics_table(self) -> None:
|
|
103
|
+
self.application_metrics_table = mlrun.model_monitoring.db.stores.sqldb.models._get_application_metrics_table(
|
|
104
|
+
connection_string=self._sql_connection_string
|
|
105
|
+
)
|
|
106
|
+
self._tables[mm_schemas.FileTargetKind.APP_METRICS] = (
|
|
107
|
+
self.application_metrics_table
|
|
108
|
+
)
|
|
109
|
+
|
|
110
|
+
def _init_monitoring_schedules_table(self):
|
|
111
|
+
self.MonitoringSchedulesTable = mlrun.model_monitoring.db.stores.sqldb.models._get_monitoring_schedules_table(
|
|
112
|
+
connection_string=self._sql_connection_string
|
|
113
|
+
)
|
|
114
|
+
self._tables[mm_schemas.FileTargetKind.MONITORING_SCHEDULES] = (
|
|
115
|
+
self.MonitoringSchedulesTable
|
|
116
|
+
)
|
|
117
|
+
|
|
118
|
+
    def _write(self, table_name: str, event: dict[str, typing.Any]) -> None:
        """
        Create a new record in the SQL table.

        :param table_name: Target table name.
        :param event:      Event dictionary that will be written into the DB.
        """
        with self.engine.connect() as connection:
            # Convert the event into a single-row pandas DataFrame and append it
            # to the target table via pandas' SQL writer.
            event_df = pd.DataFrame([event])
            event_df.to_sql(table_name, con=connection, index=False, if_exists="append")
|
|
129
|
+
|
|
130
|
+
    def _update(
        self,
        attributes: dict[str, typing.Any],
        table: sqlalchemy.orm.decl_api.DeclarativeMeta,
        criteria: list[BinaryExpression],
    ) -> None:
        """
        Update a record in the SQL table.

        :param attributes: Dictionary of attributes that will be used for update the record. Note that the keys
                           of the attributes dictionary should exist in the SQL table.
        :param table:      SQLAlchemy declarative table.
        :param criteria:   A list of binary expressions that filter the query.
        """
        with create_session(dsn=self._sql_connection_string) as session:
            # Generate and commit the update session query.
            # synchronize_session=False: no in-session objects need refreshing here.
            session.query(
                table  # pyright: ignore[reportOptionalCall]
            ).filter(*criteria).update(attributes, synchronize_session=False)
            session.commit()
|
|
150
|
+
|
|
151
|
+
    def _get(
        self,
        table: sqlalchemy.orm.decl_api.DeclarativeMeta,
        criteria: list[BinaryExpression],
    ):
        """
        Get a single record from the SQL table, or ``None`` when no row matches.

        :param table:    SQLAlchemy declarative table.
        :param criteria: A list of binary expressions that filter the query.
        """
        with create_session(dsn=self._sql_connection_string) as session:
            logger.debug(
                "Querying the DB",
                table=table.__name__,
                criteria=[str(criterion) for criterion in criteria],
            )
            # Generate the get query; one_or_none() raises if multiple rows match.
            return (
                session.query(table)  # pyright: ignore[reportOptionalCall]
                .filter(*criteria)
                .one_or_none()
            )
|
|
174
|
+
|
|
175
|
+
    def _delete(
        self,
        table: sqlalchemy.orm.decl_api.DeclarativeMeta,
        criteria: list[BinaryExpression],
    ) -> None:
        """
        Delete records from the SQL table.

        :param table:    SQLAlchemy declarative table.
        :param criteria: A list of binary expressions that filter the query.
        """
        # Deleting from a non-existent table would raise; treat it as a no-op.
        if not self.engine.has_table(table.__tablename__):
            logger.debug(
                f"Table {table.__tablename__} does not exist in the database. Skipping deletion."
            )
            return
        with create_session(dsn=self._sql_connection_string) as session:
            # Generate and commit the delete query
            session.query(
                table  # pyright: ignore[reportOptionalCall]
            ).filter(*criteria).delete(synchronize_session=False)
            session.commit()
|
|
197
|
+
|
|
198
|
+
def write_model_endpoint(self, endpoint: dict[str, typing.Any]):
|
|
199
|
+
"""
|
|
200
|
+
Create a new endpoint record in the SQL table. This method also creates the model endpoints table within the
|
|
201
|
+
SQL database if not exist.
|
|
202
|
+
|
|
203
|
+
:param endpoint: model endpoint dictionary that will be written into the DB.
|
|
204
|
+
"""
|
|
205
|
+
|
|
206
|
+
# Adjust timestamps fields
|
|
207
|
+
endpoint[mm_schemas.EventFieldType.FIRST_REQUEST] = (endpoint)[
|
|
208
|
+
mm_schemas.EventFieldType.LAST_REQUEST
|
|
209
|
+
] = datetime_now()
|
|
210
|
+
|
|
211
|
+
self._write(
|
|
212
|
+
table_name=mm_schemas.EventFieldType.MODEL_ENDPOINTS, event=endpoint
|
|
213
|
+
)
|
|
214
|
+
|
|
215
|
+
def update_model_endpoint(
|
|
216
|
+
self, endpoint_id: str, attributes: dict[str, typing.Any]
|
|
217
|
+
):
|
|
218
|
+
"""
|
|
219
|
+
Update a model endpoint record with a given attributes.
|
|
220
|
+
|
|
221
|
+
:param endpoint_id: The unique id of the model endpoint.
|
|
222
|
+
:param attributes: Dictionary of attributes that will be used for update the model endpoint. Note that the keys
|
|
223
|
+
of the attributes dictionary should exist in the SQL table.
|
|
224
|
+
|
|
225
|
+
"""
|
|
226
|
+
|
|
227
|
+
attributes.pop(mm_schemas.EventFieldType.ENDPOINT_ID, None)
|
|
228
|
+
|
|
229
|
+
self._update(
|
|
230
|
+
attributes=attributes,
|
|
231
|
+
table=self.model_endpoints_table,
|
|
232
|
+
criteria=[self.model_endpoints_table.uid == endpoint_id],
|
|
233
|
+
)
|
|
234
|
+
|
|
235
|
+
def delete_model_endpoint(self, endpoint_id: str) -> None:
|
|
236
|
+
"""
|
|
237
|
+
Deletes the SQL record of a given model endpoint id.
|
|
238
|
+
|
|
239
|
+
:param endpoint_id: The unique id of the model endpoint.
|
|
240
|
+
"""
|
|
241
|
+
# Delete the model endpoint record using sqlalchemy ORM
|
|
242
|
+
self._delete(
|
|
243
|
+
table=self.model_endpoints_table,
|
|
244
|
+
criteria=[self.model_endpoints_table.uid == endpoint_id],
|
|
245
|
+
)
|
|
246
|
+
|
|
247
|
+
def get_model_endpoint(
|
|
248
|
+
self,
|
|
249
|
+
endpoint_id: str,
|
|
250
|
+
) -> dict[str, typing.Any]:
|
|
251
|
+
"""
|
|
252
|
+
Get a single model endpoint record.
|
|
253
|
+
|
|
254
|
+
:param endpoint_id: The unique id of the model endpoint.
|
|
255
|
+
|
|
256
|
+
:return: A model endpoint record as a dictionary.
|
|
257
|
+
|
|
258
|
+
:raise MLRunNotFoundError: If the model endpoints table was not found or the model endpoint id was not found.
|
|
259
|
+
"""
|
|
260
|
+
|
|
261
|
+
# Get the model endpoint record
|
|
262
|
+
endpoint_record = self._get(
|
|
263
|
+
table=self.model_endpoints_table,
|
|
264
|
+
criteria=[self.model_endpoints_table.uid == endpoint_id],
|
|
265
|
+
)
|
|
266
|
+
|
|
267
|
+
if not endpoint_record:
|
|
268
|
+
raise mlrun.errors.MLRunNotFoundError(f"Endpoint {endpoint_id} not found")
|
|
269
|
+
|
|
270
|
+
# Convert the database values and the table columns into a python dictionary
|
|
271
|
+
return endpoint_record.to_dict()
|
|
272
|
+
|
|
273
|
+
    def list_model_endpoints(
        self,
        model: str = None,
        function: str = None,
        labels: list[str] = None,
        top_level: bool = None,
        uids: list = None,
        include_stats: bool = None,
    ) -> list[dict[str, typing.Any]]:
        """
        List the model endpoints of this project, optionally filtered.

        :param model:         Filter by model name; ``:latest`` is appended when no tag is given.
        :param function:      Filter by function name (matched as ``<project>/<function>`` URI).
        :param labels:        Keep only endpoints whose labels satisfy all of these (applied in Python, post-query).
        :param top_level:     Keep only node/router endpoint types.
        :param uids:          Keep only endpoints whose uid is in this list.
        :param include_stats: When falsy, the (large) feature/current stats fields are stripped from each result.

        :return: A list of model endpoint dictionaries.
        """
        # Generate an empty model endpoints that will be filled afterwards with model endpoint dictionaries
        endpoint_list = []

        model_endpoints_table = (
            self.model_endpoints_table.__table__  # pyright: ignore[reportAttributeAccessIssue]
        )
        # Get the model endpoints records using sqlalchemy ORM
        with create_session(dsn=self._sql_connection_string) as session:
            # Generate the list query
            query = session.query(self.model_endpoints_table).filter_by(
                project=self.project
            )

            # Apply filters
            if model:
                # Default to the latest tag when the caller gave a bare model name.
                model = model if ":" in model else f"{model}:latest"
                query = self._filter_values(
                    query=query,
                    model_endpoints_table=model_endpoints_table,
                    key_filter=mm_schemas.EventFieldType.MODEL,
                    filtered_values=[model],
                )
            if function:
                function_uri = f"{self.project}/{function}"
                query = self._filter_values(
                    query=query,
                    model_endpoints_table=model_endpoints_table,
                    key_filter=mm_schemas.EventFieldType.FUNCTION_URI,
                    filtered_values=[function_uri],
                )
            if uids:
                query = self._filter_values(
                    query=query,
                    model_endpoints_table=model_endpoints_table,
                    key_filter=mm_schemas.EventFieldType.UID,
                    filtered_values=uids,
                    combined=False,
                )
            if top_level:
                node_ep = str(mm_schemas.EndpointType.NODE_EP.value)
                router_ep = str(mm_schemas.EndpointType.ROUTER.value)
                endpoint_types = [node_ep, router_ep]
                query = self._filter_values(
                    query=query,
                    model_endpoints_table=model_endpoints_table,
                    key_filter=mm_schemas.EventFieldType.ENDPOINT_TYPE,
                    filtered_values=endpoint_types,
                    combined=False,
                )
            # Convert the results from the DB into a ModelEndpoint object and append it to the model endpoints list
            for endpoint_record in query.all():
                endpoint_dict = endpoint_record.to_dict()

                # Filter labels
                if labels and not self._validate_labels(
                    endpoint_dict=endpoint_dict, labels=labels
                ):
                    continue

                if not include_stats:
                    # Exclude these fields when listing model endpoints to avoid returning too much data (ML-6594)
                    # TODO: Remove stats from table schema (ML-7196)
                    endpoint_dict.pop(mm_schemas.EventFieldType.FEATURE_STATS)
                    endpoint_dict.pop(mm_schemas.EventFieldType.CURRENT_STATS)

                endpoint_list.append(endpoint_dict)

        return endpoint_list
|
|
350
|
+
|
|
351
|
+
    def write_application_event(
        self,
        event: dict[str, typing.Any],
        kind: mm_schemas.WriterEventKind = mm_schemas.WriterEventKind.RESULT,
    ) -> None:
        """
        Write a new application event in the target table.

        :param event: An event dictionary that represents the application result or metric,
                      should be corresponded to the schema defined in the
                      :py:class:`~mm_constants.constants.WriterEvent` object.
        :param kind:  The type of the event, can be either "result" or "metric".

        :raise ValueError: If ``kind`` is neither result nor metric.
        """

        # Pick the target table by event kind.
        if kind == mm_schemas.WriterEventKind.METRIC:
            table = self.application_metrics_table
            table_name = mm_schemas.FileTargetKind.APP_METRICS
        elif kind == mm_schemas.WriterEventKind.RESULT:
            table = self.application_results_table
            table_name = mm_schemas.FileTargetKind.APP_RESULTS
        else:
            raise ValueError(f"Invalid {kind = }")

        # The uid is deterministic per (endpoint, application, name), so a repeated
        # event for the same triple is an update rather than a new row.
        application_result_uid = self._generate_application_result_uid(event, kind=kind)
        criteria = [table.uid == application_result_uid]

        application_record = self._get(table=table, criteria=criteria)
        if application_record:
            # Normalize the infer-time fields to UTC datetimes before the SQL update.
            self._convert_to_datetime(
                event=event, key=mm_schemas.WriterEvent.START_INFER_TIME
            )
            self._convert_to_datetime(
                event=event, key=mm_schemas.WriterEvent.END_INFER_TIME
            )
            # Update an existing application result
            self._update(attributes=event, table=table, criteria=criteria)
        else:
            # Write a new application result
            event[mm_schemas.EventFieldType.UID] = application_result_uid
            self._write(table_name=table_name, event=event)
|
|
391
|
+
|
|
392
|
+
@staticmethod
|
|
393
|
+
def _convert_to_datetime(event: dict[str, typing.Any], key: str) -> None:
|
|
394
|
+
if isinstance(event[key], str):
|
|
395
|
+
event[key] = datetime.datetime.fromisoformat(event[key])
|
|
396
|
+
event[key] = event[key].astimezone(tz=datetime.timezone.utc)
|
|
397
|
+
|
|
398
|
+
@staticmethod
|
|
399
|
+
def _generate_application_result_uid(
|
|
400
|
+
event: dict[str, typing.Any],
|
|
401
|
+
kind: mm_schemas.WriterEventKind = mm_schemas.WriterEventKind.RESULT,
|
|
402
|
+
) -> str:
|
|
403
|
+
if kind == mm_schemas.WriterEventKind.RESULT:
|
|
404
|
+
name = event[mm_schemas.ResultData.RESULT_NAME]
|
|
405
|
+
else:
|
|
406
|
+
name = event[mm_schemas.MetricData.METRIC_NAME]
|
|
407
|
+
return "_".join(
|
|
408
|
+
[
|
|
409
|
+
event[mm_schemas.WriterEvent.ENDPOINT_ID],
|
|
410
|
+
event[mm_schemas.WriterEvent.APPLICATION_NAME],
|
|
411
|
+
name,
|
|
412
|
+
]
|
|
413
|
+
)
|
|
414
|
+
|
|
415
|
+
@staticmethod
|
|
416
|
+
def _get_filter_criteria(
|
|
417
|
+
*,
|
|
418
|
+
table: sqlalchemy.orm.decl_api.DeclarativeMeta,
|
|
419
|
+
endpoint_id: str,
|
|
420
|
+
application_name: typing.Optional[str] = None,
|
|
421
|
+
) -> list[BinaryExpression]:
|
|
422
|
+
"""
|
|
423
|
+
Return the filter criteria for the given endpoint_id and application_name.
|
|
424
|
+
Note: the table object must include the relevant columns:
|
|
425
|
+
`endpoint_id` and `application_name`.
|
|
426
|
+
"""
|
|
427
|
+
criteria = [table.endpoint_id == endpoint_id]
|
|
428
|
+
if application_name is not None:
|
|
429
|
+
criteria.append(table.application_name == application_name)
|
|
430
|
+
return criteria
|
|
431
|
+
|
|
432
|
+
    def get_last_analyzed(self, endpoint_id: str, application_name: str) -> int:
        """
        Get the last analyzed time for the provided model endpoint and application.

        :param endpoint_id:      The unique id of the model endpoint.
        :param application_name: Registered application name.

        :return: Timestamp as a Unix time.
        :raise:  MLRunNotFoundError if last analyzed value is not found.
        """
        monitoring_schedule_record = self._get(
            table=self.MonitoringSchedulesTable,
            criteria=self._get_filter_criteria(
                table=self.MonitoringSchedulesTable,
                endpoint_id=endpoint_id,
                application_name=application_name,
            ),
        )
        if not monitoring_schedule_record:
            raise mlrun.errors.MLRunNotFoundError(
                f"No last analyzed value has been found for {application_name} "
                f"that processes model endpoint {endpoint_id}"
            )
        return monitoring_schedule_record.last_analyzed
|
|
456
|
+
|
|
457
|
+
    def update_last_analyzed(
        self, endpoint_id: str, application_name: str, last_analyzed: int
    ):
        """
        Update the last analyzed time for the provided model endpoint and application.

        :param endpoint_id:      The unique id of the model endpoint.
        :param application_name: Registered application name.
        :param last_analyzed:    Timestamp as a Unix time that represents the last analyzed time of a certain
                                 application and model endpoint.
        """
        criteria = self._get_filter_criteria(
            table=self.MonitoringSchedulesTable,
            endpoint_id=endpoint_id,
            application_name=application_name,
        )
        monitoring_schedule_record = self._get(
            table=self.MonitoringSchedulesTable, criteria=criteria
        )
        if not monitoring_schedule_record:
            # Add a new record with last analyzed value
            self._write(
                table_name=mm_schemas.FileTargetKind.MONITORING_SCHEDULES,
                event={
                    mm_schemas.SchedulingKeys.UID: uuid.uuid4().hex,
                    mm_schemas.SchedulingKeys.APPLICATION_NAME: application_name,
                    mm_schemas.SchedulingKeys.ENDPOINT_ID: endpoint_id,
                    mm_schemas.SchedulingKeys.LAST_ANALYZED: last_analyzed,
                },
            )

        # NOTE(review): this update runs unconditionally — when a new record was
        # just written above it re-writes the same value. Presumably harmless but
        # redundant; confirm before simplifying.
        self._update(
            attributes={mm_schemas.SchedulingKeys.LAST_ANALYZED: last_analyzed},
            table=self.MonitoringSchedulesTable,
            criteria=criteria,
        )
|
|
493
|
+
|
|
494
|
+
def _delete_last_analyzed(
|
|
495
|
+
self, endpoint_id: str, application_name: typing.Optional[str] = None
|
|
496
|
+
) -> None:
|
|
497
|
+
criteria = self._get_filter_criteria(
|
|
498
|
+
table=self.MonitoringSchedulesTable,
|
|
499
|
+
endpoint_id=endpoint_id,
|
|
500
|
+
application_name=application_name,
|
|
501
|
+
)
|
|
502
|
+
# Delete the model endpoint record using sqlalchemy ORM
|
|
503
|
+
self._delete(table=self.MonitoringSchedulesTable, criteria=criteria)
|
|
504
|
+
|
|
505
|
+
def _delete_application_result(
|
|
506
|
+
self, endpoint_id: str, application_name: typing.Optional[str] = None
|
|
507
|
+
) -> None:
|
|
508
|
+
criteria = self._get_filter_criteria(
|
|
509
|
+
table=self.application_results_table,
|
|
510
|
+
endpoint_id=endpoint_id,
|
|
511
|
+
application_name=application_name,
|
|
512
|
+
)
|
|
513
|
+
# Delete the relevant records from the results table
|
|
514
|
+
self._delete(table=self.application_results_table, criteria=criteria)
|
|
515
|
+
|
|
516
|
+
def _delete_application_metrics(
|
|
517
|
+
self, endpoint_id: str, application_name: typing.Optional[str] = None
|
|
518
|
+
) -> None:
|
|
519
|
+
criteria = self._get_filter_criteria(
|
|
520
|
+
table=self.application_metrics_table,
|
|
521
|
+
endpoint_id=endpoint_id,
|
|
522
|
+
application_name=application_name,
|
|
523
|
+
)
|
|
524
|
+
# Delete the relevant records from the metrics table
|
|
525
|
+
self._delete(table=self.application_metrics_table, criteria=criteria)
|
|
526
|
+
|
|
527
|
+
def _create_tables_if_not_exist(self):
|
|
528
|
+
self._init_tables()
|
|
529
|
+
|
|
530
|
+
for table in self._tables:
|
|
531
|
+
# Create table if not exist. The `metadata` contains the `ModelEndpointsTable`
|
|
532
|
+
db_name = make_url(self._sql_connection_string).database
|
|
533
|
+
if not self.engine.has_table(table):
|
|
534
|
+
logger.info(f"Creating table {table} on {db_name} db.")
|
|
535
|
+
self._tables[table].metadata.create_all(bind=self.engine)
|
|
536
|
+
else:
|
|
537
|
+
logger.info(f"Table {table} already exists on {db_name} db.")
|
|
538
|
+
|
|
539
|
+
@staticmethod
|
|
540
|
+
def _filter_values(
|
|
541
|
+
query: sqlalchemy.orm.query.Query,
|
|
542
|
+
model_endpoints_table: sqlalchemy.Table,
|
|
543
|
+
key_filter: str,
|
|
544
|
+
filtered_values: list,
|
|
545
|
+
combined=True,
|
|
546
|
+
) -> sqlalchemy.orm.query.Query:
|
|
547
|
+
"""Filtering the SQL query object according to the provided filters.
|
|
548
|
+
|
|
549
|
+
:param query: SQLAlchemy ORM query object. Includes the SELECT statements generated by the ORM
|
|
550
|
+
for getting the model endpoint data from the SQL table.
|
|
551
|
+
:param model_endpoints_table: SQLAlchemy table object that represents the model endpoints table.
|
|
552
|
+
:param key_filter: Key column to filter by.
|
|
553
|
+
:param filtered_values: List of values to filter the query the result.
|
|
554
|
+
:param combined: If true, then apply AND operator on the filtered values list. Otherwise, apply OR
|
|
555
|
+
operator.
|
|
556
|
+
|
|
557
|
+
return: SQLAlchemy ORM query object that represents the updated query with the provided
|
|
558
|
+
filters.
|
|
559
|
+
"""
|
|
560
|
+
|
|
561
|
+
if combined and len(filtered_values) > 1:
|
|
562
|
+
raise mlrun.errors.MLRunInvalidArgumentError(
|
|
563
|
+
"Can't apply combined policy with multiple values"
|
|
564
|
+
)
|
|
565
|
+
|
|
566
|
+
if not combined:
|
|
567
|
+
return query.filter(
|
|
568
|
+
model_endpoints_table.c[key_filter].in_(filtered_values)
|
|
569
|
+
)
|
|
570
|
+
|
|
571
|
+
# Generating a tuple with the relevant filters
|
|
572
|
+
filter_query = []
|
|
573
|
+
for _filter in filtered_values:
|
|
574
|
+
filter_query.append(model_endpoints_table.c[key_filter] == _filter)
|
|
575
|
+
|
|
576
|
+
# Apply AND operator on the SQL query object with the filters tuple
|
|
577
|
+
return query.filter(sqlalchemy.and_(*filter_query))
|
|
578
|
+
|
|
579
|
+
def delete_model_endpoints_resources(self) -> None:
|
|
580
|
+
"""
|
|
581
|
+
Delete all the model monitoring resources of the project in the SQL tables.
|
|
582
|
+
"""
|
|
583
|
+
logger.debug(
|
|
584
|
+
"Deleting model monitoring endpoints resources from the SQL tables",
|
|
585
|
+
project=self.project,
|
|
586
|
+
)
|
|
587
|
+
endpoints = self.list_model_endpoints()
|
|
588
|
+
|
|
589
|
+
for endpoint_dict in endpoints:
|
|
590
|
+
endpoint_id = endpoint_dict[mm_schemas.EventFieldType.UID]
|
|
591
|
+
logger.debug(
|
|
592
|
+
"Deleting model endpoint resources from the SQL tables",
|
|
593
|
+
endpoint_id=endpoint_id,
|
|
594
|
+
project=self.project,
|
|
595
|
+
)
|
|
596
|
+
# Delete last analyzed records
|
|
597
|
+
self._delete_last_analyzed(endpoint_id=endpoint_id)
|
|
598
|
+
|
|
599
|
+
# Delete application results and metrics records
|
|
600
|
+
self._delete_application_result(endpoint_id=endpoint_id)
|
|
601
|
+
self._delete_application_metrics(endpoint_id=endpoint_id)
|
|
602
|
+
|
|
603
|
+
# Delete model endpoint record
|
|
604
|
+
self.delete_model_endpoint(endpoint_id=endpoint_id)
|
|
605
|
+
logger.debug(
|
|
606
|
+
"Successfully deleted model endpoint resources",
|
|
607
|
+
endpoint_id=endpoint_id,
|
|
608
|
+
project=self.project,
|
|
609
|
+
)
|
|
610
|
+
|
|
611
|
+
logger.debug(
|
|
612
|
+
"Successfully deleted model monitoring endpoints resources from the SQL tables",
|
|
613
|
+
project=self.project,
|
|
614
|
+
)
|
|
615
|
+
|
|
616
|
+
def get_model_endpoint_metrics(
|
|
617
|
+
self, endpoint_id: str, type: mm_schemas.ModelEndpointMonitoringMetricType
|
|
618
|
+
) -> list[mm_schemas.ModelEndpointMonitoringMetric]:
|
|
619
|
+
"""
|
|
620
|
+
Fetch the model endpoint metrics or results (according to `type`) for the
|
|
621
|
+
requested endpoint.
|
|
622
|
+
"""
|
|
623
|
+
logger.debug(
|
|
624
|
+
"Fetching metrics for model endpoint",
|
|
625
|
+
project=self.project,
|
|
626
|
+
endpoint_id=endpoint_id,
|
|
627
|
+
type=type,
|
|
628
|
+
)
|
|
629
|
+
if type == mm_schemas.ModelEndpointMonitoringMetricType.METRIC:
|
|
630
|
+
table = self.application_metrics_table
|
|
631
|
+
name_col = mm_schemas.MetricData.METRIC_NAME
|
|
632
|
+
else:
|
|
633
|
+
table = self.application_results_table
|
|
634
|
+
name_col = mm_schemas.ResultData.RESULT_NAME
|
|
635
|
+
|
|
636
|
+
# Note: the block below does not use self._get, as we need here all the
|
|
637
|
+
# results, not only `one_or_none`.
|
|
638
|
+
with sqlalchemy.orm.Session(self.engine) as session:
|
|
639
|
+
metric_rows = (
|
|
640
|
+
session.query(table) # pyright: ignore[reportOptionalCall]
|
|
641
|
+
.filter(table.endpoint_id == endpoint_id)
|
|
642
|
+
.all()
|
|
643
|
+
)
|
|
644
|
+
|
|
645
|
+
return [
|
|
646
|
+
mm_schemas.ModelEndpointMonitoringMetric(
|
|
647
|
+
project=self.project,
|
|
648
|
+
app=metric_row.application_name,
|
|
649
|
+
type=type,
|
|
650
|
+
name=getattr(metric_row, name_col),
|
|
651
|
+
full_name=mlrun.model_monitoring.helpers._compose_full_name(
|
|
652
|
+
project=self.project,
|
|
653
|
+
app=metric_row.application_name,
|
|
654
|
+
type=type,
|
|
655
|
+
name=getattr(metric_row, name_col),
|
|
656
|
+
),
|
|
657
|
+
)
|
|
658
|
+
for metric_row in metric_rows
|
|
659
|
+
]
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
# Copyright 2024 Iguazio
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|