mlrun-1.7.0rc18-py3-none-any.whl → mlrun-1.7.0rc19-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.
Files changed (63)
  1. mlrun/__main__.py +5 -2
  2. mlrun/common/constants.py +64 -3
  3. mlrun/common/formatters/__init__.py +16 -0
  4. mlrun/common/formatters/base.py +59 -0
  5. mlrun/common/formatters/function.py +41 -0
  6. mlrun/common/runtimes/constants.py +29 -4
  7. mlrun/common/schemas/__init__.py +0 -1
  8. mlrun/common/schemas/api_gateway.py +52 -0
  9. mlrun/common/schemas/frontend_spec.py +1 -0
  10. mlrun/common/schemas/model_monitoring/__init__.py +6 -3
  11. mlrun/common/schemas/model_monitoring/constants.py +2 -7
  12. mlrun/config.py +7 -2
  13. mlrun/datastore/sources.py +16 -22
  14. mlrun/datastore/store_resources.py +5 -1
  15. mlrun/datastore/targets.py +3 -2
  16. mlrun/datastore/utils.py +42 -0
  17. mlrun/execution.py +16 -6
  18. mlrun/feature_store/ingestion.py +7 -6
  19. mlrun/feature_store/retrieval/job.py +4 -1
  20. mlrun/frameworks/parallel_coordinates.py +2 -1
  21. mlrun/frameworks/tf_keras/__init__.py +4 -1
  22. mlrun/launcher/client.py +4 -2
  23. mlrun/launcher/local.py +8 -2
  24. mlrun/launcher/remote.py +8 -2
  25. mlrun/model.py +5 -1
  26. mlrun/model_monitoring/db/stores/__init__.py +0 -2
  27. mlrun/model_monitoring/db/stores/base/store.py +1 -2
  28. mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +43 -21
  29. mlrun/model_monitoring/db/stores/sqldb/models/base.py +32 -2
  30. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +25 -5
  31. mlrun/model_monitoring/db/stores/sqldb/models/sqlite.py +5 -0
  32. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +207 -139
  33. mlrun/model_monitoring/db/tsdb/__init__.py +1 -1
  34. mlrun/model_monitoring/db/tsdb/base.py +225 -38
  35. mlrun/model_monitoring/db/tsdb/helpers.py +30 -0
  36. mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +48 -15
  37. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +182 -16
  38. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +229 -42
  39. mlrun/model_monitoring/helpers.py +13 -0
  40. mlrun/model_monitoring/writer.py +36 -11
  41. mlrun/projects/operations.py +8 -5
  42. mlrun/projects/pipelines.py +42 -15
  43. mlrun/projects/project.py +22 -6
  44. mlrun/runtimes/base.py +2 -1
  45. mlrun/runtimes/local.py +4 -1
  46. mlrun/runtimes/nuclio/api_gateway.py +32 -8
  47. mlrun/runtimes/nuclio/application/application.py +3 -3
  48. mlrun/runtimes/nuclio/function.py +1 -4
  49. mlrun/runtimes/utils.py +5 -6
  50. mlrun/serving/server.py +2 -1
  51. mlrun/utils/helpers.py +8 -6
  52. mlrun/utils/logger.py +28 -1
  53. mlrun/utils/notifications/notification/__init__.py +14 -9
  54. mlrun/utils/notifications/notification_pusher.py +10 -3
  55. mlrun/utils/v3io_clients.py +0 -1
  56. mlrun/utils/version/version.json +2 -2
  57. {mlrun-1.7.0rc18.dist-info → mlrun-1.7.0rc19.dist-info}/METADATA +3 -3
  58. {mlrun-1.7.0rc18.dist-info → mlrun-1.7.0rc19.dist-info}/RECORD +62 -59
  59. mlrun/model_monitoring/db/v3io_tsdb_reader.py +0 -335
  60. {mlrun-1.7.0rc18.dist-info → mlrun-1.7.0rc19.dist-info}/LICENSE +0 -0
  61. {mlrun-1.7.0rc18.dist-info → mlrun-1.7.0rc19.dist-info}/WHEEL +0 -0
  62. {mlrun-1.7.0rc18.dist-info → mlrun-1.7.0rc19.dist-info}/entry_points.txt +0 -0
  63. {mlrun-1.7.0rc18.dist-info → mlrun-1.7.0rc19.dist-info}/top_level.txt +0 -0

mlrun/model_monitoring/db/stores/sqldb/sql_store.py

@@ -19,6 +19,9 @@ import uuid
 
 import pandas as pd
 import sqlalchemy
+import sqlalchemy.exc
+import sqlalchemy.orm
+from sqlalchemy.sql.elements import BinaryExpression
 
 import mlrun.common.model_monitoring.helpers
 import mlrun.common.schemas.model_monitoring as mm_schemas
@@ -26,6 +29,7 @@ import mlrun.model_monitoring.db
 import mlrun.model_monitoring.db.stores.sqldb.models
 import mlrun.model_monitoring.helpers
 from mlrun.common.db.sql_session import create_session, get_engine
+from mlrun.utils import datetime_now, logger
 
 
 class SQLStoreBase(mlrun.model_monitoring.db.StoreBase):
@@ -35,7 +39,6 @@ class SQLStoreBase(mlrun.model_monitoring.db.StoreBase):
     data, the user needs to provide a valid connection string for the database.
     """
 
-    _engine = None
     _tables = {}
 
     def __init__(
@@ -63,6 +66,7 @@ class SQLStoreBase(mlrun.model_monitoring.db.StoreBase):
     def _init_tables(self):
         self._init_model_endpoints_table()
         self._init_application_results_table()
+        self._init_application_metrics_table()
         self._init_monitoring_schedules_table()
 
     def _init_model_endpoints_table(self):
@@ -76,13 +80,21 @@ class SQLStoreBase(mlrun.model_monitoring.db.StoreBase):
         )
 
     def _init_application_results_table(self):
-        self.ApplicationResultsTable = (
+        self.application_results_table = (
             mlrun.model_monitoring.db.stores.sqldb.models._get_application_result_table(
                 connection_string=self._sql_connection_string
             )
         )
         self._tables[mm_schemas.FileTargetKind.APP_RESULTS] = (
-            self.ApplicationResultsTable
+            self.application_results_table
+        )
+
+    def _init_application_metrics_table(self) -> None:
+        self.application_metrics_table = mlrun.model_monitoring.db.stores.sqldb.models._get_application_metrics_table(
+            connection_string=self._sql_connection_string
+        )
+        self._tables[mm_schemas.FileTargetKind.APP_METRICS] = (
+            self.application_metrics_table
         )
 
     def _init_monitoring_schedules_table(self):
@@ -93,61 +105,61 @@ class SQLStoreBase(mlrun.model_monitoring.db.StoreBase):
             self.MonitoringSchedulesTable
         )
 
-    def _write(self, table: str, event: dict[str, typing.Any]):
+    def _write(self, table_name: str, event: dict[str, typing.Any]) -> None:
         """
         Create a new record in the SQL table.
 
-        :param table: Target table name.
-        :param event: Event dictionary that will be written into the DB.
+        :param table_name: Target table name.
+        :param event:      Event dictionary that will be written into the DB.
         """
-
         with self._engine.connect() as connection:
             # Convert the result into a pandas Dataframe and write it into the database
             event_df = pd.DataFrame([event])
-
-            event_df.to_sql(table, con=connection, index=False, if_exists="append")
+            event_df.to_sql(table_name, con=connection, index=False, if_exists="append")
 
     def _update(
         self,
         attributes: dict[str, typing.Any],
         table: sqlalchemy.orm.decl_api.DeclarativeMeta,
-        **filtered_values,
-    ):
+        criteria: list[BinaryExpression],
+    ) -> None:
         """
         Update a record in the SQL table.
 
         :param attributes: Dictionary of attributes that will be used for update the record. Note that the keys
                            of the attributes dictionary should exist in the SQL table.
         :param table:      SQLAlchemy declarative table.
-
+        :param criteria:   A list of binary expressions that filter the query.
         """
-        filter_query_ = []
-        for _filter in filtered_values:
-            filter_query_.append(f"{_filter} = '{filtered_values[_filter]}'")
-
         with create_session(dsn=self._sql_connection_string) as session:
             # Generate and commit the update session query
-            session.query(table).filter(sqlalchemy.sql.text(*filter_query_)).update(
-                attributes, synchronize_session=False
-            )
+            session.query(
+                table  # pyright: ignore[reportOptionalCall]
+            ).filter(*criteria).update(attributes, synchronize_session=False)
             session.commit()
 
-    def _get(self, table: sqlalchemy.orm.decl_api.DeclarativeMeta, **filtered_values):
+    def _get(
+        self,
+        table: sqlalchemy.orm.decl_api.DeclarativeMeta,
+        criteria: list[BinaryExpression],
+    ):
         """
         Get a record from the SQL table.
 
-        param table: SQLAlchemy declarative table.
+        param table:     SQLAlchemy declarative table.
+        :param criteria: A list of binary expressions that filter the query.
         """
-
-        filter_query_ = []
-        for _filter in filtered_values:
-            filter_query_.append(f"{_filter} = '{filtered_values[_filter]}'")
         with create_session(dsn=self._sql_connection_string) as session:
             try:
+                logger.debug(
+                    "Querying the DB",
+                    table=table.__name__,
+                    criteria=[str(criterion) for criterion in criteria],
+                )
                 # Generate the get query
                 return (
-                    session.query(table)
-                    .filter(sqlalchemy.sql.text(*filter_query_))
+                    session.query(table)  # pyright: ignore[reportOptionalCall]
                    .filter(*criteria)
                     .one_or_none()
                 )
             except sqlalchemy.exc.ProgrammingError:
@@ -156,21 +168,21 @@ class SQLStoreBase(mlrun.model_monitoring.db.StoreBase):
                 return
 
     def _delete(
-        self, table: sqlalchemy.orm.decl_api.DeclarativeMeta, **filtered_values
-    ):
+        self,
+        table: sqlalchemy.orm.decl_api.DeclarativeMeta,
+        criteria: list[BinaryExpression],
+    ) -> None:
         """
         Delete records from the SQL table.
 
-        param table: SQLAlchemy declarative table.
+        param table:     SQLAlchemy declarative table.
+        :param criteria: A list of binary expressions that filter the query.
         """
-        filter_query_ = []
-        for _filter in filtered_values:
-            filter_query_.append(f"{_filter} = '{filtered_values[_filter]}'")
         with create_session(dsn=self._sql_connection_string) as session:
             # Generate and commit the delete query
-            session.query(table).filter(sqlalchemy.sql.text(*filter_query_)).delete(
-                synchronize_session=False
-            )
+            session.query(
+                table  # pyright: ignore[reportOptionalCall]
+            ).filter(*criteria).delete(synchronize_session=False)
             session.commit()
 
     def write_model_endpoint(self, endpoint: dict[str, typing.Any]):
@@ -184,11 +196,10 @@ class SQLStoreBase(mlrun.model_monitoring.db.StoreBase):
         # Adjust timestamps fields
         endpoint[mm_schemas.EventFieldType.FIRST_REQUEST] = (endpoint)[
             mm_schemas.EventFieldType.LAST_REQUEST
-        ] = mlrun.utils.datetime_now()
+        ] = datetime_now()
 
         self._write(
-            table=mm_schemas.EventFieldType.MODEL_ENDPOINTS,
-            event=endpoint,
+            table_name=mm_schemas.EventFieldType.MODEL_ENDPOINTS, event=endpoint
         )
 
     def update_model_endpoint(
@@ -206,23 +217,24 @@ class SQLStoreBase(mlrun.model_monitoring.db.StoreBase):
 
         attributes.pop(mm_schemas.EventFieldType.ENDPOINT_ID, None)
 
-        filter_endpoint = {mm_schemas.EventFieldType.UID: endpoint_id}
-
         self._update(
-            attributes=attributes, table=self.ModelEndpointsTable, **filter_endpoint
+            attributes=attributes,
+            table=self.ModelEndpointsTable,
+            criteria=[self.ModelEndpointsTable.uid == endpoint_id],
         )
 
-    def delete_model_endpoint(self, endpoint_id: str):
+    def delete_model_endpoint(self, endpoint_id: str) -> None:
         """
         Deletes the SQL record of a given model endpoint id.
 
         :param endpoint_id: The unique id of the model endpoint.
         """
         self._init_model_endpoints_table()
-
-        filter_endpoint = {mm_schemas.EventFieldType.UID: endpoint_id}
         # Delete the model endpoint record using sqlalchemy ORM
-        self._delete(table=self.ModelEndpointsTable, **filter_endpoint)
+        self._delete(
+            table=self.ModelEndpointsTable,
+            criteria=[self.ModelEndpointsTable.uid == endpoint_id],
+        )
 
     def get_model_endpoint(
         self,
@@ -239,9 +251,11 @@ class SQLStoreBase(mlrun.model_monitoring.db.StoreBase):
         """
         self._init_model_endpoints_table()
 
-        # Get the model endpoint record using sqlalchemy ORM
-        filter_endpoint = {mm_schemas.EventFieldType.UID: endpoint_id}
-        endpoint_record = self._get(table=self.ModelEndpointsTable, **filter_endpoint)
+        # Get the model endpoint record
+        endpoint_record = self._get(
+            table=self.ModelEndpointsTable,
+            criteria=[self.ModelEndpointsTable.uid == endpoint_id],
+        )
 
         if not endpoint_record:
             raise mlrun.errors.MLRunNotFoundError(f"Endpoint {endpoint_id} not found")
@@ -277,7 +291,7 @@ class SQLStoreBase(mlrun.model_monitoring.db.StoreBase):
         endpoint_list = []
 
         model_endpoints_table = (
-            self.ModelEndpointsTable.__table__  # pyright: ignore[reportGeneralTypeIssues]
+            self.ModelEndpointsTable.__table__  # pyright: ignore[reportAttributeAccessIssue]
         )
 
         # Get the model endpoints records using sqlalchemy ORM
@@ -343,66 +357,81 @@ class SQLStoreBase(mlrun.model_monitoring.db.StoreBase):
         """
         Write a new application event in the target table.
 
-        :param event: An event dictionary that represents the application result, should be corresponded to the
-                      schema defined in the :py:class:`~mm_constants.constants.WriterEvent`
-                      object.
+        :param event: An event dictionary that represents the application result or metric,
+                      should be corresponded to the schema defined in the
+                      :py:class:`~mm_constants.constants.WriterEvent` object.
         :param kind: The type of the event, can be either "result" or "metric".
         """
 
         if kind == mm_schemas.WriterEventKind.METRIC:
-            # TODO : Implement the logic for writing metrics to MySQL
-            return
-
-        self._init_application_results_table()
+            self._init_application_metrics_table()
+            table = self.application_metrics_table
+            table_name = mm_schemas.FileTargetKind.APP_METRICS
+        elif kind == mm_schemas.WriterEventKind.RESULT:
+            self._init_application_results_table()
+            table = self.application_results_table
+            table_name = mm_schemas.FileTargetKind.APP_RESULTS
+        else:
+            raise ValueError(f"Invalid {kind = }")
 
-        application_filter_dict = {
-            mm_schemas.EventFieldType.UID: self._generate_application_result_uid(event)
-        }
+        application_result_uid = self._generate_application_result_uid(event, kind=kind)
+        criteria = [table.uid == application_result_uid]
 
-        application_record = self._get(
-            table=self.ApplicationResultsTable, **application_filter_dict
-        )
+        application_record = self._get(table=table, criteria=criteria)
 
         if application_record:
             self._convert_to_datetime(
-                event=event,
-                key=mm_schemas.WriterEvent.START_INFER_TIME,
+                event=event, key=mm_schemas.WriterEvent.START_INFER_TIME
             )
             self._convert_to_datetime(
-                event=event,
-                key=mm_schemas.WriterEvent.END_INFER_TIME,
+                event=event, key=mm_schemas.WriterEvent.END_INFER_TIME
            )
             # Update an existing application result
-            self._update(
-                attributes=event,
-                table=self.ApplicationResultsTable,
-                **application_filter_dict,
-            )
+            self._update(attributes=event, table=table, criteria=criteria)
         else:
             # Write a new application result
-            event[mm_schemas.EventFieldType.UID] = application_filter_dict[
-                mm_schemas.EventFieldType.UID
-            ]
-
-            self._write(
-                table=mm_schemas.FileTargetKind.APP_RESULTS,
-                event=event,
-            )
+            event[mm_schemas.EventFieldType.UID] = application_result_uid
+            self._write(table_name=table_name, event=event)
 
     @staticmethod
-    def _convert_to_datetime(event: dict[str, typing.Any], key: str):
+    def _convert_to_datetime(event: dict[str, typing.Any], key: str) -> None:
         if isinstance(event[key], str):
             event[key] = datetime.datetime.fromisoformat(event[key])
+        event[key] = event[key].astimezone(tz=datetime.timezone.utc)
 
     @staticmethod
-    def _generate_application_result_uid(event: dict[str, typing.Any]) -> str:
-        return (
-            event[mm_schemas.WriterEvent.ENDPOINT_ID]
-            + "_"
-            + event[mm_schemas.WriterEvent.APPLICATION_NAME]
-            + "_"
-            + event[mm_schemas.ResultData.RESULT_NAME]
+    def _generate_application_result_uid(
+        event: dict[str, typing.Any],
+        kind: mm_schemas.WriterEventKind = mm_schemas.WriterEventKind.RESULT,
+    ) -> str:
+        if kind == mm_schemas.WriterEventKind.RESULT:
+            name = event[mm_schemas.ResultData.RESULT_NAME]
+        else:
+            name = event[mm_schemas.MetricData.METRIC_NAME]
+        return "_".join(
+            [
+                event[mm_schemas.WriterEvent.ENDPOINT_ID],
+                event[mm_schemas.WriterEvent.APPLICATION_NAME],
+                name,
+            ]
         )
 
+    @staticmethod
+    def _get_filter_criteria(
+        *,
+        table: sqlalchemy.orm.decl_api.DeclarativeMeta,
+        endpoint_id: str,
+        application_name: typing.Optional[str] = None,
+    ) -> list[BinaryExpression]:
+        """
+        Return the filter criteria for the given endpoint_id and application_name.
+        Note: the table object must include the relevant columns:
+        `endpoint_id` and `application_name`.
+        """
+        criteria = [table.endpoint_id == endpoint_id]
+        if application_name is not None:
+            criteria.append(table.application_name == application_name)
+        return criteria
+
     def get_last_analyzed(self, endpoint_id: str, application_name: str) -> int:
         """
         Get the last analyzed time for the provided model endpoint and application.
@@ -412,14 +441,15 @@ class SQLStoreBase(mlrun.model_monitoring.db.StoreBase):
 
         :return: Timestamp as a Unix time.
         :raise: MLRunNotFoundError if last analyzed value is not found.
-
         """
         self._init_monitoring_schedules_table()
-        application_filter_dict = self.filter_endpoint_and_application_name(
-            endpoint_id=endpoint_id, application_name=application_name
-        )
         monitoring_schedule_record = self._get(
-            table=self.MonitoringSchedulesTable, **application_filter_dict
+            table=self.MonitoringSchedulesTable,
+            criteria=self._get_filter_criteria(
+                table=self.MonitoringSchedulesTable,
+                endpoint_id=endpoint_id,
+                application_name=application_name,
+            ),
        )
         if not monitoring_schedule_record:
             raise mlrun.errors.MLRunNotFoundError(
@@ -441,16 +471,18 @@ class SQLStoreBase(mlrun.model_monitoring.db.StoreBase):
         """
         self._init_monitoring_schedules_table()
 
-        application_filter_dict = self.filter_endpoint_and_application_name(
-            endpoint_id=endpoint_id, application_name=application_name
+        criteria = self._get_filter_criteria(
+            table=self.MonitoringSchedulesTable,
+            endpoint_id=endpoint_id,
+            application_name=application_name,
         )
         monitoring_schedule_record = self._get(
-            table=self.MonitoringSchedulesTable, **application_filter_dict
+            table=self.MonitoringSchedulesTable, criteria=criteria
         )
         if not monitoring_schedule_record:
-            # Add a new record with empty last analyzed value
+            # Add a new record with last analyzed value
             self._write(
-                table=mm_schemas.FileTargetKind.MONITORING_SCHEDULES,
+                table_name=mm_schemas.FileTargetKind.MONITORING_SCHEDULES,
                 event={
                     mm_schemas.SchedulingKeys.UID: uuid.uuid4().hex,
                     mm_schemas.SchedulingKeys.APPLICATION_NAME: application_name,
@@ -462,30 +494,44 @@ class SQLStoreBase(mlrun.model_monitoring.db.StoreBase):
         self._update(
             attributes={mm_schemas.SchedulingKeys.LAST_ANALYZED: last_analyzed},
             table=self.MonitoringSchedulesTable,
-            **application_filter_dict,
+            criteria=criteria,
         )
 
-    def _delete_last_analyzed(self, endpoint_id: str = "", application_name: str = ""):
+    def _delete_last_analyzed(
+        self, endpoint_id: str, application_name: typing.Optional[str] = None
+    ) -> None:
         self._init_monitoring_schedules_table()
-
-        application_filter_dict = self.filter_endpoint_and_application_name(
-            endpoint_id=endpoint_id, application_name=application_name
+        criteria = self._get_filter_criteria(
+            table=self.MonitoringSchedulesTable,
+            endpoint_id=endpoint_id,
+            application_name=application_name,
        )
-
         # Delete the model endpoint record using sqlalchemy ORM
-        self._delete(table=self.MonitoringSchedulesTable, **application_filter_dict)
+        self._delete(table=self.MonitoringSchedulesTable, criteria=criteria)
 
     def _delete_application_result(
-        self, endpoint_id: str = "", application_name: str = ""
-    ):
+        self, endpoint_id: str, application_name: typing.Optional[str] = None
+    ) -> None:
         self._init_application_results_table()
-
-        application_filter_dict = self.filter_endpoint_and_application_name(
-            endpoint_id=endpoint_id, application_name=application_name
+        criteria = self._get_filter_criteria(
+            table=self.application_results_table,
+            endpoint_id=endpoint_id,
+            application_name=application_name,
        )
+        # Delete the relevant records from the results table
+        self._delete(table=self.application_results_table, criteria=criteria)
 
-        # Delete the model endpoint record using sqlalchemy ORM
-        self._delete(table=self.ApplicationResultsTable, **application_filter_dict)
+    def _delete_application_metrics(
+        self, endpoint_id: str, application_name: typing.Optional[str] = None
+    ) -> None:
+        self._init_application_metrics_table()
+        criteria = self._get_filter_criteria(
+            table=self.application_metrics_table,
+            endpoint_id=endpoint_id,
+            application_name=application_name,
+        )
+        # Delete the relevant records from the metrics table
+        self._delete(table=self.application_metrics_table, criteria=criteria)
 
     def _create_tables_if_not_exist(self):
         self._init_tables()
@@ -493,9 +539,7 @@ class SQLStoreBase(mlrun.model_monitoring.db.StoreBase):
         for table in self._tables:
             # Create table if not exist. The `metadata` contains the `ModelEndpointsTable`
             if not self._engine.has_table(table):
-                self._tables[table].metadata.create_all(  # pyright: ignore[reportGeneralTypeIssues]
-                    bind=self._engine
-                )
+                self._tables[table].metadata.create_all(bind=self._engine)
 
     @staticmethod
     def _filter_values(
@@ -572,31 +616,12 @@ class SQLStoreBase(mlrun.model_monitoring.db.StoreBase):
 
         return True
 
-    @staticmethod
-    def filter_endpoint_and_application_name(
-        endpoint_id: str, application_name: str
-    ) -> dict[str, str]:
-        """Generate a dictionary filter for endpoint id and application name"""
-        if not endpoint_id and not application_name:
-            raise mlrun.errors.MLRunBadRequestError(
-                "Please provide a valid endpoint_id and/or application_name"
-            )
-        application_filter_dict = {}
-        if endpoint_id:
-            application_filter_dict[mm_schemas.SchedulingKeys.ENDPOINT_ID] = endpoint_id
-        if application_name:
-            application_filter_dict[mm_schemas.SchedulingKeys.APPLICATION_NAME] = (
-                application_name
-            )
-        return application_filter_dict
-
-    def delete_model_endpoints_resources(self):
+    def delete_model_endpoints_resources(self) -> None:
         """
-        Delete all model endpoints resources in both SQL and the time series DB.
-
+        Delete all the model monitoring resources of the project in the SQL tables.
         """
-
         endpoints = self.list_model_endpoints()
+        logger.debug("Deleting model monitoring resources", project=self.project)
 
         for endpoint_dict in endpoints:
             endpoint_id = endpoint_dict[mm_schemas.EventFieldType.UID]
@@ -604,8 +629,9 @@ class SQLStoreBase(mlrun.model_monitoring.db.StoreBase):
             # Delete last analyzed records
             self._delete_last_analyzed(endpoint_id=endpoint_id)
 
-            # Delete application results records
+            # Delete application results and metrics records
            self._delete_application_result(endpoint_id=endpoint_id)
+            self._delete_application_metrics(endpoint_id=endpoint_id)
 
             # Delete model endpoint record
             self.delete_model_endpoint(endpoint_id=endpoint_id)
@@ -613,4 +639,46 @@ class SQLStoreBase(mlrun.model_monitoring.db.StoreBase):
     def get_model_endpoint_metrics(
         self, endpoint_id: str, type: mm_schemas.ModelEndpointMonitoringMetricType
     ) -> list[mm_schemas.ModelEndpointMonitoringMetric]:
-        raise NotImplementedError
+        """
+        Fetch the model endpoint metrics or results (according to `type`) for the
+        requested endpoint.
+        """
+        logger.debug(
+            "Fetching metrics for model endpoint",
+            project=self.project,
+            endpoint_id=endpoint_id,
+            type=type,
+        )
+        if type == mm_schemas.ModelEndpointMonitoringMetricType.METRIC:
+            self._init_application_metrics_table()
+            table = self.application_metrics_table
+            name_col = mm_schemas.MetricData.METRIC_NAME
+        else:
+            self._init_application_results_table()
+            table = self.application_results_table
+            name_col = mm_schemas.ResultData.RESULT_NAME
+
+        # Note: the block below does not use self._get, as we need here all the
+        # results, not only `one_or_none`.
+        with sqlalchemy.orm.Session(self._engine) as session:
+            metric_rows = (
+                session.query(table)  # pyright: ignore[reportOptionalCall]
+                .filter(table.endpoint_id == endpoint_id)
+                .all()
+            )
+
+        return [
+            mm_schemas.ModelEndpointMonitoringMetric(
+                project=self.project,
+                app=metric_row.application_name,
+                type=type,
+                name=getattr(metric_row, name_col),
+                full_name=mlrun.model_monitoring.helpers._compose_full_name(
+                    project=self.project,
+                    app=metric_row.application_name,
+                    type=type,
+                    name=getattr(metric_row, name_col),
+                ),
+            )
+            for metric_row in metric_rows
+        ]
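
The bulk of the sql_store.py change above replaces string-built filters (values formatted into sqlalchemy.sql.text) with lists of SQLAlchemy BinaryExpression objects produced by column comparisons, which the helpers then unpack into .filter(*criteria). Below is a minimal, self-contained sketch of that pattern against an in-memory SQLite database; the table, columns, and helper function are illustrative stand-ins, not mlrun's actual models.

import typing

import sqlalchemy
import sqlalchemy.orm
from sqlalchemy.sql.elements import BinaryExpression

Base = sqlalchemy.orm.declarative_base()


class MonitoringSchedule(Base):
    # Hypothetical table for illustration only; not the mlrun schema.
    __tablename__ = "monitoring_schedules"

    uid = sqlalchemy.Column(sqlalchemy.String(40), primary_key=True)
    endpoint_id = sqlalchemy.Column(sqlalchemy.String(40))
    application_name = sqlalchemy.Column(sqlalchemy.String(256))
    last_analyzed = sqlalchemy.Column(sqlalchemy.Integer)


def build_criteria(
    table, endpoint_id: str, application_name: typing.Optional[str] = None
) -> list[BinaryExpression]:
    # Comparing a mapped column to a value yields a BinaryExpression; the values
    # are bound as parameters by the driver rather than interpolated into a raw
    # SQL string, which is what the removed filter_query_ code did.
    criteria = [table.endpoint_id == endpoint_id]
    if application_name is not None:
        criteria.append(table.application_name == application_name)
    return criteria


if __name__ == "__main__":
    engine = sqlalchemy.create_engine("sqlite://")
    Base.metadata.create_all(bind=engine)
    with sqlalchemy.orm.Session(engine) as session:
        session.add(
            MonitoringSchedule(
                uid="abc123",
                endpoint_id="ep-1",
                application_name="my-app",
                last_analyzed=0,
            )
        )
        session.commit()
        criteria = build_criteria(MonitoringSchedule, "ep-1", "my-app")
        # The criteria list is unpacked into filter(), mirroring the new
        # _get/_update/_delete signatures in the diff above.
        record = session.query(MonitoringSchedule).filter(*criteria).one_or_none()
        print(record.uid if record else None)

Building filters this way also keeps helpers such as _get_filter_criteria table-agnostic, as long as the mapped class exposes the expected columns.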

mlrun/model_monitoring/db/tsdb/__init__.py

@@ -65,7 +65,7 @@ class ObjectTSDBFactory(enum.Enum):
 def get_tsdb_connector(
     project: str,
     tsdb_connector_type: str = "",
-    secret_provider: typing.Callable = None,
+    secret_provider: typing.Optional[typing.Callable] = None,
     **kwargs,
 ) -> TSDBConnector:
     """