mlrun 1.8.0rc4__py3-none-any.whl → 1.8.0rc7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mlrun might be problematic; see the package registry listing for more details.

Files changed (75)
  1. mlrun/__init__.py +5 -3
  2. mlrun/alerts/alert.py +129 -2
  3. mlrun/artifacts/__init__.py +1 -1
  4. mlrun/artifacts/base.py +12 -1
  5. mlrun/artifacts/document.py +59 -38
  6. mlrun/common/constants.py +1 -0
  7. mlrun/common/model_monitoring/__init__.py +0 -2
  8. mlrun/common/model_monitoring/helpers.py +0 -28
  9. mlrun/common/schemas/__init__.py +2 -4
  10. mlrun/common/schemas/alert.py +80 -1
  11. mlrun/common/schemas/artifact.py +4 -0
  12. mlrun/common/schemas/client_spec.py +0 -1
  13. mlrun/common/schemas/model_monitoring/__init__.py +0 -6
  14. mlrun/common/schemas/model_monitoring/constants.py +11 -9
  15. mlrun/common/schemas/model_monitoring/model_endpoints.py +77 -149
  16. mlrun/common/schemas/notification.py +6 -0
  17. mlrun/common/schemas/project.py +3 -0
  18. mlrun/config.py +2 -3
  19. mlrun/datastore/datastore_profile.py +57 -17
  20. mlrun/datastore/sources.py +1 -2
  21. mlrun/datastore/vectorstore.py +67 -59
  22. mlrun/db/base.py +29 -19
  23. mlrun/db/factory.py +0 -3
  24. mlrun/db/httpdb.py +224 -161
  25. mlrun/db/nopdb.py +36 -17
  26. mlrun/execution.py +46 -32
  27. mlrun/feature_store/api.py +1 -0
  28. mlrun/model.py +7 -0
  29. mlrun/model_monitoring/__init__.py +3 -2
  30. mlrun/model_monitoring/api.py +55 -53
  31. mlrun/model_monitoring/applications/_application_steps.py +4 -2
  32. mlrun/model_monitoring/applications/base.py +165 -6
  33. mlrun/model_monitoring/applications/context.py +88 -37
  34. mlrun/model_monitoring/applications/evidently_base.py +0 -1
  35. mlrun/model_monitoring/applications/histogram_data_drift.py +3 -7
  36. mlrun/model_monitoring/controller.py +43 -37
  37. mlrun/model_monitoring/db/__init__.py +0 -2
  38. mlrun/model_monitoring/db/tsdb/base.py +2 -1
  39. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +2 -1
  40. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +43 -0
  41. mlrun/model_monitoring/helpers.py +79 -66
  42. mlrun/model_monitoring/stream_processing.py +83 -270
  43. mlrun/model_monitoring/writer.py +1 -10
  44. mlrun/projects/pipelines.py +37 -1
  45. mlrun/projects/project.py +171 -74
  46. mlrun/run.py +40 -0
  47. mlrun/runtimes/nuclio/function.py +7 -6
  48. mlrun/runtimes/nuclio/serving.py +9 -2
  49. mlrun/serving/routers.py +158 -145
  50. mlrun/serving/server.py +6 -0
  51. mlrun/serving/states.py +21 -7
  52. mlrun/serving/v2_serving.py +70 -61
  53. mlrun/utils/helpers.py +14 -30
  54. mlrun/utils/notifications/notification/mail.py +36 -9
  55. mlrun/utils/notifications/notification_pusher.py +43 -18
  56. mlrun/utils/version/version.json +2 -2
  57. {mlrun-1.8.0rc4.dist-info → mlrun-1.8.0rc7.dist-info}/METADATA +5 -4
  58. {mlrun-1.8.0rc4.dist-info → mlrun-1.8.0rc7.dist-info}/RECORD +62 -75
  59. mlrun/common/schemas/model_monitoring/model_endpoint_v2.py +0 -149
  60. mlrun/model_monitoring/db/stores/__init__.py +0 -136
  61. mlrun/model_monitoring/db/stores/base/__init__.py +0 -15
  62. mlrun/model_monitoring/db/stores/base/store.py +0 -154
  63. mlrun/model_monitoring/db/stores/sqldb/__init__.py +0 -13
  64. mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +0 -46
  65. mlrun/model_monitoring/db/stores/sqldb/models/base.py +0 -93
  66. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +0 -47
  67. mlrun/model_monitoring/db/stores/sqldb/models/sqlite.py +0 -25
  68. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +0 -408
  69. mlrun/model_monitoring/db/stores/v3io_kv/__init__.py +0 -13
  70. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +0 -464
  71. mlrun/model_monitoring/model_endpoint.py +0 -120
  72. {mlrun-1.8.0rc4.dist-info → mlrun-1.8.0rc7.dist-info}/LICENSE +0 -0
  73. {mlrun-1.8.0rc4.dist-info → mlrun-1.8.0rc7.dist-info}/WHEEL +0 -0
  74. {mlrun-1.8.0rc4.dist-info → mlrun-1.8.0rc7.dist-info}/entry_points.txt +0 -0
  75. {mlrun-1.8.0rc4.dist-info → mlrun-1.8.0rc7.dist-info}/top_level.txt +0 -0
mlrun/execution.py CHANGED
@@ -17,7 +17,7 @@ import os
17
17
  import uuid
18
18
  import warnings
19
19
  from copy import deepcopy
20
- from typing import Optional, Union
20
+ from typing import Optional, Union, cast
21
21
 
22
22
  import numpy as np
23
23
  import yaml
@@ -42,6 +42,7 @@ from .features import Feature
42
42
  from .model import HyperParamOptions
43
43
  from .secrets import SecretsStore
44
44
  from .utils import (
45
+ Logger,
45
46
  RunKeys,
46
47
  dict_to_json,
47
48
  dict_to_yaml,
@@ -158,7 +159,7 @@ class MLClientCtx:
158
159
  return self._project
159
160
 
160
161
  @property
161
- def logger(self):
162
+ def logger(self) -> Logger:
162
163
  """Built-in logger interface
163
164
 
164
165
  Example::
@@ -500,11 +501,11 @@ class MLClientCtx:
500
501
  return default
501
502
  return self._parameters[key]
502
503
 
503
- def get_project_object(self):
504
+ def get_project_object(self) -> Optional["mlrun.MlrunProject"]:
504
505
  """
505
506
  Get the MLRun project object by the project name set in the context.
506
507
 
507
- :return: The project object or None if it couldn't be retrieved.
508
+ :returns: The project object or None if it couldn't be retrieved.
508
509
  """
509
510
  return self._load_project_object()
510
511
 
@@ -628,7 +629,7 @@ class MLClientCtx:
628
629
  format=None,
629
630
  db_key=None,
630
631
  **kwargs,
631
- ):
632
+ ) -> Artifact:
632
633
  """Log an output artifact and optionally upload it to datastore
633
634
 
634
635
  Example::
@@ -698,7 +699,7 @@ class MLClientCtx:
698
699
  extra_data=None,
699
700
  label_column: Optional[str] = None,
700
701
  **kwargs,
701
- ):
702
+ ) -> DatasetArtifact:
702
703
  """Log a dataset artifact and optionally upload it to datastore
703
704
 
704
705
  If the dataset exists with the same key and tag, it will be overwritten.
@@ -736,7 +737,7 @@ class MLClientCtx:
736
737
  :param db_key: The key to use in the artifact DB table, by default its run name + '_' + key
737
738
  db_key=False will not register it in the artifacts table
738
739
 
739
- :returns: Artifact object
740
+ :returns: Dataset artifact object
740
741
  """
741
742
  ds = DatasetArtifact(
742
743
  key,
@@ -749,16 +750,19 @@ class MLClientCtx:
749
750
  **kwargs,
750
751
  )
751
752
 
752
- item = self._artifacts_manager.log_artifact(
753
- self,
754
- ds,
755
- local_path=local_path,
756
- artifact_path=extend_artifact_path(artifact_path, self.artifact_path),
757
- target_path=target_path,
758
- tag=tag,
759
- upload=upload,
760
- db_key=db_key,
761
- labels=labels,
753
+ item = cast(
754
+ DatasetArtifact,
755
+ self._artifacts_manager.log_artifact(
756
+ self,
757
+ ds,
758
+ local_path=local_path,
759
+ artifact_path=extend_artifact_path(artifact_path, self.artifact_path),
760
+ target_path=target_path,
761
+ tag=tag,
762
+ upload=upload,
763
+ db_key=db_key,
764
+ labels=labels,
765
+ ),
762
766
  )
763
767
  self._update_run()
764
768
  return item
@@ -786,7 +790,7 @@ class MLClientCtx:
786
790
  extra_data=None,
787
791
  db_key=None,
788
792
  **kwargs,
789
- ):
793
+ ) -> ModelArtifact:
790
794
  """Log a model artifact and optionally upload it to datastore
791
795
 
792
796
  Example::
@@ -828,7 +832,7 @@ class MLClientCtx:
828
832
  :param db_key: The key to use in the artifact DB table, by default its run name + '_' + key
829
833
  db_key=False will not register it in the artifacts table
830
834
 
831
- :returns: Artifact object
835
+ :returns: Model artifact object
832
836
  """
833
837
 
834
838
  if training_set is not None and inputs:
@@ -855,14 +859,17 @@ class MLClientCtx:
855
859
  if training_set is not None:
856
860
  model.infer_from_df(training_set, label_column)
857
861
 
858
- item = self._artifacts_manager.log_artifact(
859
- self,
860
- model,
861
- artifact_path=extend_artifact_path(artifact_path, self.artifact_path),
862
- tag=tag,
863
- upload=upload,
864
- db_key=db_key,
865
- labels=labels,
862
+ item = cast(
863
+ ModelArtifact,
864
+ self._artifacts_manager.log_artifact(
865
+ self,
866
+ model,
867
+ artifact_path=extend_artifact_path(artifact_path, self.artifact_path),
868
+ tag=tag,
869
+ upload=upload,
870
+ db_key=db_key,
871
+ labels=labels,
872
+ ),
866
873
  )
867
874
  self._update_run()
868
875
  return item
@@ -870,28 +877,35 @@ class MLClientCtx:
870
877
  def log_document(
871
878
  self,
872
879
  key: str,
880
+ tag: str = "",
881
+ local_path: str = "",
873
882
  artifact_path: Optional[str] = None,
874
883
  document_loader: DocumentLoaderSpec = DocumentLoaderSpec(),
875
- tag: str = "",
876
884
  upload: Optional[bool] = False,
877
885
  labels: Optional[dict[str, str]] = None,
886
+ target_path: Optional[str] = None,
878
887
  **kwargs,
879
888
  ) -> DocumentArtifact:
880
889
  """
881
890
  Log a document as an artifact.
882
891
 
883
892
  :param key: Artifact key
884
- :param target_path: Path to the local file
885
- :param artifact_path: Target path for artifact storage
886
- :param document_loader: Spec to use to load the artifact as langchain document
887
893
  :param tag: Version tag
894
+ :param local_path: path to the local file we upload, will also be use
895
+ as the destination subpath (under "artifact_path")
896
+ :param artifact_path: Target artifact path (when not using the default)
897
+ to define a subpath under the default location use:
898
+ `artifact_path=context.artifact_subpath('data')`
899
+ :param document_loader: Spec to use to load the artifact as langchain document
888
900
  :param upload: Whether to upload the artifact
889
901
  :param labels: Key-value labels
902
+ :param target_path: Path to the local file
890
903
  :param kwargs: Additional keyword arguments
891
904
  :return: DocumentArtifact object
892
905
  """
893
906
  doc_artifact = DocumentArtifact(
894
907
  key=key,
908
+ original_source=local_path or target_path,
895
909
  document_loader=document_loader,
896
910
  **kwargs,
897
911
  )
@@ -1193,7 +1207,7 @@ class MLClientCtx:
1193
1207
  self._data_stores = store_manager.set(self._secrets_manager, db=self._rundb)
1194
1208
  self._artifacts_manager = ArtifactManager(db=self._rundb)
1195
1209
 
1196
- def _load_project_object(self):
1210
+ def _load_project_object(self) -> Optional["mlrun.MlrunProject"]:
1197
1211
  if not self._project_object:
1198
1212
  if not self._project:
1199
1213
  self.logger.warning(
@@ -11,6 +11,7 @@
11
11
  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
+
14
15
  import copy
15
16
  import importlib.util
16
17
  import pathlib
mlrun/model.py CHANGED
@@ -117,6 +117,8 @@ class ModelObj:
117
117
  # If one of the attributes is a third party object that has to_dict method (such as k8s objects), then
118
118
  # add it to the object's _fields_to_serialize attribute and handle it in the _serialize_field method.
119
119
  if hasattr(field_value, "to_dict"):
120
+ # TODO: Allow passing fields to exclude from the parent object to the child object
121
+ # e.g.: run.to_dict(exclude=["status.artifacts"])
120
122
  field_value = field_value.to_dict(strip=strip)
121
123
  if self._is_valid_field_value_for_serialization(
122
124
  field_name, field_value, strip
@@ -443,6 +445,7 @@ class Credentials(ModelObj):
443
445
  class BaseMetadata(ModelObj):
444
446
  _default_fields_to_strip = ModelObj._default_fields_to_strip + [
445
447
  "hash",
448
+ "uid",
446
449
  # Below are environment specific fields, no need to keep when stripping
447
450
  "namespace",
448
451
  "project",
@@ -465,10 +468,12 @@ class BaseMetadata(ModelObj):
465
468
  categories=None,
466
469
  updated=None,
467
470
  credentials=None,
471
+ uid=None,
468
472
  ):
469
473
  self.name = name
470
474
  self.tag = tag
471
475
  self.hash = hash
476
+ self.uid = uid
472
477
  self.namespace = namespace
473
478
  self.project = project or ""
474
479
  self.labels = labels or {}
@@ -1267,6 +1272,8 @@ class RunSpec(ModelObj):
1267
1272
  class RunStatus(ModelObj):
1268
1273
  """Run status"""
1269
1274
 
1275
+ _default_fields_to_strip = ModelObj._default_fields_to_strip + ["artifacts"]
1276
+
1270
1277
  def __init__(
1271
1278
  self,
1272
1279
  state=None,
@@ -14,7 +14,8 @@
14
14
 
15
15
  # for backwards compatibility
16
16
 
17
- from .db import get_store_object, get_tsdb_connector
17
+ from mlrun.common.schemas import ModelEndpoint, ModelEndpointList
18
+
19
+ from .db import get_tsdb_connector
18
20
  from .helpers import get_stream_path
19
- from .model_endpoint import ModelEndpoint
20
21
  from .tracking_policy import TrackingPolicy
@@ -23,18 +23,28 @@ import pandas as pd
23
23
  import mlrun.artifacts
24
24
  import mlrun.common.helpers
25
25
  import mlrun.common.schemas.model_monitoring.constants as mm_constants
26
+ import mlrun.datastore.base
26
27
  import mlrun.feature_store
27
28
  import mlrun.model_monitoring.applications as mm_app
28
29
  import mlrun.serving
30
+ from mlrun.common.schemas import ModelEndpoint
31
+ from mlrun.common.schemas.model_monitoring import (
32
+ FunctionURI,
33
+ )
29
34
  from mlrun.data_types.infer import InferOptions, get_df_stats
30
35
  from mlrun.utils import datetime_now, logger
31
36
 
32
37
  from .helpers import update_model_endpoint_last_request
33
- from .model_endpoint import ModelEndpoint
34
38
 
35
39
  # A union of all supported dataset types:
36
40
  DatasetType = typing.Union[
37
- mlrun.DataItem, list, dict, pd.DataFrame, pd.Series, np.ndarray, typing.Any
41
+ mlrun.datastore.base.DataItem,
42
+ list,
43
+ dict,
44
+ pd.DataFrame,
45
+ pd.Series,
46
+ np.ndarray,
47
+ typing.Any,
38
48
  ]
39
49
 
40
50
 
@@ -44,10 +54,8 @@ def get_or_create_model_endpoint(
44
54
  model_endpoint_name: str = "",
45
55
  endpoint_id: str = "",
46
56
  function_name: str = "",
47
- context: mlrun.MLClientCtx = None,
57
+ context: typing.Optional["mlrun.MLClientCtx"] = None,
48
58
  sample_set_statistics: typing.Optional[dict[str, typing.Any]] = None,
49
- drift_threshold: typing.Optional[float] = None,
50
- possible_drift_threshold: typing.Optional[float] = None,
51
59
  monitoring_mode: mm_constants.ModelMonitoringMode = mm_constants.ModelMonitoringMode.disabled,
52
60
  db_session=None,
53
61
  ) -> ModelEndpoint:
@@ -68,10 +76,6 @@ def get_or_create_model_endpoint(
68
76
  full function hash.
69
77
  :param sample_set_statistics: Dictionary of sample set statistics that will be used as a reference data for
70
78
  the new model endpoint (applicable only to new endpoint_id).
71
- :param drift_threshold: (deprecated) The threshold of which to mark drifts (applicable only to new
72
- endpoint_id).
73
- :param possible_drift_threshold: (deprecated) The threshold of which to mark possible drifts (applicable only to new
74
- endpoint_id).
75
79
  :param monitoring_mode: If enabled, apply model monitoring features on the provided endpoint id
76
80
  (applicable only to new endpoint_id).
77
81
  :param db_session: A runtime session that manages the current dialog with the database.
@@ -79,18 +83,15 @@ def get_or_create_model_endpoint(
79
83
  :return: A ModelEndpoint object
80
84
  """
81
85
 
82
- if not endpoint_id:
83
- # Generate a new model endpoint id based on the project name and model name
84
- endpoint_id = hashlib.sha1(
85
- f"{project}_{model_endpoint_name}".encode()
86
- ).hexdigest()
87
-
88
86
  if not db_session:
89
87
  # Generate a runtime database
90
88
  db_session = mlrun.get_run_db()
91
89
  try:
92
90
  model_endpoint = db_session.get_model_endpoint(
93
- project=project, endpoint_id=endpoint_id
91
+ project=project,
92
+ name=model_endpoint_name,
93
+ endpoint_id=endpoint_id,
94
+ function_name=function_name,
94
95
  )
95
96
  # If other fields provided, validate that they are correspond to the existing model endpoint data
96
97
  _model_endpoint_validations(
@@ -104,7 +105,6 @@ def get_or_create_model_endpoint(
104
105
  model_endpoint = _generate_model_endpoint(
105
106
  project=project,
106
107
  db_session=db_session,
107
- endpoint_id=endpoint_id,
108
108
  model_path=model_path,
109
109
  model_endpoint_name=model_endpoint_name,
110
110
  function_name=function_name,
@@ -121,7 +121,7 @@ def record_results(
121
121
  model_endpoint_name: str,
122
122
  endpoint_id: str = "",
123
123
  function_name: str = "",
124
- context: typing.Optional[mlrun.MLClientCtx] = None,
124
+ context: typing.Optional["mlrun.MLClientCtx"] = None,
125
125
  infer_results_df: typing.Optional[pd.DataFrame] = None,
126
126
  sample_set_statistics: typing.Optional[dict[str, typing.Any]] = None,
127
127
  monitoring_mode: mm_constants.ModelMonitoringMode = mm_constants.ModelMonitoringMode.enabled,
@@ -208,13 +208,13 @@ def record_results(
208
208
  monitoring_mode=monitoring_mode,
209
209
  db_session=db,
210
210
  )
211
- logger.debug("Model endpoint", endpoint=model_endpoint.to_dict())
211
+ logger.debug("Model endpoint", endpoint=model_endpoint)
212
212
 
213
213
  timestamp = datetime_now()
214
214
  if infer_results_df is not None:
215
215
  # Write the monitoring parquet to the relevant model endpoint context
216
216
  write_monitoring_df(
217
- feature_set_uri=model_endpoint.status.monitoring_feature_set_uri,
217
+ feature_set_uri=model_endpoint.spec.monitoring_feature_set_uri,
218
218
  infer_datetime=timestamp,
219
219
  endpoint_id=model_endpoint.metadata.uid,
220
220
  infer_results_df=infer_results_df,
@@ -278,7 +278,7 @@ def _model_endpoint_validations(
278
278
  # Feature stats
279
279
  if (
280
280
  sample_set_statistics
281
- and sample_set_statistics != model_endpoint.status.feature_stats
281
+ and sample_set_statistics != model_endpoint.spec.feature_stats
282
282
  ):
283
283
  logger.warning(
284
284
  "Provided sample set statistics is different from the registered statistics. "
@@ -290,7 +290,7 @@ def write_monitoring_df(
290
290
  endpoint_id: str,
291
291
  infer_results_df: pd.DataFrame,
292
292
  infer_datetime: datetime,
293
- monitoring_feature_set: typing.Optional[mlrun.feature_store.FeatureSet] = None,
293
+ monitoring_feature_set: typing.Optional["mlrun.feature_store.FeatureSet"] = None,
294
294
  feature_set_uri: str = "",
295
295
  ) -> None:
296
296
  """Write infer results dataframe to the monitoring parquet target of the current model endpoint. The dataframe will
@@ -330,11 +330,10 @@ def write_monitoring_df(
330
330
  def _generate_model_endpoint(
331
331
  project: str,
332
332
  db_session,
333
- endpoint_id: str,
334
333
  model_path: str,
335
334
  model_endpoint_name: str,
336
335
  function_name: str,
337
- context: mlrun.MLClientCtx,
336
+ context: "mlrun.MLClientCtx",
338
337
  sample_set_statistics: dict[str, typing.Any],
339
338
  monitoring_mode: mm_constants.ModelMonitoringMode = mm_constants.ModelMonitoringMode.disabled,
340
339
  ) -> ModelEndpoint:
@@ -344,7 +343,6 @@ def _generate_model_endpoint(
344
343
  :param project: Project name.
345
344
 
346
345
  :param db_session: A session that manages the current dialog with the database.
347
- :param endpoint_id: Model endpoint unique ID.
348
346
  :param model_path: The model Store path.
349
347
  :param model_endpoint_name: Model endpoint name will be presented under the new model endpoint.
350
348
  :param function_name: If a new model endpoint is created, use this function name for generating the
@@ -355,34 +353,40 @@ def _generate_model_endpoint(
355
353
  the current model endpoint. Will be stored under
356
354
  `model_endpoint.status.feature_stats`.
357
355
 
358
- :return `mlrun.model_monitoring.model_endpoint.ModelEndpoint` object.
356
+ :return `mlrun.common.schemas.ModelEndpoint` object.
359
357
  """
360
- model_endpoint = ModelEndpoint()
361
- model_endpoint.metadata.project = project
362
- model_endpoint.metadata.uid = endpoint_id
363
- if function_name:
364
- model_endpoint.spec.function_uri = project + "/" + function_name
365
- elif not context:
366
- raise mlrun.errors.MLRunInvalidArgumentError(
367
- "Please provide either a function name or a valid MLRun context"
358
+ if not function_name and context:
359
+ function_name = FunctionURI.from_string(
360
+ context.to_dict()["spec"]["function"]
361
+ ).function
362
+ model_obj = None
363
+ if model_path:
364
+ model_obj: mlrun.artifacts.ModelArtifact = (
365
+ mlrun.datastore.store_resources.get_store_resource(
366
+ model_path, db=db_session
367
+ )
368
368
  )
369
- else:
370
- model_endpoint.spec.function_uri = context.to_dict()["spec"]["function"]
371
- model_endpoint.spec.model_uri = model_path
372
- model_endpoint.spec.model = model_endpoint_name
373
- model_endpoint.spec.model_class = "drift-analysis"
374
- model_endpoint.spec.monitoring_mode = monitoring_mode
375
- model_endpoint.status.first_request = model_endpoint.status.last_request = (
376
- datetime_now().isoformat()
377
- )
378
- if sample_set_statistics:
379
- model_endpoint.status.feature_stats = sample_set_statistics
380
-
381
- db_session.create_model_endpoint(
382
- project=project, endpoint_id=endpoint_id, model_endpoint=model_endpoint
369
+ current_time = datetime_now()
370
+ model_endpoint = mlrun.common.schemas.ModelEndpoint(
371
+ metadata=mlrun.common.schemas.ModelEndpointMetadata(
372
+ project=project,
373
+ name=model_endpoint_name,
374
+ endpoint_type=mlrun.common.schemas.model_monitoring.EndpointType.BATCH_EP,
375
+ ),
376
+ spec=mlrun.common.schemas.ModelEndpointSpec(
377
+ function_name=function_name,
378
+ model_name=model_obj.metadata.key if model_path else None,
379
+ model_uid=model_obj.metadata.uid if model_path else None,
380
+ model_class="drift-analysis",
381
+ ),
382
+ status=mlrun.common.schemas.ModelEndpointStatus(
383
+ monitoring_mode=monitoring_mode,
384
+ first_request=current_time,
385
+ last_request=current_time,
386
+ ),
383
387
  )
384
388
 
385
- return db_session.get_model_endpoint(project=project, endpoint_id=endpoint_id)
389
+ return db_session.create_model_endpoint(model_endpoint=model_endpoint)
386
390
 
387
391
 
388
392
  def get_sample_set_statistics(
@@ -531,7 +535,7 @@ def read_dataset_as_dataframe(
531
535
 
532
536
 
533
537
  def log_result(
534
- context: mlrun.MLClientCtx,
538
+ context: "mlrun.MLClientCtx",
535
539
  result_set_name: str,
536
540
  result_set: pd.DataFrame,
537
541
  artifacts_tag: str,
@@ -559,9 +563,7 @@ def _create_model_monitoring_function_base(
559
563
  project: str,
560
564
  func: typing.Union[str, None] = None,
561
565
  application_class: typing.Union[
562
- str,
563
- mm_app.ModelMonitoringApplicationBase,
564
- None,
566
+ str, "mm_app.ModelMonitoringApplicationBase", None
565
567
  ] = None,
566
568
  name: typing.Optional[str] = None,
567
569
  image: typing.Optional[str] = None,
@@ -16,6 +16,7 @@ import json
16
16
  import traceback
17
17
  from typing import Any, Optional, Union
18
18
 
19
+ import mlrun.common.schemas
19
20
  import mlrun.common.schemas.alert as alert_objects
20
21
  import mlrun.common.schemas.model_monitoring.constants as mm_constant
21
22
  import mlrun.datastore
@@ -81,6 +82,7 @@ class _PushToMonitoringWriter(StepToDict):
81
82
  self._lazy_init()
82
83
  application_results, application_context = event
83
84
  writer_event = {
85
+ mm_constant.WriterEvent.ENDPOINT_NAME: application_context.endpoint_name,
84
86
  mm_constant.WriterEvent.APPLICATION_NAME: application_context.application_name,
85
87
  mm_constant.WriterEvent.ENDPOINT_ID: application_context.endpoint_id,
86
88
  mm_constant.WriterEvent.START_INFER_TIME: application_context.start_infer_time.isoformat(
@@ -125,7 +127,7 @@ class _PrepareMonitoringEvent(StepToDict):
125
127
  """
126
128
  self.graph_context = context
127
129
  self.application_name = application_name
128
- self.model_endpoints: dict[str, mlrun.model_monitoring.ModelEndpoint] = {}
130
+ self.model_endpoints: dict[str, mlrun.common.schemas.ModelEndpoint] = {}
129
131
 
130
132
  def do(self, event: dict[str, Any]) -> MonitoringApplicationContext:
131
133
  """
@@ -135,10 +137,10 @@ class _PrepareMonitoringEvent(StepToDict):
135
137
  :return: Application context.
136
138
  """
137
139
  application_context = MonitoringApplicationContext(
138
- graph_context=self.graph_context,
139
140
  application_name=self.application_name,
140
141
  event=event,
141
142
  model_endpoint_dict=self.model_endpoints,
143
+ graph_context=self.graph_context,
142
144
  )
143
145
 
144
146
  self.model_endpoints.setdefault(