mlrun 1.8.0rc10__py3-none-any.whl → 1.8.0rc13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mlrun might be problematic; consult the package registry's advisory page for more details.

Files changed (40):
  1. mlrun/artifacts/document.py +32 -6
  2. mlrun/common/constants.py +1 -0
  3. mlrun/common/formatters/artifact.py +1 -1
  4. mlrun/common/schemas/__init__.py +2 -0
  5. mlrun/common/schemas/model_monitoring/__init__.py +1 -0
  6. mlrun/common/schemas/model_monitoring/constants.py +6 -0
  7. mlrun/common/schemas/model_monitoring/model_endpoints.py +35 -0
  8. mlrun/common/schemas/partition.py +23 -18
  9. mlrun/datastore/vectorstore.py +69 -26
  10. mlrun/db/base.py +14 -0
  11. mlrun/db/httpdb.py +48 -1
  12. mlrun/db/nopdb.py +13 -0
  13. mlrun/execution.py +43 -11
  14. mlrun/feature_store/steps.py +1 -1
  15. mlrun/model_monitoring/api.py +26 -19
  16. mlrun/model_monitoring/applications/_application_steps.py +1 -1
  17. mlrun/model_monitoring/applications/base.py +44 -7
  18. mlrun/model_monitoring/applications/context.py +94 -71
  19. mlrun/projects/pipelines.py +6 -3
  20. mlrun/projects/project.py +95 -17
  21. mlrun/runtimes/nuclio/function.py +2 -1
  22. mlrun/runtimes/nuclio/serving.py +33 -5
  23. mlrun/serving/__init__.py +8 -0
  24. mlrun/serving/merger.py +1 -1
  25. mlrun/serving/remote.py +17 -5
  26. mlrun/serving/routers.py +36 -87
  27. mlrun/serving/server.py +6 -2
  28. mlrun/serving/states.py +162 -13
  29. mlrun/serving/v2_serving.py +39 -82
  30. mlrun/utils/helpers.py +6 -0
  31. mlrun/utils/notifications/notification/base.py +1 -1
  32. mlrun/utils/notifications/notification/webhook.py +13 -12
  33. mlrun/utils/notifications/notification_pusher.py +18 -23
  34. mlrun/utils/version/version.json +2 -2
  35. {mlrun-1.8.0rc10.dist-info → mlrun-1.8.0rc13.dist-info}/METADATA +10 -10
  36. {mlrun-1.8.0rc10.dist-info → mlrun-1.8.0rc13.dist-info}/RECORD +40 -40
  37. {mlrun-1.8.0rc10.dist-info → mlrun-1.8.0rc13.dist-info}/LICENSE +0 -0
  38. {mlrun-1.8.0rc10.dist-info → mlrun-1.8.0rc13.dist-info}/WHEEL +0 -0
  39. {mlrun-1.8.0rc10.dist-info → mlrun-1.8.0rc13.dist-info}/entry_points.txt +0 -0
  40. {mlrun-1.8.0rc10.dist-info → mlrun-1.8.0rc13.dist-info}/top_level.txt +0 -0
mlrun/execution.py CHANGED
@@ -880,7 +880,7 @@ class MLClientCtx:
880
880
  tag: str = "",
881
881
  local_path: str = "",
882
882
  artifact_path: Optional[str] = None,
883
- document_loader: DocumentLoaderSpec = DocumentLoaderSpec(),
883
+ document_loader_spec: DocumentLoaderSpec = DocumentLoaderSpec(),
884
884
  upload: Optional[bool] = False,
885
885
  labels: Optional[dict[str, str]] = None,
886
886
  target_path: Optional[str] = None,
@@ -891,22 +891,48 @@ class MLClientCtx:
891
891
 
892
892
  :param key: Artifact key
893
893
  :param tag: Version tag
894
- :param local_path: path to the local file we upload, will also be use
895
- as the destination subpath (under "artifact_path")
896
- :param artifact_path: Target artifact path (when not using the default)
897
- to define a subpath under the default location use:
898
- `artifact_path=context.artifact_subpath('data')`
899
- :param document_loader: Spec to use to load the artifact as langchain document
894
+ :param local_path: path to the local file we upload, will also be use
895
+ as the destination subpath (under "artifact_path")
896
+ :param artifact_path: Target artifact path (when not using the default)
897
+ to define a subpath under the default location use:
898
+ `artifact_path=context.artifact_subpath('data')`
899
+ :param document_loader_spec: Spec to use to load the artifact as langchain document.
900
+
901
+ By default, uses DocumentLoaderSpec() which initializes with:
902
+
903
+ * loader_class_name="langchain_community.document_loaders.TextLoader"
904
+ * src_name="file_path"
905
+ * kwargs=None
906
+
907
+ Can be customized for different document types, e.g.::
908
+
909
+ DocumentLoaderSpec(
910
+ loader_class_name="langchain_community.document_loaders.PDFLoader",
911
+ src_name="file_path",
912
+ kwargs={"extract_images": True}
913
+ )
900
914
  :param upload: Whether to upload the artifact
901
915
  :param labels: Key-value labels
902
916
  :param target_path: Path to the local file
903
917
  :param kwargs: Additional keyword arguments
904
918
  :return: DocumentArtifact object
919
+
920
+ Example:
921
+ >>> # Log a PDF document with custom loader
922
+ >>> project.log_document(
923
+ ... key="my_doc",
924
+ ... local_path="path/to/doc.pdf",
925
+ ... document_loader_spec=DocumentLoaderSpec(
926
+ ... loader_class_name="langchain_community.document_loaders.PDFLoader",
927
+ ... src_name="file_path",
928
+ ... kwargs={"extract_images": True},
929
+ ... ),
930
+ ... )
905
931
  """
906
932
  doc_artifact = DocumentArtifact(
907
933
  key=key,
908
934
  original_source=local_path or target_path,
909
- document_loader=document_loader,
935
+ document_loader_spec=document_loader_spec,
910
936
  **kwargs,
911
937
  )
912
938
 
@@ -929,9 +955,15 @@ class MLClientCtx:
929
955
  )
930
956
  return self.get_artifact(key)
931
957
 
932
- def get_artifact(self, key: str) -> Artifact:
933
- artifact_uri = self._artifacts_manager.artifact_uris[key]
934
- return self.get_store_resource(artifact_uri)
958
+ def get_artifact(
959
+ self, key, tag=None, iter=None, tree=None, uid=None
960
+ ) -> Optional[Artifact]:
961
+ if tag or iter or tree or uid:
962
+ project = self.get_project_object()
963
+ return project.get_artifact(key=key, tag=tag, iter=iter, tree=tree, uid=uid)
964
+ else:
965
+ artifact_uri = self._artifacts_manager.artifact_uris[key]
966
+ return self.get_store_resource(artifact_uri)
935
967
 
936
968
  def update_artifact(self, artifact_object: Artifact):
937
969
  """Update an artifact object in the DB and the cached uri"""
@@ -671,7 +671,7 @@ class SetEventMetadata(MapClass):
671
671
 
672
672
  self._tagging_funcs = []
673
673
 
674
- def post_init(self, mode="sync"):
674
+ def post_init(self, mode="sync", **kwargs):
675
675
  def add_metadata(name, path, operator=str):
676
676
  def _add_meta(event):
677
677
  value = get_in(event.body, path)
@@ -54,9 +54,10 @@ def get_or_create_model_endpoint(
54
54
  model_endpoint_name: str = "",
55
55
  endpoint_id: str = "",
56
56
  function_name: str = "",
57
+ function_tag: str = "latest",
57
58
  context: typing.Optional["mlrun.MLClientCtx"] = None,
58
59
  sample_set_statistics: typing.Optional[dict[str, typing.Any]] = None,
59
- monitoring_mode: mm_constants.ModelMonitoringMode = mm_constants.ModelMonitoringMode.disabled,
60
+ monitoring_mode: mm_constants.ModelMonitoringMode = mm_constants.ModelMonitoringMode.enabled,
60
61
  db_session=None,
61
62
  ) -> ModelEndpoint:
62
63
  """
@@ -70,8 +71,8 @@ def get_or_create_model_endpoint(
70
71
  under this endpoint (applicable only to new endpoint_id).
71
72
  :param endpoint_id: Model endpoint unique ID. If not exist in DB, will generate a new record based
72
73
  on the provided `endpoint_id`.
73
- :param function_name: If a new model endpoint is created, use this function name for generating the
74
- function URI (applicable only to new endpoint_id).
74
+ :param function_name: If a new model endpoint is created, use this function name.
75
+ :param function_tag: If a new model endpoint is created, use this function tag.
75
76
  :param context: MLRun context. If `function_name` not provided, use the context to generate the
76
77
  full function hash.
77
78
  :param sample_set_statistics: Dictionary of sample set statistics that will be used as a reference data for
@@ -86,28 +87,32 @@ def get_or_create_model_endpoint(
86
87
  if not db_session:
87
88
  # Generate a runtime database
88
89
  db_session = mlrun.get_run_db()
90
+ model_endpoint = None
89
91
  try:
90
- model_endpoint = db_session.get_model_endpoint(
91
- project=project,
92
- name=model_endpoint_name,
93
- endpoint_id=endpoint_id,
94
- function_name=function_name,
95
- )
96
- # If other fields provided, validate that they are correspond to the existing model endpoint data
97
- _model_endpoint_validations(
98
- model_endpoint=model_endpoint,
99
- model_path=model_path,
100
- sample_set_statistics=sample_set_statistics,
101
- )
92
+ if endpoint_id:
93
+ model_endpoint = db_session.get_model_endpoint(
94
+ project=project,
95
+ name=model_endpoint_name,
96
+ endpoint_id=endpoint_id,
97
+ )
98
+ # If other fields provided, validate that they are correspond to the existing model endpoint data
99
+ _model_endpoint_validations(
100
+ model_endpoint=model_endpoint,
101
+ model_path=model_path,
102
+ sample_set_statistics=sample_set_statistics,
103
+ )
102
104
 
103
105
  except mlrun.errors.MLRunNotFoundError:
104
106
  # Create a new model endpoint with the provided details
107
+ pass
108
+ if not model_endpoint:
105
109
  model_endpoint = _generate_model_endpoint(
106
110
  project=project,
107
111
  db_session=db_session,
108
112
  model_path=model_path,
109
113
  model_endpoint_name=model_endpoint_name,
110
114
  function_name=function_name,
115
+ function_tag=function_tag,
111
116
  context=context,
112
117
  sample_set_statistics=sample_set_statistics,
113
118
  monitoring_mode=monitoring_mode,
@@ -333,9 +338,10 @@ def _generate_model_endpoint(
333
338
  model_path: str,
334
339
  model_endpoint_name: str,
335
340
  function_name: str,
341
+ function_tag: str,
336
342
  context: "mlrun.MLClientCtx",
337
343
  sample_set_statistics: dict[str, typing.Any],
338
- monitoring_mode: mm_constants.ModelMonitoringMode = mm_constants.ModelMonitoringMode.disabled,
344
+ monitoring_mode: mm_constants.ModelMonitoringMode = mm_constants.ModelMonitoringMode.enabled,
339
345
  ) -> ModelEndpoint:
340
346
  """
341
347
  Write a new model endpoint record.
@@ -345,8 +351,8 @@ def _generate_model_endpoint(
345
351
  :param db_session: A session that manages the current dialog with the database.
346
352
  :param model_path: The model Store path.
347
353
  :param model_endpoint_name: Model endpoint name will be presented under the new model endpoint.
348
- :param function_name: If a new model endpoint is created, use this function name for generating the
349
- function URI.
354
+ :param function_name: If a new model endpoint is created, use this function name.
355
+ :param function_tag: If a new model endpoint is created, use this function tag.
350
356
  :param context: MLRun context. If function_name not provided, use the context to generate the
351
357
  full function hash.
352
358
  :param sample_set_statistics: Dictionary of sample set statistics that will be used as a reference data for
@@ -374,7 +380,8 @@ def _generate_model_endpoint(
374
380
  endpoint_type=mlrun.common.schemas.model_monitoring.EndpointType.BATCH_EP,
375
381
  ),
376
382
  spec=mlrun.common.schemas.ModelEndpointSpec(
377
- function_name=function_name,
383
+ function_name=function_name or "function",
384
+ function_tag=function_tag or "latest",
378
385
  model_name=model_obj.metadata.key if model_obj else None,
379
386
  model_uid=model_obj.metadata.uid if model_obj else None,
380
387
  model_tag=model_obj.metadata.tag if model_obj else None,
@@ -136,7 +136,7 @@ class _PrepareMonitoringEvent(StepToDict):
136
136
  :param event: Application event.
137
137
  :return: Application context.
138
138
  """
139
- application_context = MonitoringApplicationContext(
139
+ application_context = MonitoringApplicationContext._from_graph_ctx(
140
140
  application_name=self.application_name,
141
141
  event=event,
142
142
  model_endpoint_dict=self.model_endpoints,
@@ -112,11 +112,10 @@ class ModelMonitoringApplicationBase(MonitoringApplicationToDict, ABC):
112
112
  def call_do_tracking(event: Optional[dict] = None):
113
113
  if event is None:
114
114
  event = {}
115
- monitoring_context = mm_context.MonitoringApplicationContext(
115
+ monitoring_context = mm_context.MonitoringApplicationContext._from_ml_ctx(
116
116
  event=event,
117
117
  application_name=self.__class__.__name__,
118
- logger=context.logger,
119
- artifacts_logger=context,
118
+ context=context,
120
119
  sample_df=sample_data,
121
120
  feature_stats=feature_stats,
122
121
  )
@@ -148,6 +147,44 @@ class ModelMonitoringApplicationBase(MonitoringApplicationToDict, ABC):
148
147
  )
149
148
  return start, end
150
149
 
150
+ @classmethod
151
+ def deploy(
152
+ cls,
153
+ func_name: str,
154
+ func_path: Optional[str] = None,
155
+ image: Optional[str] = None,
156
+ handler: Optional[str] = None,
157
+ with_repo: Optional[bool] = False,
158
+ tag: Optional[str] = None,
159
+ requirements: Optional[Union[str, list[str]]] = None,
160
+ requirements_file: str = "",
161
+ **application_kwargs,
162
+ ) -> None:
163
+ """
164
+ Set the application to the current project and deploy it as a Nuclio serving function.
165
+ Required for your model monitoring application to work as a part of the model monitoring framework.
166
+
167
+ :param func_name: The name of the function.
168
+ :param func_path: The path of the function, :code:`None` refers to the current Jupyter notebook.
169
+
170
+ For the other arguments, refer to
171
+ :py:meth:`~mlrun.projects.MlrunProject.set_model_monitoring_function`.
172
+ """
173
+ project = cast("mlrun.MlrunProject", mlrun.get_current_project())
174
+ function = project.set_model_monitoring_function(
175
+ name=func_name,
176
+ func=func_path,
177
+ application_class=cls.__name__,
178
+ handler=handler,
179
+ image=image,
180
+ with_repo=with_repo,
181
+ requirements=requirements,
182
+ requirements_file=requirements_file,
183
+ tag=tag,
184
+ **application_kwargs,
185
+ )
186
+ function.deploy()
187
+
151
188
  @classmethod
152
189
  def evaluate(
153
190
  cls,
@@ -175,10 +212,10 @@ class ModelMonitoringApplicationBase(MonitoringApplicationToDict, ABC):
175
212
  :param func_name: The name of the function. If not passed, the class name is used.
176
213
  :param tag: An optional tag for the function.
177
214
  :param run_local: Whether to run the function locally or remotely.
178
- :param sample_df: Optional - pandas data-frame as the current dataset.
179
- When set, it replaces the data read from the model endpoint's offline source.
180
- :param feature_stats: Optional - statistics dictionary of the reference data.
181
- When set, it overrides the model endpoint's feature stats.
215
+ :param sample_data: Optional - pandas data-frame as the current dataset.
216
+ When set, it replaces the data read from the model endpoint's offline source.
217
+ :param reference_data: Optional - pandas data-frame of the reference dataset.
218
+ When set, its statistics override the model endpoint's feature statistics.
182
219
  :param image: Docker image to run the job on.
183
220
  :param with_repo: Whether to clone the current repo to the build source.
184
221
  :param requirements: List of Python requirements to be installed in the image.
@@ -45,32 +45,6 @@ class _ArtifactsLogger(Protocol):
45
45
 
46
46
 
47
47
  class MonitoringApplicationContext:
48
- """
49
- The monitoring context holds all the relevant information for the monitoring application,
50
- and also it can be used for logging artifacts and results.
51
- The monitoring context has the following attributes:
52
-
53
- :param application_name: (str) The model monitoring application name.
54
- :param project_name: (str) The project name.
55
- :param project: (MlrunProject) The project object.
56
- :param logger: (mlrun.utils.Logger) MLRun logger.
57
- :param nuclio_logger: (nuclio.request.Logger) Nuclio logger.
58
- :param sample_df_stats: (FeatureStats) The new sample distribution dictionary.
59
- :param feature_stats: (FeatureStats) The train sample distribution dictionary.
60
- :param sample_df: (pd.DataFrame) The new sample DataFrame.
61
- :param start_infer_time: (pd.Timestamp) Start time of the monitoring schedule.
62
- :param end_infer_time: (pd.Timestamp) End time of the monitoring schedule.
63
- :param latest_request: (pd.Timestamp) Timestamp of the latest request on this endpoint_id.
64
- :param endpoint_id: (str) ID of the monitored model endpoint
65
- :param endpoint_name: (str) Name of the monitored model endpoint
66
- :param output_stream_uri: (str) URI of the output stream for results
67
- :param model_endpoint: (ModelEndpoint) The model endpoint object.
68
- :param feature_names: (list[str]) List of models feature names.
69
- :param label_names: (list[str]) List of models label names.
70
- :param model: (tuple[str, ModelArtifact, dict]) The model file, model spec object,
71
- and a list of extra data items.
72
- """
73
-
74
48
  _logger_name = "monitoring-application"
75
49
 
76
50
  def __init__(
@@ -78,64 +52,51 @@ class MonitoringApplicationContext:
78
52
  *,
79
53
  application_name: str,
80
54
  event: dict[str, Any],
55
+ project: "mlrun.MlrunProject",
56
+ artifacts_logger: _ArtifactsLogger,
57
+ logger: mlrun.utils.Logger,
58
+ nuclio_logger: nuclio.request.Logger,
81
59
  model_endpoint_dict: Optional[dict[str, ModelEndpoint]] = None,
82
- logger: Optional[mlrun.utils.Logger] = None,
83
- graph_context: Optional[mlrun.serving.GraphContext] = None,
84
- context: Optional["mlrun.MLClientCtx"] = None,
85
- artifacts_logger: Optional[_ArtifactsLogger] = None,
86
60
  sample_df: Optional[pd.DataFrame] = None,
87
61
  feature_stats: Optional[FeatureStats] = None,
88
62
  ) -> None:
89
63
  """
90
- The :code:`__init__` method initializes a :code:`MonitoringApplicationContext` object
91
- and has the following attributes.
92
- Note: this object should not be instantiated manually.
93
-
94
- :param application_name: The application name.
95
- :param event: The instance data dictionary.
96
- :param model_endpoint_dict: Optional - dictionary of model endpoints.
97
- :param logger: Optional - MLRun logger instance.
98
- :param graph_context: Optional - GraphContext instance.
99
- :param context: Optional - MLClientCtx instance.
100
- :param artifacts_logger: Optional - an object that can log artifacts,
101
- typically :py:class:`~mlrun.projects.MlrunProject` or
102
- :py:class:`~mlrun.execution.MLClientCtx`.
103
- :param sample_df: Optional - pandas data-frame as the current dataset.
104
- When set, it replaces the data read from the offline source.
105
- :param feature_stats: Optional - statistics dictionary of the reference data.
106
- When set, it overrides the model endpoint's feature stats.
64
+ The :code:`MonitoringApplicationContext` object holds all the relevant information for the
65
+ model monitoring application, and can be used for logging artifacts and messages.
66
+ The monitoring context has the following attributes:
67
+
68
+ :param application_name: (str) The model monitoring application name.
69
+ :param project: (:py:class:`~mlrun.projects.MlrunProject`) The current MLRun project object.
70
+ :param project_name: (str) The project name.
71
+ :param logger: (:py:class:`~mlrun.utils.Logger`) MLRun logger.
72
+ :param nuclio_logger: (nuclio.request.Logger) Nuclio logger.
73
+ :param sample_df_stats: (FeatureStats) The new sample distribution dictionary.
74
+ :param feature_stats: (FeatureStats) The train sample distribution dictionary.
75
+ :param sample_df: (pd.DataFrame) The new sample DataFrame.
76
+ :param start_infer_time: (pd.Timestamp) Start time of the monitoring schedule.
77
+ :param end_infer_time: (pd.Timestamp) End time of the monitoring schedule.
78
+ :param latest_request: (pd.Timestamp) Timestamp of the latest request on this endpoint_id.
79
+ :param endpoint_id: (str) ID of the monitored model endpoint
80
+ :param endpoint_name: (str) Name of the monitored model endpoint
81
+ :param output_stream_uri: (str) URI of the output stream for results
82
+ :param model_endpoint: (ModelEndpoint) The model endpoint object.
83
+ :param feature_names: (list[str]) List of models feature names.
84
+ :param label_names: (list[str]) List of models label names.
85
+ :param model: (tuple[str, ModelArtifact, dict]) The model file, model spec object,
86
+ and a list of extra data items.
107
87
  """
108
88
  self.application_name = application_name
109
89
 
110
- if graph_context:
111
- self.project_name = graph_context.project
112
- self.project = mlrun.load_project(url=self.project_name)
113
- elif context:
114
- potential_project = context.get_project_object()
115
- if not potential_project:
116
- raise mlrun.errors.MLRunValueError(
117
- "Could not load project from context"
118
- )
119
- self.project = potential_project
120
- self.project_name = self.project.name
90
+ self.project = project
91
+ self.project_name = project.name
121
92
 
122
- self._artifacts_logger: _ArtifactsLogger = artifacts_logger or self.project
93
+ self._artifacts_logger = artifacts_logger
123
94
 
124
95
  # MLRun Logger
125
- self.logger = logger or mlrun.utils.create_logger(
126
- level=mlrun.mlconf.log_level,
127
- formatter_kind=mlrun.mlconf.log_formatter,
128
- name=self._logger_name,
129
- )
96
+ self.logger = logger
130
97
  # Nuclio logger - `nuclio.request.Logger`.
131
98
  # Note: this logger accepts keyword arguments only in its `_with` methods, e.g. `info_with`.
132
- self.nuclio_logger = (
133
- graph_context.logger
134
- if graph_context
135
- else nuclio.request.Logger(
136
- level=mlrun.mlconf.log_level, name=self._logger_name
137
- )
138
- )
99
+ self.nuclio_logger = nuclio_logger
139
100
 
140
101
  # event data
141
102
  self.start_infer_time = pd.Timestamp(
@@ -166,6 +127,68 @@ class MonitoringApplicationContext:
166
127
  model_endpoint_dict.get(self.endpoint_id) if model_endpoint_dict else None
167
128
  )
168
129
 
130
+ @classmethod
131
+ def _from_ml_ctx(
132
+ cls,
133
+ context: "mlrun.MLClientCtx",
134
+ *,
135
+ application_name: str,
136
+ event: dict[str, Any],
137
+ model_endpoint_dict: Optional[dict[str, ModelEndpoint]] = None,
138
+ sample_df: Optional[pd.DataFrame] = None,
139
+ feature_stats: Optional[FeatureStats] = None,
140
+ ) -> "MonitoringApplicationContext":
141
+ project = context.get_project_object()
142
+ if not project:
143
+ raise mlrun.errors.MLRunValueError("Could not load project from context")
144
+ logger = context.logger
145
+ artifacts_logger = context
146
+ nuclio_logger = nuclio.request.Logger(
147
+ level=mlrun.mlconf.log_level, name=cls._logger_name
148
+ )
149
+ return cls(
150
+ application_name=application_name,
151
+ event=event,
152
+ model_endpoint_dict=model_endpoint_dict,
153
+ project=project,
154
+ logger=logger,
155
+ nuclio_logger=nuclio_logger,
156
+ artifacts_logger=artifacts_logger,
157
+ sample_df=sample_df,
158
+ feature_stats=feature_stats,
159
+ )
160
+
161
+ @classmethod
162
+ def _from_graph_ctx(
163
+ cls,
164
+ graph_context: mlrun.serving.GraphContext,
165
+ *,
166
+ application_name: str,
167
+ event: dict[str, Any],
168
+ model_endpoint_dict: Optional[dict[str, ModelEndpoint]] = None,
169
+ sample_df: Optional[pd.DataFrame] = None,
170
+ feature_stats: Optional[FeatureStats] = None,
171
+ ) -> "MonitoringApplicationContext":
172
+ project = mlrun.load_project(url=graph_context.project)
173
+ nuclio_logger = graph_context.logger
174
+ artifacts_logger = project
175
+ logger = mlrun.utils.create_logger(
176
+ level=mlrun.mlconf.log_level,
177
+ formatter_kind=mlrun.mlconf.log_formatter,
178
+ name=cls._logger_name,
179
+ )
180
+ return cls(
181
+ application_name=application_name,
182
+ event=event,
183
+ project=project,
184
+ model_endpoint_dict=model_endpoint_dict,
185
+ logger=logger,
186
+ nuclio_logger=nuclio_logger,
187
+ artifacts_logger=artifacts_logger,
188
+ sample_df=sample_df,
189
+ feature_stats=feature_stats,
190
+ )
191
+
169
192
  def _get_default_labels(self) -> dict[str, str]:
170
193
  labels = {
171
194
  mlrun_constants.MLRunInternalLabels.runner_pod: socket.gethostname(),
@@ -471,6 +471,7 @@ class _PipelineRunner(abc.ABC):
471
471
  namespace=None,
472
472
  source=None,
473
473
  notifications: typing.Optional[list[mlrun.model.Notification]] = None,
474
+ context: typing.Optional[mlrun.execution.MLClientCtx] = None,
474
475
  ) -> _PipelineRunStatus:
475
476
  pass
476
477
 
@@ -595,6 +596,7 @@ class _KFPRunner(_PipelineRunner):
595
596
  namespace=None,
596
597
  source=None,
597
598
  notifications: typing.Optional[list[mlrun.model.Notification]] = None,
599
+ context: typing.Optional[mlrun.execution.MLClientCtx] = None,
598
600
  ) -> _PipelineRunStatus:
599
601
  pipeline_context.set(project, workflow_spec)
600
602
  workflow_handler = _PipelineRunner._get_handler(
@@ -646,9 +648,7 @@ class _KFPRunner(_PipelineRunner):
646
648
  )
647
649
  project.notifiers.push_pipeline_start_message(
648
650
  project.metadata.name,
649
- project.get_param("commit_id", None),
650
- run_id,
651
- True,
651
+ context.uid,
652
652
  )
653
653
  pipeline_context.clear()
654
654
  return _PipelineRunStatus(run_id, cls, project=project, workflow=workflow_spec)
@@ -722,6 +722,7 @@ class _LocalRunner(_PipelineRunner):
722
722
  namespace=None,
723
723
  source=None,
724
724
  notifications: typing.Optional[list[mlrun.model.Notification]] = None,
725
+ context: typing.Optional[mlrun.execution.MLClientCtx] = None,
725
726
  ) -> _PipelineRunStatus:
726
727
  pipeline_context.set(project, workflow_spec)
727
728
  workflow_handler = _PipelineRunner._get_handler(
@@ -805,6 +806,7 @@ class _RemoteRunner(_PipelineRunner):
805
806
  namespace: typing.Optional[str] = None,
806
807
  source: typing.Optional[str] = None,
807
808
  notifications: typing.Optional[list[mlrun.model.Notification]] = None,
809
+ context: typing.Optional[mlrun.execution.MLClientCtx] = None,
808
810
  ) -> typing.Optional[_PipelineRunStatus]:
809
811
  workflow_name = normalize_workflow_name(name=name, project_name=project.name)
810
812
  workflow_id = None
@@ -1127,6 +1129,7 @@ def load_and_run_workflow(
1127
1129
  engine=engine,
1128
1130
  local=local,
1129
1131
  notifications=start_notifications,
1132
+ context=context,
1130
1133
  )
1131
1134
  context.log_result(key="workflow_id", value=run.run_id)
1132
1135
  context.log_result(key="engine", value=run._engine.engine, commit=True)