mlrun 1.8.0rc4__py3-none-any.whl → 1.8.0rc7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of mlrun has been flagged as potentially problematic by the registry's automated scanner; consult the registry page for this release for further details.

Files changed (75)
  1. mlrun/__init__.py +5 -3
  2. mlrun/alerts/alert.py +129 -2
  3. mlrun/artifacts/__init__.py +1 -1
  4. mlrun/artifacts/base.py +12 -1
  5. mlrun/artifacts/document.py +59 -38
  6. mlrun/common/constants.py +1 -0
  7. mlrun/common/model_monitoring/__init__.py +0 -2
  8. mlrun/common/model_monitoring/helpers.py +0 -28
  9. mlrun/common/schemas/__init__.py +2 -4
  10. mlrun/common/schemas/alert.py +80 -1
  11. mlrun/common/schemas/artifact.py +4 -0
  12. mlrun/common/schemas/client_spec.py +0 -1
  13. mlrun/common/schemas/model_monitoring/__init__.py +0 -6
  14. mlrun/common/schemas/model_monitoring/constants.py +11 -9
  15. mlrun/common/schemas/model_monitoring/model_endpoints.py +77 -149
  16. mlrun/common/schemas/notification.py +6 -0
  17. mlrun/common/schemas/project.py +3 -0
  18. mlrun/config.py +2 -3
  19. mlrun/datastore/datastore_profile.py +57 -17
  20. mlrun/datastore/sources.py +1 -2
  21. mlrun/datastore/vectorstore.py +67 -59
  22. mlrun/db/base.py +29 -19
  23. mlrun/db/factory.py +0 -3
  24. mlrun/db/httpdb.py +224 -161
  25. mlrun/db/nopdb.py +36 -17
  26. mlrun/execution.py +46 -32
  27. mlrun/feature_store/api.py +1 -0
  28. mlrun/model.py +7 -0
  29. mlrun/model_monitoring/__init__.py +3 -2
  30. mlrun/model_monitoring/api.py +55 -53
  31. mlrun/model_monitoring/applications/_application_steps.py +4 -2
  32. mlrun/model_monitoring/applications/base.py +165 -6
  33. mlrun/model_monitoring/applications/context.py +88 -37
  34. mlrun/model_monitoring/applications/evidently_base.py +0 -1
  35. mlrun/model_monitoring/applications/histogram_data_drift.py +3 -7
  36. mlrun/model_monitoring/controller.py +43 -37
  37. mlrun/model_monitoring/db/__init__.py +0 -2
  38. mlrun/model_monitoring/db/tsdb/base.py +2 -1
  39. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +2 -1
  40. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +43 -0
  41. mlrun/model_monitoring/helpers.py +79 -66
  42. mlrun/model_monitoring/stream_processing.py +83 -270
  43. mlrun/model_monitoring/writer.py +1 -10
  44. mlrun/projects/pipelines.py +37 -1
  45. mlrun/projects/project.py +171 -74
  46. mlrun/run.py +40 -0
  47. mlrun/runtimes/nuclio/function.py +7 -6
  48. mlrun/runtimes/nuclio/serving.py +9 -2
  49. mlrun/serving/routers.py +158 -145
  50. mlrun/serving/server.py +6 -0
  51. mlrun/serving/states.py +21 -7
  52. mlrun/serving/v2_serving.py +70 -61
  53. mlrun/utils/helpers.py +14 -30
  54. mlrun/utils/notifications/notification/mail.py +36 -9
  55. mlrun/utils/notifications/notification_pusher.py +43 -18
  56. mlrun/utils/version/version.json +2 -2
  57. {mlrun-1.8.0rc4.dist-info → mlrun-1.8.0rc7.dist-info}/METADATA +5 -4
  58. {mlrun-1.8.0rc4.dist-info → mlrun-1.8.0rc7.dist-info}/RECORD +62 -75
  59. mlrun/common/schemas/model_monitoring/model_endpoint_v2.py +0 -149
  60. mlrun/model_monitoring/db/stores/__init__.py +0 -136
  61. mlrun/model_monitoring/db/stores/base/__init__.py +0 -15
  62. mlrun/model_monitoring/db/stores/base/store.py +0 -154
  63. mlrun/model_monitoring/db/stores/sqldb/__init__.py +0 -13
  64. mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +0 -46
  65. mlrun/model_monitoring/db/stores/sqldb/models/base.py +0 -93
  66. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +0 -47
  67. mlrun/model_monitoring/db/stores/sqldb/models/sqlite.py +0 -25
  68. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +0 -408
  69. mlrun/model_monitoring/db/stores/v3io_kv/__init__.py +0 -13
  70. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +0 -464
  71. mlrun/model_monitoring/model_endpoint.py +0 -120
  72. {mlrun-1.8.0rc4.dist-info → mlrun-1.8.0rc7.dist-info}/LICENSE +0 -0
  73. {mlrun-1.8.0rc4.dist-info → mlrun-1.8.0rc7.dist-info}/WHEEL +0 -0
  74. {mlrun-1.8.0rc4.dist-info → mlrun-1.8.0rc7.dist-info}/entry_points.txt +0 -0
  75. {mlrun-1.8.0rc4.dist-info → mlrun-1.8.0rc7.dist-info}/top_level.txt +0 -0
mlrun/__init__.py CHANGED
@@ -39,6 +39,7 @@ from .execution import MLClientCtx
39
39
  from .model import RunObject, RunTemplate, new_task
40
40
  from .package import ArtifactType, DefaultPackager, Packager, handler
41
41
  from .projects import (
42
+ MlrunProject,
42
43
  ProjectMetadata,
43
44
  build_function,
44
45
  deploy_function,
@@ -59,6 +60,7 @@ from .run import (
59
60
  get_pipeline,
60
61
  import_function,
61
62
  new_function,
63
+ retry_pipeline,
62
64
  wait_for_pipeline_completion,
63
65
  )
64
66
  from .runtimes import mounts, new_model_server
@@ -162,10 +164,10 @@ def set_environment(
162
164
  return mlconf.default_project, mlconf.artifact_path
163
165
 
164
166
 
165
- def get_current_project(silent=False):
167
+ def get_current_project(silent: bool = False) -> Optional[MlrunProject]:
166
168
  if not pipeline_context.project and not silent:
167
169
  raise MLRunInvalidArgumentError(
168
- "current project is not initialized, use new, get or load project methods first"
170
+ "No current project is initialized. Use new, get or load project functions first."
169
171
  )
170
172
  return pipeline_context.project
171
173
 
@@ -182,7 +184,7 @@ def get_sample_path(subpath=""):
182
184
  return samples_path
183
185
 
184
186
 
185
- def set_env_from_file(env_file: str, return_dict: bool = False):
187
+ def set_env_from_file(env_file: str, return_dict: bool = False) -> Optional[dict]:
186
188
  """Read and set and/or return environment variables from a file
187
189
  the env file should have lines in the form KEY=VALUE, comment line start with "#"
188
190
 
mlrun/alerts/alert.py CHANGED
@@ -11,7 +11,7 @@
11
11
  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
-
14
+ from datetime import datetime
15
15
  from typing import Optional, Union
16
16
 
17
17
  import mlrun
@@ -30,6 +30,7 @@ class AlertConfig(ModelObj):
30
30
  "state",
31
31
  "count",
32
32
  "created",
33
+ "updated",
33
34
  ]
34
35
  _fields_to_serialize = ModelObj._fields_to_serialize + [
35
36
  "entities",
@@ -55,6 +56,7 @@ class AlertConfig(ModelObj):
55
56
  state: alert_objects.AlertActiveState = None,
56
57
  created: Optional[str] = None,
57
58
  count: Optional[int] = None,
59
+ updated: Optional[str] = None,
58
60
  ):
59
61
  """Alert config object
60
62
 
@@ -118,6 +120,7 @@ class AlertConfig(ModelObj):
118
120
  :param state: State of the alert, may be active/inactive (user should not supply it)
119
121
  :param created: When the alert is created (user should not supply it)
120
122
  :param count: Internal counter of the alert (user should not supply it)
123
+ :param updated: The last update time of the alert (user should not supply it)
121
124
  """
122
125
  self.project = project
123
126
  self.name = name
@@ -131,12 +134,39 @@ class AlertConfig(ModelObj):
131
134
  self.entities = entities
132
135
  self.id = id
133
136
  self.state = state
134
- self.created = created
137
+ self._created = created
135
138
  self.count = count
139
+ self._updated = updated
136
140
 
137
141
  if template:
138
142
  self._apply_template(template)
139
143
 
144
+ @property
145
+ def created(self) -> datetime:
146
+ """
147
+ Get the `created` field as a datetime object.
148
+ """
149
+ if isinstance(self._created, str):
150
+ return datetime.fromisoformat(self._created)
151
+ return self._created
152
+
153
+ @created.setter
154
+ def created(self, created):
155
+ self._created = created
156
+
157
+ @property
158
+ def updated(self) -> datetime:
159
+ """
160
+ Get the `updated` field as a datetime object.
161
+ """
162
+ if isinstance(self._updated, str):
163
+ return datetime.fromisoformat(self._updated)
164
+ return self._updated
165
+
166
+ @updated.setter
167
+ def updated(self, updated):
168
+ self._updated = updated
169
+
140
170
  def validate_required_fields(self):
141
171
  if not self.name:
142
172
  raise mlrun.errors.MLRunInvalidArgumentError("Alert name must be provided")
@@ -253,3 +283,100 @@ class AlertConfig(ModelObj):
253
283
  self.criteria = self.criteria or template.criteria
254
284
  self.trigger = self.trigger or template.trigger
255
285
  self.reset_policy = self.reset_policy or template.reset_policy
286
+
287
+ def list_activations(
288
+ self,
289
+ since: Optional[datetime] = None,
290
+ until: Optional[datetime] = None,
291
+ from_last_update: bool = False,
292
+ ) -> list[mlrun.common.schemas.alert.AlertActivation]:
293
+ """
294
+ Retrieve a list of all alert activations.
295
+
296
+ :param since: Filters for alert activations occurring after this timestamp.
297
+ :param until: Filters for alert activations occurring before this timestamp.
298
+ :param from_last_update: If set to True, retrieves alert activations since the alert's last update time.
299
+ if both since and from_last_update=True are provided, from_last_update takes precedence
300
+ and the since value will be overridden by the alert's last update timestamp.
301
+
302
+ :returns: A list of alert activations matching the provided filters.
303
+ """
304
+ db = mlrun.get_run_db()
305
+ if from_last_update and self._updated:
306
+ since = self.updated
307
+
308
+ return db.list_alert_activations(
309
+ project=self.project,
310
+ name=self.name,
311
+ since=since,
312
+ until=until,
313
+ )
314
+
315
+ def paginated_list_activations(
316
+ self,
317
+ *args,
318
+ page: Optional[int] = None,
319
+ page_size: Optional[int] = None,
320
+ page_token: Optional[str] = None,
321
+ from_last_update: bool = False,
322
+ **kwargs,
323
+ ) -> tuple[mlrun.common.schemas.alert.AlertActivation, Optional[str]]:
324
+ """
325
+ List alerts activations with support for pagination and various filtering options.
326
+
327
+ This method retrieves a paginated list of alert activations based on the specified filter parameters.
328
+ Pagination is controlled using the `page`, `page_size`, and `page_token` parameters. The method
329
+ will return a list of alert activations that match the filtering criteria provided.
330
+
331
+ For detailed information about the parameters, refer to the list_activations method:
332
+ See :py:func:`~list_activations` for more details.
333
+
334
+ Examples::
335
+
336
+ # Fetch first page of alert activations with page size of 5
337
+ alert_activations, token = alert_config.paginated_list_activations(page_size=5)
338
+ # Fetch next page using the pagination token from the previous response
339
+ alert_activations, token = alert_config.paginated_list_activations(
340
+ page_token=token
341
+ )
342
+ # Fetch alert activations for a specific page (e.g., page 3)
343
+ alert_activations, token = alert_config.paginated_list_activations(
344
+ page=3, page_size=5
345
+ )
346
+
347
+ # Automatically iterate over all pages without explicitly specifying the page number
348
+ alert_activations = []
349
+ token = None
350
+ while True:
351
+ page_alert_activations, token = alert_config.paginated_list_activations(
352
+ page_token=token, page_size=5
353
+ )
354
+ alert_activations.extend(page_alert_activations)
355
+
356
+ # If token is None and page_alert_activations is empty, we've reached the end (no more activations).
357
+ # If token is None and page_alert_activations is not empty, we've fetched the last page of activations.
358
+ if not token:
359
+ break
360
+ print(f"Total alert activations retrieved: {len(alert_activations)}")
361
+
362
+ :param page: The page number to retrieve. If not provided, the next page will be retrieved.
363
+ :param page_size: The number of items per page to retrieve. Up to `page_size` responses are expected.
364
+ :param page_token: A pagination token used to retrieve the next page of results. Should not be provided
365
+ for the first request.
366
+ :param from_last_update: If set to True, retrieves alert activations since the alert's last update time.
367
+
368
+ :returns: A tuple containing the list of alert activations and an optional `page_token` for pagination.
369
+ """
370
+ if from_last_update and self._updated:
371
+ kwargs["since"] = self.updated
372
+
373
+ db = mlrun.get_run_db()
374
+ return db.paginated_list_alert_activations(
375
+ *args,
376
+ project=self.project,
377
+ name=self.name,
378
+ page=page,
379
+ page_size=page_size,
380
+ page_token=page_token,
381
+ **kwargs,
382
+ )
@@ -23,7 +23,7 @@ from .base import (
23
23
  get_artifact_meta,
24
24
  )
25
25
  from .dataset import DatasetArtifact, TableArtifact, update_dataset_meta
26
- from .document import DocumentArtifact, DocumentLoader, DocumentLoaderSpec
26
+ from .document import DocumentArtifact, DocumentLoaderSpec, MLRunLoader
27
27
  from .manager import (
28
28
  ArtifactManager,
29
29
  ArtifactProducer,
mlrun/artifacts/base.py CHANGED
@@ -36,7 +36,16 @@ from ..utils import (
36
36
 
37
37
 
38
38
  class ArtifactMetadata(ModelObj):
39
- _dict_fields = ["key", "project", "iter", "tree", "description", "hash", "tag"]
39
+ _dict_fields = [
40
+ "key",
41
+ "project",
42
+ "iter",
43
+ "tree",
44
+ "description",
45
+ "hash",
46
+ "tag",
47
+ "uid",
48
+ ]
40
49
  _extra_fields = ["updated", "labels"]
41
50
 
42
51
  def __init__(
@@ -48,6 +57,7 @@ class ArtifactMetadata(ModelObj):
48
57
  description=None,
49
58
  hash=None,
50
59
  tag=None,
60
+ uid=None,
51
61
  ):
52
62
  self.key = key
53
63
  self.project = project
@@ -58,6 +68,7 @@ class ArtifactMetadata(ModelObj):
58
68
  self.labels = {}
59
69
  self.updated = None
60
70
  self.tag = tag # temp store of the tag
71
+ self.uid = uid
61
72
 
62
73
  def base_dict(self):
63
74
  return super().to_dict()
@@ -12,7 +12,6 @@
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
14
 
15
- import ast
16
15
  import re
17
16
  import tempfile
18
17
  from collections.abc import Iterator
@@ -74,14 +73,14 @@ class DocumentLoaderSpec(ModelObj):
74
73
  return loader
75
74
 
76
75
 
77
- class DocumentLoader:
76
+ class MLRunLoader:
78
77
  """
79
78
  A factory class for creating instances of a dynamically defined document loader.
80
79
 
81
80
  Args:
82
81
  artifact_key (str): The key for the artifact to be logged.It can include '%%' which will be replaced
83
82
  by a hex-encoded version of the source path.
84
- source_path (str): The source path of the document to be loaded.
83
+ local_path (str): The source path of the document to be loaded.
85
84
  loader_spec (DocumentLoaderSpec): Specification for the document loader.
86
85
  producer (Optional[Union[MlrunProject, str, MLClientCtx]], optional): The producer of the document
87
86
  upload (bool, optional): Flag indicating whether to upload the document.
@@ -104,7 +103,7 @@ class DocumentLoader:
104
103
  class DynamicDocumentLoader(BaseLoader):
105
104
  def __init__(
106
105
  self,
107
- source_path,
106
+ local_path,
108
107
  loader_spec,
109
108
  artifact_key,
110
109
  producer,
@@ -112,12 +111,12 @@ class DocumentLoader:
112
111
  ):
113
112
  self.producer = producer
114
113
  self.artifact_key = (
115
- DocumentLoader.artifact_key_instance(artifact_key, source_path)
114
+ MLRunLoader.artifact_key_instance(artifact_key, local_path)
116
115
  if "%%" in artifact_key
117
116
  else artifact_key
118
117
  )
119
118
  self.loader_spec = loader_spec
120
- self.source_path = source_path
119
+ self.local_path = local_path
121
120
  self.upload = upload
122
121
 
123
122
  # Resolve the producer
@@ -129,16 +128,17 @@ class DocumentLoader:
129
128
  def lazy_load(self) -> Iterator["Document"]: # noqa: F821
130
129
  artifact = self.producer.log_document(
131
130
  key=self.artifact_key,
132
- document_loader=self.loader_spec,
133
- src_path=self.source_path,
131
+ document_loader_spec=self.loader_spec,
132
+ local_path=self.local_path,
134
133
  upload=self.upload,
135
134
  )
136
- yield artifact.to_langchain_documents()
135
+ res = artifact.to_langchain_documents()
136
+ yield res[0]
137
137
 
138
138
  # Return an instance of the dynamically defined subclass
139
139
  instance = DynamicDocumentLoader(
140
140
  artifact_key=artifact_key,
141
- source_path=source_path,
141
+ local_path=source_path,
142
142
  loader_spec=loader_spec,
143
143
  producer=producer,
144
144
  upload=upload,
@@ -195,12 +195,15 @@ class DocumentArtifact(Artifact):
195
195
  def __init__(
196
196
  self,
197
197
  *args,
198
+ document_loader: Optional[DocumentLoaderSpec] = None,
199
+ collections: Optional[dict] = None,
200
+ original_source: Optional[str] = None,
198
201
  **kwargs,
199
202
  ):
200
203
  super().__init__(*args, **kwargs)
201
- self.document_loader = None
202
- self.collections = set()
203
- self.original_source = None
204
+ self.document_loader = document_loader
205
+ self.collections = collections if collections is not None else {}
206
+ self.original_source = original_source
204
207
 
205
208
  """
206
209
  A specific artifact class inheriting from generic artifact, used to maintain Document meta-data.
@@ -216,14 +219,17 @@ class DocumentArtifact(Artifact):
216
219
 
217
220
  def __init__(
218
221
  self,
219
- key=None,
220
- document_loader: DocumentLoaderSpec = DocumentLoaderSpec(),
222
+ original_source: Optional[str] = None,
223
+ document_loader_spec: Optional[DocumentLoaderSpec] = None,
221
224
  **kwargs,
222
225
  ):
223
- super().__init__(key, **kwargs)
224
- self.spec.document_loader = document_loader.to_str()
225
- if "src_path" in kwargs:
226
- self.spec.original_source = kwargs["src_path"]
226
+ super().__init__(**kwargs)
227
+ self.spec.document_loader = (
228
+ document_loader_spec.to_dict()
229
+ if document_loader_spec
230
+ else self.spec.document_loader
231
+ )
232
+ self.spec.original_source = original_source or self.spec.original_source
227
233
 
228
234
  @property
229
235
  def spec(self) -> DocumentArtifactSpec:
@@ -234,17 +240,8 @@ class DocumentArtifact(Artifact):
234
240
  self._spec = self._verify_dict(
235
241
  spec, "spec", DocumentArtifact.DocumentArtifactSpec
236
242
  )
237
- # _verify_dict doesn't handle set, so we need to convert it back
238
- if isinstance(self._spec.collections, str):
239
- self._spec.collections = ast.literal_eval(self._spec.collections)
240
-
241
- @property
242
- def inputs(self):
243
- # To keep the interface consistent with the project.update_artifact() when we update the artifact
244
- return None
245
243
 
246
- @property
247
- def source(self):
244
+ def get_source(self):
248
245
  return generate_artifact_uri(self.metadata.project, self.spec.db_key)
249
246
 
250
247
  def to_langchain_documents(
@@ -262,9 +259,8 @@ class DocumentArtifact(Artifact):
262
259
  Returns:
263
260
  list[Document]: A list of LangChain Document objects.
264
261
  """
265
- dictionary = ast.literal_eval(self.spec.document_loader)
266
- loader_spec = DocumentLoaderSpec.from_dict(dictionary)
267
262
 
263
+ loader_spec = DocumentLoaderSpec.from_dict(self.spec.document_loader)
268
264
  if self.get_target_path():
269
265
  with tempfile.NamedTemporaryFile() as tmp_file:
270
266
  mlrun.datastore.store_manager.object(
@@ -272,8 +268,8 @@ class DocumentArtifact(Artifact):
272
268
  ).download(tmp_file.name)
273
269
  loader = loader_spec.make_loader(tmp_file.name)
274
270
  documents = loader.load()
275
- elif self.src_path:
276
- loader = loader_spec.make_loader(self.src_path)
271
+ elif self.spec.original_source:
272
+ loader = loader_spec.make_loader(self.spec.original_source)
277
273
  documents = loader.load()
278
274
  else:
279
275
  raise ValueError(
@@ -289,8 +285,8 @@ class DocumentArtifact(Artifact):
289
285
 
290
286
  metadata = document.metadata
291
287
 
292
- metadata[self.METADATA_ORIGINAL_SOURCE_KEY] = self.src_path
293
- metadata[self.METADATA_SOURCE_KEY] = self.source
288
+ metadata[self.METADATA_ORIGINAL_SOURCE_KEY] = self.spec.original_source
289
+ metadata[self.METADATA_SOURCE_KEY] = self.get_source()
294
290
  metadata[self.METADATA_ARTIFACT_URI_KEY] = self.uri
295
291
  if self.get_target_path():
296
292
  metadata[self.METADATA_ARTIFACT_TARGET_PATH_KEY] = (
@@ -301,13 +297,38 @@ class DocumentArtifact(Artifact):
301
297
  metadata[self.METADATA_CHUNK_KEY] = str(idx)
302
298
  doc = Document(
303
299
  page_content=text,
304
- metadata=metadata,
300
+ metadata=metadata.copy(),
305
301
  )
306
302
  results.append(doc)
307
303
  return results
308
304
 
309
305
  def collection_add(self, collection_id: str) -> None:
310
- self.spec.collections.add(collection_id)
306
+ """
307
+ Add a collection ID to the artifact's collection list.
308
+
309
+ Adds the specified collection ID to the artifact's collection mapping if it
310
+ doesn't already exist.
311
+ This method only modifies the client-side artifact object and does not persist
312
+ the changes to the MLRun DB. To save the changes permanently, you must call
313
+ project.update_artifact() after this method.
314
+
315
+ Args:
316
+ collection_id (str): The ID of the collection to add
317
+ """
318
+ if collection_id not in self.spec.collections:
319
+ self.spec.collections[collection_id] = "1"
311
320
 
312
321
  def collection_remove(self, collection_id: str) -> None:
313
- return self.spec.collections.discard(collection_id)
322
+ """
323
+ Remove a collection ID from the artifact's collection list.
324
+
325
+ Removes the specified collection ID from the artifact's local collection mapping.
326
+ This method only modifies the client-side artifact object and does not persist
327
+ the changes to the MLRun DB. To save the changes permanently, you must call
328
+ project.update_artifact() or context.update_artifact() after this method.
329
+
330
+ Args:
331
+ collection_id (str): The ID of the collection to remove
332
+ """
333
+ if collection_id in self.spec.collections:
334
+ self.spec.collections.pop(collection_id)
mlrun/common/constants.py CHANGED
@@ -69,6 +69,7 @@ class MLRunInternalLabels:
69
69
  producer_type = f"{MLRUN_LABEL_PREFIX}producer-type"
70
70
  app_name = f"{MLRUN_LABEL_PREFIX}app-name"
71
71
  endpoint_id = f"{MLRUN_LABEL_PREFIX}endpoint-id"
72
+ endpoint_name = f"{MLRUN_LABEL_PREFIX}endpoint-name"
72
73
  host = "host"
73
74
  job_type = "job-type"
74
75
  kind = "kind"
@@ -11,5 +11,3 @@
11
11
  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
-
15
- from .helpers import create_model_endpoint_uid
@@ -17,11 +17,6 @@ import typing
17
17
 
18
18
  import mlrun.common
19
19
  import mlrun.common.schemas.model_monitoring.constants as mm_constants
20
- from mlrun.common.schemas.model_monitoring import (
21
- EndpointUID,
22
- FunctionURI,
23
- VersionedModel,
24
- )
25
20
 
26
21
  FeatureStats = typing.NewType("FeatureStats", dict[str, dict[str, typing.Any]])
27
22
  Histogram = typing.NewType("Histogram", list[list])
@@ -31,29 +26,6 @@ BinEdges = typing.NewType("BinEdges", list[float])
31
26
  _MAX_FLOAT = sys.float_info.max
32
27
 
33
28
 
34
- def create_model_endpoint_uid(function_uri: str, versioned_model: str):
35
- function_uri = FunctionURI.from_string(function_uri)
36
- versioned_model = VersionedModel.from_string(versioned_model)
37
-
38
- if (
39
- not function_uri.project
40
- or not function_uri.function
41
- or not versioned_model.model
42
- ):
43
- raise ValueError("Both function_uri and versioned_model have to be initialized")
44
-
45
- uid = EndpointUID(
46
- function_uri.project,
47
- function_uri.function,
48
- function_uri.tag,
49
- function_uri.hash_key,
50
- versioned_model.model,
51
- versioned_model.version,
52
- )
53
-
54
- return uid
55
-
56
-
57
29
  def parse_model_endpoint_project_prefix(path: str, project_name: str):
58
30
  return path.split(project_name, 1)[0] + project_name
59
31
 
@@ -14,6 +14,7 @@
14
14
 
15
15
  from .alert import (
16
16
  AlertActivation,
17
+ AlertActivations,
17
18
  AlertActiveState,
18
19
  AlertConfig,
19
20
  AlertNotification,
@@ -149,10 +150,6 @@ from .model_monitoring import (
149
150
  ModelEndpointMetadata,
150
151
  ModelEndpointSpec,
151
152
  ModelEndpointStatus,
152
- ModelEndpointV2,
153
- ModelEndpointV2Metadata,
154
- ModelEndpointV2Spec,
155
- ModelEndpointV2Status,
156
153
  ModelMonitoringMode,
157
154
  ModelMonitoringStoreKinds,
158
155
  MonitoringFunctionNames,
@@ -165,6 +162,7 @@ from .notification import (
165
162
  NotificationSeverity,
166
163
  NotificationState,
167
164
  NotificationStatus,
165
+ NotificationSummary,
168
166
  SetNotificationRequest,
169
167
  )
170
168
  from .object import ObjectKind, ObjectMetadata, ObjectSpec, ObjectStatus
@@ -12,8 +12,10 @@
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
14
  #
15
+ from collections import defaultdict
16
+ from collections.abc import Iterator
15
17
  from datetime import datetime
16
- from typing import Annotated, Optional, Union
18
+ from typing import Annotated, Any, Callable, Optional, Union
17
19
 
18
20
  import pydantic.v1
19
21
 
@@ -156,6 +158,7 @@ class AlertConfig(pydantic.v1.BaseModel):
156
158
  notifications: pydantic.v1.conlist(AlertNotification, min_items=1)
157
159
  state: AlertActiveState = AlertActiveState.INACTIVE
158
160
  count: Optional[int] = 0
161
+ updated: datetime = None
159
162
 
160
163
  def get_raw_notifications(self) -> list[notification_objects.Notification]:
161
164
  return [
@@ -203,6 +206,7 @@ class AlertTemplate(
203
206
 
204
207
 
205
208
  class AlertActivation(pydantic.v1.BaseModel):
209
+ id: int
206
210
  name: str
207
211
  project: str
208
212
  severity: AlertSeverity
@@ -213,3 +217,78 @@ class AlertActivation(pydantic.v1.BaseModel):
213
217
  event_kind: EventKind
214
218
  number_of_events: int
215
219
  notifications: list[notification_objects.NotificationState]
220
+ reset_time: Optional[datetime] = None
221
+
222
+ def group_key(self, attributes: list[str]) -> Union[Any, tuple]:
223
+ """
224
+ Dynamically create a key for grouping based on the provided attributes.
225
+ - If there's only one attribute, return the value directly (not a single-element tuple).
226
+ - If there are multiple attributes, return them as a tuple for grouping.
227
+
228
+ This ensures grouping behaves intuitively without redundant tuple representations.
229
+ """
230
+ if len(attributes) == 1:
231
+ # Avoid single-element tuple like (high,) when only one grouping attribute is used
232
+ return getattr(self, attributes[0])
233
+ # Otherwise, return a tuple of all specified attributes
234
+ return tuple(getattr(self, attr) for attr in attributes)
235
+
236
+
237
+ class AlertActivations(pydantic.v1.BaseModel):
238
+ activations: list[AlertActivation]
239
+ pagination: Optional[dict]
240
+
241
+ def __iter__(self) -> Iterator[AlertActivation]:
242
+ return iter(self.activations)
243
+
244
+ def __getitem__(self, index: int) -> AlertActivation:
245
+ return self.activations[index]
246
+
247
+ def __len__(self) -> int:
248
+ return len(self.activations)
249
+
250
+ def group_by(self, *attributes: str) -> dict:
251
+ """
252
+ Group alert activations by specified attributes.
253
+
254
+ Args:
255
+ :param attributes: Attributes to group by.
256
+
257
+ :returns: A dictionary where keys are tuples of attribute values and values are lists of
258
+ AlertActivation objects.
259
+
260
+ Example:
261
+ # Group by project and severity
262
+ grouped = activations.group_by("project", "severity")
263
+ """
264
+ grouped = defaultdict(list)
265
+ for activation in self.activations:
266
+ key = activation.group_key(attributes)
267
+ grouped[key].append(activation)
268
+ return dict(grouped)
269
+
270
+ def aggregate_by(
271
+ self,
272
+ group_by_attrs: list[str],
273
+ aggregation_function: Callable[[list[AlertActivation]], Any],
274
+ ) -> dict:
275
+ """
276
+ Aggregate alert activations by specified attributes using a given aggregation function.
277
+
278
+ Args:
279
+ :param group_by_attrs: Attributes to group by.
280
+ :param aggregation_function: Function to aggregate grouped activations.
281
+
282
+ :returns: A dictionary where keys are tuples of attribute values and values are the result
283
+ of the aggregation function.
284
+
285
+ Example:
286
+ # Aggregate by name and entity_id and count number of activations in each group
287
+ activations.aggregate_by(["name", "entity_id"], lambda activations: len(activations))
288
+ """
289
+ grouped = self.group_by(*group_by_attrs)
290
+ aggregated = {
291
+ key: aggregation_function(activations)
292
+ for key, activations in grouped.items()
293
+ }
294
+ return aggregated
@@ -25,6 +25,7 @@ from .object import ObjectStatus
25
25
  class ArtifactCategories(mlrun.common.types.StrEnum):
26
26
  model = "model"
27
27
  dataset = "dataset"
28
+ document = "document"
28
29
  other = "other"
29
30
 
30
31
  # we define the link as a category to prevent import cycles, but it's not a real category
@@ -38,11 +39,14 @@ class ArtifactCategories(mlrun.common.types.StrEnum):
38
39
  return [ArtifactCategories.model.value, link_kind], False
39
40
  if self.value == ArtifactCategories.dataset.value:
40
41
  return [ArtifactCategories.dataset.value, link_kind], False
42
+ if self.value == ArtifactCategories.document.value:
43
+ return [ArtifactCategories.document.value, link_kind], False
41
44
  if self.value == ArtifactCategories.other.value:
42
45
  return (
43
46
  [
44
47
  ArtifactCategories.model.value,
45
48
  ArtifactCategories.dataset.value,
49
+ ArtifactCategories.document.value,
46
50
  ],
47
51
  True,
48
52
  )
@@ -57,7 +57,6 @@ class ClientSpec(pydantic.v1.BaseModel):
57
57
  redis_url: typing.Optional[str]
58
58
  redis_type: typing.Optional[str]
59
59
  sql_url: typing.Optional[str]
60
- model_endpoint_monitoring_endpoint_store_connection: typing.Optional[str]
61
60
  model_monitoring_tsdb_connection: typing.Optional[str]
62
61
  ce: typing.Optional[dict]
63
62
  # not passing them as one object as it possible client user would like to override only one of the params