mlrun 1.8.0rc5__py3-none-any.whl → 1.8.0rc9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of mlrun has been flagged as potentially problematic.
Files changed (74)
  1. mlrun/__init__.py +1 -0
  2. mlrun/artifacts/__init__.py +1 -1
  3. mlrun/artifacts/base.py +21 -1
  4. mlrun/artifacts/document.py +62 -39
  5. mlrun/artifacts/manager.py +12 -5
  6. mlrun/common/constants.py +1 -0
  7. mlrun/common/model_monitoring/__init__.py +0 -2
  8. mlrun/common/model_monitoring/helpers.py +0 -28
  9. mlrun/common/schemas/__init__.py +2 -4
  10. mlrun/common/schemas/alert.py +77 -1
  11. mlrun/common/schemas/client_spec.py +0 -1
  12. mlrun/common/schemas/model_monitoring/__init__.py +0 -6
  13. mlrun/common/schemas/model_monitoring/constants.py +11 -9
  14. mlrun/common/schemas/model_monitoring/model_endpoints.py +77 -149
  15. mlrun/common/schemas/notification.py +6 -0
  16. mlrun/common/schemas/project.py +3 -0
  17. mlrun/config.py +2 -3
  18. mlrun/datastore/datastore_profile.py +57 -17
  19. mlrun/datastore/sources.py +1 -2
  20. mlrun/datastore/store_resources.py +7 -2
  21. mlrun/datastore/vectorstore.py +99 -62
  22. mlrun/db/base.py +34 -20
  23. mlrun/db/httpdb.py +249 -163
  24. mlrun/db/nopdb.py +40 -17
  25. mlrun/execution.py +14 -7
  26. mlrun/feature_store/api.py +1 -0
  27. mlrun/model.py +3 -0
  28. mlrun/model_monitoring/__init__.py +3 -2
  29. mlrun/model_monitoring/api.py +64 -53
  30. mlrun/model_monitoring/applications/_application_steps.py +3 -1
  31. mlrun/model_monitoring/applications/base.py +115 -15
  32. mlrun/model_monitoring/applications/context.py +42 -24
  33. mlrun/model_monitoring/applications/histogram_data_drift.py +1 -1
  34. mlrun/model_monitoring/controller.py +43 -37
  35. mlrun/model_monitoring/db/__init__.py +0 -2
  36. mlrun/model_monitoring/db/tsdb/base.py +2 -1
  37. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +2 -1
  38. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +43 -0
  39. mlrun/model_monitoring/helpers.py +78 -66
  40. mlrun/model_monitoring/stream_processing.py +83 -270
  41. mlrun/model_monitoring/writer.py +1 -10
  42. mlrun/projects/pipelines.py +37 -1
  43. mlrun/projects/project.py +173 -70
  44. mlrun/run.py +40 -0
  45. mlrun/runtimes/nuclio/function.py +7 -6
  46. mlrun/runtimes/nuclio/serving.py +9 -4
  47. mlrun/serving/routers.py +158 -145
  48. mlrun/serving/server.py +6 -0
  49. mlrun/serving/states.py +21 -7
  50. mlrun/serving/v2_serving.py +94 -68
  51. mlrun/utils/helpers.py +23 -33
  52. mlrun/utils/notifications/notification/mail.py +17 -6
  53. mlrun/utils/notifications/notification_pusher.py +9 -5
  54. mlrun/utils/regex.py +8 -1
  55. mlrun/utils/version/version.json +2 -2
  56. {mlrun-1.8.0rc5.dist-info → mlrun-1.8.0rc9.dist-info}/METADATA +2 -2
  57. {mlrun-1.8.0rc5.dist-info → mlrun-1.8.0rc9.dist-info}/RECORD +61 -74
  58. mlrun/common/schemas/model_monitoring/model_endpoint_v2.py +0 -149
  59. mlrun/model_monitoring/db/stores/__init__.py +0 -136
  60. mlrun/model_monitoring/db/stores/base/__init__.py +0 -15
  61. mlrun/model_monitoring/db/stores/base/store.py +0 -154
  62. mlrun/model_monitoring/db/stores/sqldb/__init__.py +0 -13
  63. mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +0 -46
  64. mlrun/model_monitoring/db/stores/sqldb/models/base.py +0 -93
  65. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +0 -47
  66. mlrun/model_monitoring/db/stores/sqldb/models/sqlite.py +0 -25
  67. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +0 -408
  68. mlrun/model_monitoring/db/stores/v3io_kv/__init__.py +0 -13
  69. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +0 -464
  70. mlrun/model_monitoring/model_endpoint.py +0 -120
  71. {mlrun-1.8.0rc5.dist-info → mlrun-1.8.0rc9.dist-info}/LICENSE +0 -0
  72. {mlrun-1.8.0rc5.dist-info → mlrun-1.8.0rc9.dist-info}/WHEEL +0 -0
  73. {mlrun-1.8.0rc5.dist-info → mlrun-1.8.0rc9.dist-info}/entry_points.txt +0 -0
  74. {mlrun-1.8.0rc5.dist-info → mlrun-1.8.0rc9.dist-info}/top_level.txt +0 -0
mlrun/__init__.py CHANGED
@@ -60,6 +60,7 @@ from .run import (
     get_pipeline,
     import_function,
     new_function,
+    retry_pipeline,
     wait_for_pipeline_completion,
 )
 from .runtimes import mounts, new_model_server
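The top-level package now re-exports retry_pipeline (implemented in mlrun/run.py, entry 44 above). A minimal usage sketch — the diff does not show the function's signature, so the run id and project arguments below are assumptions:

    import mlrun

    # Hypothetical call; check the 1.8.0rc9 API reference for the actual signature.
    mlrun.retry_pipeline("my-pipeline-run-id", project="my-project")  # assumed arguments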
mlrun/artifacts/__init__.py CHANGED
@@ -23,7 +23,7 @@ from .base import (
     get_artifact_meta,
 )
 from .dataset import DatasetArtifact, TableArtifact, update_dataset_meta
-from .document import DocumentArtifact, DocumentLoader, DocumentLoaderSpec
+from .document import DocumentArtifact, DocumentLoaderSpec, MLRunLoader
 from .manager import (
     ArtifactManager,
     ArtifactProducer,
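This is a breaking rename: DocumentLoader is now MLRunLoader, so downstream imports need a one-line change:

    # before (1.8.0rc5)
    from mlrun.artifacts import DocumentLoader

    # after (1.8.0rc9)
    from mlrun.artifacts import MLRunLoader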
mlrun/artifacts/base.py CHANGED
@@ -36,7 +36,16 @@ from ..utils import (
 
 
 class ArtifactMetadata(ModelObj):
-    _dict_fields = ["key", "project", "iter", "tree", "description", "hash", "tag"]
+    _dict_fields = [
+        "key",
+        "project",
+        "iter",
+        "tree",
+        "description",
+        "hash",
+        "tag",
+        "uid",
+    ]
     _extra_fields = ["updated", "labels"]
 
     def __init__(
@@ -48,6 +57,7 @@ class ArtifactMetadata(ModelObj):
         description=None,
         hash=None,
         tag=None,
+        uid=None,
     ):
         self.key = key
         self.project = project
@@ -58,6 +68,7 @@ class ArtifactMetadata(ModelObj):
         self.labels = {}
         self.updated = None
         self.tag = tag # temp store of the tag
+        self.uid = uid
 
     def base_dict(self):
         return super().to_dict()
@@ -368,6 +379,7 @@ class Artifact(ModelObj):
             iter=self.metadata.iter,
             tree=tree,
             tag=tag,
+            uid=self.uid,
         )
         return mlrun.datastore.get_store_uri(self._store_prefix, uri)
 
@@ -642,6 +654,14 @@ class Artifact(ModelObj):
     def hash(self, hash):
         self.metadata.hash = hash
 
+    @property
+    def uid(self):
+        return self.metadata.uid
+
+    @uid.setter
+    def uid(self, uid):
+        self.metadata.uid = uid
+
     def generate_target_path(self, artifact_path, producer):
         return generate_target_path(self, artifact_path, producer)
 
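Both getter and setter simply delegate to metadata.uid, so the two always stay in sync. A minimal sketch, given any Artifact instance:

    artifact.uid = "abc123"                  # stored on artifact.metadata.uid
    assert artifact.metadata.uid == "abc123"
    # the store URI built above now embeds the uid alongside tag/tree/iter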
mlrun/artifacts/document.py CHANGED
@@ -12,7 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import ast
 import re
 import tempfile
 from collections.abc import Iterator
@@ -74,14 +73,14 @@ class DocumentLoaderSpec(ModelObj):
         return loader
 
 
-class DocumentLoader:
+class MLRunLoader:
     """
     A factory class for creating instances of a dynamically defined document loader.
 
     Args:
         artifact_key (str): The key for the artifact to be logged.It can include '%%' which will be replaced
             by a hex-encoded version of the source path.
-        source_path (str): The source path of the document to be loaded.
+        local_path (str): The source path of the document to be loaded.
         loader_spec (DocumentLoaderSpec): Specification for the document loader.
         producer (Optional[Union[MlrunProject, str, MLClientCtx]], optional): The producer of the document
         upload (bool, optional): Flag indicating whether to upload the document.
@@ -104,7 +103,7 @@ class DocumentLoader:
         class DynamicDocumentLoader(BaseLoader):
             def __init__(
                 self,
-                source_path,
+                local_path,
                 loader_spec,
                 artifact_key,
                 producer,
@@ -112,12 +111,12 @@ class DocumentLoader:
             ):
                 self.producer = producer
                 self.artifact_key = (
-                    DocumentLoader.artifact_key_instance(artifact_key, source_path)
+                    MLRunLoader.artifact_key_instance(artifact_key, local_path)
                     if "%%" in artifact_key
                     else artifact_key
                 )
                 self.loader_spec = loader_spec
-                self.source_path = source_path
+                self.local_path = local_path
                 self.upload = upload
 
                 # Resolve the producer
@@ -129,16 +128,17 @@ class DocumentLoader:
             def lazy_load(self) -> Iterator["Document"]:  # noqa: F821
                 artifact = self.producer.log_document(
                     key=self.artifact_key,
-                    document_loader=self.loader_spec,
-                    src_path=self.source_path,
+                    document_loader_spec=self.loader_spec,
+                    local_path=self.local_path,
                     upload=self.upload,
                 )
-                yield artifact.to_langchain_documents()
+                res = artifact.to_langchain_documents()
+                yield res[0]
 
         # Return an instance of the dynamically defined subclass
         instance = DynamicDocumentLoader(
             artifact_key=artifact_key,
-            source_path=source_path,
+            local_path=source_path,
             loader_spec=loader_spec,
             producer=producer,
             upload=upload,
@@ -195,12 +195,15 @@ class DocumentArtifact(Artifact):
         def __init__(
             self,
             *args,
+            document_loader: Optional[DocumentLoaderSpec] = None,
+            collections: Optional[dict] = None,
+            original_source: Optional[str] = None,
             **kwargs,
         ):
             super().__init__(*args, **kwargs)
-            self.document_loader = None
-            self.collections = set()
-            self.original_source = None
+            self.document_loader = document_loader
+            self.collections = collections if collections is not None else {}
+            self.original_source = original_source
 
     """
     A specific artifact class inheriting from generic artifact, used to maintain Document meta-data.
@@ -216,14 +219,17 @@ class DocumentArtifact(Artifact):
 
     def __init__(
         self,
-        key=None,
-        document_loader: DocumentLoaderSpec = DocumentLoaderSpec(),
+        original_source: Optional[str] = None,
+        document_loader_spec: Optional[DocumentLoaderSpec] = None,
        **kwargs,
     ):
-        super().__init__(key, **kwargs)
-        self.spec.document_loader = document_loader.to_str()
-        if "src_path" in kwargs:
-            self.spec.original_source = kwargs["src_path"]
+        super().__init__(**kwargs)
+        self.spec.document_loader = (
+            document_loader_spec.to_dict()
+            if document_loader_spec
+            else self.spec.document_loader
+        )
+        self.spec.original_source = original_source or self.spec.original_source
 
     @property
     def spec(self) -> DocumentArtifactSpec:
@@ -234,17 +240,8 @@ class DocumentArtifact(Artifact):
         self._spec = self._verify_dict(
             spec, "spec", DocumentArtifact.DocumentArtifactSpec
         )
-        # _verify_dict doesn't handle set, so we need to convert it back
-        if isinstance(self._spec.collections, str):
-            self._spec.collections = ast.literal_eval(self._spec.collections)
-
-    @property
-    def inputs(self):
-        # To keep the interface consistent with the project.update_artifact() when we update the artifact
-        return None
 
-    @property
-    def source(self):
+    def get_source(self):
         return generate_artifact_uri(self.metadata.project, self.spec.db_key)
 
     def to_langchain_documents(
@@ -262,9 +259,8 @@ class DocumentArtifact(Artifact):
         Returns:
             list[Document]: A list of LangChain Document objects.
         """
-        dictionary = ast.literal_eval(self.spec.document_loader)
-        loader_spec = DocumentLoaderSpec.from_dict(dictionary)
 
+        loader_spec = DocumentLoaderSpec.from_dict(self.spec.document_loader)
         if self.get_target_path():
             with tempfile.NamedTemporaryFile() as tmp_file:
                 mlrun.datastore.store_manager.object(
@@ -272,8 +268,8 @@ class DocumentArtifact(Artifact):
                 ).download(tmp_file.name)
                 loader = loader_spec.make_loader(tmp_file.name)
                 documents = loader.load()
-        elif self.src_path:
-            loader = loader_spec.make_loader(self.src_path)
+        elif self.spec.original_source:
+            loader = loader_spec.make_loader(self.spec.original_source)
             documents = loader.load()
         else:
             raise ValueError(
@@ -281,6 +277,7 @@ class DocumentArtifact(Artifact):
             )
 
         results = []
+        idx = 0
        for document in documents:
             if splitter:
                 texts = splitter.split_text(document.page_content)
@@ -289,25 +286,51 @@ class DocumentArtifact(Artifact):
 
             metadata = document.metadata
 
-            metadata[self.METADATA_ORIGINAL_SOURCE_KEY] = self.src_path
-            metadata[self.METADATA_SOURCE_KEY] = self.source
+            metadata[self.METADATA_ORIGINAL_SOURCE_KEY] = self.spec.original_source
+            metadata[self.METADATA_SOURCE_KEY] = self.get_source()
             metadata[self.METADATA_ARTIFACT_URI_KEY] = self.uri
             if self.get_target_path():
                 metadata[self.METADATA_ARTIFACT_TARGET_PATH_KEY] = (
                     self.get_target_path()
                 )
 
-            for idx, text in enumerate(texts):
+            for text in texts:
                 metadata[self.METADATA_CHUNK_KEY] = str(idx)
                 doc = Document(
                     page_content=text,
-                    metadata=metadata,
+                    metadata=metadata.copy(),
                 )
                 results.append(doc)
+                idx = idx + 1
         return results
 
     def collection_add(self, collection_id: str) -> None:
-        self.spec.collections.add(collection_id)
+        """
+        Add a collection ID to the artifact's collection list.
+
+        Adds the specified collection ID to the artifact's collection mapping if it
+        doesn't already exist.
+        This method only modifies the client-side artifact object and does not persist
+        the changes to the MLRun DB. To save the changes permanently, you must call
+        project.update_artifact() after this method.
+
+        Args:
+            collection_id (str): The ID of the collection to add
+        """
+        if collection_id not in self.spec.collections:
+            self.spec.collections[collection_id] = "1"
 
     def collection_remove(self, collection_id: str) -> None:
-        return self.spec.collections.discard(collection_id)
+        """
+        Remove a collection ID from the artifact's collection list.
+
+        Removes the specified collection ID from the artifact's local collection mapping.
+        This method only modifies the client-side artifact object and does not persist
+        the changes to the MLRun DB. To save the changes permanently, you must call
+        project.update_artifact() or context.update_artifact() after this method.
+
+        Args:
+            collection_id (str): The ID of the collection to remove
+        """
+        if collection_id in self.spec.collections:
+            self.spec.collections.pop(collection_id)
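A practical consequence of the new docstrings: collection membership edits stay local until explicitly persisted. A minimal sketch, assuming a project named "my-project" and an existing document artifact keyed "my-document":

    import mlrun

    project = mlrun.get_or_create_project("my-project")
    artifact = project.get_artifact("my-document")  # hypothetical artifact key
    artifact.collection_add("my-collection-id")     # mutates the client-side object only
    project.update_artifact(artifact)               # persists the change to the MLRun DB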
mlrun/artifacts/manager.py CHANGED
@@ -306,7 +306,6 @@ class ArtifactManager:
             item.target_path = target_path
 
         item.before_log()
-        self.artifact_uris[key] = item.uri
 
         if ((upload is None and item.kind != "dir") or upload) and not item.is_inline():
             # before uploading the item, we want to ensure that its tags are valid,
@@ -315,7 +314,12 @@ class ArtifactManager:
             item.upload(artifact_path=artifact_path)
 
         if db_key:
-            self._log_to_db(db_key, project, producer.inputs, item)
+            artifact_uid = self._log_to_db(db_key, project, producer.inputs, item)
+            if artifact_uid is not None:
+                item.uid = artifact_uid
+        # Generate the artifact URI after logging to the database and retrieving the artifact UID, if available.
+        self.artifact_uris[key] = item.uri
+
         size = str(item.size) or "?"
         db_str = "Y" if (self.artifact_db and db_key) else "N"
         logger.debug(
@@ -327,20 +331,21 @@ class ArtifactManager:
         self.artifact_uris[item.key] = item.uri
         self._log_to_db(item.db_key, producer.project, producer.inputs, item)
 
-    def _log_to_db(self, key, project, sources, item, tag=None):
+    def _log_to_db(self, key, project, sources, item, tag=None) -> typing.Optional[str]:
         """
         log artifact to db
         :param key: Identifying key of the artifact.
         :param project: Project that the artifact belongs to.
-        :param sources: List of artifact sources ( Mainly passed from the producer.items ).
+        :param sources: List of artifact sources ( Mainly passed from the `producer.items` ).
         :param item: The actual artifact to store.
         :param tag: The name of the Tag of the artifact.
+        :return: The logged artifact uid.
         """
         if self.artifact_db:
             item.updated = None
             if sources:
                 item.sources = [{"name": k, "path": str(v)} for k, v in sources.items()]
-            self.artifact_db.store_artifact(
+            artifact_item = self.artifact_db.store_artifact(
                 key,
                 item.to_dict(),
                 iter=item.iter,
@@ -348,6 +353,8 @@ class ArtifactManager:
                 tag=tag,
                 project=project,
                 tree=item.tree,
+            if artifact_item:
+                return artifact_item.get("metadata", {}).get("uid")
 
     def link_artifact(
         self,
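End to end, a logged artifact now comes back carrying the server-assigned UID on its metadata, and the cached store URI is built only after that UID is known. A minimal sketch, assuming a project handle and treating the exact URI format as an implementation detail:

    artifact = project.log_artifact("my-data", local_path="data.csv")  # assumed key/path
    print(artifact.uid)  # server-assigned uid, populated by ArtifactManager.log()
    print(artifact.uri)  # store:// URI, now uid-qualified (see base.py above)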
mlrun/common/constants.py CHANGED
@@ -69,6 +69,7 @@ class MLRunInternalLabels:
     producer_type = f"{MLRUN_LABEL_PREFIX}producer-type"
     app_name = f"{MLRUN_LABEL_PREFIX}app-name"
     endpoint_id = f"{MLRUN_LABEL_PREFIX}endpoint-id"
+    endpoint_name = f"{MLRUN_LABEL_PREFIX}endpoint-name"
     host = "host"
     job_type = "job-type"
     kind = "kind"
mlrun/common/model_monitoring/__init__.py CHANGED
@@ -11,5 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-from .helpers import create_model_endpoint_uid
mlrun/common/model_monitoring/helpers.py CHANGED
@@ -17,11 +17,6 @@ import typing
 
 import mlrun.common
 import mlrun.common.schemas.model_monitoring.constants as mm_constants
-from mlrun.common.schemas.model_monitoring import (
-    EndpointUID,
-    FunctionURI,
-    VersionedModel,
-)
 
 FeatureStats = typing.NewType("FeatureStats", dict[str, dict[str, typing.Any]])
 Histogram = typing.NewType("Histogram", list[list])
@@ -31,29 +26,6 @@ BinEdges = typing.NewType("BinEdges", list[float])
 _MAX_FLOAT = sys.float_info.max
 
 
-def create_model_endpoint_uid(function_uri: str, versioned_model: str):
-    function_uri = FunctionURI.from_string(function_uri)
-    versioned_model = VersionedModel.from_string(versioned_model)
-
-    if (
-        not function_uri.project
-        or not function_uri.function
-        or not versioned_model.model
-    ):
-        raise ValueError("Both function_uri and versioned_model have to be initialized")
-
-    uid = EndpointUID(
-        function_uri.project,
-        function_uri.function,
-        function_uri.tag,
-        function_uri.hash_key,
-        versioned_model.model,
-        versioned_model.version,
-    )
-
-    return uid
-
-
 def parse_model_endpoint_project_prefix(path: str, project_name: str):
     return path.split(project_name, 1)[0] + project_name
 
mlrun/common/schemas/__init__.py CHANGED
@@ -14,6 +14,7 @@
 
 from .alert import (
     AlertActivation,
+    AlertActivations,
     AlertActiveState,
     AlertConfig,
     AlertNotification,
@@ -149,10 +150,6 @@ from .model_monitoring import (
     ModelEndpointMetadata,
     ModelEndpointSpec,
     ModelEndpointStatus,
-    ModelEndpointV2,
-    ModelEndpointV2Metadata,
-    ModelEndpointV2Spec,
-    ModelEndpointV2Status,
     ModelMonitoringMode,
     ModelMonitoringStoreKinds,
     MonitoringFunctionNames,
@@ -165,6 +162,7 @@ from .notification import (
     NotificationSeverity,
     NotificationState,
     NotificationStatus,
+    NotificationSummary,
     SetNotificationRequest,
 )
 from .object import ObjectKind, ObjectMetadata, ObjectSpec, ObjectStatus
mlrun/common/schemas/alert.py CHANGED
@@ -12,8 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+from collections import defaultdict
+from collections.abc import Iterator
 from datetime import datetime
-from typing import Annotated, Optional, Union
+from typing import Annotated, Any, Callable, Optional, Union
 
 import pydantic.v1
 
@@ -216,3 +218,77 @@ class AlertActivation(pydantic.v1.BaseModel):
     number_of_events: int
     notifications: list[notification_objects.NotificationState]
     reset_time: Optional[datetime] = None
+
+    def group_key(self, attributes: list[str]) -> Union[Any, tuple]:
+        """
+        Dynamically create a key for grouping based on the provided attributes.
+        - If there's only one attribute, return the value directly (not a single-element tuple).
+        - If there are multiple attributes, return them as a tuple for grouping.
+
+        This ensures grouping behaves intuitively without redundant tuple representations.
+        """
+        if len(attributes) == 1:
+            # Avoid single-element tuple like (high,) when only one grouping attribute is used
+            return getattr(self, attributes[0])
+        # Otherwise, return a tuple of all specified attributes
+        return tuple(getattr(self, attr) for attr in attributes)
+
+
+class AlertActivations(pydantic.v1.BaseModel):
+    activations: list[AlertActivation]
+    pagination: Optional[dict]
+
+    def __iter__(self) -> Iterator[AlertActivation]:
+        return iter(self.activations)
+
+    def __getitem__(self, index: int) -> AlertActivation:
+        return self.activations[index]
+
+    def __len__(self) -> int:
+        return len(self.activations)
+
+    def group_by(self, *attributes: str) -> dict:
+        """
+        Group alert activations by specified attributes.
+
+        Args:
+            :param attributes: Attributes to group by.
+
+        :returns: A dictionary where keys are tuples of attribute values and values are lists of
+            AlertActivation objects.
+
+        Example:
+            # Group by project and severity
+            grouped = activations.group_by("project", "severity")
+        """
+        grouped = defaultdict(list)
+        for activation in self.activations:
+            key = activation.group_key(attributes)
+            grouped[key].append(activation)
+        return dict(grouped)
+
+    def aggregate_by(
+        self,
+        group_by_attrs: list[str],
+        aggregation_function: Callable[[list[AlertActivation]], Any],
+    ) -> dict:
+        """
+        Aggregate alert activations by specified attributes using a given aggregation function.
+
+        Args:
+            :param group_by_attrs: Attributes to group by.
+            :param aggregation_function: Function to aggregate grouped activations.
+
+        :returns: A dictionary where keys are tuples of attribute values and values are the result
+            of the aggregation function.
+
+        Example:
+            # Aggregate by name and entity_id and count number of activations in each group
+            activations.aggregate_by(["name", "entity_id"], lambda activations: len(activations))
+        """
+        grouped = self.group_by(*group_by_attrs)
+        aggregated = {
+            key: aggregation_function(activations)
+            for key, activations in grouped.items()
+        }
+        return aggregated
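A usage sketch built from the docstring examples — the activations object would come from whatever list-activations call returns an AlertActivations instance:

    # Group activations by project and severity
    grouped = activations.group_by("project", "severity")

    # Count activations per (name, entity_id) pair
    counts = activations.aggregate_by(["name", "entity_id"], lambda acts: len(acts))

    # A single grouping attribute yields plain keys, not one-element tuples
    by_severity = activations.group_by("severity")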
mlrun/common/schemas/client_spec.py CHANGED
@@ -57,7 +57,6 @@ class ClientSpec(pydantic.v1.BaseModel):
     redis_url: typing.Optional[str]
     redis_type: typing.Optional[str]
     sql_url: typing.Optional[str]
-    model_endpoint_monitoring_endpoint_store_connection: typing.Optional[str]
     model_monitoring_tsdb_connection: typing.Optional[str]
     ce: typing.Optional[dict]
     # not passing them as one object as it possible client user would like to override only one of the params
mlrun/common/schemas/model_monitoring/__init__.py CHANGED
@@ -55,12 +55,6 @@ from .grafana import (
     GrafanaTable,
     GrafanaTimeSeriesTarget,
 )
-from .model_endpoint_v2 import (
-    ModelEndpointV2,
-    ModelEndpointV2Metadata,
-    ModelEndpointV2Spec,
-    ModelEndpointV2Status,
-)
 from .model_endpoints import (
     Features,
     FeatureValues,
mlrun/common/schemas/model_monitoring/constants.py CHANGED
@@ -41,6 +41,7 @@ class ModelEndpointSchema(MonitoringStrEnum):
 
     # spec
     FUNCTION_NAME = "function_name"
+    FUNCTION_TAG = "function_tag"
     FUNCTION_UID = "function_uid"
     MODEL_NAME = "model_name"
     MODEL_TAG = "model_tag"
@@ -48,23 +49,23 @@ class ModelEndpointSchema(MonitoringStrEnum):
     MODEL_UID = "model_uid"
     FEATURE_NAMES = "feature_names"
     LABEL_NAMES = "label_names"
-
-    # status
-    STATE = "state"
-    MONITORING_MODE = "monitoring_mode"
+    FEATURE_STATS = "feature_stats"
     MONITORING_FEATURE_SET_URI = "monitoring_feature_set_uri"
     CHILDREN = "children"
     CHILDREN_UIDS = "children_uids"
-    FIRST_REQUEST = "first_request"
     FUNCTION_URI = "function_uri"
     MODEL_URI = "model_uri"
 
+    # status
+    STATE = "state"
+    MONITORING_MODE = "monitoring_mode"
+    FIRST_REQUEST = "first_request"
+
     # status - operative
     LAST_REQUEST = "last_request"
-    DRIFT_STATUS = "drift_status"
+    RESULT_STATUS = "result_status"
     AVG_LATENCY = "avg_latency"
     ERROR_COUNT = "error_count"
-    FEATURE_STATS = "feature_stats"
     CURRENT_STATS = "current_stats"
     DRIFT_MEASURES = "drift_measures"
 
@@ -80,6 +81,7 @@ class EventFieldType:
     TIMESTAMP = "timestamp"
     # `endpoint_id` is deprecated as a field in the model endpoint schema since 1.3.1, replaced by `uid`.
     ENDPOINT_ID = "endpoint_id"
+    ENDPOINT_NAME = "endpoint_name"
     UID = "uid"
     ENDPOINT_TYPE = "endpoint_type"
     REQUEST_ID = "request_id"
@@ -148,10 +150,12 @@ class ApplicationEvent:
     START_INFER_TIME = "start_infer_time"
     END_INFER_TIME = "end_infer_time"
     ENDPOINT_ID = "endpoint_id"
+    ENDPOINT_NAME = "endpoint_name"
     OUTPUT_STREAM_URI = "output_stream_uri"
 
 
 class WriterEvent(MonitoringStrEnum):
+    ENDPOINT_NAME = "endpoint_name"
     APPLICATION_NAME = "application_name"
     ENDPOINT_ID = "endpoint_id"
     START_INFER_TIME = "start_infer_time"
@@ -222,7 +226,6 @@ class TSDBTarget(MonitoringStrEnum):
 
 
 class ProjectSecretKeys:
-    ENDPOINT_STORE_CONNECTION = "MODEL_MONITORING_ENDPOINT_STORE_CONNECTION"
     ACCESS_KEY = "MODEL_MONITORING_ACCESS_KEY"
     STREAM_PATH = "STREAM_PATH"
     TSDB_CONNECTION = "TSDB_CONNECTION"
@@ -230,7 +233,6 @@ class ProjectSecretKeys:
     @classmethod
     def mandatory_secrets(cls):
         return [
-            cls.ENDPOINT_STORE_CONNECTION,
             cls.STREAM_PATH,
             cls.TSDB_CONNECTION,
         ]
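With the endpoint-store secret gone from ProjectSecretKeys, mandatory_secrets() now returns only the stream and TSDB connection keys, as the diff above shows:

    from mlrun.common.schemas.model_monitoring.constants import ProjectSecretKeys

    ProjectSecretKeys.mandatory_secrets()
    # -> ["STREAM_PATH", "TSDB_CONNECTION"]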