mlrun 1.8.0rc4__py3-none-any.whl → 1.8.0rc6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mlrun has been flagged as potentially problematic.

Files changed (69)
  1. mlrun/__init__.py +4 -3
  2. mlrun/alerts/alert.py +129 -2
  3. mlrun/artifacts/__init__.py +1 -1
  4. mlrun/artifacts/base.py +12 -1
  5. mlrun/artifacts/document.py +59 -38
  6. mlrun/common/model_monitoring/__init__.py +0 -2
  7. mlrun/common/model_monitoring/helpers.py +0 -28
  8. mlrun/common/schemas/__init__.py +1 -4
  9. mlrun/common/schemas/alert.py +3 -0
  10. mlrun/common/schemas/artifact.py +4 -0
  11. mlrun/common/schemas/client_spec.py +0 -1
  12. mlrun/common/schemas/model_monitoring/__init__.py +0 -6
  13. mlrun/common/schemas/model_monitoring/constants.py +11 -9
  14. mlrun/common/schemas/model_monitoring/model_endpoints.py +77 -149
  15. mlrun/common/schemas/notification.py +6 -0
  16. mlrun/config.py +0 -2
  17. mlrun/datastore/datastore_profile.py +57 -17
  18. mlrun/datastore/vectorstore.py +67 -59
  19. mlrun/db/base.py +22 -18
  20. mlrun/db/factory.py +0 -3
  21. mlrun/db/httpdb.py +122 -150
  22. mlrun/db/nopdb.py +33 -17
  23. mlrun/execution.py +43 -29
  24. mlrun/model.py +7 -0
  25. mlrun/model_monitoring/__init__.py +3 -2
  26. mlrun/model_monitoring/api.py +40 -43
  27. mlrun/model_monitoring/applications/_application_steps.py +4 -2
  28. mlrun/model_monitoring/applications/base.py +65 -6
  29. mlrun/model_monitoring/applications/context.py +64 -33
  30. mlrun/model_monitoring/applications/evidently_base.py +0 -1
  31. mlrun/model_monitoring/applications/histogram_data_drift.py +2 -6
  32. mlrun/model_monitoring/controller.py +43 -37
  33. mlrun/model_monitoring/db/__init__.py +0 -2
  34. mlrun/model_monitoring/db/tsdb/base.py +2 -1
  35. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +2 -1
  36. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +43 -0
  37. mlrun/model_monitoring/helpers.py +12 -66
  38. mlrun/model_monitoring/stream_processing.py +83 -270
  39. mlrun/model_monitoring/writer.py +1 -10
  40. mlrun/projects/project.py +87 -74
  41. mlrun/runtimes/nuclio/function.py +7 -6
  42. mlrun/runtimes/nuclio/serving.py +7 -1
  43. mlrun/serving/routers.py +158 -145
  44. mlrun/serving/server.py +6 -0
  45. mlrun/serving/states.py +2 -0
  46. mlrun/serving/v2_serving.py +69 -60
  47. mlrun/utils/helpers.py +14 -30
  48. mlrun/utils/notifications/notification/mail.py +36 -9
  49. mlrun/utils/notifications/notification_pusher.py +34 -13
  50. mlrun/utils/version/version.json +2 -2
  51. {mlrun-1.8.0rc4.dist-info → mlrun-1.8.0rc6.dist-info}/METADATA +5 -4
  52. {mlrun-1.8.0rc4.dist-info → mlrun-1.8.0rc6.dist-info}/RECORD +56 -69
  53. mlrun/common/schemas/model_monitoring/model_endpoint_v2.py +0 -149
  54. mlrun/model_monitoring/db/stores/__init__.py +0 -136
  55. mlrun/model_monitoring/db/stores/base/__init__.py +0 -15
  56. mlrun/model_monitoring/db/stores/base/store.py +0 -154
  57. mlrun/model_monitoring/db/stores/sqldb/__init__.py +0 -13
  58. mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +0 -46
  59. mlrun/model_monitoring/db/stores/sqldb/models/base.py +0 -93
  60. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +0 -47
  61. mlrun/model_monitoring/db/stores/sqldb/models/sqlite.py +0 -25
  62. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +0 -408
  63. mlrun/model_monitoring/db/stores/v3io_kv/__init__.py +0 -13
  64. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +0 -464
  65. mlrun/model_monitoring/model_endpoint.py +0 -120
  66. {mlrun-1.8.0rc4.dist-info → mlrun-1.8.0rc6.dist-info}/LICENSE +0 -0
  67. {mlrun-1.8.0rc4.dist-info → mlrun-1.8.0rc6.dist-info}/WHEEL +0 -0
  68. {mlrun-1.8.0rc4.dist-info → mlrun-1.8.0rc6.dist-info}/entry_points.txt +0 -0
  69. {mlrun-1.8.0rc4.dist-info → mlrun-1.8.0rc6.dist-info}/top_level.txt +0 -0
mlrun/__init__.py CHANGED
@@ -39,6 +39,7 @@ from .execution import MLClientCtx
  from .model import RunObject, RunTemplate, new_task
  from .package import ArtifactType, DefaultPackager, Packager, handler
  from .projects import (
+ MlrunProject,
  ProjectMetadata,
  build_function,
  deploy_function,
@@ -162,10 +163,10 @@ def set_environment(
  return mlconf.default_project, mlconf.artifact_path
 
 
- def get_current_project(silent=False):
+ def get_current_project(silent: bool = False) -> Optional[MlrunProject]:
  if not pipeline_context.project and not silent:
  raise MLRunInvalidArgumentError(
- "current project is not initialized, use new, get or load project methods first"
+ "No current project is initialized. Use new, get or load project functions first."
  )
  return pipeline_context.project
 
@@ -182,7 +183,7 @@ def get_sample_path(subpath=""):
  return samples_path
 
 
- def set_env_from_file(env_file: str, return_dict: bool = False):
+ def set_env_from_file(env_file: str, return_dict: bool = False) -> Optional[dict]:
  """Read and set and/or return environment variables from a file
  the env file should have lines in the form KEY=VALUE, comment line start with "#"
 
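Usage sketch for the updated get_current_project signature, which is now annotated to return Optional[MlrunProject] (illustrative only, not taken from the package; assumes a fresh Python session with mlrun installed):

    import mlrun

    # With no project loaded yet, silent=True suppresses MLRunInvalidArgumentError
    # and simply returns None (pipeline_context.project is unset).
    project = mlrun.get_current_project(silent=True)
    if project is None:
        print("no current project; call new_project / get_or_create_project / load_project first")
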
mlrun/alerts/alert.py CHANGED
@@ -11,7 +11,7 @@
  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  # See the License for the specific language governing permissions and
  # limitations under the License.
-
+ from datetime import datetime
  from typing import Optional, Union
 
  import mlrun
@@ -30,6 +30,7 @@ class AlertConfig(ModelObj):
  "state",
  "count",
  "created",
+ "updated",
  ]
  _fields_to_serialize = ModelObj._fields_to_serialize + [
  "entities",
@@ -55,6 +56,7 @@ class AlertConfig(ModelObj):
  state: alert_objects.AlertActiveState = None,
  created: Optional[str] = None,
  count: Optional[int] = None,
+ updated: Optional[str] = None,
  ):
  """Alert config object
 
@@ -118,6 +120,7 @@ class AlertConfig(ModelObj):
  :param state: State of the alert, may be active/inactive (user should not supply it)
  :param created: When the alert is created (user should not supply it)
  :param count: Internal counter of the alert (user should not supply it)
+ :param updated: The last update time of the alert (user should not supply it)
  """
  self.project = project
  self.name = name
@@ -131,12 +134,39 @@ class AlertConfig(ModelObj):
  self.entities = entities
  self.id = id
  self.state = state
- self.created = created
+ self._created = created
  self.count = count
+ self._updated = updated
 
  if template:
  self._apply_template(template)
 
+ @property
+ def created(self) -> datetime:
+ """
+ Get the `created` field as a datetime object.
+ """
+ if isinstance(self._created, str):
+ return datetime.fromisoformat(self._created)
+ return self._created
+
+ @created.setter
+ def created(self, created):
+ self._created = created
+
+ @property
+ def updated(self) -> datetime:
+ """
+ Get the `updated` field as a datetime object.
+ """
+ if isinstance(self._updated, str):
+ return datetime.fromisoformat(self._updated)
+ return self._updated
+
+ @updated.setter
+ def updated(self, updated):
+ self._updated = updated
+
  def validate_required_fields(self):
  if not self.name:
  raise mlrun.errors.MLRunInvalidArgumentError("Alert name must be provided")
@@ -253,3 +283,100 @@ class AlertConfig(ModelObj):
  self.criteria = self.criteria or template.criteria
  self.trigger = self.trigger or template.trigger
  self.reset_policy = self.reset_policy or template.reset_policy
+
+ def list_activations(
+ self,
+ since: Optional[datetime] = None,
+ until: Optional[datetime] = None,
+ from_last_update: bool = False,
+ ) -> list[mlrun.common.schemas.alert.AlertActivation]:
+ """
+ Retrieve a list of all alert activations.
+
+ :param since: Filters for alert activations occurring after this timestamp.
+ :param until: Filters for alert activations occurring before this timestamp.
+ :param from_last_update: If set to True, retrieves alert activations since the alert's last update time.
+ if both since and from_last_update=True are provided, from_last_update takes precedence
+ and the since value will be overridden by the alert's last update timestamp.
+
+ :returns: A list of alert activations matching the provided filters.
+ """
+ db = mlrun.get_run_db()
+ if from_last_update and self._updated:
+ since = self.updated
+
+ return db.list_alert_activations(
+ project=self.project,
+ name=self.name,
+ since=since,
+ until=until,
+ )
+
+ def paginated_list_activations(
+ self,
+ *args,
+ page: Optional[int] = None,
+ page_size: Optional[int] = None,
+ page_token: Optional[str] = None,
+ from_last_update: bool = False,
+ **kwargs,
+ ) -> tuple[mlrun.common.schemas.alert.AlertActivation, Optional[str]]:
+ """
+ List alerts activations with support for pagination and various filtering options.
+
+ This method retrieves a paginated list of alert activations based on the specified filter parameters.
+ Pagination is controlled using the `page`, `page_size`, and `page_token` parameters. The method
+ will return a list of alert activations that match the filtering criteria provided.
+
+ For detailed information about the parameters, refer to the list_activations method:
+ See :py:func:`~list_activations` for more details.
+
+ Examples::
+
+ # Fetch first page of alert activations with page size of 5
+ alert_activations, token = alert_config.paginated_list_activations(page_size=5)
+ # Fetch next page using the pagination token from the previous response
+ alert_activations, token = alert_config.paginated_list_activations(
+ page_token=token
+ )
+ # Fetch alert activations for a specific page (e.g., page 3)
+ alert_activations, token = alert_config.paginated_list_activations(
+ page=3, page_size=5
+ )
+
+ # Automatically iterate over all pages without explicitly specifying the page number
+ alert_activations = []
+ token = None
+ while True:
+ page_alert_activations, token = alert_config.paginated_list_activations(
+ page_token=token, page_size=5
+ )
+ alert_activations.extend(page_alert_activations)
+
+ # If token is None and page_alert_activations is empty, we've reached the end (no more activations).
+ # If token is None and page_alert_activations is not empty, we've fetched the last page of activations.
+ if not token:
+ break
+ print(f"Total alert activations retrieved: {len(alert_activations)}")
+
+ :param page: The page number to retrieve. If not provided, the next page will be retrieved.
+ :param page_size: The number of items per page to retrieve. Up to `page_size` responses are expected.
+ :param page_token: A pagination token used to retrieve the next page of results. Should not be provided
+ for the first request.
+ :param from_last_update: If set to True, retrieves alert activations since the alert's last update time.
+
+ :returns: A tuple containing the list of alert activations and an optional `page_token` for pagination.
+ """
+ if from_last_update and self._updated:
+ kwargs["since"] = self.updated
+
+ db = mlrun.get_run_db()
+ return db.paginated_list_alert_activations(
+ *args,
+ project=self.project,
+ name=self.name,
+ page=page,
+ page_size=page_size,
+ page_token=page_token,
+ **kwargs,
+ )
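
Usage sketch for the new activation helpers (illustrative only; the project name, the alert name "error-rate", and project.get_alert_config are assumptions, not taken from this diff):

    from datetime import datetime, timedelta, timezone

    import mlrun

    # Assumed setup: the project and an alert config named "error-rate" already exist.
    project = mlrun.get_or_create_project("my-project", context="./")
    alert_config = project.get_alert_config("error-rate")

    # Activations from the last day.
    recent = alert_config.list_activations(
        since=datetime.now(tz=timezone.utc) - timedelta(days=1)
    )

    # Activations since the alert's last update; overrides `since` when both are given.
    latest = alert_config.list_activations(from_last_update=True)
    print(len(recent), len(latest))
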
mlrun/artifacts/__init__.py CHANGED
@@ -23,7 +23,7 @@ from .base import (
  get_artifact_meta,
  )
  from .dataset import DatasetArtifact, TableArtifact, update_dataset_meta
- from .document import DocumentArtifact, DocumentLoader, DocumentLoaderSpec
+ from .document import DocumentArtifact, DocumentLoaderSpec, MLRunLoader
  from .manager import (
  ArtifactManager,
  ArtifactProducer,
mlrun/artifacts/base.py CHANGED
@@ -36,7 +36,16 @@ from ..utils import (
 
 
  class ArtifactMetadata(ModelObj):
- _dict_fields = ["key", "project", "iter", "tree", "description", "hash", "tag"]
+ _dict_fields = [
+ "key",
+ "project",
+ "iter",
+ "tree",
+ "description",
+ "hash",
+ "tag",
+ "uid",
+ ]
  _extra_fields = ["updated", "labels"]
 
  def __init__(
@@ -48,6 +57,7 @@ class ArtifactMetadata(ModelObj):
  description=None,
  hash=None,
  tag=None,
+ uid=None,
  ):
  self.key = key
  self.project = project
@@ -58,6 +68,7 @@ class ArtifactMetadata(ModelObj):
  self.labels = {}
  self.updated = None
  self.tag = tag # temp store of the tag
+ self.uid = uid
 
  def base_dict(self):
  return super().to_dict()
mlrun/artifacts/document.py CHANGED
@@ -12,7 +12,6 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.
 
- import ast
  import re
  import tempfile
  from collections.abc import Iterator
@@ -74,14 +73,14 @@ class DocumentLoaderSpec(ModelObj):
  return loader
 
 
- class DocumentLoader:
+ class MLRunLoader:
  """
  A factory class for creating instances of a dynamically defined document loader.
 
  Args:
  artifact_key (str): The key for the artifact to be logged.It can include '%%' which will be replaced
  by a hex-encoded version of the source path.
- source_path (str): The source path of the document to be loaded.
+ local_path (str): The source path of the document to be loaded.
  loader_spec (DocumentLoaderSpec): Specification for the document loader.
  producer (Optional[Union[MlrunProject, str, MLClientCtx]], optional): The producer of the document
  upload (bool, optional): Flag indicating whether to upload the document.
@@ -104,7 +103,7 @@ class DocumentLoader:
  class DynamicDocumentLoader(BaseLoader):
  def __init__(
  self,
- source_path,
+ local_path,
  loader_spec,
  artifact_key,
  producer,
@@ -112,12 +111,12 @@ class DocumentLoader:
  ):
  self.producer = producer
  self.artifact_key = (
- DocumentLoader.artifact_key_instance(artifact_key, source_path)
+ MLRunLoader.artifact_key_instance(artifact_key, local_path)
  if "%%" in artifact_key
  else artifact_key
  )
  self.loader_spec = loader_spec
- self.source_path = source_path
+ self.local_path = local_path
  self.upload = upload
 
  # Resolve the producer
@@ -129,16 +128,17 @@ class DocumentLoader:
  def lazy_load(self) -> Iterator["Document"]: # noqa: F821
  artifact = self.producer.log_document(
  key=self.artifact_key,
- document_loader=self.loader_spec,
- src_path=self.source_path,
+ document_loader_spec=self.loader_spec,
+ local_path=self.local_path,
  upload=self.upload,
  )
- yield artifact.to_langchain_documents()
+ res = artifact.to_langchain_documents()
+ yield res[0]
 
  # Return an instance of the dynamically defined subclass
  instance = DynamicDocumentLoader(
  artifact_key=artifact_key,
- source_path=source_path,
+ local_path=source_path,
  loader_spec=loader_spec,
  producer=producer,
  upload=upload,
@@ -195,12 +195,15 @@ class DocumentArtifact(Artifact):
  def __init__(
  self,
  *args,
+ document_loader: Optional[DocumentLoaderSpec] = None,
+ collections: Optional[dict] = None,
+ original_source: Optional[str] = None,
  **kwargs,
  ):
  super().__init__(*args, **kwargs)
- self.document_loader = None
- self.collections = set()
- self.original_source = None
+ self.document_loader = document_loader
+ self.collections = collections if collections is not None else {}
+ self.original_source = original_source
 
  """
  A specific artifact class inheriting from generic artifact, used to maintain Document meta-data.
@@ -216,14 +219,17 @@ class DocumentArtifact(Artifact):
 
  def __init__(
  self,
- key=None,
- document_loader: DocumentLoaderSpec = DocumentLoaderSpec(),
+ original_source: Optional[str] = None,
+ document_loader_spec: Optional[DocumentLoaderSpec] = None,
  **kwargs,
  ):
- super().__init__(key, **kwargs)
- self.spec.document_loader = document_loader.to_str()
- if "src_path" in kwargs:
- self.spec.original_source = kwargs["src_path"]
+ super().__init__(**kwargs)
+ self.spec.document_loader = (
+ document_loader_spec.to_dict()
+ if document_loader_spec
+ else self.spec.document_loader
+ )
+ self.spec.original_source = original_source or self.spec.original_source
 
  @property
  def spec(self) -> DocumentArtifactSpec:
@@ -234,17 +240,8 @@ class DocumentArtifact(Artifact):
  self._spec = self._verify_dict(
  spec, "spec", DocumentArtifact.DocumentArtifactSpec
  )
- # _verify_dict doesn't handle set, so we need to convert it back
- if isinstance(self._spec.collections, str):
- self._spec.collections = ast.literal_eval(self._spec.collections)
-
- @property
- def inputs(self):
- # To keep the interface consistent with the project.update_artifact() when we update the artifact
- return None
 
- @property
- def source(self):
+ def get_source(self):
  return generate_artifact_uri(self.metadata.project, self.spec.db_key)
 
  def to_langchain_documents(
@@ -262,9 +259,8 @@
  Returns:
  list[Document]: A list of LangChain Document objects.
  """
- dictionary = ast.literal_eval(self.spec.document_loader)
- loader_spec = DocumentLoaderSpec.from_dict(dictionary)
 
+ loader_spec = DocumentLoaderSpec.from_dict(self.spec.document_loader)
  if self.get_target_path():
  with tempfile.NamedTemporaryFile() as tmp_file:
  mlrun.datastore.store_manager.object(
@@ -272,8 +268,8 @@
  ).download(tmp_file.name)
  loader = loader_spec.make_loader(tmp_file.name)
  documents = loader.load()
- elif self.src_path:
- loader = loader_spec.make_loader(self.src_path)
+ elif self.spec.original_source:
+ loader = loader_spec.make_loader(self.spec.original_source)
  documents = loader.load()
  else:
  raise ValueError(
@@ -289,8 +285,8 @@
 
  metadata = document.metadata
 
- metadata[self.METADATA_ORIGINAL_SOURCE_KEY] = self.src_path
- metadata[self.METADATA_SOURCE_KEY] = self.source
+ metadata[self.METADATA_ORIGINAL_SOURCE_KEY] = self.spec.original_source
+ metadata[self.METADATA_SOURCE_KEY] = self.get_source()
  metadata[self.METADATA_ARTIFACT_URI_KEY] = self.uri
  if self.get_target_path():
  metadata[self.METADATA_ARTIFACT_TARGET_PATH_KEY] = (
@@ -301,13 +297,38 @@
  metadata[self.METADATA_CHUNK_KEY] = str(idx)
  doc = Document(
  page_content=text,
- metadata=metadata,
+ metadata=metadata.copy(),
  )
  results.append(doc)
  return results
 
  def collection_add(self, collection_id: str) -> None:
- self.spec.collections.add(collection_id)
+ """
+ Add a collection ID to the artifact's collection list.
+
+ Adds the specified collection ID to the artifact's collection mapping if it
+ doesn't already exist.
+ This method only modifies the client-side artifact object and does not persist
+ the changes to the MLRun DB. To save the changes permanently, you must call
+ project.update_artifact() after this method.
+
+ Args:
+ collection_id (str): The ID of the collection to add
+ """
+ if collection_id not in self.spec.collections:
+ self.spec.collections[collection_id] = "1"
 
  def collection_remove(self, collection_id: str) -> None:
- return self.spec.collections.discard(collection_id)
+ """
+ Remove a collection ID from the artifact's collection list.
+
+ Removes the specified collection ID from the artifact's local collection mapping.
+ This method only modifies the client-side artifact object and does not persist
+ the changes to the MLRun DB. To save the changes permanently, you must call
+ project.update_artifact() or context.update_artifact() after this method.
+
+ Args:
+ collection_id (str): The ID of the collection to remove
+ """
+ if collection_id in self.spec.collections:
+ self.spec.collections.pop(collection_id)
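
Usage sketch for the collection bookkeeping described in the collection_add/collection_remove docstrings above (illustrative only; the artifact key "doc" and collection ID "my-collection" are assumptions):

    import mlrun
    from mlrun.artifacts import DocumentArtifact  # MLRunLoader replaces DocumentLoader in rc6

    # Assumed setup: a project with a previously logged document artifact under the key "doc".
    project = mlrun.get_or_create_project("my-project", context="./")
    artifact = project.get_artifact("doc")

    if isinstance(artifact, DocumentArtifact):
        artifact.collection_add("my-collection")  # client-side change only
        project.update_artifact(artifact)  # persist the updated collections mapping to the MLRun DB
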
mlrun/common/model_monitoring/__init__.py CHANGED
@@ -11,5 +11,3 @@
  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  # See the License for the specific language governing permissions and
  # limitations under the License.
-
- from .helpers import create_model_endpoint_uid
mlrun/common/model_monitoring/helpers.py CHANGED
@@ -17,11 +17,6 @@ import typing
 
  import mlrun.common
  import mlrun.common.schemas.model_monitoring.constants as mm_constants
- from mlrun.common.schemas.model_monitoring import (
- EndpointUID,
- FunctionURI,
- VersionedModel,
- )
 
  FeatureStats = typing.NewType("FeatureStats", dict[str, dict[str, typing.Any]])
  Histogram = typing.NewType("Histogram", list[list])
@@ -31,29 +26,6 @@ BinEdges = typing.NewType("BinEdges", list[float])
  _MAX_FLOAT = sys.float_info.max
 
 
- def create_model_endpoint_uid(function_uri: str, versioned_model: str):
- function_uri = FunctionURI.from_string(function_uri)
- versioned_model = VersionedModel.from_string(versioned_model)
-
- if (
- not function_uri.project
- or not function_uri.function
- or not versioned_model.model
- ):
- raise ValueError("Both function_uri and versioned_model have to be initialized")
-
- uid = EndpointUID(
- function_uri.project,
- function_uri.function,
- function_uri.tag,
- function_uri.hash_key,
- versioned_model.model,
- versioned_model.version,
- )
-
- return uid
-
-
  def parse_model_endpoint_project_prefix(path: str, project_name: str):
  return path.split(project_name, 1)[0] + project_name
 
mlrun/common/schemas/__init__.py CHANGED
@@ -149,10 +149,6 @@ from .model_monitoring import (
  ModelEndpointMetadata,
  ModelEndpointSpec,
  ModelEndpointStatus,
- ModelEndpointV2,
- ModelEndpointV2Metadata,
- ModelEndpointV2Spec,
- ModelEndpointV2Status,
  ModelMonitoringMode,
  ModelMonitoringStoreKinds,
  MonitoringFunctionNames,
@@ -165,6 +161,7 @@ from .notification import (
  NotificationSeverity,
  NotificationState,
  NotificationStatus,
+ NotificationSummary,
  SetNotificationRequest,
  )
  from .object import ObjectKind, ObjectMetadata, ObjectSpec, ObjectStatus
mlrun/common/schemas/alert.py CHANGED
@@ -156,6 +156,7 @@ class AlertConfig(pydantic.v1.BaseModel):
  notifications: pydantic.v1.conlist(AlertNotification, min_items=1)
  state: AlertActiveState = AlertActiveState.INACTIVE
  count: Optional[int] = 0
+ updated: datetime = None
 
  def get_raw_notifications(self) -> list[notification_objects.Notification]:
  return [
@@ -203,6 +204,7 @@ class AlertTemplate(
 
 
  class AlertActivation(pydantic.v1.BaseModel):
+ id: int
  name: str
  project: str
  severity: AlertSeverity
@@ -213,3 +215,4 @@ class AlertActivation(pydantic.v1.BaseModel):
  event_kind: EventKind
  number_of_events: int
  notifications: list[notification_objects.NotificationState]
+ reset_time: Optional[datetime] = None
mlrun/common/schemas/artifact.py CHANGED
@@ -25,6 +25,7 @@ from .object import ObjectStatus
  class ArtifactCategories(mlrun.common.types.StrEnum):
  model = "model"
  dataset = "dataset"
+ document = "document"
  other = "other"
 
  # we define the link as a category to prevent import cycles, but it's not a real category
@@ -38,11 +39,14 @@ class ArtifactCategories(mlrun.common.types.StrEnum):
  return [ArtifactCategories.model.value, link_kind], False
  if self.value == ArtifactCategories.dataset.value:
  return [ArtifactCategories.dataset.value, link_kind], False
+ if self.value == ArtifactCategories.document.value:
+ return [ArtifactCategories.document.value, link_kind], False
  if self.value == ArtifactCategories.other.value:
  return (
  [
  ArtifactCategories.model.value,
  ArtifactCategories.dataset.value,
+ ArtifactCategories.document.value,
  ],
  True,
  )
mlrun/common/schemas/client_spec.py CHANGED
@@ -57,7 +57,6 @@ class ClientSpec(pydantic.v1.BaseModel):
  redis_url: typing.Optional[str]
  redis_type: typing.Optional[str]
  sql_url: typing.Optional[str]
- model_endpoint_monitoring_endpoint_store_connection: typing.Optional[str]
  model_monitoring_tsdb_connection: typing.Optional[str]
  ce: typing.Optional[dict]
  # not passing them as one object as it possible client user would like to override only one of the params
mlrun/common/schemas/model_monitoring/__init__.py CHANGED
@@ -55,12 +55,6 @@ from .grafana import (
  GrafanaTable,
  GrafanaTimeSeriesTarget,
  )
- from .model_endpoint_v2 import (
- ModelEndpointV2,
- ModelEndpointV2Metadata,
- ModelEndpointV2Spec,
- ModelEndpointV2Status,
- )
  from .model_endpoints import (
  Features,
  FeatureValues,
mlrun/common/schemas/model_monitoring/constants.py CHANGED
@@ -41,6 +41,7 @@ class ModelEndpointSchema(MonitoringStrEnum):
 
  # spec
  FUNCTION_NAME = "function_name"
+ FUNCTION_TAG = "function_tag"
  FUNCTION_UID = "function_uid"
  MODEL_NAME = "model_name"
  MODEL_TAG = "model_tag"
@@ -48,23 +49,23 @@ class ModelEndpointSchema(MonitoringStrEnum):
  MODEL_UID = "model_uid"
  FEATURE_NAMES = "feature_names"
  LABEL_NAMES = "label_names"
-
- # status
- STATE = "state"
- MONITORING_MODE = "monitoring_mode"
+ FEATURE_STATS = "feature_stats"
  MONITORING_FEATURE_SET_URI = "monitoring_feature_set_uri"
  CHILDREN = "children"
  CHILDREN_UIDS = "children_uids"
- FIRST_REQUEST = "first_request"
  FUNCTION_URI = "function_uri"
  MODEL_URI = "model_uri"
 
+ # status
+ STATE = "state"
+ MONITORING_MODE = "monitoring_mode"
+ FIRST_REQUEST = "first_request"
+
  # status - operative
  LAST_REQUEST = "last_request"
- DRIFT_STATUS = "drift_status"
+ RESULT_STATUS = "result_status"
  AVG_LATENCY = "avg_latency"
  ERROR_COUNT = "error_count"
- FEATURE_STATS = "feature_stats"
  CURRENT_STATS = "current_stats"
  DRIFT_MEASURES = "drift_measures"
 
@@ -80,6 +81,7 @@ class EventFieldType:
  TIMESTAMP = "timestamp"
  # `endpoint_id` is deprecated as a field in the model endpoint schema since 1.3.1, replaced by `uid`.
  ENDPOINT_ID = "endpoint_id"
+ ENDPOINT_NAME = "endpoint_name"
  UID = "uid"
  ENDPOINT_TYPE = "endpoint_type"
  REQUEST_ID = "request_id"
@@ -148,10 +150,12 @@ class ApplicationEvent:
  START_INFER_TIME = "start_infer_time"
  END_INFER_TIME = "end_infer_time"
  ENDPOINT_ID = "endpoint_id"
+ ENDPOINT_NAME = "endpoint_name"
  OUTPUT_STREAM_URI = "output_stream_uri"
 
 
  class WriterEvent(MonitoringStrEnum):
+ ENDPOINT_NAME = "endpoint_name"
  APPLICATION_NAME = "application_name"
  ENDPOINT_ID = "endpoint_id"
  START_INFER_TIME = "start_infer_time"
@@ -222,7 +226,6 @@ class TSDBTarget(MonitoringStrEnum):
 
 
  class ProjectSecretKeys:
- ENDPOINT_STORE_CONNECTION = "MODEL_MONITORING_ENDPOINT_STORE_CONNECTION"
  ACCESS_KEY = "MODEL_MONITORING_ACCESS_KEY"
  STREAM_PATH = "STREAM_PATH"
  TSDB_CONNECTION = "TSDB_CONNECTION"
@@ -230,7 +233,6 @@ class ProjectSecretKeys:
  @classmethod
  def mandatory_secrets(cls):
  return [
- cls.ENDPOINT_STORE_CONNECTION,
  cls.STREAM_PATH,
  cls.TSDB_CONNECTION,
  ]