mlrun 1.7.1rc4__py3-none-any.whl → 1.8.0rc8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of mlrun has been flagged as potentially problematic.
Files changed (257)
  1. mlrun/__init__.py +23 -21
  2. mlrun/__main__.py +3 -3
  3. mlrun/alerts/alert.py +148 -14
  4. mlrun/artifacts/__init__.py +1 -2
  5. mlrun/artifacts/base.py +46 -12
  6. mlrun/artifacts/dataset.py +16 -16
  7. mlrun/artifacts/document.py +334 -0
  8. mlrun/artifacts/manager.py +15 -13
  9. mlrun/artifacts/model.py +66 -53
  10. mlrun/common/constants.py +7 -0
  11. mlrun/common/formatters/__init__.py +1 -0
  12. mlrun/common/formatters/feature_set.py +1 -0
  13. mlrun/common/formatters/function.py +1 -0
  14. mlrun/{model_monitoring/db/stores/base/__init__.py → common/formatters/model_endpoint.py} +16 -1
  15. mlrun/common/formatters/pipeline.py +1 -2
  16. mlrun/common/formatters/project.py +9 -0
  17. mlrun/common/model_monitoring/__init__.py +0 -5
  18. mlrun/common/model_monitoring/helpers.py +1 -29
  19. mlrun/common/runtimes/constants.py +1 -2
  20. mlrun/common/schemas/__init__.py +6 -2
  21. mlrun/common/schemas/alert.py +111 -19
  22. mlrun/common/schemas/api_gateway.py +3 -3
  23. mlrun/common/schemas/artifact.py +11 -7
  24. mlrun/common/schemas/auth.py +6 -4
  25. mlrun/common/schemas/background_task.py +7 -7
  26. mlrun/common/schemas/client_spec.py +2 -3
  27. mlrun/common/schemas/clusterization_spec.py +2 -2
  28. mlrun/common/schemas/common.py +53 -3
  29. mlrun/common/schemas/constants.py +15 -0
  30. mlrun/common/schemas/datastore_profile.py +1 -1
  31. mlrun/common/schemas/feature_store.py +9 -9
  32. mlrun/common/schemas/frontend_spec.py +4 -4
  33. mlrun/common/schemas/function.py +10 -10
  34. mlrun/common/schemas/hub.py +1 -1
  35. mlrun/common/schemas/k8s.py +3 -3
  36. mlrun/common/schemas/memory_reports.py +3 -3
  37. mlrun/common/schemas/model_monitoring/__init__.py +2 -1
  38. mlrun/common/schemas/model_monitoring/constants.py +66 -14
  39. mlrun/common/schemas/model_monitoring/grafana.py +1 -1
  40. mlrun/common/schemas/model_monitoring/model_endpoints.py +91 -147
  41. mlrun/common/schemas/notification.py +24 -3
  42. mlrun/common/schemas/object.py +1 -1
  43. mlrun/common/schemas/pagination.py +4 -4
  44. mlrun/common/schemas/partition.py +137 -0
  45. mlrun/common/schemas/pipeline.py +2 -2
  46. mlrun/common/schemas/project.py +25 -17
  47. mlrun/common/schemas/runs.py +2 -2
  48. mlrun/common/schemas/runtime_resource.py +5 -5
  49. mlrun/common/schemas/schedule.py +1 -1
  50. mlrun/common/schemas/secret.py +1 -1
  51. mlrun/common/schemas/tag.py +3 -3
  52. mlrun/common/schemas/workflow.py +5 -5
  53. mlrun/config.py +67 -10
  54. mlrun/data_types/__init__.py +0 -2
  55. mlrun/data_types/infer.py +3 -1
  56. mlrun/data_types/spark.py +2 -1
  57. mlrun/datastore/__init__.py +0 -2
  58. mlrun/datastore/alibaba_oss.py +4 -1
  59. mlrun/datastore/azure_blob.py +4 -1
  60. mlrun/datastore/base.py +12 -4
  61. mlrun/datastore/datastore.py +9 -3
  62. mlrun/datastore/datastore_profile.py +79 -20
  63. mlrun/datastore/dbfs_store.py +4 -1
  64. mlrun/datastore/filestore.py +4 -1
  65. mlrun/datastore/google_cloud_storage.py +4 -1
  66. mlrun/datastore/hdfs.py +4 -1
  67. mlrun/datastore/inmem.py +4 -1
  68. mlrun/datastore/redis.py +4 -1
  69. mlrun/datastore/s3.py +4 -1
  70. mlrun/datastore/sources.py +52 -51
  71. mlrun/datastore/store_resources.py +0 -2
  72. mlrun/datastore/targets.py +21 -21
  73. mlrun/datastore/utils.py +2 -2
  74. mlrun/datastore/v3io.py +4 -1
  75. mlrun/datastore/vectorstore.py +194 -0
  76. mlrun/datastore/wasbfs/fs.py +13 -12
  77. mlrun/db/base.py +208 -82
  78. mlrun/db/factory.py +0 -3
  79. mlrun/db/httpdb.py +1237 -386
  80. mlrun/db/nopdb.py +201 -74
  81. mlrun/errors.py +2 -2
  82. mlrun/execution.py +136 -50
  83. mlrun/feature_store/__init__.py +0 -2
  84. mlrun/feature_store/api.py +41 -40
  85. mlrun/feature_store/common.py +9 -9
  86. mlrun/feature_store/feature_set.py +20 -18
  87. mlrun/feature_store/feature_vector.py +27 -24
  88. mlrun/feature_store/retrieval/base.py +14 -9
  89. mlrun/feature_store/retrieval/job.py +2 -1
  90. mlrun/feature_store/steps.py +2 -2
  91. mlrun/features.py +30 -13
  92. mlrun/frameworks/__init__.py +1 -2
  93. mlrun/frameworks/_common/__init__.py +1 -2
  94. mlrun/frameworks/_common/artifacts_library.py +2 -2
  95. mlrun/frameworks/_common/mlrun_interface.py +10 -6
  96. mlrun/frameworks/_common/model_handler.py +29 -27
  97. mlrun/frameworks/_common/producer.py +3 -1
  98. mlrun/frameworks/_dl_common/__init__.py +1 -2
  99. mlrun/frameworks/_dl_common/loggers/__init__.py +1 -2
  100. mlrun/frameworks/_dl_common/loggers/mlrun_logger.py +4 -4
  101. mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +3 -3
  102. mlrun/frameworks/_ml_common/__init__.py +1 -2
  103. mlrun/frameworks/_ml_common/loggers/__init__.py +1 -2
  104. mlrun/frameworks/_ml_common/model_handler.py +21 -21
  105. mlrun/frameworks/_ml_common/plans/__init__.py +1 -2
  106. mlrun/frameworks/_ml_common/plans/confusion_matrix_plan.py +3 -1
  107. mlrun/frameworks/_ml_common/plans/dataset_plan.py +3 -3
  108. mlrun/frameworks/_ml_common/plans/roc_curve_plan.py +4 -4
  109. mlrun/frameworks/auto_mlrun/__init__.py +1 -2
  110. mlrun/frameworks/auto_mlrun/auto_mlrun.py +22 -15
  111. mlrun/frameworks/huggingface/__init__.py +1 -2
  112. mlrun/frameworks/huggingface/model_server.py +9 -9
  113. mlrun/frameworks/lgbm/__init__.py +47 -44
  114. mlrun/frameworks/lgbm/callbacks/__init__.py +1 -2
  115. mlrun/frameworks/lgbm/callbacks/logging_callback.py +4 -2
  116. mlrun/frameworks/lgbm/callbacks/mlrun_logging_callback.py +4 -2
  117. mlrun/frameworks/lgbm/mlrun_interfaces/__init__.py +1 -2
  118. mlrun/frameworks/lgbm/mlrun_interfaces/mlrun_interface.py +5 -5
  119. mlrun/frameworks/lgbm/model_handler.py +15 -11
  120. mlrun/frameworks/lgbm/model_server.py +11 -7
  121. mlrun/frameworks/lgbm/utils.py +2 -2
  122. mlrun/frameworks/onnx/__init__.py +1 -2
  123. mlrun/frameworks/onnx/dataset.py +3 -3
  124. mlrun/frameworks/onnx/mlrun_interface.py +2 -2
  125. mlrun/frameworks/onnx/model_handler.py +7 -5
  126. mlrun/frameworks/onnx/model_server.py +8 -6
  127. mlrun/frameworks/parallel_coordinates.py +11 -11
  128. mlrun/frameworks/pytorch/__init__.py +22 -23
  129. mlrun/frameworks/pytorch/callbacks/__init__.py +1 -2
  130. mlrun/frameworks/pytorch/callbacks/callback.py +2 -1
  131. mlrun/frameworks/pytorch/callbacks/logging_callback.py +15 -8
  132. mlrun/frameworks/pytorch/callbacks/mlrun_logging_callback.py +19 -12
  133. mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +22 -15
  134. mlrun/frameworks/pytorch/callbacks_handler.py +36 -30
  135. mlrun/frameworks/pytorch/mlrun_interface.py +17 -17
  136. mlrun/frameworks/pytorch/model_handler.py +21 -17
  137. mlrun/frameworks/pytorch/model_server.py +13 -9
  138. mlrun/frameworks/sklearn/__init__.py +19 -18
  139. mlrun/frameworks/sklearn/estimator.py +2 -2
  140. mlrun/frameworks/sklearn/metric.py +3 -3
  141. mlrun/frameworks/sklearn/metrics_library.py +8 -6
  142. mlrun/frameworks/sklearn/mlrun_interface.py +3 -2
  143. mlrun/frameworks/sklearn/model_handler.py +4 -3
  144. mlrun/frameworks/tf_keras/__init__.py +11 -12
  145. mlrun/frameworks/tf_keras/callbacks/__init__.py +1 -2
  146. mlrun/frameworks/tf_keras/callbacks/logging_callback.py +17 -14
  147. mlrun/frameworks/tf_keras/callbacks/mlrun_logging_callback.py +15 -12
  148. mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +21 -18
  149. mlrun/frameworks/tf_keras/model_handler.py +17 -13
  150. mlrun/frameworks/tf_keras/model_server.py +12 -8
  151. mlrun/frameworks/xgboost/__init__.py +19 -18
  152. mlrun/frameworks/xgboost/model_handler.py +13 -9
  153. mlrun/launcher/base.py +3 -4
  154. mlrun/launcher/local.py +1 -1
  155. mlrun/launcher/remote.py +1 -1
  156. mlrun/lists.py +4 -3
  157. mlrun/model.py +117 -46
  158. mlrun/model_monitoring/__init__.py +4 -4
  159. mlrun/model_monitoring/api.py +61 -59
  160. mlrun/model_monitoring/applications/_application_steps.py +17 -17
  161. mlrun/model_monitoring/applications/base.py +165 -6
  162. mlrun/model_monitoring/applications/context.py +88 -37
  163. mlrun/model_monitoring/applications/evidently_base.py +1 -2
  164. mlrun/model_monitoring/applications/histogram_data_drift.py +43 -21
  165. mlrun/model_monitoring/applications/results.py +55 -3
  166. mlrun/model_monitoring/controller.py +207 -239
  167. mlrun/model_monitoring/db/__init__.py +0 -2
  168. mlrun/model_monitoring/db/_schedules.py +156 -0
  169. mlrun/model_monitoring/db/_stats.py +189 -0
  170. mlrun/model_monitoring/db/tsdb/base.py +78 -25
  171. mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +90 -16
  172. mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +33 -0
  173. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +279 -59
  174. mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +1 -0
  175. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +78 -17
  176. mlrun/model_monitoring/helpers.py +152 -49
  177. mlrun/model_monitoring/stream_processing.py +99 -283
  178. mlrun/model_monitoring/tracking_policy.py +10 -3
  179. mlrun/model_monitoring/writer.py +48 -36
  180. mlrun/package/__init__.py +3 -6
  181. mlrun/package/context_handler.py +1 -1
  182. mlrun/package/packager.py +12 -9
  183. mlrun/package/packagers/__init__.py +0 -2
  184. mlrun/package/packagers/default_packager.py +14 -11
  185. mlrun/package/packagers/numpy_packagers.py +16 -7
  186. mlrun/package/packagers/pandas_packagers.py +18 -18
  187. mlrun/package/packagers/python_standard_library_packagers.py +25 -11
  188. mlrun/package/packagers_manager.py +31 -14
  189. mlrun/package/utils/__init__.py +0 -3
  190. mlrun/package/utils/_pickler.py +6 -6
  191. mlrun/platforms/__init__.py +47 -16
  192. mlrun/platforms/iguazio.py +4 -1
  193. mlrun/projects/operations.py +27 -27
  194. mlrun/projects/pipelines.py +75 -38
  195. mlrun/projects/project.py +865 -206
  196. mlrun/run.py +53 -10
  197. mlrun/runtimes/__init__.py +1 -3
  198. mlrun/runtimes/base.py +15 -11
  199. mlrun/runtimes/daskjob.py +9 -9
  200. mlrun/runtimes/generators.py +2 -1
  201. mlrun/runtimes/kubejob.py +4 -5
  202. mlrun/runtimes/mounts.py +572 -0
  203. mlrun/runtimes/mpijob/__init__.py +0 -2
  204. mlrun/runtimes/mpijob/abstract.py +7 -6
  205. mlrun/runtimes/nuclio/api_gateway.py +7 -7
  206. mlrun/runtimes/nuclio/application/application.py +11 -11
  207. mlrun/runtimes/nuclio/function.py +19 -17
  208. mlrun/runtimes/nuclio/serving.py +18 -11
  209. mlrun/runtimes/pod.py +154 -45
  210. mlrun/runtimes/remotesparkjob.py +3 -2
  211. mlrun/runtimes/sparkjob/__init__.py +0 -2
  212. mlrun/runtimes/sparkjob/spark3job.py +21 -11
  213. mlrun/runtimes/utils.py +6 -5
  214. mlrun/serving/merger.py +6 -4
  215. mlrun/serving/remote.py +18 -17
  216. mlrun/serving/routers.py +185 -172
  217. mlrun/serving/server.py +7 -1
  218. mlrun/serving/states.py +97 -78
  219. mlrun/serving/utils.py +13 -2
  220. mlrun/serving/v1_serving.py +3 -2
  221. mlrun/serving/v2_serving.py +74 -65
  222. mlrun/track/__init__.py +1 -1
  223. mlrun/track/tracker.py +2 -2
  224. mlrun/track/trackers/mlflow_tracker.py +6 -5
  225. mlrun/utils/async_http.py +1 -1
  226. mlrun/utils/clones.py +1 -1
  227. mlrun/utils/helpers.py +66 -18
  228. mlrun/utils/logger.py +106 -4
  229. mlrun/utils/notifications/notification/__init__.py +22 -19
  230. mlrun/utils/notifications/notification/base.py +33 -14
  231. mlrun/utils/notifications/notification/console.py +6 -6
  232. mlrun/utils/notifications/notification/git.py +11 -11
  233. mlrun/utils/notifications/notification/ipython.py +10 -9
  234. mlrun/utils/notifications/notification/mail.py +176 -0
  235. mlrun/utils/notifications/notification/slack.py +6 -6
  236. mlrun/utils/notifications/notification/webhook.py +6 -6
  237. mlrun/utils/notifications/notification_pusher.py +86 -44
  238. mlrun/utils/regex.py +3 -1
  239. mlrun/utils/version/version.json +2 -2
  240. {mlrun-1.7.1rc4.dist-info → mlrun-1.8.0rc8.dist-info}/METADATA +191 -186
  241. mlrun-1.8.0rc8.dist-info/RECORD +347 -0
  242. {mlrun-1.7.1rc4.dist-info → mlrun-1.8.0rc8.dist-info}/WHEEL +1 -1
  243. mlrun/model_monitoring/db/stores/__init__.py +0 -136
  244. mlrun/model_monitoring/db/stores/base/store.py +0 -213
  245. mlrun/model_monitoring/db/stores/sqldb/__init__.py +0 -13
  246. mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +0 -71
  247. mlrun/model_monitoring/db/stores/sqldb/models/base.py +0 -190
  248. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +0 -103
  249. mlrun/model_monitoring/db/stores/sqldb/models/sqlite.py +0 -40
  250. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +0 -659
  251. mlrun/model_monitoring/db/stores/v3io_kv/__init__.py +0 -13
  252. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +0 -726
  253. mlrun/model_monitoring/model_endpoint.py +0 -118
  254. mlrun-1.7.1rc4.dist-info/RECORD +0 -351
  255. {mlrun-1.7.1rc4.dist-info → mlrun-1.8.0rc8.dist-info}/LICENSE +0 -0
  256. {mlrun-1.7.1rc4.dist-info → mlrun-1.8.0rc8.dist-info}/entry_points.txt +0 -0
  257. {mlrun-1.7.1rc4.dist-info → mlrun-1.8.0rc8.dist-info}/top_level.txt +0 -0
mlrun/common/formatters/project.py
@@ -13,6 +13,7 @@
 # limitations under the License.
 #
 
+import datetime
 import typing
 
 import mlrun.common.schemas
@@ -30,11 +31,18 @@ class ProjectFormat(ObjectFormat, mlrun.common.types.StrEnum):
     # internal - allowed only in follower mode, only for the leader for upgrade purposes
     leader = "leader"
 
+    name_and_creation_time = "name_and_creation_time"
+
     @staticmethod
     def format_method(_format: str) -> typing.Optional[typing.Callable]:
         def _name_only(project: mlrun.common.schemas.Project) -> str:
             return project.metadata.name
 
+        def _name_and_creation_time(
+            project: mlrun.common.schemas.Project,
+        ) -> tuple[str, datetime.datetime]:
+            return project.metadata.name, project.metadata.created
+
         def _minimal(
             project: mlrun.common.schemas.Project,
         ) -> mlrun.common.schemas.Project:
@@ -48,4 +56,5 @@ class ProjectFormat(ObjectFormat, mlrun.common.types.StrEnum):
             ProjectFormat.name_only: _name_only,
             ProjectFormat.minimal: _minimal,
             ProjectFormat.leader: None,
+            ProjectFormat.name_and_creation_time: _name_and_creation_time,
         }[_format]
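
The hunk above adds a name_and_creation_time format to the ProjectFormat enum. A minimal sketch of how the new formatter might be resolved and applied, assuming ProjectFormat is exported from mlrun.common.formatters and that project is a populated mlrun.common.schemas.Project object:

    import mlrun.common.formatters
    import mlrun.common.schemas

    def project_name_and_created(project: mlrun.common.schemas.Project):
        # Resolve the formatter callable registered for the new format value.
        fmt = mlrun.common.formatters.ProjectFormat.format_method("name_and_creation_time")
        # Per the hunk above, this returns a (name, created) tuple.
        return fmt(project)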

mlrun/common/model_monitoring/__init__.py
@@ -11,8 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
-
-# flake8: noqa - this is until we take care of the F401 violations with respect to __all__ & sphinx
-
-from .helpers import create_model_endpoint_uid

mlrun/common/model_monitoring/helpers.py
@@ -17,11 +17,6 @@ import typing
 
 import mlrun.common
 import mlrun.common.schemas.model_monitoring.constants as mm_constants
-from mlrun.common.schemas.model_monitoring import (
-    EndpointUID,
-    FunctionURI,
-    VersionedModel,
-)
 
 FeatureStats = typing.NewType("FeatureStats", dict[str, dict[str, typing.Any]])
 Histogram = typing.NewType("Histogram", list[list])
@@ -31,29 +26,6 @@ BinEdges = typing.NewType("BinEdges", list[float])
 _MAX_FLOAT = sys.float_info.max
 
 
-def create_model_endpoint_uid(function_uri: str, versioned_model: str):
-    function_uri = FunctionURI.from_string(function_uri)
-    versioned_model = VersionedModel.from_string(versioned_model)
-
-    if (
-        not function_uri.project
-        or not function_uri.function
-        or not versioned_model.model
-    ):
-        raise ValueError("Both function_uri and versioned_model have to be initialized")
-
-    uid = EndpointUID(
-        function_uri.project,
-        function_uri.function,
-        function_uri.tag,
-        function_uri.hash_key,
-        versioned_model.model,
-        versioned_model.version,
-    )
-
-    return uid
-
-
 def parse_model_endpoint_project_prefix(path: str, project_name: str):
     return path.split(project_name, 1)[0] + project_name
 
@@ -65,7 +37,7 @@ def parse_model_endpoint_store_prefix(store_prefix: str):
 
 
 def parse_monitoring_stream_path(
-    stream_uri: str, project: str, function_name: str = None
+    stream_uri: str, project: str, function_name: typing.Optional[str] = None
 ):
     if stream_uri.startswith("kafka://"):
         if "?topic" in stream_uri:

mlrun/common/runtimes/constants.py
@@ -15,9 +15,8 @@
 import enum
 import typing
 
-import mlrun_pipelines.common.models
-
 import mlrun.common.constants as mlrun_constants
+import mlrun_pipelines.common.models
 
 
 class PodPhases:

mlrun/common/schemas/__init__.py
@@ -11,10 +11,10 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
-# flake8: noqa - this is until we take care of the F401 violations with respect to __all__ & sphinx
 
 from .alert import (
+    AlertActivation,
+    AlertActivations,
     AlertActiveState,
     AlertConfig,
     AlertNotification,
@@ -63,6 +63,7 @@ from .clusterization_spec import (
 from .common import ImageBuilder
 from .constants import (
     APIStates,
+    ArtifactPartitionByField,
     ClusterizationRole,
     DeletionStrategy,
     FeatureStorePartitionByField,
@@ -159,11 +160,14 @@ from .notification import (
     Notification,
     NotificationKind,
     NotificationSeverity,
+    NotificationState,
     NotificationStatus,
+    NotificationSummary,
     SetNotificationRequest,
 )
 from .object import ObjectKind, ObjectMetadata, ObjectSpec, ObjectStatus
 from .pagination import PaginationInfo
+from .partition import PartitionInterval
 from .pipeline import PipelinesOutput, PipelinesPagination
 from .project import (
     IguazioProject,

mlrun/common/schemas/alert.py
@@ -12,12 +12,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+from collections import defaultdict
+from collections.abc import Iterator
 from datetime import datetime
-from typing import Annotated, Optional, Union
+from typing import Annotated, Any, Callable, Optional, Union
 
-import pydantic
+import pydantic.v1
 
-from mlrun.common.schemas.notification import Notification
+import mlrun.common.schemas.notification as notification_objects
 from mlrun.common.types import StrEnum
 
 
@@ -27,10 +29,10 @@ class EventEntityKind(StrEnum):
     JOB = "job"
 
 
-class EventEntities(pydantic.BaseModel):
+class EventEntities(pydantic.v1.BaseModel):
     kind: EventEntityKind
     project: str
-    ids: pydantic.conlist(str, min_items=1, max_items=1)
+    ids: pydantic.v1.conlist(str, min_items=1, max_items=1)
 
 
 class EventKind(StrEnum):
@@ -64,11 +66,11 @@ _event_kind_entity_map = {
 }
 
 
-class Event(pydantic.BaseModel):
+class Event(pydantic.v1.BaseModel):
     kind: EventKind
     timestamp: Union[str, datetime] = None  # occurrence time
     entity: EventEntities
-    value_dict: Optional[dict] = pydantic.Field(default_factory=dict)
+    value_dict: Optional[dict] = pydantic.v1.Field(default_factory=dict)
 
     def is_valid(self):
         return self.entity.kind in _event_kind_entity_map[self.kind]
@@ -86,7 +88,7 @@ class AlertSeverity(StrEnum):
 
 
 # what should trigger the alert. must be either event (at least 1), or prometheus query
-class AlertTrigger(pydantic.BaseModel):
+class AlertTrigger(pydantic.v1.BaseModel):
     events: list[EventKind] = []
     prometheus_alert: str = None
 
@@ -97,16 +99,16 @@ class AlertTrigger(pydantic.BaseModel):
     )
 
 
-class AlertCriteria(pydantic.BaseModel):
+class AlertCriteria(pydantic.v1.BaseModel):
     count: Annotated[
         int,
-        pydantic.Field(
+        pydantic.v1.Field(
             description="Number of events to wait until notification is sent"
         ),
     ] = 1
     period: Annotated[
         str,
-        pydantic.Field(
+        pydantic.v1.Field(
             description="Time period during which event occurred. e.g. 1d, 3h, 5m, 15s"
         ),
     ] = None
@@ -120,11 +122,11 @@ class ResetPolicy(StrEnum):
     AUTO = "auto"
 
 
-class AlertNotification(pydantic.BaseModel):
-    notification: Notification
+class AlertNotification(pydantic.v1.BaseModel):
+    notification: notification_objects.Notification
     cooldown_period: Annotated[
         str,
-        pydantic.Field(
+        pydantic.v1.Field(
             description="Period during which notifications "
             "will not be sent after initial send. The format of this would be in time."
             " e.g. 1d, 3h, 5m, 15s"
@@ -132,14 +134,14 @@ class AlertNotification(pydantic.BaseModel):
     ] = None
 
 
-class AlertConfig(pydantic.BaseModel):
+class AlertConfig(pydantic.v1.BaseModel):
     project: str
     id: int = None
     name: str
     description: Optional[str] = ""
     summary: Annotated[
         str,
-        pydantic.Field(
+        pydantic.v1.Field(
             description=(
                 "String to be sent in the notifications generated."
                 "e.g. 'Model {{project}}/{{entity}} is drifting.'"
@@ -153,11 +155,12 @@ class AlertConfig(pydantic.BaseModel):
     trigger: AlertTrigger
     criteria: Optional[AlertCriteria]
     reset_policy: ResetPolicy = ResetPolicy.AUTO
-    notifications: pydantic.conlist(AlertNotification, min_items=1)
+    notifications: pydantic.v1.conlist(AlertNotification, min_items=1)
     state: AlertActiveState = AlertActiveState.INACTIVE
     count: Optional[int] = 0
+    updated: datetime = None
 
-    def get_raw_notifications(self) -> list[Notification]:
+    def get_raw_notifications(self) -> list[notification_objects.Notification]:
         return [
             alert_notification.notification for alert_notification in self.notifications
         ]
@@ -169,7 +172,7 @@ class AlertsModes(StrEnum):
 
 
 class AlertTemplate(
-    pydantic.BaseModel
+    pydantic.v1.BaseModel
 ):  # Template fields that are not shared with created configs
     template_id: int = None
     template_name: str
@@ -200,3 +203,92 @@ class AlertTemplate(
             or self.reset_policy != other.reset_policy
             or self.criteria != other.criteria
         )
+
+
+class AlertActivation(pydantic.v1.BaseModel):
+    id: int
+    name: str
+    project: str
+    severity: AlertSeverity
+    activation_time: datetime
+    entity_id: str
+    entity_kind: EventEntityKind
+    criteria: AlertCriteria
+    event_kind: EventKind
+    number_of_events: int
+    notifications: list[notification_objects.NotificationState]
+    reset_time: Optional[datetime] = None
+
+    def group_key(self, attributes: list[str]) -> Union[Any, tuple]:
+        """
+        Dynamically create a key for grouping based on the provided attributes.
+        - If there's only one attribute, return the value directly (not a single-element tuple).
+        - If there are multiple attributes, return them as a tuple for grouping.
+
+        This ensures grouping behaves intuitively without redundant tuple representations.
+        """
+        if len(attributes) == 1:
+            # Avoid single-element tuple like (high,) when only one grouping attribute is used
+            return getattr(self, attributes[0])
+        # Otherwise, return a tuple of all specified attributes
+        return tuple(getattr(self, attr) for attr in attributes)
+
+
+class AlertActivations(pydantic.v1.BaseModel):
+    activations: list[AlertActivation]
+    pagination: Optional[dict]
+
+    def __iter__(self) -> Iterator[AlertActivation]:
+        return iter(self.activations)
+
+    def __getitem__(self, index: int) -> AlertActivation:
+        return self.activations[index]
+
+    def __len__(self) -> int:
+        return len(self.activations)
+
+    def group_by(self, *attributes: str) -> dict:
+        """
+        Group alert activations by specified attributes.
+
+        Args:
+        :param attributes: Attributes to group by.
+
+        :returns: A dictionary where keys are tuples of attribute values and values are lists of
+            AlertActivation objects.
+
+        Example:
+            # Group by project and severity
+            grouped = activations.group_by("project", "severity")
+        """
+        grouped = defaultdict(list)
+        for activation in self.activations:
+            key = activation.group_key(attributes)
+            grouped[key].append(activation)
+        return dict(grouped)
+
+    def aggregate_by(
+        self,
+        group_by_attrs: list[str],
+        aggregation_function: Callable[[list[AlertActivation]], Any],
+    ) -> dict:
+        """
+        Aggregate alert activations by specified attributes using a given aggregation function.
+
+        Args:
+        :param group_by_attrs: Attributes to group by.
+        :param aggregation_function: Function to aggregate grouped activations.
+
+        :returns: A dictionary where keys are tuples of attribute values and values are the result
+            of the aggregation function.
+
+        Example:
+            # Aggregate by name and entity_id and count number of activations in each group
+            activations.aggregate_by(["name", "entity_id"], lambda activations: len(activations))
+        """
+        grouped = self.group_by(*group_by_attrs)
+        aggregated = {
+            key: aggregation_function(activations)
+            for key, activations in grouped.items()
        }
+        return aggregated
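
The AlertActivation and AlertActivations models added above give client code simple iteration, grouping, and aggregation over alert activations. A minimal usage sketch, assuming activations is an AlertActivations instance that has already been fetched from the server (the retrieval call itself is not part of this hunk):

    # Group activations by project and severity; with two attributes the keys are tuples.
    grouped = activations.group_by("project", "severity")

    # Count activations per (name, entity_id) pair using the aggregation helper.
    counts = activations.aggregate_by(["name", "entity_id"], lambda acts: len(acts))

    # AlertActivations is iterable and indexable, so plain loops work as well.
    for activation in activations:
        print(activation.name, activation.severity, activation.number_of_events)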

mlrun/common/schemas/api_gateway.py
@@ -15,7 +15,7 @@
 import typing
 from typing import Optional
 
-import pydantic
+import pydantic.v1
 
 import mlrun.common.constants as mlrun_constants
 import mlrun.common.types
@@ -49,9 +49,9 @@ class APIGatewayState(mlrun.common.types.StrEnum):
     waiting_for_provisioning = "waitingForProvisioning"
 
 
-class _APIGatewayBaseModel(pydantic.BaseModel):
+class _APIGatewayBaseModel(pydantic.v1.BaseModel):
     class Config:
-        extra = pydantic.Extra.allow
+        extra = pydantic.v1.Extra.allow
 
 
 class APIGatewayMetadata(_APIGatewayBaseModel):
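
Many hunks in this diff replace import pydantic with import pydantic.v1: with pydantic 2.x installed, the v1 API stays available under the pydantic.v1 namespace, so these schemas keep their existing v1 behavior (BaseModel, Field, Extra, conlist, validator) without a rewrite. An illustrative sketch of the pattern, not taken from mlrun itself:

    import pydantic.v1

    class ExampleModel(pydantic.v1.BaseModel):
        name: str
        tags: pydantic.v1.conlist(str, min_items=1)

        class Config:
            extra = pydantic.v1.Extra.allow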

mlrun/common/schemas/artifact.py
@@ -14,7 +14,7 @@
 #
 import typing
 
-import pydantic
+import pydantic.v1
 from deprecated import deprecated
 
 import mlrun.common.types
@@ -25,6 +25,7 @@ from .object import ObjectStatus
 class ArtifactCategories(mlrun.common.types.StrEnum):
     model = "model"
     dataset = "dataset"
+    document = "document"
     other = "other"
 
     # we define the link as a category to prevent import cycles, but it's not a real category
@@ -38,17 +39,20 @@ class ArtifactCategories(mlrun.common.types.StrEnum):
             return [ArtifactCategories.model.value, link_kind], False
         if self.value == ArtifactCategories.dataset.value:
             return [ArtifactCategories.dataset.value, link_kind], False
+        if self.value == ArtifactCategories.document.value:
+            return [ArtifactCategories.document.value, link_kind], False
         if self.value == ArtifactCategories.other.value:
             return (
                 [
                     ArtifactCategories.model.value,
                     ArtifactCategories.dataset.value,
+                    ArtifactCategories.document.value,
                 ],
                 True,
             )
 
 
-class ArtifactIdentifier(pydantic.BaseModel):
+class ArtifactIdentifier(pydantic.v1.BaseModel):
     # artifact kind
     kind: typing.Optional[str]
     key: typing.Optional[str]
@@ -69,7 +73,7 @@ class ArtifactsFormat(mlrun.common.types.StrEnum):
     full = "full"
 
 
-class ArtifactMetadata(pydantic.BaseModel):
+class ArtifactMetadata(pydantic.v1.BaseModel):
     key: str
     project: str
     iter: typing.Optional[int]
@@ -77,10 +81,10 @@ class ArtifactMetadata(pydantic.BaseModel):
     tag: typing.Optional[str]
 
     class Config:
-        extra = pydantic.Extra.allow
+        extra = pydantic.v1.Extra.allow
 
 
-class ArtifactSpec(pydantic.BaseModel):
+class ArtifactSpec(pydantic.v1.BaseModel):
     src_path: typing.Optional[str]
     target_path: typing.Optional[str]
     viewer: typing.Optional[str]
@@ -91,10 +95,10 @@ class ArtifactSpec(pydantic.BaseModel):
     unpackaging_instructions: typing.Optional[dict[str, typing.Any]]
 
     class Config:
-        extra = pydantic.Extra.allow
+        extra = pydantic.v1.Extra.allow
 
 
-class Artifact(pydantic.BaseModel):
+class Artifact(pydantic.v1.BaseModel):
     kind: str
     metadata: ArtifactMetadata
     spec: ArtifactSpec

mlrun/common/schemas/auth.py
@@ -14,7 +14,7 @@
 #
 import typing
 
-import pydantic
+import pydantic.v1
 from nuclio.auth import AuthInfo as NuclioAuthInfo
 from nuclio.auth import AuthKinds as NuclioAuthKinds
 
@@ -59,6 +59,7 @@ class AuthorizationResourceTypes(mlrun.common.types.StrEnum):
     hub_source = "hub-source"
     workflow = "workflow"
     alert = "alert"
+    alert_activations = "alert-activations"
     alert_templates = "alert-templates"
     event = "event"
     datastore_profile = "datastore-profile"
@@ -90,6 +91,7 @@ class AuthorizationResourceTypes(mlrun.common.types.StrEnum):
             AuthorizationResourceTypes.run: "/projects/{project_name}/runs/{resource_name}",
             AuthorizationResourceTypes.event: "/projects/{project_name}/events/{resource_name}",
             AuthorizationResourceTypes.alert: "/projects/{project_name}/alerts/{resource_name}",
+            AuthorizationResourceTypes.alert_activations: "/projects/{project_name}/alerts/{resource_name}/activations",
             AuthorizationResourceTypes.alert_templates: "/alert-templates/{resource_name}",
             # runtime resource doesn't have an identifier, we don't need any auth granularity behind project level
             AuthorizationResourceTypes.runtime_resource: "/projects/{project_name}/runtime-resources",
@@ -106,12 +108,12 @@ class AuthorizationResourceTypes(mlrun.common.types.StrEnum):
         }[self].format(project_name=project_name, resource_name=resource_name)
 
 
-class AuthorizationVerificationInput(pydantic.BaseModel):
+class AuthorizationVerificationInput(pydantic.v1.BaseModel):
     resource: str
     action: AuthorizationAction
 
 
-class AuthInfo(pydantic.BaseModel):
+class AuthInfo(pydantic.v1.BaseModel):
     # Basic + Iguazio auth
     username: typing.Optional[str] = None
     # Basic auth
@@ -145,5 +147,5 @@ class AuthInfo(pydantic.BaseModel):
         return self.data_session or self.session
 
 
-class Credentials(pydantic.BaseModel):
+class Credentials(pydantic.v1.BaseModel):
     access_key: typing.Optional[str]

mlrun/common/schemas/background_task.py
@@ -15,7 +15,7 @@
 import datetime
 import typing
 
-import pydantic
+import pydantic.v1
 
 import mlrun.common.types
 
@@ -35,7 +35,7 @@ class BackgroundTaskState(mlrun.common.types.StrEnum):
     ]
 
 
-class BackgroundTaskMetadata(pydantic.BaseModel):
+class BackgroundTaskMetadata(pydantic.v1.BaseModel):
     name: str
     kind: typing.Optional[str]
     project: typing.Optional[str]
@@ -44,21 +44,21 @@ class BackgroundTaskMetadata(pydantic.BaseModel):
     timeout: typing.Optional[int]
 
 
-class BackgroundTaskSpec(pydantic.BaseModel):
+class BackgroundTaskSpec(pydantic.v1.BaseModel):
     pass
 
 
-class BackgroundTaskStatus(pydantic.BaseModel):
+class BackgroundTaskStatus(pydantic.v1.BaseModel):
     state: BackgroundTaskState
     error: typing.Optional[str]
 
 
-class BackgroundTask(pydantic.BaseModel):
-    kind: ObjectKind = pydantic.Field(ObjectKind.background_task, const=True)
+class BackgroundTask(pydantic.v1.BaseModel):
+    kind: ObjectKind = pydantic.v1.Field(ObjectKind.background_task, const=True)
     metadata: BackgroundTaskMetadata
     spec: BackgroundTaskSpec
     status: BackgroundTaskStatus
 
 
-class BackgroundTaskList(pydantic.BaseModel):
+class BackgroundTaskList(pydantic.v1.BaseModel):
     background_tasks: list[BackgroundTask]

mlrun/common/schemas/client_spec.py
@@ -14,13 +14,13 @@
 #
 import typing
 
-import pydantic
+import pydantic.v1
 
 from .function import Function
 from .k8s import Resources
 
 
-class ClientSpec(pydantic.BaseModel):
+class ClientSpec(pydantic.v1.BaseModel):
     version: typing.Optional[str]
     namespace: typing.Optional[str]
     docker_registry: typing.Optional[str]
@@ -57,7 +57,6 @@ class ClientSpec(pydantic.BaseModel):
     redis_url: typing.Optional[str]
     redis_type: typing.Optional[str]
     sql_url: typing.Optional[str]
-    model_endpoint_monitoring_endpoint_store_connection: typing.Optional[str]
     model_monitoring_tsdb_connection: typing.Optional[str]
     ce: typing.Optional[dict]
     # not passing them as one object as it possible client user would like to override only one of the params

mlrun/common/schemas/clusterization_spec.py
@@ -14,12 +14,12 @@
 #
 import typing
 
-import pydantic
+import pydantic.v1
 
 import mlrun.common.types
 
 
-class ClusterizationSpec(pydantic.BaseModel):
+class ClusterizationSpec(pydantic.v1.BaseModel):
     chief_api_state: typing.Optional[str]
     chief_version: typing.Optional[str]
 

mlrun/common/schemas/common.py
@@ -14,10 +14,12 @@
 
 import typing
 
-import pydantic
+import pydantic.v1
 
+import mlrun.errors
 
-class ImageBuilder(pydantic.BaseModel):
+
+class ImageBuilder(pydantic.v1.BaseModel):
     functionSourceCode: typing.Optional[str] = None  # noqa: N815
     codeEntryType: typing.Optional[str] = None  # noqa: N815
     codeEntryAttributes: typing.Optional[str] = None  # noqa: N815
@@ -40,4 +42,52 @@ class ImageBuilder(pydantic.BaseModel):
     source_code_target_dir: typing.Optional[str] = None
 
     class Config:
-        extra = pydantic.Extra.allow
+        extra = pydantic.v1.Extra.allow
+
+
+class LabelsModel(pydantic.v1.BaseModel):
+    """
+    This class accepts either a dictionary, a list, or a string for filtering by labels.
+
+    :param labels:
+        - If a dictionary is provided, it should be in the format {'label_name': 'value'}.
+          The values can also be `None`, which will result in the format 'label_name' (without a value).
+          This will be converted to a list of strings in the format 'label_name=value'.
+        - If a list is provided, all items must be strings. Each string can either
+          be a simple label name (e.g., 'label1') or a key-value pair in the format
+          'label=value'.
+        - If a string is provided, it should be a comma-separated list of labels
+          (e.g., 'label1,label2').
+        - If no labels are specified, the default is an empty list.
+    """
+
+    labels: typing.Optional[
+        typing.Union[str, dict[str, typing.Optional[str]], list[str]]
+    ]
+
+    @pydantic.v1.validator("labels")
+    @classmethod
+    def validate(cls, labels) -> list[str]:
+        if labels is None:
+            return []
+
+        # If labels is a string, split it by commas
+        if isinstance(labels, str):
+            return [label.strip() for label in labels.split(",") if label.strip()]
+
+        if isinstance(labels, list):
+            if not all(isinstance(item, str) for item in labels):
+                raise mlrun.errors.MLRunValueError(
+                    "All items in the list must be strings."
+                )
+            return labels
+
+        if isinstance(labels, dict):
+            return [
+                f"{key}={value}" if value is not None else key
+                for key, value in labels.items()
+            ]
+
+        raise mlrun.errors.MLRunValueError(
+            "Invalid labels format. Must be a string, dictionary of strings, or a list of strings."
+        )
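
The new LabelsModel normalizes all accepted label forms into a list of strings. A short sketch of the behavior implied by the validator above; the module path mlrun.common.schemas.common is an assumption based on the files-changed list:

    from mlrun.common.schemas.common import LabelsModel  # assumed module path

    LabelsModel(labels="env=prod, team").labels               # -> ["env=prod", "team"]
    LabelsModel(labels=["env=prod", "team"]).labels           # -> ["env=prod", "team"]
    LabelsModel(labels={"env": "prod", "team": None}).labels  # -> ["env=prod", "team"]
    LabelsModel(labels=None).labels                           # -> []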

mlrun/common/schemas/constants.py
@@ -133,6 +133,21 @@ class RunPartitionByField(mlrun.common.types.StrEnum):
         )
 
 
+class ArtifactPartitionByField(mlrun.common.types.StrEnum):
+    name = "name"  # Supported for artifacts objects
+    project_and_name = "project_and_name"  # Supported for artifacts objects
+
+    def to_partition_by_db_field(self, db_cls):
+        if self.value == ArtifactPartitionByField.name:
+            return db_cls.key
+        elif self.value == ArtifactPartitionByField.project_and_name:
+            return db_cls.project, db_cls.key
+        else:
+            raise mlrun.errors.MLRunInvalidArgumentError(
+                f"Unknown group by field: {self.value}"
+            )
+
+
 class SortField(mlrun.common.types.StrEnum):
     created = "created"
     updated = "updated"

mlrun/common/schemas/datastore_profile.py
@@ -15,7 +15,7 @@
 
 import typing
 
-from pydantic import BaseModel
+from pydantic.v1 import BaseModel
 
 
 class DatastoreProfile(BaseModel):