mlrun 1.7.2rc4__py3-none-any.whl → 1.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of mlrun might be problematic.

Files changed (275)
  1. mlrun/__init__.py +26 -22
  2. mlrun/__main__.py +15 -16
  3. mlrun/alerts/alert.py +150 -15
  4. mlrun/api/schemas/__init__.py +1 -9
  5. mlrun/artifacts/__init__.py +2 -3
  6. mlrun/artifacts/base.py +62 -19
  7. mlrun/artifacts/dataset.py +17 -17
  8. mlrun/artifacts/document.py +454 -0
  9. mlrun/artifacts/manager.py +28 -18
  10. mlrun/artifacts/model.py +91 -59
  11. mlrun/artifacts/plots.py +2 -2
  12. mlrun/common/constants.py +8 -0
  13. mlrun/common/formatters/__init__.py +1 -0
  14. mlrun/common/formatters/artifact.py +1 -1
  15. mlrun/common/formatters/feature_set.py +2 -0
  16. mlrun/common/formatters/function.py +1 -0
  17. mlrun/{model_monitoring/db/stores/v3io_kv/__init__.py → common/formatters/model_endpoint.py} +17 -0
  18. mlrun/common/formatters/pipeline.py +1 -2
  19. mlrun/common/formatters/project.py +9 -0
  20. mlrun/common/model_monitoring/__init__.py +0 -5
  21. mlrun/common/model_monitoring/helpers.py +12 -62
  22. mlrun/common/runtimes/constants.py +25 -4
  23. mlrun/common/schemas/__init__.py +9 -5
  24. mlrun/common/schemas/alert.py +114 -19
  25. mlrun/common/schemas/api_gateway.py +3 -3
  26. mlrun/common/schemas/artifact.py +22 -9
  27. mlrun/common/schemas/auth.py +8 -4
  28. mlrun/common/schemas/background_task.py +7 -7
  29. mlrun/common/schemas/client_spec.py +4 -4
  30. mlrun/common/schemas/clusterization_spec.py +2 -2
  31. mlrun/common/schemas/common.py +53 -3
  32. mlrun/common/schemas/constants.py +15 -0
  33. mlrun/common/schemas/datastore_profile.py +1 -1
  34. mlrun/common/schemas/feature_store.py +9 -9
  35. mlrun/common/schemas/frontend_spec.py +4 -4
  36. mlrun/common/schemas/function.py +10 -10
  37. mlrun/common/schemas/hub.py +1 -1
  38. mlrun/common/schemas/k8s.py +3 -3
  39. mlrun/common/schemas/memory_reports.py +3 -3
  40. mlrun/common/schemas/model_monitoring/__init__.py +4 -8
  41. mlrun/common/schemas/model_monitoring/constants.py +127 -46
  42. mlrun/common/schemas/model_monitoring/grafana.py +18 -12
  43. mlrun/common/schemas/model_monitoring/model_endpoints.py +154 -160
  44. mlrun/common/schemas/notification.py +24 -3
  45. mlrun/common/schemas/object.py +1 -1
  46. mlrun/common/schemas/pagination.py +4 -4
  47. mlrun/common/schemas/partition.py +142 -0
  48. mlrun/common/schemas/pipeline.py +3 -3
  49. mlrun/common/schemas/project.py +26 -18
  50. mlrun/common/schemas/runs.py +3 -3
  51. mlrun/common/schemas/runtime_resource.py +5 -5
  52. mlrun/common/schemas/schedule.py +1 -1
  53. mlrun/common/schemas/secret.py +1 -1
  54. mlrun/{model_monitoring/db/stores/sqldb/__init__.py → common/schemas/serving.py} +10 -1
  55. mlrun/common/schemas/tag.py +3 -3
  56. mlrun/common/schemas/workflow.py +6 -5
  57. mlrun/common/types.py +1 -0
  58. mlrun/config.py +157 -89
  59. mlrun/data_types/__init__.py +5 -3
  60. mlrun/data_types/infer.py +13 -3
  61. mlrun/data_types/spark.py +2 -1
  62. mlrun/datastore/__init__.py +59 -18
  63. mlrun/datastore/alibaba_oss.py +4 -1
  64. mlrun/datastore/azure_blob.py +4 -1
  65. mlrun/datastore/base.py +19 -24
  66. mlrun/datastore/datastore.py +10 -4
  67. mlrun/datastore/datastore_profile.py +178 -45
  68. mlrun/datastore/dbfs_store.py +4 -1
  69. mlrun/datastore/filestore.py +4 -1
  70. mlrun/datastore/google_cloud_storage.py +4 -1
  71. mlrun/datastore/hdfs.py +4 -1
  72. mlrun/datastore/inmem.py +4 -1
  73. mlrun/datastore/redis.py +4 -1
  74. mlrun/datastore/s3.py +14 -3
  75. mlrun/datastore/sources.py +89 -92
  76. mlrun/datastore/store_resources.py +7 -4
  77. mlrun/datastore/storeytargets.py +51 -16
  78. mlrun/datastore/targets.py +38 -31
  79. mlrun/datastore/utils.py +87 -4
  80. mlrun/datastore/v3io.py +4 -1
  81. mlrun/datastore/vectorstore.py +291 -0
  82. mlrun/datastore/wasbfs/fs.py +13 -12
  83. mlrun/db/base.py +286 -100
  84. mlrun/db/httpdb.py +1562 -490
  85. mlrun/db/nopdb.py +250 -83
  86. mlrun/errors.py +6 -2
  87. mlrun/execution.py +194 -50
  88. mlrun/feature_store/__init__.py +2 -10
  89. mlrun/feature_store/api.py +20 -458
  90. mlrun/feature_store/common.py +9 -9
  91. mlrun/feature_store/feature_set.py +20 -18
  92. mlrun/feature_store/feature_vector.py +105 -479
  93. mlrun/feature_store/feature_vector_utils.py +466 -0
  94. mlrun/feature_store/retrieval/base.py +15 -11
  95. mlrun/feature_store/retrieval/job.py +2 -1
  96. mlrun/feature_store/retrieval/storey_merger.py +1 -1
  97. mlrun/feature_store/steps.py +3 -3
  98. mlrun/features.py +30 -13
  99. mlrun/frameworks/__init__.py +1 -2
  100. mlrun/frameworks/_common/__init__.py +1 -2
  101. mlrun/frameworks/_common/artifacts_library.py +2 -2
  102. mlrun/frameworks/_common/mlrun_interface.py +10 -6
  103. mlrun/frameworks/_common/model_handler.py +31 -31
  104. mlrun/frameworks/_common/producer.py +3 -1
  105. mlrun/frameworks/_dl_common/__init__.py +1 -2
  106. mlrun/frameworks/_dl_common/loggers/__init__.py +1 -2
  107. mlrun/frameworks/_dl_common/loggers/mlrun_logger.py +4 -4
  108. mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +3 -3
  109. mlrun/frameworks/_ml_common/__init__.py +1 -2
  110. mlrun/frameworks/_ml_common/loggers/__init__.py +1 -2
  111. mlrun/frameworks/_ml_common/model_handler.py +21 -21
  112. mlrun/frameworks/_ml_common/plans/__init__.py +1 -2
  113. mlrun/frameworks/_ml_common/plans/confusion_matrix_plan.py +3 -1
  114. mlrun/frameworks/_ml_common/plans/dataset_plan.py +3 -3
  115. mlrun/frameworks/_ml_common/plans/roc_curve_plan.py +4 -4
  116. mlrun/frameworks/auto_mlrun/__init__.py +1 -2
  117. mlrun/frameworks/auto_mlrun/auto_mlrun.py +22 -15
  118. mlrun/frameworks/huggingface/__init__.py +1 -2
  119. mlrun/frameworks/huggingface/model_server.py +9 -9
  120. mlrun/frameworks/lgbm/__init__.py +47 -44
  121. mlrun/frameworks/lgbm/callbacks/__init__.py +1 -2
  122. mlrun/frameworks/lgbm/callbacks/logging_callback.py +4 -2
  123. mlrun/frameworks/lgbm/callbacks/mlrun_logging_callback.py +4 -2
  124. mlrun/frameworks/lgbm/mlrun_interfaces/__init__.py +1 -2
  125. mlrun/frameworks/lgbm/mlrun_interfaces/mlrun_interface.py +5 -5
  126. mlrun/frameworks/lgbm/model_handler.py +15 -11
  127. mlrun/frameworks/lgbm/model_server.py +11 -7
  128. mlrun/frameworks/lgbm/utils.py +2 -2
  129. mlrun/frameworks/onnx/__init__.py +1 -2
  130. mlrun/frameworks/onnx/dataset.py +3 -3
  131. mlrun/frameworks/onnx/mlrun_interface.py +2 -2
  132. mlrun/frameworks/onnx/model_handler.py +7 -5
  133. mlrun/frameworks/onnx/model_server.py +8 -6
  134. mlrun/frameworks/parallel_coordinates.py +11 -11
  135. mlrun/frameworks/pytorch/__init__.py +22 -23
  136. mlrun/frameworks/pytorch/callbacks/__init__.py +1 -2
  137. mlrun/frameworks/pytorch/callbacks/callback.py +2 -1
  138. mlrun/frameworks/pytorch/callbacks/logging_callback.py +15 -8
  139. mlrun/frameworks/pytorch/callbacks/mlrun_logging_callback.py +19 -12
  140. mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +22 -15
  141. mlrun/frameworks/pytorch/callbacks_handler.py +36 -30
  142. mlrun/frameworks/pytorch/mlrun_interface.py +17 -17
  143. mlrun/frameworks/pytorch/model_handler.py +21 -17
  144. mlrun/frameworks/pytorch/model_server.py +13 -9
  145. mlrun/frameworks/sklearn/__init__.py +19 -18
  146. mlrun/frameworks/sklearn/estimator.py +2 -2
  147. mlrun/frameworks/sklearn/metric.py +3 -3
  148. mlrun/frameworks/sklearn/metrics_library.py +8 -6
  149. mlrun/frameworks/sklearn/mlrun_interface.py +3 -2
  150. mlrun/frameworks/sklearn/model_handler.py +4 -3
  151. mlrun/frameworks/tf_keras/__init__.py +11 -12
  152. mlrun/frameworks/tf_keras/callbacks/__init__.py +1 -2
  153. mlrun/frameworks/tf_keras/callbacks/logging_callback.py +17 -14
  154. mlrun/frameworks/tf_keras/callbacks/mlrun_logging_callback.py +15 -12
  155. mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +21 -18
  156. mlrun/frameworks/tf_keras/model_handler.py +17 -13
  157. mlrun/frameworks/tf_keras/model_server.py +12 -8
  158. mlrun/frameworks/xgboost/__init__.py +19 -18
  159. mlrun/frameworks/xgboost/model_handler.py +13 -9
  160. mlrun/k8s_utils.py +2 -5
  161. mlrun/launcher/base.py +3 -4
  162. mlrun/launcher/client.py +2 -2
  163. mlrun/launcher/local.py +6 -2
  164. mlrun/launcher/remote.py +1 -1
  165. mlrun/lists.py +8 -4
  166. mlrun/model.py +132 -46
  167. mlrun/model_monitoring/__init__.py +3 -5
  168. mlrun/model_monitoring/api.py +113 -98
  169. mlrun/model_monitoring/applications/__init__.py +0 -5
  170. mlrun/model_monitoring/applications/_application_steps.py +81 -50
  171. mlrun/model_monitoring/applications/base.py +467 -14
  172. mlrun/model_monitoring/applications/context.py +212 -134
  173. mlrun/model_monitoring/{db/stores/base → applications/evidently}/__init__.py +6 -2
  174. mlrun/model_monitoring/applications/evidently/base.py +146 -0
  175. mlrun/model_monitoring/applications/histogram_data_drift.py +89 -56
  176. mlrun/model_monitoring/applications/results.py +67 -15
  177. mlrun/model_monitoring/controller.py +701 -315
  178. mlrun/model_monitoring/db/__init__.py +0 -2
  179. mlrun/model_monitoring/db/_schedules.py +242 -0
  180. mlrun/model_monitoring/db/_stats.py +189 -0
  181. mlrun/model_monitoring/db/tsdb/__init__.py +33 -22
  182. mlrun/model_monitoring/db/tsdb/base.py +243 -49
  183. mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +76 -36
  184. mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +33 -0
  185. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connection.py +213 -0
  186. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +534 -88
  187. mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +1 -0
  188. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +436 -106
  189. mlrun/model_monitoring/helpers.py +356 -114
  190. mlrun/model_monitoring/stream_processing.py +190 -345
  191. mlrun/model_monitoring/tracking_policy.py +11 -4
  192. mlrun/model_monitoring/writer.py +49 -90
  193. mlrun/package/__init__.py +3 -6
  194. mlrun/package/context_handler.py +2 -2
  195. mlrun/package/packager.py +12 -9
  196. mlrun/package/packagers/__init__.py +0 -2
  197. mlrun/package/packagers/default_packager.py +14 -11
  198. mlrun/package/packagers/numpy_packagers.py +16 -7
  199. mlrun/package/packagers/pandas_packagers.py +18 -18
  200. mlrun/package/packagers/python_standard_library_packagers.py +25 -11
  201. mlrun/package/packagers_manager.py +35 -32
  202. mlrun/package/utils/__init__.py +0 -3
  203. mlrun/package/utils/_pickler.py +6 -6
  204. mlrun/platforms/__init__.py +47 -16
  205. mlrun/platforms/iguazio.py +4 -1
  206. mlrun/projects/operations.py +30 -30
  207. mlrun/projects/pipelines.py +116 -47
  208. mlrun/projects/project.py +1292 -329
  209. mlrun/render.py +5 -9
  210. mlrun/run.py +57 -14
  211. mlrun/runtimes/__init__.py +1 -3
  212. mlrun/runtimes/base.py +30 -22
  213. mlrun/runtimes/daskjob.py +9 -9
  214. mlrun/runtimes/databricks_job/databricks_runtime.py +6 -5
  215. mlrun/runtimes/function_reference.py +5 -2
  216. mlrun/runtimes/generators.py +3 -2
  217. mlrun/runtimes/kubejob.py +6 -7
  218. mlrun/runtimes/mounts.py +574 -0
  219. mlrun/runtimes/mpijob/__init__.py +0 -2
  220. mlrun/runtimes/mpijob/abstract.py +7 -6
  221. mlrun/runtimes/nuclio/api_gateway.py +7 -7
  222. mlrun/runtimes/nuclio/application/application.py +11 -13
  223. mlrun/runtimes/nuclio/application/reverse_proxy.go +66 -64
  224. mlrun/runtimes/nuclio/function.py +127 -70
  225. mlrun/runtimes/nuclio/serving.py +105 -37
  226. mlrun/runtimes/pod.py +159 -54
  227. mlrun/runtimes/remotesparkjob.py +3 -2
  228. mlrun/runtimes/sparkjob/__init__.py +0 -2
  229. mlrun/runtimes/sparkjob/spark3job.py +22 -12
  230. mlrun/runtimes/utils.py +7 -6
  231. mlrun/secrets.py +2 -2
  232. mlrun/serving/__init__.py +8 -0
  233. mlrun/serving/merger.py +7 -5
  234. mlrun/serving/remote.py +35 -22
  235. mlrun/serving/routers.py +186 -240
  236. mlrun/serving/server.py +41 -10
  237. mlrun/serving/states.py +432 -118
  238. mlrun/serving/utils.py +13 -2
  239. mlrun/serving/v1_serving.py +3 -2
  240. mlrun/serving/v2_serving.py +161 -203
  241. mlrun/track/__init__.py +1 -1
  242. mlrun/track/tracker.py +2 -2
  243. mlrun/track/trackers/mlflow_tracker.py +6 -5
  244. mlrun/utils/async_http.py +35 -22
  245. mlrun/utils/clones.py +7 -4
  246. mlrun/utils/helpers.py +511 -58
  247. mlrun/utils/logger.py +119 -13
  248. mlrun/utils/notifications/notification/__init__.py +22 -19
  249. mlrun/utils/notifications/notification/base.py +39 -15
  250. mlrun/utils/notifications/notification/console.py +6 -6
  251. mlrun/utils/notifications/notification/git.py +11 -11
  252. mlrun/utils/notifications/notification/ipython.py +10 -9
  253. mlrun/utils/notifications/notification/mail.py +176 -0
  254. mlrun/utils/notifications/notification/slack.py +16 -8
  255. mlrun/utils/notifications/notification/webhook.py +24 -8
  256. mlrun/utils/notifications/notification_pusher.py +191 -200
  257. mlrun/utils/regex.py +12 -2
  258. mlrun/utils/version/version.json +2 -2
  259. {mlrun-1.7.2rc4.dist-info → mlrun-1.8.0.dist-info}/METADATA +69 -54
  260. mlrun-1.8.0.dist-info/RECORD +351 -0
  261. {mlrun-1.7.2rc4.dist-info → mlrun-1.8.0.dist-info}/WHEEL +1 -1
  262. mlrun/model_monitoring/applications/evidently_base.py +0 -137
  263. mlrun/model_monitoring/db/stores/__init__.py +0 -136
  264. mlrun/model_monitoring/db/stores/base/store.py +0 -213
  265. mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +0 -71
  266. mlrun/model_monitoring/db/stores/sqldb/models/base.py +0 -190
  267. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +0 -103
  268. mlrun/model_monitoring/db/stores/sqldb/models/sqlite.py +0 -40
  269. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +0 -659
  270. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +0 -726
  271. mlrun/model_monitoring/model_endpoint.py +0 -118
  272. mlrun-1.7.2rc4.dist-info/RECORD +0 -351
  273. {mlrun-1.7.2rc4.dist-info → mlrun-1.8.0.dist-info}/entry_points.txt +0 -0
  274. {mlrun-1.7.2rc4.dist-info → mlrun-1.8.0.dist-info/licenses}/LICENSE +0 -0
  275. {mlrun-1.7.2rc4.dist-info → mlrun-1.8.0.dist-info}/top_level.txt +0 -0
@@ -12,12 +12,14 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.
  #
+ from collections import defaultdict
+ from collections.abc import Iterator
  from datetime import datetime
- from typing import Annotated, Optional, Union
+ from typing import Annotated, Any, Callable, Optional, Union

- import pydantic
+ import pydantic.v1

- from mlrun.common.schemas.notification import Notification
+ import mlrun.common.schemas.notification as notification_objects
  from mlrun.common.types import StrEnum


@@ -27,10 +29,10 @@ class EventEntityKind(StrEnum):
  JOB = "job"


- class EventEntities(pydantic.BaseModel):
+ class EventEntities(pydantic.v1.BaseModel):
  kind: EventEntityKind
  project: str
- ids: pydantic.conlist(str, min_items=1, max_items=1)
+ ids: pydantic.v1.conlist(str, min_items=1, max_items=1)


  class EventKind(StrEnum):
@@ -64,11 +66,11 @@ _event_kind_entity_map = {
  }


- class Event(pydantic.BaseModel):
+ class Event(pydantic.v1.BaseModel):
  kind: EventKind
  timestamp: Union[str, datetime] = None # occurrence time
  entity: EventEntities
- value_dict: Optional[dict] = pydantic.Field(default_factory=dict)
+ value_dict: Optional[dict] = pydantic.v1.Field(default_factory=dict)

  def is_valid(self):
  return self.entity.kind in _event_kind_entity_map[self.kind]
@@ -86,7 +88,7 @@ class AlertSeverity(StrEnum):


  # what should trigger the alert. must be either event (at least 1), or prometheus query
- class AlertTrigger(pydantic.BaseModel):
+ class AlertTrigger(pydantic.v1.BaseModel):
  events: list[EventKind] = []
  prometheus_alert: str = None

@@ -97,16 +99,16 @@ class AlertTrigger(pydantic.BaseModel):
  )


- class AlertCriteria(pydantic.BaseModel):
+ class AlertCriteria(pydantic.v1.BaseModel):
  count: Annotated[
  int,
- pydantic.Field(
+ pydantic.v1.Field(
  description="Number of events to wait until notification is sent"
  ),
  ] = 1
  period: Annotated[
  str,
- pydantic.Field(
+ pydantic.v1.Field(
  description="Time period during which event occurred. e.g. 1d, 3h, 5m, 15s"
  ),
  ] = None
@@ -120,11 +122,11 @@ class ResetPolicy(StrEnum):
  AUTO = "auto"


- class AlertNotification(pydantic.BaseModel):
- notification: Notification
+ class AlertNotification(pydantic.v1.BaseModel):
+ notification: notification_objects.Notification
  cooldown_period: Annotated[
  str,
- pydantic.Field(
+ pydantic.v1.Field(
  description="Period during which notifications "
  "will not be sent after initial send. The format of this would be in time."
  " e.g. 1d, 3h, 5m, 15s"
@@ -132,14 +134,14 @@ class AlertNotification(pydantic.BaseModel):
  ] = None


- class AlertConfig(pydantic.BaseModel):
+ class AlertConfig(pydantic.v1.BaseModel):
  project: str
  id: int = None
  name: str
  description: Optional[str] = ""
  summary: Annotated[
  str,
- pydantic.Field(
+ pydantic.v1.Field(
  description=(
  "String to be sent in the notifications generated."
  "e.g. 'Model {{project}}/{{entity}} is drifting.'"
@@ -153,11 +155,15 @@ class AlertConfig(pydantic.BaseModel):
  trigger: AlertTrigger
  criteria: Optional[AlertCriteria]
  reset_policy: ResetPolicy = ResetPolicy.AUTO
- notifications: pydantic.conlist(AlertNotification, min_items=1)
+ notifications: pydantic.v1.conlist(AlertNotification, min_items=1)
  state: AlertActiveState = AlertActiveState.INACTIVE
  count: Optional[int] = 0
+ updated: datetime = None

- def get_raw_notifications(self) -> list[Notification]:
+ class Config:
+ extra = pydantic.v1.Extra.allow
+
+ def get_raw_notifications(self) -> list[notification_objects.Notification]:
  return [
  alert_notification.notification for alert_notification in self.notifications
  ]
@@ -169,7 +175,7 @@ class AlertsModes(StrEnum):


  class AlertTemplate(
- pydantic.BaseModel
+ pydantic.v1.BaseModel
  ): # Template fields that are not shared with created configs
  template_id: int = None
  template_name: str
@@ -200,3 +206,92 @@ class AlertTemplate(
  or self.reset_policy != other.reset_policy
  or self.criteria != other.criteria
  )
+
+
+ class AlertActivation(pydantic.v1.BaseModel):
+ id: int
+ name: str
+ project: str
+ severity: AlertSeverity
+ activation_time: datetime
+ entity_id: str
+ entity_kind: EventEntityKind
+ criteria: AlertCriteria
+ event_kind: EventKind
+ number_of_events: int
+ notifications: list[notification_objects.NotificationState]
+ reset_time: Optional[datetime] = None
+
+ def group_key(self, attributes: list[str]) -> Union[Any, tuple]:
+ """
+ Dynamically create a key for grouping based on the provided attributes.
+ - If there's only one attribute, return the value directly (not a single-element tuple).
+ - If there are multiple attributes, return them as a tuple for grouping.
+
+ This ensures grouping behaves intuitively without redundant tuple representations.
+ """
+ if len(attributes) == 1:
+ # Avoid single-element tuple like (high,) when only one grouping attribute is used
+ return getattr(self, attributes[0])
+ # Otherwise, return a tuple of all specified attributes
+ return tuple(getattr(self, attr) for attr in attributes)
+
+
+ class AlertActivations(pydantic.v1.BaseModel):
+ activations: list[AlertActivation]
+ pagination: Optional[dict]
+
+ def __iter__(self) -> Iterator[AlertActivation]:
+ return iter(self.activations)
+
+ def __getitem__(self, index: int) -> AlertActivation:
+ return self.activations[index]
+
+ def __len__(self) -> int:
+ return len(self.activations)
+
+ def group_by(self, *attributes: str) -> dict:
+ """
+ Group alert activations by specified attributes.
+
+ Args:
+ :param attributes: Attributes to group by.
+
+ :returns: A dictionary where keys are tuples of attribute values and values are lists of
+ AlertActivation objects.
+
+ Example:
+ # Group by project and severity
+ grouped = activations.group_by("project", "severity")
+ """
+ grouped = defaultdict(list)
+ for activation in self.activations:
+ key = activation.group_key(attributes)
+ grouped[key].append(activation)
+ return dict(grouped)
+
+ def aggregate_by(
+ self,
+ group_by_attrs: list[str],
+ aggregation_function: Callable[[list[AlertActivation]], Any],
+ ) -> dict:
+ """
+ Aggregate alert activations by specified attributes using a given aggregation function.
+
+ Args:
+ :param group_by_attrs: Attributes to group by.
+ :param aggregation_function: Function to aggregate grouped activations.
+
+ :returns: A dictionary where keys are tuples of attribute values and values are the result
+ of the aggregation function.
+
+ Example:
+ # Aggregate by name and entity_id and count number of activations in each group
+ activations.aggregate_by(["name", "entity_id"], lambda activations: len(activations))
+ """
+ grouped = self.group_by(*group_by_attrs)
+ aggregated = {
+ key: aggregation_function(activations)
+ for key, activations in grouped.items()
+ }
+ return aggregated
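
The AlertActivation and AlertActivations models added above give activations sequence-style access plus group_by and aggregate_by helpers. A minimal usage sketch, assuming `items` stands in for a list of AlertActivation objects already fetched from the MLRun API (the retrieval call itself is not part of this hunk):

    from mlrun.common.schemas.alert import AlertActivations

    # `items` is a hypothetical, pre-fetched list of AlertActivation objects.
    activations = AlertActivations(activations=items, pagination=None)

    # Sequence-style access via __iter__ / __getitem__ / __len__.
    for activation in activations:
        print(activation.name, activation.severity, activation.activation_time)

    # A single grouping attribute yields plain keys, e.g. {"my-project": [...]}.
    by_project = activations.group_by("project")

    # Multiple attributes yield tuple keys of the attribute values.
    by_project_and_severity = activations.group_by("project", "severity")

    # Count activations per (name, entity_id) pair.
    counts = activations.aggregate_by(["name", "entity_id"], len)
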
@@ -15,7 +15,7 @@
  import typing
  from typing import Optional

- import pydantic
+ import pydantic.v1

  import mlrun.common.constants as mlrun_constants
  import mlrun.common.types
@@ -49,9 +49,9 @@ class APIGatewayState(mlrun.common.types.StrEnum):
  waiting_for_provisioning = "waitingForProvisioning"


- class _APIGatewayBaseModel(pydantic.BaseModel):
+ class _APIGatewayBaseModel(pydantic.v1.BaseModel):
  class Config:
- extra = pydantic.Extra.allow
+ extra = pydantic.v1.Extra.allow


  class APIGatewayMetadata(_APIGatewayBaseModel):
@@ -14,7 +14,7 @@
  #
  import typing

- import pydantic
+ import pydantic.v1
  from deprecated import deprecated

  import mlrun.common.types
@@ -25,6 +25,7 @@ from .object import ObjectStatus
  class ArtifactCategories(mlrun.common.types.StrEnum):
  model = "model"
  dataset = "dataset"
+ document = "document"
  other = "other"

  # we define the link as a category to prevent import cycles, but it's not a real category
@@ -38,23 +39,35 @@ class ArtifactCategories(mlrun.common.types.StrEnum):
  return [ArtifactCategories.model.value, link_kind], False
  if self.value == ArtifactCategories.dataset.value:
  return [ArtifactCategories.dataset.value, link_kind], False
+ if self.value == ArtifactCategories.document.value:
+ return [ArtifactCategories.document.value, link_kind], False
  if self.value == ArtifactCategories.other.value:
  return (
  [
  ArtifactCategories.model.value,
  ArtifactCategories.dataset.value,
+ ArtifactCategories.document.value,
  ],
  True,
  )

  @classmethod
  def from_kind(cls, kind: str) -> "ArtifactCategories":
- if kind in [cls.model.value, cls.dataset.value]:
+ if kind in [cls.model.value, cls.dataset.value, cls.document.value]:
  return cls(kind)
  return cls.other

+ @staticmethod
+ def all():
+ """Return all applicable artifact categories"""
+ return [
+ ArtifactCategories.model,
+ ArtifactCategories.dataset,
+ ArtifactCategories.document,
+ ]

- class ArtifactIdentifier(pydantic.BaseModel):
+
+ class ArtifactIdentifier(pydantic.v1.BaseModel):
  # artifact kind
  kind: typing.Optional[str]
  key: typing.Optional[str]
@@ -67,7 +80,7 @@ class ArtifactIdentifier(pydantic.BaseModel):

  @deprecated(
  version="1.7.0",
- reason="mlrun.common.schemas.ArtifactsFormat is deprecated and will be removed in 1.9.0. "
+ reason="mlrun.common.schemas.ArtifactsFormat is deprecated and will be removed in 1.10.0. "
  "Use mlrun.common.formatters.ArtifactFormat instead.",
  category=FutureWarning,
  )
@@ -75,7 +88,7 @@ class ArtifactsFormat(mlrun.common.types.StrEnum):
  full = "full"


- class ArtifactMetadata(pydantic.BaseModel):
+ class ArtifactMetadata(pydantic.v1.BaseModel):
  key: str
  project: str
  iter: typing.Optional[int]
@@ -83,10 +96,10 @@ class ArtifactMetadata(pydantic.BaseModel):
  tag: typing.Optional[str]

  class Config:
- extra = pydantic.Extra.allow
+ extra = pydantic.v1.Extra.allow


- class ArtifactSpec(pydantic.BaseModel):
+ class ArtifactSpec(pydantic.v1.BaseModel):
  src_path: typing.Optional[str]
  target_path: typing.Optional[str]
  viewer: typing.Optional[str]
@@ -97,10 +110,10 @@ class ArtifactSpec(pydantic.BaseModel):
  unpackaging_instructions: typing.Optional[dict[str, typing.Any]]

  class Config:
- extra = pydantic.Extra.allow
+ extra = pydantic.v1.Extra.allow


- class Artifact(pydantic.BaseModel):
+ class Artifact(pydantic.v1.BaseModel):
  kind: str
  metadata: ArtifactMetadata
  spec: ArtifactSpec
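
The ArtifactCategories changes above introduce a document category next to model and dataset, teach from_kind about it, and add an all() helper. A short sketch of the resulting behavior, based only on the methods shown in this hunk:

    from mlrun.common.schemas.artifact import ArtifactCategories

    # The new "document" kind now maps to its own category.
    assert ArtifactCategories.from_kind("document") == ArtifactCategories.document

    # Kinds outside model/dataset/document still collapse into "other".
    assert ArtifactCategories.from_kind("plot") == ArtifactCategories.other

    # all() returns the concrete categories (model, dataset, document); "other" is excluded.
    print(ArtifactCategories.all())
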
@@ -14,7 +14,7 @@
  #
  import typing

- import pydantic
+ import pydantic.v1
  from nuclio.auth import AuthInfo as NuclioAuthInfo
  from nuclio.auth import AuthKinds as NuclioAuthKinds

@@ -59,6 +59,7 @@ class AuthorizationResourceTypes(mlrun.common.types.StrEnum):
  hub_source = "hub-source"
  workflow = "workflow"
  alert = "alert"
+ alert_activations = "alert-activations"
  alert_templates = "alert-templates"
  event = "event"
  datastore_profile = "datastore-profile"
@@ -90,6 +91,7 @@ class AuthorizationResourceTypes(mlrun.common.types.StrEnum):
  AuthorizationResourceTypes.run: "/projects/{project_name}/runs/{resource_name}",
  AuthorizationResourceTypes.event: "/projects/{project_name}/events/{resource_name}",
  AuthorizationResourceTypes.alert: "/projects/{project_name}/alerts/{resource_name}",
+ AuthorizationResourceTypes.alert_activations: "/projects/{project_name}/alerts/{resource_name}/activations",
  AuthorizationResourceTypes.alert_templates: "/alert-templates/{resource_name}",
  # runtime resource doesn't have an identifier, we don't need any auth granularity behind project level
  AuthorizationResourceTypes.runtime_resource: "/projects/{project_name}/runtime-resources",
@@ -106,12 +108,14 @@ class AuthorizationResourceTypes(mlrun.common.types.StrEnum):
  }[self].format(project_name=project_name, resource_name=resource_name)


- class AuthorizationVerificationInput(pydantic.BaseModel):
+ class AuthorizationVerificationInput(pydantic.v1.BaseModel):
  resource: str
  action: AuthorizationAction


- class AuthInfo(pydantic.BaseModel):
+ class AuthInfo(pydantic.v1.BaseModel):
+ # Keep request headers for inter-service communication
+ request_headers: typing.Optional[dict[str, str]] = None
  # Basic + Iguazio auth
  username: typing.Optional[str] = None
  # Basic auth
@@ -145,5 +149,5 @@ class AuthInfo(pydantic.BaseModel):
  return self.data_session or self.session


- class Credentials(pydantic.BaseModel):
+ class Credentials(pydantic.v1.BaseModel):
  access_key: typing.Optional[str]
@@ -15,7 +15,7 @@
  import datetime
  import typing

- import pydantic
+ import pydantic.v1

  import mlrun.common.types

@@ -35,7 +35,7 @@ class BackgroundTaskState(mlrun.common.types.StrEnum):
  ]


- class BackgroundTaskMetadata(pydantic.BaseModel):
+ class BackgroundTaskMetadata(pydantic.v1.BaseModel):
  name: str
  kind: typing.Optional[str]
  project: typing.Optional[str]
@@ -44,21 +44,21 @@ class BackgroundTaskMetadata(pydantic.BaseModel):
  timeout: typing.Optional[int]


- class BackgroundTaskSpec(pydantic.BaseModel):
+ class BackgroundTaskSpec(pydantic.v1.BaseModel):
  pass


- class BackgroundTaskStatus(pydantic.BaseModel):
+ class BackgroundTaskStatus(pydantic.v1.BaseModel):
  state: BackgroundTaskState
  error: typing.Optional[str]


- class BackgroundTask(pydantic.BaseModel):
- kind: ObjectKind = pydantic.Field(ObjectKind.background_task, const=True)
+ class BackgroundTask(pydantic.v1.BaseModel):
+ kind: ObjectKind = pydantic.v1.Field(ObjectKind.background_task, const=True)
  metadata: BackgroundTaskMetadata
  spec: BackgroundTaskSpec
  status: BackgroundTaskStatus


- class BackgroundTaskList(pydantic.BaseModel):
+ class BackgroundTaskList(pydantic.v1.BaseModel):
  background_tasks: list[BackgroundTask]
@@ -14,13 +14,13 @@
  #
  import typing

- import pydantic
+ import pydantic.v1

  from .function import Function
  from .k8s import Resources


- class ClientSpec(pydantic.BaseModel):
+ class ClientSpec(pydantic.v1.BaseModel):
  version: typing.Optional[str]
  namespace: typing.Optional[str]
  docker_registry: typing.Optional[str]
@@ -57,8 +57,6 @@ class ClientSpec(pydantic.BaseModel):
  redis_url: typing.Optional[str]
  redis_type: typing.Optional[str]
  sql_url: typing.Optional[str]
- model_endpoint_monitoring_endpoint_store_connection: typing.Optional[str]
- model_monitoring_tsdb_connection: typing.Optional[str]
  ce: typing.Optional[dict]
  # not passing them as one object as it possible client user would like to override only one of the params
  calculate_artifact_hash: typing.Optional[str]
@@ -67,3 +65,5 @@ class ClientSpec(pydantic.BaseModel):
  packagers: typing.Optional[dict]
  external_platform_tracking: typing.Optional[dict]
  alerts_mode: typing.Optional[str]
+ system_id: typing.Optional[str]
+ model_endpoint_monitoring_store_prefixes: typing.Optional[dict[str, str]]
@@ -14,12 +14,12 @@
  #
  import typing

- import pydantic
+ import pydantic.v1

  import mlrun.common.types


- class ClusterizationSpec(pydantic.BaseModel):
+ class ClusterizationSpec(pydantic.v1.BaseModel):
  chief_api_state: typing.Optional[str]
  chief_version: typing.Optional[str]
@@ -14,10 +14,12 @@

  import typing

- import pydantic
+ import pydantic.v1

+ import mlrun.errors

- class ImageBuilder(pydantic.BaseModel):
+
+ class ImageBuilder(pydantic.v1.BaseModel):
  functionSourceCode: typing.Optional[str] = None # noqa: N815
  codeEntryType: typing.Optional[str] = None # noqa: N815
  codeEntryAttributes: typing.Optional[str] = None # noqa: N815
@@ -40,4 +42,52 @@ class ImageBuilder(pydantic.BaseModel):
  source_code_target_dir: typing.Optional[str] = None

  class Config:
- extra = pydantic.Extra.allow
+ extra = pydantic.v1.Extra.allow
+
+
+ class LabelsModel(pydantic.v1.BaseModel):
+ """
+ This class accepts either a dictionary, a list, or a string for filtering by labels.
+
+ :param labels:
+ - If a dictionary is provided, it should be in the format {'label_name': 'value'}.
+ The values can also be `None`, which will result in the format 'label_name' (without a value).
+ This will be converted to a list of strings in the format 'label_name=value'.
+ - If a list is provided, all items must be strings. Each string can either
+ be a simple label name (e.g., 'label1') or a key-value pair in the format
+ 'label=value'.
+ - If a string is provided, it should be a comma-separated list of labels
+ (e.g., 'label1,label2').
+ - If no labels are specified, the default is an empty list.
+ """
+
+ labels: typing.Optional[
+ typing.Union[str, dict[str, typing.Optional[str]], list[str]]
+ ]
+
+ @pydantic.v1.validator("labels")
+ @classmethod
+ def validate(cls, labels) -> list[str]:
+ if labels is None:
+ return []
+
+ # If labels is a string, split it by commas
+ if isinstance(labels, str):
+ return [label.strip() for label in labels.split(",") if label.strip()]
+
+ if isinstance(labels, list):
+ if not all(isinstance(item, str) for item in labels):
+ raise mlrun.errors.MLRunValueError(
+ "All items in the list must be strings."
+ )
+ return labels
+
+ if isinstance(labels, dict):
+ return [
+ f"{key}={value}" if value is not None else key
+ for key, value in labels.items()
+ ]
+
+ raise mlrun.errors.MLRunValueError(
+ "Invalid labels format. Must be a string, dictionary of strings, or a list of strings."
+ )
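
The new LabelsModel above normalizes the three accepted label formats into one list of strings. A small sketch of the expected normalization, following the validator logic in this hunk:

    from mlrun.common.schemas.common import LabelsModel

    # A comma-separated string, a list, and a dict all normalize to the same shape.
    print(LabelsModel(labels="env=prod, team").labels)               # ['env=prod', 'team']
    print(LabelsModel(labels=["env=prod", "team"]).labels)           # ['env=prod', 'team']
    print(LabelsModel(labels={"env": "prod", "team": None}).labels)  # ['env=prod', 'team']

    # Omitting labels entirely defaults to an empty list.
    print(LabelsModel(labels=None).labels)                           # []
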
@@ -133,6 +133,21 @@ class RunPartitionByField(mlrun.common.types.StrEnum):
  )


+ class ArtifactPartitionByField(mlrun.common.types.StrEnum):
+ name = "name" # Supported for artifacts objects
+ project_and_name = "project_and_name" # Supported for artifacts objects
+
+ def to_partition_by_db_field(self, db_cls):
+ if self.value == ArtifactPartitionByField.name:
+ return db_cls.key
+ elif self.value == ArtifactPartitionByField.project_and_name:
+ return db_cls.project, db_cls.key
+ else:
+ raise mlrun.errors.MLRunInvalidArgumentError(
+ f"Unknown group by field: {self.value}"
+ )
+
+
  class SortField(mlrun.common.types.StrEnum):
  created = "created"
  updated = "updated"
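
ArtifactPartitionByField maps a partition choice onto columns of whatever DB model class the server passes in. A sketch with a hypothetical stand-in for that class (the real SQLAlchemy model is server-side and not part of this diff):

    from mlrun.common.schemas.constants import ArtifactPartitionByField

    class _FakeArtifactTable:
        # Hypothetical placeholders for the columns accessed by to_partition_by_db_field.
        project = "project_column"
        key = "key_column"

    # "name" partitions by the artifact key; "project_and_name" returns both columns.
    print(ArtifactPartitionByField.name.to_partition_by_db_field(_FakeArtifactTable))
    print(ArtifactPartitionByField.project_and_name.to_partition_by_db_field(_FakeArtifactTable))
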
@@ -15,7 +15,7 @@

  import typing

- from pydantic import BaseModel
+ from pydantic.v1 import BaseModel


  class DatastoreProfile(BaseModel):
@@ -14,7 +14,7 @@
  #
  from typing import Optional

- import pydantic
+ import pydantic.v1

  from .auth import AuthorizationResourceTypes, Credentials
  from .object import (
@@ -27,7 +27,7 @@ from .object import (
  )


- class FeatureStoreBaseModel(pydantic.BaseModel):
+ class FeatureStoreBaseModel(pydantic.v1.BaseModel):
  """
  Intermediate base class, in order to override pydantic's configuration, as per
  https://docs.pydantic.dev/1.10/usage/model_config/#change-behaviour-globally
@@ -43,7 +43,7 @@ class Feature(FeatureStoreBaseModel):
  labels: Optional[dict] = {}

  class Config:
- extra = pydantic.Extra.allow
+ extra = pydantic.v1.Extra.allow


  class Entity(FeatureStoreBaseModel):
@@ -52,17 +52,17 @@ class Entity(FeatureStoreBaseModel):
  labels: Optional[dict] = {}

  class Config:
- extra = pydantic.Extra.allow
+ extra = pydantic.v1.Extra.allow


  class FeatureSetSpec(ObjectSpec):
  entities: list[Entity] = []
  features: list[Feature] = []
- engine: Optional[str] = pydantic.Field(default="storey")
+ engine: Optional[str] = pydantic.v1.Field(default="storey")


  class FeatureSet(FeatureStoreBaseModel):
- kind: ObjectKind = pydantic.Field(ObjectKind.feature_set, const=True)
+ kind: ObjectKind = pydantic.v1.Field(ObjectKind.feature_set, const=True)
  metadata: ObjectMetadata
  spec: FeatureSetSpec
  status: ObjectStatus
@@ -155,7 +155,7 @@ class EntitiesOutput(FeatureStoreBaseModel):


  class FeatureVector(FeatureStoreBaseModel):
- kind: ObjectKind = pydantic.Field(ObjectKind.feature_vector, const=True)
+ kind: ObjectKind = pydantic.v1.Field(ObjectKind.feature_vector, const=True)
  metadata: ObjectMetadata
  spec: ObjectSpec
  status: ObjectStatus
@@ -183,7 +183,7 @@ class DataSource(FeatureStoreBaseModel):
  path: str

  class Config:
- extra = pydantic.Extra.allow
+ extra = pydantic.v1.Extra.allow


  class DataTarget(FeatureStoreBaseModel):
@@ -192,7 +192,7 @@ class DataTarget(FeatureStoreBaseModel):
  path: Optional[str]

  class Config:
- extra = pydantic.Extra.allow
+ extra = pydantic.v1.Extra.allow


  class FeatureSetIngestInput(FeatureStoreBaseModel):
@@ -14,7 +14,7 @@
  #
  import typing

- import pydantic
+ import pydantic.v1

  import mlrun.common.types

@@ -43,20 +43,20 @@ class NuclioStreamsFeatureFlag(mlrun.common.types.StrEnum):
  disabled = "disabled"


- class FeatureFlags(pydantic.BaseModel):
+ class FeatureFlags(pydantic.v1.BaseModel):
  project_membership: ProjectMembershipFeatureFlag
  authentication: AuthenticationFeatureFlag
  nuclio_streams: NuclioStreamsFeatureFlag
  preemption_nodes: PreemptionNodesFeatureFlag


- class ArtifactLimits(pydantic.BaseModel):
+ class ArtifactLimits(pydantic.v1.BaseModel):
  max_chunk_size: int
  max_preview_size: int
  max_download_size: int


- class FrontendSpec(pydantic.BaseModel):
+ class FrontendSpec(pydantic.v1.BaseModel):
  jobs_dashboard_url: typing.Optional[str]
  model_monitoring_dashboard_url: typing.Optional[str]
  abortable_function_kinds: list[str] = []
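
One pattern repeats across every schema module in this diff: plain `pydantic` imports become `pydantic.v1`, the compatibility namespace that pydantic 2.x exposes for the legacy v1 API. A minimal sketch of the equivalent model definition, assuming pydantic>=2 is installed (the model below is made up for illustration):

    import pydantic.v1

    class ExampleSpec(pydantic.v1.BaseModel):  # hypothetical model, for illustration only
        name: str
        labels: dict = pydantic.v1.Field(default_factory=dict)

        class Config:
            extra = pydantic.v1.Extra.allow  # same Extra.allow pattern used in the hunks above

    # v1-style methods such as .dict() keep working with pydantic 2 installed.
    print(ExampleSpec(name="demo", owner="someone").dict())
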