mlrun 1.7.0rc4__py3-none-any.whl → 1.7.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (235)
  1. mlrun/__init__.py +11 -1
  2. mlrun/__main__.py +39 -121
  3. mlrun/{datastore/helpers.py → alerts/__init__.py} +2 -5
  4. mlrun/alerts/alert.py +248 -0
  5. mlrun/api/schemas/__init__.py +4 -3
  6. mlrun/artifacts/__init__.py +8 -3
  7. mlrun/artifacts/base.py +39 -254
  8. mlrun/artifacts/dataset.py +9 -190
  9. mlrun/artifacts/manager.py +73 -46
  10. mlrun/artifacts/model.py +30 -158
  11. mlrun/artifacts/plots.py +23 -380
  12. mlrun/common/constants.py +73 -1
  13. mlrun/common/db/sql_session.py +3 -2
  14. mlrun/common/formatters/__init__.py +21 -0
  15. mlrun/common/formatters/artifact.py +46 -0
  16. mlrun/common/formatters/base.py +113 -0
  17. mlrun/common/formatters/feature_set.py +44 -0
  18. mlrun/common/formatters/function.py +46 -0
  19. mlrun/common/formatters/pipeline.py +53 -0
  20. mlrun/common/formatters/project.py +51 -0
  21. mlrun/common/formatters/run.py +29 -0
  22. mlrun/common/helpers.py +11 -1
  23. mlrun/{runtimes → common/runtimes}/constants.py +32 -4
  24. mlrun/common/schemas/__init__.py +31 -4
  25. mlrun/common/schemas/alert.py +202 -0
  26. mlrun/common/schemas/api_gateway.py +196 -0
  27. mlrun/common/schemas/artifact.py +28 -1
  28. mlrun/common/schemas/auth.py +13 -2
  29. mlrun/common/schemas/client_spec.py +2 -1
  30. mlrun/common/schemas/common.py +7 -4
  31. mlrun/common/schemas/constants.py +3 -0
  32. mlrun/common/schemas/feature_store.py +58 -28
  33. mlrun/common/schemas/frontend_spec.py +8 -0
  34. mlrun/common/schemas/function.py +11 -0
  35. mlrun/common/schemas/hub.py +7 -9
  36. mlrun/common/schemas/model_monitoring/__init__.py +21 -4
  37. mlrun/common/schemas/model_monitoring/constants.py +136 -42
  38. mlrun/common/schemas/model_monitoring/grafana.py +9 -5
  39. mlrun/common/schemas/model_monitoring/model_endpoints.py +89 -41
  40. mlrun/common/schemas/notification.py +69 -12
  41. mlrun/{runtimes/mpijob/v1alpha1.py → common/schemas/pagination.py} +10 -13
  42. mlrun/common/schemas/pipeline.py +7 -0
  43. mlrun/common/schemas/project.py +67 -16
  44. mlrun/common/schemas/runs.py +17 -0
  45. mlrun/common/schemas/schedule.py +1 -1
  46. mlrun/common/schemas/workflow.py +10 -2
  47. mlrun/common/types.py +14 -1
  48. mlrun/config.py +233 -58
  49. mlrun/data_types/data_types.py +11 -1
  50. mlrun/data_types/spark.py +5 -4
  51. mlrun/data_types/to_pandas.py +75 -34
  52. mlrun/datastore/__init__.py +8 -10
  53. mlrun/datastore/alibaba_oss.py +131 -0
  54. mlrun/datastore/azure_blob.py +131 -43
  55. mlrun/datastore/base.py +107 -47
  56. mlrun/datastore/datastore.py +17 -7
  57. mlrun/datastore/datastore_profile.py +91 -7
  58. mlrun/datastore/dbfs_store.py +3 -7
  59. mlrun/datastore/filestore.py +1 -3
  60. mlrun/datastore/google_cloud_storage.py +92 -32
  61. mlrun/datastore/hdfs.py +5 -0
  62. mlrun/datastore/inmem.py +6 -3
  63. mlrun/datastore/redis.py +3 -2
  64. mlrun/datastore/s3.py +30 -12
  65. mlrun/datastore/snowflake_utils.py +45 -0
  66. mlrun/datastore/sources.py +274 -59
  67. mlrun/datastore/spark_utils.py +30 -0
  68. mlrun/datastore/store_resources.py +9 -7
  69. mlrun/datastore/storeytargets.py +151 -0
  70. mlrun/datastore/targets.py +387 -119
  71. mlrun/datastore/utils.py +68 -5
  72. mlrun/datastore/v3io.py +28 -50
  73. mlrun/db/auth_utils.py +152 -0
  74. mlrun/db/base.py +245 -20
  75. mlrun/db/factory.py +1 -4
  76. mlrun/db/httpdb.py +909 -231
  77. mlrun/db/nopdb.py +279 -14
  78. mlrun/errors.py +35 -5
  79. mlrun/execution.py +111 -38
  80. mlrun/feature_store/__init__.py +0 -2
  81. mlrun/feature_store/api.py +46 -53
  82. mlrun/feature_store/common.py +6 -11
  83. mlrun/feature_store/feature_set.py +48 -23
  84. mlrun/feature_store/feature_vector.py +13 -2
  85. mlrun/feature_store/ingestion.py +7 -6
  86. mlrun/feature_store/retrieval/base.py +9 -4
  87. mlrun/feature_store/retrieval/dask_merger.py +2 -0
  88. mlrun/feature_store/retrieval/job.py +13 -4
  89. mlrun/feature_store/retrieval/local_merger.py +2 -0
  90. mlrun/feature_store/retrieval/spark_merger.py +24 -32
  91. mlrun/feature_store/steps.py +38 -19
  92. mlrun/features.py +6 -14
  93. mlrun/frameworks/_common/plan.py +3 -3
  94. mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +7 -12
  95. mlrun/frameworks/_ml_common/plan.py +1 -1
  96. mlrun/frameworks/auto_mlrun/auto_mlrun.py +2 -2
  97. mlrun/frameworks/lgbm/__init__.py +1 -1
  98. mlrun/frameworks/lgbm/callbacks/callback.py +2 -4
  99. mlrun/frameworks/lgbm/model_handler.py +1 -1
  100. mlrun/frameworks/parallel_coordinates.py +4 -4
  101. mlrun/frameworks/pytorch/__init__.py +2 -2
  102. mlrun/frameworks/sklearn/__init__.py +1 -1
  103. mlrun/frameworks/sklearn/mlrun_interface.py +13 -3
  104. mlrun/frameworks/tf_keras/__init__.py +5 -2
  105. mlrun/frameworks/tf_keras/callbacks/logging_callback.py +1 -1
  106. mlrun/frameworks/tf_keras/mlrun_interface.py +2 -2
  107. mlrun/frameworks/xgboost/__init__.py +1 -1
  108. mlrun/k8s_utils.py +57 -12
  109. mlrun/launcher/__init__.py +1 -1
  110. mlrun/launcher/base.py +6 -5
  111. mlrun/launcher/client.py +13 -11
  112. mlrun/launcher/factory.py +1 -1
  113. mlrun/launcher/local.py +15 -5
  114. mlrun/launcher/remote.py +10 -3
  115. mlrun/lists.py +6 -2
  116. mlrun/model.py +297 -48
  117. mlrun/model_monitoring/__init__.py +1 -1
  118. mlrun/model_monitoring/api.py +152 -357
  119. mlrun/model_monitoring/applications/__init__.py +10 -0
  120. mlrun/model_monitoring/applications/_application_steps.py +190 -0
  121. mlrun/model_monitoring/applications/base.py +108 -0
  122. mlrun/model_monitoring/applications/context.py +341 -0
  123. mlrun/model_monitoring/{evidently_application.py → applications/evidently_base.py} +27 -22
  124. mlrun/model_monitoring/applications/histogram_data_drift.py +227 -91
  125. mlrun/model_monitoring/applications/results.py +99 -0
  126. mlrun/model_monitoring/controller.py +130 -303
  127. mlrun/model_monitoring/{stores/models/sqlite.py → db/__init__.py} +5 -10
  128. mlrun/model_monitoring/db/stores/__init__.py +136 -0
  129. mlrun/model_monitoring/db/stores/base/__init__.py +15 -0
  130. mlrun/model_monitoring/db/stores/base/store.py +213 -0
  131. mlrun/model_monitoring/db/stores/sqldb/__init__.py +13 -0
  132. mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +71 -0
  133. mlrun/model_monitoring/db/stores/sqldb/models/base.py +190 -0
  134. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +103 -0
  135. mlrun/model_monitoring/{stores/models/mysql.py → db/stores/sqldb/models/sqlite.py} +19 -13
  136. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +659 -0
  137. mlrun/model_monitoring/db/stores/v3io_kv/__init__.py +13 -0
  138. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +726 -0
  139. mlrun/model_monitoring/db/tsdb/__init__.py +105 -0
  140. mlrun/model_monitoring/db/tsdb/base.py +448 -0
  141. mlrun/model_monitoring/db/tsdb/helpers.py +30 -0
  142. mlrun/model_monitoring/db/tsdb/tdengine/__init__.py +15 -0
  143. mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +298 -0
  144. mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +42 -0
  145. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +522 -0
  146. mlrun/model_monitoring/db/tsdb/v3io/__init__.py +15 -0
  147. mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +158 -0
  148. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +849 -0
  149. mlrun/model_monitoring/features_drift_table.py +34 -22
  150. mlrun/model_monitoring/helpers.py +177 -39
  151. mlrun/model_monitoring/model_endpoint.py +3 -2
  152. mlrun/model_monitoring/stream_processing.py +165 -398
  153. mlrun/model_monitoring/tracking_policy.py +7 -1
  154. mlrun/model_monitoring/writer.py +161 -125
  155. mlrun/package/packagers/default_packager.py +2 -2
  156. mlrun/package/packagers_manager.py +1 -0
  157. mlrun/package/utils/_formatter.py +2 -2
  158. mlrun/platforms/__init__.py +11 -10
  159. mlrun/platforms/iguazio.py +67 -228
  160. mlrun/projects/__init__.py +6 -1
  161. mlrun/projects/operations.py +47 -20
  162. mlrun/projects/pipelines.py +396 -249
  163. mlrun/projects/project.py +1176 -406
  164. mlrun/render.py +28 -22
  165. mlrun/run.py +208 -181
  166. mlrun/runtimes/__init__.py +76 -11
  167. mlrun/runtimes/base.py +54 -24
  168. mlrun/runtimes/daskjob.py +9 -2
  169. mlrun/runtimes/databricks_job/databricks_runtime.py +1 -0
  170. mlrun/runtimes/databricks_job/databricks_wrapper.py +1 -1
  171. mlrun/runtimes/funcdoc.py +1 -29
  172. mlrun/runtimes/kubejob.py +34 -128
  173. mlrun/runtimes/local.py +39 -10
  174. mlrun/runtimes/mpijob/__init__.py +0 -20
  175. mlrun/runtimes/mpijob/abstract.py +8 -8
  176. mlrun/runtimes/mpijob/v1.py +1 -1
  177. mlrun/runtimes/nuclio/__init__.py +1 -0
  178. mlrun/runtimes/nuclio/api_gateway.py +769 -0
  179. mlrun/runtimes/nuclio/application/__init__.py +15 -0
  180. mlrun/runtimes/nuclio/application/application.py +758 -0
  181. mlrun/runtimes/nuclio/application/reverse_proxy.go +95 -0
  182. mlrun/runtimes/nuclio/function.py +188 -68
  183. mlrun/runtimes/nuclio/serving.py +57 -60
  184. mlrun/runtimes/pod.py +191 -58
  185. mlrun/runtimes/remotesparkjob.py +11 -8
  186. mlrun/runtimes/sparkjob/spark3job.py +17 -18
  187. mlrun/runtimes/utils.py +40 -73
  188. mlrun/secrets.py +6 -2
  189. mlrun/serving/__init__.py +8 -1
  190. mlrun/serving/remote.py +2 -3
  191. mlrun/serving/routers.py +89 -64
  192. mlrun/serving/server.py +54 -26
  193. mlrun/serving/states.py +187 -56
  194. mlrun/serving/utils.py +19 -11
  195. mlrun/serving/v2_serving.py +136 -63
  196. mlrun/track/tracker.py +2 -1
  197. mlrun/track/trackers/mlflow_tracker.py +5 -0
  198. mlrun/utils/async_http.py +26 -6
  199. mlrun/utils/db.py +18 -0
  200. mlrun/utils/helpers.py +375 -105
  201. mlrun/utils/http.py +2 -2
  202. mlrun/utils/logger.py +75 -9
  203. mlrun/utils/notifications/notification/__init__.py +14 -10
  204. mlrun/utils/notifications/notification/base.py +48 -0
  205. mlrun/utils/notifications/notification/console.py +2 -0
  206. mlrun/utils/notifications/notification/git.py +24 -1
  207. mlrun/utils/notifications/notification/ipython.py +2 -0
  208. mlrun/utils/notifications/notification/slack.py +96 -21
  209. mlrun/utils/notifications/notification/webhook.py +63 -2
  210. mlrun/utils/notifications/notification_pusher.py +146 -16
  211. mlrun/utils/regex.py +9 -0
  212. mlrun/utils/retryer.py +3 -2
  213. mlrun/utils/v3io_clients.py +2 -3
  214. mlrun/utils/version/version.json +2 -2
  215. mlrun-1.7.2.dist-info/METADATA +390 -0
  216. mlrun-1.7.2.dist-info/RECORD +351 -0
  217. {mlrun-1.7.0rc4.dist-info → mlrun-1.7.2.dist-info}/WHEEL +1 -1
  218. mlrun/feature_store/retrieval/conversion.py +0 -271
  219. mlrun/kfpops.py +0 -868
  220. mlrun/model_monitoring/application.py +0 -310
  221. mlrun/model_monitoring/batch.py +0 -974
  222. mlrun/model_monitoring/controller_handler.py +0 -37
  223. mlrun/model_monitoring/prometheus.py +0 -216
  224. mlrun/model_monitoring/stores/__init__.py +0 -111
  225. mlrun/model_monitoring/stores/kv_model_endpoint_store.py +0 -574
  226. mlrun/model_monitoring/stores/model_endpoint_store.py +0 -145
  227. mlrun/model_monitoring/stores/models/__init__.py +0 -27
  228. mlrun/model_monitoring/stores/models/base.py +0 -84
  229. mlrun/model_monitoring/stores/sql_model_endpoint_store.py +0 -382
  230. mlrun/platforms/other.py +0 -305
  231. mlrun-1.7.0rc4.dist-info/METADATA +0 -269
  232. mlrun-1.7.0rc4.dist-info/RECORD +0 -321
  233. {mlrun-1.7.0rc4.dist-info → mlrun-1.7.2.dist-info}/LICENSE +0 -0
  234. {mlrun-1.7.0rc4.dist-info → mlrun-1.7.2.dist-info}/entry_points.txt +0 -0
  235. {mlrun-1.7.0rc4.dist-info → mlrun-1.7.2.dist-info}/top_level.txt +0 -0
mlrun/common/schemas/model_monitoring/model_endpoints.py CHANGED
@@ -11,26 +11,35 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#

 import enum
 import json
-from typing import Any, Optional
+from datetime import datetime
+from typing import Any, NamedTuple, Optional, TypeVar

-from pydantic import BaseModel, Field, validator
-from pydantic.main import Extra
+from pydantic import BaseModel, Extra, Field, constr, validator

+# TODO: remove the unused import below after `mlrun.datastore` and `mlrun.utils` usage is removed.
+# At the moment `make lint` fails if this is removed.
 import mlrun.common.model_monitoring

 from ..object import ObjectKind, ObjectSpec, ObjectStatus
 from .constants import (
+    FQN_REGEX,
+    MODEL_ENDPOINT_ID_PATTERN,
+    PROJECT_PATTERN,
     EndpointType,
     EventFieldType,
     EventKeyMetrics,
     EventLiveStats,
+    ModelEndpointMonitoringMetricType,
     ModelMonitoringMode,
+    ResultKindApp,
+    ResultStatusApp,
 )

+Model = TypeVar("Model", bound=BaseModel)
+

 class ModelMonitoringStoreKinds:
     # TODO: do changes in examples & demos In 1.5.0 remove
@@ -39,9 +48,9 @@ class ModelMonitoringStoreKinds:


 class ModelEndpointMetadata(BaseModel):
-    project: Optional[str] = ""
+    project: constr(regex=PROJECT_PATTERN)
+    uid: constr(regex=MODEL_ENDPOINT_ID_PATTERN)
     labels: Optional[dict] = {}
-    uid: Optional[str] = ""

     class Config:
         extra = Extra.allow
@@ -54,12 +63,11 @@ class ModelEndpointMetadata(BaseModel):
         :param json_parse_values: List of dictionary keys with a JSON string value that will be parsed into a
                                   dictionary using json.loads().
         """
-        new_object = cls()
         if json_parse_values is None:
             json_parse_values = [EventFieldType.LABELS]

         return _mapping_attributes(
-            base_model=new_object,
+            model_class=cls,
             flattened_dictionary=endpoint_dict,
             json_parse_values=json_parse_values,
         )
@@ -86,7 +94,6 @@ class ModelEndpointSpec(ObjectSpec):
         :param json_parse_values: List of dictionary keys with a JSON string value that will be parsed into a
                                   dictionary using json.loads().
         """
-        new_object = cls()
         if json_parse_values is None:
             json_parse_values = [
                 EventFieldType.FEATURE_NAMES,
@@ -94,23 +101,13 @@
                 EventFieldType.MONITOR_CONFIGURATION,
             ]
         return _mapping_attributes(
-            base_model=new_object,
+            model_class=cls,
             flattened_dictionary=endpoint_dict,
             json_parse_values=json_parse_values,
         )

-    @validator("monitor_configuration")
-    def set_name(cls, monitor_configuration):
-        return monitor_configuration or {
-            EventFieldType.DRIFT_DETECTED_THRESHOLD: (
-                mlrun.mlconf.model_endpoint_monitoring.drift_thresholds.default.drift_detected
-            ),
-            EventFieldType.POSSIBLE_DRIFT_THRESHOLD: (
-                mlrun.mlconf.model_endpoint_monitoring.drift_thresholds.default.possible_drift
-            ),
-        }
-
     @validator("model_uri")
+    @classmethod
     def validate_model_uri(cls, model_uri):
         """Validate that the model uri includes the required prefix"""
         prefix, uri = mlrun.datastore.parse_store_uri(model_uri)
@@ -198,7 +195,6 @@ class ModelEndpointStatus(ObjectStatus):
         :param json_parse_values: List of dictionary keys with a JSON string value that will be parsed into a
                                   dictionary using json.loads().
         """
-        new_object = cls()
         if json_parse_values is None:
             json_parse_values = [
                 EventFieldType.FEATURE_STATS,
@@ -210,7 +206,7 @@
                 EventFieldType.ENDPOINT_TYPE,
             ]
         return _mapping_attributes(
-            base_model=new_object,
+            model_class=cls,
             flattened_dictionary=endpoint_dict,
             json_parse_values=json_parse_values,
         )
@@ -218,22 +214,13 @@

 class ModelEndpoint(BaseModel):
     kind: ObjectKind = Field(ObjectKind.model_endpoint, const=True)
-    metadata: ModelEndpointMetadata = ModelEndpointMetadata()
+    metadata: ModelEndpointMetadata
     spec: ModelEndpointSpec = ModelEndpointSpec()
     status: ModelEndpointStatus = ModelEndpointStatus()

     class Config:
         extra = Extra.allow

-    def __init__(self, **data: Any):
-        super().__init__(**data)
-        if self.metadata.uid is None:
-            uid = mlrun.common.model_monitoring.create_model_endpoint_uid(
-                function_uri=self.spec.function_uri,
-                versioned_model=self.spec.model,
-            )
-            self.metadata.uid = str(uid)
-
     def flat_dict(self):
         """Generate a flattened `ModelEndpoint` dictionary. The flattened dictionary result is important for storing
         the model endpoint object in the database.
@@ -274,7 +261,7 @@ class ModelEndpoint(BaseModel):
         return flatten_dict

     @classmethod
-    def from_flat_dict(cls, endpoint_dict: dict):
+    def from_flat_dict(cls, endpoint_dict: dict) -> "ModelEndpoint":
         """Create a `ModelEndpoint` object from an endpoint flattened dictionary. Because the provided dictionary
         is flattened, we pass it as is to the subclasses without splitting the keys into spec, metadata, and status.

@@ -292,21 +279,82 @@ class ModelEndpointList(BaseModel):
     endpoints: list[ModelEndpoint] = []


+class ModelEndpointMonitoringMetric(BaseModel):
+    project: str
+    app: str
+    type: ModelEndpointMonitoringMetricType
+    name: str
+    full_name: str
+
+
+def _compose_full_name(
+    *,
+    project: str,
+    app: str,
+    name: str,
+    type: ModelEndpointMonitoringMetricType = ModelEndpointMonitoringMetricType.RESULT,
+) -> str:
+    return ".".join([project, app, type, name])
+
+
+def _parse_metric_fqn_to_monitoring_metric(fqn: str) -> ModelEndpointMonitoringMetric:
+    match = FQN_REGEX.fullmatch(fqn)
+    if match is None:
+        raise ValueError("The fully qualified name is not in the expected format")
+    return ModelEndpointMonitoringMetric.parse_obj(
+        match.groupdict() | {"full_name": fqn}
+    )
+
+
+class _MetricPoint(NamedTuple):
+    timestamp: datetime
+    value: float
+
+
+class _ResultPoint(NamedTuple):
+    timestamp: datetime
+    value: float
+    status: ResultStatusApp
+
+
+class _ModelEndpointMonitoringMetricValuesBase(BaseModel):
+    full_name: str
+    type: ModelEndpointMonitoringMetricType
+    data: bool
+
+
+class ModelEndpointMonitoringMetricValues(_ModelEndpointMonitoringMetricValuesBase):
+    type: ModelEndpointMonitoringMetricType = ModelEndpointMonitoringMetricType.METRIC
+    values: list[_MetricPoint]
+    data: bool = True
+
+
+class ModelEndpointMonitoringResultValues(_ModelEndpointMonitoringMetricValuesBase):
+    type: ModelEndpointMonitoringMetricType = ModelEndpointMonitoringMetricType.RESULT
+    result_kind: ResultKindApp
+    values: list[_ResultPoint]
+    data: bool = True
+
+
+class ModelEndpointMonitoringMetricNoData(_ModelEndpointMonitoringMetricValuesBase):
+    full_name: str
+    type: ModelEndpointMonitoringMetricType
+    data: bool = False
+
+
 def _mapping_attributes(
-    base_model: BaseModel,
-    flattened_dictionary: dict,
-    json_parse_values: list = None,
-):
+    model_class: type[Model], flattened_dictionary: dict, json_parse_values: list
+) -> Model:
     """Generate a `BaseModel` object with the provided dictionary attributes.

-    :param base_model: `BaseModel` object (e.g. `ModelEndpointMetadata`).
+    :param model_class: `BaseModel` class (e.g. `ModelEndpointMetadata`).
     :param flattened_dictionary: Flattened dictionary that contains the model endpoint attributes.
     :param json_parse_values: List of dictionary keys with a JSON string value that will be parsed into a
                               dictionary using json.loads().
     """
     # Get the fields of the provided base model object. These fields will be used to filter to relevent keys
     # from the flattened dictionary.
-    wanted_keys = base_model.__fields__.keys()
+    wanted_keys = model_class.__fields__.keys()

     # Generate a filtered flattened dictionary that will be parsed into the BaseModel object
     dict_to_parse = {}
@@ -320,7 +368,7 @@ def _mapping_attributes(
         else:
             dict_to_parse[field_key] = flattened_dictionary[field_key]

-    return base_model.parse_obj(dict_to_parse)
+    return model_class.parse_obj(dict_to_parse)


 def _json_loads_if_not_none(field: Any) -> Any:
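
The new monitoring-metric helpers above compose and parse a fully qualified metric name of the form project.app.type.name. The sketch below re-creates that round trip in isolation so it runs without mlrun; the regex, the MonitoringMetric tuple, and the sample project and app names are illustrative assumptions, not the package's actual FQN_REGEX or schema.

    import re
    from typing import NamedTuple

    # Hypothetical stand-in for mlrun's FQN_REGEX; the real pattern lives in
    # mlrun.common.schemas.model_monitoring.constants.
    FQN_REGEX = re.compile(
        r"^(?P<project>[a-zA-Z0-9_-]+)\.(?P<app>[a-zA-Z0-9_-]+)\."
        r"(?P<type>result|metric)\.(?P<name>[a-zA-Z0-9_-]+)$"
    )


    class MonitoringMetric(NamedTuple):
        project: str
        app: str
        type: str
        name: str
        full_name: str


    def compose_full_name(project: str, app: str, name: str, type: str = "result") -> str:
        # Same dot-join order as the new _compose_full_name helper.
        return ".".join([project, app, type, name])


    def parse_full_name(fqn: str) -> MonitoringMetric:
        # Mirrors _parse_metric_fqn_to_monitoring_metric: reject anything that
        # does not match the expected project.app.type.name layout.
        match = FQN_REGEX.fullmatch(fqn)
        if match is None:
            raise ValueError("The fully qualified name is not in the expected format")
        return MonitoringMetric(full_name=fqn, **match.groupdict())


    fqn = compose_full_name("my-project", "histogram-data-drift", "general_drift")
    print(parse_full_name(fqn))  # project, app, type, name and the full FQN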
mlrun/common/schemas/notification.py CHANGED
@@ -22,11 +22,48 @@ import mlrun.common.types


 class NotificationKind(mlrun.common.types.StrEnum):
-    console = "console"
-    git = "git"
-    ipython = "ipython"
-    slack = "slack"
-    webhook = "webhook"
+    """Currently, the supported notification kinds and their params are as follows:"""
+
+    console: str = "console"
+    """no params, local only"""
+
+    git: str = "git"
+    """
+    **token** - The git token to use for the git notification.\n
+    **repo** - The git repo to which to send the notification.\n
+    **issue** - The git issue to which to send the notification.\n
+    **merge_request** -
+        In GitLab (as opposed to GitHub), merge requests and issues are separate entities.
+        If using merge request, the issue will be ignored, and vice versa.\n
+    **server** - The git server to which to send the notification.\n
+    **gitlab** - (bool) Whether the git server is GitLab or not.\n
+    """
+
+    ipython: str = "ipython"
+    """no params, local only"""
+
+    slack: str = "slack"
+    """**webhook** - The slack webhook to which to send the notification."""
+
+    webhook: str = "webhook"
+    """
+    **url** - The webhook url to which to send the notification.\n
+    **method** - The http method to use when sending the notification (GET, POST, PUT, etc…).\n
+    **headers** - (dict) The http headers to send with the notification.\n
+    **override_body** -
+        (dict) The body to send with the notification. If not specified, the
+        default body will be a dictionary containing `name`, `message`, `severity`, and a `runs` list of the
+        completed runs. You can also add the run's details.\n
+        Example::
+
+            "override_body": {"message":"Run Completed {{ runs }}"
+            # Results would look like:
+            "message": "Run Completed [{'project': 'my-project', 'name': 'my-function', 'host': <run-host>,
+            'status': {'state': 'completed', 'results': <run-results>}}]"
+    **verify_ssl** -
+        (bool) Whether SSL certificates are validated during HTTP requests or not.
+        The default is set to True.\n
+    """


 class NotificationSeverity(mlrun.common.types.StrEnum):
@@ -50,15 +87,35 @@ class NotificationLimits(enum.Enum):


 class Notification(pydantic.BaseModel):
+    """
+    Notification object schema
+
+    :param kind: notification implementation kind - slack, webhook, etc.
+    :param name: for logging and identification
+    :param message: message content in the notification
+    :param severity: severity to display in the notification
+    :param when: list of statuses to trigger the notification: 'running', 'completed', 'error'
+    :param condition: optional condition to trigger the notification, a jinja2 expression that can use run data
+                      to evaluate if the notification should be sent in addition to the 'when' statuses.
+                      e.g.: '{{ run["status"]["results"]["accuracy"] < 0.9}}'
+    :param params: Implementation specific parameters for the notification implementation (e.g. slack webhook url,
+                   git repository details, etc.)
+    :param secret_params: secret parameters for the notification implementation, same as params but will be stored
+                          in a k8s secret and passed as a secret reference to the implementation.
+    :param status: notification status - pending, sent, error
+    :param sent_time: time the notification was sent
+    :param reason: failure reason if the notification failed to send
+    """
+
     kind: NotificationKind
     name: str
-    message: str
-    severity: NotificationSeverity
-    when: list[str]
-    condition: str
-    params: dict[str, typing.Any] = None
-    status: NotificationStatus = None
-    sent_time: typing.Union[str, datetime.datetime] = None
+    message: typing.Optional[str] = None
+    severity: typing.Optional[NotificationSeverity] = None
+    when: typing.Optional[list[str]] = None
+    condition: typing.Optional[str] = None
+    params: typing.Optional[dict[str, typing.Any]] = None
+    status: typing.Optional[NotificationStatus] = None
+    sent_time: typing.Optional[typing.Union[str, datetime.datetime]] = None
     secret_params: typing.Optional[dict[str, typing.Any]] = None
     reason: typing.Optional[str] = None

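Since most Notification fields are now optional, a notification can be declared with only the fields relevant to the target kind. A minimal sketch, assuming mlrun 1.7.x is installed and using the webhook params documented in the docstrings above (the URL is a placeholder):

    from mlrun.common.schemas.notification import Notification, NotificationKind

    # Webhook notification fired on run failure; params follow the webhook kind's
    # documented parameters (url, method, override_body, verify_ssl).
    notification = Notification(
        kind=NotificationKind.webhook,
        name="on-failure",
        message="Run failed",
        when=["error"],
        params={
            "url": "https://example.com/hooks/mlrun",  # placeholder endpoint
            "method": "POST",
            "override_body": {"message": "Run Completed {{ runs }}"},
            "verify_ssl": True,
        },
    )
    print(notification.dict(exclude_none=True)["params"]["method"])  # POST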
mlrun/{runtimes/mpijob/v1alpha1.py → common/schemas/pagination.py} RENAMED
@@ -11,19 +11,16 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from deprecated import deprecated

-from mlrun.runtimes.constants import MPIJobCRDVersions
-from mlrun.runtimes.mpijob.abstract import AbstractMPIJobRuntime
+import typing

+import pydantic

-# TODO: Remove in 1.7.0
-@deprecated(
-    version="1.5.0",
-    reason="v1alpha1 mpi will be removed in 1.7.0, use v1 instead",
-    category=FutureWarning,
-)
-class MpiRuntimeV1Alpha1(AbstractMPIJobRuntime):
-    crd_group = "kubeflow.org"
-    crd_version = MPIJobCRDVersions.v1alpha1
-    crd_plural = "mpijobs"
+
+class PaginationInfo(pydantic.BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    page: typing.Optional[int]
+    page_size: typing.Optional[int] = pydantic.Field(alias="page-size")
+    page_token: typing.Optional[str] = pydantic.Field(alias="page-token")
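
The new PaginationInfo schema accepts both the HTTP query-parameter spellings and the Python field names because of allow_population_by_field_name. A self-contained sketch of that behaviour, assuming pydantic v1 semantics (which is what these schemas use):

    import typing

    import pydantic


    class PaginationInfo(pydantic.BaseModel):
        class Config:
            allow_population_by_field_name = True

        page: typing.Optional[int]
        page_size: typing.Optional[int] = pydantic.Field(alias="page-size")
        page_token: typing.Optional[str] = pydantic.Field(alias="page-token")


    # Built from query params (aliases) and from Python field names - both work.
    from_query = PaginationInfo.parse_obj({"page": 2, "page-size": 50})
    from_code = PaginationInfo(page=2, page_size=50)
    assert from_query == from_code
    print(from_query.dict(by_alias=True))  # {'page': 2, 'page-size': 50, 'page-token': None}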
mlrun/common/schemas/pipeline.py CHANGED
@@ -15,10 +15,17 @@
 import typing

 import pydantic
+from deprecated import deprecated

 import mlrun.common.types


+@deprecated(
+    version="1.7.0",
+    reason="mlrun.common.schemas.PipelinesFormat is deprecated and will be removed in 1.9.0. "
+    "Use mlrun.common.formatters.PipelineFormat instead.",
+    category=FutureWarning,
+)
 class PipelinesFormat(mlrun.common.types.StrEnum):
     full = "full"
     metadata_only = "metadata_only"
mlrun/common/schemas/project.py CHANGED
@@ -16,6 +16,7 @@ import datetime
 import typing

 import pydantic
+from deprecated import deprecated

 import mlrun.common.types

@@ -23,6 +24,12 @@ from .common import ImageBuilder
 from .object import ObjectKind, ObjectStatus


+@deprecated(
+    version="1.7.0",
+    reason="mlrun.common.schemas.ProjectsFormat is deprecated and will be removed in 1.9.0. "
+    "Use mlrun.common.formatters.ProjectFormat instead.",
+    category=FutureWarning,
+)
 class ProjectsFormat(mlrun.common.types.StrEnum):
     full = "full"
     name_only = "name_only"
@@ -87,6 +94,30 @@ class ProjectSpec(pydantic.BaseModel):
     custom_packagers: typing.Optional[list[tuple[str, bool]]] = None
     default_image: typing.Optional[str] = None
     build: typing.Optional[ImageBuilder] = None
+    default_function_node_selector: typing.Optional[dict] = {}
+
+    class Config:
+        extra = pydantic.Extra.allow
+
+
+class ProjectSpecOut(pydantic.BaseModel):
+    description: typing.Optional[str] = None
+    owner: typing.Optional[str] = None
+    goals: typing.Optional[str] = None
+    params: typing.Optional[dict] = {}
+    functions: typing.Optional[list] = []
+    workflows: typing.Optional[list] = []
+    artifacts: typing.Optional[list] = []
+    artifact_path: typing.Optional[str] = None
+    conda: typing.Optional[str] = None
+    source: typing.Optional[str] = None
+    subpath: typing.Optional[str] = None
+    origin_url: typing.Optional[str] = None
+    desired_state: typing.Optional[ProjectDesiredState] = ProjectDesiredState.online
+    custom_packagers: typing.Optional[list[tuple[str, bool]]] = None
+    default_image: typing.Optional[str] = None
+    build: typing.Any = None
+    default_function_node_selector: typing.Optional[dict] = {}

     class Config:
         extra = pydantic.Extra.allow
@@ -99,6 +130,15 @@ class Project(pydantic.BaseModel):
     status: ObjectStatus = ObjectStatus()


+# The reason we have a different schema for the response model is that we don't want to validate project.spec.build in
+# the response as the validation was added late and there may be corrupted values in the DB.
+class ProjectOut(pydantic.BaseModel):
+    kind: ObjectKind = pydantic.Field(ObjectKind.project, const=True)
+    metadata: ProjectMetadata
+    spec: ProjectSpecOut = ProjectSpecOut()
+    status: ObjectStatus = ObjectStatus()
+
+
 class ProjectOwner(pydantic.BaseModel):
     username: str
     access_key: str
@@ -106,30 +146,41 @@ class ProjectOwner(pydantic.BaseModel):

 class ProjectSummary(pydantic.BaseModel):
     name: str
-    files_count: int
-    feature_sets_count: int
-    models_count: int
-    runs_failed_recent_count: int
-    runs_running_count: int
-    schedules_count: int
+    files_count: int = 0
+    feature_sets_count: int = 0
+    models_count: int = 0
+    runs_completed_recent_count: int = 0
+    runs_failed_recent_count: int = 0
+    runs_running_count: int = 0
+    distinct_schedules_count: int = 0
+    distinct_scheduled_jobs_pending_count: int = 0
+    distinct_scheduled_pipelines_pending_count: int = 0
+    pipelines_completed_recent_count: typing.Optional[int] = None
+    pipelines_failed_recent_count: typing.Optional[int] = None
     pipelines_running_count: typing.Optional[int] = None
+    updated: typing.Optional[datetime.datetime] = None


 class IguazioProject(pydantic.BaseModel):
     data: dict


+# The format query param controls the project type used:
+# full - ProjectOut
+# name_only - str
+# summary - ProjectSummary
+# leader - currently only IguazioProject supported
+# The way pydantic handles typing.Union is that it takes the object and tries to coerce it to be the types of the
+# union by the definition order. Therefore, we can't currently add generic dict for all leader formats, but we need
+# to add a specific classes for them. it's frustrating but couldn't find other workaround, see:
+# https://github.com/samuelcolvin/pydantic/issues/1423, https://github.com/samuelcolvin/pydantic/issues/619
+ProjectOutput = typing.TypeVar(
+    "ProjectOutput", ProjectOut, str, ProjectSummary, IguazioProject
+)
+
+
 class ProjectsOutput(pydantic.BaseModel):
-    # The format query param controls the project type used:
-    # full - Project
-    # name_only - str
-    # summary - ProjectSummary
-    # leader - currently only IguazioProject supported
-    # The way pydantic handles typing.Union is that it takes the object and tries to coerce it to be the types of the
-    # union by the definition order. Therefore we can't currently add generic dict for all leader formats, but we need
-    # to add a specific classes for them. it's frustrating but couldn't find other workaround, see:
-    # https://github.com/samuelcolvin/pydantic/issues/1423, https://github.com/samuelcolvin/pydantic/issues/619
-    projects: list[typing.Union[Project, str, ProjectSummary, IguazioProject]]
+    projects: list[ProjectOutput]


 class ProjectSummariesOutput(pydantic.BaseModel):
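
The comment above ProjectOutput explains why the response union cannot simply include a generic dict: pydantic v1 tries Union members in definition order and keeps the first one that validates. A small standalone sketch of that pitfall (Summary, LooseFirst, and StrictFirst are invented names for illustration):

    import typing

    import pydantic


    class Summary(pydantic.BaseModel):
        name: str


    class LooseFirst(pydantic.BaseModel):
        # dict is tried first, so it swallows payloads meant for Summary.
        items: list[typing.Union[dict, Summary]]


    class StrictFirst(pydantic.BaseModel):
        # Summary is tried first, so matching payloads become Summary objects.
        items: list[typing.Union[Summary, dict]]


    payload = {"items": [{"name": "proj-1"}]}
    print(type(LooseFirst.parse_obj(payload).items[0]).__name__)   # dict
    print(type(StrictFirst.parse_obj(payload).items[0]).__name__)  # Summary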
mlrun/common/schemas/runs.py CHANGED
@@ -15,9 +15,26 @@
 import typing

 import pydantic
+from deprecated import deprecated
+
+import mlrun.common.types


 class RunIdentifier(pydantic.BaseModel):
     kind: typing.Literal["run"] = "run"
     uid: typing.Optional[str]
     iter: typing.Optional[int]
+
+
+@deprecated(
+    version="1.7.0",
+    reason="mlrun.common.schemas.RunsFormat is deprecated and will be removed in 1.9.0. "
+    "Use mlrun.common.formatters.RunFormat instead.",
+    category=FutureWarning,
+)
+class RunsFormat(mlrun.common.types.StrEnum):
+    # No enrichment, data is pulled as-is from the database.
+    standard = "standard"
+
+    # Performs run enrichment, including the run's artifacts. Only available for the `get` run API.
+    full = "full"
mlrun/common/schemas/schedule.py CHANGED
@@ -96,7 +96,7 @@ class ScheduleUpdate(BaseModel):
     scheduled_object: Optional[Any]
     cron_trigger: Optional[Union[str, ScheduleCronTrigger]]
     desired_state: Optional[str]
-    labels: Optional[dict] = {}
+    labels: Optional[dict] = None
     concurrency_limit: Optional[int]
     credentials: Credentials = Credentials()

mlrun/common/schemas/workflow.py CHANGED
@@ -16,8 +16,9 @@ import typing

 import pydantic

-from .notification import Notification
-from .schedule import ScheduleCronTrigger
+from mlrun.common.schemas.notification import Notification
+from mlrun.common.schemas.schedule import ScheduleCronTrigger
+from mlrun.common.types import StrEnum


 class WorkflowSpec(pydantic.BaseModel):
@@ -32,6 +33,7 @@ class WorkflowSpec(pydantic.BaseModel):
     schedule: typing.Union[str, ScheduleCronTrigger] = None
     run_local: typing.Optional[bool] = None
     image: typing.Optional[str] = None
+    workflow_runner_node_selector: typing.Optional[dict[str, str]] = None


 class WorkflowRequest(pydantic.BaseModel):
@@ -54,3 +56,9 @@ class WorkflowResponse(pydantic.BaseModel):

 class GetWorkflowResponse(pydantic.BaseModel):
     workflow_id: str = None
+
+
+class EngineType(StrEnum):
+    LOCAL = "local"
+    REMOTE = "remote"
+    KFP = "kfp"
mlrun/common/types.py CHANGED
@@ -11,7 +11,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#

 import enum

@@ -23,3 +22,17 @@ class StrEnum(str, enum.Enum):

     def __repr__(self):
         return self.value
+
+
+# Partial backport from Python 3.11
+# https://docs.python.org/3/library/http.html#http.HTTPMethod
+class HTTPMethod(StrEnum):
+    GET = "GET"
+    POST = "POST"
+    DELETE = "DELETE"
+    PATCH = "PATCH"
+
+
+class Operation(StrEnum):
+    ADD = "add"
+    REMOVE = "remove"