mlrun 1.3.3rc1__py3-none-any.whl → 1.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mlrun might be problematic.

Files changed (444)
  1. mlrun/__init__.py +3 -3
  2. mlrun/__main__.py +79 -37
  3. mlrun/api/__init__.py +1 -1
  4. mlrun/api/api/__init__.py +1 -1
  5. mlrun/api/api/api.py +4 -4
  6. mlrun/api/api/deps.py +10 -21
  7. mlrun/api/api/endpoints/__init__.py +1 -1
  8. mlrun/api/api/endpoints/artifacts.py +64 -36
  9. mlrun/api/api/endpoints/auth.py +4 -4
  10. mlrun/api/api/endpoints/background_tasks.py +11 -11
  11. mlrun/api/api/endpoints/client_spec.py +5 -5
  12. mlrun/api/api/endpoints/clusterization_spec.py +6 -4
  13. mlrun/api/api/endpoints/feature_store.py +124 -115
  14. mlrun/api/api/endpoints/files.py +22 -14
  15. mlrun/api/api/endpoints/frontend_spec.py +28 -21
  16. mlrun/api/api/endpoints/functions.py +142 -87
  17. mlrun/api/api/endpoints/grafana_proxy.py +89 -442
  18. mlrun/api/api/endpoints/healthz.py +20 -7
  19. mlrun/api/api/endpoints/hub.py +320 -0
  20. mlrun/api/api/endpoints/internal/__init__.py +1 -1
  21. mlrun/api/api/endpoints/internal/config.py +1 -1
  22. mlrun/api/api/endpoints/internal/memory_reports.py +9 -9
  23. mlrun/api/api/endpoints/logs.py +11 -11
  24. mlrun/api/api/endpoints/model_endpoints.py +74 -70
  25. mlrun/api/api/endpoints/operations.py +13 -9
  26. mlrun/api/api/endpoints/pipelines.py +93 -88
  27. mlrun/api/api/endpoints/projects.py +35 -35
  28. mlrun/api/api/endpoints/runs.py +69 -27
  29. mlrun/api/api/endpoints/runtime_resources.py +28 -28
  30. mlrun/api/api/endpoints/schedules.py +98 -41
  31. mlrun/api/api/endpoints/secrets.py +37 -32
  32. mlrun/api/api/endpoints/submit.py +12 -12
  33. mlrun/api/api/endpoints/tags.py +20 -22
  34. mlrun/api/api/utils.py +251 -42
  35. mlrun/api/constants.py +1 -1
  36. mlrun/api/crud/__init__.py +18 -15
  37. mlrun/api/crud/artifacts.py +10 -10
  38. mlrun/api/crud/client_spec.py +4 -4
  39. mlrun/api/crud/clusterization_spec.py +3 -3
  40. mlrun/api/crud/feature_store.py +54 -46
  41. mlrun/api/crud/functions.py +3 -3
  42. mlrun/api/crud/hub.py +312 -0
  43. mlrun/api/crud/logs.py +11 -9
  44. mlrun/api/crud/model_monitoring/__init__.py +3 -3
  45. mlrun/api/crud/model_monitoring/grafana.py +435 -0
  46. mlrun/api/crud/model_monitoring/model_endpoints.py +352 -129
  47. mlrun/api/crud/notifications.py +149 -0
  48. mlrun/api/crud/pipelines.py +67 -52
  49. mlrun/api/crud/projects.py +51 -23
  50. mlrun/api/crud/runs.py +7 -5
  51. mlrun/api/crud/runtime_resources.py +13 -13
  52. mlrun/api/{db/filedb → crud/runtimes}/__init__.py +1 -1
  53. mlrun/api/crud/runtimes/nuclio/__init__.py +14 -0
  54. mlrun/api/crud/runtimes/nuclio/function.py +505 -0
  55. mlrun/api/crud/runtimes/nuclio/helpers.py +310 -0
  56. mlrun/api/crud/secrets.py +88 -46
  57. mlrun/api/crud/tags.py +5 -5
  58. mlrun/api/db/__init__.py +1 -1
  59. mlrun/api/db/base.py +102 -54
  60. mlrun/api/db/init_db.py +2 -3
  61. mlrun/api/db/session.py +4 -12
  62. mlrun/api/db/sqldb/__init__.py +1 -1
  63. mlrun/api/db/sqldb/db.py +439 -196
  64. mlrun/api/db/sqldb/helpers.py +1 -1
  65. mlrun/api/db/sqldb/models/__init__.py +3 -3
  66. mlrun/api/db/sqldb/models/models_mysql.py +82 -64
  67. mlrun/api/db/sqldb/models/models_sqlite.py +76 -64
  68. mlrun/api/db/sqldb/session.py +27 -20
  69. mlrun/api/initial_data.py +82 -24
  70. mlrun/api/launcher.py +196 -0
  71. mlrun/api/main.py +91 -22
  72. mlrun/api/middlewares.py +6 -5
  73. mlrun/api/migrations_mysql/env.py +1 -1
  74. mlrun/api/migrations_mysql/versions/28383af526f3_market_place_to_hub.py +40 -0
  75. mlrun/api/migrations_mysql/versions/32bae1b0e29c_increase_timestamp_fields_precision.py +1 -1
  76. mlrun/api/migrations_mysql/versions/4903aef6a91d_tag_foreign_key_and_cascades.py +1 -1
  77. mlrun/api/migrations_mysql/versions/5f1351c88a19_adding_background_tasks_table.py +1 -1
  78. mlrun/api/migrations_mysql/versions/88e656800d6a_add_requested_logs_column_and_index_to_.py +1 -1
  79. mlrun/api/migrations_mysql/versions/9d16de5f03a7_adding_data_versions_table.py +1 -1
  80. mlrun/api/migrations_mysql/versions/b86f5b53f3d7_adding_name_and_updated_to_runs_table.py +1 -1
  81. mlrun/api/migrations_mysql/versions/c4af40b0bf61_init.py +1 -1
  82. mlrun/api/migrations_mysql/versions/c905d15bd91d_notifications.py +72 -0
  83. mlrun/api/migrations_mysql/versions/ee041e8fdaa0_adding_next_run_time_column_to_schedule_.py +1 -1
  84. mlrun/api/migrations_sqlite/env.py +1 -1
  85. mlrun/api/migrations_sqlite/versions/11f8dd2dc9fe_init.py +1 -1
  86. mlrun/api/migrations_sqlite/versions/1c954f8cb32d_schedule_last_run_uri.py +1 -1
  87. mlrun/api/migrations_sqlite/versions/2b6d23c715aa_adding_feature_sets.py +1 -1
  88. mlrun/api/migrations_sqlite/versions/4acd9430b093_market_place_to_hub.py +77 -0
  89. mlrun/api/migrations_sqlite/versions/6401142f2d7c_adding_next_run_time_column_to_schedule_.py +1 -1
  90. mlrun/api/migrations_sqlite/versions/64d90a1a69bc_adding_background_tasks_table.py +1 -1
  91. mlrun/api/migrations_sqlite/versions/803438ecd005_add_requested_logs_column_to_runs.py +1 -1
  92. mlrun/api/migrations_sqlite/versions/863114f0c659_refactoring_feature_set.py +1 -1
  93. mlrun/api/migrations_sqlite/versions/959ae00528ad_notifications.py +63 -0
  94. mlrun/api/migrations_sqlite/versions/accf9fc83d38_adding_data_versions_table.py +1 -1
  95. mlrun/api/migrations_sqlite/versions/b68e8e897a28_schedule_labels.py +1 -1
  96. mlrun/api/migrations_sqlite/versions/bcd0c1f9720c_adding_project_labels.py +1 -1
  97. mlrun/api/migrations_sqlite/versions/cf21882f938e_schedule_id.py +1 -1
  98. mlrun/api/migrations_sqlite/versions/d781f58f607f_tag_object_name_string.py +1 -1
  99. mlrun/api/migrations_sqlite/versions/deac06871ace_adding_marketplace_sources_table.py +1 -1
  100. mlrun/api/migrations_sqlite/versions/e1dd5983c06b_schedule_concurrency_limit.py +1 -1
  101. mlrun/api/migrations_sqlite/versions/e5594ed3ab53_adding_name_and_updated_to_runs_table.py +1 -1
  102. mlrun/api/migrations_sqlite/versions/f4249b4ba6fa_adding_feature_vectors.py +1 -1
  103. mlrun/api/migrations_sqlite/versions/f7b5a1a03629_adding_feature_labels.py +1 -1
  104. mlrun/api/schemas/__init__.py +216 -138
  105. mlrun/api/utils/__init__.py +1 -1
  106. mlrun/api/utils/asyncio.py +1 -1
  107. mlrun/api/utils/auth/__init__.py +1 -1
  108. mlrun/api/utils/auth/providers/__init__.py +1 -1
  109. mlrun/api/utils/auth/providers/base.py +7 -7
  110. mlrun/api/utils/auth/providers/nop.py +6 -7
  111. mlrun/api/utils/auth/providers/opa.py +17 -17
  112. mlrun/api/utils/auth/verifier.py +36 -34
  113. mlrun/api/utils/background_tasks.py +24 -24
  114. mlrun/{builder.py → api/utils/builder.py} +216 -123
  115. mlrun/api/utils/clients/__init__.py +1 -1
  116. mlrun/api/utils/clients/chief.py +19 -4
  117. mlrun/api/utils/clients/iguazio.py +106 -60
  118. mlrun/api/utils/clients/log_collector.py +1 -1
  119. mlrun/api/utils/clients/nuclio.py +23 -23
  120. mlrun/api/utils/clients/protocols/grpc.py +2 -2
  121. mlrun/api/utils/db/__init__.py +1 -1
  122. mlrun/api/utils/db/alembic.py +1 -1
  123. mlrun/api/utils/db/backup.py +1 -1
  124. mlrun/api/utils/db/mysql.py +24 -25
  125. mlrun/api/utils/db/sql_collation.py +1 -1
  126. mlrun/api/utils/db/sqlite_migration.py +2 -2
  127. mlrun/api/utils/events/__init__.py +14 -0
  128. mlrun/api/utils/events/base.py +57 -0
  129. mlrun/api/utils/events/events_factory.py +41 -0
  130. mlrun/api/utils/events/iguazio.py +217 -0
  131. mlrun/api/utils/events/nop.py +55 -0
  132. mlrun/api/utils/helpers.py +16 -13
  133. mlrun/api/utils/memory_reports.py +1 -1
  134. mlrun/api/utils/periodic.py +6 -3
  135. mlrun/api/utils/projects/__init__.py +1 -1
  136. mlrun/api/utils/projects/follower.py +33 -33
  137. mlrun/api/utils/projects/leader.py +36 -34
  138. mlrun/api/utils/projects/member.py +27 -27
  139. mlrun/api/utils/projects/remotes/__init__.py +1 -1
  140. mlrun/api/utils/projects/remotes/follower.py +13 -13
  141. mlrun/api/utils/projects/remotes/leader.py +10 -10
  142. mlrun/api/utils/projects/remotes/nop_follower.py +27 -21
  143. mlrun/api/utils/projects/remotes/nop_leader.py +17 -16
  144. mlrun/api/utils/scheduler.py +140 -51
  145. mlrun/api/utils/singletons/__init__.py +1 -1
  146. mlrun/api/utils/singletons/db.py +9 -15
  147. mlrun/api/utils/singletons/k8s.py +677 -5
  148. mlrun/api/utils/singletons/logs_dir.py +1 -1
  149. mlrun/api/utils/singletons/project_member.py +1 -1
  150. mlrun/api/utils/singletons/scheduler.py +1 -1
  151. mlrun/artifacts/__init__.py +2 -2
  152. mlrun/artifacts/base.py +8 -2
  153. mlrun/artifacts/dataset.py +5 -3
  154. mlrun/artifacts/manager.py +7 -1
  155. mlrun/artifacts/model.py +15 -4
  156. mlrun/artifacts/plots.py +1 -1
  157. mlrun/common/__init__.py +1 -1
  158. mlrun/common/constants.py +15 -0
  159. mlrun/common/model_monitoring.py +209 -0
  160. mlrun/common/schemas/__init__.py +167 -0
  161. mlrun/{api → common}/schemas/artifact.py +13 -14
  162. mlrun/{api → common}/schemas/auth.py +10 -8
  163. mlrun/{api → common}/schemas/background_task.py +3 -3
  164. mlrun/{api → common}/schemas/client_spec.py +1 -1
  165. mlrun/{api → common}/schemas/clusterization_spec.py +3 -3
  166. mlrun/{api → common}/schemas/constants.py +21 -8
  167. mlrun/common/schemas/events.py +36 -0
  168. mlrun/{api → common}/schemas/feature_store.py +2 -1
  169. mlrun/{api → common}/schemas/frontend_spec.py +7 -6
  170. mlrun/{api → common}/schemas/function.py +5 -5
  171. mlrun/{api → common}/schemas/http.py +3 -3
  172. mlrun/common/schemas/hub.py +134 -0
  173. mlrun/{api → common}/schemas/k8s.py +3 -3
  174. mlrun/{api → common}/schemas/memory_reports.py +1 -1
  175. mlrun/common/schemas/model_endpoints.py +342 -0
  176. mlrun/common/schemas/notification.py +57 -0
  177. mlrun/{api → common}/schemas/object.py +6 -6
  178. mlrun/{api → common}/schemas/pipeline.py +3 -3
  179. mlrun/{api → common}/schemas/project.py +6 -5
  180. mlrun/common/schemas/regex.py +24 -0
  181. mlrun/common/schemas/runs.py +30 -0
  182. mlrun/{api → common}/schemas/runtime_resource.py +3 -3
  183. mlrun/{api → common}/schemas/schedule.py +19 -7
  184. mlrun/{api → common}/schemas/secret.py +3 -3
  185. mlrun/{api → common}/schemas/tag.py +2 -2
  186. mlrun/common/types.py +25 -0
  187. mlrun/config.py +152 -20
  188. mlrun/data_types/__init__.py +7 -2
  189. mlrun/data_types/data_types.py +4 -2
  190. mlrun/data_types/infer.py +1 -1
  191. mlrun/data_types/spark.py +10 -3
  192. mlrun/datastore/__init__.py +10 -3
  193. mlrun/datastore/azure_blob.py +1 -1
  194. mlrun/datastore/base.py +185 -53
  195. mlrun/datastore/datastore.py +1 -1
  196. mlrun/datastore/filestore.py +1 -1
  197. mlrun/datastore/google_cloud_storage.py +1 -1
  198. mlrun/datastore/inmem.py +4 -1
  199. mlrun/datastore/redis.py +1 -1
  200. mlrun/datastore/s3.py +1 -1
  201. mlrun/datastore/sources.py +192 -70
  202. mlrun/datastore/spark_udf.py +44 -0
  203. mlrun/datastore/store_resources.py +4 -4
  204. mlrun/datastore/targets.py +115 -45
  205. mlrun/datastore/utils.py +127 -5
  206. mlrun/datastore/v3io.py +1 -1
  207. mlrun/datastore/wasbfs/__init__.py +1 -1
  208. mlrun/datastore/wasbfs/fs.py +1 -1
  209. mlrun/db/__init__.py +7 -5
  210. mlrun/db/base.py +112 -68
  211. mlrun/db/httpdb.py +445 -277
  212. mlrun/db/nopdb.py +491 -0
  213. mlrun/db/sqldb.py +112 -65
  214. mlrun/errors.py +6 -1
  215. mlrun/execution.py +44 -22
  216. mlrun/feature_store/__init__.py +1 -1
  217. mlrun/feature_store/api.py +143 -95
  218. mlrun/feature_store/common.py +16 -20
  219. mlrun/feature_store/feature_set.py +42 -12
  220. mlrun/feature_store/feature_vector.py +32 -21
  221. mlrun/feature_store/ingestion.py +9 -12
  222. mlrun/feature_store/retrieval/__init__.py +3 -2
  223. mlrun/feature_store/retrieval/base.py +388 -66
  224. mlrun/feature_store/retrieval/dask_merger.py +63 -151
  225. mlrun/feature_store/retrieval/job.py +30 -12
  226. mlrun/feature_store/retrieval/local_merger.py +40 -133
  227. mlrun/feature_store/retrieval/spark_merger.py +129 -127
  228. mlrun/feature_store/retrieval/storey_merger.py +173 -0
  229. mlrun/feature_store/steps.py +132 -15
  230. mlrun/features.py +8 -3
  231. mlrun/frameworks/__init__.py +1 -1
  232. mlrun/frameworks/_common/__init__.py +1 -1
  233. mlrun/frameworks/_common/artifacts_library.py +1 -1
  234. mlrun/frameworks/_common/mlrun_interface.py +1 -1
  235. mlrun/frameworks/_common/model_handler.py +1 -1
  236. mlrun/frameworks/_common/plan.py +1 -1
  237. mlrun/frameworks/_common/producer.py +1 -1
  238. mlrun/frameworks/_common/utils.py +1 -1
  239. mlrun/frameworks/_dl_common/__init__.py +1 -1
  240. mlrun/frameworks/_dl_common/loggers/__init__.py +1 -1
  241. mlrun/frameworks/_dl_common/loggers/logger.py +1 -1
  242. mlrun/frameworks/_dl_common/loggers/mlrun_logger.py +1 -1
  243. mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +1 -1
  244. mlrun/frameworks/_dl_common/model_handler.py +1 -1
  245. mlrun/frameworks/_dl_common/utils.py +1 -1
  246. mlrun/frameworks/_ml_common/__init__.py +1 -1
  247. mlrun/frameworks/_ml_common/artifacts_library.py +1 -1
  248. mlrun/frameworks/_ml_common/loggers/__init__.py +1 -1
  249. mlrun/frameworks/_ml_common/loggers/logger.py +1 -1
  250. mlrun/frameworks/_ml_common/loggers/mlrun_logger.py +1 -1
  251. mlrun/frameworks/_ml_common/model_handler.py +1 -1
  252. mlrun/frameworks/_ml_common/pkl_model_server.py +13 -1
  253. mlrun/frameworks/_ml_common/plan.py +1 -1
  254. mlrun/frameworks/_ml_common/plans/__init__.py +1 -1
  255. mlrun/frameworks/_ml_common/plans/calibration_curve_plan.py +1 -6
  256. mlrun/frameworks/_ml_common/plans/confusion_matrix_plan.py +1 -1
  257. mlrun/frameworks/_ml_common/plans/dataset_plan.py +1 -1
  258. mlrun/frameworks/_ml_common/plans/feature_importance_plan.py +1 -1
  259. mlrun/frameworks/_ml_common/plans/roc_curve_plan.py +1 -1
  260. mlrun/frameworks/_ml_common/producer.py +1 -1
  261. mlrun/frameworks/_ml_common/utils.py +1 -1
  262. mlrun/frameworks/auto_mlrun/__init__.py +1 -1
  263. mlrun/frameworks/auto_mlrun/auto_mlrun.py +1 -1
  264. mlrun/frameworks/huggingface/__init__.py +1 -1
  265. mlrun/frameworks/huggingface/model_server.py +1 -1
  266. mlrun/frameworks/lgbm/__init__.py +1 -1
  267. mlrun/frameworks/lgbm/callbacks/__init__.py +1 -1
  268. mlrun/frameworks/lgbm/callbacks/callback.py +1 -1
  269. mlrun/frameworks/lgbm/callbacks/logging_callback.py +1 -1
  270. mlrun/frameworks/lgbm/callbacks/mlrun_logging_callback.py +1 -1
  271. mlrun/frameworks/lgbm/mlrun_interfaces/__init__.py +1 -1
  272. mlrun/frameworks/lgbm/mlrun_interfaces/booster_mlrun_interface.py +1 -1
  273. mlrun/frameworks/lgbm/mlrun_interfaces/mlrun_interface.py +1 -1
  274. mlrun/frameworks/lgbm/mlrun_interfaces/model_mlrun_interface.py +1 -1
  275. mlrun/frameworks/lgbm/model_handler.py +1 -1
  276. mlrun/frameworks/lgbm/model_server.py +1 -1
  277. mlrun/frameworks/lgbm/utils.py +1 -1
  278. mlrun/frameworks/onnx/__init__.py +1 -1
  279. mlrun/frameworks/onnx/dataset.py +1 -1
  280. mlrun/frameworks/onnx/mlrun_interface.py +1 -1
  281. mlrun/frameworks/onnx/model_handler.py +1 -1
  282. mlrun/frameworks/onnx/model_server.py +1 -1
  283. mlrun/frameworks/parallel_coordinates.py +1 -1
  284. mlrun/frameworks/pytorch/__init__.py +1 -1
  285. mlrun/frameworks/pytorch/callbacks/__init__.py +1 -1
  286. mlrun/frameworks/pytorch/callbacks/callback.py +1 -1
  287. mlrun/frameworks/pytorch/callbacks/logging_callback.py +1 -1
  288. mlrun/frameworks/pytorch/callbacks/mlrun_logging_callback.py +1 -1
  289. mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +1 -1
  290. mlrun/frameworks/pytorch/callbacks_handler.py +1 -1
  291. mlrun/frameworks/pytorch/mlrun_interface.py +1 -1
  292. mlrun/frameworks/pytorch/model_handler.py +1 -1
  293. mlrun/frameworks/pytorch/model_server.py +1 -1
  294. mlrun/frameworks/pytorch/utils.py +1 -1
  295. mlrun/frameworks/sklearn/__init__.py +1 -1
  296. mlrun/frameworks/sklearn/estimator.py +1 -1
  297. mlrun/frameworks/sklearn/metric.py +1 -1
  298. mlrun/frameworks/sklearn/metrics_library.py +1 -1
  299. mlrun/frameworks/sklearn/mlrun_interface.py +1 -1
  300. mlrun/frameworks/sklearn/model_handler.py +1 -1
  301. mlrun/frameworks/sklearn/utils.py +1 -1
  302. mlrun/frameworks/tf_keras/__init__.py +1 -1
  303. mlrun/frameworks/tf_keras/callbacks/__init__.py +1 -1
  304. mlrun/frameworks/tf_keras/callbacks/logging_callback.py +1 -1
  305. mlrun/frameworks/tf_keras/callbacks/mlrun_logging_callback.py +1 -1
  306. mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +1 -1
  307. mlrun/frameworks/tf_keras/mlrun_interface.py +1 -1
  308. mlrun/frameworks/tf_keras/model_handler.py +1 -1
  309. mlrun/frameworks/tf_keras/model_server.py +1 -1
  310. mlrun/frameworks/tf_keras/utils.py +1 -1
  311. mlrun/frameworks/xgboost/__init__.py +1 -1
  312. mlrun/frameworks/xgboost/mlrun_interface.py +1 -1
  313. mlrun/frameworks/xgboost/model_handler.py +1 -1
  314. mlrun/frameworks/xgboost/utils.py +1 -1
  315. mlrun/k8s_utils.py +14 -765
  316. mlrun/kfpops.py +14 -17
  317. mlrun/launcher/__init__.py +13 -0
  318. mlrun/launcher/base.py +406 -0
  319. mlrun/launcher/client.py +159 -0
  320. mlrun/launcher/factory.py +50 -0
  321. mlrun/launcher/local.py +276 -0
  322. mlrun/launcher/remote.py +178 -0
  323. mlrun/lists.py +10 -2
  324. mlrun/mlutils/__init__.py +1 -1
  325. mlrun/mlutils/data.py +1 -1
  326. mlrun/mlutils/models.py +1 -1
  327. mlrun/mlutils/plots.py +1 -1
  328. mlrun/model.py +252 -14
  329. mlrun/model_monitoring/__init__.py +41 -0
  330. mlrun/model_monitoring/features_drift_table.py +1 -1
  331. mlrun/model_monitoring/helpers.py +123 -38
  332. mlrun/model_monitoring/model_endpoint.py +144 -0
  333. mlrun/model_monitoring/model_monitoring_batch.py +310 -259
  334. mlrun/model_monitoring/stores/__init__.py +106 -0
  335. mlrun/model_monitoring/stores/kv_model_endpoint_store.py +448 -0
  336. mlrun/model_monitoring/stores/model_endpoint_store.py +147 -0
  337. mlrun/model_monitoring/stores/models/__init__.py +23 -0
  338. mlrun/model_monitoring/stores/models/base.py +18 -0
  339. mlrun/model_monitoring/stores/models/mysql.py +100 -0
  340. mlrun/model_monitoring/stores/models/sqlite.py +98 -0
  341. mlrun/model_monitoring/stores/sql_model_endpoint_store.py +370 -0
  342. mlrun/model_monitoring/stream_processing_fs.py +239 -271
  343. mlrun/package/__init__.py +163 -0
  344. mlrun/package/context_handler.py +325 -0
  345. mlrun/package/errors.py +47 -0
  346. mlrun/package/packager.py +298 -0
  347. mlrun/{runtimes/package → package/packagers}/__init__.py +3 -1
  348. mlrun/package/packagers/default_packager.py +422 -0
  349. mlrun/package/packagers/numpy_packagers.py +612 -0
  350. mlrun/package/packagers/pandas_packagers.py +968 -0
  351. mlrun/package/packagers/python_standard_library_packagers.py +616 -0
  352. mlrun/package/packagers_manager.py +786 -0
  353. mlrun/package/utils/__init__.py +53 -0
  354. mlrun/package/utils/_archiver.py +226 -0
  355. mlrun/package/utils/_formatter.py +211 -0
  356. mlrun/package/utils/_pickler.py +234 -0
  357. mlrun/package/utils/_supported_format.py +71 -0
  358. mlrun/package/utils/log_hint_utils.py +93 -0
  359. mlrun/package/utils/type_hint_utils.py +298 -0
  360. mlrun/platforms/__init__.py +1 -1
  361. mlrun/platforms/iguazio.py +34 -2
  362. mlrun/platforms/other.py +1 -1
  363. mlrun/projects/__init__.py +1 -1
  364. mlrun/projects/operations.py +14 -9
  365. mlrun/projects/pipelines.py +31 -13
  366. mlrun/projects/project.py +762 -238
  367. mlrun/render.py +49 -19
  368. mlrun/run.py +57 -326
  369. mlrun/runtimes/__init__.py +3 -9
  370. mlrun/runtimes/base.py +247 -784
  371. mlrun/runtimes/constants.py +1 -1
  372. mlrun/runtimes/daskjob.py +45 -41
  373. mlrun/runtimes/funcdoc.py +43 -7
  374. mlrun/runtimes/function.py +66 -656
  375. mlrun/runtimes/function_reference.py +1 -1
  376. mlrun/runtimes/generators.py +1 -1
  377. mlrun/runtimes/kubejob.py +99 -116
  378. mlrun/runtimes/local.py +59 -66
  379. mlrun/runtimes/mpijob/__init__.py +1 -1
  380. mlrun/runtimes/mpijob/abstract.py +13 -15
  381. mlrun/runtimes/mpijob/v1.py +3 -1
  382. mlrun/runtimes/mpijob/v1alpha1.py +1 -1
  383. mlrun/runtimes/nuclio.py +1 -1
  384. mlrun/runtimes/pod.py +51 -26
  385. mlrun/runtimes/remotesparkjob.py +3 -1
  386. mlrun/runtimes/serving.py +12 -4
  387. mlrun/runtimes/sparkjob/__init__.py +1 -2
  388. mlrun/runtimes/sparkjob/abstract.py +44 -31
  389. mlrun/runtimes/sparkjob/spark3job.py +11 -9
  390. mlrun/runtimes/utils.py +61 -42
  391. mlrun/secrets.py +16 -18
  392. mlrun/serving/__init__.py +3 -2
  393. mlrun/serving/merger.py +1 -1
  394. mlrun/serving/remote.py +1 -1
  395. mlrun/serving/routers.py +39 -42
  396. mlrun/serving/server.py +23 -13
  397. mlrun/serving/serving_wrapper.py +1 -1
  398. mlrun/serving/states.py +172 -39
  399. mlrun/serving/utils.py +1 -1
  400. mlrun/serving/v1_serving.py +1 -1
  401. mlrun/serving/v2_serving.py +29 -21
  402. mlrun/utils/__init__.py +1 -2
  403. mlrun/utils/async_http.py +8 -1
  404. mlrun/utils/azure_vault.py +1 -1
  405. mlrun/utils/clones.py +2 -2
  406. mlrun/utils/condition_evaluator.py +65 -0
  407. mlrun/utils/db.py +52 -0
  408. mlrun/utils/helpers.py +188 -13
  409. mlrun/utils/http.py +89 -54
  410. mlrun/utils/logger.py +48 -8
  411. mlrun/utils/model_monitoring.py +132 -100
  412. mlrun/utils/notifications/__init__.py +1 -1
  413. mlrun/utils/notifications/notification/__init__.py +8 -6
  414. mlrun/utils/notifications/notification/base.py +20 -14
  415. mlrun/utils/notifications/notification/console.py +7 -4
  416. mlrun/utils/notifications/notification/git.py +36 -19
  417. mlrun/utils/notifications/notification/ipython.py +10 -8
  418. mlrun/utils/notifications/notification/slack.py +18 -13
  419. mlrun/utils/notifications/notification_pusher.py +377 -56
  420. mlrun/utils/regex.py +6 -1
  421. mlrun/utils/singleton.py +1 -1
  422. mlrun/utils/v3io_clients.py +1 -1
  423. mlrun/utils/vault.py +270 -269
  424. mlrun/utils/version/__init__.py +1 -1
  425. mlrun/utils/version/version.json +2 -2
  426. mlrun/utils/version/version.py +1 -1
  427. {mlrun-1.3.3rc1.dist-info → mlrun-1.4.0.dist-info}/METADATA +16 -10
  428. mlrun-1.4.0.dist-info/RECORD +434 -0
  429. mlrun/api/api/endpoints/marketplace.py +0 -257
  430. mlrun/api/crud/marketplace.py +0 -221
  431. mlrun/api/crud/model_monitoring/model_endpoint_store.py +0 -847
  432. mlrun/api/db/filedb/db.py +0 -518
  433. mlrun/api/schemas/marketplace.py +0 -128
  434. mlrun/api/schemas/model_endpoints.py +0 -185
  435. mlrun/db/filedb.py +0 -891
  436. mlrun/feature_store/retrieval/online.py +0 -92
  437. mlrun/model_monitoring/constants.py +0 -67
  438. mlrun/runtimes/package/context_handler.py +0 -711
  439. mlrun/runtimes/sparkjob/spark2job.py +0 -59
  440. mlrun-1.3.3rc1.dist-info/RECORD +0 -381
  441. {mlrun-1.3.3rc1.dist-info → mlrun-1.4.0.dist-info}/LICENSE +0 -0
  442. {mlrun-1.3.3rc1.dist-info → mlrun-1.4.0.dist-info}/WHEEL +0 -0
  443. {mlrun-1.3.3rc1.dist-info → mlrun-1.4.0.dist-info}/entry_points.txt +0 -0
  444. {mlrun-1.3.3rc1.dist-info → mlrun-1.4.0.dist-info}/top_level.txt +0 -0
mlrun/api/db/sqldb/db.py CHANGED
@@ -1,4 +1,4 @@
1
- # Copyright 2018 Iguazio
1
+ # Copyright 2023 Iguazio
2
2
  #
3
3
  # Licensed under the Apache License, Version 2.0 (the "License");
4
4
  # you may not use this file except in compliance with the License.
@@ -32,8 +32,10 @@ import mlrun
32
32
  import mlrun.api.db.session
33
33
  import mlrun.api.utils.helpers
34
34
  import mlrun.api.utils.projects.remotes.follower
35
+ import mlrun.api.utils.singletons.k8s
36
+ import mlrun.common.schemas
35
37
  import mlrun.errors
36
- from mlrun.api import schemas
38
+ import mlrun.model
37
39
  from mlrun.api.db.base import DBInterface
38
40
  from mlrun.api.db.sqldb.helpers import (
39
41
  generate_query_predicate_for_name,
@@ -52,14 +54,15 @@ from mlrun.api.db.sqldb.models import (
52
54
  FeatureSet,
53
55
  FeatureVector,
54
56
  Function,
57
+ HubSource,
55
58
  Log,
56
- MarketplaceSource,
57
59
  Project,
58
60
  Run,
59
61
  Schedule,
60
62
  User,
61
63
  _labeled,
62
64
  _tagged,
65
+ _with_notifications,
63
66
  )
64
67
  from mlrun.config import config
65
68
  from mlrun.errors import err_to_str
@@ -323,13 +326,14 @@ class SQLDB(DBInterface):
323
326
  start_time_to=None,
324
327
  last_update_time_from=None,
325
328
  last_update_time_to=None,
326
- partition_by: schemas.RunPartitionByField = None,
329
+ partition_by: mlrun.common.schemas.RunPartitionByField = None,
327
330
  rows_per_partition: int = 1,
328
- partition_sort_by: schemas.SortField = None,
329
- partition_order: schemas.OrderType = schemas.OrderType.desc,
331
+ partition_sort_by: mlrun.common.schemas.SortField = None,
332
+ partition_order: mlrun.common.schemas.OrderType = mlrun.common.schemas.OrderType.desc,
330
333
  max_partitions: int = 0,
331
334
  requested_logs: bool = None,
332
335
  return_as_run_structs: bool = True,
336
+ with_notifications: bool = False,
333
337
  ):
334
338
  project = project or config.default_project
335
339
  query = self._find_runs(session, uid, project, labels)
@@ -359,7 +363,9 @@ class SQLDB(DBInterface):
359
363
  query = query.filter(Run.requested_logs == requested_logs)
360
364
  if partition_by:
361
365
  self._assert_partition_by_parameters(
362
- schemas.RunPartitionByField, partition_by, partition_sort_by
366
+ mlrun.common.schemas.RunPartitionByField,
367
+ partition_by,
368
+ partition_sort_by,
363
369
  )
364
370
  query = self._create_partitioned_query(
365
371
  session,
@@ -374,9 +380,28 @@ class SQLDB(DBInterface):
374
380
  if not return_as_run_structs:
375
381
  return query.all()
376
382
 
383
+ # Purposefully not using outer join to avoid returning runs without notifications
384
+ if with_notifications:
385
+ query = query.join(Run.Notification)
386
+
377
387
  runs = RunList()
378
388
  for run in query:
379
- runs.append(run.struct)
389
+ run_struct = run.struct
390
+ if with_notifications:
391
+ run_struct.setdefault("spec", {}).setdefault("notifications", [])
392
+ run_struct.setdefault("status", {}).setdefault("notifications", {})
393
+ for notification in run.notifications:
394
+ (
395
+ notification_spec,
396
+ notification_status,
397
+ ) = self._transform_notification_record_to_spec_and_status(
398
+ notification
399
+ )
400
+ run_struct["spec"]["notifications"].append(notification_spec)
401
+ run_struct["status"]["notifications"][
402
+ notification.name
403
+ ] = notification_status
404
+ runs.append(run_struct)
380
405
 
381
406
  return runs
382
407
 
@@ -438,7 +463,7 @@ class SQLDB(DBInterface):
438
463
  session: Session,
439
464
  project: str,
440
465
  tag: str,
441
- identifiers: typing.List[mlrun.api.schemas.ArtifactIdentifier],
466
+ identifiers: typing.List[mlrun.common.schemas.ArtifactIdentifier],
442
467
  ):
443
468
  # query all artifacts which match the identifiers
444
469
  artifacts = []
@@ -461,7 +486,7 @@ class SQLDB(DBInterface):
461
486
  session: Session,
462
487
  project: str,
463
488
  tag: str,
464
- identifiers: typing.List[mlrun.api.schemas.ArtifactIdentifier],
489
+ identifiers: typing.List[mlrun.common.schemas.ArtifactIdentifier],
465
490
  ):
466
491
  # query all artifacts which match the identifiers
467
492
  artifacts = []
@@ -478,7 +503,7 @@ class SQLDB(DBInterface):
478
503
  session: Session,
479
504
  project: str,
480
505
  tag: str,
481
- identifiers: typing.List[mlrun.api.schemas.ArtifactIdentifier],
506
+ identifiers: typing.List[mlrun.common.schemas.ArtifactIdentifier],
482
507
  ):
483
508
  # query all artifacts which match the identifiers
484
509
  artifacts = []
@@ -494,7 +519,7 @@ class SQLDB(DBInterface):
494
519
  self,
495
520
  session: Session,
496
521
  project_name: str,
497
- identifier: mlrun.api.schemas.ArtifactIdentifier,
522
+ identifier: mlrun.common.schemas.ArtifactIdentifier,
498
523
  ):
499
524
  return self.list_artifacts(
500
525
  session,
@@ -698,7 +723,7 @@ class SQLDB(DBInterface):
698
723
  since=None,
699
724
  until=None,
700
725
  kind=None,
701
- category: schemas.ArtifactCategories = None,
726
+ category: mlrun.common.schemas.ArtifactCategories = None,
702
727
  iter: int = None,
703
728
  best_iteration: bool = False,
704
729
  as_records: bool = False,
@@ -963,7 +988,31 @@ class SQLDB(DBInterface):
963
988
  self.tag_objects_v2(session, [fn], project, tag)
964
989
  return hash_key
965
990
 
966
- def get_function(self, session, name, project="", tag="", hash_key=""):
991
+ def get_function(self, session, name, project="", tag="", hash_key="") -> dict:
992
+ """
993
+ In version 1.4.0 we added a normalization to the function name before storing.
994
+ To be backwards compatible and allow users to query old non-normalized functions,
995
+ we're providing a fallback to get_function:
996
+ normalize the requested name and try to retrieve it from the database.
997
+ If no answer is received, we will check to see if the original name contained underscores,
998
+ if so, the retrieval will be repeated and the result (if it exists) returned.
999
+ """
1000
+ normalized_function_name = mlrun.utils.normalize_name(name)
1001
+ try:
1002
+ return self._get_function(
1003
+ session, normalized_function_name, project, tag, hash_key
1004
+ )
1005
+ except mlrun.errors.MLRunNotFoundError as exc:
1006
+ if "_" in name:
1007
+ logger.warning(
1008
+ "Failed to get underscore-named function, trying without normalization",
1009
+ function_name=name,
1010
+ )
1011
+ return self._get_function(session, name, project, tag, hash_key)
1012
+ else:
1013
+ raise exc
1014
+
1015
+ def _get_function(self, session, name, project="", tag="", hash_key=""):
967
1016
  project = project or config.default_project
968
1017
  query = self._query(session, Function, name=name, project=project)
969
1018
  computed_tag = tag or "latest"
@@ -1100,7 +1149,7 @@ class SQLDB(DBInterface):
1100
1149
  return [row[0] for row in query]
1101
1150
 
1102
1151
  def list_artifact_tags(
1103
- self, session, project, category: schemas.ArtifactCategories = None
1152
+ self, session, project, category: mlrun.common.schemas.ArtifactCategories = None
1104
1153
  ) -> typing.List[typing.Tuple[str, str, str]]:
1105
1154
  """
1106
1155
  :return: a list of Tuple of (project, artifact.key, tag)
@@ -1131,9 +1180,9 @@ class SQLDB(DBInterface):
1131
1180
  session: Session,
1132
1181
  project: str,
1133
1182
  name: str,
1134
- kind: schemas.ScheduleKinds,
1183
+ kind: mlrun.common.schemas.ScheduleKinds,
1135
1184
  scheduled_object: Any,
1136
- cron_trigger: schemas.ScheduleCronTrigger,
1185
+ cron_trigger: mlrun.common.schemas.ScheduleCronTrigger,
1137
1186
  concurrency_limit: int,
1138
1187
  labels: Dict = None,
1139
1188
  next_run_time: datetime = None,
@@ -1176,7 +1225,7 @@ class SQLDB(DBInterface):
1176
1225
  project: str,
1177
1226
  name: str,
1178
1227
  scheduled_object: Any = None,
1179
- cron_trigger: schemas.ScheduleCronTrigger = None,
1228
+ cron_trigger: mlrun.common.schemas.ScheduleCronTrigger = None,
1180
1229
  labels: Dict = None,
1181
1230
  last_run_uri: str = None,
1182
1231
  concurrency_limit: int = None,
@@ -1222,8 +1271,8 @@ class SQLDB(DBInterface):
1222
1271
  project: str = None,
1223
1272
  name: str = None,
1224
1273
  labels: str = None,
1225
- kind: schemas.ScheduleKinds = None,
1226
- ) -> List[schemas.ScheduleRecord]:
1274
+ kind: mlrun.common.schemas.ScheduleKinds = None,
1275
+ ) -> List[mlrun.common.schemas.ScheduleRecord]:
1227
1276
  logger.debug("Getting schedules from db", project=project, name=name, kind=kind)
1228
1277
  query = self._query(session, Schedule, project=project, kind=kind)
1229
1278
  if name is not None:
@@ -1239,7 +1288,7 @@ class SQLDB(DBInterface):
1239
1288
 
1240
1289
  def get_schedule(
1241
1290
  self, session: Session, project: str, name: str
1242
- ) -> schemas.ScheduleRecord:
1291
+ ) -> mlrun.common.schemas.ScheduleRecord:
1243
1292
  logger.debug("Getting schedule from db", project=project, name=name)
1244
1293
  schedule_record = self._get_schedule_record(session, project, name)
1245
1294
  schedule = self._transform_schedule_record_to_scheme(schedule_record)
@@ -1247,7 +1296,7 @@ class SQLDB(DBInterface):
1247
1296
 
1248
1297
  def _get_schedule_record(
1249
1298
  self, session: Session, project: str, name: str
1250
- ) -> schemas.ScheduleRecord:
1299
+ ) -> mlrun.common.schemas.ScheduleRecord:
1251
1300
  query = self._query(session, Schedule, project=project, name=name)
1252
1301
  schedule_record = query.one_or_none()
1253
1302
  if not schedule_record:
@@ -1340,8 +1389,8 @@ class SQLDB(DBInterface):
1340
1389
  tags.append(tag)
1341
1390
  self._upsert(session, tags)
1342
1391
 
1343
- def create_project(self, session: Session, project: schemas.Project):
1344
- logger.debug("Creating project in DB", project=project)
1392
+ def create_project(self, session: Session, project: mlrun.common.schemas.Project):
1393
+ logger.debug("Creating project in DB", project_name=project.metadata.name)
1345
1394
  created = datetime.utcnow()
1346
1395
  project.metadata.created = created
1347
1396
  # TODO: handle taking out the functions/workflows/artifacts out of the project and save them separately
@@ -1359,8 +1408,17 @@ class SQLDB(DBInterface):
1359
1408
  self._upsert(session, [project_record])
1360
1409
 
1361
1410
  @retry_on_conflict
1362
- def store_project(self, session: Session, name: str, project: schemas.Project):
1363
- logger.debug("Storing project in DB", name=name)
1411
+ def store_project(
1412
+ self, session: Session, name: str, project: mlrun.common.schemas.Project
1413
+ ):
1414
+ logger.debug(
1415
+ "Storing project in DB",
1416
+ name=name,
1417
+ project_metadata=project.metadata,
1418
+ project_owner=project.spec.owner,
1419
+ project_desired_state=project.spec.desired_state,
1420
+ project_status=project.status,
1421
+ )
1364
1422
  project_record = self._get_project_record(
1365
1423
  session, name, raise_on_not_found=False
1366
1424
  )
@@ -1374,7 +1432,7 @@ class SQLDB(DBInterface):
1374
1432
  session: Session,
1375
1433
  name: str,
1376
1434
  project: dict,
1377
- patch_mode: schemas.PatchMode = schemas.PatchMode.replace,
1435
+ patch_mode: mlrun.common.schemas.PatchMode = mlrun.common.schemas.PatchMode.replace,
1378
1436
  ):
1379
1437
  logger.debug("Patching project in DB", name=name, patch_mode=patch_mode)
1380
1438
  project_record = self._get_project_record(session, name)
@@ -1384,7 +1442,7 @@ class SQLDB(DBInterface):
1384
1442
 
1385
1443
  def get_project(
1386
1444
  self, session: Session, name: str = None, project_id: int = None
1387
- ) -> schemas.Project:
1445
+ ) -> mlrun.common.schemas.Project:
1388
1446
  project_record = self._get_project_record(session, name, project_id)
1389
1447
 
1390
1448
  return self._transform_project_record_to_schema(session, project_record)
@@ -1393,7 +1451,7 @@ class SQLDB(DBInterface):
1393
1451
  self,
1394
1452
  session: Session,
1395
1453
  name: str,
1396
- deletion_strategy: schemas.DeletionStrategy = schemas.DeletionStrategy.default(),
1454
+ deletion_strategy: mlrun.common.schemas.DeletionStrategy = mlrun.common.schemas.DeletionStrategy.default(),
1397
1455
  ):
1398
1456
  logger.debug(
1399
1457
  "Deleting project from DB", name=name, deletion_strategy=deletion_strategy
@@ -1404,16 +1462,16 @@ class SQLDB(DBInterface):
1404
1462
  self,
1405
1463
  session: Session,
1406
1464
  owner: str = None,
1407
- format_: mlrun.api.schemas.ProjectsFormat = mlrun.api.schemas.ProjectsFormat.full,
1465
+ format_: mlrun.common.schemas.ProjectsFormat = mlrun.common.schemas.ProjectsFormat.full,
1408
1466
  labels: List[str] = None,
1409
- state: mlrun.api.schemas.ProjectState = None,
1467
+ state: mlrun.common.schemas.ProjectState = None,
1410
1468
  names: typing.Optional[typing.List[str]] = None,
1411
- ) -> schemas.ProjectsOutput:
1469
+ ) -> mlrun.common.schemas.ProjectsOutput:
1412
1470
  query = self._query(session, Project, owner=owner, state=state)
1413
1471
 
1414
1472
  # if format is name_only, we don't need to query the full project object, we can just query the name
1415
1473
  # and return it as a list of strings
1416
- if format_ == mlrun.api.schemas.ProjectsFormat.name_only:
1474
+ if format_ == mlrun.common.schemas.ProjectsFormat.name_only:
1417
1475
  query = self._query(session, Project.name, owner=owner, state=state)
1418
1476
 
1419
1477
  # attach filters to the query
@@ -1427,12 +1485,12 @@ class SQLDB(DBInterface):
1427
1485
  # format the projects according to the requested format
1428
1486
  projects = []
1429
1487
  for project_record in project_records:
1430
- if format_ == mlrun.api.schemas.ProjectsFormat.name_only:
1488
+ if format_ == mlrun.common.schemas.ProjectsFormat.name_only:
1431
1489
  projects.append(project_record.name)
1432
1490
 
1433
- elif format_ == mlrun.api.schemas.ProjectsFormat.minimal:
1491
+ elif format_ == mlrun.common.schemas.ProjectsFormat.minimal:
1434
1492
  projects.append(
1435
- self._minimize_project_schema(
1493
+ mlrun.api.utils.helpers.minimize_project_schema(
1436
1494
  self._transform_project_record_to_schema(
1437
1495
  session, project_record
1438
1496
  )
@@ -1441,8 +1499,8 @@ class SQLDB(DBInterface):
1441
1499
 
1442
1500
  # leader format is only for follower mode which will format the projects returned from here
1443
1501
  elif format_ in [
1444
- mlrun.api.schemas.ProjectsFormat.full,
1445
- mlrun.api.schemas.ProjectsFormat.leader,
1502
+ mlrun.common.schemas.ProjectsFormat.full,
1503
+ mlrun.common.schemas.ProjectsFormat.leader,
1446
1504
  ]:
1447
1505
  projects.append(
1448
1506
  self._transform_project_record_to_schema(session, project_record)
@@ -1451,16 +1509,7 @@ class SQLDB(DBInterface):
1451
1509
  raise NotImplementedError(
1452
1510
  f"Provided format is not supported. format={format_}"
1453
1511
  )
1454
- return schemas.ProjectsOutput(projects=projects)
1455
-
1456
- def _minimize_project_schema(
1457
- self,
1458
- project: mlrun.api.schemas.Project,
1459
- ) -> mlrun.api.schemas.Project:
1460
- project.spec.functions = None
1461
- project.spec.workflows = None
1462
- project.spec.artifacts = None
1463
- return project
1512
+ return mlrun.common.schemas.ProjectsOutput(projects=projects)
1464
1513
 
1465
1514
  async def get_project_resources_counters(
1466
1515
  self,
@@ -1569,7 +1618,10 @@ class SQLDB(DBInterface):
1569
1618
  # We're using the "latest" which gives us only one version of each artifact key, which is what we want to
1570
1619
  # count (artifact count, not artifact versions count)
1571
1620
  file_artifacts = self._find_artifacts(
1572
- session, None, "latest", category=mlrun.api.schemas.ArtifactCategories.other
1621
+ session,
1622
+ None,
1623
+ "latest",
1624
+ category=mlrun.common.schemas.ArtifactCategories.other,
1573
1625
  )
1574
1626
  project_to_files_count = collections.defaultdict(int)
1575
1627
  for file_artifact in file_artifacts:
@@ -1613,7 +1665,7 @@ class SQLDB(DBInterface):
1613
1665
 
1614
1666
  async def generate_projects_summaries(
1615
1667
  self, session: Session, projects: List[str]
1616
- ) -> List[mlrun.api.schemas.ProjectSummary]:
1668
+ ) -> List[mlrun.common.schemas.ProjectSummary]:
1617
1669
  (
1618
1670
  project_to_function_count,
1619
1671
  project_to_schedule_count,
@@ -1625,7 +1677,7 @@ class SQLDB(DBInterface):
1625
1677
  project_summaries = []
1626
1678
  for project in projects:
1627
1679
  project_summaries.append(
1628
- mlrun.api.schemas.ProjectSummary(
1680
+ mlrun.common.schemas.ProjectSummary(
1629
1681
  name=project,
1630
1682
  functions_count=project_to_function_count.get(project, 0),
1631
1683
  schedules_count=project_to_schedule_count.get(project, 0),
@@ -1643,7 +1695,10 @@ class SQLDB(DBInterface):
1643
1695
  return project_summaries
1644
1696
 
1645
1697
  def _update_project_record_from_project(
1646
- self, session: Session, project_record: Project, project: schemas.Project
1698
+ self,
1699
+ session: Session,
1700
+ project_record: Project,
1701
+ project: mlrun.common.schemas.Project,
1647
1702
  ):
1648
1703
  project.metadata.created = project_record.created
1649
1704
  project_dict = project.dict()
@@ -1663,7 +1718,7 @@ class SQLDB(DBInterface):
1663
1718
  name: str,
1664
1719
  project_record: Project,
1665
1720
  project: dict,
1666
- patch_mode: schemas.PatchMode,
1721
+ patch_mode: mlrun.common.schemas.PatchMode,
1667
1722
  ):
1668
1723
  project.setdefault("metadata", {})["created"] = project_record.created
1669
1724
  strategy = patch_mode.to_mergedeep_strategy()
@@ -1671,7 +1726,7 @@ class SQLDB(DBInterface):
1671
1726
  mergedeep.merge(project_record_full_object, project, strategy=strategy)
1672
1727
 
1673
1728
  # If a bad kind value was passed, it will fail here (return 422 to caller)
1674
- project = schemas.Project(**project_record_full_object)
1729
+ project = mlrun.common.schemas.Project(**project_record_full_object)
1675
1730
  self.store_project(
1676
1731
  session,
1677
1732
  name,
@@ -1695,7 +1750,7 @@ class SQLDB(DBInterface):
1695
1750
  name: str = None,
1696
1751
  project_id: int = None,
1697
1752
  raise_on_not_found: bool = True,
1698
- ) -> Project:
1753
+ ) -> typing.Optional[Project]:
1699
1754
  if not any([project_id, name]):
1700
1755
  raise mlrun.errors.MLRunInvalidArgumentError(
1701
1756
  "One of 'name' or 'project_id' must be provided"
@@ -1721,6 +1776,12 @@ class SQLDB(DBInterface):
1721
1776
  self._verify_empty_list_of_project_related_resources(name, logs, "logs")
1722
1777
  runs = self._find_runs(session, None, name, []).all()
1723
1778
  self._verify_empty_list_of_project_related_resources(name, runs, "runs")
1779
+ notifications = []
1780
+ for cls in _with_notifications:
1781
+ notifications.extend(self._get_db_notifications(session, cls, project=name))
1782
+ self._verify_empty_list_of_project_related_resources(
1783
+ name, notifications, "notifications"
1784
+ )
1724
1785
  schedules = self.list_schedules(session, project=name)
1725
1786
  self._verify_empty_list_of_project_related_resources(
1726
1787
  name, schedules, "schedules"
@@ -1741,6 +1802,7 @@ class SQLDB(DBInterface):
1741
1802
  def delete_project_related_resources(self, session: Session, name: str):
1742
1803
  self.del_artifacts(session, project=name)
1743
1804
  self._delete_logs(session, name)
1805
+ self.delete_run_notifications(session, project=name)
1744
1806
  self.del_runs(session, project=name)
1745
1807
  self.delete_schedules(session, name)
1746
1808
  self._delete_functions(session, name)
@@ -1817,7 +1879,7 @@ class SQLDB(DBInterface):
1817
1879
  name: str,
1818
1880
  tag: str = None,
1819
1881
  uid: str = None,
1820
- ) -> schemas.FeatureSet:
1882
+ ) -> mlrun.common.schemas.FeatureSet:
1821
1883
  feature_set = self._get_feature_set(session, project, name, tag, uid)
1822
1884
  if not feature_set:
1823
1885
  feature_set_uri = generate_object_uri(project, name, tag)
@@ -1865,10 +1927,10 @@ class SQLDB(DBInterface):
1865
1927
  return results
1866
1928
 
1867
1929
  @staticmethod
1868
- def _generate_feature_set_digest(feature_set: schemas.FeatureSet):
1869
- return schemas.FeatureSetDigestOutput(
1930
+ def _generate_feature_set_digest(feature_set: mlrun.common.schemas.FeatureSet):
1931
+ return mlrun.common.schemas.FeatureSetDigestOutput(
1870
1932
  metadata=feature_set.metadata,
1871
- spec=schemas.FeatureSetDigestSpec(
1933
+ spec=mlrun.common.schemas.FeatureSetDigestSpec(
1872
1934
  entities=feature_set.spec.entities,
1873
1935
  features=feature_set.spec.features,
1874
1936
  ),
@@ -1910,7 +1972,7 @@ class SQLDB(DBInterface):
1910
1972
  tag: str = None,
1911
1973
  entities: List[str] = None,
1912
1974
  labels: List[str] = None,
1913
- ) -> schemas.FeaturesOutput:
1975
+ ) -> mlrun.common.schemas.FeaturesOutput:
1914
1976
  # We don't filter by feature-set name here, as the name parameter refers to features
1915
1977
  feature_set_id_tags = self._get_records_to_tags_map(
1916
1978
  session, FeatureSet, project, tag, name=None
@@ -1925,7 +1987,7 @@ class SQLDB(DBInterface):
1925
1987
 
1926
1988
  features_results = []
1927
1989
  for row in query:
1928
- feature_record = schemas.FeatureRecord.from_orm(row.Feature)
1990
+ feature_record = mlrun.common.schemas.FeatureRecord.from_orm(row.Feature)
1929
1991
  feature_name = feature_record.name
1930
1992
 
1931
1993
  feature_sets = self._generate_records_with_tags_assigned(
@@ -1952,14 +2014,14 @@ class SQLDB(DBInterface):
1952
2014
  )
1953
2015
 
1954
2016
  features_results.append(
1955
- schemas.FeatureListOutput(
2017
+ mlrun.common.schemas.FeatureListOutput(
1956
2018
  feature=feature,
1957
2019
  feature_set_digest=self._generate_feature_set_digest(
1958
2020
  feature_set
1959
2021
  ),
1960
2022
  )
1961
2023
  )
1962
- return schemas.FeaturesOutput(features=features_results)
2024
+ return mlrun.common.schemas.FeaturesOutput(features=features_results)
1963
2025
 
1964
2026
  def list_entities(
1965
2027
  self,
@@ -1968,7 +2030,7 @@ class SQLDB(DBInterface):
1968
2030
  name: str = None,
1969
2031
  tag: str = None,
1970
2032
  labels: List[str] = None,
1971
- ) -> schemas.EntitiesOutput:
2033
+ ) -> mlrun.common.schemas.EntitiesOutput:
1972
2034
  feature_set_id_tags = self._get_records_to_tags_map(
1973
2035
  session, FeatureSet, project, tag, name=None
1974
2036
  )
@@ -1979,7 +2041,7 @@ class SQLDB(DBInterface):
1979
2041
 
1980
2042
  entities_results = []
1981
2043
  for row in query:
1982
- entity_record = schemas.FeatureRecord.from_orm(row.Entity)
2044
+ entity_record = mlrun.common.schemas.FeatureRecord.from_orm(row.Entity)
1983
2045
  entity_name = entity_record.name
1984
2046
 
1985
2047
  feature_sets = self._generate_records_with_tags_assigned(
@@ -2006,14 +2068,14 @@ class SQLDB(DBInterface):
2006
2068
  )
2007
2069
 
2008
2070
  entities_results.append(
2009
- schemas.EntityListOutput(
2071
+ mlrun.common.schemas.EntityListOutput(
2010
2072
  entity=entity,
2011
2073
  feature_set_digest=self._generate_feature_set_digest(
2012
2074
  feature_set
2013
2075
  ),
2014
2076
  )
2015
2077
  )
2016
- return schemas.EntitiesOutput(entities=entities_results)
2078
+ return mlrun.common.schemas.EntitiesOutput(entities=entities_results)
2017
2079
 
2018
2080
  @staticmethod
2019
2081
  def _assert_partition_by_parameters(partition_by_enum_cls, partition_by, sort):
@@ -2036,11 +2098,12 @@ class SQLDB(DBInterface):
2036
2098
  query,
2037
2099
  cls,
2038
2100
  partition_by: typing.Union[
2039
- schemas.FeatureStorePartitionByField, schemas.RunPartitionByField
2101
+ mlrun.common.schemas.FeatureStorePartitionByField,
2102
+ mlrun.common.schemas.RunPartitionByField,
2040
2103
  ],
2041
2104
  rows_per_partition: int,
2042
- partition_sort_by: schemas.SortField,
2043
- partition_order: schemas.OrderType,
2105
+ partition_sort_by: mlrun.common.schemas.SortField,
2106
+ partition_order: mlrun.common.schemas.OrderType,
2044
2107
  max_partitions: int = 0,
2045
2108
  ):
2046
2109
 
@@ -2106,11 +2169,11 @@ class SQLDB(DBInterface):
2106
2169
  entities: List[str] = None,
2107
2170
  features: List[str] = None,
2108
2171
  labels: List[str] = None,
2109
- partition_by: schemas.FeatureStorePartitionByField = None,
2172
+ partition_by: mlrun.common.schemas.FeatureStorePartitionByField = None,
2110
2173
  rows_per_partition: int = 1,
2111
- partition_sort_by: schemas.SortField = None,
2112
- partition_order: schemas.OrderType = schemas.OrderType.desc,
2113
- ) -> schemas.FeatureSetsOutput:
2174
+ partition_sort_by: mlrun.common.schemas.SortField = None,
2175
+ partition_order: mlrun.common.schemas.OrderType = mlrun.common.schemas.OrderType.desc,
2176
+ ) -> mlrun.common.schemas.FeatureSetsOutput:
2114
2177
  obj_id_tags = self._get_records_to_tags_map(
2115
2178
  session, FeatureSet, project, tag, name
2116
2179
  )
@@ -2133,7 +2196,9 @@ class SQLDB(DBInterface):
2133
2196
 
2134
2197
  if partition_by:
2135
2198
  self._assert_partition_by_parameters(
2136
- schemas.FeatureStorePartitionByField, partition_by, partition_sort_by
2199
+ mlrun.common.schemas.FeatureStorePartitionByField,
2200
+ partition_by,
2201
+ partition_sort_by,
2137
2202
  )
2138
2203
  query = self._create_partitioned_query(
2139
2204
  session,
@@ -2155,7 +2220,7 @@ class SQLDB(DBInterface):
2155
2220
  tag,
2156
2221
  )
2157
2222
  )
2158
- return schemas.FeatureSetsOutput(feature_sets=feature_sets)
2223
+ return mlrun.common.schemas.FeatureSetsOutput(feature_sets=feature_sets)
2159
2224
 
2160
2225
  def list_feature_sets_tags(
2161
2226
  self,
@@ -2228,7 +2293,6 @@ class SQLDB(DBInterface):
2228
2293
  feature_set_spec = new_feature_set_dict.get("spec")
2229
2294
  features = feature_set_spec.pop("features", [])
2230
2295
  entities = feature_set_spec.pop("entities", [])
2231
-
2232
2296
  self._update_feature_set_features(feature_set, features)
2233
2297
  self._update_feature_set_entities(feature_set, entities)
2234
2298
 
@@ -2286,7 +2350,7 @@ class SQLDB(DBInterface):
2286
2350
  session,
2287
2351
  project,
2288
2352
  name,
2289
- feature_set: schemas.FeatureSet,
2353
+ feature_set: mlrun.common.schemas.FeatureSet,
2290
2354
  tag=None,
2291
2355
  uid=None,
2292
2356
  versioned=True,
@@ -2385,7 +2449,7 @@ class SQLDB(DBInterface):
2385
2449
  self,
2386
2450
  session,
2387
2451
  project,
2388
- feature_set: schemas.FeatureSet,
2452
+ feature_set: mlrun.common.schemas.FeatureSet,
2389
2453
  versioned=True,
2390
2454
  ) -> str:
2391
2455
  (uid, tag, feature_set_dict,) = self._validate_and_enrich_record_for_creation(
@@ -2393,7 +2457,6 @@ class SQLDB(DBInterface):
2393
2457
  )
2394
2458
 
2395
2459
  db_feature_set = FeatureSet(project=project)
2396
-
2397
2460
  self._update_db_record_from_object_dict(db_feature_set, feature_set_dict, uid)
2398
2461
  self._update_feature_set_spec(db_feature_set, feature_set_dict)
2399
2462
 
@@ -2410,7 +2473,7 @@ class SQLDB(DBInterface):
2410
2473
  feature_set_patch: dict,
2411
2474
  tag=None,
2412
2475
  uid=None,
2413
- patch_mode: schemas.PatchMode = schemas.PatchMode.replace,
2476
+ patch_mode: mlrun.common.schemas.PatchMode = mlrun.common.schemas.PatchMode.replace,
2414
2477
  ) -> str:
2415
2478
  feature_set_record = self._get_feature_set(session, project, name, tag, uid)
2416
2479
  if not feature_set_record:
@@ -2427,7 +2490,7 @@ class SQLDB(DBInterface):
2427
2490
  versioned = feature_set_record.metadata.uid is not None
2428
2491
 
2429
2492
  # If a bad kind value was passed, it will fail here (return 422 to caller)
2430
- feature_set = schemas.FeatureSet(**feature_set_struct)
2493
+ feature_set = mlrun.common.schemas.FeatureSet(**feature_set_struct)
2431
2494
  return self.store_feature_set(
2432
2495
  session,
2433
2496
  project,
@@ -2478,7 +2541,7 @@ class SQLDB(DBInterface):
2478
2541
  self,
2479
2542
  session,
2480
2543
  project,
2481
- feature_vector: schemas.FeatureVector,
2544
+ feature_vector: mlrun.common.schemas.FeatureVector,
2482
2545
  versioned=True,
2483
2546
  ) -> str:
2484
2547
  (
@@ -2529,7 +2592,7 @@ class SQLDB(DBInterface):
2529
2592
 
2530
2593
  def get_feature_vector(
2531
2594
  self, session, project: str, name: str, tag: str = None, uid: str = None
2532
- ) -> schemas.FeatureVector:
2595
+ ) -> mlrun.common.schemas.FeatureVector:
2533
2596
  feature_vector = self._get_feature_vector(session, project, name, tag, uid)
2534
2597
  if not feature_vector:
2535
2598
  feature_vector_uri = generate_object_uri(project, name, tag)
@@ -2547,11 +2610,11 @@ class SQLDB(DBInterface):
2547
2610
  tag: str = None,
2548
2611
  state: str = None,
2549
2612
  labels: List[str] = None,
2550
- partition_by: schemas.FeatureStorePartitionByField = None,
2613
+ partition_by: mlrun.common.schemas.FeatureStorePartitionByField = None,
2551
2614
  rows_per_partition: int = 1,
2552
- partition_sort_by: schemas.SortField = None,
2553
- partition_order: schemas.OrderType = schemas.OrderType.desc,
2554
- ) -> schemas.FeatureVectorsOutput:
2615
+ partition_sort_by: mlrun.common.schemas.SortField = None,
2616
+ partition_order: mlrun.common.schemas.OrderType = mlrun.common.schemas.OrderType.desc,
2617
+ ) -> mlrun.common.schemas.FeatureVectorsOutput:
2555
2618
  obj_id_tags = self._get_records_to_tags_map(
2556
2619
  session, FeatureVector, project, tag, name
2557
2620
  )
@@ -2570,7 +2633,9 @@ class SQLDB(DBInterface):
2570
2633
 
2571
2634
  if partition_by:
2572
2635
  self._assert_partition_by_parameters(
2573
- schemas.FeatureStorePartitionByField, partition_by, partition_sort_by
2636
+ mlrun.common.schemas.FeatureStorePartitionByField,
2637
+ partition_by,
2638
+ partition_sort_by,
2574
2639
  )
2575
2640
  query = self._create_partitioned_query(
2576
2641
  session,
@@ -2592,7 +2657,9 @@ class SQLDB(DBInterface):
2592
2657
  tag,
2593
2658
  )
2594
2659
  )
2595
- return schemas.FeatureVectorsOutput(feature_vectors=feature_vectors)
2660
+ return mlrun.common.schemas.FeatureVectorsOutput(
2661
+ feature_vectors=feature_vectors
2662
+ )
2596
2663
 
2597
2664
  def list_feature_vectors_tags(
2598
2665
  self,
@@ -2613,7 +2680,7 @@ class SQLDB(DBInterface):
2613
2680
  session,
2614
2681
  project,
2615
2682
  name,
2616
- feature_vector: schemas.FeatureVector,
2683
+ feature_vector: mlrun.common.schemas.FeatureVector,
2617
2684
  tag=None,
2618
2685
  uid=None,
2619
2686
  versioned=True,
@@ -2676,7 +2743,7 @@ class SQLDB(DBInterface):
2676
2743
  feature_vector_update: dict,
2677
2744
  tag=None,
2678
2745
  uid=None,
2679
- patch_mode: schemas.PatchMode = schemas.PatchMode.replace,
2746
+ patch_mode: mlrun.common.schemas.PatchMode = mlrun.common.schemas.PatchMode.replace,
2680
2747
  ) -> str:
2681
2748
  feature_vector_record = self._get_feature_vector(
2682
2749
  session, project, name, tag, uid
@@ -2694,7 +2761,7 @@ class SQLDB(DBInterface):
2694
2761
 
2695
2762
  versioned = feature_vector_record.metadata.uid is not None
2696
2763
 
2697
- feature_vector = schemas.FeatureVector(**feature_vector_struct)
2764
+ feature_vector = mlrun.common.schemas.FeatureVector(**feature_vector_struct)
2698
2765
  return self.store_feature_vector(
2699
2766
  session,
2700
2767
  project,
@@ -2745,17 +2812,6 @@ class SQLDB(DBInterface):
2745
2812
  kw = {k: v for k, v in kw.items() if v is not None}
2746
2813
  return session.query(cls).filter_by(**kw)
2747
2814
 
2748
- def _function_latest_uid(self, session, project, name):
2749
- # FIXME
2750
- query = (
2751
- self._query(session, Function.uid)
2752
- .filter(Function.project == project, Function.name == name)
2753
- .order_by(Function.updated.desc())
2754
- ).limit(1)
2755
- out = query.one_or_none()
2756
- if out:
2757
- return out[0]
2758
-
2759
2815
  def _find_or_create_users(self, session, user_names):
2760
2816
  users = list(self._query(session, User).filter(User.name.in_(user_names)))
2761
2817
  new = set(user_names) - {user.name for user in users}
@@ -2864,6 +2920,13 @@ class SQLDB(DBInterface):
2864
2920
  query = query.filter(Run.uid.in_(uid))
2865
2921
  return self._add_labels_filter(session, query, Run, labels)
2866
2922
 
2923
+ def _get_db_notifications(
2924
+ self, session, cls, name: str = None, parent_id: str = None, project: str = None
2925
+ ):
2926
+ return self._query(
2927
+ session, cls.Notification, name=name, parent_id=parent_id, project=project
2928
+ ).all()
2929
+
2867
2930
  def _latest_uid_filter(self, session, query):
2868
2931
  # Create a sub query of latest uid (by updated) per (project,key)
2869
2932
  subq = (
@@ -2939,7 +3002,7 @@ class SQLDB(DBInterface):
2939
3002
  until=None,
2940
3003
  name=None,
2941
3004
  kind=None,
2942
- category: schemas.ArtifactCategories = None,
3005
+ category: mlrun.common.schemas.ArtifactCategories = None,
2943
3006
  iter=None,
2944
3007
  use_tag_as_uid: bool = None,
2945
3008
  ):
@@ -2990,7 +3053,7 @@ class SQLDB(DBInterface):
2990
3053
  return query.all()
2991
3054
 
2992
3055
  def _filter_artifacts_by_category(
2993
- self, artifacts, category: schemas.ArtifactCategories
3056
+ self, artifacts, category: mlrun.common.schemas.ArtifactCategories
2994
3057
  ):
2995
3058
  kinds, exclude = category.to_kinds_filter()
2996
3059
  return self._filter_artifacts_by_kinds(artifacts, kinds, exclude)
@@ -3137,8 +3200,8 @@ class SQLDB(DBInterface):
  def _transform_schedule_record_to_scheme(
  self,
  schedule_record: Schedule,
- ) -> schemas.ScheduleRecord:
- schedule = schemas.ScheduleRecord.from_orm(schedule_record)
+ ) -> mlrun.common.schemas.ScheduleRecord:
+ schedule = mlrun.common.schemas.ScheduleRecord.from_orm(schedule_record)
  schedule.creation_time = self._add_utc_timezone(schedule.creation_time)
  schedule.next_run_time = self._add_utc_timezone(schedule.next_run_time)
  return schedule
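The schedule transform still normalizes timestamps via `_add_utc_timezone`, whose body is not part of this diff; presumably it attaches UTC tzinfo to the naive timestamps read from the DB. A standalone sketch of that idea (hypothetical helper, not mlrun's code):

```python
from datetime import datetime, timezone
from typing import Optional

def add_utc_timezone(value: Optional[datetime]) -> Optional[datetime]:
    # Interpret naive timestamps read from the DB as UTC.
    if value is not None and value.tzinfo is None:
        return value.replace(tzinfo=timezone.utc)
    return value

print(add_utc_timezone(datetime(2023, 6, 1, 12, 0)))  # 2023-06-01 12:00:00+00:00
```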
@@ -3158,9 +3221,9 @@ class SQLDB(DBInterface):
  def _transform_feature_set_model_to_schema(
  feature_set_record: FeatureSet,
  tag=None,
- ) -> schemas.FeatureSet:
+ ) -> mlrun.common.schemas.FeatureSet:
  feature_set_full_dict = feature_set_record.full_object
- feature_set_resp = schemas.FeatureSet(**feature_set_full_dict)
+ feature_set_resp = mlrun.common.schemas.FeatureSet(**feature_set_full_dict)
 
  feature_set_resp.metadata.tag = tag
  return feature_set_resp
@@ -3169,9 +3232,11 @@ class SQLDB(DBInterface):
  def _transform_feature_vector_model_to_schema(
  feature_vector_record: FeatureVector,
  tag=None,
- ) -> schemas.FeatureVector:
+ ) -> mlrun.common.schemas.FeatureVector:
  feature_vector_full_dict = feature_vector_record.full_object
- feature_vector_resp = schemas.FeatureVector(**feature_vector_full_dict)
+ feature_vector_resp = mlrun.common.schemas.FeatureVector(
+ **feature_vector_full_dict
+ )
 
  feature_vector_resp.metadata.tag = tag
  feature_vector_resp.metadata.created = feature_vector_record.created
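The feature-set and feature-vector transforms keep the same record-to-schema round trip; only the call wrapping changes for line length. A sketch of the pattern used above, assuming `record.full_object` holds the stored JSON blob as in this file:

```python
import mlrun.common.schemas

def feature_vector_record_to_schema(record, tag=None):
    # Rebuild the schema object from the stored blob, then patch tag/created
    # from the record columns (mirrors _transform_feature_vector_model_to_schema).
    resp = mlrun.common.schemas.FeatureVector(**record.full_object)
    resp.metadata.tag = tag
    resp.metadata.created = record.created
    return resp
```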
@@ -3179,26 +3244,55 @@ class SQLDB(DBInterface):
 
  def _transform_project_record_to_schema(
  self, session: Session, project_record: Project
- ) -> schemas.Project:
+ ) -> mlrun.common.schemas.Project:
  # in projects that was created before 0.6.0 the full object wasn't created properly - fix that, and return
  if not project_record.full_object:
- project = schemas.Project(
- metadata=schemas.ProjectMetadata(
+ project = mlrun.common.schemas.Project(
+ metadata=mlrun.common.schemas.ProjectMetadata(
  name=project_record.name,
  created=project_record.created,
  ),
- spec=schemas.ProjectSpec(
+ spec=mlrun.common.schemas.ProjectSpec(
  description=project_record.description,
  source=project_record.source,
  ),
- status=schemas.ObjectStatus(
+ status=mlrun.common.schemas.ObjectStatus(
  state=project_record.state,
  ),
  )
  self.store_project(session, project_record.name, project)
  return project
  # TODO: handle transforming the functions/workflows/artifacts references to real objects
- return schemas.Project(**project_record.full_object)
+ return mlrun.common.schemas.Project(**project_record.full_object)
+
+ def _transform_notification_record_to_spec_and_status(
+ self,
+ notification_record,
+ ) -> typing.Tuple[dict, dict]:
+ notification_spec = self._transform_notification_record_to_schema(
+ notification_record
+ ).to_dict()
+ notification_status = {
+ "status": notification_spec.pop("status", None),
+ "sent_time": notification_spec.pop("sent_time", None),
+ }
+ return notification_spec, notification_status
+
+ @staticmethod
+ def _transform_notification_record_to_schema(
+ notification_record,
+ ) -> mlrun.model.Notification:
+ return mlrun.model.Notification(
+ kind=notification_record.kind,
+ name=notification_record.name,
+ message=notification_record.message,
+ severity=notification_record.severity,
+ when=notification_record.when.split(","),
+ condition=notification_record.condition,
+ params=notification_record.params,
+ status=notification_record.status,
+ sent_time=notification_record.sent_time,
+ )
 
  def _move_and_reorder_table_items(
  self, session, moved_object, move_to=None, move_from=None
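The two new notification transforms rebuild an `mlrun.model.Notification` from a DB row (note that `when` is stored comma-joined) and then split `status`/`sent_time` out of the spec dict. A standalone sketch of that split, with a plain dict standing in for the ORM record and hypothetical field values:

```python
import mlrun.model

record = {
    "kind": "slack", "name": "notify-on-completion", "message": "run finished",
    "severity": "info", "when": "completed,error", "condition": "",
    "params": {}, "status": "pending", "sent_time": None,
}

notification = mlrun.model.Notification(
    kind=record["kind"], name=record["name"], message=record["message"],
    severity=record["severity"], when=record["when"].split(","),  # comma-joined in the DB
    condition=record["condition"], params=record["params"],
    status=record["status"], sent_time=record["sent_time"],
)

spec = notification.to_dict()
status = {"status": spec.pop("status", None), "sent_time": spec.pop("sent_time", None)}
print(spec, status)
```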
@@ -3226,11 +3320,9 @@ class SQLDB(DBInterface):
  else:
  start, end = move_to, move_from - 1
 
- query = session.query(MarketplaceSource).filter(
- MarketplaceSource.index >= start
- )
+ query = session.query(HubSource).filter(HubSource.index >= start)
  if end:
- query = query.filter(MarketplaceSource.index <= end)
+ query = query.filter(HubSource.index <= end)
 
  for source_record in query:
  source_record.index = source_record.index + modifier
@@ -3245,54 +3337,54 @@ class SQLDB(DBInterface):
  session.commit()
 
  @staticmethod
- def _transform_marketplace_source_record_to_schema(
- marketplace_source_record: MarketplaceSource,
- ) -> schemas.IndexedMarketplaceSource:
- source_full_dict = marketplace_source_record.full_object
- marketplace_source = schemas.MarketplaceSource(**source_full_dict)
- return schemas.IndexedMarketplaceSource(
- index=marketplace_source_record.index, source=marketplace_source
+ def _transform_hub_source_record_to_schema(
+ hub_source_record: HubSource,
+ ) -> mlrun.common.schemas.IndexedHubSource:
+ source_full_dict = hub_source_record.full_object
+ hub_source = mlrun.common.schemas.HubSource(**source_full_dict)
+ return mlrun.common.schemas.IndexedHubSource(
+ index=hub_source_record.index, source=hub_source
  )
 
  @staticmethod
- def _transform_marketplace_source_schema_to_record(
- marketplace_source_schema: schemas.IndexedMarketplaceSource,
- current_object: MarketplaceSource = None,
+ def _transform_hub_source_schema_to_record(
+ hub_source_schema: mlrun.common.schemas.IndexedHubSource,
+ current_object: HubSource = None,
  ):
  now = datetime.now(timezone.utc)
  if current_object:
- if current_object.name != marketplace_source_schema.source.metadata.name:
+ if current_object.name != hub_source_schema.source.metadata.name:
  raise mlrun.errors.MLRunInternalServerError(
  "Attempt to update object while replacing its name"
  )
  created_timestamp = current_object.created
  else:
- created_timestamp = marketplace_source_schema.source.metadata.created or now
- updated_timestamp = marketplace_source_schema.source.metadata.updated or now
+ created_timestamp = hub_source_schema.source.metadata.created or now
+ updated_timestamp = hub_source_schema.source.metadata.updated or now
 
- marketplace_source_record = MarketplaceSource(
+ hub_source_record = HubSource(
  id=current_object.id if current_object else None,
- name=marketplace_source_schema.source.metadata.name,
- index=marketplace_source_schema.index,
+ name=hub_source_schema.source.metadata.name,
+ index=hub_source_schema.index,
  created=created_timestamp,
  updated=updated_timestamp,
  )
- full_object = marketplace_source_schema.source.dict()
+ full_object = hub_source_schema.source.dict()
  full_object["metadata"]["created"] = str(created_timestamp)
  full_object["metadata"]["updated"] = str(updated_timestamp)
- # Make sure we don't keep any credentials in the DB. These are handled in the marketplace crud object.
+ # Make sure we don't keep any credentials in the DB. These are handled in the hub crud object.
  full_object["spec"].pop("credentials", None)
 
- marketplace_source_record.full_object = full_object
- return marketplace_source_record
+ hub_source_record.full_object = full_object
+ return hub_source_record
 
  @staticmethod
- def _validate_and_adjust_marketplace_order(session, order):
- max_order = session.query(func.max(MarketplaceSource.index)).scalar()
+ def _validate_and_adjust_hub_order(session, order):
+ max_order = session.query(func.max(HubSource.index)).scalar()
  if not max_order or max_order < 0:
  max_order = 0
 
- if order == schemas.marketplace.last_source_index:
+ if order == mlrun.common.schemas.hub.last_source_index:
  order = max_order + 1
 
  if order > max_order + 1:
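Aside from the marketplace-to-hub rename, the ordering semantics are unchanged: the sentinel `last_source_index` means "append after the current maximum", and any explicit index must fall in `[1, max_order + 1]`. A standalone sketch of that rule (simplified; the real method queries the current maximum index with SQLAlchemy, as shown above):

```python
import mlrun.common.schemas

def adjust_hub_order(order: int, max_order: int) -> int:
    max_order = max(max_order or 0, 0)
    if order == mlrun.common.schemas.hub.last_source_index:
        return max_order + 1  # "last" resolves to one past the current maximum
    if order > max_order + 1 or order < 1:
        raise ValueError(f"invalid hub source order: {order}")
    return order
```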
@@ -3302,62 +3394,54 @@ class SQLDB(DBInterface):
  if order < 1:
  raise mlrun.errors.MLRunInvalidArgumentError(
  "Order of inserted source must be greater than 0 or "
- + f"{schemas.marketplace.last_source_index} (for last). order = {order}"
+ + f"{mlrun.common.schemas.hub.last_source_index} (for last). order = {order}"
  )
  return order
 
- def create_marketplace_source(
- self, session, ordered_source: schemas.IndexedMarketplaceSource
+ def create_hub_source(
+ self, session, ordered_source: mlrun.common.schemas.IndexedHubSource
  ):
  logger.debug(
- "Creating marketplace source in DB",
+ "Creating hub source in DB",
  index=ordered_source.index,
  name=ordered_source.source.metadata.name,
  )
 
- order = self._validate_and_adjust_marketplace_order(
- session, ordered_source.index
- )
+ order = self._validate_and_adjust_hub_order(session, ordered_source.index)
  name = ordered_source.source.metadata.name
- source_record = self._query(session, MarketplaceSource, name=name).one_or_none()
+ source_record = self._query(session, HubSource, name=name).one_or_none()
  if source_record:
  raise mlrun.errors.MLRunConflictError(
- f"Marketplace source name already exists. name = {name}"
+ f"Hub source name already exists. name = {name}"
  )
- source_record = self._transform_marketplace_source_schema_to_record(
- ordered_source
- )
+ source_record = self._transform_hub_source_schema_to_record(ordered_source)
 
  self._move_and_reorder_table_items(
  session, source_record, move_to=order, move_from=None
  )
 
  @retry_on_conflict
- def store_marketplace_source(
+ def store_hub_source(
  self,
  session,
  name,
- ordered_source: schemas.IndexedMarketplaceSource,
+ ordered_source: mlrun.common.schemas.IndexedHubSource,
  ):
- logger.debug(
- "Storing marketplace source in DB", index=ordered_source.index, name=name
- )
+ logger.debug("Storing hub source in DB", index=ordered_source.index, name=name)
 
  if name != ordered_source.source.metadata.name:
  raise mlrun.errors.MLRunInvalidArgumentError(
  "Conflict between resource name and metadata.name in the stored object"
  )
- order = self._validate_and_adjust_marketplace_order(
- session, ordered_source.index
- )
+ order = self._validate_and_adjust_hub_order(session, ordered_source.index)
 
- source_record = self._query(session, MarketplaceSource, name=name).one_or_none()
+ source_record = self._query(session, HubSource, name=name).one_or_none()
  current_order = source_record.index if source_record else None
- if current_order == schemas.marketplace.last_source_index:
+ if current_order == mlrun.common.schemas.hub.last_source_index:
  raise mlrun.errors.MLRunInvalidArgumentError(
- "Attempting to modify the global marketplace source."
+ "Attempting to modify the global hub source."
  )
- source_record = self._transform_marketplace_source_schema_to_record(
+ source_record = self._transform_hub_source_schema_to_record(
  ordered_source, source_record
  )
 
@@ -3365,47 +3449,46 @@ class SQLDB(DBInterface):
  session, source_record, move_to=order, move_from=current_order
  )
 
- def list_marketplace_sources(
- self, session
- ) -> List[schemas.IndexedMarketplaceSource]:
+ def list_hub_sources(self, session) -> List[mlrun.common.schemas.IndexedHubSource]:
  results = []
- query = self._query(session, MarketplaceSource).order_by(
- MarketplaceSource.index.desc()
- )
+ query = self._query(session, HubSource).order_by(HubSource.index.desc())
  for record in query:
- ordered_source = self._transform_marketplace_source_record_to_schema(record)
+ ordered_source = self._transform_hub_source_record_to_schema(record)
  # Need this to make the list return such that the default source is last in the response.
- if ordered_source.index != schemas.last_source_index:
+ if ordered_source.index != mlrun.common.schemas.last_source_index:
  results.insert(0, ordered_source)
  else:
  results.append(ordered_source)
  return results
 
- def delete_marketplace_source(self, session, name):
- logger.debug("Deleting marketplace source from DB", name=name)
+ def _list_hub_sources_without_transform(self, session) -> List[HubSource]:
+ return self._query(session, HubSource).all()
 
- source_record = self._query(session, MarketplaceSource, name=name).one_or_none()
+ def delete_hub_source(self, session, name):
+ logger.debug("Deleting hub source from DB", name=name)
+
+ source_record = self._query(session, HubSource, name=name).one_or_none()
  if not source_record:
  return
 
  current_order = source_record.index
- if current_order == schemas.marketplace.last_source_index:
+ if current_order == mlrun.common.schemas.hub.last_source_index:
  raise mlrun.errors.MLRunInvalidArgumentError(
- "Attempting to delete the global marketplace source."
+ "Attempting to delete the global hub source."
  )
 
  self._move_and_reorder_table_items(
  session, source_record, move_to=None, move_from=current_order
  )
 
- def get_marketplace_source(self, session, name) -> schemas.IndexedMarketplaceSource:
- source_record = self._query(session, MarketplaceSource, name=name).one_or_none()
+ def get_hub_source(self, session, name) -> mlrun.common.schemas.IndexedHubSource:
+ source_record = self._query(session, HubSource, name=name).one_or_none()
  if not source_record:
  raise mlrun.errors.MLRunNotFoundError(
- f"Marketplace source not found. name = {name}"
+ f"Hub source not found. name = {name}"
  )
 
- return self._transform_marketplace_source_record_to_schema(source_record)
+ return self._transform_hub_source_record_to_schema(source_record)
 
  def get_current_data_version(
  self, session, raise_on_not_found=True
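The hub-source CRUD (`create_hub_source`, `store_hub_source`, `list_hub_sources`, `delete_hub_source`, `get_hub_source`) is a one-for-one rename of the former `*_marketplace_source` methods, now typed against `mlrun.common.schemas.IndexedHubSource`. A hedged server-side usage sketch, assuming `db` is an `SQLDB` instance, `session` an open SQLAlchemy session, and `indexed_source` an `IndexedHubSource` built elsewhere:

```python
def upsert_and_list_hub_sources(db, session, indexed_source):
    name = indexed_source.source.metadata.name

    # store_hub_source upserts by name; the name argument must match metadata.name,
    # and the global (default) source cannot be modified or deleted.
    db.store_hub_source(session, name, indexed_source)

    # list_hub_sources returns IndexedHubSource objects with the default source last.
    for indexed in db.list_hub_sources(session):
        print(indexed.index, indexed.source.metadata.name)

    return db.get_hub_source(session, name)  # raises MLRunNotFoundError if missing
```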
@@ -3443,7 +3526,7 @@ class SQLDB(DBInterface):
  session,
  name: str,
  project: str,
- state: str = mlrun.api.schemas.BackgroundTaskState.running,
+ state: str = mlrun.common.schemas.BackgroundTaskState.running,
  timeout: int = None,
  ):
  background_task_record = self._query(
@@ -3457,7 +3540,7 @@ class SQLDB(DBInterface):
  # we don't want to be able to change state after it reached terminal state
  if (
  background_task_record.state
- in mlrun.api.schemas.BackgroundTaskState.terminal_states()
+ in mlrun.common.schemas.BackgroundTaskState.terminal_states()
  and state != background_task_record.state
  ):
  raise mlrun.errors.MLRunRuntimeError(
@@ -3484,7 +3567,7 @@ class SQLDB(DBInterface):
 
  def get_background_task(
  self, session, name: str, project: str
- ) -> schemas.BackgroundTask:
+ ) -> mlrun.common.schemas.BackgroundTask:
  background_task_record = self._get_background_task_record(
  session, name, project
  )
@@ -3495,7 +3578,7 @@ class SQLDB(DBInterface):
  session,
  name,
  project,
- mlrun.api.schemas.background_task.BackgroundTaskState.failed,
+ mlrun.common.schemas.background_task.BackgroundTaskState.failed,
  )
  background_task_record = self._get_background_task_record(
  session, name, project
@@ -3506,17 +3589,17 @@ class SQLDB(DBInterface):
  @staticmethod
  def _transform_background_task_record_to_schema(
  background_task_record: BackgroundTask,
- ) -> schemas.BackgroundTask:
- return schemas.BackgroundTask(
- metadata=schemas.BackgroundTaskMetadata(
+ ) -> mlrun.common.schemas.BackgroundTask:
+ return mlrun.common.schemas.BackgroundTask(
+ metadata=mlrun.common.schemas.BackgroundTaskMetadata(
  name=background_task_record.name,
  project=background_task_record.project,
  created=background_task_record.created,
  updated=background_task_record.updated,
  timeout=background_task_record.timeout,
  ),
- spec=schemas.BackgroundTaskSpec(),
- status=schemas.BackgroundTaskStatus(
+ spec=mlrun.common.schemas.BackgroundTaskSpec(),
+ status=mlrun.common.schemas.BackgroundTaskStatus(
  state=background_task_record.state,
  ),
  )
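Background-task handling only swaps the schema namespace; terminal states still block further state changes, and the overdue check in the next hunk compares `updated + timeout` against the current time. A small runnable check of the enum helpers referenced here:

```python
import mlrun.common.schemas

state = mlrun.common.schemas.BackgroundTaskState.running
terminal = mlrun.common.schemas.BackgroundTaskState.terminal_states()
# A running task is not terminal, so its state may still be updated.
print(state in terminal)  # False
```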
@@ -3569,9 +3652,169 @@ class SQLDB(DBInterface):
  if (
  timeout
  and background_task_record.state
- not in mlrun.api.schemas.BackgroundTaskState.terminal_states()
+ not in mlrun.common.schemas.BackgroundTaskState.terminal_states()
  and datetime.utcnow()
  > timedelta(seconds=int(timeout)) + background_task_record.updated
  ):
  return True
  return False
+
+ def store_run_notifications(
+ self,
+ session,
+ notification_objects: typing.List[mlrun.model.Notification],
+ run_uid: str,
+ project: str,
+ ):
+ # iteration is 0, as we don't support multiple notifications per hyper param run, only for the whole run
+ run = self._get_run(session, run_uid, project, 0)
+ if not run:
+ raise mlrun.errors.MLRunNotFoundError(
+ f"Run not found: uid={run_uid}, project={project}"
+ )
+
+ self._store_notifications(session, Run, notification_objects, run.id, project)
+
+ def _store_notifications(
+ self,
+ session,
+ cls,
+ notification_objects: typing.List[mlrun.model.Notification],
+ parent_id: str,
+ project: str,
+ ):
+ db_notifications = {
+ notification.name: notification
+ for notification in self._get_db_notifications(
+ session, cls, parent_id=parent_id
+ )
+ }
+ notifications = []
+ logger.debug(
+ "Storing notifications",
+ notifications_length=len(notification_objects),
+ parent_id=parent_id,
+ project=project,
+ )
+ for notification_model in notification_objects:
+ new_notification = False
+ notification = db_notifications.get(notification_model.name, None)
+ if not notification:
+ new_notification = True
+ notification = cls.Notification(
+ name=notification_model.name, parent_id=parent_id, project=project
+ )
+
+ notification.kind = notification_model.kind
+ notification.message = notification_model.message
+ notification.severity = notification_model.severity
+ notification.when = ",".join(notification_model.when)
+ notification.condition = notification_model.condition
+ notification.params = notification_model.params
+ notification.status = (
+ notification_model.status
+ or mlrun.common.schemas.NotificationStatus.PENDING
+ )
+ notification.sent_time = notification_model.sent_time
+
+ logger.debug(
+ f"Storing {'new' if new_notification else 'existing'} notification",
+ notification_name=notification.name,
+ notification_status=notification.status,
+ parent_id=parent_id,
+ project=project,
+ )
+ notifications.append(notification)
+
+ self._upsert(session, notifications)
+
+ def list_run_notifications(
+ self,
+ session,
+ run_uid: str,
+ project: str = "",
+ ) -> typing.List[mlrun.model.Notification]:
+
+ # iteration is 0, as we don't support multiple notifications per hyper param run, only for the whole run
+ run = self._get_run(session, run_uid, project, 0)
+ if not run:
+ return []
+
+ return [
+ self._transform_notification_record_to_schema(notification)
+ for notification in self._query(
+ session, Run.Notification, parent_id=run.id
+ ).all()
+ ]
+
+ def delete_run_notifications(
+ self,
+ session,
+ name: str = None,
+ run_uid: str = None,
+ project: str = None,
+ commit: bool = True,
+ ):
+ run_id = None
+ if run_uid:
+
+ # iteration is 0, as we don't support multiple notifications per hyper param run, only for the whole run
+ run = self._get_run(session, run_uid, project, 0)
+ if not run:
+ raise mlrun.errors.MLRunNotFoundError(
+ f"Run not found: uid={run_uid}, project={project}"
+ )
+ run_id = run.id
+
+ project = project or config.default_project
+ if project == "*":
+ project = None
+
+ query = self._get_db_notifications(session, Run, name, run_id, project)
+ for notification in query:
+ session.delete(notification)
+
+ if commit:
+ session.commit()
+
+ def set_run_notifications(
+ self,
+ session: Session,
+ project: str,
+ notifications: typing.List[mlrun.model.Notification],
+ identifier: mlrun.common.schemas.RunIdentifier,
+ **kwargs,
+ ):
+ """
+ Set notifications for a run. This will replace any existing notifications.
+ :param session: SQLAlchemy session
+ :param project: Project name
+ :param notifications: List of notifications to set
+ :param identifier: Run identifier
+ :param kwargs: Ignored additional arguments (for interfacing purposes)
+ """
+ run = self._get_run(session, identifier.uid, project, None)
+ if not run:
+ raise mlrun.errors.MLRunNotFoundError(
+ f"Run not found: project={project}, uid={identifier.uid}"
+ )
+
+ run.struct.setdefault("spec", {})["notifications"] = [
+ notification.to_dict() for notification in notifications
+ ]
+
+ # update run, delete and store notifications all in one transaction.
+ # using session.add instead of upsert, so we don't commit the run.
+ # the commit will happen at the end (in store_run_notifications, or manually at the end).
+ session.add(run)
+ self.delete_run_notifications(
+ session, run_uid=run.uid, project=project, commit=False
+ )
+ if notifications:
+ self.store_run_notifications(
+ session,
+ notification_objects=notifications,
+ run_uid=run.uid,
+ project=project,
+ )
+ self._commit(session, [run], ignore=True)
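Taken together, the new methods give the server-side DB layer a full notification lifecycle per run: `set_run_notifications` rewrites `spec.notifications` on the run struct, deletes the old rows and stores the new ones in a single transaction, while `list_run_notifications` rebuilds `mlrun.model.Notification` objects from the table. A hedged usage sketch, assuming `db` is an `SQLDB` instance, `session` an open SQLAlchemy session, and that `RunIdentifier` can be built from the run uid alone; the notification values are hypothetical:

```python
import mlrun.model
import mlrun.common.schemas

def replace_run_notifications(db, session, project: str, run_uid: str):
    notification = mlrun.model.Notification(
        kind="slack",
        name="notify-on-completion",
        message="run finished",
        severity="info",
        when=["completed", "error"],
        condition="",
        params={},
    )

    # Replaces whatever notifications are currently stored for the run.
    db.set_run_notifications(
        session,
        project,
        [notification],
        mlrun.common.schemas.RunIdentifier(uid=run_uid),
    )

    # Returns mlrun.model.Notification objects rebuilt from the DB rows.
    return db.list_run_notifications(session, run_uid, project=project)
```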