zenml-nightly 0.68.1.dev20241106__py3-none-any.whl → 0.70.0.dev20241116__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (232)
  1. README.md +1 -1
  2. RELEASE_NOTES.md +77 -0
  3. zenml/VERSION +1 -1
  4. zenml/__init__.py +2 -0
  5. zenml/artifacts/external_artifact.py +2 -1
  6. zenml/artifacts/utils.py +138 -79
  7. zenml/cli/base.py +4 -4
  8. zenml/cli/model.py +1 -6
  9. zenml/cli/stack.py +1 -0
  10. zenml/client.py +29 -74
  11. zenml/config/server_config.py +17 -1
  12. zenml/constants.py +2 -7
  13. zenml/data_validators/base_data_validator.py +2 -2
  14. zenml/enums.py +20 -4
  15. zenml/exceptions.py +4 -0
  16. zenml/integrations/aws/orchestrators/sagemaker_orchestrator.py +20 -18
  17. zenml/integrations/azure/orchestrators/azureml_orchestrator.py +1 -1
  18. zenml/integrations/deepchecks/data_validators/deepchecks_data_validator.py +1 -1
  19. zenml/integrations/evidently/__init__.py +1 -1
  20. zenml/integrations/gcp/orchestrators/vertex_orchestrator.py +1 -1
  21. zenml/integrations/lightning/orchestrators/lightning_orchestrator.py +29 -9
  22. zenml/integrations/tensorboard/visualizers/tensorboard_visualizer.py +60 -54
  23. zenml/integrations/vllm/services/vllm_deployment.py +16 -7
  24. zenml/metadata/lazy_load.py +20 -7
  25. zenml/model/model.py +17 -64
  26. zenml/model/utils.py +5 -0
  27. zenml/models/__init__.py +0 -12
  28. zenml/models/v2/base/filter.py +121 -8
  29. zenml/models/v2/core/artifact_version.py +42 -7
  30. zenml/models/v2/core/model_version.py +26 -5
  31. zenml/models/v2/core/pipeline_run.py +25 -6
  32. zenml/models/v2/core/run_metadata.py +2 -217
  33. zenml/models/v2/core/step_run.py +62 -24
  34. zenml/orchestrators/base_orchestrator.py +12 -1
  35. zenml/orchestrators/input_utils.py +44 -19
  36. zenml/orchestrators/step_launcher.py +4 -3
  37. zenml/orchestrators/step_run_utils.py +19 -15
  38. zenml/orchestrators/step_runner.py +21 -13
  39. zenml/orchestrators/utils.py +45 -26
  40. zenml/stack_deployments/aws_stack_deployment.py +23 -6
  41. zenml/stack_deployments/azure_stack_deployment.py +28 -5
  42. zenml/stack_deployments/gcp_stack_deployment.py +25 -8
  43. zenml/stack_deployments/stack_deployment.py +3 -5
  44. zenml/steps/base_step.py +1 -1
  45. zenml/steps/entrypoint_function_utils.py +3 -5
  46. zenml/steps/step_context.py +3 -2
  47. zenml/steps/utils.py +13 -2
  48. zenml/utils/metadata_utils.py +335 -0
  49. zenml/zen_server/auth.py +221 -3
  50. zenml/zen_server/cache.py +208 -0
  51. zenml/zen_server/dashboard/assets/{404-DT4QRUqN.js → 404-NVXKFp-x.js} +1 -1
  52. zenml/zen_server/dashboard/assets/{@radix-DP6vWzyx.js → @radix-DeK6qiuw.js} +1 -1
  53. zenml/zen_server/dashboard/assets/{@react-router-BMhZulnd.js → @react-router-B3Z5rLr2.js} +1 -1
  54. zenml/zen_server/dashboard/assets/{@reactflow-8U9qNlMR.js → @reactflow-CK0KJUen.js} +2 -2
  55. zenml/zen_server/dashboard/assets/{@tanstack-BUCbhJyH.js → @tanstack-DT5WLu9C.js} +1 -1
  56. zenml/zen_server/dashboard/assets/AlertDialogDropdownItem-DezXKmDf.js +1 -0
  57. zenml/zen_server/dashboard/assets/{CodeSnippet-CqybNv0k.js → CodeSnippet-JzR8CEtw.js} +2 -2
  58. zenml/zen_server/dashboard/assets/{CollapsibleCard-0r_8G2Lj.js → CollapsibleCard-DQW_ktMO.js} +1 -1
  59. zenml/zen_server/dashboard/assets/{Commands-BDjgBQKi.js → Commands-DL2kwkRd.js} +1 -1
  60. zenml/zen_server/dashboard/assets/ComponentBadge-D_g62Wv8.js +1 -0
  61. zenml/zen_server/dashboard/assets/{CopyButton-C745BrKi.js → CopyButton-LNcWaa14.js} +1 -1
  62. zenml/zen_server/dashboard/assets/{CsvVizualization-PpAq0CeZ.js → CsvVizualization-DknpE5ej.js} +5 -5
  63. zenml/zen_server/dashboard/assets/{DialogItem-DcVCZEew.js → DialogItem-Bxf8FuAT.js} +1 -1
  64. zenml/zen_server/dashboard/assets/{DisplayDate-BeXgUG_C.js → DisplayDate-CDMUcQHS.js} +1 -1
  65. zenml/zen_server/dashboard/assets/{EmptyState-DeK7H4pr.js → EmptyState-BzdlCwp3.js} +1 -1
  66. zenml/zen_server/dashboard/assets/{Error-BMlzibXj.js → Error-DYflYyps.js} +1 -1
  67. zenml/zen_server/dashboard/assets/ExecutionStatus-C7zyIQKZ.js +1 -0
  68. zenml/zen_server/dashboard/assets/{Helpbox-BLf40fLV.js → Helpbox-oYSGpLqd.js} +1 -1
  69. zenml/zen_server/dashboard/assets/{Infobox-BwisKifi.js → Infobox-Cx4xGoXR.js} +1 -1
  70. zenml/zen_server/dashboard/assets/{InlineAvatar-jEgodSgX.js → InlineAvatar-DiGOWNKF.js} +1 -1
  71. zenml/zen_server/dashboard/assets/{Lock-3lLt1ih0.js → Lock-CYYy18Mm.js} +1 -1
  72. zenml/zen_server/dashboard/assets/{MarkdownVisualization-8O9kTr-2.js → MarkdownVisualization-ylXaAxev.js} +1 -1
  73. zenml/zen_server/dashboard/assets/NestedCollapsible-DYbgyKxK.js +1 -0
  74. zenml/zen_server/dashboard/assets/{NumberBox-T9eELfLZ.js → NumberBox-Dtp3J6g5.js} +1 -1
  75. zenml/zen_server/dashboard/assets/Partials-03iZf8-N.js +1 -0
  76. zenml/zen_server/dashboard/assets/{PasswordChecker-CW0kqY0W.js → PasswordChecker-B0nadgh6.js} +1 -1
  77. zenml/zen_server/dashboard/assets/ProBadge-D_EB8HNo.js +1 -0
  78. zenml/zen_server/dashboard/assets/ProCta-DqNS4v3x.js +1 -0
  79. zenml/zen_server/dashboard/assets/ProviderIcon-Bki2aw8w.js +1 -0
  80. zenml/zen_server/dashboard/assets/{ProviderRadio-BROY1700.js → ProviderRadio-8f43sPD4.js} +1 -1
  81. zenml/zen_server/dashboard/assets/RunSelector-DkPiIiNr.js +1 -0
  82. zenml/zen_server/dashboard/assets/RunsBody-07YEO7qI.js +1 -0
  83. zenml/zen_server/dashboard/assets/SearchField-lp1KgU4e.js +1 -0
  84. zenml/zen_server/dashboard/assets/{SecretTooltip-C_qByGWB.js → SecretTooltip-CgnbyeOx.js} +1 -1
  85. zenml/zen_server/dashboard/assets/{SetPassword-7pRB00El.js → SetPassword-CpP418A2.js} +1 -1
  86. zenml/zen_server/dashboard/assets/StackList-WvuKQusZ.js +1 -0
  87. zenml/zen_server/dashboard/assets/Tabs-BktHkCJJ.js +1 -0
  88. zenml/zen_server/dashboard/assets/Tick-BlMoIlJT.js +1 -0
  89. zenml/zen_server/dashboard/assets/{UpdatePasswordSchemas-DckMEkFf.js → UpdatePasswordSchemas-Sc0A0pP-.js} +1 -1
  90. zenml/zen_server/dashboard/assets/{UsageReason-DVceN14P.js → UsageReason-YYduL4fj.js} +1 -1
  91. zenml/zen_server/dashboard/assets/{WizardFooter-CW0Cvd70.js → WizardFooter-dgmizSJC.js} +1 -1
  92. zenml/zen_server/dashboard/assets/all-pipeline-runs-query-D-c2G6lV.js +1 -0
  93. zenml/zen_server/dashboard/assets/check-DloQpStc.js +1 -0
  94. zenml/zen_server/dashboard/assets/{check-circle-Dwxliy1Z.js → check-circle-jNbX5-sR.js} +1 -1
  95. zenml/zen_server/dashboard/assets/{chevron-down-8wLBS5pQ.js → chevron-down-6JyMkfjR.js} +1 -1
  96. zenml/zen_server/dashboard/assets/{chevron-right-double-DoD8iXWM.js → chevron-right-double-D7ojK9Co.js} +1 -1
  97. zenml/zen_server/dashboard/assets/{code-browser-CZUQs3Wa.js → code-browser-CUFUIHfp.js} +1 -1
  98. zenml/zen_server/dashboard/assets/{copy-CaSMXwiU.js → copy-C8XQA2Ug.js} +1 -1
  99. zenml/zen_server/dashboard/assets/create-stack-DM_JPgef.js +1 -0
  100. zenml/zen_server/dashboard/assets/delete-run-CJdh1P_h.js +1 -0
  101. zenml/zen_server/dashboard/assets/{docker-BFNgg-z3.js → docker-BdA9vrnW.js} +1 -1
  102. zenml/zen_server/dashboard/assets/{dots-horizontal-DK5Duzx4.js → dots-horizontal-otGBOSDJ.js} +1 -1
  103. zenml/zen_server/dashboard/assets/{form-schemas-1AyOCx90.js → form-schemas-K6FYKjwa.js} +1 -1
  104. zenml/zen_server/dashboard/assets/{gcp-7M2Yf3ZK.js → gcp-CFtm4BA7.js} +1 -1
  105. zenml/zen_server/dashboard/assets/{help-Dam461dC.js → help-Cc9bBIJH.js} +1 -1
  106. zenml/zen_server/dashboard/assets/index-B1mVPYxf.js +1 -0
  107. zenml/zen_server/dashboard/assets/index-BAkC7FXi.js +1 -0
  108. zenml/zen_server/dashboard/assets/{index-QQb7wQEC.js → index-CCOPpudF.js} +8 -8
  109. zenml/zen_server/dashboard/assets/index-CEV4Cvaf.js +1 -0
  110. zenml/zen_server/dashboard/assets/index-DlGvJQPn.css +1 -0
  111. zenml/zen_server/dashboard/assets/{index-BVJ8n2-j.js → index-Uu49AX48.js} +1 -1
  112. zenml/zen_server/dashboard/assets/{index.esm-cuVep_NJ.js → index.esm-Dy6Z9Ung.js} +1 -1
  113. zenml/zen_server/dashboard/assets/{kubernetes--g7r02Zu.js → kubernetes-B2wmAJ1d.js} +1 -1
  114. zenml/zen_server/dashboard/assets/{layout-DCSYN7-C.js → layout-BtHBmE4w.js} +1 -1
  115. zenml/zen_server/dashboard/assets/{link-external-CBEk6kEG.js → link-external-b9AXw_sW.js} +1 -1
  116. zenml/zen_server/dashboard/assets/{login-mutation-DTcAFP1l.js → login-mutation-hf-lK87O.js} +1 -1
  117. zenml/zen_server/dashboard/assets/{logs-D5bdJGur.js → logs-WMSM52RF.js} +1 -1
  118. zenml/zen_server/dashboard/assets/{not-found-Cc-JkRH2.js → not-found-BGirLjU-.js} +1 -1
  119. zenml/zen_server/dashboard/assets/{package-Cs35Szwh.js → package-C6uypY4h.js} +1 -1
  120. zenml/zen_server/dashboard/assets/page-0JE_-Ec1.js +1 -0
  121. zenml/zen_server/dashboard/assets/{page-DH_Z7iW1.js → page-6m6yHHlE.js} +1 -1
  122. zenml/zen_server/dashboard/assets/page-BDigxVpo.js +1 -0
  123. zenml/zen_server/dashboard/assets/page-BR68V0V1.js +1 -0
  124. zenml/zen_server/dashboard/assets/page-BRLpxOt0.js +1 -0
  125. zenml/zen_server/dashboard/assets/{page-BQQKaabe.js → page-BU7huvKw.js} +3 -3
  126. zenml/zen_server/dashboard/assets/page-BvqLv2Ky.js +1 -0
  127. zenml/zen_server/dashboard/assets/page-C00YAkaB.js +1 -0
  128. zenml/zen_server/dashboard/assets/{page-N4qoPHKb.js → page-CD-DcWoy.js} +1 -1
  129. zenml/zen_server/dashboard/assets/page-COXXJj1k.js +1 -0
  130. zenml/zen_server/dashboard/assets/page-CbpvrsDL.js +1 -0
  131. zenml/zen_server/dashboard/assets/page-CdMWnQak.js +1 -0
  132. zenml/zen_server/dashboard/assets/{page-ClUVkl-O.js → page-CjGdWY13.js} +1 -1
  133. zenml/zen_server/dashboard/assets/page-CwxrFarU.js +1 -0
  134. zenml/zen_server/dashboard/assets/{page-DLixvR-7.js → page-D01JhjQB.js} +1 -1
  135. zenml/zen_server/dashboard/assets/page-D6uU2ax4.js +1 -0
  136. zenml/zen_server/dashboard/assets/page-D7S3aCbF.js +1 -0
  137. zenml/zen_server/dashboard/assets/{page-9yplj5JT.js → page-DLC-bNBP.js} +1 -1
  138. zenml/zen_server/dashboard/assets/page-DXSTpqRD.js +1 -0
  139. zenml/zen_server/dashboard/assets/{page-DzpVUZ8f.js → page-DakHVWXF.js} +1 -1
  140. zenml/zen_server/dashboard/assets/{page-DIOXwhiD.js → page-Df-Fw0aq.js} +1 -1
  141. zenml/zen_server/dashboard/assets/{page-B-y2XKIc.js → page-DfbXf_8s.js} +1 -1
  142. zenml/zen_server/dashboard/assets/page-DjRJCGb3.js +1 -0
  143. zenml/zen_server/dashboard/assets/{page-C0N5q3l7.js → page-Djikxq_S.js} +1 -1
  144. zenml/zen_server/dashboard/assets/page-Dnovpa0i.js +3 -0
  145. zenml/zen_server/dashboard/assets/page-Dot3LPmL.js +1 -0
  146. zenml/zen_server/dashboard/assets/page-Vcxara9U.js +1 -0
  147. zenml/zen_server/dashboard/assets/page-Xynx4btY.js +14 -0
  148. zenml/zen_server/dashboard/assets/page-YpKAqVSa.js +1 -0
  149. zenml/zen_server/dashboard/assets/page-yYC9OI-E.js +1 -0
  150. zenml/zen_server/dashboard/assets/{persist-DNb5cdrU.js → persist-Coz7ZWvz.js} +1 -1
  151. zenml/zen_server/dashboard/assets/{persist-CP0JmYZ4.js → persist-GjC8PZoC.js} +1 -1
  152. zenml/zen_server/dashboard/assets/{plus-C9IxgN2M.js → plus-tf1V2hTJ.js} +1 -1
  153. zenml/zen_server/dashboard/assets/{refresh-BVu22P_C.js → refresh-BjOeWlEq.js} +1 -1
  154. zenml/zen_server/dashboard/assets/{rocket-CONEmRmB.js → rocket-DjT2cDvG.js} +1 -1
  155. zenml/zen_server/dashboard/assets/sharedSchema-CQb14VSr.js +14 -0
  156. zenml/zen_server/dashboard/assets/stack-detail-query-OPEW-cDJ.js +1 -0
  157. zenml/zen_server/dashboard/assets/{tick-circle-CM1ZScbQ.js → tick-circle-BEX_Tp4v.js} +1 -1
  158. zenml/zen_server/dashboard/assets/{trash-DkJHMOg7.js → trash-arLUMWMS.js} +1 -1
  159. zenml/zen_server/dashboard/assets/{update-server-settings-mutation-DsU8cNVl.js → update-server-settings-mutation-LwuQfHYn.js} +1 -1
  160. zenml/zen_server/dashboard/assets/upgrade-form-CwRHBuXB.webp +0 -0
  161. zenml/zen_server/dashboard/assets/url-CkvKAnwF.js +1 -0
  162. zenml/zen_server/dashboard/assets/{zod-D89GC_vc.js → zod-BwEbpOxH.js} +1 -1
  163. zenml/zen_server/dashboard/index.html +7 -7
  164. zenml/zen_server/deploy/helm/Chart.yaml +1 -1
  165. zenml/zen_server/deploy/helm/README.md +2 -2
  166. zenml/zen_server/exceptions.py +2 -0
  167. zenml/zen_server/jwt.py +30 -13
  168. zenml/zen_server/rbac/endpoint_utils.py +43 -1
  169. zenml/zen_server/rbac/utils.py +0 -2
  170. zenml/zen_server/routers/artifact_version_endpoints.py +27 -1
  171. zenml/zen_server/routers/auth_endpoints.py +134 -102
  172. zenml/zen_server/routers/logs_endpoints.py +66 -0
  173. zenml/zen_server/routers/workspaces_endpoints.py +3 -4
  174. zenml/zen_server/template_execution/utils.py +14 -16
  175. zenml/zen_server/utils.py +27 -0
  176. zenml/zen_server/zen_server_api.py +6 -3
  177. zenml/zen_stores/migrations/versions/0.70.0_release.py +23 -0
  178. zenml/zen_stores/migrations/versions/1cb6477f72d6_move_artifact_save_type.py +99 -0
  179. zenml/zen_stores/migrations/versions/904464ea4041_add_pipeline_model_run_unique_constraints.py +192 -0
  180. zenml/zen_stores/migrations/versions/b557b2871693_update_step_run_input_types.py +33 -0
  181. zenml/zen_stores/rest_zen_store.py +68 -64
  182. zenml/zen_stores/schemas/artifact_schemas.py +8 -1
  183. zenml/zen_stores/schemas/model_schemas.py +27 -3
  184. zenml/zen_stores/schemas/pipeline_run_schemas.py +6 -1
  185. zenml/zen_stores/schemas/pipeline_schemas.py +8 -2
  186. zenml/zen_stores/schemas/run_metadata_schemas.py +1 -48
  187. zenml/zen_stores/schemas/step_run_schemas.py +18 -10
  188. zenml/zen_stores/sql_zen_store.py +283 -219
  189. zenml/zen_stores/zen_store_interface.py +15 -42
  190. {zenml_nightly-0.68.1.dev20241106.dist-info → zenml_nightly-0.70.0.dev20241116.dist-info}/METADATA +2 -2
  191. {zenml_nightly-0.68.1.dev20241106.dist-info → zenml_nightly-0.70.0.dev20241116.dist-info}/RECORD +194 -179
  192. zenml/zen_server/dashboard/assets/AlertDialogDropdownItem-C6N2rGrB.js +0 -1
  193. zenml/zen_server/dashboard/assets/ComponentBadge-DUiEYJHu.js +0 -1
  194. zenml/zen_server/dashboard/assets/ComponentFallbackDialog-BFoH5K4V.js +0 -1
  195. zenml/zen_server/dashboard/assets/ComponentIcon-CAIoUis2.js +0 -1
  196. zenml/zen_server/dashboard/assets/Partials-YPBB3V4q.js +0 -1
  197. zenml/zen_server/dashboard/assets/ProviderIcon-Bb3Xha5A.js +0 -1
  198. zenml/zen_server/dashboard/assets/RunSelector-DCiL3M1c.js +0 -1
  199. zenml/zen_server/dashboard/assets/SearchField-DfUiGFVd.js +0 -1
  200. zenml/zen_server/dashboard/assets/Tick-CykQFPj2.js +0 -1
  201. zenml/zen_server/dashboard/assets/cloud-only-B-s_HMDm.js +0 -1
  202. zenml/zen_server/dashboard/assets/codespaces-BitYDX9d.gif +0 -0
  203. zenml/zen_server/dashboard/assets/create-stack-CEmaPZ4c.js +0 -1
  204. zenml/zen_server/dashboard/assets/delete-run-D-LKbGyz.js +0 -1
  205. zenml/zen_server/dashboard/assets/index-Bpmj40BI.js +0 -1
  206. zenml/zen_server/dashboard/assets/index-CbU4Ln_E.css +0 -1
  207. zenml/zen_server/dashboard/assets/index-DKPhqP2B.js +0 -1
  208. zenml/zen_server/dashboard/assets/page-BBpOxVcY.js +0 -1
  209. zenml/zen_server/dashboard/assets/page-BRInM1Lg.js +0 -1
  210. zenml/zen_server/dashboard/assets/page-BjjlMk7s.js +0 -1
  211. zenml/zen_server/dashboard/assets/page-Bvd7YH2A.js +0 -1
  212. zenml/zen_server/dashboard/assets/page-CT3Nep8W.js +0 -1
  213. zenml/zen_server/dashboard/assets/page-C_f47pBf.js +0 -1
  214. zenml/zen_server/dashboard/assets/page-Cmv8C_yM.js +0 -3
  215. zenml/zen_server/dashboard/assets/page-CyN2bdWG.js +0 -1
  216. zenml/zen_server/dashboard/assets/page-CzzXH4fs.js +0 -1
  217. zenml/zen_server/dashboard/assets/page-DTlGjgnG.js +0 -1
  218. zenml/zen_server/dashboard/assets/page-Dbpl86h0.js +0 -1
  219. zenml/zen_server/dashboard/assets/page-Ddgy6kDS.js +0 -1
  220. zenml/zen_server/dashboard/assets/page-DtCAfBLy.js +0 -9
  221. zenml/zen_server/dashboard/assets/page-Dx16z7nA.js +0 -1
  222. zenml/zen_server/dashboard/assets/page-McUyYbo1.js +0 -1
  223. zenml/zen_server/dashboard/assets/page-T1P3RyAR.js +0 -1
  224. zenml/zen_server/dashboard/assets/page-bKaULTGG.js +0 -1
  225. zenml/zen_server/dashboard/assets/page-sbXUJy9t.js +0 -1
  226. zenml/zen_server/dashboard/assets/sharedSchema-TMLu-nYQ.js +0 -14
  227. zenml/zen_server/dashboard/assets/stack-detail-query-xmYxSsUY.js +0 -1
  228. zenml/zen_server/dashboard/assets/url-D5le3J4q.js +0 -1
  229. zenml/zen_server/routers/run_metadata_endpoints.py +0 -96
  230. {zenml_nightly-0.68.1.dev20241106.dist-info → zenml_nightly-0.70.0.dev20241116.dist-info}/LICENSE +0 -0
  231. {zenml_nightly-0.68.1.dev20241106.dist-info → zenml_nightly-0.70.0.dev20241116.dist-info}/WHEEL +0 -0
  232. {zenml_nightly-0.68.1.dev20241106.dist-info → zenml_nightly-0.70.0.dev20241116.dist-info}/entry_points.txt +0 -0
zenml/zen_stores/migrations/versions/1cb6477f72d6_move_artifact_save_type.py (new file)
@@ -0,0 +1,99 @@
+"""move artifact save type [1cb6477f72d6].
+
+Revision ID: 1cb6477f72d6
+Revises: c22561cbb3a9
+Create Date: 2024-10-10 15:44:09.465210
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "1cb6477f72d6"
+down_revision = "c22561cbb3a9"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    """Upgrade database schema and/or data, creating a new revision."""
+    # Step 1: Add nullable save_type column to artifact_version
+    with op.batch_alter_table("artifact_version", schema=None) as batch_op:
+        batch_op.add_column(sa.Column("save_type", sa.TEXT(), nullable=True))
+
+    # Step 2: Move data from step_run_output_artifact.type to artifact_version.save_type
+    op.execute("""
+        UPDATE artifact_version
+        SET save_type = (
+            SELECT max(step_run_output_artifact.type)
+            FROM step_run_output_artifact
+            WHERE step_run_output_artifact.artifact_id = artifact_version.id
+            GROUP BY artifact_id
+        )
+    """)
+    op.execute("""
+        UPDATE artifact_version
+        SET save_type = 'step_output'
+        WHERE artifact_version.save_type = 'default'
+    """)
+    op.execute("""
+        UPDATE artifact_version
+        SET save_type = 'external'
+        WHERE save_type is NULL
+    """)
+
+    # # Step 3: Set save_type to non-nullable
+    with op.batch_alter_table("artifact_version", schema=None) as batch_op:
+        batch_op.alter_column(
+            "save_type",
+            existing_type=sa.TEXT(),
+            nullable=False,
+        )
+
+    # Step 4: Remove type column from step_run_output_artifact
+    with op.batch_alter_table(
+        "step_run_output_artifact", schema=None
+    ) as batch_op:
+        batch_op.drop_column("type")
+
+
+def downgrade() -> None:
+    """Downgrade database schema and/or data back to the previous revision."""
+    # Add type column back to step_run_output_artifact
+    with op.batch_alter_table(
+        "step_run_output_artifact", schema=None
+    ) as batch_op:
+        batch_op.add_column(
+            sa.Column("type", sa.TEXT(), nullable=True),
+        )
+
+    # Move data back from artifact_version.save_type to step_run_output_artifact.type
+    op.execute("""
+        UPDATE step_run_output_artifact
+        SET type = (
+            SELECT max(artifact_version.save_type)
+            FROM artifact_version
+            WHERE step_run_output_artifact.artifact_id = artifact_version.id
+            GROUP BY artifact_id
+        )
+    """)
+    op.execute("""
+        UPDATE step_run_output_artifact
+        SET type = 'default'
+        WHERE step_run_output_artifact.type = 'step_output'
+    """)
+
+    # Set type to non-nullable
+    with op.batch_alter_table(
+        "step_run_output_artifact", schema=None
+    ) as batch_op:
+        batch_op.alter_column(
+            "type",
+            existing_type=sa.TEXT(),
+            nullable=False,
+        )
+
+    # Remove save_type column from artifact_version
+    with op.batch_alter_table("artifact_version", schema=None) as batch_op:
+        batch_op.drop_column("save_type")
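Illustrative sketch (not part of the released file): the value mapping applied by the data migration above, expressed as a plain Python function. An artifact version inherits the old step_run_output_artifact.type, 'default' becomes 'step_output', and versions with no linked step output become 'external'.

# Sketch only; mirrors the three UPDATE statements in upgrade() above.
from typing import Optional

def map_save_type(step_output_type: Optional[str]) -> str:
    if step_output_type is None:
        return "external"      # artifact was never linked to a step output
    if step_output_type == "default":
        return "step_output"   # renamed value
    return step_output_type    # any other explicit type is kept as-is

assert map_save_type(None) == "external"
assert map_save_type("default") == "step_output"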
zenml/zen_stores/migrations/versions/904464ea4041_add_pipeline_model_run_unique_constraints.py (new file)
@@ -0,0 +1,192 @@
+"""Add pipeline, model and run unique constraints [904464ea4041].
+
+Revision ID: 904464ea4041
+Revises: b557b2871693
+Create Date: 2024-11-04 10:27:05.450092
+
+"""
+
+from collections import defaultdict
+from typing import Any, Dict, Set
+
+import sqlalchemy as sa
+from alembic import op
+
+from zenml.logger import get_logger
+
+logger = get_logger(__name__)
+
+# revision identifiers, used by Alembic.
+revision = "904464ea4041"
+down_revision = "b557b2871693"
+branch_labels = None
+depends_on = None
+
+
+def resolve_duplicate_entities() -> None:
+    """Resolve duplicate entities."""
+    connection = op.get_bind()
+    meta = sa.MetaData()
+    meta.reflect(
+        bind=connection,
+        only=("pipeline_run", "pipeline", "model", "model_version"),
+    )
+
+    # Remove duplicate names for runs, pipelines and models
+    for table_name in ["pipeline_run", "pipeline", "model"]:
+        table = sa.Table(table_name, meta)
+        result = connection.execute(
+            sa.select(table.c.id, table.c.name, table.c.workspace_id)
+        ).all()
+        existing: Dict[str, Set[str]] = defaultdict(set)
+
+        for id_, name, workspace_id in result:
+            names_in_workspace = existing[workspace_id]
+
+            if name in names_in_workspace:
+                new_name = f"{name}_{id_[:6]}"
+                logger.warning(
+                    "Migrating %s name from %s to %s to resolve duplicate name.",
+                    table_name,
+                    name,
+                    new_name,
+                )
+                connection.execute(
+                    sa.update(table)
+                    .where(table.c.id == id_)
+                    .values(name=new_name)
+                )
+                names_in_workspace.add(new_name)
+            else:
+                names_in_workspace.add(name)
+
+    # Remove duplicate names and version numbers for model versions
+    model_version_table = sa.Table("model_version", meta)
+    result = connection.execute(
+        sa.select(
+            model_version_table.c.id,
+            model_version_table.c.name,
+            model_version_table.c.number,
+            model_version_table.c.model_id,
+        )
+    ).all()
+
+    existing_names: Dict[str, Set[str]] = defaultdict(set)
+    existing_numbers: Dict[str, Set[int]] = defaultdict(set)
+
+    needs_update = []
+
+    for id_, name, number, model_id in result:
+        names_for_model = existing_names[model_id]
+        numbers_for_model = existing_numbers[model_id]
+
+        needs_new_name = name in names_for_model
+        needs_new_number = number in numbers_for_model
+
+        if needs_new_name or needs_new_number:
+            needs_update.append(
+                (id_, name, number, model_id, needs_new_name, needs_new_number)
+            )
+
+        names_for_model.add(name)
+        numbers_for_model.add(number)
+
+    for (
+        id_,
+        name,
+        number,
+        model_id,
+        needs_new_name,
+        needs_new_number,
+    ) in needs_update:
+        values: Dict[str, Any] = {}
+
+        is_numeric_version = str(number) == name
+        next_numeric_version = max(existing_numbers[model_id]) + 1
+
+        if is_numeric_version:
+            # No matter if the name or number clashes, we need to update both
+            values["number"] = next_numeric_version
+            values["name"] = str(next_numeric_version)
+            existing_numbers[model_id].add(next_numeric_version)
+            logger.warning(
+                "Migrating model version %s to %s to resolve duplicate name.",
+                name,
+                values["name"],
+            )
+        else:
+            if needs_new_name:
+                values["name"] = f"{name}_{id_[:6]}"
+                logger.warning(
+                    "Migrating model version %s to %s to resolve duplicate name.",
+                    name,
+                    values["name"],
+                )

+            if needs_new_number:
+                values["number"] = next_numeric_version
+                existing_numbers[model_id].add(next_numeric_version)
+
+        connection.execute(
+            sa.update(model_version_table)
+            .where(model_version_table.c.id == id_)
+            .values(**values)
+        )
+
+
+def upgrade() -> None:
+    """Upgrade database schema and/or data, creating a new revision."""
+    # ### commands auto generated by Alembic - please adjust! ###
+
+    resolve_duplicate_entities()
+
+    with op.batch_alter_table("pipeline", schema=None) as batch_op:
+        batch_op.create_unique_constraint(
+            "unique_pipeline_name_in_workspace", ["name", "workspace_id"]
+        )
+
+    with op.batch_alter_table("pipeline_run", schema=None) as batch_op:
+        batch_op.create_unique_constraint(
+            "unique_run_name_in_workspace", ["name", "workspace_id"]
+        )
+
+    with op.batch_alter_table("model", schema=None) as batch_op:
+        batch_op.create_unique_constraint(
+            "unique_model_name_in_workspace", ["name", "workspace_id"]
+        )
+
+    with op.batch_alter_table("model_version", schema=None) as batch_op:
+        batch_op.create_unique_constraint(
+            "unique_version_for_model_id", ["name", "model_id"]
+        )
+        batch_op.create_unique_constraint(
+            "unique_version_number_for_model_id", ["number", "model_id"]
+        )
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade database schema and/or data back to the previous revision."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("model_version", schema=None) as batch_op:
+        batch_op.drop_constraint(
+            "unique_version_number_for_model_id", type_="unique"
+        )
+        batch_op.drop_constraint("unique_version_for_model_id", type_="unique")
+
+    with op.batch_alter_table("model", schema=None) as batch_op:
+        batch_op.drop_constraint(
+            "unique_model_name_in_workspace", type_="unique"
+        )
+
+    with op.batch_alter_table("pipeline_run", schema=None) as batch_op:
+        batch_op.drop_constraint(
+            "unique_run_name_in_workspace", type_="unique"
+        )
+
+    with op.batch_alter_table("pipeline", schema=None) as batch_op:
+        batch_op.drop_constraint(
+            "unique_pipeline_name_in_workspace", type_="unique"
+        )
+
+    # ### end Alembic commands ###
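The duplicate-name resolution above renames clashing rows by appending the first six characters of the row's UUID. A minimal illustration of that scheme (the ID value is made up):

# Illustration of the rename pattern used by resolve_duplicate_entities().
name = "training_pipeline"
id_ = "3f9a1c7e-0000-0000-0000-000000000000"  # hypothetical UUID string
new_name = f"{name}_{id_[:6]}"
print(new_name)  # training_pipeline_3f9a1c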
zenml/zen_stores/migrations/versions/b557b2871693_update_step_run_input_types.py (new file)
@@ -0,0 +1,33 @@
+"""Update step run input types [b557b2871693].
+
+Revision ID: b557b2871693
+Revises: 1cb6477f72d6
+Create Date: 2024-10-30 13:06:55.147202
+
+"""
+
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "b557b2871693"
+down_revision = "1cb6477f72d6"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    """Upgrade database schema and/or data, creating a new revision."""
+    op.execute("""
+        UPDATE step_run_input_artifact
+        SET type = 'step_output'
+        WHERE type = 'default'
+    """)
+
+
+def downgrade() -> None:
+    """Downgrade database schema and/or data back to the previous revision."""
+    op.execute("""
+        UPDATE step_run_input_artifact
+        SET type = 'default'
+        WHERE type = 'step_output'
+    """)
zenml/zen_stores/rest_zen_store.py
@@ -57,6 +57,7 @@ from zenml.constants import (
     ARTIFACT_VERSIONS,
     ARTIFACT_VISUALIZATIONS,
     ARTIFACTS,
+    BATCH,
     CODE_REFERENCES,
     CODE_REPOSITORIES,
     CONFIG,
@@ -109,6 +110,7 @@ from zenml.constants import (
     WORKSPACES,
 )
 from zenml.enums import (
+    APITokenType,
     OAuthGrantTypes,
     StackDeploymentProvider,
     StoreType,
@@ -200,9 +202,7 @@ from zenml.models import (
     PipelineRunResponse,
     PipelineRunUpdate,
     PipelineUpdate,
-    RunMetadataFilter,
     RunMetadataRequest,
-    RunMetadataResponse,
     RunTemplateFilter,
     RunTemplateRequest,
     RunTemplateResponse,
@@ -991,6 +991,23 @@ class RestZenStore(BaseZenStore):
             route=ARTIFACT_VERSIONS,
         )

+    def batch_create_artifact_versions(
+        self, artifact_versions: List[ArtifactVersionRequest]
+    ) -> List[ArtifactVersionResponse]:
+        """Creates a batch of artifact versions.
+
+        Args:
+            artifact_versions: The artifact versions to create.
+
+        Returns:
+            The created artifact versions.
+        """
+        return self._batch_create_resources(
+            resources=artifact_versions,
+            response_model=ArtifactVersionResponse,
+            route=ARTIFACT_VERSIONS,
+        )
+
     def get_artifact_version(
         self, artifact_version_id: UUID, hydrate: bool = True
     ) -> ArtifactVersionResponse:
@@ -1996,9 +2013,7 @@ class RestZenStore(BaseZenStore):

     # ----------------------------- Run Metadata -----------------------------

-    def create_run_metadata(
-        self, run_metadata: RunMetadataRequest
-    ) -> List[RunMetadataResponse]:
+    def create_run_metadata(self, run_metadata: RunMetadataRequest) -> None:
         """Creates run metadata.

         Args:
@@ -2008,55 +2023,8 @@ class RestZenStore(BaseZenStore):
             The created run metadata.
         """
         route = f"{WORKSPACES}/{str(run_metadata.workspace)}{RUN_METADATA}"
-        response_body = self.post(f"{route}", body=run_metadata)
-        result: List[RunMetadataResponse] = []
-        if isinstance(response_body, list):
-            for metadata in response_body or []:
-                result.append(RunMetadataResponse.model_validate(metadata))
-        return result
-
-    def get_run_metadata(
-        self, run_metadata_id: UUID, hydrate: bool = True
-    ) -> RunMetadataResponse:
-        """Gets run metadata with the given ID.
-
-        Args:
-            run_metadata_id: The ID of the run metadata to get.
-            hydrate: Flag deciding whether to hydrate the output model(s)
-                by including metadata fields in the response.
-
-        Returns:
-            The run metadata.
-        """
-        return self._get_resource(
-            resource_id=run_metadata_id,
-            route=RUN_METADATA,
-            response_model=RunMetadataResponse,
-            params={"hydrate": hydrate},
-        )
-
-    def list_run_metadata(
-        self,
-        run_metadata_filter_model: RunMetadataFilter,
-        hydrate: bool = False,
-    ) -> Page[RunMetadataResponse]:
-        """List run metadata.
-
-        Args:
-            run_metadata_filter_model: All filter parameters including
-                pagination params.
-            hydrate: Flag deciding whether to hydrate the output model(s)
-                by including metadata fields in the response.
-
-        Returns:
-            The run metadata.
-        """
-        return self._list_paginated_resources(
-            route=RUN_METADATA,
-            response_model=RunMetadataResponse,
-            filter_model=run_metadata_filter_model,
-            params={"hydrate": hydrate},
-        )
+        self.post(f"{route}", body=run_metadata)
+        return None

     # ----------------------------- Schedules -----------------------------

@@ -3905,17 +3873,16 @@ class RestZenStore(BaseZenStore):

     def get_api_token(
         self,
-        pipeline_id: Optional[UUID] = None,
         schedule_id: Optional[UUID] = None,
-        expires_minutes: Optional[int] = None,
+        pipeline_run_id: Optional[UUID] = None,
+        step_run_id: Optional[UUID] = None,
     ) -> str:
         """Get an API token for a workload.

         Args:
-            pipeline_id: The ID of the pipeline to get a token for.
             schedule_id: The ID of the schedule to get a token for.
-            expires_minutes: The number of minutes for which the token should
-                be valid. If not provided, the token will be valid indefinitely.
+            pipeline_run_id: The ID of the pipeline run to get a token for.
+            step_run_id: The ID of the step run to get a token for.

         Returns:
             The API token.
@@ -3923,13 +3890,16 @@ class RestZenStore(BaseZenStore):
         Raises:
             ValueError: if the server response is not valid.
         """
-        params: Dict[str, Any] = {}
-        if pipeline_id:
-            params["pipeline_id"] = pipeline_id
+        params: Dict[str, Any] = {
+            # Python clients may only request workload tokens.
+            "token_type": APITokenType.WORKLOAD.value,
+        }
         if schedule_id:
             params["schedule_id"] = schedule_id
-        if expires_minutes:
-            params["expires_minutes"] = expires_minutes
+        if pipeline_run_id:
+            params["pipeline_run_id"] = pipeline_run_id
+        if step_run_id:
+            params["step_run_id"] = step_run_id
         response_body = self.get(API_TOKEN, params=params)
         if not isinstance(response_body, str):
             raise ValueError(
@@ -4518,6 +4488,40 @@ class RestZenStore(BaseZenStore):

         return response_model.model_validate(response_body)

+    def _batch_create_resources(
+        self,
+        resources: List[AnyRequest],
+        response_model: Type[AnyResponse],
+        route: str,
+        params: Optional[Dict[str, Any]] = None,
+    ) -> List[AnyResponse]:
+        """Create a new batch of resources.
+
+        Args:
+            resources: The resources to create.
+            response_model: The response model of an individual resource.
+            route: The resource REST route to use.
+            params: Optional query parameters to pass to the endpoint.
+
+        Returns:
+            List of response models.
+        """
+        json_data = [
+            resource.model_dump(mode="json") for resource in resources
+        ]
+        response = self._request(
+            "POST",
+            self.url + API + VERSION_1 + route + BATCH,
+            json=json_data,
+            params=params,
+        )
+        assert isinstance(response, list)
+
+        return [
+            response_model.model_validate(model_data)
+            for model_data in response
+        ]
+
     def _create_workspace_scoped_resource(
         self,
         resource: AnyWorkspaceScopedRequest,
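With the get_api_token change above, Python clients request workload-scoped tokens tied to a schedule, pipeline run, or step run instead of passing an expiration. A hedged usage sketch; the run ID is a placeholder and a REST zen store connection is assumed:

# Sketch only, not an officially documented recipe.
from uuid import UUID
from zenml.client import Client

store = Client().zen_store  # assumed to be a RestZenStore
token = store.get_api_token(
    pipeline_run_id=UUID("00000000-0000-0000-0000-000000000000"),  # placeholder
)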
zenml/zen_stores/schemas/artifact_schemas.py
@@ -13,6 +13,7 @@
 # permissions and limitations under the License.
 """SQLModel implementation of artifact table."""

+import json
 from datetime import datetime
 from typing import TYPE_CHECKING, Any, List, Optional
 from uuid import UUID
@@ -23,6 +24,7 @@ from sqlmodel import Field, Relationship

 from zenml.config.source import Source
 from zenml.enums import (
+    ArtifactSaveType,
     ArtifactType,
     ExecutionStatus,
     MetadataResourceTypes,
@@ -196,6 +198,7 @@ class ArtifactVersionSchema(BaseSchema, table=True):
             overlaps="tags",
         ),
     )
+    save_type: str = Field(sa_column=Column(TEXT, nullable=False))

     # Foreign keys
     artifact_id: UUID = build_foreign_key_field(
@@ -300,6 +303,7 @@ class ArtifactVersionSchema(BaseSchema, table=True):
             uri=artifact_version_request.uri,
             materializer=artifact_version_request.materializer.model_dump_json(),
             data_type=artifact_version_request.data_type.model_dump_json(),
+            save_type=artifact_version_request.save_type.value,
         )

     def to_model(
@@ -360,6 +364,7 @@ class ArtifactVersionSchema(BaseSchema, table=True):
             updated=self.updated,
             tags=[t.tag.to_model() for t in self.tags],
             producer_pipeline_run_id=producer_pipeline_run_id,
+            save_type=ArtifactSaveType(self.save_type),
             artifact_store_id=self.artifact_store_id,
         )

@@ -370,7 +375,9 @@ class ArtifactVersionSchema(BaseSchema, table=True):
                 workspace=self.workspace.to_model(),
                 producer_step_run_id=producer_step_run_id,
                 visualizations=[v.to_model() for v in self.visualizations],
-                run_metadata={m.key: m.to_model() for m in self.run_metadata},
+                run_metadata={
+                    m.key: json.loads(m.value) for m in self.run_metadata
+                },
             )

         resources = None
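With the schema change above, run_metadata on an artifact version is a plain mapping of keys to JSON-decoded values rather than a mapping to RunMetadataResponse objects. A hedged sketch of what client code sees; the artifact name and metadata key are placeholders and assume such an artifact exists in your store:

# Sketch only.
from zenml.client import Client

version = Client().get_artifact_version("my_dataset")  # hypothetical name
accuracy = version.run_metadata.get("accuracy")        # plain value, e.g. 0.92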
zenml/zen_stores/schemas/model_schemas.py
@@ -13,12 +13,13 @@
 # permissions and limitations under the License.
 """SQLModel implementation of model tables."""

+import json
 from datetime import datetime
 from typing import TYPE_CHECKING, Any, Dict, List, Optional, cast
 from uuid import UUID

 from pydantic import ConfigDict
-from sqlalchemy import BOOLEAN, INTEGER, TEXT, Column
+from sqlalchemy import BOOLEAN, INTEGER, TEXT, Column, UniqueConstraint
 from sqlmodel import Field, Relationship

 from zenml.enums import MetadataResourceTypes, TaggableResourceTypes
@@ -61,6 +62,13 @@ class ModelSchema(NamedSchema, table=True):
     """SQL Model for model."""

     __tablename__ = "model"
+    __table_args__ = (
+        UniqueConstraint(
+            "name",
+            "workspace_id",
+            name="unique_model_name_in_workspace",
+        ),
+    )

     workspace_id: UUID = build_foreign_key_field(
         source=__tablename__,
@@ -219,6 +227,23 @@ class ModelVersionSchema(NamedSchema, table=True):
     """SQL Model for model version."""

     __tablename__ = MODEL_VERSION_TABLENAME
+    __table_args__ = (
+        # We need two unique constraints here:
+        # - The first to ensure that each model version for a
+        #   model has a unique version number
+        # - The second one to ensure that explicit names given by
+        #   users are unique
+        UniqueConstraint(
+            "number",
+            "model_id",
+            name="unique_version_number_for_model_id",
+        ),
+        UniqueConstraint(
+            "name",
+            "model_id",
+            name="unique_version_for_model_id",
+        ),
+    )

     workspace_id: UUID = build_foreign_key_field(
         source=__tablename__,
@@ -379,8 +404,7 @@ class ModelVersionSchema(NamedSchema, table=True):
                 workspace=self.workspace.to_model(),
                 description=self.description,
                 run_metadata={
-                    rm.key: rm.to_model(include_metadata=True)
-                    for rm in self.run_metadata
+                    rm.key: json.loads(rm.value) for rm in self.run_metadata
                 },
             )

zenml/zen_stores/schemas/pipeline_run_schemas.py
@@ -72,6 +72,11 @@ class PipelineRunSchema(NamedSchema, table=True):
             "orchestrator_run_id",
             name="unique_orchestrator_run_id_for_deployment_id",
         ),
+        UniqueConstraint(
+            "name",
+            "workspace_id",
+            name="unique_run_name_in_workspace",
+        ),
     )

     # Fields
@@ -271,7 +276,7 @@ class PipelineRunSchema(NamedSchema, table=True):
         )

         run_metadata = {
-            metadata_schema.key: metadata_schema.to_model()
+            metadata_schema.key: json.loads(metadata_schema.value)
             for metadata_schema in self.run_metadata
         }

zenml/zen_stores/schemas/pipeline_schemas.py
@@ -17,7 +17,7 @@ from datetime import datetime
 from typing import TYPE_CHECKING, Any, List, Optional
 from uuid import UUID

-from sqlalchemy import TEXT, Column
+from sqlalchemy import TEXT, Column, UniqueConstraint
 from sqlmodel import Field, Relationship

 from zenml.enums import TaggableResourceTypes
@@ -50,7 +50,13 @@ class PipelineSchema(NamedSchema, table=True):
     """SQL Model for pipelines."""

     __tablename__ = "pipeline"
-
+    __table_args__ = (
+        UniqueConstraint(
+            "name",
+            "workspace_id",
+            name="unique_pipeline_name_in_workspace",
+        ),
+    )
     # Fields
     description: Optional[str] = Field(sa_column=Column(TEXT, nullable=True))
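The unique_run_name_in_workspace and unique_pipeline_name_in_workspace constraints introduced above mean that custom run names can no longer repeat within a workspace. A hedged sketch of one way to keep a fixed prefix while staying unique, using ZenML's date/time placeholders in run names; the pipeline and step are stand-ins:

# Sketch only: repeated launches get distinct names under the new constraint.
from zenml import pipeline, step

@step
def trainer() -> int:
    return 42

@pipeline
def my_pipeline() -> None:
    trainer()

my_pipeline.with_options(run_name="train_{date}_{time}")()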