zenml-nightly 0.62.0.dev20240729__py3-none-any.whl → 0.63.0.dev20240731__py3-none-any.whl

This diff covers publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
Files changed (157)
  1. README.md +1 -1
  2. RELEASE_NOTES.md +41 -0
  3. zenml/VERSION +1 -1
  4. zenml/actions/pipeline_run/pipeline_run_action.py +19 -17
  5. zenml/analytics/enums.py +4 -0
  6. zenml/cli/__init__.py +28 -15
  7. zenml/cli/base.py +1 -1
  8. zenml/cli/pipeline.py +54 -61
  9. zenml/cli/stack.py +6 -8
  10. zenml/client.py +232 -99
  11. zenml/config/compiler.py +14 -22
  12. zenml/config/pipeline_run_configuration.py +3 -0
  13. zenml/config/server_config.py +3 -0
  14. zenml/config/source.py +2 -1
  15. zenml/constants.py +2 -0
  16. zenml/enums.py +3 -0
  17. zenml/integrations/aws/orchestrators/sagemaker_orchestrator.py +13 -4
  18. zenml/integrations/databricks/flavors/databricks_orchestrator_flavor.py +11 -2
  19. zenml/integrations/databricks/orchestrators/databricks_orchestrator.py +19 -13
  20. zenml/models/__init__.py +26 -10
  21. zenml/models/v2/base/filter.py +32 -0
  22. zenml/models/v2/core/pipeline.py +73 -89
  23. zenml/models/v2/core/pipeline_build.py +15 -11
  24. zenml/models/v2/core/pipeline_deployment.py +56 -0
  25. zenml/models/v2/core/pipeline_run.py +52 -1
  26. zenml/models/v2/core/run_template.py +393 -0
  27. zenml/models/v2/misc/stack_deployment.py +5 -0
  28. zenml/new/pipelines/build_utils.py +34 -58
  29. zenml/new/pipelines/pipeline.py +17 -76
  30. zenml/new/pipelines/run_utils.py +12 -0
  31. zenml/post_execution/pipeline.py +1 -4
  32. zenml/service_connectors/service_connector_utils.py +4 -2
  33. zenml/stack_deployments/aws_stack_deployment.py +6 -5
  34. zenml/stack_deployments/azure_stack_deployment.py +118 -11
  35. zenml/stack_deployments/gcp_stack_deployment.py +12 -5
  36. zenml/stack_deployments/stack_deployment.py +6 -5
  37. zenml/steps/utils.py +0 -4
  38. zenml/utils/package_utils.py +39 -0
  39. zenml/zen_server/dashboard/assets/{404-B_YdvmwS.js → 404-CI13wQp4.js} +1 -1
  40. zenml/zen_server/dashboard/assets/{@reactflow-l_1hUr1S.js → @reactflow-DIYUhKYX.js} +1 -1
  41. zenml/zen_server/dashboard/assets/{@tanstack-DYiOyJUL.js → @tanstack-k96lU_C-.js} +4 -4
  42. zenml/zen_server/dashboard/assets/{AwarenessChannel-CFg5iX4Z.js → AwarenessChannel-BNg5uWgI.js} +1 -1
  43. zenml/zen_server/dashboard/assets/{CodeSnippet-Dvkx_82E.js → CodeSnippet-Cyp7f4dM.js} +2 -2
  44. zenml/zen_server/dashboard/assets/CollapsibleCard-Cu_A9W57.js +1 -0
  45. zenml/zen_server/dashboard/assets/{Commands-DoN1xrEq.js → Commands-DmQwTXjj.js} +1 -1
  46. zenml/zen_server/dashboard/assets/{CopyButton-Cr7xYEPb.js → CopyButton-B3sWVJ4Z.js} +1 -1
  47. zenml/zen_server/dashboard/assets/{CsvVizualization-Ck-nZ43m.js → CsvVizualization-BvqItd-O.js} +1 -1
  48. zenml/zen_server/dashboard/assets/{Error-kLtljEOM.js → Error-DbXCTGua.js} +1 -1
  49. zenml/zen_server/dashboard/assets/{ExecutionStatus-DguLLgTK.js → ExecutionStatus-9zM7eaLh.js} +1 -1
  50. zenml/zen_server/dashboard/assets/{Helpbox-BXUMP21n.js → Helpbox-BIiNc-uH.js} +1 -1
  51. zenml/zen_server/dashboard/assets/{Infobox-DSt0O-dm.js → Infobox-iv1Nu1A0.js} +1 -1
  52. zenml/zen_server/dashboard/assets/{InlineAvatar-xsrsIGE-.js → InlineAvatar-BvBtO2Dp.js} +1 -1
  53. zenml/zen_server/dashboard/assets/ProviderRadio-pSAvrGRS.js +1 -0
  54. zenml/zen_server/dashboard/assets/SearchField-CXoBknpt.js +1 -0
  55. zenml/zen_server/dashboard/assets/{SetPassword-BXGTWiwj.js → SetPassword-BOxpgh6N.js} +1 -1
  56. zenml/zen_server/dashboard/assets/{SuccessStep-DZC60t0x.js → SuccessStep-CTSKN2lp.js} +1 -1
  57. zenml/zen_server/dashboard/assets/Tick-Bnr2TpW6.js +1 -0
  58. zenml/zen_server/dashboard/assets/{UpdatePasswordSchemas-DGvwFWO1.js → UpdatePasswordSchemas-BeCeaRW5.js} +1 -1
  59. zenml/zen_server/dashboard/assets/chevron-down-D_ZlKMqH.js +1 -0
  60. zenml/zen_server/dashboard/assets/{cloud-only-C_yFCAkP.js → cloud-only-qelmY92E.js} +1 -1
  61. zenml/zen_server/dashboard/assets/components-DWe4cTjS.js +1 -0
  62. zenml/zen_server/dashboard/assets/dots-horizontal-BObFzD5l.js +1 -0
  63. zenml/zen_server/dashboard/assets/{index-BczVOqUf.js → index-KsTz2dHG.js} +5 -5
  64. zenml/zen_server/dashboard/assets/index-vfjX_fJV.css +1 -0
  65. zenml/zen_server/dashboard/assets/index.esm-CbHNSeVw.js +1 -0
  66. zenml/zen_server/dashboard/assets/{login-mutation-CrHrndTI.js → login-mutation-DRpbESS7.js} +1 -1
  67. zenml/zen_server/dashboard/assets/{not-found-DYa4pC-C.js → not-found-Dfx9hfkf.js} +1 -1
  68. zenml/zen_server/dashboard/assets/package-ClbU3KUi.js +1 -0
  69. zenml/zen_server/dashboard/assets/{page-uA5prJGY.js → page-399pVZHU.js} +1 -1
  70. zenml/zen_server/dashboard/assets/{page-1h_sD1jz.js → page-BoFtUD9H.js} +1 -1
  71. zenml/zen_server/dashboard/assets/{page-BDns21Iz.js → page-Btu39x7k.js} +1 -1
  72. zenml/zen_server/dashboard/assets/{page-BnaevhnB.js → page-BxiWdeyg.js} +1 -1
  73. zenml/zen_server/dashboard/assets/{page-1iL8aMqs.js → page-C176KxyB.js} +1 -1
  74. zenml/zen_server/dashboard/assets/page-C6tXXjnK.js +1 -0
  75. zenml/zen_server/dashboard/assets/{page-BkeAAYwp.js → page-CDgZmwxP.js} +1 -1
  76. zenml/zen_server/dashboard/assets/page-CP9obrnG.js +1 -0
  77. zenml/zen_server/dashboard/assets/{page-C6-UGEbH.js → page-CZe9GEBF.js} +1 -1
  78. zenml/zen_server/dashboard/assets/page-CaTOsNNw.js +1 -0
  79. zenml/zen_server/dashboard/assets/{page-CCNRIt_f.js → page-Cjn97HMv.js} +1 -1
  80. zenml/zen_server/dashboard/assets/page-CmXmB_5i.js +1 -0
  81. zenml/zen_server/dashboard/assets/page-CvGAOfad.js +1 -0
  82. zenml/zen_server/dashboard/assets/page-CzucfYPo.js +2 -0
  83. zenml/zen_server/dashboard/assets/{page-Bi-wtWiO.js → page-D0bbc-qr.js} +1 -1
  84. zenml/zen_server/dashboard/assets/page-DLEtD2ex.js +1 -0
  85. zenml/zen_server/dashboard/assets/{page-BhgCDInH.js → page-DVPxY5fT.js} +1 -1
  86. zenml/zen_server/dashboard/assets/{page-BkuQDIf-.js → page-DYBNGxJt.js} +1 -1
  87. zenml/zen_server/dashboard/assets/{page-8a4UMKXZ.js → page-DtpwnNXq.js} +1 -1
  88. zenml/zen_server/dashboard/assets/{page-B6h3iaHJ.js → page-DupV0aBd.js} +1 -1
  89. zenml/zen_server/dashboard/assets/page-EweAR81y.js +1 -0
  90. zenml/zen_server/dashboard/assets/{page-MFQyIJd3.js → page-f3jBVI5Z.js} +1 -1
  91. zenml/zen_server/dashboard/assets/{page-2grKx_MY.js → page-p2hLJdS2.js} +1 -1
  92. zenml/zen_server/dashboard/assets/page-w-YaL77M.js +9 -0
  93. zenml/zen_server/dashboard/assets/persist-BReKApOc.js +14 -0
  94. zenml/zen_server/dashboard/assets/plus-DOeLmm7C.js +1 -0
  95. zenml/zen_server/dashboard/assets/{stack-detail-query-Cficsl6d.js → stack-detail-query-Ck7j7BP_.js} +1 -1
  96. zenml/zen_server/dashboard/assets/{update-server-settings-mutation-7d8xi1tS.js → update-server-settings-mutation-f3ZT7psb.js} +1 -1
  97. zenml/zen_server/dashboard/assets/{url-D7mAQGUM.js → url-rGEp5Umh.js} +1 -1
  98. zenml/zen_server/dashboard/assets/{zod-BhoGpZ63.js → zod-BtSyGx4C.js} +1 -1
  99. zenml/zen_server/dashboard/index.html +5 -5
  100. zenml/zen_server/dashboard_legacy/asset-manifest.json +4 -4
  101. zenml/zen_server/dashboard_legacy/index.html +1 -1
  102. zenml/zen_server/dashboard_legacy/{precache-manifest.12246c7548e71e2c4438e496360de80c.js → precache-manifest.2fa6e528a6e7447caaf35dadfe7514bb.js} +4 -4
  103. zenml/zen_server/dashboard_legacy/service-worker.js +1 -1
  104. zenml/zen_server/dashboard_legacy/static/js/{main.3b27024b.chunk.js → main.4aab7e98.chunk.js} +2 -2
  105. zenml/zen_server/dashboard_legacy/static/js/{main.3b27024b.chunk.js.map → main.4aab7e98.chunk.js.map} +1 -1
  106. zenml/zen_server/deploy/helm/Chart.yaml +1 -1
  107. zenml/zen_server/deploy/helm/README.md +2 -2
  108. zenml/zen_server/rbac/models.py +1 -0
  109. zenml/zen_server/rbac/utils.py +4 -0
  110. zenml/zen_server/routers/pipeline_builds_endpoints.py +2 -66
  111. zenml/zen_server/routers/pipeline_deployments_endpoints.py +2 -53
  112. zenml/zen_server/routers/pipelines_endpoints.py +1 -74
  113. zenml/zen_server/routers/run_templates_endpoints.py +212 -0
  114. zenml/zen_server/routers/workspaces_endpoints.py +79 -0
  115. zenml/zen_server/{pipeline_deployment → template_execution}/runner_entrypoint_configuration.py +1 -8
  116. zenml/zen_server/{pipeline_deployment → template_execution}/utils.py +214 -92
  117. zenml/zen_server/utils.py +2 -2
  118. zenml/zen_server/zen_server_api.py +2 -1
  119. zenml/zen_stores/migrations/versions/0.63.0_release.py +23 -0
  120. zenml/zen_stores/migrations/versions/7d1919bb1ef0_add_run_templates.py +100 -0
  121. zenml/zen_stores/migrations/versions/b59aa68fdb1f_simplify_pipelines.py +139 -0
  122. zenml/zen_stores/rest_zen_store.py +107 -36
  123. zenml/zen_stores/schemas/__init__.py +2 -0
  124. zenml/zen_stores/schemas/pipeline_build_schemas.py +3 -3
  125. zenml/zen_stores/schemas/pipeline_deployment_schemas.py +29 -2
  126. zenml/zen_stores/schemas/pipeline_run_schemas.py +26 -3
  127. zenml/zen_stores/schemas/pipeline_schemas.py +29 -30
  128. zenml/zen_stores/schemas/run_template_schemas.py +264 -0
  129. zenml/zen_stores/schemas/step_run_schemas.py +11 -4
  130. zenml/zen_stores/sql_zen_store.py +364 -150
  131. zenml/zen_stores/template_utils.py +261 -0
  132. zenml/zen_stores/zen_store_interface.py +93 -20
  133. {zenml_nightly-0.62.0.dev20240729.dist-info → zenml_nightly-0.63.0.dev20240731.dist-info}/METADATA +2 -2
  134. {zenml_nightly-0.62.0.dev20240729.dist-info → zenml_nightly-0.63.0.dev20240731.dist-info}/RECORD +139 -129
  135. zenml/models/v2/core/pipeline_namespace.py +0 -113
  136. zenml/new/pipelines/deserialization_utils.py +0 -292
  137. zenml/zen_server/dashboard/assets/CollapsibleCard-opiuBHHc.js +0 -1
  138. zenml/zen_server/dashboard/assets/Pagination-C6X-mifw.js +0 -1
  139. zenml/zen_server/dashboard/assets/index-EpMIKgrI.css +0 -1
  140. zenml/zen_server/dashboard/assets/index-rK_Wuy2W.js +0 -1
  141. zenml/zen_server/dashboard/assets/index.esm-Corw4lXQ.js +0 -1
  142. zenml/zen_server/dashboard/assets/package-B3fWP-Dh.js +0 -1
  143. zenml/zen_server/dashboard/assets/page-5NCOHOsy.js +0 -1
  144. zenml/zen_server/dashboard/assets/page-Bq0YxkLV.js +0 -1
  145. zenml/zen_server/dashboard/assets/page-Bs2F4eoD.js +0 -2
  146. zenml/zen_server/dashboard/assets/page-CHNxpz3n.js +0 -1
  147. zenml/zen_server/dashboard/assets/page-DgorQFqi.js +0 -1
  148. zenml/zen_server/dashboard/assets/page-K8ebxVIs.js +0 -1
  149. zenml/zen_server/dashboard/assets/page-TgCF0P_U.js +0 -1
  150. zenml/zen_server/dashboard/assets/page-ZnCEe-eK.js +0 -9
  151. zenml/zen_server/dashboard/assets/persist-D7HJNBWx.js +0 -1
  152. zenml/zen_server/dashboard/assets/plus-C8WOyCzt.js +0 -1
  153. /zenml/zen_server/{pipeline_deployment → template_execution}/__init__.py +0 -0
  154. /zenml/zen_server/{pipeline_deployment → template_execution}/workload_manager_interface.py +0 -0
  155. {zenml_nightly-0.62.0.dev20240729.dist-info → zenml_nightly-0.63.0.dev20240731.dist-info}/LICENSE +0 -0
  156. {zenml_nightly-0.62.0.dev20240729.dist-info → zenml_nightly-0.63.0.dev20240731.dist-info}/WHEEL +0 -0
  157. {zenml_nightly-0.62.0.dev20240729.dist-info → zenml_nightly-0.63.0.dev20240731.dist-info}/entry_points.txt +0 -0

--- /dev/null
+++ zenml/zen_stores/migrations/versions/b59aa68fdb1f_simplify_pipelines.py
@@ -0,0 +1,139 @@
+"""Simplify pipelines [b59aa68fdb1f].
+
+Revision ID: b59aa68fdb1f
+Revises: 0.62.0
+Create Date: 2024-07-04 14:00:32.830722
+
+"""
+
+from typing import Dict, Optional
+
+import sqlalchemy as sa
+import sqlmodel
+from alembic import op
+from sqlalchemy.dialects import mysql
+
+# revision identifiers, used by Alembic.
+revision = "b59aa68fdb1f"
+down_revision = "0.62.0"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    """Upgrade database schema and/or data, creating a new revision."""
+    # ### commands auto generated by Alembic - please adjust! ###
+
+    with op.batch_alter_table("pipeline_deployment", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "pipeline_version_hash",
+                sqlmodel.sql.sqltypes.AutoString(),
+                nullable=True,
+            )
+        )
+        batch_op.add_column(
+            sa.Column(
+                "pipeline_spec",
+                sa.String(length=16777215).with_variant(
+                    mysql.MEDIUMTEXT, "mysql"
+                ),
+                nullable=True,
+            )
+        )
+
+    connection = op.get_bind()
+    meta = sa.MetaData()
+    meta.reflect(
+        bind=connection,
+        only=(
+            "pipeline",
+            "pipeline_run",
+            "pipeline_deployment",
+            "pipeline_build",
+            "schedule",
+        ),
+    )
+    pipeline_table = sa.Table("pipeline", meta)
+    pipeline_run_table = sa.Table("pipeline_run", meta)
+    pipeline_deployment_table = sa.Table("pipeline_deployment", meta)
+    pipeline_build_table = sa.Table("pipeline_build", meta)
+    schedule_table = sa.Table("schedule", meta)
+
+    def _migrate_pipeline_columns(
+        pipeline_id: str,
+        version_hash: Optional[str],
+        pipeline_spec: Optional[str],
+    ) -> None:
+        connection.execute(
+            sa.update(pipeline_deployment_table)
+            .where(pipeline_deployment_table.c.pipeline_id == pipeline_id)
+            .values(
+                pipeline_version_hash=version_hash, pipeline_spec=pipeline_spec
+            )
+        )
+
+    def _update_pipeline_fks(pipeline_id: str, replacement_id: str) -> None:
+        for table in [
+            pipeline_run_table,
+            pipeline_deployment_table,
+            pipeline_build_table,
+            schedule_table,
+        ]:
+            connection.execute(
+                sa.update(table)
+                .where(table.c.pipeline_id == pipeline_id)
+                .values(pipeline_id=replacement_id)
+            )
+
+    all_pipelines = connection.execute(sa.select(pipeline_table)).fetchall()
+    replacement_mapping: Dict[str, str] = {}
+
+    for pipeline in all_pipelines:
+        _migrate_pipeline_columns(
+            pipeline_id=pipeline.id,
+            version_hash=pipeline.version_hash,
+            pipeline_spec=pipeline.spec,
+        )
+
+        if replacement_id := replacement_mapping.get(pipeline.name):
+            _update_pipeline_fks(
+                pipeline_id=pipeline.id, replacement_id=replacement_id
+            )
+            connection.execute(
+                sa.delete(pipeline_table).where(
+                    pipeline_table.c.id == pipeline.id
+                )
+            )
+        else:
+            replacement_mapping[pipeline.name] = pipeline.id
+
+    with op.batch_alter_table("pipeline", schema=None) as batch_op:
+        batch_op.add_column(sa.Column("description", sa.TEXT(), nullable=True))
+        batch_op.drop_column("spec")
+        batch_op.drop_column("docstring")
+        batch_op.drop_column("version_hash")
+        batch_op.drop_column("version")
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    """Downgrade database schema and/or data back to the previous revision."""
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("pipeline_deployment", schema=None) as batch_op:
+        batch_op.drop_column("pipeline_spec")
+        batch_op.drop_column("pipeline_version_hash")
+
+    with op.batch_alter_table("pipeline", schema=None) as batch_op:
+        batch_op.add_column(sa.Column("version", sa.VARCHAR(), nullable=False))
+        batch_op.add_column(
+            sa.Column("version_hash", sa.VARCHAR(), nullable=False)
+        )
+        batch_op.add_column(sa.Column("docstring", sa.TEXT(), nullable=True))
+        batch_op.add_column(
+            sa.Column("spec", sa.VARCHAR(length=16777215), nullable=False)
+        )
+        batch_op.drop_column("description")
+
+    # ### end Alembic commands ###
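
Note on the upgrade above: it collapses duplicate pipeline rows so that only one row survives per pipeline name, re-points the runs, deployments, builds and schedules of the duplicates to that surviving row, and copies the per-version version_hash and spec values onto the deployment table before dropping those columns. A minimal sketch of the same name-based consolidation, using in-memory dictionaries instead of the reflected tables (all IDs and names below are illustrative):

from typing import Dict, List

pipelines: List[dict] = [
    {"id": "p1", "name": "training", "version": "1"},
    {"id": "p2", "name": "training", "version": "2"},
    {"id": "p3", "name": "inference", "version": "1"},
]
runs: List[dict] = [{"id": "r1", "pipeline_id": "p2"}]

replacement_mapping: Dict[str, str] = {}  # pipeline name -> surviving row id
surviving: List[dict] = []
for pipeline in pipelines:
    if replacement_id := replacement_mapping.get(pipeline["name"]):
        # Duplicate name: re-point dependents, then drop the duplicate row.
        for run in runs:
            if run["pipeline_id"] == pipeline["id"]:
                run["pipeline_id"] = replacement_id
    else:
        replacement_mapping[pipeline["name"]] = pipeline["id"]
        surviving.append(pipeline)

assert [p["id"] for p in surviving] == ["p1", "p3"]
assert runs[0]["pipeline_id"] == "p1"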

--- zenml/zen_stores/rest_zen_store.py
+++ zenml/zen_stores/rest_zen_store.py
@@ -80,6 +80,7 @@ from zenml.constants import (
     PIPELINE_DEPLOYMENTS,
     PIPELINES,
     RUN_METADATA,
+    RUN_TEMPLATES,
     RUNS,
     SCHEDULES,
     SECRETS,
@@ -192,6 +193,10 @@ from zenml.models import (
     RunMetadataFilter,
     RunMetadataRequest,
     RunMetadataResponse,
+    RunTemplateFilter,
+    RunTemplateRequest,
+    RunTemplateResponse,
+    RunTemplateUpdate,
     ScheduleFilter,
     ScheduleRequest,
     ScheduleResponse,
@@ -1524,35 +1529,6 @@ class RestZenStore(BaseZenStore):
             route=PIPELINE_BUILDS,
         )
 
-    def run_build(
-        self,
-        build_id: UUID,
-        run_configuration: Optional[PipelineRunConfiguration] = None,
-    ) -> PipelineRunResponse:
-        """Run a pipeline from a build.
-
-        Args:
-            build_id: The ID of the build to run.
-            run_configuration: Configuration for the run.
-
-        Raises:
-            RuntimeError: If the server does not support running a build.
-
-        Returns:
-            Model of the pipeline run.
-        """
-        run_configuration = run_configuration or PipelineRunConfiguration()
-        try:
-            response_body = self.post(
-                f"{PIPELINE_BUILDS}/{build_id}/runs", body=run_configuration
-            )
-        except MethodNotAllowedError as e:
-            raise RuntimeError(
-                "Running a build is not supported for this server."
-            ) from e
-
-        return PipelineRunResponse.model_validate(response_body)
-
     # -------------------------- Pipeline Deployments --------------------------
 
     def create_deployment(
@@ -1627,19 +1603,114 @@ class RestZenStore(BaseZenStore):
             route=PIPELINE_DEPLOYMENTS,
         )
 
-    def run_deployment(
+    # -------------------- Run templates --------------------
+
+    def create_run_template(
+        self,
+        template: RunTemplateRequest,
+    ) -> RunTemplateResponse:
+        """Create a new run template.
+
+        Args:
+            template: The template to create.
+
+        Returns:
+            The newly created template.
+        """
+        return self._create_workspace_scoped_resource(
+            resource=template,
+            route=RUN_TEMPLATES,
+            response_model=RunTemplateResponse,
+        )
+
+    def get_run_template(
+        self, template_id: UUID, hydrate: bool = True
+    ) -> RunTemplateResponse:
+        """Get a run template with a given ID.
+
+        Args:
+            template_id: ID of the template.
+            hydrate: Flag deciding whether to hydrate the output model(s)
+                by including metadata fields in the response.
+
+        Returns:
+            The template.
+        """
+        return self._get_resource(
+            resource_id=template_id,
+            route=RUN_TEMPLATES,
+            response_model=RunTemplateResponse,
+            params={"hydrate": hydrate},
+        )
+
+    def list_run_templates(
+        self,
+        template_filter_model: RunTemplateFilter,
+        hydrate: bool = False,
+    ) -> Page[RunTemplateResponse]:
+        """List all run templates matching the given filter criteria.
+
+        Args:
+            template_filter_model: All filter parameters including pagination
+                params.
+            hydrate: Flag deciding whether to hydrate the output model(s)
+                by including metadata fields in the response.
+
+        Returns:
+            A list of all templates matching the filter criteria.
+        """
+        return self._list_paginated_resources(
+            route=RUN_TEMPLATES,
+            response_model=RunTemplateResponse,
+            filter_model=template_filter_model,
+            params={"hydrate": hydrate},
+        )
+
+    def update_run_template(
+        self,
+        template_id: UUID,
+        template_update: RunTemplateUpdate,
+    ) -> RunTemplateResponse:
+        """Updates a run template.
+
+        Args:
+            template_id: The ID of the template to update.
+            template_update: The update to apply.
+
+        Returns:
+            The updated template.
+        """
+        return self._update_resource(
+            resource_id=template_id,
+            resource_update=template_update,
+            route=RUN_TEMPLATES,
+            response_model=RunTemplateResponse,
+        )
+
+    def delete_run_template(self, template_id: UUID) -> None:
+        """Delete a run template.
+
+        Args:
+            template_id: The ID of the template to delete.
+        """
+        self._delete_resource(
+            resource_id=template_id,
+            route=RUN_TEMPLATES,
+        )
+
+    def run_template(
         self,
-        deployment_id: UUID,
+        template_id: UUID,
         run_configuration: Optional[PipelineRunConfiguration] = None,
     ) -> PipelineRunResponse:
-        """Run a pipeline from a deployment.
+        """Run a template.
 
         Args:
-            deployment_id: The ID of the deployment to run.
+            template_id: The ID of the template to run.
            run_configuration: Configuration for the run.
 
        Raises:
-            RuntimeError: If the server does not support running a deployment.
+            RuntimeError: If the server does not support running a template.
 
        Returns:
            Model of the pipeline run.
@@ -1648,12 +1719,12 @@ class RestZenStore(BaseZenStore):
 
         try:
             response_body = self.post(
-                f"{PIPELINE_DEPLOYMENTS}/{deployment_id}/runs",
+                f"{RUN_TEMPLATES}/{template_id}/runs",
                 body=run_configuration,
             )
         except MethodNotAllowedError as e:
             raise RuntimeError(
-                "Running a deployment is not supported for this server."
+                "Running a template is not supported for this server."
             ) from e
 
         return PipelineRunResponse.model_validate(response_body)

--- zenml/zen_stores/schemas/__init__.py
+++ zenml/zen_stores/schemas/__init__.py
@@ -69,6 +69,7 @@ from zenml.zen_stores.schemas.model_schemas import (
     ModelVersionArtifactSchema,
     ModelVersionPipelineRunSchema,
 )
+from zenml.zen_stores.schemas.run_template_schemas import RunTemplateSchema
 from zenml.zen_stores.schemas.server_settings_schemas import ServerSettingsSchema
 
 __all__ = [
@@ -102,6 +103,7 @@ __all__ = [
     "StepRunOutputArtifactSchema",
     "StepRunParentsSchema",
     "StepRunSchema",
+    "RunTemplateSchema",
     "TagSchema",
     "TagResourceSchema",
     "TriggerSchema",

--- zenml/zen_stores/schemas/pipeline_build_schemas.py
+++ zenml/zen_stores/schemas/pipeline_build_schemas.py
@@ -84,7 +84,6 @@ class PipelineBuildSchema(BaseSchema, table=True):
         back_populates="builds"
     )
 
-    template_deployment_id: Optional[UUID] = None
     images: str = Field(
         sa_column=Column(
             String(length=MEDIUMTEXT_MAX_LENGTH).with_variant(
@@ -100,6 +99,7 @@ class PipelineBuildSchema(BaseSchema, table=True):
     zenml_version: Optional[str]
     python_version: Optional[str]
     checksum: Optional[str]
+    stack_checksum: Optional[str]
 
     @classmethod
     def from_request(
@@ -124,7 +124,7 @@ class PipelineBuildSchema(BaseSchema, table=True):
             zenml_version=request.zenml_version,
             python_version=request.python_version,
             checksum=request.checksum,
-            template_deployment_id=request.template_deployment_id,
+            stack_checksum=request.stack_checksum,
         )
 
     def to_model(
@@ -159,9 +159,9 @@ class PipelineBuildSchema(BaseSchema, table=True):
             zenml_version=self.zenml_version,
             python_version=self.python_version,
             checksum=self.checksum,
+            stack_checksum=self.stack_checksum,
             is_local=self.is_local,
             contains_code=self.contains_code,
-            template_deployment_id=self.template_deployment_id,
         )
         return PipelineBuildResponse(
             id=self.id,

--- zenml/zen_stores/schemas/pipeline_deployment_schemas.py
+++ zenml/zen_stores/schemas/pipeline_deployment_schemas.py
@@ -22,6 +22,7 @@ from sqlalchemy.dialects.mysql import MEDIUMTEXT
 from sqlmodel import Field, Relationship
 
 from zenml.config.pipeline_configurations import PipelineConfiguration
+from zenml.config.pipeline_spec import PipelineSpec
 from zenml.config.step_configurations import Step
 from zenml.constants import MEDIUMTEXT_MAX_LENGTH
 from zenml.models import (
@@ -74,6 +75,15 @@ class PipelineDeploymentSchema(BaseSchema, table=True):
     run_name_template: str = Field(nullable=False)
     client_version: str = Field(nullable=True)
     server_version: str = Field(nullable=True)
+    pipeline_version_hash: Optional[str] = Field(nullable=True, default=None)
+    pipeline_spec: Optional[str] = Field(
+        sa_column=Column(
+            String(length=MEDIUMTEXT_MAX_LENGTH).with_variant(
+                MEDIUMTEXT, "mysql"
+            ),
+            nullable=True,
+        )
+    )
 
     # Foreign keys
     user_id: Optional[UUID] = build_foreign_key_field(
@@ -132,12 +142,15 @@ class PipelineDeploymentSchema(BaseSchema, table=True):
         ondelete="SET NULL",
         nullable=True,
     )
+    # This is not a foreign key to remove a cycle which messes with our DB
+    # backup process
+    template_id: Optional[UUID] = None
 
     # SQLModel Relationships
     user: Optional["UserSchema"] = Relationship()
     workspace: "WorkspaceSchema" = Relationship()
-    stack: "StackSchema" = Relationship()
-    pipeline: "PipelineSchema" = Relationship()
+    stack: Optional["StackSchema"] = Relationship()
+    pipeline: Optional["PipelineSchema"] = Relationship()
     schedule: Optional["ScheduleSchema"] = Relationship()
     build: Optional["PipelineBuildSchema"] = Relationship(
         sa_relationship_kwargs={
@@ -176,6 +189,7 @@ class PipelineDeploymentSchema(BaseSchema, table=True):
             build_id=request.build,
             user_id=request.user,
             schedule_id=request.schedule,
+            template_id=request.template,
             code_reference_id=code_reference_id,
             run_name_template=request.run_name_template,
             pipeline_configuration=request.pipeline_configuration.model_dump_json(),
@@ -187,6 +201,12 @@ class PipelineDeploymentSchema(BaseSchema, table=True):
             client_environment=json.dumps(request.client_environment),
             client_version=request.client_version,
             server_version=request.server_version,
+            pipeline_version_hash=request.pipeline_version_hash,
+            pipeline_spec=json.dumps(
+                request.pipeline_spec.model_dump(mode="json"), sort_keys=True
+            )
+            if request.pipeline_spec
+            else None,
         )
 
     def to_model(
@@ -235,6 +255,13 @@ class PipelineDeploymentSchema(BaseSchema, table=True):
             code_reference=self.code_reference.to_model()
             if self.code_reference
             else None,
+            pipeline_version_hash=self.pipeline_version_hash,
+            pipeline_spec=PipelineSpec.model_validate_json(
+                self.pipeline_spec
+            )
+            if self.pipeline_spec
+            else None,
+            template_id=self.template_id,
         )
         return PipelineDeploymentResponse(
             id=self.id,
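
Note: the deployment schema now persists the pipeline spec as a JSON text column and rehydrates it with pydantic, mirroring the from_request/to_model pair above. A minimal sketch of that round trip with a stand-in model (ExampleSpec is illustrative, not the real PipelineSpec):

import json
from typing import List

from pydantic import BaseModel


class ExampleSpec(BaseModel):
    version: str
    steps: List[str] = []


spec = ExampleSpec(version="0.4", steps=["load", "train"])

# Store it the way from_request() does above ...
stored = json.dumps(spec.model_dump(mode="json"), sort_keys=True)
# ... and load it back the way to_model() does.
restored = ExampleSpec.model_validate_json(stored)
assert restored == spec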

--- zenml/zen_stores/schemas/pipeline_run_schemas.py
+++ zenml/zen_stores/schemas/pipeline_run_schemas.py
@@ -22,7 +22,11 @@ from sqlalchemy import UniqueConstraint
 from sqlmodel import TEXT, Column, Field, Relationship
 
 from zenml.config.pipeline_configurations import PipelineConfiguration
-from zenml.enums import ExecutionStatus, MetadataResourceTypes
+from zenml.enums import (
+    ExecutionStatus,
+    MetadataResourceTypes,
+    TaggableResourceTypes,
+)
 from zenml.models import (
     PipelineRunRequest,
     PipelineRunResponse,
@@ -52,6 +56,7 @@ if TYPE_CHECKING:
     from zenml.zen_stores.schemas.run_metadata_schemas import RunMetadataSchema
     from zenml.zen_stores.schemas.service_schemas import ServiceSchema
     from zenml.zen_stores.schemas.step_run_schemas import StepRunSchema
+    from zenml.zen_stores.schemas.tag_schemas import TagResourceSchema
 
 
 class PipelineRunSchema(NamedSchema, table=True):
@@ -187,6 +192,13 @@ class PipelineRunSchema(NamedSchema, table=True):
     services: List["ServiceSchema"] = Relationship(
         back_populates="pipeline_run",
     )
+    tags: List["TagResourceSchema"] = Relationship(
+        sa_relationship_kwargs=dict(
+            primaryjoin=f"and_(TagResourceSchema.resource_type=='{TaggableResourceTypes.PIPELINE_RUN.value}', foreign(TagResourceSchema.resource_id)==PipelineRunSchema.id)",
+            cascade="delete",
+            overlaps="tags",
+        ),
+    )
 
     @classmethod
     def from_request(
@@ -310,15 +322,26 @@ class PipelineRunSchema(NamedSchema, table=True):
                 client_environment=client_environment,
                 orchestrator_environment=orchestrator_environment,
                 orchestrator_run_id=self.orchestrator_run_id,
+                template_id=self.deployment.template_id
+                if self.deployment
+                else None,
             )
 
         resources = None
         if include_resources:
             model_version = None
             if config.model and config.model.model_version_id:
-                model_version = config.model._get_model_version(hydrate=False)
+                try:
+                    model_version = config.model._get_model_version(
+                        hydrate=False
+                    )
+                except KeyError:
+                    # Unable to find the model version, it was probably deleted
+                    pass
+
             resources = PipelineRunResponseResources(
-                model_version=model_version
+                model_version=model_version,
+                tags=[t.tag.to_model() for t in self.tags],
             )
 
         return PipelineRunResponse(
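
Note: the new tags relationship links rows through a shared tag-resource table without a real foreign key, using a primaryjoin that filters on the resource type and marks resource_id as the foreign side. A condensed, standalone SQLAlchemy sketch of that pattern (the table and class names here are illustrative, not the ZenML schemas):

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()


class TagLink(Base):
    __tablename__ = "tag_link"
    id = Column(Integer, primary_key=True)
    resource_type = Column(String)
    resource_id = Column(Integer)


class Run(Base):
    __tablename__ = "run"
    id = Column(Integer, primary_key=True)
    # No real foreign key: the join condition names the "foreign" column by
    # hand and filters on the resource type, as in the schema change above.
    tags = relationship(
        "TagLink",
        primaryjoin="and_(TagLink.resource_type=='run', "
        "foreign(TagLink.resource_id)==Run.id)",
        cascade="delete",
    )


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add_all([Run(id=1), TagLink(resource_type="run", resource_id=1)])
    session.commit()
    assert session.get(Run, 1).tags[0].resource_type == "run"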

--- zenml/zen_stores/schemas/pipeline_schemas.py
+++ zenml/zen_stores/schemas/pipeline_schemas.py
@@ -13,22 +13,20 @@
 # permissions and limitations under the License.
 """SQL Model Implementations for Pipelines and Pipeline Runs."""
 
-import json
 from datetime import datetime
 from typing import TYPE_CHECKING, Any, List, Optional
 from uuid import UUID
 
-from sqlalchemy import TEXT, Column, String
-from sqlalchemy.dialects.mysql import MEDIUMTEXT
+from sqlalchemy import TEXT, Column
 from sqlmodel import Field, Relationship
 
-from zenml.config.pipeline_spec import PipelineSpec
-from zenml.constants import MEDIUMTEXT_MAX_LENGTH
+from zenml.enums import TaggableResourceTypes
 from zenml.models import (
     PipelineRequest,
     PipelineResponse,
     PipelineResponseBody,
     PipelineResponseMetadata,
+    PipelineResponseResources,
     PipelineUpdate,
 )
 from zenml.zen_stores.schemas.base_schemas import NamedSchema
@@ -45,6 +43,7 @@ if TYPE_CHECKING:
     )
     from zenml.zen_stores.schemas.pipeline_run_schemas import PipelineRunSchema
     from zenml.zen_stores.schemas.schedule_schema import ScheduleSchema
+    from zenml.zen_stores.schemas.tag_schemas import TagResourceSchema
 
 
 class PipelineSchema(NamedSchema, table=True):
@@ -53,17 +52,7 @@ class PipelineSchema(NamedSchema, table=True):
     __tablename__ = "pipeline"
 
     # Fields
-    version: str
-    version_hash: str
-    docstring: Optional[str] = Field(sa_column=Column(TEXT, nullable=True))
-    spec: str = Field(
-        sa_column=Column(
-            String(length=MEDIUMTEXT_MAX_LENGTH).with_variant(
-                MEDIUMTEXT, "mysql"
-            ),
-            nullable=False,
-        )
-    )
+    description: Optional[str] = Field(sa_column=Column(TEXT, nullable=True))
 
     # Foreign keys
     workspace_id: UUID = build_foreign_key_field(
@@ -90,13 +79,23 @@ class PipelineSchema(NamedSchema, table=True):
     schedules: List["ScheduleSchema"] = Relationship(
         back_populates="pipeline",
     )
-    runs: List["PipelineRunSchema"] = Relationship(back_populates="pipeline")
+    runs: List["PipelineRunSchema"] = Relationship(
+        back_populates="pipeline",
+        sa_relationship_kwargs={"order_by": "PipelineRunSchema.created"},
+    )
     builds: List["PipelineBuildSchema"] = Relationship(
         back_populates="pipeline"
     )
     deployments: List["PipelineDeploymentSchema"] = Relationship(
         back_populates="pipeline",
     )
+    tags: List["TagResourceSchema"] = Relationship(
+        sa_relationship_kwargs=dict(
+            primaryjoin=f"and_(TagResourceSchema.resource_type=='{TaggableResourceTypes.PIPELINE.value}', foreign(TagResourceSchema.resource_id)==PipelineSchema.id)",
+            cascade="delete",
+            overlaps="tags",
+        ),
+    )
 
     @classmethod
     def from_request(
@@ -113,21 +112,15 @@ class PipelineSchema(NamedSchema, table=True):
         """
         return cls(
             name=pipeline_request.name,
-            version=pipeline_request.version,
-            version_hash=pipeline_request.version_hash,
+            description=pipeline_request.description,
             workspace_id=pipeline_request.workspace,
             user_id=pipeline_request.user,
-            docstring=pipeline_request.docstring,
-            spec=json.dumps(
-                pipeline_request.spec.model_dump(mode="json"), sort_keys=True
-            ),
         )
 
     def to_model(
         self,
         include_metadata: bool = False,
         include_resources: bool = False,
-        last_x_runs: int = 3,
         **kwargs: Any,
     ) -> "PipelineResponse":
         """Convert a `PipelineSchema` to a `PipelineResponse`.
@@ -136,25 +129,29 @@ class PipelineSchema(NamedSchema, table=True):
             include_metadata: Whether the metadata will be filled.
             include_resources: Whether the resources will be filled.
             **kwargs: Keyword arguments to allow schema specific logic
-            last_x_runs: How many runs to use for the execution status
 
         Returns:
             The created PipelineResponse.
         """
         body = PipelineResponseBody(
             user=self.user.to_model() if self.user else None,
-            status=[run.status for run in self.runs[:last_x_runs]],
+            latest_run_id=self.runs[-1].id if self.runs else None,
+            latest_run_status=self.runs[-1].status if self.runs else None,
             created=self.created,
             updated=self.updated,
-            version=self.version,
         )
+
         metadata = None
         if include_metadata:
             metadata = PipelineResponseMetadata(
                 workspace=self.workspace.to_model(),
-                version_hash=self.version_hash,
-                spec=PipelineSpec.model_validate_json(self.spec),
-                docstring=self.docstring,
+                description=self.description,
+            )
+
+        resources = None
+        if include_resources:
+            resources = PipelineResponseResources(
+                tags=[t.tag.to_model() for t in self.tags],
             )
 
         return PipelineResponse(
@@ -162,6 +159,7 @@ class PipelineSchema(NamedSchema, table=True):
             name=self.name,
             body=body,
             metadata=metadata,
+            resources=resources,
         )
 
     def update(self, pipeline_update: "PipelineUpdate") -> "PipelineSchema":
@@ -173,5 +171,6 @@ class PipelineSchema(NamedSchema, table=True):
         Returns:
             The updated `PipelineSchema`.
         """
+        self.description = pipeline_update.description
         self.updated = datetime.utcnow()
         return self