zenml-nightly 0.62.0.dev20240729__py3-none-any.whl → 0.63.0.dev20240731__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (157)
  1. README.md +1 -1
  2. RELEASE_NOTES.md +41 -0
  3. zenml/VERSION +1 -1
  4. zenml/actions/pipeline_run/pipeline_run_action.py +19 -17
  5. zenml/analytics/enums.py +4 -0
  6. zenml/cli/__init__.py +28 -15
  7. zenml/cli/base.py +1 -1
  8. zenml/cli/pipeline.py +54 -61
  9. zenml/cli/stack.py +6 -8
  10. zenml/client.py +232 -99
  11. zenml/config/compiler.py +14 -22
  12. zenml/config/pipeline_run_configuration.py +3 -0
  13. zenml/config/server_config.py +3 -0
  14. zenml/config/source.py +2 -1
  15. zenml/constants.py +2 -0
  16. zenml/enums.py +3 -0
  17. zenml/integrations/aws/orchestrators/sagemaker_orchestrator.py +13 -4
  18. zenml/integrations/databricks/flavors/databricks_orchestrator_flavor.py +11 -2
  19. zenml/integrations/databricks/orchestrators/databricks_orchestrator.py +19 -13
  20. zenml/models/__init__.py +26 -10
  21. zenml/models/v2/base/filter.py +32 -0
  22. zenml/models/v2/core/pipeline.py +73 -89
  23. zenml/models/v2/core/pipeline_build.py +15 -11
  24. zenml/models/v2/core/pipeline_deployment.py +56 -0
  25. zenml/models/v2/core/pipeline_run.py +52 -1
  26. zenml/models/v2/core/run_template.py +393 -0
  27. zenml/models/v2/misc/stack_deployment.py +5 -0
  28. zenml/new/pipelines/build_utils.py +34 -58
  29. zenml/new/pipelines/pipeline.py +17 -76
  30. zenml/new/pipelines/run_utils.py +12 -0
  31. zenml/post_execution/pipeline.py +1 -4
  32. zenml/service_connectors/service_connector_utils.py +4 -2
  33. zenml/stack_deployments/aws_stack_deployment.py +6 -5
  34. zenml/stack_deployments/azure_stack_deployment.py +118 -11
  35. zenml/stack_deployments/gcp_stack_deployment.py +12 -5
  36. zenml/stack_deployments/stack_deployment.py +6 -5
  37. zenml/steps/utils.py +0 -4
  38. zenml/utils/package_utils.py +39 -0
  39. zenml/zen_server/dashboard/assets/{404-B_YdvmwS.js → 404-CI13wQp4.js} +1 -1
  40. zenml/zen_server/dashboard/assets/{@reactflow-l_1hUr1S.js → @reactflow-DIYUhKYX.js} +1 -1
  41. zenml/zen_server/dashboard/assets/{@tanstack-DYiOyJUL.js → @tanstack-k96lU_C-.js} +4 -4
  42. zenml/zen_server/dashboard/assets/{AwarenessChannel-CFg5iX4Z.js → AwarenessChannel-BNg5uWgI.js} +1 -1
  43. zenml/zen_server/dashboard/assets/{CodeSnippet-Dvkx_82E.js → CodeSnippet-Cyp7f4dM.js} +2 -2
  44. zenml/zen_server/dashboard/assets/CollapsibleCard-Cu_A9W57.js +1 -0
  45. zenml/zen_server/dashboard/assets/{Commands-DoN1xrEq.js → Commands-DmQwTXjj.js} +1 -1
  46. zenml/zen_server/dashboard/assets/{CopyButton-Cr7xYEPb.js → CopyButton-B3sWVJ4Z.js} +1 -1
  47. zenml/zen_server/dashboard/assets/{CsvVizualization-Ck-nZ43m.js → CsvVizualization-BvqItd-O.js} +1 -1
  48. zenml/zen_server/dashboard/assets/{Error-kLtljEOM.js → Error-DbXCTGua.js} +1 -1
  49. zenml/zen_server/dashboard/assets/{ExecutionStatus-DguLLgTK.js → ExecutionStatus-9zM7eaLh.js} +1 -1
  50. zenml/zen_server/dashboard/assets/{Helpbox-BXUMP21n.js → Helpbox-BIiNc-uH.js} +1 -1
  51. zenml/zen_server/dashboard/assets/{Infobox-DSt0O-dm.js → Infobox-iv1Nu1A0.js} +1 -1
  52. zenml/zen_server/dashboard/assets/{InlineAvatar-xsrsIGE-.js → InlineAvatar-BvBtO2Dp.js} +1 -1
  53. zenml/zen_server/dashboard/assets/ProviderRadio-pSAvrGRS.js +1 -0
  54. zenml/zen_server/dashboard/assets/SearchField-CXoBknpt.js +1 -0
  55. zenml/zen_server/dashboard/assets/{SetPassword-BXGTWiwj.js → SetPassword-BOxpgh6N.js} +1 -1
  56. zenml/zen_server/dashboard/assets/{SuccessStep-DZC60t0x.js → SuccessStep-CTSKN2lp.js} +1 -1
  57. zenml/zen_server/dashboard/assets/Tick-Bnr2TpW6.js +1 -0
  58. zenml/zen_server/dashboard/assets/{UpdatePasswordSchemas-DGvwFWO1.js → UpdatePasswordSchemas-BeCeaRW5.js} +1 -1
  59. zenml/zen_server/dashboard/assets/chevron-down-D_ZlKMqH.js +1 -0
  60. zenml/zen_server/dashboard/assets/{cloud-only-C_yFCAkP.js → cloud-only-qelmY92E.js} +1 -1
  61. zenml/zen_server/dashboard/assets/components-DWe4cTjS.js +1 -0
  62. zenml/zen_server/dashboard/assets/dots-horizontal-BObFzD5l.js +1 -0
  63. zenml/zen_server/dashboard/assets/{index-BczVOqUf.js → index-KsTz2dHG.js} +5 -5
  64. zenml/zen_server/dashboard/assets/index-vfjX_fJV.css +1 -0
  65. zenml/zen_server/dashboard/assets/index.esm-CbHNSeVw.js +1 -0
  66. zenml/zen_server/dashboard/assets/{login-mutation-CrHrndTI.js → login-mutation-DRpbESS7.js} +1 -1
  67. zenml/zen_server/dashboard/assets/{not-found-DYa4pC-C.js → not-found-Dfx9hfkf.js} +1 -1
  68. zenml/zen_server/dashboard/assets/package-ClbU3KUi.js +1 -0
  69. zenml/zen_server/dashboard/assets/{page-uA5prJGY.js → page-399pVZHU.js} +1 -1
  70. zenml/zen_server/dashboard/assets/{page-1h_sD1jz.js → page-BoFtUD9H.js} +1 -1
  71. zenml/zen_server/dashboard/assets/{page-BDns21Iz.js → page-Btu39x7k.js} +1 -1
  72. zenml/zen_server/dashboard/assets/{page-BnaevhnB.js → page-BxiWdeyg.js} +1 -1
  73. zenml/zen_server/dashboard/assets/{page-1iL8aMqs.js → page-C176KxyB.js} +1 -1
  74. zenml/zen_server/dashboard/assets/page-C6tXXjnK.js +1 -0
  75. zenml/zen_server/dashboard/assets/{page-BkeAAYwp.js → page-CDgZmwxP.js} +1 -1
  76. zenml/zen_server/dashboard/assets/page-CP9obrnG.js +1 -0
  77. zenml/zen_server/dashboard/assets/{page-C6-UGEbH.js → page-CZe9GEBF.js} +1 -1
  78. zenml/zen_server/dashboard/assets/page-CaTOsNNw.js +1 -0
  79. zenml/zen_server/dashboard/assets/{page-CCNRIt_f.js → page-Cjn97HMv.js} +1 -1
  80. zenml/zen_server/dashboard/assets/page-CmXmB_5i.js +1 -0
  81. zenml/zen_server/dashboard/assets/page-CvGAOfad.js +1 -0
  82. zenml/zen_server/dashboard/assets/page-CzucfYPo.js +2 -0
  83. zenml/zen_server/dashboard/assets/{page-Bi-wtWiO.js → page-D0bbc-qr.js} +1 -1
  84. zenml/zen_server/dashboard/assets/page-DLEtD2ex.js +1 -0
  85. zenml/zen_server/dashboard/assets/{page-BhgCDInH.js → page-DVPxY5fT.js} +1 -1
  86. zenml/zen_server/dashboard/assets/{page-BkuQDIf-.js → page-DYBNGxJt.js} +1 -1
  87. zenml/zen_server/dashboard/assets/{page-8a4UMKXZ.js → page-DtpwnNXq.js} +1 -1
  88. zenml/zen_server/dashboard/assets/{page-B6h3iaHJ.js → page-DupV0aBd.js} +1 -1
  89. zenml/zen_server/dashboard/assets/page-EweAR81y.js +1 -0
  90. zenml/zen_server/dashboard/assets/{page-MFQyIJd3.js → page-f3jBVI5Z.js} +1 -1
  91. zenml/zen_server/dashboard/assets/{page-2grKx_MY.js → page-p2hLJdS2.js} +1 -1
  92. zenml/zen_server/dashboard/assets/page-w-YaL77M.js +9 -0
  93. zenml/zen_server/dashboard/assets/persist-BReKApOc.js +14 -0
  94. zenml/zen_server/dashboard/assets/plus-DOeLmm7C.js +1 -0
  95. zenml/zen_server/dashboard/assets/{stack-detail-query-Cficsl6d.js → stack-detail-query-Ck7j7BP_.js} +1 -1
  96. zenml/zen_server/dashboard/assets/{update-server-settings-mutation-7d8xi1tS.js → update-server-settings-mutation-f3ZT7psb.js} +1 -1
  97. zenml/zen_server/dashboard/assets/{url-D7mAQGUM.js → url-rGEp5Umh.js} +1 -1
  98. zenml/zen_server/dashboard/assets/{zod-BhoGpZ63.js → zod-BtSyGx4C.js} +1 -1
  99. zenml/zen_server/dashboard/index.html +5 -5
  100. zenml/zen_server/dashboard_legacy/asset-manifest.json +4 -4
  101. zenml/zen_server/dashboard_legacy/index.html +1 -1
  102. zenml/zen_server/dashboard_legacy/{precache-manifest.12246c7548e71e2c4438e496360de80c.js → precache-manifest.2fa6e528a6e7447caaf35dadfe7514bb.js} +4 -4
  103. zenml/zen_server/dashboard_legacy/service-worker.js +1 -1
  104. zenml/zen_server/dashboard_legacy/static/js/{main.3b27024b.chunk.js → main.4aab7e98.chunk.js} +2 -2
  105. zenml/zen_server/dashboard_legacy/static/js/{main.3b27024b.chunk.js.map → main.4aab7e98.chunk.js.map} +1 -1
  106. zenml/zen_server/deploy/helm/Chart.yaml +1 -1
  107. zenml/zen_server/deploy/helm/README.md +2 -2
  108. zenml/zen_server/rbac/models.py +1 -0
  109. zenml/zen_server/rbac/utils.py +4 -0
  110. zenml/zen_server/routers/pipeline_builds_endpoints.py +2 -66
  111. zenml/zen_server/routers/pipeline_deployments_endpoints.py +2 -53
  112. zenml/zen_server/routers/pipelines_endpoints.py +1 -74
  113. zenml/zen_server/routers/run_templates_endpoints.py +212 -0
  114. zenml/zen_server/routers/workspaces_endpoints.py +79 -0
  115. zenml/zen_server/{pipeline_deployment → template_execution}/runner_entrypoint_configuration.py +1 -8
  116. zenml/zen_server/{pipeline_deployment → template_execution}/utils.py +214 -92
  117. zenml/zen_server/utils.py +2 -2
  118. zenml/zen_server/zen_server_api.py +2 -1
  119. zenml/zen_stores/migrations/versions/0.63.0_release.py +23 -0
  120. zenml/zen_stores/migrations/versions/7d1919bb1ef0_add_run_templates.py +100 -0
  121. zenml/zen_stores/migrations/versions/b59aa68fdb1f_simplify_pipelines.py +139 -0
  122. zenml/zen_stores/rest_zen_store.py +107 -36
  123. zenml/zen_stores/schemas/__init__.py +2 -0
  124. zenml/zen_stores/schemas/pipeline_build_schemas.py +3 -3
  125. zenml/zen_stores/schemas/pipeline_deployment_schemas.py +29 -2
  126. zenml/zen_stores/schemas/pipeline_run_schemas.py +26 -3
  127. zenml/zen_stores/schemas/pipeline_schemas.py +29 -30
  128. zenml/zen_stores/schemas/run_template_schemas.py +264 -0
  129. zenml/zen_stores/schemas/step_run_schemas.py +11 -4
  130. zenml/zen_stores/sql_zen_store.py +364 -150
  131. zenml/zen_stores/template_utils.py +261 -0
  132. zenml/zen_stores/zen_store_interface.py +93 -20
  133. {zenml_nightly-0.62.0.dev20240729.dist-info → zenml_nightly-0.63.0.dev20240731.dist-info}/METADATA +2 -2
  134. {zenml_nightly-0.62.0.dev20240729.dist-info → zenml_nightly-0.63.0.dev20240731.dist-info}/RECORD +139 -129
  135. zenml/models/v2/core/pipeline_namespace.py +0 -113
  136. zenml/new/pipelines/deserialization_utils.py +0 -292
  137. zenml/zen_server/dashboard/assets/CollapsibleCard-opiuBHHc.js +0 -1
  138. zenml/zen_server/dashboard/assets/Pagination-C6X-mifw.js +0 -1
  139. zenml/zen_server/dashboard/assets/index-EpMIKgrI.css +0 -1
  140. zenml/zen_server/dashboard/assets/index-rK_Wuy2W.js +0 -1
  141. zenml/zen_server/dashboard/assets/index.esm-Corw4lXQ.js +0 -1
  142. zenml/zen_server/dashboard/assets/package-B3fWP-Dh.js +0 -1
  143. zenml/zen_server/dashboard/assets/page-5NCOHOsy.js +0 -1
  144. zenml/zen_server/dashboard/assets/page-Bq0YxkLV.js +0 -1
  145. zenml/zen_server/dashboard/assets/page-Bs2F4eoD.js +0 -2
  146. zenml/zen_server/dashboard/assets/page-CHNxpz3n.js +0 -1
  147. zenml/zen_server/dashboard/assets/page-DgorQFqi.js +0 -1
  148. zenml/zen_server/dashboard/assets/page-K8ebxVIs.js +0 -1
  149. zenml/zen_server/dashboard/assets/page-TgCF0P_U.js +0 -1
  150. zenml/zen_server/dashboard/assets/page-ZnCEe-eK.js +0 -9
  151. zenml/zen_server/dashboard/assets/persist-D7HJNBWx.js +0 -1
  152. zenml/zen_server/dashboard/assets/plus-C8WOyCzt.js +0 -1
  153. /zenml/zen_server/{pipeline_deployment → template_execution}/__init__.py +0 -0
  154. /zenml/zen_server/{pipeline_deployment → template_execution}/workload_manager_interface.py +0 -0
  155. {zenml_nightly-0.62.0.dev20240729.dist-info → zenml_nightly-0.63.0.dev20240731.dist-info}/LICENSE +0 -0
  156. {zenml_nightly-0.62.0.dev20240729.dist-info → zenml_nightly-0.63.0.dev20240731.dist-info}/WHEEL +0 -0
  157. {zenml_nightly-0.62.0.dev20240729.dist-info → zenml_nightly-0.63.0.dev20240731.dist-info}/entry_points.txt +0 -0
@@ -52,7 +52,7 @@ from pydantic import (
     field_validator,
     model_validator,
 )
-from sqlalchemy import asc, desc, func
+from sqlalchemy import asc, case, desc, func
 from sqlalchemy.engine import URL, Engine, make_url
 from sqlalchemy.exc import (
     ArgumentError,
@@ -96,6 +96,7 @@ from zenml.constants import (
     ENV_ZENML_LOCAL_SERVER,
     ENV_ZENML_SERVER,
     FINISHED_ONBOARDING_SURVEY_KEY,
+    SORT_PIPELINES_BY_LATEST_RUN_KEY,
     SQL_STORE_BACKUP_DIRECTORY_NAME,
     TEXT_FIELD_MAX_LENGTH,
     handle_bool_env_var,
@@ -206,9 +207,6 @@ from zenml.models import (
     PipelineDeploymentRequest,
     PipelineDeploymentResponse,
     PipelineFilter,
-    PipelineNamespaceFilter,
-    PipelineNamespaceResponse,
-    PipelineNamespaceResponseBody,
     PipelineRequest,
     PipelineResponse,
     PipelineRunFilter,
@@ -219,6 +217,10 @@ from zenml.models import (
     RunMetadataFilter,
     RunMetadataRequest,
     RunMetadataResponse,
+    RunTemplateFilter,
+    RunTemplateRequest,
+    RunTemplateResponse,
+    RunTemplateUpdate,
     ScheduleFilter,
     ScheduleRequest,
     ScheduleResponse,
@@ -294,6 +296,7 @@ from zenml.utils.networking_utils import (
 )
 from zenml.utils.pydantic_utils import before_validator_handler
 from zenml.utils.string_utils import random_str, validate_name
+from zenml.zen_stores import template_utils
 from zenml.zen_stores.base_zen_store import (
     BaseZenStore,
 )
@@ -322,6 +325,7 @@ from zenml.zen_stores.schemas import (
     PipelineRunSchema,
     PipelineSchema,
     RunMetadataSchema,
+    RunTemplateSchema,
     ScheduleSchema,
     SecretSchema,
     ServerSettingsSchema,
@@ -991,16 +995,7 @@ class SqlZenStore(BaseZenStore):
         total = 0
 
         # Sorting
-        column, operand = filter_model.sorting_params
-        if operand == SorterOps.DESCENDING:
-            sort_clause = desc(getattr(table, column))  # type: ignore[var-annotated]
-        else:
-            sort_clause = asc(getattr(table, column))
-
-        # We always add the `id` column as a tiebreaker to ensure a stable,
-        # repeatable order of items, otherwise subsequent pages might contain
-        # the same items.
-        query = query.order_by(sort_clause, asc(table.id))  # type: ignore[arg-type]
+        query = filter_model.apply_sorting(query=query, table=table)
 
         # Get the total amount of pages in the database for a given query
         if total == 0:
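The store now delegates sorting to the filter model instead of building the ORDER BY clause inline. The real `apply_sorting` implementation lives in `zenml/models/v2/base/filter.py` (changed in this release but not shown here); the following is only a minimal sketch, assuming a `sorting_params` property and a SQLAlchemy/SQLModel query, of the shape such a method could take:

```python
from typing import Any, Tuple, Type, TypeVar

from sqlalchemy import asc, desc
from sqlmodel import SQLModel

AnyQuery = TypeVar("AnyQuery", bound=Any)


class FilterSketch:
    """Hypothetical stand-in for a ZenML filter model (not the real class)."""

    sort_by: str = "created"
    descending: bool = True

    @property
    def sorting_params(self) -> Tuple[str, str]:
        # Simplified: the real filter uses a SorterOps enum here.
        return self.sort_by, "DESC" if self.descending else "ASC"

    def apply_sorting(self, query: AnyQuery, table: Type[SQLModel]) -> AnyQuery:
        column, operand = self.sorting_params
        sort_clause = (
            desc(getattr(table, column))
            if operand == "DESC"
            else asc(getattr(table, column))
        )
        # Keep `id` as a tiebreaker so pagination stays stable across pages.
        return query.order_by(sort_clause, asc(table.id))
```

This mirrors the logic that was previously inlined in `filter_and_paginate`, including the stable-pagination tiebreaker.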
@@ -1040,7 +1035,9 @@
             # Otherwise, try to use the `to_model` method of the schema.
             to_model = getattr(schema, "to_model", None)
             if callable(to_model):
-                items.append(to_model(include_metadata=hydrate))
+                items.append(
+                    to_model(include_metadata=hydrate, include_resources=True)
+                )
                 continue
             # If neither of the above work, raise an error.
             raise RuntimeError(
@@ -1916,7 +1913,7 @@
             action = self._get_action(action_id=action_id, session=session)
 
             return action.to_model(
-                include_metadata=hydrate, include_resources=hydrate
+                include_metadata=hydrate, include_resources=True
             )
 
     def list_actions(
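This change, and the matching `include_resources=True` changes in the hunks below, always attaches response resources when converting a schema to a model; only the metadata part remains gated by `hydrate`. A hedged sketch of the observable effect, using a placeholder UUID rather than a real ID:

```python
from uuid import UUID

from zenml.client import Client

store = Client().zen_store
action_id = UUID("00000000-0000-0000-0000-000000000000")  # placeholder ID

# Previously, resources were only attached when hydrate=True. After this
# change they are attached in both calls; hydrate only controls metadata.
lean = store.get_action(action_id=action_id, hydrate=False)
full = store.get_action(action_id=action_id, hydrate=True)
assert lean.resources is not None
assert full.metadata is not None
```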
@@ -2427,7 +2424,7 @@
                     "service with this ID found."
                 )
             return service.to_model(
-                include_metadata=hydrate, include_resources=hydrate
+                include_metadata=hydrate, include_resources=True
             )
 
     def list_services(
@@ -2766,7 +2763,7 @@
                     f"found."
                 )
             return artifact_version.to_model(
-                include_metadata=hydrate, include_resources=hydrate
+                include_metadata=hydrate, include_resources=True
             )
 
     def list_artifact_versions(
@@ -3984,23 +3981,32 @@
             existing_pipeline = session.exec(
                 select(PipelineSchema)
                 .where(PipelineSchema.name == pipeline.name)
-                .where(PipelineSchema.version_hash == pipeline.version_hash)
                 .where(PipelineSchema.workspace_id == pipeline.workspace)
             ).first()
             if existing_pipeline is not None:
                 raise EntityExistsError(
                     f"Unable to create pipeline in workspace "
-                    f"'{pipeline.workspace}': A pipeline with this name and "
-                    f"version already exists."
+                    f"'{pipeline.workspace}': A pipeline with this name "
+                    "already exists."
                 )
 
             # Create the pipeline
             new_pipeline = PipelineSchema.from_request(pipeline)
+
+            if pipeline.tags:
+                self._attach_tags_to_resource(
+                    tag_names=pipeline.tags,
+                    resource_id=new_pipeline.id,
+                    resource_type=TaggableResourceTypes.PIPELINE,
+                )
+
             session.add(new_pipeline)
             session.commit()
             session.refresh(new_pipeline)
 
-            return new_pipeline.to_model(include_metadata=True)
+            return new_pipeline.to_model(
+                include_metadata=True, include_resources=True
+            )
 
     def get_pipeline(
         self, pipeline_id: UUID, hydrate: bool = True
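With the `version_hash` clause removed, a pipeline name can now exist only once per workspace, and tags can be attached at creation time. A hedged usage sketch; `PipelineRequest` fields other than `name`, `user`, `workspace`, and `tags` (all visible in this diff or standard workspace-scoped request fields) are assumptions:

```python
from zenml.client import Client
from zenml.exceptions import EntityExistsError
from zenml.models import PipelineRequest

client = Client()
store = client.zen_store

request = PipelineRequest(
    name="training_pipeline",
    user=client.active_user.id,
    workspace=client.active_workspace.id,
    tags=["example"],
)
store.create_pipeline(request)

try:
    # Same name in the same workspace is now rejected regardless of version.
    store.create_pipeline(request)
except EntityExistsError:
    print("A pipeline with this name already exists.")
```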
@@ -4029,80 +4035,8 @@
                     "No pipeline with this ID found."
                 )
 
-            return pipeline.to_model(include_metadata=hydrate)
-
-    def list_pipeline_namespaces(
-        self,
-        filter_model: PipelineNamespaceFilter,
-        hydrate: bool = False,
-    ) -> Page[PipelineNamespaceResponse]:
-        """List all pipeline namespaces matching the given filter criteria.
-
-        Args:
-            filter_model: All filter parameters including pagination
-                params.
-            hydrate: Flag deciding whether to hydrate the output model(s)
-                by including metadata fields in the response.
-
-        Returns:
-            A list of all pipeline namespaces matching the filter criteria.
-        """
-
-        def _custom_conversion(
-            row: Tuple[str, UUID, str],
-        ) -> PipelineNamespaceResponse:
-            name, latest_run_id, latest_run_status = row
-
-            body = PipelineNamespaceResponseBody(
-                latest_run_id=latest_run_id,
-                latest_run_status=latest_run_status,
-            )
-
-            return PipelineNamespaceResponse(name=name, body=body)
-
-        def _custom_fetch(
-            session: Session,
-            query: Union[Select[Any], SelectOfScalar[Any]],
-            filter: BaseFilter,
-        ) -> Sequence[Any]:
-            return session.exec(query).all()
-
-        with Session(self.engine) as session:
-            max_date_subquery = (
-                select(
-                    PipelineSchema.name,
-                    func.max(PipelineRunSchema.created).label("max_created"),
-                )
-                .outerjoin(
-                    PipelineRunSchema,
-                    PipelineSchema.id == PipelineRunSchema.pipeline_id,  # type: ignore[arg-type]
-                )
-                .group_by(PipelineSchema.name)
-                .subquery()
-            )
-
-            query = (
-                select(
-                    max_date_subquery.c.name,
-                    PipelineRunSchema.id,
-                    PipelineRunSchema.status,
-                )
-                .outerjoin(
-                    PipelineRunSchema,
-                    PipelineRunSchema.created  # type: ignore[arg-type]
-                    == max_date_subquery.c.max_created,
-                )
-                .order_by(desc(PipelineRunSchema.updated))  # type: ignore[arg-type]
-            )
-
-            return self.filter_and_paginate(
-                session=session,
-                query=query,
-                table=PipelineSchema,
-                filter_model=filter_model,
-                hydrate=hydrate,
-                custom_fetch=_custom_fetch,
-                custom_schema_to_model_conversion=_custom_conversion,
+            return pipeline.to_model(
+                include_metadata=hydrate, include_resources=True
             )
 
     def list_pipelines(
@@ -4121,8 +4055,48 @@
         Returns:
             A list of all pipelines matching the filter criteria.
         """
+        query = select(PipelineSchema)
+
+        column, operand = pipeline_filter_model.sorting_params
+        if column == SORT_PIPELINES_BY_LATEST_RUN_KEY:
+            with Session(self.engine) as session:
+                max_date_subquery = (
+                    # If no run exists for the pipeline yet, we use the pipeline
+                    # creation date as a fallback, otherwise newly created
+                    # pipeline would always be at the top/bottom
+                    select(
+                        PipelineSchema.id,
+                        case(
+                            (
+                                func.max(PipelineRunSchema.created).is_(None),
+                                PipelineSchema.created,
+                            ),
+                            else_=func.max(PipelineRunSchema.created),
+                        ).label("run_or_created"),
+                    )
+                    .outerjoin(
+                        PipelineRunSchema,
+                        PipelineSchema.id == PipelineRunSchema.pipeline_id,  # type: ignore[arg-type]
+                    )
+                    .group_by(col(PipelineSchema.id))
+                    .subquery()
+                )
+
+                if operand == SorterOps.DESCENDING:
+                    sort_clause = desc
+                else:
+                    sort_clause = asc
+
+                query = (
+                    query.where(PipelineSchema.id == max_date_subquery.c.id)
+                    .order_by(sort_clause(max_date_subquery.c.run_or_created))
+                    # We always add the `id` column as a tiebreaker to ensure a
+                    # stable, repeatable order of items, otherwise subsequent
+                    # pages might contain the same items.
+                    .order_by(col(PipelineSchema.id))
+                )
+
         with Session(self.engine) as session:
-            query = select(PipelineSchema)
             return self.filter_and_paginate(
                 session=session,
                 query=query,
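The new `SORT_PIPELINES_BY_LATEST_RUN_KEY` branch orders pipelines by their most recent run, falling back to the pipeline's own creation time so pipelines without runs do not all sink to one end of the list. The same pattern can be reproduced with plain SQLAlchemy Core; the tables below are invented for the illustration and are not ZenML schemas:

```python
from sqlalchemy import (
    Column, DateTime, ForeignKey, Integer, MetaData, Table,
    case, desc, func, select,
)

metadata = MetaData()
parents = Table(
    "parents", metadata,
    Column("id", Integer, primary_key=True),
    Column("created", DateTime),
)
children = Table(
    "children", metadata,
    Column("id", Integer, primary_key=True),
    Column("parent_id", Integer, ForeignKey("parents.id")),
    Column("created", DateTime),
)

# Latest child timestamp per parent, or the parent's creation time if it
# has no children yet.
latest = (
    select(
        parents.c.id,
        case(
            (func.max(children.c.created).is_(None), parents.c.created),
            else_=func.max(children.c.created),
        ).label("run_or_created"),
    )
    .outerjoin(children, parents.c.id == children.c.parent_id)
    .group_by(parents.c.id)
    .subquery()
)

query = (
    select(parents)
    .where(parents.c.id == latest.c.id)
    .order_by(desc(latest.c.run_or_created))
    .order_by(parents.c.id)  # stable tiebreaker for pagination
)
```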
@@ -4172,13 +4146,29 @@
                     f"No pipeline with this ID found."
                 )
 
-            # Update the pipeline
-            existing_pipeline.update(pipeline_update)
+            if pipeline_update.add_tags:
+                self._attach_tags_to_resource(
+                    tag_names=pipeline_update.add_tags,
+                    resource_id=existing_pipeline.id,
+                    resource_type=TaggableResourceTypes.PIPELINE,
+                )
+            pipeline_update.add_tags = None
+            if pipeline_update.remove_tags:
+                self._detach_tags_from_resource(
+                    tag_names=pipeline_update.remove_tags,
+                    resource_id=existing_pipeline.id,
+                    resource_type=TaggableResourceTypes.PIPELINE,
+                )
+            pipeline_update.remove_tags = None
 
+            existing_pipeline.update(pipeline_update)
             session.add(existing_pipeline)
             session.commit()
+            session.refresh(existing_pipeline)
 
-            return existing_pipeline.to_model(include_metadata=True)
+            return existing_pipeline.to_model(
+                include_metadata=True, include_resources=True
+            )
 
     def delete_pipeline(self, pipeline_id: UUID) -> None:
         """Deletes a pipeline.
@@ -4308,24 +4298,6 @@
             session.delete(build)
             session.commit()
 
-    def run_build(
-        self,
-        build_id: UUID,
-        run_configuration: Optional[PipelineRunConfiguration] = None,
-    ) -> NoReturn:
-        """Run a pipeline from a build.
-
-        Args:
-            build_id: The ID of the build to run.
-            run_configuration: Configuration for the run.
-
-        Raises:
-            NotImplementedError: Always.
-        """
-        raise NotImplementedError(
-            "Running a build is not possible with a local store."
-        )
-
     # -------------------------- Pipeline Deployments --------------------------
 
     def create_deployment(
@@ -4436,37 +4408,235 @@
                 )
 
             session.delete(deployment)
+            session.commit()
 
-            # Look for all pipeline builds that reference this deployment
-            # and remove the reference
-            pipeline_builds = session.exec(
-                select(PipelineBuildSchema).where(
-                    PipelineBuildSchema.template_deployment_id == deployment_id
+    # -------------------- Run templates --------------------
+
+    @track_decorator(AnalyticsEvent.CREATED_RUN_TEMPLATE)
+    def create_run_template(
+        self,
+        template: RunTemplateRequest,
+    ) -> RunTemplateResponse:
+        """Create a new run template.
+
+        Args:
+            template: The template to create.
+
+        Returns:
+            The newly created template.
+
+        Raises:
+            EntityExistsError: If a template with the same name already exists.
+            ValueError: If the source deployment does not exist or does not
+                have an associated build.
+        """
+        with Session(self.engine) as session:
+            existing_template = session.exec(
+                select(RunTemplateSchema)
+                .where(RunTemplateSchema.name == template.name)
+                .where(RunTemplateSchema.workspace_id == template.workspace)
+            ).first()
+            if existing_template is not None:
+                raise EntityExistsError(
+                    f"Unable to create run template in workspace "
+                    f"'{existing_template.workspace.name}': A run template "
+                    "with this name already exists."
                 )
-            ).all()
 
-            for pipeline_build in pipeline_builds:
-                pipeline_build.template_deployment_id = None
-                session.add(pipeline_build)
+            deployment = session.exec(
+                select(PipelineDeploymentSchema).where(
+                    PipelineDeploymentSchema.id
+                    == template.source_deployment_id
+                )
+            ).first()
+            if not deployment:
+                raise ValueError(
+                    f"Source deployment {template.source_deployment_id} not "
+                    "found."
+                )
+
+            template_utils.validate_deployment_is_templatable(deployment)
+
+            template_schema = RunTemplateSchema.from_request(request=template)
+
+            if template.tags:
+                self._attach_tags_to_resource(
+                    tag_names=template.tags,
+                    resource_id=template_schema.id,
+                    resource_type=TaggableResourceTypes.RUN_TEMPLATE,
+                )
 
+            session.add(template_schema)
             session.commit()
+            session.refresh(template_schema)
 
-    def run_deployment(
+            return template_schema.to_model(
+                include_metadata=True, include_resources=True
+            )
+
+    def get_run_template(
+        self, template_id: UUID, hydrate: bool = True
+    ) -> RunTemplateResponse:
+        """Get a run template with a given ID.
+
+        Args:
+            template_id: ID of the template.
+            hydrate: Flag deciding whether to hydrate the output model(s)
+                by including metadata fields in the response.
+
+        Returns:
+            The template.
+
+        Raises:
+            KeyError: If the template does not exist.
+        """
+        with Session(self.engine) as session:
+            template = session.exec(
+                select(RunTemplateSchema).where(
+                    RunTemplateSchema.id == template_id
+                )
+            ).first()
+            if template is None:
+                raise KeyError(
+                    f"Unable to get run template with ID {template_id}: "
+                    f"No run template with this ID found."
+                )
+
+            return template.to_model(
+                include_metadata=hydrate, include_resources=True
+            )
+
+    def list_run_templates(
+        self,
+        template_filter_model: RunTemplateFilter,
+        hydrate: bool = False,
+    ) -> Page[RunTemplateResponse]:
+        """List all run templates matching the given filter criteria.
+
+        Args:
+            template_filter_model: All filter parameters including pagination
+                params.
+            hydrate: Flag deciding whether to hydrate the output model(s)
+                by including metadata fields in the response.
+
+        Returns:
+            A list of all templates matching the filter criteria.
+        """
+        with Session(self.engine) as session:
+            query = select(RunTemplateSchema)
+            return self.filter_and_paginate(
+                session=session,
+                query=query,
+                table=RunTemplateSchema,
+                filter_model=template_filter_model,
+                hydrate=hydrate,
+            )
+
+    def update_run_template(
+        self,
+        template_id: UUID,
+        template_update: RunTemplateUpdate,
+    ) -> RunTemplateResponse:
+        """Updates a run template.
+
+        Args:
+            template_id: The ID of the template to update.
+            template_update: The update to apply.
+
+        Returns:
+            The updated template.
+
+        Raises:
+            KeyError: If the template does not exist.
+        """
+        with Session(self.engine) as session:
+            template = session.exec(
+                select(RunTemplateSchema).where(
+                    RunTemplateSchema.id == template_id
+                )
+            ).first()
+            if template is None:
+                raise KeyError(
+                    f"Unable to update run template with ID {template_id}: "
+                    f"No run template with this ID found."
+                )
+
+            if template_update.add_tags:
+                self._attach_tags_to_resource(
+                    tag_names=template_update.add_tags,
+                    resource_id=template.id,
+                    resource_type=TaggableResourceTypes.RUN_TEMPLATE,
+                )
+            template_update.add_tags = None
+
+            if template_update.remove_tags:
+                self._detach_tags_from_resource(
+                    tag_names=template_update.remove_tags,
+                    resource_id=template.id,
+                    resource_type=TaggableResourceTypes.RUN_TEMPLATE,
+                )
+            template_update.remove_tags = None
+
+            template.update(template_update)
+            session.add(template)
+            session.commit()
+            session.refresh(template)
+
+            return template.to_model(
+                include_metadata=True, include_resources=True
+            )
+
+    def delete_run_template(self, template_id: UUID) -> None:
+        """Delete a run template.
+
+        Args:
+            template_id: The ID of the template to delete.
+
+        Raises:
+            KeyError: If the template does not exist.
+        """
+        with Session(self.engine) as session:
+            template = session.exec(
+                select(RunTemplateSchema).where(
+                    RunTemplateSchema.id == template_id
+                )
+            ).first()
+            if template is None:
+                raise KeyError(
+                    f"Unable to delete run template with ID {template_id}: "
+                    f"No run template with this ID found."
+                )
+
+            session.delete(template)
+            # We set the reference of all deployments to this template to null
+            # manually as we can't have a foreign key there to avoid a cycle
+            deployments = session.exec(
+                select(PipelineDeploymentSchema).where(
+                    PipelineDeploymentSchema.template_id == template_id
+                )
+            ).all()
+            for deployment in deployments:
+                deployment.template_id = None
+                session.add(deployment)
+
+            session.commit()
+
+    def run_template(
         self,
-        deployment_id: UUID,
+        template_id: UUID,
         run_configuration: Optional[PipelineRunConfiguration] = None,
     ) -> NoReturn:
-        """Run a pipeline from a deployment.
+        """Run a template.
 
         Args:
-            deployment_id: The ID of the deployment to run.
+            template_id: The ID of the template to run.
             run_configuration: Configuration for the run.
 
         Raises:
             NotImplementedError: Always.
         """
         raise NotImplementedError(
-            "Running a deployment is not possible with a local store."
+            "Running a template is not possible with a local store."
         )
 
     # -------------------- Event Sources --------------------
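The new run template CRUD methods above follow the same session/commit/refresh pattern as the other entities, and `delete_run_template` clears the deployment back-references by hand because a foreign key in that direction would create a cycle. A hedged sketch of how the new store methods might be called; request fields beyond `name`, `source_deployment_id`, `user`, `workspace`, and `tags` (those visible in this diff or standard workspace-scoped fields) are assumptions:

```python
from uuid import UUID

from zenml.client import Client
from zenml.models import RunTemplateRequest, RunTemplateUpdate

client = Client()
store = client.zen_store

# Placeholder ID of an existing, templatable pipeline deployment.
deployment_id = UUID("00000000-0000-0000-0000-000000000000")

template = store.create_run_template(
    RunTemplateRequest(
        name="nightly-training",
        source_deployment_id=deployment_id,
        user=client.active_user.id,
        workspace=client.active_workspace.id,
        tags=["training"],
    )
)

# Tags can later be adjusted through the update model.
store.update_run_template(
    template_id=template.id,
    template_update=RunTemplateUpdate(
        add_tags=["prod"], remove_tags=["training"]
    ),
)
```

Note that `run_template` itself raises `NotImplementedError` on the local SQL store; only a ZenML server (via the new `run_templates_endpoints.py` router) can actually execute a template.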
@@ -4563,7 +4733,7 @@
         with Session(self.engine) as session:
             return self._get_event_source(
                 event_source_id=event_source_id, session=session
-            ).to_model(include_metadata=hydrate, include_resources=hydrate)
+            ).to_model(include_metadata=hydrate, include_resources=True)
 
     def list_event_sources(
         self,
@@ -4681,10 +4851,20 @@
 
             # Create the pipeline run
             new_run = PipelineRunSchema.from_request(pipeline_run)
+
+            if pipeline_run.tags:
+                self._attach_tags_to_resource(
+                    tag_names=pipeline_run.tags,
+                    resource_id=new_run.id,
+                    resource_type=TaggableResourceTypes.PIPELINE_RUN,
+                )
+
             session.add(new_run)
             session.commit()
 
-            return new_run.to_model(include_metadata=True)
+            return new_run.to_model(
+                include_metadata=True, include_resources=True
+            )
 
     def get_run(
         self, run_name_or_id: Union[str, UUID], hydrate: bool = True
@@ -4702,7 +4882,7 @@
         with Session(self.engine) as session:
             return self._get_run_schema(
                 run_name_or_id, session=session
-            ).to_model(include_metadata=hydrate, include_resources=hydrate)
+            ).to_model(include_metadata=hydrate, include_resources=True)
 
     def _replace_placeholder_run(
         self,
@@ -4751,10 +4931,20 @@
             if pre_replacement_hook:
                 pre_replacement_hook()
             run_schema.update_placeholder(pipeline_run)
+
+            if pipeline_run.tags:
+                self._attach_tags_to_resource(
+                    tag_names=pipeline_run.tags,
+                    resource_id=run_schema.id,
+                    resource_type=TaggableResourceTypes.PIPELINE_RUN,
+                )
+
             session.add(run_schema)
             session.commit()
 
-            return run_schema.to_model(include_metadata=True)
+            return run_schema.to_model(
+                include_metadata=True, include_resources=True
+            )
 
     def _get_run_by_orchestrator_run_id(
         self, orchestrator_run_id: str, deployment_id: UUID
@@ -4788,7 +4978,9 @@
                     f"{orchestrator_run_id} and deployment ID {deployment_id}."
                 )
 
-            return run_schema.to_model(include_metadata=True)
+            return run_schema.to_model(
+                include_metadata=True, include_resources=True
+            )
 
     def get_or_create_run(
         self,
@@ -4949,13 +5141,29 @@
                     f"No pipeline run with this ID found."
                 )
 
-            # Update the pipeline run
+            if run_update.add_tags:
+                self._attach_tags_to_resource(
+                    tag_names=run_update.add_tags,
+                    resource_id=existing_run.id,
+                    resource_type=TaggableResourceTypes.PIPELINE_RUN,
+                )
+            run_update.add_tags = None
+            if run_update.remove_tags:
+                self._detach_tags_from_resource(
+                    tag_names=run_update.remove_tags,
+                    resource_id=existing_run.id,
+                    resource_type=TaggableResourceTypes.PIPELINE_RUN,
+                )
+            run_update.remove_tags = None
+
             existing_run.update(run_update=run_update)
             session.add(existing_run)
             session.commit()
 
             session.refresh(existing_run)
-            return existing_run.to_model(include_metadata=True)
+            return existing_run.to_model(
+                include_metadata=True, include_resources=True
+            )
 
     def delete_run(self, run_id: UUID) -> None:
         """Deletes a pipeline run.
@@ -7086,12 +7294,16 @@
                         )
                         is not False
                     ):
+                        connector_config = (
+                            existing_service_connector.configuration
+                        )
+                        connector_config["generate_temporary_tokens"] = (
+                            False
+                        )
                         self.update_service_connector(
                             existing_service_connector.id,
                             ServiceConnectorUpdate(
-                                configuration=existing_service_connector.configuration.update(
-                                    {"generate_temporary_tokens": False}
-                                )
+                                configuration=connector_config
                             ),
                         )
                     service_connectors.append(
@@ -7100,17 +7312,18 @@
                 # Create a new service connector
                 else:
                     connector_name = full_stack.name
+                    connector_config = connector_id_or_info.configuration
+                    connector_config[
+                        "generate_temporary_tokens"
+                    ] = not need_to_generate_permanent_tokens
+
                     while True:
                         try:
                             service_connector_request = ServiceConnectorRequest(
                                 name=connector_name,
                                 connector_type=connector_id_or_info.type,
                                 auth_method=connector_id_or_info.auth_method,
-                                configuration=connector_id_or_info.configuration.update(
-                                    {
-                                        "generate_temporary_tokens": not need_to_generate_permanent_tokens
-                                    }
-                                ),
+                                configuration=connector_config,
                                 user=full_stack.user,
                                 workspace=full_stack.workspace,
                                 labels={
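Both service connector hunks above fix the same bug: `dict.update()` mutates in place and returns `None`, so the removed code effectively passed `configuration=None` to the update and request models. The replacement mutates a local copy first and then passes the dictionary itself. A quick demonstration of the Python semantics involved:

```python
# dict.update() returns None, so its result must never be passed on directly.
config = {"region": "eu-west-1"}

broken = config.update({"generate_temporary_tokens": False})
print(broken)  # None

# Mutate first, then pass `config` itself.
config["generate_temporary_tokens"] = False
print(config)  # {'region': 'eu-west-1', 'generate_temporary_tokens': False}
```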
@@ -7739,7 +7952,7 @@
                     "run with this ID found."
                 )
             return step_run.to_model(
-                include_metadata=hydrate, include_resources=hydrate
+                include_metadata=hydrate, include_resources=True
            )
 
     def list_run_steps(
@@ -8087,6 +8300,7 @@
             ) as analytics_handler:
                 analytics_handler.metadata = {
                     "pipeline_run_id": pipeline_run_id,
+                    "template_id": pipeline_run.deployment.template_id,
                     "status": new_status,
                     "num_steps": num_steps,
                     "start_time": start_time_str,
@@ -8183,7 +8397,7 @@
             if trigger is None:
                 raise KeyError(f"Trigger with ID {trigger_id} not found.")
             return trigger.to_model(
-                include_metadata=hydrate, include_resources=hydrate
+                include_metadata=hydrate, include_resources=True
             )
 
     def list_triggers(
@@ -9772,7 +9986,7 @@
                     f"ID found."
                 )
             return model_version.to_model(
-                include_metadata=hydrate, include_resources=hydrate
+                include_metadata=hydrate, include_resources=True
             )
 
     def list_model_versions(