zenml-nightly 0.63.0.dev20240802__py3-none-any.whl → 0.64.0.dev20240810__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions exactly as they appear in the public registry.
Files changed (198)
  1. README.md +1 -1
  2. RELEASE_NOTES.md +79 -0
  3. zenml/VERSION +1 -1
  4. zenml/__init__.py +0 -4
  5. zenml/analytics/enums.py +0 -6
  6. zenml/cli/__init__.py +0 -61
  7. zenml/cli/base.py +1 -1
  8. zenml/cli/web_login.py +8 -0
  9. zenml/client.py +0 -4
  10. zenml/config/build_configuration.py +43 -17
  11. zenml/config/docker_settings.py +80 -57
  12. zenml/config/source.py +58 -0
  13. zenml/constants.py +9 -2
  14. zenml/entrypoints/base_entrypoint_configuration.py +53 -8
  15. zenml/enums.py +1 -1
  16. zenml/environment.py +25 -9
  17. zenml/image_builders/base_image_builder.py +1 -1
  18. zenml/image_builders/build_context.py +25 -72
  19. zenml/integrations/azure/__init__.py +4 -0
  20. zenml/integrations/azure/flavors/__init__.py +11 -0
  21. zenml/integrations/azure/flavors/azureml_orchestrator_flavor.py +263 -0
  22. zenml/{_hub → integrations/azure/orchestrators}/__init__.py +7 -2
  23. zenml/integrations/azure/orchestrators/azureml_orchestrator.py +544 -0
  24. zenml/integrations/azure/orchestrators/azureml_orchestrator_entrypoint_config.py +86 -0
  25. zenml/integrations/azure/step_operators/azureml_step_operator.py +3 -0
  26. zenml/integrations/databricks/flavors/databricks_orchestrator_flavor.py +9 -0
  27. zenml/integrations/gcp/orchestrators/vertex_orchestrator.py +7 -2
  28. zenml/integrations/gcp/service_connectors/gcp_service_connector.py +123 -6
  29. zenml/integrations/kaniko/image_builders/kaniko_image_builder.py +1 -1
  30. zenml/integrations/mlflow/__init__.py +1 -1
  31. zenml/integrations/mlflow/experiment_trackers/mlflow_experiment_tracker.py +3 -1
  32. zenml/integrations/mlflow/flavors/mlflow_experiment_tracker_flavor.py +3 -0
  33. zenml/logger.py +13 -0
  34. zenml/models/__init__.py +0 -12
  35. zenml/models/v2/core/pipeline_deployment.py +21 -29
  36. zenml/models/v2/core/pipeline_run.py +13 -0
  37. zenml/models/v2/core/server_settings.py +12 -0
  38. zenml/models/v2/core/user.py +0 -21
  39. zenml/models/v2/misc/server_models.py +7 -1
  40. zenml/models/v2/misc/user_auth.py +0 -7
  41. zenml/new/pipelines/build_utils.py +193 -38
  42. zenml/new/pipelines/code_archive.py +157 -0
  43. zenml/new/pipelines/pipeline.py +29 -2
  44. zenml/new/pipelines/run_utils.py +67 -1
  45. zenml/service_connectors/service_connector_utils.py +14 -0
  46. zenml/stack_deployments/aws_stack_deployment.py +26 -3
  47. zenml/stack_deployments/azure_stack_deployment.py +11 -6
  48. zenml/stack_deployments/gcp_stack_deployment.py +24 -2
  49. zenml/stack_deployments/stack_deployment.py +17 -2
  50. zenml/steps/base_step.py +3 -0
  51. zenml/utils/archivable.py +149 -0
  52. zenml/utils/code_utils.py +244 -0
  53. zenml/utils/notebook_utils.py +122 -0
  54. zenml/utils/pipeline_docker_image_builder.py +3 -96
  55. zenml/utils/source_utils.py +109 -1
  56. zenml/zen_server/dashboard/assets/{404-CI13wQp4.js → 404-CRAA_Lew.js} +1 -1
  57. zenml/zen_server/dashboard/assets/@radix-BXWm7HOa.js +85 -0
  58. zenml/zen_server/dashboard/assets/{@react-router-CO-OsFwI.js → @react-router-l3lMcXA2.js} +1 -1
  59. zenml/zen_server/dashboard/assets/{@reactflow-DIYUhKYX.js → @reactflow-CeVxyqYT.js} +2 -2
  60. zenml/zen_server/dashboard/assets/{@tanstack-k96lU_C-.js → @tanstack-FmcYZMuX.js} +4 -4
  61. zenml/zen_server/dashboard/assets/AlertDialogDropdownItem-ErO9aOgK.js +1 -0
  62. zenml/zen_server/dashboard/assets/{AwarenessChannel-BNg5uWgI.js → AwarenessChannel-CLXo5rKM.js} +1 -1
  63. zenml/zen_server/dashboard/assets/{CodeSnippet-Cyp7f4dM.js → CodeSnippet-D0VLxT2A.js} +1 -1
  64. zenml/zen_server/dashboard/assets/{CollapsibleCard-Cu_A9W57.js → CollapsibleCard-BaUPiVg0.js} +1 -1
  65. zenml/zen_server/dashboard/assets/{Commands-DmQwTXjj.js → Commands-JrcZK-3j.js} +1 -1
  66. zenml/zen_server/dashboard/assets/CopyButton-Dbo52T1K.js +2 -0
  67. zenml/zen_server/dashboard/assets/{CsvVizualization-BvqItd-O.js → CsvVizualization-D3kAypDj.js} +3 -3
  68. zenml/zen_server/dashboard/assets/DisplayDate-DizbSeT-.js +1 -0
  69. zenml/zen_server/dashboard/assets/EditSecretDialog-Bd7mFLS4.js +1 -0
  70. zenml/zen_server/dashboard/assets/{EmptyState-BMLnFVlB.js → EmptyState-BHblM39I.js} +1 -1
  71. zenml/zen_server/dashboard/assets/{Error-DbXCTGua.js → Error-C6LeJSER.js} +1 -1
  72. zenml/zen_server/dashboard/assets/{ExecutionStatus-9zM7eaLh.js → ExecutionStatus-jH4OrWBq.js} +1 -1
  73. zenml/zen_server/dashboard/assets/{Helpbox-BIiNc-uH.js → Helpbox-aAB2XP-z.js} +1 -1
  74. zenml/zen_server/dashboard/assets/{Infobox-iv1Nu1A0.js → Infobox-BQ0aty32.js} +1 -1
  75. zenml/zen_server/dashboard/assets/{InlineAvatar-BvBtO2Dp.js → InlineAvatar-DpTLgM3Q.js} +1 -1
  76. zenml/zen_server/dashboard/assets/Lock-CNyJvf2r.js +1 -0
  77. zenml/zen_server/dashboard/assets/{MarkdownVisualization-xp3hhULl.js → MarkdownVisualization-Bajxn0HY.js} +1 -1
  78. zenml/zen_server/dashboard/assets/NumberBox-BmKE0qnO.js +1 -0
  79. zenml/zen_server/dashboard/assets/{PasswordChecker-DUveqlva.js → PasswordChecker-yGGoJSB-.js} +1 -1
  80. zenml/zen_server/dashboard/assets/{ProviderRadio-pSAvrGRS.js → ProviderRadio-BBqkIuTd.js} +1 -1
  81. zenml/zen_server/dashboard/assets/RadioItem-xLhXoiFV.js +1 -0
  82. zenml/zen_server/dashboard/assets/SearchField-C9R0mdaX.js +1 -0
  83. zenml/zen_server/dashboard/assets/{SetPassword-BOxpgh6N.js → SetPassword-52sNxNiO.js} +1 -1
  84. zenml/zen_server/dashboard/assets/{SuccessStep-CTSKN2lp.js → SuccessStep-DlkItqYG.js} +1 -1
  85. zenml/zen_server/dashboard/assets/{Tick-Bnr2TpW6.js → Tick-uxv80Q6a.js} +1 -1
  86. zenml/zen_server/dashboard/assets/{UpdatePasswordSchemas-BeCeaRW5.js → UpdatePasswordSchemas-oN4G3sKz.js} +1 -1
  87. zenml/zen_server/dashboard/assets/{aws-BgKTfTfx.js → aws-0_3UsPif.js} +1 -1
  88. zenml/zen_server/dashboard/assets/{check-circle-i56092KI.js → check-circle-1_I207rW.js} +1 -1
  89. zenml/zen_server/dashboard/assets/{chevron-down-D_ZlKMqH.js → chevron-down-BpaF8JqM.js} +1 -1
  90. zenml/zen_server/dashboard/assets/{chevron-right-double-CZBOf6JM.js → chevron-right-double-Dk8e2L99.js} +1 -1
  91. zenml/zen_server/dashboard/assets/{cloud-only-qelmY92E.js → cloud-only-BkUuI0lZ.js} +1 -1
  92. zenml/zen_server/dashboard/assets/components-Br2ezRib.js +1 -0
  93. zenml/zen_server/dashboard/assets/{copy-BXNk6BjL.js → copy-f3XGPPxt.js} +1 -1
  94. zenml/zen_server/dashboard/assets/{database-1xWSgZfO.js → database-cXYNX9tt.js} +1 -1
  95. zenml/zen_server/dashboard/assets/{docker-CQMVm_4d.js → docker-8uj__HHK.js} +1 -1
  96. zenml/zen_server/dashboard/assets/{dots-horizontal-BObFzD5l.js → dots-horizontal-sKQlWEni.js} +1 -1
  97. zenml/zen_server/dashboard/assets/edit-C0MVvPD2.js +1 -0
  98. zenml/zen_server/dashboard/assets/{file-text-CqD_iu6l.js → file-text-B9JibxTs.js} +1 -1
  99. zenml/zen_server/dashboard/assets/{help-bu_DgLKI.js → help-FuHlZwn0.js} +1 -1
  100. zenml/zen_server/dashboard/assets/index-Bd1xgUQG.js +1 -0
  101. zenml/zen_server/dashboard/assets/index-DaGknux4.css +1 -0
  102. zenml/zen_server/dashboard/assets/{index-KsTz2dHG.js → index-DhIZtpxB.js} +5 -5
  103. zenml/zen_server/dashboard/assets/{index.esm-CbHNSeVw.js → index.esm-DT4uyn2i.js} +1 -1
  104. zenml/zen_server/dashboard/assets/layout-D6oiSbfd.js +1 -0
  105. zenml/zen_server/dashboard/assets/{login-mutation-DRpbESS7.js → login-mutation-13A_JSVA.js} +1 -1
  106. zenml/zen_server/dashboard/assets/{logs-D8k8BVFf.js → logs-CgeE2vZP.js} +1 -1
  107. zenml/zen_server/dashboard/assets/{not-found-Dfx9hfkf.js → not-found-B0Mmb90p.js} +1 -1
  108. zenml/zen_server/dashboard/assets/{package-ClbU3KUi.js → package-DdkziX79.js} +1 -1
  109. zenml/zen_server/dashboard/assets/page-7-v2OBm-.js +1 -0
  110. zenml/zen_server/dashboard/assets/{page-f3jBVI5Z.js → page-B3ozwdD1.js} +1 -1
  111. zenml/zen_server/dashboard/assets/{page-DYBNGxJt.js → page-BGwA9B1M.js} +1 -1
  112. zenml/zen_server/dashboard/assets/{page-C176KxyB.js → page-BkjAUyTA.js} +1 -1
  113. zenml/zen_server/dashboard/assets/page-BnacgBiy.js +1 -0
  114. zenml/zen_server/dashboard/assets/{page-CzucfYPo.js → page-BxF_KMQ3.js} +2 -2
  115. zenml/zen_server/dashboard/assets/page-C4POHC0K.js +1 -0
  116. zenml/zen_server/dashboard/assets/page-C9kudd44.js +9 -0
  117. zenml/zen_server/dashboard/assets/page-CA1j3GpJ.js +1 -0
  118. zenml/zen_server/dashboard/assets/page-CCY6yfmu.js +1 -0
  119. zenml/zen_server/dashboard/assets/page-CgTe7Bme.js +1 -0
  120. zenml/zen_server/dashboard/assets/{page-DtpwnNXq.js → page-Cgn-6v2Y.js} +1 -1
  121. zenml/zen_server/dashboard/assets/page-CxQmQqDw.js +1 -0
  122. zenml/zen_server/dashboard/assets/page-D2Goey3H.js +1 -0
  123. zenml/zen_server/dashboard/assets/page-DLpOnf7u.js +1 -0
  124. zenml/zen_server/dashboard/assets/{page-DVPxY5fT.js → page-DSTQnBk-.js} +1 -1
  125. zenml/zen_server/dashboard/assets/{page-BoFtUD9H.js → page-DTysUGOy.js} +1 -1
  126. zenml/zen_server/dashboard/assets/{page-p2hLJdS2.js → page-D_EXUFJb.js} +1 -1
  127. zenml/zen_server/dashboard/assets/page-Db15QzsM.js +1 -0
  128. zenml/zen_server/dashboard/assets/{page-Btu39x7k.js → page-DugsjcQ_.js} +1 -1
  129. zenml/zen_server/dashboard/assets/{page-CZe9GEBF.js → page-OFKSPyN7.js} +1 -1
  130. zenml/zen_server/dashboard/assets/{page-CDgZmwxP.js → page-RnG-qhv9.js} +1 -1
  131. zenml/zen_server/dashboard/assets/{page-Cjn97HMv.js → page-T2BtjwPl.js} +1 -1
  132. zenml/zen_server/dashboard/assets/page-TXe1Eo3Z.js +1 -0
  133. zenml/zen_server/dashboard/assets/{page-BxiWdeyg.js → page-YiF_fNbe.js} +1 -1
  134. zenml/zen_server/dashboard/assets/{page-399pVZHU.js → page-hQaiQXfg.js} +1 -1
  135. zenml/zen_server/dashboard/assets/persist-3-5nOJ6m.js +1 -0
  136. zenml/zen_server/dashboard/assets/{play-circle-CNtZKDnW.js → play-circle-XSkLR12B.js} +1 -1
  137. zenml/zen_server/dashboard/assets/{plus-DOeLmm7C.js → plus-FB9-lEq_.js} +1 -1
  138. zenml/zen_server/dashboard/assets/refresh-COb6KYDi.js +1 -0
  139. zenml/zen_server/dashboard/assets/sharedSchema-BoYx_B_L.js +14 -0
  140. zenml/zen_server/dashboard/assets/{stack-detail-query-Ck7j7BP_.js → stack-detail-query-B-US_-wa.js} +1 -1
  141. zenml/zen_server/dashboard/assets/{terminal-By9cErXc.js → terminal-grtjrIEJ.js} +1 -1
  142. zenml/zen_server/dashboard/assets/trash-Cd5CSFqA.js +1 -0
  143. zenml/zen_server/dashboard/assets/{update-server-settings-mutation-f3ZT7psb.js → update-server-settings-mutation-B8GB_ubU.js} +1 -1
  144. zenml/zen_server/dashboard/assets/{url-rGEp5Umh.js → url-hcMJkz8p.js} +1 -1
  145. zenml/zen_server/dashboard/assets/{zod-BtSyGx4C.js → zod-CnykDKJj.js} +1 -1
  146. zenml/zen_server/dashboard/index.html +7 -7
  147. zenml/zen_server/dashboard_legacy/asset-manifest.json +4 -4
  148. zenml/zen_server/dashboard_legacy/index.html +1 -1
  149. zenml/zen_server/dashboard_legacy/{precache-manifest.2fa6e528a6e7447caaf35dadfe7514bb.js → precache-manifest.9c473c96a43298343a7ce1256183123b.js} +4 -4
  150. zenml/zen_server/dashboard_legacy/service-worker.js +1 -1
  151. zenml/zen_server/dashboard_legacy/static/js/{main.4aab7e98.chunk.js → main.463c90b9.chunk.js} +2 -2
  152. zenml/zen_server/dashboard_legacy/static/js/{main.4aab7e98.chunk.js.map → main.463c90b9.chunk.js.map} +1 -1
  153. zenml/zen_server/deploy/helm/Chart.yaml +1 -1
  154. zenml/zen_server/deploy/helm/README.md +2 -2
  155. zenml/zen_server/routers/stack_deployment_endpoints.py +6 -0
  156. zenml/zen_server/routers/users_endpoints.py +0 -7
  157. zenml/zen_server/utils.py +75 -0
  158. zenml/zen_server/zen_server_api.py +52 -1
  159. zenml/zen_stores/base_zen_store.py +7 -1
  160. zenml/zen_stores/migrations/versions/0.64.0_release.py +23 -0
  161. zenml/zen_stores/migrations/versions/026d4577b6a0_add_code_path.py +39 -0
  162. zenml/zen_stores/migrations/versions/3dcc5d20e82f_add_last_user_activity.py +51 -0
  163. zenml/zen_stores/migrations/versions/909550c7c4da_remove_user_hub_token.py +36 -0
  164. zenml/zen_stores/rest_zen_store.py +5 -3
  165. zenml/zen_stores/schemas/pipeline_deployment_schemas.py +3 -0
  166. zenml/zen_stores/schemas/pipeline_run_schemas.py +3 -0
  167. zenml/zen_stores/schemas/server_settings_schemas.py +2 -0
  168. zenml/zen_stores/schemas/user_schemas.py +0 -2
  169. zenml/zen_stores/sql_zen_store.py +25 -1
  170. {zenml_nightly-0.63.0.dev20240802.dist-info → zenml_nightly-0.64.0.dev20240810.dist-info}/METADATA +2 -2
  171. {zenml_nightly-0.63.0.dev20240802.dist-info → zenml_nightly-0.64.0.dev20240810.dist-info}/RECORD +174 -157
  172. zenml/_hub/client.py +0 -289
  173. zenml/_hub/constants.py +0 -21
  174. zenml/_hub/utils.py +0 -79
  175. zenml/cli/hub.py +0 -1116
  176. zenml/models/v2/misc/hub_plugin_models.py +0 -79
  177. zenml/zen_server/dashboard/assets/@radix-CFOkMR_E.js +0 -85
  178. zenml/zen_server/dashboard/assets/CopyButton-B3sWVJ4Z.js +0 -2
  179. zenml/zen_server/dashboard/assets/DisplayDate-DYgIjlDF.js +0 -1
  180. zenml/zen_server/dashboard/assets/SearchField-CXoBknpt.js +0 -1
  181. zenml/zen_server/dashboard/assets/components-DWe4cTjS.js +0 -1
  182. zenml/zen_server/dashboard/assets/index-vfjX_fJV.css +0 -1
  183. zenml/zen_server/dashboard/assets/page-C6tXXjnK.js +0 -1
  184. zenml/zen_server/dashboard/assets/page-CP9obrnG.js +0 -1
  185. zenml/zen_server/dashboard/assets/page-CaTOsNNw.js +0 -1
  186. zenml/zen_server/dashboard/assets/page-CmXmB_5i.js +0 -1
  187. zenml/zen_server/dashboard/assets/page-CvGAOfad.js +0 -1
  188. zenml/zen_server/dashboard/assets/page-D0bbc-qr.js +0 -5
  189. zenml/zen_server/dashboard/assets/page-DLEtD2ex.js +0 -1
  190. zenml/zen_server/dashboard/assets/page-DupV0aBd.js +0 -1
  191. zenml/zen_server/dashboard/assets/page-EweAR81y.js +0 -1
  192. zenml/zen_server/dashboard/assets/page-w-YaL77M.js +0 -9
  193. zenml/zen_server/dashboard/assets/persist-BReKApOc.js +0 -14
  194. zenml/zen_server/dashboard/assets/secrets-video-OBJ6irhH.svg +0 -21
  195. zenml/zen_server/dashboard/assets/stacks-video-7gfxpAq4.svg +0 -21
  196. {zenml_nightly-0.63.0.dev20240802.dist-info → zenml_nightly-0.64.0.dev20240810.dist-info}/LICENSE +0 -0
  197. {zenml_nightly-0.63.0.dev20240802.dist-info → zenml_nightly-0.64.0.dev20240810.dist-info}/WHEEL +0 -0
  198. {zenml_nightly-0.63.0.dev20240802.dist-info → zenml_nightly-0.64.0.dev20240810.dist-info}/entry_points.txt +0 -0
zenml/new/pipelines/build_utils.py (+193 -38)

@@ -30,6 +30,7 @@ from zenml.code_repositories import BaseCodeRepository
 from zenml.logger import get_logger
 from zenml.models import (
     BuildItem,
+    CodeReferenceRequest,
     PipelineBuildBase,
     PipelineBuildRequest,
     PipelineBuildResponse,
@@ -37,9 +38,7 @@ from zenml.models import (
     StackResponse,
 )
 from zenml.stack import Stack
-from zenml.utils import (
-    source_utils,
-)
+from zenml.utils import source_utils
 from zenml.utils.pipeline_docker_image_builder import (
     PipelineDockerImageBuilder,
 )
@@ -64,6 +63,93 @@ def build_required(deployment: "PipelineDeploymentBase") -> bool:
     return bool(stack.get_docker_builds(deployment=deployment))
 
 
+def requires_included_code(
+    deployment: "PipelineDeploymentBase",
+    code_repository: Optional["BaseCodeRepository"] = None,
+) -> bool:
+    """Checks whether the deployment requires included code.
+
+    Args:
+        deployment: The deployment.
+        code_repository: If provided, this code repository can be used to
+            download the code inside the container images.
+
+    Returns:
+        If the deployment requires code included in the container images.
+    """
+    for step in deployment.step_configurations.values():
+        docker_settings = step.config.docker_settings
+
+        if docker_settings.allow_download_from_artifact_store:
+            return False
+
+        if docker_settings.allow_download_from_code_repository:
+            if code_repository:
+                continue
+
+        if docker_settings.allow_including_files_in_images:
+            return True
+
+    return False
+
+
+def requires_download_from_code_repository(
+    deployment: "PipelineDeploymentBase",
+) -> bool:
+    """Checks whether the deployment needs to download code from a repository.
+
+    Args:
+        deployment: The deployment.
+
+    Returns:
+        If the deployment needs to download code from a code repository.
+    """
+    for step in deployment.step_configurations.values():
+        docker_settings = step.config.docker_settings
+
+        if docker_settings.allow_download_from_artifact_store:
+            return False
+
+        if docker_settings.allow_including_files_in_images:
+            return False
+
+        if docker_settings.allow_download_from_code_repository:
+            # The other two options are false, which means download from a
+            # code repo is required.
+            return True
+
+    return False
+
+
+def code_download_possible(
+    deployment: "PipelineDeploymentBase",
+    code_repository: Optional["BaseCodeRepository"] = None,
+) -> bool:
+    """Checks whether code download is possible for the deployment.
+
+    Args:
+        deployment: The deployment.
+        code_repository: If provided, this code repository can be used to
+            download the code inside the container images.
+
+    Returns:
+        Whether code download is possible for the deployment.
+    """
+    for step in deployment.step_configurations.values():
+        if step.config.docker_settings.allow_download_from_artifact_store:
+            continue
+
+        if (
+            step.config.docker_settings.allow_download_from_code_repository
+            and code_repository
+        ):
+            continue
+
+        return False
+
+    return True
+
+
 def reuse_or_create_pipeline_build(
     deployment: "PipelineDeploymentBase",
     allow_build_reuse: bool,
@@ -82,8 +168,8 @@ def reuse_or_create_pipeline_build(
         build: Optional existing build. If given, the build will be fetched
             (or registered) in the database. If not given, a new build will
             be created.
-        code_repository: If provided, this code repository will be used to
-            download inside the build images.
+        code_repository: If provided, this code repository can be used to
+            download code inside the container images.
 
     Returns:
         The build response.
@@ -91,8 +177,10 @@ def reuse_or_create_pipeline_build(
     if not build:
         if (
             allow_build_reuse
-            and code_repository
-            and not deployment.requires_included_files
+            and not deployment.should_prevent_build_reuse
+            and not requires_included_code(
+                deployment=deployment, code_repository=code_repository
+            )
             and build_required(deployment=deployment)
         ):
             existing_build = find_existing_build(
@@ -108,17 +196,13 @@ def reuse_or_create_pipeline_build(
                 return existing_build
             else:
                 logger.info(
-                    "Unable to find a build to reuse. When using a code "
-                    "repository, a previous build can be reused when the "
-                    "following conditions are met:\n"
+                    "Unable to find a build to reuse. A previous build can be "
+                    "reused when the following conditions are met:\n"
                     " * The existing build was created for the same stack, "
                     "ZenML version and Python version\n"
                     " * The stack contains a container registry\n"
                     " * The Docker settings of the pipeline and all its steps "
-                    "are the same as for the existing build\n"
-                    " * The build does not include code. This will only be "
-                    "the case if the existing build was created with a clean "
-                    "code repository."
+                    "are the same as for the existing build."
                 )
 
     return create_pipeline_build(
@@ -150,7 +234,7 @@ def reuse_or_create_pipeline_build(
 
 def find_existing_build(
     deployment: "PipelineDeploymentBase",
-    code_repository: "BaseCodeRepository",
+    code_repository: Optional["BaseCodeRepository"] = None,
 ) -> Optional["PipelineBuildResponse"]:
     """Find an existing build for a deployment.
 
@@ -280,6 +364,11 @@ def create_pipeline_build(
         download_files = build_config.should_download_files(
             code_repository=code_repository,
         )
+        pass_code_repo = (
+            build_config.should_download_files_from_code_repository(
+                code_repository=code_repository
+            )
+        )
 
         (
             image_name_or_digest,
@@ -293,7 +382,7 @@ def create_pipeline_build(
             download_files=download_files,
             entrypoint=build_config.entrypoint,
             extra_files=build_config.extra_files,
-            code_repository=code_repository,
+            code_repository=code_repository if pass_code_repo else None,
         )
         contains_code = include_files
 
@@ -389,30 +478,30 @@ def verify_local_repository_context(
         deployment, or None if code download is not possible.
     """
     if build_required(deployment=deployment):
-        if deployment.requires_code_download:
+        if requires_download_from_code_repository(deployment=deployment):
             if not local_repo_context:
                 raise RuntimeError(
                     "The `DockerSettings` of the pipeline or one of its "
-                    "steps specify that code should be included in the "
-                    "Docker image (`source_files='download'`), but there is no "
-                    "code repository active at your current source root "
-                    f"`{source_utils.get_source_root()}`."
+                    "steps specify that code should be downloaded from a "
+                    "code repository, but "
+                    "there is no code repository active at your current source "
+                    f"root `{source_utils.get_source_root()}`."
                 )
             elif local_repo_context.is_dirty:
                 raise RuntimeError(
                     "The `DockerSettings` of the pipeline or one of its "
-                    "steps specify that code should be included in the "
-                    "Docker image (`source_files='download'`), but the code "
-                    "repository active at your current source root "
+                    "steps specify that code should be downloaded from a "
+                    "code repository, but "
+                    "the code repository active at your current source root "
                     f"`{source_utils.get_source_root()}` has uncommitted "
                     "changes."
                 )
             elif local_repo_context.has_local_changes:
                 raise RuntimeError(
                     "The `DockerSettings` of the pipeline or one of its "
-                    "steps specify that code should be included in the "
-                    "Docker image (`source_files='download'`), but the code "
-                    "repository active at your current source root "
+                    "steps specify that code should be downloaded from a "
+                    "code repository, but "
+                    "the code repository active at your current source root "
                     f"`{source_utils.get_source_root()}` has unpushed "
                     "changes."
                 )
@@ -420,13 +509,13 @@ def verify_local_repository_context(
     if local_repo_context:
         if local_repo_context.is_dirty:
             logger.warning(
-                "Unable to use code repository to download code for this run "
-                "as there are uncommitted changes."
+                "Unable to use code repository to download code for this "
+                "run as there are uncommitted changes."
            )
        elif local_repo_context.has_local_changes:
            logger.warning(
-                "Unable to use code repository to download code for this run "
-                "as there are unpushed changes."
+                "Unable to use code repository to download code for this "
+                "run as there are unpushed changes."
            )
 
    code_repository = None
@@ -475,13 +564,41 @@ def verify_custom_build(
            "might differ from the local code in your client environment."
        )
 
-    if build.requires_code_download and not code_repository:
-        raise RuntimeError(
-            "The build you specified does not include code but code download "
-            "not possible. This might be because you don't have a code "
-            "repository registered or the code repository contains local "
-            "changes."
-        )
+    if build.requires_code_download:
+        if requires_included_code(
+            deployment=deployment, code_repository=code_repository
+        ):
+            raise RuntimeError(
+                "The `DockerSettings` of the pipeline or one of its "
+                "steps specify that code should be included in the Docker "
+                "image, but the build you "
+                "specified requires code download. Either update your "
+                "`DockerSettings` or specify a different build and try "
+                "again."
+            )
+
+        if (
+            requires_download_from_code_repository(deployment=deployment)
+            and not code_repository
+        ):
+            raise RuntimeError(
+                "The `DockerSettings` of the pipeline or one of its "
+                "steps specify that code should be downloaded from a "
+                "code repository but "
+                "there is no code repository active at your current source "
+                f"root `{source_utils.get_source_root()}`."
+            )
+
+        if not code_download_possible(
+            deployment=deployment, code_repository=code_repository
+        ):
+            raise RuntimeError(
+                "The `DockerSettings` of the pipeline or one of its "
+                "steps specify that code can not be downloaded from the "
+                "artifact store, but the build you specified requires code "
+                "download. Either update your `DockerSettings` or specify a "
+                "different build and try again."
+            )
 
    if build.checksum:
        build_checksum = compute_build_checksum(
@@ -561,3 +678,41 @@ def compute_stack_checksum(stack: StackResponse) -> str:
        hash_.update(integration.encode())
 
    return hash_.hexdigest()
+
+
+def should_upload_code(
+    deployment: PipelineDeploymentBase,
+    build: Optional[PipelineBuildResponse],
+    code_reference: Optional[CodeReferenceRequest],
+) -> bool:
+    """Checks whether the current code should be uploaded for the deployment.
+
+    Args:
+        deployment: The deployment.
+        build: The build for the deployment.
+        code_reference: The code reference for the deployment.
+
+    Returns:
+        Whether the current code should be uploaded for the deployment.
+    """
+    if not build:
+        # No build means we don't need to download code into a Docker container
+        # for step execution. In other remote orchestrators that don't use
+        # Docker containers but instead use e.g. Wheels to run, the code should
+        # already be included.
+        return False
+
+    for step in deployment.step_configurations.values():
+        docker_settings = step.config.docker_settings
+
+        if (
+            code_reference
+            and docker_settings.allow_download_from_code_repository
+        ):
+            # No upload needed for this step
+            continue
+
+        if docker_settings.allow_download_from_artifact_store:
+            return True
+
+    return False
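Note: the helpers added above key off three per-step `DockerSettings` flags (`allow_download_from_artifact_store`, `allow_download_from_code_repository`, `allow_including_files_in_images`; see `zenml/config/docker_settings.py` in the file list). The following is a minimal, self-contained sketch of that decision logic, using a plain dataclass as a stand-in for the real ZenML models rather than the actual implementation:

# Sketch only: StepDockerSettings is a stand-in for ZenML's DockerSettings.
from dataclasses import dataclass
from typing import List


@dataclass
class StepDockerSettings:
    allow_download_from_artifact_store: bool = True
    allow_download_from_code_repository: bool = True
    allow_including_files_in_images: bool = True


def requires_included_code(
    steps: List[StepDockerSettings], has_code_repository: bool
) -> bool:
    # Mirrors build_utils.requires_included_code: code must be baked into the
    # image only if neither the artifact store nor an active code repository
    # can provide it at runtime.
    for settings in steps:
        if settings.allow_download_from_artifact_store:
            return False
        if settings.allow_download_from_code_repository and has_code_repository:
            continue
        if settings.allow_including_files_in_images:
            return True
    return False


if __name__ == "__main__":
    steps = [StepDockerSettings(allow_download_from_artifact_store=False)]
    print(requires_included_code(steps, has_code_repository=True))   # False
    print(requires_included_code(steps, has_code_repository=False))  # True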
zenml/new/pipelines/code_archive.py (+157 -0)

@@ -0,0 +1,157 @@
+#  Copyright (c) ZenML GmbH 2024. All Rights Reserved.
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at:
+#
+#       https://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+#  or implied. See the License for the specific language governing
+#  permissions and limitations under the License.
+"""Code archive."""
+
+import os
+from pathlib import Path
+from typing import IO, TYPE_CHECKING, Dict, Optional
+
+from zenml.logger import get_logger
+from zenml.utils import string_utils
+from zenml.utils.archivable import Archivable
+
+if TYPE_CHECKING:
+    from git.repo.base import Repo
+
+
+logger = get_logger(__name__)
+
+
+class CodeArchive(Archivable):
+    """Code archive class.
+
+    This class is used to archive user code before uploading it to the artifact
+    store. If the user code is stored in a Git repository, only files not
+    excluded by gitignores will be included in the archive.
+    """
+
+    def __init__(self, root: str) -> None:
+        """Initialize the object.
+
+        Args:
+            root: Root directory of the archive.
+        """
+        super().__init__()
+        self._root = root
+
+    @property
+    def git_repo(self) -> Optional["Repo"]:
+        """Git repository active at the code archive root.
+
+        Returns:
+            The git repository if available.
+        """
+        try:
+            # These imports fail when git is not installed on the machine
+            from git.exc import InvalidGitRepositoryError
+            from git.repo.base import Repo
+        except ImportError:
+            return None
+
+        try:
+            git_repo = Repo(path=self._root, search_parent_directories=True)
+        except InvalidGitRepositoryError:
+            return None
+
+        return git_repo
+
+    def _get_all_files(self) -> Dict[str, str]:
+        """Get all files inside the archive root.
+
+        Returns:
+            All files inside the archive root.
+        """
+        all_files = {}
+        for root, _, files in os.walk(self._root):
+            for file in files:
+                file_path = os.path.join(root, file)
+                path_in_archive = os.path.relpath(file_path, self._root)
+                all_files[path_in_archive] = file_path
+
+        return all_files
+
+    def get_files(self) -> Dict[str, str]:
+        """Gets all regular files that should be included in the archive.
+
+        Raises:
+            RuntimeError: If the code archive would not include any files.
+
+        Returns:
+            A dict {path_in_archive: path_on_filesystem} for all regular files
+            in the archive.
+        """
+        all_files = {}
+
+        if repo := self.git_repo:
+            try:
+                result = repo.git.ls_files(
+                    "--cached",
+                    "--others",
+                    "--modified",
+                    "--exclude-standard",
+                    self._root,
+                )
+            except Exception as e:
+                logger.warning(
+                    "Failed to get non-ignored files from git: %s", str(e)
+                )
+                all_files = self._get_all_files()
+            else:
+                for file in result.split():
+                    file_path = os.path.join(repo.working_dir, file)
+                    path_in_archive = os.path.relpath(file_path, self._root)
+
+                    if os.path.exists(file_path):
+                        all_files[path_in_archive] = file_path
+        else:
+            all_files = self._get_all_files()
+
+        if not all_files:
+            raise RuntimeError(
+                "The code archive to be uploaded does not contain any files. "
+                "This is probably because all files in your source root "
+                f"`{self._root}` are ignored by a .gitignore file."
+            )
+
+        # Explicitly remove .zen directories as we write an updated version
+        # to disk everytime ZenML is called. This updates the mtime of the
+        # file, which invalidates the code upload caching. The values in
+        # the .zen directory are not needed anyway as we set them as
+        # environment variables.
+        all_files = {
+            path_in_archive: file_path
+            for path_in_archive, file_path in sorted(all_files.items())
+            if ".zen" not in Path(path_in_archive).parts[:-1]
+        }
+
+        return all_files
+
+    def write_archive(
+        self, output_file: IO[bytes], use_gzip: bool = True
+    ) -> None:
+        """Writes an archive of the build context to the given file.
+
+        Args:
+            output_file: The file to write the archive to.
+            use_gzip: Whether to use `gzip` to compress the file.
+        """
+        super().write_archive(output_file=output_file, use_gzip=use_gzip)
+        archive_size = os.path.getsize(output_file.name)
+        if archive_size > 20 * 1024 * 1024:
+            logger.warning(
+                "Code archive size: `%s`. If you believe this is "
+                "unreasonably large, make sure to version your code in git and "
+                "ignore unnecessary files using a `.gitignore` file.",
+                string_utils.get_human_readable_filesize(archive_size),
+            )
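Note: a hypothetical direct usage of the new `CodeArchive` class, assuming it can be imported from the module path shown above (ZenML's own pipeline code reaches an equivalent class through `zenml.utils.code_utils` instead). In practice this is driven by ZenML itself rather than called by users:

# Assumed import path mirroring the new file location above.
from zenml.new.pipelines.code_archive import CodeArchive
from zenml.utils import source_utils

# Archive everything under the current source root, honoring .gitignore when
# a git repository is present.
archive = CodeArchive(root=source_utils.get_source_root())

# {path_in_archive: path_on_filesystem} for every file that would be shipped.
for path_in_archive in archive.get_files():
    print(path_in_archive)

# Write a gzip-compressed tarball of the code; a warning is logged for
# archives larger than 20 MiB.
with open("code.tar.gz", "wb") as f:
    archive.write_archive(f, use_gzip=True)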
zenml/new/pipelines/pipeline.py (+29 -2)

@@ -73,6 +73,7 @@ from zenml.new.pipelines.run_utils import (
     create_placeholder_run,
     deploy_pipeline,
     prepare_model_versions,
+    upload_notebook_cell_code_if_necessary,
 )
 from zenml.stack import Stack
 from zenml.steps import BaseStep
@@ -82,6 +83,7 @@ from zenml.steps.entrypoint_function_utils import (
 from zenml.steps.step_invocation import StepInvocation
 from zenml.utils import (
     code_repository_utils,
+    code_utils,
     dashboard_utils,
     dict_utils,
     pydantic_utils,
@@ -579,7 +581,8 @@ To avoid this consider setting pipeline parameters only in one place (config or
                 method.
             unlisted: Whether the pipeline run should be unlisted (not assigned
                 to any pipeline).
-            prevent_build_reuse: Whether to prevent the reuse of a build.
+            prevent_build_reuse: DEPRECATED: Use
+                `DockerSettings.prevent_build_reuse` instead.
 
         Returns:
             Model of the pipeline run if running without a schedule, `None` if
@@ -667,6 +670,9 @@ To avoid this consider setting pipeline parameters only in one place (config or
 
             stack = Client().active_stack
             stack.validate()
+            upload_notebook_cell_code_if_necessary(
+                deployment=deployment, stack=stack
+            )
 
             prepare_model_versions(deployment)
 
@@ -677,6 +683,13 @@ To avoid this consider setting pipeline parameters only in one place (config or
                 deployment=deployment, local_repo_context=local_repo_context
             )
 
+            if prevent_build_reuse:
+                logger.warning(
+                    "Passing `prevent_build_reuse=True` to "
+                    "`pipeline.with_opitions(...)` is deprecated. Use "
+                    "`DockerSettings.prevent_build_reuse` instead."
+                )
+
             build_model = build_utils.reuse_or_create_pipeline_build(
                 deployment=deployment,
                 pipeline_id=pipeline_id,
@@ -701,6 +714,18 @@ To avoid this consider setting pipeline parameters only in one place (config or
                     code_repository=local_repo_context.code_repository_id,
                 )
 
+            code_path = None
+            if build_utils.should_upload_code(
+                deployment=deployment,
+                build=build_model,
+                code_reference=code_reference,
+            ):
+                code_archive = code_utils.CodeArchive(
+                    root=source_utils.get_source_root()
+                )
+                logger.info("Archiving pipeline code...")
+                code_path = code_utils.upload_code_if_necessary(code_archive)
+
             deployment_request = PipelineDeploymentRequest(
                 user=Client().active_user.id,
                 workspace=Client().active_workspace.id,
@@ -709,6 +734,7 @@ To avoid this consider setting pipeline parameters only in one place (config or
                 build=build_id,
                 schedule=schedule_id,
                 code_reference=code_reference,
+                code_path=code_path,
                 **deployment.model_dump(),
             )
             deployment_model = Client().zen_store.create_deployment(
@@ -1271,7 +1297,8 @@ To avoid this consider setting pipeline parameters only in one place (config or
                 method.
             unlisted: Whether the pipeline run should be unlisted (not assigned
                 to any pipeline).
-            prevent_build_reuse: Whether to prevent the reuse of a build.
+            prevent_build_reuse: DEPRECATED: Use
+                `DockerSettings.prevent_build_reuse` instead.
             **kwargs: Pipeline configuration options. These will be passed
                 to the `pipeline.configure(...)` method.
 
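Note: the deprecation above moves `prevent_build_reuse` from a run argument onto `DockerSettings`. A hedged sketch of the replacement usage, assuming `DockerSettings` is importable from `zenml.config` and accepts a `prevent_build_reuse` field as the warning text implies; `my_pipeline` is a placeholder:

from zenml import pipeline
from zenml.config import DockerSettings  # assumed import location

# Previously: my_pipeline.with_options(prevent_build_reuse=True)
# Now the flag lives on DockerSettings (assumed field, per the warning above):
docker_settings = DockerSettings(prevent_build_reuse=True)


@pipeline(settings={"docker": docker_settings})
def my_pipeline() -> None:
    """Placeholder pipeline used only to show where the setting attaches."""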
zenml/new/pipelines/run_utils.py (+67 -1)

@@ -1,5 +1,6 @@
 """Utility functions for running pipelines."""
 
+import hashlib
 import time
 from collections import defaultdict
 from datetime import datetime
@@ -9,6 +10,7 @@ from uuid import UUID
 from zenml import constants
 from zenml.client import Client
 from zenml.config.pipeline_run_configuration import PipelineRunConfiguration
+from zenml.config.source import SourceType
 from zenml.config.step_configurations import StepConfigurationUpdate
 from zenml.enums import ExecutionStatus, ModelStages
 from zenml.logger import get_logger
@@ -23,7 +25,7 @@ from zenml.models import (
 from zenml.new.pipelines.model_utils import NewModelRequest
 from zenml.orchestrators.utils import get_run_name
 from zenml.stack import Flavor, Stack
-from zenml.utils import cloud_utils
+from zenml.utils import cloud_utils, code_utils, notebook_utils
 from zenml.zen_stores.base_zen_store import BaseZenStore
 
 if TYPE_CHECKING:
@@ -361,3 +363,67 @@ def validate_run_config_is_runnable_from_server(
         raise ValueError(
             "Can't set DockerSettings when running pipeline via Rest API."
         )
+
+
+def upload_notebook_cell_code_if_necessary(
+    deployment: "PipelineDeploymentBase", stack: "Stack"
+) -> None:
+    """Upload notebook cell code if necessary.
+
+    This function checks if any of the steps of the pipeline that will be
+    executed in a different process are defined in a notebook. If that is the
+    case, it will extract that notebook cell code into python files and upload
+    an archive of all the necessary files to the artifact store.
+
+    Args:
+        deployment: The deployment.
+        stack: The stack on which the deployment will happen.
+
+    Raises:
+        RuntimeError: If the code for one of the steps that will run out of
+            process cannot be extracted into a python file.
+    """
+    code_archive = code_utils.CodeArchive(root=None)
+    should_upload = False
+    sources_that_require_upload = []
+
+    for step in deployment.step_configurations.values():
+        source = step.spec.source
+
+        if source.type == SourceType.NOTEBOOK:
+            if (
+                stack.orchestrator.flavor != "local"
+                or step.config.step_operator
+            ):
+                should_upload = True
+                cell_code = getattr(step.spec.source, "_cell_code", None)
+
+                # Code does not run in-process, which means we need to
+                # extract the step code into a python file
+                if not cell_code:
+                    raise RuntimeError(
+                        f"Unable to run step {step.config.name}. This step is "
+                        "defined in a notebook and you're trying to run it "
+                        "in a remote environment, but ZenML was not able to "
+                        "detect the step code in the notebook. To fix "
+                        "this error, define your step in a python file instead "
+                        "of a notebook."
+                    )
+
+                notebook_utils.warn_about_notebook_cell_magic_commands(
+                    cell_code=cell_code
+                )
+
+                code_hash = hashlib.sha1(cell_code.encode()).hexdigest()  # nosec
+                module_name = f"extracted_notebook_code_{code_hash}"
+                file_name = f"{module_name}.py"
+                code_archive.add_file(source=cell_code, destination=file_name)
+
+                setattr(step.spec.source, "replacement_module", module_name)
+                sources_that_require_upload.append(source)
+
+    if should_upload:
+        logger.info("Archiving notebook code...")
+        code_path = code_utils.upload_code_if_necessary(code_archive)
+        for source in sources_that_require_upload:
+            setattr(source, "code_path", code_path)
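Note: a standalone illustration of the module-naming scheme used above for extracted notebook cells; the cell source below is a made-up placeholder, not a call into ZenML internals:

import hashlib

# Placeholder cell source; in ZenML this is the `_cell_code` captured for a
# notebook-defined step.
cell_code = "def my_step() -> int:\n    return 42\n"

# SHA-1 here is used only as a stable content fingerprint, not for security.
code_hash = hashlib.sha1(cell_code.encode()).hexdigest()  # nosec
module_name = f"extracted_notebook_code_{code_hash}"
file_name = f"{module_name}.py"

print(file_name)  # extracted_notebook_code_<40-char-hex-digest>.py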
zenml/service_connectors/service_connector_utils.py (+14 -0)

@@ -368,6 +368,20 @@ def get_resources_options_from_resource_model_for_full_stack(
                     flavor_display_name="Skypilot (VM)",
                 )
             )
+            orchestrators.append(
+                _prepare_resource_info(
+                    connector_details=connector_details,
+                    resource_ids=each.resource_ids,
+                    stack_component_type=StackComponentType.ORCHESTRATOR,
+                    flavor="azureml",
+                    required_configuration={
+                        "subscription_id": "subscription ID",
+                        "resource_group": "resource group",
+                        "workspace": "workspace",
+                    },
+                    flavor_display_name="AzureML",
+                )
+            )
 
         if each.resource_type == "kubernetes-cluster":
             orchestrators.append(