zenml-nightly 0.62.0.dev20240729__py3-none-any.whl → 0.63.0.dev20240731__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- README.md +1 -1
- RELEASE_NOTES.md +41 -0
- zenml/VERSION +1 -1
- zenml/actions/pipeline_run/pipeline_run_action.py +19 -17
- zenml/analytics/enums.py +4 -0
- zenml/cli/__init__.py +28 -15
- zenml/cli/base.py +1 -1
- zenml/cli/pipeline.py +54 -61
- zenml/cli/stack.py +6 -8
- zenml/client.py +232 -99
- zenml/config/compiler.py +14 -22
- zenml/config/pipeline_run_configuration.py +3 -0
- zenml/config/server_config.py +3 -0
- zenml/config/source.py +2 -1
- zenml/constants.py +2 -0
- zenml/enums.py +3 -0
- zenml/integrations/aws/orchestrators/sagemaker_orchestrator.py +13 -4
- zenml/integrations/databricks/flavors/databricks_orchestrator_flavor.py +11 -2
- zenml/integrations/databricks/orchestrators/databricks_orchestrator.py +19 -13
- zenml/models/__init__.py +26 -10
- zenml/models/v2/base/filter.py +32 -0
- zenml/models/v2/core/pipeline.py +73 -89
- zenml/models/v2/core/pipeline_build.py +15 -11
- zenml/models/v2/core/pipeline_deployment.py +56 -0
- zenml/models/v2/core/pipeline_run.py +52 -1
- zenml/models/v2/core/run_template.py +393 -0
- zenml/models/v2/misc/stack_deployment.py +5 -0
- zenml/new/pipelines/build_utils.py +34 -58
- zenml/new/pipelines/pipeline.py +17 -76
- zenml/new/pipelines/run_utils.py +12 -0
- zenml/post_execution/pipeline.py +1 -4
- zenml/service_connectors/service_connector_utils.py +4 -2
- zenml/stack_deployments/aws_stack_deployment.py +6 -5
- zenml/stack_deployments/azure_stack_deployment.py +118 -11
- zenml/stack_deployments/gcp_stack_deployment.py +12 -5
- zenml/stack_deployments/stack_deployment.py +6 -5
- zenml/steps/utils.py +0 -4
- zenml/utils/package_utils.py +39 -0
- zenml/zen_server/dashboard/assets/{404-B_YdvmwS.js → 404-CI13wQp4.js} +1 -1
- zenml/zen_server/dashboard/assets/{@reactflow-l_1hUr1S.js → @reactflow-DIYUhKYX.js} +1 -1
- zenml/zen_server/dashboard/assets/{@tanstack-DYiOyJUL.js → @tanstack-k96lU_C-.js} +4 -4
- zenml/zen_server/dashboard/assets/{AwarenessChannel-CFg5iX4Z.js → AwarenessChannel-BNg5uWgI.js} +1 -1
- zenml/zen_server/dashboard/assets/{CodeSnippet-Dvkx_82E.js → CodeSnippet-Cyp7f4dM.js} +2 -2
- zenml/zen_server/dashboard/assets/CollapsibleCard-Cu_A9W57.js +1 -0
- zenml/zen_server/dashboard/assets/{Commands-DoN1xrEq.js → Commands-DmQwTXjj.js} +1 -1
- zenml/zen_server/dashboard/assets/{CopyButton-Cr7xYEPb.js → CopyButton-B3sWVJ4Z.js} +1 -1
- zenml/zen_server/dashboard/assets/{CsvVizualization-Ck-nZ43m.js → CsvVizualization-BvqItd-O.js} +1 -1
- zenml/zen_server/dashboard/assets/{Error-kLtljEOM.js → Error-DbXCTGua.js} +1 -1
- zenml/zen_server/dashboard/assets/{ExecutionStatus-DguLLgTK.js → ExecutionStatus-9zM7eaLh.js} +1 -1
- zenml/zen_server/dashboard/assets/{Helpbox-BXUMP21n.js → Helpbox-BIiNc-uH.js} +1 -1
- zenml/zen_server/dashboard/assets/{Infobox-DSt0O-dm.js → Infobox-iv1Nu1A0.js} +1 -1
- zenml/zen_server/dashboard/assets/{InlineAvatar-xsrsIGE-.js → InlineAvatar-BvBtO2Dp.js} +1 -1
- zenml/zen_server/dashboard/assets/ProviderRadio-pSAvrGRS.js +1 -0
- zenml/zen_server/dashboard/assets/SearchField-CXoBknpt.js +1 -0
- zenml/zen_server/dashboard/assets/{SetPassword-BXGTWiwj.js → SetPassword-BOxpgh6N.js} +1 -1
- zenml/zen_server/dashboard/assets/{SuccessStep-DZC60t0x.js → SuccessStep-CTSKN2lp.js} +1 -1
- zenml/zen_server/dashboard/assets/Tick-Bnr2TpW6.js +1 -0
- zenml/zen_server/dashboard/assets/{UpdatePasswordSchemas-DGvwFWO1.js → UpdatePasswordSchemas-BeCeaRW5.js} +1 -1
- zenml/zen_server/dashboard/assets/chevron-down-D_ZlKMqH.js +1 -0
- zenml/zen_server/dashboard/assets/{cloud-only-C_yFCAkP.js → cloud-only-qelmY92E.js} +1 -1
- zenml/zen_server/dashboard/assets/components-DWe4cTjS.js +1 -0
- zenml/zen_server/dashboard/assets/dots-horizontal-BObFzD5l.js +1 -0
- zenml/zen_server/dashboard/assets/{index-BczVOqUf.js → index-KsTz2dHG.js} +5 -5
- zenml/zen_server/dashboard/assets/index-vfjX_fJV.css +1 -0
- zenml/zen_server/dashboard/assets/index.esm-CbHNSeVw.js +1 -0
- zenml/zen_server/dashboard/assets/{login-mutation-CrHrndTI.js → login-mutation-DRpbESS7.js} +1 -1
- zenml/zen_server/dashboard/assets/{not-found-DYa4pC-C.js → not-found-Dfx9hfkf.js} +1 -1
- zenml/zen_server/dashboard/assets/package-ClbU3KUi.js +1 -0
- zenml/zen_server/dashboard/assets/{page-uA5prJGY.js → page-399pVZHU.js} +1 -1
- zenml/zen_server/dashboard/assets/{page-1h_sD1jz.js → page-BoFtUD9H.js} +1 -1
- zenml/zen_server/dashboard/assets/{page-BDns21Iz.js → page-Btu39x7k.js} +1 -1
- zenml/zen_server/dashboard/assets/{page-BnaevhnB.js → page-BxiWdeyg.js} +1 -1
- zenml/zen_server/dashboard/assets/{page-1iL8aMqs.js → page-C176KxyB.js} +1 -1
- zenml/zen_server/dashboard/assets/page-C6tXXjnK.js +1 -0
- zenml/zen_server/dashboard/assets/{page-BkeAAYwp.js → page-CDgZmwxP.js} +1 -1
- zenml/zen_server/dashboard/assets/page-CP9obrnG.js +1 -0
- zenml/zen_server/dashboard/assets/{page-C6-UGEbH.js → page-CZe9GEBF.js} +1 -1
- zenml/zen_server/dashboard/assets/page-CaTOsNNw.js +1 -0
- zenml/zen_server/dashboard/assets/{page-CCNRIt_f.js → page-Cjn97HMv.js} +1 -1
- zenml/zen_server/dashboard/assets/page-CmXmB_5i.js +1 -0
- zenml/zen_server/dashboard/assets/page-CvGAOfad.js +1 -0
- zenml/zen_server/dashboard/assets/page-CzucfYPo.js +2 -0
- zenml/zen_server/dashboard/assets/{page-Bi-wtWiO.js → page-D0bbc-qr.js} +1 -1
- zenml/zen_server/dashboard/assets/page-DLEtD2ex.js +1 -0
- zenml/zen_server/dashboard/assets/{page-BhgCDInH.js → page-DVPxY5fT.js} +1 -1
- zenml/zen_server/dashboard/assets/{page-BkuQDIf-.js → page-DYBNGxJt.js} +1 -1
- zenml/zen_server/dashboard/assets/{page-8a4UMKXZ.js → page-DtpwnNXq.js} +1 -1
- zenml/zen_server/dashboard/assets/{page-B6h3iaHJ.js → page-DupV0aBd.js} +1 -1
- zenml/zen_server/dashboard/assets/page-EweAR81y.js +1 -0
- zenml/zen_server/dashboard/assets/{page-MFQyIJd3.js → page-f3jBVI5Z.js} +1 -1
- zenml/zen_server/dashboard/assets/{page-2grKx_MY.js → page-p2hLJdS2.js} +1 -1
- zenml/zen_server/dashboard/assets/page-w-YaL77M.js +9 -0
- zenml/zen_server/dashboard/assets/persist-BReKApOc.js +14 -0
- zenml/zen_server/dashboard/assets/plus-DOeLmm7C.js +1 -0
- zenml/zen_server/dashboard/assets/{stack-detail-query-Cficsl6d.js → stack-detail-query-Ck7j7BP_.js} +1 -1
- zenml/zen_server/dashboard/assets/{update-server-settings-mutation-7d8xi1tS.js → update-server-settings-mutation-f3ZT7psb.js} +1 -1
- zenml/zen_server/dashboard/assets/{url-D7mAQGUM.js → url-rGEp5Umh.js} +1 -1
- zenml/zen_server/dashboard/assets/{zod-BhoGpZ63.js → zod-BtSyGx4C.js} +1 -1
- zenml/zen_server/dashboard/index.html +5 -5
- zenml/zen_server/dashboard_legacy/asset-manifest.json +4 -4
- zenml/zen_server/dashboard_legacy/index.html +1 -1
- zenml/zen_server/dashboard_legacy/{precache-manifest.12246c7548e71e2c4438e496360de80c.js → precache-manifest.2fa6e528a6e7447caaf35dadfe7514bb.js} +4 -4
- zenml/zen_server/dashboard_legacy/service-worker.js +1 -1
- zenml/zen_server/dashboard_legacy/static/js/{main.3b27024b.chunk.js → main.4aab7e98.chunk.js} +2 -2
- zenml/zen_server/dashboard_legacy/static/js/{main.3b27024b.chunk.js.map → main.4aab7e98.chunk.js.map} +1 -1
- zenml/zen_server/deploy/helm/Chart.yaml +1 -1
- zenml/zen_server/deploy/helm/README.md +2 -2
- zenml/zen_server/rbac/models.py +1 -0
- zenml/zen_server/rbac/utils.py +4 -0
- zenml/zen_server/routers/pipeline_builds_endpoints.py +2 -66
- zenml/zen_server/routers/pipeline_deployments_endpoints.py +2 -53
- zenml/zen_server/routers/pipelines_endpoints.py +1 -74
- zenml/zen_server/routers/run_templates_endpoints.py +212 -0
- zenml/zen_server/routers/workspaces_endpoints.py +79 -0
- zenml/zen_server/{pipeline_deployment → template_execution}/runner_entrypoint_configuration.py +1 -8
- zenml/zen_server/{pipeline_deployment → template_execution}/utils.py +214 -92
- zenml/zen_server/utils.py +2 -2
- zenml/zen_server/zen_server_api.py +2 -1
- zenml/zen_stores/migrations/versions/0.63.0_release.py +23 -0
- zenml/zen_stores/migrations/versions/7d1919bb1ef0_add_run_templates.py +100 -0
- zenml/zen_stores/migrations/versions/b59aa68fdb1f_simplify_pipelines.py +139 -0
- zenml/zen_stores/rest_zen_store.py +107 -36
- zenml/zen_stores/schemas/__init__.py +2 -0
- zenml/zen_stores/schemas/pipeline_build_schemas.py +3 -3
- zenml/zen_stores/schemas/pipeline_deployment_schemas.py +29 -2
- zenml/zen_stores/schemas/pipeline_run_schemas.py +26 -3
- zenml/zen_stores/schemas/pipeline_schemas.py +29 -30
- zenml/zen_stores/schemas/run_template_schemas.py +264 -0
- zenml/zen_stores/schemas/step_run_schemas.py +11 -4
- zenml/zen_stores/sql_zen_store.py +364 -150
- zenml/zen_stores/template_utils.py +261 -0
- zenml/zen_stores/zen_store_interface.py +93 -20
- {zenml_nightly-0.62.0.dev20240729.dist-info → zenml_nightly-0.63.0.dev20240731.dist-info}/METADATA +2 -2
- {zenml_nightly-0.62.0.dev20240729.dist-info → zenml_nightly-0.63.0.dev20240731.dist-info}/RECORD +139 -129
- zenml/models/v2/core/pipeline_namespace.py +0 -113
- zenml/new/pipelines/deserialization_utils.py +0 -292
- zenml/zen_server/dashboard/assets/CollapsibleCard-opiuBHHc.js +0 -1
- zenml/zen_server/dashboard/assets/Pagination-C6X-mifw.js +0 -1
- zenml/zen_server/dashboard/assets/index-EpMIKgrI.css +0 -1
- zenml/zen_server/dashboard/assets/index-rK_Wuy2W.js +0 -1
- zenml/zen_server/dashboard/assets/index.esm-Corw4lXQ.js +0 -1
- zenml/zen_server/dashboard/assets/package-B3fWP-Dh.js +0 -1
- zenml/zen_server/dashboard/assets/page-5NCOHOsy.js +0 -1
- zenml/zen_server/dashboard/assets/page-Bq0YxkLV.js +0 -1
- zenml/zen_server/dashboard/assets/page-Bs2F4eoD.js +0 -2
- zenml/zen_server/dashboard/assets/page-CHNxpz3n.js +0 -1
- zenml/zen_server/dashboard/assets/page-DgorQFqi.js +0 -1
- zenml/zen_server/dashboard/assets/page-K8ebxVIs.js +0 -1
- zenml/zen_server/dashboard/assets/page-TgCF0P_U.js +0 -1
- zenml/zen_server/dashboard/assets/page-ZnCEe-eK.js +0 -9
- zenml/zen_server/dashboard/assets/persist-D7HJNBWx.js +0 -1
- zenml/zen_server/dashboard/assets/plus-C8WOyCzt.js +0 -1
- /zenml/zen_server/{pipeline_deployment → template_execution}/__init__.py +0 -0
- /zenml/zen_server/{pipeline_deployment → template_execution}/workload_manager_interface.py +0 -0
- {zenml_nightly-0.62.0.dev20240729.dist-info → zenml_nightly-0.63.0.dev20240731.dist-info}/LICENSE +0 -0
- {zenml_nightly-0.62.0.dev20240729.dist-info → zenml_nightly-0.63.0.dev20240731.dist-info}/WHEEL +0 -0
- {zenml_nightly-0.62.0.dev20240729.dist-info → zenml_nightly-0.63.0.dev20240731.dist-info}/entry_points.txt +0 -0
zenml/new/pipelines/build_utils.py
CHANGED
@@ -15,7 +15,6 @@
 
 import hashlib
 import platform
-from pathlib import Path
 from typing import (
     TYPE_CHECKING,
     Dict,
@@ -31,12 +30,11 @@ from zenml.code_repositories import BaseCodeRepository
 from zenml.logger import get_logger
 from zenml.models import (
     BuildItem,
-    CodeReferenceRequest,
     PipelineBuildBase,
     PipelineBuildRequest,
     PipelineBuildResponse,
     PipelineDeploymentBase,
-    PipelineDeploymentRequest,
+    StackResponse,
 )
 from zenml.stack import Stack
 from zenml.utils import (
@@ -53,53 +51,6 @@ if TYPE_CHECKING:
 logger = get_logger(__name__)
 
 
-def _create_deployment(
-    deployment: "PipelineDeploymentBase",
-    pipeline_id: Optional[UUID] = None,
-    code_repository: Optional["BaseCodeRepository"] = None,
-) -> UUID:
-    """Creates a deployment in the ZenStore.
-
-    Args:
-        deployment: Base of the deployment to create.
-        pipeline_id: Pipeline ID to use for the deployment.
-        code_repository: Code repository to use for the deployment.
-
-    Returns:
-        The ID of the deployment.
-    """
-    source_root = source_utils.get_source_root()
-
-    code_reference = None
-    local_repo_context = (
-        code_repository.get_local_context(source_root)
-        if code_repository
-        else None
-    )
-    if local_repo_context and not local_repo_context.is_dirty:
-        subdirectory = (
-            Path(source_root).resolve().relative_to(local_repo_context.root)
-        )
-
-        code_reference = CodeReferenceRequest(
-            commit=local_repo_context.current_commit,
-            subdirectory=subdirectory.as_posix(),
-            code_repository=local_repo_context.code_repository_id,
-        )
-
-    deployment_request = PipelineDeploymentRequest(
-        user=Client().active_user.id,
-        workspace=Client().active_workspace.id,
-        stack=Client().active_stack.id,
-        pipeline=pipeline_id,
-        code_reference=code_reference,
-        **deployment.model_dump(),
-    )
-    return (
-        Client().zen_store.create_deployment(deployment=deployment_request).id
-    )
-
-
 def build_required(deployment: "PipelineDeploymentBase") -> bool:
     """Checks whether a build is required for the deployment and active stack.
 
@@ -269,6 +220,7 @@ def create_pipeline_build(
         settings were specified.
     """
     client = Client()
+    stack_model = Client().active_stack_model
     stack = client.active_stack
     required_builds = stack.get_docker_builds(deployment=deployment)
 
@@ -362,16 +314,11 @@ def create_pipeline_build(
     build_checksum = compute_build_checksum(
         required_builds, stack=stack, code_repository=code_repository
     )
-
-        deployment=deployment,
-        pipeline_id=pipeline_id,
-        code_repository=code_repository,
-    )
-
+    stack_checksum = compute_stack_checksum(stack=stack_model)
     build_request = PipelineBuildRequest(
         user=client.active_user.id,
         workspace=client.active_workspace.id,
-        stack=
+        stack=stack_model.id,
         pipeline=pipeline_id,
         is_local=is_local,
         contains_code=contains_code,
@@ -379,7 +326,7 @@ def create_pipeline_build(
         zenml_version=zenml.__version__,
         python_version=platform.python_version(),
         checksum=build_checksum,
-
+        stack_checksum=stack_checksum,
     )
     return client.zen_store.create_build(build_request)
 
@@ -585,3 +532,32 @@ def verify_custom_build(
         "your local machine or the image tags have been "
         "overwritten since the original build happened."
     )
+
+
+def compute_stack_checksum(stack: StackResponse) -> str:
+    """Compute a stack checksum.
+
+    Args:
+        stack: The stack for which to compute the checksum.
+
+    Returns:
+        The checksum.
+    """
+    hash_ = hashlib.md5()  # nosec
+
+    # This checksum is used to see if the stack has been updated since a build
+    # was created for it. We create this checksum not with specific requirements
+    # as these might change with new ZenML releases, but they don't actually
+    # invalidate those Docker images.
+    required_integrations = sorted(
+        {
+            component.integration
+            for components in stack.components.values()
+            for component in components
+            if component.integration and component.integration != "built-in"
+        }
+    )
+    for integration in required_integrations:
+        hash_.update(integration.encode())
+
+    return hash_.hexdigest()
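For context, the new `compute_stack_checksum` above derives a deterministic fingerprint from the set of integrations used by a stack's components. A minimal standalone sketch of the same idea; the `FakeComponent` type and the integration names are stand-ins for illustration, not ZenML's `StackResponse`:

```python
import hashlib
from dataclasses import dataclass
from typing import Dict, List, Optional


@dataclass
class FakeComponent:
    # Stand-in for a stack component response; only the integration name matters here.
    integration: Optional[str]


def stack_checksum(components: Dict[str, List[FakeComponent]]) -> str:
    """Hash the sorted set of non-built-in integration names."""
    hash_ = hashlib.md5()  # nosec - not used for security
    integrations = sorted(
        {
            c.integration
            for cs in components.values()
            for c in cs
            if c.integration and c.integration != "built-in"
        }
    )
    for name in integrations:
        hash_.update(name.encode())
    return hash_.hexdigest()


# The checksum depends only on which integrations are present, not on component order.
a = {"orchestrator": [FakeComponent("aws")], "artifact_store": [FakeComponent("s3")]}
b = {"artifact_store": [FakeComponent("s3")], "orchestrator": [FakeComponent("aws")]}
assert stack_checksum(a) == stack_checksum(b)
```

This mirrors the comment in the diff: the checksum intentionally ignores exact package requirements so that new ZenML releases do not needlessly invalidate existing Docker builds.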
zenml/new/pipelines/pipeline.py
CHANGED
@@ -241,20 +241,6 @@ class Pipeline:
         """
         return inspect.getsource(self.source_object)
 
-    @classmethod
-    def from_model(cls, model: "PipelineResponse") -> "Pipeline":
-        """Creates a pipeline instance from a model.
-
-        Args:
-            model: The model to load the pipeline instance from.
-
-        Returns:
-            The pipeline instance.
-        """
-        from zenml.new.pipelines.deserialization_utils import load_pipeline
-
-        return load_pipeline(model=model)
-
     @property
     def model(self) -> "PipelineResponse":
         """Gets the registered pipeline model for this instance.
@@ -267,14 +253,7 @@ class Pipeline:
         """
         self._prepare_if_possible()
 
-
-        version_hash = self._compute_unique_identifier(
-            pipeline_spec=pipeline_spec
-        )
-
-        pipelines = Client().list_pipelines(
-            name=self.name, version_hash=version_hash
-        )
+        pipelines = Client().list_pipelines(name=self.name)
         if len(pipelines) == 1:
             return pipelines.items[0]
 
@@ -513,8 +492,7 @@ To avoid this consider setting pipeline parameters only in one place (config or
                 "pipeline build/run` commands."
             )
 
-
-        return self._register(pipeline_spec=pipeline_spec)
+        return self._register()
 
     def build(
         self,
@@ -541,12 +519,12 @@ To avoid this consider setting pipeline parameters only in one place (config or
         """
         with track_handler(event=AnalyticsEvent.BUILD_PIPELINE):
             self._prepare_if_possible()
-            deployment,
+            deployment, _, _ = self._compile(
                 config_path=config_path,
                 steps=step_configurations,
                 settings=settings,
             )
-            pipeline_id = self._register(
+            pipeline_id = self._register().id
 
             local_repo = code_repository_utils.find_active_code_repository()
             code_repository = build_utils.verify_local_repository_context(
@@ -623,7 +601,7 @@ To avoid this consider setting pipeline parameters only in one place (config or
         logger.info(f"Initiating a new run for the pipeline: `{self.name}`.")
 
         with track_handler(AnalyticsEvent.RUN_PIPELINE) as analytics_handler:
-            deployment,
+            deployment, schedule, build = self._compile(
                 config_path=config_path,
                 run_name=run_name,
                 enable_cache=enable_cache,
@@ -646,7 +624,7 @@ To avoid this consider setting pipeline parameters only in one place (config or
 
             pipeline_id = None
             if register_pipeline:
-                pipeline_id = self._register(
+                pipeline_id = self._register().id
 
             else:
                 logger.debug(f"Pipeline {self.name} is unlisted.")
@@ -987,7 +965,6 @@ To avoid this consider setting pipeline parameters only in one place (config or
         self, config_path: Optional[str] = None, **run_configuration_args: Any
     ) -> Tuple[
         "PipelineDeploymentBase",
-        "PipelineSpec",
         Optional["Schedule"],
         Union["PipelineBuildBase", UUID, None],
     ]:
@@ -998,7 +975,7 @@ To avoid this consider setting pipeline parameters only in one place (config or
             **run_configuration_args: Configurations for the pipeline run.
 
         Returns:
-            A tuple containing the deployment,
+            A tuple containing the deployment, schedule and build of
                 the compiled pipeline.
         """
         # Activating the built-in integrations to load all materializers
@@ -1019,61 +996,41 @@ To avoid this consider setting pipeline parameters only in one place (config or
         # Update with the values in code so they take precedence
         run_config = pydantic_utils.update_model(run_config, update=update)
 
-        deployment
+        deployment = Compiler().compile(
             pipeline=self,
             stack=Client().active_stack,
             run_configuration=run_config,
         )
 
-        return deployment,
+        return deployment, run_config.schedule, run_config.build
 
-    def _register(self
+    def _register(self) -> "PipelineResponse":
         """Register the pipeline in the server.
 
-        Args:
-            pipeline_spec: The pipeline spec to register.
-
         Returns:
             The registered pipeline model.
         """
+        client = Client()
 
-        def _get(
-            client = Client()
-
+        def _get() -> PipelineResponse:
            matching_pipelines = client.list_pipelines(
                name=self.name,
-                version_hash=version_hash,
                size=1,
                sort_by="desc:created",
            )
+
            if matching_pipelines.total:
                registered_pipeline = matching_pipelines.items[0]
-                logger.info(
-                    "Reusing registered pipeline version: `(version: %s)`.",
-                    registered_pipeline.version,
-                )
                return registered_pipeline
            raise RuntimeError("No matching pipelines found.")
 
-        version_hash = self._compute_unique_identifier(
-            pipeline_spec=pipeline_spec
-        )
-
-        client = Client()
        try:
-            return _get(
+            return _get()
        except RuntimeError:
-            latest_version = self._get_latest_version() or 0
-            version = str(latest_version + 1)
-
            request = PipelineRequest(
                workspace=client.active_workspace.id,
                user=client.active_user.id,
                name=self.name,
-                version=version,
-                version_hash=version_hash,
-                spec=pipeline_spec,
-                docstring=self.__doc__,
            )
 
            try:
@@ -1081,12 +1038,12 @@ To avoid this consider setting pipeline parameters only in one place (config or
                    pipeline=request
                )
                logger.info(
-                    "Registered new
-                    registered_pipeline.
+                    "Registered new pipeline: `%s`.",
+                    registered_pipeline.name,
                )
                return registered_pipeline
            except EntityExistsError:
-                return _get(
+                return _get()
 
     def _compute_unique_identifier(self, pipeline_spec: PipelineSpec) -> str:
         """Computes a unique identifier from the pipeline spec and steps.
@@ -1113,22 +1070,6 @@ To avoid this consider setting pipeline parameters only in one place (config or
 
         return hash_.hexdigest()
 
-    def _get_latest_version(self) -> Optional[int]:
-        """Gets the latest version of this pipeline.
-
-        Returns:
-            The latest version or `None` if no version exists.
-        """
-        all_pipelines = Client().list_pipelines(
-            name=self.name, sort_by="desc:created", size=1
-        )
-        if not all_pipelines.total:
-            return None
-        pipeline = all_pipelines.items[0]
-        if pipeline.version == "UNVERSIONED":
-            return None
-        return int(all_pipelines.items[0].version)
-
     def add_step_invocation(
         self,
         step: "BaseStep",
zenml/new/pipelines/run_utils.py
CHANGED
@@ -36,6 +36,18 @@ if TYPE_CHECKING:
 logger = get_logger(__name__)
 
 
+def get_default_run_name(pipeline_name: str) -> str:
+    """Gets the default name for a pipeline run.
+
+    Args:
+        pipeline_name: Name of the pipeline which will be run.
+
+    Returns:
+        Run name.
+    """
+    return f"{pipeline_name}-{{date}}-{{time}}"
+
+
 def create_placeholder_run(
     deployment: "PipelineDeploymentResponse",
 ) -> Optional["PipelineRunResponse"]:
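Note that `get_default_run_name` above interpolates only `pipeline_name`; `{date}` and `{time}` are returned as literal placeholders to be substituted later when the run is created. A minimal sketch of that substitution using plain `str.format` (the actual substitution code and date/time formats are not part of this diff and are assumptions here):

```python
from datetime import datetime, timezone

from zenml.new.pipelines.run_utils import get_default_run_name

run_name_template = get_default_run_name("training_pipeline")
# -> "training_pipeline-{date}-{time}"

now = datetime.now(timezone.utc)
run_name = run_name_template.format(
    date=now.strftime("%Y_%m_%d"),      # assumed format
    time=now.strftime("%H_%M_%S"),      # assumed format
)
# e.g. "training_pipeline-2024_07_31-09_30_00"
```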
zenml/post_execution/pipeline.py
CHANGED
@@ -38,14 +38,11 @@ def get_pipelines() -> List["PipelineResponse"]:
 
 def get_pipeline(
     pipeline: str,
-    version: Optional[str] = None,
 ) -> Optional["PipelineResponse"]:
     """(Deprecated) Fetches a pipeline model.
 
     Args:
         pipeline: The name of the pipeline.
-        version: Optional pipeline version. Specifies the version of the
-            pipeline to return. If not given, returns the latest version.
 
     Returns:
         The pipeline model.
@@ -55,4 +52,4 @@ def get_pipeline(
         "removed in a future release. Please use "
         "`zenml.client.Client().get_pipeline()` instead."
     )
-    return Client().get_pipeline(name_id_or_prefix=pipeline
+    return Client().get_pipeline(name_id_or_prefix=pipeline)
zenml/service_connectors/service_connector_utils.py
CHANGED
@@ -290,7 +290,8 @@ def get_resources_options_from_resource_model_for_full_stack(
                     resource_ids=each.resource_ids,
                     stack_component_type=StackComponentType.ARTIFACT_STORE,
                     flavor="gcp",
-                    required_configuration={},
+                    required_configuration={"path": "Path"},
+                    use_resource_value_as_fixed_config=True,
                     flavor_display_name="GCS Bucket",
                 )
             )
@@ -350,7 +351,8 @@ def get_resources_options_from_resource_model_for_full_stack(
                     resource_ids=each.resource_ids,
                     stack_component_type=StackComponentType.ARTIFACT_STORE,
                     flavor="azure",
-                    required_configuration={},
+                    required_configuration={"path": "Path"},
+                    use_resource_value_as_fixed_config=True,
                     flavor_display_name="Blob container",
                 )
             )
zenml/stack_deployments/aws_stack_deployment.py
CHANGED
@@ -218,15 +218,16 @@ console.
           URL query parameters as possible.
         * a textual description of the URL
         * some deployment providers may require additional configuration
-          parameters to be passed to the cloud provider in addition to
-          deployment URL query parameters. Where that is the case, this method
+          parameters or scripts to be passed to the cloud provider in addition to
+          the deployment URL query parameters. Where that is the case, this method
           should also return a string that the user can copy and paste into the
           cloud provider console to deploy the ZenML stack (e.g. a set of
-          environment variables,
+          environment variables, YAML configuration snippet, bash or Terraform
+          script etc.).
 
         Returns:
-            The configuration to deploy the ZenML stack to the
-            provider.
+            The configuration or script to deploy the ZenML stack to the
+            specified cloud provider.
         """
         params = dict(
             stackName=self.stack_name,
zenml/stack_deployments/azure_stack_deployment.py
CHANGED
@@ -17,15 +17,15 @@ import re
 from typing import ClassVar, Dict, List
 
 from zenml.enums import StackDeploymentProvider
+from zenml.models import StackDeploymentConfig
 from zenml.stack_deployments.stack_deployment import ZenMLCloudStackDeployment
 
 
-# TODO: this class just implements the regions list, and is not suitable for other
-# deployment tasks.
 class AZUREZenMLCloudStackDeployment(ZenMLCloudStackDeployment):
     """Azure ZenML Cloud Stack Deployment."""
 
     provider: ClassVar[StackDeploymentProvider] = StackDeploymentProvider.AZURE
+    deployment: ClassVar[str] = "azure-cloud-shell"
 
     @classmethod
     def description(cls) -> str:
@@ -37,8 +37,11 @@ class AZUREZenMLCloudStackDeployment(ZenMLCloudStackDeployment):
         Returns:
             A MarkDown description of the ZenML Cloud Stack Deployment.
         """
-
-
+        return """
+Provision and register a basic Azure ZenML stack authenticated and connected to
+all the necessary cloud infrastructure resources required to run pipelines in
+Azure.
+"""
 
     @classmethod
     def instructions(cls) -> str:
@@ -51,8 +54,44 @@ class AZUREZenMLCloudStackDeployment(ZenMLCloudStackDeployment):
             MarkDown instructions on how to deploy the ZenML stack to the
             specified cloud provider.
         """
-
-
+        return """
+You will be redirected to an Azure Cloud Shell console in your browser where
+you'll be asked to log into your Azure project and then use
+[the Azure ZenML Stack Terraform module](https://registry.terraform.io/modules/zenml-io/zenml-stack/azure)
+to provision the necessary cloud resources for ZenML.
+
+**NOTE**: The Azure ZenML Stack Terraform module will create the following new
+resources in your Azure subscription. Please ensure you have the necessary
+permissions and are aware of any potential costs:
+
+- An Azure Resource Group to contain all the resources required for the ZenML stack
+- An Azure Storage Account and Blob Storage Container registered as a [ZenML artifact store](https://docs.zenml.io/stack-components/artifact-stores/azure).
+- An Azure Container Registry registered as a [ZenML container registry](https://docs.zenml.io/stack-components/container-registries/azure).
+- SkyPilot will be registered as a [ZenML orchestrator](https://docs.zenml.io/stack-components/orchestrators/skypilot-vm) and used to run pipelines in your Azure subscription.
+- An Azure Service Principal with the minimum necessary permissions to access
+the above resources.
+- An Azure Service Principal client secret used to give access to ZenML to
+connect to the above resources through a [ZenML service connector](https://docs.zenml.io/how-to/auth-management/azure-service-connector).
+
+The Azure ZenML Stack Terraform module will automatically create an Azure
+Service Principal client secret and will share it with ZenML to give it
+permission to access the resources created by the stack. You can revoke these
+permissions at any time by deleting the Service Principal in your Azure
+subscription.
+
+**Estimated costs**
+
+A small training job would cost around: $0.60
+
+These are rough estimates and actual costs may vary based on your usage and specific Azure pricing.
+Some services may be eligible for the Azure Free Tier. Use [the Azure Pricing Calculator](https://azure.microsoft.com/en-us/pricing/calculator)
+for a detailed estimate based on your usage.
+
+
+💡 **After the Terraform deployment is complete, you can close the Cloud
+Shell session and return to the CLI to view details about the associated ZenML
+stack automatically registered with ZenML.**
+"""
 
     @classmethod
     def post_deploy_instructions(cls) -> str:
@@ -64,8 +103,11 @@ class AZUREZenMLCloudStackDeployment(ZenMLCloudStackDeployment):
             MarkDown instructions on what to do after the deployment is
             complete.
         """
-
-
+        return """
+The ZenML stack has been successfully deployed and registered. You can delete
+the provisioned Service Principal and Resource Group at any time to revoke
+ZenML's access to your Azure subscription.
+"""
 
     @classmethod
     def integrations(cls) -> List[str]:
@@ -75,7 +117,7 @@ class AZUREZenMLCloudStackDeployment(ZenMLCloudStackDeployment):
             The list of ZenML integrations that need to be installed for the
             stack to be usable.
         """
-        return ["azure"]
+        return ["azure", "skypilot_azure"]
 
     @classmethod
     def permissions(cls) -> Dict[str, List[str]]:
@@ -85,8 +127,19 @@ class AZUREZenMLCloudStackDeployment(ZenMLCloudStackDeployment):
             The permissions granted to ZenML to access the cloud resources, as
             a dictionary grouping permissions by resource.
         """
-
-
+        return {
+            "Storage Account": [
+                "Storage Blob Data Contributor",
+            ],
+            "Container Registry": [
+                "AcrPull",
+                "AcrPush",
+                "Contributor",
+            ],
+            "Subscription": [
+                "Owner (required by SkyPilot)",
+            ],
+        }
 
     @classmethod
     def locations(cls) -> Dict[str, str]:
@@ -177,3 +230,57 @@ class AZUREZenMLCloudStackDeployment(ZenMLCloudStackDeployment):
             for k, v in cls.locations().items()
             if "(US)" in k and matcher.match(v)
         }
+
+    def get_deployment_config(
+        self,
+    ) -> StackDeploymentConfig:
+        """Return the configuration to deploy the ZenML stack to the specified cloud provider.
+
+        The configuration should include:
+
+        * a cloud provider console URL where the user will be redirected to
+          deploy the ZenML stack. The URL should include as many pre-filled
+          URL query parameters as possible.
+        * a textual description of the URL
+        * some deployment providers may require additional configuration
+          parameters or scripts to be passed to the cloud provider in addition to
+          the deployment URL query parameters. Where that is the case, this method
+          should also return a string that the user can copy and paste into the
+          cloud provider console to deploy the ZenML stack (e.g. a set of
+          environment variables, YAML configuration snippet, bash or Terraform
+          script etc.).
+
+        Returns:
+            The configuration or script to deploy the ZenML stack to the
+            specified cloud provider.
+        """
+        config = f"""module "zenml_stack" {{
+    source = "zenml-io/zenml-stack/azure"
+
+    location = "{self.location or "eastus"}"
+    zenml_server_url = "{self.zenml_server_url}"
+    zenml_api_key = ""
+    zenml_api_token = "{self.zenml_server_api_token}"
+    zenml_stack_name = "{self.stack_name}"
+    zenml_stack_deployment = "{self.deployment}"
+}}
+output "zenml_stack_id" {{
+    value = module.zenml_stack.zenml_stack_id
+}}
+output "zenml_stack_name" {{
+    value = module.zenml_stack.zenml_stack_name
+}}"""
+        instructions = """
+1. The Azure Cloud Shell console will open in your browser.
+2. Create a file named `main.tf` in the Cloud Shell and copy and paste the
+Terraform configuration below into it.
+3. Run `terraform init --upgrade` to initialize the Terraform configuration.
+4. Run `terraform apply` to deploy the ZenML stack to Azure.
+"""
+
+        return StackDeploymentConfig(
+            deployment_url="https://shell.azure.com",
+            deployment_url_text="Azure Cloud Shell Console",
+            configuration=config,
+            instructions=instructions,
+        )
zenml/stack_deployments/gcp_stack_deployment.py
CHANGED
@@ -231,15 +231,16 @@ GCP project and to clean up the resources created by the stack by using
           URL query parameters as possible.
         * a textual description of the URL
         * some deployment providers may require additional configuration
-          parameters to be passed to the cloud provider in addition to
-          deployment URL query parameters. Where that is the case, this method
+          parameters or scripts to be passed to the cloud provider in addition to
+          the deployment URL query parameters. Where that is the case, this method
           should also return a string that the user can copy and paste into the
           cloud provider console to deploy the ZenML stack (e.g. a set of
-          environment variables,
+          environment variables, YAML configuration snippet, bash or Terraform
+          script etc.).
 
         Returns:
-            The configuration to deploy the ZenML stack to the
-            provider.
+            The configuration or script to deploy the ZenML stack to the
+            specified cloud provider.
         """
         params = dict(
             cloudshell_git_repo="https://github.com/zenml-io/zenml",
@@ -262,8 +263,14 @@ ZENML_SERVER_URL={self.zenml_server_url}
 ZENML_SERVER_API_TOKEN={self.zenml_server_api_token}
 ### END CONFIGURATION ###"""
 
+        instructions = (
+            "You will be asked to provide the following configuration values "
+            "during the deployment process:"
+        )
+
         return StackDeploymentConfig(
             deployment_url=url,
             deployment_url_text="GCP Cloud Shell Console",
             configuration=config,
+            instructions=instructions,
         )
zenml/stack_deployments/stack_deployment.py
CHANGED
@@ -145,15 +145,16 @@ class ZenMLCloudStackDeployment(BaseModel):
           URL query parameters as possible.
         * a textual description of the URL
         * some deployment providers may require additional configuration
-          parameters to be passed to the cloud provider in addition to
-          deployment URL query parameters. Where that is the case, this method
+          parameters or scripts to be passed to the cloud provider in addition to
+          the deployment URL query parameters. Where that is the case, this method
           should also return a string that the user can copy and paste into the
           cloud provider console to deploy the ZenML stack (e.g. a set of
-          environment variables,
+          environment variables, YAML configuration snippet, bash or Terraform
+          script etc.).
 
         Returns:
-            The configuration to deploy the ZenML stack to the
-            provider.
+            The configuration or script to deploy the ZenML stack to the
+            specified cloud provider.
         """
 
     def get_stack(