zenml-nightly 0.62.0.dev20240729__py3-none-any.whl → 0.63.0.dev20240731__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- README.md +1 -1
- RELEASE_NOTES.md +41 -0
- zenml/VERSION +1 -1
- zenml/actions/pipeline_run/pipeline_run_action.py +19 -17
- zenml/analytics/enums.py +4 -0
- zenml/cli/__init__.py +28 -15
- zenml/cli/base.py +1 -1
- zenml/cli/pipeline.py +54 -61
- zenml/cli/stack.py +6 -8
- zenml/client.py +232 -99
- zenml/config/compiler.py +14 -22
- zenml/config/pipeline_run_configuration.py +3 -0
- zenml/config/server_config.py +3 -0
- zenml/config/source.py +2 -1
- zenml/constants.py +2 -0
- zenml/enums.py +3 -0
- zenml/integrations/aws/orchestrators/sagemaker_orchestrator.py +13 -4
- zenml/integrations/databricks/flavors/databricks_orchestrator_flavor.py +11 -2
- zenml/integrations/databricks/orchestrators/databricks_orchestrator.py +19 -13
- zenml/models/__init__.py +26 -10
- zenml/models/v2/base/filter.py +32 -0
- zenml/models/v2/core/pipeline.py +73 -89
- zenml/models/v2/core/pipeline_build.py +15 -11
- zenml/models/v2/core/pipeline_deployment.py +56 -0
- zenml/models/v2/core/pipeline_run.py +52 -1
- zenml/models/v2/core/run_template.py +393 -0
- zenml/models/v2/misc/stack_deployment.py +5 -0
- zenml/new/pipelines/build_utils.py +34 -58
- zenml/new/pipelines/pipeline.py +17 -76
- zenml/new/pipelines/run_utils.py +12 -0
- zenml/post_execution/pipeline.py +1 -4
- zenml/service_connectors/service_connector_utils.py +4 -2
- zenml/stack_deployments/aws_stack_deployment.py +6 -5
- zenml/stack_deployments/azure_stack_deployment.py +118 -11
- zenml/stack_deployments/gcp_stack_deployment.py +12 -5
- zenml/stack_deployments/stack_deployment.py +6 -5
- zenml/steps/utils.py +0 -4
- zenml/utils/package_utils.py +39 -0
- zenml/zen_server/dashboard/assets/{404-B_YdvmwS.js → 404-CI13wQp4.js} +1 -1
- zenml/zen_server/dashboard/assets/{@reactflow-l_1hUr1S.js → @reactflow-DIYUhKYX.js} +1 -1
- zenml/zen_server/dashboard/assets/{@tanstack-DYiOyJUL.js → @tanstack-k96lU_C-.js} +4 -4
- zenml/zen_server/dashboard/assets/{AwarenessChannel-CFg5iX4Z.js → AwarenessChannel-BNg5uWgI.js} +1 -1
- zenml/zen_server/dashboard/assets/{CodeSnippet-Dvkx_82E.js → CodeSnippet-Cyp7f4dM.js} +2 -2
- zenml/zen_server/dashboard/assets/CollapsibleCard-Cu_A9W57.js +1 -0
- zenml/zen_server/dashboard/assets/{Commands-DoN1xrEq.js → Commands-DmQwTXjj.js} +1 -1
- zenml/zen_server/dashboard/assets/{CopyButton-Cr7xYEPb.js → CopyButton-B3sWVJ4Z.js} +1 -1
- zenml/zen_server/dashboard/assets/{CsvVizualization-Ck-nZ43m.js → CsvVizualization-BvqItd-O.js} +1 -1
- zenml/zen_server/dashboard/assets/{Error-kLtljEOM.js → Error-DbXCTGua.js} +1 -1
- zenml/zen_server/dashboard/assets/{ExecutionStatus-DguLLgTK.js → ExecutionStatus-9zM7eaLh.js} +1 -1
- zenml/zen_server/dashboard/assets/{Helpbox-BXUMP21n.js → Helpbox-BIiNc-uH.js} +1 -1
- zenml/zen_server/dashboard/assets/{Infobox-DSt0O-dm.js → Infobox-iv1Nu1A0.js} +1 -1
- zenml/zen_server/dashboard/assets/{InlineAvatar-xsrsIGE-.js → InlineAvatar-BvBtO2Dp.js} +1 -1
- zenml/zen_server/dashboard/assets/ProviderRadio-pSAvrGRS.js +1 -0
- zenml/zen_server/dashboard/assets/SearchField-CXoBknpt.js +1 -0
- zenml/zen_server/dashboard/assets/{SetPassword-BXGTWiwj.js → SetPassword-BOxpgh6N.js} +1 -1
- zenml/zen_server/dashboard/assets/{SuccessStep-DZC60t0x.js → SuccessStep-CTSKN2lp.js} +1 -1
- zenml/zen_server/dashboard/assets/Tick-Bnr2TpW6.js +1 -0
- zenml/zen_server/dashboard/assets/{UpdatePasswordSchemas-DGvwFWO1.js → UpdatePasswordSchemas-BeCeaRW5.js} +1 -1
- zenml/zen_server/dashboard/assets/chevron-down-D_ZlKMqH.js +1 -0
- zenml/zen_server/dashboard/assets/{cloud-only-C_yFCAkP.js → cloud-only-qelmY92E.js} +1 -1
- zenml/zen_server/dashboard/assets/components-DWe4cTjS.js +1 -0
- zenml/zen_server/dashboard/assets/dots-horizontal-BObFzD5l.js +1 -0
- zenml/zen_server/dashboard/assets/{index-BczVOqUf.js → index-KsTz2dHG.js} +5 -5
- zenml/zen_server/dashboard/assets/index-vfjX_fJV.css +1 -0
- zenml/zen_server/dashboard/assets/index.esm-CbHNSeVw.js +1 -0
- zenml/zen_server/dashboard/assets/{login-mutation-CrHrndTI.js → login-mutation-DRpbESS7.js} +1 -1
- zenml/zen_server/dashboard/assets/{not-found-DYa4pC-C.js → not-found-Dfx9hfkf.js} +1 -1
- zenml/zen_server/dashboard/assets/package-ClbU3KUi.js +1 -0
- zenml/zen_server/dashboard/assets/{page-uA5prJGY.js → page-399pVZHU.js} +1 -1
- zenml/zen_server/dashboard/assets/{page-1h_sD1jz.js → page-BoFtUD9H.js} +1 -1
- zenml/zen_server/dashboard/assets/{page-BDns21Iz.js → page-Btu39x7k.js} +1 -1
- zenml/zen_server/dashboard/assets/{page-BnaevhnB.js → page-BxiWdeyg.js} +1 -1
- zenml/zen_server/dashboard/assets/{page-1iL8aMqs.js → page-C176KxyB.js} +1 -1
- zenml/zen_server/dashboard/assets/page-C6tXXjnK.js +1 -0
- zenml/zen_server/dashboard/assets/{page-BkeAAYwp.js → page-CDgZmwxP.js} +1 -1
- zenml/zen_server/dashboard/assets/page-CP9obrnG.js +1 -0
- zenml/zen_server/dashboard/assets/{page-C6-UGEbH.js → page-CZe9GEBF.js} +1 -1
- zenml/zen_server/dashboard/assets/page-CaTOsNNw.js +1 -0
- zenml/zen_server/dashboard/assets/{page-CCNRIt_f.js → page-Cjn97HMv.js} +1 -1
- zenml/zen_server/dashboard/assets/page-CmXmB_5i.js +1 -0
- zenml/zen_server/dashboard/assets/page-CvGAOfad.js +1 -0
- zenml/zen_server/dashboard/assets/page-CzucfYPo.js +2 -0
- zenml/zen_server/dashboard/assets/{page-Bi-wtWiO.js → page-D0bbc-qr.js} +1 -1
- zenml/zen_server/dashboard/assets/page-DLEtD2ex.js +1 -0
- zenml/zen_server/dashboard/assets/{page-BhgCDInH.js → page-DVPxY5fT.js} +1 -1
- zenml/zen_server/dashboard/assets/{page-BkuQDIf-.js → page-DYBNGxJt.js} +1 -1
- zenml/zen_server/dashboard/assets/{page-8a4UMKXZ.js → page-DtpwnNXq.js} +1 -1
- zenml/zen_server/dashboard/assets/{page-B6h3iaHJ.js → page-DupV0aBd.js} +1 -1
- zenml/zen_server/dashboard/assets/page-EweAR81y.js +1 -0
- zenml/zen_server/dashboard/assets/{page-MFQyIJd3.js → page-f3jBVI5Z.js} +1 -1
- zenml/zen_server/dashboard/assets/{page-2grKx_MY.js → page-p2hLJdS2.js} +1 -1
- zenml/zen_server/dashboard/assets/page-w-YaL77M.js +9 -0
- zenml/zen_server/dashboard/assets/persist-BReKApOc.js +14 -0
- zenml/zen_server/dashboard/assets/plus-DOeLmm7C.js +1 -0
- zenml/zen_server/dashboard/assets/{stack-detail-query-Cficsl6d.js → stack-detail-query-Ck7j7BP_.js} +1 -1
- zenml/zen_server/dashboard/assets/{update-server-settings-mutation-7d8xi1tS.js → update-server-settings-mutation-f3ZT7psb.js} +1 -1
- zenml/zen_server/dashboard/assets/{url-D7mAQGUM.js → url-rGEp5Umh.js} +1 -1
- zenml/zen_server/dashboard/assets/{zod-BhoGpZ63.js → zod-BtSyGx4C.js} +1 -1
- zenml/zen_server/dashboard/index.html +5 -5
- zenml/zen_server/dashboard_legacy/asset-manifest.json +4 -4
- zenml/zen_server/dashboard_legacy/index.html +1 -1
- zenml/zen_server/dashboard_legacy/{precache-manifest.12246c7548e71e2c4438e496360de80c.js → precache-manifest.2fa6e528a6e7447caaf35dadfe7514bb.js} +4 -4
- zenml/zen_server/dashboard_legacy/service-worker.js +1 -1
- zenml/zen_server/dashboard_legacy/static/js/{main.3b27024b.chunk.js → main.4aab7e98.chunk.js} +2 -2
- zenml/zen_server/dashboard_legacy/static/js/{main.3b27024b.chunk.js.map → main.4aab7e98.chunk.js.map} +1 -1
- zenml/zen_server/deploy/helm/Chart.yaml +1 -1
- zenml/zen_server/deploy/helm/README.md +2 -2
- zenml/zen_server/rbac/models.py +1 -0
- zenml/zen_server/rbac/utils.py +4 -0
- zenml/zen_server/routers/pipeline_builds_endpoints.py +2 -66
- zenml/zen_server/routers/pipeline_deployments_endpoints.py +2 -53
- zenml/zen_server/routers/pipelines_endpoints.py +1 -74
- zenml/zen_server/routers/run_templates_endpoints.py +212 -0
- zenml/zen_server/routers/workspaces_endpoints.py +79 -0
- zenml/zen_server/{pipeline_deployment → template_execution}/runner_entrypoint_configuration.py +1 -8
- zenml/zen_server/{pipeline_deployment → template_execution}/utils.py +214 -92
- zenml/zen_server/utils.py +2 -2
- zenml/zen_server/zen_server_api.py +2 -1
- zenml/zen_stores/migrations/versions/0.63.0_release.py +23 -0
- zenml/zen_stores/migrations/versions/7d1919bb1ef0_add_run_templates.py +100 -0
- zenml/zen_stores/migrations/versions/b59aa68fdb1f_simplify_pipelines.py +139 -0
- zenml/zen_stores/rest_zen_store.py +107 -36
- zenml/zen_stores/schemas/__init__.py +2 -0
- zenml/zen_stores/schemas/pipeline_build_schemas.py +3 -3
- zenml/zen_stores/schemas/pipeline_deployment_schemas.py +29 -2
- zenml/zen_stores/schemas/pipeline_run_schemas.py +26 -3
- zenml/zen_stores/schemas/pipeline_schemas.py +29 -30
- zenml/zen_stores/schemas/run_template_schemas.py +264 -0
- zenml/zen_stores/schemas/step_run_schemas.py +11 -4
- zenml/zen_stores/sql_zen_store.py +364 -150
- zenml/zen_stores/template_utils.py +261 -0
- zenml/zen_stores/zen_store_interface.py +93 -20
- {zenml_nightly-0.62.0.dev20240729.dist-info → zenml_nightly-0.63.0.dev20240731.dist-info}/METADATA +2 -2
- {zenml_nightly-0.62.0.dev20240729.dist-info → zenml_nightly-0.63.0.dev20240731.dist-info}/RECORD +139 -129
- zenml/models/v2/core/pipeline_namespace.py +0 -113
- zenml/new/pipelines/deserialization_utils.py +0 -292
- zenml/zen_server/dashboard/assets/CollapsibleCard-opiuBHHc.js +0 -1
- zenml/zen_server/dashboard/assets/Pagination-C6X-mifw.js +0 -1
- zenml/zen_server/dashboard/assets/index-EpMIKgrI.css +0 -1
- zenml/zen_server/dashboard/assets/index-rK_Wuy2W.js +0 -1
- zenml/zen_server/dashboard/assets/index.esm-Corw4lXQ.js +0 -1
- zenml/zen_server/dashboard/assets/package-B3fWP-Dh.js +0 -1
- zenml/zen_server/dashboard/assets/page-5NCOHOsy.js +0 -1
- zenml/zen_server/dashboard/assets/page-Bq0YxkLV.js +0 -1
- zenml/zen_server/dashboard/assets/page-Bs2F4eoD.js +0 -2
- zenml/zen_server/dashboard/assets/page-CHNxpz3n.js +0 -1
- zenml/zen_server/dashboard/assets/page-DgorQFqi.js +0 -1
- zenml/zen_server/dashboard/assets/page-K8ebxVIs.js +0 -1
- zenml/zen_server/dashboard/assets/page-TgCF0P_U.js +0 -1
- zenml/zen_server/dashboard/assets/page-ZnCEe-eK.js +0 -9
- zenml/zen_server/dashboard/assets/persist-D7HJNBWx.js +0 -1
- zenml/zen_server/dashboard/assets/plus-C8WOyCzt.js +0 -1
- /zenml/zen_server/{pipeline_deployment → template_execution}/__init__.py +0 -0
- /zenml/zen_server/{pipeline_deployment → template_execution}/workload_manager_interface.py +0 -0
- {zenml_nightly-0.62.0.dev20240729.dist-info → zenml_nightly-0.63.0.dev20240731.dist-info}/LICENSE +0 -0
- {zenml_nightly-0.62.0.dev20240729.dist-info → zenml_nightly-0.63.0.dev20240731.dist-info}/WHEEL +0 -0
- {zenml_nightly-0.62.0.dev20240729.dist-info → zenml_nightly-0.63.0.dev20240731.dist-info}/entry_points.txt +0 -0
zenml/enums.py
CHANGED
@@ -338,6 +338,9 @@ class TaggableResourceTypes(StrEnum):
     ARTIFACT_VERSION = "artifact_version"
     MODEL = "model"
     MODEL_VERSION = "model_version"
+    PIPELINE = "pipeline"
+    PIPELINE_RUN = "pipeline_run"
+    RUN_TEMPLATE = "run_template"


 class ResponseUpdateStrategy(StrEnum):
zenml/integrations/aws/orchestrators/sagemaker_orchestrator.py
CHANGED
@@ -20,6 +20,7 @@ from uuid import UUID

 import boto3
 import sagemaker
+from botocore.exceptions import WaiterError
 from sagemaker.network import NetworkConfig
 from sagemaker.processing import ProcessingInput, ProcessingOutput
 from sagemaker.workflow.execution_variables import ExecutionVariables
@@ -373,10 +374,18 @@ class SagemakerOrchestrator(ContainerizedOrchestrator):
            logger.info(
                "Executing synchronously. Waiting for pipeline to finish..."
            )
-
-
-
-
+            try:
+                pipeline_execution.wait(
+                    delay=POLLING_DELAY, max_attempts=MAX_POLLING_ATTEMPTS
+                )
+                logger.info("Pipeline completed successfully.")
+            except WaiterError:
+                raise RuntimeError(
+                    "Timed out while waiting for pipeline execution to finish. For long-running "
+                    "pipelines we recommend configuring your orchestrator for asynchronous execution. "
+                    "The following command does this for you: \n"
+                    f"`zenml orchestrator update {self.name} --synchronous=False`"
+                )

    def _get_region_name(self) -> str:
        """Returns the AWS region name.
zenml/integrations/databricks/flavors/databricks_orchestrator_flavor.py
CHANGED
@@ -20,6 +20,7 @@ from zenml.integrations.databricks import DATABRICKS_ORCHESTRATOR_FLAVOR
 from zenml.logger import get_logger
 from zenml.orchestrators import BaseOrchestratorConfig
 from zenml.orchestrators.base_orchestrator import BaseOrchestratorFlavor
+from zenml.utils.enum_utils import StrEnum
 from zenml.utils.secret_utils import SecretField

 if TYPE_CHECKING:
@@ -27,10 +28,17 @@ if TYPE_CHECKING:
         DatabricksOrchestrator,
     )

-
 logger = get_logger(__name__)


+class DatabricksAvailabilityType(StrEnum):
+    """Databricks availability type."""
+
+    ON_DEMAND = "ON_DEMAND"
+    SPOT = "SPOT"
+    SPOT_WITH_FALLBACK = "SPOT_WITH_FALLBACK"
+
+
 class DatabricksOrchestratorSettings(BaseSettings):
     """Databricks orchestrator base settings.

@@ -53,11 +61,12 @@ class DatabricksOrchestratorSettings(BaseSettings):
     node_type_id: Optional[str] = None
     policy_id: Optional[str] = None
     autotermination_minutes: Optional[int] = None
-    autoscale: Tuple[int, int] = (
+    autoscale: Tuple[int, int] = (0, 1)
     single_user_name: Optional[str] = None
     spark_conf: Optional[Dict[str, str]] = None
     spark_env_vars: Optional[Dict[str, str]] = None
     schedule_timezone: Optional[str] = None
+    availability_type: Optional[DatabricksAvailabilityType] = None


 class DatabricksOrchestratorConfig(
zenml/integrations/databricks/orchestrators/databricks_orchestrator.py
CHANGED
@@ -52,6 +52,7 @@ from zenml.orchestrators.utils import get_orchestrator_run_name
 from zenml.orchestrators.wheeled_orchestrator import WheeledOrchestrator
 from zenml.stack import StackValidator
 from zenml.utils import io_utils
+from zenml.utils.package_utils import clean_requirements
 from zenml.utils.pipeline_docker_image_builder import (
     PipelineDockerImageBuilder,
 )
@@ -229,6 +230,9 @@ class DatabricksOrchestrator(WheeledOrchestrator):
            ValueError: If the schedule is not set or if the cron expression
                is not set.
        """
+        settings = cast(
+            DatabricksOrchestratorSettings, self.get_settings(deployment)
+        )
        if deployment.schedule:
            if (
                deployment.schedule.catchup
@@ -246,7 +250,7 @@ class DatabricksOrchestrator(WheeledOrchestrator):
                )
            if (
                deployment.schedule.cron_expression
-                and
+                and settings.schedule_timezone is None
            ):
                raise ValueError(
                    "Property `schedule_timezone` must be set when passing
@@ -321,7 +325,7 @@ class DatabricksOrchestrator(WheeledOrchestrator):
                f"{deployment_id}_{step_name}",
                ZENML_STEP_DEFAULT_ENTRYPOINT_COMMAND,
                arguments,
-                requirements,
+                clean_requirements(requirements),
                depends_on=upstream_steps,
                zenml_project_wheel=zenml_project_wheel,
                job_cluster_key=job_cluster_key,
@@ -366,7 +370,7 @@ class DatabricksOrchestrator(WheeledOrchestrator):

        # Construct the env variables for the pipeline
        env_vars = environment.copy()
-        spark_env_vars =
+        spark_env_vars = settings.spark_env_vars
        if spark_env_vars:
            for key, value in spark_env_vars.items():
                env_vars[key] = value
@@ -385,6 +389,7 @@ class DatabricksOrchestrator(WheeledOrchestrator):
        job_cluster_key = self.sanitize_name(f"{deployment_id}")
        self._upload_and_run_pipeline(
            pipeline_name=orchestrator_run_name,
+            settings=settings,
            tasks=_construct_databricks_pipeline(
                databricks_wheel_path, job_cluster_key
            ),
@@ -396,6 +401,7 @@ class DatabricksOrchestrator(WheeledOrchestrator):
    def _upload_and_run_pipeline(
        self,
        pipeline_name: str,
+        settings: DatabricksOrchestratorSettings,
        tasks: List[DatabricksTask],
        env_vars: Dict[str, str],
        job_cluster_key: str,
@@ -409,6 +415,7 @@ class DatabricksOrchestrator(WheeledOrchestrator):
            env_vars: The environment variables.
            job_cluster_key: The ID of the Databricks job cluster.
            schedule: The schedule to run the pipeline
+            settings: The settings for the Databricks orchestrator.

        Raises:
            ValueError: If the `Job Compute` policy is not found.
@@ -416,12 +423,12 @@ class DatabricksOrchestrator(WheeledOrchestrator):

        """
        databricks_client = self._get_databricks_client()
-        spark_conf =
+        spark_conf = settings.spark_conf or {}
        spark_conf[
            "spark.databricks.driver.dbfsLibraryInstallationAllowed"
        ] = "true"

-        policy_id =
+        policy_id = settings.policy_id or None
        for policy in databricks_client.cluster_policies.list():
            if policy.name == "Job Compute":
                policy_id = policy.policy_id
@@ -432,17 +439,16 @@ class DatabricksOrchestrator(WheeledOrchestrator):
        job_cluster = JobCluster(
            job_cluster_key=job_cluster_key,
            new_cluster=ClusterSpec(
-                spark_version=
+                spark_version=settings.spark_version
                or DATABRICKS_SPARK_DEFAULT_VERSION,
-                num_workers=
-                node_type_id=
-                or "Standard_D4s_v5",
+                num_workers=settings.num_workers,
+                node_type_id=settings.node_type_id or "Standard_D4s_v5",
                policy_id=policy_id,
                autoscale=AutoScale(
-                    min_workers=
-                    max_workers=
+                    min_workers=settings.autoscale[0],
+                    max_workers=settings.autoscale[1],
                ),
-                single_user_name=
+                single_user_name=settings.single_user_name,
                spark_env_vars=env_vars,
                spark_conf=spark_conf,
                workload_type=WorkloadType(
@@ -451,7 +457,7 @@ class DatabricksOrchestrator(WheeledOrchestrator):
            ),
        )
        if schedule and schedule.cron_expression:
-            schedule_timezone =
+            schedule_timezone = settings.schedule_timezone
            if schedule_timezone:
                databricks_schedule = CronSchedule(
                    quartz_cron_expression=schedule.cron_expression,
zenml/models/__init__.py
CHANGED
@@ -199,12 +199,7 @@ from zenml.models.v2.core.pipeline import (
     PipelineResponse,
     PipelineResponseBody,
     PipelineResponseMetadata,
-
-from zenml.models.v2.core.pipeline_namespace import (
-    PipelineNamespaceResponseBody,
-    PipelineNamespaceResponseMetadata,
-    PipelineNamespaceResponse,
-    PipelineNamespaceFilter,
+    PipelineResponseResources
 )
 from zenml.models.v2.core.pipeline_build import (
     PipelineBuildBase,
@@ -230,6 +225,16 @@ from zenml.models.v2.core.pipeline_run import (
     PipelineRunResponse,
     PipelineRunResponseBody,
     PipelineRunResponseMetadata,
+    PipelineRunResponseResources
+)
+from zenml.models.v2.core.run_template import (
+    RunTemplateRequest,
+    RunTemplateUpdate,
+    RunTemplateResponse,
+    RunTemplateResponseBody,
+    RunTemplateResponseMetadata,
+    RunTemplateResponseResources,
+    RunTemplateFilter,
 )
 from zenml.models.v2.base.base_plugin_flavor import BasePluginFlavorResponse
 from zenml.models.v2.core.run_metadata import (
@@ -424,6 +429,7 @@ ModelVersionPipelineRunResponseBody.model_rebuild()
 OAuthDeviceResponseBody.model_rebuild()
 PipelineResponseBody.model_rebuild()
 PipelineResponseMetadata.model_rebuild()
+PipelineResponseResources.model_rebuild()
 PipelineBuildBase.model_rebuild()
 PipelineBuildResponseBody.model_rebuild()
 PipelineBuildResponseMetadata.model_rebuild()
@@ -433,6 +439,11 @@ PipelineDeploymentResponseMetadata.model_rebuild()
 PipelineDeploymentResponseResources.model_rebuild()
 PipelineRunResponseBody.model_rebuild()
 PipelineRunResponseMetadata.model_rebuild()
+PipelineRunResponseResources.model_rebuild()
+RunTemplateResponseBody.model_rebuild()
+RunTemplateResponseMetadata.model_rebuild()
+RunTemplateResponseResources.model_rebuild()
+RunTemplateResponseBody.model_rebuild()
 RunMetadataResponseBody.model_rebuild()
 RunMetadataResponseMetadata.model_rebuild()
 ScheduleResponseBody.model_rebuild()
@@ -590,10 +601,7 @@ __all__ = [
     "PipelineResponse",
     "PipelineResponseBody",
     "PipelineResponseMetadata",
-    "
-    "PipelineNamespaceResponse",
-    "PipelineNamespaceResponseBody",
-    "PipelineNamespaceResponseMetadata",
+    "PipelineResponseResources",
     "PipelineBuildBase",
     "PipelineBuildRequest",
     "PipelineBuildFilter",
@@ -612,6 +620,14 @@ __all__ = [
     "PipelineRunResponse",
     "PipelineRunResponseBody",
     "PipelineRunResponseMetadata",
+    "PipelineRunResponseResources",
+    "RunTemplateRequest",
+    "RunTemplateUpdate",
+    "RunTemplateResponse",
+    "RunTemplateResponseBody",
+    "RunTemplateResponseMetadata",
+    "RunTemplateResponseResources",
+    "RunTemplateFilter",
     "RunMetadataRequest",
     "RunMetadataFilter",
     "RunMetadataResponse",
zenml/models/v2/base/filter.py
CHANGED
@@ -36,6 +36,7 @@ from pydantic import (
     field_validator,
     model_validator,
 )
+from sqlalchemy import asc, desc
 from sqlmodel import SQLModel

 from zenml.constants import (
@@ -267,6 +268,7 @@ class BaseFilter(BaseModel):
         "size",
         "logical_operator",
     ]
+    CUSTOM_SORTING_OPTIONS: ClassVar[List[str]] = []

     # List of fields that are not even mentioned as options in the CLI.
     CLI_EXCLUDE_FIELDS: ClassVar[List[str]] = []
@@ -352,6 +354,8 @@ class BaseFilter(BaseModel):
            )
        elif column in cls.model_fields:
            return value
+        elif column in cls.CUSTOM_SORTING_OPTIONS:
+            return value
        else:
            raise ValueError(
                "You can only sort by valid fields of this resource"
@@ -861,3 +865,31 @@ class BaseFilter(BaseModel):
            query = query.where(filters)

        return query
+
+    def apply_sorting(
+        self,
+        query: AnyQuery,
+        table: Type["AnySchema"],
+    ) -> AnyQuery:
+        """Apply sorting to the query.
+
+        Args:
+            query: The query to which to apply the sorting.
+            table: The query table.
+
+        Returns:
+            The query with sorting applied.
+        """
+        column, operand = self.sorting_params
+
+        if operand == SorterOps.DESCENDING:
+            sort_clause = desc(getattr(table, column))  # type: ignore[var-annotated]
+        else:
+            sort_clause = asc(getattr(table, column))
+
+        # We always add the `id` column as a tiebreaker to ensure a stable,
+        # repeatable order of items, otherwise subsequent pages might contain
+        # the same items.
+        query = query.order_by(sort_clause, asc(table.id))  # type: ignore[arg-type]
+
+        return query
zenml/models/v2/core/pipeline.py
CHANGED
@@ -13,27 +13,35 @@
 # permissions and limitations under the License.
 """Models representing pipelines."""

-from typing import TYPE_CHECKING, Any, List, Optional, Union
+from typing import TYPE_CHECKING, Any, List, Optional, Type, TypeVar, Union
 from uuid import UUID

 from pydantic import Field

-from zenml.
-
+from zenml.constants import (
+    SORT_PIPELINES_BY_LATEST_RUN_KEY,
+    STR_FIELD_MAX_LENGTH,
+    TEXT_FIELD_MAX_LENGTH,
+)
 from zenml.enums import ExecutionStatus
 from zenml.models.v2.base.base import BaseUpdate
 from zenml.models.v2.base.scoped import (
-    WorkspaceScopedFilter,
     WorkspaceScopedRequest,
     WorkspaceScopedResponse,
     WorkspaceScopedResponseBody,
     WorkspaceScopedResponseMetadata,
     WorkspaceScopedResponseResources,
+    WorkspaceScopedTaggableFilter,
 )
+from zenml.models.v2.core.tag import TagResponse

 if TYPE_CHECKING:
     from zenml.models.v2.core.pipeline_run import PipelineRunResponse
+    from zenml.zen_stores.schemas import BaseSchema
+
+    AnySchema = TypeVar("AnySchema", bound=BaseSchema)

+AnyQuery = TypeVar("AnyQuery", bound=Any)

 # ------------------ Request Model ------------------

@@ -45,20 +53,15 @@ class PipelineRequest(WorkspaceScopedRequest):
         title="The name of the pipeline.",
         max_length=STR_FIELD_MAX_LENGTH,
     )
-
-
-
-    )
-    version_hash: str = Field(
-        title="The version hash of the pipeline.",
-        max_length=STR_FIELD_MAX_LENGTH,
-    )
-    docstring: Optional[str] = Field(
-        title="The docstring of the pipeline.",
+    description: Optional[str] = Field(
+        default=None,
+        title="The description of the pipeline.",
         max_length=TEXT_FIELD_MAX_LENGTH,
+    )
+    tags: Optional[List[str]] = Field(
         default=None,
+        title="Tags of the pipeline.",
     )
-    spec: PipelineSpec = Field(title="The spec of the pipeline.")


 # ------------------ Update Model ------------------
@@ -67,29 +70,16 @@ class PipelineRequest(WorkspaceScopedRequest):
 class PipelineUpdate(BaseUpdate):
     """Update model for pipelines."""

-
-        title="The name of the pipeline.",
-        max_length=STR_FIELD_MAX_LENGTH,
-        default=None,
-    )
-    version: Optional[str] = Field(
-        title="The version of the pipeline.",
-        max_length=STR_FIELD_MAX_LENGTH,
+    description: Optional[str] = Field(
         default=None,
-
-    version_hash: Optional[str] = Field(
-        title="The version hash of the pipeline.",
-        max_length=STR_FIELD_MAX_LENGTH,
-        default=None,
-    )
-    docstring: Optional[str] = Field(
-        title="The docstring of the pipeline.",
+        title="The description of the pipeline.",
         max_length=TEXT_FIELD_MAX_LENGTH,
-        default=None,
     )
-
-        title="
-
+    add_tags: Optional[List[str]] = Field(
+        default=None, title="New tags to add to the pipeline."
+    )
+    remove_tags: Optional[List[str]] = Field(
+        default=None, title="Tags to remove from the pipeline."
     )


@@ -99,33 +89,32 @@ class PipelineUpdate(BaseUpdate):
 class PipelineResponseBody(WorkspaceScopedResponseBody):
     """Response body for pipelines."""

-
-        default=None,
+    latest_run_id: Optional[UUID] = Field(
+        default=None,
+        title="The ID of the latest run of the pipeline.",
     )
-
-
-
+    latest_run_status: Optional[ExecutionStatus] = Field(
+        default=None,
+        title="The status of the latest run of the pipeline.",
     )


 class PipelineResponseMetadata(WorkspaceScopedResponseMetadata):
     """Response metadata for pipelines."""

-
-        title="The version hash of the pipeline.",
-        max_length=STR_FIELD_MAX_LENGTH,
-    )
-    spec: PipelineSpec = Field(title="The spec of the pipeline.")
-    docstring: Optional[str] = Field(
-        title="The docstring of the pipeline.",
-        max_length=TEXT_FIELD_MAX_LENGTH,
+    description: Optional[str] = Field(
         default=None,
+        title="The description of the pipeline.",
     )


 class PipelineResponseResources(WorkspaceScopedResponseResources):
     """Class for all resource models associated with the pipeline entity."""

+    tags: List[TagResponse] = Field(
+        title="Tags associated with the pipeline.",
+    )
+

 class PipelineResponse(
     WorkspaceScopedResponse[
@@ -224,75 +213,46 @@ class PipelineResponse(
         )
         return runs[0]

-    # Body and metadata properties
-    @property
-    def status(self) -> Optional[List[ExecutionStatus]]:
-        """The `status` property.
-
-        Returns:
-            the value of the property.
-        """
-        return self.get_body().status
-
-    @property
-    def version(self) -> str:
-        """The `version` property.
-
-        Returns:
-            the value of the property.
-        """
-        return self.get_body().version
-
     @property
-    def
-        """The `
+    def latest_run_id(self) -> Optional[UUID]:
+        """The `latest_run_id` property.

         Returns:
             the value of the property.
         """
-        return self.
+        return self.get_body().latest_run_id

     @property
-    def
-        """The `
+    def latest_run_status(self) -> Optional[ExecutionStatus]:
+        """The `latest_run_status` property.

         Returns:
             the value of the property.
         """
-        return self.
+        return self.get_body().latest_run_status

     @property
-    def
-        """The `
+    def tags(self) -> List[TagResponse]:
+        """The `tags` property.

         Returns:
             the value of the property.
         """
-        return self.
+        return self.get_resources().tags


 # ------------------ Filter Model ------------------


-class PipelineFilter(
+class PipelineFilter(WorkspaceScopedTaggableFilter):
     """Pipeline filter model."""

+    CUSTOM_SORTING_OPTIONS = [SORT_PIPELINES_BY_LATEST_RUN_KEY]
+
     name: Optional[str] = Field(
         default=None,
         description="Name of the Pipeline",
     )
-    version: Optional[str] = Field(
-        default=None,
-        description="Version of the Pipeline",
-    )
-    version_hash: Optional[str] = Field(
-        default=None,
-        description="Version hash of the Pipeline",
-    )
-    docstring: Optional[str] = Field(
-        default=None,
-        description="Docstring of the Pipeline",
-    )
     workspace_id: Optional[Union[UUID, str]] = Field(
         default=None,
         description="Workspace of the Pipeline",
@@ -303,3 +263,27 @@ class PipelineFilter(WorkspaceScopedFilter):
         description="User of the Pipeline",
         union_mode="left_to_right",
     )
+
+    def apply_sorting(
+        self,
+        query: AnyQuery,
+        table: Type["AnySchema"],
+    ) -> AnyQuery:
+        """Apply sorting to the query.
+
+        Args:
+            query: The query to which to apply the sorting.
+            table: The query table.
+
+        Returns:
+            The query with sorting applied.
+        """
+        column, _ = self.sorting_params
+
+        if column == SORT_PIPELINES_BY_LATEST_RUN_KEY:
+            # If sorting by the latest run, the sorting is already done in the
+            # base query in `SqlZenStore.list_pipelines(...)` and we don't need
+            # to to anything here
+            return query
+        else:
+            return super().apply_sorting(query=query, table=table)
zenml/models/v2/core/pipeline_build.py
CHANGED
@@ -157,6 +157,9 @@ class PipelineBuildRequest(PipelineBuildBase, WorkspaceScopedRequest):
     """Request model for pipelines builds."""

     checksum: Optional[str] = Field(title="The build checksum.", default=None)
+    stack_checksum: Optional[str] = Field(
+        title="The stack checksum.", default=None
+    )

     stack: Optional[UUID] = Field(
         title="The stack that was used for this build.", default=None
@@ -164,7 +167,6 @@ class PipelineBuildRequest(PipelineBuildBase, WorkspaceScopedRequest):
     pipeline: Optional[UUID] = Field(
         title="The pipeline that was used for this build.", default=None
     )
-    template_deployment_id: Optional[UUID] = None


 # ------------------ Update Model ------------------
@@ -196,6 +198,9 @@ class PipelineBuildResponseMetadata(WorkspaceScopedResponseMetadata):
         default=None, title="The Python version used for this build."
     )
     checksum: Optional[str] = Field(default=None, title="The build checksum.")
+    stack_checksum: Optional[str] = Field(
+        default=None, title="The stack checksum."
+    )
     is_local: bool = Field(
         title="Whether the build images are stored in a container "
         "registry or locally.",
@@ -203,7 +208,6 @@ class PipelineBuildResponseMetadata(WorkspaceScopedResponseMetadata):
     contains_code: bool = Field(
         title="Whether any image of the build contains user code.",
     )
-    template_deployment_id: Optional[UUID] = None


 class PipelineBuildResponseResources(WorkspaceScopedResponseResources):
@@ -409,31 +413,31 @@ class PipelineBuildResponse(
         return self.get_metadata().checksum

     @property
-    def
-        """The `
+    def stack_checksum(self) -> Optional[str]:
+        """The `stack_checksum` property.

         Returns:
             the value of the property.
         """
-        return self.get_metadata().
+        return self.get_metadata().stack_checksum

     @property
-    def
-        """The `
+    def is_local(self) -> bool:
+        """The `is_local` property.

         Returns:
             the value of the property.
         """
-        return self.get_metadata().
+        return self.get_metadata().is_local

     @property
-    def
-        """The `
+    def contains_code(self) -> bool:
+        """The `contains_code` property.

         Returns:
             the value of the property.
         """
-        return self.get_metadata().
+        return self.get_metadata().contains_code


 # ------------------ Filter Model ------------------
|